code stringlengths 3 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1 value | license stringclasses 15 values | size int64 3 1.05M |
|---|---|---|---|---|---|
#!/usr/bin/env python
import urllib
import urllib2
import re
import os
import sys
import time
# upload('http://www.mywebsite.com:8080/upload.php', {}, 'file', os.path.join('/home/john/', 'a.txt'))
def upload(http_url, form_params, file_item_name, file_path):
boundary = '-----------------%s' % hex(int(time.time() * 1000))
crlf = '\r\n'
separator = '--%s' % boundary
file_type = 'application/octet-stream'
data = []
for key in form_params.keys():
value = form_params[key]
data.append(separator)
data.append('Content-Disposition: form-data; name="%s"%s' % (key, crlf))
data.append(value)
data.append(separator)
data.append('Content-Disposition: form-data; name="%s"; filename="%s"' % (file_item_name, os.path.basename(file_path)))
data.append('Content-Type: %s%s' % (file_type, crlf))
file_res = open(file_path)
data.append(file_res.read())
file_res.close()
data.append('%s--%s' % (separator, crlf))
http_body = crlf.join(data)
req = urllib2.Request(http_url, data=http_body)
req.add_header('Content-Type', 'multipart/form-data; boundary=%s' % boundary)
req.add_header('Connection', 'Keep-Alive')
resp = urllib2.urlopen(req, timeout=30)
print resp.read()
# unzip('/home/john/a.zip', '/home/john/', True)
def unzip(zip_path, extract_dir, delete_zip_on_extracted):
    """Extract zip_path into extract_dir, preserving unix file permissions.

    Entries containing '..' are skipped (zip-slip protection); leading '/'
    is stripped. If delete_zip_on_extracted is true, the archive is removed
    after a successful extraction.
    """
    import zipfile
    # extractall() is not used because it loses the unix file permission
    # bits stored in each entry's external attributes:
    # zip_files = zipfile.ZipFile(zip_path, 'r')
    # zip_files.extractall(extract_dir)
    # zip_files.close()
    if not zipfile.is_zipfile(zip_path):
        print "%s is not a zip file" % zip_path
        exit(0)
    z = zipfile.ZipFile(zip_path)
    try:
        for info in z.infolist():
            name = info.filename
            if '..' in name:
                # skip potentially malicious path-traversal entries
                continue
            if name.startswith('/'):
                name = name[1:]
            # Re-join with the local os separator so archives built on one
            # platform extract correctly on another.
            target = os.path.join(extract_dir, *name.split('/'))
            if not target:
                continue
            if name.endswith('/'):  # directory entry
                dirname = os.path.dirname(target)
                if not os.path.isdir(dirname):
                    os.makedirs(dirname)
            else:  # file entry
                dirname = os.path.dirname(target)
                if not os.path.isdir(dirname):
                    os.makedirs(dirname)
                data = z.read(info.filename)
                f = open(target, 'wb')
                try:
                    f.write(data)
                finally:
                    f.close()
                # free the (possibly large) decompressed buffer promptly
                del data
                # The upper 16 bits of external_attr hold the unix mode bits
                # when the archive was created on a unix host; 0 means "not
                # recorded", in which case the default mode is kept.
                unix_attributes = info.external_attr >> 16
                if unix_attributes:
                    os.chmod(target, unix_attributes)
    finally:
        z.close()
    if delete_zip_on_extracted:
        os.remove(zip_path)
# 20161201120909
def get_curr_date_str():
    """Return the current local time formatted as 'YYYYmmddHHMMSS'."""
    # strftime() formats the current local time when no struct_time is given.
    return time.strftime('%Y%m%d%H%M%S')
# 20161201120909
def is_valid_date_str(date_str):
    """Return True if date_str is a valid 'YYYYmmddHHMMSS' timestamp string."""
    try:
        time.strptime(date_str, '%Y%m%d%H%M%S')
        return True
    except ValueError, e:  # Python 2 except syntax
        # best-effort diagnostics; the caller only sees the boolean result
        print e
        return False
def remove_dir(top_dir):
    """Recursively delete top_dir and everything beneath it.

    Silently does nothing when top_dir does not exist.
    """
    if not os.path.exists(top_dir):
        return
    # Bottom-up walk: files first, then the (now empty) directories.
    for root, sub_dirs, file_names in os.walk(top_dir, topdown=False):
        for file_name in file_names:
            os.remove(os.path.join(root, file_name))
        for sub_dir in sub_dirs:
            os.rmdir(os.path.join(root, sub_dir))
    os.rmdir(top_dir)
def delete_file(src):
    """Delete src: a single file directly, a directory tree recursively.

    A path that is neither an existing file nor directory is a no-op.
    """
    if os.path.isdir(src):
        for entry in os.listdir(src):
            delete_file(os.path.join(src, entry))
        os.rmdir(src)
    elif os.path.isfile(src):
        os.remove(src)
# # logcat.dump.20160503082219.log
# pattern = re.compile(r'^logcat\.dump\.(\d\d\d\d\d\d\d\d\d\d\d\d\d\d)\.log$')
# def compare_file_index(a, b):
# a_num = int(pattern.match(a).group(1))
# b_num = int(pattern.match(b).group(1))
# if a_num > b_num:
# return 1
# elif a_num < b_num:
# return -1
# else:
# return 0
# merge_files('./logs/', pattern, compare_file_index)
def merge_files(folder, pattern, compare_file_index):
    """Concatenate every file in `folder` whose name matches `pattern` into
    the first file (per `compare_file_index` ordering), deleting the rest.

    :param pattern: compiled regex applied to bare file names
    :param compare_file_index: Python 2 cmp-style function defining the order
    :return: path of the merged output file
    """
    print 'merge all files ...'
    file_list = []
    for parent, dir_names, file_names in os.walk(folder):
        for file_name in file_names:
            if pattern.match(file_name):
                file_list.append(file_name)
    # Python 2 only: list.sort(cmp=...) was removed in Python 3.
    file_list.sort(cmp=compare_file_index)
    # NOTE(review): raises IndexError when no file matches — confirm callers
    # guarantee at least one match.
    output_path = os.path.join(folder, file_list[0])
    output_fd = open(output_path, mode='a')
    for log_file in file_list[1:]:
        log_path = os.path.join(folder, log_file)
        input_fd = open(log_path)
        data = input_fd.read()
        output_fd.write(data)
        output_fd.flush()
        input_fd.close()
        # free the buffer before loading the next (possibly large) file
        del data
        os.remove(log_path)
    output_fd.close()
    return output_path
def fetch_url_with_line(req_url):
    """Fetch req_url (30s timeout) and return the response body split into
    a list of lines (line terminators stripped)."""
    response = urllib2.urlopen(urllib2.Request(req_url), timeout=30)
    return response.read().splitlines()
# download(['http://www.mywebsite.com:8080/download.php?file=a.zip'], './zips/', ['a.zip'])
def download(urls, folder, file_names):
    """Download each URL in `urls` into `folder`.

    `urls` and `file_names` are parallel lists: urls[i] is saved under
    file_names[i]. The folder is created when missing.
    """
    if not os.path.exists(folder):
        os.makedirs(folder)
    for idx, url in enumerate(urls):
        print 'downloading ' + url
        file_path = os.path.join(folder, file_names[idx])
        urllib.urlretrieve(url, file_path)
# def flat_map_each_file(file_name, file_path, file_ext):
# print 'file path is ' + file_path + ", including file name: " \
# + file_name + ", " + file_ext + " is filename extension"
# iter_files('/home/john/logs/', flat_map_each_file)
def iter_files(top_folder, flat_map_each_file):
    """Walk top_folder recursively and invoke
    flat_map_each_file(file_name, file_path, file_ext) for every file found."""
    for parent, _sub_dirs, names in os.walk(top_folder):
        for name in names:
            path = os.path.join(parent, name)
            _base, ext = os.path.splitext(path)
            flat_map_each_file(name, path, ext)
def platform_name():
    """Map sys.platform to a friendly OS name.

    :return: 'macosx', 'linux', 'windows', or '' for unknown platforms.
    """
    if sys.platform == 'darwin':
        # must be tested before the 'win' substring check: 'darwin'
        # itself contains 'win'
        return 'macosx'
    elif sys.platform.startswith('linux'):
        # FIX: accept both 'linux2' (Python 2) and 'linux' (Python 3.3+)
        return 'linux'
    elif sys.platform.find('win') >= 0:
        return 'windows'
    else:
        return ''
def binary(name):
    """Return how to invoke an executable called `name` on this platform:
    './name' on posix, 'name.exe' on Windows (nt), 'name' otherwise."""
    if os.name == 'nt':
        return name + '.exe'
    if os.name == 'posix':
        return './' + name
    return name
| johnlee175/LogcatFileReader | examples/simple_utils.py | Python | apache-2.0 | 6,336 |
# -*- coding: utf-8 -*-
# Copyright (C) 2006-2007 Søren Roug, European Environment Agency
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Contributor(s):
#
from namespaces import PRESENTATIONNS
from element import Element
# ODF 1.0 section 9.6 and 9.7
# Autogenerated
# Autogenerated element factories: each builds one presentation:* ODF element
# and differs only in the qualified name forwarded to Element.
def AnimationGroup(**args):
    return Element(qname = (PRESENTATIONNS,'animation-group'), **args)
def Animations(**args):
    return Element(qname = (PRESENTATIONNS,'animations'), **args)
def DateTime(**args):
    return Element(qname = (PRESENTATIONNS,'date-time'), **args)
def DateTimeDecl(**args):
    return Element(qname = (PRESENTATIONNS,'date-time-decl'), **args)
def Dim(**args):
    return Element(qname = (PRESENTATIONNS,'dim'), **args)
def EventListener(**args):
    return Element(qname = (PRESENTATIONNS,'event-listener'), **args)
def Footer(**args):
    return Element(qname = (PRESENTATIONNS,'footer'), **args)
def FooterDecl(**args):
    return Element(qname = (PRESENTATIONNS,'footer-decl'), **args)
def Header(**args):
    return Element(qname = (PRESENTATIONNS,'header'), **args)
def HeaderDecl(**args):
    return Element(qname = (PRESENTATIONNS,'header-decl'), **args)
def HideShape(**args):
    return Element(qname = (PRESENTATIONNS,'hide-shape'), **args)
def HideText(**args):
    return Element(qname = (PRESENTATIONNS,'hide-text'), **args)
def Notes(**args):
    return Element(qname = (PRESENTATIONNS,'notes'), **args)
def Placeholder(**args):
    return Element(qname = (PRESENTATIONNS,'placeholder'), **args)
def Play(**args):
    return Element(qname = (PRESENTATIONNS,'play'), **args)
def Settings(**args):
    return Element(qname = (PRESENTATIONNS,'settings'), **args)
def Show(**args):
    return Element(qname = (PRESENTATIONNS,'show'), **args)
def ShowShape(**args):
    return Element(qname = (PRESENTATIONNS,'show-shape'), **args)
def ShowText(**args):
    return Element(qname = (PRESENTATIONNS,'show-text'), **args)
def Sound(**args):
    # presentation:sound defaults its xlink type attribute to 'simple'
    args.setdefault('type', 'simple')
    return Element(qname = (PRESENTATIONNS,'sound'), **args)
| pacoqueen/odfpy | odf/presentation.py | Python | gpl-2.0 | 2,752 |
from django.conf.urls import patterns, url
# Old-style (pre-Django-1.10) URLconf: routes
# /<student_id>/<course_id>/<item_id> to the named view in
# openassessment.assessment.views. Path segments may not contain '/'.
urlpatterns = patterns(
    'openassessment.assessment.views',
    url(
        r'^(?P<student_id>[^/]+)/(?P<course_id>[^/]+)/(?P<item_id>[^/]+)$',
        'get_evaluations_for_student_item'
    ),
)
| devs1991/test_edx_docmode | venv/lib/python2.7/site-packages/openassessment/assessment/urls.py | Python | agpl-3.0 | 244 |
import random
import string
from django.conf import settings
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.utils.html import escape
from django.utils.safestring import SafeData, mark_safe
from selectable.base import ModelLookup
from selectable.tests import Thing
# Public test cases exported from this module (picked up by star imports
# in the suite aggregator).
__all__ = (
    'ModelLookupTestCase',
    'MultiFieldLookupTestCase',
    'LookupEscapingTestCase',
)
class PatchSettingsMixin(object):
    """TestCase mixin that pins settings.SELECTABLE_MAX_LIMIT to 25 per test
    and restores any pre-existing value in tearDown."""
    def setUp(self):
        super(PatchSettingsMixin, self).setUp()
        # remember whether the project defined its own limit so tearDown
        # can put it back
        self.is_limit_set = hasattr(settings, 'SELECTABLE_MAX_LIMIT')
        if self.is_limit_set:
            self.original_limit = settings.SELECTABLE_MAX_LIMIT
        settings.SELECTABLE_MAX_LIMIT = 25
    def tearDown(self):
        super(PatchSettingsMixin, self).tearDown()
        # NOTE(review): if the setting was absent before setUp, the patched
        # value 25 is left behind after the test — confirm this is intended.
        if self.is_limit_set:
            settings.SELECTABLE_MAX_LIMIT = self.original_limit
class BaseSelectableTestCase(TestCase):
    """Shared helpers for the selectable test suite (random strings and
    Thing factory). Python 2 only (uses xrange and u'' joins)."""
    urls = 'selectable.tests.urls'
    def get_random_string(self, length=10):
        # random ASCII letters; not cryptographically secure (test data only)
        return u''.join(random.choice(string.ascii_letters) for x in xrange(length))
    def create_thing(self, data=None):
        """Create a Thing with random name/description, overridable via data."""
        data = data or {}
        defaults = {
            'name': self.get_random_string(),
            'description': self.get_random_string(),
        }
        defaults.update(data)
        return Thing.objects.create(**defaults)
class SimpleModelLookup(ModelLookup):
    """Minimal lookup over Thing, searching the name field only."""
    model = Thing
    search_fields = ('name__icontains', )
class ModelLookupTestCase(BaseSelectableTestCase):
    """Exercises the core ModelLookup API: naming, url reversal, item
    formatting/escaping, querying, and item creation/retrieval."""
    lookup_cls = SimpleModelLookup
    def get_lookup_instance(self):
        return self.__class__.lookup_cls()
    def test_get_name(self):
        name = self.__class__.lookup_cls.name()
        self.assertEqual(name, 'tests-simplemodellookup')
    def test_get_url(self):
        url = self.__class__.lookup_cls.url()
        test_url = reverse('selectable-lookup', args=['tests-simplemodellookup'])
        self.assertEqual(url, test_url)
    def test_format_item(self):
        # format_item must expose the three keys the JS widget consumes
        lookup = self.get_lookup_instance()
        thing = Thing()
        item_info = lookup.format_item(thing)
        self.assertTrue('id' in item_info)
        self.assertTrue('value' in item_info)
        self.assertTrue('label' in item_info)
    def test_get_query(self):
        # only items matching the term should be returned
        lookup = self.get_lookup_instance()
        thing = self.create_thing(data={'name': 'Thing'})
        other_thing = self.create_thing(data={'name': 'Other Thing'})
        qs = lookup.get_query(request=None, term='other')
        self.assertTrue(thing.pk not in qs.values_list('id', flat=True))
        self.assertTrue(other_thing.pk in qs.values_list('id', flat=True))
    def test_create_item(self):
        # create_item builds an unsaved instance (no pk yet)
        value = self.get_random_string()
        lookup = self.get_lookup_instance()
        thing = lookup.create_item(value)
        self.assertEqual(thing.__class__, Thing)
        self.assertEqual(thing.name, value)
        self.assertFalse(thing.pk)
    def test_get_item(self):
        lookup = self.get_lookup_instance()
        thing = self.create_thing(data={'name': 'Thing'})
        item = lookup.get_item(thing.pk)
        self.assertEqual(thing, item)
    def test_format_item_escaping(self):
        "Id, value and label should be escaped."
        lookup = self.get_lookup_instance()
        thing = self.create_thing(data={'name': 'Thing'})
        item_info = lookup.format_item(thing)
        self.assertTrue(isinstance(item_info['id'], SafeData))
        self.assertTrue(isinstance(item_info['value'], SafeData))
        self.assertTrue(isinstance(item_info['label'], SafeData))
class MultiFieldLookup(ModelLookup):
    """Lookup over Thing searching both name and description fields."""
    model = Thing
    search_fields = ('name__icontains', 'description__icontains', )
class MultiFieldLookupTestCase(ModelLookupTestCase):
    """Re-runs the ModelLookupTestCase suite against MultiFieldLookup and
    additionally checks that the description field is searched."""
    lookup_cls = MultiFieldLookup
    def test_get_name(self):
        name = self.__class__.lookup_cls.name()
        self.assertEqual(name, 'tests-multifieldlookup')
    def test_get_url(self):
        url = self.__class__.lookup_cls.url()
        test_url = reverse('selectable-lookup', args=['tests-multifieldlookup'])
        self.assertEqual(url, test_url)
    def test_description_search(self):
        # the second search field (description) must also match terms
        lookup = self.get_lookup_instance()
        thing = self.create_thing(data={'description': 'Thing'})
        other_thing = self.create_thing(data={'description': 'Other Thing'})
        qs = lookup.get_query(request=None, term='other')
        self.assertTrue(thing.pk not in qs.values_list('id', flat=True))
        self.assertTrue(other_thing.pk in qs.values_list('id', flat=True))
class HTMLLookup(ModelLookup):
    """Lookup used to test HTML escaping: the value is NOT marked safe
    (so it gets escaped), while the label is (so it passes through)."""
    model = Thing
    search_fields = ('name__icontains', )
    def get_item_value(self, item):
        "Not marked as safe."
        return item.name
    def get_item_label(self, item):
        "Mark label as safe."
        return mark_safe(item.name)
class LookupEscapingTestCase(BaseSelectableTestCase):
    """Verifies format_item escaping rules: unmarked strings are escaped,
    mark_safe output is passed through untouched."""
    lookup_cls = HTMLLookup
    def get_lookup_instance(self):
        return self.__class__.lookup_cls()
    def test_escape_html(self):
        "HTML should be escaped by default."
        lookup = self.get_lookup_instance()
        bad_name = "<script>alert('hacked');</script>"
        escaped_name = escape(bad_name)
        thing = self.create_thing(data={'name': bad_name})
        item_info = lookup.format_item(thing)
        self.assertEqual(item_info['value'], escaped_name)
    def test_conditional_escape(self):
        "Methods should be able to mark values as safe."
        lookup = self.get_lookup_instance()
        bad_name = "<script>alert('hacked');</script>"
        escaped_name = escape(bad_name)
        thing = self.create_thing(data={'name': bad_name})
        item_info = lookup.format_item(thing)
        self.assertEqual(item_info['label'], bad_name)
| hzlf/openbroadcast | website/__old_versions/selectable/tests/base.py | Python | gpl-3.0 | 5,805 |
import pytest
import json
import os.path
import importlib
import jsonpickle
from fixture.application import Application
from fixture.db import DbFixture
fixture = None
target = None
def load_config(file):
    """Load the JSON config file once and cache it in the module-global
    `target`; the path is resolved relative to this module's directory."""
    global target
    if target is None:
        config_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), file)
        with open(config_path) as fh:
            target = json.load(fh)
    return target
@pytest.fixture()
def app(request):
    """Reusable Application fixture: (re)creates the browser session when
    missing or no longer valid, and ensures a logged-in state."""
    global fixture
    global target
    browser = request.config.getoption("--browser")
    web_config = load_config(request.config.getoption("--target"))['web']
    if fixture is None or not fixture.is_valid():
        fixture = Application(browser=browser, base_url=web_config['baseUrl'])
    # NOTE(review): ensure_login runs on every use — assumes it is a no-op
    # when already authenticated; confirm in fixture.session.
    fixture.session.ensure_login(username=web_config['username'], password=web_config['password'])
    return fixture
@pytest.fixture(scope="session")
def db(request):
    """Session-scoped database fixture; the connection is destroyed by a
    finalizer at the end of the test session."""
    db_config = load_config(request.config.getoption("--target"))['db']
    dbfixture = DbFixture(host=db_config['host'], name=db_config['name'], user=db_config['user'], password=db_config['password'])
    def fin():
        dbfixture.destroy()
    request.addfinalizer(fin)
    return dbfixture
@pytest.fixture(scope="session", autouse=True)
def stop(request):
    """Autouse session fixture that logs out and tears down the shared
    Application when the run ends."""
    def fin():
        # NOTE(review): assumes the 'app' fixture ran at least once; if not,
        # the module-level 'fixture' is still None and fin() would fail.
        fixture.session.ensure_logout()
        fixture.destroy()
    request.addfinalizer(fin)
    return fixture
@pytest.fixture
def check_ui(request):
    """Expose the --check_ui command line flag to tests as a fixture."""
    return request.config.getoption("--check_ui")
def pytest_addoption(parser):
    """Register the custom command line options consumed by the fixtures."""
    parser.addoption("--browser", action="store", default="chrome")
    parser.addoption("--target", action="store", default="target.json")
    parser.addoption("--check_ui", action="store_true")
def pytest_generate_tests(metafunc):
    """Parametrize tests whose fixture names start with 'data_' (loaded from
    a python module in data/) or 'json_' (loaded from a data/*.json file).

    The suffix after the 5-character prefix names the module/file.
    """
    # FIX: loop variable renamed from 'fixture' — it shadowed the
    # module-level 'fixture' global used by app()/stop().
    for fixture_name in metafunc.fixturenames:
        if fixture_name.startswith("data_"):
            testdata = load_from_module(fixture_name[5:])
            metafunc.parametrize(fixture_name, testdata, ids=[str(x) for x in testdata])
        elif fixture_name.startswith("json_"):
            testdata = load_from_json(fixture_name[5:])
            metafunc.parametrize(fixture_name, testdata, ids=[str(x) for x in testdata])
def load_from_module(module):
    """Import data.<module> and return its 'testdata' attribute."""
    return getattr(importlib.import_module("data.%s" % module), "testdata")
def load_from_json(file):
    """Decode test data from data/<file>.json (jsonpickle format), resolved
    relative to this module's directory."""
    with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), "data/%s.json" % file)) as f:
        return jsonpickle.decode(f.read())
| senin24/python_trainig | conftest.py | Python | apache-2.0 | 2,432 |
## @package hfst.exceptions
## exceptions...
## Base class for HfstExceptions. Holds its own name and the file and line number where it was thrown.
class HfstException:
    ## A message describing the error in more detail.
    # FIX: added the missing 'self' parameter — without it, calling
    # exc.what() on an instance raised TypeError.
    def what(self):
        pass
## Two or more HfstTransducers are not of the same type.
# NOTE(review): appears to duplicate TransducerTypeMismatchException defined
# below — confirm which name is canonical.
class HfstTransducerTypeMismatchException(HfstException):
    pass
## The library required by the implementation type requested is not linked to HFST.
class ImplementationTypeNotAvailableException(HfstException):
    pass
## Function has not been implemented (yet).
class FunctionNotImplementedException(HfstException):
    pass
## Flag diacritics encountered on one but not the other side of a transition.
class FlagDiacriticsAreNotIdentitiesException(HfstException):
    pass
## The input is not in valid prolog format.
class NotValidPrologFormatException(HfstException):
    pass
## Stream cannot be read.
class StreamNotReadableException(HfstException):
    pass
## Stream cannot be written.
#
# Thrown by #hfst.HfstOutputStream.write and #hfst.HfstTransducer.write_att
class StreamCannotBeWrittenException(HfstException):
    pass
## Stream is closed.
#
# Thrown by #hfst.HfstTransducer.write_att
# #hfst.HfstOutputStream.write
#
# An example:
#
# \verbatim
# try:
#     tr = hfst.regex('foo')
#     outstr = hfst.HfstOutputStream(filename='testfile')
#     outstr.close()
#     outstr.write(tr)
# except hfst.exceptions.StreamIsClosedException:
#     print("Could not write transducer: stream to file was closed.")
# \endverbatim
class StreamIsClosedException(HfstException):
    pass
## The stream is at end.
#
# Thrown by
# #hfst.HfstTransducer
# #hfst.HfstInputStream.__init__
class EndOfStreamException(HfstException):
    pass
## Transducer is cyclic.
#
# Thrown by #hfst.HfstTransducer.extract_paths. An example
# \verbatim
# transducer = hfst.regex('[a:b]*')
# try:
#     results = transducer.extract_paths(output='text')
#     print("The transducer has %i paths:" % len(results))
#     print(results)
# except hfst.exceptions.TransducerIsCyclicException:
#     print("The transducer is cyclic and has an infinite number of paths. Some of them:")
#     results = transducer.extract_paths(output='text', max_cycles=5)
#     print(results)
# \endverbatim
class TransducerIsCyclicException(HfstException):
    pass
## The stream does not contain transducers.
#
# Thrown by
# #hfst.HfstTransducer
# #hfst.HfstInputStream.__init__
#
# An example.
# \verbatim
# f = open('foofile', 'w')
# f.write('This is an ordinary text file.\n')
# f.close()
# try:
#     instr = hfst.HfstInputStream('foofile')
#     tr = instr.read()
#     print(tr)
#     instr.close()
# except hfst.exceptions.NotTransducerStreamException:
#     print("Could not print transducer: the file does not contain binary transducers.")
# \endverbatim
class NotTransducerStreamException(HfstException):
    pass
## The stream is not in valid AT&T format.
#
# An example:
# \verbatim
# f = open('testfile1.att', 'w')
# f.write('0 1 a b\n\
# 1 2 c\n\
# 2\n')
# f.close()
# f = hfst.hfst_open('testfile1.att', 'r')
# try:
#     tr = hfst.read_att(f)
# except hfst.exceptions.NotValidAttFormatException:
#     print('Could not read file: it is not in valid ATT format.')
# f.close()
# \endverbatim
# thrown by
# #hfst.HfstTransducer.__init__
class NotValidAttFormatException(HfstException):
    pass
## The input is not in valid LexC format.
class NotValidLexcFormatException(HfstException):
    pass
## State is not final (and cannot have a final weight).
#
# An example :
#
# \verbatim
# tr = hfst.HfstBasicTransducer()
# tr.add_state(1)
# # An exception is thrown as state number 1 is not final
# try:
#     w = tr.get_final_weight(1)
# except hfst.exceptions.StateIsNotFinalException:
#     print("State is not final.")
# \endverbatim
#
# You should use function #hfst.HfstBasicTransducer.is_final_state if you are not sure whether a
# state is final.
#
# Thrown by #hfst.HfstBasicTransducer get_final_weight.
class StateIsNotFinalException(HfstException):
    pass
## Transducers given as rule context are not automata.
# @see hfst.HfstTransducer.is_automaton()
class ContextTransducersAreNotAutomataException(HfstException):
    pass
## Transducers are not automata.
#
# Example:
# \verbatim
# tr1 = hfst.regex('foo:bar')
# tr2 = hfst.regex('bar:baz')
# try:
#     tr1.cross_product(tr2)
# except hfst.exceptions.TransducersAreNotAutomataException:
#     print('Transducers must be automata in cross product.')
# \endverbatim
# This exception is thrown by
# #hfst.HfstTransducer.cross_product
# when either input transducer does not have equivalent input and
# output symbols in all its transitions.
class TransducersAreNotAutomataException(HfstException):
    pass
## The state number argument is not valid.
#
# An example :
#
# \verbatim
# tr = hfst.HfstBasicTransducer()
# tr.add_state(1)
# try:
#     w = tr.get_final_weight(2)
# except hfst.exceptions.StateIndexOutOfBoundsException:
#     print('State number 2 does not exist')
# \endverbatim
class StateIndexOutOfBoundsException(HfstException):
    pass
## Transducer has a malformed HFST header.
#
# Thrown by #hfst.HfstTransducer.__init__ #hfst.HfstInputStream
class TransducerHeaderException(HfstException):
    pass
## An OpenFst transducer does not have an input symbol table.
#
# When converting from OpenFst to tropical or log HFST, the OpenFst transducer
# must have at least an input symbol table. If the output symbol table
# is missing, it is assumed to be equivalent to the input symbol table.
#
# Thrown by hfst.HfstTransducer.__init__
class MissingOpenFstInputSymbolTableException(HfstException):
    pass
## Two or more transducers do not have the same type.
#
# This can happen if (1) the calling and called transducer in a binary
# operation, (2) two transducers in a pair of transducers,
# (3) two consecutive transducers coming from an HfstInputStream or
# (4) two transducers in a function taking two or more transducers as
# arguments do not have the same type.
#
# An example:
# \verbatim
# hfst.set_default_fst_type(hfst.types.TROPICAL_OPENFST_TYPE)
# tr1 = hfst.regex('foo')
# tr2 = hfst.regex('bar')
# tr2.convert(hfst.types.FOMA_TYPE)
# try:
#     tr1.disjunct(tr2)
# except hfst.exceptions.TransducerTypeMismatchException:
#     print('The implementation types of transducers must be the same.')
# \endverbatim
class TransducerTypeMismatchException(HfstException):
    pass
## The set of transducer pairs is empty.
#
# Thrown by rule functions.
class EmptySetOfContextsException(HfstException):
    pass
## The type of a transducer is not specified.
#
# This exception is thrown when an implementation type argument
# is hfst.types.ERROR_TYPE.
class SpecifiedTypeRequiredException(HfstException):
    pass
## An error happened probably due to a bug in the HFST code.
class HfstFatalException(HfstException):
    pass
## Transducer has wrong type.
#
# This exception suggests that an HfstTransducer has not been properly
# initialized, probably due to a bug in the HFST library. Alternatively
# the default constructor of HfstTransducer has been called at some point.
#
# @see #hfst.HfstTransducer.__init__
class TransducerHasWrongTypeException(HfstException):
    pass
## String is not valid utf-8.
#
# This exception suggests that an input string is not valid utf8.
#
class IncorrectUtf8CodingException(HfstException):
    pass
## An argument string is an empty string.
# A transition symbol cannot be an empty string.
class EmptyStringException(HfstException):
    pass
## A bug in the HFST code.
class SymbolNotFoundException(HfstException):
    pass
## A piece of metadata in an HFST header is not supported.
class MetadataException(HfstException):
    pass
| wikimedia/operations-debs-contenttranslation-hfst | python/doc/hfst/exceptions/__init__.py | Python | gpl-3.0 | 7,967 |
from __future__ import unicode_literals
from .compat import text_type
class Node(object):
    """Base class for DjangoQL AST nodes.

    Provides a readable repr built from instance attributes and structural
    (attribute-wise, same-class) equality.
    """
    def __str__(self):
        # Render each attribute; list/tuple values are shown as
        # "[item, item]" with falsy items skipped.
        children = []
        for k, v in self.__dict__.items():
            if isinstance(v, (list, tuple)):
                v = '[%s]' % ', '.join([text_type(v) for v in v if v])
            children.append('%s=%s' % (k, v))
        return '<%s%s%s>' % (
            self.__class__.__name__,
            ': ' if children else '',
            ', '.join(children),
        )
    __repr__ = __str__
    def __eq__(self, other):
        # Equal only to instances of the same (or derived) class with
        # matching values for all of self's attributes.
        if not isinstance(other, self.__class__):
            return False
        for k, v in self.__dict__.items():
            if getattr(other, k) != v:
                return False
        return True
    def __ne__(self, other):
        # explicit inverse for Python 2 compatibility
        return not self.__eq__(other)
class Expression(Node):
    """Binary expression node: `left operator right`."""
    def __init__(self, left, operator, right):
        self.left = left
        self.operator = operator
        self.right = right
class Name(Node):
    """AST node for a (possibly dotted) identifier.

    `parts` may be given as a list, a tuple, or a single value; it is
    normalized to a list of path components.
    """
    def __init__(self, parts):
        if isinstance(parts, tuple):
            parts = list(parts)
        elif not isinstance(parts, list):
            parts = [parts]
        self.parts = parts
    @property
    def value(self):
        """Dotted string form of the identifier, e.g. 'author.name'."""
        return '.'.join(self.parts)
class Const(Node):
    """AST node for a literal constant value."""
    def __init__(self, value):
        self.value = value
class List(Node):
    """AST node for a list of nodes (e.g. the right side of 'in')."""
    def __init__(self, items):
        self.items = items
    @property
    def value(self):
        # values of the contained nodes, in order
        return [i.value for i in self.items]
class Operator(Node):
    """AST node wrapping an operator token (e.g. '=', 'and', 'in')."""
    def __init__(self, operator):
        self.operator = operator
class Logical(Operator):
    """Logical connective operator ('and' / 'or')."""
    pass
class Comparison(Operator):
    """Comparison operator (e.g. '=', '!=', '>', 'in')."""
    pass
| ivelum/djangoql | djangoql/ast.py | Python | mit | 1,694 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Interact with AWS S3, using the boto3 library."""
import fnmatch
import gzip as gz
import io
import re
import shutil
from functools import wraps
from inspect import signature
from io import BytesIO
from pathlib import Path
from tempfile import NamedTemporaryFile
from typing import Any, Callable, Dict, List, Optional, Tuple, TypeVar, Union, cast
from urllib.parse import urlparse
from boto3.s3.transfer import S3Transfer, TransferConfig
from botocore.exceptions import ClientError
from airflow.exceptions import AirflowException
from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
from airflow.utils.helpers import chunks
T = TypeVar("T", bound=Callable)
def provide_bucket_name(func: T) -> T:
    """
    Decorator: when the wrapped hook method is called without an explicit
    ``bucket_name`` argument, fall back to the ``schema`` field of the
    hook's AWS connection (when one is configured).
    """
    func_signature = signature(func)

    @wraps(func)
    def wrapper(*args, **kwargs) -> T:
        bound = func_signature.bind(*args, **kwargs)
        if 'bucket_name' not in bound.arguments:
            hook = args[0]
            if hook.aws_conn_id:
                conn = hook.get_connection(hook.aws_conn_id)
                if conn.schema:
                    bound.arguments['bucket_name'] = conn.schema
        return func(*bound.args, **bound.kwargs)

    return cast(T, wrapper)
def unify_bucket_name_and_key(func: T) -> T:
    """
    Decorator: if the wrapped method received a key (``key`` or
    ``wildcard_key``) but no ``bucket_name``, split the key — assumed to be
    a full ``s3://bucket/key`` URL — into the bucket name and relative key.
    """
    func_signature = signature(func)

    @wraps(func)
    def wrapper(*args, **kwargs) -> T:
        bound = func_signature.bind(*args, **kwargs)
        key_name = next(
            (name for name in ('wildcard_key', 'key') if name in bound.arguments), None
        )
        if key_name is None:
            raise ValueError('Missing key parameter!')
        if 'bucket_name' not in bound.arguments:
            bound.arguments['bucket_name'], bound.arguments[key_name] = S3Hook.parse_s3_url(
                bound.arguments[key_name]
            )
        return func(*bound.args, **bound.kwargs)

    return cast(T, wrapper)
class S3Hook(AwsBaseHook):
"""
Interact with AWS S3, using the boto3 library.
Additional arguments (such as ``aws_conn_id``) may be specified and
are passed down to the underlying AwsBaseHook.
.. seealso::
:class:`~airflow.providers.amazon.aws.hooks.base_aws.AwsBaseHook`
"""
conn_type = 's3'
hook_name = 'S3'
def __init__(self, *args, **kwargs) -> None:
    """Create the hook. ``extra_args`` and ``transfer_config_args`` are
    consumed here; everything else is forwarded to AwsBaseHook with
    ``client_type`` forced to ``'s3'``.

    :param extra_args: dict of extra arguments for S3 transfer operations
    :param transfer_config_args: dict used to build the boto3 TransferConfig
    :raises ValueError: if either option is present but not a dict
    """
    kwargs['client_type'] = 's3'

    self.extra_args = {}
    if 'extra_args' in kwargs:
        self.extra_args = kwargs.pop('extra_args')
        if not isinstance(self.extra_args, dict):
            raise ValueError(f"extra_args '{self.extra_args!r}' must be of type {dict}")

    self.transfer_config = TransferConfig()
    if 'transfer_config_args' in kwargs:
        transport_config_args = kwargs.pop('transfer_config_args')
        if not isinstance(transport_config_args, dict):
            # FIX: the original message had an unbalanced quote around the value
            raise ValueError(
                f"transfer_config_args '{transport_config_args!r}' must be of type {dict}"
            )
        self.transfer_config = TransferConfig(**transport_config_args)

    super().__init__(*args, **kwargs)
@staticmethod
def parse_s3_url(s3url: str) -> Tuple[str, str]:
"""
Parses the S3 Url into a bucket name and key.
:param s3url: The S3 Url to parse.
:rtype s3url: str
:return: the parsed bucket name and key
:rtype: tuple of str
"""
parsed_url = urlparse(s3url)
if not parsed_url.netloc:
raise AirflowException(f'Please provide a bucket_name instead of "{s3url}"')
bucket_name = parsed_url.netloc
key = parsed_url.path.lstrip('/')
return bucket_name, key
@provide_bucket_name
def check_for_bucket(self, bucket_name: Optional[str] = None) -> bool:
    """
    Check whether ``bucket_name`` exists (and is accessible).

    :param bucket_name: the name of the bucket
    :return: True if it exists and False if not.
    """
    try:
        self.get_conn().head_bucket(Bucket=bucket_name)
    except ClientError as error:
        # head_bucket fails both for missing buckets and access errors;
        # log the reason and report "not available".
        self.log.error(error.response["Error"]["Message"])
        return False
    return True
@provide_bucket_name
def get_bucket(self, bucket_name: Optional[str] = None):
    """
    Returns a boto3.S3.Bucket object

    :param bucket_name: the name of the bucket
    :type bucket_name: str
    :return: the bucket object to the bucket name.
    :rtype: boto3.S3.Bucket
    """
    # FIX: removed the incorrect ``-> str`` return annotation — the method
    # returns a boto3 Bucket resource (see :rtype: above), not a string.
    # Buckets have no regions, and we cannot remove the region name from
    # _get_credentials as we would break compatibility, so we set it
    # explicitly to None.
    session, endpoint_url = self._get_credentials(region_name=None)
    s3_resource = session.resource(
        "s3",
        endpoint_url=endpoint_url,
        config=self.config,
        verify=self.verify,
    )
    return s3_resource.Bucket(bucket_name)
@provide_bucket_name
def create_bucket(self, bucket_name: Optional[str] = None, region_name: Optional[str] = None) -> None:
    """
    Creates an Amazon S3 bucket.

    :param bucket_name: The name of the bucket
    :param region_name: The name of the aws region in which to create the
        bucket; defaults to the client's configured region.
    """
    if not region_name:
        region_name = self.get_conn().meta.region_name
    # us-east-1 is the default location: the API call takes no
    # CreateBucketConfiguration in that case.
    if region_name == 'us-east-1':
        create_kwargs = {}
    else:
        create_kwargs = {'CreateBucketConfiguration': {'LocationConstraint': region_name}}
    self.get_conn().create_bucket(Bucket=bucket_name, **create_kwargs)
@provide_bucket_name
def check_for_prefix(self, prefix: str, delimiter: str, bucket_name: Optional[str] = None) -> bool:
    """
    Checks that a prefix exists in a bucket

    :param bucket_name: the name of the bucket
    :type bucket_name: str
    :param prefix: a key prefix
    :type prefix: str
    :param delimiter: the delimiter marks key hierarchy.
    :type delimiter: str
    :return: False if the prefix does not exist in the bucket and True if it does.
    :rtype: bool
    """
    # Ensure the prefix ends with the delimiter.
    # NOTE(review): prefix[-1] raises IndexError for an empty prefix —
    # confirm callers always pass a non-empty prefix.
    prefix = prefix + delimiter if prefix[-1] != delimiter else prefix
    # Strip the last "<component><delimiter>" chunk to get the parent level,
    # then ask S3 for the prefixes listed at that level.
    prefix_split = re.split(fr'(\w+[{delimiter}])$', prefix, 1)
    previous_level = prefix_split[0]
    plist = self.list_prefixes(bucket_name, previous_level, delimiter)
    return prefix in plist
@provide_bucket_name
def list_prefixes(
    self,
    bucket_name: Optional[str] = None,
    prefix: Optional[str] = None,
    delimiter: Optional[str] = None,
    page_size: Optional[int] = None,
    max_items: Optional[int] = None,
) -> list:
    """
    Lists prefixes in a bucket under prefix

    :param bucket_name: the name of the bucket
    :param prefix: a key prefix
    :param delimiter: the delimiter marks key hierarchy.
    :param page_size: pagination size
    :param max_items: maximum items to return
    :return: a list of matched prefixes
    """
    pagination_config = {
        'PageSize': page_size,
        'MaxItems': max_items,
    }
    paginator = self.get_conn().get_paginator('list_objects_v2')
    pages = paginator.paginate(
        Bucket=bucket_name,
        Prefix=prefix or '',
        Delimiter=delimiter or '',
        PaginationConfig=pagination_config,
    )
    # collect the 'CommonPrefixes' entries from every result page
    prefixes = []
    for page in pages:
        prefixes.extend(
            common_prefix['Prefix'] for common_prefix in page.get('CommonPrefixes', [])
        )
    return prefixes
@provide_bucket_name
def list_keys(
    self,
    bucket_name: Optional[str] = None,
    prefix: Optional[str] = None,
    delimiter: Optional[str] = None,
    page_size: Optional[int] = None,
    max_items: Optional[int] = None,
) -> list:
    """
    List keys in a bucket under ``prefix``, excluding those past ``delimiter``.

    :param str bucket_name: the name of the bucket
    :param str prefix: a key prefix
    :param str delimiter: the delimiter marking key hierarchy
    :param int page_size: pagination size
    :param int max_items: maximum number of items to return
    :return: the matched keys
    :rtype: list
    """
    paginator = self.get_conn().get_paginator('list_objects_v2')
    pages = paginator.paginate(
        Bucket=bucket_name,
        Prefix=prefix or '',
        Delimiter=delimiter or '',
        PaginationConfig={'PageSize': page_size, 'MaxItems': max_items},
    )
    # Each page's Contents entry holds the object records for that page.
    return [
        entry['Key']
        for page in pages
        for entry in page.get('Contents', [])
    ]
@provide_bucket_name
@unify_bucket_name_and_key
def check_for_key(self, key: str, bucket_name: Optional[str] = None) -> bool:
    """
    Report whether a key exists in a bucket.

    :param str key: S3 key that will point to the file
    :param str bucket_name: name of the bucket in which the file is stored
    :return: True if the key exists, False otherwise
    :rtype: bool
    """
    try:
        self.get_conn().head_object(Bucket=bucket_name, Key=key)
    except ClientError as e:
        # A 404 simply means the object is absent; anything else is a
        # genuine failure and must propagate.
        if e.response["ResponseMetadata"]["HTTPStatusCode"] == 404:
            return False
        raise e
    return True
@provide_bucket_name
@unify_bucket_name_and_key
def get_key(self, key: str, bucket_name: Optional[str] = None) -> S3Transfer:
    """
    Return a ``boto3.s3.Object`` for the given key, verifying it exists.

    :param str key: the path to the key
    :param str bucket_name: the name of the bucket
    :return: the key object from the bucket
    :rtype: boto3.s3.Object
    """
    # Buckets are region-agnostic; _get_credentials keeps its region
    # parameter only for backward compatibility, so pass None explicitly.
    session, endpoint_url = self._get_credentials(region_name=None)
    obj = session.resource(
        "s3",
        endpoint_url=endpoint_url,
        config=self.config,
        verify=self.verify,
    ).Object(bucket_name, key)
    # load() issues a HEAD request and raises if the key does not exist.
    obj.load()
    return obj
@provide_bucket_name
@unify_bucket_name_and_key
def read_key(self, key: str, bucket_name: Optional[str] = None) -> str:
    """
    Read a key from S3 and decode it as UTF-8 text.

    :param str key: S3 key that will point to the file
    :param str bucket_name: name of the bucket in which the file is stored
    :return: the content of the key
    :rtype: str
    """
    return self.get_key(key, bucket_name).get()['Body'].read().decode('utf-8')
@provide_bucket_name
@unify_bucket_name_and_key
def select_key(
    self,
    key: str,
    bucket_name: Optional[str] = None,
    expression: Optional[str] = None,
    expression_type: Optional[str] = None,
    input_serialization: Optional[Dict[str, Any]] = None,
    output_serialization: Optional[Dict[str, Any]] = None,
) -> str:
    """
    Read a subset of a key's content with S3 Select.

    :param str key: S3 key that will point to the file
    :param str bucket_name: name of the bucket in which the file is stored
    :param str expression: S3 Select expression (defaults to selecting everything)
    :param str expression_type: S3 Select expression type (defaults to SQL)
    :param dict input_serialization: S3 Select input format (defaults to CSV)
    :param dict output_serialization: S3 Select output format (defaults to CSV)
    :return: the retrieved subset of the original data
    :rtype: str

    .. seealso::
        For more details about S3 Select parameters:
        http://boto3.readthedocs.io/en/latest/reference/services/s3.html#S3.Client.select_object_content
    """
    response = self.get_conn().select_object_content(
        Bucket=bucket_name,
        Key=key,
        Expression=expression or 'SELECT * FROM S3Object',
        ExpressionType=expression_type or 'SQL',
        InputSerialization=input_serialization if input_serialization is not None else {'CSV': {}},
        OutputSerialization=output_serialization if output_serialization is not None else {'CSV': {}},
    )
    # The response payload is an event stream; only 'Records' events
    # carry result bytes.
    chunks_ = [event['Records']['Payload'] for event in response['Payload'] if 'Records' in event]
    return b''.join(chunks_).decode('utf-8')
@provide_bucket_name
@unify_bucket_name_and_key
def check_for_wildcard_key(
    self, wildcard_key: str, bucket_name: Optional[str] = None, delimiter: str = ''
) -> bool:
    """
    Report whether any key in the bucket matches a wildcard expression.

    :param str wildcard_key: wildcard pattern for the key path
    :param str bucket_name: the name of the bucket
    :param str delimiter: the delimiter marking key hierarchy
    :return: True when at least one key matches, False otherwise
    :rtype: bool
    """
    match = self.get_wildcard_key(
        wildcard_key=wildcard_key, bucket_name=bucket_name, delimiter=delimiter
    )
    return match is not None
@provide_bucket_name
@unify_bucket_name_and_key
def get_wildcard_key(
    self, wildcard_key: str, bucket_name: Optional[str] = None, delimiter: str = ''
) -> S3Transfer:
    """
    Return the first ``boto3.s3.Object`` whose key matches a wildcard expression.

    :param str wildcard_key: wildcard pattern for the key path
    :param str bucket_name: the name of the bucket
    :param str delimiter: the delimiter marking key hierarchy
    :return: the first matching key object, or None when nothing matches
    :rtype: boto3.s3.Object
    """
    # Everything before the first wildcard character is a literal prefix,
    # which narrows the server-side listing before fnmatch filtering.
    prefix = re.split(r'[\[\*\?]', wildcard_key, 1)[0]
    for candidate in self.list_keys(bucket_name, prefix=prefix, delimiter=delimiter):
        if fnmatch.fnmatch(candidate, wildcard_key):
            return self.get_key(candidate, bucket_name)
    return None
@provide_bucket_name
@unify_bucket_name_and_key
def load_file(
    self,
    filename: Union[Path, str],
    key: str,
    bucket_name: Optional[str] = None,
    replace: bool = False,
    encrypt: bool = False,
    gzip: bool = False,
    acl_policy: Optional[str] = None,
) -> None:
    """
    Loads a local file to S3.

    :param filename: path to the file to load
    :type filename: Union[Path, str]
    :param str key: S3 key that will point to the file
    :param str bucket_name: name of the bucket in which to store the file
    :param bool replace: whether to overwrite the key if it already exists;
        if False and the key exists, a ValueError is raised
    :param bool encrypt: request AES256 server-side encryption at rest
    :param bool gzip: if True, compress the file locally before uploading
    :param str acl_policy: canned ACL policy for the uploaded file
    """
    filename = str(filename)
    if not replace and self.check_for_key(key, bucket_name):
        raise ValueError(f"The key {key} already exists.")
    # Work on a copy: the previous code assigned self.extra_args directly
    # and then mutated it, so 'ServerSideEncryption'/'ACL' from one call
    # leaked into every later upload made through this hook instance.
    extra_args = dict(self.extra_args)
    if encrypt:
        extra_args['ServerSideEncryption'] = "AES256"
    if gzip:
        with open(filename, 'rb') as f_in:
            filename_gz = f_in.name + '.gz'
            with gz.open(filename_gz, 'wb') as f_out:
                shutil.copyfileobj(f_in, f_out)
            filename = filename_gz
    if acl_policy:
        extra_args['ACL'] = acl_policy
    client = self.get_conn()
    client.upload_file(filename, bucket_name, key, ExtraArgs=extra_args, Config=self.transfer_config)
@provide_bucket_name
@unify_bucket_name_and_key
def load_string(
    self,
    string_data: str,
    key: str,
    bucket_name: Optional[str] = None,
    replace: bool = False,
    encrypt: bool = False,
    encoding: Optional[str] = None,
    acl_policy: Optional[str] = None,
    compression: Optional[str] = None,
) -> None:
    """
    Upload a string to S3 as an object.

    Convenience wrapper that encodes the string, optionally compresses
    it, and ships it through the boto upload machinery.

    :param str string_data: content to store under the key
    :param str key: S3 key that will point to the file
    :param str bucket_name: name of the bucket in which to store the file
    :param bool replace: whether to overwrite the key if it already exists
    :param bool encrypt: request AES256 server-side encryption at rest
    :param str encoding: text encoding (defaults to utf-8)
    :param str acl_policy: canned ACL policy for the uploaded object
    :param str compression: compression to apply; only gzip is supported
    """
    bytes_data = string_data.encode(encoding or 'utf-8')
    # Validate the requested compression before doing any work with it.
    available_compressions = ['gzip']
    if compression is not None and compression not in available_compressions:
        raise NotImplementedError(
            f"Received {compression} compression type. "
            f"String can currently be compressed in {available_compressions} only."
        )
    if compression == 'gzip':
        bytes_data = gz.compress(bytes_data)
    file_obj = io.BytesIO(bytes_data)
    self._upload_file_obj(file_obj, key, bucket_name, replace, encrypt, acl_policy)
    file_obj.close()
@provide_bucket_name
@unify_bucket_name_and_key
def load_bytes(
    self,
    bytes_data: bytes,
    key: str,
    bucket_name: Optional[str] = None,
    replace: bool = False,
    encrypt: bool = False,
    acl_policy: Optional[str] = None,
) -> None:
    """
    Upload raw bytes to S3 as an object.

    Convenience wrapper that wraps the bytes in an in-memory buffer and
    ships them through the boto upload machinery.

    :param bytes bytes_data: content to store under the key
    :param str key: S3 key that will point to the file
    :param str bucket_name: name of the bucket in which to store the file
    :param bool replace: whether to overwrite the key if it already exists
    :param bool encrypt: request AES256 server-side encryption at rest
    :param str acl_policy: canned ACL policy for the uploaded object
    """
    buffer = io.BytesIO(bytes_data)
    self._upload_file_obj(buffer, key, bucket_name, replace, encrypt, acl_policy)
    buffer.close()
@provide_bucket_name
@unify_bucket_name_and_key
def load_file_obj(
    self,
    file_obj: BytesIO,
    key: str,
    bucket_name: Optional[str] = None,
    replace: bool = False,
    encrypt: bool = False,
    acl_policy: Optional[str] = None,
) -> None:
    """
    Upload a file-like object to S3.

    :param file_obj: file-like object providing the content for the S3 key
    :param str key: S3 key that will point to the file
    :param str bucket_name: name of the bucket in which to store the file
    :param bool replace: whether to overwrite the key if it already exists
    :param bool encrypt: request AES256 server-side encryption at rest
    :param str acl_policy: canned ACL policy for the uploaded object
    """
    # Thin public wrapper around the shared private upload helper.
    self._upload_file_obj(file_obj, key, bucket_name, replace, encrypt, acl_policy)
def _upload_file_obj(
    self,
    file_obj: BytesIO,
    key: str,
    bucket_name: Optional[str] = None,
    replace: bool = False,
    encrypt: bool = False,
    acl_policy: Optional[str] = None,
) -> None:
    """
    Shared helper that uploads a file-like object to S3.

    :param file_obj: file-like object providing the content for the key
    :param str key: S3 key that will point to the file
    :param str bucket_name: name of the bucket in which to store the file
    :param bool replace: whether to overwrite the key if it already exists
    :param bool encrypt: request AES256 server-side encryption at rest
    :param str acl_policy: canned ACL policy for the uploaded object
    """
    if not replace and self.check_for_key(key, bucket_name):
        raise ValueError(f"The key {key} already exists.")
    # Work on a copy: the previous code mutated self.extra_args in place,
    # so 'ServerSideEncryption'/'ACL' set here leaked into every later
    # upload made through this hook instance.
    extra_args = dict(self.extra_args)
    if encrypt:
        extra_args['ServerSideEncryption'] = "AES256"
    if acl_policy:
        extra_args['ACL'] = acl_policy
    client = self.get_conn()
    client.upload_fileobj(
        file_obj,
        bucket_name,
        key,
        ExtraArgs=extra_args,
        Config=self.transfer_config,
    )
def copy_object(
    self,
    source_bucket_key: str,
    dest_bucket_key: str,
    source_bucket_name: Optional[str] = None,
    dest_bucket_name: Optional[str] = None,
    source_version_id: Optional[str] = None,
    acl_policy: Optional[str] = None,
) -> None:
    """
    Creates a copy of an object that is already stored in S3.

    Note: the S3 connection used here needs to have access to both
    source and destination bucket/key.

    :param str source_bucket_key: key of the source object; either a full
        s3:// url (omit ``source_bucket_name``) or a path relative to root
    :param str dest_bucket_key: key of the object to copy to, with the
        same convention as ``source_bucket_key``
    :param str source_bucket_name: bucket of the source object; omit when
        ``source_bucket_key`` is a full s3:// url
    :param str dest_bucket_name: destination bucket; omit when
        ``dest_bucket_key`` is a full s3:// url
    :param str source_version_id: version ID of the source object (optional)
    :param str acl_policy: canned ACL for the copied object ('private' by default)
    :return: the ``copy_object`` service response (the ``-> None``
        annotation is retained only for interface compatibility)
    """
    acl_policy = acl_policy or 'private'

    def _resolve(bucket: Optional[str], bucket_key: str, label: str):
        # Accept either a full s3:// url or (bucket, relative key); reject
        # the ambiguous combination of both.  Shared by source and dest.
        if bucket is None:
            return self.parse_s3_url(bucket_key)
        parsed_url = urlparse(bucket_key)
        if parsed_url.scheme != '' or parsed_url.netloc != '':
            raise AirflowException(
                'If {0}_bucket_name is provided, '
                '{0}_bucket_key should be relative path '
                'from root level, rather than a full s3:// url'.format(label)
            )
        return bucket, bucket_key

    dest_bucket_name, dest_bucket_key = _resolve(dest_bucket_name, dest_bucket_key, 'dest')
    source_bucket_name, source_bucket_key = _resolve(source_bucket_name, source_bucket_key, 'source')
    copy_source = {'Bucket': source_bucket_name, 'Key': source_bucket_key, 'VersionId': source_version_id}
    response = self.get_conn().copy_object(
        Bucket=dest_bucket_name, Key=dest_bucket_key, CopySource=copy_source, ACL=acl_policy
    )
    return response
@provide_bucket_name
def delete_bucket(self, bucket_name: str, force_delete: bool = False) -> None:
    """
    Delete an S3 bucket, optionally emptying it first.

    :param str bucket_name: name of the bucket to delete
    :param bool force_delete: when True, delete all objects first so a
        non-empty bucket can be removed
    :return: None
    :rtype: None
    """
    if force_delete:
        bucket_keys = self.list_keys(bucket_name=bucket_name)
        if bucket_keys:
            self.delete_objects(bucket=bucket_name, keys=bucket_keys)
    # Use get_conn() for consistency with every other method in this
    # hook (this method previously reached for self.conn directly).
    self.get_conn().delete_bucket(Bucket=bucket_name)
def delete_objects(self, bucket: str, keys: Union[str, list]) -> None:
    """
    Delete one or more keys from a bucket.

    :param str bucket: name of the bucket to delete object(s) from
    :param keys: a single key name, or a list of key names, to delete
    :type keys: str or list
    """
    key_list = [keys] if isinstance(keys, str) else keys
    client = self.get_conn()
    # The S3 API accepts at most 1000 keys per delete_objects request:
    # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/s3.html#S3.Client.delete_objects
    for chunk in chunks(key_list, chunk_size=1000):
        response = client.delete_objects(Bucket=bucket, Delete={"Objects": [{"Key": k} for k in chunk]})
        self.log.info("Deleted: %s", [entry['Key'] for entry in response.get("Deleted", [])])
        if "Errors" in response:
            errors_keys = [entry['Key'] for entry in response.get("Errors", [])]
            raise AirflowException(f"Errors when deleting: {errors_keys}")
@provide_bucket_name
@unify_bucket_name_and_key
def download_file(
    self, key: str, bucket_name: Optional[str] = None, local_path: Optional[str] = None
) -> str:
    """
    Downloads a file from the S3 location to the local file system.

    :param str key: the key path in S3
    :param bucket_name: the specific bucket to use
    :type bucket_name: Optional[str]
    :param local_path: local directory for the downloaded file; defaults
        to the system's temporary directory
    :type local_path: Optional[str]
    :return: the local file name
    :rtype: str
    """
    self.log.info('Downloading source S3 file from Bucket %s with path %s', bucket_name, key)
    try:
        s3_obj = self.get_key(key, bucket_name)
    except ClientError as e:
        # boto3 reports the error code as a *string*; the previous
        # integer comparison (== 404) could never match, so missing keys
        # surfaced as a raw ClientError instead of this AirflowException.
        if e.response.get('Error', {}).get('Code') in ('404', 'NoSuchKey'):
            raise AirflowException(
                f'The source file in Bucket {bucket_name} with path {key} does not exist'
            )
        raise e
    # delete=False: the caller owns the downloaded file's lifetime.
    with NamedTemporaryFile(dir=local_path, prefix='airflow_tmp_', delete=False) as local_tmp_file:
        s3_obj.download_fileobj(local_tmp_file)
    return local_tmp_file.name
def generate_presigned_url(
    self,
    client_method: str,
    params: Optional[dict] = None,
    expires_in: int = 3600,
    http_method: Optional[str] = None,
) -> Optional[str]:
    """
    Generate a presigned url for the given client method and arguments.

    :param str client_method: the client method to presign for
    :param dict params: the parameters normally passed to the client method
    :param int expires_in: validity of the url in seconds (default one hour)
    :param str http_method: HTTP method for the generated url; defaults to
        whatever the method's model uses
    :return: the presigned url, or None when generation fails
    :rtype: str
    """
    try:
        return self.get_conn().generate_presigned_url(
            ClientMethod=client_method,
            Params=params,
            ExpiresIn=expires_in,
            HttpMethod=http_method,
        )
    except ClientError as e:
        # Log the service-provided message and signal failure with None.
        self.log.error(e.response["Error"]["Message"])
        return None
@provide_bucket_name
def get_bucket_tagging(self, bucket_name: Optional[str] = None) -> Optional[List[Dict[str, str]]]:
    """
    Fetch the tag set attached to a bucket.

    :param str bucket_name: the name of the bucket
    :return: the bucket's key/value tag pairs
    :rtype: Optional[List[Dict[str, str]]]
    """
    try:
        tag_set = self.get_conn().get_bucket_tagging(Bucket=bucket_name)['TagSet']
        self.log.info("S3 Bucket Tag Info: %s", tag_set)
        return tag_set
    except ClientError as e:
        self.log.error(e)
        raise e
@provide_bucket_name
def put_bucket_tagging(
    self,
    tag_set: Optional[List[Dict[str, str]]] = None,
    key: Optional[str] = None,
    value: Optional[str] = None,
    bucket_name: Optional[str] = None,
) -> None:
    """
    Overwrites the existing TagSet with provided tags. Must provide either a TagSet or a key/value pair.

    :param tag_set: list of key/value pairs for the tags
    :type tag_set: List[Dict[str, str]]
    :param str key: the Key for an additional TagSet entry
    :param str value: the Value for an additional TagSet entry
    :param str bucket_name: the name of the bucket
    :return: None
    :rtype: None
    """
    self.log.info("S3 Bucket Tag Info:\tKey: %s\tValue: %s\tSet: %s", key, value, tag_set)
    # Copy the caller's list: the previous code appended to the argument
    # in place, mutating the caller's tag_set on every invocation.
    tag_set = list(tag_set) if tag_set else []
    if key and value:
        tag_set.append({'Key': key, 'Value': value})
    elif not tag_set or (key or value):
        # Either nothing was provided at all, or an incomplete key/value
        # pair was given alongside (or instead of) a TagSet.
        message = 'put_bucket_tagging() requires either a predefined TagSet or a key/value pair.'
        self.log.error(message)
        raise ValueError(message)
    try:
        s3_client = self.get_conn()
        s3_client.put_bucket_tagging(Bucket=bucket_name, Tagging={'TagSet': tag_set})
    except ClientError as e:
        self.log.error(e)
        raise e
@provide_bucket_name
def delete_bucket_tagging(self, bucket_name: Optional[str] = None) -> None:
    """
    Remove every tag from a bucket.

    :param str bucket_name: the name of the bucket
    :return: None
    :rtype: None
    """
    self.get_conn().delete_bucket_tagging(Bucket=bucket_name)
| mistercrunch/airflow | airflow/providers/amazon/aws/hooks/s3.py | Python | apache-2.0 | 34,493 |
#
# Copyright 2017 The E2C Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from inspect import \
getfullargspec, \
ismethod
from typing import \
    Any, \
    Callable, \
    Dict, \
    List, \
    Optional
from . import errors
class Actor(object):
    """
    A wrapper around a callable function.

    An actor owns a callable, the session it belongs to, and a mapping of
    named output channels to downstream actors.
    """

    def __init__(self, session: 'Session', name: str, callable: Optional[Callable]) -> None:
        """
        A wrapper around a callable function.

        :type session: :class:`e2c.session.Session`
        :param session: The session to that the actor belong.
        :type name: str
        :param name: The name of the actor to register on the session.
        :type callable: Optional[Callable]
        :param callable: Any callable function, or None for a placeholder actor.
        """
        self.name = name
        self.session = session
        # NOTE: 'callable' shadows the builtin of the same name inside this scope.
        self.callable = callable
        # Outgoing channels: channel name -> list of downstream actors.
        self.actors: Dict[str, List['Actor']] = {}
        # Lazy cache for the introspected parameter spec (see `specs`).
        self._specs: Dict[str, type] = {}

    def on(self, name: str, actor: 'Actor') -> None:
        """
        Method to register the given actor under specified name.

        :type name: str
        :param name: The name to register the actor in this actor.
        :type actor: Actor
        :param actor: An instance of the actor to register.
        :rtype: None
        :raises e2c.errors.E2CActorError: if `name` is None or empty.
        """
        if not name:
            raise errors.E2CActorError(
                'Name cannot be None or empty!')
        if not name in self.actors:
            self.actors[name] = []
        self.actors[name].append(actor)

    def run(self, *args) -> object:
        """
        Runs the callable internal function with specified arguments.

        Arguments are first resolved against the actor's parameter spec
        (injecting Event-typed parameters) before the call is made.

        :type args: List[object]
        :param args: A list of arguments.
        :rtype: object
        :return: The result of the callable function.
        :raises e2c.errors.E2CActorError: if no callable is attached.
        """
        # Imported lazily to avoid circular imports between actor/event/resolve.
        from .event import Event
        from .resolve import resolve
        params = resolve(self, [*args], Event)
        if self.session.activate_trace:
            self.session.on_trace(self.name)
        if not self.callable:
            raise errors.E2CActorError(
                'Actor {0} has no callable function!'.format(self.name))
        return self.callable(*params)

    def run_with_params(self, *params) -> object:
        """
        Runs the callable internal function with specified parameters.

        Unlike `run`, the parameters are passed through without resolution.

        :type params: List[Callable]
        :param params: A list of parameters
        :rtype: object
        :return: The result of the callable function.
        """
        if self.session.activate_trace:
            self.session.on_trace(self.name)
        return self.callable(*params)

    def clone(self) -> 'Actor':
        """
        Gets a new instance of type `Actor`

        The clone shares the session, callable, and downstream actor
        instances (a shallow copy of the wiring, not a deep copy).

        :rtype: `Actor`
        :return: The flat clone of that actor.
        """
        c_actor = Actor(self.session, self.name, self.callable)
        for name, actors in self.actors.items():
            for actor in actors:
                c_actor.on(name, actor)
        return c_actor

    @property
    def specs(self) -> Dict[str, type]:
        """
        Getter property to get the introspection parameter
        of the internal callable function.

        The result is computed once and cached in `_specs`.

        :rtype: Dict[str, type]
        :return: A dictionary of name and type for each parameter.
        """
        if not self._specs and self.callable:
            result = getfullargspec(self.callable)
            args = result.args
            if ismethod(self.callable):
                args = args[1:]  # skip self
            # Parameters without an annotation default to typing.Any.
            self._specs = dict([(a, result.annotations.get(a, Any)) for a in args])
        return self._specs
| elastic-event-components/e2c | source/python/e2c/actor.py | Python | apache-2.0 | 4,201 |
import re
from Message import *
from AnnotationParser import AnnotationParser
from TagsParser import TagsParser
from Name import *
##########################################
##########################################
class TestCase:
    """Represents one parsed test case (name, scope, location, annotations).

    NOTE: this is Python 2 code (print statement, implicit str/unicode mix).
    """
    ########################################
    def __init__(self, name, scope, file, line, annotations):
        # name is a 3-tuple: (traditional name, pretty name, parameterized flag).
        self.traditional_name = name[0]
        self.name = name[1]
        self.p_test = name[2]
        if self.name != None:
            self.name = escape_name(self.name)
        self.scope = scope
        self.file = file
        self.line = line
        # Only the first annotation block (if any) is honoured.
        annotation = None
        if len(annotations) > 0:
            annotation = annotations[0]
        self.annotations = AnnotationParser(annotation, {"id":None, "depends":None, "memcheck":None, "data":None, "tags":[]}).parse()
        # A parameterized test without a data provider is a fatal input error.
        if self.p_test and self.annotations["data"] == None:
            raw_fatal(file, line, "parameterized test should have data provider")
        self.annotations['tags'] = TagsParser(self.annotations['tags']).parse()
        self.depends = None   # lazily resolved by get_depends()
        self.generated = None # set by mark_as_generated()
    ########################################
    def get_data_provider_name(self):
        # Name of the data provider annotation, or None.
        return self.annotations['data']
    ########################################
    def is_p_test(self):
        # True when this is a parameterized test.
        return self.p_test
    ########################################
    def get_tags(self):
        return self.annotations['tags']
    ########################################
    def get_memcheck_switch(self):
        return self.annotations['memcheck']
    ########################################
    def set_scope(self, scope):
        self.scope = scope
    ########################################
    def has_been_generated(self):
        return self.generated
    ########################################
    def mark_as_generated(self):
        self.generated = True
    ########################################
    def matches_id(self, id):
        # A None id never matches (so tests without ids are never targets).
        return id != None and self.annotations["id"] == id
    ########################################
    def report_non_existing_data_provider(self):
        raw_fatal(self.file, self.line, "data provider \"" + self.get_data_provider_name() + "\" does not exist.")
    ########################################
    def report_cyclic_depend_error(self):
        raw_fatal(self.file, self.line, "cyclic depends.")
    ########################################
    def __get_depends(self):
        # Resolve the 'depends' annotation to an actual TestCase via the scope;
        # fatal if the referenced id does not exist.
        if self.annotations["depends"] == None:
            return None
        depends = self.scope.find_testcase_with_id(self.annotations["depends"])
        if depends == None:
            raw_fatal(self.file, self.line, "no testcase was specified with id=" + self.annotations["depends"])
        return depends
    ########################################
    def get_depends(self):
        # Cached lazy resolution of the dependency.
        if not self.depends:
            self.depends = self.__get_depends()
        return self.depends
    ########################################
    def show(self):
        # Debug helper: print the test case in TEST(...) form (Python 2 print).
        content = "TEST(" + self.get_name() + ")"
        print content.encode('utf-8')
    ########################################
    def get_name(self):
        # Prefer the pretty name; fall back to the traditional one.
        if not self.name:
            return self.traditional_name
        return self.name
    ########################################
    def get_traditional_name(self):
        return self.traditional_name
    ########################################
    def get_file_name(self):
        return self.file
    ########################################
    def get_line_number(self):
        return self.line
##########################################
| aprovy/test-ng-pp | tests/3rdparty/testngppst/scripts/testngppstgen/TestCase.py | Python | lgpl-3.0 | 3,550 |
from django.contrib import admin

from . import models

# Expose every mediavault model in the Django admin site.
for _model in (
    models.SharedItem,
    models.Album,
    models.Artist,
    models.AudioCodec,
    models.ItemAccessibility,
    models.ItemRating,
    models.ItemType,
    models.Suggestion,
    models.VideoCodec,
):
    admin.site.register(_model)
del _model
| iiitv/legbook-backend | mediavault/web/admin.py | Python | mit | 432 |
import glob
import os
import os.path
#import threading
import threading  # restored: server_init below subclasses threading.Thread
import time
from collections import Counter
'''
This file runs on the server.
It monitors new wifi data sent from individual app users and then writes each
user's location (a coordinate, calculated from hallway_cod.txt) to the
location folder on the server.
'''
class server_init(threading.Thread):
    """Thread wrapper intended to run the location-server loop.

    NOTE(review): ``import threading`` is commented out at the top of the
    file, so this class definition raises NameError at import time unless
    that import is restored.  run() is also an empty stub, so starting the
    thread does nothing; the real work happens in the module-level loop
    below this class.
    """
    def __init__(self):
        threading.Thread.__init__(self)
    def run(self):
        # Stub: no threaded behaviour implemented.
        pass
# Load the hallway coordinate table once: each line is "name,x,y".
# NOTE(review): this handle is never closed.
crd = open('/home/student/hallway_cod.txt','r')
crd_base = [x.strip() for x in crd.readlines()]
while(1):
    # Reference fingerprint data: each line is "ap_name,signal,location".
    file_open = open('/home/student/data_filtered.txt','r')
    # One .txt file per app user in the current working directory.
    input_path = glob.glob('*.txt')
    all_data = [x.strip() for x in file_open.readlines()]
    for files in input_path:
        cad = []  # candidate locations matched for this user
        i = 0     # number of accepted (strong-enough) readings, capped at 20
        f = open(files,'r')
        input_info = [x.strip() for x in f.readlines()]
        for element in input_info:
            if (i >= 20): break
            # User reading: "ap_name,signal_strength".
            idata = element.split(',')
            iname = idata[0]
            ivalue = int(idata[1])
            # Skip weak signals below -85 dBm.
            if (ivalue <= -85.0):
                continue
            i = i + 1
            # Match the reading against every reference entry within +/-2 dBm.
            for base in all_data:
                jdata = base.split(',')
                jname = jdata[0]
                jvalue = float(jdata[1])
                if (iname == jname and (jvalue-2 <= ivalue and ivalue-2 <= jvalue )):
                    cad.append(jdata[2])
        # Majority vote over matched locations.
        # NOTE(review): Counter(cad).most_common(1) is empty when nothing
        # matched, so result[0] below raises IndexError in that case.
        result = [loc for loc, count in Counter(cad).most_common(1)]
        for ref in crd_base:
            name = ref.split(',')
            # print(result[0])
            # print(name[0])
            # we want to creat new file or each individual user and keep update(override) the result
            # so we need to use try with mode 'w'/write and except with mode 'a'/append(creat new file)
            if (name[0] == result[0]):
                try:
                    o = open('/home/student/location/'+files,'w')
                    o.write(name[1] + ',' + name[2])
                except:
                    # NOTE(review): bare except hides real errors here.
                    o = open('/home/student/location/'+files,'a')
                    o.write(name[1] + ',' + name[2])
        # else:
        #     print("why not equal")
        f.close()
        # NOTE(review): 'o' is unbound (NameError) when no coordinate
        # matched for this file; it may also refer to a previous file's handle.
        o.close()
    file_open.close()
    # Poll for new data every 5 seconds.
    time.sleep(5)
| tikael1011/aiflee_python | Server/server_init.py | Python | gpl-3.0 | 1,867 |
"""
Fixer that adds ``from builtins import object`` if there is a line
like this:
class Foo(object):
"""
from lib2to3 import fixer_base
from libfuturize.fixer_util import touch_import_top
class FixObject(fixer_base.BaseFix):
    """Fixer that matches ``class Foo(object):`` definitions.

    For every class whose sole base is ``object``, it ensures the module
    starts with ``from builtins import object``.
    """
    # lib2to3 pattern: a classdef whose only base is the bare name 'object'.
    PATTERN = u"classdef< 'class' NAME '(' name='object' ')' colon=':' any >"

    def transform(self, node, results):
        # Idempotent: touch_import_top only adds the import if it is missing.
        touch_import_top(u'builtins', 'object', node)
| hughperkins/kgsgo-dataset-preprocessor | thirdparty/future/src/libfuturize/fixes/fix_object.py | Python | mpl-2.0 | 407 |
# -*- coding: utf-8 -*-
# Copyright: (c) 2019, Dag Wieers (@dagwieers) <dag@wieers.com>
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
"""This file implements the Kodi xbmc module, either using stubs or alternative functionality"""
# pylint: disable=invalid-name,no-self-use,too-many-branches,unused-argument
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import json
import time
import weakref
from xbmcextra import ADDON_ID, global_settings, import_language
from utils import to_unicode
try:  # Python 2
    basestring
except NameError:  # Python 3
    # Restore the Python 2 name so isinstance checks work on both versions.
    basestring = str  # pylint: disable=redefined-builtin

# Kodi log level names, indexed by the numeric constants below.
LOGLEVELS = ['Debug', 'Info', 'Notice', 'Warning', 'Error', 'Severe', 'Fatal', 'None']
LOGDEBUG = 0
LOGINFO = 1
LOGNOTICE = 2
LOGWARNING = 3
LOGERROR = 4
LOGSEVERE = 5
LOGFATAL = 6
LOGNONE = 7

# Canned answers for xbmc.getInfoLabel() lookups used in tests.
INFO_LABELS = {
    'Container.FolderPath': 'plugin://' + ADDON_ID + '/',
    'System.BuildVersion': '18.2',
}

# strftime formats returned for xbmc.getRegion() lookups.
REGIONS = {
    'datelong': '%A, %e %B %Y',
    'dateshort': '%Y-%m-%d',
}

# Global add-on settings and translated strings loaded from test fixtures.
settings = global_settings()
LANGUAGE = import_language(language=settings.get('locale.language'))
class Keyboard(object):  # pylint: disable=useless-object-inheritance
    """Stand-in for Kodi's ``xbmc.Keyboard`` on-screen keyboard."""

    def __init__(self, line='', heading=''):
        """Accept and ignore the initial text and heading."""

    def doModal(self, autoclose=0):
        """Pretend to show the dialog; returns immediately."""

    def getText(self):
        """Always report the canned text 'test'."""
        return 'test'

    def isConfirmed(self):
        """Always report that the user confirmed the dialog."""
        return True
class Monitor(object):  # pylint: disable=useless-object-inheritance
    """A stub implementation of the xbmc Monitor class"""

    # Weak references to every Monitor ever created, so notifications can be
    # broadcast to live instances without keeping them alive.
    _instances = set()

    def __init__(self, line='', heading=''):
        """Stub constructor: registers the new monitor in the class-wide registry"""
        self.iteration = 0
        Monitor._instances.add(weakref.ref(self))

    def abortRequested(self):
        """Stub abortRequested(): reports an abort on every fifth call"""
        self.iteration += 1
        print('Iteration: %s' % self.iteration)
        return not self.iteration % 5

    def waitForAbort(self, timeout=None):
        """Stub waitForAbort(): sleeps for *timeout*; any interruption counts as abort"""
        try:
            time.sleep(timeout)
        except (KeyboardInterrupt, Exception):  # pylint: disable=broad-except
            return True
        return False

    @classmethod
    def getinstances(cls):
        """Yield all live instances of this class, pruning dead weak references"""
        stale = set()
        for ref in cls._instances:
            instance = ref()
            if instance is None:
                stale.add(ref)
            else:
                yield instance
        cls._instances -= stale
class Player(object):  # pylint: disable=useless-object-inheritance
    """A stub implementation of the xbmc Player class"""

    def __init__(self):
        """Stub constructor: tracks how often isPlaying() has been polled"""
        self._count = 0

    def play(self, item='', listitem=None, windowed=False, startpos=-1):
        """Stub play(): accepts the Kodi signature, performs nothing"""

    def stop(self):
        """Stub stop(): performs nothing"""

    def getPlayingFile(self):
        """Stub getPlayingFile(): always reports the same fake path"""
        return '/foo/bar'

    def isPlaying(self):
        """Stub isPlaying(): reports playback four times out of five"""
        self._count += 1
        return self._count % 5 != 0

    def seekTime(self, seekTime):
        """Stub seekTime(): performs nothing"""

    def showSubtitles(self, bVisible):
        """Stub showSubtitles(): performs nothing"""

    def getTotalTime(self):
        """Stub getTotalTime(): always zero"""
        return 0

    def getTime(self):
        """Stub getTime(): always zero"""
        return 0

    def getVideoInfoTag(self):
        """Stub getVideoInfoTag(): hands back an empty VideoInfoTag"""
        return VideoInfoTag()
class PlayList(object):  # pylint: disable=useless-object-inheritance
    """A stub implementation of the xbmc PlayList class"""

    def __init__(self, playList):
        """Stub constructor; the playlist id is accepted and ignored"""

    def getposition(self):
        """Stub getposition(): always the first slot"""
        return 0

    def add(self, url, listitem=None, index=-1):
        """Stub add(): accepts the Kodi signature, performs nothing"""

    def size(self):
        """Stub size(): not implemented, implicitly returns None"""
class VideoInfoTag(object):  # pylint: disable=useless-object-inheritance
    """A stub implementation of the xbmc VideoInfoTag class"""

    def __init__(self):
        """Stub constructor"""

    def getSeason(self):
        """Stub getSeason(): always zero"""
        return 0

    def getEpisode(self):
        """Stub getEpisode(): always zero"""
        return 0

    def getTVShowTitle(self):
        """Stub getTVShowTitle(): always the empty string"""
        return ''

    def getPlayCount(self):
        """Stub getPlayCount(): always zero"""
        return 0

    def getRating(self):
        """Stub getRating(): always zero"""
        return 0
def executebuiltin(string, wait=False):  # pylint: disable=unused-argument
    """A stub implementation of the xbmc executebuiltin() function.

    Only validates the argument types; no builtin is actually executed.
    """
    assert isinstance(string, basestring)
    assert isinstance(wait, bool)
def executeJSONRPC(jsonrpccommand):
    """A reimplementation of the xbmc executeJSONRPC() function.

    Accepts a JSON-RPC command (or list of commands) as a JSON string and
    returns a canned JSON response string, mimicking the subset of Kodi's
    JSON-RPC API that the tests exercise.
    """
    assert isinstance(jsonrpccommand, basestring)
    command = json.loads(jsonrpccommand)
    # Handle a list of commands sequentially
    if isinstance(command, list):
        ret = []
        for action in command:
            ret.append(executeJSONRPC(json.dumps(action)))
        return json.dumps(ret)
    # Default response skeleton; method branches below override 'result'.
    ret = dict(id=command.get('id'), jsonrpc='2.0', result='OK')
    if command.get('method').startswith('Input'):
        pass
    elif command.get('method') == 'Player.Open':
        pass
    elif command.get('method') == 'Settings.GetSettingValue':
        # Answer from the module-level add-on settings fixture.
        key = command.get('params').get('setting')
        ret.update(result=dict(value=settings.get(key)))
    elif command.get('method') == 'Addons.GetAddonDetails':
        if command.get('params', {}).get('addonid') == 'script.module.inputstreamhelper':
            ret.update(result=dict(addon=dict(enabled='true', version='0.3.5')))
        else:
            ret.update(result=dict(addon=dict(enabled='true', version='1.2.3')))
    elif command.get('method') == 'Textures.GetTextures':
        ret.update(result=dict(textures=[dict(cachedurl="", imagehash="", lasthashcheck="", textureid=4837, url="")]))
    elif command.get('method') == 'Textures.RemoveTexture':
        pass
    elif command.get('method') == 'JSONRPC.NotifyAll':
        # Send a notification to all instances of subclasses
        for sub in Monitor.__subclasses__():
            for obj in sub.getinstances():
                obj.onNotification(
                    sender=command.get('params').get('sender'),
                    method=command.get('params').get('message'),
                    data=json.dumps(command.get('params').get('data')),
                )
    else:
        # Unknown method: log an error and return a JSON-RPC error object.
        log("executeJSONRPC does not implement method '{method}'".format(**command), LOGERROR)
        return json.dumps(dict(error=dict(code=-1, message='Not implemented'), id=command.get('id'), jsonrpc='2.0'))
    return json.dumps(ret)
def getCondVisibility(string):
    """A reimplementation of the xbmc getCondVisibility() function.

    Every condition is reported as true except the Android platform check.
    """
    assert isinstance(string, basestring)
    if string == 'system.platform.android':
        return False
    return True
def getInfoLabel(key):
    """A reimplementation of the xbmc getInfoLabel() function.

    Looks the label up in the canned INFO_LABELS mapping; unknown keys
    yield None.
    """
    assert isinstance(key, basestring)
    return INFO_LABELS.get(key)
def getLocalizedString(msgctxt):
    """A reimplementation of the xbmc getLocalizedString() function.

    Scans the loaded PO catalogue (LANGUAGE) for an entry whose msgctxt is
    '#<msgctxt>' and returns its translation (or untranslated msgid).
    """
    assert isinstance(msgctxt, int)
    for entry in LANGUAGE:
        if entry.msgctxt == '#%s' % msgctxt:
            return entry.msgstr or entry.msgid
    # Ids >= 30000 are add-on strings; missing ones indicate a catalogue bug.
    if int(msgctxt) >= 30000:
        log('Unable to translate #{msgctxt}'.format(msgctxt=msgctxt), LOGERROR)
    return '<Untranslated>'
def getRegion(key):
    """A reimplementation of the xbmc getRegion() function.

    Returns the canned strftime format for the region setting *key*;
    unknown keys yield None.
    """
    assert isinstance(key, basestring)
    return REGIONS.get(key)
def log(msg, level=0):
    """A reimplementation of the xbmc log() function.

    Prints the message with ANSI colours matching the severity; fatal
    levels (Severe/Fatal and above) raise instead of merely printing.
    """
    assert isinstance(msg, basestring)
    assert isinstance(level, int)
    name = LOGLEVELS[level]
    color1 = '\033[32;1m'
    color2 = '\033[32;0m'
    if level >= 4:  # Error and worse: red
        color1 = '\033[31;1m'
        if level >= 6:  # Fatal/None: blow up so the test suite notices
            raise Exception(msg)
    elif level in (2, 3):  # Notice/Warning: yellow
        color1 = '\033[33;1m'
    elif level == 0:  # Debug: dim
        color2 = '\033[30;1m'
    print('{color1}{name}: {color2}{msg}\033[39;0m'.format(name=name, color1=color1, color2=color2, msg=to_unicode(msg)))
def sleep(timemillis):
    """A reimplementation of the xbmc sleep() function (milliseconds)."""
    assert isinstance(timemillis, int)
    time.sleep(timemillis / 1000.0)
def translatePath(path):
    """A stub implementation of the xbmc translatePath() function.

    Maps Kodi's special:// prefixes onto directories below the test tree;
    any other path is returned untouched.
    """
    assert isinstance(path, basestring)
    mappings = [
        ('special://home', 'tests/'),
        ('special://masterprofile', 'tests/userdata/'),
        ('special://profile', 'tests/userdata/'),
        ('special://userdata', 'tests/userdata/'),
    ]
    for prefix, subdir in mappings:
        if path.startswith(prefix):
            return path.replace(prefix, os.path.join(os.getcwd(), subdir))
    return path
| pietje666/plugin.video.vrt.nu | tests/xbmc.py | Python | gpl-3.0 | 10,771 |
#!/usr/bin/env python
from __future__ import print_function
import sip
# Force the PyQt4 v1 API: QString remains a distinct class (with methods such
# as .length() and .resize()), which LengthValidator below relies on.
sip.setapi('QString', 1)
import sys
import copy
import configobj
import validate
from PyQt4 import QtGui
from PyQt4 import QtCore
class Option(object):
    """Description and value of a single configuration option.

    Ties together the option's name, the configobj section holding its current
    value, its validate-spec type information, the validate check function and
    the factory used to build the matching Qt widget.
    """
    def __init__(self, name, section, type, args, kwargs, default, comment, widget_maker, check):
        # name/section locate the stored value; type/args/kwargs/default come
        # from the configspec; check validates candidate values; widget_maker
        # builds the editing widget for this option.
        self.name = name
        self.section = section
        self.type = type
        self.args = args
        self.kwargs = kwargs
        self.default = default
        self.comment = comment
        self.check = check
        self.widget_maker = widget_maker
    def get(self):
        """Get current value of the option"""
        return self.section[self.name]
    def set(self, value):
        """Validate *value* and store it; invalid values are silently ignored"""
        # Workaround for problem in validate with lists from string
        value = str(value) # Start with a normal string
        if self.type.endswith('list'):
            value = [x.strip() for x in value.split(',')]
        try:
            self.section[self.name] = self.check(value, *self.args, **self.kwargs)
        except Exception:  # narrowed from bare except: don't swallow SystemExit/KeyboardInterrupt
            pass
    def __repr__(self):
        """Convert option to string for debugging purposes"""
        return 'Option(%s,%s,%s,%s,%s,%s,%s)'%(self.name, self.section, self.type, self.args, self.kwargs, self.default, self.comment)
    def restoreDefault(self):
        """Change option value to the default value"""
        self.section.restore_default(self.name)
    def isDefault(self):
        """Check whether the option has the default value"""
        return self.name in self.section.defaults
    def widget(self):
        """Create the Qt widget representing this option"""
        return self.widget_maker(self, *self.args, **self.kwargs)
class ConfigPage(QtGui.QWidget):
    """Container for widgets describing options in a section.

    Builds one form row per scalar option of *section*, re-emitting each
    widget's optionChanged signal through its own optionChanged signal.
    """
    def __init__(self, section, item, parent=None):
        QtGui.QWidget.__init__(self, parent)
        layout = QtGui.QFormLayout(self)
        for option in [section[x] for x in section.scalars]:
            valueWidget = option.widget()
            valueWidget.optionChanged.connect(self.optionChanged.emit)
            # Human-friendly label: underscores to spaces, capitalized.
            option_title = option.name.replace('_',' ')
            option_title = option_title[0].upper() + option_title[1:]
            layout.addRow(option_title, valueWidget)
        self.item = item # Store SectionBrowser item corresponding to this page
        self.conf = section # Store configuration section corresponding to this page
    optionChanged = QtCore.pyqtSignal(Option) # Chain signal upwards
    def restoreDefault(self):
        """Restore default value to all widgets on the page"""
        for widget in [self.layout().itemAt(i) for i in range(self.layout().count())]:
            try:
                widget.widget().restoreDefault()
            except AttributeError: # Skip widgets that can't be restored (e.g. labels)
                pass
class SectionBrowser(QtGui.QWidget):
    """TreeView browser of configuration sections. Also manages creating of config pages. It's a bit messy."""
    def __init__(self, conf, validator, parent=None):
        QtGui.QWidget.__init__(self, parent)
        layout = QtGui.QVBoxLayout(self)
        self.validator = validator
        # Create treeview
        self.tree = QtGui.QTreeWidget()
        self.tree.header().hide()
        self.tree.currentItemChanged.connect(lambda new, old: self.currentItemChanged.emit(new))
        layout.addWidget(self.tree)
        # Box that displays add/remove section buttons
        buttonBox = QtGui.QWidget()
        buttonLayout = QtGui.QHBoxLayout(buttonBox)
        self.addButton = QtGui.QPushButton('Add section')
        self.addButton.setIcon(QtGui.QIcon.fromTheme('list-add'))
        self.addButton.setEnabled(False)
        self.addButton.clicked.connect(lambda: self.addEmptySection(self.tree.currentItem()))
        buttonLayout.addWidget(self.addButton)
        self.removeButton = QtGui.QPushButton('Remove section')
        self.removeButton.setIcon(QtGui.QIcon.fromTheme('list-remove'))
        self.removeButton.setEnabled(False)
        buttonLayout.addWidget(self.removeButton)
        self.removeButton.clicked.connect(lambda: self.removeSection(self.tree.currentItem()))
        layout.addWidget(buttonBox)
        self.tree.currentItemChanged.connect(self.activateButtons)
        self.conf = conf # Store configuration
        self.page_lookup = {} # Mapping from treeview item id to configuration page
    # A few signals
    currentItemChanged = QtCore.pyqtSignal(QtGui.QTreeWidgetItem)
    pageAdded = QtCore.pyqtSignal(ConfigPage)
    pageRemoved = QtCore.pyqtSignal(ConfigPage)
    sectionAdded = QtCore.pyqtSignal(configobj.Section)
    sectionRemoved = QtCore.pyqtSignal(configobj.Section)
    def addSection(self, newsection):
        """Take a configuration section and add corresponding page and treeview item"""
        if newsection.name == None: # Top-level
            item = QtGui.QTreeWidgetItem(self.tree, ['Root'])
            self.tree.addTopLevelItem(item)
        else:
            # tree_item is attached to each section by this method on an
            # earlier call, so the parent item is always available here.
            parent_item = newsection.parent.tree_item
            item = QtGui.QTreeWidgetItem(parent_item, [newsection.name])
        item.setExpanded(True)
        page = ConfigPage(newsection, item)
        self.pageAdded.emit(page)
        newsection.tree_item = item
        self.page_lookup[id(item)] = page
        pages = [page]
        # Recurse into subsections, collecting all created pages.
        for section in [newsection[x] for x in newsection.sections]:
            pages.extend(self.addSection(section))
        return pages
    def activateButtons(self, item):
        """Activate add/remove section buttons if appropriate"""
        page = self.page_lookup[id(item)]
        conf = page.conf
        self.addButton.setEnabled(conf.many)
        self.removeButton.setEnabled(conf.optional)
    def addEmptySection(self, item):
        """Add a new empty section based on the spec of the parent section corresponding to item"""
        parent = self.page_lookup[id(item)].conf # Load combined config for page matching selected item
        spec = parent.spec['__many__'] # Get spec
        conf = configobj.ConfigObj(configspec=spec)
        conf.validate(self.validator) # Create an empty config matching spec
        # NOTE(review): SectionBrowser never assigns self.type_mapping
        # (ConfigWindow defines one) — this line likely raises AttributeError
        # when the button is used; confirm against a running instance.
        combined = merge_spec(conf, spec, self.type_mapping) # Combine spec and new config
        name, ok = QtGui.QInputDialog.getText(self, 'Add new section', 'Section name:')
        if ok:
            name = str(name)
            combined.name = name
            combined.parent = parent
            parent[name] = combined
            # Copy new config information into old config
            # Workaround for ConfigObj issues
            parent.conf[name] = {}
            def fix_depth(section):
                # Recompute section depths after re-parenting.
                section.depth = section.parent.depth + 1
                [fix_depth(s) for s in section.sections]
            for key in conf:
                parent.conf[name][key] = conf[key]
                conf[key].parent = parent.conf[name][key]
                if isinstance(conf[key], configobj.Section):
                    fix_depth(conf[key])
            self.addSection(combined)
            self.sectionAdded.emit(combined)
    def removeSection(self, item):
        """Delete configuration section corresponding to item"""
        item.parent().removeChild(item)
        page = self.page_lookup[id(item)]
        self.sectionRemoved.emit(page.conf)
        # Remove the section from the underlying (non-combined) configuration.
        del page.conf.conf.parent[str(item.text(0))]
        self.pageRemoved.emit(page)
        del self.page_lookup[id(item)]
class MyScrollArea(QtGui.QScrollArea):
    """QtGui.QScrollArea which has a more sensible sizeHint.

    Prefers slightly more than the contained widget's own size hint, so the
    scroll bars usually stay hidden at the default window size.
    """
    def __init__(self, parent=None):
        QtGui.QScrollArea.__init__(self, parent)
    def sizeHint(self):
        if self.widget() != None:
            return self.widget().sizeHint()*1.1
        else:
            return QtCore.QSize(10, 10)
class MyWidget(QtGui.QWidget):
    """Base for widget describing an option.

    Subclasses create their main editing widget in __init__ and then call
    init(), which wires up validation, the warning icon and the
    restore-default button. NOTE: self.layout shadows QWidget.layout().
    """
    def __init__(self, option, parent = None):
        QtGui.QWidget.__init__(self, parent)
        self.layout = QtGui.QHBoxLayout()
        self.setLayout(self.layout)
        self.option = option
        # While True, setValue() only updates the display without writing
        # the value back to the configuration.
        self.onlywidget = False
    def init(self, option, main_widget, change_signal):
        """Initialization that has to be performed after some actions made in __init__ in derived classes"""
        self.main_widget = main_widget
        self.main_widget.setSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Preferred)
        self.layout.addWidget(main_widget)
        self.myconnect(change_signal, self.setValue)
        self.myconnect(change_signal, self.validate)
        # Add validity icon
        self.isValidIcon = QtGui.QLabel()
        self.isValidIcon.setScaledContents(True)
        self.isValidIcon.setPixmap(QtGui.QApplication.style().standardIcon(QtGui.QStyle.SP_MessageBoxWarning).pixmap(256, 256))
        self.isValidIcon.setSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
        self.isValidIcon.setMaximumHeight(self.main_widget.height()*0.8)
        self.isValidIcon.setMaximumWidth(self.main_widget.height()*0.8)
        self.isValidIcon.hide()
        self.layout.addWidget(self.isValidIcon)
        # Add button to restore default value
        self.restoreDefaultButton = QtGui.QPushButton(self.style().standardIcon(QtGui.QStyle.SP_DialogResetButton), '')
        self.restoreDefaultButton.setSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Maximum)
        self.restoreDefaultButton.clicked.connect(self.restoreDefault)
        self.restoreDefaultButton.setEnabled(self.option.default != None)
        self.restoreDefaultButton.setToolTip('Restore default value')
        self.layout.addWidget(self.restoreDefaultButton)
        if option.comment:
            main_widget.setToolTip(option.comment)
        # Set displayed value if possible
        try:
            self.option.get()
        except KeyError:
            return
        # No actual change has happened, so prevent the new value from being written to config
        self.onlywidget = True
        self.updateDisplay()
        self.onlywidget = False
    def validate(self, value):
        """Check if the entered value is valid according to the spec"""
        value = str(value) # Start with a normal string
        if self.option.type.endswith('list'):
            value = [x.strip() for x in value.split(',')]
        try:
            self.option.check(value, *self.option.args, **self.option.kwargs)
        except Exception as e:
            # Invalid: show the warning icon with the validator's message.
            self.isValidIcon.setToolTip(str(e))
            self.isValidIcon.show()
            return
        self.isValidIcon.hide()
    def myconnect(self, signal, func):
        """Helper to connect to both new and old-style signals"""
        if isinstance(signal, str):
            QtCore.QObject.connect(self.main_widget, QtCore.SIGNAL(signal), func)
        else:
            signal.connect(func)
    def setIsDefault(self):
        """Tell widget that it represents a default value"""
        # Gray italic text marks a defaulted value in any supported widget type.
        style = ''
        for widget in ['QCheckBox', 'QSpinBox', 'QDoubleSpinBox', 'QComboBox', 'QLineEdit']:
            style += '%s {color: gray; font-style: italic}\n'%widget
        self.main_widget.setStyleSheet(style)
        self.restoreDefaultButton.setEnabled(False)
    def unsetIsDefault(self):
        """Tell widget that it no longer represents a default value"""
        self.main_widget.setStyleSheet('')
        self.restoreDefaultButton.setEnabled(self.option.default != None)
    def restoreDefault(self):
        """Reset option to default value"""
        try:
            self.option.restoreDefault()
        except KeyError:
            return
        self.onlywidget = True
        self.updateDisplay()
        self.onlywidget = False
        self.setIsDefault()
        self.optionChanged.emit(self.option)
    def updateDisplay(self):
        """Update widget after a change in the options"""
        if self.option.isDefault():
            self.setIsDefault()
    optionChanged = QtCore.pyqtSignal(Option)
    def setValue(self, value):
        """Set option value to value"""
        if not self.onlywidget:
            self.option.set(value)
            self.optionChanged.emit(self.option)
            self.unsetIsDefault()
# Validator to check string length
class LengthValidator(QtGui.QValidator):
    """Validator which enforces string length limits.

    Relies on the PyQt4 v1 QString API (sip.setapi('QString', 1) at module
    top): *input* is a QString with .length() and .resize() methods.
    """
    def __init__(self, min=0, max=None, parent = None):
        QtGui.QValidator.__init__(self, parent)
        self.min = min
        self.max = max
    def fixup(self, input):
        # Pad or truncate the string in place to fit within the limits.
        if self.min and input.length() < self.min:
            input.resize(self.min)
        elif self.max and input.length() > self.max:
            input.resize(self.max)
    def validate(self, input, pos):
        if self.min and input.length() < self.min:
            return (QtGui.QValidator.Invalid, pos)
        elif self.max and input.length() > self.max:
            return (QtGui.QValidator.Invalid, pos)
        else:
            return (QtGui.QValidator.Acceptable, pos)
class MyLineEdit(MyWidget):
    """Widget representing a text-like option.

    Optional *min*/*max* bound the permitted string length via LengthValidator.
    """
    def __init__(self, option, min = None, max = None, parent = None):
        MyWidget.__init__(self, option, parent)
        main_widget = QtGui.QLineEdit(self)
        # Limits arrive from the spec as strings; coerce before validating.
        if min != None:
            min = int(min)
        if max != None:
            max = int(max)
        main_widget.setValidator(LengthValidator(min, max))
        self.init(option, main_widget, main_widget.textChanged)
    def updateDisplay(self):
        MyWidget.updateDisplay(self)
        self.main_widget.setText(str(self.option.get()))
class MyIpEdit(MyLineEdit):
    """Widget representing an IP address (line edit with a dotted-quad input mask)"""
    def __init__(self, option, parent = None):
        MyLineEdit.__init__(self, option, parent)
        self.main_widget.setInputMask('000.000.000.000')
        if option.get() == option.default: # Seems like a bug in QtGui.QLineEdit. If setInputMask is used, the stylesheet must be set again
            self.setIsDefault()
class MyListEdit(MyWidget):
    """Widget representing a list as comma-separated text.

    *min*/*max* are accepted for signature compatibility with the other
    widget factories but are currently unused.
    """
    def __init__(self, option, min = None, max = None, parent = None):
        MyWidget.__init__(self, option, parent)
        main_widget = QtGui.QLineEdit(self)
        self.init(option, main_widget, main_widget.textChanged)
    def updateDisplay(self):
        MyWidget.updateDisplay(self)
        # Render the stored list back as "a, b, c".
        self.main_widget.setText(', '.join([str(x) for x in self.option.get()]))
class MyCheckBox(MyWidget):
    """Widget representing a boolean option"""
    def __init__(self, option, parent=None):
        MyWidget.__init__(self, option, parent)
        main_widget = QtGui.QCheckBox(self)
        self.init(option, main_widget, main_widget.toggled)
    def updateDisplay(self):
        MyWidget.updateDisplay(self)
        # validate.bool_dict maps truthy/falsy spellings ('true', 'on', ...)
        # to bool; assumes the stored value is one of its keys — TODO confirm.
        self.main_widget.setChecked(validate.bool_dict[self.option.get()])
class MyComboBox(MyWidget):
    """Widget representing a multiple-choice option (rendered as a combo box).

    *options* is the iterable of permitted values from the spec.
    """
    def __init__(self, option, options=(), parent=None):
        MyWidget.__init__(self, option, parent)
        main_widget = QtGui.QComboBox(self)
        for value in options:
            main_widget.addItem(str(value))
        # Old-style signal name: currentIndexChanged(QString) delivers the text.
        self.init(option, main_widget, 'currentIndexChanged(QString)')
    def updateDisplay(self):  # fixed: previous 'self = False' default was a typo
        MyWidget.updateDisplay(self)
        if self.option.get() != None:
            self.main_widget.setCurrentIndex(self.main_widget.findText(self.option.get()))
class SliderWithLineEdit(QtGui.QWidget):
    """Slider which displays its current value in a box next to it.

    Float values are stored scaled by 10**decimals on the (integer) slider;
    the line edit shows the real value. The two stay synchronized through
    setSliderValue/setEditValue, with self.reaction breaking the feedback
    loop between their change signals.
    """
    def __init__(self, type, min, max, parent = None):
        QtGui.QWidget.__init__(self, parent)
        if type == 'float':
            self.decimals = 2
        else:
            self.decimals = 0
        self.type = type
        self.layout = QtGui.QHBoxLayout()
        self.setLayout(self.layout)
        self.slider = QtGui.QSlider(QtCore.Qt.Horizontal)
        self.slider.setSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Preferred)
        # Scale float limits into the slider's integer domain.
        if type == 'float':
            min = float(min)*10**self.decimals
            max = float(max)*10**self.decimals
        else:
            min = int(min)
            max = int(max)
        self.slider.setMinimum(min)
        self.slider.setMaximum(max)
        self.layout.addWidget(self.slider)
        self.edit = QtGui.QLineEdit(str(self.slider.value()))
        self.edit.setSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Preferred)
        if type == 'float':
            self.edit.setValidator(QtGui.QDoubleValidator(min, max, self.decimals, None)) # Provide parent explicitly (QtGui.QTBUG-16100)
        else:
            self.edit.setValidator(QtGui.QIntValidator(min, max, None)) # Provide parent explicitly (QtGui.QTBUG-16100)
        self.layout.addWidget(self.edit)
        # Size the edit box to the widest representable value.
        metrics = QtGui.QFontMetrics(QtGui.QApplication.font())
        if type == 'float':
            self.edit.setMaximumWidth(metrics.width(len(str(max))*"8"+"."+"8"))
        else:
            self.edit.setMaximumWidth(metrics.width(len(str(max))*"8"+"8"))
        self.edit.textChanged.connect(self.setSliderValue)
        self.slider.valueChanged.connect(self.setEditValue)
        self.reaction = False
    def setSliderValue(self, s):
        """Push the line edit's text *s* onto the slider (scaled for floats)"""
        if self.reaction: # Prevent lineedit change from triggering this
            self.reaction = False
            return
        self.reaction = True
        try:
            if self.type == 'float':
                self.slider.setValue((round(float(s)*10**self.decimals)))
            else:
                self.slider.setValue(int(s))
        except TypeError:
            pass
        except ValueError:
            # An emptied edit box snaps the slider to its minimum.
            if s == '':
                self.slider.setValue(self.slider.minimum())
    def setEditValue(self, i):
        """Push the slider's integer value *i* into the line edit (unscaled for floats)"""
        if self.reaction: # Prevent slider change from triggering this
            self.reaction = False
            return
        self.reaction = True
        if self.type == 'float':
            format = '%.'+str(self.decimals)+'f'
            self.edit.setText(format%(float(i)/float(10**self.decimals)))
        else:
            self.edit.setText(str(i))
    def setValue(self, value):
        """Programmatically set both the slider and the edit box to *value*"""
        self.setSliderValue(str(value))
        self.edit.setText(str(value))
class MySlider(MyWidget):
    """Widget representing a number with min and max specified.

    Wraps SliderWithLineEdit; change notifications come from the embedded
    line edit so typed and dragged values are handled alike.
    """
    def __init__(self, option, min=0, max=100, parent=None):
        MyWidget.__init__(self, option, parent)
        main_widget = SliderWithLineEdit(option.type, min, max)
        self.init(option, main_widget, main_widget.edit.textChanged)
    def updateDisplay(self):
        MyWidget.updateDisplay(self)
        self.main_widget.setValue(self.option.get())
class MySpinBox(MyWidget):
    """Widget representing a number with min or max unspecified.

    Uses a QDoubleSpinBox for 'float' options and a QSpinBox otherwise;
    applies whichever bounds are given.
    """
    def __init__(self, option, min=None, max=None, parent=None):
        self.decimals = 2  # digits shown for float options
        MyWidget.__init__(self, option, parent)
        if option.type == 'float':
            main_widget = QtGui.QDoubleSpinBox()
            main_widget.setDecimals(self.decimals)
            conv = float
        else:
            main_widget = QtGui.QSpinBox()
            conv = int
        # Normalize the spec-provided default (a string) to the numeric type.
        # (Previously the int branch converted the default a second time.)
        if option.default != None:
            option.default = conv(option.default)
        if min != None:
            main_widget.setMinimum(conv(min))
        if max != None:
            main_widget.setMaximum(conv(max))
        self.init(option, main_widget, main_widget.valueChanged)
    def updateDisplay(self):
        MyWidget.updateDisplay(self)
        if self.option.get() != None:
            self.main_widget.setValue(self.option.get())
def create_widget_integer(option, min=None, max=None):
    """Create widget for integer option: slider when fully bounded, spin box otherwise"""
    if min is not None and max is not None:
        return MySlider(option, min, max)
    return MySpinBox(option, min, max)
def create_widget_string(option, min=None, max=None):
    """Create widget for string option (line edit with optional length limits)"""
    return MyLineEdit(option, min, max)
def create_widget_float(option, min=None, max=None):
    """Create widget for float option: slider when fully bounded, spin box otherwise"""
    if min is not None and max is not None:
        return MySlider(option, min, max)
    return MySpinBox(option, min, max)
def create_widget_ip_addr(option):
    """Create widget for ip_addr option (masked line edit)"""
    return MyIpEdit(option)
def create_widget_boolean(option):
    """Create widget for boolean option (check box)"""
    return MyCheckBox(option)
def create_widget_option(option, *options):
    """Create widget for option option (combo box over the permitted values)"""
    return MyComboBox(option, options)
def create_widget_list(option, min=None, max=None):
    """Create widget for any kind of list option (comma-separated line edit)"""
    return MyListEdit(option, min, max)
validator = validate.Validator()
class ConfigWindow(QtGui.QMainWindow):
    """Window which contains controls for making changes to a ConfigObj"""
    APPLY_IMMEDIATELY = 1 # GNOME style, apply settings immediately
    APPLY_OK = 2 # KDE style, apply settings when OK is pressed
    # Maps each validate spec type to (widget factory, check function).
    type_mapping = {'integer':(create_widget_integer, validator.functions['integer']),
                    'float':(create_widget_float, validator.functions['float']),
                    'boolean':(create_widget_boolean, validator.functions['boolean']),
                    'string':(create_widget_string, validator.functions['string']),
                    'ip_addr':(create_widget_ip_addr, validator.functions['ip_addr']),
                    'list':(create_widget_list, validator.functions['list']),
                    'force_list':(create_widget_list, validator.functions['force_list']),
                    'tuple':(create_widget_list, validator.functions['tuple']),
                    'int_list':(create_widget_list, validator.functions['int_list']),
                    'float_list':(create_widget_list, validator.functions['float_list']),
                    'bool_list':(create_widget_list, validator.functions['bool_list']),
                    'string_list':(create_widget_list, validator.functions['string_list']),
                    'ip_addr_list':(create_widget_list, validator.functions['ip_addr_list']),
                    'mixed_list':(create_widget_list, validator.functions['mixed_list']),
                    'pass':(create_widget_string, validator.functions['pass']), # BUG: This will lead to a string always being saved back
                    'option':(create_widget_option, validator.functions['option'])}
    def __init__(self, conf, spec, title = 'Configure', when_apply = APPLY_IMMEDIATELY, debug = False, type_mapping=None, parent = None):
        QtGui.QMainWindow.__init__(self, parent)
        self.when_apply = when_apply
        # NOTE(review): this aliases (and update() mutates) the shared class
        # attribute rather than a per-instance copy — confirm intended.
        self.type_mapping = ConfigWindow.type_mapping
        if type_mapping != None:
            self.type_mapping.update(type_mapping)
        self.validator = validate.Validator()
        res = conf.validate(self.validator, preserve_errors=True)  # result currently unused
        # Make changes to a copy of the original conf if needed
        if when_apply != ConfigWindow.APPLY_IMMEDIATELY:
            self.original_conf = conf
            conf = copy.deepcopy(conf)
        else:
            self.original_conf = conf
        self.conf = conf
        self.setWindowTitle(title)
        options = merge_spec(conf, spec, self.type_mapping)
        self.options = options
        main = QtGui.QWidget()
        layout = QtGui.QVBoxLayout(main)
        self.setCentralWidget(main)
        splitter = QtGui.QSplitter()
        layout.addWidget(splitter)
        self.splitter = splitter
        browser = SectionBrowser(conf, self.validator)
        browser.currentItemChanged.connect(self.changePage)
        browser.pageAdded.connect(self.addPage)
        browser.pageRemoved.connect(self.removePage)
        if when_apply == ConfigWindow.APPLY_IMMEDIATELY:
            browser.sectionAdded.connect(self.sectionAdded.emit)
            browser.sectionRemoved.connect(self.sectionRemoved.emit)
        if spec.sections != []: # Sections are possible
            splitter.addWidget(browser)
        if when_apply == ConfigWindow.APPLY_IMMEDIATELY:
            buttons = QtGui.QDialogButtonBox(QtGui.QDialogButtonBox.Ok | QtGui.QDialogButtonBox.RestoreDefaults)
        elif when_apply == ConfigWindow.APPLY_OK:
            buttons = QtGui.QDialogButtonBox(QtGui.QDialogButtonBox.Ok | QtGui.QDialogButtonBox.Cancel | QtGui.QDialogButtonBox.RestoreDefaults)
        buttons.button(QtGui.QDialogButtonBox.RestoreDefaults).clicked.connect(self.resetAll)
        buttons.button(QtGui.QDialogButtonBox.RestoreDefaults).setIcon(QtGui.QApplication.style().standardIcon(QtGui.QStyle.SP_DialogResetButton))
        if debug: # Show button to print current config as seen from outside
            dump_config = QtGui.QPushButton('Dump')
            buttons.addButton(dump_config, QtGui.QDialogButtonBox.HelpRole)
            def dump():
                print(self.original_conf)
            dump_config.clicked.connect(dump)
        buttons.accepted.connect(self.close)
        buttons.accepted.connect(self.updateOriginalConf)
        buttons.rejected.connect(self.close)
        layout.addWidget(buttons)
        configArea = MyScrollArea()
        self.configArea = configArea
        splitter.addWidget(configArea)
        splitter.setStretchFactor(1, 2)
        stacked = QtGui.QStackedWidget()
        configArea.setWidget(stacked)
        # NOTE(review): configArea is added to the splitter twice (also a few
        # lines above) — looks redundant; confirm before removing.
        splitter.addWidget(configArea)
        configArea.setWidgetResizable(True)
        self.stacked = stacked
        self.pages = {}
        pages = browser.addSection(options)
    optionChanged = QtCore.pyqtSignal(Option)
    sectionAdded = QtCore.pyqtSignal(configobj.Section)
    sectionRemoved = QtCore.pyqtSignal(configobj.Section)
    def changePage(self, newItem):
        """Show the stacked-widget page matching the newly selected tree item"""
        index = self.pages[id(newItem)]
        self.stacked.setCurrentIndex(index)
    def updateOriginalConf(self):
        """In APPLY_OK mode, copy changes from the working copy back to the
        original configuration, emitting change signals along the way."""
        if self.when_apply != ConfigWindow.APPLY_IMMEDIATELY: # Check what has changed
            def update(new, old, newly_added):
                added = [x for x in new.sections if x not in old.sections]
                for section in added:
                    if not newly_added:
                        self.sectionAdded.emit(new[section])
                    old[section] = {}
                removed = [x for x in old.sections if x not in new.conf.sections]
                for section in removed:
                    # NOTE(review): new[section] for a removed section looks
                    # like it would raise KeyError here — confirm.
                    self.sectionRemoved.emit(new[section])
                    del old[section]
                for scalar in new.scalars:
                    # New section
                    if not scalar in old.scalars:
                        if not new[scalar].isDefault():
                            try:
                                old[scalar] = new[scalar].get()
                                self.optionChanged.emit(new[scalar])
                            except KeyError:
                                continue
                    else: # Old section
                        try:
                            if new[scalar].get() != old[scalar]:
                                self.optionChanged.emit(new[scalar])
                        except KeyError:
                            continue
                        if not new[scalar].isDefault():
                            old[scalar] = new[scalar].get()
                        else:
                            old.restore_default(scalar)
                for section in [x for x in new.sections]:
                    try:
                        update(new[section],old[section], newly_added or section in added)
                    except KeyError: # Section was removed
                        continue
            update(self.options,self.original_conf,False)
    def resetAll(self):
        """Restore defaults on every page"""
        for page in [self.stacked.widget(i) for i in range(self.stacked.count())]:
            page.restoreDefault()
    def addPage(self, page):
        """Register a newly created ConfigPage with the stacked widget"""
        self.pages[id(page.item)] = self.stacked.addWidget(page)
        if self.when_apply == ConfigWindow.APPLY_IMMEDIATELY:
            page.optionChanged.connect(self.optionChanged.emit)
    def removePage(self, page):
        """Remove a ConfigPage from the stacked widget"""
        # NOTE(review): storing removeWidget's return value right before
        # deleting the key is a no-op — confirm before simplifying.
        self.pages[id(page.item)] = self.stacked.removeWidget(page)
        del self.pages[id(page.item)]
def merge_spec(config, spec, type_mapping):
    """Combine config and spec into one tree in the form of Option objects.

    Returns a ConfigObj mirroring *config*'s section tree where each scalar
    is replaced by an Option built from the corresponding spec entry and
    *type_mapping* (spec type -> (widget factory, check function)).
    """
    combined = configobj.ConfigObj()
    # optional: this section matches a '__many__' spec and may be removed;
    # many: this section's spec allows adding '__many__' subsections.
    combined.optional = '__many__' in spec.parent and spec != spec.parent
    combined.many = '__many__' in spec
    # Store original conf and spec
    combined.conf = config
    combined.spec = spec
    # Recursively combine sections
    for section in config.sections:
        if section in spec:
            combined[section] = merge_spec(config[section], spec[section], type_mapping)
        elif '__many__' in spec:
            combined[section] = merge_spec(config[section], spec['__many__'], type_mapping)
        combined[section].name = section
        combined[section].parent = combined
    # Combine individual options
    for option in spec.scalars:
        comment = spec.inline_comments[option]
        if comment and comment.startswith('#'):
            comment = comment[1:].strip()
        fun_name, fun_args, fun_kwargs, default = validate.Validator()._parse_with_caching(spec[option]) # WARNING: Uses unofficial method!
        combined[option] = Option(option, config, fun_name, fun_args, fun_kwargs, default, comment, type_mapping[fun_name][0], type_mapping[fun_name][1])
    return combined
def configure_externally(config, spec):
    """Launch a ConfigWindow in an external process.

    Pickles (config, spec) into the child's stdin, waits for the dialog to
    close, then unpickles the edited config from its stdout and writes it
    to our stdout.  (The unused ``time`` import of the original was removed.)
    """
    import pickle, subprocess
    path = __file__
    if path.endswith('.pyc'):
        path = path[:-1]  # Run the .py source, not the compiled file.
    proc = subprocess.Popen([path], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    newconf = pickle.loads(proc.communicate(pickle.dumps((config, spec)))[0])
    newconf.write(sys.stdout)
if __name__ == '__main__':
    # Child-process entry point used by configure_externally(): read a
    # pickled (config, spec) pair from stdin, show the dialog, and write the
    # (possibly modified) config back to stdout as a pickle.
    import pickle
    conf, spec = pickle.loads(sys.stdin.read())
    app = QtGui.QApplication(sys.argv)
    wnd = ConfigWindow(conf, spec)
    wnd.show()
    app.exec_()  # Blocks until the window is closed.
    print(pickle.dumps(conf), end=' ')
| pafcu/ConfigObj-GUI | configobj_gui.py | Python | isc | 26,308 |
# Copyright 2016 Mycroft AI, Inc.
#
# This file is part of Mycroft Core.
#
# Mycroft Core is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Mycroft Core is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Mycroft Core. If not, see <http://www.gnu.org/licenses/>.
import subprocess
import time
from Queue import Queue
from alsaaudio import Mixer
from threading import Thread, Timer
import serial
from mycroft.client.enclosure.arduino import EnclosureArduino
from mycroft.client.enclosure.eyes import EnclosureEyes
from mycroft.client.enclosure.mouth import EnclosureMouth
from mycroft.client.enclosure.weather import EnclosureWeather
from mycroft.configuration import ConfigurationManager
from mycroft.messagebus.client.ws import WebsocketClient
from mycroft.messagebus.message import Message
from mycroft.util import play_wav, create_signal
from mycroft.util.audio_test import record
from mycroft.util.log import getLogger
__author__ = 'aatchison', 'jdorleans', 'iward'

# Shared logger for all enclosure client components in this module.
LOG = getLogger("EnclosureClient")
class EnclosureReader(Thread):
    """
    Reads data from Serial port.

    Listens to all commands sent by Arduino that must be performed on
    Mycroft Core.

    E.g. Mycroft Stop Feature
        #. Arduino sends a Stop command after a button press on a Mycroft unit
        #. ``EnclosureReader`` captures the Stop command
        #. Notify all Mycroft Core processes (e.g. skills) to be stopped

    Note: A command is identified by a line break
    """

    def __init__(self, serial, ws):
        super(EnclosureReader, self).__init__(target=self.read)
        self.alive = True
        self.daemon = True  # don't block interpreter shutdown
        self.serial = serial
        self.ws = ws
        self.start()  # reader thread starts polling immediately

    def read(self):
        # Poll the serial port until stop() clears self.alive.
        while self.alive:
            try:
                # Strip the two-character line terminator from the command.
                data = self.serial.readline()[:-2]
                if data:
                    self.process(data)
                    LOG.info("Reading: " + data)
            except Exception as e:
                LOG.error("Reading error: {0}".format(e))

    def process(self, data):
        # Re-broadcast the raw command on the message bus, then translate
        # known Arduino commands into the corresponding Core messages.
        # NOTE: the substring checks below are evaluated in order and are
        # not mutually exclusive.
        self.ws.emit(Message(data))

        if "Command: system.version" in data:
            # Arduino answered the version probe -> enclosure is alive.
            self.ws.emit(Message("enclosure.start"))

        if "mycroft.stop" in data:
            create_signal('buttonPress')  # FIXME - Must use WS instead
            self.ws.emit(Message("mycroft.stop"))

        if "volume.up" in data:
            self.ws.emit(
                Message("VolumeSkill:IncreaseVolumeIntent",
                        {'play_sound': True}))

        if "volume.down" in data:
            self.ws.emit(
                Message("VolumeSkill:DecreaseVolumeIntent",
                        {'play_sound': True}))

        if "system.test.begin" in data:
            # Put the recognizer to sleep for the duration of the self-test.
            self.ws.emit(Message('recognizer_loop:sleep'))

        if "system.test.end" in data:
            self.ws.emit(Message('recognizer_loop:wake_up'))

        if "mic.test" in data:
            # Record-and-playback microphone check at a fixed low volume.
            mixer = Mixer()
            prev_vol = mixer.getvolume()[0]
            mixer.setvolume(35)
            self.ws.emit(Message("speak", {
                'utterance': "I am testing one two three"}))

            time.sleep(0.5)  # Prevents recording the loud button press
            record("/tmp/test.wav", 3.0)
            mixer.setvolume(prev_vol)
            play_wav("/tmp/test.wav").communicate()

            # Test audio muting on arduino
            subprocess.call('speaker-test -P 10 -l 0 -s 1', shell=True)

        if "unit.shutdown" in data:
            # Visual feedback, then power the unit off.
            self.ws.emit(
                Message("enclosure.eyes.timedspin",
                        {'length': 12000}))
            self.ws.emit(Message("enclosure.mouth.reset"))
            subprocess.call('systemctl poweroff -i', shell=True)

        if "unit.reboot" in data:
            self.ws.emit(
                Message("enclosure.eyes.spin"))
            self.ws.emit(Message("enclosure.mouth.reset"))
            subprocess.call('systemctl reboot -i', shell=True)

        if "unit.setwifi" in data:
            self.ws.emit(Message("mycroft.wifi.start"))

        if "unit.factory-reset" in data:
            # Drop the device identity so it re-pairs on next boot.
            subprocess.call(
                'rm ~/.mycroft/identity/identity2.json',
                shell=True)
            self.ws.emit(
                Message("enclosure.eyes.spin"))
            self.ws.emit(Message("enclosure.mouth.reset"))
            subprocess.call('systemctl reboot -i', shell=True)

    def stop(self):
        # read() checks this flag on each loop iteration; no join performed.
        self.alive = False
class EnclosureWriter(Thread):
    """
    Writes data to Serial port.

    #. Enqueues all commands received from Mycroft enclosures
       implementation
    #. Process them on the received order by writing on the Serial port

    E.g. Displaying a text on Mycroft's Mouth
        #. ``EnclosureMouth`` sends a text command
        #. ``EnclosureWriter`` captures and enqueue the command
        #. ``EnclosureWriter`` removes the next command from the queue
        #. ``EnclosureWriter`` writes the command to Serial port

    Note: A command has to end with a line break
    """

    def __init__(self, serial, ws, size=16):
        super(EnclosureWriter, self).__init__(target=self.flush)
        self.alive = True
        self.daemon = True  # don't block interpreter shutdown
        self.serial = serial
        self.ws = ws
        self.commands = Queue(size)  # bounded queue of pending commands
        self.start()  # writer thread starts draining immediately

    def flush(self):
        # Drain the queue for the lifetime of the thread.  get() blocks, so
        # after stop() the loop only exits once one more command arrives.
        while self.alive:
            try:
                cmd = self.commands.get()
                self.serial.write(cmd + '\n')  # protocol: newline-terminated
                LOG.info("Writing: " + cmd)
                self.commands.task_done()
            except Exception as e:
                LOG.error("Writing error: {0}".format(e))

    def write(self, command):
        # Blocks when the queue is full (default capacity: 16 commands).
        self.commands.put(str(command))

    def stop(self):
        self.alive = False
class Enclosure(object):
    """
    Serves as a communication interface between Arduino and Mycroft Core.

    ``Enclosure`` initializes and aggregates all enclosures implementation.

    E.g. ``EnclosureEyes``, ``EnclosureMouth`` and ``EnclosureArduino``

    It also listens to the basis events in order to perform those core actions
    on the unit.

    E.g. Start and Stop talk animation
    """

    def __init__(self):
        self.ws = WebsocketClient()
        ConfigurationManager.init(self.ws)
        self.config = ConfigurationManager.get().get("enclosure")
        self.__init_serial()
        self.reader = EnclosureReader(self.serial, self.ws)
        self.writer = EnclosureWriter(self.serial, self.ws)
        # Probe the Arduino; its reply triggers "enclosure.start" via
        # EnclosureReader.process, which calls self.start below.
        self.writer.write("system.version")
        self.ws.on("enclosure.start", self.start)
        self.started = False
        # Watchdog: stop() tears everything down only when start() never ran
        # (see the guard in stop()), i.e. presumably when the Arduino did not
        # answer system.version within 5 seconds -- TODO confirm intent.
        Timer(5, self.stop).start()

    def start(self, event=None):
        # Build the individual enclosure interfaces once the Arduino has
        # confirmed it is alive.
        self.eyes = EnclosureEyes(self.ws, self.writer)
        self.mouth = EnclosureMouth(self.ws, self.writer)
        self.system = EnclosureArduino(self.ws, self.writer)
        self.weather = EnclosureWeather(self.ws, self.writer)
        self.__register_events()
        self.__reset()
        self.started = True

    def __init_serial(self):
        # Open the serial link to the Arduino using the configured
        # port/rate/timeout; re-raises on failure after logging.
        try:
            self.port = self.config.get("port")
            self.rate = self.config.get("rate")
            self.timeout = self.config.get("timeout")
            self.serial = serial.serial_for_url(
                url=self.port, baudrate=self.rate, timeout=self.timeout)
            LOG.info("Connected to: %s rate: %s timeout: %s" %
                     (self.port, self.rate, self.timeout))
        except:
            LOG.error("Impossible to connect to serial port: " + self.port)
            raise

    def __register_events(self):
        self.ws.on('enclosure.mouth.events.activate',
                   self.__register_mouth_events)
        self.ws.on('enclosure.mouth.events.deactivate',
                   self.__remove_mouth_events)
        self.ws.on('enclosure.reset',
                   self.__reset)
        # Mouth animations are active by default.
        self.__register_mouth_events()

    def __register_mouth_events(self, event=None):
        self.ws.on('recognizer_loop:record_begin', self.mouth.listen)
        self.ws.on('recognizer_loop:record_end', self.mouth.reset)
        self.ws.on('recognizer_loop:audio_output_start', self.mouth.talk)
        self.ws.on('recognizer_loop:audio_output_end', self.mouth.reset)

    def __remove_mouth_events(self, event=None):
        self.ws.remove('recognizer_loop:record_begin', self.mouth.listen)
        self.ws.remove('recognizer_loop:record_end', self.mouth.reset)
        self.ws.remove('recognizer_loop:audio_output_start',
                       self.mouth.talk)
        self.ws.remove('recognizer_loop:audio_output_end',
                       self.mouth.reset)

    def __reset(self, event=None):
        # Reset both the mouth and the eye elements to indicate the unit is
        # ready for input.
        self.writer.write("eyes.reset")
        self.writer.write("mouth.reset")

    def speak(self, text):
        self.ws.emit(Message("speak", {'utterance': text}))

    def run(self):
        # Blocks on the websocket event loop until it terminates or errors.
        try:
            self.ws.run_forever()
        except Exception as e:
            LOG.error("Error: {0}".format(e))
            self.stop()

    def stop(self):
        # Only tear down when start() never ran: this is the 5-second
        # watchdog path armed in __init__ (see comment there).
        if not self.started:
            self.writer.stop()
            self.reader.stop()
            self.serial.close()
            self.ws.close()
| jasonehines/mycroft-core | mycroft/client/enclosure/__init__.py | Python | gpl-3.0 | 9,809 |
from graphite.thirdparty.pyparsing import *
ParserElement.enablePackrat()
# Top-level grammar for graphite render target expressions.  Both rules are
# Forward-declared because `expression` and `call` are mutually recursive.
grammar = Forward()
expression = Forward()

# Literals
intNumber = Combine(
    Optional('-') + Word(nums)
)('integer')

floatNumber = Combine(
    Optional('-') + Word(nums) + Literal('.') + Word(nums)
)('float')

aString = quotedString('string')

# Use lookahead to match only numbers in a list (can't remember why this is necessary)
afterNumber = FollowedBy(",") ^ FollowedBy(")") ^ FollowedBy(LineEnd())

# Try float before int so "1.5" is not consumed as int "1" + leftover ".5".
number = Group(
    (floatNumber + afterNumber) |
    (intNumber + afterNumber)
)('number')

boolean = Group(
    CaselessKeyword("true") |
    CaselessKeyword("false")
)('boolean')

# Function calls: an argument may itself be a nested expression.
arg = Group(
    boolean |
    number |
    aString |
    expression
)
args = delimitedList(arg)('args')

func = Word(alphas+'_', alphanums+'_')('func')
call = Group(
    func + Literal('(').suppress() +
    args + Literal(')').suppress()
)('call')

# Metric pattern (aka. pathExpression)
validMetricChars = alphanums + r'''!#$%&"'*+-.:;<=>?@[\]^_`|~'''
# A path is an optional leading word followed by any number of
# {a,b,c}-style alternation groups, each optionally followed by more text.
pathExpression = Combine(
    Optional(Word(validMetricChars)) +
    Combine(
        ZeroOrMore(
            Group(
                Literal('{') +
                Word(validMetricChars + ',') +
                Literal('}') + Optional( Word(validMetricChars) )
            )
        )
    )
)('pathExpression')

# Resolve the forward declarations: an expression is a call or a bare path.
expression << Group(call | pathExpression)('expression')

grammar << expression
def enableDebug():
    """Name every grammar element after its global binding and enable
    pyparsing debug output for it.

    Non-ParserElement globals (modules, functions, ...) lack setName /
    setDebug and are simply skipped.  The original bare ``except:`` also
    swallowed SystemExit/KeyboardInterrupt; catch Exception instead.
    """
    for name, obj in globals().items():
        try:
            obj.setName(name)
            obj.setDebug(True)
        except Exception:
            # Not a ParserElement -- ignore.
            pass
| afilipovich/graphite-web | webapp/graphite/render/grammar.py | Python | apache-2.0 | 1,494 |
"""
NAME: prefixNodesNames_Script
ICON: icon.png
DROP_TYPES:
SCOPE:
Prefix Nodes Names
"""
# The following symbols are added when run as shelf buttons:
# exit(): Allows 'error-free' early exit from the script.
# dropEvent: If your script registers DROP_TYPES, this is a QDropEvent
# upon a valid drop. Otherwise, it is None.
# Example: Registering for "nodegraph/nodes" DROP_TYPES
# allows the user to get dropped nodes using
# nodes = [NodegraphAPI.GetNode(x) for x in
# str(dropEvent.encodedData( 'nodegraph/nodes' )).split(',')]
# console_print(message, raisePanel = False):
# If the Python Console exists, print the message to it.
# Otherwise, print the message to the shell. If raisePanel
# is passed as True, the panel will be raised to the front.
import sys
SNIPPETS_PATH = "/usr/people/thomas-ma/Developement/Snippets/katana"

# Make the snippets package importable, appending the path only once.
# The original abused a boolean expression ("not x in y and y.append(x)")
# for control flow; a plain if-statement is the idiomatic form.
if SNIPPETS_PATH not in sys.path:
    sys.path.append(SNIPPETS_PATH)

import snippets.libraries.utilities

# Load the prefix-nodes-names recipe into the current Katana session.
snippets.libraries.utilities.importScriptNode("/usr/people/thomas-ma/Developement/Snippets/katana/snippets/resources/recipes/prefixNodesNames_Script_vLatest.katana")
| KelSolaar/Snippets | katana/snippets/resources/shelves/scripts/prefixNodesNames_Script.py | Python | gpl-3.0 | 1,225 |
# Copyright (c) 2014 ProphetStor, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import errno
from oslo_log import log as logging
from cinder import exception
from cinder.i18n import _, _LI, _LW
import cinder.volume.driver
from cinder.volume.drivers.prophetstor import dplcommon
LOG = logging.getLogger(__name__)
class DPLISCSIDriver(dplcommon.DPLCOMMONDriver,
                     cinder.volume.driver.ISCSIDriver):
    """iSCSI front-end for the ProphetStor Flexvisor (DPL) driver."""

    def __init__(self, *args, **kwargs):
        super(DPLISCSIDriver, self).__init__(*args, **kwargs)

    def initialize_connection(self, volume, connector):
        """Allow connection to connector and return connection info."""
        properties = {}
        properties['target_lun'] = None
        properties['target_discovered'] = True
        properties['target_portal'] = ''
        properties['target_iqn'] = None
        properties['volume_id'] = volume['id']

        dpl_server = self.configuration.san_ip
        dpl_iscsi_port = self.configuration.iscsi_port
        # Ask the backend to export the volume to this initiator.
        ret, output = self.dpl.assign_vdev(self._conver_uuid2hex(
            volume['id']), connector['initiator'].lower(), volume['id'],
            '%s:%d' % (dpl_server, dpl_iscsi_port), 0)

        if ret == errno.EAGAIN:
            # Asynchronous assignment: poll the returned event until done.
            ret, event_uuid = self._get_event_uuid(output)
            if len(event_uuid):
                ret = 0
                status = self._wait_event(
                    self.dpl.get_vdev_status, self._conver_uuid2hex(
                        volume['id']), event_uuid)
                if status['state'] == 'error':
                    ret = errno.EFAULT
                    msg = _('Flexvisor failed to assign volume %(id)s: '
                            '%(status)s.') % {'id': volume['id'],
                                              'status': status}
                    raise exception.VolumeBackendAPIException(data=msg)
            else:
                ret = errno.EFAULT
                msg = _('Flexvisor failed to assign volume %(id)s due to '
                        'unable to query status by event '
                        'id.') % {'id': volume['id']}
                raise exception.VolumeBackendAPIException(data=msg)
        elif ret != 0:
            msg = _('Flexvisor assign volume failed.:%(id)s:'
                    '%(status)s.') % {'id': volume['id'], 'status': ret}
            raise exception.VolumeBackendAPIException(data=msg)

        # Assignment succeeded: read back the export record to discover the
        # portal, LUN and target IQN granted to this initiator.
        if ret == 0:
            ret, output = self.dpl.get_vdev(
                self._conver_uuid2hex(volume['id']))
        if ret == 0:
            for tgInfo in output['exports']['Network/iSCSI']:
                # Two response formats exist: per-initiator permission dicts
                # (new) or a flat permission list (old).
                if tgInfo['permissions'] and \
                        isinstance(tgInfo['permissions'][0], dict):
                    for assign in tgInfo['permissions']:
                        if connector['initiator'].lower() in assign.keys():
                            for tgportal in tgInfo.get('portals', {}):
                                # Take the first portal only.
                                properties['target_portal'] = tgportal
                                break
                            properties['target_lun'] = \
                                assign[connector['initiator'].lower()]
                            break
                    if properties['target_portal'] != '':
                        properties['target_iqn'] = tgInfo['target_identifier']
                        break
                else:
                    if connector['initiator'].lower() in tgInfo['permissions']:
                        for tgportal in tgInfo.get('portals', {}):
                            properties['target_portal'] = tgportal
                            break
                        if properties['target_portal'] != '':
                            properties['target_lun'] = \
                                tgInfo['logical_unit_number']
                            properties['target_iqn'] = \
                                tgInfo['target_identifier']
                            break
        if not (ret == 0 or properties['target_portal']):
            msg = _('Flexvisor failed to assign volume %(volume)s '
                    'iqn %(iqn)s.') % {'volume': volume['id'],
                                       'iqn': connector['initiator']}
            raise exception.VolumeBackendAPIException(data=msg)

        return {'driver_volume_type': 'iscsi', 'data': properties}

    def terminate_connection(self, volume, connector, **kwargs):
        """Disallow connection from connector."""
        ret, output = self.dpl.unassign_vdev(
            self._conver_uuid2hex(volume['id']),
            connector['initiator'])

        if ret == errno.EAGAIN:
            # Asynchronous unassignment: poll the returned event until done.
            ret, event_uuid = self._get_event_uuid(output)
            if ret == 0:
                status = self._wait_event(
                    self.dpl.get_vdev_status, volume['id'], event_uuid)
                if status['state'] == 'error':
                    ret = errno.EFAULT
                    msg = _('Flexvisor failed to unassign volume %(id)s:'
                            ' %(status)s.') % {'id': volume['id'],
                                               'status': status}
                    raise exception.VolumeBackendAPIException(data=msg)
            else:
                msg = _('Flexvisor failed to unassign volume (get event) '
                        '%(id)s.') % {'id': volume['id']}
                raise exception.VolumeBackendAPIException(data=msg)
        elif ret == errno.ENODATA:
            # Already unassigned -- treat as success.
            LOG.info(_LI('Flexvisor already unassigned volume '
                         '%(id)s.'), {'id': volume['id']})
        elif ret != 0:
            msg = _('Flexvisor failed to unassign volume:%(id)s:'
                    '%(status)s.') % {'id': volume['id'], 'status': ret}
            raise exception.VolumeBackendAPIException(data=msg)

    def get_volume_stats(self, refresh=False):
        """Return cached backend stats, refreshing them first if requested."""
        if refresh:
            try:
                data = super(DPLISCSIDriver, self).get_volume_stats(refresh)
                if data:
                    data['storage_protocol'] = 'iSCSI'
                    backend_name = \
                        self.configuration.safe_get('volume_backend_name')
                    data['volume_backend_name'] = \
                        (backend_name or 'DPLISCSIDriver')
                    self._stats = data
            except Exception as exc:
                # Keep serving the last known stats on failure.
                LOG.warning(_LW('Cannot get volume status '
                                '%(exc)s.'), {'exc': exc})
        return self._stats
| dims/cinder | cinder/volume/drivers/prophetstor/dpl_iscsi.py | Python | apache-2.0 | 7,014 |
#!/usr/bin/env python
# Copyright 2012 Google Inc. All Rights Reserved.
"""Utils exporting data from AFF4 to the rest of the world."""
import os
import Queue
import stat
import time
import logging
from grr.lib import aff4
from grr.lib import client_index
from grr.lib import rdfvalue
from grr.lib import serialize
from grr.lib import threadpool
from grr.lib import utils
from grr.lib.aff4_objects import aff4_grr
from grr.lib.flows.general import file_finder
from grr.lib.rdfvalues import client as rdf_client
from grr.lib.rdfvalues import flows as rdf_flows
BUFFER_SIZE = 16 * 1024 * 1024
def GetAllClients(token=None):
  """Return a list of all client urns."""
  index_fd = aff4.FACTORY.Create(
      client_index.MAIN_INDEX, aff4_type="ClientIndex",
      mode="rw", object_exists=True, token=token)
  # "." matches every indexed client.
  return index_fd.LookupClients(["."])
class IterateAllClientUrns(object):
  """Class to iterate over all URNs."""

  THREAD_POOL_NAME = "ClientUrnIter"
  QUEUE_TIMEOUT = 30

  def __init__(self, func=None, max_threads=10, token=None):
    """Iterate over all clients in a threadpool.

    Args:
      func: A function to call with each client urn.
      max_threads: Number of threads to use.
      token: Auth token.

    Raises:
      RuntimeError: If function not specified.
    """
    self.thread_pool = threadpool.ThreadPool.Factory(self.THREAD_POOL_NAME,
                                                     max_threads)
    self.thread_pool.Start()
    self.token = token
    self.func = func
    self.broken_subjects = []  # Entries that are broken or fail to run.

    # Worker results are funneled through this queue back to Run().
    self.out_queue = Queue.Queue()

  def GetInput(self):
    """Yield client urns."""
    clients = GetAllClients(token=self.token)
    logging.debug("Got %d clients", len(clients))
    return clients

  def Run(self):
    """Run the iteration."""
    count = 0
    # Fan out one task per client; workers put results on out_queue.
    for count, input_data in enumerate(self.GetInput()):
      if count % 2000 == 0:
        logging.debug("%d processed.", count)
      args = (input_data, self.out_queue, self.token)
      self.thread_pool.AddTask(target=self.IterFunction, args=args,
                               name=self.THREAD_POOL_NAME)

    # Drain the queue.  `count` is the last enumerate index, so this loop
    # performs count+1 successful gets -- one per queued task.
    while count >= 0:
      try:
        # We only use the timeout to wait if we got to the end of the Queue but
        # didn't process everything yet.
        out = self.out_queue.get(timeout=self.QUEUE_TIMEOUT, block=True)
        if out:
          yield out
        count -= 1
      except Queue.Empty:
        break

    # Join and stop to clean up the threadpool.
    self.thread_pool.Stop()

  def IterFunction(self, *args):
    """Function to run on each input. This can be overridden."""
    self.func(*args)
class IterateAllClients(IterateAllClientUrns):
  """Class to iterate over all GRR Client objects."""

  def __init__(self, max_age, client_chunksize=25, **kwargs):
    """Iterate over all clients in a threadpool.

    Args:
      max_age: Maximum age in seconds of clients to check.
      client_chunksize: Number of client objects to open per MultiOpen call.
      **kwargs: Arguments passed to init.
    """
    super(IterateAllClients, self).__init__(**kwargs)
    self.client_chunksize = client_chunksize
    self.max_age = max_age

  def GetInput(self):
    """Yield client urns."""
    client_list = GetAllClients(token=self.token)
    logging.debug("Got %d clients", len(client_list))

    # Open clients in chunks to limit concurrent AFF4 opens.
    for client_group in utils.Grouper(client_list, self.client_chunksize):
      for fd in aff4.FACTORY.MultiOpen(client_group, mode="r",
                                       aff4_type="VFSGRRClient",
                                       token=self.token):
        if isinstance(fd, aff4_grr.VFSGRRClient):
          # Skip if older than max_age
          oldest_time = (time.time() - self.max_age) * 1e6
          if fd.Get(aff4.VFSGRRClient.SchemaCls.PING) >= oldest_time:
            yield fd
def DownloadFile(file_obj, target_path, buffer_size=BUFFER_SIZE):
  """Download an aff4 file to the local filesystem overwriting it if it exists.

  Args:
    file_obj: An aff4 object that supports the file interface (Read, Seek)
    target_path: Full path of file to write to.
    buffer_size: Read in chunks this size.
  """
  logging.info(u"Downloading: %s to: %s", file_obj.urn, target_path)

  file_obj.Seek(0)
  count = 0

  # Open in binary mode -- the original used text mode "w", which corrupts
  # binary payloads on Windows -- and use a context manager so the handle is
  # closed even when a Read() raises mid-transfer.
  with open(target_path, "wb") as target_file:
    data_buffer = file_obj.Read(buffer_size)
    while data_buffer:
      target_file.write(data_buffer)
      data_buffer = file_obj.Read(buffer_size)
      count += 1
      if not count % 3:
        # Periodic progress logging (every third buffer).
        logging.debug(u"Downloading: %s: %s done", file_obj.urn,
                      utils.FormatNumberAsString(count * buffer_size))
def RecursiveDownload(dir_obj, target_dir, max_depth=10, depth=1,
                      overwrite=False, max_threads=10):
  """Recursively downloads a file entry to the target path.

  Args:
    dir_obj: An aff4 object that contains children.
    target_dir: Full path of the directory to write to.
    max_depth: Depth to download to. 1 means just the directory itself.
    depth: Current depth of recursion.
    overwrite: Should we overwrite files that exist.
    max_threads: Use this many threads to do the downloads.
  """
  if (not isinstance(dir_obj, aff4.AFF4Volume) or
      isinstance(dir_obj, aff4.HashImage)):
    return

  # Reuse the same threadpool as we call recursively.
  thread_pool = threadpool.ThreadPool.Factory("Downloader", max_threads)
  thread_pool.Start()

  for sub_file_entry in dir_obj.OpenChildren():
    # NOTE(review): path_elements never gets the child's name appended, so
    # sub_target_dir always equals target_dir and the whole tree lands in
    # one flat directory -- confirm whether this is intended.
    path_elements = [target_dir]
    sub_target_dir = u"/".join(path_elements)
    try:
      # Any file-like object with data in AFF4 should inherit AFF4Stream.
      if isinstance(sub_file_entry, aff4.AFF4Stream):
        args = (sub_file_entry.urn, sub_target_dir, sub_file_entry.token,
                overwrite)
        thread_pool.AddTask(target=CopyAFF4ToLocal, args=args,
                            name="Downloader")
      elif "Container" in sub_file_entry.behaviours:
        if depth >= max_depth:  # Don't go any deeper.
          continue
        try:
          os.makedirs(sub_target_dir)
        except OSError:
          pass  # Directory already exists.
        RecursiveDownload(sub_file_entry, sub_target_dir, overwrite=overwrite,
                          depth=depth + 1)
    except IOError:
      logging.exception("Unable to download %s", sub_file_entry.urn)
    finally:
      sub_file_entry.Close()

  # Join and stop the threadpool.
  if depth <= 1:
    thread_pool.Stop()
def DownloadCollection(coll_path, target_path, token=None, overwrite=False,
                       dump_client_info=False, flatten=False,
                       max_threads=10):
  """Iterate through a Collection object downloading all files.

  Args:
    coll_path: Path to an AFF4 collection.
    target_path: Base directory to write to.
    token: Token for access.
    overwrite: If True, overwrite existing files.
    dump_client_info: If True, this will detect client paths, and dump a yaml
      version of the client object to the root path. This is useful for seeing
      the hostname/users of the machine the client id refers to.
    flatten: If True, produce a "files" flat folder with links to all the found
      files.
    max_threads: Use this many threads to do the downloads.
  """
  completed_clients = set()
  try:
    coll = aff4.FACTORY.Open(coll_path, aff4_type="RDFValueCollection",
                             token=token)
  except IOError:
    logging.error("%s is not a valid collection. Typo? "
                  "Are you sure something was written to it?", coll_path)
    return
  thread_pool = threadpool.ThreadPool.Factory("Downloader", max_threads)
  thread_pool.Start()

  logging.info("Expecting to download %s files", coll.size)

  # Collections can include anything they want, but we only handle RDFURN and
  # StatEntry entries in this function.
  for grr_message in coll:
    source = None
    # If a raw message, work out the type.
    if isinstance(grr_message, rdf_flows.GrrMessage):
      source = grr_message.source
      grr_message = grr_message.payload

    # Collections can contain AFF4ObjectSummary objects which encapsulate
    # RDFURNs and StatEntrys.
    if isinstance(grr_message, rdf_client.AFF4ObjectSummary):
      urn = grr_message.urn
    elif isinstance(grr_message, rdfvalue.RDFURN):
      urn = grr_message
    elif isinstance(grr_message, rdf_client.StatEntry):
      urn = rdfvalue.RDFURN(grr_message.aff4path)
    elif isinstance(grr_message, file_finder.FileFinderResult):
      urn = rdfvalue.RDFURN(grr_message.stat_entry.aff4path)
    elif isinstance(grr_message, rdfvalue.RDFBytes):
      try:
        os.makedirs(target_path)
      except OSError:
        pass  # Directory already exists.
      try:
        # We just dump out bytes and carry on.
        client_id = source.Split()[0]
        with open(os.path.join(target_path, client_id), "wb") as fd:
          fd.write(str(grr_message))
      except AttributeError:
        pass  # No source on the message; nothing to name the dump after.
      continue
    else:
      continue

    # Handle dumping client info, but only once per client.
    client_id = urn.Split()[0]
    re_match = aff4.AFF4Object.VFSGRRClient.CLIENT_ID_RE.match(client_id)
    if dump_client_info and re_match and client_id not in completed_clients:
      args = (rdf_client.ClientURN(client_id), target_path, token, overwrite)
      thread_pool.AddTask(target=DumpClientYaml, args=args,
                          name="ClientYamlDownloader")
      completed_clients.add(client_id)

    # Now queue downloading the actual files.
    args = (urn, target_path, token, overwrite)
    if flatten:
      target = CopyAndSymlinkAFF4ToLocal
    else:
      target = CopyAFF4ToLocal
    thread_pool.AddTask(target=target, args=args, name="Downloader")

  # Join and stop the threadpool.
  thread_pool.Stop()
def CopyAFF4ToLocal(aff4_urn, target_dir, token=None, overwrite=False):
  """Copy an AFF4 object that supports a read interface to local filesystem.

  Args:
    aff4_urn: URN of thing to copy.
    target_dir: Directory to copy the file to.
    token: Auth token.
    overwrite: If True overwrite the file if it exists.

  Returns:
    If aff4_urn points to a file, returns path to the downloaded file.
    Otherwise returns None.

  By default file will only be overwritten if file size differs.
  """
  try:
    fd = aff4.FACTORY.Open(aff4_urn, token=token)
    # Mirror the AFF4 path layout under target_dir (strip leading "/").
    filepath = os.path.join(target_dir, fd.urn.Path()[1:])

    # If urn points to a directory, just create it.
    if isinstance(fd, aff4.VFSDirectory):
      try:
        os.makedirs(filepath)
      except OSError:
        pass  # Already exists.
      return None
    # If urn points to a file, download it.
    elif isinstance(fd, aff4.AFF4Stream):
      if not os.path.isfile(filepath):
        try:
          # Ensure directory exists.
          os.makedirs(os.path.dirname(filepath))
        except OSError:
          pass
        DownloadFile(fd, filepath)
      elif (os.stat(filepath)[stat.ST_SIZE] != fd.Get(fd.Schema.SIZE) or
            overwrite):
        # We should overwrite because user said, or file sizes differ.
        DownloadFile(fd, filepath)
      else:
        logging.info("File %s exists, skipping", filepath)

      return filepath
    else:
      raise RuntimeError("Opened urn is neither a downloaded file nor a "
                         "directory: %s" % aff4_urn)

  except IOError as e:
    logging.exception("Failed to read %s due to %s", aff4_urn, e)
    raise
def CopyAndSymlinkAFF4ToLocal(aff4_urn, target_dir, token=None,
                              overwrite=False):
  """Download an AFF4 object, then symlink it into a flat "files" folder.

  The file lands under its full AFF4 path inside target_dir; a symlink named
  after the underscore-joined URN components is then created in
  <target_dir>/files pointing at the downloaded file.
  """
  local_path = CopyAFF4ToLocal(aff4_urn, target_dir, token=token,
                               overwrite=overwrite)
  if not local_path:
    return

  link_dir = os.path.join(target_dir, "files")
  try:
    os.makedirs(link_dir)
  except OSError:
    pass  # Already exists.

  link_name = "_".join(aff4_urn.Split())
  link_path = os.path.join(link_dir, link_name)
  try:
    os.symlink(local_path, link_path)
  except OSError:
    logging.exception("Can't create symlink to a file: %s -> %s",
                      link_path, local_path)
def DumpClientYaml(client_urn, target_dir, token=None, overwrite=False):
  """Dump a yaml file containing client info."""
  client_fd = aff4.FACTORY.Open(client_urn, "VFSGRRClient", token=token)
  client_dir = os.path.join(target_dir, client_fd.urn.Split()[0])
  try:
    # Due to threading this can actually be created by another thread.
    os.makedirs(client_dir)
  except OSError:
    pass
  yaml_path = os.path.join(client_dir, "client_info.yaml")
  if not os.path.isfile(yaml_path) or overwrite:
    with open(yaml_path, "w") as out_file:
      out_file.write(serialize.YamlDumper(client_fd))
| statik/grr | lib/export_utils.py | Python | apache-2.0 | 12,651 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import mimetypes
import os
import random
import time
import unittest
from six import StringIO
from mock import patch
from mock import MagicMock as Mock
import pyrax
import pyrax.object_storage
from pyrax.object_storage import ACCOUNT_META_PREFIX
from pyrax.object_storage import assure_container
from pyrax.object_storage import BulkDeleter
from pyrax.object_storage import Container
from pyrax.object_storage import CONTAINER_META_PREFIX
from pyrax.object_storage import Fault_cls
from pyrax.object_storage import FAULT
from pyrax.object_storage import FolderUploader
from pyrax.object_storage import get_file_size
from pyrax.object_storage import _handle_container_not_found
from pyrax.object_storage import _handle_object_not_found
from pyrax.object_storage import OBJECT_META_PREFIX
from pyrax.object_storage import _massage_metakeys
from pyrax.object_storage import StorageClient
from pyrax.object_storage import StorageObject
from pyrax.object_storage import StorageObjectIterator
from pyrax.object_storage import _validate_file_or_path
from pyrax.object_storage import _valid_upload_key
import pyrax.exceptions as exc
import pyrax.utils as utils
import pyrax.fakes as fakes
class ObjectStorageTest(unittest.TestCase):
    def __init__(self, *args, **kwargs):
        super(ObjectStorageTest, self).__init__(*args, **kwargs)
        # Shared fake identity for all tests; maxDiff widens diff output on
        # assertion failures.
        self.identity = fakes.FakeIdentity()
        self.maxDiff = 1000
    def setUp(self):
        # Fresh fake client and container for every test; self.obj is a bare
        # StorageObject built from a minimal info dict.
        self.client = fakes.FakeStorageClient(self.identity)
        self.container = self.client.create("fake")
        nm = "fake_object"
        ctype = "text/fake"
        self.obj = StorageObject(self.container.object_manager,
                {"name": nm, "content_type": ctype, "bytes": 42})
    def tearDown(self):
        # Nothing to clean up; fixtures are rebuilt in setUp for each test.
        pass
    def test_fault(self):
        # A Fault_cls instance must evaluate as falsy.
        f = Fault_cls()
        self.assertFalse(f)
    def test_assure_container(self):
        # The decorator must accept either a Container instance or its name
        # and always hand the wrapped method a Container.
        class TestClient(object):
            _manager = fakes.FakeManager()

            @assure_container
            def test_method(self, container):
                return container

        client = TestClient()
        client.get = Mock(return_value=self.container)
        # Pass the container
        ret = client.test_method(self.container)
        self.assertTrue(ret is self.container)
        # Pass the name
        ret = client.test_method(self.container.name)
        self.assertTrue(ret is self.container)
    def test_massage_metakeys(self):
        # Keys missing the prefix get it added; already-prefixed keys are
        # left alone.
        prefix = "ABC-"
        orig = {"ABC-yyy": "ok", "zzz": "change"}
        expected = {"ABC-yyy": "ok", "ABC-zzz": "change"}
        fixed = _massage_metakeys(orig, prefix)
        self.assertEqual(fixed, expected)
    def test_validate_file_or_path(self):
        # With an existing file path the object name is returned unchanged.
        obj_name = utils.random_unicode()
        with utils.SelfDeletingTempfile() as tmp:
            ret = _validate_file_or_path(tmp, obj_name)
        self.assertEqual(ret, obj_name)
    def test_validate_file_or_path_not_found(self):
        # A nonexistent path must raise FileNotFound.
        pth = utils.random_unicode()
        obj_name = utils.random_unicode()
        self.assertRaises(exc.FileNotFound, _validate_file_or_path, pth,
                obj_name)
    def test_validate_file_or_path_object(self):
        # A non-path object (e.g. file-like) skips the existence check and
        # the object name is returned unchanged.
        pth = object()
        obj_name = utils.random_unicode()
        ret = _validate_file_or_path(pth, obj_name)
        self.assertEqual(ret, obj_name)
    def test_valid_upload_key_good(self):
        clt = self.client

        # The decorated function must run when the key is a known upload.
        @_valid_upload_key
        def test(self, upload_key):
            return "OK"

        key = utils.random_unicode()
        fake_status = utils.random_unicode()
        clt.folder_upload_status = {key: fake_status}
        ret = test(clt, key)
        self.assertEqual(ret, "OK")
    def test_valid_upload_key_bad(self):
        clt = self.client

        # An unknown upload key must raise InvalidUploadID before the
        # decorated function runs.
        @_valid_upload_key
        def test(self, upload_key):
            return "OK"

        key = utils.random_unicode()
        bad_key = utils.random_unicode()
        fake_status = utils.random_unicode()
        clt.folder_upload_status = {key: fake_status}
        self.assertRaises(exc.InvalidUploadID, test, clt, bad_key)
def test_handle_container_not_found(self):
clt = self.client
msg = utils.random_unicode()
@_handle_container_not_found
def test(self, container):
raise exc.NotFound(msg)
container = utils.random_unicode()
self.assertRaises(exc.NoSuchContainer, test, self, container)
def test_handle_object_not_found(self):
clt = self.client
msg = utils.random_unicode()
@_handle_object_not_found
def test(self, obj):
raise exc.NotFound(msg)
obj = utils.random_unicode()
self.assertRaises(exc.NoSuchObject, test, self, obj)
    def test_get_file_size(self):
        # get_file_size must report the byte length of a file-like object.
        sz = random.randint(42, 420)
        fobj = StringIO("x" * sz)
        ret = get_file_size(fobj)
        self.assertEqual(sz, ret)
    @patch('pyrax.object_storage.StorageObjectManager',
            new=fakes.FakeStorageObjectManager)
    def test_container_create(self):
        # Container.__init__ must record manager and info and expose the name.
        api = utils.random_unicode()
        mgr = fakes.FakeManager()
        mgr.api = api
        nm = utils.random_unicode()
        info = {"name": nm}
        cont = Container(mgr, info)
        self.assertEqual(cont.manager, mgr)
        self.assertEqual(cont._info, info)
        self.assertEqual(cont.name, nm)
    def test_backwards_aliases(self):
        # Legacy alias: get_objects must be the very same function object as
        # list.  (im_func is Python 2 only.)
        cont = self.container
        get_func = cont.get_objects.im_func
        list_func = cont.list.im_func
        self.assertTrue(get_func is list_func)
def test_repr(self):
cont = self.container
rpr = cont.__repr__()
self.assertTrue("Container" in rpr)
self.assertTrue(cont.name in rpr)
def test_id(self):
cont = self.container
self.assertEqual(cont.id, cont.name)
cont.name = utils.random_unicode()
self.assertEqual(cont.id, cont.name)
    def test_set_cdn_defaults(self):
        """_set_cdn_defaults replaces the initial Fault placeholder on the
        CDN attributes with real defaults (None for the CDN URI).
        """
        cont = self.container
        # Before the call the attribute holds the lazy Fault sentinel.
        self.assertTrue(isinstance(cont._cdn_uri, Fault_cls))
        cont._set_cdn_defaults()
        self.assertIsNone(cont._cdn_uri)
    def test_fetch_cdn_data(self):
        """Accessing any cdn_* property lazily fetches the CDN headers via
        the manager and parses them into typed attribute values (int TTL,
        boolean log retention, plain URI strings).
        """
        cont = self.container
        # Starts out as the lazy Fault sentinel until first access.
        self.assertTrue(isinstance(cont._cdn_uri, Fault_cls))
        cdn_uri = utils.random_unicode()
        cdn_ssl_uri = utils.random_unicode()
        cdn_streaming_uri = utils.random_unicode()
        cdn_ios_uri = utils.random_unicode()
        cdn_log_retention = random.choice(("True", "False"))
        bool_retention = (cdn_log_retention == "True")
        cdn_ttl = str(random.randint(1, 1000))
        hdrs = {"X-Cdn-Uri": cdn_uri,
                "X-Ttl": cdn_ttl,
                "X-Cdn-Ssl-Uri": cdn_ssl_uri,
                "X-Cdn-Streaming-Uri": cdn_streaming_uri,
                "X-Cdn-Ios-Uri": cdn_ios_uri,
                "X-Log-Retention": cdn_log_retention,
                }
        cont.manager.fetch_cdn_data = Mock(return_value=hdrs)
        self.assertEqual(cont.cdn_uri, cdn_uri)
        # Second read returns the now-populated value (still equal).
        self.assertEqual(cont.cdn_uri, cdn_uri)
        self.assertEqual(cont.cdn_ttl, int(cdn_ttl))
        self.assertEqual(cont.cdn_ssl_uri, cdn_ssl_uri)
        self.assertEqual(cont.cdn_streaming_uri, cdn_streaming_uri)
        self.assertEqual(cont.cdn_ios_uri, cdn_ios_uri)
        self.assertEqual(cont.cdn_log_retention, bool_retention)
def test_fetch_cdn_data_no_headers(self):
cont = self.container
cont._cdn_enabled = True
ret = cont._fetch_cdn_data()
self.assertTrue(cont._cdn_enabled)
def test_fetch_cdn_data_not_enabled(self):
cont = self.container
cont.manager.fetch_cdn_data = Mock(return_value={})
ret = cont._fetch_cdn_data()
self.assertIsNone(ret)
self.assertIsNone(cont.cdn_uri)
def test_cont_get_metadata(self):
cont = self.container
prefix = utils.random_unicode()
cont.manager.get_metadata = Mock()
cont.get_metadata(prefix=prefix)
cont.manager.get_metadata.assert_called_once_with(cont, prefix=prefix)
def test_cont_set_metadata(self):
cont = self.container
prefix = utils.random_unicode()
key = utils.random_unicode()
val = utils.random_unicode()
metadata = {key: val}
cont.manager.set_metadata = Mock()
cont.set_metadata(metadata, prefix=prefix)
cont.manager.set_metadata.assert_called_once_with(cont, metadata,
prefix=prefix, clear=False)
def test_cont_remove_metadata_key(self):
cont = self.container
prefix = utils.random_unicode()
key = utils.random_unicode()
cont.manager.remove_metadata_key = Mock()
cont.remove_metadata_key(key, prefix=prefix)
cont.manager.remove_metadata_key.assert_called_once_with(cont, key,
prefix=prefix)
def test_cont_set_web_index_page(self):
cont = self.container
page = utils.random_unicode()
cont.manager.set_web_index_page = Mock()
cont.set_web_index_page(page)
cont.manager.set_web_index_page.assert_called_once_with(cont, page)
def test_cont_set_web_error_page(self):
cont = self.container
page = utils.random_unicode()
cont.manager.set_web_error_page = Mock()
cont.set_web_error_page(page)
cont.manager.set_web_error_page.assert_called_once_with(cont, page)
def test_cont_make_public(self):
cont = self.container
ttl = utils.random_unicode()
cont.manager.make_public = Mock()
cont.make_public(ttl=ttl)
cont.manager.make_public.assert_called_once_with(cont, ttl=ttl)
def test_cont_make_private(self):
cont = self.container
cont.manager.make_private = Mock()
cont.make_private()
cont.manager.make_private.assert_called_once_with(cont)
def test_cont_purge_cdn_object(self):
cont = self.container
obj = utils.random_unicode()
email_addresses = utils.random_unicode()
cont.object_manager.purge = Mock()
cont.purge_cdn_object(obj, email_addresses=email_addresses)
cont.object_manager.purge.assert_called_once_with(obj,
email_addresses=email_addresses)
def test_cont_get(self):
cont = self.container
item = utils.random_unicode()
item_obj = utils.random_unicode()
cont.object_manager.get = Mock(return_value=item_obj)
ret = cont.get_object(item)
self.assertEqual(ret, item_obj)
    def test_cont_list(self):
        """With full_listing=False, Container.list() forwards the paging
        arguments to the object manager's list(); the full_listing flag
        itself is consumed and not passed along.
        """
        cont = self.container
        marker = utils.random_unicode()
        limit = utils.random_unicode()
        prefix = utils.random_unicode()
        delimiter = utils.random_unicode()
        end_marker = utils.random_unicode()
        full_listing = False
        return_raw = utils.random_unicode()
        cont.object_manager.list = Mock()
        cont.list(marker=marker, limit=limit, prefix=prefix,
                delimiter=delimiter, end_marker=end_marker,
                full_listing=full_listing, return_raw=return_raw)
        cont.object_manager.list.assert_called_once_with(marker=marker,
                limit=limit, prefix=prefix, delimiter=delimiter,
                end_marker=end_marker, return_raw=return_raw)
    def test_cont_list_full(self):
        """With full_listing=True, Container.list() bypasses the object
        manager's list() and instead runs the manager's full listing
        iterator, which only receives the prefix.
        """
        cont = self.container
        marker = utils.random_unicode()
        limit = utils.random_unicode()
        prefix = utils.random_unicode()
        delimiter = utils.random_unicode()
        end_marker = utils.random_unicode()
        full_listing = True
        return_raw = utils.random_unicode()
        cont.manager.object_listing_iterator = Mock()
        cont.list(marker=marker, limit=limit, prefix=prefix,
                delimiter=delimiter, end_marker=end_marker,
                full_listing=full_listing, return_raw=return_raw)
        cont.manager.object_listing_iterator.assert_called_once_with(cont,
                prefix=prefix)
def test_cont_list_all(self):
cont = self.container
prefix = utils.random_unicode()
cont.manager.object_listing_iterator = Mock()
cont.list_all(prefix=prefix)
cont.manager.object_listing_iterator.assert_called_once_with(cont,
prefix=prefix)
    def test_cont_list_object_names_full(self):
        """With full_listing=True, list_object_names gets its objects from
        list_all() (which only receives the prefix) and returns just their
        names.
        """
        cont = self.container
        marker = utils.random_unicode()
        limit = utils.random_unicode()
        prefix = utils.random_unicode()
        delimiter = utils.random_unicode()
        end_marker = utils.random_unicode()
        full_listing = True
        name1 = utils.random_unicode()
        name2 = utils.random_unicode()
        obj1 = fakes.FakeStorageObject(cont.object_manager, name=name1)
        obj2 = fakes.FakeStorageObject(cont.object_manager, name=name2)
        cont.list_all = Mock(return_value=[obj1, obj2])
        nms = cont.list_object_names(marker=marker, limit=limit, prefix=prefix,
                delimiter=delimiter, end_marker=end_marker,
                full_listing=full_listing)
        cont.list_all.assert_called_once_with(prefix=prefix)
        self.assertEqual(nms, [name1, name2])
    def test_cont_list_object_names(self):
        """With full_listing=False, list_object_names pages through list()
        with all paging arguments and returns just the object names.
        """
        cont = self.container
        marker = utils.random_unicode()
        limit = utils.random_unicode()
        prefix = utils.random_unicode()
        delimiter = utils.random_unicode()
        end_marker = utils.random_unicode()
        full_listing = False
        name1 = utils.random_unicode()
        name2 = utils.random_unicode()
        obj1 = fakes.FakeStorageObject(cont.object_manager, name=name1)
        obj2 = fakes.FakeStorageObject(cont.object_manager, name=name2)
        cont.list = Mock(return_value=[obj1, obj2])
        nms = cont.list_object_names(marker=marker, limit=limit, prefix=prefix,
                delimiter=delimiter, end_marker=end_marker,
                full_listing=full_listing)
        cont.list.assert_called_once_with(marker=marker, limit=limit,
                prefix=prefix, delimiter=delimiter, end_marker=end_marker)
        self.assertEqual(nms, [name1, name2])
def test_cont_find(self):
cont = self.container
cont.object_manager.find = Mock()
key1 = utils.random_unicode()
val1 = utils.random_unicode()
key2 = utils.random_unicode()
val2 = utils.random_unicode()
cont.find(key1=val1, key2=val2)
cont.object_manager.find.assert_called_once_with(key1=val1, key2=val2)
def test_cont_findall(self):
cont = self.container
cont.object_manager.findall = Mock()
key1 = utils.random_unicode()
val1 = utils.random_unicode()
key2 = utils.random_unicode()
val2 = utils.random_unicode()
cont.findall(key1=val1, key2=val2)
cont.object_manager.findall.assert_called_once_with(key1=val1,
key2=val2)
    def test_cont_create(self):
        """Container.create() forwards every argument unchanged, as
        keywords, to the object manager's create().
        """
        cont = self.container
        cont.object_manager.create = Mock()
        file_or_path = utils.random_unicode()
        data = utils.random_unicode()
        obj_name = utils.random_unicode()
        content_type = utils.random_unicode()
        etag = utils.random_unicode()
        content_encoding = utils.random_unicode()
        content_length = utils.random_unicode()
        ttl = utils.random_unicode()
        chunked = utils.random_unicode()
        metadata = utils.random_unicode()
        chunk_size = utils.random_unicode()
        headers = utils.random_unicode()
        return_none = utils.random_unicode()
        cont.create(file_or_path=file_or_path, data=data, obj_name=obj_name,
                content_type=content_type, etag=etag,
                content_encoding=content_encoding,
                content_length=content_length, ttl=ttl, chunked=chunked,
                metadata=metadata, chunk_size=chunk_size, headers=headers,
                return_none=return_none)
        cont.object_manager.create.assert_called_once_with(
                file_or_path=file_or_path, data=data, obj_name=obj_name,
                content_type=content_type, etag=etag,
                content_encoding=content_encoding,
                content_length=content_length, ttl=ttl, chunked=chunked,
                metadata=metadata, chunk_size=chunk_size, headers=headers,
                return_none=return_none)
    def test_cont_store_object(self):
        """store_object is a legacy alias that maps onto create(); note the
        assertion shows `extra_info` is accepted but NOT forwarded
        (presumably a deprecated parameter — kept for compatibility).
        """
        cont = self.container
        cont.create = Mock()
        obj_name = utils.random_unicode()
        data = utils.random_unicode()
        content_type = utils.random_unicode()
        etag = utils.random_unicode()
        content_encoding = utils.random_unicode()
        ttl = utils.random_unicode()
        return_none = utils.random_unicode()
        headers = utils.random_unicode()
        extra_info = utils.random_unicode()
        cont.store_object(obj_name, data, content_type=content_type, etag=etag,
                content_encoding=content_encoding, ttl=ttl,
                return_none=return_none, headers=headers, extra_info=extra_info)
        cont.create.assert_called_once_with(obj_name=obj_name, data=data,
                content_type=content_type, etag=etag, headers=headers,
                content_encoding=content_encoding, ttl=ttl,
                return_none=return_none)
    def test_cont_upload_file(self):
        """upload_file is a legacy alias that maps directly onto create(),
        forwarding all of its arguments as keywords.
        """
        cont = self.container
        cont.create = Mock()
        file_or_path = utils.random_unicode()
        obj_name = utils.random_unicode()
        content_type = utils.random_unicode()
        etag = utils.random_unicode()
        content_encoding = utils.random_unicode()
        ttl = utils.random_unicode()
        return_none = utils.random_unicode()
        content_length = utils.random_unicode()
        headers = utils.random_unicode()
        cont.upload_file(file_or_path, obj_name=obj_name,
                content_type=content_type, etag=etag,
                content_encoding=content_encoding, ttl=ttl,
                return_none=return_none, content_length=content_length,
                headers=headers)
        cont.create.assert_called_once_with(file_or_path=file_or_path,
                obj_name=obj_name, content_type=content_type, etag=etag,
                content_encoding=content_encoding, headers=headers,
                content_length=content_length, ttl=ttl,
                return_none=return_none)
    def test_cont_fetch(self):
        """fetch() delegates to the object manager; the assertion shows
        `extra_info` is accepted but NOT forwarded (presumably deprecated).
        """
        cont = self.container
        cont.object_manager.fetch = Mock()
        obj = utils.random_unicode()
        include_meta = utils.random_unicode()
        chunk_size = utils.random_unicode()
        size = utils.random_unicode()
        extra_info = utils.random_unicode()
        cont.fetch(obj, include_meta=include_meta, chunk_size=chunk_size,
                size=size, extra_info=extra_info)
        cont.object_manager.fetch.assert_called_once_with(obj,
                include_meta=include_meta, chunk_size=chunk_size, size=size)
def test_cont_fetch_object(self):
cont = self.container
cont.fetch = Mock()
obj_name = utils.random_unicode()
include_meta = utils.random_unicode()
chunk_size = utils.random_unicode()
cont.fetch_object(obj_name, include_meta=include_meta,
chunk_size=chunk_size)
cont.fetch.assert_called_once_with(obj=obj_name,
include_meta=include_meta, chunk_size=chunk_size)
def test_cont_fetch_partial(self):
cont = self.container
cont.object_manager.fetch_partial = Mock()
obj = utils.random_unicode()
size = utils.random_unicode()
cont.fetch_partial(obj, size)
cont.object_manager.fetch_partial.assert_called_once_with(obj, size)
def test_cont_download(self):
cont = self.container
cont.object_manager.download = Mock()
obj = utils.random_unicode()
directory = utils.random_unicode()
structure = utils.random_unicode()
cont.download(obj, directory, structure=structure)
cont.object_manager.download.assert_called_once_with(obj, directory,
structure=structure)
def test_cont_download_object(self):
cont = self.container
cont.download = Mock()
obj_name = utils.random_unicode()
directory = utils.random_unicode()
structure = utils.random_unicode()
cont.download_object(obj_name, directory, structure=structure)
cont.download.assert_called_once_with(obj=obj_name,
directory=directory, structure=structure)
def test_cont_delete(self):
cont = self.container
cont.manager.delete = Mock()
del_objects = utils.random_unicode()
cont.delete(del_objects=del_objects)
cont.manager.delete.assert_called_once_with(cont,
del_objects=del_objects)
def test_cont_delete_object(self):
cont = self.container
cont.object_manager.delete = Mock()
obj = utils.random_unicode()
cont.delete_object(obj)
cont.object_manager.delete.assert_called_once_with(obj)
    def test_cont_delete_object_in_seconds(self):
        """delete_object_in_seconds delegates to the manager; the assertion
        shows `extra_info` is accepted but NOT forwarded (presumably a
        deprecated parameter).
        """
        cont = self.container
        cont.manager.delete_object_in_seconds = Mock()
        obj = utils.random_unicode()
        seconds = utils.random_unicode()
        extra_info = utils.random_unicode()
        cont.delete_object_in_seconds(obj, seconds, extra_info=extra_info)
        cont.manager.delete_object_in_seconds.assert_called_once_with(cont,
                obj, seconds)
    def test_cont_delete_all_objects(self):
        """delete_all_objects collects every object name and hands the list
        to the object manager in one bulk call, forwarding the async flag.

        NOTE: ``async`` is a reserved word in Python 3.7+, so this test
        (and the API it exercises) only runs on Python 2.
        """
        cont = self.container
        cont.object_manager.delete_all_objects = Mock()
        name1 = utils.random_unicode()
        name2 = utils.random_unicode()
        async = utils.random_unicode()
        cont.list_object_names = Mock(return_value=[name1, name2])
        cont.delete_all_objects(async=async)
        cont.object_manager.delete_all_objects.assert_called_once_with(
                [name1, name2], async=async)
def test_cont_copy_object(self):
cont = self.container
cont.manager.copy_object = Mock()
obj = utils.random_unicode()
new_container = utils.random_unicode()
new_obj_name = utils.random_unicode()
content_type = utils.random_unicode()
cont.copy_object(obj, new_container, new_obj_name=new_obj_name,
content_type=content_type)
cont.manager.copy_object.assert_called_once_with(cont, obj,
new_container, new_obj_name=new_obj_name,
content_type=content_type)
    def test_cont_move_object(self):
        """move_object delegates to the manager with the container
        prepended; the assertion shows `extra_info` is accepted but NOT
        forwarded (presumably deprecated).
        """
        cont = self.container
        cont.manager.move_object = Mock()
        obj = utils.random_unicode()
        new_container = utils.random_unicode()
        new_obj_name = utils.random_unicode()
        new_reference = utils.random_unicode()
        content_type = utils.random_unicode()
        extra_info = utils.random_unicode()
        cont.move_object(obj, new_container, new_obj_name=new_obj_name,
                new_reference=new_reference, content_type=content_type,
                extra_info=extra_info)
        cont.manager.move_object.assert_called_once_with(cont, obj,
                new_container, new_obj_name=new_obj_name,
                new_reference=new_reference, content_type=content_type)
def test_cont_change_object_content_type(self):
cont = self.container
cont.manager.change_object_content_type = Mock()
obj = utils.random_unicode()
new_ctype = utils.random_unicode()
guess = utils.random_unicode()
cont.change_object_content_type(obj, new_ctype, guess=guess)
cont.manager.change_object_content_type.assert_called_once_with(cont,
obj, new_ctype, guess=guess)
def test_cont_get_temp_url(self):
cont = self.container
cont.manager.get_temp_url = Mock()
obj = utils.random_unicode()
seconds = utils.random_unicode()
method = utils.random_unicode()
cached = utils.random_unicode()
key = utils.random_unicode()
cont.get_temp_url(obj, seconds, method=method, key=key, cached=cached)
cont.manager.get_temp_url.assert_called_once_with(cont, obj, seconds,
method=method, key=key, cached=cached)
def test_cont_get_object_metadata(self):
cont = self.container
cont.object_manager.get_metadata = Mock()
obj = utils.random_unicode()
cont.get_object_metadata(obj)
cont.object_manager.get_metadata.assert_called_once_with(obj, None)
    def test_cont_set_object_metadata(self):
        """set_object_metadata delegates to the object manager; the
        assertion shows `extra_info` is accepted but NOT forwarded
        (presumably deprecated).
        """
        cont = self.container
        cont.object_manager.set_metadata = Mock()
        obj = utils.random_unicode()
        meta_key = utils.random_unicode()
        meta_val = utils.random_unicode()
        metadata = {meta_key: meta_val}
        clear = utils.random_unicode()
        extra_info = utils.random_unicode()
        prefix = utils.random_unicode()
        cont.set_object_metadata(obj, metadata, clear=clear,
                extra_info=extra_info, prefix=prefix)
        cont.object_manager.set_metadata.assert_called_once_with(obj, metadata,
                clear=clear, prefix=prefix)
    def test_cont_list_subdirs(self):
        """list_subdirs forwards all of its paging arguments, with the
        container prepended, to the manager's list_subdirs().
        """
        cont = self.container
        marker = utils.random_unicode()
        limit = utils.random_unicode()
        prefix = utils.random_unicode()
        delimiter = utils.random_unicode()
        full_listing = False
        cont.manager.list_subdirs = Mock()
        cont.list_subdirs(marker=marker, limit=limit, prefix=prefix,
                delimiter=delimiter, full_listing=full_listing)
        cont.manager.list_subdirs.assert_called_once_with(cont, marker=marker,
                limit=limit, prefix=prefix, delimiter=delimiter,
                full_listing=full_listing)
def test_cont_remove_from_cache(self):
obj = utils.random_unicode()
self.assertIsNone(self.container.remove_from_cache(obj))
    def test_cont_cdn_props(self):
        """Each CDN-related property must resolve its initial FAULT
        placeholder on first read, and be writable (read back the value
        just set) afterwards.
        """
        for prop in ("cdn_enabled", "cdn_log_retention", "cdn_uri", "cdn_ttl",
                "cdn_ssl_uri", "cdn_streaming_uri", "cdn_ios_uri"):
            # Need a fresh container for each
            cont = self.client.create("fake")
            # Mocked so setting cdn_log_retention doesn't hit the API.
            cont.manager.set_cdn_log_retention = Mock()
            val = getattr(cont, prop)
            self.assertTrue(val is not FAULT)
            newval = utils.random_unicode()
            setattr(cont, prop, newval)
            self.assertEqual(getattr(cont, prop), newval)
    def test_cmgr_list(self):
        """The container manager's list() GETs the base URI with the paging
        parameters encoded in the query string, and turns each entry of the
        response body into a Container.
        """
        cont = self.container
        mgr = cont.manager
        limit = utils.random_unicode()
        marker = utils.random_unicode()
        end_marker = utils.random_unicode()
        prefix = utils.random_unicode()
        qs = utils.dict_to_qs({"marker": marker, "limit": limit,
                "prefix": prefix, "end_marker": end_marker})
        exp_uri = "/%s?%s" % (mgr.uri_base, qs)
        name1 = utils.random_unicode()
        name2 = utils.random_unicode()
        resp_body = [{"name": name1}, {"name": name2}]
        mgr.api.method_get = Mock(return_value=(None, resp_body))
        ret = mgr.list(limit=limit, marker=marker, end_marker=end_marker,
                prefix=prefix)
        mgr.api.method_get.assert_called_once_with(exp_uri)
        self.assertEqual(len(ret), 2)
        self.assertTrue(isinstance(ret[0], Container))
    def test_cmgr_get(self):
        """get() HEADs the container and builds a Container whose byte and
        object counts come from the response headers.
        """
        cont = self.container
        mgr = cont.manager
        resp = fakes.FakeResponse()
        cbytes = random.randint(1, 1000)
        ccount = random.randint(1, 1000)
        resp.headers = {"x-container-bytes-used": cbytes,
                "x-container-object-count": ccount}
        mgr.api.method_head = Mock(return_value=(resp, None))
        name = utils.random_unicode()
        ret = mgr.get(name)
        self.assertTrue(isinstance(ret, Container))
        self.assertEqual(ret.name, name)
        self.assertEqual(ret.total_bytes, cbytes)
        self.assertEqual(ret.object_count, ccount)
def test_cmgr_get_not_found(self):
cont = self.container
mgr = cont.manager
mgr.api.method_head = Mock(side_effect=exc.NoSuchContainer(""))
name = utils.random_unicode()
self.assertRaises(exc.NoSuchContainer, mgr.get, name)
    def test_cmgr_create(self):
        """create() PUTs the container URI with the metadata massaged into
        headers using the supplied prefix, then HEADs the same URI to
        populate the new container's stats.
        """
        cont = self.container
        mgr = cont.manager
        resp = fakes.FakeResponse()
        resp.status_code = 201
        mgr.api.method_put = Mock(return_value=(resp, None))
        head_resp = fakes.FakeResponse()
        cbytes = random.randint(1, 1000)
        ccount = random.randint(1, 1000)
        head_resp.headers = {"x-container-bytes-used": cbytes,
                "x-container-object-count": ccount}
        mgr.api.method_head = Mock(return_value=(head_resp, None))
        name = utils.random_unicode()
        key = utils.random_unicode()
        val = utils.random_unicode()
        metadata = {key: val}
        prefix = utils.random_unicode()
        ret = mgr.create(name, metadata=metadata, prefix=prefix)
        exp_uri = "/%s" % name
        exp_headers = _massage_metakeys(metadata, prefix)
        mgr.api.method_put.assert_called_once_with(exp_uri, headers=exp_headers)
        mgr.api.method_head.assert_called_once_with(exp_uri)
    def test_cmgr_create_no_prefix(self):
        """When no prefix is supplied, create() massages the metadata keys
        with the default CONTAINER_META_PREFIX before PUTting.
        """
        cont = self.container
        mgr = cont.manager
        resp = fakes.FakeResponse()
        resp.status_code = 201
        mgr.api.method_put = Mock(return_value=(resp, None))
        head_resp = fakes.FakeResponse()
        cbytes = random.randint(1, 1000)
        ccount = random.randint(1, 1000)
        head_resp.headers = {"x-container-bytes-used": cbytes,
                "x-container-object-count": ccount}
        mgr.api.method_head = Mock(return_value=(head_resp, None))
        name = utils.random_unicode()
        key = utils.random_unicode()
        val = utils.random_unicode()
        metadata = {key: val}
        prefix = None
        ret = mgr.create(name, metadata=metadata, prefix=prefix)
        exp_uri = "/%s" % name
        exp_headers = _massage_metakeys(metadata, CONTAINER_META_PREFIX)
        mgr.api.method_put.assert_called_once_with(exp_uri, headers=exp_headers)
        mgr.api.method_head.assert_called_once_with(exp_uri)
def test_cmgr_create_fail(self):
cont = self.container
mgr = cont.manager
resp = fakes.FakeResponse()
resp.status_code = 400
mgr.api.method_put = Mock(return_value=(resp, None))
name = utils.random_unicode()
self.assertRaises(exc.ClientException, mgr.create, name)
    def test_cmgr_delete(self):
        """delete() with del_objects=True first bulk-deletes every object
        (synchronously: async=False) and then DELETEs the container URI.

        NOTE: ``async`` is a reserved word in Python 3.7+, so this test
        only runs on Python 2.
        """
        cont = self.container
        mgr = cont.manager
        names = utils.random_unicode()
        mgr.list_object_names = Mock(return_value=names)
        mgr.api.bulk_delete = Mock()
        exp_uri = "/%s" % cont.name
        mgr.api.method_delete = Mock(return_value=(None, None))
        mgr.delete(cont, del_objects=True)
        mgr.list_object_names.assert_called_once_with(cont, full_listing=True)
        mgr.api.bulk_delete.assert_called_once_with(cont, names, async=False)
        mgr.api.method_delete.assert_called_once_with(exp_uri)
def test_cmgr_create_body(self):
cont = self.container
mgr = cont.manager
name = utils.random_unicode()
ret = mgr._create_body(name)
self.assertIsNone(ret)
def test_cmgr_fetch_cdn_data(self):
cont = self.container
mgr = cont.manager
resp = fakes.FakeResponse()
resp.headers = utils.random_unicode()
mgr.api.cdn_request = Mock(return_value=(resp, None))
ret = mgr.fetch_cdn_data(cont)
exp_uri = "/%s" % cont.name
mgr.api.cdn_request.assert_called_once_with(exp_uri, "HEAD")
self.assertEqual(ret, resp.headers)
def test_cmgr_fetch_cdn_data_not_cdn_enabled(self):
cont = self.container
mgr = cont.manager
mgr.api.cdn_request = Mock(side_effect=exc.NotCDNEnabled(""))
ret = mgr.fetch_cdn_data(cont)
self.assertEqual(ret, {})
def test_cmgr_get_account_headers(self):
cont = self.container
mgr = cont.manager
resp = fakes.FakeResponse()
resp.headers = utils.random_unicode()
mgr.api.method_head = Mock(return_value=(resp, None))
ret = mgr.get_account_headers()
self.assertEqual(ret, resp.headers)
mgr.api.method_head.assert_called_once_with("/")
def test_cmgr_get_headers(self):
cont = self.container
mgr = cont.manager
resp = fakes.FakeResponse()
resp.headers = utils.random_unicode()
mgr.api.method_head = Mock(return_value=(resp, None))
ret = mgr.get_headers(cont)
exp_uri = "/%s" % cont.name
self.assertEqual(ret, resp.headers)
mgr.api.method_head.assert_called_once_with(exp_uri)
    def test_cmgr_get_account_metadata(self):
        """With an explicit prefix, get_account_metadata returns only the
        headers whose keys start with that prefix, keys unchanged.
        """
        cont = self.container
        mgr = cont.manager
        prefix = utils.random_ascii()
        key_good = prefix + utils.random_ascii()
        key_bad = utils.random_ascii()
        val_good = utils.random_ascii()
        val_bad = utils.random_ascii()
        headers = {key_good: val_good, key_bad: val_bad}
        mgr.get_account_headers = Mock(return_value=headers)
        ret = mgr.get_account_metadata(prefix=prefix)
        self.assertEqual(ret, {key_good: val_good})
    def test_cmgr_get_account_metadata_no_prefix(self):
        """Without a prefix, get_account_metadata defaults to
        ACCOUNT_META_PREFIX and strips it from the returned keys.
        """
        cont = self.container
        mgr = cont.manager
        prefix = None
        key_good_base = utils.random_ascii()
        key_good = ACCOUNT_META_PREFIX.lower() + key_good_base
        key_bad = utils.random_ascii()
        val_good = utils.random_ascii()
        val_bad = utils.random_ascii()
        headers = {key_good: val_good, key_bad: val_bad}
        mgr.get_account_headers = Mock(return_value=headers)
        ret = mgr.get_account_metadata(prefix=prefix)
        self.assertEqual(ret, {key_good_base: val_good})
    def test_cmgr_set_account_metadata(self):
        """set_account_metadata POSTs the metadata and reports success as a
        boolean: True for a 2xx response, False otherwise.
        """
        cont = self.container
        mgr = cont.manager
        prefix = utils.random_unicode()
        key = utils.random_unicode()
        val = utils.random_unicode()
        metadata = {key: val}
        resp = fakes.FakeResponse()
        mgr.api.method_post = Mock(return_value=(resp, None))
        resp.status_code = 200
        ret = mgr.set_account_metadata(metadata, clear=False, prefix=prefix)
        self.assertTrue(ret)
        resp.status_code = 400
        ret = mgr.set_account_metadata(metadata, clear=False, prefix=prefix)
        self.assertFalse(ret)
def test_cmgr_set_account_metadata_no_prefix(self):
cont = self.container
mgr = cont.manager
prefix = None
key = utils.random_unicode()
val = utils.random_unicode()
metadata = {key: val}
resp = fakes.FakeResponse()
mgr.api.method_post = Mock(return_value=(resp, None))
resp.status_code = 200
ret = mgr.set_account_metadata(metadata, clear=False, prefix=prefix)
self.assertTrue(ret)
resp.status_code = 400
ret = mgr.set_account_metadata(metadata, clear=False, prefix=prefix)
    def test_cmgr_set_account_metadata_clear(self):
        """With clear=True, set_account_metadata first reads the existing
        account metadata (so it can be blanked) before POSTing the new
        values; a 2xx response yields True.
        """
        cont = self.container
        mgr = cont.manager
        prefix = None
        resp = fakes.FakeResponse()
        key = utils.random_unicode()
        val = utils.random_unicode()
        metadata = {key: val}
        old_key = utils.random_unicode()
        old_val = utils.random_unicode()
        old_metadata = {old_key: old_val}
        mgr.api.method_post = Mock(return_value=(resp, None))
        mgr.get_account_metadata = Mock(return_value=old_metadata)
        resp.status_code = 200
        ret = mgr.set_account_metadata(metadata, clear=True, prefix=prefix)
        self.assertTrue(ret)
def test_cmgr_delete_account_metadata(self):
cont = self.container
mgr = cont.manager
prefix = None
key = utils.random_unicode()
val = utils.random_unicode()
metadata = {key: val}
mgr.get_account_metadata = Mock(return_value=metadata)
resp = fakes.FakeResponse()
mgr.api.method_post = Mock(return_value=(resp, None))
resp.status_code = 200
ret = mgr.delete_account_metadata(prefix=prefix)
self.assertTrue(ret)
resp.status_code = 400
ret = mgr.delete_account_metadata(prefix=prefix)
self.assertFalse(ret)
    def test_cmgr_get_metadata(self):
        """With an explicit prefix, get_metadata returns only the container
        headers whose keys start with that prefix, keys unchanged.
        """
        cont = self.container
        mgr = cont.manager
        prefix = utils.random_ascii()
        key_good = prefix + utils.random_ascii()
        key_bad = utils.random_ascii()
        val_good = utils.random_ascii()
        val_bad = utils.random_ascii()
        headers = {key_good: val_good, key_bad: val_bad}
        mgr.get_headers = Mock(return_value=headers)
        ret = mgr.get_metadata(cont, prefix=prefix)
        self.assertEqual(ret, {key_good: val_good})
    def test_cmgr_get_metadata_no_prefix(self):
        """Without a prefix, get_metadata defaults to
        CONTAINER_META_PREFIX and strips it from the returned keys.
        """
        cont = self.container
        mgr = cont.manager
        prefix = None
        key_good_base = utils.random_ascii()
        key_good = CONTAINER_META_PREFIX.lower() + key_good_base
        key_bad = utils.random_ascii()
        val_good = utils.random_ascii()
        val_bad = utils.random_ascii()
        headers = {key_good: val_good, key_bad: val_bad}
        mgr.get_headers = Mock(return_value=headers)
        ret = mgr.get_metadata(cont, prefix=prefix)
        self.assertEqual(ret, {key_good_base: val_good})
def test_cmgr_set_metadata(self):
cont = self.container
mgr = cont.manager
prefix = None
key = utils.random_ascii()
val = utils.random_ascii()
metadata = {key: val}
resp = fakes.FakeResponse()
mgr.api.method_post = Mock(return_value=(resp, None))
resp.status_code = 200
ret = mgr.set_metadata(cont, metadata, clear=False, prefix=prefix)
self.assertTrue(ret)
resp.status_code = 400
ret = mgr.set_metadata(cont, metadata, clear=False, prefix=prefix)
    def test_cmgr_set_metadata_clear(self):
        """With clear=True, set_metadata first reads the existing container
        metadata (so it can be blanked) before POSTing the new values; a
        2xx response yields True.
        """
        cont = self.container
        mgr = cont.manager
        prefix = None
        resp = fakes.FakeResponse()
        key = utils.random_ascii()
        val = utils.random_ascii()
        metadata = {key: val}
        old_key = utils.random_ascii()
        old_val = utils.random_ascii()
        old_metadata = {old_key: old_val}
        mgr.api.method_post = Mock(return_value=(resp, None))
        mgr.get_metadata = Mock(return_value=old_metadata)
        resp.status_code = 200
        ret = mgr.set_metadata(cont, metadata, clear=True, prefix=prefix)
        self.assertTrue(ret)
def test_cmgr_remove_metadata_key(self):
cont = self.container
mgr = cont.manager
key = utils.random_ascii()
mgr.set_metadata = Mock()
mgr.remove_metadata_key(cont, key)
mgr.set_metadata.assert_called_once_with(cont, {key: ""})
def test_cmgr_delete_metadata(self):
cont = self.container
mgr = cont.manager
prefix = None
key = utils.random_ascii()
val = utils.random_ascii()
metadata = {key: val}
mgr.get_metadata = Mock(return_value=metadata)
resp = fakes.FakeResponse()
mgr.api.method_post = Mock(return_value=(resp, None))
resp.status_code = 200
ret = mgr.delete_metadata(cont, prefix=prefix)
self.assertTrue(ret)
def test_cmgr_get_cdn_metadata(self):
cont = self.container
mgr = cont.manager
resp = fakes.FakeResponse()
key = utils.random_ascii()
val = utils.random_ascii()
headers = {key: val, "date": time.ctime()}
resp.headers = headers
mgr.api.cdn_request = Mock(return_value=(resp, None))
ret = mgr.get_cdn_metadata(cont)
self.assertTrue(key in ret)
self.assertFalse("date" in ret)
    def test_cmgr_set_cdn_metadata(self):
        """Allowed CDN metadata keys are POSTed to the container's CDN
        endpoint with their values coerced to strings.
        """
        cont = self.container
        mgr = cont.manager
        resp = fakes.FakeResponse()
        # "x-ttl" is one of the permitted CDN metadata keys.
        key = "x-ttl"
        val = 666
        metadata = {key: val}
        exp_meta = {key: str(val)}
        exp_uri = "/%s" % cont.name
        mgr.api.cdn_request = Mock(return_value=(resp, None))
        ret = mgr.set_cdn_metadata(cont, metadata)
        mgr.api.cdn_request.assert_called_once_with(exp_uri, "POST",
                headers=exp_meta)
def test_cmgr_set_cdn_metadata_invalid(self):
cont = self.container
mgr = cont.manager
resp = fakes.FakeResponse()
key = "INVALID"
val = 666
metadata = {key: val}
self.assertRaises(exc.InvalidCDNMetadata, mgr.set_cdn_metadata, cont,
metadata)
def test_cmgr_get_temp_url_no_key(self):
cont = self.container
mgr = cont.manager
obj = utils.random_unicode()
seconds = utils.random_unicode()
key = None
mgr.api.get_temp_url_key = Mock(return_value=None)
self.assertRaises(exc.MissingTemporaryURLKey, mgr.get_temp_url, cont,
obj, seconds, key=key)
def test_cmgr_get_temp_url_bad_method(self):
cont = self.container
mgr = cont.manager
obj = utils.random_unicode()
seconds = utils.random_unicode()
key = utils.random_unicode()
method = "INVALID"
self.assertRaises(exc.InvalidTemporaryURLMethod, mgr.get_temp_url, cont,
obj, seconds, method=method, key=key)
    def test_cmgr_get_temp_url_unicode_error(self):
        """A non-ASCII object name cannot be signed, so get_temp_url must
        raise UnicodePathError.
        """
        cont = self.container
        mgr = cont.manager
        obj = utils.random_unicode()
        seconds = random.randint(1, 1000)
        key = utils.random_unicode()
        method = "GET"
        mgr.api.management_url = "%s/v2/" % fakes.example_uri
        self.assertRaises(exc.UnicodePathError, mgr.get_temp_url, cont,
                obj, seconds, method=method, key=key)
    def test_cmgr_get_temp_url(self):
        """For an ASCII object name, get_temp_url returns a URL rooted at
        the management URL that carries both the signature and expiry
        query parameters.
        """
        cont = self.container
        mgr = cont.manager
        obj = utils.random_ascii()
        seconds = random.randint(1, 1000)
        key = utils.random_ascii()
        method = "GET"
        mgmt_url = "%s/v2/" % fakes.example_uri
        mgr.api.management_url = mgmt_url
        ret = mgr.get_temp_url(cont, obj, seconds, method=method, key=key)
        self.assertTrue(ret.startswith(mgmt_url))
        self.assertTrue("temp_url_sig" in ret)
        self.assertTrue("temp_url_expires" in ret)
def test_cmgr_list_containers_info(self):
cont = self.container
mgr = cont.manager
limit = utils.random_unicode()
marker = utils.random_unicode()
body = utils.random_unicode()
mgr.api.method_get = Mock(return_value=(None, body))
ret = mgr.list_containers_info(limit=limit, marker=marker)
self.assertEqual(mgr.api.method_get.call_count, 1)
self.assertEqual(ret, body)
def test_cmgr_list_public_containers(self):
cont = self.container
mgr = cont.manager
name1 = utils.random_unicode()
name2 = utils.random_unicode()
body = [{"name": name1}, {"name": name2}]
mgr.api.cdn_request = Mock(return_value=(None, body))
ret = mgr.list_public_containers()
mgr.api.cdn_request.assert_called_once_with("", "GET")
self.assertTrue(isinstance(ret, list))
self.assertTrue(name1 in ret)
self.assertTrue(name2 in ret)
def test_cmgr_make_public(self):
cont = self.container
mgr = cont.manager
ttl = utils.random_unicode()
mgr._set_cdn_access = Mock()
mgr.make_public(cont, ttl=ttl)
mgr._set_cdn_access.assert_called_once_with(cont, public=True, ttl=ttl)
def test_cmgr_make_private(self):
cont = self.container
mgr = cont.manager
mgr._set_cdn_access = Mock()
mgr.make_private(cont)
mgr._set_cdn_access.assert_called_once_with(cont, public=False)
    def test_cmgr_set_cdn_access(self):
        """_set_cdn_access PUTs X-Cdn-Enabled to the CDN endpoint; the
        X-Ttl header is only included when making the container public.
        """
        cont = self.container
        mgr = cont.manager
        for pub in (True, False):
            ttl = utils.random_unicode()
            exp_headers = {"X-Cdn-Enabled": str(pub)}
            if pub:
                exp_headers["X-Ttl"] = ttl
            exp_uri = "/%s" % cont.name
            # Fresh mock each iteration so assert_called_once_with holds.
            mgr.api.cdn_request = Mock()
            mgr._set_cdn_access(cont, pub, ttl=ttl)
            mgr.api.cdn_request.assert_called_once_with(exp_uri, method="PUT",
                    headers=exp_headers)
    def test_cmgr_get_cdn_log_retention(self):
        """get_cdn_log_retention HEADs the CDN endpoint and parses the
        x-log-retention header string back into a boolean.
        """
        cont = self.container
        mgr = cont.manager
        resp = fakes.FakeResponse()
        retain = random.choice((True, False))
        headers = {"fake": "fake", "x-log-retention": str(retain)}
        resp.headers = headers
        mgr.api.cdn_request = Mock(return_value=(resp, None))
        exp_uri = "/%s" % cont.name
        ret = mgr.get_cdn_log_retention(cont)
        self.assertEqual(ret, retain)
        mgr.api.cdn_request.assert_called_once_with(exp_uri, method="HEAD")
    def test_cmgr_set_cdn_log_retention(self):
        """set_cdn_log_retention PUTs the X-Log-Retention header on the container."""
        cont = self.container
        mgr = cont.manager
        resp = fakes.FakeResponse()
        retain = random.choice((True, False))
        exp_headers = {"X-Log-Retention": str(retain)}
        mgr.api.cdn_request = Mock(return_value=(resp, None))
        exp_uri = "/%s" % cont.name
        mgr.set_cdn_log_retention(cont, retain)
        mgr.api.cdn_request.assert_called_once_with(exp_uri, method="PUT",
                headers=exp_headers)
    def test_cmgr_get_container_streaming_uri(self):
        """get_container_streaming_uri reads x-cdn-streaming-uri from a HEAD."""
        cont = self.container
        mgr = cont.manager
        resp = fakes.FakeResponse()
        uri = utils.random_unicode()
        headers = {"fake": "fake", "x-cdn-streaming-uri": uri}
        resp.headers = headers
        mgr.api.cdn_request = Mock(return_value=(resp, None))
        exp_uri = "/%s" % cont.name
        ret = mgr.get_container_streaming_uri(cont)
        self.assertEqual(ret, uri)
        mgr.api.cdn_request.assert_called_once_with(exp_uri, method="HEAD")
    def test_cmgr_get_container_ios_uri(self):
        """get_container_ios_uri reads x-cdn-ios-uri from a HEAD on the container."""
        cont = self.container
        mgr = cont.manager
        resp = fakes.FakeResponse()
        uri = utils.random_unicode()
        headers = {"fake": "fake", "x-cdn-ios-uri": uri}
        resp.headers = headers
        mgr.api.cdn_request = Mock(return_value=(resp, None))
        exp_uri = "/%s" % cont.name
        ret = mgr.get_container_ios_uri(cont)
        self.assertEqual(ret, uri)
        mgr.api.cdn_request.assert_called_once_with(exp_uri, method="HEAD")
    def test_cmgr_set_web_index_page(self):
        """set_web_index_page POSTs the X-Container-Meta-Web-Index header."""
        cont = self.container
        mgr = cont.manager
        page = utils.random_unicode()
        exp_headers = {"X-Container-Meta-Web-Index": page}
        exp_uri = "/%s" % cont.name
        mgr.api.cdn_request = Mock()
        mgr.set_web_index_page(cont, page)
        mgr.api.cdn_request.assert_called_once_with(exp_uri, method="POST",
                headers=exp_headers)
    def test_cmgr_set_web_error_page(self):
        """set_web_error_page POSTs the X-Container-Meta-Web-Error header."""
        cont = self.container
        mgr = cont.manager
        page = utils.random_unicode()
        exp_headers = {"X-Container-Meta-Web-Error": page}
        exp_uri = "/%s" % cont.name
        mgr.api.cdn_request = Mock()
        mgr.set_web_error_page(cont, page)
        mgr.api.cdn_request.assert_called_once_with(exp_uri, method="POST",
                headers=exp_headers)
    @patch("pyrax.object_storage.assure_container")
    def test_cmgr_purge_cdn_object(self, mock_ac):
        """purge_cdn_object resolves the container and forwards to its method."""
        cont = self.container
        mgr = cont.manager
        mock_ac.return_value = cont
        cont.purge_cdn_object = Mock()
        obj = utils.random_unicode()
        email_addresses = utils.random_unicode()
        mgr.purge_cdn_object(cont, obj, email_addresses=email_addresses)
        cont.purge_cdn_object.assert_called_once_with(obj,
                email_addresses=email_addresses)
    def test_cmgr_list_objects(self):
        """With full_listing=False, list_objects delegates to container.list()."""
        cont = self.container
        mgr = cont.manager
        mgr.get = Mock(return_value=cont)
        cont.list = Mock()
        marker = utils.random_unicode()
        limit = utils.random_unicode()
        prefix = utils.random_unicode()
        delimiter = utils.random_unicode()
        end_marker = utils.random_unicode()
        full_listing = False
        mgr.list_objects(cont, marker=marker, limit=limit, prefix=prefix,
                delimiter=delimiter, end_marker=end_marker,
                full_listing=full_listing)
        cont.list.assert_called_once_with(marker=marker, limit=limit,
                prefix=prefix, delimiter=delimiter, end_marker=end_marker)
    def test_cmgr_list_objects_full(self):
        """With full_listing=True, list_objects uses list_all (prefix only)."""
        cont = self.container
        mgr = cont.manager
        mgr.get = Mock(return_value=cont)
        cont.list_all = Mock()
        marker = utils.random_unicode()
        limit = utils.random_unicode()
        prefix = utils.random_unicode()
        delimiter = utils.random_unicode()
        end_marker = utils.random_unicode()
        full_listing = True
        mgr.list_objects(cont, marker=marker, limit=limit, prefix=prefix,
                delimiter=delimiter, end_marker=end_marker,
                full_listing=full_listing)
        cont.list_all.assert_called_once_with(prefix=prefix)
    def test_cmgr_list_object_names(self):
        """list_object_names passes all listing parameters through unchanged."""
        cont = self.container
        mgr = cont.manager
        mgr.get = Mock(return_value=cont)
        cont.list_object_names = Mock()
        marker = utils.random_unicode()
        limit = utils.random_unicode()
        prefix = utils.random_unicode()
        delimiter = utils.random_unicode()
        end_marker = utils.random_unicode()
        full_listing = True
        mgr.list_object_names(cont, marker=marker, limit=limit, prefix=prefix,
                delimiter=delimiter, end_marker=end_marker,
                full_listing=full_listing)
        cont.list_object_names.assert_called_once_with(marker=marker,
                limit=limit, prefix=prefix, delimiter=delimiter,
                end_marker=end_marker, full_listing=full_listing)
    @patch("pyrax.object_storage.StorageObjectIterator", new=fakes.FakeIterator)
    def test_cmgr_object_listing_iterator(self):
        """object_listing_iterator returns a ResultsIterator subclass instance."""
        cont = self.container
        mgr = cont.manager
        prefix = utils.random_unicode()
        ret = mgr.object_listing_iterator(cont, prefix=prefix)
        # FakeIterator is patched in; presumably it subclasses
        # utils.ResultsIterator — the isinstance check relies on that.
        self.assertTrue(isinstance(ret, utils.ResultsIterator))
def test_cmgr_list_subdirs(self):
cont = self.container
mgr = cont.manager
mgr.get = Mock(return_value=cont)
name1 = utils.random_ascii()
name2 = utils.random_ascii()
sdir = utils.random_ascii()
objs = [{"name": name1, "content_type": "fake"},
{"subdir": sdir}]
cont.list = Mock(return_value=objs)
marker = utils.random_unicode()
limit = utils.random_unicode()
prefix = utils.random_unicode()
delimiter = utils.random_unicode()
end_marker = utils.random_unicode()
full_listing = False
ret = mgr.list_subdirs(cont, marker=marker, limit=limit, prefix=prefix,
delimiter=delimiter, full_listing=full_listing)
cont.list.assert_called_once_with(marker=marker, limit=limit,
prefix=prefix, delimiter="/", return_raw=True)
self.assertEqual(ret[0].name, sdir)
    def test_cmgr_get_object(self):
        """get_object resolves the container and forwards to its get_object."""
        cont = self.container
        mgr = cont.manager
        mgr.get = Mock(return_value=cont)
        cont.get_object = Mock()
        obj = utils.random_unicode()
        mgr.get_object(cont, obj)
        cont.get_object.assert_called_once_with(obj)
    def test_cmgr_create_object(self):
        """create_object forwards every keyword argument to container.create()."""
        cont = self.container
        mgr = cont.manager
        cont.create = Mock()
        file_or_path = utils.random_unicode()
        data = utils.random_unicode()
        obj_name = utils.random_unicode()
        content_type = utils.random_unicode()
        etag = utils.random_unicode()
        content_encoding = utils.random_unicode()
        content_length = utils.random_unicode()
        ttl = utils.random_unicode()
        chunked = utils.random_unicode()
        key = utils.random_unicode()
        val = utils.random_unicode()
        metadata = {key: val}
        chunk_size = utils.random_unicode()
        headers = utils.random_unicode()
        return_none = utils.random_unicode()
        mgr.create_object(cont, file_or_path=file_or_path, data=data,
                obj_name=obj_name, content_type=content_type, etag=etag,
                content_encoding=content_encoding,
                content_length=content_length, ttl=ttl, chunked=chunked,
                metadata=metadata, chunk_size=chunk_size, headers=headers,
                return_none=return_none)
        cont.create.assert_called_once_with(file_or_path=file_or_path,
                data=data, obj_name=obj_name, content_type=content_type,
                etag=etag, content_encoding=content_encoding,
                content_length=content_length, ttl=ttl, chunked=chunked,
                metadata=metadata, chunk_size=chunk_size, headers=headers,
                return_none=return_none)
    def test_cmgr_fetch_object(self):
        """fetch_object forwards to container.fetch, dropping extra_info."""
        cont = self.container
        mgr = cont.manager
        obj = utils.random_unicode()
        include_meta = utils.random_unicode()
        chunk_size = utils.random_unicode()
        size = utils.random_unicode()
        extra_info = utils.random_unicode()
        cont.fetch = Mock()
        mgr.fetch_object(cont, obj, include_meta=include_meta,
                chunk_size=chunk_size, size=size, extra_info=extra_info)
        # Note: extra_info is accepted but not passed on to fetch().
        cont.fetch.assert_called_once_with(obj, include_meta=include_meta,
                chunk_size=chunk_size, size=size)
    def test_cmgr_fetch_partial(self):
        """fetch_partial forwards the object and size to the container."""
        cont = self.container
        mgr = cont.manager
        obj = utils.random_unicode()
        size = utils.random_unicode()
        cont.fetch_partial = Mock()
        mgr.fetch_partial(cont, obj, size)
        cont.fetch_partial.assert_called_once_with(obj, size)
    def test_cmgr_download_object(self):
        """download_object forwards to container.download with structure."""
        cont = self.container
        mgr = cont.manager
        obj = utils.random_unicode()
        directory = utils.random_unicode()
        structure = utils.random_unicode()
        cont.download = Mock()
        mgr.download_object(cont, obj, directory, structure=structure)
        cont.download.assert_called_once_with(obj, directory,
                structure=structure)
    def test_cmgr_delete_object(self):
        """delete_object forwards to the container's delete_object."""
        cont = self.container
        mgr = cont.manager
        obj = utils.random_unicode()
        cont.delete_object = Mock()
        mgr.delete_object(cont, obj)
        cont.delete_object.assert_called_once_with(obj)
    def test_cmgr_copy_object(self):
        """copy_object PUTs to the target with X-Copy-From and returns the etag."""
        cont = self.container
        mgr = cont.manager
        obj = utils.random_unicode()
        new_container = utils.random_unicode()
        new_obj_name = utils.random_unicode()
        content_type = utils.random_unicode()
        resp = fakes.FakeResponse()
        etag = utils.random_unicode()
        resp.headers = {"etag": etag}
        mgr.api.method_put = Mock(return_value=(resp, None))
        exp_uri = "/%s/%s" % (new_container, new_obj_name)
        exp_from = "/%s/%s" % (cont.name, obj)
        # Server-side copy: zero-length PUT whose source is named in a header.
        exp_headers = {"X-Copy-From": exp_from,
                "Content-Length": "0",
                "Content-Type": content_type}
        ret = mgr.copy_object(cont, obj, new_container,
                new_obj_name=new_obj_name, content_type=content_type)
        mgr.api.method_put.assert_called_once_with(exp_uri, headers=exp_headers)
        self.assertEqual(ret, etag)
    def test_cmgr_move_object(self):
        """move_object returns the new object or the etag per new_reference."""
        cont = self.container
        mgr = cont.manager
        obj = utils.random_unicode()
        new_container = utils.random_unicode()
        new_obj_name = utils.random_unicode()
        content_type = utils.random_unicode()
        etag = utils.random_unicode()
        new_obj = utils.random_unicode()
        mgr.copy_object = Mock(return_value=etag)
        mgr.delete_object = Mock()
        mgr.get_object = Mock(return_value=new_obj)
        for new_reference in (True, False):
            ret = mgr.move_object(cont, obj, new_container,
                    new_obj_name=new_obj_name, new_reference=new_reference,
                    content_type=content_type)
            if new_reference:
                self.assertEqual(ret, new_obj)
            else:
                self.assertEqual(ret, etag)
    def test_cmgr_move_object_fail(self):
        """If the copy yields no etag, move_object returns None (no delete)."""
        cont = self.container
        mgr = cont.manager
        obj = utils.random_unicode()
        new_container = utils.random_unicode()
        new_obj_name = utils.random_unicode()
        content_type = utils.random_unicode()
        etag = utils.random_unicode()
        new_obj = utils.random_unicode()
        mgr.copy_object = Mock(return_value=None)
        mgr.delete_object = Mock()
        mgr.get_object = Mock()
        new_reference = False
        ret = mgr.move_object(cont, obj, new_container,
                new_obj_name=new_obj_name, new_reference=new_reference,
                content_type=content_type)
        self.assertIsNone(ret)
    @patch("mimetypes.guess_type")
    def test_cmgr_change_object_content_type(self, mock_guess):
        """change_object_content_type copies over itself with the new type.

        Exercised both with and without content-type guessing enabled.
        """
        cont = self.container
        mgr = cont.manager
        mgr.get = Mock(return_value=cont)
        obj = utils.random_unicode()
        new_ctype = utils.random_unicode()
        cont.cdn_enabled = True
        cont.cdn_uri = utils.random_unicode()
        for guess in (True, False):
            if guess:
                mock_guess.return_value = (new_ctype, None)
            mgr.copy_object = Mock()
            mgr.change_object_content_type(cont, obj, new_ctype, guess=guess)
            mgr.copy_object.assert_called_once_with(cont, obj, cont,
                    content_type=new_ctype)
    def test_cmgr_delete_object_in_seconds(self):
        """delete_object_in_seconds sets X-Delete-After metadata with clear=True."""
        cont = self.container
        mgr = cont.manager
        obj = utils.random_unicode()
        seconds = utils.random_unicode()
        extra_info = utils.random_unicode()
        mgr.set_object_metadata = Mock()
        exp_meta = {"X-Delete-After": seconds}
        mgr.delete_object_in_seconds(cont, obj, seconds, extra_info=extra_info)
        mgr.set_object_metadata.assert_called_once_with(cont, obj, exp_meta,
                clear=True, prefix="")
    def test_cmgr_get_object_metadata(self):
        """get_object_metadata forwards to the container with prefix=None."""
        cont = self.container
        mgr = cont.manager
        mgr.get = Mock(return_value=cont)
        obj = utils.random_unicode()
        cont.get_object_metadata = Mock()
        mgr.get_object_metadata(cont, obj)
        cont.get_object_metadata.assert_called_once_with(obj, prefix=None)
    def test_cmgr_set_object_metadata(self):
        """set_object_metadata forwards metadata, clear, and prefix unchanged."""
        cont = self.container
        mgr = cont.manager
        mgr.get = Mock(return_value=cont)
        obj = utils.random_unicode()
        metadata = utils.random_unicode()
        clear = random.choice((True, False))
        prefix = utils.random_unicode()
        cont.set_object_metadata = Mock()
        mgr.set_object_metadata(cont, obj, metadata, clear=clear,
                prefix=prefix)
        cont.set_object_metadata.assert_called_once_with(obj, metadata,
                clear=clear, prefix=prefix)
def test_sobj_repr(self):
obj = self.obj
obj_repr = "%s" % obj
self.assertTrue("<Object " in obj_repr)
self.assertTrue(obj.name in obj_repr)
    def test_sobj_id(self):
        """A StorageObject's id is the same as its name."""
        cont = self.container
        nm = utils.random_unicode()
        obj = StorageObject(cont.object_manager, {"name": nm})
        self.assertEqual(obj.name, nm)
        self.assertEqual(obj.id, nm)
def test_sobj_total_bytes(self):
obj = self.obj
num_bytes = random.randint(1, 100000)
obj.bytes = num_bytes
self.assertEqual(obj.total_bytes, num_bytes)
def test_sobj_etag(self):
obj = self.obj
hashval = utils.random_unicode()
obj.hash = hashval
self.assertEqual(obj.etag, hashval)
def test_sobj_container(self):
obj = self.obj
fake_cont = utils.random_unicode()
obj.manager._container = fake_cont
cont = obj.container
self.assertEqual(cont, fake_cont)
    def test_sobj_get(self):
        """obj.get delegates to manager.fetch with itself as the object."""
        obj = self.obj
        mgr = obj.manager
        mgr.fetch = Mock()
        include_meta = utils.random_unicode()
        chunk_size = utils.random_unicode()
        obj.get(include_meta=include_meta, chunk_size=chunk_size)
        mgr.fetch.assert_called_once_with(obj=obj, include_meta=include_meta,
                chunk_size=chunk_size)
    def test_sobj_fetch(self):
        """obj.fetch delegates to manager.fetch (same contract as obj.get)."""
        obj = self.obj
        mgr = obj.manager
        mgr.fetch = Mock()
        include_meta = utils.random_unicode()
        chunk_size = utils.random_unicode()
        obj.fetch(include_meta=include_meta, chunk_size=chunk_size)
        mgr.fetch.assert_called_once_with(obj=obj, include_meta=include_meta,
                chunk_size=chunk_size)
    def test_sobj_download(self):
        """obj.download delegates to manager.download."""
        obj = self.obj
        mgr = obj.manager
        mgr.download = Mock()
        directory = utils.random_unicode()
        structure = utils.random_unicode()
        obj.download(directory, structure=structure)
        mgr.download.assert_called_once_with(obj, directory,
                structure=structure)
    def test_sobj_copy(self):
        """obj.copy delegates to its container's copy_object (extra_info dropped)."""
        obj = self.obj
        mgr = obj.manager
        cont = obj.container
        cont.copy_object = Mock()
        new_container = utils.random_unicode()
        new_obj_name = utils.random_unicode()
        extra_info = utils.random_unicode()
        obj.copy(new_container, new_obj_name=new_obj_name,
                extra_info=extra_info)
        cont.copy_object.assert_called_once_with(obj, new_container,
                new_obj_name=new_obj_name)
    def test_sobj_move(self):
        """obj.move delegates to its container's move_object (extra_info dropped)."""
        obj = self.obj
        mgr = obj.manager
        cont = obj.container
        cont.move_object = Mock()
        new_container = utils.random_unicode()
        new_obj_name = utils.random_unicode()
        extra_info = utils.random_unicode()
        obj.move(new_container, new_obj_name=new_obj_name,
                extra_info=extra_info)
        cont.move_object.assert_called_once_with(obj, new_container,
                new_obj_name=new_obj_name)
    def test_sobj_change_content_type(self):
        """obj.change_content_type delegates to the container's method."""
        obj = self.obj
        mgr = obj.manager
        cont = obj.container
        cont.change_object_content_type = Mock()
        new_ctype = utils.random_unicode()
        guess = utils.random_unicode()
        obj.change_content_type(new_ctype, guess=guess)
        cont.change_object_content_type.assert_called_once_with(obj,
                new_ctype=new_ctype, guess=guess)
    def test_sobj_purge(self):
        """obj.purge delegates to manager.purge with the email addresses."""
        obj = self.obj
        mgr = obj.manager
        email_addresses = utils.random_unicode()
        mgr.purge = Mock()
        obj.purge(email_addresses=email_addresses)
        mgr.purge.assert_called_once_with(obj, email_addresses=email_addresses)
    def test_sobj_get_metadata(self):
        """obj.get_metadata delegates to manager.get_metadata."""
        obj = self.obj
        mgr = obj.manager
        mgr.get_metadata = Mock()
        obj.get_metadata()
        mgr.get_metadata.assert_called_once_with(obj, None)
    def test_sobj_set_metadata(self):
        """obj.set_metadata forwards metadata, clear, and prefix to the manager."""
        obj = self.obj
        mgr = obj.manager
        mgr.set_metadata = Mock()
        metadata = utils.random_unicode()
        clear = utils.random_unicode()
        prefix = utils.random_unicode()
        obj.set_metadata(metadata, clear=clear, prefix=prefix)
        mgr.set_metadata.assert_called_once_with(obj, metadata, clear=clear,
                prefix=prefix)
    def test_sobj_remove_metadata_key(self):
        """obj.remove_metadata_key delegates to the manager with the prefix."""
        obj = self.obj
        mgr = obj.manager
        mgr.remove_metadata_key = Mock()
        key = utils.random_unicode()
        prefix = utils.random_unicode()
        obj.remove_metadata_key(key, prefix=prefix)
        mgr.remove_metadata_key.assert_called_once_with(obj, key, prefix=prefix)
    def test_sobj_get_temp_url(self):
        """obj.get_temp_url delegates to the container's get_temp_url."""
        obj = self.obj
        cont = obj.container
        cont.get_temp_url = Mock()
        seconds = utils.random_unicode()
        method = utils.random_unicode()
        obj.get_temp_url(seconds, method=method)
        cont.get_temp_url.assert_called_once_with(obj, seconds=seconds,
                method=method)
    def test_sobj_delete_in_seconds(self):
        """obj.delete_in_seconds delegates to container.delete_object_in_seconds."""
        obj = self.obj
        cont = obj.container
        cont.delete_object_in_seconds = Mock()
        seconds = utils.random_unicode()
        obj.delete_in_seconds(seconds)
        cont.delete_object_in_seconds.assert_called_once_with(obj, seconds)
    def test_sobj_iter_init_methods(self):
        """StorageObjectIterator binds its list_method to the manager's list."""
        client = self.client
        mgr = client._manager
        it = StorageObjectIterator(mgr)
        self.assertEqual(it.list_method, mgr.list)
def test_sobj_mgr_name(self):
cont = self.container
mgr = cont.object_manager
self.assertEqual(mgr.name, mgr.uri_base)
    def test_sobj_mgr_container(self):
        """The container property returns the cached _container when present."""
        cont = self.container
        mgr = cont.object_manager
        new_cont = utils.random_unicode()
        mgr._container = new_cont
        self.assertEqual(mgr.container, new_cont)
    def test_sobj_mgr_container_missing(self):
        """Without a cached _container, the property fetches it via api.get."""
        cont = self.container
        mgr = cont.object_manager
        delattr(mgr, "_container")
        new_cont = utils.random_unicode()
        mgr.api.get = Mock(return_value=new_cont)
        self.assertEqual(mgr.container, new_cont)
    def test_sobj_mgr_list_raw(self):
        """With return_raw truthy, list() returns the response body untouched."""
        cont = self.container
        mgr = cont.object_manager
        marker = utils.random_unicode()
        limit = utils.random_unicode()
        prefix = utils.random_unicode()
        delimiter = utils.random_unicode()
        end_marker = utils.random_unicode()
        return_raw = utils.random_unicode()
        fake_resp = utils.random_unicode()
        fake_resp_body = utils.random_unicode()
        mgr.api.method_get = Mock(return_value=(fake_resp, fake_resp_body))
        ret = mgr.list(marker=marker, limit=limit, prefix=prefix,
                delimiter=delimiter, end_marker=end_marker,
                return_raw=return_raw)
        self.assertEqual(ret, fake_resp_body)
    def test_sobj_mgr_list_obj(self):
        """With return_raw=False, list() wraps each body dict in an object."""
        cont = self.container
        mgr = cont.object_manager
        marker = utils.random_unicode()
        limit = utils.random_unicode()
        prefix = utils.random_unicode()
        delimiter = utils.random_unicode()
        end_marker = utils.random_unicode()
        return_raw = False
        fake_resp = utils.random_unicode()
        nm = utils.random_unicode()
        fake_resp_body = [{"name": nm}]
        mgr.api.method_get = Mock(return_value=(fake_resp, fake_resp_body))
        ret = mgr.list(marker=marker, limit=limit, prefix=prefix,
                delimiter=delimiter, end_marker=end_marker,
                return_raw=return_raw)
        self.assertTrue(isinstance(ret, list))
        self.assertEqual(len(ret), 1)
        obj = ret[0]
        self.assertEqual(obj.name, nm)
    def test_sobj_mgr_get(self):
        """get() builds an object from the HEAD response headers."""
        cont = self.container
        mgr = cont.object_manager
        obj = utils.random_unicode()
        contlen = random.randint(100, 1000)
        conttype = utils.random_unicode()
        etag = utils.random_unicode()
        lastmod = utils.random_unicode()
        timestamp = utils.random_unicode()
        fake_resp = fakes.FakeResponse()
        fake_resp.headers = {"content-length": contlen,
                "content-type": conttype,
                "etag": etag,
                "last-modified": lastmod,
                "x-timestamp": timestamp,
                }
        mgr.api.method_head = Mock(return_value=(fake_resp, None))
        ret = mgr.get(obj)
        self.assertEqual(ret.name, obj)
        self.assertEqual(ret.bytes, contlen)
        self.assertEqual(ret.content_type, conttype)
        self.assertEqual(ret.hash, etag)
        self.assertEqual(ret.last_modified, lastmod)
        self.assertEqual(ret.timestamp, timestamp)
    def test_sobj_mgr_get_no_length(self):
        """get() tolerates a missing/None content-length header."""
        cont = self.container
        mgr = cont.object_manager
        obj = utils.random_unicode()
        contlen = None
        conttype = utils.random_unicode()
        etag = utils.random_unicode()
        lastmod = utils.random_unicode()
        fake_resp = fakes.FakeResponse()
        fake_resp.headers = {"content-length": contlen,
                "content-type": conttype,
                "etag": etag,
                "last-modified": lastmod,
                }
        mgr.api.method_head = Mock(return_value=(fake_resp, None))
        ret = mgr.get(obj)
        self.assertEqual(ret.bytes, contlen)
    def test_sobj_mgr_create_empty(self):
        """create() with no file or data raises NoContentSpecified."""
        cont = self.container
        mgr = cont.object_manager
        self.assertRaises(exc.NoContentSpecified, mgr.create)
    def test_sobj_mgr_create_no_name(self):
        """create() with data but no object name raises MissingName."""
        cont = self.container
        mgr = cont.object_manager
        self.assertRaises(exc.MissingName, mgr.create, data="x")
    def test_sobj_mgr_create_data(self):
        """create(data=...) uploads via _upload and honors return_none."""
        cont = self.container
        mgr = cont.object_manager
        data = utils.random_unicode()
        obj_name = utils.random_unicode()
        content_type = utils.random_unicode()
        etag = utils.random_unicode()
        content_encoding = utils.random_unicode()
        content_length = utils.random_unicode()
        ttl = utils.random_unicode()
        chunked = utils.random_unicode()
        chunk_size = utils.random_unicode()
        key = utils.random_unicode()
        val = utils.random_unicode()
        metadata = {key: val}
        headers = {"X-Delete-After": ttl}
        # Metadata keys get the object-meta prefix before becoming headers.
        massaged = _massage_metakeys(metadata, OBJECT_META_PREFIX)
        headers.update(massaged)
        for return_none in (True, False):
            mgr._upload = Mock()
            get_resp = utils.random_unicode()
            mgr.get = Mock(return_value=get_resp)
            ret = mgr.create(data=data, obj_name=obj_name,
                    content_type=content_type, etag=etag,
                    content_encoding=content_encoding,
                    content_length=content_length, ttl=ttl, chunked=chunked,
                    metadata=metadata, chunk_size=chunk_size, headers=headers,
                    return_none=return_none)
            mgr._upload.assert_called_once_with(obj_name, data, content_type,
                    content_encoding, content_length, etag, bool(chunk_size),
                    chunk_size, headers)
            if return_none:
                self.assertIsNone(ret)
            else:
                self.assertEqual(ret, get_resp)
    def test_sobj_mgr_create_file(self):
        """create(path) opens the file, uploads it, and honors return_none."""
        cont = self.container
        mgr = cont.object_manager
        obj_name = utils.random_unicode()
        content_type = utils.random_unicode()
        etag = utils.random_unicode()
        content_encoding = utils.random_unicode()
        content_length = utils.random_unicode()
        ttl = utils.random_unicode()
        chunked = utils.random_unicode()
        chunk_size = utils.random_unicode()
        key = utils.random_unicode()
        val = utils.random_unicode()
        metadata = {key: val}
        headers = {"X-Delete-After": ttl}
        massaged = _massage_metakeys(metadata, OBJECT_META_PREFIX)
        headers.update(massaged)
        for return_none in (True, False):
            mgr._upload = Mock()
            get_resp = utils.random_unicode()
            mgr.get = Mock(return_value=get_resp)
            with utils.SelfDeletingTempfile() as tmp:
                ret = mgr.create(tmp, obj_name=obj_name,
                        content_type=content_type, etag=etag,
                        content_encoding=content_encoding,
                        content_length=content_length, ttl=ttl,
                        chunked=chunked, metadata=metadata,
                        chunk_size=chunk_size, headers=headers,
                        return_none=return_none)
            self.assertEqual(mgr._upload.call_count, 1)
            # The file object passed to _upload is created internally, so
            # only check that the other positional args made it through.
            call_args = list(mgr._upload.call_args)[0]
            for param in (obj_name, content_type, content_encoding,
                    content_length, etag, False, headers):
                self.assertTrue(param in call_args)
            if return_none:
                self.assertIsNone(ret)
            else:
                self.assertEqual(ret, get_resp)
    def test_sobj_mgr_create_file_obj(self):
        """create(open file) uploads the already-open file object."""
        cont = self.container
        mgr = cont.object_manager
        obj_name = utils.random_unicode()
        content_type = utils.random_unicode()
        etag = utils.random_unicode()
        content_encoding = utils.random_unicode()
        content_length = utils.random_unicode()
        ttl = utils.random_unicode()
        chunked = utils.random_unicode()
        chunk_size = utils.random_unicode()
        key = utils.random_unicode()
        val = utils.random_unicode()
        metadata = {key: val}
        headers = {"X-Delete-After": ttl}
        massaged = _massage_metakeys(metadata, OBJECT_META_PREFIX)
        headers.update(massaged)
        for return_none in (True, False):
            mgr._upload = Mock()
            get_resp = utils.random_unicode()
            mgr.get = Mock(return_value=get_resp)
            with utils.SelfDeletingTempfile() as tmp:
                with open(tmp) as tmpfile:
                    ret = mgr.create(tmpfile, obj_name=obj_name,
                            content_type=content_type, etag=etag,
                            content_encoding=content_encoding,
                            content_length=content_length, ttl=ttl,
                            chunked=chunked, metadata=metadata,
                            chunk_size=chunk_size, headers=headers,
                            return_none=return_none)
            self.assertEqual(mgr._upload.call_count, 1)
            call_args = list(mgr._upload.call_args)[0]
            for param in (obj_name, content_type, content_encoding,
                    content_length, etag, False, headers):
                self.assertTrue(param in call_args)
            if return_none:
                self.assertIsNone(ret)
            else:
                self.assertEqual(ret, get_resp)
    def test_sobj_mgr_create_file_like_obj(self):
        """create() accepts any object exposing a read() method."""
        cont = self.container
        mgr = cont.object_manager
        obj_name = utils.random_unicode()
        content_type = utils.random_unicode()
        etag = utils.random_unicode()
        content_encoding = utils.random_unicode()
        content_length = utils.random_unicode()
        ttl = utils.random_unicode()
        chunked = utils.random_unicode()
        chunk_size = utils.random_unicode()
        key = utils.random_unicode()
        val = utils.random_unicode()
        metadata = {key: val}
        headers = {"X-Delete-After": ttl}
        massaged = _massage_metakeys(metadata, OBJECT_META_PREFIX)
        headers.update(massaged)

        # Minimal duck-typed "file": only a read() method is required.
        class Foo:
            pass
        file_like_object = Foo()
        file_like_object.read = lambda: utils.random_unicode()
        for return_none in (True, False):
            mgr._upload = Mock()
            get_resp = utils.random_unicode()
            mgr.get = Mock(return_value=get_resp)
            ret = mgr.create(file_like_object, obj_name=obj_name,
                    content_type=content_type, etag=etag,
                    content_encoding=content_encoding,
                    content_length=content_length, ttl=ttl,
                    chunked=chunked, metadata=metadata,
                    chunk_size=chunk_size, headers=headers,
                    return_none=return_none)
            self.assertEqual(mgr._upload.call_count, 1)
            call_args = list(mgr._upload.call_args)[0]
            for param in (obj_name, content_type, content_encoding,
                    content_length, etag, False, headers):
                self.assertTrue(param in call_args)
            if return_none:
                self.assertIsNone(ret)
            else:
                self.assertEqual(ret, get_resp)
    def test_sobj_mgr_upload(self):
        """_upload with raw content stores it via a single _store_object call."""
        obj = self.obj
        mgr = obj.manager
        obj_name = utils.random_unicode()
        content = utils.random_unicode()
        content_type = utils.random_unicode()
        content_encoding = utils.random_unicode()
        content_length = utils.random_unicode()
        etag = utils.random_unicode()
        chunked = utils.random_unicode()
        chunk_size = utils.random_unicode()
        key = utils.random_unicode()
        val = utils.random_unicode()
        headers = {key: val}
        mgr._store_object = Mock()
        ret = mgr._upload(obj_name, content, content_type, content_encoding,
                content_length, etag, chunked, chunk_size, headers)
        mgr._store_object.assert_called_once_with(obj_name, content=content,
                etag=etag, chunked=chunked, chunk_size=chunk_size,
                headers=headers)
    def test_sobj_mgr_upload_file(self):
        """_upload with a small file object stores it in one call."""
        obj = self.obj
        mgr = obj.manager
        obj_name = utils.random_unicode()
        content_type = utils.random_unicode()
        content_encoding = utils.random_unicode()
        content_length = random.randint(10, 1000)
        etag = utils.random_unicode()
        chunked = utils.random_unicode()
        chunk_size = utils.random_unicode()
        key = utils.random_unicode()
        val = utils.random_unicode()
        headers = {key: val}
        mgr._store_object = Mock()
        with utils.SelfDeletingTempfile() as tmp:
            with open(tmp) as content:
                ret = mgr._upload(obj_name, content, content_type,
                        content_encoding, content_length, etag, chunked,
                        chunk_size, headers)
        mgr._store_object.assert_called_once_with(obj_name, content=content,
                etag=etag, chunked=chunked, chunk_size=chunk_size,
                headers=headers)
    def test_sobj_mgr_upload_file_unchunked(self):
        """_upload with chunked=None and a known length stores in one call."""
        obj = self.obj
        mgr = obj.manager
        obj_name = utils.random_unicode()
        content_type = utils.random_unicode()
        content_encoding = utils.random_unicode()
        content_length = random.randint(10, 1000)
        etag = utils.random_unicode()
        chunked = None
        chunk_size = utils.random_unicode()
        key = utils.random_unicode()
        val = utils.random_unicode()
        headers = {key: val}
        mgr._store_object = Mock()
        with utils.SelfDeletingTempfile() as tmp:
            with open(tmp) as content:
                ret = mgr._upload(obj_name, content, content_type,
                        content_encoding, content_length, etag, chunked,
                        chunk_size, headers)
        mgr._store_object.assert_called_once_with(obj_name, content=content,
                etag=etag, chunked=chunked, chunk_size=chunk_size,
                headers=headers)
    def test_sobj_mgr_upload_file_unchunked_no_length(self):
        """_upload with no length and chunked=None still stores in one call."""
        obj = self.obj
        mgr = obj.manager
        obj_name = utils.random_unicode()
        content_type = utils.random_unicode()
        content_encoding = utils.random_unicode()
        content_length = None
        etag = utils.random_unicode()
        chunked = None
        chunk_size = utils.random_unicode()
        key = utils.random_unicode()
        val = utils.random_unicode()
        headers = {key: val}
        mgr._store_object = Mock()
        with utils.SelfDeletingTempfile() as tmp:
            with open(tmp) as content:
                ret = mgr._upload(obj_name, content, content_type,
                        content_encoding, content_length, etag, chunked,
                        chunk_size, headers)
        mgr._store_object.assert_called_once_with(obj_name, content=content,
                etag=etag, chunked=chunked, chunk_size=chunk_size,
                headers=headers)
    def test_sobj_mgr_upload_multiple(self):
        """Files over MAX_FILE_SIZE are split into multiple segment uploads."""
        obj = self.obj
        mgr = obj.manager
        # Shrink the segmentation threshold so a tiny file triggers splitting;
        # restored at the end of the test.
        sav = pyrax.object_storage.MAX_FILE_SIZE
        pyrax.object_storage.MAX_FILE_SIZE = 42
        obj_name = utils.random_unicode()
        content_type = utils.random_unicode()
        content_encoding = utils.random_unicode()
        content_length = None
        etag = utils.random_unicode()
        chunked = None
        chunk_size = None
        key = utils.random_unicode()
        val = utils.random_unicode()
        headers = {key: val}
        mgr._store_object = Mock()
        with utils.SelfDeletingTempfile() as tmp:
            with open(tmp, "w") as content:
                content.write("x" * 66)
            with open(tmp) as content:
                ret = mgr._upload(obj_name, content, content_type,
                        content_encoding, content_length, etag, chunked,
                        chunk_size, headers)
        # 66 bytes at a 42-byte limit -> 2 segments plus the manifest object.
        self.assertEqual(mgr._store_object.call_count, 3)
        pyrax.object_storage.MAX_FILE_SIZE = sav
    def test_sobj_mgr_store_object(self):
        """_store_object PUTs the content, both chunked and unchunked."""
        obj = self.obj
        mgr = obj.manager
        obj_name = utils.random_unicode()
        content = utils.random_unicode()
        etag = None
        chunk_size = utils.random_unicode()
        val = utils.random_unicode()
        headers = {"Content-Length": val}
        exp_uri = "/%s/%s" % (mgr.uri_base, obj_name)
        for chunked in (True, False):
            mgr.api.method_put = Mock(return_value=(None, None))
            # NOTE(review): exp_hdrs is built up here but never asserted
            # against — the assertion below checks the (mutated) `headers`
            # dict instead. Looks like the assertion was meant to use
            # exp_hdrs; verify against the implementation before changing.
            exp_hdrs = {"Content-Length": val, "Content-Type": None}
            if chunked:
                exp_hdrs.pop("Content-Length")
                exp_hdrs["Transfer-Encoding"] = "chunked"
            else:
                exp_hdrs["ETag"] = utils.get_checksum(content)
            mgr._store_object(obj_name, content, etag=etag, chunked=chunked,
                    chunk_size=chunk_size, headers=headers)
            mgr.api.method_put.assert_called_once_with(exp_uri, data=content,
                    headers=headers)
    def test_sobj_mgr_fetch_no_chunk(self):
        """fetch without chunking issues a ranged GET; include_meta adds headers."""
        obj = self.obj
        mgr = obj.manager
        chunk_size = None
        size = random.randint(1, 1000)
        extra_info = utils.random_unicode()
        key = utils.random_unicode()
        val = utils.random_unicode()
        hdrs = {key: val}
        resp = fakes.FakeResponse()
        resp.headers = hdrs
        resp_body = utils.random_unicode()
        exp_uri = "/%s/%s" % (mgr.uri_base, obj.name)
        exp_headers = {"Range": "bytes=0-%s" % size}
        for include_meta in (True, False):
            mgr.api.method_get = Mock(return_value=(resp, resp_body))
            mgr.api.method_head = Mock(return_value=(resp, resp_body))
            ret = mgr.fetch(obj, include_meta=include_meta,
                    chunk_size=chunk_size, size=size, extra_info=extra_info)
            mgr.api.method_get.assert_called_once_with(exp_uri,
                    headers=exp_headers, raw_content=True)
            if include_meta:
                self.assertEqual(ret, (hdrs, resp_body))
            else:
                self.assertEqual(ret, resp_body)
    def test_sobj_mgr_fetch_chunk(self):
        """fetch with a chunk_size delegates to the _fetch_chunker generator."""
        obj = self.obj
        mgr = obj.manager
        chunk_size = random.randint(1, 100)
        size = random.randint(200, 1000)
        extra_info = utils.random_unicode()
        exp_uri = "/%s/%s" % (mgr.uri_base, obj.name)
        for include_meta in (True, False):
            mgr.get = Mock(return_value=obj)
            mgr._fetch_chunker = Mock()
            mgr.fetch(obj.name, include_meta=include_meta,
                    chunk_size=chunk_size, size=size, extra_info=extra_info)
            mgr._fetch_chunker.assert_called_once_with(exp_uri, chunk_size,
                    size, obj.bytes)
    def test_sobj_mgr_fetch_chunker(self):
        """_fetch_chunker GETs ceil(total_bytes / chunk_size) times."""
        obj = self.obj
        mgr = obj.manager
        uri = utils.random_unicode()
        chunk_size = random.randint(10, 50)
        num_chunks, remainder = divmod(obj.total_bytes, chunk_size)
        if remainder:
            num_chunks += 1
        resp = fakes.FakeResponse()
        resp_body = "x" * chunk_size
        mgr.api.method_get = Mock(return_value=(resp, resp_body))
        ret = mgr._fetch_chunker(uri, chunk_size, None, obj.total_bytes)
        # Joining consumes the generator; without this the mock would
        # never be called and call_count would be 0.
        txt = "".join([part for part in ret])
        self.assertEqual(mgr.api.method_get.call_count, num_chunks)
def test_sobj_mgr_fetch_chunker_eof(self):
obj = self.obj
mgr = obj.manager
uri = utils.random_unicode()
chunk_size = random.randint(10, 50)
num_chunks = int(obj.total_bytes / chunk_size) + 1
resp = fakes.FakeResponse()
resp_body = ""
mgr.api.method_get = Mock(return_value=(resp, resp_body))
ret = mgr._fetch_chunker(uri, chunk_size, None, obj.total_bytes)
self.assertRaises(StopIteration, ret.next)
    def test_sobj_mgr_fetch_partial(self):
        """fetch_partial is a thin wrapper around fetch(obj, size=size)."""
        obj = self.obj
        mgr = obj.manager
        mgr.fetch = Mock()
        size = random.randint(1, 1000)
        mgr.fetch_partial(obj, size)
        mgr.fetch.assert_called_once_with(obj, size=size)
    @patch("pyrax.manager.BaseManager.delete")
    def test_sobj_mgr_delete(self, mock_del):
        """delete() forwards to the BaseManager delete."""
        obj = self.obj
        mgr = obj.manager
        mgr.delete(obj)
        mock_del.assert_called_once_with(obj)
    @patch("pyrax.manager.BaseManager.delete")
    def test_sobj_mgr_delete_not_found(self, mock_del):
        """delete() translates a NotFound from the base into NoSuchObject."""
        obj = self.obj
        mgr = obj.manager
        msg = utils.random_unicode()
        mock_del.side_effect = exc.NotFound(msg)
        self.assertRaises(exc.NoSuchObject, mgr.delete, obj)
    def test_sobj_mgr_delete_all_objects(self):
        """delete_all_objects with explicit names bulk-deletes them directly."""
        # NOTE: `async` as an identifier means this file targets Python 2 /
        # pre-3.7 (it became a reserved word in Python 3.7).
        obj = self.obj
        mgr = obj.manager
        nms = utils.random_unicode()
        async = utils.random_unicode()
        mgr.api.bulk_delete = Mock()
        mgr.delete_all_objects(nms, async=async)
        mgr.api.bulk_delete.assert_called_once_with(mgr.name, nms, async=async)
def test_sobj_mgr_delete_all_objects_no_names(self):
obj = self.obj
mgr = obj.manager
nms = utils.random_unicode()
async = utils.random_unicode()
mgr.api.list_object_names = Mock(return_value=nms)
mgr.api.bulk_delete = Mock()
mgr.delete_all_objects(None, async=async)
mgr.api.list_object_names.assert_called_once_with(mgr.name,
full_listing=True)
mgr.api.bulk_delete.assert_called_once_with(mgr.name, nms, async=async)
def test_sobj_mgr_download_no_directory(self):
obj = self.obj
mgr = obj.manager
self.assertRaises(exc.FolderNotFound, mgr.download, obj, "FAKE")
def test_sobj_mgr_download_no_structure(self):
obj = self.obj
mgr = obj.manager
txt = utils.random_unicode()
mgr.fetch = Mock(return_value=txt)
with utils.SelfDeletingTempDirectory() as directory:
mgr.download(obj, directory, structure=False)
mgr.fetch.assert_called_once_with(obj)
fpath = os.path.join(directory, obj.name)
self.assertTrue(os.path.exists(fpath))
def test_sobj_mgr_download_structure(self):
obj = self.obj
obj.name = "%s/%s/%s" % (obj.name, obj.name, obj.name)
mgr = obj.manager
txt = utils.random_unicode()
mgr.fetch = Mock(return_value=txt)
with utils.SelfDeletingTempDirectory() as directory:
mgr.download(obj, directory, structure=True)
mgr.fetch.assert_called_once_with(obj)
fpath = os.path.join(directory, obj.name)
self.assertTrue(os.path.exists(fpath))
def test_sobj_mgr_purge(self):
obj = self.obj
mgr = obj.manager
email_address1 = utils.random_unicode()
email_address2 = utils.random_unicode()
email_addresses = [email_address1, email_address2]
exp_uri = "/%s/%s" % (utils.get_name(obj.container), obj.name)
exp_headers = {"X-Purge-Email": ", ".join(email_addresses)}
mgr.api.cdn_request = Mock(return_value=(None, None))
mgr.purge(obj, email_addresses=email_addresses)
mgr.api.cdn_request.assert_called_once_with(exp_uri, method="DELETE",
headers=exp_headers)
def test_sobj_mgr_get_metadata(self):
obj = self.obj
mgr = obj.manager
prefix = utils.random_unicode()
key = utils.random_unicode()
good_key = "%s%s" % (prefix, key)
good_val = utils.random_unicode()
bad_key = utils.random_unicode()
bad_val = utils.random_unicode()
exp_key = key.lower().replace("-", "_")
resp = fakes.FakeResponse()
resp.headers = {good_key.lower(): good_val, bad_key.lower(): bad_val}
exp_uri = "/%s/%s" % (utils.get_name(obj.container), obj.name)
mgr.api.method_head = Mock(return_value=(resp, None))
ret = mgr.get_metadata(obj, prefix=prefix)
self.assertEqual(ret, {exp_key: good_val})
mgr.api.method_head.assert_called_once_with(exp_uri)
def test_sobj_mgr_get_metadata_no_prefix(self):
obj = self.obj
mgr = obj.manager
prefix = None
good_key = utils.random_unicode()
good_val = utils.random_unicode()
bad_key = utils.random_unicode()
bad_val = utils.random_unicode()
exp_key = good_key.lower().replace("-", "_")
resp = fakes.FakeResponse()
default_key = "%s%s" % (OBJECT_META_PREFIX, good_key)
default_key = default_key.lower()
resp.headers = {default_key: good_val, bad_key.lower(): bad_val}
exp_uri = "/%s/%s" % (utils.get_name(obj.container), obj.name)
mgr.api.method_head = Mock(return_value=(resp, None))
ret = mgr.get_metadata(obj, prefix=prefix)
self.assertEqual(ret, {exp_key: good_val})
mgr.api.method_head.assert_called_once_with(exp_uri)
def test_sobj_mgr_set_metadata(self):
obj = self.obj
mgr = obj.manager
key = utils.random_unicode()
val = utils.random_unicode()
metadata = {key: val}
prefix = utils.random_unicode()
clear = False
old_key = utils.random_unicode()
old_val = utils.random_unicode()
old_meta = {old_key: old_val}
mgr.get_metadata = Mock(return_value=old_meta)
exp_meta = _massage_metakeys(dict(old_meta, **metadata), prefix)
exp_uri = "/%s/%s" % (utils.get_name(obj.container), obj.name)
mgr.api.method_post = Mock(return_value=(None, None))
mgr.set_metadata(obj, metadata, clear=clear, prefix=prefix)
mgr.api.method_post.assert_called_once_with(exp_uri, headers=exp_meta)
def test_sobj_mgr_set_metadata_clear(self):
obj = self.obj
mgr = obj.manager
key = utils.random_unicode()
val = utils.random_unicode()
metadata = {key: val}
prefix = utils.random_unicode()
clear = True
old_key = utils.random_unicode()
old_val = utils.random_unicode()
old_meta = {old_key: old_val}
mgr.get_metadata = Mock(return_value=old_meta)
exp_meta = _massage_metakeys(metadata, prefix)
exp_uri = "/%s/%s" % (utils.get_name(obj.container), obj.name)
mgr.api.method_post = Mock(return_value=(None, None))
mgr.set_metadata(obj, metadata, clear=clear, prefix=prefix)
mgr.api.method_post.assert_called_once_with(exp_uri, headers=exp_meta)
def test_sobj_mgr_set_metadata_no_prefix(self):
obj = self.obj
mgr = obj.manager
key = utils.random_unicode()
val = utils.random_unicode()
metadata = {key: val}
prefix = None
clear = True
old_key = utils.random_unicode()
old_val = utils.random_unicode()
old_meta = {old_key: old_val}
mgr.get_metadata = Mock(return_value=old_meta)
exp_meta = _massage_metakeys(metadata, OBJECT_META_PREFIX)
exp_uri = "/%s/%s" % (utils.get_name(obj.container), obj.name)
mgr.api.method_post = Mock(return_value=(None, None))
mgr.set_metadata(obj, metadata, clear=clear, prefix=prefix)
mgr.api.method_post.assert_called_once_with(exp_uri, headers=exp_meta)
def test_sobj_mgr_set_metadata_empty_vals(self):
obj = self.obj
mgr = obj.manager
key = utils.random_unicode()
val = utils.random_unicode()
metadata = {key: val}
prefix = None
clear = False
empty_key = utils.random_unicode()
empty_val = ""
empty_meta = {empty_key: empty_val}
mgr.get_metadata = Mock(return_value=empty_meta)
exp_meta = _massage_metakeys(metadata, OBJECT_META_PREFIX)
exp_uri = "/%s/%s" % (utils.get_name(obj.container), obj.name)
mgr.api.method_post = Mock(return_value=(None, None))
mgr.set_metadata(obj, metadata, clear=clear, prefix=prefix)
mgr.api.method_post.assert_called_once_with(exp_uri, headers=exp_meta)
def test_sobj_mgr_remove_metadata_key(self):
obj = self.obj
mgr = obj.manager
key = utils.random_unicode()
exp_uri = "/%s/%s" % (utils.get_name(obj.container), obj.name)
mgr.set_metadata = Mock()
mgr.remove_metadata_key(obj, key)
mgr.set_metadata.assert_called_once_with(obj, {key: ""})
def test_clt_configure_cdn(self):
clt = self.client
ident = clt.identity
fake_service = fakes.FakeService()
fake_ep = fakes.FakeEndpoint()
fake_ep.public_url = utils.random_unicode()
ident.services["object_cdn"] = fake_service
fake_service.endpoints = {clt.region_name: fake_ep}
clt._configure_cdn()
self.assertEqual(clt.cdn_management_url, fake_ep.public_url)
def test_clt_backwards_aliases(self):
clt = self.client
self.assertEqual(clt.list_containers, clt.list_container_names)
self.assertEqual(clt.delete_container, clt.delete)
@patch("pyrax.client.BaseClient.get")
def test_clt_get(self, mock_get):
clt = self.client
cont = self.container
mock_get.return_value = cont
item = utils.random_unicode()
ret = clt.get(item)
self.assertEqual(ret, cont)
def test_clt_get_cont(self):
clt = self.client
cont = self.container
ret = clt.get(cont)
self.assertEqual(ret, cont)
def test_clt_remove_container_from_cache(self):
clt = self.client
cont = self.container
ret = clt.remove_container_from_cache(cont)
# noop
self.assertIsNone(ret)
def test_clt_get_account_details(self):
clt = self.client
mgr = clt._manager
good_prefix = "x-account-"
key_include = utils.random_unicode()
val_include = utils.random_unicode()
key_exclude = utils.random_unicode()
val_exclude = utils.random_unicode()
headers = {"%s%s" % (good_prefix, key_include): val_include,
"%s%s" % (ACCOUNT_META_PREFIX, key_exclude): val_exclude}
mgr.get_account_headers = Mock(return_value=headers)
ret = clt.get_account_details()
self.assertTrue(key_include in ret)
self.assertFalse(key_exclude in ret)
def test_clt_get_account_info(self):
clt = self.client
mgr = clt._manager
key_count = "x-account-container-count"
val_count = random.randint(1, 100)
key_bytes = "x-account-bytes-used"
val_bytes = random.randint(1, 100)
key_not_used = "x-account-useless"
val_not_used = random.randint(1, 100)
headers = {key_count: val_count, key_bytes: val_bytes,
key_not_used: val_not_used}
mgr.get_account_headers = Mock(return_value=headers)
ret = clt.get_account_info()
self.assertEqual(ret[0], val_count)
self.assertEqual(ret[1], val_bytes)
def test_clt_get_account_metadata(self):
clt = self.client
mgr = clt._manager
mgr.get_account_metadata = Mock()
prefix = utils.random_unicode()
clt.get_account_metadata(prefix=prefix)
mgr.get_account_metadata.assert_called_once_with(prefix=prefix)
def test_clt_set_account_metadata(self):
clt = self.client
mgr = clt._manager
mgr.set_account_metadata = Mock()
metadata = utils.random_unicode()
clear = utils.random_unicode()
prefix = utils.random_unicode()
extra_info = utils.random_unicode()
clt.set_account_metadata(metadata, clear=clear, prefix=prefix,
extra_info=extra_info)
mgr.set_account_metadata.assert_called_once_with(metadata, clear=clear,
prefix=prefix)
def test_clt_delete_account_metadata(self):
clt = self.client
mgr = clt._manager
mgr.delete_account_metadata = Mock()
prefix = utils.random_unicode()
clt.delete_account_metadata(prefix=prefix)
mgr.delete_account_metadata.assert_called_once_with(prefix=prefix)
def test_clt_get_temp_url_key(self):
clt = self.client
mgr = clt._manager
clt._cached_temp_url_key = None
key = utils.random_unicode()
meta = {"temp_url_key": key, "ignore": utils.random_unicode()}
mgr.get_account_metadata = Mock(return_value=meta)
ret = clt.get_temp_url_key(cached=True)
self.assertEqual(ret, key)
def test_clt_get_temp_url_key_cached(self):
clt = self.client
mgr = clt._manager
cached_key = utils.random_unicode()
clt._cached_temp_url_key = cached_key
key = utils.random_unicode()
meta = {"temp_url_key": key, "ignore": utils.random_unicode()}
mgr.get_account_metadata = Mock(return_value=meta)
ret = clt.get_temp_url_key(cached=True)
self.assertEqual(ret, cached_key)
def test_clt_set_temp_url_key(self):
clt = self.client
mgr = clt._manager
clt.set_account_metadata = Mock()
key = utils.random_unicode()
meta = {"Temp-Url-Key": key}
clt.set_temp_url_key(key)
clt.set_account_metadata.assert_called_once_with(meta)
self.assertEqual(clt._cached_temp_url_key, key)
def test_clt_set_temp_url_key_not_supplied(self):
clt = self.client
mgr = clt._manager
clt.set_account_metadata = Mock()
key = None
clt.set_temp_url_key(key)
exp_meta = {"Temp-Url-Key": clt._cached_temp_url_key}
clt.set_account_metadata.assert_called_once_with(exp_meta)
def test_clt_get_temp_url(self):
clt = self.client
mgr = clt._manager
cont = self.container
obj = self.obj
seconds = utils.random_unicode()
method = utils.random_unicode()
key = utils.random_unicode()
cached = utils.random_unicode()
mgr.get_temp_url = Mock()
clt.get_temp_url(cont, obj, seconds, method=method, key=key,
cached=cached)
mgr.get_temp_url.assert_called_once_with(cont, obj, seconds,
method=method, key=key, cached=cached)
def test_clt_list(self):
clt = self.client
mgr = clt._manager
limit = utils.random_unicode()
marker = utils.random_unicode()
end_marker = utils.random_unicode()
prefix = utils.random_unicode()
mgr.list = Mock()
clt.list(limit=limit, marker=marker, end_marker=end_marker,
prefix=prefix)
mgr.list.assert_called_once_with(limit=limit, marker=marker,
end_marker=end_marker, prefix=prefix)
def test_clt_list_public_containers(self):
clt = self.client
mgr = clt._manager
mgr.list_public_containers = Mock()
clt.list_public_containers()
mgr.list_public_containers.assert_called_once_with()
def test_clt_make_container_public(self):
clt = self.client
mgr = clt._manager
cont = self.container
mgr.make_public = Mock()
ttl = utils.random_unicode()
clt.make_container_public(cont, ttl=ttl)
mgr.make_public.assert_called_once_with(cont, ttl=ttl)
def test_clt_make_container_private(self):
clt = self.client
mgr = clt._manager
cont = self.container
mgr.make_private = Mock()
clt.make_container_private(cont)
mgr.make_private.assert_called_once_with(cont)
def test_clt_get_cdn_log_retention(self):
clt = self.client
mgr = clt._manager
cont = self.container
mgr.get_cdn_log_retention = Mock()
clt.get_cdn_log_retention(cont)
mgr.get_cdn_log_retention.assert_called_once_with(cont)
def test_clt_set_cdn_log_retention(self):
clt = self.client
mgr = clt._manager
cont = self.container
enabled = utils.random_unicode()
mgr.set_cdn_log_retention = Mock()
clt.set_cdn_log_retention(cont, enabled)
mgr.set_cdn_log_retention.assert_called_once_with(cont, enabled)
def test_clt_get_container_streaming_uri(self):
clt = self.client
mgr = clt._manager
cont = self.container
mgr.get_container_streaming_uri = Mock()
clt.get_container_streaming_uri(cont)
mgr.get_container_streaming_uri.assert_called_once_with(cont)
def test_clt_get_container_ios_uri(self):
clt = self.client
mgr = clt._manager
cont = self.container
mgr.get_container_ios_uri = Mock()
clt.get_container_ios_uri(cont)
mgr.get_container_ios_uri.assert_called_once_with(cont)
def test_clt_set_container_web_index_page(self):
clt = self.client
mgr = clt._manager
cont = self.container
page = utils.random_unicode()
mgr.set_web_index_page = Mock()
clt.set_container_web_index_page(cont, page)
mgr.set_web_index_page.assert_called_once_with(cont, page)
def test_clt_set_container_web_error_page(self):
clt = self.client
mgr = clt._manager
cont = self.container
page = utils.random_unicode()
mgr.set_web_error_page = Mock()
clt.set_container_web_error_page(cont, page)
mgr.set_web_error_page.assert_called_once_with(cont, page)
def test_clt_purge_cdn_object(self):
clt = self.client
mgr = clt._manager
cont = self.container
obj = self.obj
email_addresses = utils.random_unicode()
mgr.purge_cdn_object = Mock()
clt.purge_cdn_object(cont, obj, email_addresses=email_addresses)
mgr.purge_cdn_object.assert_called_once_with(cont, obj,
email_addresses=email_addresses)
def test_clt_list_container_names(self):
clt = self.client
mgr = clt._manager
nm1 = utils.random_unicode()
nm2 = utils.random_unicode()
nm3 = utils.random_unicode()
cont1 = clt.create(nm1)
cont2 = clt.create(nm2)
cont3 = clt.create(nm3)
clt.list = Mock(return_value=[cont1, cont2, cont3])
ret = clt.list_container_names()
self.assertEqual(ret, [nm1, nm2, nm3])
def test_clt_list_containers_info(self):
clt = self.client
mgr = clt._manager
limit = utils.random_unicode()
marker = utils.random_unicode()
mgr.list_containers_info = Mock()
clt.list_containers_info(limit=limit, marker=marker)
mgr.list_containers_info.assert_called_once_with(limit=limit,
marker=marker)
def test_clt_list_container_subdirs(self):
clt = self.client
mgr = clt._manager
cont = self.container
limit = utils.random_unicode()
marker = utils.random_unicode()
prefix = utils.random_unicode()
delimiter = utils.random_unicode()
full_listing = utils.random_unicode()
mgr.list_subdirs = Mock()
clt.list_container_subdirs(cont, limit=limit, marker=marker,
prefix=prefix, delimiter=delimiter, full_listing=full_listing)
mgr.list_subdirs.assert_called_once_with(cont, limit=limit,
marker=marker, prefix=prefix, delimiter=delimiter,
full_listing=full_listing)
def test_clt_list_container_object_names(self):
clt = self.client
mgr = clt._manager
cont = self.container
limit = utils.random_unicode()
marker = utils.random_unicode()
prefix = utils.random_unicode()
delimiter = utils.random_unicode()
full_listing = utils.random_unicode()
mgr.list_object_names = Mock()
clt.list_container_object_names(cont, limit=limit, marker=marker,
prefix=prefix, delimiter=delimiter, full_listing=full_listing)
mgr.list_object_names.assert_called_once_with(cont, limit=limit,
marker=marker, prefix=prefix, delimiter=delimiter,
full_listing=full_listing)
def test_clt_get_container_metadata(self):
clt = self.client
mgr = clt._manager
cont = self.container
prefix = utils.random_unicode()
mgr.get_metadata = Mock()
clt.get_container_metadata(cont, prefix=prefix)
mgr.get_metadata.assert_called_once_with(cont, prefix=prefix)
def test_clt_set_container_metadata(self):
clt = self.client
mgr = clt._manager
cont = self.container
metadata = utils.random_unicode()
clear = utils.random_unicode()
prefix = utils.random_unicode()
mgr.set_metadata = Mock()
clt.set_container_metadata(cont, metadata, clear=clear, prefix=prefix)
mgr.set_metadata.assert_called_once_with(cont, metadata, clear=clear,
prefix=prefix)
def test_clt_remove_container_metadata_key(self):
clt = self.client
mgr = clt._manager
cont = self.container
key = utils.random_unicode()
mgr.remove_metadata_key = Mock()
clt.remove_container_metadata_key(cont, key)
mgr.remove_metadata_key.assert_called_once_with(cont, key)
def test_clt_delete_container_metadata(self):
clt = self.client
mgr = clt._manager
cont = self.container
prefix = utils.random_unicode()
mgr.delete_metadata = Mock()
clt.delete_container_metadata(cont, prefix=prefix)
mgr.delete_metadata.assert_called_once_with(cont, prefix=prefix)
def test_clt_get_container_cdn_metadata(self):
clt = self.client
mgr = clt._manager
cont = self.container
mgr.get_cdn_metadata = Mock()
clt.get_container_cdn_metadata(cont)
mgr.get_cdn_metadata.assert_called_once_with(cont)
def test_clt_set_container_cdn_metadata(self):
clt = self.client
mgr = clt._manager
cont = self.container
metadata = utils.random_unicode()
mgr.set_cdn_metadata = Mock()
clt.set_container_cdn_metadata(cont, metadata)
mgr.set_cdn_metadata.assert_called_once_with(cont, metadata)
def test_clt_get_object_metadata(self):
clt = self.client
mgr = clt._manager
cont = self.container
obj = self.obj
mgr.get_object_metadata = Mock()
clt.get_object_metadata(cont, obj)
mgr.get_object_metadata.assert_called_once_with(cont, obj, prefix=None)
def test_clt_set_object_metadata(self):
clt = self.client
mgr = clt._manager
cont = self.container
obj = self.obj
metadata = utils.random_unicode()
clear = utils.random_unicode()
extra_info = utils.random_unicode()
prefix = utils.random_unicode()
mgr.set_object_metadata = Mock()
clt.set_object_metadata(cont, obj, metadata, clear=clear,
extra_info=extra_info, prefix=prefix)
mgr.set_object_metadata.assert_called_once_with(cont, obj, metadata,
clear=clear, prefix=prefix)
def test_clt_remove_object_metadata_key(self):
clt = self.client
cont = self.container
obj = self.obj
key = utils.random_unicode()
prefix = utils.random_unicode()
clt.set_object_metadata = Mock()
clt.remove_object_metadata_key(cont, obj, key, prefix=prefix)
clt.set_object_metadata.assert_called_once_with(cont, obj, {key: ""},
prefix=prefix)
def test_clt_list_container_objects(self):
clt = self.client
mgr = clt._manager
cont = self.container
limit = utils.random_unicode()
marker = utils.random_unicode()
end_marker = utils.random_unicode()
prefix = utils.random_unicode()
delimiter = utils.random_unicode()
full_listing = False
mgr.list_objects = Mock()
clt.list_container_objects(cont, limit=limit, marker=marker,
prefix=prefix, delimiter=delimiter, end_marker=end_marker,
full_listing=full_listing)
mgr.list_objects.assert_called_once_with(cont, limit=limit,
marker=marker, prefix=prefix, delimiter=delimiter,
end_marker=end_marker)
def test_clt_list_container_objects_full(self):
clt = self.client
mgr = clt._manager
cont = self.container
limit = utils.random_unicode()
marker = utils.random_unicode()
end_marker = utils.random_unicode()
prefix = utils.random_unicode()
delimiter = utils.random_unicode()
full_listing = True
mgr.object_listing_iterator = Mock()
clt.list_container_objects(cont, limit=limit, marker=marker,
prefix=prefix, delimiter=delimiter, end_marker=end_marker,
full_listing=full_listing)
mgr.object_listing_iterator.assert_called_once_with(cont, prefix=prefix)
def test_clt_object_listing_iterator(self):
clt = self.client
mgr = clt._manager
cont = self.container
prefix = utils.random_unicode()
mgr.object_listing_iterator = Mock()
clt.object_listing_iterator(cont, prefix=prefix)
mgr.object_listing_iterator.assert_called_once_with(cont, prefix=prefix)
def test_clt_object_listing_iterator(self):
clt = self.client
mgr = clt._manager
cont = self.container
prefix = utils.random_unicode()
mgr.object_listing_iterator = Mock()
clt.object_listing_iterator(cont, prefix=prefix)
mgr.object_listing_iterator.assert_called_once_with(cont, prefix=prefix)
def test_clt_delete_object_in_seconds(self):
clt = self.client
mgr = clt._manager
cont = self.container
obj = self.obj
seconds = utils.random_unicode()
extra_info = utils.random_unicode()
mgr.delete_object_in_seconds = Mock()
clt.delete_object_in_seconds(cont, obj, seconds, extra_info=extra_info)
mgr.delete_object_in_seconds.assert_called_once_with(cont, obj, seconds)
def test_clt_get_object(self):
clt = self.client
mgr = clt._manager
cont = self.container
obj = self.obj
mgr.get_object = Mock()
clt.get_object(cont, obj)
mgr.get_object.assert_called_once_with(cont, obj)
def test_clt_store_object(self):
clt = self.client
mgr = clt._manager
cont = self.container
obj_name = utils.random_unicode()
data = utils.random_unicode()
content_type = utils.random_unicode()
etag = utils.random_unicode()
content_encoding = utils.random_unicode()
ttl = utils.random_unicode()
return_none = utils.random_unicode()
chunk_size = utils.random_unicode()
headers = utils.random_unicode()
metadata = utils.random_unicode()
extra_info = utils.random_unicode()
clt.create_object = Mock()
clt.store_object(cont, obj_name, data, content_type=content_type,
etag=etag, content_encoding=content_encoding, ttl=ttl,
return_none=return_none, chunk_size=chunk_size,
headers=headers, metadata=metadata, extra_info=extra_info)
clt.create_object.assert_called_once_with(cont, obj_name=obj_name,
data=data, content_type=content_type, etag=etag,
content_encoding=content_encoding, ttl=ttl,
return_none=return_none, chunk_size=chunk_size,
headers=headers, metadata=metadata)
def test_clt_upload_file(self):
clt = self.client
mgr = clt._manager
cont = self.container
file_or_path = utils.random_unicode()
obj_name = utils.random_unicode()
content_type = utils.random_unicode()
etag = utils.random_unicode()
content_encoding = utils.random_unicode()
ttl = utils.random_unicode()
content_length = utils.random_unicode()
return_none = utils.random_unicode()
headers = utils.random_unicode()
metadata = utils.random_unicode()
extra_info = utils.random_unicode()
clt.create_object = Mock()
clt.upload_file(cont, file_or_path, obj_name=obj_name,
content_type=content_type,
etag=etag, content_encoding=content_encoding, ttl=ttl,
content_length=content_length, return_none=return_none,
headers=headers, metadata=metadata, extra_info=extra_info)
clt.create_object.assert_called_once_with(cont,
file_or_path=file_or_path, obj_name=obj_name,
content_type=content_type, etag=etag,
content_encoding=content_encoding, ttl=ttl, headers=headers,
metadata=metadata, return_none=return_none)
def test_clt_create_object(self):
clt = self.client
mgr = clt._manager
cont = self.container
file_or_path = utils.random_unicode()
data = utils.random_unicode()
obj_name = utils.random_unicode()
content_type = utils.random_unicode()
etag = utils.random_unicode()
content_encoding = utils.random_unicode()
ttl = utils.random_unicode()
chunk_size = utils.random_unicode()
content_length = utils.random_unicode()
return_none = utils.random_unicode()
headers = utils.random_unicode()
metadata = utils.random_unicode()
mgr.create_object = Mock()
clt.create_object(cont, file_or_path=file_or_path, data=data,
obj_name=obj_name, content_type=content_type, etag=etag,
content_encoding=content_encoding, ttl=ttl,
chunk_size=chunk_size, content_length=content_length,
return_none=return_none, headers=headers, metadata=metadata)
mgr.create_object.assert_called_once_with(cont,
file_or_path=file_or_path, data=data, obj_name=obj_name,
content_type=content_type, etag=etag,
content_encoding=content_encoding,
content_length=content_length, ttl=ttl, chunk_size=chunk_size,
metadata=metadata, headers=headers, return_none=return_none)
def test_clt_fetch_object(self):
clt = self.client
mgr = clt._manager
cont = self.container
obj = self.obj
include_meta = utils.random_unicode()
chunk_size = utils.random_unicode()
size = utils.random_unicode()
extra_info = utils.random_unicode()
mgr.fetch_object = Mock()
clt.fetch_object(cont, obj, include_meta=include_meta,
chunk_size=chunk_size, size=size, extra_info=extra_info)
mgr.fetch_object.assert_called_once_with(cont, obj,
include_meta=include_meta, chunk_size=chunk_size, size=size)
def test_clt_fetch_partial(self):
clt = self.client
mgr = clt._manager
cont = self.container
obj = self.obj
size = utils.random_unicode()
mgr.fetch_partial = Mock()
clt.fetch_partial(cont, obj, size)
mgr.fetch_partial.assert_called_once_with(cont, obj, size)
    @patch("sys.stdout")
    def test_clt_fetch_dlo(self, mock_stdout):
        # fetch_dlo() should return one (name, chunker) pair per segment
        # object; each chunker re-reads its segment via method_get until an
        # empty body signals EOF (stdout is patched because verbose chunkers
        # print progress).
        clt = self.client
        mgr = clt._manager
        cont = self.container
        ctype = "text/fake"
        num_objs = random.randint(1, 3)
        objs = [StorageObject(cont.object_manager,
                {"name": "obj%s" % num, "content_type": ctype, "bytes": 42})
                for num in range(num_objs)]
        clt.get_container_objects = Mock(return_value=objs)
        name = utils.random_unicode()
        # Per segment: three data chunks, then an empty body to end the read.
        clt.method_get = Mock(side_effect=[(None, "aaa"), (None, "bbb"),
                (None, "ccc"), (None, "")] * num_objs)
        def fake_get(obj_name):
            # Resolve the fake segment object by name.
            return [obj for obj in objs
                    if obj.name == obj_name][0]
        cont.object_manager.get = Mock(side_effect=fake_get)
        job = clt.fetch_dlo(cont, name, chunk_size=None)
        self.assertTrue(isinstance(job, list))
        self.assertEqual(len(job), num_objs)
        for name, chunker in job:
            txt = ""
            chunker.interval = 2
            chunker.verbose = True
            # Drain each chunker; the concatenation must equal the three
            # mocked chunks in order.
            while True:
                try:
                    txt += chunker.read()
                except StopIteration:
                    break
            self.assertEqual(txt, "aaabbbccc")
def test_clt_download_object(self):
clt = self.client
mgr = clt._manager
cont = self.container
obj = self.obj
directory = utils.random_unicode()
structure = utils.random_unicode()
mgr.download_object = Mock()
clt.download_object(cont, obj, directory, structure=structure)
mgr.download_object.assert_called_once_with(cont, obj, directory,
structure=structure)
def test_clt_delete(self):
clt = self.client
mgr = clt._manager
cont = self.container
del_objects = utils.random_unicode()
mgr.delete = Mock()
clt.delete(cont, del_objects=del_objects)
mgr.delete.assert_called_once_with(cont, del_objects=del_objects)
def test_clt_delete_object(self):
clt = self.client
mgr = clt._manager
cont = self.container
obj = self.obj
mgr.delete_object = Mock()
clt.delete_object(cont, obj)
mgr.delete_object.assert_called_once_with(cont, obj)
def test_clt_copy_object(self):
clt = self.client
mgr = clt._manager
cont = self.container
obj = self.obj
new_container = utils.random_unicode()
new_obj_name = utils.random_unicode()
content_type = utils.random_unicode()
extra_info = utils.random_unicode()
mgr.copy_object = Mock()
clt.copy_object(cont, obj, new_container, new_obj_name=new_obj_name,
content_type=content_type, extra_info=extra_info)
mgr.copy_object.assert_called_once_with(cont, obj, new_container,
new_obj_name=new_obj_name, content_type=content_type)
def test_clt_move_object(self):
clt = self.client
mgr = clt._manager
cont = self.container
obj = self.obj
new_container = utils.random_unicode()
new_obj_name = utils.random_unicode()
new_reference = utils.random_unicode()
content_type = utils.random_unicode()
extra_info = utils.random_unicode()
mgr.move_object = Mock()
clt.move_object(cont, obj, new_container, new_obj_name=new_obj_name,
new_reference=new_reference, content_type=content_type,
extra_info=extra_info)
mgr.move_object.assert_called_once_with(cont, obj, new_container,
new_obj_name=new_obj_name, new_reference=new_reference,
content_type=content_type)
def test_clt_change_object_content_type(self):
clt = self.client
mgr = clt._manager
cont = self.container
obj = self.obj
new_ctype = utils.random_unicode()
guess = utils.random_unicode()
extra_info = utils.random_unicode()
mgr.change_object_content_type = Mock()
clt.change_object_content_type(cont, obj, new_ctype, guess=guess,
extra_info=extra_info)
mgr.change_object_content_type.assert_called_once_with(cont, obj,
new_ctype, guess=guess)
def test_clt_upload_folder_bad_path(self):
clt = self.client
mgr = clt._manager
folder_path = utils.random_unicode()
self.assertRaises(exc.FolderNotFound, clt.upload_folder, folder_path)
def test_clt_upload_folder(self):
clt = self.client
mgr = clt._manager
cont = self.container
ignore = utils.random_unicode()
ttl = utils.random_unicode()
clt._upload_folder_in_background = Mock()
with utils.SelfDeletingTempDirectory() as folder_path:
key, total = clt.upload_folder(folder_path, container=cont,
ignore=ignore, ttl=ttl)
clt._upload_folder_in_background.assert_called_once_with(
folder_path, cont, [ignore], key, ttl)
@patch("pyrax.object_storage.FolderUploader.start")
def test_clt_upload_folder_in_background(self, mock_start):
clt = self.client
cont = self.container
folder_path = utils.random_unicode()
ignore = utils.random_unicode()
upload_key = utils.random_unicode()
ttl = utils.random_unicode()
clt._upload_folder_in_background(folder_path, cont, ignore, upload_key,
ttl)
mock_start.assert_called_once_with()
    @patch("logging.Logger.info")
    def test_clt_sync_folder_to_container(self, mock_log):
        # The public sync method must hand off to the recursive
        # _sync_folder_to_container with prefix="" and every option intact.
        clt = self.client
        cont = self.container
        folder_path = utils.random_unicode()
        delete = utils.random_unicode()
        include_hidden = utils.random_unicode()
        ignore = utils.random_unicode()
        ignore_timestamps = utils.random_unicode()
        object_prefix = utils.random_unicode()
        verbose = utils.random_unicode()
        num_objs = random.randint(1, 3)
        ctype = "text/fake"
        objs = [StorageObject(cont.object_manager,
                {"name": "obj%s" % num, "content_type": ctype, "bytes": 42})
                for num in range(num_objs)]
        cont.get_objects = Mock(return_value=objs)
        clt._sync_folder_to_container = Mock()
        clt.sync_folder_to_container(folder_path, cont, delete=delete,
                include_hidden=include_hidden, ignore=ignore,
                ignore_timestamps=ignore_timestamps,
                object_prefix=object_prefix, verbose=verbose)
        clt._sync_folder_to_container.assert_called_once_with(folder_path, cont,
                prefix="", delete=delete, include_hidden=include_hidden,
                ignore=ignore, ignore_timestamps=ignore_timestamps,
                object_prefix=object_prefix, verbose=verbose)
    @patch("logging.Logger.info")
    def test_clt_sync_folder_to_container_failures(self, mock_log):
        # Failures recorded by the recursive sync (via _sync_summary) must
        # not prevent the public method from completing, and the delegation
        # arguments remain unchanged.
        clt = self.client
        cont = self.container
        folder_path = utils.random_unicode()
        delete = utils.random_unicode()
        include_hidden = utils.random_unicode()
        ignore = utils.random_unicode()
        ignore_timestamps = utils.random_unicode()
        object_prefix = utils.random_unicode()
        verbose = utils.random_unicode()
        num_objs = random.randint(1, 3)
        ctype = "text/fake"
        objs = [StorageObject(cont.object_manager,
                {"name": "obj%s" % num, "content_type": ctype, "bytes": 42})
                for num in range(num_objs)]
        cont.get_objects = Mock(return_value=objs)
        reason = utils.random_unicode()
        def mock_fail(*args, **kwargs):
            # Simulate a failed sync by bumping the failure counters.
            clt._sync_summary["failed"] += 1
            clt._sync_summary["failure_reasons"].append(reason)
        clt._sync_folder_to_container = Mock(side_effect=mock_fail)
        clt.sync_folder_to_container(folder_path, cont, delete=delete,
                include_hidden=include_hidden, ignore=ignore,
                ignore_timestamps=ignore_timestamps,
                object_prefix=object_prefix, verbose=verbose)
        clt._sync_folder_to_container.assert_called_once_with(folder_path, cont,
                prefix="", delete=delete, include_hidden=include_hidden,
                ignore=ignore, ignore_timestamps=ignore_timestamps,
                object_prefix=object_prefix, verbose=verbose)
    @patch("logging.Logger.info")
    @patch("os.listdir")
    def test_clt_under_sync_folder_to_container(self, mock_listdir, mock_log):
        # Of the five local files, "fake1"/"fake2" match the ignore pattern
        # "fake*"; "test2" exists remotely with hash "FAKE" (which should not
        # match the local content's checksum), so three uploads are expected.
        clt = self.client
        cont = self.container
        cont.upload_file = Mock()
        clt._local_files = []
        rem_obj = StorageObject(cont.object_manager, {"name": "test2",
                "last_modified": "2014-01-01T00:00:00.000001", "bytes": 42,
                "content_type": "text/fake", "hash": "FAKE"})
        clt._remote_files = {"test2": rem_obj}
        clt._delete_objects_not_in_list = Mock()
        prefix = ""
        delete = True
        include_hidden = False
        ignore = "fake*"
        ignore_timestamps = False
        object_prefix = ""
        verbose = utils.random_unicode()
        with utils.SelfDeletingTempDirectory() as folder_path:
            # Create a few files
            fnames = ["test1", "test2", "test3", "fake1", "fake2"]
            for fname in fnames:
                pth = os.path.join(folder_path, fname)
                open(pth, "w").write("faketext")
            mock_listdir.return_value = fnames
            clt._sync_folder_to_container(folder_path, cont, prefix, delete,
                    include_hidden, ignore, ignore_timestamps, object_prefix,
                    verbose)
        self.assertEqual(cont.upload_file.call_count, 3)
@patch("logging.Logger.info")
@patch("logging.Logger.error")
@patch("os.listdir")
def test_clt_under_sync_folder_to_container_upload_fail(self, mock_listdir,
mock_log_error, mock_log_info):
clt = self.client
cont = self.container
cont.upload_file = Mock(side_effect=Exception(""))
clt._local_files = []
rem_obj = StorageObject(cont.object_manager, {"name": "test2",
"last_modified": "2014-01-01T00:00:00.000001", "bytes": 42,
"content_type": "text/fake", "hash": "FAKE"})
clt._remote_files = {"test2": rem_obj}
clt._delete_objects_not_in_list = Mock()
prefix = ""
delete = True
include_hidden = False
ignore = "fake*"
ignore_timestamps = False
object_prefix = ""
verbose = utils.random_unicode()
with utils.SelfDeletingTempDirectory() as folder_path:
# Create a few files
fnames = ["test1", "test2", "test3", "fake1", "fake2"]
for fname in fnames:
pth = os.path.join(folder_path, fname)
open(pth, "w").write("faketext")
mock_listdir.return_value = fnames
clt._sync_folder_to_container(folder_path, cont, prefix, delete,
include_hidden, ignore, ignore_timestamps, object_prefix,
verbose)
self.assertEqual(cont.upload_file.call_count, 3)
@patch("logging.Logger.info")
@patch("os.listdir")
def test_clt_under_sync_folder_to_container_newer(self, mock_listdir,
mock_log):
clt = self.client
cont = self.container
cont.upload_file = Mock()
clt._local_files = []
rem_obj = StorageObject(cont.object_manager, {"name": "test2",
"last_modified": "3000-01-01T00:00:00.000001", "bytes": 42,
"content_type": "text/fake", "hash": "FAKE"})
clt._remote_files = {"test2": rem_obj}
clt._delete_objects_not_in_list = Mock()
prefix = ""
delete = True
include_hidden = False
ignore = "fake*"
ignore_timestamps = False
object_prefix = ""
verbose = utils.random_unicode()
with utils.SelfDeletingTempDirectory() as folder_path:
# Create a few files
fnames = ["test1", "test2", "test3", "fake1", "fake2"]
for fname in fnames:
pth = os.path.join(folder_path, fname)
open(pth, "w").write("faketext")
mock_listdir.return_value = fnames
clt._sync_folder_to_container(folder_path, cont, prefix, delete,
include_hidden, ignore, ignore_timestamps, object_prefix,
verbose)
self.assertEqual(cont.upload_file.call_count, 2)
@patch("logging.Logger.info")
@patch("os.listdir")
def test_clt_under_sync_folder_to_container_same(self, mock_listdir,
mock_log):
clt = self.client
cont = self.container
cont.upload_file = Mock()
clt._local_files = []
txt = utils.random_ascii()
rem_obj = StorageObject(cont.object_manager, {"name": "test2",
"last_modified": "3000-01-01T00:00:00.000001", "bytes": 42,
"content_type": "text/fake", "hash": utils.get_checksum(txt)})
clt._remote_files = {"test2": rem_obj}
clt._delete_objects_not_in_list = Mock()
prefix = ""
delete = True
include_hidden = False
ignore = "fake*"
ignore_timestamps = False
object_prefix = ""
verbose = utils.random_unicode()
with utils.SelfDeletingTempDirectory() as folder_path:
# Create a few files
fnames = ["test1", "test2", "test3", "fake1", "fake2"]
for fname in fnames:
pth = os.path.join(folder_path, fname)
with open(pth, "w") as f:
f.write(txt)
mock_listdir.return_value = fnames
clt._sync_folder_to_container(folder_path, cont, prefix, delete,
include_hidden, ignore, ignore_timestamps, object_prefix,
verbose)
self.assertEqual(cont.upload_file.call_count, 2)
args_list = mock_log.call_args_list
exist_call = any(["already exists" in call[0][0] for call in args_list])
self.assertTrue(exist_call)
@patch("logging.Logger.info")
def test_clt_under_sync_folder_to_container_nested(self, mock_log):
clt = self.client
clt._local_files = []
clt._remote_files = {}
cont = self.container
cont.upload_file = Mock()
clt._delete_objects_not_in_list = Mock()
sav = os.listdir
os.listdir = Mock()
prefix = "XXXXX"
delete = True
include_hidden = False
ignore = "fake*"
ignore_timestamps = False
object_prefix = utils.random_unicode(5)
verbose = utils.random_unicode()
with utils.SelfDeletingTempDirectory() as folder_path:
# Create a few files
fnames = ["test1", "test2", "test3", "fake1", "fake2"]
for fname in fnames:
pth = os.path.join(folder_path, fname)
open(pth, "w").write("faketext")
# Create a nested directory
dirname = "nested"
dirpth = os.path.join(folder_path, dirname)
os.mkdir(dirpth)
fnames.append(dirname)
os.listdir.side_effect = [fnames, []]
clt._sync_folder_to_container(folder_path, cont, prefix, delete,
include_hidden, ignore, ignore_timestamps, object_prefix,
verbose)
os.listdir = sav
self.assertEqual(cont.upload_file.call_count, 3)
    def test_clt_delete_objects_not_in_list(self):
        # _delete_objects_not_in_list should list remote objects under the
        # given prefix and bulk-delete those missing from clt._local_files.
        # NOTE(review): the ``async=True`` keyword is a syntax error on
        # Python >= 3.7 (``async`` became reserved); this code targets
        # older interpreters.
        clt = self.client
        clt._local_files = []
        cont = self.container
        object_prefix = utils.random_unicode(5)
        obj_names = ["test1", "test2"]
        cont.get_object_names = Mock(return_value=obj_names)
        clt._local_files = ["test2"]
        clt.bulk_delete = Mock()
        # "test1" is remote-only, so it is the one expected to be deleted.
        exp_del = ["test1"]
        clt._delete_objects_not_in_list(cont, object_prefix=object_prefix)
        cont.get_object_names.assert_called_once_with(prefix=object_prefix,
                full_listing=True)
        clt.bulk_delete.assert_called_once_with(cont, exp_del, async=True)
@patch("pyrax.object_storage.BulkDeleter.start")
def test_clt_bulk_delete_async(self, mock_del):
clt = self.client
cont = self.container
obj_names = ["test1", "test2"]
ret = clt.bulk_delete(cont, obj_names, async=True)
self.assertTrue(isinstance(ret, BulkDeleter))
    def test_clt_bulk_delete_sync(self):
        # Synchronous bulk_delete should poll (at bulk_delete_interval) until
        # the delete completes, then return the Swift response body mapped to
        # friendlier keys ("Number Deleted" -> "deleted", etc.).
        # NOTE(review): ``async=False`` requires Python < 3.7.
        clt = self.client
        cont = self.container
        obj_names = ["test1", "test2"]
        resp = fakes.FakeResponse()
        # fake_res is never used below.
        fake_res = utils.random_unicode()
        body = {
                "Number Not Found": 1,
                "Response Status": "200 OK",
                "Errors": [],
                "Number Deleted": 10,
                "Response Body": ""
        }
        expected = {
                'deleted': 10,
                'errors': [],
                'not_found': 1,
                'status': '200 OK'
        }
        clt.bulk_delete_interval = 0.01
        # Delay the fake DELETE slightly so the polling loop actually waits.
        def fake_bulk_resp(uri, data=None, headers=None):
            time.sleep(0.05)
            return (resp, body)
        clt.method_delete = Mock(side_effect=fake_bulk_resp)
        ret = clt.bulk_delete(cont, obj_names, async=False)
        self.assertEqual(ret, expected)
    def test_clt_bulk_delete_sync_413(self):
        # A 413 (request too large) response should surface the response
        # body and status as an entry in the returned "errors" list.
        # NOTE(review): ``async=False`` requires Python < 3.7.
        clt = self.client
        cont = self.container
        obj_names = ["test1", "test2"]
        resp = fakes.FakeResponse()
        # fake_res is never used below.
        fake_res = utils.random_unicode()
        body = {
                "Number Not Found": 0,
                "Response Status": "413 Request Entity Too Large",
                "Errors": [],
                "Number Deleted": 0,
                "Response Body": "Maximum Bulk Deletes: 10000 per request"
        }
        expected = {
                'deleted': 0,
                'errors': [
                    [
                        'Maximum Bulk Deletes: 10000 per request',
                        '413 Request Entity Too Large'
                    ]
                ],
                'not_found': 0,
                'status': '413 Request Entity Too Large'
        }
        clt.bulk_delete_interval = 0.01
        # Delay the fake DELETE slightly so the polling loop actually waits.
        def fake_bulk_resp(uri, data=None, headers=None):
            time.sleep(0.05)
            return (resp, body)
        clt.method_delete = Mock(side_effect=fake_bulk_resp)
        ret = clt.bulk_delete(cont, obj_names, async=False)
        self.assertEqual(ret, expected)
def test_clt_cdn_request_not_enabled(self):
clt = self.client
uri = utils.random_unicode()
method = random.choice(list(clt.method_dict.keys()))
clt.cdn_management_url = None
self.assertRaises(exc.NotCDNEnabled, clt.cdn_request, uri, method)
def test_clt_cdn_request(self):
clt = self.client
uri = utils.random_unicode()
method = "GET"
method = random.choice(list(clt.method_dict.keys()))
resp = utils.random_unicode()
body = utils.random_unicode()
clt.cdn_management_url = utils.random_unicode()
clt.method_dict[method] = Mock(return_value=(resp, body))
ret = clt.cdn_request(uri, method)
self.assertEqual(ret, (resp, body))
def test_clt_cdn_request_cont_not_cdn_enabled(self):
clt = self.client
uri = utils.random_unicode()
method = random.choice(list(clt.method_dict.keys()))
resp = utils.random_unicode()
body = utils.random_unicode()
clt.cdn_management_url = utils.random_unicode()
clt.method_dict[method] = Mock(side_effect=exc.NotFound(""))
clt.method_head = Mock(return_value=(resp, body))
self.assertRaises(exc.NotCDNEnabled, clt.cdn_request, uri, method)
def test_clt_cdn_request_not_found(self):
clt = self.client
uri = utils.random_unicode()
method = random.choice(list(clt.method_dict.keys()))
resp = utils.random_unicode()
body = utils.random_unicode()
clt.cdn_management_url = utils.random_unicode()
clt.method_dict[method] = Mock(side_effect=exc.NotFound(""))
clt.method_head = Mock(side_effect=exc.NotFound(""))
self.assertRaises(exc.NotFound, clt.cdn_request, uri, method)
def test_clt_update_progress(self):
clt = self.client
key = utils.random_unicode()
curr = random.randint(1, 100)
size = random.randint(1, 100)
clt.folder_upload_status = {key: {"uploaded": curr}}
clt._update_progress(key, size)
new_size = clt.get_uploaded(key)
self.assertEqual(new_size, curr + size)
def test_clt_cancel_folder_upload(self):
clt = self.client
key = utils.random_unicode()
clt.folder_upload_status = {key: {"continue": True}}
self.assertFalse(clt._should_abort_folder_upload(key))
clt.cancel_folder_upload(key)
self.assertTrue(clt._should_abort_folder_upload(key))
def test_folder_uploader_no_container(self):
pth1 = utils.random_unicode().replace(os.sep, "")
pth2 = utils.random_unicode().replace(os.sep, "")
pth3 = utils.random_unicode().replace(os.sep, "")
pth4 = utils.random_unicode().replace(os.sep, "")
root_folder = os.path.join(pth1, pth2, pth3, pth4)
container = None
ignore = utils.random_unicode()
upload_key = utils.random_unicode()
client = self.client
ttl = utils.random_unicode()
ret = FolderUploader(root_folder, container, ignore, upload_key,
client, ttl=ttl)
self.assertEqual(ret.container.name, pth4)
self.assertEqual(ret.root_folder, root_folder)
self.assertEqual(ret.ignore, [ignore])
self.assertEqual(ret.upload_key, upload_key)
self.assertEqual(ret.ttl, ttl)
self.assertEqual(ret.client, client)
def test_folder_uploader_container_name(self):
root_folder = utils.random_unicode()
container = utils.random_unicode()
ignore = utils.random_unicode()
upload_key = utils.random_unicode()
client = self.client
ttl = utils.random_unicode()
client.create = Mock()
ret = FolderUploader(root_folder, container, ignore, upload_key,
client, ttl=ttl)
client.create.assert_called_once_with(container)
def test_folder_uploader_folder_name_from_path(self):
pth1 = utils.random_unicode().replace(os.sep, "")
pth2 = utils.random_unicode().replace(os.sep, "")
pth3 = utils.random_unicode().replace(os.sep, "")
fullpath = os.path.join(pth1, pth2, pth3) + os.sep
ret = FolderUploader.folder_name_from_path(fullpath)
self.assertEqual(ret, pth3)
def test_folder_uploader_upload_files_in_folder_bad_dirname(self):
clt = self.client
cont = self.container
root_folder = utils.random_unicode()
ignore = "*FAKE*"
upload_key = utils.random_unicode()
folder_up = FolderUploader(root_folder, cont, ignore, upload_key, clt)
arg = utils.random_unicode()
dirname = "FAKE DIRECTORY"
fname1 = utils.random_unicode()
fname2 = utils.random_unicode()
fnames = [fname1, fname2]
ret = folder_up.upload_files_in_folder(arg, dirname, fnames)
self.assertFalse(ret)
def test_folder_uploader_upload_files_in_folder_abort(self):
clt = self.client
cont = self.container
root_folder = utils.random_unicode()
ignore = "*FAKE*"
upload_key = utils.random_unicode()
folder_up = FolderUploader(root_folder, cont, ignore, upload_key, clt)
arg = utils.random_unicode()
dirname = utils.random_unicode()
fname1 = utils.random_unicode()
fname2 = utils.random_unicode()
fnames = [fname1, fname2]
clt._should_abort_folder_upload = Mock(return_value=True)
clt.upload_file = Mock()
ret = folder_up.upload_files_in_folder(arg, dirname, fnames)
self.assertEqual(clt.upload_file.call_count, 0)
def test_folder_uploader_upload_files_in_folder(self):
clt = self.client
cont = self.container
ignore = "*FAKE*"
upload_key = utils.random_unicode()
arg = utils.random_unicode()
fname1 = utils.random_ascii()
fname2 = utils.random_ascii()
fname3 = utils.random_ascii()
with utils.SelfDeletingTempDirectory() as tmpdir:
fnames = [tmpdir, fname1, fname2, fname3]
for fname in fnames[1:]:
pth = os.path.join(tmpdir, fname)
open(pth, "w").write("faketext")
clt._should_abort_folder_upload = Mock(return_value=False)
clt.upload_file = Mock()
clt._update_progress = Mock()
folder_up = FolderUploader(tmpdir, cont, ignore, upload_key, clt)
ret = folder_up.upload_files_in_folder(arg, tmpdir, fnames)
self.assertEqual(clt.upload_file.call_count, len(fnames) - 1)
def test_folder_uploader_run(self):
clt = self.client
cont = self.container
ignore = "*FAKE*"
upload_key = utils.random_unicode()
arg = utils.random_unicode()
fname1 = utils.random_ascii()
fname2 = utils.random_ascii()
fname3 = utils.random_ascii()
with utils.SelfDeletingTempDirectory() as tmpdir:
fnames = [tmpdir, fname1, fname2, fname3]
for fname in fnames[1:]:
pth = os.path.join(tmpdir, fname)
open(pth, "w").write("faketext")
clt._should_abort_folder_upload = Mock(return_value=False)
folder_up = FolderUploader(tmpdir, cont, ignore, upload_key, clt)
folder_up.upload_files_in_folder = Mock()
folder_up.run()
self.assertEqual(folder_up.upload_files_in_folder.call_count, 1)
if __name__ == "__main__":
unittest.main()
| naemono/pyrax | tests/unit/test_object_storage.py | Python | apache-2.0 | 142,805 |
# 271. Encode and Decode String
# Design an algorithm to encode a list of strings to a string.
# The encoded string is then sent over the network
# and is decoded back to the original list of strings.
# Machine 1 (sender) has the function:
# string encode(vector<string> strs) {
#   // ... your code
#   return encoded_string;
# }
# Machine 2 (receiver) has the function:
# vector<string> decode(string s) {
#   //... your code
#   return strs;
# }
# So Machine 1 does:
# string encoded_string = encode(strs);
# and Machine 2 does:
# vector<string> strs2 = decode(encoded_string);
# strs2 in Machine 2 should be the same as strs in Machine 1.
# Implement the encode and decode methods.
# Note:
# The string may contain any possible characters out of 256 valid ascii characters.
# Your algorithm should be generalized enough to work on any possible characters.
# Do not use class member/global/static variables to store states.
# Your encode and decode algorithms should be stateless.
# Do not rely on any library method such as eval or serialize methods.
# You should implement your own encode/decode algorithm.
# https://gengwg.blogspot.com/2018/06/leetcode-271-encode-and-decode-strings.html
# 用长度+特殊字符+字符串来编码
class Codec(object):
    def encode(self, strs):
        """Encode a list of strings into a single string.

        Each string is emitted as ``<length>/<payload>``.  The length
        prefix makes the scheme safe for arbitrary payload characters,
        including digits and the '/' delimiter itself.

        :type strs: List[str]
        :rtype: str
        """
        return ''.join('%d/%s' % (len(s), s) for s in strs)

    def decode(self, s):
        """Decode a single string back into the original list of strings.

        Scans forward: at each position, the digits up to the next '/'
        give the payload length; the payload is sliced out wholesale and
        the cursor jumps past it.  Unlike the original character-by-
        character scan, this never misinterprets a '/' *inside* a payload
        (e.g. ``encode(['a/b'])`` previously raised ValueError on decode).

        :type s: str
        :rtype: List[str]
        """
        decoded = []
        i = 0
        while i < len(s):
            sep = s.index('/', i)
            length = int(s[i:sep])
            decoded.append(s[sep + 1:sep + 1 + length])
            # Skip directly past the payload; any '/' inside it is data.
            i = sep + 1 + length
        return decoded
if __name__ == '__main__':
    # Smoke test: round-trip a small list through encode/decode and show
    # both the wire format and the recovered list.
    encoded = Codec().encode(['123abc', 'xyz'])
    print(encoded)
    decoded = Codec().decode(encoded)
    print(decoded)
print(decoded) | gengwg/leetcode | 271_encode_decode_string.py | Python | apache-2.0 | 2,688 |
#!/usr/bin/env python
"""Repeatedly capture timestamped photos with the raspistill camera tool.

Fixes the broken shebang (``#!/usr/bin/env/python`` treated ``env`` as a
directory and could not execute).
"""
import datetime
from subprocess import call

while True:
    # Build a filesystem-safe file name from the current timestamp.
    timestamp = str(datetime.datetime.now())
    filename = timestamp.replace(' ', '_') + '.jpg'
    # -vf: vertical flip; -k with -t: wait for a keypress up to the timeout.
    call(['raspistill', '-f', '-fp', '-vf', '-k', '-t', '99999', '-o', filename])
# call("raspistill -f -fp -ex auto -awb auto -vf -k -t 99999999 -o test.jpg"}
| shingkai/asa_photobooth | tests/raspistillTest.py | Python | gpl-3.0 | 332 |
# -*- coding: utf-8 -*-
# vim: sw=4:ts=4:expandtab
"""
riko.modules.hash
~~~~~~~~~~~~~~~~~
Provides functions for hashing text.
Note: If the PYTHONHASHSEED environment variable is set to an integer value,
it is used as a fixed seed for generating the hash. Its purpose is to allow
repeatable hashing across python processes and versions. The integer must be a
decimal number in the range [0, 4294967295].
Specifying the value 0 will disable hash randomization. If this variable is set
to `random`, a random value is used to seed the hashes. Hash randomization is
is enabled by default for Python 3.2.3+, and disabled otherwise.
Examples:
basic usage::
>>> from riko.modules.hash import pipe
>>>
>>> _hash = ctypes.c_uint(hash('hello world')).value
>>> next(pipe({'content': 'hello world'}))['hash'] == _hash
True
Attributes:
OPTS (dict): The default pipe options
DEFAULTS (dict): The default parser options
"""
import ctypes
from . import processor
import pygogo as gogo
OPTS = {'ftype': 'text', 'ptype': 'none', 'field': 'content'}
DEFAULTS = {}
logger = gogo.Gogo(__name__, monolog=True).logger
def parser(word, _, skip=False, **kwargs):
    """Hash the pipe content.

    Args:
        word (str): The text to hash.
        _ (None): Ignored.
        skip (bool): Don't hash; return the original stream instead.
        kwargs (dict): Keyword arguments

    Kwargs:
        assign (str): Attribute to assign parsed content (default: hash)
        stream (dict): The original item (returned unchanged when skipping)

    Returns:
        The non-negative hash of ``word`` (via ``ctypes.c_uint``), or the
        original stream when ``skip`` is set.

    Examples:
        >>> _hash = ctypes.c_uint(hash('hello world')).value
        >>> item = {'content': 'hello world'}
        >>> parser(item['content'], None, stream=item) == _hash
        True
    """
    if skip:
        return kwargs['stream']
    # c_uint folds Python's (possibly negative) hash into an unsigned value.
    return ctypes.c_uint(hash(word)).value
@processor(DEFAULTS, isasync=True, **OPTS)
def async_pipe(*args, **kwargs):
    """A processor module that asynchronously hashes the field of an item.

    Args:
        item (dict): The entry to process
        kwargs (dict): The keyword arguments passed to the wrapper

    Kwargs:
        assign (str): Attribute to assign parsed content (default: hash)
        field (str): Item attribute to operate on (default: 'content')

    Returns:
        Deferred: twisted.internet.defer.Deferred item with hashed content

    Examples:
        >>> from riko.bado import react
        >>> from riko.bado.mock import FakeReactor
        >>>
        >>> _hash = ctypes.c_uint(hash('hello world')).value
        >>>
        >>> def run(reactor):
        ...     callback = lambda x: print(next(x)['hash'] == _hash)
        ...     d = async_pipe({'content': 'hello world'})
        ...     return d.addCallbacks(callback, logger.error)
        >>>
        >>> try:
        ...     react(run, _reactor=FakeReactor())
        ... except SystemExit:
        ...     pass
        ...
        True
    """
    # The @processor wrapper supplies the async plumbing; the actual work
    # is done by the shared synchronous parser.
    return parser(*args, **kwargs)
@processor(**OPTS)
def pipe(*args, **kwargs):
    """A processor that hashes the field of an item.

    Args:
        item (dict): The entry to process
        kwargs (dict): The keyword arguments passed to the wrapper

    Kwargs:
        assign (str): Attribute to assign parsed content (default: hash)
        field (str): Item attribute to operate on (default: 'content')

    Yields:
        dict: an item with hashed content

    Examples:
        >>> _hash = ctypes.c_uint(hash('hello world')).value
        >>> next(pipe({'content': 'hello world'}))['hash'] == _hash
        True
        >>> _hash = ctypes.c_uint(hash('greeting')).value
        >>> kwargs = {'field': 'title', 'assign': 'result'}
        >>> next(pipe({'title': 'greeting'}, **kwargs))['result'] == _hash
        True
    """
    # The @processor wrapper handles field extraction/assignment; the hash
    # itself comes from the shared parser.
    return parser(*args, **kwargs)
| nerevu/riko | riko/modules/hash.py | Python | mit | 3,926 |
"""Test functionalities of model component pruning functions."""
from itertools import chain
from typing import List, Set, Union
from cobra.core import Gene, Metabolite, Model, Reaction
from cobra.manipulation import (
delete_model_genes,
find_gene_knockout_reactions,
get_compiled_gene_reaction_rules,
prune_unused_metabolites,
prune_unused_reactions,
remove_genes,
undelete_model_genes,
)
def test_prune_unused_metabolites_output_type(model: Model) -> None:
    """Test the output type of unused metabolites pruning."""
    # Use a plain loop; the original list comprehension was evaluated
    # purely for its side effects.
    for rxn_id in ["RPI", "RPE", "GND"]:
        model.reactions.get_by_id(rxn_id).remove_from_model()
    model_pruned, unused = prune_unused_metabolites(model)
    assert isinstance(model_pruned, Model)
    # test that the output contains metabolite objects
    assert isinstance(unused[0], Metabolite)
def test_prune_unused_metabolites_sanity(model: Model) -> None:
    """Test the sanity of unused metabolites pruning."""
    orphans = [
        model.metabolites.ru5p__D_c,
        model.metabolites.akg_e,
        model.metabolites.akg_c,
    ]
    # Orphan the metabolites by removing every reaction they take part in.
    model.remove_reactions(
        set(chain.from_iterable(met.reactions for met in orphans))
    )
    model_pruned, unused = prune_unused_metabolites(model)
    for met in orphans:
        # Still present in the source model ...
        assert met in model.metabolites
        # ... but pruned from the returned copy.
        assert met not in model_pruned.metabolites
def test_prune_unused_reactions_output_type(model: Model) -> None:
    """Test the output type of unused reactions pruning."""
    model.add_reactions([Reaction("foo")])
    model_pruned, unused = prune_unused_reactions(model)
    assert isinstance(model_pruned, Model)
    # The pruned-away entries are reaction objects.
    assert isinstance(unused[0], Reaction)
def test_prune_unused_rxns_functionality(model: Model) -> None:
    """Test the sanity of unused reactions pruning."""
    new_ids = ("foo1", "foo2", "foo3")
    model.add_reactions([Reaction(rxn_id) for rxn_id in new_ids])
    model_pruned, unused = prune_unused_reactions(model)
    for rxn_id in new_ids:
        # Present in the source model but absent from the pruned copy.
        assert rxn_id in model.reactions
        assert rxn_id not in model_pruned.reactions
def _find_gene_knockout_reactions_fast(
    m: Model, gene_list: List[Gene]
) -> List[Reaction]:
    """Find gene knockout reactions using precompiled GPR rules."""
    rules = get_compiled_gene_reaction_rules(m)
    return find_gene_knockout_reactions(
        m,
        gene_list,
        compiled_gene_reaction_rules=rules,
    )
def _get_removed(m: Model) -> Set[str]:
    """Return the IDs of the reactions trimmed from the model."""
    return {reaction.id for reaction in m._trimmed_reactions}
def _gene_knockout_computation(
    m: Model,
    gene_ids: Union[List[str], Set[str]],
    expected_reaction_ids: Union[List[str], Set[str]],
) -> None:
    """Compute gene knockout.

    Checks that both the plain and the precompiled-rule variants of
    ``find_gene_knockout_reactions`` report exactly the expected reactions,
    then verifies that a non-cumulative ``delete_model_genes`` trims the
    same set before restoring the model with ``undelete_model_genes``.
    """
    genes = [m.genes.get_by_id(i) for i in gene_ids]
    expected_reactions = {m.reactions.get_by_id(i) for i in expected_reaction_ids}
    # Both code paths must agree with the expectation.
    removed1 = set(find_gene_knockout_reactions(m, genes))
    removed2 = set(_find_gene_knockout_reactions_fast(m, genes))
    assert removed1 == expected_reactions
    assert removed2 == expected_reactions
    delete_model_genes(m, gene_ids, cumulative_deletions=False)
    assert _get_removed(m) == expected_reaction_ids
    # Leave the model untouched for the caller.
    undelete_model_genes(m)
def test_gene_knockout(salmonella: Model) -> None:
    """Test gene knockout.

    Covers: multi-gene knockouts with known dependent reactions,
    cumulative vs. non-cumulative deletions, bound restoration after a
    reset, and GPR edge cases (gene names that are substrings of others,
    nested boolean rules, and names that happen to be Python expressions).
    """
    gene_list = ["STM1067", "STM0227"]
    dependent_reactions = {
        "3HAD121",
        "3HAD160",
        "3HAD80",
        "3HAD140",
        "3HAD180",
        "3HAD100",
        "3HAD181",
        "3HAD120",
        "3HAD60",
        "3HAD141",
        "3HAD161",
        "T2DECAI",
        "3HAD40",
    }
    _gene_knockout_computation(salmonella, gene_list, dependent_reactions)
    _gene_knockout_computation(salmonella, ["STM4221"], {"PGI"})
    _gene_knockout_computation(salmonella, ["STM1746.S"], {"4PEPTabcpp"})
    # test cumulative behavior
    delete_model_genes(salmonella, gene_list[:1])
    delete_model_genes(salmonella, gene_list[1:], cumulative_deletions=True)
    delete_model_genes(salmonella, ["STM4221"], cumulative_deletions=True)
    dependent_reactions.add("PGI")
    assert _get_removed(salmonella) == dependent_reactions
    # non-cumulative following cumulative
    delete_model_genes(salmonella, ["STM4221"], cumulative_deletions=False)
    assert _get_removed(salmonella) == {"PGI"}
    # make sure on reset that the bounds are correct
    reset_bound = salmonella.reactions.get_by_id("T2DECAI").upper_bound
    assert reset_bound == 1000.0
    # test computation when gene name is a subset of another
    test_model = Model()
    test_reaction_1 = Reaction("test1")
    test_reaction_1.gene_reaction_rule = "eggs or (spam and eggspam)"
    test_model.add_reactions([test_reaction_1])
    _gene_knockout_computation(test_model, ["eggs"], set())
    _gene_knockout_computation(test_model, ["eggs", "spam"], {"test1"})
    # test computation with nested boolean expression
    test_reaction_1.gene_reaction_rule = "g1 and g2 and (g3 or g4 or (g5 and g6))"
    _gene_knockout_computation(test_model, ["g3"], set())
    _gene_knockout_computation(test_model, ["g1"], {"test1"})
    _gene_knockout_computation(test_model, ["g5"], set())
    _gene_knockout_computation(test_model, ["g3", "g4", "g5"], {"test1"})
    # test computation when gene names are python expressions
    test_reaction_1.gene_reaction_rule = "g1 and (for or in)"
    _gene_knockout_computation(test_model, ["for", "in"], {"test1"})
    _gene_knockout_computation(test_model, ["for"], set())
    test_reaction_1.gene_reaction_rule = "g1 and g2 and g2.conjugate"
    _gene_knockout_computation(test_model, ["g2"], {"test1"})
    _gene_knockout_computation(test_model, ["g2.conjugate"], {"test1"})
    test_reaction_1.gene_reaction_rule = "g1 and (try:' or 'except:1)"
    _gene_knockout_computation(test_model, ["try:'"], set())
    _gene_knockout_computation(test_model, ["try:'", "'except:1"], {"test1"})
def test_remove_genes() -> None:
    """Test gene removal.

    Verifies that ``remove_genes`` strips genes from GPR rules, clears
    rules that can no longer be satisfied, and (with
    ``remove_reactions=True``) drops reactions whose rule depended solely
    on the removed gene.
    """
    m = Model("test")
    m.add_reactions([Reaction("r" + str(i + 1)) for i in range(8)])
    assert len(m.reactions) == 8
    rxns = m.reactions
    rxns.r1.gene_reaction_rule = "(a and b) or (c and a)"
    rxns.r2.gene_reaction_rule = "(a and b and d and e)"
    rxns.r3.gene_reaction_rule = "(a and b) or (b and c)"
    rxns.r4.gene_reaction_rule = "(f and b) or (b and c)"
    rxns.r5.gene_reaction_rule = "x"
    rxns.r6.gene_reaction_rule = "y"
    rxns.r7.gene_reaction_rule = "x or z"
    rxns.r8.gene_reaction_rule = ""
    assert "a" in m.genes
    assert "x" in m.genes
    # Removing "a" without removing reactions: rules requiring "a" are
    # emptied; rules with an "a"-free alternative keep that alternative.
    remove_genes(m, ["a"], remove_reactions=False)
    assert "a" not in m.genes
    assert "x" in m.genes
    assert rxns.r1.gene_reaction_rule == ""
    assert rxns.r2.gene_reaction_rule == ""
    assert rxns.r3.gene_reaction_rule == "b and c"
    assert rxns.r4.gene_reaction_rule == "(f and b) or (b and c)"
    assert rxns.r5.gene_reaction_rule == "x"
    assert rxns.r6.gene_reaction_rule == "y"
    assert rxns.r7.genes == {m.genes.x, m.genes.z}
    assert rxns.r8.gene_reaction_rule == ""
    # Removing "x" with remove_reactions=True drops r5 (its rule was only
    # "x") but keeps r7, whose rule falls back to "z".
    remove_genes(m, ["x"], remove_reactions=True)
    assert len(m.reactions) == 7
    assert "r5" not in m.reactions
    assert "x" not in m.genes
    assert rxns.r1.gene_reaction_rule == ""
    assert rxns.r2.gene_reaction_rule == ""
    assert rxns.r3.gene_reaction_rule == "b and c"
    assert rxns.r4.gene_reaction_rule == "(f and b) or (b and c)"
    assert rxns.r6.gene_reaction_rule == "y"
    assert rxns.r7.gene_reaction_rule == "z"
    assert rxns.r7.genes == {m.genes.z}
    assert rxns.r8.gene_reaction_rule == ""
| opencobra/cobrapy | src/cobra/test/test_manipulation/test_delete.py | Python | gpl-2.0 | 8,120 |
"""
CMSIS-DAP Interface Firmware
Copyright (c) 2009-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Extract and patch the interface without bootloader
"""
from options import get_options
from paths import get_interface_path, TMP_DIR
from utils import gen_binary, is_lpc, split_path
from os.path import join
if __name__ == '__main__':
    # Extract the interface firmware image (no bootloader) and convert the
    # ELF to a raw binary in the temporary build directory.
    options = get_options()
    in_path = get_interface_path(options.interface, options.target, bootloader=False)
    _, name, _ = split_path(in_path)
    out_path = join(TMP_DIR, name + '.bin')
    # Parenthesized print works identically on Python 2 (single-argument
    # statement form) and Python 3 (function call).
    print('\nELF: %s' % in_path)
    gen_binary(in_path, out_path, is_lpc(options.interface))
    print("\nBINARY: %s" % out_path)
| flyhung/CMSIS-DAP | tools/get_binary.py | Python | apache-2.0 | 1,166 |
'''
if __name__ == '__main__':
import multiprocessing
multiprocessing.freeze_support()
from hsviz import draw_func2 as df2
np.random.seed(seed=0) # RANDOM SEED (for reproducibility)
is_whiten = helpers.get_flag('--whiten')
dim = helpers.get_arg('--dim', type_=int, default=3)
K = helpers.get_arg('--K', type_=int, default=10)
num_clusters = K
__REAL_DATA_MODE__ = True
if __REAL_DATA_MODE__:
import main
hs = main.main(defaultdb='NAUTS')
cache_dir = hs.dirs.cache_dir
cx2_desc = hs.feats.cx2_desc
data = np.vstack(cx2_desc)
else:
cache_dir = 'akmeans_test'
data = np.random.rand(1000, 3)
datax2_clusterx, clusters = precompute_akmeans(data, num_clusters,
force_recomp=False,
cache_dir=cache_dir)
fig = plot_clusters(data, datax2_clusterx, clusters, num_pca_dims=dim,
whiten=is_whiten)
fig.show()
exec(df2.present())
#IDEA:
#intead have each datapoint "pull" on one another. Maybe warp the space
#in which they sit with a covariance matrix. basically let gravity do
#the clustering. Check to see if any algos like this.
#itertools.groupby
#groups
'''
| SU-ECE-17-7/hotspotter | hstest/test_algos.py | Python | apache-2.0 | 1,296 |
import gtk
from dnd.drag import DragTarget
from dnd.drop import DropTarget
#TODO build a GenericDragProvider and a TreeDragProvider
class DragProvider(object):
"""
A DragProvider handles complicated Drag&Drop interactions with multiple sources
and targets.
"""
inspector = None
SOURCE_ACTIONS = gtk.gdk.ACTION_COPY | gtk.gdk.ACTION_MOVE | gtk.gdk.ACTION_LINK
DEST_ACTIONS = gtk.gdk.ACTION_COPY | gtk.gdk.ACTION_MOVE | gtk.gdk.ACTION_LINK
    def __init__(self, widget):
        """Attach the provider to *widget* and hook up all drag-and-drop
        related gtk signals.

        The widget is expected to provide the gtk.TreeView API (it is used
        via enable_model_drag_source/dest and get_selection elsewhere in
        this class).
        """
        self.widget = widget
        self.drag_targets = []
        self.drop_targets = []
        self.connect()
        tv = widget
        # Source-side signals.
        tv.connect("drag_begin", self._on_drag_begin)
        tv.connect("drag-data-get", self._on_drag_get_data)
        # Destination-side signals.
        tv.connect("drag-data-received", self._on_drag_received_data)
        tv.connect("drag-data-delete", self._on_drag_delete_data)
        tv.connect('drag_motion', self._on_drag_motion)
        tv.connect('drag_drop', self._on_drag_drop)
        # Re-evaluate which drag targets are valid whenever the tree
        # selection changes.
        tv.get_selection().connect('changed', self._on_selection_change)
    def connect(self):
        """
        connect the currently available targets to the drag and drop interface
        of gtk, also does some magic (that is not yet completely understood) so
        that the treeview uses appropriate drag icons (row preview)
        this method is implicitly called by append()
        """
        tv = self.widget
        # TODO make this customizeable, allow LINK action
        # stuff that gtk shall do automatically
        # NOTE(review): GTK_HANDLERS is computed but never used below.
        GTK_HANDLERS = gtk.DEST_DEFAULT_HIGHLIGHT | gtk.DEST_DEFAULT_DROP
        # Build the (mime, flags, unique-id) tuples gtk expects; ids start
        # at 1500 here (the selection-change handler uses a 1600 base).
        drag_targets = []
        for i, target in enumerate(self.drag_targets):
            drag_targets.append((target.mime, target.app | target.widget, 1500+i))
        # first enable tree model drag/drop (to get the actual row as drag_icon)
        # however this alone will only work for TreeStore/ListStore,
        # so we need to manage drag and drop by hand due to the GenericTreeModel
        tv.enable_model_drag_source(gtk.gdk.BUTTON1_MASK,
                                    drag_targets,
                                    self.SOURCE_ACTIONS)
        tv.drag_source_set(gtk.gdk.BUTTON1_MASK,
                           drag_targets,
                           self.SOURCE_ACTIONS)
        # Destinations start with an empty target list; actual drop targets
        # are negotiated per-drag in the motion/drop handlers.
        tv.enable_model_drag_dest([], self.DEST_ACTIONS)
        tv.drag_dest_set(0, # gtk.DEST_DEFAULT_ALL, # if DEFAULT_ALL is set, data preview won't work
                         [],
                         self.DEST_ACTIONS)
    def append(self, obj):
        """
        Add a mime type, that this widgets can drag and drop with

        *obj* may be a DragTarget, a DropTarget, or an object implementing
        both roles; each isinstance check is applied independently.  The
        gtk target lists are refreshed afterwards via connect().
        """
        if isinstance(obj, DragTarget):
            self.drag_targets.append(obj)
        if isinstance(obj, DropTarget):
            self.drop_targets.append(obj)
        self.connect()
def _on_drag_begin(self, widget, context):
"""
save the current selection to the context
"""
# this is tree-view specific
try:
context.set_data("org-selection", widget.get_selection().get_selected())
except:
pass
    def _on_selection_change(self, tree_selection):
        """
        the DragProvider listens to selection_change events as targets may be valid
        for a certain selection, but invalid for others. Thus we update the target
        list here. This cannot be done in drag_begin as there, the context is already
        completely established
        """
        # Keep only the targets that can produce data for the current
        # selection (get_data(...) returning None means "not applicable").
        # NOTE(review): this largely duplicates connect(), but uses a 1600
        # id base instead of 1500 and filters by get_data().
        drag_targets = []
        for i, target in enumerate(self.drag_targets):
            if target.get_data(self.widget, None) is not None:
                drag_targets.append((target.mime, target.app | target.widget, 1600+i))
        # first enable tree model drag/drop (to get the actual row as drag_icon)
        # however this alone will only work for TreeStore/ListStore,
        # so we need to manage drag and drop by hand due to the GenericTreeModel
        tv = self.widget
        tv.enable_model_drag_source(gtk.gdk.BUTTON1_MASK,
                                    drag_targets,
                                    self.SOURCE_ACTIONS)
        tv.drag_source_set(gtk.gdk.BUTTON1_MASK,
                           drag_targets,
                           self.SOURCE_ACTIONS)
def get_source_target(self, context, mime):
for target in self.drag_targets:
if target.mime == mime:
return target
return None
def _on_drag_get_data(self, widget, context, selection, info, etime):
"""
called when the destination requests data from the source
selects a target and fill the selection with the data provided
by the target
"""
target = self.get_source_target(context, selection.target)
if not target:
return False
# save the selected target, so we can use it in the drag-delete event
context.set_data("target", target.mime)
data = target.get_data(widget, context)
if target.mime == "TEXT": # so type will be COMPOUND_TEXT whatever foo?
selection.set_text(data, -1)
else:
selection.set(selection.target, 8, data)
return True
def get_suiting_target(self, widget, context, x, y, data=None):
"""
find a suiting target within the registered drop_targets
that allows to drop
"""
for target in self.drop_targets:
if target.mime in context.targets:
same_app = context.get_source_widget() is not None
if target.app & gtk.TARGET_SAME_APP != 0 and not same_app:
continue
if target.app & gtk.TARGET_OTHER_APP != 0 and same_app:
continue
same_widget = context.get_source_widget() is widget
if target.widget & gtk.TARGET_SAME_WIDGET != 0 and not same_widget:
continue
if target.widget & gtk.TARGET_OTHER_WIDGET != 0 and same_widget:
continue
if data is None and target.preview_required:
# we can potentially drop here, however need a data preview first
return target
if target.can_drop(widget, context, x, y, data):
return target
# no suitable target found
return None
    def can_handle_data(self, widget, context, x, y, time, data=None):
        """
        Determines if the widget accepts this drag.

        Uses context.drag_status(action, time) to force a certain action.
        Note this always returns True and uses drag_status to indicate
        available actions (which may be none).
        If a target has the preview_required attribute set, a preview will be
        requested, so that the target can determine if it can drop the data.
        """
        target = self.get_suiting_target(widget, context, x, y, data)
        if target is None:
            # normally return false, however if a target is only valid at a certain
            # position, we want to reevaluate constantly
            context.drag_status(0, time)
            return True
        if data is None and target.preview_required:
            # request the dragged data; once it arrives this method is
            # re-entered with data set, and the branches below decide
            recv_func = lambda context, data, time: \
                self.can_handle_data(widget, context, x, y, time, data)
            self.preview(widget, context, target.mime, recv_func, time)
            return True
        if context.suggested_action & target.actions != 0:
            # the action gtk suggests is one the target supports
            context.drag_status(context.suggested_action, time)
        else:
            # TODO or do i have to select one explicitly?
            context.drag_status(target.actions, time)
        return True
def preview(self, widget, context, mime, callback, etime):
"""
can be called to retrieve the dragged data in a drag-motion event
"""
def inspector(context, data, time):
ret = callback(context, data, time)
self.inspector = None
return ret
self.inspector = inspector
# if gtk.DEST_DEFAULT_ALL is set do:
# widget.drag_dest_set(0, [], 0)
widget.drag_get_data(context, mime, etime)
    def _on_drag_received_data(self, widget, context, x, y, selection,
                               target_id, etime):
        """
        Callback function for received data upon dnd-completion.

        Either forwards the data to a pending preview inspector or lets
        the suiting drop target consume it and finishes the context.
        """
        data = selection.data
        widget.emit_stop_by_name('drag-data-received')
        # if we want to preview the data in the drag-motion handler
        # we will call drag_get_data there which eventually calls this
        # method, however the context will not be the actual drop
        # operation, so we forward this to a callback function
        # that needs to be set up for this
        if self.inspector is not None:
            return self.inspector(context, data, etime)
        target = self.get_suiting_target(widget, context, x, y, data)
        if target is None:
            return False
        self.context = context
        ret = bool(target.receive_data(widget, context, x, y, data, etime))
        # only delete successful move actions
        delete = ret and context.action == gtk.gdk.ACTION_MOVE
        context.finish(ret, delete, etime)
def _on_drag_drop(self, widget, context, x, y, time):
"""
User initiated drop action, return False if drop is not allowed
otherwise request the data. The actual drop handling is then done
in the _on_drag_receive_data function.
"""
widget.emit_stop_by_name('drag-drop')
target = self.get_suiting_target(widget, context, x, y)
if target is None:
return False
widget.drag_get_data(context, target.mime, time)
return True
def _on_drag_delete_data(self, widget, context):
"""
Delete data from original site, when `ACTION_MOVE` is used.
"""
widget.emit_stop_by_name('drag-data-delete')
# select the target based on the mime type we stored in the drag-get-data handler
target = self.get_source_target(context, context.get_data("target"))
cmd = target.delete_data(widget, context)
if cmd:
self.execute(cmd)
    def _on_drag_motion(self, widget, context, x, y, time):
        """
        Figure out if a drop at the current coordinates is possible for any
        registered target.
        """
        widget.emit_stop_by_name('drag-motion')
        if not self.can_handle_data(widget, context, x, y, time):
            context.drag_status(0, time)
            return False
        # do the highlighting
        # TODO: this is treeview dependent, move it to TreeDropTarget
        try:
            path, pos = widget.get_dest_row_at_pos(x, y)
            widget.set_drag_dest_row(path, pos)
        except TypeError:
            # get_dest_row_at_pos returned None (pointer not over a row),
            # so unpacking raised TypeError: highlight after the last row
            # NOTE(review): set_drag_dest_row normally takes a tree path
            # (tuple); a bare int is passed here -- confirm this works
            widget.set_drag_dest_row(len(widget.get_model()) - 1, gtk.TREE_VIEW_DROP_AFTER)
        return True
| carloscanova/python-odml | odml/gui/DragProvider.py | Python | bsd-3-clause | 11,021 |
#!/usr/bin/env python3
#-*- coding:utf-8 -*-
"""
Very basic 2D abstract geometry package. It defines these geometrical
constructs:
* `GeometricObject` - abstract base class, not meant to be used
directly
* `Point`
* `Vector`
* `BoundingBox`
* `Line`
* `Ray`
* `Segment`
* `Polygon`
* ...for now
Notes
-----
Except for the `Point` and `Vector` classes which will be discussed below, all
of the other classes define a `__getitem__` method that can be used to retrieve
the points defining the `GeometricObject` by indices.
The `Point` class defines the `__getitem__` method in a separate way,
i.e. it returns the Cartesian coordinates of the `Point` by indices.
The `Vector` class does the same except it returns the x & y Cartesian
coordinates in this case.
"""
# system modules
import math
import random
# user defined module
import utils as u
# acceptable uncertainty for calculating intersections and such
UNCERTAINTY = 1e-5
def get_perpendicular_to(obj, at_point=None):
    """
    Build a `Vector` (or a `Line`) perpendicular to `obj`.

    Parameters
    ----------
    obj : {vector, line-like}
        The object to compute the perpendicular for.
    at_point : point-like, optional
        When given, a `Line` passing through `at_point` and
        perpendicular to `obj` is returned instead of a bare `Vector`.

    Returns
    -------
    out : {vector, line}
        A `Vector` rotated 90 degrees from `obj`'s direction, or a
        `Line` through `at_point` with that direction.

    Raises
    ------
    TypeError
        If `obj` is neither `Vector` nor `Line`-like, or if `at_point`
        is given but not point-like.
    """
    if not isinstance(obj, (Vector, Line)):
        raise TypeError('Expected vector or line-like, but got: '
                        '{0} instead.'.format(obj))
    if at_point is not None and not Point.is_point_like(at_point):
        raise TypeError('Expected point-like, but got: '
                        '{0} instead.'.format(at_point))
    # for Line-like input work with its directional vector
    direction = obj.v if isinstance(obj, Line) else obj
    # rotate the direction by 90 degrees to obtain the perpendicular
    perpendicular_vector = Vector(1, direction.phi + math.pi/2,
                                  coordinates='polar')
    if at_point is None:
        return perpendicular_vector
    # a Line through at_point, perpendicular to obj
    return Line(at_point, perpendicular_vector)
class GeometricObject(object):
    """
    Abstract geometric object class.

    It's not meant to be used directly. This only implements methods that
    are called on other objects.
    """
    def __str__(self, **kwargs):
        # subclasses pass their attributes through **kwargs
        return '{0}({1})'.format(type(self).__name__, kwargs)

    def __contains__(self, x):
        """
        Searches for x in "itself". If we're talking about a `Point`
        or a `Vector` then this searches within their components (x,
        y). For everything else it searches within the list of points
        (vertices).

        Parameters
        ----------
        x : {point, scalar}
            The object to search for.

        Returns
        -------
        out : {True, False}
            `True` if we find `x` in `self`, else `False`.
        """
        try:
            # relies on the subclass's __getitem__/__eq__
            next(i for i in self if i == x)
            return True
        except StopIteration:
            return False

    def intersection(self, obj):
        """
        Return points of intersection if any.

        This method just calls the intersection method on the other objects
        that have it implemented.

        Parameters
        ----------
        obj : geometric object
            `obj` is any object that has intersection implemented.

        Returns
        -------
        ret : {point, None}
            The point of intersection if any, if not, just `None`.
        """
        return obj.intersection(self)

    def translate(self, dx, dy):
        """
        Translate `self` by given amounts on x and y.

        Parameters
        ----------
        dx, dy : scalar
            Amount to translate (relative movement).
        """
        if isinstance(self, Polygon):
            # we don't want to include the last point since that's also the
            # first point and if we were to translate it, it would end up being
            # translated two times
            sl = slice(0, -1)
        else:
            sl = slice(None)
        for p in self[sl]:
            p.translate(dx, dy)

    def rotate(self, theta, point=None, angle='degrees'):
        """
        Rotate `self` around pivot `point`.

        Parameters
        ----------
        theta : scalar
            The angle to be rotated by.
        point : {point-like}, optional
            If given this will be used as the rotation pivot.
        angle : {'degrees', 'radians'}, optional
            This tells the function how `theta` is passed: as degrees or as
            radians. Default is degrees.
        """
        polygon_list = None
        if isinstance(self, Polygon):
            # we don't want to include the last point since that's also the
            # first point and if we were to rotate it, it would end up being
            # rotated two times
            sl = slice(0, -1)
            # we are going to create a new Polygon actually after rotation
            # since it's much easier to do it this way
            polygon_list = []
        else:
            sl = slice(None)
        for p in self[sl]:
            # rotate each individual point
            p.rotate(theta, point, angle)
            if polygon_list is not None:
                polygon_list.append(p)
        if polygon_list:
            # in the case of Polygon we build a new rotated one
            # NOTE(review): rebinding ``self`` only changes the local name;
            # callers keep the original object.  The points were already
            # rotated in place above, so the geometry still updates --
            # confirm the rebind is intentional.
            self = Polygon(polygon_list)
        else:
            # in case of other GeometricObjects
            self._v = Vector(self.p1, self.p2).normalized
        # reset former cached values in self
        if hasattr(self, '_cached'):
            self._cached = {}
class Point(GeometricObject):
    """
    An abstract mathematical point in the 2D plane.

    It can be built by passing no parameters to the constructor (giving
    the origin coordinates ``(0, 0)``), a `Point`, a `Vector`, a
    `tuple` or `list` of length two, or two scalar values.

    Parameters
    ----------
    *args : {two scalars, point-like, vector}, optional
        `Point`-like means `Point`, `tuple` or `list` of length 2
        (see `Point.is_point_like`).

    Raises
    ------
    TypeError
        If the arguments are not of an accepted type.
    """
    # NOTE: defining __eq__ without __hash__ makes Point unhashable in
    # Python 3, so instances cannot be used as dict keys / set members.

    def __init__(self, *args):
        if len(args) == 0:
            self._x = 0.
            self._y = 0.
        elif len(args) == 1:
            arg = args[0]
            if Point.is_point_like(arg):
                self._x = float(arg[0])
                self._y = float(arg[1])
            elif isinstance(arg, Vector):
                self._x = arg.x
                self._y = arg.y
            else:
                # BUGFIX: previously an unusable Point without _x/_y was
                # silently created for a bad single argument, deferring
                # the error to the first attribute access; fail early
                raise TypeError('The construct needs no arguments, '
                                'Point, Vector, list, tuple (of length '
                                '2) or two values, but got instead: '
                                '{0}'.format(args))
        elif len(args) == 2:
            self._x = float(args[0])
            self._y = float(args[1])
        else:
            raise TypeError('The construct needs no arguments, '
                            'Point, list, tuple (of length 2) or two '
                            'values, but got instead: {0}'.format(args))

    @property
    def x(self):
        """[scalar] Get the `x` coordinate."""
        return self._x

    @property
    def y(self):
        """[scalar] Get the `y` coordinate."""
        return self._y

    def __str__(self):
        return super(Point, self).__str__(x=self.x, y=self.y)

    def __getitem__(self, idx):
        """
        Return the Cartesian coordinates by index (0 -> x, 1 -> y).
        """
        return (self.x, self.y)[idx]

    def __len__(self):
        """
        The length of a `Point` object is 2.
        """
        return 2

    def __eq__(self, point):
        """
        Equality (==) operator for two points.

        Two points are equal when both coordinates differ by less than
        `UNCERTAINTY`.  Non point-like operands compare unequal.

        Parameters
        ----------
        point : {point-like}
            The point to test against.

        Returns
        -------
        res : {True, False}
        """
        if Point.is_point_like(point):
            return abs(self.x - point[0]) < UNCERTAINTY and \
                   abs(self.y - point[1]) < UNCERTAINTY
        return False

    def __lt__(self, point):
        """
        Less than (<) operator for two points.

        Orders by `y` first and breaks ties with `x`.

        Parameters
        ----------
        point : {point-like}
            The point to test against.

        Returns
        -------
        res : {True, False}
        """
        if self.y < point[1]:
            return True
        if self.y > point[1]:
            return False
        # same y: compare x
        if self.x < point[0]:
            return True
        return False

    @staticmethod
    def is_point_like(obj):
        """
        See if `obj` is `Point`-like.

        `Point`-like means `Point` or a list or tuple of length 2.

        Parameters
        ----------
        obj : geometric object

        Returns
        -------
        out : {True, False}
            `True` if obj is `Point`-like, else `False`.
        """
        if isinstance(obj, Point):
            return True
        if isinstance(obj, (tuple, list)) and len(obj) == 2:
            return True
        return False

    def is_left(self, obj):
        """
        Determine if `self` is left|on|right of an infinite `Line` or
        `Point`.

        Parameters
        ----------
        obj : {point-like, line-like}
            The `GeometricObject` to test against.

        Returns
        -------
        out : scalar
            >0 if `self` is left of `Line`,
            =0 if `self` is on `Line`,
            <0 if `self` is right of `Line`.

        Raises
        ------
        ValueError
            In case something else than a `Line`-like or `Point`-like
            is given.
        """
        if Line.is_line_like(obj):
            # 2D cross product of (p2-p1) and (self-p1)
            return ((obj[1][0] - obj[0][0]) * (self.y - obj[0][1]) - \
                    (self.x - obj[0][0]) * (obj[1][1] - obj[0][1]))
        if Point.is_point_like(obj):
            return obj[0] - self.x
        raise ValueError('Expected a Line or Point, but got: {}'
                         .format(obj))

    def distance_to(self, obj):
        """
        Calculate the distance to another `GeometricObject`.

        Supported: `Point`-like and `Line` (including its subclasses).

        Parameters
        ----------
        obj : geometric object
            The object for which to calculate the distance to.

        Returns
        -------
        out : float
            Distance from this `Point` to `obj`.
            NOTE: for unsupported types this falls through and returns
            `None`.
        """
        if Point.is_point_like(obj):
            # Euclidean distance
            return ((self.x - obj[0])**2 + (self.y - obj[1])**2)**(.5)
        if isinstance(obj, Line):
            # project (self - p1) onto the unit normal of the line
            perpendicular = get_perpendicular_to(obj)
            distance_to = abs(perpendicular.x*(self.x - obj.p1.x) + \
                              perpendicular.y*(self.y - obj.p1.y))
            return distance_to

    def belongs_to(self, obj):
        """
        Check if the `Point` is part of a `GeometricObject`.

        This method delegates to ``obj.has``.

        Returns
        -------
        out : {True, False}
        """
        return obj.has(self)

    def translate(self, dx, dy):
        """
        See `GeometricObject.translate`.
        """
        self._x += dx
        self._y += dy

    def move(self, x, y):
        """
        Move `self` to the given absolute coordinates (unlike
        `translate` which is relative).
        """
        self._x = x
        self._y = y

    def rotate(self, theta, point=None, angle='degrees'):
        """
        Rotate `self` by angle theta.

        Parameters
        ----------
        theta : scalar
            Angle to rotate by (see `angle`).
        point : {None, point-like}, optional
            Pivot point to rotate against (instead of origin).
        angle : {'radians', 'degrees'}, optional
            How `theta` is passed: radians or degrees. Default degrees.
        """
        if angle == 'degrees':
            theta = math.radians(theta)
        if point is None:
            # rotation about the origin
            x_new = math.cos(theta) * self.x - math.sin(theta) * self.y
            y_new = math.sin(theta) * self.x + math.cos(theta) * self.y
        else:
            # rotation about an arbitrary pivot
            point = Point(point)
            x_new = math.cos(theta) * (self.x - point.x) - math.sin(theta) * \
                    (self.y - point.y) + point.x
            y_new = math.sin(theta) * (self.x - point.x) + math.cos(theta) * \
                    (self.y - point.y) + point.y
        self._x = x_new
        self._y = y_new
class Vector(GeometricObject):
    """
    An abstract 2D `Vector`.

    Defined by Cartesian components `x`, `y` or polar components `rho`
    (length) and `phi` (angle with the X axis in radians, always mapped
    to ``[0, 2*PI)``).

    Parameters
    ----------
    *args : {two scalars, vector, one or two point-likes}
        * ``Vector(v)`` copies vector `v` (other arguments ignored).
        * ``Vector(p)`` uses the point-like's coordinates as components,
          i.e. direction ``Point(0, 0) -> Point(x, y)``.
        * ``Vector(p1, p2)`` points from `p1` to `p2` with `rho` equal
          to the distance between the two points.
        * ``Vector(a, b)`` uses `a`, `b` as components, interpreted
          according to `coordinates`.
    **kwargs : coordinates={'cartesian', 'polar'}, optional
        How two scalar arguments are interpreted: as `x`/`y` (default)
        or as `rho`/`phi` (radians).

    Raises
    ------
    TypeError
        In case `args` is not of an accepted form.
    ValueError
        If `coordinates` is neither ``'cartesian'`` nor ``'polar'``.
    """

    def __init__(self, *args, **kwargs):
        coordinates = kwargs.get('coordinates', 'cartesian')
        if len(args) == 1:
            arg = args[0]
            if isinstance(arg, Vector):
                # copy constructor
                self._x = arg.x
                self._y = arg.y
                self._rho = arg.rho
                self._phi = arg.phi
            elif Point.is_point_like(arg):
                self._x = arg[0]
                self._y = arg[1]
                self._calculate_polar_coords()
            else:
                # BUGFIX: previously fell through silently, leaving the
                # components unset
                raise TypeError('The constructor needs vector, point-like '
                                'or two numbers, but instead it was given: '
                                '{0}'.format(args))
        elif len(args) == 2:
            if Point.is_point_like(args[0]) and Point.is_point_like(args[1]):
                # vector from point args[0] to point args[1]
                self._x = args[1][0] - args[0][0]
                self._y = args[1][1] - args[0][1]
                self._calculate_polar_coords()
                return
            # BUGFIX: the strings were compared with ``is`` which only
            # works by accident of CPython interning; use ``==``
            if coordinates == 'cartesian':
                self._x = args[0]
                self._y = args[1]
                self._calculate_polar_coords()
            elif coordinates == 'polar':
                self._rho = args[0]
                self._phi = u.float_to_2pi(args[1])
                self._calculate_cartesian_coords()
            else:
                raise ValueError("coordinates must be 'cartesian' or "
                                 "'polar', but got: {0}".format(coordinates))
        else:
            raise TypeError('The constructor needs vector, point-like or '
                            'two numbers, but instead it was given: '
                            '{0}'.format(args))

    @property
    def x(self):
        """[scalar] Get the x component of the `Vector`."""
        return self._x

    @property
    def y(self):
        """[scalar] Get the y component of the `Vector`."""
        return self._y

    @property
    def rho(self):
        """[scalar] Get the length of the `Vector` (polar coordinates)."""
        return self._rho

    @property
    def phi(self):
        """
        [scalar] Get the angle (radians).

        Get the angle (in radians) of the `Vector` with the X axis
        (polar coordinates). `phi` will always be mapped to ``[0, 2PI)``.
        """
        return self._phi

    @u.cached_property
    def normalized(self):
        """
        [Vector] Get a normalized (unit length) copy of `self`.
        """
        return Vector(1, self.phi, coordinates='polar')

    def __str__(self):
        return super(Vector, self).__str__(x=self.x, y=self.y, rho=self.rho,
                                           phi=math.degrees(self.phi))

    def __getitem__(self, idx):
        """
        Return the Cartesian components by index (0 -> x, 1 -> y).
        """
        return (self.x, self.y)[idx]

    def __len__(self):
        """
        The length of a `Vector` is 2.
        """
        return 2

    def __neg__(self):
        """
        Return a new `Vector` with the same length pointing in the
        opposite direction.
        """
        return Vector(-self.x, -self.y)

    def __mul__(self, arg):
        """
        Dot product with another `Vector`, or multiplication by scalar.

        For more details see `dot`.
        """
        return self.dot(arg)

    def __add__(self, vector):
        """
        Add two vectors component-wise.

        Parameters
        ----------
        vector : vector
            The vector to be added to `self`.

        Returns
        -------
        out : vector
            A new vector ``(self.x + vector.x, self.y + vector.y)``.
        """
        return Vector(self.x + vector.x, self.y + vector.y)

    def __sub__(self, vector):
        """
        Subtraction of two vectors; `__add__` with the negated operand.
        """
        return self.__add__(-vector)

    def _calculate_polar_coords(self):
        """
        Helper for internally calculating `self.rho` and `self.phi`
        from the Cartesian components.
        """
        # length of the vector
        self._rho = Point(0, 0).distance_to(Point(self.x, self.y))
        # angle with the X axis, mapped from (-PI, PI] to [0, 2PI)
        self._phi = math.atan2(self.y, self.x)
        if self.phi < 0:
            self._phi += 2*math.pi

    def _calculate_cartesian_coords(self):
        """
        Helper for internally calculating `self.x` and `self.y` from the
        polar components.
        """
        self._x = self.rho * math.cos(self.phi)
        self._y = self.rho * math.sin(self.phi)

    @staticmethod
    def random_direction():
        """
        Create a randomly oriented unit `Vector` with `phi` in the
        interval ``[0, PI)``.

        Returns
        -------
        out : vector
            A unit `Vector` with random orientation in positive Y
            direction.
        """
        return Vector(1, random.random()*math.pi, coordinates='polar')

    def dot(self, arg):
        """
        Dot product with another `Vector`, or multiplication by scalar.

        Parameters
        ----------
        arg : {scalar, vector}
            A number scales this `Vector`; another `Vector` produces
            the dot product.

        Returns
        -------
        res : {float, vector}

        Raises
        ------
        TypeError
            In case `arg` is not number or `Vector`.
        """
        if isinstance(arg, Vector):
            # dot product
            return self.x * arg.x + self.y * arg.y
        elif isinstance(arg, (int, float)):
            # scalar multiplication
            return Vector(self.x * arg, self.y * arg)
        # BUGFIX: the message previously lacked the format placeholder
        raise TypeError('Expected a vector or number, but got '
                        '{0}'.format(arg))

    def cross(self, arg):
        """
        2D cross product with another `Vector` (a scalar, not a vector).

        Parameters
        ----------
        arg : vector
            Another `Vector` to calculate the cross product with.

        Returns
        -------
        res : float

        Raises
        ------
        TypeError
            In case `arg` is not a `Vector`.
        """
        if isinstance(arg, Vector):
            return self.x * arg.y - self.y * arg.x
        # BUGFIX: the message previously lacked the format placeholder
        raise TypeError('Expected a vector, but got {0}'.format(arg))

    def parallel_to(self, obj):
        """
        Is `self` parallel with `obj` (`Vector` or `Line`-like)?

        Uses the geometric interpretation of the 2D cross product: it is
        (nearly) zero for parallel directions.

        Parameters
        ----------
        obj : {vector, line-like}

        Returns
        -------
        res : {True, False}
        """
        if isinstance(obj, Line):
            obj = obj.v
        return abs(self.cross(obj)) < UNCERTAINTY

    def perpendicular_to(self, obj):
        """
        Is `self` perpendicular to `obj` (`Vector` or `Line`-like)?

        The dot product of perpendicular vectors is 0.

        Parameters
        ----------
        obj : {vector, line-like}

        Returns
        -------
        res : {True, False}
        """
        if isinstance(obj, Line):
            obj = obj.v
        return self * obj == 0

    def translate(*args):
        """Dummy function since it doesn't make sense to translate a
        `Vector`."""
        pass

    def rotate(self, theta, angle='degrees'):
        """
        Rotate `self` by `theta`.

        Parameters
        ----------
        theta : scalar
            Angle by which to rotate.
        angle : {'degrees', 'radians'}, optional
            Specifies how `theta` is given. Default is degrees.
        """
        if angle == 'degrees':
            theta = math.radians(theta)
        # BUGFIX: the original assigned to the read-only ``phi`` property
        # (``self.phi += theta``) which raised AttributeError; update the
        # backing field instead, keeping phi mapped to [0, 2PI)
        self._phi = u.float_to_2pi(self._phi + theta)
        self._calculate_cartesian_coords()
        # drop cached derived values (e.g. ``normalized``) which depend
        # on the old direction
        if hasattr(self, '_cached'):
            self._cached = {}
class BoundingBox(GeometricObject):
    """
    Represents the far extremities of another `GeometricObject`
    (except for `Vector`).

    It is totally defined by two points: `p1` (bottom-left) and `p2`
    (top-right). For convenience it also has `left`, `top`, `right`
    and `bottom` attributes.

    Parameters
    ----------
    obj : geometric object
        The object for which to assign a `BoundingBox`.

    Raises
    ------
    TypeError
        If `obj` is not a `GeometricObject` or if it is a `Vector`.
    """

    def __init__(self, obj):
        if not isinstance(obj, GeometricObject) or isinstance(obj, Vector):
            raise TypeError('The argument must be of type GeometricObject '
                            '(except for Vector), but got {} instead'
                            .format(obj))
        # collect the extremes of the object's points
        xs = [point.x for point in obj]
        ys = [point.y for point in obj]
        self._left = min(xs)
        self._top = max(ys)
        self._right = max(xs)
        self._bottom = min(ys)
        # BUGFIX: the corners were built with swapped axes --
        # Point(bottom, left) / Point(top, right) -- which contradicts
        # the documented bottom-left / top-right semantics; a Point takes
        # (x, y), so p1 = (left, bottom) and p2 = (right, top)
        self._p1 = Point(self.left, self.bottom)
        self._p2 = Point(self.right, self.top)
        self._width = abs(self.right - self.left)
        self._height = abs(self.top - self.bottom)

    @property
    def left(self):
        """[scalar] Smallest x coordinate."""
        return self._left

    @property
    def top(self):
        """[scalar] Largest y coordinate."""
        return self._top

    @property
    def right(self):
        """[scalar] Largest x coordinate."""
        return self._right

    @property
    def bottom(self):
        """[scalar] Smallest y coordinate."""
        return self._bottom

    @property
    def p1(self):
        """
        (point-like) Get the bottom-left `Point`.
        """
        return self._p1

    @property
    def p2(self):
        """
        (point-like) Get the top-right `Point`.
        """
        return self._p2

    @property
    def width(self):
        """[scalar] Horizontal extent (right - left)."""
        return self._width

    @property
    def height(self):
        """[scalar] Vertical extent (top - bottom)."""
        return self._height

    def __str__(self):
        return super(BoundingBox, self).__str__(left=self.left, top=self.top,
                                                right=self.right,
                                                bottom=self.bottom,
                                                p1=str(self.p1),
                                                p2=str(self.p2))

    def __getitem__(self, idx):
        """
        Get the defining points through index (0 -> p1, 1 -> p2).

        Parameters
        ----------
        idx : scalar
            The index of the `Point`.

        Returns
        -------
        out : point
            The selected `Point`.
        """
        return (self.p1, self.p2)[idx]

    def __len__(self):
        """
        The `BoundingBox` is made of 2 points so its length is 2.
        """
        return 2
class Line(GeometricObject):
    """
    An abstract mathematical `Line`.

    It is defined by either two points or by a `Point` and a
    `Vector`.

    Parameters
    ----------
    arg1 : point-like
        The passed in parameters can be either two points or a `Point`
        and a `Vector`. For more on `Point`-like see the
        `Point` class.
    arg2 : {point-like, vector}
        If a `Vector` is given as `arg2` instead of a
        `Point`-like, then `p2` will be calculated for t = 1 in the
        vectorial definition of the line (see notes).

    See Also
    --------
    Point, Vector

    Notes
    -----
    A line can be defined in three ways, but we use here only the vectorial
    definition for which we need a `Point` and a `Vector`.
    If two points are given the `Vector`
    :math:`\\boldsymbol{\mathtt{p_1p_2}}` will be calculated and then we can
    define the `Line` as:

    .. math::
        \\boldsymbol{r} = \\boldsymbol{r_0} + t \cdot
        \\boldsymbol{\mathtt{p_1p_2}}

    Here :math:`t` is a parameter.
    """
    def __init__(self, arg1, arg2):
        if Point.is_point_like(arg1) and Point.is_point_like(arg2):
            # detect if arguments are of type Point-like, if so
            # store them and calculate the directional Vector
            self._p1, self._p2 = Point(arg1), Point(arg2)
            self._v = Vector(self.p1, self.p2).normalized
        else:
            # if we have instead a Point and a Vector just calculate
            # self.p2 (p1 plus one unit of the direction vector)
            self._p1, self._v = Point(arg1), arg2.normalized
            self._p2 = Point(self.p1.x + self.v.x, self.p1.y + self.v.y)

    @property
    def p1(self):
        """
        [point] Get the 1st `Point` that defines the `Line`.
        """
        return self._p1

    @property
    def p2(self):
        """
        [point] Get the 2nd `Point` that defines the `Line`.
        """
        return self._p2

    @property
    def v(self):
        """
        [vector] Get the unit `Vector` pointing from `self.p1` to `self.p2`.
        """
        return self._v

    @property
    def phi(self):
        """
        [scalar] Get `self.v.phi`. Convenience method.
        """
        return self.v.phi

    def __str__(self, **kwargs):
        return super(Line, self).__str__(v=str(self.v),
                                         p1=str(self.p1), p2=str(self.p2),
                                         **kwargs)

    def __getitem__(self, idx):
        """
        Get the points that define the `Line` by index (0 -> p1, 1 -> p2).

        Parameters
        ----------
        idx : scalar
            The index for `Point`.

        Returns
        -------
        ret : point
            Selected `Point` by index.
        """
        return (self.p1, self.p2)[idx]

    def __len__(self):
        """The `Line` is made of 2 points so it's length is 2.'"""
        return 2

    @staticmethod
    def is_line_like(obj):
        """
        Check if an object is in the form of `Line`-like for fast
        computations (not necessary to build lines).

        Parameters
        ----------
        obj : anything
            `obj` is checked if is of type `Line` (i.e. not `Ray` nor
            `Segment`) or if this is not true then of the form: ((0, 1),
            (3, 2)) or [[0, 2], [3, 2]] or even combinations of these.

        Returns
        -------
        res : {True, False}
        """
        # exact type check on purpose: Ray/Segment must not qualify
        if type(obj) == Line or (all(len(item) == 2 for item in obj) and \
                                 len(obj) == 2):
            return True
        return False

    def intersection(self, obj):
        """
        Find if `self` is intersecting the provided object.

        If an intersection is found, the `Point` of intersection is
        returned, except for a few special cases. For further explanation
        see the notes.

        Parameters
        ----------
        obj : geometric object

        Returns
        -------
        out : {geometric object, tuple}
            If they intersect then return the `Point` where this
            happened, else return `None` (except for `Line` and
            `Polygon`: see notes).

        Raises
        ------
        TypeError
            If argument is not geometric object then a `TypeError` is raised.

        Notes
        -----
        * `Line`: in case `obj` is `Line`-like and `self`
          then `self` and the `Line` defined by `obj` are checked for
          colinearity also in which case `utils.inf` is returned.
        * `Polygon`: in the case of intersection with a
          `Polygon` a tuple of tuples is returned. The nested tuple is
          made up by the index of the intersected side and intersection point
          (e.g. ``((intersection_point1, 1), ( intersection_point2, 4))`` where
          `1` is the first intersected side of the `Polygon` and `4`
          is the second one). If the `Line` doesn't intersect any
          sides then `None` is returned as in the usual case.
        """
        if isinstance(obj, Line):
            # solve self.p1 + s*self.v == obj.p1 + t*obj.v for t by
            # crossing both sides with self.v (the s term drops out)
            self_p1 = Vector(self.p1)
            obj_p1 = Vector(obj.p1)
            denominator = self.v.cross(obj.v)
            numerator = (obj_p1 - self_p1).cross(self.v)
            if abs(denominator) < UNCERTAINTY:
                # parallel lines
                if abs(numerator) < UNCERTAINTY:
                    # colinear lines
                    return u.inf
                return None
            # calculate interpolation parameter (t): Vector(obj.p1) + obj.v * t
            t = numerator/denominator
            intersection_point = Point(obj_p1 + obj.v * t)
            # since obj.v is a unit vector, t is the signed distance of
            # the intersection from obj.p1 along obj
            if type(obj) is Ray:
                # in case it's a Ray we restrict the values to [0, inf)
                if not (t >= UNCERTAINTY):
                    return None
            if type(obj) is Segment:
                # and for Segment we have values in the
                # interval [0, obj.p1.distance_to(obj.p2)]
                if not (UNCERTAINTY <= t <= obj.p1.distance_to(obj.p2) - \
                        UNCERTAINTY):
                    return None
            return intersection_point
        if isinstance(obj, Polygon):
            # if it's a Polygon traverse all the edges and return
            # the intersections as a list of items. The first element in
            # one item is the intersection Point and the second element in
            # the item is the edge's number
            intersections = []
            for idx, side in enumerate(obj.edges):
                intersection_point = self.intersection(side)
                if intersection_point is None or \
                   intersection_point == u.inf:
                    continue
                # skip duplicates (e.g. crossing exactly at a vertex
                # shared by two consecutive edges)
                if intersections and intersection_point == intersections[-1][0]:
                    continue
                intersections.append([intersection_point, idx])
            # if there are no intersections return the usual None
            return intersections or None
        raise TypeError('Argument needs to be geometric object, but '
                        'got instead: {0}'.format(obj))

    def has(self, point):
        """
        Inspect if `point` (`Point`-like) is part of this `Line`.

        Parameters
        ----------
        point : point-like
            The `Point` to test if it's part of this `Line`.

        Returns
        -------
        ret : {True, False}
            If it's part of this `Line` then return True, else False.

        See also
        --------
        Line.intersection, Ray.has, Segment.has
        """
        # create a Vector from p1 to the point of interest;
        # if this Vector is parallel to our direction Vector
        # then the point is on the Line, otherwise it is not
        vector = Vector(self.p1, point)
        return vector.parallel_to(self)

    def perpendicular_to(self, obj):
        """
        Find out if provided `Line` (or `Vector`) is perpendicular to `self`.

        Returns
        -------
        ret : {True, False}
        """
        if isinstance(obj, Line):
            obj = obj.v
        return self.v.perpendicular_to(obj)

    def parallel_to(self, obj):
        """
        Find out if provided `Vector` or `Line`-like is
        parallel to `self`.

        Parameters
        ----------
        obj : {vector, line-like}
            The `Vector` or `Line`-like to compare
            parallelism with.

        Returns
        -------
        ret : {True, False}
            If `self` and `Line` are parallel then return `True`,
            else `False`.
        """
        if isinstance(obj, Line):
            obj = obj.v
        return self.v.parallel_to(obj)
class Ray(Line):
    """
    A `Ray` extension on `Line`.
    The only difference is that this has a starting `Point` (`p1`)
    which represents the end of the `Ray` in that direction.
    Parameters
    ----------
    arg1 : point-like
        The passed in parameters can be either two points or a `Point`
        and a `Vector` For more on `Point`-like see the
        `Point` class.
    arg2 : {point-like, vector}
        See `arg1`.
    See also
    --------
    Line, Segment, Vector
    """
    def intersection(self, obj):
        """
        Tries to find the `Point` of intersection.
        The difference between this and the `Line` intersection method
        is that this one has the additional constraint that if the
        `Point` of intersection is on the line then it also must be
        within the bounds of the `Ray`.
        Parameters
        ----------
        obj : geometric object
        Returns
        -------
        out : {geometric object, None}
            `GeometricObject` if intersection is possible, else the
            cases from `Line`.intersection.
        See also
        --------
        Line.intersection, Segment.intersection
        """
        # if we're dealing with a plain Line skip to the parent
        # intersection method (a Line has no bounds to check)
        if type(obj) is Line:
            return obj.intersection(self)
        intersections = super(Ray, self).intersection(obj)
        if isinstance(obj, Polygon):
            # keep only the polygon intersections that actually lie on
            # this Ray (i.e. not behind its origin p1)
            if intersections:
                intersections = [item for item in intersections
                                 if self.has(item[0])]
            return intersections
        if intersections and intersections != u.inf:
            # parameter r of the intersection point along the direction
            # vector; use the y component for (nearly) vertical rays to
            # avoid dividing by ~0
            if abs(self.p1.x - self.p2.x) < UNCERTAINTY:
                # vertical line
                r = (intersections.y - self.p1.y) / self.v.y
            else:
                r = (intersections.x - self.p1.x) / self.v.x
            # r below the tolerance means the point lies behind (or on)
            # the Ray's origin, so it is not on the Ray
            if not (r >= UNCERTAINTY):
                return None
        return intersections
    def has(self, point):
        """
        Check if `point` is part of `self`.
        Parameters
        ----------
        point : point-like
            The `Point` to check.
        Returns
        -------
        ret : {True, False}
            If the point is on the `Ray` then return `True`, else
            `False`.
        See also
        --------
        Ray.intersection, Line.has, Segment.has
        """
        # BUGFIX: previously this fell through and implicitly returned
        # None when the point was not on the underlying Line; return
        # False explicitly so the documented {True, False} contract
        # holds (None is falsy, so callers are unaffected).
        if not super(Ray, self).has(point):
            return False
        # the point is on the infinite line; it is on the Ray when the
        # vector p1->point projects positively onto the direction vector
        p1_to_point = Vector(self.p1, point)
        return p1_to_point * self.v >= UNCERTAINTY
class Segment(Line):
    """
    An extension on `Line`.
    This class imposes the `length` property on a `Line`. A
    `Segment` is a finite `Line`.
    Parameters
    ----------
    arg1 : point-like
        The passed in parameters can be either two points or a `Point`
        and a `Vector` For more on `Point`-like see the `Point` class.
    arg2 : {point-like, vector}
        See `arg1`.
    Raises
    ------
    ValueError
        If length is less than or equal to 0.
    See also
    --------
    Line, Ray, Vector
    """
    @u.cached_property
    def length(self):
        """
        [scalar] Get the length of the `Segment`.
        I.e. the distance from `self.p1` to `self.p2`.
        """
        return self.p1.distance_to(self.p2)
    @u.cached_property
    def bounding_box(self):
        """
        [BoundingBox] get the `BoundingBox` of `self`.
        """
        return BoundingBox(self)
    def __str__(self):
        # include the length in the generic representation
        return super(Segment, self).__str__(length=self.length)
    def intersection(self, obj):
        """
        Tries to find the `Point` of intersection.
        The difference between this and the `Line` intersection method
        is that this one has the additional constraint that if the
        `Point` of intersection is on the line then it also must be
        within the bounds of the `Segment`.
        Parameters
        ----------
        obj : geometric object
        Returns
        -------
        out : {geometrical object, None}
            `GeometricObject` if intersection is possible, else the
            cases from `Line`.intersection.
        See also
        --------
        Line.intersection, Ray.intersection
        """
        # a plain Line has no bounds, let it handle the intersection
        if type(obj) is Line:
            return obj.intersection(self)
        intersections = super(Segment, self).intersection(obj)
        if isinstance(obj, Polygon):
            # keep only the polygon intersections that actually lie
            # between this Segment's end points
            if intersections:
                intersections = [item for item in intersections
                                 if self.has(item[0])]
            return intersections
        if intersections and intersections != u.inf:
            # parameter r of the intersection point along the direction
            # vector; use the y component for (nearly) vertical
            # segments to avoid dividing by ~0
            if abs(self.p1.x - self.p2.x) < UNCERTAINTY:
                # vertical line
                r = (intersections.y - self.p1.y) / self.v.y
            else:
                r = (intersections.x - self.p1.x) / self.v.x
            # the point must fall strictly between p1 and p2
            # (within the UNCERTAINTY tolerance at both ends)
            if not (UNCERTAINTY <= r <= self.p1.distance_to(self.p2) -
                    UNCERTAINTY):
                return None
        return intersections
    def has(self, point):
        """
        Check if `point` is part of `self`.
        Parameters
        ----------
        point : point-like
            The point to check.
        Returns
        -------
        ret : {True, False}
            If the point is on the `Segment` then return `True`, else
            `False`.
        See also
        --------
        Segment.intersection, Line.has, Ray.has
        """
        # BUGFIX: previously this fell through and implicitly returned
        # None when the point was not on the underlying Line; return
        # False explicitly so the documented {True, False} contract
        # holds (None is falsy, so callers are unaffected).
        if not super(Segment, self).has(point):
            return False
        # the point lies on the infinite line; it is inside the segment
        # iff the distances to the two end points add up (within
        # tolerance) to the segment's length
        p1_to_point = self.p1.distance_to(point)
        p2_to_point = self.p2.distance_to(point)
        return p1_to_point + p2_to_point - self.length < UNCERTAINTY
    def get_point_on_self(self, frac=None):
        """
        Get a point on this `Segment` based on `frac`.
        If no argument is given then the `Point` on the
        `Segment` will be placed randomly.
        Parameters
        ----------
        frac : float, optional
            If `frac` is given then the new `Point`'s position will
            be relative to the length of the `Segment` and to the
            first `Point` (`self.p1`). `frac` can be only in the
            interval (0, 1).
        Returns
        -------
        out : point
            The new `Point`'s position on the `Segment`.
        Raises
        ------
        ValueError
            If `frac` is outside the open interval (0, 1) then
            a `ValueError` is raised.
        """
        # BUGFIX: use an explicit None test -- the old `frac or ...`
        # expression treated frac=0 as "not given" and silently
        # produced a random point instead of raising the documented
        # ValueError.
        if frac is None:
            # arbitrary location, kept away from the end points by
            # the UNCERTAINTY tolerance
            frac = UNCERTAINTY + random.random() * (1 - UNCERTAINTY)
        # reject anything outside the open interval (0, 1)
        if not (0 < frac < 1):
            raise ValueError('The argument (frac) cannot be '
                             'outside of the open interval (0, 1), '
                             'got: {0}'.format(frac))
        # calculate the displacement relative to the first Point
        dx = (self.p2.x - self.p1.x) * frac
        dy = (self.p2.y - self.p1.y) * frac
        # calculate the location of the new Point on the Segment
        new_x = self.p1.x + dx
        new_y = self.p1.y + dy
        return Point(new_x, new_y)
class Polygon(GeometricObject):
    """
    A general (closed) `Polygon` class.
    The `Polygon` is made out of points (vertices of type
    `Point`) and edges (`Segment`). It can be created by
    passing a list of `Point`-like objects.
    Parameters
    ----------
    vertices : {list/tuple of point-like}
        The `list` of `Point`-like objects that make the
        `Polygon`. The `self.edges` of the `Polygon` are
        automatically created and stored. If the length of the `vertices` list
        is < 3 this cannot be a `Polygon` and a `ValueError` will be
        raised.
    Raises
    ------
    ValueError
        In case length of the `vertices` `list` is smaller than 3.
    """
    def __init__(self, vertices):
        if len(vertices) < 3:
            raise ValueError('List of points cannot have less than 3 '
                             'elements')
        self._vertices = [Point(point) for point in vertices]
        # this is for internal use only
        # first initialize to None so that area property can check for it
        self._diameter = None
        self._width = None
        self._area = None
        # setup self._area at this point (with signs); the property is
        # evaluated purely for its caching side effect here
        self.area
        if self._area < 0:
            # the vertices are in clockwise order so set them
            # in counterclockwise order
            self.vertices.reverse()
            # change the sign of the area appropriately
            self._area = -self._area
        # now select the lowest (and left if equal to some other)
        # and make it the first vertex in the Polygon
        lowest_idx = self._vertices.index(min(self._vertices))
        # rotate such that the lowest (and left) most vertex is the first one
        self._vertices = u.rotated(self._vertices, -lowest_idx)
        # and add the first vertex to the list at the end for further processing
        self._vertices += [self._vertices[0]]
        # build the (closed) edge list from consecutive vertex pairs
        self._edges = [Segment(p1, p2) for p1, p2 in
                       zip(self._vertices[:-1], self._vertices[1:])]
    @property
    def vertices(self):
        """
        [list of points] Get the `vertices`.
        The list of `Point`-like objects that make up the
        `Polygon`. Its length cannot be less than 3.
        """
        return self._vertices
    @property
    def edges(self):
        """
        [list of segments] Get the `edges`, that is the segments.
        These are the `edges` of the `Polygon`, which are
        defined by the list of vertices. The `Polygon` is considered
        to be closed (ie. the last segment is defined by points `pn` and `p1`).
        """
        return self._edges
    @property
    def area(self):
        """
        [scalar] Get the (positive) area of this `Polygon`.
        Using the standard formula [WPolygon]_ for the area of a `Polygon`:
        .. math::
            A &= \\frac{1}{2} \\sum_{i=0}^{n-1} (x_iy_{i+1} - x_{i+1}y_i)
        :math:`A` can be negative depending on the orientation of the `Polygon`
        but this property always returns the positive value.
        Notes
        -----
        This function (property) also sets up `self._area` if it's not set.
        This variable (`self._area`) is meant to be just for internal use (at
        least for now).
        """
        # first add the first vertex to the list to close the ring
        if self._area is None:
            vertices = self.vertices + [self.vertices[0]]
            # shoelace formula; the sign encodes the winding order
            self._area = 1/2. * sum([v1.x*v2.y - v2.x*v1.y for v1, v2 in
                                     zip(vertices[:-1], vertices[1:])])
        return abs(self._area)
    @u.cached_property
    def bounding_box(self):
        """
        [BoundingBox] Get `BoundingBox` of `self`.
        """
        return BoundingBox(self)
    @property
    def bbox_width(self):
        """
        [scalar] Get `self.bounding_box.width`.
        """
        return self.bounding_box.width
    @property
    def bbox_height(self):
        """
        [scalar] Get `self.bounding_box.height`.
        """
        return self.bounding_box.height
    @property
    def diameter(self):
        """
        [scalar] Get the `diameter` of the `Polygon`.
        Refer to `_compute_diameter_width` for details on how this is
        calculated.
        See also
        --------
        Polygon.width, Polygon._compute_diameter_width
        """
        if self._diameter is None:
            self._diameter, self._width = self._compute_diameter_width()
        return self._diameter
    @property
    def width(self):
        """
        [scalar] Get the `width` of the `Polygon`.
        Refer to `_compute_diameter_width` for details on how this is
        calculated.
        See also
        --------
        Polygon.diameter, Polygon._compute_diameter_width
        """
        if self._width is None:
            self._diameter, self._width = self._compute_diameter_width()
        return self._width
    @u.cached_property
    def centroid(self):
        """
        [Point] Get the centroid (`Point`) of the `Polygon`.
        Defined as [WPolygon]_:
        .. math::
            C_x &= \\frac{1}{6A} \\sum_{i=0}^{i=n-1}(x_i + x_{i+1})
            (x_iy_{i+1}-x_{i+1}y_i)
            C_y &= \\frac{1}{6A} \\sum_{i=0}^{i=n-1}(y_i + y_{i+1})
            (x_iy_{i+1}-x_{i+1}y_i)
        where :math:`A` is the area using the standard formula for a `Polygon`
        [WPolygon]_ so it can take negative values.
        """
        # NOTE: self._area is signed in principle, but __init__ has
        # already normalized the winding order so it is positive here
        vertices = self.vertices + [self.vertices[0]]
        x = 1/(6.*self._area) * \
            sum([(v1.x + v2.x)*(v1.x*v2.y - v2.x*v1.y) for v1, v2 in
                 zip(vertices[:-1], vertices[1:])])
        y = 1/(6.*self._area) * \
            sum([(v1.y + v2.y)*(v1.x*v2.y - v2.x*v1.y) for v1, v2 in
                 zip(vertices[:-1], vertices[1:])])
        return Point(x, y)
    def __str__(self):
        # the last vertex duplicates the first one, so drop it
        return super(Polygon, self).__str__(vertices=[str(v)
                                            for v in self.vertices[:-1]])
    def __getitem__(self, idx):
        """
        Retrieve points (`self.vertices`) by `idx`.
        Parameters
        ----------
        idx : scalar
            The index of the `Point` (`vertex`).
        Returns
        -------
        ret : point
            The `vertex` by index.
        """
        return self.vertices[idx]
    def __len__(self):
        """
        The length of the `Polygon` is defined by the length of the
        `self.vertices` list.
        """
        return len(self.vertices)
    def _compute_diameter_width(self):
        """
        Compute the `diameter` and `width` of the `Polygon`.
        This is meant for internal use only. The `diameter` is defined by the
        length of the rectangle of minimum area enclosing the `Polygon`, and the
        `width` of the `Polygon` is then just the width of the same rectangle of
        minimum area enclosing the `Polygon`. It's calculation is based on
        [Arnon1983]_.
        """
        def distance(xi, yi, xj, yj, m):
            # distance between the two parallel lines of slope m that
            # pass through (xi, yi) and (xj, yj) respectively
            bi = yi - m*xi
            bj = yj - m*xj
            return abs(bj - bi)/math.sqrt(m*m+1.)
        v = self.vertices
        n = len(v) - 1
        j = 0
        # NOTE(review): rotating-calipers style sweep over each edge;
        # `*` on Vectors is assumed to be the dot product (it is mixed
        # with explicit .dot/.cross calls below) -- confirm against the
        # Vector class.
        for i in range(n):
            while Vector(v[i], v[i + 1]) * Vector(v[j], v[j + 1]) > 0:
                j = (j + 1) % n
            if i == 0:
                k = j
            while Vector(v[i], v[i + 1]).cross(Vector(v[k], v[k + 1])) > 0:
                k = (k + 1) % n
            if i == 0:
                m = k
            while Vector(v[i], v[i + 1]).dot(Vector(v[m], v[m + 1])) < 0:
                m = (m + 1) % n
            if abs(v[i].x - v[i + 1].x) < UNCERTAINTY:
                # (nearly) vertical edge: extents are axis-aligned
                d1 = abs(v[k].x - v[i].x)
                d2 = abs(v[m].y - v[j].y)
            elif abs(v[i].y - v[i + 1].y) < UNCERTAINTY:
                # (nearly) horizontal edge: extents are axis-aligned
                d1 = abs(v[k].y - v[i].y)
                d2 = abs(v[m].x - v[j].x)
            else:
                # generic edge: measure along the edge's slope and its
                # perpendicular
                s = (v[i + 1].y - v[i].y)/(v[i + 1].x - v[i].x)
                d1 = distance(v[i].x, v[i].y, v[k].x, v[k].y, s)
                d2 = distance(v[j].x, v[j].y, v[m].x, v[m].y, -1./s)
            # keep the rectangle of minimum area seen so far
            Ai = d1*d2
            if i == 0 or Ai < A:
                A = d1*d2
                res_d1 = d1
                res_d2 = d2
        # diameter is the larger side, width the smaller one
        return (res_d1, res_d2) if res_d1 > res_d2 else (res_d2, res_d1)
    def has(self, point):
        """
        Determine if `point` is inside `Polygon` based on the winding
        number.
        Parameters
        ----------
        point : point-like
            The `point` to test if it's included in `self` or not.
        Returns
        -------
        out : {True, False}
            `True` if the `point` is included in `self` (`wn` > 0), else
            `False` (`wn` == 0).
        Notes
        -----
        Winding number algorithm (C++ implementation):
        http://geomalgorithms.com/a03-_inclusion.html
        """
        # initialize the winding number
        wn = 0
        # be sure to convert point to Point
        point = Point(point)
        # loop through all of the vertices in the polygon (two by two)
        for v1, v2 in zip(self.vertices[:-1], self.vertices[1:]):
            if v1.y < point.y:
                if v2.y > point.y:
                    # an upward crossing
                    if point.is_left((v1, v2)) > 0:
                        # point left of edge
                        wn += 1
            else:
                if v2.y <= point.y:
                    # a downward crossing
                    if point.is_left((v1, v2)) < 0:
                        # point right of edge
                        wn -= 1
        # a non-zero winding number means the point is inside
        return wn > 0
    def get_point_on_self(self, edge_no=None, frac=None):
        """
        Return a random `Point` on the given `Segment`
        defined by `edge_no`.
        Parameters
        ----------
        edge_no : int, optional
            The index of the `edge` from the edge list. Default is
            `edge_no` = 0, which means calculate on the first edge.
        frac : float, optional
            A number in the open interval (0, 1). The point will be
            placed on the edge with the edge number edge_no and
            relative to the first point in the specified edge. If
            left to default (`None`), a random `Point` will be
            returned on the specified edge.
        Returns
        -------
        out : point
            The `Point` on this edge (`Segment`).
        """
        # BUGFIX: the docstring promises a default of 0, but
        # self.edges[None] raised TypeError; apply the documented
        # default explicitly.
        if edge_no is None:
            edge_no = 0
        segment = self.edges[edge_no]
        return segment.get_point_on_self(frac)
    def divide(self, obj=None, edge_no=None, frac=None, relative_phi=None,
               drelative_phi=0):
        """
        Divide the `Polygon`.
        Parameters
        ----------
        obj : line-like, optional
            If no `obj` is given then `edge_no` is used to build a `Ray`
            from a randomly chosen Point on `self.edges[edge_no]` with
            inward direction and the closest intersection `Point` to
            `Ray.p1` is used to divide the `Polygon` in two, else all
            of the points given by the intersection between the
            `Polygon` and `obj` are used to split the
            `Polygon` in any number of polygons.
        edge_no : int, optional
            If given, `self.edges[edge_no]` will be used to build a
            `Ray` as explained above, else a random edge number will
            be chosen.
        frac : float, optional
            If given the point on `self.edges[edge_no]` will be situated at
            the fraction `frac` between `self.edges[edge_no].p1` and
            `self.edges[edge_no].p2` relative to p1. Must be in the open
            interval (0, 1).
        relative_phi : float, optional
            Is an angle (in degrees) that gives the direction of the
            `Ray` spawned from `self.edges[edge_no]`. It has to be in
            the open interval (0, 90). If not given a random direction will be
            chosen in the interval (0, 90).
        drelative_phi : float, optional
            Is an angle interval centered on `relative_phi` which is used to
            calculate a random relative direction for the `Ray`
            spawned from `self.edges[edge_no]` in the interval `[relative_phi -
            drelative_phi/2, relative_phi + drelative_phi/2)`. If not given
            it's assumed to be 0.
        Returns
        -------
        ret : tuple of size 2
            The first element is a list with the newly created polygons and
            the second element in the tuple is another list with the
            `Segments` that were used to divide the initial `Polygon`
            (ie. the common edge between the newly created polygons). These
            lists can be of length 0 if no division took place.
        See also
        --------
        Polygon.get_point_on_self, Segment.get_point_on_self
        """
        # final list of polygons
        polys = []
        division_segments = []
        input_obj = obj
        if input_obj:
            # if a Line-like is given then calculate the intersection
            # Points with all the edges for later use
            intersections = input_obj.intersection(self)
        else:
            # WARNING:
            # -------
            # This only works for non intersecting Polygons
            # select a random edge number and get a random Point
            # on that edge to create a random Ray. This is used
            # to build an intersection Points list with only two points
            # the randomly generated Point and the Point closest to
            # the randomly generated one. This works because we are
            # careful to generate a Ray only to the right of the segment
            if edge_no is None:
                edge_no = random.randint(0, len(self.edges) - 1)
            random_point = self.get_point_on_self(edge_no, frac)
            # generate a random angle to create a Ray which will be pointing
            # always to the right of the selected edge
            edge = self.edges[edge_no]
            if relative_phi and not (0 <= relative_phi + drelative_phi <= 180):
                raise ValueError('This has to hold: 0 <= relateive_phi +'
                                 ' drelative_phi <= 180, but got:'
                                 ' relative_phi={}, drelative_phi={}'
                                 .format(relative_phi, drelative_phi))
            if not relative_phi:
                phi = edge.phi + math.pi*random.random()
            else:
                phi = edge.phi + math.radians(relative_phi +
                                              drelative_phi*random.random())
            obj = Ray(random_point, Vector(1, phi, coordinates='polar'))
            intersections = obj.intersection(self)
            # and finally get the randomly generated Point + the first
            # intersection Point in the sorted list
            intersections = [[obj.p1, edge_no], intersections[0]]
            if edge_no > intersections[1][1]:
                # sort by edge_no if necessary
                intersections = [intersections[1], intersections[0]]
        # place the intersection Points in right positions in the new
        # vertex list and replace the edge number with the new location
        # (basically creating a new edge and pointing to that)
        all_vertices = self.vertices[:-1]
        # count is to hold how many vertices we already added in new list
        # so that the edge's number can be appropriately updated
        count = 0
        for item in intersections:
            # the position where the intersection Point will be inserted
            idx = item[1] + count + 1
            item[1] = idx
            if item[0] == self.vertices[idx - count - 1]:
                # if the intersection point coincides with the Point on the
                # Polygon behind the insertion Point then we just skip the
                # intersection Point, but alter the edge number in intersections
                # accordingly
                item[1] -= 1
                continue
            if item[0] == self.vertices[idx - count]:
                # if the intersection point coincides with the Point on the
                # Polygon after the insertion Point then we just skip
                # everything
                continue
            all_vertices.insert(idx, item[0])
            # store the new position
            # increase the counter to account for the addition of the Point
            count += 1
        # sort the Points first from top to bottom (inverse on Y) and
        # from left to right (on X) because this is the way the intersection
        # Points are used in the algorithm
        if abs(obj.p1.x - obj.p2.x) < UNCERTAINTY:
            # find if the `Line`-like is vertical and if so then
            # sort over Y
            intersections.sort(key=lambda item: item[0].y)
        else:
            intersections.sort(key=lambda item: item[0].x)
        # only after creating all_vertices list we can take care of the
        # different cases that we have regarding Segment, Ray etc. usage
        if input_obj:
            if (type(obj) is Segment) and (self.has(obj.p1) and
                                           self.has(obj.p2)):
                # remove first and last Points from intersection list
                # because the Segment has the end Points inside the Polygon
                # (the tuple target deletes index 0 first, then the last)
                del (intersections[0], intersections[-1])
            elif (type(obj) is Segment and (self.has(obj.p1) and
                                            not self.has(obj.p2))) or \
                    (type(obj) is Ray and self.has(obj.p1)):
                # remove only the point closest to obj.p1 since this point is
                # inside the Polygon
                if (obj.p1.is_left(obj.p2)):
                    del intersections[0]
                else:
                    del intersections[-1]
            elif (type(obj) is Segment) and (not self.has(obj.p1) and
                                             self.has(obj.p2)):
                # same as before except for obj.p2 now
                if obj.p2.is_left(obj.p1):
                    del intersections[-1]
                else:
                    del intersections[0]
        if intersections is None or len(intersections) < 2:
            # if we have less than two intersection Points return None
            return polys, division_segments
        # make separate lists for intersection Points and edges' number for
        # further processing
        intersection_points, edge_nos = map(list, zip(*intersections))
        # keep track of used slices
        slice_to_del = []
        # loop over the edge_nos two at a time to construct Polygons
        # determined by the intersection Points and contained within these
        # then store the slice to be removed, ie. the portion of all_vertices
        # without the interseciton Points. Example:
        # * if we have a polygon defined by [p0, i0, p1, i1, p2, p3]
        # * then edge_nos must be: [1, 3] (not necessarily in this order)
        # * first get the Polygon defined by [i0, p1, i1] then remove these
        # * Points from the list and we end up with the remaining Polygon
        # * [p0, i0, i1, p2, p3]
        for i, j in zip(edge_nos[:-1:2], edge_nos[1::2]):
            if i > j:
                i, j = j, i
            polys.append(Polygon(all_vertices[i:j+1]))
            division_segments.append(Segment(all_vertices[i], all_vertices[j]))
            # insert always at the begining because we have to delete them
            # in inverse order so that the slices make sense when selecting
            # the items from the list
            slice_to_del.insert(0, slice(i+1, j))
        for sl in slice_to_del:
            del all_vertices[sl]
        # here append the remaining Polygon
        polys.append(Polygon(all_vertices))
        return polys, division_segments
| FGCSchool-Math-Club/fgcs-math-club-2014 | Geo2D-0.1.22/build/lib.linux-x86_64-2.7/geo2d/geometry.py | Python | bsd-2-clause | 62,543 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def delete_orphan_collaborations(apps, schema_editor):
    """Delete Collaboration rows whose object_pk no longer matches an
    existing Project."""
    Project = apps.get_model('projects', 'Project')
    Collaboration = apps.get_model('structuredcollaboration', 'Collaboration')
    ContentType = apps.get_model('contenttypes', 'ContentType')
    try:
        ctype = ContentType.objects.get(model='project', app_label='projects')
    except ContentType.DoesNotExist:
        # skip this migration during unit tests
        return
    orphan_ids = []
    for collab in Collaboration.objects.filter(content_type=ctype):
        try:
            Project.objects.get(id=int(collab.object_pk))
        except Project.DoesNotExist:
            orphan_ids.append(collab.id)
    Collaboration.objects.filter(id__in=orphan_ids).delete()
class Migration(migrations.Migration):
    # Data migration: removes Collaboration rows pointing at Projects
    # that no longer exist (see delete_orphan_collaborations above).
    dependencies = [
        ('projects', '0013_auto_20151021_1438'),
        ('structuredcollaboration', '0004_auto_20151016_1401'),
        ('contenttypes', '0001_initial'),
    ]
    operations = [
        # NOTE(review): no reverse_code is supplied, so this migration
        # cannot be unapplied -- confirm that is intended.
        migrations.RunPython(delete_orphan_collaborations)
    ]
| c0cky/mediathread | mediathread/projects/migrations/0014_auto_20151104_1513.py | Python | gpl-2.0 | 1,149 |
#!/usr/bin/python
# -*- encoding: utf-8 -*-
###########################################################################
# Module Writen to OpenERP, Open Source Management Solution
# Copyright (C) OpenERP Venezuela (<http://openerp.com.ve>).
# All Rights Reserved
# Credits######################################################
# Coded by: javier@vauxoo.com
# Audited by: Vauxoo C.A.
#############################################################################
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
##########################################################################
from openerp.osv import osv, fields
class account_move_line(osv.Model):
    _inherit = 'account.move.line'
    # NOTE(review): the string below is a stray expression statement,
    # not a class docstring (it follows an assignment); it is a no-op
    # at runtime and was presumably meant to document the constraint.
    '''
    Check that the entry balance is greater than zero
    '''
    def _update_check_nonzero(self, cr, uid, ids, context=None):
        """
        Constraint hook: return False as soon as any of the given move
        lines has a zero balance (|debit - credit| == 0), True
        otherwise (including when `ids` is empty).
        """
        writeoff = 0.0
        for line in self.browse(cr, uid, ids, context=context):
            # the line's balance is the absolute debit/credit difference
            writeoff = abs(line.debit - line.credit)
            if writeoff == 0.0:
                return False
        return True
    _constraints = [
        (_update_check_nonzero,
            'You can not create an entry with zero balance !\
            Please set amount !', []),
    ]
| 3dfxsoftware/cbss-addons | account_move_nonzero/account_move_line.py | Python | gpl-2.0 | 1,858 |
import os
import boto3
from chalice import Chalice
from chalicelib import db
from chalicelib import rekognition
app = Chalice(app_name='media-query')
# Lazily-created singletons; built on first use by the getters below.
_MEDIA_DB = None
_REKOGNITION_CLIENT = None
# Only object keys ending with one of these extensions are processed.
_SUPPORTED_IMAGE_EXTENSIONS = (
    '.jpg',
    '.png',
)
def get_media_db():
    """Return the module-wide DynamoMediaDB, creating it on first call."""
    global _MEDIA_DB
    if _MEDIA_DB is None:
        table_name = os.environ['MEDIA_TABLE_NAME']
        table = boto3.resource('dynamodb').Table(table_name)
        _MEDIA_DB = db.DynamoMediaDB(table)
    return _MEDIA_DB
def get_rekognition_client():
    """Return the module-wide RekognitonClient, creating it on first call."""
    global _REKOGNITION_CLIENT
    if _REKOGNITION_CLIENT is None:
        client = boto3.client('rekognition')
        _REKOGNITION_CLIENT = rekognition.RekognitonClient(client)
    return _REKOGNITION_CLIENT
@app.on_s3_event(bucket=os.environ['MEDIA_BUCKET_NAME'],
                 events=['s3:ObjectCreated:*'])
def handle_object_created(event):
    """Index a newly uploaded S3 object if it is a supported image."""
    if not _is_image(event.key):
        return
    _handle_created_image(bucket=event.bucket, key=event.key)
@app.on_s3_event(bucket=os.environ['MEDIA_BUCKET_NAME'],
                 events=['s3:ObjectRemoved:*'])
def handle_object_removed(event):
    """Remove a deleted S3 object from the media DB if it was an image."""
    if not _is_image(event.key):
        return
    get_media_db().delete_media_file(event.key)
def _is_image(key):
    """Return True when `key` ends with a supported image extension."""
    return any(key.endswith(ext) for ext in _SUPPORTED_IMAGE_EXTENSIONS)
def _handle_created_image(bucket, key):
    """Label the image with Rekognition and record it in the media DB."""
    client = get_rekognition_client()
    labels = client.get_image_labels(bucket=bucket, key=key)
    get_media_db().add_media_file(key, media_type=db.IMAGE_TYPE, labels=labels)
| aws-samples/chalice-workshop | code/media-query/06-web-api/app.py | Python | apache-2.0 | 1,443 |
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'd F Y'
TIME_FORMAT = 'H:i:s'
DATETIME_FORMAT = 'j. F Y. H:i'
YEAR_MONTH_FORMAT = 'F Y.'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'j.m.Y.'
SHORT_DATETIME_FORMAT = 'j.m.Y. H:i'
FIRST_DAY_OF_WEEK = 1  # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
    '%d.%m.%Y.', '%d.%m.%y.', # '25.10.2006.', '25.10.06.'
    '%d. %m. %Y.', '%d. %m. %y.', # '25. 10. 2006.', '25. 10. 06.'
)
DATETIME_INPUT_FORMATS = (
    '%d.%m.%Y. %H:%M:%S', # '25.10.2006. 14:30:59'
    '%d.%m.%Y. %H:%M:%S.%f', # '25.10.2006. 14:30:59.000200'
    '%d.%m.%Y. %H:%M', # '25.10.2006. 14:30'
    '%d.%m.%Y.', # '25.10.2006.'
    '%d.%m.%y. %H:%M:%S', # '25.10.06. 14:30:59'
    '%d.%m.%y. %H:%M:%S.%f', # '25.10.06. 14:30:59.000200'
    '%d.%m.%y. %H:%M', # '25.10.06. 14:30'
    '%d.%m.%y.', # '25.10.06.'
    '%d. %m. %Y. %H:%M:%S', # '25. 10. 2006. 14:30:59'
    '%d. %m. %Y. %H:%M:%S.%f', # '25. 10. 2006. 14:30:59.000200'
    '%d. %m. %Y. %H:%M', # '25. 10. 2006. 14:30'
    '%d. %m. %Y.', # '25. 10. 2006.'
    '%d. %m. %y. %H:%M:%S', # '25. 10. 06. 14:30:59'
    '%d. %m. %y. %H:%M:%S.%f', # '25. 10. 06. 14:30:59.000200'
    '%d. %m. %y. %H:%M', # '25. 10. 06. 14:30'
    '%d. %m. %y.', # '25. 10. 06.'
)
# Number formatting: e.g. 1.234.567,89 (dot groups of three, comma decimal)
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
| edisonlz/fruit | web_project/base/site-packages/django/conf/locale/mk/formats.py | Python | apache-2.0 | 1,758 |
#!/usr/bin/python
# vim: set fileencoding=utf-8:
###########################################
#
# 目录测试(探测敏感目录是否存在)
# 1) 容错测试
# 2) 服务器指纹识别
# 3) 404重定向识别
# 4) 获得响应头
#
###########################################
import hashlib
import random
from httplib2 import Http
class Directory_testing:
def __init__(self,target):
self.target=target
# 找出404页面
def Error404(self):
# 生成随机的测试目录
text=hashlib.sha1(str(random.uniform(1,100000))).hexdigest()
try:
# 测试404的标准是什么
# 如果状态是404则证明是标准的404页面
# 如果状态是302则证明会直接跳转,就以302作为标准
# 如果状态是200的情况,则证明存在自定义的404页面,抓下来作为指标
resp,content=Http().request(self.target+'/'+text,"GET")
if resp['status']==404:
return "default"
elif resp['status']==302:
return "Jump"
elif resp['status']==200:
return content
except:
print "[*] 404 Page GET Error!"
# 获得目录测试返回的值
def get_results(self,text):
results=[]
try:
resp,content=Http().request(self.target+text,"GET")
results.append(self.target+text)
results.append(resp['status'])
results.append(resp['server'])
results.append(content)
return results
except:
print "[*] testing "+self.target+" Error!"
# 导入目录字典
def load_dictionary(self,patch):
tmp_data=[]
for line in open(patch):
tmp_data.append(line.strip("\n"))
return tmp_data
# 主程序调用
def _main(self):
if self.Error404()=="default":
self.default_mode()
# Ad-hoc manual test drive.
# NOTE(review): the loaded dictionary's return value is discarded, and
# the commented-out loop below refers to an undefined name `a` --
# presumably it was meant to iterate the load_dictionary() result.
t=Directory_testing("http://www.163.com")
t.load_dictionary("../dic/dictionary.dic")
# for i in a:
#     print t.get_results(i)
| 0xwindows/w3a_Scan_Console | module/directory-test_module.py | Python | gpl-2.0 | 1,776 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2013, 2014, 2016, 2017, 2018 Guenter Bartsch
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# simple pulseaudio recording client
#
# based on: http://freshfoo.com/blog/pulseaudio_monitoring
import ctypes
import threading
import logging
import time
import numpy as np
from builtins import str as text, range
from nltools.vad import BUFFER_DURATION
SOURCE_TIMEOUT = 30 # 3 seconds
PA_INVALID_INDEX = 4294967295 # ((uint32_t) -1)
pa = ctypes.cdll.LoadLibrary('libpulse.so.0')
class pa_proplist(ctypes.Structure):
pass
pa_encoding = ctypes.c_int # enum
pa_encoding_t = pa_encoding
class pa_format_info(ctypes.Structure):
pass
pa_format_info._fields_ = [
('encoding', pa_encoding_t),
('plist', ctypes.POINTER(pa_proplist)),
]
class pa_context(ctypes.Structure):
pass
pa_context._fields_ = [ ]
pa_context_notify_cb_t = ctypes.CFUNCTYPE(None, ctypes.POINTER(pa_context), ctypes.c_void_p)
pa_context_success_cb_t = ctypes.CFUNCTYPE(None, ctypes.POINTER(pa_context), ctypes.c_int, ctypes.c_void_p)
pa_sample_format = ctypes.c_int # enum
pa_sample_format_t = pa_sample_format
pa_format_info_set_sample_format = pa.pa_format_info_set_sample_format
pa_format_info_set_sample_format.restype = None
pa_format_info_set_sample_format.argtypes = [ctypes.POINTER(pa_format_info), pa_sample_format_t]
class pa_sink_port_info(ctypes.Structure):
pass
pa_sink_port_info._fields_ = [
('name', ctypes.c_char_p),
('description', ctypes.c_char_p),
('priority', ctypes.c_uint32),
('available', ctypes.c_int),
]
class pa_sink_info(ctypes.Structure):
pass
class pa_sample_spec(ctypes.Structure):
pass
pa_sample_spec._fields_ = [
('format', pa_sample_format_t),
('rate', ctypes.c_uint32),
('channels', ctypes.c_uint8),
]
class pa_source_info(ctypes.Structure):
pass
pa_channel_position = ctypes.c_int # enum
pa_channel_position_t = pa_channel_position
class pa_channel_map(ctypes.Structure):
pass
pa_channel_map._fields_ = [
('channels', ctypes.c_uint8),
('map', pa_channel_position_t * 32),
]
class pa_cvolume(ctypes.Structure):
pass
pa_volume_t = ctypes.c_uint32
pa_cvolume._fields_ = [
('channels', ctypes.c_uint8),
('values', pa_volume_t * 32),
]
pa_source_flags = ctypes.c_int # enum
pa_source_flags_t = pa_source_flags
pa_source_state = ctypes.c_int # enum
pa_source_state_t = pa_source_state
class pa_source_port_info(ctypes.Structure):
pass
pa_source_port_info._fields_ = [
('name', ctypes.c_char_p),
('description', ctypes.c_char_p),
('priority', ctypes.c_uint32),
('available', ctypes.c_int),
]
pa_source_info._fields_ = [
('name', ctypes.c_char_p),
('index', ctypes.c_uint32),
('description', ctypes.c_char_p),
('sample_spec', pa_sample_spec),
('channel_map', pa_channel_map),
('owner_module', ctypes.c_uint32),
('volume', pa_cvolume),
('mute', ctypes.c_int),
('monitor_of_sink', ctypes.c_uint32),
('monitor_of_sink_name', ctypes.c_char_p),
('latency', ctypes.c_uint64),
('driver', ctypes.c_char_p),
('flags', pa_source_flags_t),
('proplist', ctypes.POINTER(pa_proplist)),
('configured_latency', ctypes.c_uint64),
('base_volume', pa_volume_t),
('state', pa_source_state_t),
('n_volume_steps', ctypes.c_uint32),
('card', ctypes.c_uint32),
('n_ports', ctypes.c_uint32),
('ports', ctypes.POINTER(ctypes.POINTER(pa_source_port_info))),
('active_port', ctypes.POINTER(pa_source_port_info)),
('n_formats', ctypes.c_uint8),
('formats', ctypes.POINTER(ctypes.POINTER(pa_format_info))),
]
pa_source_info_cb_t = ctypes.CFUNCTYPE(None, ctypes.POINTER(pa_context), ctypes.POINTER(pa_source_info), ctypes.c_int, ctypes.c_void_p)
class pa_stream(ctypes.Structure):
    # Opaque record/playback stream handle.
    pass
pa_stream._fields_ = [
]
# Callback type for pa_stream_set_read_callback():
# (stream, nbytes, userdata) -> None
pa_stream_request_cb_t = ctypes.CFUNCTYPE(None, ctypes.POINTER(pa_stream), ctypes.c_size_t, ctypes.c_void_p)
class pa_threaded_mainloop(ctypes.Structure):
    # Opaque mainloop that runs the PA event loop in a background thread.
    pass
pa_threaded_mainloop._fields_ = [
]
# Function prototypes: restype/argtypes are declared so ctypes performs
# the proper conversions instead of guessing int-sized arguments.
pa_threaded_mainloop_new = pa.pa_threaded_mainloop_new
pa_threaded_mainloop_new.restype = ctypes.POINTER(pa_threaded_mainloop)
pa_threaded_mainloop_new.argtypes = []
class pa_mainloop_api(ctypes.Structure):
    # Opaque abstract mainloop vtable.
    pass
pa_threaded_mainloop_get_api = pa.pa_threaded_mainloop_get_api
pa_threaded_mainloop_get_api.restype = ctypes.POINTER(pa_mainloop_api)
pa_threaded_mainloop_get_api.argtypes = [ctypes.POINTER(pa_threaded_mainloop)]
pa_context_new = pa.pa_context_new
pa_context_new.restype = ctypes.POINTER(pa_context)
pa_context_new.argtypes = [ctypes.POINTER(pa_mainloop_api), ctypes.c_char_p]
pa_context_set_state_callback = pa.pa_context_set_state_callback
pa_context_set_state_callback.restype = None
pa_context_set_state_callback.argtypes = [ctypes.POINTER(pa_context), pa_context_notify_cb_t, ctypes.c_void_p]
pa_context_flags = ctypes.c_int # enum
pa_context_flags_t = pa_context_flags
class pa_spawn_api(ctypes.Structure):
    # Opaque; only passed as a (NULL) pointer to pa_context_connect.
    pass
pa_context_connect = pa.pa_context_connect
pa_context_connect.restype = ctypes.c_int
pa_context_connect.argtypes = [ctypes.POINTER(pa_context), ctypes.c_char_p, pa_context_flags_t, ctypes.POINTER(pa_spawn_api)]
pa_threaded_mainloop_start = pa.pa_threaded_mainloop_start
pa_threaded_mainloop_start.restype = ctypes.c_int
pa_threaded_mainloop_start.argtypes = [ctypes.POINTER(pa_threaded_mainloop)]
pa_threaded_mainloop_lock = pa.pa_threaded_mainloop_lock
pa_threaded_mainloop_lock.restype = None
pa_threaded_mainloop_lock.argtypes = [ctypes.POINTER(pa_threaded_mainloop)]
pa_context_disconnect = pa.pa_context_disconnect
pa_context_disconnect.restype = None
pa_context_disconnect.argtypes = [ctypes.POINTER(pa_context)]
pa_context_unref = pa.pa_context_unref
pa_context_unref.restype = None
pa_context_unref.argtypes = [ctypes.POINTER(pa_context)]
pa_threaded_mainloop_unlock = pa.pa_threaded_mainloop_unlock
pa_threaded_mainloop_unlock.restype = None
pa_threaded_mainloop_unlock.argtypes = [ctypes.POINTER(pa_threaded_mainloop)]
pa_threaded_mainloop_stop = pa.pa_threaded_mainloop_stop
pa_threaded_mainloop_stop.restype = None
pa_threaded_mainloop_stop.argtypes = [ctypes.POINTER(pa_threaded_mainloop)]
pa_threaded_mainloop_free = pa.pa_threaded_mainloop_free
pa_threaded_mainloop_free.restype = None
pa_threaded_mainloop_free.argtypes = [ctypes.POINTER(pa_threaded_mainloop)]
pa_context_get_state = pa.pa_context_get_state
pa_context_get_state.restype = ctypes.c_int
pa_context_get_state.argtypes = [ctypes.POINTER(pa_context)]
# Constant values copied from the PulseAudio headers.
# NOTE(review): PA_CONTEXT_READY is 4 (not 3) because the enum contains a
# SETTING_NAME state in between - values presumably mirror pulse/def.h;
# verify against the installed libpulse version.
PA_CONTEXT_NOFLAGS = 0
PA_CONTEXT_NOFAIL = 2
PA_CONTEXT_NOAUTOSPAWN = 1
PA_CONTEXT_UNCONNECTED = 0
PA_CONTEXT_CONNECTING = 1
PA_CONTEXT_AUTHORIZING = 2
PA_CONTEXT_READY = 4
PA_CONTEXT_FAILED = 5
PA_CONTEXT_TERMINATED = 6
class pa_operation(ctypes.Structure):
    # Opaque async operation handle; must be unref'ed after use.
    pass
pa_context_get_source_info_list = pa.pa_context_get_source_info_list
pa_context_get_source_info_list.restype = ctypes.POINTER(pa_operation)
pa_context_get_source_info_list.argtypes = [ctypes.POINTER(pa_context), pa_source_info_cb_t, ctypes.c_void_p]
# 100% volume (0x10000).
PA_VOLUME_NORM = 65536
pa_context_set_source_volume_by_index = pa.pa_context_set_source_volume_by_index
pa_context_set_source_volume_by_index.restype = ctypes.POINTER(pa_operation)
pa_context_set_source_volume_by_index.argtypes = [ctypes.POINTER(pa_context), ctypes.c_uint32, ctypes.POINTER(pa_cvolume), pa_context_success_cb_t, ctypes.c_void_p]
pa_operation_unref = pa.pa_operation_unref
pa_operation_unref.restype = None
pa_operation_unref.argtypes = [ctypes.POINTER(pa_operation)]
# pa_sample_format_t values (pulse/sample.h).
PA_SAMPLE_INVALID = -1
PA_SAMPLE_U8 = 0
PA_SAMPLE_ALAW = 1
PA_SAMPLE_ULAW = 2
PA_SAMPLE_S16LE = 3
PA_SAMPLE_S16BE = 4
PA_SAMPLE_FLOAT32LE = 5
PA_SAMPLE_FLOAT32BE = 6
PA_SAMPLE_S32LE = 7
PA_SAMPLE_S32BE = 8
PA_SAMPLE_S24LE = 9
PA_SAMPLE_S24BE = 10
PA_SAMPLE_S24_32LE = 11
PA_SAMPLE_S24_32BE = 12
PA_SAMPLE_MAX = 13
pa_stream_new = pa.pa_stream_new
pa_stream_new.restype = ctypes.POINTER(pa_stream)
pa_stream_new.argtypes = [ctypes.POINTER(pa_context), ctypes.c_char_p, ctypes.POINTER(pa_sample_spec), ctypes.POINTER(pa_channel_map)]
pa_stream_set_read_callback = pa.pa_stream_set_read_callback
pa_stream_set_read_callback.restype = None
pa_stream_set_read_callback.argtypes = [ctypes.POINTER(pa_stream), pa_stream_request_cb_t, ctypes.c_void_p]
# pa_stream_flags_t bit (pulse/def.h): honor buffer_attr latency hints.
PA_STREAM_ADJUST_LATENCY = 8192
pa_stream_flags = ctypes.c_int # enum
pa_stream_flags_t = pa_stream_flags
class pa_buffer_attr(ctypes.Structure):
    # Server-side buffer metrics; (uint32)-1 means "server default".
    pass
pa_buffer_attr._fields_ = [
    ('maxlength', ctypes.c_uint32),
    ('tlength', ctypes.c_uint32),
    ('prebuf', ctypes.c_uint32),
    ('minreq', ctypes.c_uint32),
    ('fragsize', ctypes.c_uint32),
]
pa_stream_connect_record = pa.pa_stream_connect_record
pa_stream_connect_record.restype = ctypes.c_int
pa_stream_connect_record.argtypes = [ctypes.POINTER(pa_stream), ctypes.c_char_p, ctypes.POINTER(pa_buffer_attr), pa_stream_flags_t]
pa_stream_peek = pa.pa_stream_peek
pa_stream_peek.restype = ctypes.c_int
pa_stream_peek.argtypes = [ctypes.POINTER(pa_stream), ctypes.POINTER(ctypes.c_void_p), ctypes.POINTER(ctypes.c_size_t)]
pa_stream_drop = pa.pa_stream_drop
pa_stream_drop.restype = ctypes.c_int
pa_stream_drop.argtypes = [ctypes.POINTER(pa_stream)]
def null_cb(a=None, b=None, c=None, d=None):
    """No-op PulseAudio success callback; accepts and ignores up to
    four arguments."""
    return None
# Channel selection applied when capturing audio:
MIX_MODE_BOTH = 0
MIX_MODE_LEFT = 1
MIX_MODE_RIGHT = 2
# Capture volume in percent of PA_VOLUME_NORM.
DEFAULT_VOLUME = 100
# Sample rate in Hz.
DEFAULT_RATE = 16000
# Client name reported to the PulseAudio server.
DEFAULT_NAME = b'Python PulseRecorder'
# Samples per buffer handed out by get_samples(); BUFFER_DURATION is
# defined earlier in this module (milliseconds, presumably - TODO confirm).
DEFAULT_FRAMES_PER_BUFFER = int(DEFAULT_RATE * BUFFER_DURATION / 1000)
DEFAULT_MIX_MODE = MIX_MODE_BOTH
class PulseRecorder(object):
    """Record 16 bit signed audio from a PulseAudio source.

    The capture source is auto-detected when recording starts: all
    non-monitor sources are scored (a description match against
    ``source_name`` and the presence of a 'mic' port raise the score)
    and the highest-scoring source wins.  Recorded samples are handed
    out as fixed-size numpy int16 buffers via :meth:`get_samples`.
    """

    def __init__(self, volume=DEFAULT_VOLUME, rate=DEFAULT_RATE, source_name=None):
        """Prepare callbacks and synchronisation primitives; no
        PulseAudio connection is made until start_recording().

        volume      -- capture volume in percent of PA_VOLUME_NORM
        rate        -- sample rate in Hz
        source_name -- optional substring preferred in the source
                       description during auto-detection
        """
        self.match_source_name = source_name
        self.rate = rate
        self.volume = volume
        self.source_idx = -1
        self.source_score = 0
        self.source_log = False
        self.source_name = ''
        self.source_description = ''
        # Wrap callback methods in ctypes function objects so the
        # Pulseaudio C API can call them; keeping the references on
        # self prevents them from being garbage collected while PA
        # still holds the function pointers.
        self._context_notify_cb = pa_context_notify_cb_t(self.context_notify_cb)
        self._source_info_cb = pa_source_info_cb_t(self.source_info_cb)
        self._stream_read_cb = pa_stream_request_cb_t(self.stream_read_cb)
        self._null_cb = pa_context_success_cb_t(null_cb)
        # lock/cond protect self._buffers and self._cur_buf_cnt, shared
        # between the PA mainloop thread and get_samples() consumers.
        self._lock = threading.Lock()
        self._cond = threading.Condition(self._lock)

    def start_recording(self, frames_per_buffer = DEFAULT_FRAMES_PER_BUFFER, mix_mode = DEFAULT_MIX_MODE):
        """Connect to PulseAudio, auto-detect a source and start
        capturing.

        Raises Exception when no suitable source is detected within
        SOURCE_TIMEOUT * 0.1 seconds.
        """
        logging.debug("start_recording...")
        self._frames_per_buffer = frames_per_buffer
        self._mix_mode = mix_mode
        # For LEFT/RIGHT we must capture both channels and pick one;
        # for BOTH we record mono and let PulseAudio downmix.
        self._record_stereo = mix_mode != MIX_MODE_BOTH
        self._buffers = []
        self._cur_buf_cnt = 0
        self.source_idx = -1
        self.source_score = 0
        self.source_log = False
        self.source_name = ''
        self.source_description = ''
        self._buffers.append(np.empty(self._frames_per_buffer, dtype=np.int16))
        self._mainloop = pa_threaded_mainloop_new()
        _mainloop_api = pa_threaded_mainloop_get_api(self._mainloop)
        self._context = pa_context_new(_mainloop_api, DEFAULT_NAME)
        pa_context_set_state_callback(self._context, self._context_notify_cb, None)
        pa_context_connect(self._context, None, 0, None)
        pa_threaded_mainloop_start(self._mainloop)
        # Source detection happens asynchronously in context_notify_cb/
        # source_info_cb; poll for it in 100ms steps.
        cnt = 0
        while (self.source_idx < 0) and (cnt < SOURCE_TIMEOUT):
            cnt += 1
            time.sleep(0.1)
        if self.source_idx < 0:
            raise Exception("Pulserecorder: no suitable input source found.")

    def stop_recording(self):
        """Disconnect from PulseAudio and free the threaded mainloop."""
        logging.debug("stop_recording...")
        pa_threaded_mainloop_lock(self._mainloop)
        pa_context_disconnect(self._context)
        pa_context_unref(self._context)
        pa_threaded_mainloop_unlock(self._mainloop)
        pa_threaded_mainloop_stop(self._mainloop)
        pa_threaded_mainloop_free(self._mainloop)
        self.source_idx = -1

    def context_notify_cb(self, context, _):
        """PA context state callback: once the connection is ready,
        kick off asynchronous source enumeration."""
        state = pa_context_get_state(context)
        if state == PA_CONTEXT_READY:
            logging.debug("Pulseaudio connection ready...")
            o = pa_context_get_source_info_list(context, self._source_info_cb, None)
            pa_operation_unref(o)
        elif state == PA_CONTEXT_FAILED:
            logging.error("Connection failed")
        elif state == PA_CONTEXT_TERMINATED:
            logging.debug("Connection terminated")

    def source_info_cb(self, context, source_info_p, eol, __):
        """PA source enumeration callback.

        Called once per source with eol==0 (score the source) and a
        final time with eol!=0 (set the volume on the winning source
        and connect the record stream to it).
        """
        logging.debug("source_info_cb... eol: %d" % eol)
        if eol:
            if self.source_idx < 0:
                # FIX: bail out here instead of falling through - the
                # original code went on to set the volume on an invalid
                # index and to connect a stream to an empty source
                # name.  start_recording() raises after its timeout.
                logging.error("Pulserecorder: no suitable input source found.")
                return
            if not self.source_log:
                logging.info(u'audio source: %s' % self.source_description.decode('utf8', 'ignore'))
                logging.debug(u'name: %s' % text(self.source_name))
                self.source_log = True
            #
            # set volume first
            #
            cvol = pa_cvolume()
            cvol.channels = 1
            cvol.values[0] = int((self.volume * PA_VOLUME_NORM) / 100)
            operation = pa_context_set_source_volume_by_index(self._context, self.source_idx, cvol, self._null_cb, None)
            pa_operation_unref(operation)
            logging.debug('recording from %s' % self.source_name)
            samplespec = pa_sample_spec()
            samplespec.channels = 2 if self._record_stereo else 1
            samplespec.format = PA_SAMPLE_S16LE
            samplespec.rate = self.rate
            # local name 'stream' avoids shadowing the module-level
            # pa_stream structure class
            stream = pa_stream_new(context, b"pulserecorder", samplespec, None)
            pa_stream_set_read_callback(stream,
                                        self._stream_read_cb,
                                        self.source_idx)
            # flags = PA_STREAM_NOFLAGS
            flags = PA_STREAM_ADJUST_LATENCY
            # fragsize in bytes: 2 bytes per sample, doubled for stereo
            fragsize = self._frames_per_buffer * 2
            if self._record_stereo:
                fragsize *= 2
            # -1 (== (uint32)-1) selects the server default for the
            # remaining buffer attributes
            buffer_attr = pa_buffer_attr(-1, -1, -1, -1, fragsize=fragsize)
            pa_stream_connect_record(stream,
                                     self.source_name,
                                     buffer_attr,
                                     flags)
            return
        if not source_info_p:
            return
        source_info = source_info_p.contents
        logging.debug('index : %d' % source_info.index)
        logging.debug('name : %s' % source_info.name)
        logging.debug('description : %s' % source_info.description)
        logging.debug('monitor of : %d' % source_info.monitor_of_sink)
        if source_info.monitor_of_sink != PA_INVALID_INDEX:
            # Monitors (loopbacks of playback sinks) are not useful as
            # microphone inputs.
            logging.debug("ignoring source: monitor")
            return
        # microphone source auto-detection magic: every real source
        # gets a base score, description matches and mic ports add more
        score = 1
        if self.match_source_name and (text(self.match_source_name) in text(source_info.description)):
            score += 100
        if source_info.ports:
            score += 1
            for pi in range(source_info.n_ports):
                if text('mic') in text(source_info.ports[pi].contents.name):
                    logging.debug("mic port found")
                    score += 1
                    break
        logging.debug('source score: %d, highest score so far: %d' % (score, self.source_score))
        if score > self.source_score:
            self.source_idx = source_info.index
            self.source_score = score
            self.source_name = source_info.name
            self.source_description = source_info.description

    def stream_read_cb(self, stream, length, index_incr):
        """PA read callback: copy `length` bytes of captured audio into
        int16 buffers, selecting one channel according to the mix mode,
        and wake up consumers blocked in get_samples()."""
        data = ctypes.c_void_p()
        # c_size_t matches the declared argtypes of pa_stream_peek
        # (the original passed c_ulong, which only works where
        # c_size_t happens to be c_ulong)
        pa_stream_peek(stream, data, ctypes.c_size_t(length))
        data = ctypes.cast(data, ctypes.POINTER(ctypes.c_ubyte))
        bytes_per_sample = 4 if self._record_stereo else 2
        num_samples = int(length / bytes_per_sample)
        # Channel byte offsets are loop-invariant - compute them once.
        # For MIX_MODE_BOTH we record mono, so 0/1 address the only
        # channel; in stereo recordings 0/1 is left and 2/3 is right.
        if self._mix_mode == MIX_MODE_RIGHT:
            off_low, off_high = 2, 3
        else:
            off_low, off_high = 0, 1
        with self._lock:
            for i in range(num_samples):
                base = i * bytes_per_sample
                # assemble the little-endian 16 bit sample and fold it
                # into the signed range explicitly - newer numpy
                # versions refuse to wrap out-of-range python ints
                sample = data[base + off_low] + 256 * data[base + off_high]
                if sample >= 32768:
                    sample -= 65536
                self._buffers[-1][self._cur_buf_cnt] = sample
                self._cur_buf_cnt += 1
                # buffer full? append a fresh one and notify consumers
                if self._cur_buf_cnt >= self._frames_per_buffer:
                    self._buffers.append(np.empty(self._frames_per_buffer, dtype=np.int16))
                    self._cur_buf_cnt = 0
                    self._cond.notify_all()
        pa_stream_drop(stream)

    def get_samples(self):
        """Block until a completed buffer is available and return it
        (numpy int16 array of frames_per_buffer samples)."""
        with self._lock:
            # the newest buffer is still being filled; only hand out
            # completed ones
            while len(self._buffers) < 2:
                self._cond.wait()
            buf = self._buffers.pop(0)
        return buf
| gooofy/py-nltools | nltools/pulserecorder.py | Python | apache-2.0 | 18,079 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from math import ceil
import unittest
from hwt.hdl.types.bits import Bits
from hwt.hdl.types.struct import HStruct
from hwt.simulator.simTestCase import SimTestCase
from hwtHls.platform.virtual import VirtualHlsPlatform
from hwtLib.amba.axis import axis_send_bytes
from pyMathBitPrecise.bit_utils import int_to_int_list, mask
from tests.io.axiStream.axisParseIf import AxiSParse2If
from hwtSimApi.utils import freq_to_period
class AxiSParseIfTC(SimTestCase):
    """Simulation tests for AxiSParse2If across several AXI-stream data
    widths and clock frequencies."""
    def _test_AxiSParse2If(self, DATA_WIDTH:int, freq=int(1e6), N=16):
        """Send N randomly chosen frames to the DUT and compare its
        output stream against the expected payload values.

        :param DATA_WIDTH: AXI-stream data width of the DUT in bits
        :param freq: DUT clock frequency in Hz
        :param N: number of random frames to send
        """
        u = AxiSParse2If()
        u.DATA_WIDTH = DATA_WIDTH
        u.CLK_FREQ = freq
        self.compileSimAndStart(u, target_platform=VirtualHlsPlatform())
        # Three frame layouts: 16 bit v0 header followed by an
        # 8/16/32 bit v1 payload.  NOTE(review): v0 is filled with the
        # byte size of v1 below, so the parser presumably switches on
        # it - confirm against AxiSParse2If.
        T1 = HStruct(
            (Bits(16), "v0"),
            (Bits(8), "v1"),
        )
        T2 = HStruct(
            (Bits(16), "v0"),
            (Bits(16), "v1"),
        )
        T4 = HStruct(
            (Bits(16), "v0"),
            (Bits(32), "v1"),
        )
        ref = []
        ALL_Ts = [T1, T2, T4]
        for _ in range(N):
            T = self._rand.choice(ALL_Ts)
            v1_t = T.field_by_name["v1"].dtype
            v1 = self._rand.getrandbits(v1_t.bit_length())
            d = {
                # v0 carries the byte length of the payload
                "v0": v1_t.bit_length() // 8,
                "v1": v1
            }
            # only 16/32 bit payloads are expected on the output; the
            # 8 bit frames (T1) are apparently discarded by the DUT
            if v1_t.bit_length() in (16, 32):
                ref.append(v1)
            # serialize the struct value into a flat bit vector, then
            # into bytes, and feed it to the DUT input agent
            v = T.from_py(d)
            w = v._dtype.bit_length()
            v = v._reinterpret_cast(Bits(w))
            v.vld_mask = mask(w)
            v = int(v)
            data = int_to_int_list(v, 8, ceil(T.bit_length() / 8))
            axis_send_bytes(u.i, data)
        # simulation time: generous bound derived from the number of
        # queued input words
        t = int(freq_to_period(freq)) * (len(u.i._ag.data) + 5) * 2
        self.runSim(t)
        self.assertValSequenceEqual(u.o._ag.data, ref, "%r [%s] != [%s]" % (
            u.o,
            ", ".join("0x%x" % int(i) if i._is_full_valid() else repr(i) for i in u.o._ag.data),
            ", ".join("0x%x" % i for i in ref)
        ))
    # The tests below only vary the data width and clock frequency.
    def test_AxiSParse2If_8b_1MHz(self):
        self._test_AxiSParse2If(8)
    def test_AxiSParse2If_16b_1MHz(self):
        self._test_AxiSParse2If(16)
    def test_AxiSParse2If_24b_1MHz(self):
        self._test_AxiSParse2If(24)
    def test_AxiSParse2If_48b_1MHz(self):
        self._test_AxiSParse2If(48)
    def test_AxiSParse2If_512b_1MHz(self):
        self._test_AxiSParse2If(512)
    def test_AxiSParse2If_8b_40MHz(self):
        self._test_AxiSParse2If(8, freq=int(40e6))
    def test_AxiSParse2If_16b_40MHz(self):
        self._test_AxiSParse2If(16, freq=int(40e6))
    def test_AxiSParse2If_24b_40MHz(self):
        self._test_AxiSParse2If(24, freq=int(40e6))
    def test_AxiSParse2If_48b_40MHz(self):
        self._test_AxiSParse2If(48, freq=int(40e6))
    def test_AxiSParse2If_512b_40MHz(self):
        self._test_AxiSParse2If(512, freq=int(40e6))
    def test_AxiSParse2If_8b_100MHz(self):
        self._test_AxiSParse2If(8, freq=int(100e6))
    def test_AxiSParse2If_16b_100MHz(self):
        self._test_AxiSParse2If(16, freq=int(100e6))
    def test_AxiSParse2If_24b_100MHz(self):
        self._test_AxiSParse2If(24, freq=int(100e6))
    def test_AxiSParse2If_48b_100MHz(self):
        self._test_AxiSParse2If(48, freq=int(100e6))
    def test_AxiSParse2If_512b_100MHz(self):
        self._test_AxiSParse2If(512, freq=int(100e6))
if __name__ == '__main__':
    # Run the whole test case (or a single test via the commented line).
    # unittest.makeSuite was deprecated and removed in Python 3.13;
    # TestLoader.loadTestsFromTestCase is the supported equivalent.
    suite = unittest.TestSuite()
    # suite.addTest(AxiSParseIfTC('test_AxiSParse2If_8b_1MHz'))
    suite.addTest(unittest.TestLoader().loadTestsFromTestCase(AxiSParseIfTC))
    runner = unittest.TextTestRunner(verbosity=3)
    runner.run(suite)
| Nic30/hwtHls | tests/io/axiStream/axisParseIf_test.py | Python | mit | 3,663 |
import shutil
from pprint import pprint
import pandas as pd
import csv
import pickle
import inspect, os
import requests
from os import listdir
import numpy as np
import subprocess
from luigi import six
from sklearn.decomposition import NMF
from sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer
from sklearn.naive_bayes import MultinomialNB | felipegerard/arte_mexicano_antiguo | montactuaria/Analisis_access_log/luigi/ functions/functions.py | Python | agpl-3.0 | 362 |
# -*- coding: utf-8 -*-
"""
hog
~~~
Sending multiple HTTP requests ON GREEN thread.
:copyright: (c) 2014-2019 by Park Hyunwoo.
:license: MIT, see LICENSE for more details.
"""
from six import itervalues, iteritems
from six.moves import xrange
import eventlet
eventlet.monkey_patch()
import click
import re
import requests
import sys
import time
from collections import defaultdict
# Horizontal rule used to separate report sections on stdout.
HR = '-' * 79
# Percentiles reported in the response-time distribution table.
PERCENTAGE = [50, 66, 75, 80, 90, 95, 98, 99, 100, ]
class HogResult(object):
    """Aggregated outcome of a single Hog run."""

    def __init__(self):
        super(HogResult, self).__init__()
        # Wall-clock duration of the whole run, in seconds.
        self.elapsed = 0
        # Total number of requests scheduled for the run.
        self.requests = 0
        # Maps status code (or pseudo status) -> per-request seconds.
        self.responses = defaultdict(list)
        # Elapsed seconds of every response with a 2xx/3xx status.
        self.succeed_responses = []

    @property
    def ok(self):
        """True when every scheduled request succeeded."""
        succeeded = len(self.succeed_responses)
        return succeeded == self.requests
class Hog(object):
    """Issue many HTTP requests concurrently on eventlet green threads
    and collect the outcomes in a HogResult."""

    # Pseudo status codes used as keys in HogResult.responses for
    # requests that never produced an HTTP status.
    STATUS_TIMEOUT = -1
    STATUS_FAILED = -2

    def __init__(self, callback=None):
        super(Hog, self).__init__()
        # Optional progress hook, invoked with the (mutable) HogResult
        # once up front and after every completed request.
        self.callback = callback

    def fetch(self):
        """Perform a single request and record its outcome in
        self.result."""
        elapsed = 0
        try:
            if self.method == 'GET':
                r = requests.get(
                    self.url,
                    params=self.params,
                    headers=self.headers,
                    timeout=self.timeout
                )
            else:
                r = requests.post(
                    self.url,
                    data=self.params,
                    headers=self.headers,
                    timeout=self.timeout
                )
            status = r.status_code
            elapsed = r.elapsed.total_seconds()
            if 200 <= status < 400:
                self.result.succeed_responses.append(elapsed)
        except requests.exceptions.Timeout:
            # FIX: catch Timeout before ConnectionError - ConnectTimeout
            # derives from BOTH, so with the original ordering connect
            # timeouts were misclassified as failures.
            status = self.STATUS_TIMEOUT
        except requests.exceptions.ConnectionError:
            status = self.STATUS_FAILED
        self.result.responses[status].append(elapsed)
        if self.callback:
            self.callback(self.result)

    def run(self, url, params=None, headers=None, method='GET',
            timeout=5, concurrency=10, requests=100, limit=0):
        """Fire `requests` requests at `url` and return the HogResult.

        Note: the `requests` parameter shadows the requests module
        inside this method only; fetch() still sees the module.
        """
        self.url = url
        self.params = params
        self.headers = headers
        self.method = method
        self.timeout = timeout
        self.result = HogResult()
        self.result.requests = requests
        if self.callback:
            self.callback(self.result)
        pool = eventlet.GreenPool(int(concurrency))
        start = time.time()
        if limit == 0:
            # Unthrottled: let the pool schedule everything at once.
            for _ in pool.imap(lambda x: self.fetch(),
                               xrange(int(requests))):
                pass
        else:
            # Throttled: spawn one request every 1/limit seconds.
            interval = 1.0 / limit
            for i in xrange(int(requests)):
                pool.spawn_n(self.fetch)
                time.sleep(interval)
            pool.waitall()
        self.result.elapsed = time.time() - start
        return self.result
def run(url, params=None, headers=None, method='GET',
        timeout=5, concurrency=10, requests=100, limit=0, callback=None):
    """Convenience wrapper: build a Hog with `callback` and execute a
    single run with the given parameters."""
    hog_instance = Hog(callback)
    return hog_instance.run(url, params, headers, method,
                            timeout, concurrency, requests, limit)
def parse_from_list_and_file(lst, filename):
    """Build a dict from ``key=value`` strings in *lst* and, when
    *filename* is given, from the lines of that file.

    Lines that do not match ``key=value`` are silently skipped; later
    entries overwrite earlier ones.

    FIX: the original did ``lst += [...]`` which raised TypeError when
    callers passed a tuple (click's ``multiple=True`` yields tuples)
    together with a file, and mutated the caller's list.  Work on a
    local copy instead.
    """
    res = {}
    pairs = list(lst) if lst else []
    if filename:
        with open(filename, 'r') as fh:
            pairs.extend(line.rstrip('\r\n') for line in fh)
    for param in pairs:
        m = re.match(r'(?P<key>[^=]+)=(?P<value>.+)', param)
        if m:
            res[m.group('key')] = m.group('value')
    return res
def callback(result):
    """Render a textual progress bar for the run on stdout.

    FIX: use dict.values() instead of six.itervalues - equivalent on
    both Python 2 and 3, drops the needless six helper.
    """
    done = sum(len(times) for times in result.responses.values())
    percent = done * 100 / result.requests
    # 70-column bar, overwritten in place via the trailing \r.
    bar = '=' * int(0.7 * percent)
    sys.stdout.write(" [{:<70}] {:>3}%\r".format(bar, percent))
    sys.stdout.flush()
def print_result(result):
    """Pretty-print a HogResult: per-status summary, response-time
    distribution of succeeded requests, errors and total time.

    FIX: use dict.items() instead of six.iteritems - equivalent on
    both Python 2 and 3, drops the needless six helper.
    """
    # Print out results
    click.echo(HR)
    click.echo("STATUS\tCOUNT\tAVERAGE")
    click.echo(HR)
    for status, elapsed_times in result.responses.items():
        if status <= 0:
            # Pseudo statuses (timeouts/failures) are reported below.
            continue
        count = len(elapsed_times)
        click.echo("{:>6}{:>7}{:>10.2f}ms".format(
            status, count, sum(elapsed_times) * 1000 / count
        ))
    # Print distribution
    if result.succeed_responses:
        click.echo(HR)
        click.echo("Response time distribution of succeed requests")
        elapsed_sorted = sorted(result.succeed_responses)
        for p in PERCENTAGE:
            # index of the p-th percentile sample
            c = (len(elapsed_sorted) * p / 100) - 1
            click.echo("{:>12}%{:>10.2f}ms".format(p, elapsed_sorted[int(c)] * 1000))
    # Print errors and summary; -1/-2 are Hog.STATUS_TIMEOUT/_FAILED
    click.echo(HR)
    if result.responses.get(-1):
        click.echo(">>> {} request(s) timed out".format(len(result.responses[-1])))
    if result.responses.get(-2):
        click.echo(">>> {} request(s) failed".format(len(result.responses[-2])))
    click.echo("total time elapsed {:.4f}s".format(result.elapsed))
@click.command()
@click.option('-c', '--concurrency', type=int, default=10, help='Number of threads')
@click.option('-n', '--requests', type=int, default=100, help='Number of requests')
@click.option('-l', '--limit', type=int, default=0, help='Limit requests per second (0=unlimited)')
@click.option('-t', '--timeout', type=int, default=5, help='Timeout limit in seconds')
@click.option('-p', '--params', multiple=True, help='Parameters (in key=value format)')
@click.option('-f', '--paramfile', help='File contains parameters (multiple key=value)')
@click.option('-H', '--headers', multiple=True, help='Custom headers (in key=value format)')
@click.option('-F', '--headerfile', help='File contains custom headers (multiple key=value)')
@click.option('-m', '--method', type=click.Choice(['GET', 'POST']), default='GET', help='Method to be used (GET,POST)')
@click.argument('url')
def hog(concurrency, requests, limit, timeout,
        params, paramfile, headers, headerfile, method, url):
    '''Sending multiple `HTTP` requests `ON` `GREEN` thread'''
    # Merge command-line and file-supplied key=value pairs.
    # NOTE(review): click's multiple=True yields tuples - verify that
    # parse_from_list_and_file accepts a tuple when a file is also given.
    params = parse_from_list_and_file(params, paramfile)
    headers = parse_from_list_and_file(headers, headerfile)
    # Running information
    click.echo(HR)
    click.echo("Hog is running with {} threads, ".format(concurrency) +
               "{} requests ".format(requests) +
               "and timeout in {} second(s).".format(timeout))
    if limit != 0:
        click.echo(">>> Limit: {} request(s) per second.".format(limit))
    click.echo(HR)
    # Let's begin!
    # `callback` renders the progress bar as requests complete.
    result = Hog(callback).run(url, params, headers, method, timeout, concurrency, requests, limit)
    sys.stdout.write("\n")
    print_result(result)
if __name__ == '__main__':
    hog()
| lqez/hog | hog/hog.py | Python | mit | 6,591 |
try:
import facebook # noqa F401
except ImportError:
from PokeAlarm.Utils import pip_install
pip_install('facebook-sdk', '2.0.0')
from FacebookPageAlarm import FacebookPageAlarm # noqa 401
| neskk/PokeAlarm | PokeAlarm/Alarms/FacebookPage/__init__.py | Python | agpl-3.0 | 205 |
# -*- coding: utf-8 -*-
import json
import mimetypes
import os
from datetime import datetime
from django import forms
from django.conf import settings
from django.core.validators import URLValidator
from django.forms import widgets
from django.forms.extras.widgets import SelectDateWidget
from django.forms.models import modelformset_factory
from django.template.defaultfilters import filesizeformat
from django.utils import six
from django.utils.functional import lazy
from django.utils.safestring import mark_safe
from django.utils.translation import trans_real as translation
import commonware
import happyforms
import waffle
from jinja2 import escape as jinja2_escape
from jinja2.filters import do_dictsort
from mpconstants import regions as mpconstants_regions
from quieter_formset.formset import BaseModelFormSet
from tower import ugettext as _, ugettext_lazy as _lazy, ungettext as ngettext
import lib.iarc
import mkt
from lib.video import tasks as vtasks
from mkt import get_user
from mkt.access import acl
from mkt.api.models import Access
from mkt.constants import (CATEGORY_CHOICES, MAX_PACKAGED_APP_SIZE,
ratingsbodies)
from mkt.developers.utils import prioritize_app
from mkt.files.models import FileUpload
from mkt.files.utils import SafeUnzip, WebAppParser
from mkt.regions import REGIONS_CHOICES_SORTED_BY_NAME
from mkt.regions.utils import parse_region
from mkt.reviewers.models import RereviewQueue
from mkt.site.fields import SeparatedValuesField
from mkt.site.forms import AddonChoiceField
from mkt.site.utils import remove_icons, slug_validator, slugify
from mkt.tags.models import Tag
from mkt.tags.utils import can_edit_restricted_tags, clean_tags
from mkt.translations.fields import TransField
from mkt.translations.forms import TranslationFormMixin
from mkt.translations.models import Translation
from mkt.translations.widgets import TranslationTextarea, TransTextarea
from mkt.versions.models import Version
from mkt.webapps.models import (AddonUser, BlockedSlug, IARCInfo, Preview,
Webapp)
from mkt.webapps.tasks import (index_webapps, set_storefront_data,
update_manifests)
from . import tasks
log = commonware.log.getLogger('mkt.developers')
def region_error(region):
return forms.ValidationError(_('You cannot select {region}.').format(
region=unicode(parse_region(region).name)
))
def toggle_app_for_special_regions(request, app, enabled_regions=None):
"""Toggle for special regions (e.g., China)."""
if not waffle.flag_is_active(request, 'special-regions'):
return
for region in mkt.regions.SPECIAL_REGIONS:
status = app.geodata.get_status(region)
if enabled_regions is not None:
if region.id in enabled_regions:
# If it's not already enabled, mark as pending.
if status != mkt.STATUS_PUBLIC:
# Developer requested for it to be in China.
status = mkt.STATUS_PENDING
value, changed = app.geodata.set_status(region, status)
if changed:
log.info(u'[Webapp:%s] App marked as pending '
u'special region (%s).' % (app, region.slug))
value, changed = app.geodata.set_nominated_date(
region, save=True)
log.info(u'[Webapp:%s] Setting nomination date to '
u'now for region (%s).' % (app, region.slug))
else:
# Developer cancelled request for approval.
status = mkt.STATUS_NULL
value, changed = app.geodata.set_status(
region, status, save=True)
if changed:
log.info(u'[Webapp:%s] App marked as null special '
u'region (%s).' % (app, region.slug))
if status == mkt.STATUS_PUBLIC:
# Reviewer approved for it to be in China.
aer = app.addonexcludedregion.filter(region=region.id)
if aer.exists():
aer.delete()
log.info(u'[Webapp:%s] App included in new special '
u'region (%s).' % (app, region.slug))
else:
# Developer requested for it to be in China.
aer, created = app.addonexcludedregion.get_or_create(
region=region.id)
if created:
log.info(u'[Webapp:%s] App excluded from new special '
u'region (%s).' % (app, region.slug))
class AuthorForm(happyforms.ModelForm):
def clean_user(self):
user = self.cleaned_data['user']
if not user.read_dev_agreement:
raise forms.ValidationError(
_('All team members must have read and agreed to the '
'developer agreement.'))
return user
class Meta:
model = AddonUser
exclude = ('addon',)
class BaseModelFormSet(BaseModelFormSet):
"""
Override the parent's is_valid to prevent deleting all forms.
"""
def is_valid(self):
# clean() won't get called in is_valid() if all the rows are getting
# deleted. We can't allow deleting everything.
rv = super(BaseModelFormSet, self).is_valid()
return rv and not any(self.errors) and not bool(self.non_form_errors())
class BaseAuthorFormSet(BaseModelFormSet):
def clean(self):
if any(self.errors):
return
# cleaned_data could be None if it's the empty extra form.
data = filter(None, [f.cleaned_data for f in self.forms
if not f.cleaned_data.get('DELETE', False)])
if not any(d['role'] == mkt.AUTHOR_ROLE_OWNER for d in data):
raise forms.ValidationError(_('Must have at least one owner.'))
if not any(d['listed'] for d in data):
raise forms.ValidationError(
_('At least one team member must be listed.'))
users = [d['user'] for d in data]
if sorted(users) != sorted(set(users)):
raise forms.ValidationError(
_('A team member can only be listed once.'))
AuthorFormSet = modelformset_factory(AddonUser, formset=BaseAuthorFormSet,
form=AuthorForm, can_delete=True, extra=0)
class DeleteForm(happyforms.Form):
reason = forms.CharField(required=False)
def __init__(self, request):
super(DeleteForm, self).__init__(request.POST)
def trap_duplicate(request, manifest_url):
# See if this user has any other apps with the same manifest.
owned = (request.user.addonuser_set
.filter(addon__manifest_url=manifest_url))
if not owned:
return
try:
app = owned[0].addon
except Webapp.DoesNotExist:
return
error_url = app.get_dev_url()
msg = None
if app.status == mkt.STATUS_PUBLIC:
msg = _(u'Oops, looks like you already submitted that manifest '
'for %s, which is currently public. '
'<a href="%s">Edit app</a>')
elif app.status == mkt.STATUS_PENDING:
msg = _(u'Oops, looks like you already submitted that manifest '
'for %s, which is currently pending. '
'<a href="%s">Edit app</a>')
elif app.status == mkt.STATUS_NULL:
msg = _(u'Oops, looks like you already submitted that manifest '
'for %s, which is currently incomplete. '
'<a href="%s">Resume app</a>')
elif app.status == mkt.STATUS_REJECTED:
msg = _(u'Oops, looks like you already submitted that manifest '
'for %s, which is currently rejected. '
'<a href="%s">Edit app</a>')
elif app.status == mkt.STATUS_DISABLED:
msg = _(u'Oops, looks like you already submitted that manifest '
'for %s, which is currently banned on Marketplace. '
'<a href="%s">Edit app</a>')
elif app.disabled_by_user:
msg = _(u'Oops, looks like you already submitted that manifest '
'for %s, which is currently disabled. '
'<a href="%s">Edit app</a>')
if msg:
return msg % (jinja2_escape(app.name), error_url)
def verify_app_domain(manifest_url, exclude=None, packaged=False):
if packaged or waffle.switch_is_active('webapps-unique-by-domain'):
domain = Webapp.domain_from_url(manifest_url)
qs = Webapp.objects.filter(app_domain=domain)
if exclude:
qs = qs.exclude(pk=exclude.pk)
if qs.exists():
raise forms.ValidationError(
_('An app already exists on this domain; '
'only one app per domain is allowed.'))
class PreviewForm(happyforms.ModelForm):
file_upload = forms.FileField(required=False)
upload_hash = forms.CharField(required=False)
# This lets us POST the data URIs of the unsaved previews so we can still
# show them if there were form errors.
unsaved_image_data = forms.CharField(required=False,
widget=forms.HiddenInput)
unsaved_image_type = forms.CharField(required=False,
widget=forms.HiddenInput)
def save(self, addon, commit=True):
if self.cleaned_data:
self.instance.addon = addon
if self.cleaned_data.get('DELETE'):
# Existing preview.
if self.instance.id:
self.instance.delete()
# User has no desire to save this preview.
return
super(PreviewForm, self).save(commit=commit)
if self.cleaned_data['upload_hash']:
upload_hash = self.cleaned_data['upload_hash']
upload_path = os.path.join(settings.TMP_PATH, 'preview',
upload_hash)
filetype = (os.path.splitext(upload_hash)[1][1:]
.replace('-', '/'))
if filetype in mkt.VIDEO_TYPES:
self.instance.update(filetype=filetype)
vtasks.resize_video.delay(upload_path, self.instance.pk,
user_pk=mkt.get_user().pk)
else:
self.instance.update(filetype='image/png')
tasks.resize_preview.delay(upload_path, self.instance.pk,
set_modified_on=[self.instance])
class Meta:
model = Preview
fields = ('file_upload', 'upload_hash', 'id', 'position')
class JSONField(forms.Field):
def to_python(self, value):
if value == '':
return None
try:
if isinstance(value, basestring):
return json.loads(value)
except ValueError:
pass
return value
class JSONMultipleChoiceField(forms.MultipleChoiceField, JSONField):
widget = forms.CheckboxSelectMultiple
class AdminSettingsForm(PreviewForm):
    """Admin-only app settings form, layered on top of PreviewForm.

    Edits the app's promo `Preview` plus admin flags (VIP, priority review,
    region banners, Mozilla contact).
    """
    DELETE = forms.BooleanField(required=False)
    mozilla_contact = SeparatedValuesField(forms.EmailField, separator=',',
                                           required=False)
    vip_app = forms.BooleanField(required=False)
    priority_review = forms.BooleanField(required=False)
    banner_regions = JSONMultipleChoiceField(
        required=False, choices=mkt.regions.REGIONS_CHOICES_NAME)
    banner_message = TransField(required=False)

    class Meta:
        model = Preview
        fields = ('file_upload', 'upload_hash', 'position')

    def __init__(self, *args, **kw):
        # Note that this form is not inheriting from AddonFormBase, so we have
        # to get rid of 'version' ourselves instead of letting the parent class
        # do it.
        kw.pop('version', None)

        # Get the object for the app's promo `Preview` and pass it to the form.
        if kw.get('instance'):
            addon = kw.pop('instance')
            self.instance = addon
            self.promo = addon.get_promo()

        self.request = kw.pop('request', None)

        # Note: After calling `super`, `self.instance` becomes the `Preview`
        # object.
        super(AdminSettingsForm, self).__init__(*args, **kw)

        # NOTE(review): `addon` is unbound if no 'instance' kwarg was passed —
        # confirm callers always provide one.
        self.initial['vip_app'] = addon.vip_app
        self.initial['priority_review'] = addon.priority_review

        if self.instance:
            self.initial['mozilla_contact'] = addon.mozilla_contact
            self.initial['banner_regions'] = addon.geodata.banner_regions or []
            self.initial['banner_message'] = addon.geodata.banner_message_id

    @property
    def regions_by_id(self):
        """Mapping of region id -> region object, for template use."""
        return mkt.regions.REGIONS_CHOICES_ID_DICT

    def clean_position(self):
        # The promo preview is pinned to the sentinel position -1.
        return -1

    def clean_banner_regions(self):
        try:
            regions = map(int, self.cleaned_data.get('banner_regions'))
        except (TypeError, ValueError):
            # input data is not a list or data contains non-integers.
            raise forms.ValidationError(_('Invalid region(s) selected.'))
        return list(regions)

    def clean_mozilla_contact(self):
        contact = self.cleaned_data.get('mozilla_contact')
        # Normalize a missing contact to the empty string.
        if self.cleaned_data.get('mozilla_contact') is None:
            return u''
        return contact

    def save(self, addon, commit=True):
        # Delete the promo, replace it, or create it, depending on input.
        if (self.cleaned_data.get('DELETE') and
                'upload_hash' not in self.changed_data and self.promo.id):
            self.promo.delete()
        elif self.promo and 'upload_hash' in self.changed_data:
            self.promo.delete()
        elif self.cleaned_data.get('upload_hash'):
            super(AdminSettingsForm, self).save(addon, True)

        updates = {
            'vip_app': self.cleaned_data.get('vip_app'),
        }
        contact = self.cleaned_data.get('mozilla_contact')
        if contact is not None:
            updates['mozilla_contact'] = contact
        if (self.cleaned_data.get('priority_review') and
                not addon.priority_review):
            # addon.priority_review gets updated within prioritize_app().
            prioritize_app(addon, self.request.user)
        else:
            updates['priority_review'] = self.cleaned_data.get(
                'priority_review')
        addon.update(**updates)

        geodata = addon.geodata
        geodata.banner_regions = self.cleaned_data.get('banner_regions')
        geodata.banner_message = self.cleaned_data.get('banner_message')
        geodata.save()

        # NOTE(review): this form declares no 'flash' field, so this is
        # always None and resets uses_flash to False — confirm intended.
        uses_flash = self.cleaned_data.get('flash')
        af = addon.get_latest_file()
        if af is not None:
            af.update(uses_flash=bool(uses_flash))

        index_webapps.delay([addon.id])

        return addon
class BasePreviewFormSet(BaseModelFormSet):
    """Formset of previews that requires at least one kept upload."""

    def clean(self):
        """Reject the formset unless some member form keeps an upload."""
        # Individual form errors take precedence; let them surface first.
        if any(self.errors):
            return
        has_upload = any(
            member.cleaned_data.get('upload_hash') is not None and
            not member.cleaned_data.get('DELETE')
            for member in self.forms)
        if not has_upload:
            raise forms.ValidationError(
                _('You must upload at least one screenshot or video.'))
# Formset used to manage an app's previews: one blank extra form is always
# rendered and existing previews can be deleted.
PreviewFormSet = modelformset_factory(Preview, formset=BasePreviewFormSet,
                                      form=PreviewForm, can_delete=True,
                                      extra=1)
class NewManifestForm(happyforms.Form):
    """Form accepting a hosted app's manifest URL for submission."""
    manifest = forms.URLField()

    def __init__(self, *args, **kwargs):
        # Standalone validation skips the app-domain uniqueness check.
        self.is_standalone = kwargs.pop('is_standalone', False)
        super(NewManifestForm, self).__init__(*args, **kwargs)

    def clean_manifest(self):
        """Return the manifest URL, verifying its domain unless standalone."""
        url = self.cleaned_data['manifest']
        if self.is_standalone:
            return url
        verify_app_domain(url)
        return url
class NewPackagedAppForm(happyforms.Form):
    """Form validating a packaged-app (.zip) upload.

    On success, `self.file_upload` is set to a new `FileUpload`. On failure,
    the errors are persisted to a `FileUpload` record and a ValidationError
    is raised (see `persist_errors`).
    """
    upload = forms.FileField()

    def __init__(self, *args, **kwargs):
        self.max_size = kwargs.pop('max_size', MAX_PACKAGED_APP_SIZE)
        self.user = kwargs.pop('user', get_user())
        self.addon = kwargs.pop('addon', None)
        self.file_upload = None
        super(NewPackagedAppForm, self).__init__(*args, **kwargs)

    def clean_upload(self):
        """Validate size, zip integrity, manifest and (optionally) origin."""
        upload = self.cleaned_data['upload']
        errors = []

        if upload.size > self.max_size:
            errors.append({
                'type': 'error',
                'message': _('Packaged app too large for submission. Packages '
                             'must be smaller than %s.' % filesizeformat(
                                 self.max_size)),
                'tier': 1,
            })
            # Immediately raise an error, do not process the rest of the view,
            # which would read the file.
            raise self.persist_errors(errors, upload)

        manifest = None
        # Fix: pre-initialize so the `finally` below cannot raise NameError
        # when SafeUnzip() itself fails.
        safe_zip = None
        try:
            # Be careful to keep this as in-memory zip reading.
            safe_zip = SafeUnzip(upload, 'r')
            safe_zip.is_valid()  # Will throw ValidationError if necessary.
            manifest = safe_zip.extract_path('manifest.webapp')
        except forms.ValidationError as e:
            errors.append({
                'type': 'error',
                'message': ''.join(e.messages),
                'tier': 1,
            })
        except Exception:
            errors.append({
                'type': 'error',
                'message': _('Error extracting manifest from zip file.'),
                'tier': 1,
            })
        finally:
            if safe_zip is not None:
                safe_zip.close()

        origin = None
        if manifest:
            try:
                origin = WebAppParser.decode_manifest(manifest).get('origin')
            except forms.ValidationError as e:
                errors.append({
                    'type': 'error',
                    'message': ''.join(e.messages),
                    'tier': 1,
                })

        if origin:
            try:
                verify_app_domain(origin, packaged=True, exclude=self.addon)
            # Fix: `except ... as e` (was the legacy `except ..., e` form),
            # consistent with the other handlers in this method.
            except forms.ValidationError as e:
                errors.append({
                    'type': 'error',
                    'message': ''.join(e.messages),
                    'tier': 1,
                })

        if errors:
            raise self.persist_errors(errors, upload)

        # Everything passed validation.
        self.file_upload = FileUpload.from_post(
            upload, upload.name, upload.size, user=self.user)

    def persist_errors(self, errors, upload):
        """
        Persist the error with this into FileUpload (but do not persist
        the file contents, which are too large) and return a ValidationError.
        """
        validation = {
            'errors': len(errors),
            'success': False,
            'messages': errors,
        }

        self.file_upload = FileUpload.objects.create(
            user=self.user, name=getattr(upload, 'name', ''),
            validation=json.dumps(validation))

        # Return a ValidationError to be raised by the view.
        return forms.ValidationError(' '.join(e['message'] for e in errors))
class AddonFormBase(TranslationFormMixin, happyforms.ModelForm):
    """Base ModelForm for app-editing forms; strips request/version kwargs."""

    def __init__(self, *args, **kw):
        self.request = kw.pop('request')
        self.version = kw.pop('version', None)
        super(AddonFormBase, self).__init__(*args, **kw)

    class Meta:
        # Bug fix: this read `models = Webapp`, a typo that Django silently
        # ignores (the ModelForm option is `model`). Subclasses declare their
        # own Meta with `model`, so they are unaffected by this correction.
        model = Webapp
        fields = ('name', 'slug')
class AppFormBasic(AddonFormBase):
    """Form to edit basic app info."""
    slug = forms.CharField(max_length=30, widget=forms.TextInput)
    manifest_url = forms.URLField()
    hosted_url = forms.CharField(
        label=_lazy(u'Hosted URL:'), required=False,
        help_text=_lazy(
            u'A URL to where your app is hosted on the web, if it exists. This'
            u' allows users to try out your app before installing it.'))
    description = TransField(
        required=True,
        label=_lazy(u'Provide a detailed description of your app'),
        help_text=_lazy(u'This description will appear on the details page.'),
        widget=TransTextarea)
    tags = forms.CharField(
        label=_lazy(u'Search Keywords:'), required=False,
        widget=forms.Textarea(attrs={'rows': 3}),
        help_text=_lazy(
            u'The search keywords are used to return search results in the '
            u'Firefox Marketplace. Be sure to include a keywords that '
            u'accurately reflect your app.'))

    class Meta:
        model = Webapp
        fields = ('slug', 'manifest_url', 'hosted_url', 'description', 'tags')

    def __init__(self, *args, **kw):
        # Force the form to use app_slug. We want to keep
        # this under "slug" so all the js continues to work.
        kw.setdefault('initial', {})['slug'] = kw['instance'].app_slug
        super(AppFormBasic, self).__init__(*args, **kw)

        self.old_manifest_url = self.instance.manifest_url

        if self.instance.is_packaged:
            # Manifest URL cannot be changed for packaged apps.
            del self.fields['manifest_url']

        self.initial['tags'] = ', '.join(self.get_tags(self.instance))

    def clean_tags(self):
        return clean_tags(self.request, self.cleaned_data['tags'])

    def get_tags(self, addon):
        # Restricted tags are only visible with the appropriate permission.
        if can_edit_restricted_tags(self.request):
            return list(addon.tags.values_list('tag_text', flat=True))
        else:
            return list(addon.tags.filter(restricted=False)
                        .values_list('tag_text', flat=True))

    def _post_clean(self):
        # Switch slug to app_slug in cleaned_data and self._meta.fields so
        # we can update the app_slug field for webapps.
        try:
            self._meta.fields = list(self._meta.fields)
            slug_idx = self._meta.fields.index('slug')
            data = self.cleaned_data
            if 'slug' in data:
                data['app_slug'] = data.pop('slug')
            self._meta.fields[slug_idx] = 'app_slug'
            super(AppFormBasic, self)._post_clean()
        finally:
            # Restore the original field name for subsequent passes.
            self._meta.fields[slug_idx] = 'slug'

    def clean_slug(self):
        slug = self.cleaned_data['slug']
        slug_validator(slug, lower=False)

        if slug != self.instance.app_slug:
            if Webapp.objects.filter(app_slug=slug).exists():
                raise forms.ValidationError(
                    _('This slug is already in use. Please choose another.'))
            if BlockedSlug.blocked(slug):
                raise forms.ValidationError(_('The slug cannot be "%s". '
                                              'Please choose another.' % slug))

        # NOTE(review): the comparisons above are case-sensitive while the
        # stored value is lowercased here — confirm this is intended.
        return slug.lower()

    def save(self, addon, commit=False):
        # We ignore `commit`, since we need it to be `False` so we can save
        # the ManyToMany fields on our own.
        addonform = super(AppFormBasic, self).save(commit=False)
        addonform.save()

        if 'manifest_url' in self.changed_data:
            before_url = self.old_manifest_url
            after_url = self.cleaned_data['manifest_url']

            # If a non-admin edited the manifest URL, add to Re-review Queue.
            if not acl.action_allowed(self.request, 'Admin', '%'):
                log.info(u'[Webapp:%s] (Re-review) Manifest URL changed '
                         u'from %s to %s'
                         % (self.instance, before_url, after_url))

                msg = (_(u'Manifest URL changed from {before_url} to '
                         u'{after_url}')
                       .format(before_url=before_url, after_url=after_url))

                RereviewQueue.flag(self.instance,
                                   mkt.LOG.REREVIEW_MANIFEST_URL_CHANGE, msg)

            # Refetch the new manifest.
            log.info('Manifest %s refreshed for %s'
                     % (addon.manifest_url, addon))
            update_manifests.delay([self.instance.id])

        # Diff old vs. new keywords and apply only the changes.
        tags_new = self.cleaned_data['tags']
        tags_old = [slugify(t, spaces=True) for t in self.get_tags(addon)]

        add_tags = set(tags_new) - set(tags_old)
        del_tags = set(tags_old) - set(tags_new)

        # Add new tags.
        for t in add_tags:
            Tag(tag_text=t).save_tag(addon)

        # Remove old tags.
        for t in del_tags:
            Tag(tag_text=t).remove_tag(addon)

        return addonform
class AppFormDetails(AddonFormBase):
    """Form to edit an app's default locale, homepage and privacy policy."""
    LOCALES = [(translation.to_locale(k).replace('_', '-'), v)
               for k, v in do_dictsort(settings.LANGUAGES)]
    default_locale = forms.TypedChoiceField(required=False, choices=LOCALES)
    homepage = TransField.adapt(forms.URLField)(required=False)
    privacy_policy = TransField(
        widget=TransTextarea(), required=True,
        label=_lazy(u"Please specify your app's Privacy Policy"))

    class Meta:
        model = Webapp
        fields = ('default_locale', 'homepage', 'privacy_policy')

    def clean(self):
        # Make sure we have the required translations in the new locale.
        required = ['name', 'description']
        data = self.cleaned_data
        if not self.errors and 'default_locale' in self.changed_data:
            fields = dict((k, getattr(self.instance, k + '_id'))
                          for k in required)
            locale = data['default_locale']
            ids = filter(None, fields.values())
            # Find which of the required translations exist in the new locale.
            qs = (Translation.objects.filter(locale=locale, id__in=ids,
                                             localized_string__isnull=False)
                  .values_list('id', flat=True))
            missing = [k for k, v in fields.items() if v not in qs]
            if missing:
                raise forms.ValidationError(
                    _('Before changing your default locale you must have a '
                      'name and description in that locale. '
                      'You are missing %s.') % ', '.join(map(repr, missing)))
        return data
class AppFormMedia(AddonFormBase):
    """Form to update an app's icon from a previously uploaded temp file."""
    icon_upload_hash = forms.CharField(required=False)
    unsaved_icon_data = forms.CharField(required=False,
                                        widget=forms.HiddenInput)

    class Meta:
        model = Webapp
        fields = ('icon_upload_hash', 'icon_type')

    def save(self, addon, commit=True):
        if self.cleaned_data['icon_upload_hash']:
            upload_hash = self.cleaned_data['icon_upload_hash']
            upload_path = os.path.join(settings.TMP_PATH, 'icon', upload_hash)

            dirname = addon.get_icon_dir()
            destination = os.path.join(dirname, '%s' % addon.id)

            remove_icons(destination)
            # Resizing runs asynchronously in a celery task.
            tasks.resize_icon.delay(upload_path, destination,
                                    mkt.CONTENT_ICON_SIZES,
                                    set_modified_on=[addon])

        return super(AppFormMedia, self).save(commit)
class AppSupportFormMixin(object):
    """Mixin enforcing that at least one of support email/URL is provided."""

    def get_default_translation_for(self, field_name):
        """
        Return the cleaned_data for the specified field_name, using the
        field's default_locale.
        """
        locale = self.fields[field_name].default_locale
        translations = self.cleaned_data.get(field_name, {})
        return translations.get(locale, '')

    def clean_support_fields(self):
        """Add a combined error unless a support email or URL was given."""
        names = ('support_email', 'support_url')
        if any(name in self._errors for name in names):
            # A field-level error already exists; at least one field was
            # filled in, the user just needs to correct it.
            return
        if any(self.get_default_translation_for(name) for name in names):
            return
        # Mark both fields invalid, but put the combined message on a
        # synthetic 'support' key so the template can show it only once.
        self._errors['support'] = self.error_class(
            [_('You must provide either a website, an email, or both.')])
        self._errors['support_email'] = self.error_class([''])
        self._errors['support_url'] = self.error_class([''])

    def clean(self):
        data = super(AppSupportFormMixin, self).clean()
        self.clean_support_fields()
        return data
class AppFormSupport(AppSupportFormMixin, AddonFormBase):
    """Edit form for support email/URL; the mixin requires at least one."""
    support_url = TransField.adapt(forms.URLField)(required=False)
    support_email = TransField.adapt(forms.EmailField)(required=False)

    class Meta:
        model = Webapp
        fields = ('support_email', 'support_url')
class AppAppealForm(happyforms.Form):
    """
    If a developer's app is rejected he can make changes and request
    another review.
    """
    notes = forms.CharField(
        label=_lazy(u'Your comments'),
        required=False, widget=forms.Textarea(attrs={'rows': 2}))

    def __init__(self, *args, **kw):
        self.product = kw.pop('product', None)
        super(AppAppealForm, self).__init__(*args, **kw)

    def save(self):
        """Log the resubmission and reset app/file status to unreviewed."""
        version = self.product.versions.latest()
        notes = self.cleaned_data['notes']
        if notes:
            mkt.log(mkt.LOG.WEBAPP_RESUBMIT, self.product, version,
                    details={'comments': notes})
        else:
            mkt.log(mkt.LOG.WEBAPP_RESUBMIT, self.product, version)
        # Mark app and file as pending again.
        self.product.update(status=mkt.WEBAPPS_UNREVIEWED_STATUS)
        version.all_files[0].update(status=mkt.WEBAPPS_UNREVIEWED_STATUS)
        return version
class PublishForm(happyforms.Form):
    """Form controlling an app's visibility (published/unlisted/private)."""
    # Publish choice wording is slightly different here than with the
    # submission flow because the app may have already been published.
    mark_safe_lazy = lazy(mark_safe, six.text_type)

    PUBLISH_CHOICES = (
        (mkt.PUBLISH_IMMEDIATE,
         mark_safe_lazy(_lazy(
             u'<b>Published</b>: Visible to everyone in the Marketplace and '
             u'included in search results and listing pages.'))),
        (mkt.PUBLISH_HIDDEN,
         mark_safe_lazy(_lazy(
             u'<b>Unlisted</b>: Visible to only people with the URL and '
             u'does not appear in search results and listing pages.'))),
    )

    # Used for setting initial form values.
    PUBLISH_MAPPING = {
        mkt.STATUS_PUBLIC: mkt.PUBLISH_IMMEDIATE,
        mkt.STATUS_UNLISTED: mkt.PUBLISH_HIDDEN,
        mkt.STATUS_APPROVED: mkt.PUBLISH_PRIVATE,
    }
    # Use in form processing to set status.
    STATUS_MAPPING = dict((v, k) for k, v in PUBLISH_MAPPING.items())

    publish_type = forms.TypedChoiceField(
        required=False, choices=PUBLISH_CHOICES, widget=forms.RadioSelect(),
        initial=0, coerce=int, label=_lazy('App Visibility:'))
    limited = forms.BooleanField(
        required=False, label=_lazy(
            u'<b>Limit to my team</b>: Visible to only Team Members.'))

    def __init__(self, *args, **kwargs):
        self.addon = kwargs.pop('addon')
        super(PublishForm, self).__init__(*args, **kwargs)

        limited = False
        publish = self.PUBLISH_MAPPING.get(self.addon.status,
                                           mkt.PUBLISH_IMMEDIATE)
        if self.addon.status == mkt.STATUS_APPROVED:
            # Special case if app is currently private.
            limited = True
            publish = mkt.PUBLISH_HIDDEN

        # Determine the current selection via STATUS to publish choice mapping.
        self.fields['publish_type'].initial = publish
        self.fields['limited'].initial = limited

        # Make the limited label safe so we can display the HTML.
        self.fields['limited'].label = mark_safe(self.fields['limited'].label)

    def save(self):
        """Apply the chosen visibility and trigger the follow-up updates."""
        publish = self.cleaned_data['publish_type']
        limited = self.cleaned_data['limited']

        # "Unlisted" + "limit to my team" means private.
        if publish == mkt.PUBLISH_HIDDEN and limited:
            publish = mkt.PUBLISH_PRIVATE

        status = self.STATUS_MAPPING[publish]
        self.addon.update(status=status)

        mkt.log(mkt.LOG.CHANGE_STATUS, self.addon.get_status_display(),
                self.addon)

        # Call update_version, so various other bits of data update.
        self.addon.update_version()

        # Call to update names and locales if changed.
        self.addon.update_name_from_package_manifest()
        self.addon.update_supported_locales()

        set_storefront_data.delay(self.addon.pk)
class RegionForm(forms.Form):
    """Form controlling which regions an app is listed in.

    Handles the restricted/unrestricted toggle, per-region exclusions, and
    special regions (e.g. China) that need explicit approval.
    """
    regions = forms.MultipleChoiceField(
        required=False, choices=[], widget=forms.CheckboxSelectMultiple,
        label=_lazy(u'Choose the regions your app will be listed in:'),
        error_messages={'required':
                        _lazy(u'You must select at least one region.')})
    special_regions = forms.MultipleChoiceField(
        required=False, widget=forms.CheckboxSelectMultiple,
        choices=[(x.id, x.name) for x in mkt.regions.SPECIAL_REGIONS])
    enable_new_regions = forms.BooleanField(
        required=False, label=_lazy(u'Enable new regions'))
    restricted = forms.TypedChoiceField(
        required=False, initial=0, coerce=int,
        choices=[(0, _lazy('Make my app available in most regions')),
                 (1, _lazy('Choose where my app is made available'))],
        widget=forms.RadioSelect(attrs={'class': 'choices'}))

    def __init__(self, *args, **kw):
        self.product = kw.pop('product', None)
        self.request = kw.pop('request', None)
        super(RegionForm, self).__init__(*args, **kw)

        self.fields['regions'].choices = REGIONS_CHOICES_SORTED_BY_NAME()

        # This is the list of the user's exclusions as we don't
        # want the user's choices to be altered by external
        # exclusions e.g. payments availability.
        user_exclusions = list(
            self.product.addonexcludedregion.values_list('region', flat=True)
        )

        # If we have excluded regions, uncheck those.
        # Otherwise, default to everything checked.
        self.regions_before = self.product.get_region_ids(
            restofworld=True,
            excluded=user_exclusions
        )

        self.initial = {
            'regions': sorted(self.regions_before),
            'restricted': int(self.product.geodata.restricted),
            'enable_new_regions': self.product.enable_new_regions,
        }

        # The checkboxes for special regions are
        #
        # - checked ... if an app has not been requested for approval in
        #   China or the app has been rejected in China.
        #
        # - unchecked ... if an app has been requested for approval in
        #   China or the app has been approved in China.
        unchecked_statuses = (mkt.STATUS_NULL, mkt.STATUS_REJECTED)

        for region in self.special_region_objs:
            if self.product.geodata.get_status(region) in unchecked_statuses:
                # If it's rejected in this region, uncheck its checkbox.
                if region.id in self.initial['regions']:
                    self.initial['regions'].remove(region.id)
            elif region.id not in self.initial['regions']:
                # If it's pending/public, check its checkbox.
                self.initial['regions'].append(region.id)

    @property
    def regions_by_id(self):
        # Mapping of region id -> region object.
        return mkt.regions.REGIONS_CHOICES_ID_DICT

    @property
    def special_region_objs(self):
        return mkt.regions.SPECIAL_REGIONS

    @property
    def special_region_ids(self):
        return mkt.regions.SPECIAL_REGION_IDS

    @property
    def low_memory_regions(self):
        # True if any known region is flagged as low-memory.
        return any(region.low_memory for region in self.regions_by_id.values())

    @property
    def special_region_statuses(self):
        """Returns the null/pending/public status for each region."""
        statuses = {}
        for region in self.special_region_objs:
            statuses[region.id] = self.product.geodata.get_status_slug(region)
        return statuses

    @property
    def special_region_messages(self):
        """Returns the L10n messages for each region's status."""
        return self.product.geodata.get_status_messages()

    def is_toggling(self):
        # Returns 'free'/'paid' when the POST is a payment-type toggle,
        # otherwise False.
        if not self.request or not hasattr(self.request, 'POST'):
            return False
        value = self.request.POST.get('toggle-paid')
        return value if value in ('free', 'paid') else False

    def _product_is_paid(self):
        return (self.product.premium_type in mkt.ADDON_PREMIUMS or
                self.product.premium_type == mkt.ADDON_FREE_INAPP)

    def clean_regions(self):
        regions = self.cleaned_data['regions']
        if not self.is_toggling():
            if not regions:
                raise forms.ValidationError(
                    _('You must select at least one region.'))
        return regions

    def save(self):
        # Don't save regions if we are toggling.
        if self.is_toggling():
            return

        regions = [int(x) for x in self.cleaned_data['regions']]
        special_regions = [
            int(x) for x in self.cleaned_data['special_regions']
        ]
        restricted = int(self.cleaned_data['restricted'] or 0)

        if restricted:
            before = set(self.regions_before)
            after = set(regions)

            log.info(u'[Webapp:%s] App marked as restricted.' % self.product)

            # Add new region exclusions.
            to_add = before - after
            for region in to_add:
                aer, created = self.product.addonexcludedregion.get_or_create(
                    region=region)
                if created:
                    log.info(u'[Webapp:%s] Excluded from new region (%s).'
                             % (self.product, region))

            # Remove old region exclusions.
            to_remove = after - before
            for region in to_remove:
                self.product.addonexcludedregion.filter(
                    region=region).delete()
                log.info(u'[Webapp:%s] No longer excluded from region (%s).'
                         % (self.product, region))

            # If restricted, check how we should handle new regions.
            if self.cleaned_data['enable_new_regions']:
                self.product.update(enable_new_regions=True)
                log.info(u'[Webapp:%s] will be added to future regions.'
                         % self.product)
            else:
                self.product.update(enable_new_regions=False)
                log.info(u'[Webapp:%s] will not be added to future regions.'
                         % self.product)
        else:
            # If not restricted, set `enable_new_regions` to True and remove
            # currently excluded regions.
            self.product.update(enable_new_regions=True)
            self.product.addonexcludedregion.all().delete()
            log.info(u'[Webapp:%s] App marked as unrestricted.' % self.product)

        self.product.geodata.update(restricted=restricted)

        # Toggle region exclusions/statuses for special regions (e.g., China).
        toggle_app_for_special_regions(self.request, self.product,
                                       special_regions)
class CategoryForm(happyforms.Form):
    """Form for choosing an app's categories, capped at mkt.MAX_CATEGORIES."""
    categories = forms.MultipleChoiceField(label=_lazy(u'Categories'),
                                           choices=CATEGORY_CHOICES,
                                           widget=forms.CheckboxSelectMultiple)

    def __init__(self, *args, **kw):
        self.request = kw.pop('request', None)
        self.product = kw.pop('product', None)
        super(CategoryForm, self).__init__(*args, **kw)

        existing = self.product.categories
        self.cats_before = list(existing) if existing else []
        self.initial['categories'] = self.cats_before

    def max_categories(self):
        """Maximum number of categories an app may have (template helper)."""
        return mkt.MAX_CATEGORIES

    def clean_categories(self):
        """Reject selections exceeding the category limit."""
        chosen = self.cleaned_data['categories']
        limit = mkt.MAX_CATEGORIES
        if len(set(chosen)) > limit:
            # L10n: {0} is the number of categories.
            raise forms.ValidationError(ngettext(
                'You can have only {0} category.',
                'You can have only {0} categories.',
                limit).format(limit))
        return chosen

    def save(self):
        """Store the chosen categories and refresh special-region state."""
        chosen = list(self.cleaned_data['categories'])
        self.product.update(categories=chosen)
        toggle_app_for_special_regions(self.request, self.product)
class DevAgreementForm(happyforms.Form):
    """Records acceptance of the developer agreement on the given instance."""
    read_dev_agreement = forms.BooleanField(label=_lazy(u'Agree'),
                                            widget=forms.HiddenInput)

    def __init__(self, *args, **kw):
        self.instance = kw.pop('instance')
        super(DevAgreementForm, self).__init__(*args, **kw)

    def save(self):
        # Acceptance is stored as the timestamp of when the form was saved.
        self.instance.read_dev_agreement = datetime.now()
        self.instance.save()
class DevNewsletterForm(happyforms.Form):
    """Devhub newsletter subscription form."""
    email = forms.EmailField(
        error_messages={'required':
                        _lazy(u'Please enter a valid email address.')},
        widget=forms.TextInput(attrs={'required': '',
                                      'placeholder':
                                      _lazy(u'Your email address')}))
    email_format = forms.ChoiceField(
        widget=forms.RadioSelect(),
        choices=(('H', 'HTML'), ('T', _lazy(u'Text'))),
        initial='H')
    privacy = forms.BooleanField(
        error_messages={'required':
                        _lazy(u'You must agree to the Privacy Policy.')})
    country = forms.ChoiceField(label=_lazy(u'Country'))

    def __init__(self, locale, *args, **kw):
        # Country choices are localized for the caller-supplied locale and
        # sorted by display name.
        regions = mpconstants_regions.get_region(locale).REGIONS
        regions = sorted(regions.iteritems(), key=lambda x: x[1])
        super(DevNewsletterForm, self).__init__(*args, **kw)
        self.fields['country'].choices = regions
        self.fields['country'].initial = 'us'
class AppFormTechnical(AddonFormBase):
    """Form to edit technical details: flash usage, offline, public stats."""
    flash = forms.BooleanField(required=False)
    is_offline = forms.BooleanField(required=False)

    class Meta:
        model = Webapp
        fields = ('is_offline', 'public_stats',)

    def __init__(self, *args, **kw):
        super(AppFormTechnical, self).__init__(*args, **kw)
        # Mirror the latest file's uses_flash flag into the form.
        if self.version.all_files:
            self.initial['flash'] = self.version.all_files[0].uses_flash

    def save(self, addon, commit=False):
        # `commit` is ignored; the instance is always saved.
        uses_flash = self.cleaned_data.get('flash')
        self.instance = super(AppFormTechnical, self).save(commit=True)
        if self.version.all_files:
            self.version.all_files[0].update(uses_flash=bool(uses_flash))
        return self.instance
class TransactionFilterForm(happyforms.Form):
    """Filter form for the developer transactions listing."""
    app = AddonChoiceField(queryset=None, required=False, label=_lazy(u'App'))
    transaction_type = forms.ChoiceField(
        required=False, label=_lazy(u'Transaction Type'),
        choices=[(None, '')] + mkt.MKT_TRANSACTION_CONTRIB_TYPES.items())
    transaction_id = forms.CharField(
        required=False, label=_lazy(u'Transaction ID'))

    # NOTE: evaluated once at class-definition (import) time, so the year
    # range only refreshes when the process restarts.
    current_year = datetime.today().year
    years = [current_year - x for x in range(current_year - 2012)]
    date_from = forms.DateTimeField(
        required=False, widget=SelectDateWidget(years=years),
        label=_lazy(u'From'))
    date_to = forms.DateTimeField(
        required=False, widget=SelectDateWidget(years=years),
        label=_lazy(u'To'))

    def __init__(self, *args, **kwargs):
        self.apps = kwargs.pop('apps', [])
        super(TransactionFilterForm, self).__init__(*args, **kwargs)
        self.fields['app'].queryset = self.apps
class APIConsumerForm(happyforms.ModelForm):
    """Form for creating an API consumer key (website or command-line)."""
    app_name = forms.CharField(required=False)
    oauth_leg = forms.ChoiceField(choices=(
        ('website', _lazy('Web site')),
        ('command', _lazy('Command line')))
    )
    redirect_uri = forms.CharField(validators=[URLValidator()], required=False)

    class Meta:
        model = Access
        fields = ('app_name', 'redirect_uri')

    def __init__(self, *args, **kwargs):
        super(APIConsumerForm, self).__init__(*args, **kwargs)
        # Website consumers must supply a name and redirect URI.
        if self.data.get('oauth_leg') == 'website':
            for field in ['app_name', 'redirect_uri']:
                self.fields[field].required = True
class AppVersionForm(happyforms.ModelForm):
    """Edit a version's release/approval notes and the app's publish type."""
    releasenotes = TransField(widget=TransTextarea(), required=False)
    approvalnotes = forms.CharField(
        widget=TranslationTextarea(attrs={'rows': 4}), required=False)
    publish_immediately = forms.BooleanField(
        required=False,
        label=_lazy(u'Make this the Active version of my app as soon as it '
                    u'has been reviewed and approved.'))

    class Meta:
        model = Version
        fields = ('releasenotes', 'approvalnotes')

    def __init__(self, *args, **kwargs):
        super(AppVersionForm, self).__init__(*args, **kwargs)
        self.fields['publish_immediately'].initial = (
            self.instance.addon.publish_type == mkt.PUBLISH_IMMEDIATE)

    def save(self, *args, **kwargs):
        rval = super(AppVersionForm, self).save(*args, **kwargs)
        if self.instance.all_files[0].status == mkt.STATUS_PENDING:
            # If version is pending, allow changes to publish_type.
            if self.cleaned_data.get('publish_immediately'):
                publish_type = mkt.PUBLISH_IMMEDIATE
            else:
                publish_type = mkt.PUBLISH_PRIVATE
            self.instance.addon.update(publish_type=publish_type)
        return rval
class PreloadTestPlanForm(happyforms.Form):
    """Form submitting an app as a preload candidate with a test-plan file."""
    agree = forms.BooleanField(
        widget=forms.CheckboxInput,
        label=_lazy(
            u'Please consider my app as a candidate to be pre-loaded on a '
            u'Firefox OS device. I agree to the terms and conditions outlined '
            u'above. I understand that this document is not a commitment to '
            u'pre-load my app.'
        ))
    test_plan = forms.FileField(
        label=_lazy(u'Upload Your Test Plan (.pdf, .xls under 2.5MB)'),
        widget=forms.FileInput(attrs={'class': 'button'}))

    def clean(self):
        """Validate test_plan file: PDF/Excel type and under 2.5MB."""
        content_types = [
            'application/pdf',
            'application/vnd.pdf',
            'application/ms-excel',
            'application/vnd.ms-excel',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.'
            'sheet'
        ]
        max_upload_size = 2621440  # 2.5MB

        if 'test_plan' not in self.files:
            raise forms.ValidationError(_('Test plan required.'))

        # Renamed from `file` to avoid shadowing the builtin.
        plan = self.files['test_plan']
        # Type is guessed from the filename only; the content is not sniffed.
        content_type = mimetypes.guess_type(plan.name)[0]

        if content_type in content_types:
            if plan._size > max_upload_size:
                msg = _('File too large. Keep size under %s. Current size %s.')
                msg = msg % (filesizeformat(max_upload_size),
                             filesizeformat(plan._size))
                self._errors['test_plan'] = self.error_class([msg])
                raise forms.ValidationError(msg)
        else:
            msg = (_('Invalid file type {0}. Only {1} files are supported.')
                   .format(content_type, ', '.join(content_types)))
            self._errors['test_plan'] = self.error_class([msg])
            raise forms.ValidationError(msg)

        return self.cleaned_data
class IARCGetAppInfoForm(happyforms.Form):
    """Form attaching an existing IARC rating certificate to an app.

    `save()` calls the IARC web service and, on success, stores the
    submission id/security code and the returned ratings on the app.
    """
    submission_id = forms.CharField()
    security_code = forms.CharField(max_length=10)

    def __init__(self, app, *args, **kwargs):
        self.app = app
        super(IARCGetAppInfoForm, self).__init__(*args, **kwargs)

    def clean_submission_id(self):
        submission_id = (
            # Also allow "subm-1234" since that's what IARC tool displays.
            self.cleaned_data['submission_id'].lower().replace('subm-', ''))

        if submission_id.isdigit():
            return int(submission_id)

        raise forms.ValidationError(_('Please enter a valid submission ID.'))

    def clean(self):
        cleaned_data = super(IARCGetAppInfoForm, self).clean()

        app = self.app
        iarc_id = cleaned_data.get('submission_id')

        if not app or not iarc_id:
            return cleaned_data

        # Unless explicitly allowed, a certificate may only be bound to one
        # app at a time.
        if (not settings.IARC_ALLOW_CERT_REUSE and
                IARCInfo.objects.filter(submission_id=iarc_id)
                                .exclude(addon=app).exists()):
            del cleaned_data['submission_id']
            raise forms.ValidationError(
                _('This IARC certificate is already being used for another '
                  'app. Please create a new IARC Ratings Certificate.'))

        return cleaned_data

    def save(self, *args, **kwargs):
        # NOTE(review): save() may raise forms.ValidationError on a bad IARC
        # response — callers must be prepared to handle it post-validation.
        app = self.app
        iarc_id = self.cleaned_data['submission_id']
        iarc_code = self.cleaned_data['security_code']

        if settings.DEBUG and iarc_id == 0:
            # A local developer is being lazy. Skip the hard work.
            app.set_iarc_info(iarc_id, iarc_code)
            app.set_descriptors([])
            app.set_interactives([])
            app.set_content_ratings({ratingsbodies.ESRB: ratingsbodies.ESRB_E})
            return

        # Generate XML.
        xml = lib.iarc.utils.render_xml(
            'get_app_info.xml',
            {'submission_id': iarc_id, 'security_code': iarc_code})

        # Process that shizzle.
        client = lib.iarc.client.get_iarc_client('services')
        resp = client.Get_App_Info(XMLString=xml)

        # Handle response.
        data = lib.iarc.utils.IARC_XML_Parser().parse_string(resp)

        if data.get('rows'):
            row = data['rows'][0]

            if 'submission_id' not in row:
                # [{'ActionStatus': 'No records found. Please try another
                # 'criteria.', 'rowId: 1}].
                msg = _('Invalid submission ID or security code.')
                self._errors['submission_id'] = self.error_class([msg])
                log.info('[IARC] Bad GetAppInfo: %s' % row)
                raise forms.ValidationError(msg)

            # We found a rating, so store the id and code for future use.
            app.set_iarc_info(iarc_id, iarc_code)
            app.set_descriptors(row.get('descriptors', []))
            app.set_interactives(row.get('interactives', []))
            app.set_content_ratings(row.get('ratings', {}))
        else:
            msg = _('Invalid submission ID or security code.')
            self._errors['submission_id'] = self.error_class([msg])
            log.info('[IARC] Bad GetAppInfo. No rows: %s' % data)
            raise forms.ValidationError(msg)
class ContentRatingForm(happyforms.Form):
    """Form accepting a `since` cutoff datetime (content-ratings views)."""
    since = forms.DateTimeField()
class MOTDForm(happyforms.Form):
    """Form for editing the message of the day (MOTD)."""
    motd = forms.CharField(widget=widgets.Textarea())
| elysium001/zamboni | mkt/developers/forms.py | Python | bsd-3-clause | 51,617 |
#----------------------------------------------------------------------------#
# Imports
#----------------------------------------------------------------------------#
import json
import dateutil.parser
from datetime import *
import babel
from flask import Flask, render_template, request, Response, flash, redirect, url_for
from flask_moment import Moment
import logging
from logging import Formatter, FileHandler
from flask_wtf import Form
from forms import *
from models import setup_db, Venue, Artist, Show
from sqlalchemy import func
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm import aliased
#----------------------------------------------------------------------------#
# App Config.
#----------------------------------------------------------------------------#
app = Flask(__name__)
moment = Moment(app)
# setup_db (models.py) returns the database handle bound to the app —
# presumably a Flask-SQLAlchemy instance; see models.setup_db.
db = setup_db(app)
#----------------------------------------------------------------------------#
# Filters.
#----------------------------------------------------------------------------#
def format_datetime(value, format='medium'):
    """Render *value* as a localized datetime string via Babel.

    `format` may be 'full', 'medium', or any Babel datetime pattern,
    which is passed through unchanged.
    """
    parsed = dateutil.parser.parse(str(value))
    patterns = {
        'full': "EEEE MMMM, d, y 'at' h:mma",
        'medium': "EE MM, dd, y h:mma",
    }
    return babel.dates.format_datetime(parsed, patterns.get(format, format))
# Expose format_datetime to templates as the Jinja `datetime` filter.
app.jinja_env.filters['datetime'] = format_datetime
#----------------------------------------------------------------------------#
# Controllers.
#----------------------------------------------------------------------------#
@app.route('/')
def index():
    """Render the home page."""
    return render_template('pages/home.html')
# Venues
# ----------------------------------------------------------------
@app.route('/venues')
def venues():
    """List venues grouped by (city, state), with upcoming-show counts."""
    # TODO: replace with real venues data,
    # num_shows should be aggregated based on number of upcoming shows per venue.
    current_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    venue_query = Venue.query.group_by(Venue.id, Venue.state, Venue.city).all()
    city_and_state = ''
    data = []
    for venue in venue_query:
        # NOTE(review): start_time is compared lexically as a string — works
        # only if it is stored in '%Y-%m-%d %H:%M:%S' form; confirm in models.
        upcoming_shows = venue.shows.filter(Show.start_time > current_time).all()
        # Grouping assumes rows for the same city/state are adjacent in the
        # query result — TODO confirm the group_by ordering guarantees this.
        if city_and_state == venue.city + venue.state:
            data[len(data) - 1]["venues"].append({
                "id": venue.id,
                "name": venue.name,
                "num_upcoming_shows": len(upcoming_shows)
            })
        else:
            city_and_state = venue.city + venue.state
            data.append({
                "city": venue.city,
                "state": venue.state,
                "venues": [{
                    "id": venue.id,
                    "name": venue.name,
                    "num_upcoming_shows": len(upcoming_shows)
                }]
            })
    return render_template('pages/venues.html', areas=data)
@app.route('/venues/search', methods=['POST'])
def search_venues():
    """Case-insensitive substring search over venue names.

    e.g. "Hop" matches "The Musical Hop"; "Music" matches both
    "The Musical Hop" and "Park Square Live Music & Coffee".
    """
    pattern = '%' + request.form['search_term'] + '%'
    matched = [Venue.short(v) for v in Venue.query.filter(Venue.name.ilike(pattern))]
    response = {
        "count": len(matched),
        "data": matched,
    }
    return render_template(
        'pages/search_venues.html',
        results=response,
        search_term=request.form.get('search_term', '')
    )
@app.route('/venues/<int:venue_id>')
def show_venue(venue_id):
    """Render one venue's detail page, split into past and upcoming shows.

    Falls through to the 404 page when no venue has ``venue_id``.
    """
    venue_query = Venue.query.get(venue_id)
    if venue_query:
        venue_details = Venue.details(venue_query)
        current_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        # Upcoming shows: start strictly after "now" (string comparison works
        # because the timestamp format is lexicographically ordered).
        new_shows_query = Show.query.options(db.joinedload(Show.Venue)).filter(Show.venue_id == venue_id).filter(Show.start_time > current_time).all()
        new_shows_list = list(map(Show.artist_details, new_shows_query))
        venue_details["upcoming_shows"] = new_shows_list
        venue_details["upcoming_shows_count"] = len(new_shows_list)
        # Past shows: everything at or before "now".
        past_shows_query = Show.query.options(db.joinedload(Show.Venue)).filter(Show.venue_id == venue_id).filter(Show.start_time <= current_time).all()
        past_shows_list = list(map(Show.artist_details, past_shows_query))
        venue_details["past_shows"] = past_shows_list
        venue_details["past_shows_count"] = len(past_shows_list)
        return render_template('pages/show_venue.html', venue=venue_details)
    return render_template('errors/404.html')
# Create Venue
# ----------------------------------------------------------------
@app.route('/venues/create', methods=['GET'])
def create_venue_form():
    """Render an empty form for listing a new venue."""
    form = VenueForm()
    return render_template('forms/new_venue.html', form=form)
@app.route('/venues/create', methods=['POST'])
def create_venue_submission():
    """Persist a new Venue from the submitted form.

    Flashes a success message on insert and an error message on a DB
    failure. Validation failures are now reported too -- previously an
    invalid form fell through silently with no user feedback.
    """
    form = VenueForm(request.form)
    if form.validate():
        try:
            # Checkbox/optional fields are absent from request.form when unset.
            seeking_talent = request.form.get('seeking_talent') == 'y'
            seeking_description = request.form.get('seeking_description', '')
            new_venue = Venue(
                name=request.form['name'],
                genres=request.form.getlist('genres'),
                address=request.form['address'],
                city=request.form['city'],
                state=request.form['state'],
                phone=request.form['phone'],
                website=request.form['website'],
                facebook_link=request.form['facebook_link'],
                image_link=request.form['image_link'],
                seeking_talent=seeking_talent,
                seeking_description=seeking_description,
            )
            Venue.insert(new_venue)
            flash('Venue ' + request.form['name'] + ' was successfully listed!')
        except SQLAlchemyError as e:
            flash('An error occurred. Venue ' + request.form['name'] + ' could not be listed.')
    else:
        # Surface validation problems instead of failing silently.
        flash('Venue could not be listed. Please fix the errors: %s' % form.errors)
    return render_template('pages/home.html')
@app.route('/venues/<venue_id>', methods=['DELETE'])
def delete_venue(venue_id):
    """Delete the venue with ``venue_id``.

    Returns an empty 200 response on success and a 404 page when the venue
    does not exist. The original returned None, which Flask rejects with
    "View function did not return a response".
    """
    venue_data = Venue.query.get(venue_id)
    if venue_data is None:
        return render_template('errors/404.html'), 404
    Venue.delete(venue_data)
    return Response(status=200)
# Artists
# ----------------------------------------------------------------
@app.route('/artists')
def artists():
    """Render the full artist roster."""
    roster = [Artist.short(a) for a in Artist.query.all()]
    return render_template('pages/artists.html', artists=roster)
@app.route('/artists/search', methods=['POST'])
def search_artists():
    """Case-insensitive substring search over artist names.

    e.g. "A" matches "Guns N Petals", "Matt Quevado" and "The Wild Sax
    Band"; "band" matches only "The Wild Sax Band".
    """
    pattern = '%' + request.form['search_term'] + '%'
    matched = [Artist.short(a) for a in Artist.query.filter(Artist.name.ilike(pattern))]
    response = {
        "count": len(matched),
        "data": matched,
    }
    return render_template(
        'pages/search_artists.html',
        results=response,
        search_term=request.form.get('search_term', '')
    )
@app.route('/artists/<int:artist_id>')
def show_artist(artist_id):
    """Render one artist's detail page, split into past and upcoming shows.

    Falls through to the 404 page when no artist has ``artist_id``.
    """
    artist_query = Artist.query.get(artist_id)
    if artist_query:
        artist_details = Artist.details(artist_query)
        current_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        # Upcoming shows: start strictly after "now".
        new_shows_query = Show.query.options(db.joinedload(Show.Artist)).filter(Show.artist_id == artist_id).filter(Show.start_time > current_time).all()
        new_shows_list = list(map(Show.venue_details, new_shows_query))
        artist_details["upcoming_shows"] = new_shows_list
        artist_details["upcoming_shows_count"] = len(new_shows_list)
        # Past shows: everything at or before "now".
        past_shows_query = Show.query.options(db.joinedload(Show.Artist)).filter(Show.artist_id == artist_id).filter(Show.start_time <= current_time).all()
        past_shows_list = list(map(Show.venue_details, past_shows_query))
        artist_details["past_shows"] = past_shows_list
        artist_details["past_shows_count"] = len(past_shows_list)
        return render_template('pages/show_artist.html', artist=artist_details)
    return render_template('errors/404.html')
# Update
# ----------------------------------------------------------------
@app.route('/artists/<int:artist_id>/edit', methods=['GET'])
def edit_artist(artist_id):
    """Render the edit form pre-populated with the artist's stored values."""
    form = ArtistForm()
    artist_query = Artist.query.get(artist_id)
    if artist_query:
        artist_details = Artist.details(artist_query)
        # Copy each stored value into the WTForms field defaults.
        form.name.data = artist_details["name"]
        form.genres.data = artist_details["genres"]
        form.city.data = artist_details["city"]
        form.state.data = artist_details["state"]
        form.phone.data = artist_details["phone"]
        form.website.data = artist_details["website"]
        form.facebook_link.data = artist_details["facebook_link"]
        form.seeking_venue.data = artist_details["seeking_venue"]
        form.seeking_description.data = artist_details["seeking_description"]
        form.image_link.data = artist_details["image_link"]
        return render_template('forms/edit_artist.html', form=form, artist=artist_details)
    return render_template('errors/404.html')
@app.route('/artists/<int:artist_id>/edit', methods=['POST'])
def edit_artist_submission(artist_id):
    """Apply the submitted form values to an existing artist.

    Redirects to the artist page on success; renders the 404 page both for
    an unknown artist and (somewhat confusingly) for validation failures.
    """
    form = ArtistForm(request.form)
    artist_data = Artist.query.get(artist_id)
    if artist_data:
        if form.validate():
            # Checkbox/optional fields are absent from request.form when unset.
            seeking_venue = False
            seeking_description = ''
            if 'seeking_venue' in request.form:
                seeking_venue = request.form['seeking_venue'] == 'y'
            if 'seeking_description' in request.form:
                seeking_description = request.form['seeking_description']
            setattr(artist_data, 'name', request.form['name'])
            setattr(artist_data, 'genres', request.form.getlist('genres'))
            setattr(artist_data, 'city', request.form['city'])
            setattr(artist_data, 'state', request.form['state'])
            setattr(artist_data, 'phone', request.form['phone'])
            setattr(artist_data, 'website', request.form['website'])
            setattr(artist_data, 'facebook_link', request.form['facebook_link'])
            setattr(artist_data, 'image_link', request.form['image_link'])
            setattr(artist_data, 'seeking_description', seeking_description)
            setattr(artist_data, 'seeking_venue', seeking_venue)
            Artist.update(artist_data)
            return redirect(url_for('show_artist', artist_id=artist_id))
        else:
            print(form.errors)
    return render_template('errors/404.html'), 404
    # return redirect(url_for('show_artist', artist_id=artist_id))
@app.route('/venues/<int:venue_id>/edit', methods=['GET'])
def edit_venue(venue_id):
    """Render the edit form pre-populated with the venue's stored values."""
    form = VenueForm()
    venue_query = Venue.query.get(venue_id)
    if venue_query:
        venue_details = Venue.details(venue_query)
        # Copy each stored value into the WTForms field defaults.
        form.name.data = venue_details["name"]
        form.genres.data = venue_details["genres"]
        form.address.data = venue_details["address"]
        form.city.data = venue_details["city"]
        form.state.data = venue_details["state"]
        form.phone.data = venue_details["phone"]
        form.website.data = venue_details["website"]
        form.facebook_link.data = venue_details["facebook_link"]
        form.seeking_talent.data = venue_details["seeking_talent"]
        form.seeking_description.data = venue_details["seeking_description"]
        form.image_link.data = venue_details["image_link"]
        return render_template('forms/edit_venue.html', form=form, venue=venue_details)
    return render_template('errors/404.html')
@app.route('/venues/<int:venue_id>/edit', methods=['POST'])
def edit_venue_submission(venue_id):
    """Apply the submitted form values to an existing venue.

    Redirects to the venue page on success; renders the 404 page both for
    an unknown venue and (somewhat confusingly) for validation failures.
    """
    form = VenueForm(request.form)
    venue_data = Venue.query.get(venue_id)
    if venue_data:
        if form.validate():
            # Checkbox/optional fields are absent from request.form when unset.
            seeking_talent = False
            seeking_description = ''
            if 'seeking_talent' in request.form:
                seeking_talent = request.form['seeking_talent'] == 'y'
            if 'seeking_description' in request.form:
                seeking_description = request.form['seeking_description']
            setattr(venue_data, 'name', request.form['name'])
            setattr(venue_data, 'genres', request.form.getlist('genres'))
            setattr(venue_data, 'address', request.form['address'])
            setattr(venue_data, 'city', request.form['city'])
            setattr(venue_data, 'state', request.form['state'])
            setattr(venue_data, 'phone', request.form['phone'])
            setattr(venue_data, 'website', request.form['website'])
            setattr(venue_data, 'facebook_link', request.form['facebook_link'])
            setattr(venue_data, 'image_link', request.form['image_link'])
            setattr(venue_data, 'seeking_description', seeking_description)
            setattr(venue_data, 'seeking_talent', seeking_talent)
            Venue.update(venue_data)
            return redirect(url_for('show_venue', venue_id=venue_id))
        else:
            print(form.errors)
    return render_template('errors/404.html'), 404
# Create Artist
# ----------------------------------------------------------------
@app.route('/artists/create', methods=['GET'])
def create_artist_form():
    """Render an empty form for listing a new artist."""
    form = ArtistForm()
    return render_template('forms/new_artist.html', form=form)
@app.route('/artists/create', methods=['POST'])
def create_artist_submission():
    """Persist a new Artist from the submitted form.

    Now validates the form first, mirroring ``create_venue_submission``
    (the original skipped validation entirely), and flashes success,
    DB-error, or validation-error messages accordingly.
    """
    form = ArtistForm(request.form)
    if form.validate():
        try:
            # Checkbox/optional fields are absent from request.form when unset.
            seeking_venue = request.form.get('seeking_venue') == 'y'
            seeking_description = request.form.get('seeking_description', '')
            new_artist = Artist(
                name=request.form['name'],
                genres=request.form.getlist('genres'),
                city=request.form['city'],
                state=request.form['state'],
                phone=request.form['phone'],
                website=request.form['website'],
                facebook_link=request.form['facebook_link'],
                image_link=request.form['image_link'],
                seeking_venue=seeking_venue,
                seeking_description=seeking_description,
            )
            Artist.insert(new_artist)
            flash('Artist ' + request.form['name'] + ' was successfully listed!')
        except SQLAlchemyError as e:
            flash('An error occurred. Artist ' + request.form['name'] + ' could not be listed.')
    else:
        # Surface validation problems instead of inserting unvalidated data.
        flash('Artist could not be listed. Please fix the errors: %s' % form.errors)
    return render_template('pages/home.html')
# Shows
# ----------------------------------------------------------------
@app.route('/shows')
def shows():
    """List every show with its venue and artist."""
    # Eager-load both relationships so Show.details needs no extra queries.
    all_shows = Show.query.options(db.joinedload(Show.Venue), db.joinedload(Show.Artist)).all()
    listing = [Show.details(s) for s in all_shows]
    return render_template('pages/shows.html', shows=listing)
@app.route('/shows/create')
def create_shows():
    """Render an empty form for listing a new show."""
    form = ShowForm()
    return render_template('forms/new_show.html', form=form)
@app.route('/shows/create', methods=['POST'])
def create_show_submission():
    """Persist a new Show linking an artist to a venue at a start time.

    Flashes a success message on insert and an error message when the DB
    insert fails; either way the user lands back on the home page.
    """
    try:
        new_show = Show(
            venue_id=request.form['venue_id'],
            artist_id=request.form['artist_id'],
            start_time=request.form['start_time'],
        )
        Show.insert(new_show)
        flash('Show was successfully listed!')
    except SQLAlchemyError as e:
        flash('An error occurred. Show could not be listed.')
    return render_template('pages/home.html')
@app.errorhandler(404)
def not_found_error(error):
    """Render the custom 404 page for unknown routes/resources."""
    return render_template('errors/404.html'), 404
@app.errorhandler(500)
def server_error(error):
    """Render the custom 500 page for unhandled server errors."""
    return render_template('errors/500.html'), 500
# Outside debug mode, append INFO-and-above log records to error.log with a
# timestamp/level/location format.
if not app.debug:
    file_handler = FileHandler('error.log')
    file_handler.setFormatter(
        Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]')
    )
    app.logger.setLevel(logging.INFO)
    file_handler.setLevel(logging.INFO)
    app.logger.addHandler(file_handler)
    app.logger.info('errors')
#----------------------------------------------------------------------------#
# Launch.
#----------------------------------------------------------------------------#
# Default port:
if __name__ == '__main__':
    # Start Flask's built-in development server on the default port (5000).
    app.run()
# Or specify port manually:
'''
if __name__ == '__main__':
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
'''
| manishbisht/Udacity | Full Stack Web Developer Nanodegree v2/P1 - Fyyur Artist Booking Site/app.py | Python | mit | 19,320 |
#-*- coding:utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 One Click Software (http://oneclick.solutions)
# and Copyright (C) 2011,2013 Michael Telahun Makonnen <mmakonnen@gmail.com>.
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'View Employees by Department',
'version': '1.1',
'category': 'Human Resources',
'description': """
Change Default Employee View
============================
View the employees list grouped by department.
""",
'author':'Michael Telahun Makonnen <mmakonnen@gmail.com> and One Click Software',
'website':'http://oneclick.solutions',
'depends': [
'hr',
'listview_images',
],
'init_xml': [
],
'update_xml': [
'hr_view.xml',
],
'test': [
],
'demo_xml': [
],
'installable': True,
'active': False,
}
| cartertech/odoo-hr-ng | hr_view_employee_by_department/__openerp__.py | Python | agpl-3.0 | 1,653 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-09-16 18:00
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: relaxes House.last_sold to allow NULL values.
    dependencies = [
        ('neighborhood', '0003_auto_20160916_1759'),
    ]
    operations = [
        migrations.AlterField(
            model_name='house',
            name='last_sold',
            field=models.DateField(null=True),
        ),
    ]
| josephkane/neighborhood | nh_rest/neighborhood/migrations/0004_auto_20160916_1800.py | Python | mit | 452 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import unittest
import uuid
from openstack.telemetry.alarm.v2 import alarm
from openstack.tests.functional import base
@unittest.skip("bug/1524468")
@unittest.skipUnless(base.service_exists(service_type="alarming"),
"Alarming service does not exist")
@unittest.skipUnless(base.service_exists(service_type="metering"),
"Metering service does not exist")
class TestAlarm(base.BaseFunctionalTest):
NAME = uuid.uuid4().hex
ID = None
@classmethod
def setUpClass(cls):
super(TestAlarm, cls).setUpClass()
meter = next(cls.conn.telemetry.meters())
sot = cls.conn.alarm.create_alarm(
name=cls.NAME,
type='threshold',
threshold_rule={
'meter_name': meter.name,
'threshold': 1.1,
},
)
assert isinstance(sot, alarm.Alarm)
cls.assertIs(cls.NAME, sot.name)
cls.ID = sot.id
@classmethod
def tearDownClass(cls):
sot = cls.conn.alarm.delete_alarm(cls.ID, ignore_missing=False)
cls.assertIs(None, sot)
def test_get(self):
sot = self.conn.alarm.get_alarm(self.ID)
self.assertEqual(self.NAME, sot.name)
self.assertEqual(self.ID, sot.id)
def test_list(self):
names = [o.name for o in self.conn.alarm.alarms()]
self.assertIn(self.NAME, names)
| briancurtin/python-openstacksdk | openstack/tests/functional/telemetry/alarm/v2/test_alarm.py | Python | apache-2.0 | 1,941 |
# -*- coding: utf-8 -*-
# Derived work from Facebook's tornado server.
"""TCPServer using non-blocking evented polling loop."""
import os, socket, errno, stat
import ssl # Python 2.6+
from pluggdapps.evserver import process
from pluggdapps.evserver.httpioloop import HTTPIOLoop
from pluggdapps.evserver.httpiostream import HTTPIOStream, HTTPSSLIOStream
import pluggdapps.utils as h
class TCPServer( object ):
    """A non-blocking, single-threaded "Mixin class" implementing TCP server.
    To use `TCPServer`, define a `Plugin` subclass which overrides the
    `handle_stream` method.
    `TCPServer` can serve SSL traffic with Python 2.6+ and OpenSSL.
    To make this server serve SSL traffic, configure the sub-class plugin with
    `ssloptions.*` settings. which is required for the `ssl.wrap_socket`
    method, including "certfile" and "keyfile"
    """
    def __init__( self, sett ):
        # configuration settings
        self.sett = sett
        self._sockets = {} # fd -> socket object
        self._pending_sockets = []  # sockets bound before start() was called
        self._started = False
        self.ioloop = None  # HTTPIOLoop, created lazily in add_sockets()
    def listen( self ):
        """Starts accepting connections on the given port.
        This method may be called more than once to listen on multiple ports.
        `listen` takes effect immediately; it is not necessary to call
        `TCPServer.start` afterwards. It is, however, necessary to start
        the `HTTPIOLoop`.
        """
        sett = self.sett
        sockets = bind_sockets(
                sett['port'], sett['host'], None, sett['backlog'] )
        self.add_sockets(sockets)
    def add_sockets( self, sockets ):
        """Make the server start accepting connections using event loop on the
        given sockets.
        The ``sockets`` parameter is a list of socket objects such as
        those returned by `bind_sockets`.
        """
        self.ioloop = HTTPIOLoop( self.sett )
        for sock in sockets:
            self._sockets[ sock.fileno()] = sock
            # Register an accept-callback on the event loop for each
            # listening socket.
            add_accept_handler( sock, self._handle_connection, self.ioloop )
    def add_socket( self, socket ):
        """Singular version of `add_sockets`.  Takes a single socket object."""
        self.add_sockets([socket])
    def bind( self ):
        """Binds this server to the addres, port and family configured in
        server settings.
        This method may be called multiple times prior to `start` to listen
        on multiple ports or interfaces."""
        family = socket.AF_UNSPEC
        sett = self.sett
        sockets = bind_sockets(
                sett['port'], sett['host'], family, sett['backlog'] )
        if self._started :
            self.add_sockets( sockets )
        else:
            # Defer registration until start() runs (e.g. after forking).
            self._pending_sockets.extend( sockets )
    def start( self ):
        """Starts this server using HTTPIOloop.
        By default, we run the server in this process and do not fork any
        additional child process.
        If `multiprocess` settings not configured or configured as <= 0, we
        detect the number of cores available on this machine and fork that
        number of child processes. If `multiprocess` settings configured as
        > 0, we fork that specific number of sub-processes.
        Since we use processes and not threads, there is no shared memory
        between any server code.
        Note that multiple processes are not compatible with the autoreload
        module (or the ``debug=True`` option to `Platform`). When using
        multiple processes, no HTTPIOLoop can be created or referenced until
        after the call to ``TCPServer.start(n)``.
        """
        assert not self._started
        self._started = True
        sett = self.sett
        if sett['multiprocess'] <= 0: # Single process
            #log.info("Starting server in single process mode ...")
            self.listen()
        else : # multi-process
            #log.info("Starting server in multi process mode ...")
            # Bind before forking so every child inherits the same listening
            # sockets, then register them with the child's own event loop.
            sockets = bind_sockets(
                        sett['port'], sett['host'], None, sett['backlog'] )
            process.fork_processes( sett['multiprocess'], sett['max_restart'] )
            self.add_sockets( sockets )
        # TODO : Setup logging for multiple process ?
        self.ioloop.start() # Block !
    def stop(self):
        """Stops listening for new connections.
        Requests currently in progress may still continue after the
        server is stopped.
        """
        for fd, sock in self._sockets.items() :
            self.ioloop.remove_handler(fd)
            sock.close()
    def handle_stream(self, stream, address):
        """Override to handle a new `IOStream` from an incoming connection."""
        raise NotImplementedError()
    def _handle_connection( self, conn, address ):
        # Accept-callback: optionally wrap the new connection in SSL, then
        # hand an IOStream for it to `handle_stream`.
        ssloptions = h.settingsfor( 'ssloptions.', self.sett )
        is_ssl = ssloptions['keyfile'] and ssloptions['certfile']
        if is_ssl :
            try:
                conn = ssl.wrap_socket( conn,
                                        server_side=True,
                                        do_handshake_on_connect=False,
                                        **ssloptions )
            except ssl.SSLError as err:
                if err.args[0] == ssl.SSL_ERROR_EOF:
                    # Peer hung up during the handshake; drop silently.
                    return conn.close()
                else:
                    raise
            except socket.error as err:
                if err.args[0] == errno.ECONNABORTED:
                    return conn.close()
                else:
                    raise
        try:
            if is_ssl :
                stream = HTTPSSLIOStream(
                            conn, address, self.ioloop,
                            self.sett, ssloptions=ssloptions )
            else :
                stream = HTTPIOStream( conn, address, self.ioloop, self.sett )
            self.handle_stream( stream, address )
        except Exception:
            #log.error("Error in connection callback", exc_info=True)
            pass
def bind_sockets( port, address, family, backlog ):
    """Creates listening sockets bound to the given port and address.
    Returns a list of socket objects (multiple sockets are returned if
    the given address maps to multiple IP addresses, which is most common
    for mixed IPv4 and IPv6 use).
    Address may be either an IP address or hostname.  If it's a hostname,
    the server will listen on all IP addresses associated with the
    name.  Address may be an empty string or None to listen on all
    available interfaces.  Family may be set to either socket.AF_INET
    or socket.AF_INET6 to restrict to ipv4 or ipv6 addresses, otherwise
    both will be used if available.
    The ``backlog`` argument has the same meaning as for
    ``socket.listen()``.
    """
    family = family or socket.AF_UNSPEC
    sockets = []
    if address == "":
        address = None
    # AI_PASSIVE yields wildcard addresses suitable for bind() when
    # `address` is None.
    flags = socket.AI_PASSIVE
    if hasattr(socket, "AI_ADDRCONFIG"):
        # AI_ADDRCONFIG ensures that we only try to bind on ipv6
        # if the system is configured for it, but the flag doesn't
        # exist on some platforms (specifically WinXP, although
        # newer versions of windows have it)
        flags |= socket.AI_ADDRCONFIG
    # A set de-duplicates identical getaddrinfo results, avoiding a double
    # bind on the same address.
    addrinfo = set(
        socket.getaddrinfo(
            address, port, family, socket.SOCK_STREAM, 0, flags))
    for res in addrinfo :
        #log.info("Binding socket for %s", res)
        af, socktype, proto, canonname, sockaddr = res
        sock = socket.socket(af, socktype, proto)
        # Prevent the listening fd from leaking into exec'd children.
        h.set_close_exec(sock.fileno())
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        if af == socket.AF_INET6:
            # On linux, ipv6 sockets accept ipv4 too by default,
            # but this makes it impossible to bind to both
            # 0.0.0.0 in ipv4 and :: in ipv6.  On other systems,
            # separate sockets *must* be used to listen for both ipv4
            # and ipv6.  For consistency, always disable ipv4 on our
            # ipv6 sockets and use a separate ipv4 socket when needed.
            #
            # Python 2.x on windows doesn't have IPPROTO_IPV6.
            if hasattr(socket, "IPPROTO_IPV6"):
                sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 1)
        #log.debug( "Set server socket to non-blocking mode ..." )
        sock.setblocking(0) # Set to non-blocking.
        sock.bind(sockaddr)
        #log.debug( "Server listening with a backlog of %s", backlog )
        sock.listen(backlog)
        sockets.append(sock)
    return sockets
def add_accept_handler( sock, callback, ioloop ):
    """Adds an ``HTTPIOLoop`` event handler to accept new connections on
    ``sock``.
    When a connection is accepted, ``callback(connection, address)`` will
    be run (``connection`` is a socket object, and ``address`` is the
    address of the other end of the connection).  Note that this signature
    is different from the ``callback(fd, events)`` signature used for
    ``HTTPIOLoop`` handlers.
    """
    def accept_handler( fd, events ):
        # Drain every connection currently queued on the listening socket;
        # EWOULDBLOCK/EAGAIN signals the accept backlog is empty.
        while True:
            try:
                connection, address = sock.accept()
            except socket.error as e:
                if e.args[0] in (errno.EWOULDBLOCK, errno.EAGAIN):
                    return
                raise
            #log.info( "Accepting new connection from %s", address )
            callback( connection, address )
    ioloop.add_handler( sock.fileno(), accept_handler, HTTPIOLoop.READ )
| prataprc/pluggdapps | pluggdapps/.Attic/evserver/tcpserver.py | Python | gpl-3.0 | 9,575 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# Odoo module manifest: bridges Sales and HR Expense so employee expenses
# can be re-invoiced by setting the sale order directly on the expense.
{
    'name': 'Sales Expense',
    'version': '1.0',
    'category': 'Sales/Sales',
    'summary': 'Quotation, Sales Orders, Delivery & Invoicing Control',
    'description': """
Reinvoice Employee Expense
==========================
Create some products for which you can re-invoice the costs.
This module allow to reinvoice employee expense, by setting the SO directly on the expense.
""",
    'depends': ['sale_management', 'hr_expense'],
    'data': [
        'views/assets.xml',
        'views/product_view.xml',
        'views/hr_expense_views.xml',
        'views/sale_order_views.xml',
    ],
    'demo': ['data/sale_expense_demo.xml'],
    'test': [],
    'installable': True,
    # Installs automatically whenever both dependencies are present.
    'auto_install': True,
}
| ygol/odoo | addons/sale_expense/__manifest__.py | Python | agpl-3.0 | 814 |
import warnings
from rope.base import exceptions, pyobjects, pynames, taskhandle, evaluate, worder, codeanalyze
from rope.base.change import ChangeSet, ChangeContents, MoveResource
from rope.refactor import occurrences, sourceutils
class Rename(object):
    """A class for performing rename refactoring
    It can rename everything: classes, functions, modules, packages,
    methods, variables and keyword arguments.
    """
    def __init__(self, project, resource, offset=None):
        """If `offset` is None, the `resource` itself will be renamed"""
        self.project = project
        self.pycore = project.pycore
        self.resource = resource
        if offset is not None:
            # Renaming a name inside a module: resolve the pyname at offset.
            self.old_name = worder.get_name_at(self.resource, offset)
            this_pymodule = self.pycore.resource_to_pyobject(self.resource)
            self.old_instance, self.old_pyname = \
                evaluate.eval_location2(this_pymodule, offset)
            if self.old_pyname is None:
                raise exceptions.RefactoringError(
                    'Rename refactoring should be performed'
                    ' on resolvable python identifiers.')
        else:
            # Renaming the module/package itself; a package is represented
            # by its __init__.py, so fall back to the parent folder.
            if not resource.is_folder() and resource.name == '__init__.py':
                resource = resource.parent
            dummy_pymodule = self.pycore.get_string_module('')
            self.old_instance = None
            self.old_pyname = pynames.ImportedModule(dummy_pymodule,
                                                     resource=resource)
            if resource.is_folder():
                self.old_name = resource.name
            else:
                self.old_name = resource.name[:-3]  # strip the '.py' suffix
    def get_old_name(self):
        return self.old_name
    def get_changes(self, new_name, in_file=None, in_hierarchy=False,
                    unsure=None, docs=False, resources=None,
                    task_handle=taskhandle.NullTaskHandle()):
        """Get the changes needed for this refactoring
        Parameters:
        - `in_hierarchy`: when renaming a method this keyword forces
          to rename all matching methods in the hierarchy
        - `docs`: when `True` rename refactoring will rename
          occurrences in comments and strings where the name is
          visible.  Setting it will make renames faster, too.
        - `unsure`: decides what to do about unsure occurrences.
          If `None`, they are ignored.  Otherwise `unsure` is
          called with an instance of `occurrence.Occurrence` as
          parameter.  If it returns `True`, the occurrence is
          considered to be a match.
        - `resources` can be a list of `rope.base.resources.File`\s to
          apply this refactoring on.  If `None`, the restructuring
          will be applied to all python files.
        - `in_file`: this argument has been deprecated; use
          `resources` instead.
        """
        # Back-compat shim: a bare True/False `unsure` becomes a constant
        # predicate.
        if unsure in (True, False):
            warnings.warn(
                'unsure parameter should be a function that returns '
                'True or False', DeprecationWarning, stacklevel=2)
            def unsure_func(value=unsure):
                return value
            unsure = unsure_func
        if in_file is not None:
            warnings.warn(
                '`in_file` argument has been deprecated; use `resources` '
                'instead. ', DeprecationWarning, stacklevel=2)
            if in_file:
                resources = [self.resource]
        # Local names can only occur in their defining file.
        if _is_local(self.old_pyname):
            resources = [self.resource]
        if resources is None:
            resources = self.pycore.get_python_files()
        changes = ChangeSet('Renaming <%s> to <%s>' %
                            (self.old_name, new_name))
        finder = occurrences.create_finder(
            self.pycore, self.old_name, self.old_pyname, unsure=unsure,
            docs=docs, instance=self.old_instance,
            in_hierarchy=in_hierarchy and self.is_method())
        job_set = task_handle.create_jobset('Collecting Changes', len(resources))
        for file_ in resources:
            job_set.started_job(file_.path)
            # rename_in_module returns None when the file needs no change.
            new_content = rename_in_module(finder, new_name, resource=file_)
            if new_content is not None:
                changes.add_change(ChangeContents(file_, new_content))
            job_set.finished_job()
        if self._is_renaming_a_module():
            resource = self.old_pyname.get_object().get_resource()
            if self._is_allowed_to_move(resources, resource):
                self._rename_module(resource, new_name, changes)
        return changes
    def _is_allowed_to_move(self, resources, resource):
        # A module/package may only be moved if its file (the package's
        # __init__.py for folders) is among the resources being refactored.
        if resource.is_folder():
            try:
                return resource.get_child('__init__.py') in resources
            except exceptions.ResourceNotFoundError:
                return False
        else:
            return resource in resources
    def _is_renaming_a_module(self):
        if isinstance(self.old_pyname.get_object(), pyobjects.AbstractModule):
            return True
        return False
    def is_method(self):
        # True when the renamed name is a function defined directly on a class.
        pyname = self.old_pyname
        return isinstance(pyname, pynames.DefinedName) and \
            isinstance(pyname.get_object(), pyobjects.PyFunction) and \
            isinstance(pyname.get_object().parent, pyobjects.PyClass)
    def _rename_module(self, resource, new_name, changes):
        # Renaming a module also moves its file (or folder, for packages).
        if not resource.is_folder():
            new_name = new_name + '.py'
        parent_path = resource.parent.path
        if parent_path == '':
            new_location = new_name
        else:
            new_location = parent_path + '/' + new_name
        changes.add_change(MoveResource(resource, new_location))
class ChangeOccurrences(object):
    """Replace the occurrences of a name inside a single scope.

    Only the scope containing the offset handed to the constructor is
    touched, and the refactoring has no side-effects: changing the
    occurrences of a module, for example, does not rename the module
    itself -- it merely rewrites the places where that module is
    mentioned inside the scope.  This makes the class useful for
    building custom refactorings.
    """

    def __init__(self, project, resource, offset):
        self.pycore = project.pycore
        self.resource = resource
        self.offset = offset
        self.old_name = worder.get_name_at(resource, offset)
        self.pymodule = self.pycore.resource_to_pyobject(self.resource)
        self.old_pyname = evaluate.eval_location(self.pymodule, offset)

    def get_old_name(self):
        """Return the primary (possibly dotted) name at the stored offset."""
        word_finder = worder.Worder(self.resource.read())
        return word_finder.get_primary_at(self.offset)

    def _get_scope_offset(self):
        # (start, end) offsets of the innermost scope around self.offset
        lines = self.pymodule.lines
        holding_scope = self.pymodule.get_scope().get_inner_scope_for_line(
            lines.get_line_number(self.offset))
        return (lines.get_line_start(holding_scope.get_start()),
                lines.get_line_end(holding_scope.get_end()))

    def get_changes(self, new_name, only_calls=False, reads=True, writes=True):
        """Return a `ChangeSet` replacing the occurrences with `new_name`."""
        changes = ChangeSet('Changing <%s> occurrences to <%s>' %
                            (self.old_name, new_name))
        scope_start, scope_end = self._get_scope_offset()
        finder = occurrences.create_finder(
            self.pycore, self.old_name, self.old_pyname,
            imports=False, only_calls=only_calls)
        rewritten = rename_in_module(
            finder, new_name, pymodule=self.pymodule, replace_primary=True,
            region=(scope_start, scope_end), reads=reads, writes=writes)
        if rewritten is not None:
            changes.add_change(ChangeContents(self.resource, rewritten))
        return changes
def rename_in_module(occurrences_finder, new_name, resource=None, pymodule=None,
                     replace_primary=False, region=None, reads=True, writes=True):
    """Return the changed source or `None` if there are no changes.

    `region`, when given, is a ``(start, end)`` offset pair limiting the
    replacement; `reads`/`writes` filter occurrences by access kind.
    """
    if resource is not None:
        source_code = resource.read()
    else:
        source_code = pymodule.source_code
    collector = codeanalyze.ChangeCollector(source_code)
    for occurrence in occurrences_finder.find_occurrences(resource, pymodule):
        if replace_primary:
            if occurrence.is_a_fixed_primary():
                continue
            start, end = occurrence.get_primary_range()
        else:
            start, end = occurrence.get_word_range()
        written = occurrence.is_written()
        if (not reads and not written) or (not writes and written):
            continue
        if region is None or region[0] <= start < region[1]:
            collector.add_change(start, end, new_name)
    return collector.get_changed()
def _is_local(pyname):
    """Return True if `pyname` is a name assigned inside a function scope."""
    module, lineno = pyname.get_definition_location()
    if lineno is None:
        return False
    scope = module.get_scope().get_inner_scope_for_line(lineno)
    # a function/class name is defined in its *enclosing* scope
    if isinstance(pyname, pynames.DefinedName) and \
       scope.get_kind() in ('Function', 'Class'):
        scope = scope.parent
    if scope.get_kind() != 'Function':
        return False
    return (pyname in list(scope.get_names().values()) and
            isinstance(pyname, pynames.AssignedName))
| JetChars/vim | vim/bundle/python-mode/pymode/libs3/rope/refactor/rename.py | Python | apache-2.0 | 9,365 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Saves and restore variables inside traced @tf.functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import uuid
from tensorflow.core.protobuf import saver_pb2
from tensorflow.python.eager import context
from tensorflow.python.eager import def_function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_spec
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_io_ops
from tensorflow.python.ops import io_ops
from tensorflow.python.ops import string_ops
from tensorflow.python.training.saving import checkpoint_options
from tensorflow.python.training.saving import saveable_hook
from tensorflow.python.training.saving import saveable_object
from tensorflow.python.training.saving import saveable_object_util
from tensorflow.python.util import nest
class _SingleDeviceSaver(object):
  """Saves and restores checkpoints from the current device."""

  __slots__ = ["_saveable_objects"]

  def __init__(self, saveable_objects):
    """Specify a list of `SaveableObject`s to save and restore.

    Args:
      saveable_objects: A list of `SaveableObject`s.

    Raises:
      ValueError: If an entry is not a `SaveableObject`.
    """
    checked = list(saveable_objects)
    for candidate in checked:
      if not isinstance(candidate, saveable_object.SaveableObject):
        raise ValueError(
            "Expected a list of SaveableObjects, got %s." % (candidate,))
    self._saveable_objects = checked

  def save(self, file_prefix, options=None):
    """Save the saveable objects to a checkpoint with `file_prefix`.

    Args:
      file_prefix: A string or scalar string Tensor containing the prefix to
        save under.
      options: Optional `CheckpointOptions` object.

    Returns:
      An `Operation`, or None when executing eagerly.
    """
    options = options or checkpoint_options.CheckpointOptions()
    # Flatten all specs of all saveables into parallel name/slice/value lists.
    names = []
    slices = []
    values = []
    for saveable in self._saveable_objects:
      for spec in saveable.specs:
        names.append(spec.name)
        values.append(spec.tensor)
        slices.append(spec.slice_spec)
    save_device = options.experimental_io_device or "cpu:0"
    with ops.device(save_device):
      return io_ops.save_v2(file_prefix, names, slices, values)

  def restore(self, file_prefix, options=None):
    """Restore the saveable objects from a checkpoint with `file_prefix`.

    Args:
      file_prefix: A string or scalar string Tensor containing the prefix for
        files to read from.
      options: Optional `CheckpointOptions` object.

    Returns:
      A dictionary mapping from SaveableObject names to restore operations.
    """
    options = options or checkpoint_options.CheckpointOptions()
    # Collect per-spec restore metadata plus a nested name structure that
    # mirrors the grouping of specs by saveable.
    restore_specs = []
    tensor_structure = []
    for saveable in self._saveable_objects:
      per_saveable_names = []
      tensor_structure.append(per_saveable_names)
      for spec in saveable.specs:
        per_saveable_names.append(spec.name)
        restore_specs.append((spec.name, spec.slice_spec, spec.dtype))
    names, slices, spec_dtypes = zip(*restore_specs)
    restore_device = options.experimental_io_device or "cpu:0"
    with ops.device(restore_device):
      flat_restored = io_ops.restore_v2(
          file_prefix, names, slices, spec_dtypes)
    structured_restored = nest.pack_sequence_as(
        tensor_structure, flat_restored)
    return {
        saveable.name: saveable.restore(restored, restored_shapes=None)
        for saveable, restored in zip(self._saveable_objects,
                                      structured_restored)}
def sharded_filename(filename_tensor, shard, num_shards):
  """Append sharding information to a filename.

  Args:
    filename_tensor: A string tensor.
    shard: Integer. The shard for the filename.
    num_shards: An int Tensor for the number of shards.

  Returns:
    A string tensor carrying the sharded filename.
  """
  sharded = gen_io_ops.sharded_filename(filename_tensor, shard, num_shards)
  return sharded
class MultiDeviceSaver(object):
  """Saves checkpoints directly from multiple devices.

  Note that this is a low-level utility which stores Tensors in the keys
  specified by `SaveableObject`s. Higher-level utilities for object-based
  checkpointing are built on top of it.
  """

  def __init__(self, saveable_objects):
    """Specify a list of `SaveableObject`s to save and restore.

    Args:
      saveable_objects: A list of `SaveableObject`s.
        Objects extending `SaveableObject` will be saved and restored, and
        objects extending `SaveableHook` will be called into at save and
        restore time.

    Raises:
      ValueError: If an entry is neither a `SaveableObject` nor a
        `SaveableHook`.
    """
    self._before_save_callbacks = []
    self._after_restore_callbacks = []
    saveable_objects = list(saveable_objects)
    # Saveables are grouped by the host CPU device of the device that owns
    # them; each group is handled by one _SingleDeviceSaver (one shard).
    saveables_by_device = {}
    for saveable in saveable_objects:
      is_saveable = isinstance(saveable, saveable_object.SaveableObject)
      is_hook = isinstance(saveable, saveable_hook.SaveableHook)
      if not is_saveable and not is_hook:
        raise ValueError(
            "Expected a dictionary of SaveableObjects, got {}."
            .format(saveable))
      # An object may be both a SaveableObject and a SaveableHook; the two
      # branches below are applied independently.
      if is_hook:
        self._before_save_callbacks.append(saveable.before_save)
        self._after_restore_callbacks.append(saveable.after_restore)
      if is_saveable:
        host_device = saveable_object_util.set_cpu0(saveable.device)
        saveables_by_device.setdefault(host_device, []).append(saveable)
    self._single_device_savers = {
        device: _SingleDeviceSaver(saveables)
        for device, saveables in saveables_by_device.items()}

  def to_proto(self):
    """Serializes to a SaverDef referencing the current graph."""
    # The placeholder feeds the checkpoint prefix into the traced save and
    # restore functions referenced by the SaverDef.
    filename_tensor = array_ops.placeholder(
        shape=[], dtype=dtypes.string, name="saver_filename")
    save_tensor = self._traced_save(filename_tensor)
    restore_op = self._traced_restore(filename_tensor).op
    return saver_pb2.SaverDef(
        filename_tensor_name=filename_tensor.name,
        save_tensor_name=save_tensor.name,
        restore_op_name=restore_op.name,
        version=saver_pb2.SaverDef.V2)

  @def_function.function(
      input_signature=(tensor_spec.TensorSpec(shape=(), dtype=dtypes.string),),
      autograph=False)
  def _traced_save(self, file_prefix):
    # Traced wrapper around save(): returns `file_prefix` once saving is
    # complete (the identity depends on the save op).
    save_op = self.save(file_prefix)
    with ops.device("cpu:0"):
      with ops.control_dependencies([save_op]):
        return array_ops.identity(file_prefix)

  @def_function.function(
      input_signature=(tensor_spec.TensorSpec(shape=(), dtype=dtypes.string),),
      autograph=False)
  def _traced_restore(self, file_prefix):
    # Traced wrapper around restore(): returns `file_prefix` once all
    # restore ops have run.
    restore_ops = self.restore(file_prefix)
    with ops.device("cpu:0"):
      with ops.control_dependencies(restore_ops.values()):
        return array_ops.identity(file_prefix)

  def save(self, file_prefix, options=None):
    """Save the saveable objects to a checkpoint with `file_prefix`.

    Args:
      file_prefix: A string or scalar string Tensor containing the prefix to
        save under.
      options: Optional `CheckpointOptions` object.

    Returns:
      An `Operation`, or None when executing eagerly.
    """
    options = options or checkpoint_options.CheckpointOptions()
    for callback in self._before_save_callbacks:
      callback()
    # IMPLEMENTATION DETAILS: most clients should skip.
    #
    # Suffix for any well-formed "checkpoint_prefix", when sharded.
    # Transformations:
    # * Users pass in "save_path" in save() and restore().  Say "myckpt".
    # * checkpoint_prefix gets fed <save_path><sharded_suffix>.
    #
    # Example:
    #   During runtime, a temporary directory is first created, which contains
    #   files
    #
    #     <train dir>/myckpt_temp/
    #        part-?????-of-?????{.index, .data-00000-of-00001}
    #
    #   Before .save() finishes, they will be (hopefully, atomically) renamed to
    #
    #     <train dir>/
    #        myckpt{.index, .data-?????-of-?????}
    #
    #   Filesystems with eventual consistency (such as S3), don't need a
    #   temporary location. Using a temporary directory in those cases might
    #   cause situations where files are not available during copy.
    #
    # Users only need to interact with the user-specified prefix, which is
    # "<train dir>/myckpt" in this case.  Save() and Restore() work with the
    # prefix directly, instead of any physical pathname.  (On failure and
    # subsequent restore, an outdated and orphaned temporary directory can be
    # safely removed.)
    with ops.device("CPU"):
      # S3 targets skip the temporary-directory dance (see comment above).
      sharded_suffix = array_ops.where(
          string_ops.regex_full_match(file_prefix, "^s3://.*"),
          constant_op.constant(".part"),
          constant_op.constant("_temp_%s/part" % uuid.uuid4().hex))
      tmp_checkpoint_prefix = string_ops.string_join(
          [file_prefix, sharded_suffix])

    def save_fn():
      # Writes one shard per single-device saver, then merges the shard
      # metadata into the final checkpoint at `file_prefix`.
      num_shards = len(self._single_device_savers)
      sharded_saves = []
      sharded_prefixes = []
      num_shards_tensor = constant_op.constant(num_shards, name="num_shards")
      last_device = None
      for shard, (device, saver) in enumerate(
          sorted(self._single_device_savers.items())):
        last_device = device
        with ops.device(saveable_object_util.set_cpu0(device)):
          shard_prefix = sharded_filename(tmp_checkpoint_prefix, shard,
                                          num_shards_tensor)
        sharded_prefixes.append(shard_prefix)
        with ops.device(device):
          # _SingleDeviceSaver will use the CPU device when necessary, but
          # initial read operations should be placed on the SaveableObject's
          # device.
          sharded_saves.append(saver.save(shard_prefix, options))
      with ops.control_dependencies(sharded_saves):
        # Merge on the io_device if specified, otherwise co-locates the merge op
        # with the last device used.
        merge_device = (
            options.experimental_io_device or
            saveable_object_util.set_cpu0(last_device))
        with ops.device(merge_device):
          # V2 format write path consists of a metadata merge step.  Once
          # merged, attempts to delete the temporary directory,
          # "<user-fed prefix>_temp".
          return gen_io_ops.merge_v2_checkpoints(
              sharded_prefixes, file_prefix, delete_old_dirs=True)

    # Since this causes a function re-trace on each save, limit this to the
    # cases where it is needed: eager and when there are multiple tasks/single
    # device savers.  Note that the retrace is needed to ensure we pick up the
    # latest values of options like experimental_io_device.
    if context.executing_eagerly() and len(self._single_device_savers) > 1:
      # Explicitly place the identity op on the first device.
      @def_function.function(experimental_compile=False)
      def tf_function_save():
        save_fn()
      # The traced function's result is discarded; in this eager-only branch
      # save() returns None, as documented above.
      tf_function_save()
    else:
      return save_fn()

  def restore(self, file_prefix, options=None):
    """Restore the saveable objects from a checkpoint with `file_prefix`.

    Args:
      file_prefix: A string or scalar string Tensor containing the prefix for
        files to read from.
      options: Optional `CheckpointOptions` object.

    Returns:
      A dictionary mapping from SaveableObject names to restore operations.
    """
    options = options or checkpoint_options.CheckpointOptions()

    def restore_fn():
      restore_ops = {}
      # Sort by device name to avoid propagating non-deterministic dictionary
      # ordering in some Python versions.
      for device, saver in sorted(self._single_device_savers.items()):
        with ops.device(device):
          restore_ops.update(saver.restore(file_prefix, options))
      return restore_ops

    # Since this causes a function re-trace on each restore, limit this to the
    # cases where it is needed: eager and when there are multiple tasks/single
    # device savers.  Note that the retrace is needed to ensure we pick up the
    # latest values of options like experimental_io_device.
    if context.executing_eagerly() and len(self._single_device_savers) > 1:
      first_device, _ = list(self._single_device_savers.items())[0]
      @def_function.function(experimental_compile=False)
      def tf_function_restore():
        restore_ops = restore_fn()
        restore_tensors = {}
        # tf.functions must return tensors, thus we use control dependencies so
        # that we can return a tensor which depends on the given op.
        with ops.device(saveable_object_util.set_cpu0(first_device)):
          for name, op in restore_ops.items():
            with ops.control_dependencies([op]):
              restore_tensors[name] = array_ops.identity(file_prefix)
        return restore_tensors
      restore_ops = tf_function_restore()
    else:
      restore_ops = restore_fn()

    for callback in self._after_restore_callbacks:
      callback()
    return restore_ops
| aldian/tensorflow | tensorflow/python/training/saving/functional_saver.py | Python | apache-2.0 | 13,910 |
#!/usr/bin/env python
# Copyright (c) 2011-2018, wradlib developers.
# Distributed under the MIT License. See LICENSE.txt for more info.
"""
Read RADOLAN and DX
^^^^^^^^^^^^^^^^^^^
Reading DX and RADOLAN data from German Weather Service
.. autosummary::
:nosignatures:
:toctree: generated/
read_dx
read_radolan_composite
get_radolan_filehandle
read_radolan_header
parse_dwd_composite_header
read_radolan_binary_array
decode_radolan_runlength_array
"""
# standard libraries
from __future__ import absolute_import
import datetime as dt
try:
from StringIO import StringIO
import io
except ImportError:
from io import StringIO # noqa
import io
import re
import warnings
# site packages
import numpy as np
from .. import util as util
# current DWD file naming pattern (2008) for example:
# raa00-dx_10488-200608050000-drs---bin
# capture groups: (1) product type, (2) 5-digit station/location id,
# (3) timestamp, (4) location abbreviation
dwdpattern = re.compile('raa..-(..)[_-]([0-9]{5})-([0-9]*)-(.*?)---bin')
def _get_timestamp_from_filename(filename):
    """Helper doing the actual work of `get_dx_timestamp`."""
    stamp = dwdpattern.search(filename).group(3)
    if len(stamp) == 10:
        # two-digit year -> prepend century
        stamp = '20' + stamp
    return dt.datetime.strptime(stamp, '%Y%m%d%H%M')
def get_dx_timestamp(name):
    """Convert a dx-timestamp (part of a dx-product filename) to a
    python datetime object.

    Parameters
    ----------
    name : string
        representing a DWD product name

    Returns
    -------
    time : timezone-aware datetime.datetime object
    """
    naive = _get_timestamp_from_filename(name)
    return naive.replace(tzinfo=util.UTC())
def unpack_dx(raw):
    """Remove the DWD-DX-product bit-13 zero run-length packing.

    Values with bit 13 set encode a run of zeros whose length is stored
    in the low 12 bits; all other values carry data in their low 12
    bits and pass through unchanged.
    """
    # data is encoded in the first 12 bits
    data_mask = 4095
    # the zero compression flag is bit 13
    zero_flag = 4096
    # indices of all zero-compression markers
    flagged = np.where(raw & zero_flag)[0]
    # no compression markers at all: return the raw beam unchanged
    if flagged.size == 0:
        # a full, uncompressed beam of 128 range bins is expected here
        assert raw.size == 128
        return raw
    # everything up to the first marker is plain data
    beam = list(raw[0:flagged[0]])
    # for every marker except the last: expand its zero run, then copy
    # the plain data up to the next marker
    for cur, following in zip(flagged[:-1], flagged[1:]):
        beam.extend([0] * (raw[cur] & data_mask))
        beam.extend(raw[cur + 1:following])
    # last marker: expand zeros, then copy the remaining data
    beam.extend([0] * (raw[flagged[-1]] & data_mask))
    beam.extend(raw[flagged[-1] + 1:])
    return np.array(beam)
def parse_dx_header(header):
    """Retrieve and interpret the ASCII header of a DWD DX product file.

    Parameters
    ----------
    header : string
        string representation of DX header
    """
    out = {}
    # RADOLAN product type def
    out["producttype"] = header[0:2]
    # time stamp pieces "ddHHMM" + "mmyy" -> naive datetime ...
    stamp = dt.datetime.strptime(header[2:8] + header[13:17] + "00",
                                 "%d%H%M%m%y%S")
    # ... made aware of its time zone (UTC)
    out["datetime"] = stamp.replace(tzinfo=util.UTC())
    # radar location ID (always 10000 for composites)
    out["radarid"] = header[8:13]
    # positions of the known header tokens
    pos = {token: header.find(token)
           for token in ('BY', 'VS', 'CO', 'CD', 'CS', 'EP', 'MS')}
    out['bytes'] = int(header[pos['BY'] + 2:pos['BY'] + 7])
    out['version'] = header[pos['VS'] + 2:pos['VS'] + 4]
    out['cluttermap'] = int(header[pos['CO'] + 2:pos['CO'] + 3])
    out['dopplerfilter'] = int(header[pos['CD'] + 2:pos['CD'] + 3])
    out['statfilter'] = int(header[pos['CS'] + 2:pos['CS'] + 3])
    # eight 3-character elevation values, one per 45 degree sector
    ep = pos['EP']
    out['elevprofile'] = [float(header[ep + 2 + 3 * i:ep + 5 + 3 * i])
                          for i in range(8)]
    # free-text message: 3-digit length followed by the text itself
    ms = pos['MS']
    msglen = int(header[ms + 2:ms + 5])
    out['message'] = header[ms + 5:ms + 5 + msglen]
    return out
def read_dx(filename):
    """Data reader for German Weather Service DX product raw radar data files.

    This product uses a simple algorithm to compress zero values to reduce data
    file size.

    Notes
    -----
    While the format appears to be well defined, there have been reports on DX-
    files that seem to produce errors. e.g. while one file usually contains a
    360 degree by 128 1km range bins, there are files, that contain 361 beams.
    Also, while usually azimuths are stored monotonously in ascending order,
    this is not guaranteed by the format. This routine does not (yet) check
    for this and directly returns the data in the order found in the file.
    If you are in doubt, check the 'azim' attribute.
    Be aware that this function does no extensive checking on its output.
    If e.g. beams contain different numbers of range bins, the resulting data
    will not be a 2-D array but a 1-D array of objects, which will most
    probably break calling code. It was decided to leave the handling of these
    (hopefully) rare events to the user, who might still be able to retrieve
    some reasonable data, instead of raising an exception, making it impossible
    to get any data from a file containing errors.

    Parameters
    ----------
    filename : string
        binary file of DX raw data

    Returns
    -------
    data : :func:`numpy:numpy.array`
        of image data [dBZ]; shape (360,128)
    attributes : dict
        dictionary of attributes - currently implemented keys:

        - 'azim' - azimuths np.array of shape (360,)
        - 'elev' - elevations (1 per azimuth); np.array of shape (360,)
        - 'clutter' - clutter mask; boolean array of same shape as `data`;
          corresponds to bit 15 set in each dataset.
        - 'bytes'- the total product length (including header).
          Apparently, this value may be off by one byte for unknown reasons
        - 'version'- a product version string - use unknown
        - 'cluttermap' - number of the (DWD internal) cluttermap used
        - 'dopplerfilter' - number of the dopplerfilter used (DWD internal)
        - 'statfilter' - number of a statistical filter used (DWD internal)
        - 'elevprofile' - as stated in the format description, this list
          indicates the elevations in the eight 45 degree sectors. These
          sectors need not start at 0 degrees north, so it is advised to
          explicitly evaluate the `elev` attribute, if elevation information
          is needed.
        - 'message' - additional text stored in the header.

    Examples
    --------
    See :ref:`/notebooks/fileio/wradlib_reading_dx.ipynb`.
    """
    # bit 14: start-of-beam marker; bits 1-12: data payload
    azimuthbitmask = 2 ** (14 - 1)
    databitmask = 2 ** (13 - 1) - 1
    # bit 16: clutter flag; bits 1-13: data incl. zero-compression flag
    clutterflag = 2 ** 15
    dataflag = 2 ** 13 - 1
    # plain or gzip-compressed input is handled transparently
    f = get_radolan_filehandle(filename)
    # header string for later processing
    header = ''
    atend = False
    # read header
    while True:
        mychar = f.read(1)
        # 0x03 signals the end of the header but sometimes there might be
        # an additional 0x03 char after that
        if mychar == b'\x03':
            atend = True
        if mychar != b'\x03' and atend:
            break
        header += str(mychar.decode())
    attrs = parse_dx_header(header)
    # position file at end of header
    f.seek(len(header))
    # read number of bytes as declared in the header
    # intermediate fix:
    # if product length is uneven but header is even (e.g. because it has two
    # chr(3) at the end, read one byte less
    buflen = attrs['bytes'] - len(header)
    if (buflen % 2) != 0:
        # make sure that this is consistent with our assumption
        # i.e. contact DWD again, if DX files show up with uneven byte lengths
        # *and* only one 0x03 character
        # assert header[-2] == chr(3)
        buflen -= 1
    buf = f.read(buflen)
    # we can interpret the rest directly as a 1-D array of 16 bit unsigned ints
    raw = np.frombuffer(buf, dtype='uint16')
    # reading finished, close file, but only if we opened it.
    if isinstance(f, io.IOBase):
        f.close()
    # a new ray/beam starts with bit 14 set
    # careful! where always returns its results in a tuple, so in order to get
    # the indices we have to retrieve element 0 of this tuple
    newazimuths = np.where(raw == azimuthbitmask)[0]  # contact Thomas!
    # for the following calculations it is necessary to have the end of the
    # data as the last index
    newazimuths = np.append(newazimuths, len(raw))
    # initialize our list of rays/beams
    beams = []
    # initialize our list of elevations
    elevs = []
    # initialize our list of azimuths
    azims = []
    # iterate over all beams
    for i in range(newazimuths.size - 1):
        # unpack zeros; words 1 and 2 after the marker hold azimuth and
        # elevation (in tenths of a degree), the rest is the packed beam
        beam = unpack_dx(raw[newazimuths[i] + 3:newazimuths[i + 1]])
        beams.append(beam)
        elevs.append((raw[newazimuths[i] + 2] & databitmask) / 10.)
        azims.append((raw[newazimuths[i] + 1] & databitmask) / 10.)
    beams = np.array(beams)
    # attrs = {}
    attrs['elev'] = np.array(elevs)
    attrs['azim'] = np.array(azims)
    attrs['clutter'] = (beams & clutterflag) != 0
    # converting the DWD rvp6-format into dBZ data and return as numpy array
    # together with attributes
    return (beams & dataflag) * 0.5 - 32.5, attrs
def get_radolan_header_token():
    """Return dict with known header token of radolan composites

    Returns
    -------
    head : dict
        with known header token, value set to None
    """
    tokens = ('BY', 'VS', 'SW', 'PR', 'INT', 'GP', 'MS', 'LV',
              'CS', 'MX', 'BG', 'ST', 'VV', 'MF', 'QN', 'VR',
              'U')
    return dict.fromkeys(tokens)
def get_radolan_header_token_pos(header):
    """Get Token and positions from DWD radolan header

    Parameters
    ----------
    header : string
        (ASCII header)

    Returns
    -------
    head : dictionary
        with found header tokens and positions
    """
    # last occurrence of each known token (-1 -> not present)
    found = {token: header.rfind(token)
             for token in get_radolan_header_token()}
    # positions of all tokens that are actually present
    present = [idx for idx in found.values() if idx > -1]
    head = {}
    for token, idx in found.items():
        if idx == -1:
            head[token] = None
            continue
        # a token's value extends up to the next token (or header end)
        start = idx + len(token)
        later = [p for p in present if p > idx]
        stop = min(later) if later else len(header)
        head[token] = (start, stop)
    return head
def parse_dwd_composite_header(header):
    """Parses the ASCII header of a DWD quantitative composite file

    Parameters
    ----------
    header : string
        (ASCII header)

    Returns
    -------
    output : dictionary
        of metadata retrieved from file header
    """
    # empty container
    out = {}
    # RADOLAN product type def
    out["producttype"] = header[0:2]
    # file time stamp as Python datetime object
    out["datetime"] = dt.datetime.strptime(header[2:8] + header[13:17] + "00",
                                           "%d%H%M%m%y%S")
    # radar location ID (always 10000 for composites)
    out["radarid"] = header[8:13]
    # get dict of header token with positions
    head = get_radolan_header_token_pos(header)
    # iterate over token and fill output dict accordingly;
    # NOTE(review): the 'U' branch rescales the value parsed by 'INT',
    # relying on dict insertion order ('INT' before 'U') — holds on
    # Python 3.7+; verify if Python 2 support is still required
    for k, v in head.items():
        if not v:
            continue
        if k == 'BY':
            # product length minus header length (and the trailing ETX byte)
            out['datasize'] = int(header[v[0]:v[1]]) - len(header) - 1
        elif k == 'VS':
            out["maxrange"] = {0: "100 km and 128 km (mixed)",
                               1: "100 km",
                               2: "128 km",
                               3: "150 km"}.get(int(header[v[0]:v[1]]),
                                                "100 km")
        elif k == 'SW':
            out["radolanversion"] = header[v[0]:v[1]].strip()
        elif k == 'PR':
            # e.g. 'E-02' -> float('1E-02') == 0.01
            out["precision"] = float('1' + header[v[0]:v[1]].strip())
        elif k == 'INT':
            # interval given in minutes -> seconds
            out["intervalseconds"] = int(header[v[0]:v[1]]) * 60
        elif k == 'U':
            out["intervalunit"] = int(header[v[0]:v[1]])
            if out["intervalunit"] == 1:
                # unit flag 1 rescales by 1440 (minutes per day) —
                # presumably a daily interval; see format description
                out["intervalseconds"] *= 1440
        elif k == 'GP':
            # grid dimensions, e.g. '900x900'
            dimstrings = header[v[0]:v[1]].strip().split("x")
            out["nrow"] = int(dimstrings[0])
            out["ncol"] = int(dimstrings[1])
        elif k == 'BG':
            # grid dimensions without separator; split the string in half
            dimstrings = header[v[0]:v[1]]
            dimstrings = (dimstrings[:len(dimstrings) // 2],
                          dimstrings[len(dimstrings) // 2:])
            out["nrow"] = int(dimstrings[0])
            out["ncol"] = int(dimstrings[1])
        elif k == 'LV':
            # level count followed by the level values;
            # use builtin int(): the np.int alias was deprecated in
            # NumPy 1.20 and removed in NumPy 1.24
            lv = header[v[0]:v[1]].split()
            out['nlevel'] = int(lv[0])
            out['level'] = np.array(lv[1:]).astype('float')
        elif k == 'MS':
            # radar locations given as '<loc1,loc2,...>'
            locationstring = (header[v[0]:].strip().split("<")[1].
                              split(">")[0])
            out["radarlocations"] = locationstring.split(",")
        elif k == 'ST':
            locationstring = (header[v[0]:].strip().split("<")[1].
                              split(">")[0])
            out["radardays"] = locationstring.split(",")
        elif k == 'CS':
            out['indicator'] = {0: "near ground level",
                                1: "maximum",
                                2: "tops"}.get(int(header[v[0]:v[1]]))
        elif k == 'MX':
            out['imagecount'] = int(header[v[0]:v[1]])
        elif k == 'VV':
            out['predictiontime'] = int(header[v[0]:v[1]])
        elif k == 'MF':
            out['moduleflag'] = int(header[v[0]:v[1]])
        elif k == 'QN':
            out['quantification'] = int(header[v[0]:v[1]])
        elif k == 'VR':
            out['reanalysisversion'] = header[v[0]:v[1]].strip()
    return out
def decode_radolan_runlength_line(line, attrs):
    """Decodes one line of runlength coded binary data of DWD
    composite file and returns decoded array

    Parameters
    ----------
    line : :func:`numpy:numpy.array`
        of byte values
    attrs : dict
        dictionary of attributes derived from file header
        (needs 'ncol' and 'nodataflag')

    Returns
    -------
    arr : :func:`numpy:numpy.array`
        of decoded values
    """
    # byte '0' is line number, we don't need it
    # so we start with offset byte,
    lo = 1
    byte = line[lo]
    # line empty condition, lf directly behind line number
    if byte == 10:
        return np.ones(attrs['ncol'], dtype=np.uint8) * attrs['nodataflag']
    offset = byte - 16
    # check if offset byte is 255 and take next byte(s)
    # also for the offset
    while byte == 255:
        lo += 1
        byte = line[lo]
        offset += byte - 16
    # just take the rest
    dline = line[lo + 1:]
    # the "offset pixel" are "not measured" values, so set them to 'nodata';
    # initializing before the loop also keeps `arr` defined when the line
    # carries no data bytes at all (previously raised UnboundLocalError)
    arr = np.ones(offset, dtype=np.uint8) * attrs['nodataflag']
    # iterate over line bytes until lf (10) is reached;
    # high nibble is the run width, low nibble the value
    for byte in dline:
        if byte == 10:
            break
        width = (byte & 0xF0) >> 4
        val = byte & 0x0F
        arr = np.append(arr, np.ones(width, dtype=np.uint8) * val)
    trailing = attrs['ncol'] - len(arr)
    if trailing > 0:
        # pad the line to full width with 'nodata'
        arr = np.append(arr, np.ones(trailing,
                                     dtype=np.uint8) * attrs['nodataflag'])
    elif trailing < 0:
        # line decoded longer than the grid is wide: truncate the *decoded*
        # array (the previous code returned a slice of the still-encoded
        # bytes here, which was clearly wrong)
        arr = arr[:attrs['ncol']]
    return arr
def read_radolan_runlength_line(fid):
    """Reads one line of runlength coded binary data of DWD
    composite file and returns it as numpy array

    Parameters
    ----------
    fid : object
        file/buffer id

    Returns
    -------
    line : :func:`numpy:numpy.array`
        of coded values, or None at the end-of-transfer marker (0x04)
    """
    raw = fid.readline()
    # end-of-transfer marker
    if raw == b'\x04':
        return None
    return np.frombuffer(raw, np.uint8).astype(np.uint8)
def decode_radolan_runlength_array(binarr, attrs):
    """Decodes the binary runlength coded section from DWD composite
    file and return decoded numpy array with correct shape

    Parameters
    ----------
    binarr : string
        Buffer
    attrs : dict
        Attribute dict of file header

    Returns
    -------
    arr : :func:`numpy:numpy.array`
        of decoded values
    """
    buf = io.BytesIO(binarr)
    # read and decode the first line
    rows = [decode_radolan_runlength_line(read_radolan_runlength_line(buf),
                                          attrs)]
    # read and decode following lines until the end-of-transfer marker
    line = read_radolan_runlength_line(buf)
    while line is not None:
        rows.append(decode_radolan_runlength_line(line, attrs))
        line = read_radolan_runlength_line(buf)
    # the first line read is the top line -> flip upside down
    return np.flipud(np.vstack(rows))
def read_radolan_binary_array(fid, size):
    """Read binary data from file given by filehandle

    The handle is closed after reading.

    Parameters
    ----------
    fid : object
        file handle
    size : int
        number of bytes to read

    Returns
    -------
    binarr : string
        array of binary data

    Raises
    ------
    IOError
        if fewer than `size` bytes could be read
    """
    payload = fid.read(size)
    fid.close()
    if len(payload) == size:
        return payload
    raise IOError('{0}: File corruption while reading {1}! \nCould not '
                  'read enough data!'.format(__name__, fid.name))
def get_radolan_filehandle(fname):
    """Opens radolan file and returns file handle

    Both plain binary and gzip-compressed RADOLAN files are handled;
    gzip is tried first.

    Parameters
    ----------
    fname : string
        filename

    Returns
    -------
    f : object
        filehandle
    """
    gzip = util.import_optional('gzip')
    # try gzip first: reading one byte forces the gzip magic-number check
    f = gzip.open(fname, 'rb')
    try:
        f.read(1)
    except IOError:
        # not gzip-compressed: close the gzip wrapper (the previous code
        # leaked the underlying file object here) and fall back to a
        # plain binary open
        f.close()
        f = open(fname, 'rb')
        f.read(1)
    # rewind file
    f.seek(0, 0)
    return f
def read_radolan_header(fid):
    """Reads radolan ASCII header and returns it as string

    The header ends at the first ETX byte (0x03), which is consumed but
    not returned.

    Parameters
    ----------
    fid : object
        file handle

    Returns
    -------
    header : string
    """
    chunks = []
    while True:
        char = fid.read(1)
        if not char:
            raise EOFError('Unexpected EOF detected while reading '
                           'RADOLAN header')
        if char == b'\x03':
            return ''.join(chunks)
        chunks.append(char.decode())
def read_radolan_composite(f, missing=-9999, loaddata=True):
    """Read quantitative radar composite format of the German Weather Service

    The quantitative composite format of the DWD (German Weather Service) was
    established in the course of the RADOLAN project and includes several file
    types, e.g. RX, RO, RK, RZ, RP, RT, RC, RI, RG, PC, PG and many, many more.
    (see format description on the RADOLAN project homepage :cite:`DWD2009`).

    At the moment, the national RADOLAN composite is a 900 x 900 grid with 1 km
    resolution and in polar-stereographic projection. There are other grid
    resolutions for different composites (eg. PC, PG)

    Warning
    -------
    This function already evaluates and applies the so-called
    PR factor which is specified in the header section of the RADOLAN files.
    The raw values in an RY file are in the unit 0.01 mm/5min, while
    read_radolan_composite returns values in mm/5min (i. e. factor 100 higher).
    The factor is also returned as part of attrs dictionary under
    keyword "precision".

    Parameters
    ----------
    f : string or file handle
        path to the composite file or file handle
    missing : int
        value assigned to no-data cells
    loaddata : bool
        True | False, If False function returns (None, attrs)

    Returns
    -------
    output : tuple
        tuple of two items (data, attrs):

        - data : :func:`numpy:numpy.array` of shape (number of rows,
          number of columns)
        - attrs : dictionary of metadata information from the file header

    Examples
    --------
    See :ref:`/notebooks/radolan/radolan_format.ipynb`.
    """
    NODATA = missing
    mask = 0xFFF  # max value integer
    # If a file name is supplied, get a file handle
    try:
        header = read_radolan_header(f)
    except AttributeError:
        f = get_radolan_filehandle(f)
        header = read_radolan_header(f)
    attrs = parse_dwd_composite_header(header)
    if not loaddata:
        f.close()
        return None, attrs
    attrs["nodataflag"] = NODATA
    if not attrs["radarid"] == "10000":
        # fixed garbled message: concatenation used to read
        # "function ewradlib.io.read_RADOLAN_composit"
        warnings.warn("WARNING: You are using function "
                      "wradlib.io.read_radolan_composite for a non "
                      "composite file.\n "
                      "This might work...but please check the validity "
                      "of the results")
    # read the actual data
    indat = read_radolan_binary_array(f, attrs['datasize'])
    if attrs['producttype'] in ['RX', 'EX', 'WX']:
        # convert to 8bit integer
        arr = np.frombuffer(indat, np.uint8).astype(np.uint8)
        # value 250 marks no-data cells, 249 marks clutter
        arr = np.where(arr == 250, NODATA, arr)
        attrs['cluttermask'] = np.where(arr == 249)[0]
    elif attrs['producttype'] in ['PG', 'PC']:
        # PG/PC composites are runlength encoded
        arr = decode_radolan_runlength_array(indat, attrs)
    else:
        # convert to 16-bit integers
        arr = np.frombuffer(indat, np.uint16).astype(np.uint16)
        # evaluate bits 13, 14, 15 and 16
        attrs['secondary'] = np.where(arr & 0x1000)[0]
        nodata = np.where(arr & 0x2000)[0]
        negative = np.where(arr & 0x4000)[0]
        attrs['cluttermask'] = np.where(arr & 0x8000)[0]
        # mask out the last 4 bits
        arr &= mask
        # consider negative flag if product is RD (differences from adjustment)
        if attrs['producttype'] == 'RD':
            # NOT TESTED, YET
            arr[negative] = -arr[negative]
        # apply precision factor
        # this promotes arr to float if precision is float
        arr = arr * attrs['precision']
        # set nodata value
        arr[nodata] = NODATA
    # anyway, bring it into right shape
    arr = arr.reshape((attrs['nrow'], attrs['ncol']))
    return arr, attrs
| kmuehlbauer/wradlib | wradlib/io/radolan.py | Python | mit | 23,177 |
################################################################################
#
# This program is part of the DellMon Zenpack for Zenoss.
# Copyright (C) 2009, 2010 Egor Puzanov.
#
# This program can be used under the GNU General Public License version 2
# You can find full information here: http://www.zenoss.com/oss
#
################################################################################
__doc__="""DellExpansionCardMap
DellExpansionCardMap maps the pCIDeviceTable table to cards objects
$Id: DellExpansionCardMap.py,v 1.6 2010/11/11 00:07:51 egor Exp $"""
__version__ = '$Revision: 1.6 $'[11:-2]
from Products.DataCollector.plugins.CollectorPlugin import SnmpPlugin, GetTableMap
from Products.DataCollector.plugins.DataMaps import MultiArgs
class DellExpansionCardMap(SnmpPlugin):
    """Map Dell System Management PCI table to model.

    Combines the PCI device table with the storage controller, BMC LAN
    interface and application tables so that storage and remote-access
    controllers are modeled with their specific component classes.
    """

    maptype = "DellExpansionCardMap"
    modname = "ZenPacks.community.DellMon.DellExpansionCard"
    relname = "cards"
    compname = "hw"

    snmpGetTableMaps = (
        GetTableMap('pciTable',
            '.1.3.6.1.4.1.674.10892.1.1100.80.1',
            {
                '.5': 'status',
                '.6': 'slot',
                '.8': '_manuf',
                '.9': '_model',
            }
        ),
        GetTableMap('storageCntlrTable',
            '.1.3.6.1.4.1.674.10893.1.20.130.1.1',
            {
                '.1': 'snmpindex',
                '.2': '_model',
                '.3': '_manuf',
                '.4': 'controllerType',
                '.8': 'FWRev',
                '.9': '_cacheSizeM',
                '.10': 'cacheSize',
                '.38': 'status',
                '.41': 'SWVer',
                '.42': 'slot',
                '.43': 'role',
            }
        ),
        GetTableMap('bmcLANInterfaceTable',
            '.1.3.6.1.4.1.674.10892.1.1900.30.1',
            {
                '.9': 'ipaddress',
                '.10': 'subnetmask',
                '.12': 'macaddress',
            }
        ),
        GetTableMap('applicationTable',
            '.1.3.6.1.4.1.674.10899.1.6.1',
            {
                '.4': 'ver',
                '.5': 'name',
            }
        ),
    )

    # numeric controller type (SNMP) -> human readable name
    controllerTypes = {
        1: 'SCSI',
        2: 'PowerVault 660F',
        3: 'PowerVault 662F',
        4: 'IDE',
        5: 'SATA',
        6: 'SAS',
    }

    def process(self, device, results, log):
        """collect snmp information from this device"""
        log.info('processing %s for device %s', self.name(), device.id)
        rm = self.relMap()
        getdata, tabledata = results
        # index storage controllers by their model string with spaces and
        # '/' removed, so they can be matched against PCI model strings below
        cntlrs = {}
        # identity translation table; only the deletechars argument of the
        # Python 2 str.translate() call matters here
        ttable = ''.join(chr(x) for x in range(256))
        for oid, cntlr in tabledata.get('storageCntlrTable', {}).iteritems():
            cntlr['snmpindex'] = oid.strip('.')
            cntlrs[cntlr['_model'].translate(ttable, ' /')] = cntlr
        # pick up the first (usually only) BMC LAN interface row, if any
        drac = {}
        for drac in tabledata.get('bmcLANInterfaceTable', {}).values():
            break
        # enrich the DRAC data with firmware/driver versions from the
        # installed application inventory
        for app in tabledata.get('applicationTable', {}).values():
            if 'DRAC' in app.get('name', ''):
                drac['FWRev'] = app.get('ver', '')
            elif app.get('name', '').startswith('Dell OS Drivers Pack'):
                drac['SWVer'] = app.get('ver', '')
        for oid, card in tabledata.get('pciTable', {}).iteritems():
            try:
                scntlr = cntlrs.get(
                    card['_model'].translate(ttable, ' /-'), None)
                if scntlr:
                    # PCI card matches a storage controller row
                    om = self.objectMap(scntlr)
                    om.modname = "ZenPacks.community.DellMon.DellStorageCntlr"
                    om.controllerType = self.controllerTypes.get(
                        getattr(om, 'controllerType', 0), 'Unknown')
                    # _cacheSizeM is in MB, cacheSize in bytes
                    om.cacheSize = (getattr(om, '_cacheSizeM', 0) * 1048576 +
                                    getattr(om, 'cacheSize', 0))
                    om.slot = card['slot']
                elif ('DRAC' in card['_model'] or
                        card['_model'].startswith('Remote Access Controller')):
                    # both spellings denote a remote access controller;
                    # previously handled by two duplicated branches
                    card.update(drac)
                    om = self.objectMap(card)
                    om.modname = \
                        "ZenPacks.community.DellMon.DellRemoteAccessCntlr"
                    if hasattr(om, 'macaddress'):
                        om.macaddress = self.asmac(om.macaddress)
                    om.snmpindex = oid.strip('.')
                else:
                    # plain expansion card
                    om = self.objectMap(card)
                    om.snmpindex = oid.strip('.')
                om.id = self.prepId("pci%s" % om.slot)
                om._manuf = getattr(om, '_manuf',
                                    'Unknown').split('(')[0].strip()
                om.setProductKey = MultiArgs(om._model, om._manuf)
            except AttributeError:
                continue
            rm.append(om)
        return rm
| zenoss/Community-Zenpacks | ZenPacks.community.DellMon/ZenPacks/community/DellMon/modeler/plugins/community/snmp/DellExpansionCardMap.py | Python | gpl-2.0 | 5,452 |
"""Build a "Fresh Tomatoes" movie trailer page.

Each movie is described by a title, storyline, poster image URL and a
YouTube trailer URL; the assembled list is handed to
``fresh_tomatoes.open_movies_page`` which renders and opens the page.
"""
import media
import fresh_tomatoes
# Instantiate a media.Movie object for each movie
# Declare title, storyline, poster image, and trailer url for each movie
super_troopers = media.Movie("Super Troopers",
                             "Five Vermont state troopers, avid pranksters"
                             " with a knack for screwing up,"
                             " try to save their jobs and out-do the local"
                             " police department by solving a crime.",
                             "https://images-na.ssl-images-amazon.com/images/M"
                             "/MV5BYzAyOTZjZDItZjNiYy00YTA3LWEyYWMtZTA0NmUzYjZ"
                             "hNjg0XkEyXkFqcGdeQXVyMTQxNzMzNDI@._V1_.jpg",
                             "https://www.youtube.com/watch?v=MPhWl_S8ies")
unforgiven = media.Movie("Unforgiven",
                         "Retired Old West gunslinger William Munny"
                         " reluctantly takes on one last job, with the help of"
                         " his old partner and a young man.",
                         "https://images-na.ssl-images-amazon.com/images/M/MV5"
                         "BODM3YWY4NmQtN2Y3Ni00OTg0LWFhZGQtZWE3ZWY4MTJlOWU4XkE"
                         "yXkFqcGdeQXVyNjU0OTQ0OTY@._V1_SY1000_CR0,0,665,1000_"
                         "AL_.jpg",
                         "https://www.youtube.com/watch?v=ftTX4FoBWlE")
dirty_rotten = media.Movie("Dirty Rotten Scoundrels",
                           "Two con men try to settle their rivalry"
                           " by betting on who can swindle a young"
                           " American heiress out of $50,000 first.",
                           "https://images-na.ssl-images-amazon.com"
                           "/images/M/MV5BMTYyNDk2NDE0OV5BMl5BanBnX"
                           "kFtZTcwNjQ0NzQzNA@@._V1_SY1000_CR0,0,66"
                           "6,1000_AL_.jpg",
                           "https://www.youtube.com/watch?v=0ke-v0e3Cd4")
the_matrix = media.Movie("The Matrix",
                         "A computer hacker learns from mysterious rebels"
                         " about the true nature of his reality and his role"
                         " in the war against its controllers.",
                         "https://images-na.ssl-images-amazon.com/images/M/MV5"
                         "BNzQzOTk3OTAtNDQ0Zi00ZTVkLWI0MTEtMDllZjNkYzNjNTc4L2l"
                         "tYWdlXkEyXkFqcGdeQXVyNjU0OTQ0OTY@._V1_SY1000_CR0,0,6"
                         "65,1000_AL_.jpg",
                         "https://www.youtube.com/watch?v=m8e-FF8MsqU")
i_love_you_man = media.Movie("I Love You, Man",
                             "Friendless Peter Klaven goes on a series of"
                             " man-dates to find a Best Man for his wedding."
                             " But, when his insta-bond with his new B.F.F."
                             " puts a strain on his relationship with his"
                             " fiancée, can the trio learn to live happily"
                             " ever after?",
                             "https://images-na.ssl-images-amazon.com/images/M"
                             "/MV5BMTU4MjI5NTEyNV5BMl5BanBnXkFtZTcwNjQ1NTMzMg@"
                             "@._V1_.jpg",
                             "https://www.youtube.com/watch?v=um5DuTLzw-I")
office_space = media.Movie("Office Space",
                           "Three company workers who hate their jobs decide"
                           " to rebel against their greedy boss.",
                           "https://images-na.ssl-images-amazon.com/images/M/M"
                           "V5BOTA5MzQ3MzI1NV5BMl5BanBnXkFtZTgwNTcxNTYxMTE@._V"
                           "1_SY1000_CR0,0,675,1000_AL_.jpg",
                           "https://www.youtube.com/watch?v=dMIrlP61Z9s")
# Add all movies to the movies list
# NOTE: the list order below determines the display order on the page
movies = [dirty_rotten, i_love_you_man, the_matrix, office_space,
          super_troopers, unforgiven]
# Pass the movies list to fresh_tomatoes.py to render the web page
fresh_tomatoes.open_movies_page(movies)
| realomgitsdave/movietrailersite | entertainment_center.py | Python | mit | 4,165 |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import inspect
from pyspark import pandas as ps
from pyspark.pandas.exceptions import PandasNotImplementedError
from pyspark.pandas.missing.window import (
MissingPandasLikeExpanding,
MissingPandasLikeRolling,
MissingPandasLikeExpandingGroupby,
MissingPandasLikeRollingGroupby,
)
from pyspark.testing.pandasutils import PandasOnSparkTestCase, TestUtils
class ExpandingRollingTest(PandasOnSparkTestCase, TestUtils):
    """Check that every missing/unsupported pandas window API on
    pandas-on-Spark Expanding/Rolling objects raises
    PandasNotImplementedError with an informative message."""

    def test_missing(self):
        """Unsupported and deprecated members of Expanding and Rolling."""
        psdf = ps.DataFrame({"a": [1, 2, 3, 4, 5, 6, 7, 8, 9]})
        # Expanding functions
        missing_functions = inspect.getmembers(MissingPandasLikeExpanding, inspect.isfunction)
        unsupported_functions = [
            name for (name, type_) in missing_functions if type_.__name__ == "unsupported_function"
        ]
        for name in unsupported_functions:
            with self.assertRaisesRegex(
                PandasNotImplementedError,
                "method.*Expanding.*{}.*not implemented( yet\\.|\\. .+)".format(name),
            ):
                getattr(psdf.expanding(1), name)()  # Frame
            with self.assertRaisesRegex(
                PandasNotImplementedError,
                "method.*Expanding.*{}.*not implemented( yet\\.|\\. .+)".format(name),
            ):
                getattr(psdf.a.expanding(1), name)()  # Series
        deprecated_functions = [
            name for (name, type_) in missing_functions if type_.__name__ == "deprecated_function"
        ]
        for name in deprecated_functions:
            with self.assertRaisesRegex(
                PandasNotImplementedError, "method.*Expanding.*{}.*is deprecated".format(name)
            ):
                getattr(psdf.expanding(1), name)()  # Frame
            with self.assertRaisesRegex(
                PandasNotImplementedError, "method.*Expanding.*{}.*is deprecated".format(name)
            ):
                getattr(psdf.a.expanding(1), name)()  # Series
        # Rolling functions
        missing_functions = inspect.getmembers(MissingPandasLikeRolling, inspect.isfunction)
        unsupported_functions = [
            name for (name, type_) in missing_functions if type_.__name__ == "unsupported_function"
        ]
        for name in unsupported_functions:
            with self.assertRaisesRegex(
                PandasNotImplementedError,
                "method.*Rolling.*{}.*not implemented( yet\\.|\\. .+)".format(name),
            ):
                getattr(psdf.rolling(1), name)()  # Frame
            with self.assertRaisesRegex(
                PandasNotImplementedError,
                "method.*Rolling.*{}.*not implemented( yet\\.|\\. .+)".format(name),
            ):
                getattr(psdf.a.rolling(1), name)()  # Series
        deprecated_functions = [
            name for (name, type_) in missing_functions if type_.__name__ == "deprecated_function"
        ]
        for name in deprecated_functions:
            with self.assertRaisesRegex(
                PandasNotImplementedError, "method.*Rolling.*{}.*is deprecated".format(name)
            ):
                getattr(psdf.rolling(1), name)()  # Frame
            with self.assertRaisesRegex(
                PandasNotImplementedError, "method.*Rolling.*{}.*is deprecated".format(name)
            ):
                getattr(psdf.a.rolling(1), name)()  # Series
        # Expanding properties
        missing_properties = inspect.getmembers(
            MissingPandasLikeExpanding, lambda o: isinstance(o, property)
        )
        unsupported_properties = [
            name
            for (name, type_) in missing_properties
            if type_.fget.__name__ == "unsupported_property"
        ]
        for name in unsupported_properties:
            with self.assertRaisesRegex(
                PandasNotImplementedError,
                "property.*Expanding.*{}.*not implemented( yet\\.|\\. .+)".format(name),
            ):
                getattr(psdf.expanding(1), name)  # Frame
            with self.assertRaisesRegex(
                PandasNotImplementedError,
                "property.*Expanding.*{}.*not implemented( yet\\.|\\. .+)".format(name),
            ):
                getattr(psdf.a.expanding(1), name)  # Series
        deprecated_properties = [
            name
            for (name, type_) in missing_properties
            if type_.fget.__name__ == "deprecated_property"
        ]
        for name in deprecated_properties:
            with self.assertRaisesRegex(
                PandasNotImplementedError, "property.*Expanding.*{}.*is deprecated".format(name)
            ):
                getattr(psdf.expanding(1), name)  # Frame
            with self.assertRaisesRegex(
                PandasNotImplementedError, "property.*Expanding.*{}.*is deprecated".format(name)
            ):
                getattr(psdf.a.expanding(1), name)  # Series
        # Rolling properties
        missing_properties = inspect.getmembers(
            MissingPandasLikeRolling, lambda o: isinstance(o, property)
        )
        unsupported_properties = [
            name
            for (name, type_) in missing_properties
            if type_.fget.__name__ == "unsupported_property"
        ]
        # NOTE(review): unlike the Expanding property checks above, the
        # property accesses below are written with a trailing call "()";
        # the assertion fires on the getattr itself so the call never
        # executes, but the inconsistency looks unintended — verify.
        for name in unsupported_properties:
            with self.assertRaisesRegex(
                PandasNotImplementedError,
                "property.*Rolling.*{}.*not implemented( yet\\.|\\. .+)".format(name),
            ):
                getattr(psdf.rolling(1), name)()  # Frame
            with self.assertRaisesRegex(
                PandasNotImplementedError,
                "property.*Rolling.*{}.*not implemented( yet\\.|\\. .+)".format(name),
            ):
                getattr(psdf.a.rolling(1), name)()  # Series
        deprecated_properties = [
            name
            for (name, type_) in missing_properties
            if type_.fget.__name__ == "deprecated_property"
        ]
        for name in deprecated_properties:
            with self.assertRaisesRegex(
                PandasNotImplementedError, "property.*Rolling.*{}.*is deprecated".format(name)
            ):
                getattr(psdf.rolling(1), name)()  # Frame
            with self.assertRaisesRegex(
                PandasNotImplementedError, "property.*Rolling.*{}.*is deprecated".format(name)
            ):
                getattr(psdf.a.rolling(1), name)()  # Series

    def test_missing_groupby(self):
        """Unsupported and deprecated members of the groupby variants of
        Expanding and Rolling."""
        psdf = ps.DataFrame({"a": [1, 2, 3, 4, 5, 6, 7, 8, 9]})
        # Expanding functions
        missing_functions = inspect.getmembers(
            MissingPandasLikeExpandingGroupby, inspect.isfunction
        )
        unsupported_functions = [
            name for (name, type_) in missing_functions if type_.__name__ == "unsupported_function"
        ]
        for name in unsupported_functions:
            with self.assertRaisesRegex(
                PandasNotImplementedError,
                "method.*Expanding.*{}.*not implemented( yet\\.|\\. .+)".format(name),
            ):
                getattr(psdf.groupby("a").expanding(1), name)()  # Frame
            with self.assertRaisesRegex(
                PandasNotImplementedError,
                "method.*Expanding.*{}.*not implemented( yet\\.|\\. .+)".format(name),
            ):
                getattr(psdf.a.groupby(psdf.a).expanding(1), name)()  # Series
        deprecated_functions = [
            name for (name, type_) in missing_functions if type_.__name__ == "deprecated_function"
        ]
        for name in deprecated_functions:
            with self.assertRaisesRegex(
                PandasNotImplementedError, "method.*Expanding.*{}.*is deprecated".format(name)
            ):
                getattr(psdf.groupby("a").expanding(1), name)()  # Frame
            with self.assertRaisesRegex(
                PandasNotImplementedError, "method.*Expanding.*{}.*is deprecated".format(name)
            ):
                getattr(psdf.a.groupby(psdf.a).expanding(1), name)()  # Series
        # Rolling functions
        missing_functions = inspect.getmembers(MissingPandasLikeRollingGroupby, inspect.isfunction)
        unsupported_functions = [
            name for (name, type_) in missing_functions if type_.__name__ == "unsupported_function"
        ]
        for name in unsupported_functions:
            with self.assertRaisesRegex(
                PandasNotImplementedError,
                "method.*Rolling.*{}.*not implemented( yet\\.|\\. .+)".format(name),
            ):
                getattr(psdf.groupby("a").rolling(1), name)()  # Frame
            with self.assertRaisesRegex(
                PandasNotImplementedError,
                "method.*Rolling.*{}.*not implemented( yet\\.|\\. .+)".format(name),
            ):
                getattr(psdf.a.groupby(psdf.a).rolling(1), name)()  # Series
        deprecated_functions = [
            name for (name, type_) in missing_functions if type_.__name__ == "deprecated_function"
        ]
        # NOTE(review): the accessors below use psdf.rolling(1) rather than
        # the groupby variants used elsewhere in this method — presumably
        # unintended; verify against the non-groupby test above.
        for name in deprecated_functions:
            with self.assertRaisesRegex(
                PandasNotImplementedError, "method.*Rolling.*{}.*is deprecated".format(name)
            ):
                getattr(psdf.rolling(1), name)()  # Frame
            with self.assertRaisesRegex(
                PandasNotImplementedError, "method.*Rolling.*{}.*is deprecated".format(name)
            ):
                getattr(psdf.a.rolling(1), name)()  # Series
        # Expanding properties
        missing_properties = inspect.getmembers(
            MissingPandasLikeExpandingGroupby, lambda o: isinstance(o, property)
        )
        unsupported_properties = [
            name
            for (name, type_) in missing_properties
            if type_.fget.__name__ == "unsupported_property"
        ]
        for name in unsupported_properties:
            with self.assertRaisesRegex(
                PandasNotImplementedError,
                "property.*Expanding.*{}.*not implemented( yet\\.|\\. .+)".format(name),
            ):
                getattr(psdf.groupby("a").expanding(1), name)()  # Frame
            with self.assertRaisesRegex(
                PandasNotImplementedError,
                "property.*Expanding.*{}.*not implemented( yet\\.|\\. .+)".format(name),
            ):
                getattr(psdf.a.groupby(psdf.a).expanding(1), name)()  # Series
        deprecated_properties = [
            name
            for (name, type_) in missing_properties
            if type_.fget.__name__ == "deprecated_property"
        ]
        # NOTE(review): non-groupby accessors again below — verify.
        for name in deprecated_properties:
            with self.assertRaisesRegex(
                PandasNotImplementedError, "property.*Expanding.*{}.*is deprecated".format(name)
            ):
                getattr(psdf.expanding(1), name)  # Frame
            with self.assertRaisesRegex(
                PandasNotImplementedError, "property.*Expanding.*{}.*is deprecated".format(name)
            ):
                getattr(psdf.a.expanding(1), name)  # Series
        # Rolling properties
        missing_properties = inspect.getmembers(
            MissingPandasLikeRollingGroupby, lambda o: isinstance(o, property)
        )
        unsupported_properties = [
            name
            for (name, type_) in missing_properties
            if type_.fget.__name__ == "unsupported_property"
        ]
        for name in unsupported_properties:
            with self.assertRaisesRegex(
                PandasNotImplementedError,
                "property.*Rolling.*{}.*not implemented( yet\\.|\\. .+)".format(name),
            ):
                getattr(psdf.groupby("a").rolling(1), name)()  # Frame
            with self.assertRaisesRegex(
                PandasNotImplementedError,
                "property.*Rolling.*{}.*not implemented( yet\\.|\\. .+)".format(name),
            ):
                getattr(psdf.a.groupby(psdf.a).rolling(1), name)()  # Series
        deprecated_properties = [
            name
            for (name, type_) in missing_properties
            if type_.fget.__name__ == "deprecated_property"
        ]
        for name in deprecated_properties:
            with self.assertRaisesRegex(
                PandasNotImplementedError, "property.*Rolling.*{}.*is deprecated".format(name)
            ):
                getattr(psdf.rolling(1), name)()  # Frame
            with self.assertRaisesRegex(
                PandasNotImplementedError, "property.*Rolling.*{}.*is deprecated".format(name)
            ):
                getattr(psdf.a.rolling(1), name)()  # Series
if __name__ == "__main__":
    import unittest
    # pull the test cases into the __main__ namespace so unittest.main()
    # can discover them when this file is executed directly
    from pyspark.pandas.tests.test_window import *  # noqa: F401
    try:
        # emit XML reports when the optional xmlrunner package is available
        import xmlrunner  # type: ignore[import]
        testRunner = xmlrunner.XMLTestRunner(output="target/test-reports", verbosity=2)
    except ImportError:
        testRunner = None
    unittest.main(testRunner=testRunner, verbosity=2)
| ueshin/apache-spark | python/pyspark/pandas/tests/test_window.py | Python | apache-2.0 | 13,671 |
import wakefs.config
from tests.utils import rand_len_str
import os
import random
import unittest
class TestConfigFileCreate(unittest.TestCase):
    """Verify that instantiating Config creates its backing file."""

    def test_file_create(self):
        path = "test.cfg"
        # entering/leaving the context manager is enough to create the file
        with wakefs.config.Config(path):
            pass
        self.assertTrue(os.path.exists(path))
        os.remove(path)
class TestConfigAttributes(unittest.TestCase):
    """Exercise attribute get/set/delete semantics of Config."""

    def setUp(self):
        self.testfile = "test.cfg"
        self.config = wakefs.config.Config(self.testfile)

    def test_get_attribute(self):
        # a known attribute must be readable without raising
        self.config.database_uri

    def test_get_wrong_attribute(self):
        # deliberately misspelled attribute must raise AttributeError
        with self.assertRaises(AttributeError):
            self.config.detabase_uri

    def test_set_attribute(self):
        teststr = rand_len_str()
        self.config.test = teststr
        # assertEqual instead of assertTrue(a == b) for clearer failures
        self.assertEqual(self.config.test, teststr)

    def test_del_attribute(self):
        teststr = rand_len_str()
        self.config.test = teststr
        self.assertEqual(self.config.test, teststr)
        del self.config.test
        with self.assertRaises(AttributeError):
            self.config.test

    def tearDown(self):
        self.config.close()
        os.remove(self.testfile)
| authmillenon/wakefs | tests/config.py | Python | mit | 1,222 |
"""Packaging script for python-jumprunpro."""
from setuptools import setup
from os import path

BASE_PATH = path.abspath(path.dirname(__file__))

# Get the long description from the relevant file
with open(path.join(BASE_PATH, 'README.rst'), 'r') as readme:
    long_description = readme.read()

CLASSIFIERS = [
    'Development Status :: 3 - Alpha',
    'Topic :: Utilities',
    'License :: OSI Approved :: MIT License',
    'Intended Audience :: Developers',
    'Programming Language :: Python :: 2',
    'Programming Language :: Python :: 2.7',
    'Programming Language :: Python :: 3',
    'Programming Language :: Python :: 3.3',
    'Programming Language :: Python :: 3.4',
]

INSTALL_REQUIRES = [
    'beautifulsoup4==4.3.2',
    'requests==2.6.2',
    'python-dateutil==2.4.2',
]

setup(
    name='python-jumprunpro',
    version='0.0.2',
    author='Nate Mara',
    author_email='natemara@gmail.com',
    description='Simple python bindings for scraping data from JumpRun Pro',
    long_description=long_description,
    license='MIT',
    test_suite='tests',
    keywords='skydiving manifest',
    url='https://github.com/natemara/jumprunpro-python',
    packages=['jumprun'],
    classifiers=CLASSIFIERS,
    install_requires=INSTALL_REQUIRES,
)
| natemara/jumprunpro-python | setup.py | Python | mit | 1,076 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Escrito por Daniel Fuentes B.
# Modificado por Kenny Meyer
# Licencia: X11/MIT license http://www.opensource.org/licenses/mit-license.php
# ---------------------------
# Importacion de los módulos
# ---------------------------
import pygame
from pygame.locals import *
# -----------
# Constantes
# -----------
# Window dimensions in pixels.
SCREEN_WIDTH = 640
SCREEN_HEIGHT = 480
# ------------------------------
# Clases y Funciones utilizadas
# ------------------------------
# ------------------------------
# Función principal del juego
# ------------------------------
def main():
    """Initialise pygame, open the main window and set its caption."""
    pygame.init()
    # create the window and give it a title
    window = pygame.display.set_mode((SCREEN_WIDTH, SCREEN_HEIGHT))
    pygame.display.set_caption("tutorial pygame parte 1")


if __name__ == "__main__":
    main()
| kennym/fsa2011 | parte_1/ejemplo1.py | Python | mit | 849 |
import vtk
from vtk.util import vtkAlgorithm as vta
from vtk.test import Testing
class TestPythonAlgorithm(Testing.vtkTest):
    """Tests for Python-implemented VTK algorithms, both via
    vtkPythonAlgorithm with a VTKAlgorithm delegate and via
    VTKPythonAlgorithmBase subclassing."""

    def testSource(self):
        """A VTKAlgorithm-based source must reproduce the output (scalars
        and WHOLE_EXTENT) of a plain vtkRTAnalyticSource."""
        class MyAlgorithm(vta.VTKAlgorithm):
            def __init__(self):
                vta.VTKAlgorithm.__init__(self, nInputPorts=0, outputType='vtkImageData')
                # wrapped source whose output this algorithm forwards
                self.Wavelet = vtk.vtkRTAnalyticSource()
            def RequestInformation(self, vtkself, request, inInfo, outInfo):
                self.Wavelet.UpdateInformation()
                wOutInfo = self.Wavelet.GetOutputInformation(0)
                vtkSDDP = vtk.vtkStreamingDemandDrivenPipeline
                outInfo.GetInformationObject(0).Set(vtkSDDP.WHOLE_EXTENT(), wOutInfo.Get(vtkSDDP.WHOLE_EXTENT()), 6)
                return 1
            def RequestData(self, vtkself, request, inInfo, outInfo):
                self.Wavelet.Update()
                out = self.GetOutputData(outInfo, 0)
                out.ShallowCopy(self.Wavelet.GetOutput())
                return 1
        ex = vtk.vtkPythonAlgorithm()
        ex.SetPythonObject(MyAlgorithm())
        ex.Update()
        # reference pipeline to compare against
        w = vtk.vtkRTAnalyticSource()
        w.Update()
        output = ex.GetOutputDataObject(0)
        self.assertEqual(output.GetPointData().GetScalars().GetRange(),\
            w.GetOutput().GetPointData().GetScalars().GetRange())
        vtkSDDP = vtk.vtkStreamingDemandDrivenPipeline
        self.assertEqual(ex.GetOutputInformation(0).Get(vtkSDDP.WHOLE_EXTENT()),\
            w.GetOutputInformation(0).Get(vtkSDDP.WHOLE_EXTENT()))

    def testSource2(self):
        """Same check as testSource, implemented by subclassing
        VTKPythonAlgorithmBase directly."""
        class MyAlgorithm(vta.VTKPythonAlgorithmBase):
            def __init__(self):
                vta.VTKPythonAlgorithmBase.__init__(self, nInputPorts=0, outputType='vtkImageData')
                self.Wavelet = vtk.vtkRTAnalyticSource()
            def RequestInformation(self, request, inInfo, outInfo):
                self.Wavelet.UpdateInformation()
                wOutInfo = self.Wavelet.GetOutputInformation(0)
                vtkSDDP = vtk.vtkStreamingDemandDrivenPipeline
                outInfo.GetInformationObject(0).Set(
                    vtkSDDP.WHOLE_EXTENT(), wOutInfo.Get(vtkSDDP.WHOLE_EXTENT()), 6)
                return 1
            def RequestData(self, request, inInfo, outInfo):
                self.Wavelet.Update()
                out = vtk.vtkImageData.GetData(outInfo)
                out.ShallowCopy(self.Wavelet.GetOutput())
                return 1
        ex = MyAlgorithm()
        ex.Update()
        w = vtk.vtkRTAnalyticSource()
        w.Update()
        output = ex.GetOutputDataObject(0)
        self.assertEqual(output.GetPointData().GetScalars().GetRange(),\
            w.GetOutput().GetPointData().GetScalars().GetRange())
        vtkSDDP = vtk.vtkStreamingDemandDrivenPipeline
        self.assertEqual(ex.GetOutputInformation(0).Get(vtkSDDP.WHOLE_EXTENT()),\
            w.GetOutputInformation(0).Get(vtkSDDP.WHOLE_EXTENT()))

    def testFilter(self):
        """A pass-through VTKAlgorithm filter must copy its input."""
        class MyAlgorithm(vta.VTKAlgorithm):
            def RequestData(self, vtkself, request, inInfo, outInfo):
                inp = self.GetInputData(inInfo, 0, 0)
                out = self.GetOutputData(outInfo, 0)
                out.ShallowCopy(inp)
                return 1
        sphere = vtk.vtkSphereSource()
        ex = vtk.vtkPythonAlgorithm()
        ex.SetPythonObject(MyAlgorithm())
        ex.SetInputConnection(sphere.GetOutputPort())
        ex.Update()
        output = ex.GetOutputDataObject(0)
        ncells = output.GetNumberOfCells()
        self.assertNotEqual(ncells, 0)
        self.assertEqual(ncells, sphere.GetOutput().GetNumberOfCells())
        self.assertEqual(output.GetBounds(), sphere.GetOutput().GetBounds())

    def testFilter2(self):
        """Same pass-through check via VTKPythonAlgorithmBase."""
        class MyAlgorithm(vta.VTKPythonAlgorithmBase):
            def __init__(self):
                vta.VTKPythonAlgorithmBase.__init__(self)
            def RequestData(self, request, inInfo, outInfo):
                inp = self.GetInputData(inInfo, 0, 0)
                out = self.GetOutputData(outInfo, 0)
                out.ShallowCopy(inp)
                return 1
        sphere = vtk.vtkSphereSource()
        ex = MyAlgorithm()
        ex.SetInputConnection(sphere.GetOutputPort())
        ex.Update()
        output = ex.GetOutputDataObject(0)
        ncells = output.GetNumberOfCells()
        self.assertNotEqual(ncells, 0)
        self.assertEqual(ncells, sphere.GetOutput().GetNumberOfCells())
        self.assertEqual(output.GetBounds(), sphere.GetOutput().GetBounds())
| HopeFOAM/HopeFOAM | ThirdParty-0.1/ParaView-5.0.1/VTK/Filters/Python/Testing/Python/TestPythonAlgorithm.py | Python | gpl-3.0 | 4,659 |
# -*- coding: utf-8 -*-
# © 2016 Comunitea - Kiko Sanchez <kiko@comunitea.com>
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from . import account
from . import res_partner_bank
| Comunitea/CMNT_00098_2017_JIM_addons | jim_account/models/__init__.py | Python | agpl-3.0 | 198 |
# -*- coding: utf-8 -*-
"""
AONX Server - Pequeño servidor de Argentum Online.
Copyright (C) 2011 Alejandro Santos <alejolp@alejolp.com.ar>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
# Forces callers to do: import corevars (no names are star-exported)
__all__ = []

# Module-level shared state; all entries start as None and are
# presumably populated during server start-up (assigned elsewhere).
ServerConfig = None # Server configuration.
gameServer = None
forbiddenNames = None
mapData = None # List of maps
objData = None
npcData = None
hechData = None  # presumably spell ("hechizos") data — confirm
| alejolp/argentum-py-server | argentumserver/corevars.py | Python | gpl-3.0 | 1,040 |
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django.utils.translation import ugettext_lazy as _
from facebook import display_album
from models import FacebookGallery
class FacebookGalleryPlugin(CMSPluginBase):
    """CMS plugin that renders a Facebook photo album."""

    model = FacebookGallery
    name = _("Facebook Album Gallery")
    render_template = "cmsplugin_fbgallery/album.html"

    def render(self, context, instance, placeholder):
        """Fetch the configured album and expose it to the template."""
        context.update({
            'object': instance,
            'album': display_album(instance.album_id),
        })
        return context


plugin_pool.register_plugin(FacebookGalleryPlugin)
| justinasjaronis/cmsplugin-fbgallery | cmsplugin_fbgallery/cms_plugins.py | Python | mit | 652 |
# Copyright (c) 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from swift.common.middleware.s3api.utils import Config
class TestS3ApiCfg(unittest.TestCase):
    """Tests for the s3api ``Config`` mapping."""

    def test_config(self):
        # seed the config with typed values
        conf = Config({'a': 'str', 'b': 10, 'c': True})
        # update() with string inputs; the assertions below show they are
        # coerced to the types of the existing values
        conf.update({'a': 'str2', 'b': '100', 'c': 'false'})

        self.assertEqual(conf['a'], 'str2')
        self.assertEqual(conf['b'], 100)
        self.assertEqual(conf['c'], False)


if __name__ == '__main__':
    unittest.main()
| openstack/swift | test/unit/common/middleware/s3api/test_cfg.py | Python | apache-2.0 | 1,196 |
from .. import Provider as PersonProvider
class Provider(PersonProvider):
formats_male = (
"{{first_name_male}} {{last_name}}",
"{{first_name_male}} {{last_name}}",
"{{first_name_male}} {{last_name}}",
"{{first_name_male}} {{last_name}}",
"{{first_name_male}} {{last_name}}-{{last_name}}",
"{{prefix_male}} {{first_name_male}} {{last_name}}",
)
formats_female = (
"{{first_name_female}} {{last_name}}",
"{{first_name_female}} {{last_name}}",
"{{first_name_female}} {{last_name}}",
"{{first_name_female}} {{last_name}}",
"{{first_name_female}} {{last_name}}-{{last_name}}",
"{{prefix_female}} {{first_name_female}} {{last_name}}",
)
formats = formats_male + formats_female
# source: https://www.data.gv.at/katalog/dataset/87fc82a0-0042-49c8-b6f9-2602cd3dc17a
first_names_male = (
"Aaron",
"Adam",
"Adrian",
"Adriano",
"Alan",
"Aleksander",
"Alex",
"Alexandar",
"Alexander",
"Andreas",
"Andrej",
"Angelo",
"Anton",
"Antonio",
"Antonius",
"Arda",
"Armin",
"Aron",
"Arthur",
"Aurelio",
"Axel",
"Bastian",
"Ben",
"Benedict",
"Benedikt",
"Beni",
"Benjamin",
"Bernhard",
"Boris",
"Bruno",
"Calvin",
"Carl",
"Carlo",
"Chris",
"Christian",
"Christoph",
"Christopher",
"Clemens",
"Constantin",
"Cornelius",
"Cristiano",
"Damian",
"Daniel",
"Danilo",
"Dario",
"Darius",
"Darko",
"David",
"Dennis",
"Dominik",
"Eduard",
"Elias",
"Elyas",
"Emanuel",
"Emil",
"Emilian",
"Emmanuel",
"Eric",
"Erik",
"Erwin",
"Fabian",
"Fabio",
"Felix",
"Ferdinand",
"Fernando",
"Filip",
"Finn",
"Florentin",
"Florian",
"Florin",
"Franz",
"Frederik",
"Fridolin",
"Friedrich",
"Gabriel",
"Georg",
"Gregor",
"Gustav",
"Heinrich",
"Henri",
"Henrik",
"Henry",
"Hubert",
"Hugo",
"Igor",
"Ilias",
"Isa",
"Ismail",
"Jacob",
"Jakob",
"James",
"Jamie",
"Jan",
"Jannik",
"Jannis",
"Jasper",
"Joel",
"Johann",
"Johannes",
"John",
"Jonas",
"Jonathan",
"Josef",
"Joseph",
"Joshua",
"Julian",
"Julius",
"Justin",
"Justus",
"Kai",
"Karim",
"Karl",
"Kevin",
"Kilian",
"Konrad",
"Konstantin",
"Kristian",
"Lars",
"Laurenz",
"Laurin",
"Lean",
"Leander",
"Lennard",
"Lennart",
"Leo",
"Leon",
"Leonard",
"Leonardo",
"Leonhard",
"Leopold",
"Levi",
"Liam",
"Lino",
"Linus",
"Lionel",
"Lorenz",
"Lorenzo",
"Louis",
"Luca",
"Lucas",
"Luis",
"Luka",
"Lukas",
"Maksim",
"Manuel",
"Marc",
"Marcel",
"Marco",
"Marcus",
"Mario",
"Marius",
"Mark",
"Marko",
"Markus",
"Martin",
"Marvin",
"Mateo",
"Matheo",
"Mathias",
"Matteo",
"Matthias",
"Maurice",
"Max",
"Maximilian",
"Merlin",
"Mert",
"Michael",
"Mika",
"Mike",
"Milan",
"Milo",
"Moritz",
"Natan",
"Nathan",
"Nicholas",
"Nick",
"Nico",
"Nicolai",
"Nicolas",
"Niklas",
"Niko",
"Nikola",
"Nikolai",
"Nikolas",
"Nikolaus",
"Nils",
"Nino",
"Noah",
"Noel",
"Oliver",
"Oscar",
"Oskar",
"Pascal",
"Patrick",
"Patrik",
"Paul",
"Peter",
"Philip",
"Philipp",
"Phillip",
"Raffael",
"Ralph",
"Raphael",
"Rene",
"Ricardo",
"Richard",
"Robert",
"Robin",
"Roman",
"Ruben",
"Sam",
"Samuel",
"Sandro",
"Sascha",
"Sebastian",
"Severin",
"Simon",
"Stefan",
"Stephan",
"Steven",
"Sven",
"Teodor",
"Theo",
"Theodor",
"Thomas",
"Tim",
"Timo",
"Timon",
"Tobias",
"Tom",
"Tristan",
"Valentin",
"Valentino",
"Victor",
"Viktor",
"Vincent",
"Vito",
"William",
"Xavier",
)
# source: https://www.data.gv.at/katalog/dataset/87fc82a0-0042-49c8-b6f9-2602cd3dc17a
first_names_female = (
"Adelina",
"Adriana",
"Ajna",
"Alara",
"Aleksandra",
"Alena",
"Alexa",
"Alexandra",
"Alexia",
"Alice",
"Alma",
"Amanda",
"Amelia",
"Amelie",
"Anabella",
"Anastasia",
"Andjela",
"Andjelina",
"Andrea",
"Angela",
"Angelika",
"Angelina",
"Anika",
"Anita",
"Anja",
"Anna",
"Anna-Lena",
"Anna-Maria",
"Annabell",
"Annabella",
"Annabelle",
"Annalena",
"Anne",
"Annika",
"Antonella",
"Antonia",
"Ariana",
"Ariane",
"Aurelia",
"Aurora",
"Ava",
"Aylin",
"Barbara",
"Beatrice",
"Bernadette",
"Berra",
"Bianca",
"Carina",
"Carla",
"Carlotta",
"Carolina",
"Caroline",
"Catharina",
"Cecilia",
"Charlotte",
"Christina",
"Christine",
"Claire",
"Clara",
"Clarissa",
"Claudia",
"Constanze",
"Cristina",
"Dana",
"Daniela",
"Denise",
"Diana",
"Dilara",
"Domenica",
"Dora",
"Eda",
"Edda",
"Ela",
"Elena",
"Eleonora",
"Elina",
"Elisa",
"Elisabeth",
"Ella",
"Ellie",
"Elma",
"Elona",
"Elsa",
"Elvira",
"Emanuela",
"Emely",
"Emilia",
"Emilie",
"Emilija",
"Emma",
"Erina",
"Estelle",
"Esther",
"Eva",
"Evelyn",
"Felicitas",
"Fiona",
"Florentina",
"Francesca",
"Franziska",
"Frida",
"Gabriela",
"Gloria",
"Hanna",
"Hannah",
"Heidi",
"Helena",
"Helene",
"Ina",
"Ines",
"Irina",
"Iris",
"Irma",
"Isabel",
"Isabell",
"Isabella",
"Isabelle",
"Jana",
"Janine",
"Jasmina",
"Jasmine",
"Jennifer",
"Jessica",
"Johanna",
"Josefine",
"Jovana",
"Julia",
"Juliana",
"Juliane",
"Julijana",
"Juna",
"Kalina",
"Karina",
"Karla",
"Karolina",
"Karoline",
"Katarina",
"Katharina",
"Katja",
"Kerstin",
"Klara",
"Kristina",
"Kyra",
"Laetitia",
"Laila",
"Lana",
"Lara",
"Lara-Sophie",
"Larissa",
"Laura",
"Laureen",
"Lea",
"Lea-Sophie",
"Leah",
"Leandra",
"Lena",
"Leni",
"Leona",
"Leoni",
"Leonie",
"Leonora",
"Leontina",
"Leticia",
"Leyla",
"Lia",
"Lilia",
"Lilian",
"Liliana",
"Liliane",
"Lilli",
"Lilly",
"Lily",
"Lina",
"Linda",
"Linnea",
"Lisa",
"Lisa-Marie",
"Lola",
"Lora",
"Lorena",
"Lotta",
"Lotte",
"Louisa",
"Louise",
"Luana",
"Lucia",
"Lucie",
"Luisa",
"Luise",
"Luna",
"Lydia",
"Madeleine",
"Magdalena",
"Maida",
"Maja",
"Malena",
"Manuela",
"Mara",
"Maria",
"Mariam",
"Mariana",
"Marie",
"Marie-Louise",
"Marie-Sophie",
"Mariella",
"Marijana",
"Marina",
"Marissa",
"Marlene",
"Marta",
"Martha",
"Martina",
"Maryam",
"Mathilda",
"Matilda",
"Maya",
"Melanie",
"Melek",
"Melina",
"Melisa",
"Melissa",
"Mia",
"Michaela",
"Michelle",
"Mila",
"Milica",
"Mina",
"Mira",
"Miriam",
"Mona",
"Nadia",
"Nadin",
"Nadine",
"Nadja",
"Naomi",
"Natalia",
"Natalie",
"Natascha",
"Nathalie",
"Nela",
"Nele",
"Nelly",
"Nicola",
"Nicole",
"Nika",
"Nikita",
"Nikola",
"Nikolina",
"Nina",
"Nisa",
"Nora",
"Norah",
"Olivia",
"Patricia",
"Paula",
"Paulina",
"Pauline",
"Petra",
"Philippa",
"Pia",
"Rachel",
"Raffaela",
"Rana",
"Rayana",
"Rebecca",
"Rita",
"Romy",
"Ronja",
"Ronya",
"Rosa",
"Rosalie",
"Ruth",
"Sabine",
"Sabrina",
"Sahra",
"Salma",
"Sandra",
"Sara",
"Sarah",
"Selena",
"Selin",
"Selina",
"Selma",
"Sena",
"Siena",
"Sigrid",
"Sofia",
"Sofie",
"Sofija",
"Sonja",
"Sophia",
"Sophie",
"Sophie-Marie",
"Soraya",
"Stefanie",
"Stella",
"Stephanie",
"Tamara",
"Tanja",
"Tea",
"Theodora",
"Theresa",
"Therese",
"Tiffany",
"Tina",
"Valentina",
"Vanessa",
"Vera",
"Verena",
"Veronika",
"Victoria",
"Viktoria",
"Viola",
"Violetta",
"Vivian",
"Yasmina",
"Ylvie",
"Yvonne",
"Zara",
"Zoe",
"Zoey",
)
first_names = first_names_male + first_names_female
# about 1000 of the most popular Austrian surnames
# https://de.wiktionary.org/wiki/Verzeichnis:Deutsch/Namen/die_h%C3%A4ufigsten_Nachnamen_%C3%96sterreichs
last_names = (
"Achleitner",
"Ackerl",
"Adam",
"Adler",
"Aichholzer",
"Aichinger",
"Aigner",
"Albrecht",
"Altmann",
"Amann",
"Amon",
"Anderl",
"Angerer",
"Arnold",
"Artner",
"Aschauer",
"Auer",
"Augustin",
"Auinger",
"Bacher",
"Bachinger",
"Bachler",
"Bachmann",
"Bader",
"Baier",
"Baldauf",
"Barth",
"Bartl",
"Bauer",
"Baumann",
"Baumgartner",
"Bayer",
"Beck",
"Becker",
"Beer",
"Berchtold",
"Berger",
"Bergmann",
"Berner",
"Bernhard",
"Berthold",
"Bichler",
"Biedermann",
"Binder",
"Bischof",
"Bitschnau",
"Bittner",
"Blauensteiner",
"Blum",
"Blümel",
"Bock",
"Bodner",
"Bogner",
"Brandl",
"Brandner",
"Brandstetter",
"Brandstätter",
"Brandtner",
"Braun",
"Brenner",
"Breuer",
"Bruckner",
"Brugger",
"Brunner",
"Bräuer",
"Buchberger",
"Buchegger",
"Bucher",
"Buchinger",
"Buchner",
"Burger",
"Burgstaller",
"Burtscher",
"Bäck",
"Böck",
"Böhler",
"Böhm",
"Bösch",
"Bürger",
"Dallinger",
"Dangl",
"Danner",
"Danninger",
"Decker",
"Dengg",
"Denk",
"Deutschmann",
"Dietl",
"Dietrich",
"Dirnberger",
"Dittrich",
"Dobler",
"Doppler",
"Dorfer",
"Dorn",
"Dorner",
"Draxler",
"Dünser",
"Eberhard",
"Eberharter",
"Eberl",
"Ebner",
"Ecker",
"Eder",
"Edlinger",
"Egger",
"Eibl",
"Eichberger",
"Eichhorn",
"Eichinger",
"Eisl",
"Eisner",
"Eller",
"Ender",
"Engel",
"Engl",
"Enzinger",
"Erber",
"Erhart",
"Erlacher",
"Erler",
"Ernst",
"Ertl",
"Fabian",
"Falkner",
"Fankhauser",
"Farkas",
"Fasching",
"Fehringer",
"Feichtenschlager",
"Feichter",
"Feichtinger",
"Feichtner",
"Feigl",
"Felber",
"Felder",
"Fellinger",
"Fellner",
"Fercher",
"Ferstl",
"Fichtinger",
"Fiedler",
"Fink",
"Fischer",
"Fitz",
"Fleck",
"Fleischhacker",
"Fleischmann",
"Foidl",
"Forster",
"Forstner",
"Frank",
"Franz",
"Freitag",
"Freudenthaler",
"Frey",
"Frick",
"Friedl",
"Friedrich",
"Frisch",
"Fritsch",
"Fritz",
"Froschauer",
"Fröhlich",
"Fröschl",
"Frühwirth",
"Fuchs",
"Fuhrmann",
"Füreder",
"Fürst",
"Gabriel",
"Gahleitner",
"Galler",
"Gamsjäger",
"Gangl",
"Gartner",
"Gasser",
"Gassner",
"Gattringer",
"Geier",
"Geiger",
"Geisler",
"Geyer",
"Gindl",
"Glaser",
"Glatz",
"Glück",
"Gmeiner",
"Gollner",
"Gosch",
"Grabher",
"Grabner",
"Graf",
"Grasser",
"Grassl",
"Gratz",
"Gratzer",
"Gratzl",
"Greiner",
"Griesser",
"Grill",
"Gritsch",
"Gross",
"Groß",
"Gruber",
"Grundner",
"Grünberger",
"Grüner",
"Grünwald",
"Gschaider",
"Gschwandtner",
"Gstrein",
"Guggenberger",
"Gutmann",
"Gärtner",
"Göschl",
"Götz",
"Günther",
"Haas",
"Haberl",
"Hacker",
"Hackl",
"Haderer",
"Hafner",
"Hagen",
"Hager",
"Hahn",
"Haid",
"Haiden",
"Haider",
"Haidinger",
"Haindl",
"Hainzl",
"Haller",
"Hammer",
"Hammerer",
"Hammerl",
"Handl",
"Handler",
"Haring",
"Harrer",
"Hartl",
"Hartmann",
"Haslauer",
"Haslinger",
"Hattinger",
"Hauer",
"Haumer",
"Hausberger",
"Hauser",
"Hebenstreit",
"Hechenberger",
"Heger",
"Heigl",
"Heim",
"Heindl",
"Heinrich",
"Heinz",
"Heinzl",
"Heiss",
"Heissenberger",
"Held",
"Hell",
"Heller",
"Helm",
"Hemetsberger",
"Herbst",
"Hermann",
"Herrmann",
"Herzog",
"Himmelbauer",
"Hinterberger",
"Hinteregger",
"Hinterleitner",
"Hirsch",
"Hirschmann",
"Hochleitner",
"Hochreiter",
"Hofbauer",
"Hofer",
"Hoffmann",
"Hofinger",
"Hofmann",
"Hofmeister",
"Hofstetter",
"Hofstätter",
"Holl",
"Hollaus",
"Holler",
"Holzer",
"Holzinger",
"Holzknecht",
"Holzmann",
"Horak",
"Horn",
"Hosp",
"Huber",
"Hubmann",
"Huemer",
"Hufnagl",
"Humer",
"Hummel",
"Hummer",
"Huter",
"Hutter",
"Hutterer",
"Hämmerle",
"Häusler",
"Hödl",
"Höfer",
"Höfler",
"Höglinger",
"Höller",
"Hölzl",
"Hörl",
"Hörmann",
"Hübner",
"Hütter",
"Jahn",
"Jandl",
"Janisch",
"Jank",
"Jauk",
"Jenewein",
"Jost",
"Jovanovic",
"Juen",
"Jung",
"Jungwirth",
"Jäger",
"Jöbstl",
"Kager",
"Kahr",
"Kain",
"Kaindl",
"Kainz",
"Kaiser",
"Kalcher",
"Kaltenbrunner",
"Kaltenböck",
"Kaltenegger",
"Kammerer",
"Kapeller",
"Kappel",
"Kargl",
"Karl",
"Karner",
"Karrer",
"Kaspar",
"Kasper",
"Kastner",
"Kaufmann",
"Keller",
"Kellner",
"Keplinger",
"Kern",
"Kerschbaum",
"Kerschbaumer",
"Kessler",
"Kirchmair",
"Kirchner",
"Kirschner",
"Kiss",
"Kitzler",
"Klammer",
"Klaus",
"Klausner",
"Klein",
"Klement",
"Klinger",
"Klingler",
"Klocker",
"Kloiber",
"Klotz",
"Klug",
"Knapp",
"Knaus",
"Knoll",
"Kober",
"Koch",
"Kocher",
"Kofler",
"Kogler",
"Kohl",
"Kohler",
"Kolar",
"Kolb",
"Koller",
"Kollmann",
"Kolm",
"Konrad",
"Kopf",
"Kopp",
"Koppensteiner",
"Kraft",
"Krainer",
"Krainz",
"Kral",
"Krall",
"Kramer",
"Krammer",
"Kratzer",
"Kraus",
"Kraxner",
"Kreidl",
"Kreiner",
"Kremser",
"Krenn",
"Kreuzer",
"Kriegl",
"Kronberger",
"Kronsteiner",
"Krug",
"Kröll",
"Kucera",
"Kugler",
"Kuhn",
"Kummer",
"Kunz",
"Kurz",
"Kurzmann",
"Käfer",
"Köberl",
"Köck",
"Köhler",
"Kölbl",
"Köll",
"König",
"Kössler",
"Lackner",
"Ladner",
"Lagler",
"Laimer",
"Lammer",
"Lampert",
"Lampl",
"Lamprecht",
"Landl",
"Lang",
"Langer",
"Larcher",
"Lassnig",
"Leber",
"Lechner",
"Lederer",
"Leeb",
"Lehner",
"Leibetseder",
"Leitgeb",
"Leithner",
"Leitner",
"Lengauer",
"Lenz",
"Leonhartsberger",
"Leopold",
"Lerch",
"Lercher",
"Lettner",
"Leutgeb",
"Lichtenegger",
"Linder",
"Lindinger",
"Lindner",
"Lindorfer",
"Lintner",
"Lipp",
"List",
"Loibl",
"Loidl",
"Lorenz",
"Ludwig",
"Luger",
"Luttenberger",
"Lutz",
"Löffler",
"Macher",
"Mader",
"Maier",
"Maierhofer",
"Mair",
"Mairhofer",
"Mandl",
"Mann",
"Margreiter",
"Maringer",
"Mark",
"Markl",
"Marte",
"Martin",
"Marx",
"Mathis",
"Maurer",
"Mayer",
"Mayerhofer",
"Mayr",
"Mayrhofer",
"Meier",
"Meindl",
"Meister",
"Meixner",
"Messner",
"Metzler",
"Meusburger",
"Meyer",
"Mitter",
"Mitteregger",
"Mitterer",
"Mitterlehner",
"Mittermayr",
"Mohr",
"Moosbrugger",
"Moritz",
"Moser",
"Muhr",
"Mörth",
"Mühlbacher",
"Mühlberger",
"Mühlböck",
"Müller",
"Müllner",
"Nagel",
"Nagele",
"Nagl",
"Nemeth",
"Neubacher",
"Neubauer",
"Neugebauer",
"Neuhauser",
"Neuhold",
"Neulinger",
"Neumann",
"Neumayer",
"Neumayr",
"Neumeister",
"Neumüller",
"Neuner",
"Neureiter",
"Neuwirth",
"Niederl",
"Nowak",
"Nussbaumer",
"Nußbaumer",
"Nöbauer",
"Oberhauser",
"Oberhofer",
"Oberleitner",
"Obermayr",
"Obermüller",
"Oberndorfer",
"Ofner",
"Ortner",
"Ostermann",
"Oswald",
"Ott",
"Pacher",
"Pachler",
"Paier",
"Pammer",
"Parzer",
"Pauer",
"Paul",
"Paulitsch",
"Payer",
"Peer",
"Peham",
"Pendl",
"Penz",
"Perner",
"Pertl",
"Pesendorfer",
"Peter",
"Petz",
"Pfeffer",
"Pfeifer",
"Pfeiffer",
"Pfister",
"Pfleger",
"Philipp",
"Pichler",
"Pieber",
"Pilz",
"Pinter",
"Pircher",
"Pirker",
"Plank",
"Plattner",
"Platzer",
"Pock",
"Pohl",
"Pointner",
"Pokorny",
"Pollak",
"Polzer",
"Posch",
"Postl",
"Prager",
"Prantl",
"Praxmarer",
"Prem",
"Prenner",
"Prinz",
"Probst",
"Prohaska",
"Pröll",
"Pucher",
"Puchner",
"Puntigam",
"Punz",
"Putz",
"Pöll",
"Pölzl",
"Pöschl",
"Pühringer",
"Raab",
"Rabitsch",
"Rabl",
"Radl",
"Rainer",
"Ramsauer",
"Rath",
"Rauch",
"Rausch",
"Rauscher",
"Rauter",
"Rechberger",
"Redl",
"Reich",
"Reichel",
"Reicher",
"Reichl",
"Reichmann",
"Reif",
"Reinbacher",
"Reindl",
"Reiner",
"Reinisch",
"Reinprecht",
"Reinthaler",
"Reischl",
"Reisinger",
"Reisner",
"Reitbauer",
"Reiter",
"Reiterer",
"Reithofer",
"Reitinger",
"Renner",
"Resch",
"Rettenbacher",
"Richter",
"Rieder",
"Riedl",
"Riedler",
"Riedmann",
"Rieger",
"Riegler",
"Riener",
"Riepl",
"Rieser",
"Ringhofer",
"Rinner",
"Ritter",
"Rohrer",
"Rohrmoser",
"Rosenberger",
"Rosner",
"Rossmann",
"Roth",
"Rottensteiner",
"Rotter",
"Rudolf",
"Rupp",
"Röck",
"Rössler",
"Sagmeister",
"Sailer",
"Salcher",
"Salzer",
"Salzmann",
"Sammer",
"Santner",
"Sattler",
"Sauer",
"Schachinger",
"Schachner",
"Schaffer",
"Schalk",
"Schaller",
"Schandl",
"Schantl",
"Scharf",
"Scharinger",
"Schartner",
"Schatz",
"Schatzl",
"Schauer",
"Scheer",
"Scheiber",
"Scheidl",
"Schenk",
"Scherer",
"Scherr",
"Scherz",
"Scherzer",
"Scheucher",
"Schiefer",
"Schiestl",
"Schilcher",
"Schiller",
"Schimpl",
"Schinagl",
"Schindler",
"Schinnerl",
"Schlager",
"Schlosser",
"Schlögl",
"Schmid",
"Schmidinger",
"Schmidl",
"Schmidt",
"Schmied",
"Schmuck",
"Schmölzer",
"Schnabl",
"Schneeberger",
"Schneider",
"Schober",
"Scholz",
"Schramm",
"Schrammel",
"Schranz",
"Schreiber",
"Schreiner",
"Schrempf",
"Schrenk",
"Schrittwieser",
"Schröder",
"Schubert",
"Schuh",
"Schuler",
"Schuller",
"Schulz",
"Schuster",
"Schwab",
"Schwaiger",
"Schwaighofer",
"Schwarz",
"Schwarzinger",
"Schwarzl",
"Schweiger",
"Schweighofer",
"Schweitzer",
"Schwendinger",
"Schäfer",
"Schöberl",
"Schöffmann",
"Schöller",
"Schön",
"Schönauer",
"Schönberger",
"Schöpf",
"Schüller",
"Schütz",
"Seebacher",
"Seidl",
"Seifert",
"Seiler",
"Seiser",
"Seitz",
"Seiwald",
"Sieber",
"Sieberer",
"Siegl",
"Sigl",
"Siller",
"Simic",
"Simon",
"Singer",
"Sommer",
"Sonnleitner",
"Sorger",
"Sperl",
"Spiegl",
"Spindler",
"Spitzer",
"Spreitzer",
"Springer",
"Stadlbauer",
"Stadler",
"Stangl",
"Stark",
"Staudacher",
"Staudinger",
"Stecher",
"Stefan",
"Steger",
"Steidl",
"Steiger",
"Steinacher",
"Steinbacher",
"Steinbauer",
"Steinberger",
"Steinböck",
"Steindl",
"Steiner",
"Steininger",
"Steinkellner",
"Steinlechner",
"Steinwender",
"Stelzer",
"Stelzl",
"Stern",
"Steurer",
"Stiegler",
"Stifter",
"Stock",
"Stocker",
"Stockhammer",
"Stockinger",
"Stoiber",
"Stolz",
"Strasser",
"Strauss",
"Strauß",
"Streicher",
"Strobl",
"Strohmaier",
"Strohmayer",
"Strohmeier",
"Stummer",
"Sturm",
"Stöckl",
"Stöger",
"Stückler",
"Stütz",
"Sulzer",
"Suppan",
"Taferner",
"Tanzer",
"Tauber",
"Taucher",
"Teufl",
"Thaler",
"Thalhammer",
"Thaller",
"Thurner",
"Tiefenbacher",
"Tischler",
"Toth",
"Trattner",
"Trauner",
"Traxler",
"Trimmel",
"Trinkl",
"Trummer",
"Uhl",
"Ullmann",
"Ulrich",
"Unger",
"Unterberger",
"Unterweger",
"Urban",
"Varga",
"Veit",
"Vogel",
"Vogl",
"Vogler",
"Vogt",
"Wachter",
"Wagner",
"Walch",
"Walcher",
"Walder",
"Waldner",
"Wallner",
"Walser",
"Walter",
"Waltl",
"Wandl",
"Weber",
"Wechselberger",
"Wegscheider",
"Weidinger",
"Weigl",
"Weinberger",
"Weiser",
"Weiss",
"Weissenböck",
"Weiß",
"Wenger",
"Weninger",
"Wenzl",
"Werner",
"Widhalm",
"Widmann",
"Wiedner",
"Wieland",
"Wiener",
"Wiesbauer",
"Wieser",
"Wiesinger",
"Wiesner",
"Wild",
"Wilfinger",
"Wilhelm",
"Wimmer",
"Windhager",
"Windisch",
"Winkler",
"Winter",
"Wirth",
"Wittmann",
"Wohlmuth",
"Wolf",
"Wurm",
"Wurzer",
"Wurzinger",
"Wögerbauer",
"Wöhrer",
"Yilmaz",
"Zach",
"Zangerl",
"Zauner",
"Zechmeister",
"Zechner",
"Zehetner",
"Zeiler",
"Zeilinger",
"Zeiner",
"Zeller",
"Zenz",
"Zettl",
"Ziegler",
"Zimmermann",
"Zotter",
"Zöchling",
"Zöhrer",
)
prefixes_male = (
"Herr",
"Dr.",
"Ing.",
"Dipl.-Ing.",
"Prof.",
"Univ.Prof.",
)
prefixes_female = (
"Frau",
"Dr.",
"Ing.",
"Dipl.-Ing.",
"Prof.",
"Univ.Prof.",
)
prefixes_male = (
"Herr",
"Dr.",
"Ing.",
"Dipl.-Ing.",
"Prof.",
"Univ.Prof.",
)
prefixes_female = (
"Frau",
"Dr.",
"Ing.",
"Dipl.-Ing.",
"Prof.",
"Univ.Prof.",
)
prefixes = ("Dr.", "Mag.", "Ing.", "Dipl.-Ing.", "Prof.", "Univ.Prof.")
| joke2k/faker | faker/providers/person/de_AT/__init__.py | Python | mit | 29,783 |
from peewee import BooleanField
from wtforms import widgets
from wtfpeewee.fields import BooleanSelectField
from wtfpeewee.fields import ModelSelectField
from wtfpeewee.orm import ModelConverter
class BaseModelConverter(ModelConverter):
    """ModelConverter that renders peewee BooleanFields as select widgets."""

    def __init__(self, *args, **kwargs):
        super(BaseModelConverter, self).__init__(*args, **kwargs)
        # Route BooleanField through our own handler instead of the default.
        self.converters.update({BooleanField: self.handle_boolean})

    def handle_boolean(self, model, field, **kwargs):
        # Use a yes/no dropdown rather than a checkbox.
        return (field.name, BooleanSelectField(**kwargs))
class ChosenAjaxSelectWidget(widgets.Select):
    """Select widget wired for ajax-backed "chosen" autocompletion.

    Emits data-* attributes that client-side javascript uses to populate
    the options from *data_source*, sending the typed text as *data_param*.
    """

    def __init__(self, data_source, data_param, *args, **kwargs):
        self.data_source = data_source
        self.data_param = data_param
        super(ChosenAjaxSelectWidget, self).__init__(*args, **kwargs)

    def __call__(self, field, **kwargs):
        # Blank-allowing single selects get a distinct widget role.
        use_blank = field.allow_blank and not self.multiple
        kwargs['data-role'] = u'ajax-chosenblank' if use_blank else u'ajax-chosen'
        kwargs['data-source'] = self.data_source
        kwargs['data-param'] = self.data_param
        kwargs['data-placeholder'] = 'Type to search...'
        return super(ChosenAjaxSelectWidget, self).__call__(field, **kwargs)
class LimitedModelSelectField(ModelSelectField):
    """ModelSelectField that only offers the first 20 rows as choices."""

    def iter_choices(self):
        # Cap the rendered option list at 20 rows.
        for row in self.query.limit(20):
            yield (row._pk, self.get_label(row), row == self.data)
| coleifer/flask-peewee | flask_peewee/forms.py | Python | mit | 1,407 |
#!/usr/bin/python
import numpy as np
import pandas as pd
from glob import glob
from cluster_analysis import cluster
import matplotlib.pyplot as plt
def cluster_traj(first_frame=-20):
    """Analyse cluster sizes and hairpin defects over trajectory frames.

    Reads every ``cluster.*.out.gz`` file in the current directory, keeps
    only the trailing frames selected by the negative slice index
    *first_frame*, prints the mean/std of cluster size and hairpin defects,
    writes ``cluster_histogram.out`` and shows a bar plot of the histogram.

    :param first_frame: negative index of the first frame to include
        (default -20, i.e. the last 20 frames).
    """
    # Collect all per-frame cluster files, sorted by the frame number
    # embedded in the filename.  Joining the filtered digits is required
    # on Python 3, where filter() returns an iterator, not a str.
    complete_traj = glob("cluster.*.out.gz")
    complete_traj.sort(key=lambda f: int("".join(filter(str.isdigit, f))))
    # Consider only the desired (most recent) frames.
    wanted_frames = complete_traj[first_frame:]

    cluster_all = []           # every cluster size seen, over all frames
    average_cluster_size = []  # per-frame mean cluster size
    average_hairpins = []      # per-frame mean hairpin defects per chain

    for frame in wanted_frames:
        cluster_info = cluster(fname=frame)
        # Occurrence of each cluster size in this frame.
        frame_cluster_occurrence = cluster_info.occurrence()
        cluster_all.extend(frame_cluster_occurrence)
        # Per-frame histogram; only the mean size is needed here.
        _, _, average_cluster_frame = cluster_info.histogram(frame_cluster_occurrence)
        average_cluster_size.append(average_cluster_frame)
        # Hairpin defects.
        average_hairpins.append(cluster_info.hairpin())

    cluster_all = np.array(cluster_all)
    average_cluster_size = np.array(average_cluster_size)
    average_hairpins = np.array(average_hairpins)
    print("Average size and std: ", average_cluster_size.mean(), average_cluster_size.std())
    print("Average number of hairpin defect per chain and std: ", average_hairpins.mean(), average_hairpins.std())

    # Histogram over the whole trajectory (cluster_info from the last frame
    # is reused here, as in the original implementation).
    cluster_histogram, bins, _ = cluster_info.histogram(cluster_all)
    np.savetxt("cluster_histogram.out",
               np.transpose([bins, bins * cluster_histogram]))

    # matplotlib graph
    plt.xlabel("Taille")
    plt.ylabel("Nombre de particules")
    plt.bar(bins, bins * cluster_histogram)
    plt.show()
cluster_traj()
| EtiCui/Msc-UdeS | dataAnalysis/cluster_traj.py | Python | mit | 2,038 |
from js_process_ast import *
def type_logger(node, typespace):
    """Instrument every function, method, setter and getter under *node*
    with profiling calls recording the callee kind and argument types.

    Each body gets generated JS prepended (built via js_parse) that
    serializes the runtime types of ``arguments`` into a string ``_args``
    and passes it to ``_profile_log`` along with the call stack.
    """
    def arg_log():
        # JS snippet that builds the comma-separated type-signature
        # string ``_args`` from the runtime ``arguments`` object.
        n = js_parse("""
        var _args = "";
        for (var i=0; i<arguments.length; i++) {
            if (i > 0)
                _args += ","
            if (typeof arguments[i] == "object")
                _args += arguments[i].constructor.name;
            else if (typeof arguments[i] == "number")
                _args += "number";
            else if (typeof arguments[i] == "boolean")
                _args += "boolean";
            else if (typeof arguments[i] == "string")
                _args += "string";
            else if (arguments[i] == null)
                _args += "null";
            else if (arguments[i] == undefined)
                _args += "undefined";
            else
                _args += "[type error]";
        }
        """);
        return n
    def funclog(name):
        # Combine the arg-type snippet with a _profile_log call tagged
        # with the callee kind *name* ("FUNC", "METH", "SETR" or "GETR").
        log = arg_log()
        n2 = js_parse("""
            $n;
            _profile_log("$s", _args, get_callstack());
        """, [log, name]);
        return n2
    # One prepend-callback per node kind, each tagging its kind string.
    def func(n):
        n.prepend(funclog("FUNC"))
    def method(n):
        n.prepend(funclog("METH"))
    def setter(n):
        n.prepend(funclog("SETR"))
    def getter(n):
        n.prepend(funclog("GETR"))
    traverse(node, FunctionNode, func)
    traverse(node, MethodNode, method)
    traverse(node, MethodSetter, setter)
    traverse(node, MethodGetter, getter)
def crash_logger(node, typespace):
    """Crash-logging instrumentation pass.  Not implemented yet."""
    pass
| joeedh/webblender | tools/extjs_cc/js_profile.py | Python | apache-2.0 | 1,338 |
# Phase-field vs. level-set vs. sharp-interface profile comparison plot.
# The obsolete Numeric package (pre-numpy, unmaintained since the mid-2000s)
# is replaced by numpy, which provides identical arange/tanh semantics here.
import matplotlib
matplotlib.rc('text', usetex = True)
import pylab
import numpy

## interface tracking profiles
N = 500
delta = 0.6
# N evenly spaced samples spanning [-1, 1].
X = -1 + 2. * numpy.arange(N) / (N - 1)
pylab.plot(X, (1 - numpy.tanh(4. * X / delta)) / 2, ## phase field tanh profiles
           X, (X + 1) / 2, ## level set distance function
           X, (1.4 + numpy.tanh(4. * X / delta)) / 4, ## composition profile
           X, X < 0, 'k--', ## sharp interface
           linewidth = 5)
## legend, one color per curve
pylab.legend((r'phase field', r'level set', r'composition', r'sharp interface'), shadow = True, loc = (0.01, 0.55))
ltext = pylab.gca().get_legend().get_texts()
pylab.setp(ltext[0], fontsize = 20, color = 'b')
pylab.setp(ltext[1], fontsize = 20, color = 'g')
pylab.setp(ltext[2], fontsize = 20, color = 'r')
pylab.setp(ltext[3], fontsize = 20, color = 'k')
## double-headed arrow marking the interface width delta
height = 0.1
offset = 0.02
pylab.plot((-delta / 2., delta / 2), (height, height), 'k', linewidth = 2)
pylab.plot((-delta / 2, -delta / 2 + offset * 2), (height, height - offset), 'k', linewidth = 2)
pylab.plot((-delta / 2, -delta / 2 + offset * 2), (height, height + offset), 'k', linewidth = 2)
pylab.plot((delta / 2, delta / 2 - offset * 2), (height, height - offset), 'k', linewidth = 2)
pylab.plot((delta / 2, delta / 2 - offset * 2), (height, height + offset), 'k', linewidth = 2)
pylab.text(-0.06, height - 0.06, r'$\delta$', {'color' : 'k', 'fontsize' : 24})
## X-axis label
pylab.xticks((-1, 0, 1), ('-1', '0', '1'), color = 'k', size = 20)
## Left Y-axis labels
pylab.ylabel(r'\bf{phase field} $\phi$', {'color' : 'b',
             'fontsize'   : 20 })
pylab.yticks((0, 0.5, 1), ('0', '.5', '1'), color = 'k', size = 20)
## Right Y-axis labels (drawn manually with clip_on=False)
pylab.text(1.05, 0.5, r"\bf{level set} $\phi$", {'color' : 'g', 'fontsize' : 20},
           horizontalalignment = 'left',
           verticalalignment = 'center',
           rotation = 90,
           clip_on = False)
pylab.text(1.01, -0.02, "-1", {'color' : 'k', 'fontsize' : 20})
pylab.text(1.01, 0.98, "1", {'color' : 'k', 'fontsize' : 20})
pylab.text(1.01, 0.48, "0", {'color' : 'k', 'fontsize' : 20})
## level set equations
pylab.text(0.1, 0.85, r'$|\nabla\phi| = 1,$ \newline $ \frac{\partial \phi}{\partial t} + U|\nabla \phi| = 0$', {'color' : 'g', 'fontsize' : 20})
## phase field equations
pylab.text(0.2, 0.15, r'$\mathcal{F} = \int f\left( \phi, c \right) dV,$ \newline $ \frac{ \partial \phi } { \partial t } = -M_{ \phi } \frac{ \delta \mathcal{F} } { \delta \phi }$', {'color' : 'b', 'fontsize' : 20})
pylab.savefig('pfm-lsm.png')
pylab.show()
| sniemi/SamPy | sandbox/src1/examples/dannys_example.py | Python | bsd-2-clause | 2,666 |
import copy
import os
from visitor import *
from stringstream import *
class Rewriter(NodeVisitor):
""" Class for rewriting of the original AST. Includes:
1. the initial small rewritings,
2. transformation into our representation,
3. transforming from our representation to C-executable code,
4. creating our representation of the device kernel code,
5. creating a C-executable kernel code,
6. Creating the host code (boilerplate code)
"""
    def __init__(self):
        """Initialize all bookkeeping datastructures used by the rewriter.

        Attributes fall into two groups: those describing the original
        program (loop indices, limits, array information, ...) and those
        used while performing transformations (transposition, constant
        memory, local memory, ...).
        """
        # List of loop indices
        self.index = list()
        # dict of the upper limit of the loop indices
        self.UpperLimit = dict()
        # dict of the lower limit of the loop indices
        self.LowerLimit = dict()
        # The local work group size (name of the define and per-dim sizes)
        self.Local = dict()
        self.Local['name'] = 'LSIZE'
        self.Local['size'] = ['64']
        # The number of dimensions of each array
        self.NumDims = dict()
        # The Ids of arrays, or pointers
        self.ArrayIds = set()
        # The indices that appear in the subscript of each array
        self.IndexInSubscript = dict()
        # All Ids that are not arrays, or pointers
        self.NonArrayIds = set()
        # Ids that we remove due to parallelization of loops
        self.RemovedIds = set()
        # The mapping from the indices that we parallelize
        # to their function returning their thread id in the kernel
        self.IndexToThreadId = dict()
        self.IndexToLocalId = dict()
        self.IndexToLocalVar = dict()
        # The indices that we parallelize
        self.GridIndices = list()
        # The OpenCl kernel before anything
        self.Kernel = None
        # The "inside" of the OpenCl kernel after parallelization
        self.InsideKernel = None
        # The name of the kernel, i.e. the FuncName + Kernel
        self.KernelName = None
        # The mapping from the array ids to a list of
        # the names of their dimensions
        self.ArrayIdToDimName = dict()
        # ArrayRef inside a loop in the kernel
        # Mapping from Id to AST ArrayRef node
        self.LoopArray = dict()
        # The argument list in our IR
        self.DevArgList = list()
        # The name and type of the kernel function.
        self.DevFuncTypeId = None
        # The name of the kernel function.
        self.DevFuncId = None
        # The device names of the pointers in the boilerplate code
        self.DevId = dict()
        # The host names of the pointers in the boilerplate code
        self.HstId = dict()
        # The types of the arguments for the kernel
        self.Type = dict()
        # The name of the variable denoting the memory size
        self.Mem = dict()
        # Dimension of the parallelization
        self.ParDim = None
        # VarName of the global/local worksize array.
        self.Worksize = dict()
        # The dimension that the index indexes
        self.IdxToDim = dict()
        # Whether an array is read, write or both
        self.ReadWrite = dict()
        # List of arrays that are write only
        self.WriteOnly = list()
        # List of arrays that are read only
        self.ReadOnly = list()
        # dict of indices to loops in the kernel
        self.Loops = dict()
        # Contains the loop indices for each subscript
        self.SubIdx = dict()
        # Contains a list for each arrayref of what loop indices
        # appear in the subscript.
        self.Subscript = dict()
        # The same as above but names are saved as strings instead
        # of Id(strings)
        self.SubscriptNoId = dict()
        # Decides whether we read back data from GPU
        self.NoReadBack = False
        # A list of calls to the transpose function which we perform
        # after data was read back from the GPU.
        self.WriteTranspose = list()
        # A mapping from array references to the loops they appear in.
        self.RefToLoop = dict()
        # List of arguments for the kernel
        ## self.KernelArgs = list()
        ########################################################
        # Datastructures used when performing transformations #
        ########################################################
        # Holds the sub-AST in AllocateBuffers
        # that we add transposition to.
        self.Transposition = None
        # Holds the sub-AST in AllocateBuffers
        # that we add constant memory pointer initializations to.
        self.ConstantMemory = None
        # Holds the sub-AST in AllocateBuffers
        # where we set the defines for the kernel.
        self.Define = None
        # Dict containing the name and type for each kernel argument
        # set in SetArguments
        self.KernelArgs = dict()
        # Holds information about which names have been swapped
        # in a transposition
        self.NameSwap = dict()
        # Holds information about which subscripts have been swapped
        # in a transposition
        self.SubSwap = dict()
        # Holds information about which indices have been swapped
        # in a transposition
        self.IdxSwap = dict()
        # Holds information about which dimensions have been swapped
        # in a transposition
        self.DimSwap = dict()
        # Holds additional global variables such as pointers that we add
        # when we perform transformations
        self.GlobalVars = dict()
        # Holds additional cl_mem variables that we add
        # when we perform the Constant Memory transformation
        self.ConstantMem = dict()
        # Name swap in relation to [local memory]
        self.LocalSwap = dict()
        # Extra things that we add to ids [local memory]
        self.Add = dict()
        # Holds includes for the kernel
        self.Includes = list()
        # Holds the ast for a function that returns the kernelstring
        self.KernelStringStream = list()
        # Holds a list of which loops we will unroll
        self.UnrollLoops = list()
        # True if SetDefine was called.
        self.DefinesAreMade = False
        # List of what kernel arguments changes
        self.Change = list()
        # Placeholder for an if-then-else construct added later.
        self.IfThenElse = None
def initOriginal(self, ast):
loops = ForLoops()
loops.visit(ast)
forLoopAst = loops.ast
loopIndices = LoopIndices()
loopIndices.visit(forLoopAst)
self.index = loopIndices.index
self.UpperLimit = loopIndices.end
self.LowerLimit = loopIndices.start
norm = Norm(self.index)
norm.visit(forLoopAst)
arrays = Arrays(self.index)
arrays.visit(ast)
for n in arrays.numIndices:
if arrays.numIndices[n] == 2:
arrays.numSubscripts[n] = 2
elif arrays.numIndices[n] > 2:
arrays.numSubscripts[n] = 1
self.NumDims = arrays.numSubscripts
self.IndexInSubscript = arrays.indexIds
typeIds = TypeIds()
typeIds.visit(loops.ast)
typeIds2 = TypeIds()
typeIds2.visit(ast)
outsideTypeIds = typeIds2.ids - typeIds.ids
for n in typeIds.ids:
typeIds2.dictIds.pop(n)
self.Type = typeIds2.dictIds
ids = Ids()
ids.visit(ast)
print ids.ids
print arrays.ids
print typeIds.ids
# print "typeIds.ids ", typeIds.ids
# print "arrays.ids ", arrays.ids
# print "ids.ids ", ids.ids
otherIds = ids.ids - arrays.ids - typeIds.ids
self.ArrayIds = arrays.ids - typeIds.ids
self.NonArrayIds = otherIds
def initNewRepr(self, ast, dev = 'GPU'):
    """Populate the data structures for the OpenCL (new) representation.

    Derives the parallel grid from the perfect loop nest, maps loop
    indices to OpenCL work-item ids, collects kernel arguments and
    array metadata, and records subscript information for later
    transformations.  `dev` selects the default work-group sizes
    ('GPU' or 'CPU').
    """
    ## findIncludes = FindIncludes()
    ## findIncludes.visit(ast)
    ## self.Includes = findIncludes.includes
    perfectForLoop = PerfectForLoop()
    perfectForLoop.visit(ast)
    if self.ParDim is None:
        # Default the number of parallel dimensions to the depth of
        # the perfect loop nest.
        self.ParDim = perfectForLoop.depth
    # Default local work-group sizes; CPU devices get smaller groups.
    if self.ParDim == 1:
        self.Local['size'] = ['256']
        if dev == 'CPU':
            self.Local['size'] = ['16']
    else:
        self.Local['size'] = ['16','16']
        if dev == 'CPU':
            self.Local['size'] = ['4','4']
    innerbody = perfectForLoop.inner
    if perfectForLoop.depth == 2 and self.ParDim == 1:
        # Only one level is parallelized, so the second loop level
        # stays sequential inside the kernel body.
        innerbody = perfectForLoop.outer
    firstLoop = ForLoops()
    firstLoop.visit(innerbody.compound)
    loopIndices = LoopIndices()
    if firstLoop.ast is not None:
        # Sequential loops remain inside the kernel; record them.
        loopIndices.visit(innerbody.compound)
        self.Loops = loopIndices.Loops
    self.InsideKernel = firstLoop.ast
    arrays = Arrays(self.index)
    arrays.visit(innerbody.compound)
    self.NumDims = arrays.numSubscripts
    self.LoopArrays = arrays.LoopArrays
    initIds = InitIds()
    initIds.visit(perfectForLoop.ast.init)
    gridIds = list()
    idMap = dict()
    localMap = dict()
    localVarMap = dict()
    # First grid dimension: map the outermost loop index to the
    # work-item id functions and an 'l'-prefixed local variable name.
    firstIdx = initIds.index[0]
    idMap[firstIdx] = 'get_global_id(0)'
    localMap[firstIdx] = 'get_local_id(0)'
    localVarMap[firstIdx] = 'l' + firstIdx
    self.ReverseIdx = dict()
    self.ReverseIdx[0] = 1
    gridIds.extend(initIds.index)
    kernel = perfectForLoop.ast.compound
    self.ReverseIdx[1] = 0
    if self.ParDim == 2:
        # Second grid dimension: peel the next loop level as well.
        initIds = InitIds()
        initIds.visit(kernel.statements[0].init)
        kernel = kernel.statements[0].compound
        secondIdx = initIds.index[0]
        idMap[secondIdx] = 'get_global_id(1)'
        localMap[secondIdx] = 'get_local_id(1)'
        localVarMap[secondIdx] = 'l' + secondIdx
        gridIds.extend(initIds.index)
        # Swap the id mappings between the two grid indices.
        (idMap[gridIds[0]], idMap[gridIds[1]]) = (idMap[gridIds[1]], idMap[gridIds[0]])
        (localMap[gridIds[0]], localMap[gridIds[1]]) = (localMap[gridIds[1]], localMap[gridIds[0]])
        ## (localVarMap[gridIds[0]], localVarMap[gridIds[1]]) = (localVarMap[gridIds[1]], localVarMap[gridIds[0]])
    self.IndexToLocalId = localMap
    self.IndexToLocalVar = localVarMap
    self.IndexToThreadId = idMap
    self.GridIndices = gridIds
    self.Kernel = kernel
    for i, n in enumerate(reversed(self.GridIndices)):
        self.IdxToDim[i] = n
    findDim = FindDim(self.NumDims)
    findDim.visit(ast)
    self.ArrayIdToDimName = findDim.dimNames
    # Upper bounds of parallelized loops become redundant unless they
    # are still referenced inside the kernel body.
    self.RemovedIds = set(self.UpperLimit[i] for i in self.GridIndices)
    idsStillInKernel = Ids()
    idsStillInKernel.visit(self.Kernel)
    self.RemovedIds = self.RemovedIds - idsStillInKernel.ids
    otherIds = self.ArrayIds.union(self.NonArrayIds)
    findDeviceArgs = FindDeviceArgs(otherIds)
    findDeviceArgs.visit(ast)
    self.DevArgList = findDeviceArgs.arglist
    findFunction = FindFunction()
    findFunction.visit(ast)
    self.DevFuncTypeId = findFunction.typeid
    self.DevFuncId = self.DevFuncTypeId.name.name
    # Naming convention for device/host pointers and size variables.
    for n in self.ArrayIds:
        self.DevId[n] = 'dev_ptr' + n
        self.HstId[n] = 'hst_ptr' + n
        self.Mem[n] = 'hst_ptr' + n + '_mem_size'
    ## for n in self.DevArgList:
    ##     name = n.name.name
    ##     type = n.type[-2:]
    ##     self.Type[name] = type
    # Dimension-size variables are size_t.
    for n in self.ArrayIdToDimName:
        for m in self.ArrayIdToDimName[n]:
            self.Type[m] = ['size_t']
    kernelName = self.DevFuncTypeId.name.name
    self.KernelName = kernelName + 'Kernel'
    self.Worksize['local'] = kernelName + '_local_worksize'
    self.Worksize['global'] = kernelName + '_global_worksize'
    self.Worksize['offset'] = kernelName + '_global_offset'
    # Classify arrays that are only read or only written.
    findReadWrite = FindReadWrite(self.ArrayIds)
    findReadWrite.visit(ast)
    self.ReadWrite = findReadWrite.ReadWrite
    for n in self.ReadWrite:
        pset = self.ReadWrite[n]
        if len(pset) == 1:
            if 'write' in pset:
                self.WriteOnly.append(n)
            else:
                self.ReadOnly.append(n)
    argIds = self.NonArrayIds.union(self.ArrayIds) - self.RemovedIds
    # Debug output left in by the author.
    print self.NonArrayIds
    # print self.KernelArgs, "KA Rewr be"
    # print self.ArrayIdToDimName, "rewriter"
    print argIds
    for n in argIds:
        tmplist = [n]
        try:
            # 2-D arrays additionally pass their first dimension size.
            if self.NumDims[n] == 2:
                tmplist.append(self.ArrayIdToDimName[n][0])
        except KeyError:
            pass
        # print tmplist, " tmp ", n
        for m in tmplist:
            self.KernelArgs[m] = self.Type[m]
    # print self.KernelArgs, "KA Rewrie"
    # Placeholder statement groups filled in by later transformations.
    self.Transposition = GroupCompound([Comment('// Transposition')])
    self.ConstantMemory = GroupCompound([Comment('// Constant Memory')])
    self.Define = GroupCompound([Comment('// Defines for the kernel')])
    arrays = Arrays(self.index)
    arrays.visit(ast)
    self.Subscript = arrays.Subscript
    self.SubIdx = arrays.SubIdx
    # Plain-value copy of the subscripts: each AST node is replaced by
    # its .name or .value, or 'unknown' when it has neither.
    self.SubscriptNoId = copy.deepcopy(self.Subscript)
    for n in self.SubscriptNoId.values():
        for m in n:
            for i,k in enumerate(m):
                try:
                    m[i] = k.name
                except AttributeError:
                    try:
                        m[i] = k.value
                    except AttributeError:
                        m[i] = 'unknown'
    refToLoop = RefToLoop(self.GridIndices)
    refToLoop.visit(ast)
    self.RefToLoop = refToLoop.RefToLoop
def DataStructures(self):
print "self.index " , self.index
print "self.UpperLimit " , self.UpperLimit
print "self.LowerLimit " , self.LowerLimit
print "self.NumDims " , self.NumDims
print "self.ArrayIds " , self.ArrayIds
print "self.IndexInSubscript " , self.IndexInSubscript
print "self.NonArrayIds " , self.NonArrayIds
print "self.RemovedIds " , self.RemovedIds
print "self.IndexToThreadId " , self.IndexToThreadId
print "self.IndexToLocalId " , self.IndexToLocalId
print "self.IndexToLocalVar " , self.IndexToLocalVar
print "self.ReverseIdx ", self.ReverseIdx
print "self.GridIndices " , self.GridIndices
## print "self.Kernel " , self.Kernel
print "self.ArrayIdToDimName " , self.ArrayIdToDimName
print "self.DevArgList " , self.DevArgList
print "self.DevFuncTypeId " , self.DevFuncTypeId
print "self.DevId " , self.DevId
print "self.HstId " , self.HstId
print "self.Type " , self.Type
print "self.Mem " , self.Mem
print "self.ParDim " , self.ParDim
print "self.Worksize " , self.Worksize
print "self.IdxToDim " , self.IdxToDim
print "self.WriteOnly " , self.WriteOnly
print "self.ReadOnly " , self.ReadOnly
print "self.Subscript " , self.Subscript
print "self.SubscriptNoId " , self.SubscriptNoId
print "TRANSFORMATIONS"
print "self.Transposition " , self.Transposition
print "self.ConstantMemory " , self.ConstantMemory
print "self.KernelArgs " , self.KernelArgs
print "self.NameSwap " , self.NameSwap
print "self.LocalSwap " , self.LocalSwap
print "self.LoopArrays " , self.LoopArrays
print "self.Add ", self.Add
print "self.GlobalVars ", self.GlobalVars
print "self.ConstantMem " , self.ConstantMem
print "self.Loops " , self.Loops
print "self.RefToLoop ", self.RefToLoop
def rewrite(self, ast, functionname = 'FunctionName', changeAST = True):
    """Rewrites a few things in the AST to increase the
    abstraction level.

    Strips include directives and any statements preceding the first
    for-loop, then (when changeAST is set) wraps the remaining
    statements in a void function whose parameters are the array ids
    (each followed by its dimension sizes) and the scalar ids.
    """
    func_typeid = TypeId(['void'], Id(functionname), ast.coord)
    params = list()
    # One parameter per array, followed by a size_t parameter for
    # each of its dimensions (hst_ptr<name>_dim1, _dim2, ...).
    for array_name in self.ArrayIds:
        params.append(TypeId(self.Type[array_name], Id(array_name, ast.coord), ast.coord))
        for dim in xrange(self.NumDims[array_name]):
            dim_id = Id('hst_ptr' + array_name + '_dim' + str(dim + 1), ast.coord)
            params.append(TypeId(['size_t'], dim_id, ast.coord))
    # Scalar (non-array) identifiers are passed through unchanged.
    for scalar_name in self.NonArrayIds:
        params.append(TypeId(self.Type[scalar_name], Id(scalar_name, ast.coord), ast.coord))
    arglist = ArgList(list(params))
    # Move leading includes aside, then drop everything up to the
    # first for-loop.
    while isinstance(ast.ext[0], Include):
        self.Includes.append(ast.ext.pop(0))
    while not isinstance(ast.ext[0], ForLoop):
        ast.ext.pop(0)
    body = Compound(ast.ext)
    if changeAST:
        ast.ext = list()
        ast.ext.append(FuncDecl(func_typeid, arglist, body))
def rewriteToSequentialC(self, ast):
    """Rewrite the AST into plain sequential C.

    Re-runs the loop and array analyses on the current AST and then
    rewrites every array reference using the recorded dimension
    information.
    """
    loop_finder = ForLoops()
    loop_finder.visit(ast)
    index_finder = LoopIndices()
    index_finder.visit(loop_finder.ast)
    self.index = index_finder.index
    subscript_info = Arrays(self.index)
    subscript_info.visit(ast)
    dim_finder = FindDim(subscript_info.numIndices)
    dim_finder.visit(ast)
    ref_rewriter = RewriteArrayRef(self.NumDims,
                                   self.ArrayIdToDimName,
                                   self)
    ref_rewriter.visit(ast)
def rewriteToDeviceCTemp(self, ast, changeAST = True):
    """Rewrite the AST into a temporary device-C (OpenCL) form.

    Marks the found function as __kernel, substitutes thread ids for
    the parallelized loop indices in the stored kernel body, and
    (when changeAST is set) replaces the AST contents with the new
    kernel function.
    """
    device_args = FindDeviceArgs(self.NonArrayIds)
    device_args.visit(ast)
    func_finder = FindFunction()
    func_finder.visit(ast)
    # add OpenCL keywords to indicate the kernel function.
    func_finder.typeid.type.insert(0, '__kernel')
    idx_swapper = ExchangeIndices(self.IndexToThreadId)
    idx_swapper.visit(self.Kernel)
    kernel_func = FuncDecl(func_finder.typeid,
                           ArgList(device_args.arglist, ast.coord),
                           self.Kernel, ast.coord)
    if changeAST:
        ast.ext = list()
        ast.ext.append(kernel_func)
def InSourceKernel(self, ast, cond, filename, kernelstringname):
    """Embed the kernel source as a string-producing function.

    Rewrites `ast` to its release device-C form, generates an AST for
    a function that returns the kernel source as a string, and
    records it together with its guard condition for later emission.
    """
    self.rewriteToDeviceCRelease(ast)
    generator = SSGenerator()
    stream_ast = FileAST([])
    generator.createKernelStringStream(ast, stream_ast, self.UnrollLoops,
                                       kernelstringname, filename = filename)
    self.KernelStringStream.append({'name' : kernelstringname,
                                    'ast' : stream_ast,
                                    'cond' : cond})
def rewriteToDeviceCRelease(self, ast):
    """Rewrite `ast` into the release OpenCL kernel form.

    Builds the kernel argument list from self.KernelArgs, applies the
    recorded name/index/type substitutions to a copy of the stored
    kernel body, prepends the cl_khr_fp64 pragma when any double
    argument is present, and replaces the contents of `ast` with the
    finished kernel function.
    """
    # The list of arguments for the kernel.
    arglist = list()
    for n in self.KernelArgs:
        arg_type = copy.deepcopy(self.KernelArgs[n])
        # size_t is not a portable OpenCL kernel argument type.
        if arg_type[0] == 'size_t':
            arg_type[0] = 'unsigned'
        # Pointer arguments live in __global memory.
        if len(arg_type) == 2:
            arg_type.insert(0, '__global')
        arglist.append(TypeId(arg_type, Id(n)))
    # Apply the local-memory name swaps to the recorded array refs.
    exchangeArrayId = ExchangeArrayId(self.LocalSwap)
    for n in self.LoopArrays.values():
        for m in n:
            exchangeArrayId.visit(m)
    ## for n in self.Add:
    ##     addToIds = AddToId(n, self.Add[n])
    ##     addToIds.visit(self.InsideKernel.compound)
    # Work on a copy so the stored kernel stays untouched.
    MyKernel = copy.deepcopy(self.Kernel)
    rewriteArrayRef = RewriteArrayRef(self.NumDims, self.ArrayIdToDimName, self)
    rewriteArrayRef.visit(MyKernel)
    exchangeIndices = ExchangeId(self.IndexToThreadId)
    exchangeIndices.visit(MyKernel)
    exchangeTypes = ExchangeTypes()
    exchangeTypes.visit(MyKernel)
    typeid = copy.deepcopy(self.DevFuncTypeId)
    typeid.type.insert(0, '__kernel')
    ext = copy.deepcopy(self.Includes)
    newast = FileAST(ext)
    # Doubles require the cl_khr_fp64 extension to be enabled.
    for n in arglist:
        if len(n.type) == 3:
            if n.type[1] == 'double':
                ext.insert(0, Compound([Id("#pragma OPENCL EXTENSION cl_khr_fp64: enable")]))
                break
        else:
            if n.type[0] == 'double':
                ext.insert(0, Compound([Id("#pragma OPENCL EXTENSION cl_khr_fp64: enable")]))
                break
    ext.append(FuncDecl(typeid, ArgList(arglist), MyKernel))
    ast.ext = list()
    ## ast.ext.append(Id('#define LSIZE ' + str(self.Local['size'])))
    ast.ext.append(newast)
def constantMemory2(self, arrDict):
    """Move the selected array references into OpenCL __constant memory.

    arrDict maps array names to lists of indices into
    self.LoopArrays[name]; the selected references are gathered into
    one new __constant pointer.  Generates the host-side copy loop in
    self.ConstantMemory and retargets the kernel references.  Prints
    a diagnostic and returns early on unsupported inputs.
    """
    arrNames = arrDict.keys()
    # find out if we need to split into global and constant memory space
    split = dict()
    for name in arrNames:
        if len(arrDict[name]) != len(self.Subscript[name]):
            # Every aref to name is not put in constant memory
            # so we split.
            split[name] = True
        else:
            split[name] = False
    # Add new constant pointer
    ptrname = 'Constant' + ''.join(arrNames)
    hst_ptrname = 'hst_ptr' + ptrname
    dev_ptrname = 'dev_ptr' + ptrname
    # All source arrays must share one element type.
    typeset = set()
    for name in arrNames:
        typeset.add(self.Type[name][0])
    if len(typeset) > 1:
        print "Conflicting types in constant memory transformation... Aborting"
        return
    ptrtype = [typeset.pop(), '*']
    # Add the ptr to central data structures
    self.Type[ptrname] = ptrtype
    self.DevId[ptrname] = dev_ptrname
    self.HstId[ptrname] = hst_ptrname
    self.Mem[ptrname] = self.HstId[ptrname]+'_mem_size'
    # Add the ptr to be a kernel argument
    self.KernelArgs[ptrname] = ['__constant'] + ptrtype
    self.GlobalVars[ptrname] = ''
    self.ConstantMem[ptrname] = arrNames
    # Delete original arguments if we split
    for n in split:
        if not split[n]:
            self.KernelArgs.pop(n)
            self.DevId.pop(n)
    # Add pointer allocation to AllocateBuffers
    lval = Id(self.HstId[ptrname])
    rval = Id('new ' + self.Type[ptrname][0] + '['\
        + self.Mem[ptrname] + ']')
    self.ConstantMemory.statements.append(Assignment(lval, rval))
    # find the loop that we need to add to the allocation section.
    # Do it by looking at the loop indices in the subscripts.
    ids = []
    for s in arrDict:
        # Just look at only the first subscript at the moment
        array = arrDict[s]
        subs = self.LoopArrays[s]
        try:
            sub = subs[array[0]]
        except IndexError:
            print array[0]
            print subs
            print "ConstantMemory: Wrong index... Are you using zero indexing for the beginning of the loop?"
            return
        arrays = Arrays(self.Loops.keys())
        arrays.visit(sub)
        # Loop indices used in the subscript, excluding grid indices.
        ids = set(arrays.SubIdx[s][0]) - set([None]) - set(self.GridIndices)
        break
    if len(ids) > 1:
        print "Constant memory only supported for one loop at the moment"
        return
    # Add the loop to the allocation code
    forloop = copy.deepcopy(self.Loops[iter(ids).next()])
    newcomp = []
    forcomp = []
    groupComp = GroupCompound(newcomp)
    forloop.compound = Compound(forcomp)
    loopcount = forloop.init.lval.name.name
    # Add the for loop from the kernel
    newcomp.append(forloop)
    # find dimension of the constant ptr
    constantdim = sum([ (len(arrDict[m])) for m in arrDict])
    # add constant writes: subscripts constantdim*loop + i interleave
    # the source references in the constant buffer.
    writes = []
    for i in xrange(constantdim):
        writes.append((
            [BinOp(BinOp(Id(str(constantdim)), '*', \
                Id(loopcount)), '+', Id(str(i)))]))
    # for rewriting the ARefs that we copy
    rewriteArrayRef = RewriteArrayRef(self.NumDims, self.ArrayIdToDimName, self)
    # add global loadings
    count = 0
    for n in arrDict:
        for i in arrDict[n]:
            aref = copy.deepcopy(self.LoopArrays[n][i])
            name = aref.name.name
            rewriteArrayRef.visit(aref)
            aref.name.name = self.HstId[name]
            lval = ArrayRef(Id(self.HstId[ptrname]), writes[count])
            assign = Assignment(lval, aref)
            forcomp.append(assign)
            count += 1
    # Must now replace global arefs with constant arefs
    count = 0
    for n in arrDict:
        for i in (arrDict[n]):
            aref_new = writes[count]
            aref_old = self.LoopArrays[n][i]
            # Copying the internal data of the two arefs
            aref_old.name.name = ptrname
            aref_old.subscript = aref_new
            count += 1
    self.ConstantMemory.statements.append(groupComp)
def generateBoilerplateCode(self, ast):
    """Generate the host-side OpenCL boilerplate as a new FileAST.

    Emits global declarations (kernel handle, device buffers, host
    pointers, size variables) plus the GetKernelCode, AllocateBuffers,
    SetArguments<func>, Exec<func> and RunOCL<kernel> functions, and
    returns the new FileAST.  `ast` is never referenced in this body.
    """
    # NOTE(review): several of these aliases (dictNToNumScripts,
    # idMap, gridIds, otherIds) are never used below.
    dictNToNumScripts = self.NumDims
    dictNToDimNames = self.ArrayIdToDimName
    idMap = self.IndexToThreadId
    gridIds = self.GridIndices
    NonArrayIds = copy.deepcopy(self.NonArrayIds)
    otherIds = self.ArrayIds.union(self.NonArrayIds) - self.RemovedIds
    fileAST = FileAST([])
    fileAST.ext.append(Id('#include \"../../../utils/StartUtil.cpp\"'))
    fileAST.ext.append(Id('using namespace std;'))
    ## fileAST.ext.append(Id('#define LSIZE ' + str(self.Local['size'][0])))
    # Global cl_kernel handle.
    kernelId = Id(self.KernelName)
    kernelTypeid = TypeId(['cl_kernel'], kernelId, 0)
    fileAST.ext.append(kernelTypeid)
    ## fileAST.show()
    # Global cl_mem declarations for every device buffer.
    listDevBuffers = []
    for n in self.ArrayIds:
        try:
            name = self.DevId[n]
            listDevBuffers.append(TypeId(['cl_mem'], Id(name)))
        except KeyError:
            # Array has no device buffer of its own (e.g. folded into
            # a constant-memory pointer).
            pass
    for n in self.ConstantMem:
        name = self.DevId[n]
        listDevBuffers.append(TypeId(['cl_mem'], Id(name)))
    dictNToDevPtr = self.DevId
    listDevBuffers = GroupCompound(listDevBuffers)
    fileAST.ext.append(listDevBuffers)
    # Global host-pointer declarations.
    listHostPtrs = []
    dictTypeHostPtrs = dict()
    dictNToHstPtr = dict()
    for n in self.DevArgList:
        name = n.name.name
        type = self.Type[name]
        try:
            name = self.HstId[name]
        except KeyError:
            pass
        listHostPtrs.append(TypeId(type, Id(name), 0))
    for n in self.GlobalVars:
        type = self.Type[n]
        name = self.HstId[n]
        listHostPtrs.append(TypeId(type, Id(name), 0))
    dictNToHstPtr = self.HstId
    dictTypeHostPtrs = copy.deepcopy(self.Type)
    listHostPtrs = GroupCompound(listHostPtrs)
    fileAST.ext.append(listHostPtrs)
    # Global size variables: buffer byte sizes and array dimensions.
    listMemSize = []
    listDimSize = []
    listMemSizeCalcTemp = []
    dictMemSizeCalc = dict()
    dictNToSize = self.Mem
    for n in self.Mem:
        sizeName = self.Mem[n]
        listMemSize.append(TypeId(['size_t'], Id(sizeName)))
    for n in self.ArrayIds:
        for dimName in self.ArrayIdToDimName[n]:
            listDimSize.append(\
                TypeId(['size_t'], Id(dimName)))
    fileAST.ext.append(GroupCompound(listMemSize))
    fileAST.ext.append(GroupCompound(listDimSize))
    # Misc globals: first-run flag, kernel defines string, timer.
    misc = []
    lval = TypeId(['size_t'], Id('isFirstTime'))
    rval = Constant(1)
    misc.append(Assignment(lval,rval))
    lval = TypeId(['std::string'], Id('KernelDefines'))
    rval = Constant('""')
    misc.append(Assignment(lval,rval))
    lval = TypeId(['Stopwatch'], Id('timer'))
    misc.append(lval)
    fileAST.ext.append(GroupCompound(misc))
    # Generate the GetKernelCode function
    for optim in self.KernelStringStream:
        fileAST.ext.append(optim['ast'])
    getKernelCode = EmptyFuncDecl('GetKernelCode', type = ['std::string'])
    getKernelStats = []
    getKernelCode.compound.statements = getKernelStats
    getKernelStats.append(self.IfThenElse)
    ## getKernelStats.append(Id('return str.str();'))
    fileAST.ext.append(getKernelCode)
    # AllocateBuffers: compute sizes, run the recorded transformation
    # statement groups, then create every cl_mem buffer.
    allocateBuffer = EmptyFuncDecl('AllocateBuffers')
    fileAST.ext.append(allocateBuffer)
    listSetMemSize = []
    for entry in self.ArrayIds:
        n = self.ArrayIdToDimName[entry]
        lval = Id(self.Mem[entry])
        rval = BinOp(Id(n[0]),'*', Id('sizeof('+\
            self.Type[entry][0]+')'))
        if len(n) == 2:
            rval = BinOp(Id(n[1]),'*', rval)
        listSetMemSize.append(Assignment(lval,rval))
    for n in self.ConstantMem:
        # Constant buffer size is the sum of its member buffer sizes.
        terms = self.ConstantMem[n]
        rval = Id(self.Mem[terms[0]])
        for s in terms[1:]:
            rval = BinOp(rval, '+', Id(self.Mem[s]))
        lval = Id(self.Mem[n])
        listSetMemSize.append(Assignment(lval,rval))
    allocateBuffer.compound.statements.append(\
        GroupCompound(listSetMemSize))
    allocateBuffer.compound.statements.append(\
        self.Transposition)
    allocateBuffer.compound.statements.append(\
        self.ConstantMemory)
    allocateBuffer.compound.statements.append(\
        self.Define)
    ErrName = 'oclErrNum'
    lval = TypeId(['cl_int'], Id(ErrName))
    rval = Id('CL_SUCCESS')
    clSuc = Assignment(lval,rval)
    allocateBuffer.compound.statements.extend(\
        [GroupCompound([clSuc])])
    for n in dictNToDevPtr:
        lval = Id(dictNToDevPtr[n])
        op = '='
        arrayn = dictNToHstPtr[n]
        try:
            arrayn = self.NameSwap[arrayn]
        except KeyError:
            pass
        # Buffer flags depend on the access pattern of the array.
        if n in self.WriteOnly:
            flag = Id('CL_MEM_WRITE_ONLY')
            arraynId = Id('NULL')
        elif n in self.ReadOnly:
            flag = Id('CL_MEM_USE_HOST_PTR | CL_MEM_READ_ONLY')
            arraynId = Id(arrayn)
        else:
            flag = Id('CL_MEM_USE_HOST_PTR')
            arraynId = Id(arrayn)
        arglist = ArgList([Id('context'),\
                           flag,\
                           Id(dictNToSize[n]),\
                           arraynId,\
                           Id('&'+ErrName)])
        rval = FuncDecl(Id('clCreateBuffer'), arglist, Compound([]))
        allocateBuffer.compound.statements.append(\
            Assignment(lval,rval))
        arglist = ArgList([Id(ErrName), Constant("clCreateBuffer " + lval.name)])
        ErrCheck = FuncDecl(Id('oclCheckErr'),arglist, Compound([]))
        allocateBuffer.compound.statements.append(ErrCheck)
    # SetArguments<func>: one clSetKernelArg call per kernel argument.
    setArgumentsKernel = EmptyFuncDecl('SetArguments'+self.DevFuncId)
    fileAST.ext.append(setArgumentsKernel)
    ArgBody = setArgumentsKernel.compound.statements
    ArgBody.append(clSuc)
    cntName = Id('counter')
    lval = TypeId(['int'], cntName)
    rval = Constant(0)
    ArgBody.append(Assignment(lval,rval))
    for n in dictNToDimNames:
        ## add dim arguments to set of ids
        NonArrayIds.add(dictNToDimNames[n][0])
        # Add types of dimensions for size arguments
        dictTypeHostPtrs[dictNToDimNames[n][0]] = ['size_t']
    for n in self.RemovedIds:
        dictTypeHostPtrs.pop(n,None)
    ## clSetKernelArg for Arrays
    for n in self.KernelArgs:
        lval = Id(ErrName)
        op = '|='
        type = self.Type[n]
        if len(type) == 2:
            # Pointer argument: pass the cl_mem handle.
            arglist = ArgList([kernelId,\
                               Increment(cntName,'++'),\
                               Id('sizeof(cl_mem)'),\
                               Id('(void *) &' + dictNToDevPtr[n])])
            rval = FuncDecl(Id('clSetKernelArg'),arglist, Compound([]))
        else:
            # Scalar argument: pass the host variable by address.
            try:
                n = self.NameSwap[n]
            except KeyError:
                pass
            cl_type = type[0]
            if cl_type == 'size_t':
                cl_type = 'unsigned'
            arglist = ArgList([kernelId,\
                               Increment(cntName,'++'),\
                               Id('sizeof('+cl_type+')'),\
                               Id('(void *) &' + n)])
            rval = FuncDecl(Id('clSetKernelArg'),arglist, Compound([]))
        ArgBody.append(Assignment(lval,rval,op))
    arglist = ArgList([Id(ErrName), Constant('clSetKernelArg')])
    ErrId = Id('oclCheckErr')
    ErrCheck = FuncDecl(ErrId, arglist, Compound([]))
    ArgBody.append(ErrCheck)
    # Exec<func>: set up worksizes, enqueue the kernel, read back.
    execKernel = EmptyFuncDecl('Exec' + self.DevFuncTypeId.name.name)
    fileAST.ext.append(execKernel)
    execBody = execKernel.compound.statements
    execBody.append(clSuc)
    eventName = Id('GPUExecution')
    event = TypeId(['cl_event'], eventName)
    execBody.append(event)
    for n in self.Worksize:
        lval = TypeId(['size_t'], Id(self.Worksize[n] + '[]'))
        if n == 'local':
            local_worksize = [Id(i) for i in self.Local['size']]
            rval = ArrayInit(local_worksize)
        elif n == 'global':
            # Global size per grid dimension: upper - lower bound.
            initlist = []
            for m in reversed(self.GridIndices):
                initlist.append(Id(self.UpperLimit[m]\
                    +' - '+ self.LowerLimit[m]))
            rval = ArrayInit(initlist)
        else:
            # Offset per grid dimension: the lower bound.
            initlist = []
            for m in reversed(self.GridIndices):
                initlist.append(Id(self.LowerLimit[m]))
            rval = ArrayInit(initlist)
        execBody.append(Assignment(lval,rval))
    lval = Id(ErrName)
    arglist = ArgList([Id('command_queue'),\
                       Id(self.KernelName),\
                       Constant(self.ParDim),\
                       Id(self.Worksize['offset']),\
                       Id(self.Worksize['global']),\
                       Id(self.Worksize['local']),\
                       Constant(0), Id('NULL'), \
                       Id('&' + eventName.name)])
    rval = FuncDecl(Id('clEnqueueNDRangeKernel'),arglist, Compound([]))
    execBody.append(Assignment(lval,rval))
    arglist = ArgList([Id(ErrName), Constant('clEnqueueNDRangeKernel')])
    ErrCheck = FuncDecl(ErrId, arglist, Compound([]))
    execBody.append(ErrCheck)
    arglist = ArgList([Id('command_queue')])
    finish = FuncDecl(Id('clFinish'), arglist, Compound([]))
    execBody.append(Assignment(Id(ErrName), finish))
    arglist = ArgList([Id(ErrName), Constant('clFinish')])
    ErrCheck = FuncDecl(ErrId, arglist, Compound([]))
    execBody.append(ErrCheck)
    if not self.NoReadBack:
        # Read every write-only buffer back to its host pointer.
        for n in self.WriteOnly:
            lval = Id(ErrName)
            Hstn = self.HstId[n]
            try:
                Hstn = self.NameSwap[Hstn]
            except KeyError:
                pass
            arglist = ArgList([Id('command_queue'),\
                               Id(self.DevId[n]),\
                               Id('CL_TRUE'),\
                               Constant(0),\
                               Id(self.Mem[n]),\
                               Id(Hstn),\
                               Constant(1),
                               Id('&' + eventName.name),Id('NULL')])
            rval = FuncDecl(Id('clEnqueueReadBuffer'),arglist, Compound([]))
            execBody.append(Assignment(lval,rval))
            arglist = ArgList([Id(ErrName), Constant('clEnqueueReadBuffer')])
            ErrCheck = FuncDecl(ErrId, arglist, Compound([]))
            execBody.append(ErrCheck)
        # add clFinish statement
        arglist = ArgList([Id('command_queue')])
        finish = FuncDecl(Id('clFinish'), arglist, Compound([]))
        execBody.append(Assignment(Id(ErrName), finish))
        arglist = ArgList([Id(ErrName), Constant('clFinish')])
        ErrCheck = FuncDecl(ErrId, arglist, Compound([]))
        execBody.append(ErrCheck)
        # NOTE(review): the write-back transposition statements are
        # emitted under the read-back guard here — confirm against
        # the unflattened original if NoReadBack is ever used
        # together with WriteTranspose.
        for n in self.WriteTranspose:
            execBody.append(n)
    # RunOCL<kernel>: public entry point; copies the arg_* parameters
    # into the globals and performs one-time setup on first call.
    runOCL = EmptyFuncDecl('RunOCL' + self.KernelName)
    fileAST.ext.append(runOCL)
    runOCLBody = runOCL.compound.statements
    argIds = self.NonArrayIds.union(self.ArrayIds) #
    typeIdList = []
    ifThenList = []
    for n in argIds:
        type = self.Type[n]
        argn = Id('arg_'+n)
        typeIdList.append(TypeId(type,argn))
        try:
            newn = self.HstId[n]
        except KeyError:
            newn = n
        lval = Id(newn)
        rval = argn
        ifThenList.append(Assignment(lval,rval))
        try:
            # Arrays also receive their dimension sizes as arguments.
            for m in self.ArrayIdToDimName[n]:
                type = ['size_t']
                argm = Id('arg_'+m)
                lval = Id(m)
                rval = argm
                ifThenList.append(Assignment(lval,rval))
                typeIdList.append(TypeId(type, argm))
        except KeyError:
            pass
    arglist = ArgList(typeIdList)
    runOCL.arglist = arglist
    arglist = ArgList([])
    ifThenList.append(FuncDecl(Id('StartUpGPU'), arglist, Compound([])))
    ifThenList.append(FuncDecl(Id('AllocateBuffers'), arglist, Compound([])))
    # Kernel source comes from a string stream when one was recorded,
    # otherwise it is read from the .cl file.
    useFile = 'true'
    if self.KernelStringStream:
        useFile = 'false'
    ifThenList.append(Id('cout << "$Defines " << KernelDefines << endl;'))
    arglist = ArgList([Constant(self.DevFuncId),
                       Constant(self.DevFuncId+'.cl'),
                       Id('GetKernelCode()'),
                       Id(useFile),
                       Id('&' + self.KernelName),
                       Id('KernelDefines')])
    ifThenList.append(FuncDecl(Id('compileKernel'), arglist, Compound([])))
    ifThenList.append(FuncDecl(Id('SetArguments'+self.DevFuncId), ArgList([]), Compound([])))
    runOCLBody.append(IfThen(Id('isFirstTime'), Compound(ifThenList)))
    arglist = ArgList([])
    # Insert timing
    runOCLBody.append(Id('timer.start();'))
    runOCLBody.append(FuncDecl(Id('Exec' + self.DevFuncId), arglist, Compound([])))
    runOCLBody.append(Id('cout << "$Time " << timer.stop() << endl;'))
    return fileAST
| dikujepsen/OpenTran | v2.0/framework/old/rewriter.py | Python | mit | 39,575 |
#!/usr/bin/env python
#
# Author: Ying Xiong.
# Created: Mar 18, 2014.
import numpy as np
import unittest
from quaternion import *
from unittest_utils import *
class QuaternionTest(unittest.TestCase):
    """Unit tests for the quaternion utility functions."""

    def testQuadHProd(self):
        # Quaternion basis elements 1, i, j, k.
        one = np.array([1, 0, 0, 0])
        qi = np.array([0, 1, 0, 0])
        qj = np.array([0, 0, 1, 0])
        qk = np.array([0, 0, 0, 1])
        eps = 1e-12
        # ii = jj = kk = ijk = -1.
        for basis in (qi, qj, qk):
            check_near(quatHProd(basis, basis), -one, eps)
        check_near(quatHProd(quatHProd(qi, qj), qk), -one, eps)
        # Cyclic products ij=k, jk=i, ki=j, and their anti-commuted forms.
        for a, b, c in ((qi, qj, qk), (qj, qk, qi), (qk, qi, qj)):
            check_near(quatHProd(a, b), c, eps)
            check_near(quatHProd(b, a), -c, eps)
        # ||q||^2 = q q* = q* q for random quaternions.
        np.random.seed(0)
        for _ in xrange(100):
            q = np.random.randn(4)
            q_conj = quatConj(q)
            norm_sq = np.array([np.dot(q, q), 0, 0, 0])
            check_near(quatHProd(q, q_conj), norm_sq, eps)
            check_near(quatHProd(q_conj, q), norm_sq, eps)

    def testQuatRecip(self):
        # q^-1 q = q q^-1 = 1 for random quaternions.
        np.random.seed(0)
        eps = 1e-12
        identity = np.array([1, 0, 0, 0])
        for _ in xrange(100):
            q = np.random.randn(4)
            q_inv = quatRecip(q)
            check_near(quatHProd(q_inv, q), identity, eps)
            check_near(quatHProd(q, q_inv), identity, eps)

    def testQuatToRotMatx(self):
        # Rotating a vector by the quaternion directly and by its
        # matrix form must give the same result.
        np.random.seed(0)
        eps = 1e-12
        for _ in xrange(100):
            vec = np.random.randn(3)
            q = np.random.randn(4)
            q /= np.linalg.norm(q)
            rot = quatToRotMatx(q)
            check_near(rotVecByQuat(vec, q), np.dot(rot, vec), eps)

    def testQuatFromRotMatx(self):
        # Round-trip quaternion -> matrix -> quaternion; q and -q
        # represent the same rotation, so accept either sign.
        np.random.seed(0)
        eps = 1e-12
        for _ in xrange(100):
            q = np.random.randn(4)
            q /= np.linalg.norm(q)
            q_back = quatFromRotMatx(quatToRotMatx(q))
            assert check_near(q, q_back, eps, raise_exception=False) or \
                check_near(q, -q_back, eps, raise_exception=False)

    def testRotVecByAxisAng(self):
        # Rotating x by 60 degrees about z lands at (cos60, sin60, 0).
        eps = 1e-12
        x_axis = np.array([1, 0, 0])
        z_axis = np.array([0, 0, 1])
        angle = np.pi / 3
        expected = np.array([np.cos(angle), np.sin(angle), 0])
        check_near(rotVecByAxisAng(x_axis, z_axis, angle), expected, eps)
| yxiong/xy_python_utils | xy_python_utils/quaternion_test.py | Python | mit | 2,857 |
import random
import docker
from docker.utils import create_ipam_config
from docker.utils import create_ipam_pool
import pytest
from ..helpers import requires_api_version
from .base import BaseIntegrationTest
class TestNetworks(BaseIntegrationTest):
def create_network(self, *args, **kwargs):
    """Create a uniquely-named network, register it for cleanup, and
    return its (name, id) pair."""
    name = u'dockerpy{}'.format(random.getrandbits(24))[:14]
    network_id = self.client.create_network(name, *args, **kwargs)['Id']
    self.tmp_networks.append(network_id)
    return (name, network_id)
@requires_api_version('1.21')
def test_list_networks(self):
    # Creating a network must grow the list by exactly one entry.
    networks = self.client.networks()
    initial_size = len(networks)
    net_name, net_id = self.create_network()
    networks = self.client.networks()
    assert len(networks) == initial_size + 1
    assert net_id in [n['Id'] for n in networks]
    # Filtering by name and by partial id must both find it.
    networks_by_name = self.client.networks(names=[net_name])
    assert [n['Id'] for n in networks_by_name] == [net_id]
    networks_by_partial_id = self.client.networks(ids=[net_id[:8]])
    assert [n['Id'] for n in networks_by_partial_id] == [net_id]
@requires_api_version('1.21')
def test_inspect_network(self):
    # A freshly created network reports the expected default fields.
    net_name, net_id = self.create_network()
    net = self.client.inspect_network(net_id)
    assert net['Id'] == net_id
    assert net['Name'] == net_name
    assert net['Driver'] == 'bridge'
    assert net['Scope'] == 'local'
    assert net['IPAM']['Driver'] == 'default'
@requires_api_version('1.21')
def test_create_network_with_ipam_config(self):
    # The IPAM pool configuration passed at creation must be
    # reported back verbatim by inspect.
    aux_addresses = {
        "a": "172.28.1.5",
        "b": "172.28.1.6",
        "c": "172.28.1.7",
    }
    pool = create_ipam_pool(
        subnet="172.28.0.0/16",
        iprange="172.28.5.0/24",
        gateway="172.28.5.254",
        aux_addresses=aux_addresses,
    )
    _, net_id = self.create_network(
        ipam=create_ipam_config(driver='default', pool_configs=[pool]),
    )
    ipam = self.client.inspect_network(net_id)['IPAM']
    assert ipam.pop('Options', None) is None
    assert ipam['Driver'] == 'default'
    assert ipam['Config'] == [{
        'Subnet': "172.28.0.0/16",
        'IPRange': "172.28.5.0/24",
        'Gateway': "172.28.5.254",
        'AuxiliaryAddresses': aux_addresses,
    }]
@requires_api_version('1.21')
def test_create_network_with_host_driver_fails(self):
    # The 'host' network is a singleton; asking the daemon to create
    # another host-driver network must be rejected.
    name = 'dockerpy{}'.format(random.getrandbits(24))[:14]
    with pytest.raises(docker.errors.APIError):
        self.client.create_network(name, driver='host')
@requires_api_version('1.21')
def test_remove_network(self):
    # Removing a created network must restore the original count.
    initial_size = len(self.client.networks())
    net_name, net_id = self.create_network()
    assert len(self.client.networks()) == initial_size + 1
    self.client.remove_network(net_id)
    assert len(self.client.networks()) == initial_size
@requires_api_version('1.21')
def test_connect_and_disconnect_container(self):
    net_name, net_id = self.create_network()
    container = self.client.create_container('busybox', 'top')
    self.tmp_containers.append(container)
    self.client.start(container)
    # The new network starts out with no connected containers.
    network_data = self.client.inspect_network(net_id)
    assert not network_data.get('Containers')
    # Connecting lists the container; connecting twice is an error.
    self.client.connect_container_to_network(container, net_id)
    network_data = self.client.inspect_network(net_id)
    assert list(network_data['Containers'].keys()) == [container['Id']]
    with pytest.raises(docker.errors.APIError):
        self.client.connect_container_to_network(container, net_id)
    # Disconnecting removes it; disconnecting twice is an error.
    self.client.disconnect_container_from_network(container, net_id)
    network_data = self.client.inspect_network(net_id)
    assert not network_data.get('Containers')
    with pytest.raises(docker.errors.APIError):
        self.client.disconnect_container_from_network(container, net_id)
@requires_api_version('1.22')
def test_connect_and_force_disconnect_container(self):
    net_name, net_id = self.create_network()
    container = self.client.create_container('busybox', 'top')
    self.tmp_containers.append(container)
    self.client.start(container)
    network_data = self.client.inspect_network(net_id)
    assert not network_data.get('Containers')
    self.client.connect_container_to_network(container, net_id)
    network_data = self.client.inspect_network(net_id)
    assert list(network_data['Containers'].keys()) == [container['Id']]
    # A forced disconnect removes the connected container...
    self.client.disconnect_container_from_network(container, net_id, True)
    network_data = self.client.inspect_network(net_id)
    assert not network_data.get('Containers')
    # ...but force-disconnecting an unconnected container still fails.
    with pytest.raises(docker.errors.APIError):
        self.client.disconnect_container_from_network(
            container, net_id, force=True
        )
@requires_api_version('1.22')
def test_connect_with_aliases(self):
    # Aliases passed on connect must show up in the container's
    # network settings for that network.
    net_name, net_id = self.create_network()
    container = self.client.create_container('busybox', 'top')
    self.tmp_containers.append(container)
    self.client.start(container)
    self.client.connect_container_to_network(
        container, net_id, aliases=['foo', 'bar'])
    info = self.client.inspect_container(container)
    aliases = info['NetworkSettings']['Networks'][net_name]['Aliases']
    for expected in ('foo', 'bar'):
        assert expected in aliases
@requires_api_version('1.21')
def test_connect_on_container_create(self):
    # Passing network_mode at container creation connects the
    # container automatically.
    net_name, net_id = self.create_network()
    container = self.client.create_container(
        image='busybox',
        command='top',
        host_config=self.client.create_host_config(network_mode=net_name),
    )
    self.tmp_containers.append(container)
    self.client.start(container)
    network_data = self.client.inspect_network(net_id)
    assert list(network_data['Containers'].keys()) == [container['Id']]
    self.client.disconnect_container_from_network(container, net_id)
    network_data = self.client.inspect_network(net_id)
    assert not network_data.get('Containers')
@requires_api_version('1.22')
def test_create_with_aliases(self):
    # Aliases supplied through an endpoint config at container
    # creation must appear in the container's network settings.
    net_name, net_id = self.create_network()
    endpoint = self.client.create_endpoint_config(aliases=['foo', 'bar'])
    container = self.client.create_container(
        image='busybox',
        command='top',
        host_config=self.client.create_host_config(
            network_mode=net_name,
        ),
        networking_config=self.client.create_networking_config({
            net_name: endpoint,
        }),
    )
    self.tmp_containers.append(container)
    self.client.start(container)
    info = self.client.inspect_container(container)
    aliases = info['NetworkSettings']['Networks'][net_name]['Aliases']
    for expected in ('foo', 'bar'):
        assert expected in aliases
@requires_api_version('1.22')
def test_create_with_ipv4_address(self):
    # A static IPv4 address requested via the endpoint config must be
    # reported back in the container's IPAMConfig.
    net_name, net_id = self.create_network(
        ipam=create_ipam_config(
            driver='default',
            pool_configs=[create_ipam_pool(subnet="132.124.0.0/16")],
        ),
    )
    container = self.client.create_container(
        image='busybox', command='top',
        host_config=self.client.create_host_config(network_mode=net_name),
        networking_config=self.client.create_networking_config({
            net_name: self.client.create_endpoint_config(
                ipv4_address='132.124.0.23'
            )
        })
    )
    self.tmp_containers.append(container)
    self.client.start(container)
    container_data = self.client.inspect_container(container)
    net_data = container_data['NetworkSettings']['Networks'][net_name]
    assert net_data['IPAMConfig']['IPv4Address'] == '132.124.0.23'
@requires_api_version('1.22')
def test_create_with_ipv6_address(self):
    """A static IPv6 address requested at create time must be reported in
    the endpoint's IPAMConfig."""
    net_name, net_id = self.create_network(
        ipam=create_ipam_config(
            driver='default',
            pool_configs=[create_ipam_pool(subnet="2001:389::1/64")],
        ),
    )
    container = self.client.create_container(
        image='busybox', command='top',
        host_config=self.client.create_host_config(network_mode=net_name),
        networking_config=self.client.create_networking_config({
            net_name: self.client.create_endpoint_config(
                ipv6_address='2001:389::f00d'
            )
        })
    )
    self.tmp_containers.append(container)
    self.client.start(container)
    container_data = self.client.inspect_container(container)
    self.assertEqual(
        container_data[
            'NetworkSettings']['Networks'][net_name]['IPAMConfig'][
            'IPv6Address'
        ],
        '2001:389::f00d'
    )
@requires_api_version('1.24')
def test_create_with_linklocal_ips(self):
    """link_local_ips given at create time must round-trip into
    IPAMConfig['LinkLocalIPs'] of the bridge endpoint."""
    container = self.client.create_container(
        'busybox', 'top',
        networking_config=self.client.create_networking_config(
            {
                'bridge': self.client.create_endpoint_config(
                    link_local_ips=['169.254.8.8']
                )
            }
        ),
        host_config=self.client.create_host_config(network_mode='bridge')
    )
    self.tmp_containers.append(container)
    self.client.start(container)
    container_data = self.client.inspect_container(container)
    net_cfg = container_data['NetworkSettings']['Networks']['bridge']
    assert 'IPAMConfig' in net_cfg
    assert 'LinkLocalIPs' in net_cfg['IPAMConfig']
    assert net_cfg['IPAMConfig']['LinkLocalIPs'] == ['169.254.8.8']
@requires_api_version('1.22')
def test_create_with_links(self):
    """Links configured at create time appear in the endpoint's 'Links';
    once the upstream container exists its alias must resolve via DNS."""
    net_name, net_id = self.create_network()
    container = self.create_and_start(
        host_config=self.client.create_host_config(network_mode=net_name),
        networking_config=self.client.create_networking_config({
            net_name: self.client.create_endpoint_config(
                # link target does not need to exist yet at create time
                links=[('docker-py-test-upstream', 'bar')],
            ),
        }),
    )
    container_data = self.client.inspect_container(container)
    self.assertEqual(
        container_data['NetworkSettings']['Networks'][net_name]['Links'],
        ['docker-py-test-upstream:bar'])
    self.create_and_start(
        name='docker-py-test-upstream',
        host_config=self.client.create_host_config(network_mode=net_name),
    )
    # the alias 'bar' should now be resolvable from inside the container
    self.execute(container, ['nslookup', 'bar'])
@requires_api_version('1.21')
def test_create_check_duplicate(self):
    """check_duplicate=True must reject an existing network name, while
    check_duplicate=False allows creating a same-named network."""
    net_name, net_id = self.create_network()
    with self.assertRaises(docker.errors.APIError):
        self.client.create_network(net_name, check_duplicate=True)
    net_id = self.client.create_network(net_name, check_duplicate=False)
    self.tmp_networks.append(net_id['Id'])
@requires_api_version('1.22')
def test_connect_with_links(self):
    """Links passed to connect_container_to_network appear in 'Links' and
    resolve once the upstream container is started."""
    net_name, net_id = self.create_network()
    container = self.create_and_start(
        host_config=self.client.create_host_config(network_mode=net_name))
    # reconnect with an explicit link instead of the default endpoint
    self.client.disconnect_container_from_network(container, net_name)
    self.client.connect_container_to_network(
        container, net_name,
        links=[('docker-py-test-upstream', 'bar')])
    container_data = self.client.inspect_container(container)
    self.assertEqual(
        container_data['NetworkSettings']['Networks'][net_name]['Links'],
        ['docker-py-test-upstream:bar'])
    self.create_and_start(
        name='docker-py-test-upstream',
        host_config=self.client.create_host_config(network_mode=net_name),
    )
    self.execute(container, ['nslookup', 'bar'])
@requires_api_version('1.22')
def test_connect_with_ipv4_address(self):
    """A static IPv4 address passed to connect_container_to_network must
    be reported in the endpoint's IPAMConfig."""
    net_name, net_id = self.create_network(
        ipam=create_ipam_config(
            driver='default',
            pool_configs=[
                create_ipam_pool(
                    subnet="172.28.0.0/16", iprange="172.28.5.0/24",
                    gateway="172.28.5.254"
                )
            ]
        )
    )
    container = self.create_and_start(
        host_config=self.client.create_host_config(network_mode=net_name))
    # reconnect so the static address is applied by the connect call
    self.client.disconnect_container_from_network(container, net_name)
    self.client.connect_container_to_network(
        container, net_name, ipv4_address='172.28.5.24'
    )
    container_data = self.client.inspect_container(container)
    net_data = container_data['NetworkSettings']['Networks'][net_name]
    self.assertEqual(
        net_data['IPAMConfig']['IPv4Address'], '172.28.5.24'
    )
@requires_api_version('1.22')
def test_connect_with_ipv6_address(self):
    """A static IPv6 address passed to connect_container_to_network must
    be reported in the endpoint's IPAMConfig."""
    net_name, net_id = self.create_network(
        ipam=create_ipam_config(
            driver='default',
            pool_configs=[
                create_ipam_pool(
                    subnet="2001:389::1/64", iprange="2001:389::0/96",
                    gateway="2001:389::ffff"
                )
            ]
        )
    )
    container = self.create_and_start(
        host_config=self.client.create_host_config(network_mode=net_name))
    self.client.disconnect_container_from_network(container, net_name)
    self.client.connect_container_to_network(
        container, net_name, ipv6_address='2001:389::f00d'
    )
    container_data = self.client.inspect_container(container)
    net_data = container_data['NetworkSettings']['Networks'][net_name]
    self.assertEqual(
        net_data['IPAMConfig']['IPv6Address'], '2001:389::f00d'
    )
@requires_api_version('1.23')
def test_create_internal_networks(self):
    """A network created with internal=True must inspect as Internal."""
    _, net_id = self.create_network(internal=True)
    net = self.client.inspect_network(net_id)
    assert net['Internal'] is True
@requires_api_version('1.23')
def test_create_network_with_labels(self):
    """Labels given at network creation must round-trip through inspect."""
    _, net_id = self.create_network(labels={
        'com.docker.py.test': 'label'
    })
    net = self.client.inspect_network(net_id)
    assert 'Labels' in net
    assert len(net['Labels']) == 1
    assert net['Labels'] == {
        'com.docker.py.test': 'label'
    }
@requires_api_version('1.23')
def test_create_network_with_labels_wrong_type(self):
    """Passing labels as a list (instead of a dict) must raise TypeError."""
    with pytest.raises(TypeError):
        self.create_network(labels=['com.docker.py.test=label', ])
@requires_api_version('1.23')
def test_create_network_ipv6_enabled(self):
    """A network created with enable_ipv6=True must inspect as EnableIPv6."""
    _, net_id = self.create_network(enable_ipv6=True)
    net = self.client.inspect_network(net_id)
    assert net['EnableIPv6'] is True
| shakamunyi/docker-py | tests/integration/network_test.py | Python | apache-2.0 | 16,098 |
# -*- coding: utf-8 -*-
"""The check functions."""
# Authors: Alexandre Gramfort <alexandre.gramfort@inria.fr>
#
# License: BSD (3-clause)
import operator
from distutils.version import LooseVersion
import os.path as op
import numpy as np
from ._logging import warn, logger
def _ensure_int(x, name='unknown', must_be='an int'):
"""Ensure a variable is an integer."""
# This is preferred over numbers.Integral, see:
# https://github.com/scipy/scipy/pull/7351#issuecomment-299713159
try:
x = int(operator.index(x))
except TypeError:
raise TypeError('%s must be %s, got %s' % (name, must_be, type(x)))
return x
def check_fname(fname, filetype, endings, endings_err=()):
    """Enforce MNE filename conventions.

    Parameters
    ----------
    fname : str
        Name of the file.
    filetype : str
        Type of file. e.g., ICA, Epochs etc.
    endings : tuple
        Acceptable endings for the filename.
    endings_err : tuple
        Obligatory possible endings for the filename.
    """
    # obligatory endings raise; merely-conventional endings only warn
    if endings_err and not fname.endswith(endings_err):
        print_endings = ' or '.join([', '.join(endings_err[:-1]),
                                     endings_err[-1]])
        raise IOError('The filename (%s) for file type %s must end with %s'
                      % (fname, filetype, print_endings))
    print_endings = ' or '.join([', '.join(endings[:-1]), endings[-1]])
    if not fname.endswith(endings):
        warn('This filename (%s) does not conform to MNE naming conventions. '
             'All %s files should end with %s'
             % (fname, filetype, print_endings))
def check_version(library, min_version):
    r"""Check minimum library version required.

    Parameters
    ----------
    library : str
        The library name to import. Must have a ``__version__`` property.
    min_version : str
        The minimum version string. Anything that matches
        ``'(\d+ | [a-z]+ | \.)'``. Can also be empty to skip version
        check (just check for library presence).

    Returns
    -------
    ok : bool
        True if the library exists with at least the specified version.
    """
    try:
        mod = __import__(library)
    except ImportError:
        return False
    # empty min_version means "presence only"
    if not min_version:
        return True
    return not (LooseVersion(mod.__version__) < min_version)
def _check_mayavi_version(min_version='4.3.0'):
    """Raise RuntimeError unless mayavi >= *min_version* is importable."""
    if check_version('mayavi', min_version):
        return
    raise RuntimeError("Need mayavi >= %s" % min_version)
def check_random_state(seed):
    """Turn seed into a np.random.RandomState instance.

    If seed is None, return the RandomState singleton used by np.random.
    If seed is an int, return a new RandomState instance seeded with seed.
    If seed is already a RandomState instance, return it.
    Otherwise raise ValueError.
    """
    if seed is None or seed is np.random:
        # the global singleton RNG
        return np.random.mtrand._rand
    if isinstance(seed, np.random.RandomState):
        return seed
    if isinstance(seed, (int, np.integer)):
        return np.random.RandomState(seed)
    raise ValueError('%r cannot be used to seed a numpy.random.RandomState'
                     ' instance' % seed)
def _check_event_id(event_id, events):
    """Check event_id and convert to default format (dict of str -> int)."""
    # check out event_id dict
    if event_id is None:  # convert to int to make typing-checks happy
        # default: one entry per unique event code in column 2
        event_id = list(np.unique(events[:, 2]))
    if isinstance(event_id, dict):
        for key in event_id.keys():
            _validate_type(key, str, 'Event names')
        event_id = {key: _ensure_int(val, 'event_id[%s]' % key)
                    for key, val in event_id.items()}
    elif isinstance(event_id, list):
        event_id = [_ensure_int(v, 'event_id[%s]' % vi)
                    for vi, v in enumerate(event_id)]
        # keys become the stringified event codes
        event_id = dict(zip((str(i) for i in event_id), event_id))
    else:
        # scalar: a single event code
        event_id = _ensure_int(event_id, 'event_id')
        event_id = {str(event_id): event_id}
    return event_id
def _check_fname(fname, overwrite=False, must_exist=False):
    """Check for file existence and enforce the overwrite policy."""
    _validate_type(fname, 'str', 'fname')
    exists = op.isfile(fname)
    if must_exist and not exists:
        raise IOError('File "%s" does not exist' % fname)
    if not exists:
        return
    if not overwrite:
        raise IOError('Destination file exists. Please use option '
                      '"overwrite=True" to force overwriting.')
    if overwrite != 'read':
        # overwrite='read' means the caller only opens the file, so no log
        logger.info('Overwriting existing file.')
def _check_subject(class_subject, input_subject, raise_error=True):
    """Resolve the subject name, preferring the explicit input over the
    class attribute; optionally raise when neither is set."""
    for candidate, desc in (
            (input_subject, "subject input"),
            (class_subject,
             "Either subject input or class subject attribute")):
        if candidate is not None:
            _validate_type(candidate, 'str', desc)
            return candidate
    if raise_error is True:
        raise ValueError('Neither subject input nor class subject '
                         'attribute was a string')
    return None
def _check_preload(inst, msg):
    """Ensure data are preloaded; raise RuntimeError with guidance if not."""
    from ..epochs import BaseEpochs
    from ..evoked import Evoked
    from ..time_frequency import _BaseTFR
    if isinstance(inst, (_BaseTFR, Evoked)):
        # these types are exempt from the preload check
        pass
    else:
        name = "epochs" if isinstance(inst, BaseEpochs) else 'raw'
        if not inst.preload:
            raise RuntimeError(
                "By default, MNE does not load data into main memory to "
                "conserve resources. " + msg + ' requires %s data to be '
                'loaded. Use preload=True (or string) in the constructor or '
                '%s.load_data().' % (name, name))
def _check_compensation_grade(info1, info2, name1,
                              name2='data', ch_names=None):
    """Ensure that two Info objects have the same compensation grade.

    If *ch_names* is given, comparison is restricted to those channels
    (with CTF compensation entries dropped first). Raises RuntimeError on
    mismatch; silently returns if either info is None.
    """
    from ..io import Info
    from ..io.pick import pick_channels, pick_info
    from ..io.compensator import get_current_comp
    for t_info in (info1, info2):
        if t_info is None:
            return
        assert isinstance(t_info, Info), t_info  # or internal code is wrong
    if ch_names is not None:
        # work on copies so the callers' infos are untouched
        info1 = info1.copy()
        info2 = info2.copy()
        # pick channels
        for t_info in [info1, info2]:
            if t_info['comps']:
                t_info['comps'] = []
            picks = pick_channels(t_info['ch_names'], ch_names)
            pick_info(t_info, picks, copy=False)
    # "or 0" here aliases None -> 0, as they are equivalent
    grade1 = get_current_comp(info1) or 0
    grade2 = get_current_comp(info2) or 0
    # perform check
    if grade1 != grade2:
        raise RuntimeError(
            'Compensation grade of %s (%s) and %s (%s) do not match'
            % (name1, grade1, name2, grade2))
def _check_pylsl_installed(strict=True):
"""Aux function."""
try:
import pylsl
return pylsl
except ImportError:
if strict is True:
raise RuntimeError('For this functionality to work, the pylsl '
'library is required.')
else:
return False
def _check_pandas_installed(strict=True):
"""Aux function."""
try:
import pandas
return pandas
except ImportError:
if strict is True:
raise RuntimeError('For this functionality to work, the Pandas '
'library is required.')
else:
return False
def _check_pandas_index_arguments(index, defaults):
"""Check pandas index arguments."""
if not any(isinstance(index, k) for k in (list, tuple)):
index = [index]
invalid_choices = [e for e in index if e not in defaults]
if invalid_choices:
options = [', '.join(e) for e in [invalid_choices, defaults]]
raise ValueError('[%s] is not an valid option. Valid index'
'values are \'None\' or %s' % tuple(options))
def _check_ch_locs(chs):
"""Check if channel locations exist.
Parameters
----------
chs : dict
The channels from info['chs']
"""
locs3d = np.array([ch['loc'][:3] for ch in chs])
return not ((locs3d == 0).all() or
(~np.isfinite(locs3d)).all() or
np.allclose(locs3d, 0.))
def _is_numeric(n):
return isinstance(n, (np.integer, np.floating, int, float))
def _validate_type(item, types=None, item_name=None, type_name=None):
"""Validate that `item` is an instance of `types`.
Parameters
----------
item : object
The thing to be checked.
types : type | tuple of types | str
The types to be checked against. If str, must be one of 'str', 'int',
'numeric'.
"""
if types == "int":
_ensure_int(item, name=item_name)
return # terminate prematurely
elif types == "str":
types = str
type_name = "str" if type_name is None else type_name
elif types == "numeric":
types = (np.integer, np.floating, int, float)
type_name = "numeric" if type_name is None else type_name
elif types == "info":
from mne.io import Info as types
type_name = "Info" if type_name is None else type_name
item_name = "Info" if item_name is None else item_name
if not isinstance(types, (list, tuple)):
types = [types]
check_types = tuple(type(None) if type_ is None else type_
for type_ in types)
if not isinstance(item, check_types):
if type_name is None:
type_name = ['None' if cls_ is None else cls_.__name__
for cls_ in types]
if len(type_name) == 1:
type_name = type_name[0]
elif len(type_name) == 2:
type_name = ' or '.join(type_name)
else:
type_name[-1] = 'or ' + type_name[-1]
type_name = ', '.join(type_name)
raise TypeError('%s must be an instance of %s, got %s instead'
% (item_name, type_name, type(item),))
def _check_if_nan(data, msg=" to be plotted"):
"""Raise if any of the values are NaN."""
if not np.isfinite(data).all():
raise ValueError("Some of the values {} are NaN.".format(msg))
def _check_info_inv(info, forward, data_cov=None, noise_cov=None):
    """Return good channels common to forward model and covariance matrices."""
    from .. import pick_types
    # get a list of all channel names:
    fwd_ch_names = forward['info']['ch_names']
    # handle channels from forward model and info:
    ch_names = _compare_ch_names(info['ch_names'], fwd_ch_names, info['bads'])
    # make sure that no reference channels are left:
    ref_chs = pick_types(info, meg=False, ref_meg=True)
    ref_chs = [info['ch_names'][ch] for ch in ref_chs]
    ch_names = [ch for ch in ch_names if ch not in ref_chs]
    # inform about excluding channels:
    if (data_cov is not None and set(info['bads']) != set(data_cov['bads']) and
            (len(set(ch_names).intersection(data_cov['bads'])) > 0)):
        logger.info('info["bads"] and data_cov["bads"] do not match, '
                    'excluding bad channels from both.')
    if (noise_cov is not None and
            set(info['bads']) != set(noise_cov['bads']) and
            (len(set(ch_names).intersection(noise_cov['bads'])) > 0)):
        logger.info('info["bads"] and noise_cov["bads"] do not match, '
                    'excluding bad channels from both.')
    # handle channels from data cov if data cov is not None
    # Note: data cov is supposed to be None in tf_lcmv
    if data_cov is not None:
        ch_names = _compare_ch_names(ch_names, data_cov.ch_names,
                                     data_cov['bads'])
    # handle channels from noise cov if noise cov available:
    if noise_cov is not None:
        ch_names = _compare_ch_names(ch_names, noise_cov.ch_names,
                                     noise_cov['bads'])
    # map the surviving names back to indices into info['ch_names']
    picks = [info['ch_names'].index(k) for k in ch_names if k in
             info['ch_names']]
    return picks
def _compare_ch_names(names1, names2, bads):
"""Return channel names of common and good channels."""
ch_names = [ch for ch in names1 if ch not in bads and ch in names2]
return ch_names
def _check_channels_spatial_filter(ch_names, filters):
"""Return data channel indices to be used with spatial filter.
Unlike ``pick_channels``, this respects the order of ch_names.
"""
sel = []
# first check for channel discrepancies between filter and data:
for ch_name in filters['ch_names']:
if ch_name not in ch_names:
raise ValueError('The spatial filter was computed with channel %s '
'which is not present in the data. You should '
'compute a new spatial filter restricted to the '
'good data channels.' % ch_name)
# then compare list of channels and get selection based on data:
sel = [ii for ii, ch_name in enumerate(ch_names)
if ch_name in filters['ch_names']]
return sel
def _check_rank(rank):
"""Check rank parameter and deal with deprecation."""
err_msg = ('rank must be None, dict, "full", or int, '
'got %s (type %s)' % (rank, type(rank)))
if isinstance(rank, str):
# XXX we can use rank='' to deprecate to get to None eventually:
# if rank == '':
# warn('The rank parameter default in 0.18 of "full" will change '
# 'to None in 0.19, set it explicitly to avoid this warning',
# DeprecationWarning)
# rank = 'full'
if rank not in ['full', 'info']:
raise ValueError('rank, if str, must be "full" or "info", '
'got %s' % (rank,))
elif isinstance(rank, bool):
raise TypeError(err_msg)
elif rank is not None and not isinstance(rank, dict):
try:
rank = int(operator.index(rank))
except TypeError:
raise TypeError(err_msg)
else:
warn('rank as int is deprecated and will be removed in 0.19. '
'use rank=dict(meg=...) instead.', DeprecationWarning)
rank = dict(meg=rank)
return rank
def _check_one_ch_type(method, info, forward, data_cov=None, noise_cov=None):
    """Check number of sensor types and presence of noise covariance matrix.

    Returns the (possibly ad-hoc) noise covariance and the channel picks.
    """
    from ..cov import make_ad_hoc_cov, Covariance
    from ..io.pick import pick_info
    from ..channels.channels import _contains_ch_type
    picks = _check_info_inv(info, forward, data_cov=data_cov,
                            noise_cov=noise_cov)
    info_pick = pick_info(info, picks)
    ch_types =\
        [_contains_ch_type(info_pick, tt) for tt in ('mag', 'grad', 'eeg')]
    if sum(ch_types) > 1:
        # mixing sensor types requires whitening (lcmv) / is unsupported (dics)
        if method == 'lcmv' and noise_cov is None:
            raise ValueError('Source reconstruction with several sensor types'
                             ' requires a noise covariance matrix to be '
                             'able to apply whitening.')
        if method == 'dics':
            raise RuntimeError(
                'The use of several sensor types with the DICS beamformer is '
                'not supported yet.')
    if noise_cov is None:
        # identity-like fallback covariance
        noise_cov = make_ad_hoc_cov(info_pick, std=1.)
    else:
        noise_cov = noise_cov.copy()
        if 'estimator' in noise_cov:
            del noise_cov['estimator']
    _validate_type(noise_cov, Covariance, 'noise_cov')
    return noise_cov, picks
def _check_depth(depth, kind='depth_mne'):
    """Normalize the depth parameter into the project's default dict form."""
    from ..defaults import _handle_default
    if isinstance(depth, dict):
        return _handle_default(kind, depth)
    exp = None if depth is None else float(depth)
    return _handle_default(kind, dict(exp=exp))
def _check_option(parameter, value, allowed_values):
"""Check the value of a parameter against a list of valid options.
Raises a ValueError with a readable error message if the value was invalid.
Parameters
----------
parameter : str
The name of the parameter to check. This is used in the error message.
value : any type
The value of the parameter to check.
allowed_values : list
The list of allowed values for the parameter.
Raises
------
ValueError
When the value of the parameter was not one of the valid options.
"""
if value in allowed_values:
return True
# Prepare a nice error message for the user
msg = ("Invalid value for the '{parameter}' parameter. "
'{options}, but got {value!r} instead.')
if len(allowed_values) == 1:
options = 'The only allowed value is %r' % allowed_values[0]
else:
options = 'Allowed values are '
options += ', '.join(['%r' % v for v in allowed_values[:-1]])
options += ' and %r' % allowed_values[-1]
raise ValueError(msg.format(parameter=parameter, options=options,
value=value))
def _check_all_same_channel_names(instances):
"""Check if a collection of instances all have the same channels."""
ch_names = instances[0].info["ch_names"]
for inst in instances:
if ch_names != inst.info["ch_names"]:
return False
return True
def _check_combine(mode, valid=('mean', 'median', 'std')):
if mode == "mean":
def fun(data):
return np.mean(data, axis=0)
elif mode == "std":
def fun(data):
return np.std(data, axis=0)
elif mode == "median":
def fun(data):
return np.median(data, axis=0)
elif callable(mode):
fun = mode
else:
raise ValueError("Combine option must be " + ", ".join(valid) +
" or callable, got %s (type %s)." %
(mode, type(mode)))
return fun
| adykstra/mne-python | mne/utils/check.py | Python | bsd-3-clause | 18,200 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# NAT function test configuration for an NSX & vSphere environment.
# Static NAT: mapping is done by address only, not by port.
# NSX Edge ID; can be found in web client -> NSX Edges view.
NSX_EDGE_ID = 'edge-9'
# NAT rule ID corresponding to the floating IP that should be deleted.
NSX_NAT_RULE_ID = '196613' | smartlinux/nsxapitest | nat/case48_nsx_static_nat_delete_input.py | Python | gpl-3.0 | 320 |
# coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import compat_urllib_parse_urlparse
from ..utils import (
ExtractorError,
parse_iso8601,
qualities,
)
class SRGSSRIE(InfoExtractor):
    """Extractor for SRG SSR media urns (srf/rts/rsi/rtr/swi): resolves
    metadata via the integration-layer API and builds HDS/HLS/RTMP
    formats, adding Akamai auth tokens where required."""
    _VALID_URL = r'(?:https?://tp\.srgssr\.ch/p(?:/[^/]+)+\?urn=urn|srgssr):(?P<bu>srf|rts|rsi|rtr|swi):(?:[^:]+:)?(?P<type>video|audio):(?P<id>[0-9a-f\-]{36}|\d+)'
    _GEO_BYPASS = False
    _GEO_COUNTRIES = ['CH']
    # 'block' reason code -> human-readable error message
    _ERRORS = {
        'AGERATING12': 'To protect children under the age of 12, this video is only available between 8 p.m. and 6 a.m.',
        'AGERATING18': 'To protect children under the age of 18, this video is only available between 11 p.m. and 5 a.m.',
        # 'ENDDATE': 'For legal reasons, this video was only available for a specified period of time.',
        'GEOBLOCK': 'For legal reasons, this video is only available in Switzerland.',
        'LEGAL': 'The video cannot be transmitted for legal reasons.',
        'STARTDATE': 'This video is not yet available. Please try again later.',
    }

    def _get_tokenized_src(self, url, video_id, format_id):
        """Append an Akamai auth token to *url* (best effort, non-fatal)."""
        sp = compat_urllib_parse_urlparse(url).path.split('/')
        token = self._download_json(
            'http://tp.srgssr.ch/akahd/token?acl=/%s/%s/*' % (sp[1], sp[2]),
            video_id, 'Downloading %s token' % format_id, fatal=False) or {}
        auth_params = token.get('token', {}).get('authparams')
        if auth_params:
            url += '?' + auth_params
        return url

    def get_media_data(self, bu, media_type, media_id):
        """Fetch media metadata from the integration layer.

        Raises geo restriction / ExtractorError when the media is blocked.
        """
        media_data = self._download_json(
            'http://il.srgssr.ch/integrationlayer/1.0/ue/%s/%s/play/%s.json' % (bu, media_type, media_id),
            media_id)[media_type.capitalize()]
        if media_data.get('block') and media_data['block'] in self._ERRORS:
            message = self._ERRORS[media_data['block']]
            if media_data['block'] == 'GEOBLOCK':
                self.raise_geo_restricted(
                    msg=message, countries=self._GEO_COUNTRIES)
            raise ExtractorError(
                '%s said: %s' % (self.IE_NAME, message), expected=True)
        return media_data

    def _real_extract(self, url):
        bu, media_type, media_id = re.match(self._VALID_URL, url).groups()
        media_data = self.get_media_data(bu, media_type, media_id)
        metadata = media_data['AssetMetadatas']['AssetMetadata'][0]
        title = metadata['title']
        description = metadata.get('description')
        created_date = media_data.get('createdDate') or metadata.get('createdDate')
        timestamp = parse_iso8601(created_date)
        thumbnails = [{
            'id': image.get('id'),
            'url': image['url'],
        } for image in media_data.get('Image', {}).get('ImageRepresentations', {}).get('ImageRepresentation', [])]
        preference = qualities(['LQ', 'MQ', 'SD', 'HQ', 'HD'])
        formats = []
        # both streaming playlists and downloadable files are considered
        for source in media_data.get('Playlists', {}).get('Playlist', []) + media_data.get('Downloads', {}).get('Download',
                                                                                                                []):
            protocol = source.get('@protocol')
            for asset in source['url']:
                asset_url = asset['text']
                quality = asset['@quality']
                format_id = '%s-%s' % (protocol, quality)
                if protocol.startswith('HTTP-HDS') or protocol.startswith('HTTP-HLS'):
                    # these need an Akamai token appended before use
                    asset_url = self._get_tokenized_src(asset_url, media_id, format_id)
                    if protocol.startswith('HTTP-HDS'):
                        formats.extend(self._extract_f4m_formats(
                            asset_url + ('?' if '?' not in asset_url else '&') + 'hdcore=3.4.0',
                            media_id, f4m_id=format_id, fatal=False))
                    elif protocol.startswith('HTTP-HLS'):
                        formats.extend(self._extract_m3u8_formats(
                            asset_url, media_id, 'mp4', 'm3u8_native',
                            m3u8_id=format_id, fatal=False))
                else:
                    formats.append({
                        'format_id': format_id,
                        'url': asset_url,
                        'preference': preference(quality),
                        'ext': 'flv' if protocol == 'RTMP' else None,
                    })
        self._sort_formats(formats)
        return {
            'id': media_id,
            'title': title,
            'description': description,
            'timestamp': timestamp,
            'thumbnails': thumbnails,
            'formats': formats,
        }
class SRGSSRPlayIE(InfoExtractor):
    """Extractor for the SRG SSR "play" web sites; delegates to SRGSSRIE
    by rewriting the page URL into a srgssr: urn."""
    IE_DESC = 'srf.ch, rts.ch, rsi.ch, rtr.ch and swissinfo.ch play sites'
    _VALID_URL = r'https?://(?:(?:www|play)\.)?(?P<bu>srf|rts|rsi|rtr|swissinfo)\.ch/play/(?:tv|radio)/[^/]+/(?P<type>video|audio)/[^?]+\?id=(?P<id>[0-9a-f\-]{36}|\d+)'
    _TESTS = [{
        'url': 'http://www.srf.ch/play/tv/10vor10/video/snowden-beantragt-asyl-in-russland?id=28e1a57d-5b76-4399-8ab3-9097f071e6c5',
        'md5': 'da6b5b3ac9fa4761a942331cef20fcb3',
        'info_dict': {
            'id': '28e1a57d-5b76-4399-8ab3-9097f071e6c5',
            'ext': 'mp4',
            'upload_date': '20130701',
            'title': 'Snowden beantragt Asyl in Russland',
            'timestamp': 1372713995,
        }
    }, {
        # No Speichern (Save) button
        'url': 'http://www.srf.ch/play/tv/top-gear/video/jaguar-xk120-shadow-und-tornado-dampflokomotive?id=677f5829-e473-4823-ac83-a1087fe97faa',
        'md5': '0a274ce38fda48c53c01890651985bc6',
        'info_dict': {
            'id': '677f5829-e473-4823-ac83-a1087fe97faa',
            'ext': 'flv',
            'upload_date': '20130710',
            'title': 'Jaguar XK120, Shadow und Tornado-Dampflokomotive',
            'description': 'md5:88604432b60d5a38787f152dec89cd56',
            'timestamp': 1373493600,
        },
    }, {
        'url': 'http://www.rtr.ch/play/radio/actualitad/audio/saira-tujetsch-tuttina-cuntinuar-cun-sedrun-muster-turissem?id=63cb0778-27f8-49af-9284-8c7a8c6d15fc',
        'info_dict': {
            'id': '63cb0778-27f8-49af-9284-8c7a8c6d15fc',
            'ext': 'mp3',
            'upload_date': '20151013',
            'title': 'Saira: Tujetsch - tuttina cuntinuar cun Sedrun Mustér Turissem',
            'timestamp': 1444750398,
        },
        'params': {
            # rtmp download
            'skip_download': True,
        },
    }, {
        'url': 'http://www.rts.ch/play/tv/-/video/le-19h30?id=6348260',
        'md5': '67a2a9ae4e8e62a68d0e9820cc9782df',
        'info_dict': {
            'id': '6348260',
            'display_id': '6348260',
            'ext': 'mp4',
            'duration': 1796,
            'title': 'Le 19h30',
            'description': '',
            'uploader': '19h30',
            'upload_date': '20141201',
            'timestamp': 1417458600,
            'thumbnail': r're:^https?://.*\.image',
            'view_count': int,
        },
        'params': {
            # m3u8 download
            'skip_download': True,
        }
    }]

    def _real_extract(self, url):
        bu, media_type, media_id = re.match(self._VALID_URL, url).groups()
        # other info can be extracted from url + '&layout=json'
        # note: 'swissinfo' is shortened to the 'swi' business unit
        return self.url_result('srgssr:%s:%s:%s' % (bu[:3], media_type, media_id), 'SRGSSR')
| valmynd/MediaFetcher | src/plugins/youtube_dl/youtube_dl/extractor/srgssr.py | Python | gpl-3.0 | 6,280 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import print_function
import time, posix, daily
# Daily weather records, indexed data[year][month][day] -> record fields
# (see findBest/histogram below); loaded once at import time.
data = daily.load("ottawa")
class FloatValue(object):
    """Callable extracting a record field as float; None when empty.

    Fix: ``__slots__`` must name the attribute assigned in ``__init__`` —
    with the original empty tuple, instantiating a new-style class raised
    AttributeError on ``self.fieldIndex = ...``.
    """
    __slots__ = ('fieldIndex',)

    def __init__(self, field):
        self.fieldIndex = field.index

    def __call__(self, fields):
        raw = fields[self.fieldIndex]
        if len(raw) == 0:
            return None
        return float(raw)
class IntValue(object):
    """Callable extracting a record field rounded to int; None when empty.

    Fix: ``__slots__`` must name the attribute assigned in ``__init__``
    (empty ``__slots__`` made instantiation raise AttributeError).
    """
    __slots__ = ('fieldIndex',)

    def __init__(self, field):
        self.fieldIndex = field.index

    def __call__(self, fields):
        raw = fields[self.fieldIndex]
        if len(raw) == 0:
            return None
        # round-half-up for non-negative values
        return int(float(raw) + .5)
class IntDiff(object):
    """Callable returning field1 - field2 rounded to int; None if either
    field is empty.

    Fix: ``__slots__`` must name the attributes assigned in ``__init__``
    (empty ``__slots__`` made instantiation raise AttributeError).
    """
    __slots__ = ('field1Index', 'field2Index')

    def __init__(self, field1, field2):
        self.field1Index = field1.index
        self.field2Index = field2.index

    def __call__(self, fields):
        raw1 = fields[self.field1Index]
        raw2 = fields[self.field2Index]
        if len(raw1) == 0 or len(raw2) == 0:
            return None
        return int(float(raw1) - float(raw2) + .5)
class Max(object):
    """Key object for findBest: larger field value wins; missing -> -99.

    Fix: ``__slots__`` must name the attribute assigned in ``__init__``
    (empty ``__slots__`` made instantiation raise AttributeError).
    """
    __slots__ = ('fieldIndex',)

    def __init__(self, field):
        self.fieldIndex = field.index

    def __call__(self, fields):
        raw = fields[self.fieldIndex]
        if len(raw) == 0:
            return -99  # sentinel: missing readings never win
        return float(raw)

    def better(self, one, two):
        """True if record *one* beats record *two*."""
        return self(one) > self(two)
class MaxDiff(object):
    """Key object for findBest: larger field1 - field2 wins; missing -> 0.

    Fix: ``__slots__`` must name the attributes assigned in ``__init__``
    (empty ``__slots__`` made instantiation raise AttributeError).
    """
    __slots__ = ('field1Index', 'field2Index')

    def __init__(self, field1, field2):
        self.field1Index = field1.index
        self.field2Index = field2.index

    def __call__(self, fields):
        raw1 = fields[self.field1Index]
        raw2 = fields[self.field2Index]
        if len(raw1) == 0 or len(raw2) == 0:
            return 0  # neutral value so incomplete records never win
        return float(raw1) - float(raw2)

    def better(self, one, two):
        """True if record *one* beats record *two*."""
        return self(one) > self(two)
class MinDiff(object):
    """Key object for findBest: smaller field1 - field2 wins; missing -> 100.

    Fix: ``__slots__`` must name the attributes assigned in ``__init__``
    (empty ``__slots__`` made instantiation raise AttributeError).
    """
    __slots__ = ('field1Index', 'field2Index')

    def __init__(self, field1, field2):
        self.field1Index = field1.index
        self.field2Index = field2.index

    def __call__(self, fields):
        raw1 = fields[self.field1Index]
        raw2 = fields[self.field2Index]
        if len(raw1) == 0 or len(raw2) == 0:
            return 100  # sentinel: missing readings never win a minimum
        return float(raw1) - float(raw2)

    def better(self, one, two):
        """True if record *one* beats record *two* (smaller diff wins)."""
        return self(one) < self(two)
def findBest(proc):
    """Scan every recorded day and return ((year, month, day), fields) of
    the record ranked best by ``proc.better``."""
    best_fields = None
    best_date = None
    for year, year_data in data.items():
        for month, month_data in year_data.items():
            for day, fields in month_data.items():
                if best_fields is None or proc.better(fields, best_fields):
                    best_fields = fields
                    best_date = (year, month, day)
    return best_date, best_fields
def histogram(proc):
    """Count occurrences of ``proc(fields)`` over every recorded day,
    skipping days where the value is None. Returns {value: count}."""
    counts = {}
    for year_data in data.values():
        for month_data in year_data.values():
            for fields in month_data.values():
                value = proc(fields)
                if value is not None:
                    counts[value] = counts.get(value, 0) + 1
    return counts
# Example queries (uncomment to run):
# findBest(Max(daily.MAX_TEMP))
# findBest(Max(daily.TOTAL_RAIN_MM))
# findBest(Max(daily.TOTAL_SNOW_CM))
# findBest(MaxDiff(daily.MAX_TEMP, daily.MIN_TEMP))
# findBest(MinDiff(daily.MAX_TEMP, daily.MIN_TEMP))
mth = histogram(IntValue(daily.TOTAL_SNOW_CM))
for mt in sorted(mth.keys()):
    # Fix: this file does `from __future__ import print_function`, so the
    # old print *statement* used here was a SyntaxError; use the function.
    print('%s\t%s' % (mt, mth[mt]))
| endlisnis/weather-records | maxtemp.py | Python | gpl-3.0 | 3,298 |
from Screens.Screen import Screen
from Components.GUIComponent import GUIComponent
from Components.VariableText import VariableText
from Components.ActionMap import ActionMap
from Components.Label import Label
from Components.Button import Button
from Components.FileList import FileList
from Components.ScrollLabel import ScrollLabel
from Components.config import config, configfile
from Components.FileList import MultiFileSelectList
from Screens.MessageBox import MessageBox
from os import path, remove, walk, stat, rmdir
from time import time
from enigma import eTimer, eBackgroundFileEraser, eLabel
from glob import glob
import Components.Task
# Import smtplib for the actual sending function
import smtplib, base64
# Here are the email package modules we'll need
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.Utils import formatdate
_session = None
def get_size(start_path=None):
    """Return the total size in bytes of all files below *start_path*.

    Returns 0 when *start_path* is None/empty.
    """
    if not start_path:
        return 0
    total = 0
    for dirpath, _dirnames, filenames in walk(start_path):
        for name in filenames:
            total += path.getsize(path.join(dirpath, name))
    return total
def AutoLogManager(session=None, **kwargs):
    """Plugin entry point: create the global LogManagerPoller and start it."""
    global debuglogcheckpoller
    debuglogcheckpoller = LogManagerPoller()
    debuglogcheckpoller.start()
class LogManagerPoller:
"""Automatically Poll LogManager"""
def __init__(self):
    """Create the trim and trash eTimers (callbacks attached in start())."""
    # Init Timer
    self.TrimTimer = eTimer()
    self.TrashTimer = eTimer()
def start(self):
    """Attach the job callbacks (idempotently) and fire both timers now."""
    if self.TrimTimerJob not in self.TrimTimer.callback:
        self.TrimTimer.callback.append(self.TrimTimerJob)
    if self.TrashTimerJob not in self.TrashTimer.callback:
        self.TrashTimer.callback.append(self.TrashTimerJob)
    # startLongTimer(0) triggers the first poll immediately
    self.TrimTimer.startLongTimer(0)
    self.TrashTimer.startLongTimer(0)
def stop(self):
if self.TrimTimerJob in self.TrimTimer.callback:
self.TrimTimer.callback.remove(self.TrimTimerJob)
if self.TrashTimerJob in self.TrashTimer.callback:
self.TrashTimer.callback.remove(self.TrashTimerJob)
self.TrimTimer.stop()
self.TrashTimer.stop()
def TrimTimerJob(self):
print '[LogManager] Trim Poll Started'
Components.Task.job_manager.AddJob(self.createTrimJob())
def TrashTimerJob(self):
print '[LogManager] Trash Poll Started'
self.JobTrash()
# Components.Task.job_manager.AddJob(self.createTrashJob())
def createTrimJob(self):
job = Components.Task.Job(_("LogManager"))
task = Components.Task.PythonTask(job, _("Checking Logs..."))
task.work = self.JobTrim
task.weighting = 1
return job
def createTrashJob(self):
job = Components.Task.Job(_("LogManager"))
task = Components.Task.PythonTask(job, _("Checking Logs..."))
task.work = self.JobTrash
task.weighting = 1
return job
def openFiles(self, ctimeLimit, allowedBytes):
ctimeLimit = ctimeLimit
allowedBytes = allowedBytes
def JobTrim(self):
filename = ""
for filename in glob(config.crash.debug_path.value + '*.log'):
try:
if path.getsize(filename) > (config.crash.debugloglimit.value * 1024 * 1024):
fh = open(filename, 'rb+')
fh.seek(-(config.crash.debugloglimit.value * 1024 * 1024), 2)
data = fh.read()
fh.seek(0) # rewind
fh.write(data)
fh.truncate()
fh.close()
except:
pass
self.TrimTimer.startLongTimer(3600) #once an hour
def JobTrash(self):
ctimeLimit = time() - (config.crash.daysloglimit.value * 3600 * 24)
allowedBytes = 1024*1024 * int(config.crash.sizeloglimit.value)
mounts = []
matches = []
print "[LogManager] probing folders"
f = open('/proc/mounts', 'r')
for line in f.readlines():
parts = line.strip().split()
mounts.append(parts[1])
f.close()
for mount in mounts:
if path.isdir(path.join(mount,'logs')):
matches.append(path.join(mount,'logs'))
matches.append('/home/root/logs')
print "[LogManager] found following log's:", matches
if len(matches):
for logsfolder in matches:
print "[LogManager] looking in:", logsfolder
logssize = get_size(logsfolder)
bytesToRemove = logssize - allowedBytes
candidates = []
size = 0
for root, dirs, files in walk(logsfolder, topdown=False):
for name in files:
try:
fn = path.join(root, name)
st = stat(fn)
if st.st_ctime < ctimeLimit:
print "[LogManager] " + str(fn) + ": Too old:", name, st.st_ctime
eBackgroundFileEraser.getInstance().erase(fn)
bytesToRemove -= st.st_size
else:
candidates.append((st.st_ctime, fn, st.st_size))
size += st.st_size
except Exception, e:
print "[LogManager] Failed to stat %s:"% name, e
# Remove empty directories if possible
for name in dirs:
try:
rmdir(path.join(root, name))
except:
pass
candidates.sort()
# Now we have a list of ctime, candidates, size. Sorted by ctime (=deletion time)
for st_ctime, fn, st_size in candidates:
print "[LogManager] " + str(logsfolder) + ": bytesToRemove", bytesToRemove
if bytesToRemove < 0:
break
eBackgroundFileEraser.getInstance().erase(fn)
bytesToRemove -= st_size
size -= st_size
self.TrashTimer.startLongTimer(43200) #twice a day
class LogManager(Screen):
	"""Log file browser screen.

	Lists crash or debug logs (red toggles the type), and offers viewing
	(green), deleting (yellow) and e-mailing (blue) of the selection.
	Files already mailed are remembered in config.logmanager.sentfiles so
	the user is warned before resending.
	"""
	def __init__(self, session):
		Screen.__init__(self, session)
		self.logtype = 'crashlogs'
		self['myactions'] = ActionMap(['ColorActions', 'OkCancelActions', 'DirectionActions'],
			{
				'ok': self.changeSelectionState,
				'cancel': self.close,
				'red': self.changelogtype,
				'green': self.showLog,
				'yellow': self.deletelog,
				'blue': self.sendlog,
				"left": self.left,
				"right": self.right,
				"down": self.down,
				"up": self.up
			}, -1)
		self["key_red"] = Button(_("Debug Logs"))
		self["key_green"] = Button(_("View"))
		self["key_yellow"] = Button(_("Delete"))
		self["key_blue"] = Button(_("Send"))
		self.onChangedEntry = [ ]
		self.sentsingle = ""
		# Both lists start from the persisted "already sent" file list.
		self.selectedFiles = config.logmanager.sentfiles.value
		self.previouslySent = config.logmanager.sentfiles.value
		self.defaultDir = config.crash.debug_path.value
		self.matchingPattern = 'enigma2_crash_'
		self.filelist = MultiFileSelectList(self.selectedFiles, self.defaultDir, showDirectories = False, matchingPattern = self.matchingPattern )
		self["list"] = self.filelist
		self["LogsSize"] = self.logsinfo = LogInfo(config.crash.debug_path.value, LogInfo.USED, update=False)
		self.onLayoutFinish.append(self.layoutFinished)
		if not self.selectionChanged in self["list"].onSelectionChanged:
			self["list"].onSelectionChanged.append(self.selectionChanged)
	def createSummary(self):
		# LCD/summary screen support.
		from Screens.PluginBrowser import PluginBrowserSummary
		return PluginBrowserSummary
	def selectionChanged(self):
		# Push the currently highlighted file name to the summary screen.
		item = self["list"].getCurrent()
		desc = ""
		if item:
			name = str(item[0][0])
		else:
			name = ""
		for cb in self.onChangedEntry:
			cb(name, desc)
	def layoutFinished(self):
		self["LogsSize"].update(config.crash.debug_path.value)
		idx = 0
		self["list"].moveToIndex(idx)
		self.setWindowTitle()
	def setWindowTitle(self):
		self.setTitle(self.defaultDir)
	def up(self):
		self["list"].up()
	def down(self):
		self["list"].down()
	def left(self):
		self["list"].pageUp()
	def right(self):
		self["list"].pageDown()
	def saveSelection(self):
		# Persist the current multi-selection as the "already sent" list.
		self.selectedFiles = self["list"].getSelectedList()
		self.previouslySent = self["list"].getSelectedList()
		config.logmanager.sentfiles.setValue(self.selectedFiles)
		config.logmanager.sentfiles.save()
		configfile.save()
	def exit(self):
		self.close(None)
	def changeSelectionState(self):
		# Toggle the multi-select mark on the highlighted file.
		try:
			self.sel = self["list"].getCurrent()[0]
		except:
			self.sel = None
		if self.sel:
			self["list"].changeSelectionState()
			self.selectedFiles = self["list"].getSelectedList()
	def changelogtype(self):
		# Toggle between crash logs and debug logs and reload the list.
		self["LogsSize"].update(config.crash.debug_path.value)
		import re
		if self.logtype == 'crashlogs':
			self["key_red"].setText(_("Crash Logs"))
			self.logtype = 'debuglogs'
			self.matchingPattern = 'Enigma2'
		else:
			self["key_red"].setText(_("Debug Logs"))
			self.logtype = 'crashlogs'
			self.matchingPattern = 'enigma2_crash_'
		self["list"].matchingPattern = re.compile(self.matchingPattern)
		self["list"].changeDir(self.defaultDir)
	def showLog(self):
		# Open the highlighted log in the read-only viewer.
		try:
			self.sel = self["list"].getCurrent()[0]
		except:
			self.sel = None
		if self.sel:
			self.session.open(LogManagerViewLog, self.sel[0])
	def deletelog(self):
		# Entry point of the delete flow: multi-selection takes precedence
		# over the single highlighted file.
		try:
			self.sel = self["list"].getCurrent()[0]
		except:
			self.sel = None
		self.selectedFiles = self["list"].getSelectedList()
		if self.selectedFiles:
			message = _("Do you want to delete all selected files:\n(choose 'No' to only delete the currently selected file.)")
			ybox = self.session.openWithCallback(self.doDelete1, MessageBox, message, MessageBox.TYPE_YESNO)
			ybox.setTitle(_("Delete Confirmation"))
		elif self.sel:
			message = _("Are you sure you want to delete this log:\n") + str(self.sel[0])
			ybox = self.session.openWithCallback(self.doDelete3, MessageBox, message, MessageBox.TYPE_YESNO)
			ybox.setTitle(_("Delete Confirmation"))
		else:
			self.session.open(MessageBox, _("You have selected no logs to delete."), MessageBox.TYPE_INFO, timeout = 10)
	def doDelete1(self, answer):
		# Second confirmation: all selected files (Yes) or just the
		# highlighted one (No).
		self.selectedFiles = self["list"].getSelectedList()
		self.selectedFiles = ",".join(self.selectedFiles).replace(",", " ")
		self.sel = self["list"].getCurrent()[0]
		if answer is True:
			message = _("Are you sure you want to delete all selected logs:\n") + self.selectedFiles
			ybox = self.session.openWithCallback(self.doDelete2, MessageBox, message, MessageBox.TYPE_YESNO)
			ybox.setTitle(_("Delete Confirmation"))
		else:
			message = _("Are you sure you want to delete this log:\n") + str(self.sel[0])
			ybox = self.session.openWithCallback(self.doDelete3, MessageBox, message, MessageBox.TYPE_YESNO)
			ybox.setTitle(_("Delete Confirmation"))
	def doDelete2(self, answer):
		# Delete every selected file and clear the sent-files memory.
		if answer is True:
			self.selectedFiles = self["list"].getSelectedList()
			self["list"].instance.moveSelectionTo(0)
			for f in self.selectedFiles:
				remove(f)
			config.logmanager.sentfiles.setValue("")
			config.logmanager.sentfiles.save()
			configfile.save()
			self["list"].changeDir(self.defaultDir)
	def doDelete3(self, answer):
		# Delete only the highlighted file.
		if answer is True:
			self.sel = self["list"].getCurrent()[0]
			self["list"].instance.moveSelectionTo(0)
			if path.exists(self.defaultDir + self.sel[0]):
				remove(self.defaultDir + self.sel[0])
			self["list"].changeDir(self.defaultDir)
			self["LogsSize"].update(config.crash.debug_path.value)
	def sendlog(self, addtionalinfo = None):
		# Entry point of the send-by-mail flow.  Files already sent are
		# removed from the selection; resending the highlighted file asks
		# for extra confirmation.
		try:
			self.sel = self["list"].getCurrent()[0]
		except:
			self.sel = None
		if self.sel:
			self.sel = str(self.sel[0])
			self.selectedFiles = self["list"].getSelectedList()
			self.resend = False
			for send in self.previouslySent:
				if send in self.selectedFiles:
					self.selectedFiles.remove(send)
				if send == (self.defaultDir + self.sel):
					self.resend = True
			if self.selectedFiles:
				message = _("Do you want to send all selected files:\n(choose 'No' to only send the currently selected file.)")
				ybox = self.session.openWithCallback(self.sendlog1, MessageBox, message, MessageBox.TYPE_YESNO)
				ybox.setTitle(_("Send Confirmation"))
			elif self.sel and not self.resend:
				self.sendallfiles = False
				message = _("Are you sure you want to send this log:\n") + self.sel
				ybox = self.session.openWithCallback(self.sendlog2, MessageBox, message, MessageBox.TYPE_YESNO)
				ybox.setTitle(_("Send Confirmation"))
			elif self.sel and self.resend:
				self.sendallfiles = False
				message = _("You have already sent this log, are you sure you want to resend this log:\n") + self.sel
				ybox = self.session.openWithCallback(self.sendlog2, MessageBox, message, MessageBox.TYPE_YESNO)
				ybox.setTitle(_("Send Confirmation"))
			else:
				self.session.open(MessageBox, _("You have selected no logs to send."), MessageBox.TYPE_INFO, timeout = 10)
	def sendlog1(self, answer):
		# Confirmation step: send everything (Yes) or only the highlighted
		# file (No).
		if answer:
			self.sendallfiles = True
			message = _("Do you want to add any additional information ?")
			ybox = self.session.openWithCallback(self.sendlog3, MessageBox, message, MessageBox.TYPE_YESNO)
			ybox.setTitle(_("Additional Info"))
		else:
			self.sendallfiles = False
			message = _("Are you sure you want to send this log:\n") + str(self.sel[0])
			ybox = self.session.openWithCallback(self.sendlog2, MessageBox, message, MessageBox.TYPE_YESNO)
			ybox.setTitle(_("Send Confirmation"))
	def sendlog2(self, answer):
		if answer:
			self.sendallfiles = False
			message = _("Do you want to add any additional information ?")
			ybox = self.session.openWithCallback(self.sendlog3, MessageBox, message, MessageBox.TYPE_YESNO)
			ybox.setTitle(_("Additional Info"))
	def sendlog3(self, answer):
		# Optional free-text / file attachment step before sending.
		if answer:
			message = _("Do you want to attach a text file to explain the log ?\n(choose 'No' to type message using virtual keyboard.)")
			ybox = self.session.openWithCallback(self.sendlog4, MessageBox, message, MessageBox.TYPE_YESNO)
			ybox.setTitle(_("Attach a file"))
		else:
			self.doSendlog()
	def sendlog4(self, answer):
		if answer:
			self.session.openWithCallback(self.doSendlog, LogManagerFb)
		else:
			from Screens.VirtualKeyBoard import VirtualKeyBoard
			self.session.openWithCallback(self.doSendlog, VirtualKeyBoard, title = 'Additonal Info')
	def doSendlog(self, additonalinfo = None):
		# Build a multipart mail with the chosen logs attached and send it
		# via the project's SMTP server.  Requires the user's name/e-mail
		# to be configured; a time()-based reference id ties replies to
		# this submission.
		ref = str(time())
		# Create the container (outer) email message.
		msg = MIMEMultipart()
		if config.logmanager.user.value != '' and config.logmanager.useremail.value != '':
			fromlogman = config.logmanager.user.value + ' <' + config.logmanager.useremail.value + '>'
			tocrashlogs = 'logs@openld.es'
			msg['From'] = fromlogman
			msg['To'] = tocrashlogs
			msg['Cc'] = fromlogman
			msg['Date'] = formatdate(localtime=True)
			msg['Subject'] = 'Ref: ' + ref
			# NOTE(review): the default None is != "", so the config
			# fallback below only applies when an empty string is passed.
			if additonalinfo != "":
				msg.attach(MIMEText(additonalinfo, 'plain'))
			else:
				msg.attach(MIMEText(config.logmanager.additionalinfo.value, 'plain'))
			if self.sendallfiles:
				# Attach every still-unsent selected file.
				self.selectedFiles = self["list"].getSelectedList()
				for send in self.previouslySent:
					if send in self.selectedFiles:
						self.selectedFiles.remove(send)
				self.sel = ",".join(self.selectedFiles).replace(",", " ")
				self["list"].instance.moveSelectionTo(0)
				for f in self.selectedFiles:
					self.previouslySent.append(f)
					fp = open(f, 'rb')
					data = MIMEText(fp.read())
					fp.close()
					msg.attach(data)
					self.saveSelection()
				sentfiles = self.sel
			else:
				# Attach only the highlighted file.
				self.sel = self["list"].getCurrent()[0]
				self.sel = str(self.sel[0])
				sentfiles = self.sel
				fp = open((self.defaultDir + self.sel), 'rb')
				data = MIMEText(fp.read())
				fp.close()
				msg.attach(data)
				self.sentsingle = self.defaultDir + self.sel
				self.changeSelectionState()
				self.saveSelection()
			# Send the email via our own SMTP server.
			# NOTE(review): account credentials are hard-coded here (the
			# password only base64-obfuscated) -- visible to anyone with
			# the image.
			wos_user = 'logs@openld.es'
			wos_pwd = base64.b64decode('TF95X0dCRlRFNHRDenVKN1dNdlEkZj14')
			try:
				print "connecting to server: mail.openld.es"
				#socket.setdefaulttimeout(30)
				s = smtplib.SMTP("mail.openld.es",25)
				s.login(wos_user, wos_pwd)
				if config.logmanager.usersendcopy.value:
					s.sendmail(fromlogman, [tocrashlogs, fromlogman], msg.as_string())
					s.quit()
					self.session.open(MessageBox, sentfiles + ' ' + _('has been sent to the SVN team team.\nplease quote') + ' ' + str(ref) + ' ' + _('when asking question about this log\n\nA copy has been sent to yourself.'), MessageBox.TYPE_INFO)
				else:
					s.sendmail(fromlogman, tocrashlogs, msg.as_string())
					s.quit()
					self.session.open(MessageBox, sentfiles + ' ' + _('has been sent to the SVN team team.\nplease quote') + ' ' + str(ref) + ' ' + _('when asking question about this log'), MessageBox.TYPE_INFO)
			except Exception,e:
				self.session.open(MessageBox, _("Error:\n%s" % e), MessageBox.TYPE_INFO, timeout = 10)
		else:
			self.session.open(MessageBox, _('You have not setup your user info in the setup screen\nPress MENU, and enter your info, then try again'), MessageBox.TYPE_INFO, timeout = 10)
	def myclose(self):
		self.close()
class LogManagerViewLog(Screen):
	"""Read-only viewer showing one log file inside a ScrollLabel."""
	def __init__(self, session, selected):
		# `selected` is the file name relative to the configured debug path.
		self.session = session
		Screen.__init__(self, session)
		self.setTitle(selected)
		if path.exists(config.crash.debug_path.value + selected):
			# Python 2 `file()` builtin; the handle is left to the GC.
			log = file(config.crash.debug_path.value + selected).read()
		else:
			log = ""
		self["list"] = ScrollLabel(str(log))
		self["setupActions"] = ActionMap(["SetupActions", "ColorActions", "DirectionActions"],
		{
			"cancel": self.cancel,
			"ok": self.cancel,
			"up": self["list"].pageUp,
			"down": self["list"].pageDown,
			"right": self["list"].lastPage
		}, -2)
	def cancel(self):
		self.close()
class LogManagerFb(Screen):
	"""Minimal file browser used to pick a text file as additional info
	for a log submission (see LogManager.sendlog4)."""
	def __init__(self, session, logpath=None):
		if logpath is None:
			# Start in the last used folder if it still exists.
			if path.isdir(config.logmanager.path.value):
				logpath = config.logmanager.path.value
			else:
				logpath = "/"
		self.session = session
		Screen.__init__(self, session)
		self["list"] = FileList(logpath, matchingPattern = "^.*")
		self["red"] = Label(_("delete"))
		self["green"] = Label(_("move"))
		self["yellow"] = Label(_("copy"))
		self["blue"] = Label(_("rename"))
		self["actions"] = ActionMap(["ChannelSelectBaseActions","WizardActions", "DirectionActions", "MenuActions", "NumberActions", "ColorActions"],
		{
			"ok": self.ok,
			"back": self.exit,
			"up": self.goUp,
			"down": self.goDown,
			"left": self.goLeft,
			"right": self.goRight,
			"0": self.doRefresh,
		}, -1)
		self.onLayoutFinish.append(self.mainlist)
	def exit(self):
		# Leaving without choosing a file clears the additional info text.
		config.logmanager.additionalinfo.setValue("")
		if self["list"].getCurrentDirectory():
			config.logmanager.path.setValue(self["list"].getCurrentDirectory())
			config.logmanager.path.save()
		self.close()
	def ok(self):
		# Descend into directories; act on plain files.
		if self.SOURCELIST.canDescent(): # isDir
			self.SOURCELIST.descent()
			if self.SOURCELIST.getCurrentDirectory(): #??? when is it none
				self.setTitle(self.SOURCELIST.getCurrentDirectory())
		else:
			self.onFileAction()
	def goLeft(self):
		self.SOURCELIST.pageUp()
	def goRight(self):
		self.SOURCELIST.pageDown()
	def goUp(self):
		self.SOURCELIST.up()
	def goDown(self):
		self.SOURCELIST.down()
	def doRefresh(self):
		self.SOURCELIST.refresh()
	def mainlist(self):
		# Bind the widget once the layout is finished.
		self["list"].selectionEnabled(1)
		self.SOURCELIST = self["list"]
		self.setTitle(self.SOURCELIST.getCurrentDirectory())
	def onFileAction(self):
		# Load the chosen file's content as the additional-info text,
		# remember the folder and close.
		config.logmanager.additionalinfo.setValue(file(self.SOURCELIST.getCurrentDirectory()+self.SOURCELIST.getFilename()).read())
		if self["list"].getCurrentDirectory():
			config.logmanager.path.setValue(self["list"].getCurrentDirectory())
			config.logmanager.path.save()
		self.close()
class LogInfo(VariableText, GUIComponent):
	"""Label widget displaying the disk space used by the log folder."""
	# Display-type constants; only USED is rendered by update().
	FREE = 0
	USED = 1
	SIZE = 2
	def __init__(self, path, type, update = True):
		GUIComponent.__init__(self)
		VariableText.__init__(self)
		self.type = type
#		self.path = config.crash.debug_path.value
		if update:
			self.update(path)
	def update(self, path):
		# Recompute the folder size and refresh the label text.
		# NOTE(review): returns -1 on OSError but callers ignore the
		# return value; the label is simply left unchanged then.
		try:
			total_size = get_size(path)
		except OSError:
			return -1
		if self.type == self.USED:
			try:
				# Thresholds are decimal (1e7/1e10) while the shifts are
				# binary (>>10/>>20/>>30); the rounding is cosmetic only.
				if total_size < 10000000:
					total_size = "%d kB" % (total_size >> 10)
				elif total_size < 10000000000:
					total_size = "%d MB" % (total_size >> 20)
				else:
					total_size = "%d GB" % (total_size >> 30)
				self.setText(_("Space used:") + " " + total_size)
			except:
				# occurs when f_blocks is 0 or a similar error
				self.setText("-?-")
	GUI_WIDGET = eLabel
| popazerty/enigma2 | lib/python/Screens/LogManager.py | Python | gpl-2.0 | 19,571 |
""" Testing arrays module
"""
from __future__ import absolute_import
import numpy as np
from ..arrays import strides_from
from nipy.externals.six import binary_type, text_type
from numpy.testing import (assert_array_almost_equal,
assert_array_equal)
from nose.tools import assert_true, assert_equal, assert_raises
def test_strides_from():
    # Check strides_from() against numpy's own layout for every scalar
    # type, shape and memory order; string/unicode scalars need a sized
    # dtype, and void is unsupported.
    all_dtypes = sum(np.sctypes.values(), [])
    for shape in ((3,), (2, 3), (2, 3, 4), (5, 4, 3, 2)):
        for order in 'FC':
            for dtype in all_dtypes:
                if dtype is binary_type:
                    dtype = 'S3'
                elif dtype is text_type:
                    dtype = 'U4'
                elif dtype is np.void:
                    continue
                expected = np.empty(shape, dtype=dtype, order=order).strides
                assert_equal(strides_from(shape, dtype, order), expected)
            # Unsized/unsupported dtypes and a bad order flag must raise.
            assert_raises(ValueError, strides_from, shape, np.void, order)
            assert_raises(ValueError, strides_from, shape, binary_type, order)
            assert_raises(ValueError, strides_from, shape, text_type, order)
    assert_raises(ValueError, strides_from, (3, 2), 'f8', 'G')
| alexis-roche/nipy | nipy/utils/tests/test_arrays.py | Python | bsd-3-clause | 1,157 |
# -*- coding: utf-8 -*-
# @Author: Marco Benzi <marco.benzi@alumnos.usm.cl>
# @Date: 2015-06-07 19:44:12
# @Last Modified 2015-06-09
# @Last Modified time: 2015-06-09 16:07:05
# ==========================================================================
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# ==========================================================================
import math
"""
Speed of light constant
"""
c = 3E8
"""
Vacuum permittivity
"""
e0 = 8.8541E-12
"""
Vacuum permeability
"""
u0 = 4E-7*math.pi
def getEffectivePermitivity(WHratio, er):
	"""
	Compute the effective permittivity of a microstrip line for a given
	W/H ratio, assuming negligible conductor thickness.

	Parameters:
	- `WHratio` : W/H ratio.
	- `er` : Relative permitivity of the dielectric.
	"""
	mean = (er + 1)/2
	half_contrast = (er - 1)/2
	fill_factor = (1 + 12/WHratio)**(-0.5)
	if WHratio <= 1:
		# Narrow strips get an extra geometric correction term.
		return mean + (fill_factor + 0.04*(1 - WHratio)**2)*half_contrast
	return mean + fill_factor*half_contrast
def getAuxVarA(Zo,er):
	"""
	Returns the auxiliary variable

	    A = Zo/60 * sqrt((er + 1)/2) + (er - 1)/(er + 1)*(0.23 + 0.11/er)

	used by the narrow-strip W/H synthesis formula.  Conductor thickness
	is assumed to be negligible.

	Parameters:
	- `Zo` : Real impedance of the line.
	- `er` : Relative permitivity of the dielectric.
	"""
	impedance_term = (Zo)/60 * math.sqrt((er + 1)/2)
	filling_term = (er-1)/(er+1)*(0.23+0.11/er)
	return impedance_term + filling_term
def getAuxVarB(Zo,er):
	"""
	Returns the auxiliary variable

	    B = 377*pi/(2*Zo*sqrt(er))

	used by the wide-strip W/H synthesis formula.  Conductor thickness is
	assumed to be negligible.

	Parameters:
	- `Zo` : Real impedance of the line.
	- `er` : Relative permitivity of the dielectric.
	"""
	denominator = 2*Zo*math.sqrt(er)
	return (377*math.pi)/denominator
def getWHRatioA(Zo,er):
	"""
	Returns the W/H ratio for W/H < 2. If the result is > 2, then other
	method should be used.  Conductor thickness is assumed negligible.

	Parameters:
	- `Zo` : Real impedance of the line.
	- `er` : Relative permitivity of the dielectric.
	"""
	# Inlined getAuxVarA: A = Zo/60*sqrt((er+1)/2) + (er-1)/(er+1)*(0.23+0.11/er)
	A = (Zo)/60 * math.sqrt((er + 1)/2) + (er-1)/(er+1)*(0.23+0.11/er)
	return (8*math.e**A)/(math.e**(2*A) - 2)
def getWHRatioB(Zo,er):
	"""
	Returns the W/H ratio for W/H > 2. If the result is < 2, then other
	method should be used.  Conductor thickness is assumed negligible.

	Parameters:
	- `Zo` : Real impedance of the line.
	- `er` : Relative permitivity of the dielectric.
	"""
	# Inlined getAuxVarB: B = 377*pi/(2*Zo*sqrt(er))
	B = (377*math.pi)/(2*Zo*math.sqrt(er))
	dielectric_correction = (er - 1)*(math.log(B-1) + 0.39 - 0.61/er)/(2*er)
	return (2/math.pi)*(B-1 - math.log(2*B - 1) + dielectric_correction)
def getCharacteristicImpedance(WHratio, ef):
	"""
	Returns the characteristic impedance of the line from the effective
	permittivity and W/H ratio, assuming negligible conductor thickness.

	Parameters:
	- `WHratio` : W/H ratio.
	- `ef` : Effective permitivity of the dielectric.
	"""
	if WHratio <= 1:
		# Narrow-strip closed form.
		return (60/math.sqrt(ef))*math.log(8/WHratio + WHratio/4)
	# Wide-strip closed form.
	geometry = WHratio + 1.393 + 0.667*math.log(WHratio + 1.444)
	return (120*math.pi/math.sqrt(ef))/geometry
def getWHRatio(Zo,er):
	"""
	Returns the W/H ratio, after trying with the two possible set of solutions,
	for when W/H < 2 or else. When no solution, returns zero.
	This function assumes that the thickenss of conductors is insignificant.
	Parameters:
	- `Zo` : Real impedance of the line.
	- `er` : Relative permitivity of the dielectric.
	"""
	# NOTE(review): despite the docstring, there is no zero-return path --
	# if neither branch ever satisfies its validity range this loop does
	# not terminate.
	efa = er
	efb = er
	Zoa = Zo
	Zob = Zo
	while 1:
		rA = getWHRatioA(Zoa,efa)
		rB = getWHRatioB(Zob,efb)
		if rA < 2:
			return rA
		if rB > 2:
			return rB
		# Rescale the target impedance by sqrt(permittivity) and retry.
		Zoa = math.sqrt(efa)*Zoa
		Zob = math.sqrt(efb)*Zob
def getCorrectedWidth(W,H,t):
	"""
	For significant conductor thickness, this returns the corrected width.
	Paramenters:
	- `W` : Width
	- `H` : Height
	- `t` : Conductor thickness
	"""
	# Validity region: strip thin relative to both substrate and width.
	if t < H and t < W/2:
		if W/H <= math.pi/2:
			return W + (1 + math.log(2*H/t))*(t/math.pi)
		else:
			return W + (1 + math.log(4*math.pi*H/t))*(t/math.pi)
	else:
		# NOTE(review): this branch only prints and implicitly returns
		# None; callers such as getConductorLoss will then fail on
		# arithmetic with None.
		print "The conductor is too thick!!"
def getConductorLoss(W,H,t,sigma,f,Zo):
	"""
	Returns the conductor loss in [Np/m].

	Uses the standard microstrip attenuation formulas with the
	thickness-corrected width We from getCorrectedWidth() and the
	conductor surface resistance Rs.

	Parameters:
	- `W` : Width
	- `H` : Height
	- `t` : Conductor thickness
	- `sigma` : Conductance of medium
	- `f` : Operating frequency
	- `Zo` : Characteristic impedance
	"""
	We = getCorrectedWidth(W,H,t)
	P = 1 - (We/4/H)**2
	# Surface resistance of the conductor at frequency f (u0 is the
	# module-level vacuum permeability).
	Rs = math.sqrt((math.pi*f*u0)/sigma)
	Q = 1 + H/We + (math.log((2*H)/t)-t/W)*H/(We*math.pi)
	if W/H <= 1/(2*math.pi):
		# BUG FIX: this branch used the undefined bare name `pi` (twice),
		# which raised NameError for narrow strips; it must be math.pi.
		return (1 + H/We + (math.log(4*math.pi*W/t) + t/W)*H/(math.pi*We))*(8.68*Rs*P)/(2*math.pi*Zo*H)
	elif W/H <= 2:
		return (8.68*Rs*P*Q)/(2*math.pi*Zo*H)
	else:
		return ((8.68*Rs*Q)/(Zo*H))*(We/H + (We/math.pi/H)/(We/2/H)+0.94)*((H/We + 2*math.log(We/2/H + 0.94)/math.pi)**(-2))
def getDielectricLoss(er,ef,tanD,f):
	"""
	Returns the dielectric loss in [dB/cm].

	Paramenters:
	- `er` : Relative permitivity of the dielectric
	- `ef` : Effective permitivity
	- `tanD` : loss tangent (tan delta)
	- `f` : Operating frequency
	"""
	# Guided wavelength at the operating frequency (c is the module-level
	# speed of light).
	wavelength = c/math.sqrt(ef)/f
	filling = er*(ef - 1)/(math.sqrt(er)*(er - 1))
	return 27.3*filling*tanD/wavelength
###############################################################################
##
## Copyright (C) 2014-2015, New York University.
## Copyright (C) 2011-2014, NYU-Poly.
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: contact@vistrails.org
##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the New York University nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
"""dialogs provides modules for user-based interaction on execution.
Users can then enter file names, numbers, strings, etc."""
from __future__ import division
from vistrails.core.packagemanager import get_package_manager
identifier = 'org.vistrails.vistrails.dialogs'
name = 'Dialogs'
version = '0.9.2'
old_identifiers = ['edu.utah.sci.vistrails.dialogs']
def package_dependencies():
    """Declare the spreadsheet package as an optional dependency.

    Returns a one-element list naming the spreadsheet package when it is
    installed, otherwise an empty list.
    """
    spreadsheet_id = 'org.vistrails.vistrails.spreadsheet'
    if get_package_manager().has_package(spreadsheet_id):
        return [spreadsheet_id]
    return []
| hjanime/VisTrails | vistrails/packages/dialogs/__init__.py | Python | bsd-3-clause | 2,480 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-10-28 08:32
from __future__ import unicode_literals
from django.db import migrations, models
# Maps the legacy integer ``section`` values to the new string
# identifiers used by the renamed ``section`` CharField below.
int_to_name = {1: 'digital',
               2: 'children-cartoons',
               3: 'children-novels',
               10: 'children-poetry',
               11: 'children-theatre',
               4: 'children-documentary',
               5: 'children-comics',
               6: 'adults-novels',
               7: 'adults-documentary',
               8: 'adults-comics',
               12: 'adults-poetry',
               13: 'adults-theatre',
               9: 'game',
               99: 'other'}
def migrate_data(app, schema_editor):
    """Copy each Book's integer ``section`` into ``section_named``.

    Looks the model up through the migration app registry and saves every
    row on the connection the migration runs against.
    """
    book_model = app.get_model('library', 'Book')
    alias = schema_editor.connection.alias
    for book in book_model.objects.using(alias).all():
        book.section_named = int_to_name[book.section]
        book.save()
class Migration(migrations.Migration):
    # Replaces Book.section (integer choices) with a string-keyed
    # CharField of the same name: add section_named, copy the data via
    # migrate_data, drop the old field, then rename.
    dependencies = [
        ('library', '0009_auto_20161027_0801'),
    ]
    operations = [
        migrations.AddField(
            model_name='book',
            name='section_named',
            field=models.CharField(choices=[('digital', 'digital'), ('children-cartoons', 'children - cartoons'), ('children-novels', 'children - novels'), ('children-poetry', 'children - poetry'), ('children-theatre', 'children - theatre'), ('children-documentary', 'children - documentary'), ('children-comics', 'children - comics'), ('adults-novels', 'adults - novels'), ('adults-poetry', 'adults - poetry'), ('adults-theatre', 'adults - theatre'), ('adults-documentary', 'adults - documentary'), ('adults-comics', 'adults - comics'), ('game', 'game'), ('other', 'other')], default='other', max_length=50, verbose_name='section'),
            preserve_default=False
        ),
        # We need to make this migration revertible for tests.
        # We do not support real backward migration, test will revert this
        # migration on an empty database, so we do not have to do a real
        # backward data migration. So just use a noop a backward migration.
        migrations.RunPython(migrate_data, migrations.RunPython.noop),
        migrations.RemoveField(model_name='book',
                               name='section'),
        migrations.RenameField(model_name='book',
                               old_name='section_named',
                               new_name='section')
    ]
| ideascube/ideascube | ideascube/library/migrations/0010_section_type.py | Python | agpl-3.0 | 2,441 |
"""Utilities for with-statement contexts. See PEP 343."""
import sys
from collections import deque
from functools import wraps
__all__ = ["contextmanager", "closing", "ContextDecorator", "ExitStack"]
class ContextDecorator(object):
    """Mixin allowing context managers to double as function decorators."""

    def _recreate_cm(self):
        """Return the context manager instance to use for a new call.

        One-shot managers (such as _GeneratorContextManager) override
        this to build a fresh instance per decorated call; the default
        simply reuses *self*.  Private interface for
        _GeneratorContextManager -- see issue #11647 for details.
        """
        return self

    def __call__(self, func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            with self._recreate_cm():
                return func(*args, **kwargs)
        return wrapper
class _GeneratorContextManager(ContextDecorator):
    """Helper for @contextmanager decorator."""
    def __init__(self, func, *args, **kwds):
        self.gen = func(*args, **kwds)
        # Keep the factory and its arguments so _recreate_cm() can build
        # a fresh one-shot manager for each decorated call.
        self.func, self.args, self.kwds = func, args, kwds
    def _recreate_cm(self):
        # _GCM instances are one-shot context managers, so the
        # CM must be recreated each time a decorated function is
        # called
        return self.__class__(self.func, *self.args, **self.kwds)
    def __enter__(self):
        # Run the generator up to its first yield; the yielded value
        # becomes the `as` target of the with statement.
        try:
            return next(self.gen)
        except StopIteration:
            raise RuntimeError("generator didn't yield")
    def __exit__(self, type, value, traceback):
        if type is None:
            # Normal exit: resume the generator, which must finish now.
            try:
                next(self.gen)
            except StopIteration:
                return
            else:
                raise RuntimeError("generator didn't stop")
        else:
            if value is None:
                # Need to force instantiation so we can reliably
                # tell if we get the same exception back
                value = type()
            try:
                self.gen.throw(type, value, traceback)
                raise RuntimeError("generator didn't stop after throw()")
            except StopIteration as exc:
                # Suppress the exception *unless* it's the same exception that
                # was passed to throw(). This prevents a StopIteration
                # raised inside the "with" statement from being suppressed
                return exc is not value
            except:
                # only re-raise if it's *not* the exception that was
                # passed to throw(), because __exit__() must not raise
                # an exception unless __exit__() itself failed. But throw()
                # has to raise the exception to signal propagation, so this
                # fixes the impedance mismatch between the throw() protocol
                # and the __exit__() protocol.
                #
                if sys.exc_info()[1] is not value:
                    raise
def contextmanager(func):
    """@contextmanager decorator.

    Decorating a generator of the form::

        @contextmanager
        def some_generator(<arguments>):
            <setup>
            try:
                yield <value>
            finally:
                <cleanup>

    makes ``with some_generator(<arguments>) as <variable>: <body>``
    behave like running <setup>, binding <variable> to <value>, executing
    <body> and always running <cleanup> afterwards.
    """
    @wraps(func)
    def factory(*args, **kwds):
        return _GeneratorContextManager(func, *args, **kwds)
    return factory
class closing(object):
    """Context manager that calls ``thing.close()`` on block exit.

    ``with closing(<module>.open(<arguments>)) as f: <block>`` is
    equivalent to opening the object, running <block>, and closing it in
    a ``finally`` clause.
    """
    def __init__(self, thing):
        self.thing = thing

    def __enter__(self):
        # The managed object itself is the `as` target.
        return self.thing

    def __exit__(self, *exc_info):
        self.thing.close()
# Inspired by discussions on http://bugs.python.org/issue13585
class ExitStack(object):
    """Context manager for dynamic management of a stack of exit callbacks

    For example:

        with ExitStack() as stack:
            files = [stack.enter_context(open(fname)) for fname in filenames]
            # All opened files will automatically be closed at the end of
            # the with statement, even if attempts to open files later
            # in the list raise an exception
    """
    def __init__(self):
        # LIFO stack of callables, each with the __exit__ signature.
        self._exit_callbacks = deque()
    def pop_all(self):
        """Preserve the context stack by transferring it to a new instance"""
        new_stack = type(self)()
        new_stack._exit_callbacks = self._exit_callbacks
        self._exit_callbacks = deque()
        return new_stack
    def _push_cm_exit(self, cm, cm_exit):
        """Helper to correctly register callbacks to __exit__ methods"""
        def _exit_wrapper(*exc_details):
            # cm_exit is an UNBOUND method, so the instance is passed in.
            return cm_exit(cm, *exc_details)
        _exit_wrapper.__self__ = cm
        self.push(_exit_wrapper)
    def push(self, exit):
        """Registers a callback with the standard __exit__ method signature

        Can suppress exceptions the same way __exit__ methods can.
        Also accepts any object with an __exit__ method (registering a call
        to the method instead of the object itself)
        """
        # We use an unbound method rather than a bound method to follow
        # the standard lookup behaviour for special methods
        _cb_type = type(exit)
        try:
            exit_method = _cb_type.__exit__
        except AttributeError:
            # Not a context manager, so assume its a callable
            self._exit_callbacks.append(exit)
        else:
            self._push_cm_exit(exit, exit_method)
        return exit # Allow use as a decorator
    def callback(self, callback, *args, **kwds):
        """Registers an arbitrary callback and arguments.

        Cannot suppress exceptions.
        """
        def _exit_wrapper(exc_type, exc, tb):
            # Exception details are deliberately ignored; the return value
            # (None) means exceptions are never suppressed.
            callback(*args, **kwds)
        # We changed the signature, so using @wraps is not appropriate, but
        # setting __wrapped__ may still help with introspection
        _exit_wrapper.__wrapped__ = callback
        self.push(_exit_wrapper)
        return callback # Allow use as a decorator
    def enter_context(self, cm):
        """Enters the supplied context manager

        If successful, also pushes its __exit__ method as a callback and
        returns the result of the __enter__ method.
        """
        # We look up the special methods on the type to match the with statement
        _cm_type = type(cm)
        _exit = _cm_type.__exit__
        # Look up __exit__ BEFORE calling __enter__, so a missing __exit__
        # fails before the resource is acquired.
        result = _cm_type.__enter__(cm)
        self._push_cm_exit(cm, _exit)
        return result
    def close(self):
        """Immediately unwind the context stack"""
        self.__exit__(None, None, None)
    def __enter__(self):
        return self
    def __exit__(self, *exc_details):
        received_exc = exc_details[0] is not None
        # We manipulate the exception state so it behaves as though
        # we were actually nesting multiple with statements
        frame_exc = sys.exc_info()[1]
        def _fix_exception_context(new_exc, old_exc):
            # Walk the __context__ chain of new_exc and graft old_exc onto
            # its end, stopping before the exception already active in this
            # frame (frame_exc) to avoid creating a context cycle.
            while 1:
                exc_context = new_exc.__context__
                if exc_context in (None, frame_exc):
                    break
                new_exc = exc_context
            new_exc.__context__ = old_exc
        # Callbacks are invoked in LIFO order to match the behaviour of
        # nested context managers
        suppressed_exc = False
        pending_raise = False
        while self._exit_callbacks:
            cb = self._exit_callbacks.pop()
            try:
                if cb(*exc_details):
                    # Callback suppressed the current exception; inner
                    # callbacks see a clean (None, None, None) state.
                    suppressed_exc = True
                    pending_raise = False
                    exc_details = (None, None, None)
            except:
                new_exc_details = sys.exc_info()
                # simulate the stack of exceptions by setting the context
                _fix_exception_context(new_exc_details[1], exc_details[1])
                pending_raise = True
                exc_details = new_exc_details
        if pending_raise:
            try:
                # bare "raise exc_details[1]" replaces our carefully
                # set-up context
                fixed_ctx = exc_details[1].__context__
                raise exc_details[1]
            except BaseException:
                exc_details[1].__context__ = fixed_ctx
                raise
        return received_exc and suppressed_exc
| amrdraz/brython | www/src/Lib/contextlib.py | Python | bsd-3-clause | 8,788 |
# -*- coding: utf-8 -*-
from __future__ import print_function
# from __future__ import unicode_literals
def concatenate(alignments, padding_length=0, partitions=None):
    '''
    Concatenate alignments based on the Seq ids; row order does not
    matter. If one alignment contains a Seq id that another one does
    not, gaps will be introduced in place of the missing Seq.

    Args:
        alignments: (tuple, list) Alignments to be concatenated.

        padding_length: Introduce this many gaps between concatenated
            alignments.

        partitions: (list of (start, end) tuples) 1-based column ranges
            already assigned to earlier alignments; used internally by
            the recursive calls.

    Returns:
        A single alignment when only one was given; otherwise a tuple
        (concatenated MultipleSeqAlignment, partitions list).
    '''
    from Bio import Alphabet
    from Bio.Seq import Seq
    from Bio.SeqRecord import SeqRecord
    from Bio.Align import MultipleSeqAlignment
    if not isinstance(alignments, (list, tuple)):
        raise ValueError('Argument must be a list or a tuple.')
    elif len(alignments) == 1:
        return alignments[0]
    if isinstance(alignments, tuple):
        alignments = list(alignments)
    aln1 = None
    aln2 = None
    if len(alignments) > 2:
        # More than two alignments: recursively concatenate all but the
        # last one, then merge the result with the last.
        aln2 = alignments.pop()
        result1 = concatenate(alignments=alignments,
                              padding_length=padding_length,
                              partitions=partitions)
        aln1 = result1[0]
        partitions = result1[1]
    elif len(alignments) == 2:
        aln1 = alignments[0]
        aln2 = alignments[1]
    if (not isinstance(aln1, MultipleSeqAlignment) or
        not isinstance(aln2, MultipleSeqAlignment)):
        raise ValueError(
            'Argument must inherit from Bio.Align.MultipleSeqAlignment.')
    alphabet = Alphabet._consensus_alphabet([aln1._alphabet, aln2._alphabet])
    aln1_dict = dict()
    aln2_dict = dict()
    for aln1_s in aln1:
        aln1_dict[aln1_s.id] = aln1_s
    for aln2_s in aln2:
        aln2_dict[aln2_s.id] = aln2_s
    aln1_length = aln1.get_alignment_length()
    aln2_length = aln2.get_alignment_length()
    # All-gap rows substituted for ids missing from one of the alignments.
    aln1_gaps = SeqRecord(Seq('-' * aln1_length, alphabet))
    aln2_gaps = SeqRecord(Seq('-' * aln2_length, alphabet))
    # Spacer of unknown bases inserted between the two alignments.
    padding = SeqRecord(Seq('N' * padding_length, alphabet))
    if not partitions:
        partitions = [(1, aln1_length)]
    # NOTE(review): the start of this range does not account for the
    # padding columns (it is 1 + aln1_length, not
    # 1 + aln1_length + padding_length) -- confirm this bookkeeping is
    # intended when padding_length > 0.
    partitions.append((1 + aln1_length, padding_length + aln1_length + aln2_length))
    result_seq_list = list()
    for aln1_key in aln1_dict.keys():
        merged_Seq = None
        if aln1_key in aln2_dict:
            # Id present in both alignments: join the rows with padding.
            merged_Seq = aln1_dict[aln1_key] + padding + aln2_dict[aln1_key]
            merged_Seq.id = aln1_dict[aln1_key].id
            merged_Seq.name = ''
            merged_Seq.description = ''
            aln2_dict.pop(aln1_key)
        else:
            # Only in aln1: pad the right-hand side with gaps.
            aln1_seq_record = aln1_dict[aln1_key]
            merged_Seq = aln1_seq_record + padding + aln2_gaps
            merged_Seq.id = aln1_seq_record.id
            merged_Seq.name = ''
            merged_Seq.description = ''
        result_seq_list.append(merged_Seq)
    # Remaining aln2 rows had no counterpart in aln1: pad on the left.
    for aln2_seq_record in aln2_dict.values():
        merged_Seq = aln1_gaps + padding + aln2_seq_record
        merged_Seq.id = aln2_seq_record.id
        merged_Seq.name = ''
        merged_Seq.description = ''
        result_seq_list.append(merged_Seq)
    result_alignment = MultipleSeqAlignment(result_seq_list, alphabet)
    result_alignment.sort()
    return((result_alignment, partitions))
def align(records, program, options='', program_executable=''):
    """Align *records* by piping them (as FASTA) through an external aligner.

    Args:
        records: Iterable of Bio SeqRecord objects to align.
        program: One of 'muscle', 'mafft' or 'clustalo'.
        options: Extra command-line options as a single string.
        program_executable: Path/name of the binary; defaults to *program*.

    Returns:
        A Bio.Align.MultipleSeqAlignment, or None when *program* is not
        one of the recognised names.
    """
    import subprocess
    from StringIO import StringIO  # NOTE(review): Python 2 module
    from Bio import AlignIO
    from Bio import SeqIO
    import shlex
    input_handle = StringIO()
    SeqIO.write(records, input_handle, 'fasta')
    args = None
    options = shlex.split(options)
    if program_executable == '':
        program_executable = program
    if program == 'muscle':
        args = [program_executable, '-quiet'] + options + ['-in', '-', '-out', '-']
    elif program == 'mafft':
        # Drop --maxiterate (flag and its value) for very large inputs.
        if (len(records) > 5999) and ('--maxiterate' in options):
            maxiterate_index = options.index('--maxiterate')
            options.pop(maxiterate_index + 1)
            options.pop(maxiterate_index)
        args = [program_executable, '--quiet'] + options + ['-']
    elif program == 'clustalo':
        # Fix: this was a stand-alone `if`, breaking the if/elif chain the
        # other programs use; behaviour is unchanged, the chain is now
        # consistent.
        args = [program_executable] + options + ['-i', '-']
    alignment = None
    if args:
        # Argument list with shell=False (the Popen default): no shell
        # interpretation of user-supplied options.
        pipe = subprocess.Popen(
            args=args,
            bufsize=0,
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            universal_newlines=True)
        data = pipe.communicate(input=input_handle.getvalue())
        # NOTE(review): stderr (data[1]) and the exit status are ignored;
        # a failed run surfaces as an AlignIO parse error instead.
        alignment_string = StringIO(data[0])
        alignment = AlignIO.read(alignment_string, 'fasta')
    return alignment
# def pairwise_identity(
# alignment,
# unknown_letters=set(['N']),
# unknown_id=0.0,
# free_unknowns=True,
# gap_id=0.0,
# free_gaps=True,
# end_gap_id=0.0,
# free_end_gaps=True):
# import sys
# from krpy import kriupac
# if len(alignment) != 2:
# print('Alignment must contain exactly two sequences.')
# sys.exit(1)
# end_gap_letter = '#'
# col_count = alignment.get_alignment_length()
# # Produce a list of string representations of the sequences in alignment.
# # Leading and trailing gaps will be replaced with term_gap_letter.
# aln_seq_str_list = list()
# for aln_seq in alignment:
# aln_str = str(aln_seq.seq)
# aln_str_l_strip = aln_str.lstrip(kriupac.IUPAC_DNA_GAPS_STRING)
# left_gap_count = len(aln_str) - len(aln_str_l_strip)
# aln_str_l_r_strip = aln_str_l_strip.rstrip(kriupac.IUPAC_DNA_GAPS_STRING)
# right_gap_count = len(aln_str_l_strip) - len(aln_str_l_r_strip)
# aln_str_term_gaps = left_gap_count * end_gap_letter + aln_str_l_r_strip + right_gap_count * end_gap_letter
# aln_seq_str_list.append(aln_str_term_gaps)
# # Produce a list of alignment column strings.
# aln_column_str_list = list()
# for col_idx in range(0, col_count):
# aln_column_str = ''
# for aln_seq_str in aln_seq_str_list:
# aln_column_str = aln_column_str + aln_seq_str[col_idx]
# aln_column_str_list.append(aln_column_str)
# # print('--- --- --- --- --- --- --- --- --- --- --- ---')
# score_list = list()
# weights_list = list()
# for col_idx in range(0, col_count):
# col_str = aln_column_str_list[col_idx]
# l1 = col_str[0]
# l2 = col_str[1]
# if l1 in kriupac.IUPAC_DNA_DICT_REVERSE.keys():
# l1 = kriupac.IUPAC_DNA_DICT_REVERSE[l1]
# if l2 in kriupac.IUPAC_DNA_DICT_REVERSE.keys():
# l2 = kriupac.IUPAC_DNA_DICT_REVERSE[l2]
# l1 = set(l1)
# l2 = set(l2)
# #
# end_gap_in_l1 = False
# end_gap_in_l2 = False
# end_gap_in_col = False
# if end_gap_letter in l1:
# end_gap_in_l1 = True
# if end_gap_letter in l2:
# end_gap_in_l2 = True
# if end_gap_in_l1 or end_gap_in_l2:
# end_gap_in_col = True
# #
# gap_in_l1 = False
# gap_in_l2 = False
# gap_in_col = False
# for g in list(kriupac.IUPAC_DNA_GAPS):
# if g in l1:
# gap_in_l1 = True
# if g in l2:
# gap_in_l2 = True
# if gap_in_l1 or gap_in_l2:
# gap_in_col = True
# #
# unknown_in_l1 = False
# unknown_in_l2 = False
# unknown_in_col = False
# for u in list(unknown_letters):
# if u in l1:
# unknown_in_l1 = True
# if u in l2:
# unknown_in_l2 = True
# if unknown_in_l1 or unknown_in_l2:
# unknown_in_col = True
# #
# score = 0.0
# weight = 0.0
# if end_gap_in_col and gap_in_col:
# weight = 0.0
# elif unknown_in_l1 and unknown_in_l2:
# weight = 0.0
# elif not free_end_gaps and end_gap_in_col:
# score = end_gap_id
# weight = 1.0
# elif not free_gaps and gap_in_col:
# score = gap_id
# weight = 1.0
# elif not free_unknowns and unknown_in_col:
# score = unknown_id
# weight = 1.0
# elif (not end_gap_in_col) and (not gap_in_col) and (not unknown_in_col):
# intersection = l1 & l2
# union = l1 | l2
# score = float(len(intersection)) / float(len(union))
# weight = 1.0
# score_list.append(score)
# weights_list.append(weight)
# # print(l1, l2, score, weight)
# # print('--- --- --- --- --- --- --- --- --- --- --- ---')
# pair_id = 0.0
# if sum(weights_list) > 0.0:
# pair_id = sum(score_list) / sum(weights_list)
# # else:
# # pair_id = 1.0
# # print(pair_id)
# return pair_id
# def identity(
# alignment,
# unknown_letters=set(['N']),
# unknown_id=0.0,
# free_unknowns=True,
# gap_id=0.0,
# free_gaps=True,
# end_gap_id=0.0,
# free_end_gaps=True):
# from Bio.Align import MultipleSeqAlignment
# row_count = len(alignment)
# pair_id_list = list()
# done = set()
# for i in range(0, row_count):
# for j in range(0, row_count):
# if i == j:
# continue
# str_1 = str(i)+','+str(j)
# str_2 = str(j)+','+str(i)
# if (str_1 in done) or (str_2 in done):
# continue
# done.add(str_1)
# done.add(str_2)
# # print(str_1)
# aln = MultipleSeqAlignment(records=[alignment[i], alignment[j]])
# pair_id = pairwise_identity(
# alignment=aln,
# unknown_letters=unknown_letters,
# unknown_id=unknown_id,
# free_unknowns=free_unknowns,
# gap_id=gap_id,
# free_gaps=free_gaps,
# end_gap_id=end_gap_id,
# free_end_gaps=free_end_gaps)
# # print(alignment[i].id, alignment[j].id, pair_id)
# if pair_id > 0.0:
# pair_id_list.append(pair_id)
# # print(sum(pair_id_list))
# # print(len(pair_id_list))
# ident = sum(pair_id_list) / len(pair_id_list)
# return ident
def identity(
        alignment,
        unknown_letters=set(['N']),
        free_unknowns=True,
        free_gaps=True,
        free_end_gaps=True,
        return_all_pairwise_identities=False):
    """Average per-column identity of *alignment*, in [0.0, 1.0].

    For each column the score is the fraction of equal-letter pairs among
    all pairs of rows (combinations of two).  Letters in *unknown_letters*,
    internal gaps and terminal gaps are removed from a column before
    scoring when the corresponding ``free_*`` flag is True; columns that
    become empty are skipped.

    Args:
        alignment: Bio MultipleSeqAlignment-like object.
        unknown_letters: Letters treated as missing data.
        free_unknowns / free_gaps / free_end_gaps: Exclude that letter
            class from scoring instead of counting it against identity.
        return_all_pairwise_identities: If True, also return the list of
            per-column scores (note: despite the name, the extra list is
            per-column, not per-pair).

    Returns:
        float, or ``(float, list)`` when return_all_pairwise_identities.
    """
    try:
        # Fix: scipy.misc.comb was deprecated and then removed from SciPy;
        # the binomial coefficient now lives in scipy.special.
        from scipy.special import comb
    except ImportError:
        from scipy.misc import comb
    from krpy import kriupac
    end_gap_letter = '#'
    col_count = alignment.get_alignment_length()
    # Stringify the rows, replacing leading/trailing gap runs with a
    # sentinel so terminal gaps can be treated separately from internal
    # ones.
    aln_seq_str_list = list()
    for aln_seq in alignment:
        aln_str = str(aln_seq.seq).upper()
        aln_str_l_strip = aln_str.lstrip(kriupac.IUPAC_DNA_GAPS_STRING)
        left_gap_count = len(aln_str) - len(aln_str_l_strip)
        aln_str_l_r_strip = aln_str_l_strip.rstrip(kriupac.IUPAC_DNA_GAPS_STRING)
        right_gap_count = len(aln_str_l_strip) - len(aln_str_l_r_strip)
        aln_str_term_gaps = left_gap_count * end_gap_letter + aln_str_l_r_strip + right_gap_count * end_gap_letter
        aln_seq_str_list.append(aln_str_term_gaps)
    # Produce one string per alignment column.
    aln_column_str_list = list()
    for col_idx in range(0, col_count):
        aln_column_str = ''
        for aln_seq_str in aln_seq_str_list:
            aln_column_str = aln_column_str + aln_seq_str[col_idx]
        aln_column_str_list.append(aln_column_str)
    ident_list = list()
    for col_str in aln_column_str_list:
        clean_col_str = col_str
        if free_unknowns:
            for u in list(unknown_letters):
                clean_col_str = clean_col_str.replace(u, '')
        if free_gaps:
            for g in list(kriupac.IUPAC_DNA_GAPS):
                clean_col_str = clean_col_str.replace(g, '')
        if free_end_gaps:
            clean_col_str = clean_col_str.replace(end_gap_letter, '')
        if clean_col_str == '':
            # Nothing scoreable remains in this column.
            continue
        letter_counts = dict()
        for l in clean_col_str:
            letter_counts[l] = letter_counts.get(l, 0) + 1
        total_l = len(clean_col_str)
        if total_l == 1:
            # A single residue trivially agrees with itself.
            ident_list.append(1.0)
            continue
        total_comb = comb(total_l, 2)
        l_comb_sum = 0.0
        for l in letter_counts.keys():
            c = letter_counts[l]
            if c > 1:
                l_comb_sum = l_comb_sum + comb(c, 2)
        ident_list.append(l_comb_sum / total_comb)
    # Fix: average over scoreable columns, returning 0.0 when every column
    # was skipped (the previous version raised ZeroDivisionError).
    ident = 0.0
    if ident_list:
        ident = sum(ident_list) / float(len(ident_list))
    if return_all_pairwise_identities:
        return (ident, ident_list)
    return ident
def consensus(
        alignment,
        threshold=0.0,
        unknown='N',
        resolve_ambiguities=False):
    """Build a consensus sequence for *alignment*.

    Args:
        alignment: MultipleSeqAlignment-like object to summarise.
        threshold: Minimum per-column frequency for a base to be kept;
            0.0 means "keep only the most frequent base(s)".
        unknown: Letter emitted where no base qualifies or the column is
            fully ambiguous.
        resolve_ambiguities: If True, post-process the consensus with
            krseq.resolve_ambiguities.

    Returns:
        Bio.Seq.Seq with a DNA alphabet, or RNA if any 'U' was seen.
    """
    from Bio import Seq
    from Bio.Alphabet import generic_dna
    from Bio.Alphabet import generic_rna
    from krpy import krseq
    from krpy import kriupac
    uracil = False
    col_count = alignment.get_alignment_length()
    # row_count = len(alignment)
    cons_str = ''
    for col_idx in range(0, col_count):
        col_str = alignment[:, col_idx]
        col_counts = dict()
        col_counts_expanded = dict()
        col_total = float()
        col_proportions = dict()
        col_cons_set = set()
        # Count bases in column.
        for letter in col_str:
            letter = letter.upper()
            if letter == 'U':
                # Remember RNA input; work in DNA space and convert back
                # at the end.
                uracil = True
                letter = 'T'
            if letter not in kriupac.IUPAC_DNA_GAPS:
                col_counts[letter] = col_counts.get(letter, 0) + 1.0
        # Expand IUPAC ambiguity codes into their component bases so each
        # component shares the ambiguous letter's count.
        for k in col_counts.keys():
            if k in kriupac.IUPAC_DNA_DICT_REVERSE:
                for letter in kriupac.IUPAC_DNA_DICT_REVERSE[k]:
                    col_counts_expanded[letter] = col_counts_expanded.get(letter, 0) + col_counts[k]
            else:
                col_counts_expanded[k] = col_counts_expanded.get(k, 0) + col_counts[k]
        for k in col_counts_expanded.keys():
            base_count = col_counts_expanded[k]
            col_total = col_total + base_count
        for k in col_counts_expanded.keys():
            base_count = col_counts_expanded[k]
            base_prop = 0.0
            if col_total > 0.0:
                base_prop = base_count / col_total
            col_proportions[k] = base_prop
        # Keep only the bases that occur at a high enough frequency
        if len(col_proportions) > 0.0 and threshold == 0.0:
            # threshold 0.0: keep only the modal base(s) of the column.
            max_prop = max(col_proportions.values())
            if max_prop != 0.0:
                for k in col_proportions.keys():
                    if col_proportions[k] == max_prop:
                        col_cons_set.add(k)
        else:
            for k in col_proportions.keys():
                if col_proportions[k] >= threshold:
                    col_cons_set.add(k)
        if len(col_cons_set) == 0:
            col_cons_set.add(unknown)
        col_cons_list = list(col_cons_set)
        col_cons_list.sort()
        col_str_new = ''.join(col_cons_list)
        # Drop the unknown letter when real bases are present as well.
        if (unknown in col_str_new) and len(col_str_new) > 1:
            col_str_new = col_str_new.replace(unknown, '')
        if ('N' in col_str_new) and len(col_str_new) > 1:
            col_str_new = col_str_new.replace('N', '')
        site = unknown
        if (col_str_new == unknown) or (col_str_new == 'N'):
            site = unknown
        elif col_str_new == kriupac.IUPAC_DNA_STRING:
            # Every base possible: no information, emit the unknown letter.
            site = unknown
        else:
            # Map the base combination to its IUPAC ambiguity code.
            # NOTE(review): raises KeyError if the combination is not in
            # IUPAC_DNA_DICT -- presumably all sorted combinations are
            # covered; confirm against kriupac.
            site = kriupac.IUPAC_DNA_DICT[col_str_new]
        cons_str = cons_str + site
    if resolve_ambiguities:
        cons_str = krseq.resolve_ambiguities(cons_str)
    alphabet = generic_dna
    if uracil:
        cons_str = cons_str.replace('T', 'U')
        alphabet = generic_rna
    cons_seq = Seq.Seq(cons_str, alphabet)
    ret_value = cons_seq
    return ret_value
def pairwise_coverage(pairwise_alignment):
    """Return ``[coverage_of_first, coverage_of_second]`` for a two-row
    alignment.

    Coverage is the length of the contiguous region where both rows carry
    residues (terminal single-occupancy columns trimmed) divided by each
    row's ungapped-terminal length.  Prints an error and returns None when
    the alignment does not hold exactly two rows.
    """
    if len(pairwise_alignment) != 2:
        print('Alignment must contain two sequences.')
        return
    num_cols = pairwise_alignment.get_alignment_length()
    first_len = len(str(pairwise_alignment[0].seq).lstrip('-').rstrip('-'))
    second_len = len(str(pairwise_alignment[1].seq).lstrip('-').rstrip('-'))
    # One digit per column: how many of the two rows have a residue there.
    occupancy = ''.join(
        str(len(pairwise_alignment[:, idx].replace('-', '')))
        for idx in range(num_cols))
    # Columns where only one row is present at either end do not count
    # towards the overlap.
    leading = num_cols - len(occupancy.lstrip('1'))
    trailing = num_cols - len(occupancy.rstrip('1'))
    overlap = num_cols - leading - trailing
    return [overlap / float(first_len), overlap / float(second_len)]
def cluster(
        records,
        threshold=0.95,
        unknown='N',
        key='gi',
        aln_program='mafft',
        aln_executable='mafft',
        aln_options='--auto --reorder --adjustdirection',
        seeds=None,
        seed_coverage=0.5,
        query_coverage=0.9,
        free_gaps=True):
    """Greedily cluster sequence records by pairwise alignment identity.

    Each seed record is aligned against every not-yet-clustered record; a
    record joins the seed's cluster when alignment identity reaches
    *threshold* and both coverage requirements are met.

    Args:
        records: List of Bio SeqRecord-like objects; sorted longest-first
            internally so the longest free record seeds each cluster.
        threshold: Minimum alignment identity (see ``identity``).
        unknown: Unused in this function body; kept in the signature.
        key: Which attribute identifies a record: 'accession', 'gi' or
            'description' (anything else falls back to ``.id``).
        aln_program / aln_executable / aln_options: Passed to ``align``.
        seeds: Optional explicit seed records; by default the records
            themselves act as seeds.
        seed_coverage: Minimum fraction of the seed covered by the overlap.
        query_coverage: Minimum fraction of the query covered.
        free_gaps: Passed through to ``identity``.

    Returns:
        dict mapping seed id -> list of [direction, record id, score]
        entries, plus an 'unclustered' key listing records that joined
        no cluster.
    """
    from krpy import krother
    from krpy import krcl
    results_dict = dict()
    consumed_ids = list()
    seed_ids = list()
    record_count = len(records)
    # Longest sequences first, so the longest free record seeds a cluster.
    records = sorted(records, key=lambda x: len(x.seq), reverse=True)
    records_seeds = records
    if seeds:
        records_seeds = seeds
    # Pre-compute the identifier of every seed record.
    for seed_rec in records_seeds:
        key_value = None
        if key == 'accession':
            key_value = seed_rec.id
        elif key == 'gi':
            key_value = seed_rec.annotations['gi']
        elif key == 'description':
            key_value = seed_rec.description
        else:
            key_value = seed_rec.id
        s_id = key_value
        seed_ids.append(s_id)
    for a_rec in records_seeds:
        # print('a_rec', a_rec)
        key_value = None
        if key == 'accession':
            key_value = a_rec.id
        elif key == 'gi':
            key_value = a_rec.annotations['gi']
        elif key == 'description':
            key_value = a_rec.description
        else:
            key_value = a_rec.id
        a_id = key_value
        if not seeds:
            # Auto-seeding: a record already absorbed by an earlier cluster
            # must not seed a new one.
            if a_id in consumed_ids:
                continue
        results_dict[a_id] = list()
        if a_id not in consumed_ids:
            # The seed itself is the first member of its own cluster.
            results_dict[a_id].append(['+', a_id, '1.0'])
            consumed_ids.append(a_id)
        for i, b_rec in enumerate(records):
            krcl.print_progress(
                current=len(consumed_ids), total=record_count, length=0,
                prefix=krother.timestamp() + ' ',
                postfix=' records clustered. Checking ' + str(i) + '/' + str(record_count) + '.',
                show_bar=False)
            # print('b_rec', b_rec)
            key_value = None
            if key == 'accession':
                key_value = b_rec.id
            elif key == 'gi':
                key_value = b_rec.annotations['gi']
            elif key == 'description':
                key_value = b_rec.description
            else:
                key_value = b_rec.id
            b_id = key_value
            if a_id == b_id:
                continue
            if b_id in consumed_ids:
                continue
            aln = align(
                records=[a_rec, b_rec],
                program=aln_program,
                options=aln_options,
                program_executable=aln_executable)
            # print(aln)
            pw_cov = pairwise_coverage(pairwise_alignment=aln)
            # print(pw_cov)
            a_cov = pw_cov[0]
            b_cov = pw_cov[1]
            # if a_rec.id in pw_cov.keys():
            #     a_cov = pw_cov[a_rec.id]
            # else:
            #     a_cov = pw_cov['_R_' + a_rec.id]
            # if b_rec.id in pw_cov.keys():
            #     b_cov = pw_cov[b_rec.id]
            # else:
            #     b_cov = pw_cov['_R_' + b_rec.id]
            direction = '+'
            for a in aln:
                # This will only work with MAFFT!
                # MAFFT's --adjustdirection prefixes reverse-complemented
                # ids with '_R_'.
                if a.id.startswith('_R_'):
                    direction = '-'
                    break
            # score = pairwise_identity(
            #     alignment=aln,
            #     unknown_letters=set(['N']),
            #     unknown_id=0.0,
            #     free_unknowns=True,
            #     gap_id=0.0,
            #     free_gaps=True,
            #     end_gap_id=0.0,
            #     free_end_gaps=True)
            score = identity(
                alignment=aln,
                unknown_letters=set(['N']),
                free_unknowns=True,
                free_gaps=free_gaps,
                free_end_gaps=True)
            if (score >= threshold) and (a_cov >= seed_coverage) and (b_cov >= query_coverage):
                results_dict[a_id].append([direction, b_id, score])
                consumed_ids.append(b_id)
        krcl.clear_line()
        # print(a_id, ':', b_id, '=', score, '|', a_cov, b_cov)
    # Report unclustered ids
    results_dict['unclustered'] = list()
    for rec in records:
        key_value = None
        if key == 'accession':
            key_value = rec.id
        elif key == 'gi':
            key_value = rec.annotations['gi']
        elif key == 'description':
            key_value = rec.description
        else:
            key_value = rec.id
        rec_id = key_value
        if rec_id not in consumed_ids:
            results_dict['unclustered'].append(['.', rec_id, '0.0'])
    return results_dict
def dereplicate(
        records,
        threshold=0.95,
        unknown='N',
        key='gi',
        aln_program='mafft',
        aln_executable='mafft',
        aln_options='--auto --reorder --adjustdirection',
        seed_coverage=0.5,
        query_coverage=0.9):
    """Collapse near-identical records, keeping one representative each.

    Runs ``cluster`` over *records* and returns the records whose
    identifier names a cluster (i.e. the cluster seeds).
    """
    clusters = cluster(
        records=records,
        threshold=threshold,
        unknown=unknown,
        key=key,
        aln_program=aln_program,
        aln_executable=aln_executable,
        aln_options=aln_options,
        seeds=None,
        seed_coverage=seed_coverage,
        query_coverage=query_coverage)

    def record_key(rec):
        # Mirror the identifier scheme that ``cluster`` uses.
        if key == 'accession':
            return rec.id
        elif key == 'gi':
            return rec.annotations['gi']
        elif key == 'description':
            return rec.description
        return rec.id

    dereplicated = list()
    for clust_key in clusters.keys():
        for rec in records:
            if record_key(rec) == clust_key:
                dereplicated.append(rec)
                break
    # Should also probably return cluster info, so it is clear which
    # records clustered together.
    return dereplicated
def determine_conserved_regions(alignment_file, matrix, window, min_length, cutoff):
    """Score *alignment_file* with score_conservation.py and return runs of
    conserved columns.

    Args:
        alignment_file: Path to the alignment to score.
        matrix: Scoring-matrix name (resolved under
            /usr/local/conservation_code/matrix/<matrix>.bla).
        window: Window size passed to score_conservation.py (-w).
        min_length: Minimum number of consecutive qualifying positions
            for a run to be reported.
        cutoff: Minimum conservation score for a position to qualify.

    Returns:
        List of regions, each a list of [1-based position, score] pairs.
    """
    import subprocess
    import csv
    import os
    directory = os.path.split(alignment_file)[0]
    cons_scores = directory + os.path.sep + 'conservation_scores.tsv'
    # Fix: argument list with shell=False instead of a string-built shell
    # command, so paths with spaces/metacharacters cannot break or inject
    # into the command line.
    subprocess.call([
        'score_conservation.py',
        '-o', cons_scores,
        '-m', '/usr/local/conservation_code/matrix/' + matrix + '.bla',
        '-w', str(window),
        alignment_file])
    regions = []
    region = []
    # NOTE(review): 'rb' is the Python 2 csv convention; under Python 3
    # this needs mode 'r' with newline='' -- confirm target interpreter.
    # Fix: the file handle is now closed (the original leaked it).
    with open(cons_scores, 'rb') as scores_handle:
        cons_csv = csv.reader(scores_handle, delimiter='\t')
        for row in cons_csv:
            if row[0].startswith('#'):
                # Header/comment line from score_conservation.py.
                continue
            pos = int(row[0]) + 1
            con = float(row[1])
            if con >= float(cutoff):
                region.append([pos, con])
            else:
                # A low-scoring position ends the current run; keep it only
                # if it was long enough.
                if len(region) >= min_length:
                    regions.append(region)
                region = []
    # Flush a qualifying run that reaches the end of the file.
    if len(region) >= min_length:
        regions.append(region)
    print('There are ' + str(len(regions)) + ' conserved regions.')
    return regions
def slice_out_conserved_regions(regions, alignment_file, name_prefix, output_dir_path):
    """Write each conserved region as its own dereplicated FASTA file.

    For every region (as produced by ``determine_conserved_regions``) the
    corresponding alignment columns are sliced out, gap-containing rows are
    dropped, the slice is dereplicated with usearch and its records are
    renamed ``<name_prefix><region#>_<seq#>``.

    Args:
        regions: List of regions; each is a list of [1-based position,
            score] pairs.
        alignment_file: FASTA alignment to slice.
        name_prefix: Prefix for output file and record names.
        output_dir_path: Directory that receives the per-region files.
    """
    from Bio import AlignIO
    from Bio.Align import MultipleSeqAlignment
    import subprocess
    import os
    # Fix: rstrip, not strip -- stripping a LEADING os.path.sep turned an
    # absolute output path into a relative one.
    directory = output_dir_path.rstrip(os.path.sep)
    with open(alignment_file) as aln_handle:
        alignment = AlignIO.read(aln_handle, "fasta")
    for i in range(0, len(regions)):
        region = regions[i]
        start = region[0][0] - 1  # positions are 1-based; slices are 0-based
        stop = region[-1][0]
        name = name_prefix + str(i + 1)
        sliced_alignment = alignment[:, start:stop]
        sliced_alignment_edited = MultipleSeqAlignment(None)
        output_path = directory + os.path.sep + name + '.fasta'
        # Keep only the sequences that are gap-free within the slice.
        for record in sliced_alignment:
            if not "-" in str(record.seq):
                sliced_alignment_edited.append(record)
        AlignIO.write(sliced_alignment_edited, output_path, "fasta")
        # Fix: argument list with shell=False so the output path is never
        # interpreted by a shell.
        subprocess.call([
            'usearch', '-quiet', '-minseqlength', '1',
            '-derep_fulllength', output_path,
            '-output', output_path])
        with open(output_path) as sliced_handle:
            sliced_alignment_new = AlignIO.read(sliced_handle, "fasta")
        j = 1
        for record in sliced_alignment_new:
            record.id = name + '_' + str(j)
            record.description = ''
            record.name = ''
            j = j + 1
        AlignIO.write(sliced_alignment_new, output_path, "fasta")
    return
# if __name__ == '__main__':
# # Tests
# import os
# PS = os.path.sep
# import krbioio
# aln = krbioio.read_alignment_file('/Users/karolis/Desktop/aln_4.phy', 'phylip-relaxed')
# ident = identity(
# alignment=aln,
# unknown_letters=set(['N']),
# free_unknowns=True,
# free_gaps=True,
# free_end_gaps=True)
# print(ident)
# pc = pairwise_coverage(pairwise_alignment=aln)
# print(pc)
# ident = identity(
# alignment=aln,
# unknown_letters=set(['N']),
# unknown_id=0.0,
# free_unknowns=True,
# gap_id=0.0,
# free_gaps=True,
# end_gap_id=0.0,
# free_end_gaps=True)
# print(ident)
# pid = pairwise_identity(
# alignment=aln,
# unknown_letters=set(['N']),
# unknown_id=0.0,
# free_unknowns=True,
# gap_id=0.0,
# free_gaps=True,
# end_gap_id=0.0,
# free_end_gaps=True)
# print(pid)
# cons = consensus(
# alignment=aln,
# threshold=0.4,
# unknown='N',
# resolve_ambiguities=False)
# print(cons)
# recs = krbioio.read_sequence_file(
# file_path='/Users/karolis/Desktop/Actinidia_chinensis__mRNA.gb',
# file_format='genbank',
# ret_type='list'
# )
# cluster(
# records=recs,
# threshold=0.99,
# unknown='N',
# key='gi',
# aln_program='mafft',
# aln_executable='mafft',
# aln_options='--auto --reorder --adjustdirection')
| karolisr/krpy | krpy/kralign.py | Python | gpl-3.0 | 27,802 |
# -*- coding: utf-8 -*-
# 商户的爬虫
import sys
import re
import scrapy
import itertools
import MySQLdb
from dianping.items import MerchantItem
from dianping.Util import getXpathFirst
from dianping.pipelines import DB_ADDR, DB_PORT, DB_PASSWORD
# Dianping numeric ids selecting what to crawl: ``city`` is the city id,
# ``cate`` the top-level category id; together they form the search base URL.
city = '3'
cate = '10'
BASE_URL = 'http://www.dianping.com/search/category/{city}/{cate}/'.format(city=city, cate=cate)
def crawUrls():
    """Build one first-page listing URL per (classification, region) pair
    stored in the previously crawled T_classfy / T_region lookup tables."""
    conn = MySQLdb.connect(host=DB_ADDR, port=DB_PORT, user='root', passwd=DB_PASSWORD, db='dianping',
                           charset='utf8')
    cursor = conn.cursor()
    cursor.execute('SELECT id FROM T_classfy;')
    classfy_ids = [row[0] for row in cursor.fetchall()]
    cursor.execute('SELECT id FROM T_region;')
    region_ids = [row[0] for row in cursor.fetchall()]
    # URL shape: <base>/<classfy><region>p1 (page 1 of every combination).
    urls = [BASE_URL + classfy_id + region_id + 'p1'
            for classfy_id, region_id in itertools.product(classfy_ids, region_ids)]
    conn.commit()
    conn.close()
    return urls
class MerchantsSpider(scrapy.Spider):
    """Spider that walks Dianping search-result pages and yields one
    MerchantItem per listed shop, following the next-page chain itself."""
    name = "merchants"
    allowed_domains = ["dianping.com"]
    # 查询数据库中的classfy和region两张表,用这两张表,拼接成url,然后添加到start_urls中
    start_urls = crawUrls()

    def parse(self, response):
        # Dump the raw page for offline debugging.
        with open('{}.html'.format(self.name), 'w') as f:
            f.write(response.text.encode('utf-8'))
        print(response.url)
        # Classification, region and page number are encoded in the URL as
        # /<classfy><region>p<page>.  Hoisted out of the per-shop loop: the
        # values are loop-invariant, and the original left ``curpage``
        # undefined (NameError) whenever a page listed no shops.
        # NOTE(review): assumes the URL always matches this pattern;
        # re.search would return None otherwise -- confirm.
        result = re.search(r'/(\w\d+)(\w\d+)p(\d+)', response.url)
        classfy = result.group(1)
        region = result.group(2)
        curpage = result.group(3)
        # //*[@id="shop-all-list"]/ul/li/div[2]/div[1]/a/h4
        node = response.xpath('//*[@id="shop-all-list"]/ul/li')
        for each in node:
            item = MerchantItem()
            item['name'] = getXpathFirst(each.xpath('div[2]/div[1]/a/h4/text()').extract())
            item['star'] = getXpathFirst(each.xpath('div[2]/div[2]/span[1]/@title').extract(), None)
            item['kouwei'] = getXpathFirst(each.xpath('div[2]/span[1]/span[1]/b/text()').extract(), 0)
            item['huanjing'] = getXpathFirst(each.xpath('div[2]/span[1]/span[2]/b/text()').extract(), 0)
            item['fuwu'] = getXpathFirst(each.xpath('div[2]/span[1]/span[3]/b/text()').extract(), 0)
            item['tag'] = getXpathFirst(each.xpath('div[2]/div[3]/a[1]/span/text()').extract(), None)
            ave_price = getXpathFirst(each.xpath('div[2]/div[2]/a[2]/b/text()').extract(), '0')
            item['ave_price'] = re.search(r'(\d+)', ave_price).group(1)
            item['comment_count'] = getXpathFirst(each.xpath('div[2]/div[2]/a[1]/b/text()').extract(), 0)
            item['address'] = getXpathFirst(each.xpath('div[2]/div[3]/span/text()').extract(), None)
            item['img_url'] = getXpathFirst(each.xpath('div[1]/a/img/@data-src').extract(), '')
            shop_url = getXpathFirst(each.xpath('div[2]/div[1]/a/@href').extract(), None)
            item['detail_url'] = shop_url
            if shop_url:
                item['id'] = shop_url.split('/')[-1]
            item['city'] = city
            item['classfy'] = classfy
            item['region'] = region
            # Bug fix: yield EVERY shop item.  The original appended items
            # to an unused list and had a single ``yield item`` after the
            # loop, so only the last shop on each page was ever emitted.
            yield item
        # 发送新的url请求加入待爬队列,并调用回调函数 self.parse
        page = int(curpage) + 1
        url = re.sub('p(\d+)', 'p' + str(page), response.url)
        yield scrapy.Request(url, callback=self.parse)
| myhearter/dianping | crawler/dianping/spiders/merchants.py | Python | mit | 3,531 |
# -*- coding: utf-8 -*-
import os
import settings
import json
from kivy.app import App
from kivy.properties import (StringProperty, BooleanProperty,
ObjectProperty, NumericProperty)
from kivy.uix.button import Button
from kivy.uix.togglebutton import ToggleButton
from kivy.uix.label import Label
from kivy.uix.popup import Popup
from kivy.uix.bubble import BubbleButton
from kivy.uix.floatlayout import FloatLayout
from kivy.uix.textinput import TextInput
from kivy.uix.actionbar import ActionButton, ActionItem
from kivy.clock import Clock
from listitems import ChangesItem, RepoHistoryItem, BranchesItem
from boxlayouts import HistoryBox, SettingsBox, ChangesBox, BranchesBox
from main import RepoWatcher, ConfirmPopup, RemotePopup, MyScatter, CustomLabel
from bubbles import NewSwitchRename
from shortcuts import (create_popup, run_syscall, diff_formatter,
striptags, findparent)
class CustomActionButton(Button, ActionItem):
    """Action-bar entry of the theme menu: pressing selects the colour
    schema named by the button label; releasing persists it and restarts
    the application so the new theme is applied."""
    theme = StringProperty("")  # selected schema name (upper-cased label)
    ext = StringProperty("")    # icon-font markup marking the active entry

    def __init__(self, *args, **kwargs):
        super(CustomActionButton, self).__init__(*args, **kwargs)
        self.ext = u"[font=%s][/font]" % settings.KIVY_ICONIC_FONT_PATH
        try:
            theme = settings.DB.store_get('theme')
        except Exception:
            # Fix: narrowed from a bare ``except:``.  Still best-effort --
            # no stored theme (e.g. first run) falls back to the default.
            theme = ""
        self.theme = theme
        self.size_hint = (None, None)
        self.width = '200dp'
        self.height = '20dp'
        self.text_size = self.width, None
        self.padding_x = '5dp'
        self.markup = True
        self.shorten = True
        self.background_normal = ""
        self.background_down = ""
        self.background_color = settings.COLOR2

    def on_press(self):
        # Fix: list comprehension instead of ``filter`` -- on Python 3 a
        # filter object is always truthy, which would break the guard.
        themes = [schema for schema in settings.COLOR_SCHEMAS
                  if schema['name'] == self.text.strip().upper()]
        if themes:
            self.theme = self.text.strip().upper()

    def on_release(self):
        # Remove the active-theme marker from every sibling entry, then
        # mark this one.
        for ch in self.parent.children:
            ch.text = ch.text.replace('...', '').rstrip()
            ch.text = ch.text.replace(self.ext, ' ')
        self.text += ' ...'
        settings.DB.store_put('theme', self.theme)
        settings.DB.store_sync()
        # Restart shortly after release so the new colours take effect.
        Clock.schedule_once(lambda dt: App.get_running_app().restart(), .5)
class CustomTextInput(TextInput):
    """Text input of the branches panel.  Depending on ``self.name`` it
    either creates a new branch ('new') or renames the active one ('edit');
    committing (press or the enter key) runs the matching git command and
    refreshes the branch list."""

    def __del__(self, *args, **kwargs):
        pass

    def _commit_branch_action(self):
        # Shared implementation of on_press/on_text_validate (the two
        # handlers previously duplicated this entire body).
        branches = findparent(self, BranchesBox)
        root = findparent(self, RepoWatcher)
        branches.newbranch = False
        branches.rename = False
        branches.readymerge = False
        path = branches.repo_path
        if path:
            text = self.text.strip()
            if self.name == "new":
                os.chdir(path)
                out = run_syscall('git checkout -b %s' % text)
            elif self.name == "edit":
                current = root.get_activebranch(path)
                os.chdir(path)
                out = run_syscall('git branch -m %s %s' % (current, text))
            branches.branches_check(path)

    def on_press(self):
        self._commit_branch_action()

    def on_text_validate(self):
        """Keyboard 'enter' handler; same semantics as a press: create a
        new branch or rename the current one, per ``self.name``."""
        self._commit_branch_action()
class MergeButton(Button):
    """Button that toggles the merge view of the branches panel."""

    def on_press(self):
        # Flip the merge-view flag and redraw the branch list.
        branches_box = findparent(self, BranchesBox)
        branches_box.readymerge = not branches_box.readymerge
        branches_box.branches_check(branches_box.repo_path)

    def on_release(self):
        pass
class MoveButton(Button):
    # Drag handle used to drop a branch name onto the merge "source" or
    # "target" slot of the BranchesBox.
    is_pressed = BooleanProperty(False)  # True while a drag is in progress
    cx = NumericProperty()  # last known touch x position
    cy = NumericProperty()  # last known touch y position
    app = ObjectProperty(None)  # label widget holding the branch name
    scatter = ObjectProperty(None)  # unused; see commented-out code below
    def on_press(self):
        # root = findparent(self, BranchesItem)
        # if not root:
        #     root = findparent(self, BranchesBox)
        # s = MyScatter(name = root.repobranchlabel.text,
        #               sha = root.repobranchsha.text,
        #               text = root.repobranchtext.text,
        #               date = root.branchdate.text)
        # root = findparent(self, BranchesBox)
        # root.add_widget(s)
        # self.scatter = s
        # Remember which branch label is being dragged and start tracking.
        self.app = self.parent.repobranchlabel
        self.cx = self.cy = 0
        self.is_pressed = True
    def on_release(self):
        root = findparent(self, BranchesBox)
        # try: root.remove_widget(self.scatter)
        # except: pass
        sx, sy = root.source.pos
        tx, ty = root.target.pos
        is_item = bool(findparent(self, BranchesItem))
        if is_item:
            # Item rows live inside nested containers; translate the touch
            # position into BranchesBox coordinates.
            # NOTE(review): the 10px offset and the six-level parent chain
            # look empirically tuned to the widget tree -- confirm against
            # the kv layout before changing.
            self.cx += 10
            self.cy += self.parent.parent.parent.parent.parent.parent.height - \
                self.parent.parent.parent.height
        # 100x45 is assumed to be the drop-slot hit area -- TODO confirm.
        if sx <= self.cx <= sx + 100 and sy <= self.cy <= sy + 45:
            root.source.text = striptags(self.app.text).strip()
        elif tx <= self.cx <= tx + 100 and ty <= self.cy <= ty + 45:
            root.target.text = striptags(self.app.text).strip()
        if root.source.text and root.target.text:
            # Both slots filled: build the "Merging X into Y (N Commits)"
            # info line from the commits separating the two branches.
            info = root.mergeinfolabel.text
            info = "[color=%s]" % settings.HEX_COLOR1
            info += "Merging [font=%s]%s[/font] " % (
                settings.KIVY_DEFAULT_BOLD_FONT_PATH, root.source.text)
            info += "into [font=%s]%s[/font]" % (
                settings.KIVY_DEFAULT_BOLD_FONT_PATH, root.target.text)
            os.chdir(root.repo_path)
            out = run_syscall("git log --oneline %s...%s" % (root.source.text,
                                                             root.target.text))
            info += " [size=10][color=909090](%s Commits)[/color][/size]"\
                % len(out.strip().
                      split('\n'))
            root.mergeinfolabel.text = info
            root.mergeinfolabel.halign = 'left'
        self.is_pressed = False
    def on_touch_move(self, touch):
        # Track the touch while dragging so on_release knows the drop point.
        if hasattr(self, 'is_pressed') and self.is_pressed:
            self.app = self.parent.repobranchlabel
            self.cx, self.cy = touch.pos
class PushUnpushButton(Button):
    """
    PushUnpushButton; sends a specific branch to (or removes it from) a
    remote git server. There can be multiple remotes, so for a push the
    user selects one of them through a RemotePopup.
    """
    # Branch currently being pushed/unpushed; set in on_press, read in on_push.
    branch_name = StringProperty("")

    def __del__(self, *args, **kwargs):
        pass

    def on_press(self):
        """
        this default method is written to handle pre_action operations such
        finding out the branch name in other way to say which branch is
        selected to send.
        """
        # The button may live inside a BranchesItem row or directly in the
        # BranchesBox edit area; fall back to the latter on failure.
        try:
            root = findparent(self, BranchesItem)
            branch = striptags(root.repobranchlabel.text)
        except:
            root = findparent(self, BranchesBox)
            branch = striptags(root.repobranchedit.children[1].text)
        self.branch_name = branch
        root = findparent(self, BranchesBox)
        text = striptags(self.text)
        if root.repo_path:
            if text in ["Push", "RePush"]:
                # NOTE(review): this initial empty list is immediately
                # overwritten below; it is dead code.
                remotes = []
                os.chdir(root.repo_path)
                out = run_syscall('git remote -v')
                # Parse "name\turl (push)" lines into name/path dicts.
                remotes = map(lambda x:
                              {'name': x.split("\t")[0].strip(),
                               'path': x.split("\t")[1].split(" (push)")[0].strip()},
                              filter(lambda x: x.find(" (push)") != -1,
                                     out.split('\n')))
                # Ask the user which remote to push to; the actual push
                # happens in on_push once the popup answers.
                content = RemotePopup(branch=branch, remotes=remotes)
                content.bind(on_push=self.on_push)
                self.popup = Popup(title="Which remote?",
                                   content=content,
                                   size_hint=(None, None),
                                   size=(300, 400),
                                   auto_dismiss= False)
                self.popup.open()
            else:
                # Unpush: find the remote that actually tracks this branch
                # and delete the remote branch ("git push <remote> :<branch>").
                os.chdir(root.repo_path)
                out = run_syscall('git branch -r').split('\n')
                remotes = map(lambda x: x.strip(),
                              run_syscall('git remote').split('\n'))
                possiblities = map(lambda x: "%s/%s" % (x, branch), remotes)
                possible = filter(lambda x: x in possiblities,
                                  map(lambda x: x.strip(), out))
                if possible:
                    remote = possible[0].rsplit(branch, 1)[0].rstrip('/')
                    out = run_syscall('git push %s :%s' % (remote, branch))
                root.branches_check(root.repo_path)

    def on_push(self, instance, remote_name):
        """
        on_push, to handle git push operation,
        just the remote name is the key.
        """
        root = findparent(self, BranchesBox)
        os.chdir(root.repo_path)
        remote_name = striptags(remote_name)
        out = run_syscall('git push %s %s' % (remote_name, self.branch_name))
        self.popup.dismiss()
        # Refresh the branches screen so push-state labels update.
        root.branches_check(root.repo_path)
class RenameButton(Button):
    """Toggles the branch-rename mode on the branches screen."""

    def on_press(self):
        pass

    def on_release(self):
        """Flip BranchesBox.rename, clear the other mode flags and
        refresh the branch list."""
        box = findparent(self, BranchesBox)
        box.rename = not box.rename
        box.newbranch = False
        box.readymerge = False
        box.branches_check(box.repo_path)
class CustomBubbleButton(BubbleButton):
    """Button inside the branch context bubble (Switch to.. / Delete /
    New branch / Rename); dispatches on its own label text."""
    # Confirmation popup kept so the answer callback can dismiss it.
    popup = None

    def __del__(self, *args, **kwargs):
        pass

    def __init__(self, *args, **kwargs):
        super(CustomBubbleButton, self).__init__(*args, **kwargs)
        self.bind(on_press=self.on_press)

    def on_answer_delete(self, instance, answer):
        """
        on_answer_delete; is for handling the answer of deletion operation.
        """
        # NOTE(review): comparing repr(answer) to "'yes'" works only for the
        # plain string 'yes'; a direct equality test would be clearer.
        if repr(answer) == "'yes'":
            root = findparent(self, BranchesBox)
            # The branch name is parsed back out of the popup text
            # ("to delete '<branch>'").
            branch = self.popup.content.text.split('to delete')[1].\
                replace("'", "").strip()
            root.remove_branch(root.repo_path, branch)
            self.popup.dismiss()
            root.branches_check(root.repo_path)
        else:
            self.popup.dismiss()

    def on_press(self, *args):
        pass

    def on_release(self, *args):
        """
        on_release; default function is for triggering related operation,
        such for renaming setting root's rename attribute or
        newbranch is same also. For deletion operation being sure of action,
        and switching from branch to branch.
        """
        root = findparent(self, BranchesBox)
        if root.repo_path:
            if self.text == "Switch to..":
                try:
                    branch = findparent(self, BranchesItem)
                    branch_name = striptags(
                        branch.repobranchlabel.text).strip()
                    os.chdir(root.repo_path)
                    out = run_syscall("git checkout %s" % branch_name)
                    root.branches_check(root.repo_path)
                except IndexError:
                    popup = create_popup('Error Occured', Label(text=''))
                    popup.open()
                finally:
                    # Whatever happened, leave all edit modes and restore cwd.
                    root.rename = False
                    root.newbranch = False
                    root.readymerge = False
                    os.chdir(settings.PROJECT_PATH)
            elif self.text == "Delete":
                # Ask for confirmation; actual removal happens in
                # on_answer_delete.
                branch = findparent(self, BranchesItem)
                branch_name = striptags(branch.repobranchlabel.text).strip()
                content = ConfirmPopup(text="to delete '%s'" % branch_name)
                content.bind(on_answer=self.on_answer_delete)
                self.popup = Popup(title="Are you sure?",
                                   content=content,
                                   size_hint=(None, None),
                                   size=(400, 150),
                                   auto_dismiss= False)
                self.popup.open()
            else:
                # "New branch": toggle the new-branch edit mode.
                root.rename = False
                root.readymerge = False
                root.newbranch = not root.newbranch
                root.branches_check(root.repo_path)
class BranchMenuButton(ToggleButton):
    """Per-branch menu toggle that shows/hides a NewSwitchRename bubble
    next to the button; only one bubble may be open at a time."""

    def __init__(self, *args, **kwargs):
        super(BranchMenuButton, self).__init__(*args, **kwargs)
        self.bind(on_release=self.show_bubble)

    def __del__(self, *args, **kwargs):
        pass

    def remove_bubbles(self):
        """
        remove_bubbles for remove previously activated bubble widgets
        """
        root = findparent(self, BranchesBox)
        # Collect every menu button: the box-level one plus one per
        # BranchesItem row in the branch list.
        listed_buttons = set([root.branchmenubutton])
        for branchitem in root.branchlist.children[0].children[0].children:
            # Identify BranchesItem rows by their class name string.
            if str(branchitem.__class__).\
                    split('.')[1].replace('\'>', '') == 'BranchesItem':
                listed_buttons.add(branchitem.children[1].children[1])
        # Close every bubble except the one belonging to this button.
        for bi in listed_buttons:
            if bi != self and hasattr(bi, 'bubble'):
                bi.remove_widget(bi.bubble)
                delattr(bi, 'bubble')
                bi.state = 'normal'

    def show_bubble(self, *args):
        """
        show_buble; handle the displaying bubble around
        related button in this case on left
        """
        if not hasattr(self, 'bubble'):
            # Which actions are disabled depends on where the button lives:
            # item rows cannot create/rename, the box header cannot
            # delete/switch.
            item = findparent(self, BranchesItem)
            newbranch_d = rename_d = switch_d = delete_d = False
            if item:
                newbranch_d = rename_d = True
            else:
                delete_d = switch_d = True
            self.bubble = bubble = NewSwitchRename(
                newbranch_disabled=newbranch_d,
                switch_disabled=switch_d,
                rename_disabled=rename_d,
                delete_disabled=delete_d)
            # Place the bubble to the left of the button (fixed 222px offset).
            bubble.x = self.x - 222
            bubble.y = self.y
            self.add_widget(bubble)
            self.remove_bubbles()
        else:
            # Second click: close our own bubble.
            self.remove_widget(self.bubble)
            delattr(self, 'bubble')
            self.state = 'normal'
class HistoryButton(Button):
    """
    HistoryButton; to manage user input on history screen, the button
    is used on list items of repository logs one box of log data
    contains at least four button.
    """

    def __del__(self, *args, **kwargs):
        pass

    def on_press(self):
        """
        on_press; default function name, for button classes on kivy.
        The press action trigger changing color of buttons in selected box.
        """
        root = findparent(self, RepoHistoryItem)
        # Deselect every other history item first.
        for l in root.parent.children:
            if hasattr(l, 'pressed') and l.pressed:
                l.pressed = False
        # Darken this item's four buttons ('=777777' grey -> '=000000' black).
        button1 = root.button1
        button2 = root.button2
        button3 = root.button3
        button4 = root.button4
        button1.text = button1.text.replace('=777777', '=000000')
        button2.text = button2.text.replace('=777777', '=000000')
        button3.text = button3.text.replace('=777777', '=000000')
        button4.text = button4.text.replace('=777777', '=000000')
        root.pressed = True
        # Second pass over all items: darken the selected one, dim the rest.
        # NOTE(review): for the selected item this repeats the replacements
        # already done above; redundant but harmless.
        for l in root.parent.children:
            if hasattr(l, 'button1'):
                buttons = [l.button1, l.button2, l.button3, l.button4]
                if hasattr(l, 'pressed') and l.pressed:
                    for b in buttons:
                        b.text = b.text.replace('=777777', '=000000')
                else:
                    for b in buttons:
                        b.text = b.text.replace('=000000', '=777777')

    def on_release(self):
        """
        on_release; another default function, which used to handle
        actual action. diff screen update operation is triggered with
        this buttons' release actions.
        """
        # Copy branch path and log id from the enclosing item, then ask the
        # history screen to load the corresponding diff.
        sub_root = findparent(self, RepoHistoryItem)
        self.branch_path = sub_root.branch_path
        self.branch_logid = sub_root.branch_logid
        root = findparent(self, HistoryBox)
        root.load_diff(self.branch_path, self.branch_logid)
class MenuButton(Button):
    """
    MenuButton; the buttons of menu items as history, changes,
    branches, settings or adding repo are all menubutton classes
    :popup: yes-no answer should be taken if removing an repository
            action is on the line to be sure
    :repo_path: based answer of user to delete a repository this
            path will be used.
    """
    popup = None
    repo_path = ""

    def __del__(self, *args, **kwargs):
        pass

    def make_pressed(self):
        """
        make_pressed, is actually handle the display which button is pressed.
        """
        change_all = False
        # Only the regular menu buttons (not add/remove repo) take part in
        # the exclusive highlight behaviour.
        if (self.parent.repoadd_button or
                self.parent.reporemove_button) and \
                self.uid not in [self.parent.repoadd_button.uid,
                                 self.parent.reporemove_button.uid]:
            self.background_color = settings.COLOR2
            self.pressed = False
            change_all = True
        if change_all:
            # Highlight this button, reset all of its siblings.
            buttons = self.parent.parent.menu_list.children
            for but in buttons:
                if but.uid != self.uid:
                    but.background_color = settings.COLOR2
                    but.pressed = False
                else:
                    but.background_color = settings.COLOR3
                    but.pressed = True

    def on_press(self):
        """
        on_press; this default function, handle to show which button is select
        visually as changing background_color and on back side as changing
        pressed attribute.
        """
        self.make_pressed()
        # Regular menu entries switch the active screen by name.
        if self.name not in ["add repo", "remove repo"]:
            root = findparent(self, RepoWatcher)
            root.show_kv(self.name)()

    def on_release(self):
        """
        on_release, default function is just handle the removing selected
        repository from the list, if there is any selection. Before
        deletion should ask to be sure to prevent any unwanted actions.
        """
        if self.uid == self.parent.reporemove_button.uid:
            repository_list = self.parent.parent.repolstview
            repos = repository_list.children[0].children[0].children
            # Find the currently selected repository row (if any) and ask
            # for confirmation; the removal happens in on_answer.
            for repo in repos:
                if repo.children[1].children[0].pressed:
                    self.repo_path = repo.repo_path
                    content = ConfirmPopup(text="to delete repository '%s'" %
                                                repo.repo_name)
                    content.bind(on_answer=self.on_answer)
                    self.popup = Popup(title="Are you sure?",
                                       content=content,
                                       size_hint=(None, None),
                                       size=(400, 150),
                                       auto_dismiss= False)
                    self.popup.open()

    def on_answer(self, instance, answer):
        # NOTE(review): repr-based comparison only matches the plain string
        # 'yes'; direct equality would be clearer.
        if repr(answer) == "'yes'":
            root = findparent(self, RepoWatcher)
            root.remove_repo(self.repo_path)
        self.popup.dismiss()
class AddRepoButton(Button):
    """
    AddRepoButton; if want to add a repository, this button is actually pressed.
    """

    def __del__(self, *args, **kwargs):
        pass

    def on_press(self):
        """
        on_press; default function is for handle import operation of
        selected repository, if it is actually a git repository folder,
        .git file is checking otherwise error is displaying as popup.
        """
        selection = None
        popup = None
        text = striptags(self.text)
        # Two entry points: the manual path box ("Add Repo") or the file
        # chooser list ("Choose").
        if text == "Add Repo":
            selection = self.parent.parent.repoaddbox.repopath.text
            self.parent.parent.popup.dismiss()
        elif text == "Choose" and self.parent.listview.selection:
            selection = self.parent.listview.selection[0]
        if selection:
            # First run: make sure the storage file/directory exists.
            directory = os.path.dirname(settings.REPOFILE)
            if not os.path.exists(directory):
                os.makedirs(directory)
                settings.DB.store_put('repos', [])
                settings.DB.store_sync()
            try:
                data = settings.DB.store_get('repos')
                settings.DB.store_sync()
            except (TypeError, ValueError):
                data = []
            if os.path.exists(selection):
                os.chdir(selection)
                # A folder qualifies as a repository iff it contains ".git".
                if os.path.exists(".git"):
                    out = run_syscall("git rev-parse --show-toplevel")
                    repo_name = out.rsplit('/', 1)[1]
                    repo_path = selection
                    # Skip duplicates (same name and path already stored).
                    if not filter(lambda x: x["name"] == repo_name and
                                  x["path"] == repo_path,
                                  data):
                        data.append({"name": repo_name,
                                     "path": repo_path})
                    else:
                        popup = create_popup('Already Listed', Label(text=''))
                    settings.DB.store_put('repos', data)
                    settings.DB.store_put('current_repo', "")
                    settings.DB.store_sync()
                else:
                    popup = create_popup(
                        'Error', Label(text='Invalid repo path'))
            else:
                popup = create_popup('Error', Label(text='Invalid repo path'))
        else:
            popup = create_popup('Error', Label(text='Invalid repo path'))
        if popup:
            popup.open()
        # Always restore the working directory.
        os.chdir(settings.PROJECT_PATH)
class RepoDetailButton(Button):
    """
    RepoDetailButton; repository list is using this class,
    all repository on the list is clickable as button
    """

    def __del__(self, *args, **kwargs):
        pass

    def on_press(self):
        """
        on_press; default function is for just displaying
        which button/repository is pressed
        """
        pressed = self.parent.parent.repobutton.children
        pressed_area = self.parent.parent
        # Every other repository row in the list.
        button_list = filter(lambda x: x != pressed_area,
                             self.parent.parent.parent.children)
        # Highlight the selected row...
        for child in pressed:
            child.background_color = settings.COLOR3
            child.pressed = True
            if hasattr(child, 'textcolor'):
                child.textcolor = settings.HEX_COLOR3
        # ...and reset the others.
        for child in button_list:
            if child != pressed_area:
                child.repobut.background_color = settings.COLOR2
                child.repobut.pressed = False
                child.refreshbut.background_color = settings.COLOR2
                child.refreshbut.textcolor = settings.HEX_COLOR2

    def on_release(self):
        """
        on_release; default function is for displaying repository
        detail based on the current screen.
        """
        # Persist the selection, then refresh whichever screen is active.
        settings.DB.store_put('current_repo', striptags(self.text).strip())
        settings.DB.store_sync()
        root = findparent(self, RepoWatcher)
        screen = root.screen_manager.children[0].children[0].children[0]
        if root.history_button.pressed:
            screen.check_history(self.repo_path)
        elif root.changes_button.pressed:
            screen.changes_check(self.repo_path)
        elif root.branches_button.pressed:
            screen.branches_check(self.repo_path)
        elif root.settings_button.pressed:
            screen.settings_check(self.repo_path)
        os.chdir(settings.PROJECT_PATH)
        # Re-point the filesystem watcher at the newly selected repo.
        root.observer_restart(self.repo_path, root)
class ChangesDiffButton(Button):
    """
    ChangesDiffButton; to show the file by file diffs this class is
    used to display changed files on a list
    """

    def __del__(self, *args, **kwargs):
        pass

    def on_press(self):
        """
        on_press; handle to display which file pressed.
        """
        root = findparent(self, ChangesItem)
        # Deselect any previously selected file first.
        for l in root.parent.children:
            if hasattr(l, 'pressed') and l.pressed:
                l.pressed = False
        # Darken this file's label ('777777' grey -> '000000' black), dim
        # all the other rows.
        root.filename.text = root.filename.text.replace('777777', '000000')
        root.pressed = True
        for l in root.parent.children:
            if hasattr(l, 'pressed') and not l.pressed:
                l.filename.text = l.filename.text.replace('000000', '777777')

    def on_release(self):
        """
        on_release; diff datas are taken and placing the area.
        """
        os.chdir(self.repo_path)
        out, message, commit, outhor, date = diff_formatter(
            run_syscall('git diff %s ' % self.file_name))
        screen = findparent(self, ChangesBox)
        # NOTE(review): striptags() removes the color markup that was just
        # wrapped around the output, so plain text is shown -- confirm
        # whether the markup was meant to survive.
        screen.localdiffarea.text = striptags("[color=000000]%s[/color]" % out)
        os.chdir(settings.PROJECT_PATH)
class CommitButton(Button):
    """
    CommitButton, is for making difference between
    commit and commint&push button.
    """

    def __del__(self, *args, **kwargs):
        pass

    def on_press(self):
        """
        on_press; default function is for handling the committion or
        commit with push operation checking for empty description
        and commit lists handling if anything is wrong popup is
        displayed with message.
        """
        # Push as well if the companion toggle is down.
        also_push = self.parent.commitpushbutton.state == 'down'
        description = self.parent.parent.parent.parent.message.text
        commits = self.parent.parent.parent.parent.uncommitted.\
            children[0].children[0].children
        if not commits:
            popup = create_popup('Commiting...',
                                 Label(text='There is nothing to commit.'))
            popup.open()
        elif not description:
            popup = create_popup('Commiting...',
                                 Label(text='Commit message is required.'))
            popup.open()
        else:
            # Collect the file paths whose checkbox is ticked.
            commit_paths = []
            repopath = ""
            for c in filter(lambda x: x.__class__ == ChangesItem().__class__,
                            commits):
                checkbox = c.changesgroup.checkbox
                filepath = "%s/%s" % (c.changesgroup.filename.repo_path,
                                      c.changesgroup.filename.file_name)
                repopath = c.changesgroup.filename.repo_path
                if checkbox.active:
                    commit_paths.append(filepath)
            if commit_paths:
                os.chdir(repopath)
                out = run_syscall('git add %s' % ' '.join(commit_paths))
                os.chdir(repopath)
                out = run_syscall('git commit -m "%s"' % description)
                if also_push:
                    root = findparent(self, RepoWatcher)
                    # get remotes
                    remotes = run_syscall("git remote").strip().split('\n')
                    # find out already pushed branches.
                    pushed_script = "git for-each-ref --format='%(refname:short)'"
                    pushed_script += " --sort=refname refs/remotes/"
                    pushed_branches = []
                    bulk_data = []
                    # To make multiple times to get the remote of branch.
                    # result of scriipt shuld be kept if the branch is pushed
                    # remote path will be taken from that bulk data list.
                    for remote in remotes:
                        data = run_syscall(pushed_script + remote).strip()
                        bulk_data.extend(map(lambda x: x.strip(),
                                             data.split('\n')))
                        data = map(lambda x: x.strip().rsplit(remote + '/', 1)[1],
                                   filter(lambda x: x.strip(), data.split('\n')))
                        pushed_branches.extend(data)
                    branchname = root.get_activebranch(repopath)
                    if branchname in pushed_branches:
                        # current branch is already pushed then bulk data has
                        # that remote information on the same index.
                        remote = bulk_data[pushed_branches.index(branchname)].\
                            rsplit(branchname, 1)[0].strip("/")
                        os.chdir(repopath)
                        out = run_syscall(
                            'git push %s %s' % (remote, branchname))
                    else:
                        popup = create_popup('Commiting...',
                                             Label(
                                                 text="""Your branch is not yet pushed,
                                                 use branch menu"""))
                        popup.open()
            # Refresh the changes screen after the commit attempt.
            screen = findparent(self, ChangesBox)
            screen.changes_check(repopath)
class CommitandPushButton(ToggleButton):
    """Toggle that relabels the commit button so the user knows whether
    a push will follow the commit."""

    def __del__(self, *args, **kwargs):
        pass

    def on_press(self):
        """Swap the sibling commit button's caption and width to match
        the toggle state."""
        commit_button = self.parent.commitbutton
        if self.state == 'down':
            commit_button.text = commit_button.text.replace(
                "Commit", "Commit & Push")
            commit_button.width = '122dp'
        else:
            commit_button.text = commit_button.text.replace(
                "Commit & Push", "Commit")
            commit_button.width = '80dp'
class UnPushedButton(Button):
    """
    UnPushedButton, is for reversing the commit into current changes.
    """

    def __del__(self, *args, **kwargs):
        pass

    def on_press(self):
        """
        on_press; default function is for reversing the pressed
        commits and previous ones into current changes.
        Finding the commit number just before the pressed commit
        number then reset that previously committed log id.
        """
        sha = self.sha
        os.chdir(self.path)
        # One abbreviated hash per line, newest first.
        out = run_syscall('git log --oneline --pretty="%h"')
        commitlist = out.split('\n')
        # The parent of the pressed commit is the next list entry.
        # NOTE(review): raises ValueError if ``sha`` is not listed and
        # IndexError when the pressed commit is the oldest one -- confirm
        # whether the UI can ever offer that commit.
        prev_commit = commitlist[commitlist.index(sha) + 1]
        os.chdir(self.path)
        # Soft-reset to the parent, then unstage everything.
        out = run_syscall('git reset --soft %s;git reset HEAD' % prev_commit)
        os.chdir(settings.PROJECT_PATH)

    def on_release(self):
        # Refresh the changes screen so the reverted files show up.
        root = findparent(self, ChangesBox)
        root.changes_check(self.path)
class SettingsButton(Button):
    """Apply a settings change (remote URL or .gitignore content) for the
    currently selected repository.

    Which change is requested is decided by identity: the SettingsBox
    exposes ``remotebutton`` and ``ignorebutton`` and this instance
    compares itself against them.
    """

    def __del__(self, *args, **kwargs):
        pass

    def on_press(self):
        """Write the edited value back to the repository, refresh the
        settings screen and restore the working directory."""
        root = findparent(self, SettingsBox)
        if root.remotebutton == self:
            text = root.remote_url.text
            os.chdir(root.repo_path)
            # FIX: the subcommand is "set-url"; "set_url" is not a valid
            # git-remote subcommand, so the original call always failed.
            out = run_syscall('git remote set-url origin %s' % text)
        elif root.ignorebutton == self:
            text = root.gitignore.text
            os.chdir(root.repo_path)
            # NOTE(review): the text is interpolated into a shell command
            # unquoted; acceptable for a local GUI but proper quoting would
            # be safer.
            out = run_syscall('echo "%s" > .gitignore' % text)
        root.settings_check(root.repo_path)
        os.chdir(settings.PROJECT_PATH)
class SyncButton(Button):
    """
    SyncButton; repositories in generally needs to update this
    button is handle that operation, by user request as pressing.
    """

    def __del__(self, *args, **kwargs):
        pass

    def on_press(self):
        """
        on_press; default function is handle all sync operation
        because of there is no need to show button pressing
        operation at the end popup is displayed.
        """
        root = findparent(self, RepoWatcher)
        cur_branch = root.get_activebranch(self.path)
        os.chdir(self.path)
        # Stash local work, fetch+pull the active branch from origin, then
        # restore the stash -- all in one shell invocation.
        sys_call = "git stash clear;git stash;"
        sys_call += "git fetch origin %(cur_branch)s;"
        sys_call += "git pull origin %(cur_branch)s;"
        sys_call += "git stash pop"
        out = run_syscall(sys_call % {'cur_branch': cur_branch})
        os.chdir(settings.PROJECT_PATH)
        # NOTE(review): the popup reports success unconditionally; the
        # command output is not inspected for failures.
        popup = create_popup('Syncing...', Label(text='Operation complete.'))
        popup.open()
class DiffButton(Button):
    """
    DiffButton; for more detailed view on history screen log's
    changed files diff outputs displays on an other screen,
    this button is used for that screen changing.
    """

    def __del__(self, *args, **kwargs):
        pass

    def select(self, *args, **kwargs):
        """
        select; default function required select function as default
        """
        pass

    def deselect(self, *args, **kwargs):
        """
        deselect; default function required deselect function as default
        """
        pass

    def on_press(self):
        """
        on_press; default function is handle the screen changing operation
        """
        root = findparent(self, RepoWatcher)
        root.show_kv('FileDiff')()

    def on_release(self):
        """
        on_release; handle the data of new screen with selected
        file on specificly chosen log id
        """
        root = findparent(self, RepoWatcher)
        screen = findparent(self, HistoryBox)
        sha = screen.commitlabel.text
        current_screen = root.screen_manager.current_screen
        filediffbox = current_screen.children[0].children[0]
        filediffbox.repo_path = self.repo_path
        # The commit label embeds the sha as "[size=11]<sha>[/size]".
        filediffbox.sha = sha.split("[size=11]")[1].split("[/size]")[0].strip()
        filediffbox.file_path = self.path
        filediffbox.diff = self.parent.textarea.text
| RedXBeard/gitwatcher-ui | buttons.py | Python | mit | 34,985 |
# Add the upper directory (where the nodebox module is) to the search path.
import os, sys; sys.path.insert(0, os.path.join("..",".."))

from nodebox.graphics import *

# This example demonstrates motion tweening and prototype-based inheritance on layers.
# Motion tweening is easy: set the Layer.duration parameter to the amount of seconds
# it should take for transformations to take effect.

# Prototype-based inheritance is used because we were lazy.
# Normally, you create a subclass of layer, give it extra properties (image, color, ...)
# and override its draw() method. The only problem (aside from the repetitive work)
# is Layer.copy(). This creates a copy of the layer with all of its properties,
# but NOT the custom properties we added in a subclass. So we'd have to implement
# our own copy() method for each custom layer that we want to reuse.
# Layers can also use dynamic, prototype-based inheritance, where layers are "inherited"
# instead of subclassed. Custom properties and methods can be set with Layer.set_property()
# and Layer.set_method(). This ensures that they will be copied correctly.

# Create a layer that draws an image, and has the same dimensions as the image.
# It transforms from the center, and it will take one second for transformations to complete.
creature = Layer.from_image("creature.png", x=250, y=250, origin=CENTER, duration=1.0)

# Add a new on_mouse_press handler to the prototype:
def whirl(layer, mouse):
    """Randomly nudge the layer's position, scale, rotation and opacity.

    Because Layer.duration is 1.0, every change is tweened over a second
    instead of being applied instantly.
    """
    layer.x += random(-100, 100)
    layer.y += random(-100, 100)
    layer.scaling += random(-0.2, 0.2)
    layer.rotation += random(-360, 360)
    layer.opacity = random(0.5, 1.0)
creature.set_method(whirl, "on_mouse_press")

# Add a number of copies to the canvas.
# Layer.copy() carries the whirl handler along because it was attached
# via set_method().
for i in range(4):
    canvas.append(creature.copy())

canvas.size = 500, 500
canvas.run()
| nodebox/nodebox-opengl | examples/09-layer/03-tween.py | Python | bsd-3-clause | 1,822 |
from rest_framework import viewsets
from rest_framework_extensions.mixins import DetailSerializerMixin
from .models import Comment
from .serializers import CommentSerializer, CommentDetailSerializer
class CommentViewSet(DetailSerializerMixin, viewsets.ReadOnlyModelViewSet):
    """Read-only comments endpoint; DetailSerializerMixin swaps in
    CommentDetailSerializer for the retrieve action."""
    serializer_class = CommentSerializer
    serializer_detail_class = CommentDetailSerializer
    queryset = Comment.objects.all()
class CommentWithoutDetailSerializerClassViewSet(DetailSerializerMixin, viewsets.ReadOnlyModelViewSet):
    """Test fixture: uses the mixin without serializer_detail_class to
    exercise the mixin's missing-attribute error path."""
    serializer_class = CommentSerializer
    queryset = Comment.objects.all()
class CommentWithIdTwoViewSet(DetailSerializerMixin, viewsets.ReadOnlyModelViewSet):
    """Test fixture: both list and detail restricted to the comment
    with id=2."""
    serializer_class = CommentSerializer
    serializer_detail_class = CommentDetailSerializer
    queryset = Comment.objects.filter(id=2)
class CommentWithIdTwoAndIdOneForDetailViewSet(DetailSerializerMixin, viewsets.ReadOnlyModelViewSet):
    """Test fixture: list sees id=2 while detail requests use the
    separate queryset_detail restricted to id=1."""
    serializer_class = CommentSerializer
    serializer_detail_class = CommentDetailSerializer
    queryset = Comment.objects.filter(id=2)
    queryset_detail = Comment.objects.filter(id=1)
class CommentWithDetailSerializerAndNoArgsForGetQuerySetViewSet(DetailSerializerMixin, viewsets.ModelViewSet):
    """
    For regression tests https://github.com/chibisov/drf-extensions/pull/24
    """
    serializer_class = CommentSerializer
    serializer_detail_class = CommentDetailSerializer
    queryset = Comment.objects.all()
    queryset_detail = Comment.objects.filter(id=1)

    def get_queryset(self):
        # Deliberately a plain no-argument super() call: the regression was
        # the mixin passing unexpected arguments to get_queryset().
        return super().get_queryset()
| chibisov/drf-extensions | tests_app/tests/functional/mixins/detail_serializer_mixin/views.py | Python | mit | 1,569 |
#!/usr/bin/env python
import os, sys, codecs
if __name__ == "__main__":
    # Pick the Django settings module from the JAMJAR_ENV environment
    # variable; only the three known environments are accepted.
    jamjar_env = os.environ.get('JAMJAR_ENV', None)
    if jamjar_env in ['prod', 'dev', 'test']:
        settings_module = "jamjar.settings.{}".format(jamjar_env)
        # setdefault: an explicitly exported DJANGO_SETTINGS_MODULE wins.
        os.environ.setdefault("DJANGO_SETTINGS_MODULE", settings_module)
    else:
        raise RuntimeError("No acceptable JAMJAR_ENV specified! Given: {}".format(jamjar_env))
    # Imported after the settings module is configured.
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
| projectjamjar/masonjar | jamjar/manage.py | Python | mit | 523 |
"""Implementation of the encoder of the Transformer model.
Described in Vaswani et al. (2017), arxiv.org/abs/1706.03762
"""
# pylint: disable=unused-import
from typing import Set, Optional, List
# pylint: enable=unused-import
import math
import tensorflow as tf
from typeguard import check_argument_types
from neuralmonkey.attention.base_attention import (
Attendable, get_attention_states, get_attention_mask)
from neuralmonkey.dataset import Dataset
from neuralmonkey.decorators import tensor
from neuralmonkey.attention.scaled_dot_product import attention
from neuralmonkey.logging import log
from neuralmonkey.model.model_part import FeedDict, ModelPart
from neuralmonkey.model.stateful import (TemporalStateful,
TemporalStatefulWithOutput)
from neuralmonkey.nn.utils import dropout
from neuralmonkey.tf_utils import get_variable, layer_norm
def position_signal(dimension: int, length: tf.Tensor) -> tf.Tensor:
    """Return the sinusoidal position encoding of Vaswani et al. (2017).

    The result has shape ``(1, length, dimension)`` so it can be added
    (broadcast over the batch axis) to a batch of embedded sequences.
    """
    # Code simplified and copied from github.com/tensorflow/tensor2tensor
    # TODO write this down on a piece of paper and understand the code and
    # compare it to the paper
    positions = tf.to_float(tf.range(length))

    num_timescales = dimension // 2

    # Geometric progression of wavelengths from 2*pi up to 10000*2*pi.
    # see: github.com/tensorflow/tensor2tensor/blob/v1.5.5/tensor2tensor/
    # layers/common_attention.py#L425
    log_timescale_increment = math.log(1.0e4) / (num_timescales - 1)
    inv_timescales = tf.exp(tf.range(num_timescales, dtype=tf.float32)
                            * -log_timescale_increment)

    # Outer product: (length, num_timescales) phase matrix.
    scaled_time = tf.expand_dims(positions, 1) * tf.expand_dims(
        inv_timescales, 0)

    # First half of the channels is sin, second half is cos; pad one zero
    # channel when ``dimension`` is odd.
    signal = tf.concat([tf.sin(scaled_time), tf.cos(scaled_time)], axis=1)
    signal = tf.pad(signal, [[0, 0], [0, tf.mod(dimension, 2)]])
    signal = tf.reshape(signal, [1, length, dimension])

    return signal
class TransformerLayer(TemporalStateful):
    """Immutable pairing of one Transformer layer's hidden states with
    the corresponding temporal (padding) mask."""

    def __init__(self, states: tf.Tensor, mask: tf.Tensor) -> None:
        self._hidden_states = states
        self._hidden_mask = mask

    @property
    def temporal_states(self) -> tf.Tensor:
        """Hidden states tensor of this layer."""
        return self._hidden_states

    @property
    def temporal_mask(self) -> tf.Tensor:
        """Mask matching ``temporal_states`` along the time axis."""
        return self._hidden_mask
# pylint: disable=too-many-instance-attributes
class TransformerEncoder(ModelPart, TemporalStatefulWithOutput):
    # pylint: disable=too-many-arguments,too-many-locals
    def __init__(self,
                 name: str,
                 input_sequence: TemporalStateful,
                 ff_hidden_size: int,
                 depth: int,
                 n_heads: int,
                 dropout_keep_prob: float = 1.0,
                 attention_dropout_keep_prob: float = 1.0,
                 target_space_id: int = None,
                 use_att_transform_bias: bool = False,
                 use_positional_encoding: bool = True,
                 input_for_cross_attention: Attendable = None,
                 n_cross_att_heads: int = None,
                 save_checkpoint: str = None,
                 load_checkpoint: str = None) -> None:
        """Create an encoder of the Transformer model.

        Described in Vaswani et al. (2017), arxiv.org/abs/1706.03762

        Arguments:
            input_sequence: Embedded input sequence.
            name: Name of the encoder. Should be unique accross all Neural
                Monkey objects.
            dropout_keep_prob: Probability of keeping a value during dropout.
            target_space_id: Specifies the modality of the target space.
            use_att_transform_bias: Add bias when transforming qkv vectors
                for attention.
            use_positional_encoding: If True, position encoding signal is added
                to the input.

        Keyword arguments:
            ff_hidden_size: Size of the feedforward sublayers.
            n_heads: Number of the self-attention heads.
            depth: Number of sublayers.
            attention_dropout_keep_prob: Probability of keeping a value
                during dropout on the attention output.
            input_for_cross_attention: An attendable model part that is
                attended using cross-attention on every layer of the decoder,
                analogically to how encoder is attended in the decoder.
            n_cross_att_heads: Number of heads used in the cross-attention.

        Raises:
            ValueError: On any invalid hyperparameter combination (see the
                checks below).
        """
        check_argument_types()
        ModelPart.__init__(self, name, save_checkpoint, load_checkpoint)

        self.input_sequence = input_sequence
        # The model dimension is inherited from the input embeddings.
        self.model_dimension = self.input_sequence.dimension
        self.ff_hidden_size = ff_hidden_size
        self.depth = depth
        self.n_heads = n_heads
        self.dropout_keep_prob = dropout_keep_prob
        self.attention_dropout_keep_prob = attention_dropout_keep_prob
        self.target_space_id = target_space_id
        self.use_att_transform_bias = use_att_transform_bias
        self.use_positional_encoding = use_positional_encoding
        self.input_for_cross_attention = input_for_cross_attention
        self.n_cross_att_heads = n_cross_att_heads

        # Hyperparameter sanity checks.
        if self.depth <= 0:
            raise ValueError("Depth must be a positive integer.")

        if self.ff_hidden_size <= 0:
            raise ValueError("Feed forward hidden size must be a "
                             "positive integer.")

        if self.dropout_keep_prob <= 0.0 or self.dropout_keep_prob > 1.0:
            raise ValueError("Dropout keep prob must be inside (0,1].")

        if (self.attention_dropout_keep_prob <= 0.0
                or self.attention_dropout_keep_prob > 1.0):
            raise ValueError("Dropout keep prob for attn must be in (0,1].")

        # The modality embedding matrix has 32 rows (see modality_matrix).
        if self.target_space_id is not None and (self.target_space_id >= 32
                                                 or self.target_space_id < 0):
            raise ValueError(
                "If provided, the target space ID should be between 0 and 31. "
                "Was: {}".format(self.target_space_id))

        # The two cross-attention options must be given (or omitted) together.
        if (input_for_cross_attention is None) != (n_cross_att_heads is None):
            raise ValueError(
                "Either both input_for_cross_attention and n_cross_att_heads "
                "must be provided or none of them.")

        if input_for_cross_attention is not None:
            # Residual connections require the attended states to match the
            # model dimension.
            cross_att_dim = get_attention_states(
                input_for_cross_attention).get_shape()[-1].value
            if cross_att_dim != self.model_dimension:
                raise ValueError(
                    "The input for cross-attention must be of the same "
                    "dimension as the model, was {}.".format(cross_att_dim))

        # Boolean placeholder toggling train/inference dropout behavior.
        self.train_mode = tf.placeholder(tf.bool, [], "train_mode")
        # Building ``self.output`` here forces graph construction eagerly.
        log("Output op: {}".format(self.output))
    # pylint: enable=too-many-arguments,too-many-locals
    @tensor
    def output(self) -> tf.Tensor:
        # Fixed-size sequence representation: sum over the time axis.
        return tf.reduce_sum(self.temporal_states, axis=1)
    @tensor
    def modality_matrix(self) -> tf.Tensor:
        """Create an embedding matrix for varyining target modalities.

        Used to embed different target space modalities in the tensor2tensor
        models (e.g. during the zero-shot translation). One row per possible
        target space ID (32), each of the input embedding size.
        """
        emb_size = self.input_sequence.temporal_states.shape.as_list()[-1]
        return get_variable(
            name="target_modality_embedding_matrix",
            shape=[32, emb_size],
            dtype=tf.float32,
            initializer=tf.glorot_uniform_initializer())
    @tensor
    def target_modality_embedding(self) -> tf.Tensor:
        """Gather correct embedding of the target space modality.

        See TransformerEncoder.modality_matrix for more information.
        """
        return tf.gather(self.modality_matrix,
                         tf.constant(self.target_space_id))
    @tensor
    def encoder_inputs(self) -> tf.Tensor:
        # Input embeddings, optionally shifted by the target-modality
        # embedding and augmented with the sinusoidal position signal,
        # with input dropout applied last.
        inputs = self.input_sequence.temporal_states

        if self.target_space_id is not None:
            # Broadcast the modality embedding over batch and time.
            inputs += tf.reshape(self.target_modality_embedding, [1, 1, -1])

        length = tf.shape(inputs)[1]

        if self.use_positional_encoding:
            inputs += position_signal(self.model_dimension, length)

        return dropout(inputs, self.dropout_keep_prob, self.train_mode)
    def self_attention_sublayer(
            self, prev_layer: TransformerLayer) -> tf.Tensor:
        """Create the encoder self-attention sublayer.

        Pre-norm residual block: layer-normalize the previous layer's
        states, run multi-head self-attention over them, apply dropout
        and add the (un-normalized) input back as a residual.
        """
        # Layer normalization
        normalized_states = layer_norm(prev_layer.temporal_states)

        # Run self-attention
        self_context, _ = attention(
            queries=normalized_states,
            keys=normalized_states,
            values=normalized_states,
            keys_mask=prev_layer.temporal_mask,
            num_heads=self.n_heads,
            dropout_callback=lambda x: dropout(
                x, self.attention_dropout_keep_prob, self.train_mode),
            use_bias=self.use_att_transform_bias)

        # Apply dropout
        self_context = dropout(
            self_context, self.dropout_keep_prob, self.train_mode)

        # Add residual connections
        return self_context + prev_layer.temporal_states
def cross_attention_sublayer(self, queries: tf.Tensor) -> tf.Tensor:
    """Create the encoder-to-encoder (cross) attention sublayer.

    Attends from ``queries`` to the attention states of
    ``self.input_for_cross_attention``, using the pre-norm residual
    pattern (normalize queries, attend, dropout, add residual).

    :param queries: hidden states of the current layer
    :return: cross-attention context with residual connection to
        ``queries``
    """
    # Bug fix: the original first assertion was
    # ``assert self.cross_attention_sublayer is not None`` -- vacuously
    # true, since it tested this very (always-bound) method.  The
    # meaningful preconditions are the two configuration checks below.
    assert self.n_cross_att_heads is not None
    assert self.input_for_cross_attention is not None

    encoder_att_states = get_attention_states(
        self.input_for_cross_attention)
    encoder_att_mask = get_attention_mask(self.input_for_cross_attention)

    # Layer normalization
    normalized_queries = layer_norm(queries)

    encoder_context, _ = attention(
        queries=normalized_queries,
        keys=encoder_att_states,
        values=encoder_att_states,
        keys_mask=encoder_att_mask,
        num_heads=self.n_cross_att_heads,
        dropout_callback=lambda x: dropout(
            x, self.attention_dropout_keep_prob, self.train_mode),
        use_bias=self.use_att_transform_bias)

    # Apply dropout
    encoder_context = dropout(
        encoder_context, self.dropout_keep_prob, self.train_mode)

    # Add residual connections
    return encoder_context + queries
def feedforward_sublayer(self, layer_input: tf.Tensor) -> tf.Tensor:
    """Create the feed-forward network sublayer.

    Pre-norm position-wise FFN: layer norm, dense+ReLU expansion to
    ``ff_hidden_size``, dropout, projection back to ``model_dimension``,
    dropout, then residual connection with the unnormalized input.
    """
    # Layer normalization
    normalized_input = layer_norm(layer_input)

    # Feed-forward network hidden layer + ReLU
    ff_hidden = tf.layers.dense(
        normalized_input, self.ff_hidden_size, activation=tf.nn.relu,
        name="hidden_state")

    # Apply dropout on hidden layer activations
    ff_hidden = dropout(ff_hidden, self.dropout_keep_prob, self.train_mode)

    # Feed-forward output projection
    ff_output = tf.layers.dense(
        ff_hidden, self.model_dimension, name="output")

    # Apply dropout on feed-forward output projection
    ff_output = dropout(ff_output, self.dropout_keep_prob, self.train_mode)

    # Add residual connections
    return ff_output + layer_input
def layer(self, level: int) -> TransformerLayer:
    """Build (recursively) the encoder stack up to ``level``.

    Level 0 is the (dropped-out, position-encoded) input; level ``k``
    applies self-attention, optional cross-attention and the FFN on top
    of level ``k - 1``.  The final level additionally layer-normalizes
    its output (pre-norm architecture).
    """
    # Recursive implementation. Outputs of the zeroth layer
    # are normalized inputs.
    if level == 0:
        return TransformerLayer(self.encoder_inputs, self.temporal_mask)

    # Compute the outputs of the previous layer
    prev_layer = self.layer(level - 1)

    # Variable scopes are zero-based ("layer_0" holds level 1's weights).
    with tf.variable_scope("layer_{}".format(level - 1)):
        with tf.variable_scope("self_attention"):
            self_context = self.self_attention_sublayer(prev_layer)

        if self.input_for_cross_attention is not None:
            with tf.variable_scope("cross_attention"):
                self_context = self.cross_attention_sublayer(self_context)

        with tf.variable_scope("feedforward"):
            output_states = self.feedforward_sublayer(self_context)

    # Layer normalization on the encoder outputs
    if self.depth == level:
        output_states = layer_norm(output_states)

    return TransformerLayer(states=output_states, mask=self.temporal_mask)
@tensor
def temporal_states(self) -> tf.Tensor:
    """States of the topmost (depth-th) encoder layer."""
    return self.layer(self.depth).temporal_states
@tensor
def temporal_mask(self) -> tf.Tensor:
    """Padding mask, passed through unchanged from the input sequence."""
    return self.input_sequence.temporal_mask
def feed_dict(self, dataset: Dataset, train: bool = False) -> FeedDict:
    """Populate the train-mode placeholder; the dataset itself is unused
    here (inputs are fed by the wrapped input sequence)."""
    return {self.train_mode: train}
def get_dependencies(self) -> Set[ModelPart]:
    """Collect recursively all model parts this encoder depends on.

    Extends the base-class dependency set with the dependencies of the
    cross-attention input, when that input is itself a model part.
    """
    deps = ModelPart.get_dependencies(self)

    cross_input = self.input_for_cross_attention
    # isinstance(None, ModelPart) is False, so a missing cross-attention
    # input is naturally skipped here.
    if isinstance(cross_input, ModelPart):
        deps = deps.union(cross_input.get_dependencies())

    return deps
| juliakreutzer/bandit-neuralmonkey | neuralmonkey/encoders/transformer.py | Python | bsd-3-clause | 12,957 |
import datetime
import numbers
import string
from django.contrib.auth import models as django_models
from canvas.cache_patterns import CachedCall
from configuration import Config
from services import Services
class _BaseUserMixin(object):
    """Behaviour shared by the authenticated and anonymous user proxies."""

    # Bounds enforced by User.validate_password().
    MINIMUM_PASSWORD_LENGTH = 5
    MAXIMUM_PASSWORD_LENGTH = 2000

    def __unicode__(self):
        return unicode(self.to_client())

    def to_client(self, **kwargs):
        # Minimal client-facing representation; kwargs are accepted for
        # signature compatibility but ignored here.
        return dict(username=self.username, id=self.id)

    def found_limit_reached(self):
        # Whether this user has already founded the maximum allowed
        # number of groups (imported lazily to avoid circular imports).
        from canvas.models import Category
        return self.founded_groups.count() >= Category.FOUND_LIMIT
class User(django_models.User, _BaseUserMixin):
    """Proxy over ``django.contrib.auth.models.User`` adding canvas
    behaviour: redis-backed social graph (followers/following), a per-user
    key-value store, validation helpers and avatar caching."""

    class Meta:
        proxy = True

    @classmethod
    def users_over_one_day_old(cls, cutoff=None):
        """
        Returns a queryset of all users who signed up at least 24 hours ago.
        Optionally specify a `cutoff` datetime - the returned users will have signed up after this.
        """
        today = Services.time.today()
        yesterday = today - datetime.timedelta(days=1)
        users = cls.objects.filter(date_joined__lte=yesterday)
        if cutoff:
            users = users.filter(date_joined__gte=cutoff)
        return users

    @property
    def redis(self):
        # Per-user redis accessor; imported lazily to avoid circular imports.
        from canvas.models import UserRedis
        return UserRedis(self.id, self.is_staff)

    @property
    def kv(self):
        # Lazily-created per-user key/value store; requires a saved user.
        if not self.id:
            raise Exception("User object doesn't have an id yet. "
                            "Make sure you .save() before accessing the kv store!")
        if not hasattr(self, '_kv'):
            from canvas.models import UserKV
            self._kv = UserKV(self)
        return self._kv

    def change_email(self, new_email):
        """
        This will send out a confirmation email to the user before it is actually changed.
        Returns the EmailConfirmation object that is created.
        """
        from apps.user_settings.models import EmailConfirmation
        confirmation = EmailConfirmation.objects.create_confirmation(self, new_email)
        confirmation.send_confirmation()
        return confirmation

    @classmethod
    def validate_password(cls, password):
        """ Returns whether the given password validates. """
        return cls.MAXIMUM_PASSWORD_LENGTH >= len(password) >= cls.MINIMUM_PASSWORD_LENGTH

    @classmethod
    def validate_username(cls, username):
        """ Returns None if the username is valid and does not exist.

        Otherwise returns a human-readable error message string.
        """
        un = username.lower()
        if (un in Config['blocked_usernames']
                or any(fragment in un for fragment in Config['blocked_username_fragments'])
                or any(fragment in un for fragment in Config['autoflag_words'])):
            return "Sorry, this username is not allowed."
        if len(un) < 3:
            return "Username must be 3 or more characters."
        if len(un) > 16:
            return "Username must be 16 characters or less."
        alphabet = string.lowercase + string.uppercase + string.digits + '_'
        if not all(char in alphabet for char in username):
            return "Usernames can only contain letters, digits, and underscores."
        if User.objects.filter(username__iexact=username):
            return "This username is taken :("

    @classmethod
    def validate_email(cls, email):
        """ Checks whether the email address appears to be well-formed. Very liberal. """
        try:
            local, hostname = email.split('@')
        except (IndexError, ValueError,):
            return False
        return (
            email.count('@') == 1
            and '.' in hostname
            and len(email) >= 5
            and all(ord(c) < 256 for c in email)
            and local
            and local[-1] != '.'
            and hostname
            and hostname[0] not in '.-'
            and hostname[-1] not in '.-'
            and '_' not in hostname
            and '..' not in hostname
            and ' ' not in email
            and ',' not in email
            and ':' not in email
            and '$' not in email
            and ';' not in email
            and '&' not in email
            and '*' not in email
            and '#' not in email
            and '%' not in email
            and '!' not in email
            and hostname != 'example.com'
            and all(c in string.ascii_letters for c in hostname.split('.')[-1])
        )

    @classmethod
    def email_is_unused(cls, email):
        # True iff no *active* account already uses this address.
        try:
            cls.objects.get(email=email, is_active=True)
        except cls.DoesNotExist:
            return True
        except cls.MultipleObjectsReturned:
            return False
        else:
            return False

    def has_lab(self, lab_name):
        # Lab (feature flag) membership is stored in the user's redis hash.
        return bool(int(self.redis.user_kv.hget('labs:' + lab_name) or 0))

    def join_lab(self, lab_name):
        self.redis.user_kv.hset('labs:' + lab_name, 1)

    def has_posted(self):
        """ Whether this user has posted a comment or remix. """
        from canvas.models import Comment
        return self.comments.exists()

    def has_stickered(self):
        """ Whether this user has ever stickered a post. """
        from canvas.models import CommentSticker
        return self.commentsticker_set.exists()

    def _followed_or_unfollowed(self):
        # Shared cache invalidation after any follow-graph change.
        from apps.activity.api import invalidate_caches as invalidate_activity_caches
        invalidate_activity_caches(self.id)

    def follow(self, user_to_follow):
        """Follow another user, updating both users' redis sets and
        notifying the followed user.  Idempotent: re-following is a no-op."""
        from canvas.notifications.actions import Actions
        if self == user_to_follow:
            raise ValueError("Can't follow self.")
        if user_to_follow.id in self.redis.new_following:
            return
        self.redis.following.sadd(user_to_follow.id)
        self.redis.new_following.bump(user_to_follow.id)
        user_to_follow.redis.followers.sadd(self.id)
        user_to_follow.redis.new_followers.bump(self.id)
        Actions.followed_by_user(self, user_to_follow)
        user_to_follow.userinfo.refresh_follower_count()
        self._followed_or_unfollowed()

    def unfollow(self, user_to_unfollow):
        """Undo a follow; also records the pair in ``unfollowed_ever``."""
        self.redis.following.srem(user_to_unfollow.id)
        self.redis.new_following.remove(user_to_unfollow.id)
        self.redis.unfollowed_ever.sadd(user_to_unfollow.id)
        user_to_unfollow.redis.followers.srem(self.id)
        user_to_unfollow.redis.new_followers.remove(self.id)
        user_to_unfollow.userinfo.refresh_follower_count()
        self._followed_or_unfollowed()

    def followers(self):
        #return User.objects.in_bulk_list(self.redis.new_followers.zrange(0, -1))
        return User.objects.in_bulk_list(self.redis.followers.smembers())

    def follow_thread(self, comment):
        self.redis.followed_threads.sadd(comment.id)

    def unfollow_thread(self, comment):
        self.redis.followed_threads.srem(comment.id)

    def is_following(self, user):
        # Accepts either a user instance or a bare user id.
        if not isinstance(user, numbers.Integral):
            user = user.id
        #TODO update
        return user in self.redis.following

    @property
    def can_moderate_flagged(self):
        return self.is_staff

    @property
    def can_bestof_all(self):
        return self.is_staff

    @property
    def can_moderate_visibility(self):
        # Staff may set any visibility value; others may set none.
        if not self.is_staff:
            return []
        else:
            from canvas.models import Visibility
            return dict(Visibility.choices).keys()

    def _avatar_details(self):
        # Client representation of the profile image, or {} when unset.
        avatar_details = {}
        if (self.userinfo is not None
                and self.userinfo.profile_image is not None
                and self.userinfo.profile_image.reply_content is not None):
            avatar_details = self.userinfo.profile_image.reply_content.details().to_client()
        return avatar_details

    #TODO
    @classmethod
    def avatar_by_username(cls, username):
        # Cached (one week) lookup of a user's avatar details by username.
        return CachedCall(
            "avatar:%s:details_v1" % username,
            lambda: cls.objects.get(username=username)._avatar_details(),
            7*24*60*60,
        )
class AnonymousUser(django_models.AnonymousUser, _BaseUserMixin):
    """
    Note that Django auth will still sometimes return its own AnonymousUser. So we have a middleware to patch
    request.user when anonymous.
    """
    class Meta:
        proxy = True

    def has_lab(self, lab_name):
        # Anonymous visitors never belong to any lab.
        return False

    @property
    def logged_out_kv(self):
        # Session-less key/value store used in place of the per-user kv.
        from canvas.models import LoggedOutKV
        return LoggedOutKV()
# Expose the logged-out KV store as ``.kv`` on both our AnonymousUser proxy
# and Django's own AnonymousUser (some code paths still hand out the latter).
# Bug fix: the original assigned the bare name ``logged_out_kv`` here, which
# raises NameError at import time -- the property only exists as an
# attribute of the AnonymousUser class, not as a module-level name.
AnonymousUser.kv = AnonymousUser.logged_out_kv

# Monkey-patch django
django_models.AnonymousUser.kv = AnonymousUser.logged_out_kv
| drawquest/drawquest-web | website/apps/canvas_auth/models.py | Python | bsd-3-clause | 8,572 |
# -*- coding: UTF-8 -*-
"""
Functions to use for decorator construction
"""
from __future__ import absolute_import, unicode_literals
__all__ = ("intercept", "log_call")
from inspect import getmodule
from logging import getLogger
from six import raise_from
from sys import version_info
from .constants import LOG_CALL_FMT_STR
from ._memoization import convert_to_hashable
PY2 = version_info < (3, 0)
def intercept(
    decorated,
    catch=Exception,
    reraise=None,
    handler=None,
    err_msg=None,
    include_context=False,
):
    """Run a decorated call, intercepting a chosen exception type.

    Designed to be called via the ``instead`` decorator.

    :param Decorated decorated: decorated function information; called
        with its own recorded ``args`` and ``kwargs``
    :param Type[Exception] catch: the exception type to intercept
    :param Union[bool, Type[Exception]] reraise: ``True`` re-raises the
        original exception; ``False``/``None`` swallows it (after the
        handler runs); an exception class raises a new instance of that
        class instead
    :param Callable[[Exception], Any] handler: optional callback invoked
        with the caught exception before any re-raise
    :param str err_msg: message for the replacement exception; defaults
        to ``str`` of the caught exception
    :param include_context: when True, chain the replacement exception
        onto the original via its ``__cause__``
    """
    try:
        return decorated(*decorated.args, **decorated.kwargs)
    except catch as exc:
        if handler is not None:
            handler(exc)

        # A boolean ``reraise`` means "the original exception or nothing".
        if isinstance(reraise, bool):
            if reraise:
                raise
            return None

        if reraise is None:
            return None

        # ``reraise`` is an exception class: raise a fresh instance,
        # optionally chained onto the intercepted exception.
        message = err_msg if err_msg is not None else str(exc)
        replacement = reraise(message)
        raise_from(replacement, exc if include_context else None)
def log_call(decorated, logger=None, level="info", format_str=LOG_CALL_FMT_STR):
    """Log the parameters & results of a function call

    Designed to be called via the ``after`` decorator with
    ``pass_params=True`` and ``pass_decorated=True``. Use
    :any:`decorators.log_call` for easiest invocation.

    :param Decorated decorated: decorated function information
    :param Optional[logging.Logger] logger: optional logger instance;
        when omitted, a logger named after the wrapped function's module
        (or ``__main__``) is used
    :param Optional[str] level: log level - must be an acceptable Python
        log level, case-insensitive
    :param format_str: the string to use when formatting the results;
        may reference ``name``, ``args``, ``kwargs`` and ``result``
    """
    module = getmodule(decorated.wrapped)
    if logger is None:
        name = module.__name__ if module is not None else "__main__"
        logger = getLogger(name)
    # e.g. level="info" dispatches to logger.info(...).
    log_fn = getattr(logger, level.lower())
    msg = format_str.format(
        name=decorated.wrapped.__name__,
        args=decorated.args,
        kwargs=decorated.kwargs,
        result=decorated.result,
    )
    log_fn(msg)
def memoize(decorated, memo):
    """Return a cached result when available; compute and store otherwise.

    :param Decorated decorated: decorated function information; its
        recorded args/kwargs form the cache key
    :param memo: the memoization cache. Must support standard
        ``__getitem__``, ``__setitem__`` and ``__contains__``
    """
    cache_key = convert_to_hashable(decorated.args, decorated.kwargs)
    if cache_key not in memo:
        memo[cache_key] = decorated(*decorated.args, **decorated.kwargs)
    return memo[cache_key]
| mplanchard/pydecor | src/pydecor/functions.py | Python | mit | 3,535 |
import time
import json
import random
from flask import Flask, request, current_app
from functools import wraps
from cloudbrain.utils.metadata_info import map_metric_name_to_num_channels, get_supported_devices
from cloudbrain.settings import WEBSERVER_PORT
_MOCK_ENABLED = True
app = Flask(__name__)
app.config['PROPAGATE_EXCEPTIONS'] = True
if not _MOCK_ENABLED:
from cloudbrain.datastore.CassandraDAL import CassandraDAL
dao = CassandraDAL()
dao.connect()
def support_jsonp(f):
    """Wrap a view's JSON output in a JSONP callback when requested.

    When the request carries a ``callback`` query parameter, the wrapped
    view's body is served as ``callback(<json>)`` with a JSON mimetype;
    otherwise the view runs unchanged.
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        callback = request.args.get('callback', False)
        if callback:
            # Bug fix: the original called ``f()`` with no arguments,
            # dropping any view args/kwargs (e.g. URL parameters).
            content = str(callback) + '(' + str(f(*args, **kwargs)) + ')'
            return current_app.response_class(
                content, mimetype='application/json')
        else:
            return f(*args, **kwargs)
    return decorated_function
@app.route('/data', methods=['GET'])
@support_jsonp
def data():
    """
    GET metric data.

    Required query params: ``device_id``, ``device_name``, ``metric``.
    Optional ``start``: earliest timestamp (microseconds); defaults to
    roughly "now".
    :return: JSON list of data records, or ``(message, 500)`` when a
        required parameter is missing.
    """
    default_start_timestamp = int(time.time() * 1000000 - 5)# return last 5 microseconds if start not specified.
    device_id = request.args.get('device_id', None)
    device_name = request.args.get('device_name', None)
    metric = request.args.get('metric', None)
    start = int(request.args.get('start', default_start_timestamp))

    if not device_name:
        return "missing param: device_name", 500
    if not metric:
        return "missing param: metric", 500
    if not device_id:
        return "missing param: device_id", 500

    if _MOCK_ENABLED:
        data_records = _get_mock_data(device_name, metric)
    else:
        data_records = dao.get_data(device_name, device_id, metric, start)

    return json.dumps(data_records)
@app.route('/power_bands', methods=['GET'])
@support_jsonp
def power_bands():
    """
    GET the power bands data.

    Required query params: ``device_id``, ``device_name``.
    Optional ``start``: earliest timestamp (microseconds).
    :return: JSON list of power-band records, or ``(message, 500)`` when
        a required parameter is missing.
    """
    default_start_timestamp = int(time.time() * 1000000 - 5)# return last 5 microseconds if start not specified.
    device_id = request.args.get('device_id', None)
    device_name = request.args.get('device_name', None)
    start = int(request.args.get('start', default_start_timestamp))

    if not device_name:
        return "missing param: device_name", 500
    if not device_id:
        return "missing param: device_id", 500

    if _MOCK_ENABLED:
        data_records = _get_power_bands_mock_data()
    else:
        data_records = dao.get_power_band_data(device_name, device_id, start)

    return json.dumps(data_records)
def _get_mock_data(device_name, metric):
    """Generate five fake records for the given device metric.

    Each record carries a microsecond ``timestamp`` plus one random
    value per channel (``channel_0`` .. ``channel_{k-1}``), where the
    channel count comes from the device metadata.
    """
    metric_to_num_channels = map_metric_name_to_num_channels(device_name)
    num_channels = metric_to_num_channels[metric]
    now = int(time.time() * 1000000 - 5)  # micro seconds
    data_records = []
    # ``range`` instead of the Python-2-only ``xrange``: identical
    # behaviour when consumed by a for-loop, and keeps the module
    # importable under Python 3.
    for i in range(5):
        record = {'timestamp': now + i}
        for j in range(num_channels):
            channel_name = 'channel_%s' % j
            record[channel_name] = random.random() * 10
        data_records.append(record)
    return data_records
def _get_power_bands_mock_data():
now = int(time.time() * 1000000 - 5) # micro seconds
data_records = []
for i in xrange(5):
record = {'timestamp': now + i,
"alpha": random.random() * 10,
"beta": random.random() * 10,
"gamma": random.random() * 10,
"theta": random.random() * 10,
"delta": random.random() * 10}
data_records.append(record)
return data_records
@app.route('/device_names', methods=['GET'])
@support_jsonp
def get_device_names():
    """
    Returns the device names from the metadata file.
    :return: JSON list of supported device names
    """
    return json.dumps(get_supported_devices())
@app.route('/registered_devices', methods=['GET'])
@support_jsonp
def get_registered_devices():
    """
    Get the registered device IDs.
    :return: JSON list of device ids (a fixed mock id when mocking)
    """
    if _MOCK_ENABLED:
        registered_devices = ['octopicorn'] # mock ID
    else:
        registered_devices = dao.get_registered_devices()
    return json.dumps(registered_devices)
if __name__ == "__main__":
app.run(host="0.0.0.0", port=WEBSERVER_PORT)
| realitygaps/cloudbrain | cloudbrain/datastore/rest_api_server.py | Python | agpl-3.0 | 3,932 |
#-*- coding: utf-8 -*-
from models import *
from django.http import HttpResponse
from django.views.decorators.http import require_http_methods
from django.views.decorators.csrf import csrf_exempt
from decorators import jsonResp
import os.path
import json
from prettyprint import pp
from libs.resturl import uriProcessingChain
def getUriPrefixByRequest(request):
    """Rebuild the absolute URI of *request*: ``scheme://host[:port]path``."""
    parts = (request.scheme, request.get_host(), request.path)
    return u'%s://%s%s' % parts
@csrf_exempt
def testCategoryManage(request, objectid, propName):
    # Dispatcher for category property endpoints.
    # NOTE(review): only GET is handled; any other method falls through and
    # returns None, which Django rejects -- confirm whether a 405 is wanted.
    if request.method == 'GET':
        return getTestObjectCategoryProp(request, objectid, propName)
def getTestObjectCategoryProp(request, objectid, propName):
    """Serve a property of a category (Level) as JSON.

    Currently only 'sub-categories' is implemented: the direct children
    of the level, each with URIs derived from the request path.
    """
    # NOTE(review): raises IndexError when no Level matches objectid, and
    # returns None for any propName other than 'sub-categories'.
    level = Level.objects.filter(id = objectid)[0]
    if propName == 'sub-categories':
        ret = map(lambda x: {
            'uri': getUriPrefixByRequest(request) + '/' + unicode(x.id),
            'title': x.title,
            'id': unicode(x.id),
            'subCategories': getUriPrefixByRequest(request) + '/' + unicode(x.id) + '/sub-categories',
            #level type uri
            'levelType': '',
        }, level.children.all())
        retStr = json.dumps(ret)
        resp = HttpResponse(retStr, content_type = 'application/json')
        return resp
def putTestObjectCategories(request, objectid):
    """Attach categories (by id, from the JSON request body) to a test
    object, then return the object's updated category list.

    Only levels not already bound to any test object are eligible.
    """
    categoryIds = json.loads(request.body)
    categories = Level.objects.filter(testObjects = None).filter(id__in = categoryIds)
    testObject = TestObject.objects.filter(id = objectid)[0]
    for category in categories:
        testObject.testLevels.add(category)
    testObject.save()
    return getTestObjectCategories(request, objectid)
def testCategories(request, categoryId):
    # Dispatcher for the test-categories endpoint.
    # NOTE(review): non-GET methods fall through and return None.
    if request.method == 'GET':
        return getTestCategories(request, categoryId)
def getTestCategories(request, categoryId):
    # NOTE(review): only the literal id 'orphans' (parentless levels not
    # bound to a test object) is handled, and each category is serialized
    # as an EMPTY dict -- this looks like an unfinished stub.  Any other
    # categoryId silently returns None.
    if u'orphans' == categoryId:
        categories = Level.objects.filter(parent = None).filter(testObjects = None)
        categories = map(lambda x: {}, categories)
        ret = HttpResponse(json.dumps(categories))
        return ret
    else:
        pass
def categoryJsonify(uriPrefix, x):
    """Serialize a category Level *x* into its client-facing dict.

    :param uriPrefix: base URI under which the category's sub-resources
        are addressed
    :param x: object exposing ``id`` and ``title``
    """
    # Bug fix: the original dict literal listed the 'id' key twice with
    # the same value; the duplicate entry has been dropped.
    return {
        'id': unicode(x.id),
        'title': x.title,
        'subCategories': uriPrefix + '/' + unicode(x.id) + '/sub-categories',
        # level type uri -- not yet implemented
        'levelType': '',
    }
def testObjectListing(useless):
    """Root handler of the URI chain: return the TestObject manager.

    The *useless* argument is ignored; it exists only so every chain
    handler shares the same (previous_result, ...) calling convention.
    """
    return TestObject.objects
def getTestObjectById(objects, oid):
    """Chain handler: fetch one object from manager *objects* by id."""
    return objects.get(id = oid)
def getTestObjectCategories(testObject, dummy):
    """Chain handler: return the categories manager of *testObject*.

    *dummy* (the matched URI segment) is ignored.
    """
    return testObject.testLevels
# Keys of the URI-processing-chain node dicts (see libs.resturl).
kPattern = 'pattern'    # regex fragment matched against one URI segment
kChildren = 'children'  # nested chain nodes for the following segments
kHandler = 'handler'    # callable fed the previous handler's result

# Declarative routing table consumed by uriProcessingChain:
#   testObjects -> testObjects/<uuid> -> testObjects/<uuid>/categories
uriChain = [{
    kPattern: 'testObjects',
    kHandler: testObjectListing,
    kChildren: [{
        kPattern : '[0-9a-f-]{36}',
        kHandler : getTestObjectById,
        kChildren : [{
            kPattern : 'categories',
            kHandler : getTestObjectCategories,
            kChildren : []},
        ],},],
}]
def testUriChain():
    # Ad-hoc manual smoke test for uriProcessingChain; not wired to a URL.
    from serializers import obj2json
    uri = "testObjects/67a01e17-9e8e-4d99-973b-170d74110a4b"
    ret = uriProcessingChain(uri, uriChain)
    ret = obj2json(ret)
    pp(ret)
    ret = type(ret)  # NOTE(review): dead assignment -- value is discarded
from serializers import obj2json
def injectUriForTestObject(prefix, x):
    """Rewrite, in place, the id-valued fields of serialized test object
    *x* into absolute URIs rooted at *prefix*, and record the object's
    own URI under the ``uri`` key."""
    x['uri'] = prefix
    for field in ('categories', 'categoryLevels', 'reports',
                  'testcases', 'testPlans'):
        x[field] = prefix + '/' + x[field]
def injectUriForTestObjects(prefix, objects):
    """Inject URIs into every serialized test object in *objects*.

    Portability fix: the original returned a bare ``map(...)``, which is
    eager only under Python 2; under Python 3 ``map`` is lazy, so the
    in-place URI injection would never run until (if ever) the result was
    iterated.  A list comprehension keeps the Python-2 behaviour (a list
    of the per-object return values, all None) on both versions.
    """
    return [injectUriForTestObject(prefix + '/' + x['id'], x)
            for x in objects]
def injectUriForTestLevel(prefix, x):
    """Rewrite, in place, the ``subCategories`` id of serialized level
    *x* into an absolute URI rooted at *prefix*, and record the level's
    own URI under the ``uri`` key."""
    x['uri'] = prefix
    x['subCategories'] = prefix + '/' + x['subCategories']
def injectUriForTestLevels(prefix, objects):
    """Inject URIs into every serialized level in *objects*.

    Portability fix: like injectUriForTestObjects, the original relied on
    the eager Python-2 ``map``; under Python 3 the lazy map would skip
    the in-place injection.  The list comprehension is equivalent on
    both versions.
    """
    return [injectUriForTestLevel(prefix + '/' + x['id'], x)
            for x in objects]
# Maps a chain-handler result (a model class, manager, or related-manager
# class) to the function that rewrites its serialized ids into URIs.
# apiRoot() looks up first the object itself, then its type.
uriInjectors = {
    TestObject : injectUriForTestObject,
    TestObject.objects : injectUriForTestObjects,
    TestObject.testLevels.related_manager_cls: injectUriForTestLevels,
    Level: injectUriForTestLevel,
}
def injectUri(injector, targets, prefix):
    """Apply *injector* (if any) to *targets* in place and return them.

    A ``None`` injector means the result type needs no URI rewriting.
    """
    if injector is not None:
        injector(prefix, targets)
    return targets
def apiRoot(request, uri):
    """Single REST entry point: resolve *uri* through the processing
    chain, serialize the result, rewrite contained ids into absolute
    URIs and return it as JSON (404 when nothing matches)."""
    obj = uriProcessingChain(uri, uriChain)
    print obj
    if (None == obj):
        response = HttpResponse()
        response.status_code = 404
        return response
    ret = obj2json(obj)
    objType = type(obj)
    print 'objType', objType
    pp(ret)
    injector = None
    # Prefer an injector registered for the object itself (e.g. the
    # TestObject.objects manager), then fall back to its type.
    if obj in uriInjectors:
        injector = uriInjectors[obj]
    elif objType in uriInjectors:
        injector = uriInjectors[objType]
    ret = injectUri(injector, ret, getUriPrefixByRequest(request))
    ret = json.dumps(ret, ensure_ascii = False)
    response = HttpResponse(ret)
    return response
| qinggeng/ceShiGuanLiXiTong | site/ceShiGuanLiSite/apps/testManage/apiViews.py | Python | mit | 4,956 |
'''
Created on 23/02/2015
@author: Alex Montes Barrios
'''
import Tkinter as tk
import tkFont
import re
import Queue
import keyword
def rgbColor(red, green, blue):
    """Format an RGB triple (0-255 ints) as a Tk hex colour, e.g. '#FF8010'."""
    return '#{0:02X}{1:02X}{2:02X}'.format(red, green, blue)
# Highlighting rules, one list per language.  Each entry is
# [tag_name, Text-widget tag config, regex source, re flags]; rules are
# applied in order, earlier matches winning position-wise (see longProcess).
PYTHONSINTAX = [
    ['pythonNumber', dict(foreground = 'IndianRed'), r'(\d+[.]*)+',re.MULTILINE],
    ['pythonFunction', dict(font = ('Consolas', 18, 'bold')), r'(?<=def)\s+(\b.+?\b)',re.MULTILINE],
    ['pythonFunction', dict(font = ('Consolas', 18, 'bold')), r'(?<=class)\s+(\b.+?\b)',re.MULTILINE],
    ['pythonKwd', dict(foreground = 'blue'), r'\b(' + '|'.join(keyword.kwlist + ['True', 'False', 'None']) + r')\b',re.MULTILINE],
    ['pythonComment', dict(foreground = 'red'), r'#.*$',re.MULTILINE],
    ['pythonMultilineString', dict(foreground = 'lime green'), r'(\"\"\"|\'\'\').*?\1|(\"\"\"|\'\'\').+',re.DOTALL],
    ['pythonString', dict(foreground = 'lime green'), r'(?<!\\)(\'|\").*?((?<!\\)\1|$)',re.MULTILINE]
]

# Eclipse-like colour scheme for XML documents.
XMLSINTAX = [
    ['tagnames', dict(foreground = rgbColor(63,127,127)), r'(?<=<|/)\w+',re.MULTILINE],
    ['tagdelimiters', dict(foreground = rgbColor(0,128,128)), r'(<|</|>)',re.MULTILINE],
    ['attribnames', dict(foreground = rgbColor(127, 0,127)), r'(?<= )\S+(?==)',re.MULTILINE],
    ['attribequalsign', dict(foreground = rgbColor(0,0,0)), r'(?<=\w)=(?=(\"|\'))',re.MULTILINE],
    ['attribvalues', dict(foreground = rgbColor(42,0,255)), r'(?<==)(\"|\')\S+\1',re.MULTILINE],
    ['commentcontent', dict(foreground = rgbColor(63,95,191)), r'(?<=<!--).+?(?=-->)',re.DOTALL],
    ['commentdelimiters', dict(foreground = rgbColor(63,95,191)), r'<!--|-->',re.MULTILINE],
    ['content', dict(foreground = rgbColor(0,0,0)), r'(?<=>)[^<]+(?=</)',re.MULTILINE]
]

# File-extension -> rule-set lookup used by callers of sintaxisConfigure.
sintaxMap = {'.py':PYTHONSINTAX, '.xml':XMLSINTAX}
class SintaxEditor(tk.Frame):
    """Tkinter Text-based editor with regex-driven syntax highlighting.

    Highlighting is computed on a background thread (longProcess) which
    pushes (tag, range) tuples onto a queue; queueConsumer drains the
    queue on the Tk event loop via ``after`` callbacks.
    """

    def __init__(self, master, hrzSlider = False, vrtSlider = True):
        tk.Frame.__init__(self, master)
        self.stopFlag = False        # True while the highlight thread runs
        self.toColor = []            # [tagName, compiled regex] pairs
        self.activeCallBack = []     # pending ``after`` callback ids
        self.queue = Queue.Queue(maxsize=0)
        self.setGUI(hrzSlider, vrtSlider)
        self.editable = True
        self.contentType = self.contentSource = None

    def pasteFromClipboard(self, event = None):
        # Replace the buffer with the CLIPBOARD selection, then re-highlight.
        # NOTE(review): setContent is called with a single argument here but
        # its signature requires ``inspos`` -- confirm this path is used.
        textw = self.textw
        try:
            data = textw.selection_get(selection = 'CLIPBOARD')
            self.setContent(data)
        except tk.TclError:
            pass
        self.formatContent()

    def initFrameExec(self):
        # Hook for subclasses (e.g. CodeEditor) to populate the editor.
        pass

    def setGUI(self,hrzSlider = False, vrtSlider = True):
        """Build the Text widget, scrollbars, tags and event bindings."""
        self.prompt =''
        self.cellInput = ''
        self.customFont = tkFont.Font(family = 'Consolas', size = 18)
        # NOTE(review): if both sliders are disabled, ``wrapt`` is never
        # assigned and the tk.Text(...) call below raises NameError.
        if vrtSlider:
            scrollbar = tk.Scrollbar(self)
            scrollbar.pack(side = tk.RIGHT, fill = tk.Y)
            wrapt = tk.CHAR
        if hrzSlider:
            hscrollbar = tk.Scrollbar(self, orient = tk.HORIZONTAL)
            hscrollbar.pack(side = tk.BOTTOM, fill = tk.X)
            wrapt = tk.NONE
        textw = tk.Text(self, wrap = wrapt, font = self.customFont, tabs=('1.5c'))
        textw.pack(side = tk.LEFT, fill = tk.BOTH, expand = 1)
        if vrtSlider:
            textw.config(yscrollcommand=scrollbar.set)
            scrollbar.config(command=textw.yview)
        if hrzSlider:
            textw.config(xscrollcommand=hscrollbar.set)
            hscrollbar.config(command=textw.xview)
        self.textw = textw
        textw.see('end')
        # Virtual events for the standard clipboard shortcuts.
        textw.event_add('<<Copy>>','<Control-C>','<Control-c>')
        textw.event_add('<<Paste>>','<Control-V>','<Control-v>')
        textw.event_add('<<Cut>>','<Control-X>','<Control-x>')
        textw.event_add('<<Selall>>','<Control-A>','<Control-a>')
        textw.event_add('<<CursorlineOff>>','<Up>','<Down>','<Next>','<Prior>','<Button-1>')
        textw.event_add('<<CursorlineOn>>','<KeyRelease-Up>','<KeyRelease-Down>','<KeyRelease-Next>','<KeyRelease-Prior>','<ButtonRelease-1>')
        # Current-line highlight, highlight anchors, and clickable links.
        textw.tag_configure('cursorLine', background = 'alice blue')
        textw.tag_configure('sintaxTag')
        textw.tag_config("hyper")
        textw.tag_bind("hyper", "<Enter>", self._enter)
        textw.tag_bind("hyper", "<Leave>", self._leave)
        textw.tag_bind("hyper", "<Button-1>", self._click)
        self.dispPrompt()
        textw.bind('<Key>', self.keyHandler)
        textw.bind('<<Copy>>', self.selCopy)
        textw.bind('<<Paste>>', self.selPaste)
        textw.bind('<<Cut>>', self.selCut)
        textw.bind('<<Selall>>',self.selAll)
        textw.bind('<<CursorlineOff>>', self.onUpPress)
        textw.bind('<<CursorlineOn>>', self.onUpRelease)

    def _enter(self, event):
        # Hand cursor while hovering a hyperlink.
        self.textw.config(cursor="hand2")

    def _leave(self, event):
        self.textw.config(cursor="")

    def _click(self, event):
        # Dispatch the text of the clicked "hyper"-tagged range.
        widget = event.widget
        for tag in widget.tag_names(tk.CURRENT):
            if tag == "hyper":
                tagRange = widget.tag_prevrange(tag, tk.CURRENT)
                texto = widget.get(*tagRange)
                self.processHyperlink(texto)
                return

    def setHyperlinkManager(self, callbackFunction):
        # Register the callable invoked with the clicked link text.
        self.hyperlinkManager = callbackFunction

    def processHyperlink(self, texto):
        # NOTE(review): raises AttributeError if setHyperlinkManager was
        # never called (the attribute is only created there).
        if self.hyperlinkManager:
            self.hyperlinkManager(texto)

    def onUpPress(self, event = None):
        # Drop the current-line highlight before the cursor moves.
        textw = self.textw
        textw.tag_remove('cursorLine', '1.0', 'end')

    def onUpRelease(self, event = None):
        # Re-apply the current-line highlight unless a selection exists.
        textw = self.textw
        if textw.tag_ranges('sel'): return
        textw.tag_add('cursorLine', 'insert linestart', 'insert lineend + 1 chars')

    def getSelRange(self):
        # Selection (start, end) indices, or None/empty when nothing selected.
        textw = self.textw
        try:
            return textw.tag_ranges('sel')
        except tk.TclError:
            return None

    def longProcess(self, baseIndex, content):
        """Background highlighter: scan ``content`` with every rule and
        queue (tag, anchor, start, end) tuples, always advancing past the
        earliest match so overlapping rules cannot loop."""
        toColor = self.toColor
        pos = 0
        anchor = baseIndex
        while self.stopFlag and self.toColor:
            matchs = [reg.search(content, pos) for tag, reg in toColor]
            if not any(matchs):
                break
            # Pick the rule whose match starts earliest in the text.
            match, k = min([(match.start(0), k) for k, match in enumerate(matchs) if match])
            tagIni = baseIndex + ' + %d chars'%matchs[k].start(0)
            tagFin = baseIndex + ' + %d chars'%matchs[k].end(0)
            self.queue.put((toColor[k][0], anchor, tagIni, tagFin))
            anchor = tagFin
            pos = matchs[k].end(0)
        self.stopFlag = False

    def queueConsumer(self):
        """Drain up to 100 queued highlight operations on the Tk thread,
        then reschedule itself while work remains."""
        nProcess = 100
        while nProcess and not self.queue.empty():
            tagTxt, anchor, tagStart, tagEnd = self.queue.get()
            # Clear stale python* tags over the span before re-tagging.
            for tag in [tagname for tagname in self.textw.tag_names() if tagname.startswith('python')]:
                self.textw.tag_remove(tag, anchor, tagEnd)
            self.textw.tag_add(tagTxt, tagStart, tagEnd)
            self.textw.tag_add('sintaxTag', tagStart)
            self.textw.update()
            nProcess -= 1
        if not self.queue.empty(): self.activeCallBack.append(self.after(50, self.queueConsumer))

    def setRegexPattern(self):
        # NOTE(review): references self.sntxIndx / self.sintaxForColor,
        # which are never assigned in this class -- presumably set by a
        # subclass or external code; confirm before relying on this.
        sntxIndx = self.sntxIndx.get()
        return self.sintaxForColor[sntxIndx]()

    def formatContent(self,index1 = '1.0', index2 = 'end'):
        """(Re)start highlighting for the given range: cancel pending
        callbacks, stop a running worker, then launch a fresh thread and
        schedule the queue consumer."""
        while self.activeCallBack:
            idAfter = self.activeCallBack.pop()
            self.after_cancel(idAfter)
        self.queue.queue.clear()
        if self.stopFlag:
            # Ask the running worker to stop and wait briefly for it.
            self.stopFlag = False
            if self.t.isAlive(): self.t.join(10)
        textw = self.textw
        content = textw.get(index1, index2)
        baseIndex = textw.index(index1)
        if not self.stopFlag:
            self.stopFlag = True
            from threading import Thread
            self.t = Thread(name="regexpThread", target=self.longProcess, args=(baseIndex, content))
            self.t.start()
            self.activeCallBack.append(self.after(50, self.queueConsumer))

    def getContent(self):
        # (text, contentType, contentSource) triple for the whole buffer.
        textw = self.textw
        content = textw.get('1.0','end')
        return (content, self.contentType, self.contentSource)

    def setContent(self, contentDesc, inspos, sintaxArray = None, isEditable = True):
        """Load a (content, type, source) triple, optionally switching the
        highlighting rules and the read-only flag."""
        content, self.contentType, self.contentSource = contentDesc
        if sintaxArray: self.sintaxisConfigure(sintaxArray)
        self.editable = isEditable
        self.__setContent__(content, inspos)
        self.onUpRelease()

    def setCursorAt(self, inspos, grabFocus = True):
        self.textw.mark_set(tk.INSERT, inspos)
        self.textw.see(tk.INSERT)
        if grabFocus: self.textw.focus_force()
        # self.textw.focus_force()

    def __setContent__(self, text, inspos):
        """Replace the buffer with *text*, which may be a string, an
        embedded child widget, or an image."""
        self.textw.delete('1.0','end')
        if not text: return
        if isinstance(text,basestring):
            self.textw.insert('1.0',text)
            self.formatContent()
            self.setCursorAt(inspos)
        elif hasattr(text, 'nametowidget'):
            # A Tk widget: only embeddable if it is a child of our Text.
            parent = text.winfo_parent()
            wparent = text.nametowidget(parent)
            if wparent == self.textw:
                self.textw.window_create('1.0', window=text, stretch=1)
            pass
        else:
            self.textw.image_create(inspos, image = text)

    def sintaxisConfigure(self, sintaxArray):
        """Install a new rule set (see PYTHONSINTAX for the entry shape),
        removing the tags of the previously active rules first."""
        if self.toColor:
            for elem in self.toColor:
                self.textw.tag_remove(elem[0], '1.0', 'end')
            self.toColor = []
        sintaxArray = sintaxArray or []
        for elem in sintaxArray:
            tagName, tagCnf, tagRegEx, tagConfFlags = elem
            self.textw.tag_configure(tagName, tagCnf)
            self.toColor.append([tagName, re.compile(tagRegEx, tagConfFlags)])

    def selDel(self, event = None):
        # Delete the current selection, if any.
        textw = self.textw
        selRange = self.getSelRange()
        if selRange: textw.delete(*selRange)

    def selPaste(self, event = None):
        # Paste the clipboard over the selection, then re-highlight from
        # the insertion point.
        # NOTE(review): if reading the clipboard raises TclError before
        # ``baseIndex`` is assigned, formatContent raises NameError.
        if not self.editable: return 'break'
        textw = self.textw
        selRange = self.textw.tag_ranges('sel')
        if selRange: self.textw.delete(*selRange)
        try:
            text = textw.selection_get(selection = 'CLIPBOARD')
            baseIndex = textw.index('insert')
            textw.insert('insert', text)
        except tk.TclError:
            pass
        self.formatContent(baseIndex, 'end')
        return 'break'

    def selCopy(self, event = None):
        # Copy the selection to the clipboard; returns the selection range
        # so selCut can reuse it.
        textw = self.textw
        selRange = self.getSelRange()
        if selRange:
            text = textw.get(*selRange)
            textw.clipboard_clear()
            textw.clipboard_append(text)
        return selRange

    def selCut(self, event = None):
        if not self.editable: return 'break'
        textw = self.textw
        selRange = self.selCopy()
        if selRange: textw.delete(*selRange)

    def selAll(self, event = None):
        textw = self.textw
        textw.tag_add('sel', '1.0', 'end')
        return "break"

    def setCustomFont(self, tFamily = "Consolas", tSize = 18):
        self.customFont.configure(family = tFamily, size = tSize)

    def dispPrompt(self):
        # Write the prompt plus any pending auto-indentation.
        self.textw.insert('insert', self.prompt)
        self.textw.insert('insert', self.cellInput)

    def isIndentModeOn(self):
        return len(self.cellInput) > 0

    def setNextIndentation(self,expr):
        """Compute the auto-indent for the line after *expr*: keep its
        leading tabs, plus one more when it ends with a colon."""
        if len(expr):
            nTabs = len(expr) - len(expr.lstrip('\t'))
            if expr[-1] == ':': nTabs += 1
            self.cellInput = nTabs * '\t'
        else:
            self.cellInput = ''

    def keyHandler(self,event):
        """Central key handler: applies edits manually (returning "break"
        to suppress Tk's default insertion) and re-highlights from the
        nearest preceding sintaxTag anchor."""
        navKeys = ['Left', 'Right', 'Up','Down','Next','Prior', 'End', 'Home',
                   'Button-1']
        if not self.editable and event.keysym not in navKeys:
            return 'break'
        textw = event.widget
        selRange = self.getSelRange()
        if event.keysym == 'Return':
            if selRange: textw.delete(*selRange)
            strInst = textw.get('insert linestart', 'insert lineend')
            self.setNextIndentation(strInst)
            textw.insert('insert', '\n')
            self.dispPrompt()
        elif event.keysym == 'BackSpace':
            if not selRange: selRange = ("%s-1c" % tk.INSERT,)
            textw.delete(*selRange)
        elif event.keysym == 'Delete':
            if not selRange: selRange = ("%s" % tk.INSERT,)
            textw.delete(*selRange)
        elif len(event.char) == 1:
            if selRange: textw.delete(*selRange)
            textw.insert('insert', event.char)
        else:
            # Navigation and modifier keys: let Tk handle them.
            return
        prevIndx = "%s-1c" % tk.INSERT
        if textw.tag_names(prevIndx):
            # Re-scan from the previous highlight anchor (twice removed)
            # so a partially-typed token is re-evaluated in context.
            frstIndx = (textw.tag_prevrange('sintaxTag', 'insert') or ('1.0',))[0]
            textw.tag_remove('sintaxTag', frstIndx)
            frstIndx = (textw.tag_prevrange('sintaxTag', 'insert') or ('1.0',))[0]
            self.formatContent(frstIndx, 'end')
        return "break"
class loggerWindow(SintaxEditor):
    """Editor pane variant used to display log output (with a horizontal
    scrollbar so long log lines are not wrapped)."""

    def __init__(self, master):
        SintaxEditor.__init__(self, master, hrzSlider = True)

    def processLog(self, stringIn):
        # Replace the whole buffer with the incoming log text and repaint.
        self.__setContent__(stringIn, 'end')
        self.update()
        pass
class CodeEditor(SintaxEditor):
    # Editor pane pre-configured with Python syntax highlighting for the
    # generated addon code.
    def __init__(self, master, vrtDisc):
        SintaxEditor.__init__(self, master)
        self.kodiThreads = vrtDisc._menuthreads
        self.coder = vrtDisc.getApiGenerator()
        self.sintaxisConfigure(PYTHONSINTAX)
    def initFrameExec(self, refreshFlag=False):
        # Regenerate code for the active thread and load it into the
        # editor as an editable buffer.
        # NOTE(review): refreshFlag is accepted but unused here -- confirm
        # whether callers expect it to force a regeneration.
        actThread = self.kodiThreads.threadDef
        content = self.coder.knothCode(actThread)
        contentDesc = (content, 'genfile', 'addon_module')
        self.setContent(contentDesc, inspos = "0.0", isEditable = True)
| pybquillast/xkAddonIDE | SintaxEditor.py | Python | gpl-3.0 | 14,093 |
#!/usr/bin/python
# Minimal PySide smoke test: a bare Qt application showing one button.
# Import PySide classes
import sys
from PySide.QtGui import *
# Create a Qt application
app = QApplication(sys.argv)
# Create a Button and show it
button = QPushButton("Hello World")
button.show()
# Enter Qt application main loop
sys.exit(app.exec_())
| madoodia/codeLab | pyside/first_Pyside.py | Python | mit | 272 |
"""
raven.conf.defaults
~~~~~~~~~~~~~~~~~~~
Represents the default values for all Sentry settings.
:copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import os
import os.path
import socket
ROOT = os.path.normpath(os.path.join(os.path.dirname(__file__), os.pardir))
# Allow local testing of Sentry even if DEBUG is enabled
DEBUG = False
# This should be the full URL to sentries store view
SERVERS = None
TIMEOUT = 5
# TODO: this is specific to Django
CLIENT = 'raven.contrib.django.DjangoClient'
NAME = socket.gethostname()
# Superuser key -- will be used if set, otherwise defers to
# SECRET_KEY and PUBLIC_KEY
KEY = None
# Credentials to authenticate with the Sentry server
SECRET_KEY = None
PUBLIC_KEY = None
# We allow setting the site name either by explicitly setting it with the
# SENTRY_SITE setting, or using the django.contrib.sites framework for
# fetching the current site. Since we can't reliably query the database
# from this module, the specific logic is within the SiteFilter
SITE = None
# Extending this allow you to ignore module prefixes when we attempt to
# discover which function an error comes from (typically a view)
EXCLUDE_PATHS = []
# By default Sentry only looks at modules in INSTALLED_APPS for drilling down
# where an exception is located
INCLUDE_PATHS = []
# The maximum number of elements to store for a list-like structure.
MAX_LENGTH_LIST = 50
# The maximum length to store of a string-like structure.
MAX_LENGTH_STRING = 400
# Automatically log frame stacks from all ``logging`` messages.
AUTO_LOG_STACKS = False
# Client-side data processors to apply
PROCESSORS = (
'raven.processors.SanitizePasswordsProcessor',
)
# Default Project ID
PROJECT = 1
| drayanaindra/inasafe | third_party/raven/conf/defaults.py | Python | gpl-3.0 | 1,778 |
# -*- coding: utf-8 -*-
"""Packaging script for gitplot."""
from setuptools import setup, find_packages

with open('README.rst') as f:
    readme = f.read()

# Use a distinct name so we do not shadow the built-in ``license``.
with open('LICENSE') as f:
    license_text = f.read()

setup(
    name='gitplot',
    version='0.1.0',
    description='git repo graphical plotter',
    long_description=readme,
    author='Robert Cronk',
    author_email='cronk.r@gmail.com',
    url='https://github.com/rcronk/gitplot',
    license=license_text,
    packages=find_packages(exclude=('tests', 'docs'))
)
| rcronk/gitplot | setup.py | Python | gpl-3.0 | 485 |
import webapp2
from google.appengine.api import taskqueue
from util import (datetime_now, parse_timestamp, domain_from_url,
datetuple_to_string)
from cleaner import get_feeditem_model
from models import (FeedModel, FeedItemModel,
FeedModelKey, FeedItemKey)
from storage import (get_feed, get_feedurls)
import feedparser as fp
# Don't remove embedded videos
# (feedparser's HTML sanitizer strips these tags by default; re-allowing
# them keeps players working, at the cost of weaker HTML sanitization).
fp._HTMLSanitizer.acceptable_elements = \
    set(list(fp._HTMLSanitizer.acceptable_elements) + ['object',
                                                       'embed',
                                                       'iframe'])
class Cacher(webapp2.RequestHandler):
    """Task handler that downloads RSS feeds and caches their items."""
    def printpage(self, msg):
        # Render a plain-text response body.
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.write(str(msg))
    def get(self):
        """Get is called by cron, which caches all feeds."""
        # NOTE(review): _fetch_all_feeds is not defined anywhere in this
        # module or class -- this path looks like it raises
        # AttributeError; confirm against the rest of the project.
        feeds = self._fetch_all_feeds()
        if feeds is None:
            print("No feeds")
            self.printpage("No feeds")
            return
        for feed in feeds:
            self._cache_feed(feed.link, feed.etag, feed.modified)
        self.printpage("Caching done.")
    def post(self):
        """Post is used when new feeds are added."""
        print("In cache post")
        # If no urls specified, fetch all
        etag, modified = None, None
        url = self.request.get("url")
        if url is None or len(url) == 0:
            urls = get_feedurls(distinct=True)
        else:
            urls = [url]
        # Fetch RSS items
        for url in urls:
            # Make sure it exists in feeds list
            feed = get_feed(url)
            # cache it if it exists in database
            if feed is not None:
                self._cache_feed(url, feed.etag, feed.modified)
    def _cache_feed(self, url, etag=None, modified=None, tag=None):
        '''
        Caches a feed to the database.
        url can be either a string or a feedmodel.
        '''
        # etag/modified enable HTTP conditional fetches so unchanged
        # feeds can be skipped by the server; ``tag`` is currently unused.
        if not "://" in url:
            url = "http://" + url
        # Parse the result
        rss = fp.parse(url, etag=etag, modified=modified)
        #if "etag" in rss:
        #    print("etag:", rss.etag)
        #if "modified" in rss:
        #    print("modified:", rss.modified)
        f = rss.feed
        # If feed does not have title, which is a required attribute,
        # we skip it.
        if not hasattr(f, "title"):
            try:
                print(rss.debug_message)
            except:
                pass
            return
        # If no items, move on
        if len(rss.entries) == 0:
            print("No new items, ignoring {}".format(f.title))
            return
        # Update feed last
        timestamp = datetime_now()
        # Get individual items
        any_new_items = False
        for item in rss.entries:
            feeditem = get_feeditem_model(url, timestamp, item)
            # Ignores existing items
            if feeditem is not None:
                any_new_items = True
                feeditem.put()
            #else:
            #    print("Ignoring existing feeditem")
        # Only update feed if any new items were retrieved
        if any_new_items:
            feeds = FeedModel.query((FeedModel.link == url))
            for feed in feeds:
                # Update fields
                feed.timestamp = timestamp
                feed.description = f.get("description", "")
                # Don't override user's own title
                #feed.title = f.title
                feed.published = datetuple_to_string(f.get("published_parsed", None))
                feed.etag = rss.get("etag", None)
                feed.modified = rss.get("modified", None)
                # Save
                print("Cached:", feed.title)
                # TODO use put_multi or such
                feed.put()
application = webapp2.WSGIApplication([('/tasks/cache', Cacher)], debug=True)
| taimur97/Feeder | server/appengine/tasks.py | Python | gpl-2.0 | 4,156 |
from location import build_location
def test_build_location_simple():
    """Smoke-test build_location: the generated class stores its fields."""
    location_cls = build_location()
    loc = location_cls("Canada", "Charlottetown")
    assert loc.country == "Canada"
    assert loc.city == "Charlottetown"
import os
import socket
import logging
import commands
from consts import CHUNKSIZE, CUTOCS_READ, CSTOCU_READ_DATA, CSTOCU_READ_STATUS
from utils import uint64, pack, unpack
logger = logging.getLogger(__name__)
mfsdirs = []
def _scan():
    """Locate mfshdd.cfg and collect the chunk directories it lists.

    The chunkserver install prefix is derived from the running
    ``mfschunkserver`` process; every existing data path found in any
    candidate config file is appended to the module-level ``mfsdirs``.
    """
    cmd = """ps -eo cmd| grep mfschunkserver | grep -v grep |
        head -1 | cut -d ' ' -f1 | xargs dirname | sed 's#sbin##g'"""
    mfs_prefix = commands.getoutput(cmd)
    mfs_cfg = '%s/etc/mfshdd.cfg' % mfs_prefix
    mfs_cfg_list = (mfs_cfg, '/etc/mfs/mfshdd.cfg',
                    '/etc/mfshdd.cfg', '/usr/local/etc/mfshdd.cfg')
    for conf in mfs_cfg_list:
        if not os.path.exists(conf):
            continue
        # ``with`` guarantees the config file is closed even if a
        # filesystem error is raised mid-parse (the original leaked the
        # handle in that case).
        with open(conf) as f:
            for line in f:
                path = line.strip('#* \n')
                if os.path.exists(path):
                    mfsdirs.append(path)
# Populate mfsdirs once at import time.
_scan()
# Bytes of fixed header that precede the payload in an .mfs chunk file.
CHUNKHDRSIZE = 1024 * 5
def read_chunk_from_local(chunkid, version, size, offset=0):
    """Yield chunk data read from a local chunkserver directory.

    Looks for chunk_<id>_<version>.mfs under each dir in ``mfsdirs`` and
    streams ``size`` bytes starting at payload ``offset`` (i.e. past the
    CHUNKHDRSIZE header) in pieces of at most 640KB.  Yields nothing when
    the chunk file is missing or truncated.
    """
    if offset + size > CHUNKSIZE:
        raise ValueError("size too large %s > %s" %
                         (size, CHUNKSIZE-offset))
    from dpark.accumulator import LocalReadBytes
    name = '%02X/chunk_%016X_%08X.mfs' % (chunkid & 0xFF, chunkid, version)
    for d in mfsdirs:
        p = os.path.join(d, name)
        if os.path.exists(p):
            if os.path.getsize(p) < CHUNKHDRSIZE + offset + size:
                logger.error('%s is not completed: %d < %d', name,
                             os.path.getsize(p), CHUNKHDRSIZE + offset + size)
                return
            # Open in binary mode (chunk payload is raw bytes; text mode
            # would corrupt data on some platforms) and use a context
            # manager so the handle is closed even when the consumer
            # abandons the generator mid-stream.
            with open(p, 'rb') as f:
                f.seek(CHUNKHDRSIZE + offset)
                while size > 0:
                    to_read = min(size, 640*1024)
                    data = f.read(to_read)
                    if not data:
                        return
                    LocalReadBytes.add(len(data))
                    yield data
                    size -= len(data)
            return
    else:
        logger.warning("%s was not found", name)
def read_chunk(host, port, chunkid, version, size, offset=0):
    """Stream ``size`` bytes of a chunk from a remote chunkserver.

    Opens a TCP connection to host:port, sends a CUTOCS_READ request and
    yields data blocks as they arrive.  Raises IOError on short socket
    reads/writes and Exception on protocol violations.
    """
    if offset + size > CHUNKSIZE:
        raise ValueError("size too large %s > %s" %
                         (size, CHUNKSIZE-offset))
    from dpark.accumulator import RemoteReadBytes
    conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    conn.settimeout(10)
    conn.connect((host, port))
    msg = pack(CUTOCS_READ, uint64(chunkid), version, offset, size)
    # send() may be partial: keep sending until the whole request is out.
    n = conn.send(msg)
    while n < len(msg):
        if not n:
            raise IOError("write failed")
        msg = msg[n:]
        n = conn.send(msg)
    def recv(n):
        # Read exactly n bytes from the socket or raise.
        d = conn.recv(n)
        while len(d) < n:
            nd = conn.recv(n-len(d))
            if not nd:
                raise IOError("not enough data")
            d += nd
        return d
    # Messages alternate between READ_DATA (payload blocks) and a final
    # READ_STATUS; each READ_DATA covers at most one 64KB block.
    while size > 0:
        cmd, l = unpack("II", recv(8))
        if cmd == CSTOCU_READ_STATUS:
            if l != 9:
                raise Exception("readblock: READ_STATUS incorrect message size")
            cid, code = unpack("QB", recv(l))
            if cid != chunkid:
                raise Exception("readblock; READ_STATUS incorrect chunkid")
            conn.close()
            return
        elif cmd == CSTOCU_READ_DATA:
            if l < 20 :
                raise Exception("readblock; READ_DATA incorrect message size")
            cid, bid, boff, bsize, crc = unpack("QHHII", recv(20))
            if cid != chunkid:
                raise Exception("readblock; READ_STATUS incorrect chunkid")
            if l != 20 + bsize:
                raise Exception("readblock; READ_DATA incorrect message size ")
            if bsize == 0 : # FIXME
                raise Exception("readblock; empty block")
                #yield ""
                #continue
            # Block number / offset are derived from the running offset
            # (64KB blocks); the server must agree with our bookkeeping.
            if bid != offset >> 16:
                raise Exception("readblock; READ_DATA incorrect block number")
            if boff != offset & 0xFFFF:
                raise Exception("readblock; READ_DATA incorrect block offset")
            breq = 65536 - boff
            if size < breq:
                breq = size
            if bsize != breq:
                raise Exception("readblock; READ_DATA incorrect block size")
            while breq > 0:
                data = conn.recv(breq)
                if not data:
                    #print chunkid, version, offset, size, bsize, breq
                    raise IOError("unexpected ending: need %d" % breq)
                RemoteReadBytes.add(len(data))
                yield data
                breq -= len(data)
            offset += bsize
            size -= bsize
        else:
            raise Exception("readblock; unknown message: %s" % cmd)
    conn.close()
def test():
    # Ad-hoc manual check against a live chunkserver (Python 2 print
    # syntax); not runnable without that specific host.
    d = list(read_chunk('192.168.11.3', 9422, 6544760, 1, 6, 0))
    print len(d), sum(len(s) for s in d)
    d = list(read_chunk('192.168.11.3', 9422, 6544936, 1, 46039893, 0))
    print len(d), sum(len(s) for s in d)
if __name__ == '__main__':
    test()
| fe11x/dpark | dpark/moosefs/cs.py | Python | bsd-3-clause | 4,952 |
from os import environ as env
import json
import sys
sys.path.append('k5lib')  # make the local k5lib package importable
import k5lib
# Create a log file
k5lib.create_logfile('list_ports.log')
# Credentials and region come from the standard OpenStack OS_*
# environment variables (e.g. sourced from an openrc file).
username = env['OS_USERNAME']
password = env['OS_PASSWORD']
domain = env['OS_USER_DOMAIN_NAME']
projectName = env['OS_PROJECT_NAME']
region = env['OS_REGION_NAME']
# Scope a token to the project, list its ports, pretty-print as JSON.
projectToken = k5lib.get_project_token(username, password, domain, projectName, region)
ports = k5lib.list_ports(projectToken, region)
print(json.dumps(ports, indent=2))
| k5ninjacom/K5lib | examples/list_ports.py | Python | gpl-3.0 | 488 |
from django.contrib import admin
from django.contrib.admin import SimpleListFilter
from django.utils.translation import ugettext_lazy as _
from services.models import Service, ServiceType, Alias
# See
# <http://docs.djangoproject.com/en/dev/ref/contrib/admin/#django.contrib.admin.ModelAdmin.list_filter>
# for documentation
class StartsWithListFilter(SimpleListFilter):
    """Admin sidebar filter narrowing objects by the first two letters
    of their name."""
    title = _('Starts with')
    parameter_name = 'starts_with'

    def lookups(self, request, model_admin):
        # Slicing is safe for short names: s[:2] == s when len(s) < 2,
        # so no helper function is needed.
        seen = {unicode(alias.name)[:2]
                for alias in model_admin.model.objects.only('name')}
        return [(prefix, prefix) for prefix in sorted(seen)]

    def queryset(self, request, queryset):
        value = self.value()
        if value:
            return queryset.filter(name__istartswith=value)
        return queryset
class AliasAdmin(admin.ModelAdmin):
    # Alias change list gets the starts-with sidebar filter.
    list_filter = (StartsWithListFilter,)
# Register models with the default admin site; Alias uses the custom admin.
admin.site.register(Service)
admin.site.register(ServiceType)
admin.site.register(Alias, AliasAdmin)
| joneskoo/sikteeri | services/admin.py | Python | mit | 1,213 |
import datetime
import sys
import subprocess
import argparse
import calc_time
months = ["Unknown",
"January",
"Febuary",
"March",
"April",
"May",
"June",
"July",
"August",
"September",
"October",
"November",
"December"]
def get_file_name():
    """Return today's log filename, e.g. "January_5_2017.log"."""
    today = datetime.datetime.now()
    return "%s_%s_%s.log" % (months[today.month], today.day, today.year)
def add_entry(start, end, label, verbose):
    # Append "<start> -- <end> | <label>" to today's log file; optionally
    # echo the whole log afterwards.
    filename = get_file_name()
    with open(filename, "a") as fp:
        fp.write(str(start) + " -- " + str(end) + " | " + label + "\n")
    if verbose: show_log()
def add_class(start, end, label, verbose):
    # Class time is distinguished by a "CLASS_" label prefix.
    add_entry(start, end, "CLASS_" + label, verbose)
def add_independent(start, end, label, verbose):
    # Independent work is logged under the plain label.
    add_entry(start, end, label, verbose)
def add_wasted(start, end, verbose):
    # Wasted time uses the fixed "WASTED" label.
    add_entry(start, end, "WASTED", verbose)
def delete_last():
    """Drop the most recent entry (final line) from today's log file."""
    path = get_file_name()
    with open(path, "r") as fp:
        entries = fp.readlines()
    with open(path, "w") as fp:
        fp.writelines(entries[:-1])
def show_log():
    # Print today's log followed by the independent-work total in hours
    # (Python 2 print statements; calc_time parses the same log format).
    filename = get_file_name()
    with open(filename, "r") as fp:
        print fp.read()
    print "=" * 10
    print "Independent Work", round(calc_time.independent_time(filename), 2), "hours."
if __name__ == '__main__':
    # Command-line front end: records a time-log entry (or deletes the
    # most recent one) in today's log file.
    parser = argparse.ArgumentParser(description = 'add entry to daily log')
    group1 = parser.add_argument_group()
    group1.add_argument('-X', '--XLast', action='store_true', help='Remove last item')
    group1.add_argument('start_time', metavar = 'T1', nargs='?', type=str, help = 'start time of the task')
    group1.add_argument('end_time', metavar = 'T2', nargs='?', type=str, help = 'end time of the task')
    group1.add_argument('label', metavar = 'N', nargs='?', type=str, help = 'name of task')
    group1.add_argument('-v', '--verbose', action='store_true', help='print out daily log')
    group1.add_argument('-p', '--printout', action='store_true', help='print daily log without adding entry')
    group11 = group1.add_mutually_exclusive_group()
    group11.add_argument('-C', '--Class', action='store_true', help='Add a timestamp for a class attended')
    group11.add_argument('-W', '--Wasted', action='store_true', help='Add a timestamp for time wasted')
    group2 = parser.add_argument_group()
    group2.add_argument('-D', '--Deletelast', action='store_true', help='Delete most recent entry')
    args = parser.parse_args()
    # NOTE(review): -X/--XLast and -p/--printout are parsed but never
    # acted on below -- confirm whether they should be handled.
    if args.Deletelast:
        delete_last()
    elif args.Class:
        add_class(args.start_time, args.end_time, args.label, args.verbose)
    elif args.Wasted:
        add_wasted(args.start_time, args.end_time, args.verbose)
    else:
add_independent(args.start_time, args.end_time, args.label, args.verbose) | intermediate-hacker/tprod | add_entry.py | Python | gpl-3.0 | 2,851 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from unittest import mock
from airflow.providers.google.cloud.transfers.gcs_to_bigquery import GCSToBigQueryOperator
# Shared fixtures for the operator under test.
TASK_ID = 'test-gcs-to-bq-operator'
TEST_EXPLICIT_DEST = 'test-project.dataset.table'
TEST_BUCKET = 'test-bucket'
MAX_ID_KEY = 'id'
TEST_SOURCE_OBJECTS = ['test/objects/*']
LABELS = {'k1': 'v1'}
DESCRIPTION = "Test Description"
class TestGoogleCloudStorageToBigQueryOperator(unittest.TestCase):
    """Unit tests for GCSToBigQueryOperator with a mocked BigQueryHook."""
    @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_bigquery.BigQueryHook')
    def test_execute_explicit_project_legacy(self, bq_hook):
        """max_id_key lookup uses [bracket] quoting under legacy SQL."""
        operator = GCSToBigQueryOperator(
            task_id=TASK_ID,
            bucket=TEST_BUCKET,
            source_objects=TEST_SOURCE_OBJECTS,
            destination_project_dataset_table=TEST_EXPLICIT_DEST,
            max_id_key=MAX_ID_KEY,
        )
        # using legacy SQL
        bq_hook.return_value.get_conn.return_value.cursor.return_value.use_legacy_sql = True
        operator.execute(None)
        bq_hook.return_value.get_conn.return_value.cursor.return_value.execute.assert_called_once_with(
            "SELECT MAX(id) FROM [test-project.dataset.table]"
        )
    @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_bigquery.BigQueryHook')
    def test_execute_explicit_project(self, bq_hook):
        """max_id_key lookup uses `backtick` quoting under standard SQL."""
        operator = GCSToBigQueryOperator(
            task_id=TASK_ID,
            bucket=TEST_BUCKET,
            source_objects=TEST_SOURCE_OBJECTS,
            destination_project_dataset_table=TEST_EXPLICIT_DEST,
            max_id_key=MAX_ID_KEY,
        )
        # using non-legacy SQL
        bq_hook.return_value.get_conn.return_value.cursor.return_value.use_legacy_sql = False
        operator.execute(None)
        bq_hook.return_value.get_conn.return_value.cursor.return_value.execute.assert_called_once_with(
            "SELECT MAX(id) FROM `test-project.dataset.table`"
        )
    @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_bigquery.BigQueryHook')
    def test_labels(self, bq_hook):
        """labels= is forwarded to run_load for a native-table load."""
        operator = GCSToBigQueryOperator(
            task_id=TASK_ID,
            bucket=TEST_BUCKET,
            source_objects=TEST_SOURCE_OBJECTS,
            destination_project_dataset_table=TEST_EXPLICIT_DEST,
            labels=LABELS,
        )
        operator.execute(None)
        bq_hook.return_value.get_conn.return_value.cursor.return_value.run_load.assert_called_once_with(
            destination_project_dataset_table=mock.ANY,
            schema_fields=mock.ANY,
            source_uris=mock.ANY,
            source_format=mock.ANY,
            autodetect=mock.ANY,
            create_disposition=mock.ANY,
            skip_leading_rows=mock.ANY,
            write_disposition=mock.ANY,
            field_delimiter=mock.ANY,
            max_bad_records=mock.ANY,
            quote_character=mock.ANY,
            ignore_unknown_values=mock.ANY,
            allow_quoted_newlines=mock.ANY,
            allow_jagged_rows=mock.ANY,
            encoding=mock.ANY,
            schema_update_options=mock.ANY,
            src_fmt_configs=mock.ANY,
            time_partitioning=mock.ANY,
            cluster_fields=mock.ANY,
            encryption_configuration=mock.ANY,
            labels=LABELS,
            description=mock.ANY,
        )
    @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_bigquery.BigQueryHook')
    def test_description(self, bq_hook):
        """description= is forwarded to run_load for a native-table load."""
        operator = GCSToBigQueryOperator(
            task_id=TASK_ID,
            bucket=TEST_BUCKET,
            source_objects=TEST_SOURCE_OBJECTS,
            destination_project_dataset_table=TEST_EXPLICIT_DEST,
            description=DESCRIPTION,
        )
        operator.execute(None)
        bq_hook.return_value.get_conn.return_value.cursor.return_value.run_load.assert_called_once_with(
            destination_project_dataset_table=mock.ANY,
            schema_fields=mock.ANY,
            source_uris=mock.ANY,
            source_format=mock.ANY,
            autodetect=mock.ANY,
            create_disposition=mock.ANY,
            skip_leading_rows=mock.ANY,
            write_disposition=mock.ANY,
            field_delimiter=mock.ANY,
            max_bad_records=mock.ANY,
            quote_character=mock.ANY,
            ignore_unknown_values=mock.ANY,
            allow_quoted_newlines=mock.ANY,
            allow_jagged_rows=mock.ANY,
            encoding=mock.ANY,
            schema_update_options=mock.ANY,
            src_fmt_configs=mock.ANY,
            time_partitioning=mock.ANY,
            cluster_fields=mock.ANY,
            encryption_configuration=mock.ANY,
            labels=mock.ANY,
            description=DESCRIPTION,
        )
    @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_bigquery.BigQueryHook')
    def test_labels_external_table(self, bq_hook):
        """labels= is forwarded to create_external_table."""
        operator = GCSToBigQueryOperator(
            task_id=TASK_ID,
            bucket=TEST_BUCKET,
            source_objects=TEST_SOURCE_OBJECTS,
            destination_project_dataset_table=TEST_EXPLICIT_DEST,
            labels=LABELS,
            external_table=True,
        )
        operator.execute(None)
        # fmt: off
        bq_hook.return_value.get_conn.return_value.cursor.return_value.create_external_table. \
            assert_called_once_with(
                external_project_dataset_table=mock.ANY,
                schema_fields=mock.ANY,
                source_uris=mock.ANY,
                source_format=mock.ANY,
                compression=mock.ANY,
                skip_leading_rows=mock.ANY,
                field_delimiter=mock.ANY,
                max_bad_records=mock.ANY,
                quote_character=mock.ANY,
                ignore_unknown_values=mock.ANY,
                allow_quoted_newlines=mock.ANY,
                allow_jagged_rows=mock.ANY,
                encoding=mock.ANY,
                src_fmt_configs=mock.ANY,
                encryption_configuration=mock.ANY,
                labels=LABELS,
                description=mock.ANY,
            )
        # fmt: on
    @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_bigquery.BigQueryHook')
    def test_description_external_table(self, bq_hook):
        """description= is forwarded to create_external_table."""
        operator = GCSToBigQueryOperator(
            task_id=TASK_ID,
            bucket=TEST_BUCKET,
            source_objects=TEST_SOURCE_OBJECTS,
            destination_project_dataset_table=TEST_EXPLICIT_DEST,
            description=DESCRIPTION,
            external_table=True,
        )
        operator.execute(None)
        # fmt: off
        bq_hook.return_value.get_conn.return_value.cursor.return_value.create_external_table. \
            assert_called_once_with(
                external_project_dataset_table=mock.ANY,
                schema_fields=mock.ANY,
                source_uris=mock.ANY,
                source_format=mock.ANY,
                compression=mock.ANY,
                skip_leading_rows=mock.ANY,
                field_delimiter=mock.ANY,
                max_bad_records=mock.ANY,
                quote_character=mock.ANY,
                ignore_unknown_values=mock.ANY,
                allow_quoted_newlines=mock.ANY,
                allow_jagged_rows=mock.ANY,
                encoding=mock.ANY,
                src_fmt_configs=mock.ANY,
                encryption_configuration=mock.ANY,
                labels=mock.ANY,
                description=DESCRIPTION,
            )
        # fmt: on
| nathanielvarona/airflow | tests/providers/google/cloud/transfers/test_gcs_to_bigquery.py | Python | apache-2.0 | 8,245 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.