repo_name
stringlengths
5
100
path
stringlengths
4
294
copies
stringclasses
990 values
size
stringlengths
4
7
content
stringlengths
666
1M
license
stringclasses
15 values
andybondar/CloudFerry
tests/cloudferrylib/utils/test_file_utils.py
1
1874
# Copyright 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import mock

from cloudferrylib.utils import files
from tests import test


class RemoteSymlinkTestCase(test.TestCase):
    """Verifies RemoteSymlink creates the link and removes it on exit."""

    def test_symlink_is_removed_on_scope_exit(self):
        remote_runner = mock.Mock()
        link_target = "/tmp/filename"
        link_name = "_symlink"

        with files.RemoteSymlink(remote_runner, link_target, link_name):
            pass

        expected_create = "ln --symbolic %s %s" % (link_target, link_name)
        expected_remove = "rm -f %s" % link_name
        # Creation goes through run(); cleanup must tolerate failures.
        remote_runner.run.assert_called_once_with(expected_create)
        remote_runner.run_ignoring_errors.assert_called_once_with(
            expected_remove)


class RemoteTempFileTestCase(test.TestCase):
    """Verifies RemoteTempFile deletes its backing file on exit."""

    def test_temp_file_is_deleted_on_scope_exit(self):
        remote_runner = mock.Mock()
        name = 'file'
        body = 'contents'

        with files.RemoteTempFile(remote_runner, name, body):
            pass

        expected_remove = "rm -f /tmp/{}".format(name)
        remote_runner.run_ignoring_errors.assert_called_once_with(
            expected_remove)


class RemoteDirTestCase(test.TestCase):
    """Verifies RemoteDir recursively deletes the directory on exit."""

    def test_temp_dir_is_deleted_on_scope_exit(self):
        remote_runner = mock.Mock()
        path = 'dir'

        with files.RemoteDir(remote_runner, path):
            pass

        expected_remove = "rm -rf {}".format(path)
        remote_runner.run_ignoring_errors.assert_called_once_with(
            expected_remove)
apache-2.0
lorimccurry/python_koans
python3/koans/about_true_and_false.py
13
1506
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from runner.koan import *


class AboutTrueAndFalse(Koan):
    """Koan exercises showing which Python values are truthy and falsy."""

    def truth_value(self, condition):
        # Collapse any condition down to one of two marker strings.
        return 'true stuff' if condition else 'false stuff'

    def test_true_is_treated_as_true(self):
        self.assertEqual('true stuff', self.truth_value(True))

    def test_false_is_treated_as_false(self):
        self.assertEqual('false stuff', self.truth_value(False))

    def test_none_is_treated_as_false(self):
        self.assertEqual('false stuff', self.truth_value(None))

    def test_zero_is_treated_as_false(self):
        self.assertEqual('false stuff', self.truth_value(0))

    def test_empty_collections_are_treated_as_false(self):
        # Every flavor of empty container is falsy.
        for empty in ([], (), {}, set()):
            self.assertEqual('false stuff', self.truth_value(empty))

    def test_blank_strings_are_treated_as_false(self):
        self.assertEqual('false stuff', self.truth_value(""))

    def test_everything_else_is_treated_as_true(self):
        self.assertEqual('true stuff', self.truth_value(1))
        self.assertEqual('true stuff',
                         self.truth_value("Python is named after Monty Python"))
        self.assertEqual('true stuff', self.truth_value(' '))
        self.assertEqual('true stuff', self.truth_value('0'))
mit
lcrees/tabola
tabola/core.py
1
6504
'''tabola'''

from tabola.formats import FORMATS as formats


class InvalidDatasetType(Exception):
    '''Only Datasets can be added to a DataBook.'''


class InvalidDimensions(Exception):
    '''Invalid size.'''


class UnsupportedFormat(NotImplementedError):
    '''Format is not supported'''


class Dataset(object):
    '''Tabular - Dataset object.

    An ordered collection of equal-width rows (stored as tuples) with
    optional column headers and a title.
    '''

    def __init__(self, *args, **kw):
        # Each positional argument is one row.
        self._data = list(args)
        self.headers = kw.get('headers', tuple())
        self.title = kw.get('title')
        self._register_formats()

    def __len__(self):
        return self.height

    def __getitem__(self, key):
        # String keys select a whole column by header name; anything else
        # (int/slice) indexes rows directly.
        if isinstance(key, basestring):
            if key in self._headers:
                # get 'key' index from each data
                pos = self._headers.index(key)
                return list(r[pos] for r in self._data)
            else:
                raise KeyError
        else:
            return self._data[key]

    def __setitem__(self, key, value):
        self._validate_row(value)
        self._data[key] = tuple(value)

    def __delitem__(self, key):
        del self._data[key]

    def __iter__(self):
        return self._data.__iter__()

    def _package(self, dicts=True):
        '''Packages Dataset into lists of dictionaries for transmission.'''
        if self.headers:
            if dicts:
                data = list(dict(zip(self.headers, r)) for r in self._data)
            else:
                data = [list(self.headers)] + list(self._data)
        else:
            data = list(list(r) for r in self._data)
        return data

    @classmethod
    def _register_formats(cls):
        '''Adds format properties (e.g. .csv, .json) for each known format.'''
        for f in formats:
            try:
                try:
                    setattr(cls, f.title,
                            property(f.export_set, f.import_set))
                except AttributeError:
                    # Export-only format: no import_set available.
                    setattr(cls, f.title, property(f.export_set))
            except AttributeError:
                pass

    def _get_headers(self):
        '''Headers property.'''
        return self._headers

    def _set_headers(self, collection):
        '''Validating headers setter.'''
        if self._validate_headers(collection):
            try:
                self._headers = list(collection)
            except TypeError:
                self._headers = []
        else:
            self._headers = []

    def _del_headers(self):
        self._headers = None

    headers = property(_get_headers, _set_headers, _del_headers)

    def _get_dict(self):
        '''Returns python dict of Dataset.'''
        return self._package()

    def _set_dict(self, rows):
        # Accepts either a list of row-lists or a list of dicts (whose keys
        # become the headers). Anything else is rejected.
        if not len(rows):
            return None
        if isinstance(rows[0], list):
            self.clear()
            for row in rows:
                self.append_row(row)
        elif isinstance(rows[0], dict):
            self.clear()
            self.headers = rows[0].keys()
            for row in rows:
                self.append_row(row.values())
        else:
            raise UnsupportedFormat

    dict = property(_get_dict, _set_dict)

    def _validate_column(self, col):
        '''Raise InvalidDimensions unless col fits the current height.'''
        if self.headers:
            # First element of the column is its header.
            is_valid = (len(col) - 1) == self.height
        else:
            is_valid = len(col) == self.height if self.height else True
        if is_valid:
            return True
        raise InvalidDimensions()

    def _validate_headers(self, header):
        # NOTE(review): 'header' is unused; this only checks that all
        # existing rows agree with the current width. Preserved as-is.
        return all((len(x) == self.width for x in self._data))

    def _validate_row(self, row):
        '''Raise InvalidDimensions unless row matches the current width.'''
        # Parses as: (len(row) == self.width) if self.width else True.
        if len(row) == self.width if self.width else True:
            return True
        raise InvalidDimensions()

    @property
    def height(self):
        '''Returns the height of the Dataset.'''
        return len(self._data)

    @property
    def width(self):
        '''Returns the width of the Dataset.'''
        try:
            return len(self._data[0])
        except IndexError:
            # No rows yet: fall back to header count (0 if headers is None).
            try:
                return len(self.headers)
            except TypeError:
                return 0

    def append_column(self, column):
        '''Appends a column; first element is its header when headers exist.'''
        if self._validate_column(column):
            if self.headers:
                # pop the first item off and add to headers
                self.headers.append(column[0])
                column = column[1:]
            if self.height and self.width:
                for i, row in enumerate(self._data):
                    _row = list(row)
                    _row.append(column[i])
                    self._data[i] = tuple(_row)
            else:
                # Empty dataset: the column becomes one single-value row each.
                self._data = [tuple([r]) for r in column]

    def append_row(self, row):
        '''Adds a row to the end of Dataset'''
        if self._validate_row(row):
            self._data.append(tuple(row))

    def clear(self):
        '''Erases all data from Dataset.'''
        self._headers = None
        self._data = list()

    def get_row_dict(self, key):
        '''Returns row at key as a header->value dict; needs headers.'''
        if self.headers:
            return dict(zip(self.headers, self.__getitem__(key)))
        raise UnsupportedFormat()

    def insert_row(self, i, row):
        '''Inserts a row at given position in Dataset'''
        # BUG FIX: the original called self._validate(row), a method that
        # does not exist, so every insert raised AttributeError. The intended
        # check is _validate_row, matching append_row/__setitem__.
        if self._validate_row(row):
            self._data.insert(i, tuple(row))

    def iter_row_dicts(self):
        '''Returns all rows as header->value dicts; needs headers.'''
        if self.headers:
            return self._package()
        raise UnsupportedFormat()


class Databook(object):
    '''A book of Dataset objects.'''

    def __init__(self, sets=None):
        # Avoid a shared mutable default for the dataset list.
        if sets is None:
            sets = []
        self._datasets = sets
        self._register_formats()

    def __len__(self):
        '''The number of Datasets within Databook.'''
        return len(self._datasets)

    def __iter__(self):
        return self._datasets.__iter__()

    def _package(self):
        '''Packages Databook for delivery.'''
        return list(dict(title=d.title, data=d.dict)
                    for d in self._datasets)

    @classmethod
    def _register_formats(cls):
        '''Adds format properties.'''
        for f in formats:
            try:
                try:
                    setattr(cls, f.title,
                            property(f.export_book, f.import_book))
                except AttributeError:
                    # Export-only format: no import_book available.
                    setattr(cls, f.title, property(f.export_book))
            except AttributeError:
                pass

    def append_dataset(self, dataset):
        '''Adds given dataset.'''
        if isinstance(dataset, Dataset):
            self._datasets.append(dataset)
        else:
            raise InvalidDatasetType()

    def clear(self):
        self._datasets = []
mit
ayesandarmoe/microblog_flask_tutorial
flask/lib/python2.7/site-packages/flask_wtf/recaptcha/validators.py
91
2398
try:
    import urllib2 as http
except ImportError:
    # Python 3
    from urllib import request as http

from flask import request, current_app
from wtforms import ValidationError
from werkzeug import url_encode

from .._compat import to_bytes, to_unicode

import json

RECAPTCHA_VERIFY_SERVER = 'https://www.google.com/recaptcha/api/siteverify'
RECAPTCHA_ERROR_CODES = {
    'missing-input-secret': 'The secret parameter is missing.',
    'invalid-input-secret': 'The secret parameter is invalid or malformed.',
    'missing-input-response': 'The response parameter is missing.',
    'invalid-input-response': 'The response parameter is invalid or malformed.'
}

__all__ = ["Recaptcha"]


class Recaptcha(object):
    """Validates a ReCaptcha."""

    def __init__(self, message=None):
        # Fall back to the canonical "response missing" message.
        if message is None:
            message = RECAPTCHA_ERROR_CODES['missing-input-response']
        self.message = message

    def __call__(self, form, field):
        # Validation is short-circuited entirely while testing.
        if current_app.testing:
            return True

        # The widget posts its token either as JSON or as form data.
        payload = request.json if request.json else request.form
        token = payload.get('g-recaptcha-response', '')
        client_ip = request.remote_addr

        if not token:
            raise ValidationError(field.gettext(self.message))

        if self._validate_recaptcha(token, client_ip):
            return

        field.recaptcha_error = 'incorrect-captcha-sol'
        raise ValidationError(field.gettext(self.message))

    def _validate_recaptcha(self, response, remote_addr):
        """Performs the actual validation."""
        try:
            private_key = current_app.config['RECAPTCHA_PRIVATE_KEY']
        except KeyError:
            raise RuntimeError("No RECAPTCHA_PRIVATE_KEY config set")

        body = url_encode({
            'secret': private_key,
            'remoteip': remote_addr,
            'response': response
        })

        reply = http.urlopen(RECAPTCHA_VERIFY_SERVER, to_bytes(body))
        if reply.code != 200:
            return False

        result = json.loads(to_unicode(reply.read()))
        if result["success"]:
            return True

        # Surface any recognized server-side error code to the caller.
        for code in result.get("error-codes", []):
            if code in RECAPTCHA_ERROR_CODES:
                raise ValidationError(RECAPTCHA_ERROR_CODES[code])

        return False
gpl-2.0
qenter/vlc-android
toolchains/arm/lib/python2.7/test/pickletester.py
36
41622
import unittest
import pickle
import cPickle
import StringIO
import cStringIO
import pickletools
import copy_reg

from test.test_support import TestFailed, verbose, have_unicode, TESTFN
try:
    from test.test_support import _2G, _1M, precisionbigmemtest
except ImportError:
    # this import might fail when run on older Python versions by test_xpickle
    _2G = _1M = 0
    # Stand-in decorator factory: decorated tests become no-ops.
    def precisionbigmemtest(*args, **kwargs):
        return lambda self: None

# Tests that try a number of pickle protocols should have a
#     for proto in protocols:
# kind of outer loop.
assert pickle.HIGHEST_PROTOCOL == cPickle.HIGHEST_PROTOCOL == 2
protocols = range(pickle.HIGHEST_PROTOCOL + 1)


# Copy of test.test_support.run_with_locale. This is needed to support Python
# 2.4, which didn't include it. This is all to support test_xpickle, which
# bounces pickled objects through older Python versions to test backwards
# compatibility.
def run_with_locale(catstr, *locales):
    # Decorator factory: runs the wrapped test under the first locale in
    # *locales that can be set for category `catstr`, restoring the
    # original locale afterwards.
    def decorator(func):
        def inner(*args, **kwds):
            try:
                import locale
                category = getattr(locale, catstr)
                orig_locale = locale.setlocale(category)
            except AttributeError:
                # if the test author gives us an invalid category string
                raise
            except:
                # cannot retrieve original locale, so do nothing
                # NOTE: this deliberately shadows the `locale` module name
                # with None so the finally-clause guard below is falsy.
                locale = orig_locale = None
            else:
                # Try each candidate locale until one sticks.
                for loc in locales:
                    try:
                        locale.setlocale(category, loc)
                        break
                    except:
                        pass

            # now run the function, resetting the locale on exceptions
            try:
                return func(*args, **kwds)
            finally:
                if locale and orig_locale:
                    locale.setlocale(category, orig_locale)
        # Preserve the test's name/docstring so unittest reports it properly
        # (Python 2 spelling of __name__).
        inner.func_name = func.func_name
        inner.__doc__ = func.__doc__
        return inner
    return decorator

# Return True if opcode code appears in the pickle, else False.
def opcode_in_pickle(code, pickle):
    # Walk the pickle's opcode stream; `pickle` here is the pickled
    # byte string (shadowing the module name inside this function).
    for op, dummy, dummy in pickletools.genops(pickle):
        if op.code == code:
            return True
    return False

# Return the number of times opcode code appears in pickle.
def count_opcode(code, pickle):
    # Count occurrences of opcode `code` in the pickled byte string
    # (`pickle` shadows the module name inside this function).
    n = 0
    for op, dummy, dummy in pickletools.genops(pickle):
        if op.code == code:
            n += 1
    return n

# We can't very well test the extension registry without putting known stuff
# in it, but we have to be careful to restore its original state.  Code
# should do this:
#
#     e = ExtensionSaver(extension_code)
#     try:
#         fiddle w/ the extension registry's stuff for extension_code
#     finally:
#         e.restore()

class ExtensionSaver:
    # Saves and restores one slot of the copy_reg extension registry so
    # tests can register their own codes without leaking state.

    # Remember current registration for code (if any), and remove it (if
    # there is one).
    def __init__(self, code):
        self.code = code
        if code in copy_reg._inverted_registry:
            self.pair = copy_reg._inverted_registry[code]
            copy_reg.remove_extension(self.pair[0], self.pair[1], code)
        else:
            self.pair = None

    # Restore previous registration for code.
    def restore(self):
        code = self.code
        # Drop whatever a test registered under this code first.
        curpair = copy_reg._inverted_registry.get(code)
        if curpair is not None:
            copy_reg.remove_extension(curpair[0], curpair[1], code)
        # Then re-install the original registration, if there was one.
        pair = self.pair
        if pair is not None:
            copy_reg.add_extension(pair[0], pair[1], code)

class C:
    # Simple old-style class compared by attribute dict (Python 2 __cmp__).
    def __cmp__(self, other):
        return cmp(self.__dict__, other.__dict__)

# Publish C under __main__ so pickles reference a stable module path
# regardless of how this test module was imported.
import __main__
__main__.C = C
C.__module__ = "__main__"

class myint(int):
    # int subclass carrying extra instance state, to exercise pickling of
    # subclassed builtins.
    def __init__(self, x):
        self.str = str(x)

class initarg(C):
    # Exercises the __getinitargs__ pickling protocol.
    def __init__(self, a, b):
        self.a = a
        self.b = b

    def __getinitargs__(self):
        return self.a, self.b

class metaclass(type):
    pass

class use_metaclass(object):
    # Python 2 metaclass declaration syntax.
    __metaclass__ = metaclass

class pickling_metaclass(type):
    # A metaclass whose instances (classes) can themselves be pickled via
    # __reduce__; equality is by reconstruction arguments.
    def __eq__(self, other):
        return (type(self) == type(other) and
                self.reduce_args == other.reduce_args)

    def __reduce__(self):
        return (create_dynamic_class, self.reduce_args)

    # Defining __eq__ without __hash__ makes instances unhashable on
    # purpose here.
    __hash__ = None

def create_dynamic_class(name, bases):
    # Reconstructor used by pickling_metaclass.__reduce__.
    result = pickling_metaclass(name, bases, dict())
    result.reduce_args = (name, bases)
    return result

# DATA0 .. DATA2 are the pickles we expect under the various protocols, for
# the object returned by create_data().
# break into multiple strings to avoid confusing font-lock-mode DATA0 = """(lp1 I0 aL1L aF2 ac__builtin__ complex p2 """ + \ """(F3 F0 tRp3 aI1 aI-1 aI255 aI-255 aI-256 aI65535 aI-65535 aI-65536 aI2147483647 aI-2147483647 aI-2147483648 a""" + \ """(S'abc' p4 g4 """ + \ """(i__main__ C p5 """ + \ """(dp6 S'foo' p7 I1 sS'bar' p8 I2 sbg5 tp9 ag9 aI5 a. """ # Disassembly of DATA0. DATA0_DIS = """\ 0: ( MARK 1: l LIST (MARK at 0) 2: p PUT 1 5: I INT 0 8: a APPEND 9: L LONG 1L 13: a APPEND 14: F FLOAT 2.0 17: a APPEND 18: c GLOBAL '__builtin__ complex' 39: p PUT 2 42: ( MARK 43: F FLOAT 3.0 46: F FLOAT 0.0 49: t TUPLE (MARK at 42) 50: R REDUCE 51: p PUT 3 54: a APPEND 55: I INT 1 58: a APPEND 59: I INT -1 63: a APPEND 64: I INT 255 69: a APPEND 70: I INT -255 76: a APPEND 77: I INT -256 83: a APPEND 84: I INT 65535 91: a APPEND 92: I INT -65535 100: a APPEND 101: I INT -65536 109: a APPEND 110: I INT 2147483647 122: a APPEND 123: I INT -2147483647 136: a APPEND 137: I INT -2147483648 150: a APPEND 151: ( MARK 152: S STRING 'abc' 159: p PUT 4 162: g GET 4 165: ( MARK 166: i INST '__main__ C' (MARK at 165) 178: p PUT 5 181: ( MARK 182: d DICT (MARK at 181) 183: p PUT 6 186: S STRING 'foo' 193: p PUT 7 196: I INT 1 199: s SETITEM 200: S STRING 'bar' 207: p PUT 8 210: I INT 2 213: s SETITEM 214: b BUILD 215: g GET 5 218: t TUPLE (MARK at 151) 219: p PUT 9 222: a APPEND 223: g GET 9 226: a APPEND 227: I INT 5 230: a APPEND 231: . STOP highest protocol among opcodes = 0 """ DATA1 = (']q\x01(K\x00L1L\nG@\x00\x00\x00\x00\x00\x00\x00' 'c__builtin__\ncomplex\nq\x02(G@\x08\x00\x00\x00\x00\x00' '\x00G\x00\x00\x00\x00\x00\x00\x00\x00tRq\x03K\x01J\xff\xff' '\xff\xffK\xffJ\x01\xff\xff\xffJ\x00\xff\xff\xffM\xff\xff' 'J\x01\x00\xff\xffJ\x00\x00\xff\xffJ\xff\xff\xff\x7fJ\x01\x00' '\x00\x80J\x00\x00\x00\x80(U\x03abcq\x04h\x04(c__main__\n' 'C\nq\x05oq\x06}q\x07(U\x03fooq\x08K\x01U\x03barq\tK\x02ubh' '\x06tq\nh\nK\x05e.' ) # Disassembly of DATA1. 
DATA1_DIS = """\ 0: ] EMPTY_LIST 1: q BINPUT 1 3: ( MARK 4: K BININT1 0 6: L LONG 1L 10: G BINFLOAT 2.0 19: c GLOBAL '__builtin__ complex' 40: q BINPUT 2 42: ( MARK 43: G BINFLOAT 3.0 52: G BINFLOAT 0.0 61: t TUPLE (MARK at 42) 62: R REDUCE 63: q BINPUT 3 65: K BININT1 1 67: J BININT -1 72: K BININT1 255 74: J BININT -255 79: J BININT -256 84: M BININT2 65535 87: J BININT -65535 92: J BININT -65536 97: J BININT 2147483647 102: J BININT -2147483647 107: J BININT -2147483648 112: ( MARK 113: U SHORT_BINSTRING 'abc' 118: q BINPUT 4 120: h BINGET 4 122: ( MARK 123: c GLOBAL '__main__ C' 135: q BINPUT 5 137: o OBJ (MARK at 122) 138: q BINPUT 6 140: } EMPTY_DICT 141: q BINPUT 7 143: ( MARK 144: U SHORT_BINSTRING 'foo' 149: q BINPUT 8 151: K BININT1 1 153: U SHORT_BINSTRING 'bar' 158: q BINPUT 9 160: K BININT1 2 162: u SETITEMS (MARK at 143) 163: b BUILD 164: h BINGET 6 166: t TUPLE (MARK at 112) 167: q BINPUT 10 169: h BINGET 10 171: K BININT1 5 173: e APPENDS (MARK at 3) 174: . STOP highest protocol among opcodes = 1 """ DATA2 = ('\x80\x02]q\x01(K\x00\x8a\x01\x01G@\x00\x00\x00\x00\x00\x00\x00' 'c__builtin__\ncomplex\nq\x02G@\x08\x00\x00\x00\x00\x00\x00G\x00' '\x00\x00\x00\x00\x00\x00\x00\x86Rq\x03K\x01J\xff\xff\xff\xffK' '\xffJ\x01\xff\xff\xffJ\x00\xff\xff\xffM\xff\xffJ\x01\x00\xff\xff' 'J\x00\x00\xff\xffJ\xff\xff\xff\x7fJ\x01\x00\x00\x80J\x00\x00\x00' '\x80(U\x03abcq\x04h\x04(c__main__\nC\nq\x05oq\x06}q\x07(U\x03foo' 'q\x08K\x01U\x03barq\tK\x02ubh\x06tq\nh\nK\x05e.') # Disassembly of DATA2. 
DATA2_DIS = """\ 0: \x80 PROTO 2 2: ] EMPTY_LIST 3: q BINPUT 1 5: ( MARK 6: K BININT1 0 8: \x8a LONG1 1L 11: G BINFLOAT 2.0 20: c GLOBAL '__builtin__ complex' 41: q BINPUT 2 43: G BINFLOAT 3.0 52: G BINFLOAT 0.0 61: \x86 TUPLE2 62: R REDUCE 63: q BINPUT 3 65: K BININT1 1 67: J BININT -1 72: K BININT1 255 74: J BININT -255 79: J BININT -256 84: M BININT2 65535 87: J BININT -65535 92: J BININT -65536 97: J BININT 2147483647 102: J BININT -2147483647 107: J BININT -2147483648 112: ( MARK 113: U SHORT_BINSTRING 'abc' 118: q BINPUT 4 120: h BINGET 4 122: ( MARK 123: c GLOBAL '__main__ C' 135: q BINPUT 5 137: o OBJ (MARK at 122) 138: q BINPUT 6 140: } EMPTY_DICT 141: q BINPUT 7 143: ( MARK 144: U SHORT_BINSTRING 'foo' 149: q BINPUT 8 151: K BININT1 1 153: U SHORT_BINSTRING 'bar' 158: q BINPUT 9 160: K BININT1 2 162: u SETITEMS (MARK at 143) 163: b BUILD 164: h BINGET 6 166: t TUPLE (MARK at 112) 167: q BINPUT 10 169: h BINGET 10 171: K BININT1 5 173: e APPENDS (MARK at 5) 174: . STOP highest protocol among opcodes = 2 """ def create_data(): c = C() c.foo = 1 c.bar = 2 x = [0, 1L, 2.0, 3.0+0j] # Append some integer test cases at cPickle.c's internal size # cutoffs. uint1max = 0xff uint2max = 0xffff int4max = 0x7fffffff x.extend([1, -1, uint1max, -uint1max, -uint1max-1, uint2max, -uint2max, -uint2max-1, int4max, -int4max, -int4max-1]) y = ('abc', 'abc', c, c) x.append(y) x.append(y) x.append(5) return x class AbstractPickleTests(unittest.TestCase): # Subclass must define self.dumps, self.loads, self.error. _testdata = create_data() def setUp(self): pass def test_misc(self): # test various datatypes not tested by testdata for proto in protocols: x = myint(4) s = self.dumps(x, proto) y = self.loads(s) self.assertEqual(x, y) x = (1, ()) s = self.dumps(x, proto) y = self.loads(s) self.assertEqual(x, y) x = initarg(1, x) s = self.dumps(x, proto) y = self.loads(s) self.assertEqual(x, y) # XXX test __reduce__ protocol? 
def test_roundtrip_equality(self): expected = self._testdata for proto in protocols: s = self.dumps(expected, proto) got = self.loads(s) self.assertEqual(expected, got) def test_load_from_canned_string(self): expected = self._testdata for canned in DATA0, DATA1, DATA2: got = self.loads(canned) self.assertEqual(expected, got) # There are gratuitous differences between pickles produced by # pickle and cPickle, largely because cPickle starts PUT indices at # 1 and pickle starts them at 0. See XXX comment in cPickle's put2() -- # there's a comment with an exclamation point there whose meaning # is a mystery. cPickle also suppresses PUT for objects with a refcount # of 1. def dont_test_disassembly(self): from pickletools import dis for proto, expected in (0, DATA0_DIS), (1, DATA1_DIS): s = self.dumps(self._testdata, proto) filelike = cStringIO.StringIO() dis(s, out=filelike) got = filelike.getvalue() self.assertEqual(expected, got) def test_recursive_list(self): l = [] l.append(l) for proto in protocols: s = self.dumps(l, proto) x = self.loads(s) self.assertEqual(len(x), 1) self.assertTrue(x is x[0]) def test_recursive_tuple(self): t = ([],) t[0].append(t) for proto in protocols: s = self.dumps(t, proto) x = self.loads(s) self.assertEqual(len(x), 1) self.assertEqual(len(x[0]), 1) self.assertTrue(x is x[0][0]) def test_recursive_dict(self): d = {} d[1] = d for proto in protocols: s = self.dumps(d, proto) x = self.loads(s) self.assertEqual(x.keys(), [1]) self.assertTrue(x[1] is x) def test_recursive_inst(self): i = C() i.attr = i for proto in protocols: s = self.dumps(i, proto) x = self.loads(s) self.assertEqual(dir(x), dir(i)) self.assertIs(x.attr, x) def test_recursive_multi(self): l = [] d = {1:l} i = C() i.attr = d l.append(i) for proto in protocols: s = self.dumps(l, proto) x = self.loads(s) self.assertEqual(len(x), 1) self.assertEqual(dir(x[0]), dir(i)) self.assertEqual(x[0].attr.keys(), [1]) self.assertTrue(x[0].attr[1] is x) def test_garyp(self): 
self.assertRaises(self.error, self.loads, 'garyp') def test_insecure_strings(self): insecure = ["abc", "2 + 2", # not quoted #"'abc' + 'def'", # not a single quoted string "'abc", # quote is not closed "'abc\"", # open quote and close quote don't match "'abc' ?", # junk after close quote "'\\'", # trailing backslash "'", # issue #17710 "' ", # issue #17710 # some tests of the quoting rules #"'abc\"\''", #"'\\\\a\'\'\'\\\'\\\\\''", ] for s in insecure: buf = "S" + s + "\012p0\012." self.assertRaises(ValueError, self.loads, buf) if have_unicode: def test_unicode(self): endcases = [u'', u'<\\u>', u'<\\\u1234>', u'<\n>', u'<\\>', u'<\\\U00012345>'] for proto in protocols: for u in endcases: p = self.dumps(u, proto) u2 = self.loads(p) self.assertEqual(u2, u) def test_unicode_high_plane(self): t = u'\U00012345' for proto in protocols: p = self.dumps(t, proto) t2 = self.loads(p) self.assertEqual(t2, t) def test_ints(self): import sys for proto in protocols: n = sys.maxint while n: for expected in (-n, n): s = self.dumps(expected, proto) n2 = self.loads(s) self.assertEqual(expected, n2) n = n >> 1 def test_maxint64(self): maxint64 = (1L << 63) - 1 data = 'I' + str(maxint64) + '\n.' got = self.loads(data) self.assertEqual(got, maxint64) # Try too with a bogus literal. data = 'I' + str(maxint64) + 'JUNK\n.' self.assertRaises(ValueError, self.loads, data) def test_long(self): for proto in protocols: # 256 bytes is where LONG4 begins. for nbits in 1, 8, 8*254, 8*255, 8*256, 8*257: nbase = 1L << nbits for npos in nbase-1, nbase, nbase+1: for n in npos, -npos: pickle = self.dumps(n, proto) got = self.loads(pickle) self.assertEqual(n, got) # Try a monster. This is quadratic-time in protos 0 & 1, so don't # bother with those. 
nbase = long("deadbeeffeedface", 16) nbase += nbase << 1000000 for n in nbase, -nbase: p = self.dumps(n, 2) got = self.loads(p) self.assertEqual(n, got) def test_float(self): test_values = [0.0, 4.94e-324, 1e-310, 7e-308, 6.626e-34, 0.1, 0.5, 3.14, 263.44582062374053, 6.022e23, 1e30] test_values = test_values + [-x for x in test_values] for proto in protocols: for value in test_values: pickle = self.dumps(value, proto) got = self.loads(pickle) self.assertEqual(value, got) @run_with_locale('LC_ALL', 'de_DE', 'fr_FR') def test_float_format(self): # make sure that floats are formatted locale independent self.assertEqual(self.dumps(1.2)[0:3], 'F1.') def test_reduce(self): pass def test_getinitargs(self): pass def test_metaclass(self): a = use_metaclass() for proto in protocols: s = self.dumps(a, proto) b = self.loads(s) self.assertEqual(a.__class__, b.__class__) def test_dynamic_class(self): a = create_dynamic_class("my_dynamic_class", (object,)) copy_reg.pickle(pickling_metaclass, pickling_metaclass.__reduce__) for proto in protocols: s = self.dumps(a, proto) b = self.loads(s) self.assertEqual(a, b) def test_structseq(self): import time import os t = time.localtime() for proto in protocols: s = self.dumps(t, proto) u = self.loads(s) self.assertEqual(t, u) if hasattr(os, "stat"): t = os.stat(os.curdir) s = self.dumps(t, proto) u = self.loads(s) self.assertEqual(t, u) if hasattr(os, "statvfs"): t = os.statvfs(os.curdir) s = self.dumps(t, proto) u = self.loads(s) self.assertEqual(t, u) # Tests for protocol 2 def test_proto(self): build_none = pickle.NONE + pickle.STOP for proto in protocols: expected = build_none if proto >= 2: expected = pickle.PROTO + chr(proto) + expected p = self.dumps(None, proto) self.assertEqual(p, expected) oob = protocols[-1] + 1 # a future protocol badpickle = pickle.PROTO + chr(oob) + build_none try: self.loads(badpickle) except ValueError, detail: self.assertTrue(str(detail).startswith( "unsupported pickle protocol")) else: 
self.fail("expected bad protocol number to raise ValueError") def test_long1(self): x = 12345678910111213141516178920L for proto in protocols: s = self.dumps(x, proto) y = self.loads(s) self.assertEqual(x, y) self.assertEqual(opcode_in_pickle(pickle.LONG1, s), proto >= 2) def test_long4(self): x = 12345678910111213141516178920L << (256*8) for proto in protocols: s = self.dumps(x, proto) y = self.loads(s) self.assertEqual(x, y) self.assertEqual(opcode_in_pickle(pickle.LONG4, s), proto >= 2) def test_short_tuples(self): # Map (proto, len(tuple)) to expected opcode. expected_opcode = {(0, 0): pickle.TUPLE, (0, 1): pickle.TUPLE, (0, 2): pickle.TUPLE, (0, 3): pickle.TUPLE, (0, 4): pickle.TUPLE, (1, 0): pickle.EMPTY_TUPLE, (1, 1): pickle.TUPLE, (1, 2): pickle.TUPLE, (1, 3): pickle.TUPLE, (1, 4): pickle.TUPLE, (2, 0): pickle.EMPTY_TUPLE, (2, 1): pickle.TUPLE1, (2, 2): pickle.TUPLE2, (2, 3): pickle.TUPLE3, (2, 4): pickle.TUPLE, } a = () b = (1,) c = (1, 2) d = (1, 2, 3) e = (1, 2, 3, 4) for proto in protocols: for x in a, b, c, d, e: s = self.dumps(x, proto) y = self.loads(s) self.assertEqual(x, y, (proto, x, s, y)) expected = expected_opcode[proto, len(x)] self.assertEqual(opcode_in_pickle(expected, s), True) def test_singletons(self): # Map (proto, singleton) to expected opcode. 
expected_opcode = {(0, None): pickle.NONE, (1, None): pickle.NONE, (2, None): pickle.NONE, (0, True): pickle.INT, (1, True): pickle.INT, (2, True): pickle.NEWTRUE, (0, False): pickle.INT, (1, False): pickle.INT, (2, False): pickle.NEWFALSE, } for proto in protocols: for x in None, False, True: s = self.dumps(x, proto) y = self.loads(s) self.assertTrue(x is y, (proto, x, s, y)) expected = expected_opcode[proto, x] self.assertEqual(opcode_in_pickle(expected, s), True) def test_newobj_tuple(self): x = MyTuple([1, 2, 3]) x.foo = 42 x.bar = "hello" for proto in protocols: s = self.dumps(x, proto) y = self.loads(s) self.assertEqual(tuple(x), tuple(y)) self.assertEqual(x.__dict__, y.__dict__) def test_newobj_list(self): x = MyList([1, 2, 3]) x.foo = 42 x.bar = "hello" for proto in protocols: s = self.dumps(x, proto) y = self.loads(s) self.assertEqual(list(x), list(y)) self.assertEqual(x.__dict__, y.__dict__) def test_newobj_generic(self): for proto in protocols: for C in myclasses: B = C.__base__ x = C(C.sample) x.foo = 42 s = self.dumps(x, proto) y = self.loads(s) detail = (proto, C, B, x, y, type(y)) self.assertEqual(B(x), B(y), detail) self.assertEqual(x.__dict__, y.__dict__, detail) # Register a type with copy_reg, with extension code extcode. Pickle # an object of that type. Check that the resulting pickle uses opcode # (EXT[124]) under proto 2, and not in proto 1. def produce_global_ext(self, extcode, opcode): e = ExtensionSaver(extcode) try: copy_reg.add_extension(__name__, "MyList", extcode) x = MyList([1, 2, 3]) x.foo = 42 x.bar = "hello" # Dump using protocol 1 for comparison. s1 = self.dumps(x, 1) self.assertIn(__name__, s1) self.assertIn("MyList", s1) self.assertEqual(opcode_in_pickle(opcode, s1), False) y = self.loads(s1) self.assertEqual(list(x), list(y)) self.assertEqual(x.__dict__, y.__dict__) # Dump using protocol 2 for test. 
s2 = self.dumps(x, 2) self.assertNotIn(__name__, s2) self.assertNotIn("MyList", s2) self.assertEqual(opcode_in_pickle(opcode, s2), True) y = self.loads(s2) self.assertEqual(list(x), list(y)) self.assertEqual(x.__dict__, y.__dict__) finally: e.restore() def test_global_ext1(self): self.produce_global_ext(0x00000001, pickle.EXT1) # smallest EXT1 code self.produce_global_ext(0x000000ff, pickle.EXT1) # largest EXT1 code def test_global_ext2(self): self.produce_global_ext(0x00000100, pickle.EXT2) # smallest EXT2 code self.produce_global_ext(0x0000ffff, pickle.EXT2) # largest EXT2 code self.produce_global_ext(0x0000abcd, pickle.EXT2) # check endianness def test_global_ext4(self): self.produce_global_ext(0x00010000, pickle.EXT4) # smallest EXT4 code self.produce_global_ext(0x7fffffff, pickle.EXT4) # largest EXT4 code self.produce_global_ext(0x12abcdef, pickle.EXT4) # check endianness def test_list_chunking(self): n = 10 # too small to chunk x = range(n) for proto in protocols: s = self.dumps(x, proto) y = self.loads(s) self.assertEqual(x, y) num_appends = count_opcode(pickle.APPENDS, s) self.assertEqual(num_appends, proto > 0) n = 2500 # expect at least two chunks when proto > 0 x = range(n) for proto in protocols: s = self.dumps(x, proto) y = self.loads(s) self.assertEqual(x, y) num_appends = count_opcode(pickle.APPENDS, s) if proto == 0: self.assertEqual(num_appends, 0) else: self.assertTrue(num_appends >= 2) def test_dict_chunking(self): n = 10 # too small to chunk x = dict.fromkeys(range(n)) for proto in protocols: s = self.dumps(x, proto) y = self.loads(s) self.assertEqual(x, y) num_setitems = count_opcode(pickle.SETITEMS, s) self.assertEqual(num_setitems, proto > 0) n = 2500 # expect at least two chunks when proto > 0 x = dict.fromkeys(range(n)) for proto in protocols: s = self.dumps(x, proto) y = self.loads(s) self.assertEqual(x, y) num_setitems = count_opcode(pickle.SETITEMS, s) if proto == 0: self.assertEqual(num_setitems, 0) else: self.assertTrue(num_setitems >= 
2) def test_simple_newobj(self): x = object.__new__(SimpleNewObj) # avoid __init__ x.abc = 666 for proto in protocols: s = self.dumps(x, proto) self.assertEqual(opcode_in_pickle(pickle.NEWOBJ, s), proto >= 2) y = self.loads(s) # will raise TypeError if __init__ called self.assertEqual(y.abc, 666) self.assertEqual(x.__dict__, y.__dict__) def test_newobj_list_slots(self): x = SlotList([1, 2, 3]) x.foo = 42 x.bar = "hello" s = self.dumps(x, 2) y = self.loads(s) self.assertEqual(list(x), list(y)) self.assertEqual(x.__dict__, y.__dict__) self.assertEqual(x.foo, y.foo) self.assertEqual(x.bar, y.bar) def test_reduce_overrides_default_reduce_ex(self): for proto in protocols: x = REX_one() self.assertEqual(x._reduce_called, 0) s = self.dumps(x, proto) self.assertEqual(x._reduce_called, 1) y = self.loads(s) self.assertEqual(y._reduce_called, 0) def test_reduce_ex_called(self): for proto in protocols: x = REX_two() self.assertEqual(x._proto, None) s = self.dumps(x, proto) self.assertEqual(x._proto, proto) y = self.loads(s) self.assertEqual(y._proto, None) def test_reduce_ex_overrides_reduce(self): for proto in protocols: x = REX_three() self.assertEqual(x._proto, None) s = self.dumps(x, proto) self.assertEqual(x._proto, proto) y = self.loads(s) self.assertEqual(y._proto, None) def test_reduce_ex_calls_base(self): for proto in protocols: x = REX_four() self.assertEqual(x._proto, None) s = self.dumps(x, proto) self.assertEqual(x._proto, proto) y = self.loads(s) self.assertEqual(y._proto, proto) def test_reduce_calls_base(self): for proto in protocols: x = REX_five() self.assertEqual(x._reduce_called, 0) s = self.dumps(x, proto) self.assertEqual(x._reduce_called, 1) y = self.loads(s) self.assertEqual(y._reduce_called, 1) def test_reduce_bad_iterator(self): # Issue4176: crash when 4th and 5th items of __reduce__() # are not iterators class C(object): def __reduce__(self): # 4th item is not an iterator return list, (), None, [], None class D(object): def __reduce__(self): # 5th 
item is not an iterator return dict, (), None, None, [] # Protocol 0 is less strict and also accept iterables. for proto in protocols: try: self.dumps(C(), proto) except (AttributeError, pickle.PickleError, cPickle.PickleError): pass try: self.dumps(D(), proto) except (AttributeError, pickle.PickleError, cPickle.PickleError): pass def test_many_puts_and_gets(self): # Test that internal data structures correctly deal with lots of # puts/gets. keys = ("aaa" + str(i) for i in xrange(100)) large_dict = dict((k, [4, 5, 6]) for k in keys) obj = [dict(large_dict), dict(large_dict), dict(large_dict)] for proto in protocols: dumped = self.dumps(obj, proto) loaded = self.loads(dumped) self.assertEqual(loaded, obj, "Failed protocol %d: %r != %r" % (proto, obj, loaded)) def test_attribute_name_interning(self): # Test that attribute names of pickled objects are interned when # unpickling. for proto in protocols: x = C() x.foo = 42 x.bar = "hello" s = self.dumps(x, proto) y = self.loads(s) x_keys = sorted(x.__dict__) y_keys = sorted(y.__dict__) for x_key, y_key in zip(x_keys, y_keys): self.assertIs(x_key, y_key) # Test classes for reduce_ex class REX_one(object): _reduce_called = 0 def __reduce__(self): self._reduce_called = 1 return REX_one, () # No __reduce_ex__ here, but inheriting it from object class REX_two(object): _proto = None def __reduce_ex__(self, proto): self._proto = proto return REX_two, () # No __reduce__ here, but inheriting it from object class REX_three(object): _proto = None def __reduce_ex__(self, proto): self._proto = proto return REX_two, () def __reduce__(self): raise TestFailed, "This __reduce__ shouldn't be called" class REX_four(object): _proto = None def __reduce_ex__(self, proto): self._proto = proto return object.__reduce_ex__(self, proto) # Calling base class method should succeed class REX_five(object): _reduce_called = 0 def __reduce__(self): self._reduce_called = 1 return object.__reduce__(self) # This one used to fail with infinite recursion # 
Test classes for newobj class MyInt(int): sample = 1 class MyLong(long): sample = 1L class MyFloat(float): sample = 1.0 class MyComplex(complex): sample = 1.0 + 0.0j class MyStr(str): sample = "hello" class MyUnicode(unicode): sample = u"hello \u1234" class MyTuple(tuple): sample = (1, 2, 3) class MyList(list): sample = [1, 2, 3] class MyDict(dict): sample = {"a": 1, "b": 2} myclasses = [MyInt, MyLong, MyFloat, MyComplex, MyStr, MyUnicode, MyTuple, MyList, MyDict] class SlotList(MyList): __slots__ = ["foo"] class SimpleNewObj(object): def __init__(self, a, b, c): # raise an error, to make sure this isn't called raise TypeError("SimpleNewObj.__init__() didn't expect to get called") class AbstractPickleModuleTests(unittest.TestCase): def test_dump_closed_file(self): import os f = open(TESTFN, "w") try: f.close() self.assertRaises(ValueError, self.module.dump, 123, f) finally: os.remove(TESTFN) def test_load_closed_file(self): import os f = open(TESTFN, "w") try: f.close() self.assertRaises(ValueError, self.module.dump, 123, f) finally: os.remove(TESTFN) def test_load_from_and_dump_to_file(self): stream = cStringIO.StringIO() data = [123, {}, 124] self.module.dump(data, stream) stream.seek(0) unpickled = self.module.load(stream) self.assertEqual(unpickled, data) def test_highest_protocol(self): # Of course this needs to be changed when HIGHEST_PROTOCOL changes. 
self.assertEqual(self.module.HIGHEST_PROTOCOL, 2) def test_callapi(self): f = cStringIO.StringIO() # With and without keyword arguments self.module.dump(123, f, -1) self.module.dump(123, file=f, protocol=-1) self.module.dumps(123, -1) self.module.dumps(123, protocol=-1) self.module.Pickler(f, -1) self.module.Pickler(f, protocol=-1) def test_incomplete_input(self): s = StringIO.StringIO("X''.") self.assertRaises(EOFError, self.module.load, s) def test_restricted(self): # issue7128: cPickle failed in restricted mode builtins = {self.module.__name__: self.module, '__import__': __import__} d = {} teststr = "def f(): {0}.dumps(0)".format(self.module.__name__) exec teststr in {'__builtins__': builtins}, d d['f']() def test_bad_input(self): # Test issue4298 s = '\x58\0\0\0\x54' self.assertRaises(EOFError, self.module.loads, s) # Test issue7455 s = '0' # XXX Why doesn't pickle raise UnpicklingError? self.assertRaises((IndexError, cPickle.UnpicklingError), self.module.loads, s) class AbstractPersistentPicklerTests(unittest.TestCase): # This class defines persistent_id() and persistent_load() # functions that should be used by the pickler. All even integers # are pickled using persistent ids. 
def persistent_id(self, object): if isinstance(object, int) and object % 2 == 0: self.id_count += 1 return str(object) else: return None def persistent_load(self, oid): self.load_count += 1 object = int(oid) assert object % 2 == 0 return object def test_persistence(self): self.id_count = 0 self.load_count = 0 L = range(10) self.assertEqual(self.loads(self.dumps(L)), L) self.assertEqual(self.id_count, 5) self.assertEqual(self.load_count, 5) def test_bin_persistence(self): self.id_count = 0 self.load_count = 0 L = range(10) self.assertEqual(self.loads(self.dumps(L, 1)), L) self.assertEqual(self.id_count, 5) self.assertEqual(self.load_count, 5) class AbstractPicklerUnpicklerObjectTests(unittest.TestCase): pickler_class = None unpickler_class = None def setUp(self): assert self.pickler_class assert self.unpickler_class def test_clear_pickler_memo(self): # To test whether clear_memo() has any effect, we pickle an object, # then pickle it again without clearing the memo; the two serialized # forms should be different. If we clear_memo() and then pickle the # object again, the third serialized form should be identical to the # first one we obtained. data = ["abcdefg", "abcdefg", 44] f = cStringIO.StringIO() pickler = self.pickler_class(f) pickler.dump(data) first_pickled = f.getvalue() # Reset StringIO object. f.seek(0) f.truncate() pickler.dump(data) second_pickled = f.getvalue() # Reset the Pickler and StringIO objects. pickler.clear_memo() f.seek(0) f.truncate() pickler.dump(data) third_pickled = f.getvalue() self.assertNotEqual(first_pickled, second_pickled) self.assertEqual(first_pickled, third_pickled) def test_priming_pickler_memo(self): # Verify that we can set the Pickler's memo attribute. 
data = ["abcdefg", "abcdefg", 44] f = cStringIO.StringIO() pickler = self.pickler_class(f) pickler.dump(data) first_pickled = f.getvalue() f = cStringIO.StringIO() primed = self.pickler_class(f) primed.memo = pickler.memo primed.dump(data) primed_pickled = f.getvalue() self.assertNotEqual(first_pickled, primed_pickled) def test_priming_unpickler_memo(self): # Verify that we can set the Unpickler's memo attribute. data = ["abcdefg", "abcdefg", 44] f = cStringIO.StringIO() pickler = self.pickler_class(f) pickler.dump(data) first_pickled = f.getvalue() f = cStringIO.StringIO() primed = self.pickler_class(f) primed.memo = pickler.memo primed.dump(data) primed_pickled = f.getvalue() unpickler = self.unpickler_class(cStringIO.StringIO(first_pickled)) unpickled_data1 = unpickler.load() self.assertEqual(unpickled_data1, data) primed = self.unpickler_class(cStringIO.StringIO(primed_pickled)) primed.memo = unpickler.memo unpickled_data2 = primed.load() primed.memo.clear() self.assertEqual(unpickled_data2, data) self.assertTrue(unpickled_data2 is unpickled_data1) def test_reusing_unpickler_objects(self): data1 = ["abcdefg", "abcdefg", 44] f = cStringIO.StringIO() pickler = self.pickler_class(f) pickler.dump(data1) pickled1 = f.getvalue() data2 = ["abcdefg", 44, 44] f = cStringIO.StringIO() pickler = self.pickler_class(f) pickler.dump(data2) pickled2 = f.getvalue() f = cStringIO.StringIO() f.write(pickled1) f.seek(0) unpickler = self.unpickler_class(f) self.assertEqual(unpickler.load(), data1) f.seek(0) f.truncate() f.write(pickled2) f.seek(0) self.assertEqual(unpickler.load(), data2) class BigmemPickleTests(unittest.TestCase): # Memory requirements: 1 byte per character for input strings, 1 byte # for pickled data, 1 byte for unpickled strings, 1 byte for internal # buffer and 1 byte of free space for resizing of internal buffer. 
@precisionbigmemtest(size=_2G + 100*_1M, memuse=5) def test_huge_strlist(self, size): chunksize = 2**20 data = [] while size > chunksize: data.append('x' * chunksize) size -= chunksize chunksize += 1 data.append('y' * size) try: for proto in protocols: try: pickled = self.dumps(data, proto) res = self.loads(pickled) self.assertEqual(res, data) finally: res = None pickled = None finally: data = None
gpl-2.0
Snuzzo/vigor_aosp_kernel
scripts/rt-tester/rt-tester.py
11005
5307
#!/usr/bin/python # # rt-mutex tester # # (C) 2006 Thomas Gleixner <tglx@linutronix.de> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License version 2 as # published by the Free Software Foundation. # import os import sys import getopt import shutil import string # Globals quiet = 0 test = 0 comments = 0 sysfsprefix = "/sys/devices/system/rttest/rttest" statusfile = "/status" commandfile = "/command" # Command opcodes cmd_opcodes = { "schedother" : "1", "schedfifo" : "2", "lock" : "3", "locknowait" : "4", "lockint" : "5", "lockintnowait" : "6", "lockcont" : "7", "unlock" : "8", "signal" : "11", "resetevent" : "98", "reset" : "99", } test_opcodes = { "prioeq" : ["P" , "eq" , None], "priolt" : ["P" , "lt" , None], "priogt" : ["P" , "gt" , None], "nprioeq" : ["N" , "eq" , None], "npriolt" : ["N" , "lt" , None], "npriogt" : ["N" , "gt" , None], "unlocked" : ["M" , "eq" , 0], "trylock" : ["M" , "eq" , 1], "blocked" : ["M" , "eq" , 2], "blockedwake" : ["M" , "eq" , 3], "locked" : ["M" , "eq" , 4], "opcodeeq" : ["O" , "eq" , None], "opcodelt" : ["O" , "lt" , None], "opcodegt" : ["O" , "gt" , None], "eventeq" : ["E" , "eq" , None], "eventlt" : ["E" , "lt" , None], "eventgt" : ["E" , "gt" , None], } # Print usage information def usage(): print "rt-tester.py <-c -h -q -t> <testfile>" print " -c display comments after first command" print " -h help" print " -q quiet mode" print " -t test mode (syntax check)" print " testfile: read test specification from testfile" print " otherwise from stdin" return # Print progress when not in quiet mode def progress(str): if not quiet: print str # Analyse a status value def analyse(val, top, arg): intval = int(val) if top[0] == "M": intval = intval / (10 ** int(arg)) intval = intval % 10 argval = top[2] elif top[0] == "O": argval = int(cmd_opcodes.get(arg, arg)) else: argval = int(arg) # progress("%d %s %d" %(intval, top[1], argval)) if top[1] == "eq" and intval == 
argval: return 1 if top[1] == "lt" and intval < argval: return 1 if top[1] == "gt" and intval > argval: return 1 return 0 # Parse the commandline try: (options, arguments) = getopt.getopt(sys.argv[1:],'chqt') except getopt.GetoptError, ex: usage() sys.exit(1) # Parse commandline options for option, value in options: if option == "-c": comments = 1 elif option == "-q": quiet = 1 elif option == "-t": test = 1 elif option == '-h': usage() sys.exit(0) # Select the input source if arguments: try: fd = open(arguments[0]) except Exception,ex: sys.stderr.write("File not found %s\n" %(arguments[0])) sys.exit(1) else: fd = sys.stdin linenr = 0 # Read the test patterns while 1: linenr = linenr + 1 line = fd.readline() if not len(line): break line = line.strip() parts = line.split(":") if not parts or len(parts) < 1: continue if len(parts[0]) == 0: continue if parts[0].startswith("#"): if comments > 1: progress(line) continue if comments == 1: comments = 2 progress(line) cmd = parts[0].strip().lower() opc = parts[1].strip().lower() tid = parts[2].strip() dat = parts[3].strip() try: # Test or wait for a status value if cmd == "t" or cmd == "w": testop = test_opcodes[opc] fname = "%s%s%s" %(sysfsprefix, tid, statusfile) if test: print fname continue while 1: query = 1 fsta = open(fname, 'r') status = fsta.readline().strip() fsta.close() stat = status.split(",") for s in stat: s = s.strip() if s.startswith(testop[0]): # Separate status value val = s[2:].strip() query = analyse(val, testop, dat) break if query or cmd == "t": break progress(" " + status) if not query: sys.stderr.write("Test failed in line %d\n" %(linenr)) sys.exit(1) # Issue a command to the tester elif cmd == "c": cmdnr = cmd_opcodes[opc] # Build command string and sys filename cmdstr = "%s:%s" %(cmdnr, dat) fname = "%s%s%s" %(sysfsprefix, tid, commandfile) if test: print fname continue fcmd = open(fname, 'w') fcmd.write(cmdstr) fcmd.close() except Exception,ex: sys.stderr.write(str(ex)) sys.stderr.write("\nSyntax 
error in line %d\n" %(linenr)) if not test: fd.close() sys.exit(1) # Normal exit pass print "Pass" sys.exit(0)
gpl-2.0
eeshangarg/oh-mainline
vendor/packages/scrapy/scrapy/utils/conf.py
40
2132
import sys import os from ConfigParser import SafeConfigParser from operator import itemgetter def build_component_list(base, custom): """Compose a component list based on a custom and base dict of components (typically middlewares or extensions), unless custom is already a list, in which case it's returned. """ if isinstance(custom, (list, tuple)): return custom compdict = base.copy() compdict.update(custom) return [k for k, v in sorted(compdict.items(), key=itemgetter(1)) \ if v is not None] def arglist_to_dict(arglist): """Convert a list of arguments like ['arg1=val1', 'arg2=val2', ...] to a dict """ return dict(x.split('=', 1) for x in arglist) def closest_scrapy_cfg(path='.', prevpath=None): """Return the path to the closest scrapy.cfg file by traversing the current directory and its parents """ if path == prevpath: return '' path = os.path.abspath(path) cfgfile = os.path.join(path, 'scrapy.cfg') if os.path.exists(cfgfile): return cfgfile return closest_scrapy_cfg(os.path.dirname(path), path) def init_env(project='default', set_syspath=True): """Initialize environment to use command-line tool from inside a project dir. This sets the Scrapy settings module and modifies the Python path to be able to locate the project module. """ cfg = get_config() if cfg.has_option('settings', project): os.environ['SCRAPY_SETTINGS_MODULE'] = cfg.get('settings', project) closest = closest_scrapy_cfg() if closest: projdir = os.path.dirname(closest) if set_syspath and projdir not in sys.path: sys.path.append(projdir) def get_config(use_closest=True): """Get Scrapy config file as a SafeConfigParser""" sources = get_sources(use_closest) cfg = SafeConfigParser() cfg.read(sources) return cfg def get_sources(use_closest=True): sources = ['/etc/scrapy.cfg', r'c:\scrapy\scrapy.cfg', \ os.path.expanduser('~/.scrapy.cfg')] if use_closest: sources.append(closest_scrapy_cfg()) return sources
agpl-3.0
alikins/ansible
lib/ansible/modules/network/aci/aci_interface_policy_fc.py
5
3812
#!/usr/bin/python # -*- coding: utf-8 -*- # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = r''' --- module: aci_interface_policy_fc short_description: Manage Fibre Channel interface policies on Cisco ACI fabrics (fc:IfPol) description: - Manage ACI Fiber Channel interface policies on Cisco ACI fabrics. - More information from the internal APIC class I(fc:IfPol) at U(https://developer.cisco.com/docs/apic-mim-ref/). author: - Dag Wieers (@dagwieers) version_added: '2.4' options: fc_policy: description: - The name of the Fiber Channel interface policy. required: yes aliases: [ name ] description: description: - The description of the Fiber Channel interface policy. aliases: [ descr ] port_mode: description: - Port Mode choices: [ f, np ] default: f state: description: - Use C(present) or C(absent) for adding or removing. - Use C(query) for listing an object or multiple objects. 
choices: [ absent, present, query ] default: present extends_documentation_fragment: aci ''' EXAMPLES = r''' - aci_interface_policy_fc: host: '{{ hostname }}' username: '{{ username }}' password: '{{ password }}' fc_policy: '{{ fc_policy }}' port_mode: '{{ port_mode }}' description: '{{ description }}' state: present ''' RETURN = r''' # ''' from ansible.module_utils.network.aci.aci import ACIModule, aci_argument_spec from ansible.module_utils.basic import AnsibleModule def main(): argument_spec = aci_argument_spec() argument_spec.update( fc_policy=dict(type='str', required=False, aliases=['name']), # Not required for querying all objects description=dict(type='str', aliases=['descr']), port_mode=dict(type='str', choices=['f', 'np']), # No default provided on purpose state=dict(type='str', default='present', choices=['absent', 'present', 'query']), method=dict(type='str', choices=['delete', 'get', 'post'], aliases=['action'], removed_in_version='2.6'), # Deprecated starting from v2.6 protocol=dict(type='str', removed_in_version='2.6'), # Deprecated in v2.6 ) module = AnsibleModule( argument_spec=argument_spec, supports_check_mode=True, required_if=[ ['state', 'absent', ['fc_policy']], ['state', 'present', ['fc_policy']], ], ) fc_policy = module.params['fc_policy'] port_mode = module.params['port_mode'] description = module.params['description'] state = module.params['state'] aci = ACIModule(module) aci.construct_url( root_class=dict( aci_class='fcIfPol', aci_rn='infra/fcIfPol-{0}'.format(fc_policy), filter_target='eq(fcIfPol.name, "{0}")'.format(fc_policy), module_object=fc_policy, ), ) aci.get_existing() if state == 'present': # Filter out module parameters with null values aci.payload( aci_class='fcIfPol', class_config=dict( name=fc_policy, descr=description, portMode=port_mode, ), ) # Generate config diff which will be used as POST request body aci.get_diff(aci_class='fcIfPol') # Submit changes if module not in check_mode and the proposed is different than 
existing aci.post_config() elif state == 'absent': aci.delete_config() module.exit_json(**aci.result) if __name__ == "__main__": main()
gpl-3.0
IsCoolEntertainment/debpkg_python-boto
tests/db/test_lists.py
136
3474
# Copyright (c) 2010 Chris Moyer http://coredumped.org/ # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. 
# from boto.sdb.db.property import ListProperty from boto.sdb.db.model import Model import time class SimpleListModel(Model): """Test the List Property""" nums = ListProperty(int) strs = ListProperty(str) class TestLists(object): """Test the List property""" def setup_class(cls): """Setup this class""" cls.objs = [] def teardown_class(cls): """Remove our objects""" for o in cls.objs: try: o.delete() except: pass def test_list_order(self): """Testing the order of lists""" t = SimpleListModel() t.nums = [5, 4, 1, 3, 2] t.strs = ["B", "C", "A", "D", "Foo"] t.put() self.objs.append(t) time.sleep(3) t = SimpleListModel.get_by_id(t.id) assert(t.nums == [5, 4, 1, 3, 2]) assert(t.strs == ["B", "C", "A", "D", "Foo"]) def test_old_compat(self): """Testing to make sure the old method of encoding lists will still return results""" t = SimpleListModel() t.put() self.objs.append(t) time.sleep(3) item = t._get_raw_item() item['strs'] = ["A", "B", "C"] item.save() time.sleep(3) t = SimpleListModel.get_by_id(t.id) i1 = sorted(item['strs']) i2 = t.strs i2.sort() assert(i1 == i2) def test_query_equals(self): """We noticed a slight problem with querying, since the query uses the same encoder, it was asserting that the value was at the same position in the list, not just "in" the list""" t = SimpleListModel() t.strs = ["Bizzle", "Bar"] t.put() self.objs.append(t) time.sleep(3) assert(SimpleListModel.find(strs="Bizzle").count() == 1) assert(SimpleListModel.find(strs="Bar").count() == 1) assert(SimpleListModel.find(strs=["Bar", "Bizzle"]).count() == 1) def test_query_not_equals(self): """Test a not equal filter""" t = SimpleListModel() t.strs = ["Fizzle"] t.put() self.objs.append(t) time.sleep(3) print SimpleListModel.all().filter("strs !=", "Fizzle").get_query() for tt in SimpleListModel.all().filter("strs !=", "Fizzle"): print tt.strs assert("Fizzle" not in tt.strs)
mit
jmehnle/ansible
lib/ansible/modules/network/netvisor/pn_trunk.py
59
13979
#!/usr/bin/python """ PN CLI trunk-create/trunk-delete/trunk-modify """ # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # ANSIBLE_METADATA = {'metadata_version': '1.0', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = """ --- module: pn_trunk author: "Pluribus Networks (@amitsi)" version_added: "2.2" short_description: CLI command to create/delete/modify a trunk. description: - Execute trunk-create or trunk-delete command. - Trunks can be used to aggregate network links at Layer 2 on the local switch. Use this command to create a new trunk. options: pn_cliusername: description: - Provide login username if user is not root. required: False pn_clipassword: description: - Provide login password if user is not root. required: False pn_cliswitch: description: - Target switch(es) to run the cli on. required: False state: description: - State the action to perform. Use 'present' to create trunk, 'absent' to delete trunk and 'update' to modify trunk. required: True choices: ['present', 'absent', 'update'] pn_name: description: - Specify the name for the trunk configuration. required: true pn_ports: description: - Specify the port number(s) for the link(s) to aggregate into the trunk. - Required for trunk-create. pn_speed: description: - Specify the port speed or disable the port. 
choices: ['disable', '10m', '100m', '1g', '2.5g', '10g', '40g'] pn_egress_rate_limit: description: - Specify an egress port data rate limit for the configuration. pn_jumbo: description: - Specify if the port can receive jumbo frames. pn_lacp_mode: description: - Specify the LACP mode for the configuration. choices: ['off', 'passive', 'active'] pn_lacp_priority: description: - Specify the LACP priority. This is a number between 1 and 65535 with a default value of 32768. pn_lacp_timeout: description: - Specify the LACP time out as slow (30 seconds) or fast (4seconds). The default value is slow. choices: ['slow', 'fast'] pn_lacp_fallback: description: - Specify the LACP fallback mode as bundles or individual. choices: ['bundle', 'individual'] pn_lacp_fallback_timeout: description: - Specify the LACP fallback timeout in seconds. The range is between 30 and 60 seconds with a default value of 50 seconds. pn_edge_switch: description: - Specify if the switch is an edge switch. pn_pause: description: - Specify if pause frames are sent. pn_description: description: - Specify a description for the trunk configuration. pn_loopback: description: - Specify loopback if you want to use loopback. pn_mirror_receive: description: - Specify if the configuration receives mirrored traffic. pn_unknown_ucast_level: description: - Specify an unknown unicast level in percent. The default value is 100%. pn_unknown_mcast_level: description: - Specify an unknown multicast level in percent. The default value is 100%. pn_broadcast_level: description: - Specify a broadcast level in percent. The default value is 100%. pn_port_macaddr: description: - Specify the MAC address of the port. pn_loopvlans: description: - Specify a list of looping vlans. pn_routing: description: - Specify if the port participates in routing on the network. pn_host: description: - Host facing port control setting. 
""" EXAMPLES = """ - name: create trunk pn_trunk: state: 'present' pn_name: 'spine-to-leaf' pn_ports: '11,12,13,14' - name: delete trunk pn_trunk: state: 'absent' pn_name: 'spine-to-leaf' """ RETURN = """ command: description: The CLI command run on the target node(s). returned: always type: str stdout: description: The set of responses from the trunk command. returned: always type: list stderr: description: The set of error responses from the trunk command. returned: on error type: list changed: description: Indicates whether the CLI caused changes on the target. returned: always type: bool """ import shlex TRUNK_EXISTS = None def pn_cli(module): """ This method is to generate the cli portion to launch the Netvisor cli. It parses the username, password, switch parameters from module. :param module: The Ansible module to fetch username, password and switch :return: returns the cli string for further processing """ username = module.params['pn_cliusername'] password = module.params['pn_clipassword'] cliswitch = module.params['pn_cliswitch'] if username and password: cli = '/usr/bin/cli --quiet --user %s:%s ' % (username, password) else: cli = '/usr/bin/cli --quiet ' if cliswitch == 'local': cli += ' switch-local ' else: cli += ' switch ' + cliswitch return cli def check_cli(module, cli): """ This method checks for idempotency using the trunk-show command. If a trunk with given name exists, return TRUNK_EXISTS as True else False. :param module: The Ansible module to fetch input parameters :param cli: The CLI string :return Global Booleans: TRUNK_EXISTS """ name = module.params['pn_name'] show = cli + ' trunk-show format switch,name no-show-headers' show = shlex.split(show) out = module.run_command(show)[1] out = out.split() # Global flags global TRUNK_EXISTS if name in out: TRUNK_EXISTS = True else: TRUNK_EXISTS = False def run_cli(module, cli): """ This method executes the cli command on the target node(s) and returns the output. 
The module then exits based on the output. :param cli: the complete cli string to be executed on the target node(s). :param module: The Ansible module to fetch command """ cliswitch = module.params['pn_cliswitch'] state = module.params['state'] command = get_command_from_state(state) cmd = shlex.split(cli) # 'out' contains the output # 'err' contains the error messages result, out, err = module.run_command(cmd) print_cli = cli.split(cliswitch)[1] # Response in JSON format if result != 0: module.exit_json( command=print_cli, stderr=err.strip(), msg="%s operation failed" % command, changed=False ) if out: module.exit_json( command=print_cli, stdout=out.strip(), msg="%s operation completed" % command, changed=True ) else: module.exit_json( command=print_cli, msg="%s operation completed" % command, changed=True ) def get_command_from_state(state): """ This method gets appropriate command name for the state specified. It returns the command name for the specified state. :param state: The state for which the respective command name is required. 
""" command = None if state == 'present': command = 'trunk-create' if state == 'absent': command = 'trunk-delete' if state == 'update': command = 'trunk-modify' return command def main(): """ This portion is for arguments parsing """ module = AnsibleModule( argument_spec=dict( pn_cliusername=dict(required=False, type='str'), pn_clipassword=dict(required=False, type='str', no_log=True), pn_cliswitch=dict(required=False, type='str', default='local'), state=dict(required=True, type='str', choices=['present', 'absent', 'update']), pn_name=dict(required=True, type='str'), pn_ports=dict(type='str'), pn_speed=dict(type='str', choices=['disable', '10m', '100m', '1g', '2.5g', '10g', '40g']), pn_egress_rate_limit=dict(type='str'), pn_jumbo=dict(type='bool'), pn_lacp_mode=dict(type='str', choices=[ 'off', 'passive', 'active']), pn_lacp_priority=dict(type='int'), pn_lacp_timeout=dict(type='str'), pn_lacp_fallback=dict(type='str', choices=[ 'bundle', 'individual']), pn_lacp_fallback_timeout=dict(type='str'), pn_edge_switch=dict(type='bool'), pn_pause=dict(type='bool'), pn_description=dict(type='str'), pn_loopback=dict(type='bool'), pn_mirror_receive=dict(type='bool'), pn_unknown_ucast_level=dict(type='str'), pn_unknown_mcast_level=dict(type='str'), pn_broadcast_level=dict(type='str'), pn_port_macaddr=dict(type='str'), pn_loopvlans=dict(type='str'), pn_routing=dict(type='bool'), pn_host=dict(type='bool') ), required_if=( ["state", "present", ["pn_name", "pn_ports"]], ["state", "absent", ["pn_name"]], ["state", "update", ["pn_name"]] ) ) # Accessing the arguments state = module.params['state'] name = module.params['pn_name'] ports = module.params['pn_ports'] speed = module.params['pn_speed'] egress_rate_limit = module.params['pn_egress_rate_limit'] jumbo = module.params['pn_jumbo'] lacp_mode = module.params['pn_lacp_mode'] lacp_priority = module.params['pn_lacp_priority'] lacp_timeout = module.params['pn_lacp_timeout'] lacp_fallback = module.params['pn_lacp_fallback'] 
lacp_fallback_timeout = module.params['pn_lacp_fallback_timeout'] edge_switch = module.params['pn_edge_switch'] pause = module.params['pn_pause'] description = module.params['pn_description'] loopback = module.params['pn_loopback'] mirror_receive = module.params['pn_mirror_receive'] unknown_ucast_level = module.params['pn_unknown_ucast_level'] unknown_mcast_level = module.params['pn_unknown_mcast_level'] broadcast_level = module.params['pn_broadcast_level'] port_macaddr = module.params['pn_port_macaddr'] loopvlans = module.params['pn_loopvlans'] routing = module.params['pn_routing'] host = module.params['pn_host'] command = get_command_from_state(state) # Building the CLI command string cli = pn_cli(module) if command == 'trunk-delete': check_cli(module, cli) if TRUNK_EXISTS is False: module.exit_json( skipped=True, msg='Trunk with name %s does not exist' % name ) cli += ' %s name %s ' % (command, name) else: if command == 'trunk-create': check_cli(module, cli) if TRUNK_EXISTS is True: module.exit_json( skipped=True, msg='Trunk with name %s already exists' % name ) cli += ' %s name %s ' % (command, name) # Appending options if ports: cli += ' ports ' + ports if speed: cli += ' speed ' + speed if egress_rate_limit: cli += ' egress-rate-limit ' + egress_rate_limit if jumbo is True: cli += ' jumbo ' if jumbo is False: cli += ' no-jumbo ' if lacp_mode: cli += ' lacp-mode ' + lacp_mode if lacp_priority: cli += ' lacp-priority ' + lacp_priority if lacp_timeout: cli += ' lacp-timeout ' + lacp_timeout if lacp_fallback: cli += ' lacp-fallback ' + lacp_fallback if lacp_fallback_timeout: cli += ' lacp-fallback-timeout ' + lacp_fallback_timeout if edge_switch is True: cli += ' edge-switch ' if edge_switch is False: cli += ' no-edge-switch ' if pause is True: cli += ' pause ' if pause is False: cli += ' no-pause ' if description: cli += ' description ' + description if loopback is True: cli += ' loopback ' if loopback is False: cli += ' no-loopback ' if mirror_receive is True: 
cli += ' mirror-receive-only ' if mirror_receive is False: cli += ' no-mirror-receive-only ' if unknown_ucast_level: cli += ' unknown-ucast-level ' + unknown_ucast_level if unknown_mcast_level: cli += ' unknown-mcast-level ' + unknown_mcast_level if broadcast_level: cli += ' broadcast-level ' + broadcast_level if port_macaddr: cli += ' port-mac-address ' + port_macaddr if loopvlans: cli += ' loopvlans ' + loopvlans if routing is True: cli += ' routing ' if routing is False: cli += ' no-routing ' if host is True: cli += ' host-enable ' if host is False: cli += ' host-disable ' run_cli(module, cli) # Ansible boiler-plate from ansible.module_utils.basic import AnsibleModule if __name__ == '__main__': main()
gpl-3.0
ganjafuzz/PureKernel-v2-CAF
arch/ia64/scripts/unwcheck.py
13143
1714
#!/usr/bin/python # # Usage: unwcheck.py FILE # # This script checks the unwind info of each function in file FILE # and verifies that the sum of the region-lengths matches the total # length of the function. # # Based on a shell/awk script originally written by Harish Patil, # which was converted to Perl by Matthew Chapman, which was converted # to Python by David Mosberger. # import os import re import sys if len(sys.argv) != 2: print "Usage: %s FILE" % sys.argv[0] sys.exit(2) readelf = os.getenv("READELF", "readelf") start_pattern = re.compile("<([^>]*)>: \[0x([0-9a-f]+)-0x([0-9a-f]+)\]") rlen_pattern = re.compile(".*rlen=([0-9]+)") def check_func (func, slots, rlen_sum): if slots != rlen_sum: global num_errors num_errors += 1 if not func: func = "[%#x-%#x]" % (start, end) print "ERROR: %s: %lu slots, total region length = %lu" % (func, slots, rlen_sum) return num_funcs = 0 num_errors = 0 func = False slots = 0 rlen_sum = 0 for line in os.popen("%s -u %s" % (readelf, sys.argv[1])): m = start_pattern.match(line) if m: check_func(func, slots, rlen_sum) func = m.group(1) start = long(m.group(2), 16) end = long(m.group(3), 16) slots = 3 * (end - start) / 16 rlen_sum = 0L num_funcs += 1 else: m = rlen_pattern.match(line) if m: rlen_sum += long(m.group(1)) check_func(func, slots, rlen_sum) if num_errors == 0: print "No errors detected in %u functions." % num_funcs else: if num_errors > 1: err="errors" else: err="error" print "%u %s detected in %u functions." % (num_errors, err, num_funcs) sys.exit(1)
gpl-2.0
KyleAMoore/KanjiNani
Android/.buildozer/android/platform/build/build/python-installs/KanjiNani/kivy/animation.py
9
24508
''' Animation ========= :class:`Animation` and :class:`AnimationTransition` are used to animate :class:`~kivy.uix.widget.Widget` properties. You must specify at least a property name and target value. To use an Animation, follow these steps: * Setup an Animation object * Use the Animation object on a Widget Simple animation ---------------- To animate a Widget's x or y position, simply specify the target x/y values where you want the widget positioned at the end of the animation:: anim = Animation(x=100, y=100) anim.start(widget) The animation will last for 1 second unless :attr:`duration` is specified. When anim.start() is called, the Widget will move smoothly from the current x/y position to (100, 100). Multiple properties and transitions ----------------------------------- You can animate multiple properties and use built-in or custom transition functions using :attr:`transition` (or the `t=` shortcut). For example, to animate the position and size using the 'in_quad' transition:: anim = Animation(x=50, size=(80, 80), t='in_quad') anim.start(widget) Note that the `t=` parameter can be the string name of a method in the :class:`AnimationTransition` class or your own animation function. Sequential animation -------------------- To join animations sequentially, use the '+' operator. The following example will animate to x=50 over 1 second, then animate the size to (80, 80) over the next two seconds:: anim = Animation(x=50) + Animation(size=(80, 80), duration=2.) anim.start(widget) Parallel animation ------------------ To join animations in parallel, use the '&' operator. The following example will animate the position to (80, 10) over 1 second, whilst in parallel animating the size to (800, 800):: anim = Animation(pos=(80, 10)) anim &= Animation(size=(800, 800), duration=2.) anim.start(widget) Keep in mind that creating overlapping animations on the same property may have unexpected results. 
If you want to apply multiple animations to the same property, you should either schedule them sequentially (via the '+' operator or using the *on_complete* callback) or cancel previous animations using the :attr:`~Animation.cancel_all` method. Repeating animation ------------------- .. versionadded:: 1.8.0 .. note:: This is currently only implemented for 'Sequence' animations. To set an animation to repeat, simply set the :attr:`Sequence.repeat` property to `True`:: anim = Animation(...) + Animation(...) anim.repeat = True anim.start(widget) For flow control of animations such as stopping and cancelling, use the methods already in place in the animation module. ''' __all__ = ('Animation', 'AnimationTransition') from math import sqrt, cos, sin, pi from kivy.event import EventDispatcher from kivy.clock import Clock from kivy.compat import string_types, iterkeys from kivy.weakproxy import WeakProxy class Animation(EventDispatcher): '''Create an animation definition that can be used to animate a Widget. :Parameters: `duration` or `d`: float, defaults to 1. Duration of the animation, in seconds. `transition` or `t`: str or func Transition function for animate properties. It can be the name of a method from :class:`AnimationTransition`. `step` or `s`: float Step in milliseconds of the animation. Defaults to 0, which means the animation is updated for every frame. To update the animation less often, set the step value to a float. For example, if you want to animate at 30 FPS, use s=1/30. :Events: `on_start`: animation, widget Fired when the animation is started on a widget. `on_complete`: animation, widget Fired when the animation is completed or stopped on a widget. `on_progress`: animation, widget, progression Fired when the progression of the animation is changing. .. versionchanged:: 1.4.0 Added s/step parameter. .. versionchanged:: 1.10.0 The default value of the step parameter was changed from 1/60. to 0. 
''' _update_ev = None _instances = set() __events__ = ('on_start', 'on_progress', 'on_complete') def __init__(self, **kw): super(Animation, self).__init__() # Initialize self._clock_installed = False self._duration = kw.pop('d', kw.pop('duration', 1.)) self._transition = kw.pop('t', kw.pop('transition', 'linear')) self._step = kw.pop('s', kw.pop('step', 0)) if isinstance(self._transition, string_types): self._transition = getattr(AnimationTransition, self._transition) self._animated_properties = kw self._widgets = {} @property def duration(self): '''Return the duration of the animation. ''' return self._duration @property def transition(self): '''Return the transition of the animation. ''' return self._transition @property def animated_properties(self): '''Return the properties used to animate. ''' return self._animated_properties @staticmethod def stop_all(widget, *largs): '''Stop all animations that concern a specific widget / list of properties. Example:: anim = Animation(x=50) anim.start(widget) # and later Animation.stop_all(widget, 'x') ''' if len(largs): for animation in list(Animation._instances): for x in largs: animation.stop_property(widget, x) else: for animation in set(Animation._instances): animation.stop(widget) @staticmethod def cancel_all(widget, *largs): '''Cancel all animations that concern a specific widget / list of properties. See :attr:`cancel`. Example:: anim = Animation(x=50) anim.start(widget) # and later Animation.cancel_all(widget, 'x') .. versionadded:: 1.4.0 ''' if len(largs): for animation in list(Animation._instances): for x in largs: animation.cancel_property(widget, x) else: for animation in set(Animation._instances): animation.cancel(widget) def start(self, widget): '''Start the animation on a widget. 
''' self.stop(widget) self._initialize(widget) self._register() self.dispatch('on_start', widget) def stop(self, widget): '''Stop the animation previously applied to a widget, triggering the `on_complete` event.''' props = self._widgets.pop(widget.uid, None) if props: self.dispatch('on_complete', widget) self.cancel(widget) def cancel(self, widget): '''Cancel the animation previously applied to a widget. Same effect as :attr:`stop`, except the `on_complete` event will *not* be triggered! .. versionadded:: 1.4.0 ''' self._widgets.pop(widget.uid, None) self._clock_uninstall() if not self._widgets: self._unregister() def stop_property(self, widget, prop): '''Even if an animation is running, remove a property. It will not be animated futher. If it was the only/last property being animated, the animation will be stopped (see :attr:`stop`). ''' props = self._widgets.get(widget.uid, None) if not props: return props['properties'].pop(prop, None) # no more properties to animation ? kill the animation. if not props['properties']: self.stop(widget) def cancel_property(self, widget, prop): '''Even if an animation is running, remove a property. It will not be animated further. If it was the only/last property being animated, the animation will be canceled (see :attr:`cancel`) .. versionadded:: 1.4.0 ''' props = self._widgets.get(widget.uid, None) if not props: return props['properties'].pop(prop, None) # no more properties to animation ? kill the animation. if not props['properties']: self.cancel(widget) def have_properties_to_animate(self, widget): '''Return True if a widget still has properties to animate. .. 
versionadded:: 1.8.0 ''' props = self._widgets.get(widget.uid, None) if props and props['properties']: return True # # Private # def _register(self): Animation._instances.add(self) def _unregister(self): if self in Animation._instances: Animation._instances.remove(self) def _initialize(self, widget): d = self._widgets[widget.uid] = { 'widget': widget, 'properties': {}, 'time': None} # get current values p = d['properties'] for key, value in self._animated_properties.items(): original_value = getattr(widget, key) if isinstance(original_value, (tuple, list)): original_value = original_value[:] elif isinstance(original_value, dict): original_value = original_value.copy() p[key] = (original_value, value) # install clock self._clock_install() def _clock_install(self): if self._clock_installed: return self._update_ev = Clock.schedule_interval(self._update, self._step) self._clock_installed = True def _clock_uninstall(self): if self._widgets or not self._clock_installed: return self._clock_installed = False if self._update_ev is not None: self._update_ev.cancel() self._update_ev = None def _update(self, dt): widgets = self._widgets transition = self._transition calculate = self._calculate for uid in list(widgets.keys())[:]: anim = widgets[uid] widget = anim['widget'] if isinstance(widget, WeakProxy) and not len(dir(widget)): # empty proxy, widget is gone. ref: #2458 self._widgets.pop(uid, None) self._clock_uninstall() if not self._widgets: self._unregister() continue if anim['time'] is None: anim['time'] = 0. else: anim['time'] += dt # calculate progression if self._duration: progress = min(1., anim['time'] / self._duration) else: progress = 1 t = transition(progress) # apply progression on widget for key, values in anim['properties'].items(): a, b = values value = calculate(a, b, t) setattr(widget, key, value) self.dispatch('on_progress', widget, progress) # time to stop ? 
if progress >= 1.: self.stop(widget) def _calculate(self, a, b, t): _calculate = self._calculate if isinstance(a, list) or isinstance(a, tuple): if isinstance(a, list): tp = list else: tp = tuple return tp([_calculate(a[x], b[x], t) for x in range(len(a))]) elif isinstance(a, dict): d = {} for x in iterkeys(a): if x not in b: # User requested to animate only part of the dict. # Copy the rest d[x] = a[x] else: d[x] = _calculate(a[x], b[x], t) return d else: return (a * (1. - t)) + (b * t) # # Default handlers # def on_start(self, widget): pass def on_progress(self, widget, progress): pass def on_complete(self, widget): pass def __add__(self, animation): return Sequence(self, animation) def __and__(self, animation): return Parallel(self, animation) class Sequence(Animation): def __init__(self, anim1, anim2): super(Sequence, self).__init__() #: Repeat the sequence. See 'Repeating animation' in the header #: documentation. self.repeat = False self.anim1 = anim1 self.anim2 = anim2 self.anim1.bind(on_start=self.on_anim1_start, on_progress=self.on_anim1_progress) self.anim2.bind(on_complete=self.on_anim2_complete, on_progress=self.on_anim2_progress) @property def duration(self): return self.anim1.duration + self.anim2.duration def start(self, widget): self.stop(widget) self._widgets[widget.uid] = True self._register() self.anim1.start(widget) self.anim1.bind(on_complete=self.on_anim1_complete) def stop(self, widget): self.anim1.stop(widget) self.anim2.stop(widget) props = self._widgets.pop(widget.uid, None) if props: self.dispatch('on_complete', widget) super(Sequence, self).cancel(widget) def stop_property(self, widget, prop): self.anim1.stop_property(widget, prop) self.anim2.stop_property(widget, prop) if (not self.anim1.have_properties_to_animate(widget) and not self.anim2.have_properties_to_animate(widget)): self.stop(widget) def cancel(self, widget): self.anim1.cancel(widget) self.anim2.cancel(widget) super(Sequence, self).cancel(widget) def cancel_property(self, 
widget, prop): '''Even if an animation is running, remove a property. It will not be animated further. If it was the only/last property being animated, the animation will be canceled (see :attr:`cancel`) This method overrides `:class:kivy.animation.Animation`'s version, to cancel it on all animations of the Sequence. .. versionadded:: 1.10.0 ''' self.anim1.cancel_property(widget, prop) self.anim2.cancel_property(widget, prop) if (not self.anim1.have_properties_to_animate(widget) and not self.anim2.have_properties_to_animate(widget)): self.cancel(widget) def on_anim1_start(self, instance, widget): self.dispatch('on_start', widget) def on_anim1_complete(self, instance, widget): self.anim1.unbind(on_complete=self.on_anim1_complete) self.anim2.start(widget) def on_anim1_progress(self, instance, widget, progress): self.dispatch('on_progress', widget, progress / 2.) def on_anim2_complete(self, instance, widget): '''Repeating logic used with boolean variable "repeat". .. versionadded:: 1.7.1 ''' if self.repeat: self.anim1.start(widget) self.anim1.bind(on_complete=self.on_anim1_complete) else: self.dispatch('on_complete', widget) self.cancel(widget) def on_anim2_progress(self, instance, widget, progress): self.dispatch('on_progress', widget, .5 + progress / 2.) 
class Parallel(Animation): def __init__(self, anim1, anim2): super(Parallel, self).__init__() self.anim1 = anim1 self.anim2 = anim2 self.anim1.bind(on_complete=self.on_anim_complete) self.anim2.bind(on_complete=self.on_anim_complete) @property def duration(self): return max(self.anim1.duration, self.anim2.duration) def start(self, widget): self.stop(widget) self.anim1.start(widget) self.anim2.start(widget) self._widgets[widget.uid] = {'complete': 0} self._register() self.dispatch('on_start', widget) def stop(self, widget): self.anim1.stop(widget) self.anim2.stop(widget) props = self._widgets.pop(widget.uid, None) if props: self.dispatch('on_complete', widget) super(Parallel, self).cancel(widget) def stop_property(self, widget, prop): self.anim1.stop_property(widget, prop) self.anim2.stop_property(widget, prop) if (not self.anim1.have_properties_to_animate(widget) and not self.anim2.have_properties_to_animate(widget)): self.stop(widget) def cancel(self, widget): self.anim1.cancel(widget) self.anim2.cancel(widget) super(Parallel, self).cancel(widget) def on_anim_complete(self, instance, widget): self._widgets[widget.uid]['complete'] += 1 if self._widgets[widget.uid]['complete'] == 2: self.stop(widget) class AnimationTransition(object): '''Collection of animation functions to be used with the Animation object. Easing Functions ported to Kivy from the Clutter Project https://developer.gnome.org/clutter/stable/ClutterAlpha.html The `progress` parameter in each animation function is in the range 0-1. ''' @staticmethod def linear(progress): '''.. image:: images/anim_linear.png''' return progress @staticmethod def in_quad(progress): '''.. image:: images/anim_in_quad.png ''' return progress * progress @staticmethod def out_quad(progress): '''.. image:: images/anim_out_quad.png ''' return -1.0 * progress * (progress - 2.0) @staticmethod def in_out_quad(progress): '''.. 
image:: images/anim_in_out_quad.png ''' p = progress * 2 if p < 1: return 0.5 * p * p p -= 1.0 return -0.5 * (p * (p - 2.0) - 1.0) @staticmethod def in_cubic(progress): '''.. image:: images/anim_in_cubic.png ''' return progress * progress * progress @staticmethod def out_cubic(progress): '''.. image:: images/anim_out_cubic.png ''' p = progress - 1.0 return p * p * p + 1.0 @staticmethod def in_out_cubic(progress): '''.. image:: images/anim_in_out_cubic.png ''' p = progress * 2 if p < 1: return 0.5 * p * p * p p -= 2 return 0.5 * (p * p * p + 2.0) @staticmethod def in_quart(progress): '''.. image:: images/anim_in_quart.png ''' return progress * progress * progress * progress @staticmethod def out_quart(progress): '''.. image:: images/anim_out_quart.png ''' p = progress - 1.0 return -1.0 * (p * p * p * p - 1.0) @staticmethod def in_out_quart(progress): '''.. image:: images/anim_in_out_quart.png ''' p = progress * 2 if p < 1: return 0.5 * p * p * p * p p -= 2 return -0.5 * (p * p * p * p - 2.0) @staticmethod def in_quint(progress): '''.. image:: images/anim_in_quint.png ''' return progress * progress * progress * progress * progress @staticmethod def out_quint(progress): '''.. image:: images/anim_out_quint.png ''' p = progress - 1.0 return p * p * p * p * p + 1.0 @staticmethod def in_out_quint(progress): '''.. image:: images/anim_in_out_quint.png ''' p = progress * 2 if p < 1: return 0.5 * p * p * p * p * p p -= 2.0 return 0.5 * (p * p * p * p * p + 2.0) @staticmethod def in_sine(progress): '''.. image:: images/anim_in_sine.png ''' return -1.0 * cos(progress * (pi / 2.0)) + 1.0 @staticmethod def out_sine(progress): '''.. image:: images/anim_out_sine.png ''' return sin(progress * (pi / 2.0)) @staticmethod def in_out_sine(progress): '''.. image:: images/anim_in_out_sine.png ''' return -0.5 * (cos(pi * progress) - 1.0) @staticmethod def in_expo(progress): '''.. 
image:: images/anim_in_expo.png ''' if progress == 0: return 0.0 return pow(2, 10 * (progress - 1.0)) @staticmethod def out_expo(progress): '''.. image:: images/anim_out_expo.png ''' if progress == 1.0: return 1.0 return -pow(2, -10 * progress) + 1.0 @staticmethod def in_out_expo(progress): '''.. image:: images/anim_in_out_expo.png ''' if progress == 0: return 0.0 if progress == 1.: return 1.0 p = progress * 2 if p < 1: return 0.5 * pow(2, 10 * (p - 1.0)) p -= 1.0 return 0.5 * (-pow(2, -10 * p) + 2.0) @staticmethod def in_circ(progress): '''.. image:: images/anim_in_circ.png ''' return -1.0 * (sqrt(1.0 - progress * progress) - 1.0) @staticmethod def out_circ(progress): '''.. image:: images/anim_out_circ.png ''' p = progress - 1.0 return sqrt(1.0 - p * p) @staticmethod def in_out_circ(progress): '''.. image:: images/anim_in_out_circ.png ''' p = progress * 2 if p < 1: return -0.5 * (sqrt(1.0 - p * p) - 1.0) p -= 2.0 return 0.5 * (sqrt(1.0 - p * p) + 1.0) @staticmethod def in_elastic(progress): '''.. image:: images/anim_in_elastic.png ''' p = .3 s = p / 4.0 q = progress if q == 1: return 1.0 q -= 1.0 return -(pow(2, 10 * q) * sin((q - s) * (2 * pi) / p)) @staticmethod def out_elastic(progress): '''.. image:: images/anim_out_elastic.png ''' p = .3 s = p / 4.0 q = progress if q == 1: return 1.0 return pow(2, -10 * q) * sin((q - s) * (2 * pi) / p) + 1.0 @staticmethod def in_out_elastic(progress): '''.. image:: images/anim_in_out_elastic.png ''' p = .3 * 1.5 s = p / 4.0 q = progress * 2 if q == 2: return 1.0 if q < 1: q -= 1.0 return -.5 * (pow(2, 10 * q) * sin((q - s) * (2.0 * pi) / p)) else: q -= 1.0 return pow(2, -10 * q) * sin((q - s) * (2.0 * pi) / p) * .5 + 1.0 @staticmethod def in_back(progress): '''.. image:: images/anim_in_back.png ''' return progress * progress * ((1.70158 + 1.0) * progress - 1.70158) @staticmethod def out_back(progress): '''.. 
image:: images/anim_out_back.png ''' p = progress - 1.0 return p * p * ((1.70158 + 1) * p + 1.70158) + 1.0 @staticmethod def in_out_back(progress): '''.. image:: images/anim_in_out_back.png ''' p = progress * 2. s = 1.70158 * 1.525 if p < 1: return 0.5 * (p * p * ((s + 1.0) * p - s)) p -= 2.0 return 0.5 * (p * p * ((s + 1.0) * p + s) + 2.0) @staticmethod def _out_bounce_internal(t, d): p = t / d if p < (1.0 / 2.75): return 7.5625 * p * p elif p < (2.0 / 2.75): p -= (1.5 / 2.75) return 7.5625 * p * p + .75 elif p < (2.5 / 2.75): p -= (2.25 / 2.75) return 7.5625 * p * p + .9375 else: p -= (2.625 / 2.75) return 7.5625 * p * p + .984375 @staticmethod def _in_bounce_internal(t, d): return 1.0 - AnimationTransition._out_bounce_internal(d - t, d) @staticmethod def in_bounce(progress): '''.. image:: images/anim_in_bounce.png ''' return AnimationTransition._in_bounce_internal(progress, 1.) @staticmethod def out_bounce(progress): '''.. image:: images/anim_out_bounce.png ''' return AnimationTransition._out_bounce_internal(progress, 1.) @staticmethod def in_out_bounce(progress): '''.. image:: images/anim_in_out_bounce.png ''' p = progress * 2. if p < 1.: return AnimationTransition._in_bounce_internal(p, 1.) * .5 return AnimationTransition._out_bounce_internal(p - 1., 1.) * .5 + .5
gpl-3.0
Cadasta/cadasta-platform
cadasta/questionnaires/migrations/0017_populate_additional_fields.py
1
3046
# -*- coding: utf-8 -*-
# Generated by Django 1.9.11 on 2016-11-23 09:04
from __future__ import unicode_literals

from django.db import migrations
from pyxform.xls2json import parse_file_to_json
from botocore.exceptions import ClientError


def populate_additional_fields(apps, schema_editor):
    """Backfill default/hint/relevant/required on Questions and
    relevant/type on QuestionGroups by re-parsing each project's
    current questionnaire XLSForm.
    """
    Project = apps.get_model('organization', 'Project')
    Questionnaire = apps.get_model('questionnaires', 'Questionnaire')
    QuestionGroup = apps.get_model('questionnaires', 'QuestionGroup')
    Question = apps.get_model('questionnaires', 'Question')

    def update_question(child, **kwargs):
        # Relevance/required live under the XLSForm "bind" attributes.
        relevant = None
        required = False
        bind = child.get('bind')
        if bind:
            relevant = bind.get('relevant', None)
            required = bind.get('required', 'no') == 'yes'

        question = Question.objects.get(**kwargs)
        question.default = child.get('default', None)
        question.hint = child.get('hint', None)
        question.relevant = relevant
        question.required = required
        question.save()

    def update_group(child, **kwargs):
        # NOTE: the original signature had an unused ``group=None``
        # parameter that was immediately shadowed; it has been dropped.
        group = QuestionGroup.objects.get(**kwargs)
        relevant = None
        bind = child.get('bind')
        if bind:
            relevant = bind.get('relevant', None)
        group.relevant = relevant
        group.type = child['type']
        # BUG FIX: the original never saved, so the group backfill was
        # silently discarded.
        group.save()
        return group.id

    def update_children(children, questionnaire_id, question_group_id=None):
        # Walk the form tree: groups/repeats recurse with their own id
        # as the new parent; everything else is a plain question.
        for child in children:
            if child['type'] in ['group', 'repeat']:
                group_id = update_group(
                    child,
                    questionnaire_id=questionnaire_id,
                    question_group_id=question_group_id,
                    name=child['name'])
                update_children(child.get('children', []),
                                questionnaire_id,
                                question_group_id=group_id)
            else:
                update_question(
                    child,
                    questionnaire_id=questionnaire_id,
                    question_group_id=question_group_id,
                    name=child['name'])

    for project in Project.objects.all():
        if project.current_questionnaire:
            questionnaire = Questionnaire.objects.get(
                id=project.current_questionnaire)
            if questionnaire.xls_form:
                try:
                    q_json = parse_file_to_json(
                        questionnaire.xls_form.file.name)
                    update_children(
                        q_json.get('children', []), questionnaire.id)
                except ClientError:
                    # The form file may be gone from remote storage;
                    # skip this project rather than failing the whole
                    # migration.
                    pass


class Migration(migrations.Migration):

    dependencies = [
        ('questionnaires', '0016_populate_question_index_field'),
    ]

    operations = [
        migrations.RunPython(
            populate_additional_fields,
            reverse_code=migrations.RunPython.noop
        )
    ]
agpl-3.0
EricSekyere/airmozilla
airmozilla/base/helpers.py
6
1916
import jinja2
from jingo import register

from django.contrib.sites.models import RequestSite
from django.utils.http import urlquote

from funfactory.helpers import static


@register.function
@jinja2.contextfunction
def abs_static(context, path):
    """Make sure we always return a FULL absolute URL that starts
    with 'http'.
    """
    path = static(path)
    # Mirror the scheme of the current request.
    prefix = 'https' if context['request'].is_secure() else 'http'

    if path.startswith('/') and not path.startswith('//'):
        # e.g. '/media/foo.png' -> prepend scheme and current domain
        root_url = '%s://%s' % (prefix,
                                RequestSite(context['request']).domain)
        path = root_url + path

    if path.startswith('//'):
        # Scheme-relative URL, e.g. '//cdn.example.com/foo.png'
        path = '%s:%s' % (prefix, path)

    assert path.startswith('http://') or path.startswith('https://')
    return path


@register.function
def show_duration(duration, include_seconds=False):
    """Return a human readable duration such as '1 hour 2 minutes' for
    ``duration`` seconds.

    Floor division (``//``) keeps the components integral under Python 3
    as well; with true division `hours`/`minutes` would become floats
    and change the `> 1` / truthiness branching.
    """
    hours = duration // 3600
    seconds = duration % 3600
    minutes = seconds // 60
    seconds = seconds % 60
    out = []
    if hours > 1:
        out.append('%d hours' % hours)
    elif hours:
        out.append('1 hour')
    if minutes > 1:
        out.append('%d minutes' % minutes)
    elif minutes:
        out.append('1 minute')
    # Seconds are only shown on request, or when they are all there is.
    if include_seconds or (not hours and not minutes):
        if seconds > 1:
            out.append('%d seconds' % seconds)
        elif seconds:
            out.append('1 second')
    return ' '.join(out)


@register.function
def show_duration_compact(duration):
    """Return a compact duration such as '1h2m3s' for ``duration``
    seconds.  Uses floor division for Python 2/3 correctness (see
    show_duration).
    """
    hours = duration // 3600
    seconds = duration % 3600
    minutes = seconds // 60
    seconds = seconds % 60
    out = []
    if hours:
        out.append('%dh' % hours)
    if hours or minutes:
        out.append('%dm' % minutes)
    if hours or minutes or seconds:
        out.append('%ds' % seconds)
    return ''.join(out)


@register.function
def mozillians_permalink(username):
    """Return the mozillians.org profile URL for ``username``."""
    return 'https://mozillians.org/u/%s' % urlquote(username)
bsd-3-clause
kenshay/ImageScripter
ProgramData/SystemFiles/Python/Lib/site-packages/networkx/readwrite/nx_yaml.py
6
2576
""" **** YAML **** Read and write NetworkX graphs in YAML format. "YAML is a data serialization format designed for human readability and interaction with scripting languages." See http://www.yaml.org for documentation. Format ------ http://pyyaml.org/wiki/PyYAML """ __author__ = """Aric Hagberg (hagberg@lanl.gov)""" # Copyright (C) 2004-2018 by # Aric Hagberg <hagberg@lanl.gov> # Dan Schult <dschult@colgate.edu> # Pieter Swart <swart@lanl.gov> # All rights reserved. # BSD license. __all__ = ['read_yaml', 'write_yaml'] import networkx as nx from networkx.utils import open_file @open_file(1, mode='w') def write_yaml(G_to_be_yaml, path_for_yaml_output, **kwds): """Write graph G in YAML format to path. YAML is a data serialization format designed for human readability and interaction with scripting languages [1]_. Parameters ---------- G : graph A NetworkX graph path : file or string File or filename to write. Filenames ending in .gz or .bz2 will be compressed. Notes ----- To use encoding on the output file include e.g. `encoding='utf-8'` in the keyword arguments. Examples -------- >>> G=nx.path_graph(4) >>> nx.write_yaml(G,'test.yaml') References ---------- .. [1] http://www.yaml.org """ try: import yaml except ImportError: raise ImportError("write_yaml() requires PyYAML: http://pyyaml.org/") yaml.dump(G_to_be_yaml, path_for_yaml_output, **kwds) @open_file(0, mode='r') def read_yaml(path): """Read graph in YAML format from path. YAML is a data serialization format designed for human readability and interaction with scripting languages [1]_. Parameters ---------- path : file or string File or filename to read. Filenames ending in .gz or .bz2 will be uncompressed. Returns ------- G : NetworkX graph Examples -------- >>> G=nx.path_graph(4) >>> nx.write_yaml(G,'test.yaml') >>> G=nx.read_yaml('test.yaml') References ---------- .. 
[1] http://www.yaml.org """ try: import yaml except ImportError: raise ImportError("read_yaml() requires PyYAML: http://pyyaml.org/") G = yaml.load(path) return G # fixture for nose tests def setup_module(module): from nose import SkipTest try: import yaml except: raise SkipTest("PyYAML not available") # fixture for nose tests def teardown_module(module): import os os.unlink('test.yaml')
gpl-3.0
annarev/tensorflow
tensorflow/python/autograph/impl/conversion_test.py
14
4103
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for conversion module.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import imp import sys import types import weakref import six from tensorflow.python.autograph import utils from tensorflow.python.autograph.core import config from tensorflow.python.autograph.core import converter from tensorflow.python.autograph.impl import api from tensorflow.python.autograph.impl import conversion from tensorflow.python.autograph.impl.testing import pybind_for_testing from tensorflow.python.eager import function from tensorflow.python.framework import constant_op from tensorflow.python.platform import test class ConversionTest(test.TestCase): def _simple_program_ctx(self): return converter.ProgramContext( options=converter.ConversionOptions(recursive=True), autograph_module=api) def test_is_allowlisted(self): def test_fn(): return constant_op.constant(1) self.assertFalse(conversion.is_allowlisted(test_fn)) self.assertTrue(conversion.is_allowlisted(utils)) self.assertTrue(conversion.is_allowlisted(constant_op.constant)) def test_is_allowlisted_tensorflow_like(self): tf_like = imp.new_module('tensorflow_foo') def test_fn(): pass tf_like.test_fn = test_fn test_fn.__module__ = tf_like 
self.assertFalse(conversion.is_allowlisted(tf_like.test_fn)) def test_is_allowlisted_callable_allowlisted_call(self): allowlisted_mod = imp.new_module('test_allowlisted_call') sys.modules['test_allowlisted_call'] = allowlisted_mod config.CONVERSION_RULES = ((config.DoNotConvert('test_allowlisted_call'),) + config.CONVERSION_RULES) class TestClass(object): def __call__(self): pass def allowlisted_method(self): pass TestClass.__module__ = 'test_allowlisted_call' if six.PY2: TestClass.__call__.__func__.__module__ = 'test_allowlisted_call' else: TestClass.__call__.__module__ = 'test_allowlisted_call' class Subclass(TestClass): def converted_method(self): pass tc = Subclass() self.assertTrue(conversion.is_allowlisted(TestClass.__call__)) self.assertTrue(conversion.is_allowlisted(tc)) self.assertTrue(conversion.is_allowlisted(tc.__call__)) self.assertTrue(conversion.is_allowlisted(tc.allowlisted_method)) self.assertFalse(conversion.is_allowlisted(Subclass)) self.assertFalse(conversion.is_allowlisted(tc.converted_method)) def test_is_allowlisted_tfmethodwrapper(self): class TestClass(object): def member_function(self): pass TestClass.__module__ = 'test_allowlisted_call' test_obj = TestClass() def test_fn(self): del self bound_method = types.MethodType( test_fn, function.TfMethodTarget( weakref.ref(test_obj), test_obj.member_function)) self.assertTrue(conversion.is_allowlisted(bound_method)) def test_is_allowlisted_pybind(self): test_object = pybind_for_testing.TestClassDef() with test.mock.patch.object(config, 'CONVERSION_RULES', ()): # TODO(mdan): This should return True for functions and methods. # Note: currently, native bindings are allowlisted by a separate check. self.assertFalse(conversion.is_allowlisted(test_object.method)) if __name__ == '__main__': test.main()
apache-2.0
pquentin/django
tests/template_tests/filter_tests/test_date.py
207
2534
# Tests for the |date template filter and the date() default filter function.
from datetime import datetime, time

from django.template.defaultfilters import date
from django.test import SimpleTestCase
from django.utils import timezone

from ..utils import setup
from .timezone_utils import TimezoneTestCase


class DateTests(TimezoneTestCase):
    """Template-level tests: render {{ value|date:... }} through the engine."""

    @setup({'date01': '{{ d|date:"m" }}'})
    def test_date01(self):
        output = self.engine.render_to_string('date01', {'d': datetime(2008, 1, 1)})
        self.assertEqual(output, '01')

    @setup({'date02': '{{ d|date }}'})
    def test_date02(self):
        # No format argument: falls back to the DATE_FORMAT setting.
        output = self.engine.render_to_string('date02', {'d': datetime(2008, 1, 1)})
        self.assertEqual(output, 'Jan. 1, 2008')

    @setup({'date03': '{{ d|date:"m" }}'})
    def test_date03(self):
        """
        #9520: Make sure |date doesn't blow up on non-dates
        """
        output = self.engine.render_to_string('date03', {'d': 'fail_string'})
        self.assertEqual(output, '')

    # ISO date formats ("o" is the ISO-8601 week-numbering year, which can
    # differ from the calendar year around New Year).

    @setup({'date04': '{{ d|date:"o" }}'})
    def test_date04(self):
        # Dec 29, 2008 belongs to ISO week 1 of 2009.
        output = self.engine.render_to_string('date04', {'d': datetime(2008, 12, 29)})
        self.assertEqual(output, '2009')

    @setup({'date05': '{{ d|date:"o" }}'})
    def test_date05(self):
        # Jan 3, 2010 still belongs to ISO week 53 of 2009.
        output = self.engine.render_to_string('date05', {'d': datetime(2010, 1, 3)})
        self.assertEqual(output, '2009')

    # Timezone name ("e" format specifier)

    @setup({'date06': '{{ d|date:"e" }}'})
    def test_date06(self):
        output = self.engine.render_to_string(
            'date06', {'d': datetime(2009, 3, 12, tzinfo=timezone.get_fixed_timezone(30))})
        self.assertEqual(output, '+0030')

    @setup({'date07': '{{ d|date:"e" }}'})
    def test_date07(self):
        # Naive datetime: no timezone name to render.
        output = self.engine.render_to_string('date07', {'d': datetime(2009, 3, 12)})
        self.assertEqual(output, '')

    # #19370: Make sure |date doesn't blow up on a midnight time object
    @setup({'date08': '{{ t|date:"H:i" }}'})
    def test_date08(self):
        output = self.engine.render_to_string('date08', {'t': time(0, 1)})
        self.assertEqual(output, '00:01')

    @setup({'date09': '{{ t|date:"H:i" }}'})
    def test_date09(self):
        # time(0, 0) is falsy; the filter must still format it, not skip it.
        output = self.engine.render_to_string('date09', {'t': time(0, 0)})
        self.assertEqual(output, '00:00')


class FunctionTests(SimpleTestCase):
    """Direct calls to the date() filter function (no template engine)."""

    def test_date(self):
        self.assertEqual(date(datetime(2005, 12, 29), "d F Y"), '29 December 2005')

    def test_escape_characters(self):
        # Backslash escapes literal characters in the format string.
        self.assertEqual(date(datetime(2005, 12, 29), r'jS \o\f F'), '29th of December')
bsd-3-clause
gaboflowers/mallador_v3
unidecode/x062.py
252
4620
# Transliteration table for Unicode code points U+6200..U+62FF.
# data[i] is the ASCII transliteration of chr(0x6200 + i); every entry
# carries a single trailing space, and '[?]' marks an unmapped code point.
# The table is written as 16 rows of 16 names (one row per high nibble)
# and expanded into the final tuple below.
data = tuple(name + ' ' for name in (
    'Lian Nan Mi Tang Jue Gang Gang Gang Ge Yue Wu Jian Xu Shu Rong Xi '        # 0x00-0x0f
    'Cheng Wo Jie Ge Jian Qiang Huo Qiang Zhan Dong Qi Jia Die Zei Jia Ji '     # 0x10-0x1f
    'Shi Kan Ji Kui Gai Deng Zhan Chuang Ge Jian Jie Yu Jian Yan Lu Xi '        # 0x20-0x2f
    'Zhan Xi Xi Chuo Dai Qu Hu Hu Hu E Shi Li Mao Hu Li Fang '                  # 0x30-0x3f
    'Suo Bian Dian Jiong Shang Yi Yi Shan Hu Fei Yan Shou T Cai Zha Qiu '       # 0x40-0x4f
    'Le Bu Ba Da Reng Fu Hameru Zai Tuo Zhang Diao Kang Yu Ku Han Shen '        # 0x50-0x5f
    'Cha Yi Gu Kou Wu Tuo Qian Zhi Ren Kuo Men Sao Yang Niu Ban Che '           # 0x60-0x6f
    'Rao Xi Qian Ban Jia Yu Fu Ao Xi Pi Zhi Zi E Dun Zhao Cheng '               # 0x70-0x7f
    'Ji Yan Kuang Bian Chao Ju Wen Hu Yue Jue Ba Qin Zhen Zheng Yun Wan '       # 0x80-0x8f
    'Nu Yi Shu Zhua Pou Tou Dou Kang Zhe Pou Fu Pao Ba Ao Ze Tuan '             # 0x90-0x9f
    'Kou Lun Qiang [?] Hu Bao Bing Zhi Peng Tan Pu Pi Tai Yao Zhen Zha '        # 0xa0-0xaf
    'Yang Bao He Ni Yi Di Chi Pi Za Mo Mo Shen Ya Chou Qu Min '                 # 0xb0-0xbf
    'Chu Jia Fu Zhan Zhu Dan Chai Mu Nian La Fu Pao Ban Pai Ling Na '           # 0xc0-0xcf
    'Guai Qian Ju Tuo Ba Tuo Tuo Ao Ju Zhuo Pan Zhao Bai Bai Di Ni '            # 0xd0-0xdf
    'Ju Kuo Long Jian [?] Yong Lan Ning Bo Ze Qian Hen Gua Shi Jie Zheng '      # 0xe0-0xef
    'Nin Gong Gong Quan Shuan Cun Zan Kao Chi Xie Ce Hui Pin Zhuai Shi Na'      # 0xf0-0xff
).split())
gpl-3.0
robk5uj/invenio
modules/bibrank/lib/bibrank_regression_tests.py
8
7969
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.

"""BibRank Regression Test Suite."""

__revision__ = "$Id$"

import unittest

from invenio.config import CFG_SITE_URL, CFG_SITE_RECORD
from invenio.dbquery import run_sql
from invenio.testutils import make_test_suite, run_test_suite, \
                              test_web_page_content, merge_error_messages


class BibRankWebPagesAvailabilityTest(unittest.TestCase):
    """Check BibRank web pages whether they are up or not."""

    def test_rank_by_word_similarity_pages_availability(self):
        """bibrank - availability of ranking search results pages"""
        baseurl = CFG_SITE_URL + '/search'
        _exports = ['?p=ellis&r=wrd']
        error_messages = []
        for url in [baseurl + page for page in _exports]:
            # test_web_page_content returns a list of error strings (empty on success).
            error_messages.extend(test_web_page_content(url))
        if error_messages:
            self.fail(merge_error_messages(error_messages))
        return

    def test_similar_records_pages_availability(self):
        """bibrank - availability of similar records results pages"""
        baseurl = CFG_SITE_URL + '/search'
        _exports = ['?p=recid%3A18&rm=wrd']
        error_messages = []
        for url in [baseurl + page for page in _exports]:
            error_messages.extend(test_web_page_content(url))
        if error_messages:
            self.fail(merge_error_messages(error_messages))
        return


class BibRankIntlMethodNames(unittest.TestCase):
    """Check BibRank I18N ranking method names."""

    def test_i18n_ranking_method_names(self):
        """bibrank - I18N ranking method names"""
        # Both localized ranking method labels must appear on the advanced
        # search page of the demo collection.
        self.assertEqual([], test_web_page_content(
            CFG_SITE_URL + '/collection/Articles%20%26%20Preprints?as=1',
            expected_text="times cited"))
        self.assertEqual([], test_web_page_content(
            CFG_SITE_URL + '/collection/Articles%20%26%20Preprints?as=1',
            expected_text="journal impact factor"))


class BibRankWordSimilarityRankingTest(unittest.TestCase):
    """Check BibRank word similarity ranking tools."""

    def test_search_results_ranked_by_similarity(self):
        """bibrank - search results ranked by word similarity"""
        # Expected ordering is fixed by the demo-site ranking data.
        self.assertEqual([], test_web_page_content(
            CFG_SITE_URL + '/search?p=ellis&rm=wrd&of=id',
            expected_text="[8, 10, 11, 12, 47, 17, 13, 16, 9, 14, 18, 15]"))

    def test_similar_records_link(self):
        """bibrank - 'Similar records' link"""
        self.assertEqual([], test_web_page_content(
            CFG_SITE_URL + '/search?p=recid%3A77&rm=wrd&of=id',
            expected_text="[84, 96, 95, 85, 77]"))


class BibRankCitationRankingTest(unittest.TestCase):
    """Check BibRank citation ranking tools."""

    def test_search_results_ranked_by_citations(self):
        """bibrank - search results ranked by number of citations"""
        self.assertEqual([], test_web_page_content(
            CFG_SITE_URL + '/search?cc=Articles+%26+Preprints&p=Klebanov&rm=citation&of=id',
            expected_text="[85, 77, 84]"))

    def test_search_results_ranked_by_citations_verbose(self):
        """bibrank - search results ranked by number of citations, verbose output"""
        # Verbose mode exposes the internal (recid, score) list.
        self.assertEqual([], test_web_page_content(
            CFG_SITE_URL + '/search?cc=Articles+%26+Preprints&p=Klebanov&rm=citation&verbose=2',
            expected_text="find_citations retlist [[85, 0], [77, 2], [84, 3]]"))

    def test_detailed_record_citations_tab(self):
        """bibrank - detailed record, citations tab"""
        self.assertEqual([], test_web_page_content(
            CFG_SITE_URL + '/' + CFG_SITE_RECORD + '/79/citations',
            expected_text=["Cited by: 1 records",
                           "Co-cited with: 2 records"]))


class BibRankExtCitesTest(unittest.TestCase):
    """Check BibRank citation ranking tools with respect to the external cites."""

    def _detect_extcite_info(self, extcitepubinfo):
        """
        Helper function to return list of recIDs citing given
        extcitepubinfo.  Could be move to the business logic, if
        interesting for other callers.
        """
        res = run_sql("""SELECT id_bibrec FROM rnkCITATIONDATAEXT
                          WHERE extcitepubinfo=%s""",
                      (extcitepubinfo,))
        return [int(x[0]) for x in res]

    def test_extcite_via_report_number(self):
        """bibrank - external cites, via report number"""
        # The external paper hep-th/0112258 is cited by 9 demo
        # records: you can find out via 999:"hep-th/0112258", and we
        # could eventually automatize this query, but it is maybe
        # safer to leave it manual in case queries fail for some
        # reason.
        test_case_repno = "hep-th/0112258"
        test_case_repno_cited_by = [77, 78, 81, 82, 85, 86, 88, 90, 91]
        self.assertEqual(self._detect_extcite_info(test_case_repno),
                         test_case_repno_cited_by)

    def test_extcite_via_publication_reference(self):
        """bibrank - external cites, via publication reference"""
        # The external paper "J. Math. Phys. 4 (1963) 915" does not
        # have any report number, and is cited by 1 demo record.
        test_case_pubinfo = "J. Math. Phys. 4 (1963) 915"
        test_case_pubinfo_cited_by = [90]
        self.assertEqual(self._detect_extcite_info(test_case_pubinfo),
                         test_case_pubinfo_cited_by)

    def test_intcite_via_report_number(self):
        """bibrank - external cites, no internal papers via report number"""
        # The internal paper hep-th/9809057 is cited by 2 demo
        # records, but it also exists as a demo record, so it should
        # not be found in the extcite table.
        test_case_repno = "hep-th/9809057"
        test_case_repno_cited_by = []
        self.assertEqual(self._detect_extcite_info(test_case_repno),
                         test_case_repno_cited_by)

    def test_intcite_via_publication_reference(self):
        """bibrank - external cites, no internal papers via publication reference"""
        # The internal paper #18 has only pubinfo, no repno, and is
        # cited by internal paper #96 via its pubinfo, so should not
        # be present in the extcite list:
        test_case_repno = "Phys. Lett., B 151 (1985) 357"
        test_case_repno_cited_by = []
        self.assertEqual(self._detect_extcite_info(test_case_repno),
                         test_case_repno_cited_by)


TEST_SUITE = make_test_suite(BibRankWebPagesAvailabilityTest,
                             BibRankIntlMethodNames,
                             BibRankWordSimilarityRankingTest,
                             BibRankCitationRankingTest,
                             BibRankExtCitesTest)

if __name__ == "__main__":
    run_test_suite(TEST_SUITE, warn_user=True)
gpl-2.0
victorzhao/miniblink49
third_party/ply/lex.py
482
40739
# ----------------------------------------------------------------------------- # ply: lex.py # # Copyright (C) 2001-2011, # David M. Beazley (Dabeaz LLC) # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # * Neither the name of the David Beazley or Dabeaz LLC may be used to # endorse or promote products derived from this software without # specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
# ----------------------------------------------------------------------------- __version__ = "3.4" __tabversion__ = "3.2" # Version of table file used import re, sys, types, copy, os # This tuple contains known string types try: # Python 2.6 StringTypes = (types.StringType, types.UnicodeType) except AttributeError: # Python 3.0 StringTypes = (str, bytes) # Extract the code attribute of a function. Different implementations # are for Python 2/3 compatibility. if sys.version_info[0] < 3: def func_code(f): return f.func_code else: def func_code(f): return f.__code__ # This regular expression is used to match valid token names _is_identifier = re.compile(r'^[a-zA-Z0-9_]+$') # Exception thrown when invalid token encountered and no default error # handler is defined. class LexError(Exception): def __init__(self,message,s): self.args = (message,) self.text = s # Token class. This class is used to represent the tokens produced. class LexToken(object): def __str__(self): return "LexToken(%s,%r,%d,%d)" % (self.type,self.value,self.lineno,self.lexpos) def __repr__(self): return str(self) # This object is a stand-in for a logging object created by the # logging module. class PlyLogger(object): def __init__(self,f): self.f = f def critical(self,msg,*args,**kwargs): self.f.write((msg % args) + "\n") def warning(self,msg,*args,**kwargs): self.f.write("WARNING: "+ (msg % args) + "\n") def error(self,msg,*args,**kwargs): self.f.write("ERROR: " + (msg % args) + "\n") info = critical debug = critical # Null logger is used when no output is generated. Does nothing. class NullLogger(object): def __getattribute__(self,name): return self def __call__(self,*args,**kwargs): return self # ----------------------------------------------------------------------------- # === Lexing Engine === # # The following Lexer class implements the lexer runtime. 
# There are only
# a few public methods and attributes:
#
#    input()          -  Store a new string in the lexer
#    token()          -  Get the next token
#    clone()          -  Clone the lexer
#
#    lineno           -  Current line number
#    lexpos           -  Current position in the input string
# -----------------------------------------------------------------------------

class Lexer:
    def __init__(self):
        self.lexre = None             # Master regular expression. This is a list of
                                      # tuples (re,findex) where re is a compiled
                                      # regular expression and findex is a list
                                      # mapping regex group numbers to rules
        self.lexretext = None         # Current regular expression strings
        self.lexstatere = {}          # Dictionary mapping lexer states to master regexs
        self.lexstateretext = {}      # Dictionary mapping lexer states to regex strings
        self.lexstaterenames = {}     # Dictionary mapping lexer states to symbol names
        self.lexstate = "INITIAL"     # Current lexer state
        self.lexstatestack = []       # Stack of lexer states
        self.lexstateinfo = None      # State information
        self.lexstateignore = {}      # Dictionary of ignored characters for each state
        self.lexstateerrorf = {}      # Dictionary of error functions for each state
        self.lexreflags = 0           # Optional re compile flags
        self.lexdata = None           # Actual input data (as a string)
        self.lexpos = 0               # Current position in input text
        self.lexlen = 0               # Length of the input text
        self.lexerrorf = None         # Error rule (if any)
        self.lextokens = None         # List of valid tokens
        self.lexignore = ""           # Ignored characters
        self.lexliterals = ""         # Literal characters that can be passed through
        self.lexmodule = None         # Module
        self.lineno = 1               # Current line number
        self.lexoptimize = 0          # Optimized mode

    def clone(self, object=None):
        """Return a shallow copy of the lexer, optionally rebinding its rule
        methods onto a new owner object."""
        c = copy.copy(self)

        # If the object parameter has been supplied, it means we are attaching the
        # lexer to a new object.  In this case, we have to rebind all methods in
        # the lexstatere and lexstateerrorf tables.

        if object:
            newtab = { }
            for key, ritem in self.lexstatere.items():
                newre = []
                for cre, findex in ritem:
                    newfindex = []
                    for f in findex:
                        if not f or not f[0]:
                            # Entry has no associated function; keep as-is.
                            newfindex.append(f)
                            continue
                        # Rebind by name so the method resolves on the new object.
                        newfindex.append((getattr(object, f[0].__name__), f[1]))
                    newre.append((cre, newfindex))
                newtab[key] = newre
            c.lexstatere = newtab
            c.lexstateerrorf = { }
            for key, ef in self.lexstateerrorf.items():
                c.lexstateerrorf[key] = getattr(object, ef.__name__)
            c.lexmodule = object
        return c

    # ------------------------------------------------------------
    # writetab() - Write lexer information to a table file
    # ------------------------------------------------------------
    def writetab(self, tabfile, outputdir=""):
        if isinstance(tabfile, types.ModuleType):
            # Already a module: nothing to write.
            return
        basetabfilename = tabfile.split(".")[-1]
        filename = os.path.join(outputdir, basetabfilename) + ".py"
        tf = open(filename, "w")
        tf.write("# %s.py. This file automatically created by PLY (version %s). Don't edit!\n" % (tabfile, __version__))
        tf.write("_tabversion = %s\n" % repr(__version__))
        tf.write("_lextokens = %s\n" % repr(self.lextokens))
        tf.write("_lexreflags = %s\n" % repr(self.lexreflags))
        tf.write("_lexliterals = %s\n" % repr(self.lexliterals))
        tf.write("_lexstateinfo = %s\n" % repr(self.lexstateinfo))

        tabre = { }
        # Collect all functions in the initial state
        initial = self.lexstatere["INITIAL"]
        initialfuncs = []
        for part in initial:
            for f in part[1]:
                if f and f[0]:
                    initialfuncs.append(f)

        for key, lre in self.lexstatere.items():
            titem = []
            for i in range(len(lre)):
                # Functions are serialized by name via _funcs_to_names.
                titem.append((self.lexstateretext[key][i], _funcs_to_names(lre[i][1], self.lexstaterenames[key][i])))
            tabre[key] = titem

        tf.write("_lexstatere = %s\n" % repr(tabre))
        tf.write("_lexstateignore = %s\n" % repr(self.lexstateignore))

        taberr = { }
        for key, ef in self.lexstateerrorf.items():
            if ef:
                taberr[key] = ef.__name__
            else:
                taberr[key] = None
        tf.write("_lexstateerrorf = %s\n" % repr(taberr))
        tf.close()

    # ------------------------------------------------------------
    # readtab() - Read lexer information from a tab file
    # ------------------------------------------------------------
    def readtab(self, tabfile, fdict):
        if isinstance(tabfile, types.ModuleType):
            lextab = tabfile
        else:
            if sys.version_info[0] < 3:
                exec("import %s as lextab" % tabfile)
            else:
                # In PY3 exec can't rebind names in the local scope; use a dict.
                env = { }
                exec("import %s as lextab" % tabfile, env, env)
                lextab = env['lextab']

        if getattr(lextab, "_tabversion", "0.0") != __version__:
            raise ImportError("Inconsistent PLY version")

        self.lextokens = lextab._lextokens
        self.lexreflags = lextab._lexreflags
        self.lexliterals = lextab._lexliterals
        self.lexstateinfo = lextab._lexstateinfo
        self.lexstateignore = lextab._lexstateignore
        self.lexstatere = { }
        self.lexstateretext = { }
        for key, lre in lextab._lexstatere.items():
            titem = []
            txtitem = []
            for i in range(len(lre)):
                # Recompile the stored regex and resolve function names back
                # to callables from the caller-supplied dictionary.
                titem.append((re.compile(lre[i][0], lextab._lexreflags | re.VERBOSE), _names_to_funcs(lre[i][1], fdict)))
                txtitem.append(lre[i][0])
            self.lexstatere[key] = titem
            self.lexstateretext[key] = txtitem
        self.lexstateerrorf = { }
        for key, ef in lextab._lexstateerrorf.items():
            self.lexstateerrorf[key] = fdict[ef]
        self.begin('INITIAL')

    # ------------------------------------------------------------
    # input() - Push a new string into the lexer
    # ------------------------------------------------------------
    def input(self, s):
        # Pull off the first character to see if s looks like a string
        c = s[:1]
        if not isinstance(c, StringTypes):
            raise ValueError("Expected a string")
        self.lexdata = s
        self.lexpos = 0
        self.lexlen = len(s)

    # ------------------------------------------------------------
    # begin() - Changes the lexing state
    # ------------------------------------------------------------
    def begin(self, state):
        if not state in self.lexstatere:
            raise ValueError("Undefined state")
        self.lexre = self.lexstatere[state]
        self.lexretext = self.lexstateretext[state]
        self.lexignore = self.lexstateignore.get(state, "")
        self.lexerrorf = self.lexstateerrorf.get(state, None)
        self.lexstate = state

    # ------------------------------------------------------------
    # push_state() - Changes the lexing state and saves old on stack
    # ------------------------------------------------------------
    def push_state(self, state):
        self.lexstatestack.append(self.lexstate)
        self.begin(state)

    # ------------------------------------------------------------
    # pop_state() - Restores the previous state
    # ------------------------------------------------------------
    def pop_state(self):
        self.begin(self.lexstatestack.pop())

    # ------------------------------------------------------------
    # current_state() - Returns the current lexing state
    # ------------------------------------------------------------
    def current_state(self):
        return self.lexstate

    # ------------------------------------------------------------
    # skip() - Skip ahead n characters
    # ------------------------------------------------------------
    def skip(self, n):
        self.lexpos += n

    # ------------------------------------------------------------
    # opttoken() - Return the next token from the Lexer
    #
    # Note: This function has been carefully implemented to be as fast
    # as possible.  Don't make changes unless you really know what
    # you are doing
    # ------------------------------------------------------------
    def token(self):
        # Make local copies of frequently referenced attributes
        lexpos = self.lexpos
        lexlen = self.lexlen
        lexignore = self.lexignore
        lexdata = self.lexdata

        while lexpos < lexlen:
            # This code provides some short-circuit code for whitespace, tabs, and other ignored characters
            if lexdata[lexpos] in lexignore:
                lexpos += 1
                continue

            # Look for a regular expression match
            for lexre, lexindexfunc in self.lexre:
                m = lexre.match(lexdata, lexpos)
                if not m: continue

                # Create a token for return
                tok = LexToken()
                tok.value = m.group()
                tok.lineno = self.lineno
                tok.lexpos = lexpos

                i = m.lastindex
                func, tok.type = lexindexfunc[i]

                if not func:
                    # If no token type was set, it's an ignored token
                    if tok.type:
                        self.lexpos = m.end()
                        return tok
                    else:
                        lexpos = m.end()
                        break

                lexpos = m.end()

                # If token is processed by a function, call it

                tok.lexer = self      # Set additional attributes useful in token rules
                self.lexmatch = m
                self.lexpos = lexpos

                newtok = func(tok)

                # Every function must return a token, if nothing, we just move to next token
                if not newtok:
                    lexpos = self.lexpos          # This is here in case user has updated lexpos.
                    lexignore = self.lexignore    # This is here in case there was a state change
                    break

                # Verify type of the token.  If not in the token map, raise an error
                if not self.lexoptimize:
                    if not newtok.type in self.lextokens:
                        raise LexError("%s:%d: Rule '%s' returned an unknown token type '%s'" % (
                            func_code(func).co_filename,
                            func_code(func).co_firstlineno,
                            func.__name__,
                            newtok.type), lexdata[lexpos:])

                return newtok
            else:
                # (for-else: no master regex matched at this position)
                # No match, see if in literals
                if lexdata[lexpos] in self.lexliterals:
                    tok = LexToken()
                    tok.value = lexdata[lexpos]
                    tok.lineno = self.lineno
                    tok.type = tok.value
                    tok.lexpos = lexpos
                    self.lexpos = lexpos + 1
                    return tok

                # No match. Call t_error() if defined.
                if self.lexerrorf:
                    tok = LexToken()
                    tok.value = self.lexdata[lexpos:]
                    tok.lineno = self.lineno
                    tok.type = "error"
                    tok.lexer = self
                    tok.lexpos = lexpos
                    self.lexpos = lexpos
                    newtok = self.lexerrorf(tok)
                    if lexpos == self.lexpos:
                        # Error method didn't change text position at all. This is an error.
                        raise LexError("Scanning error. Illegal character '%s'" % (lexdata[lexpos]), lexdata[lexpos:])
                    lexpos = self.lexpos
                    if not newtok: continue
                    return newtok

                self.lexpos = lexpos
                raise LexError("Illegal character '%s' at index %d" % (lexdata[lexpos], lexpos), lexdata[lexpos:])

        self.lexpos = lexpos + 1
        if self.lexdata is None:
            raise RuntimeError("No input string given with input()")
        return None

    # Iterator interface
    def __iter__(self):
        return self

    def next(self):
        t = self.token()
        if t is None:
            raise StopIteration
        return t

    __next__ = next

# -----------------------------------------------------------------------------
#                           ==== Lex Builder ===
#
# The functions and classes below are used to collect lexing information
# and build a Lexer object from it.
# -----------------------------------------------------------------------------

# -----------------------------------------------------------------------------
# get_caller_module_dict()
#
# This function returns a dictionary containing all of the symbols defined within
# a caller further down the call stack.  This is used to get the environment
# associated with the yacc() call if none was provided.
# ----------------------------------------------------------------------------- def get_caller_module_dict(levels): try: raise RuntimeError except RuntimeError: e,b,t = sys.exc_info() f = t.tb_frame while levels > 0: f = f.f_back levels -= 1 ldict = f.f_globals.copy() if f.f_globals != f.f_locals: ldict.update(f.f_locals) return ldict # ----------------------------------------------------------------------------- # _funcs_to_names() # # Given a list of regular expression functions, this converts it to a list # suitable for output to a table file # ----------------------------------------------------------------------------- def _funcs_to_names(funclist,namelist): result = [] for f,name in zip(funclist,namelist): if f and f[0]: result.append((name, f[1])) else: result.append(f) return result # ----------------------------------------------------------------------------- # _names_to_funcs() # # Given a list of regular expression function names, this converts it back to # functions. # ----------------------------------------------------------------------------- def _names_to_funcs(namelist,fdict): result = [] for n in namelist: if n and n[0]: result.append((fdict[n[0]],n[1])) else: result.append(n) return result # ----------------------------------------------------------------------------- # _form_master_re() # # This function takes a list of all of the regex components and attempts to # form the master regular expression. Given limitations in the Python re # module, it may be necessary to break the master regex into separate expressions. 
# ----------------------------------------------------------------------------- def _form_master_re(relist,reflags,ldict,toknames): if not relist: return [] regex = "|".join(relist) try: lexre = re.compile(regex,re.VERBOSE | reflags) # Build the index to function map for the matching engine lexindexfunc = [ None ] * (max(lexre.groupindex.values())+1) lexindexnames = lexindexfunc[:] for f,i in lexre.groupindex.items(): handle = ldict.get(f,None) if type(handle) in (types.FunctionType, types.MethodType): lexindexfunc[i] = (handle,toknames[f]) lexindexnames[i] = f elif handle is not None: lexindexnames[i] = f if f.find("ignore_") > 0: lexindexfunc[i] = (None,None) else: lexindexfunc[i] = (None, toknames[f]) return [(lexre,lexindexfunc)],[regex],[lexindexnames] except Exception: m = int(len(relist)/2) if m == 0: m = 1 llist, lre, lnames = _form_master_re(relist[:m],reflags,ldict,toknames) rlist, rre, rnames = _form_master_re(relist[m:],reflags,ldict,toknames) return llist+rlist, lre+rre, lnames+rnames # ----------------------------------------------------------------------------- # def _statetoken(s,names) # # Given a declaration name s of the form "t_" and a dictionary whose keys are # state names, this function returns a tuple (states,tokenname) where states # is a tuple of state names and tokenname is the name of the token. 
For example, # calling this with s = "t_foo_bar_SPAM" might return (('foo','bar'),'SPAM') # ----------------------------------------------------------------------------- def _statetoken(s,names): nonstate = 1 parts = s.split("_") for i in range(1,len(parts)): if not parts[i] in names and parts[i] != 'ANY': break if i > 1: states = tuple(parts[1:i]) else: states = ('INITIAL',) if 'ANY' in states: states = tuple(names) tokenname = "_".join(parts[i:]) return (states,tokenname) # ----------------------------------------------------------------------------- # LexerReflect() # # This class represents information needed to build a lexer as extracted from a # user's input file. # ----------------------------------------------------------------------------- class LexerReflect(object): def __init__(self,ldict,log=None,reflags=0): self.ldict = ldict self.error_func = None self.tokens = [] self.reflags = reflags self.stateinfo = { 'INITIAL' : 'inclusive'} self.files = {} self.error = 0 if log is None: self.log = PlyLogger(sys.stderr) else: self.log = log # Get all of the basic information def get_all(self): self.get_tokens() self.get_literals() self.get_states() self.get_rules() # Validate all of the information def validate_all(self): self.validate_tokens() self.validate_literals() self.validate_rules() return self.error # Get the tokens map def get_tokens(self): tokens = self.ldict.get("tokens",None) if not tokens: self.log.error("No token list is defined") self.error = 1 return if not isinstance(tokens,(list, tuple)): self.log.error("tokens must be a list or tuple") self.error = 1 return if not tokens: self.log.error("tokens is empty") self.error = 1 return self.tokens = tokens # Validate the tokens def validate_tokens(self): terminals = {} for n in self.tokens: if not _is_identifier.match(n): self.log.error("Bad token name '%s'",n) self.error = 1 if n in terminals: self.log.warning("Token '%s' multiply defined", n) terminals[n] = 1 # Get the literals specifier def 
get_literals(self): self.literals = self.ldict.get("literals","") # Validate literals def validate_literals(self): try: for c in self.literals: if not isinstance(c,StringTypes) or len(c) > 1: self.log.error("Invalid literal %s. Must be a single character", repr(c)) self.error = 1 continue except TypeError: self.log.error("Invalid literals specification. literals must be a sequence of characters") self.error = 1 def get_states(self): self.states = self.ldict.get("states",None) # Build statemap if self.states: if not isinstance(self.states,(tuple,list)): self.log.error("states must be defined as a tuple or list") self.error = 1 else: for s in self.states: if not isinstance(s,tuple) or len(s) != 2: self.log.error("Invalid state specifier %s. Must be a tuple (statename,'exclusive|inclusive')",repr(s)) self.error = 1 continue name, statetype = s if not isinstance(name,StringTypes): self.log.error("State name %s must be a string", repr(name)) self.error = 1 continue if not (statetype == 'inclusive' or statetype == 'exclusive'): self.log.error("State type for state %s must be 'inclusive' or 'exclusive'",name) self.error = 1 continue if name in self.stateinfo: self.log.error("State '%s' already defined",name) self.error = 1 continue self.stateinfo[name] = statetype # Get all of the symbols with a t_ prefix and sort them into various # categories (functions, strings, error functions, and ignore characters) def get_rules(self): tsymbols = [f for f in self.ldict if f[:2] == 't_' ] # Now build up a list of functions and a list of strings self.toknames = { } # Mapping of symbols to token names self.funcsym = { } # Symbols defined as functions self.strsym = { } # Symbols defined as strings self.ignore = { } # Ignore strings by state self.errorf = { } # Error functions by state for s in self.stateinfo: self.funcsym[s] = [] self.strsym[s] = [] if len(tsymbols) == 0: self.log.error("No rules of the form t_rulename are defined") self.error = 1 return for f in tsymbols: t = 
self.ldict[f] states, tokname = _statetoken(f,self.stateinfo) self.toknames[f] = tokname if hasattr(t,"__call__"): if tokname == 'error': for s in states: self.errorf[s] = t elif tokname == 'ignore': line = func_code(t).co_firstlineno file = func_code(t).co_filename self.log.error("%s:%d: Rule '%s' must be defined as a string",file,line,t.__name__) self.error = 1 else: for s in states: self.funcsym[s].append((f,t)) elif isinstance(t, StringTypes): if tokname == 'ignore': for s in states: self.ignore[s] = t if "\\" in t: self.log.warning("%s contains a literal backslash '\\'",f) elif tokname == 'error': self.log.error("Rule '%s' must be defined as a function", f) self.error = 1 else: for s in states: self.strsym[s].append((f,t)) else: self.log.error("%s not defined as a function or string", f) self.error = 1 # Sort the functions by line number for f in self.funcsym.values(): if sys.version_info[0] < 3: f.sort(lambda x,y: cmp(func_code(x[1]).co_firstlineno,func_code(y[1]).co_firstlineno)) else: # Python 3.0 f.sort(key=lambda x: func_code(x[1]).co_firstlineno) # Sort the strings by regular expression length for s in self.strsym.values(): if sys.version_info[0] < 3: s.sort(lambda x,y: (len(x[1]) < len(y[1])) - (len(x[1]) > len(y[1]))) else: # Python 3.0 s.sort(key=lambda x: len(x[1]),reverse=True) # Validate all of the t_rules collected def validate_rules(self): for state in self.stateinfo: # Validate all rules defined by functions for fname, f in self.funcsym[state]: line = func_code(f).co_firstlineno file = func_code(f).co_filename self.files[file] = 1 tokname = self.toknames[fname] if isinstance(f, types.MethodType): reqargs = 2 else: reqargs = 1 nargs = func_code(f).co_argcount if nargs > reqargs: self.log.error("%s:%d: Rule '%s' has too many arguments",file,line,f.__name__) self.error = 1 continue if nargs < reqargs: self.log.error("%s:%d: Rule '%s' requires an argument", file,line,f.__name__) self.error = 1 continue if not f.__doc__: self.log.error("%s:%d: No 
regular expression defined for rule '%s'",file,line,f.__name__) self.error = 1 continue try: c = re.compile("(?P<%s>%s)" % (fname,f.__doc__), re.VERBOSE | self.reflags) if c.match(""): self.log.error("%s:%d: Regular expression for rule '%s' matches empty string", file,line,f.__name__) self.error = 1 except re.error: _etype, e, _etrace = sys.exc_info() self.log.error("%s:%d: Invalid regular expression for rule '%s'. %s", file,line,f.__name__,e) if '#' in f.__doc__: self.log.error("%s:%d. Make sure '#' in rule '%s' is escaped with '\\#'",file,line, f.__name__) self.error = 1 # Validate all rules defined by strings for name,r in self.strsym[state]: tokname = self.toknames[name] if tokname == 'error': self.log.error("Rule '%s' must be defined as a function", name) self.error = 1 continue if not tokname in self.tokens and tokname.find("ignore_") < 0: self.log.error("Rule '%s' defined for an unspecified token %s",name,tokname) self.error = 1 continue try: c = re.compile("(?P<%s>%s)" % (name,r),re.VERBOSE | self.reflags) if (c.match("")): self.log.error("Regular expression for rule '%s' matches empty string",name) self.error = 1 except re.error: _etype, e, _etrace = sys.exc_info() self.log.error("Invalid regular expression for rule '%s'. 
%s",name,e) if '#' in r: self.log.error("Make sure '#' in rule '%s' is escaped with '\\#'",name) self.error = 1 if not self.funcsym[state] and not self.strsym[state]: self.log.error("No rules defined for state '%s'",state) self.error = 1 # Validate the error function efunc = self.errorf.get(state,None) if efunc: f = efunc line = func_code(f).co_firstlineno file = func_code(f).co_filename self.files[file] = 1 if isinstance(f, types.MethodType): reqargs = 2 else: reqargs = 1 nargs = func_code(f).co_argcount if nargs > reqargs: self.log.error("%s:%d: Rule '%s' has too many arguments",file,line,f.__name__) self.error = 1 if nargs < reqargs: self.log.error("%s:%d: Rule '%s' requires an argument", file,line,f.__name__) self.error = 1 for f in self.files: self.validate_file(f) # ----------------------------------------------------------------------------- # validate_file() # # This checks to see if there are duplicated t_rulename() functions or strings # in the parser input file. This is done using a simple regular expression # match on each line in the given file. # ----------------------------------------------------------------------------- def validate_file(self,filename): import os.path base,ext = os.path.splitext(filename) if ext != '.py': return # No idea what the file is. Return OK try: f = open(filename) lines = f.readlines() f.close() except IOError: return # Couldn't find the file. Don't worry about it fre = re.compile(r'\s*def\s+(t_[a-zA-Z_0-9]*)\(') sre = re.compile(r'\s*(t_[a-zA-Z_0-9]*)\s*=') counthash = { } linen = 1 for l in lines: m = fre.match(l) if not m: m = sre.match(l) if m: name = m.group(1) prev = counthash.get(name) if not prev: counthash[name] = linen else: self.log.error("%s:%d: Rule %s redefined. 
Previously defined on line %d",filename,linen,name,prev) self.error = 1 linen += 1 # ----------------------------------------------------------------------------- # lex(module) # # Build all of the regular expression rules from definitions in the supplied module # ----------------------------------------------------------------------------- def lex(module=None,object=None,debug=0,optimize=0,lextab="lextab",reflags=0,nowarn=0,outputdir="", debuglog=None, errorlog=None): global lexer ldict = None stateinfo = { 'INITIAL' : 'inclusive'} lexobj = Lexer() lexobj.lexoptimize = optimize global token,input if errorlog is None: errorlog = PlyLogger(sys.stderr) if debug: if debuglog is None: debuglog = PlyLogger(sys.stderr) # Get the module dictionary used for the lexer if object: module = object if module: _items = [(k,getattr(module,k)) for k in dir(module)] ldict = dict(_items) else: ldict = get_caller_module_dict(2) # Collect parser information from the dictionary linfo = LexerReflect(ldict,log=errorlog,reflags=reflags) linfo.get_all() if not optimize: if linfo.validate_all(): raise SyntaxError("Can't build lexer") if optimize and lextab: try: lexobj.readtab(lextab,ldict) token = lexobj.token input = lexobj.input lexer = lexobj return lexobj except ImportError: pass # Dump some basic debugging information if debug: debuglog.info("lex: tokens = %r", linfo.tokens) debuglog.info("lex: literals = %r", linfo.literals) debuglog.info("lex: states = %r", linfo.stateinfo) # Build a dictionary of valid token names lexobj.lextokens = { } for n in linfo.tokens: lexobj.lextokens[n] = 1 # Get literals specification if isinstance(linfo.literals,(list,tuple)): lexobj.lexliterals = type(linfo.literals[0])().join(linfo.literals) else: lexobj.lexliterals = linfo.literals # Get the stateinfo dictionary stateinfo = linfo.stateinfo regexs = { } # Build the master regular expressions for state in stateinfo: regex_list = [] # Add rules defined by functions first for fname, f in 
linfo.funcsym[state]: line = func_code(f).co_firstlineno file = func_code(f).co_filename regex_list.append("(?P<%s>%s)" % (fname,f.__doc__)) if debug: debuglog.info("lex: Adding rule %s -> '%s' (state '%s')",fname,f.__doc__, state) # Now add all of the simple rules for name,r in linfo.strsym[state]: regex_list.append("(?P<%s>%s)" % (name,r)) if debug: debuglog.info("lex: Adding rule %s -> '%s' (state '%s')",name,r, state) regexs[state] = regex_list # Build the master regular expressions if debug: debuglog.info("lex: ==== MASTER REGEXS FOLLOW ====") for state in regexs: lexre, re_text, re_names = _form_master_re(regexs[state],reflags,ldict,linfo.toknames) lexobj.lexstatere[state] = lexre lexobj.lexstateretext[state] = re_text lexobj.lexstaterenames[state] = re_names if debug: for i in range(len(re_text)): debuglog.info("lex: state '%s' : regex[%d] = '%s'",state, i, re_text[i]) # For inclusive states, we need to add the regular expressions from the INITIAL state for state,stype in stateinfo.items(): if state != "INITIAL" and stype == 'inclusive': lexobj.lexstatere[state].extend(lexobj.lexstatere['INITIAL']) lexobj.lexstateretext[state].extend(lexobj.lexstateretext['INITIAL']) lexobj.lexstaterenames[state].extend(lexobj.lexstaterenames['INITIAL']) lexobj.lexstateinfo = stateinfo lexobj.lexre = lexobj.lexstatere["INITIAL"] lexobj.lexretext = lexobj.lexstateretext["INITIAL"] lexobj.lexreflags = reflags # Set up ignore variables lexobj.lexstateignore = linfo.ignore lexobj.lexignore = lexobj.lexstateignore.get("INITIAL","") # Set up error functions lexobj.lexstateerrorf = linfo.errorf lexobj.lexerrorf = linfo.errorf.get("INITIAL",None) if not lexobj.lexerrorf: errorlog.warning("No t_error rule is defined") # Check state information for ignore and error rules for s,stype in stateinfo.items(): if stype == 'exclusive': if not s in linfo.errorf: errorlog.warning("No error rule is defined for exclusive state '%s'", s) if not s in linfo.ignore and lexobj.lexignore: 
errorlog.warning("No ignore rule is defined for exclusive state '%s'", s) elif stype == 'inclusive': if not s in linfo.errorf: linfo.errorf[s] = linfo.errorf.get("INITIAL",None) if not s in linfo.ignore: linfo.ignore[s] = linfo.ignore.get("INITIAL","") # Create global versions of the token() and input() functions token = lexobj.token input = lexobj.input lexer = lexobj # If in optimize mode, we write the lextab if lextab and optimize: lexobj.writetab(lextab,outputdir) return lexobj # ----------------------------------------------------------------------------- # runmain() # # This runs the lexer as a main program # ----------------------------------------------------------------------------- def runmain(lexer=None,data=None): if not data: try: filename = sys.argv[1] f = open(filename) data = f.read() f.close() except IndexError: sys.stdout.write("Reading from standard input (type EOF to end):\n") data = sys.stdin.read() if lexer: _input = lexer.input else: _input = input _input(data) if lexer: _token = lexer.token else: _token = token while 1: tok = _token() if not tok: break sys.stdout.write("(%s,%r,%d,%d)\n" % (tok.type, tok.value, tok.lineno,tok.lexpos)) # ----------------------------------------------------------------------------- # @TOKEN(regex) # # This decorator function can be used to set the regex expression on a function # when its docstring might need to be set in an alternative way # ----------------------------------------------------------------------------- def TOKEN(r): def set_doc(f): if hasattr(r,"__call__"): f.__doc__ = r.__doc__ else: f.__doc__ = r return f return set_doc # Alternative spelling of the TOKEN decorator Token = TOKEN
gpl-3.0
waterponey/scikit-learn
benchmarks/bench_plot_parallel_pairwise.py
127
1270
# Author: Mathieu Blondel <mathieu@mblondel.org>
# License: BSD 3 clause
import time

import matplotlib.pyplot as plt

from sklearn.utils import check_random_state
from sklearn.metrics.pairwise import pairwise_distances
from sklearn.metrics.pairwise import pairwise_kernels


def plot(func):
    """Benchmark ``func`` on growing random inputs, single- vs. multi-core,
    and draw both timing curves on a dedicated figure.

    ``func`` must accept ``(X, n_jobs)`` and is timed once per sample size
    with ``n_jobs=1`` and once with ``n_jobs=-1`` (all cores).
    """
    rng = check_random_state(0)
    sample_sizes = range(1000, 6000, 1000)

    def _elapsed(X, n_jobs):
        # Wall-clock duration of a single call.
        tic = time.time()
        func(X, n_jobs=n_jobs)
        return time.time() - tic

    one_core = []
    multi_core = []
    for n_samples in sample_sizes:
        # 300-dimensional random data, regenerated fresh for each size.
        X = rng.rand(n_samples, 300)
        one_core.append(_elapsed(X, n_jobs=1))
        multi_core.append(_elapsed(X, n_jobs=-1))

    plt.figure('scikit-learn parallel %s benchmark results' % func.__name__)
    plt.plot(sample_sizes, one_core, label="one core")
    plt.plot(sample_sizes, multi_core, label="multi core")
    plt.xlabel('n_samples')
    plt.ylabel('Time (s)')
    plt.title('Parallel %s' % func.__name__)
    plt.legend()


def euclidean_distances(X, n_jobs):
    # Full pairwise Euclidean distance matrix, parallelized over n_jobs.
    return pairwise_distances(X, metric="euclidean", n_jobs=n_jobs)


def rbf_kernels(X, n_jobs):
    # Full pairwise RBF kernel matrix, parallelized over n_jobs.
    return pairwise_kernels(X, metric="rbf", n_jobs=n_jobs, gamma=0.1)


plot(euclidean_distances)
plot(rbf_kernels)
plt.show()
bsd-3-clause
beblount/Steer-Clear-Backend-Web
env/Lib/encodings/cp1258.py
593
13620
""" Python Character Mapping Codec cp1258 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1258.TXT' with gencodec.py. """#" import codecs ### Codec APIs class Codec(codecs.Codec): def encode(self,input,errors='strict'): return codecs.charmap_encode(input,errors,encoding_table) def decode(self,input,errors='strict'): return codecs.charmap_decode(input,errors,decoding_table) class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input, final=False): return codecs.charmap_encode(input,self.errors,encoding_table)[0] class IncrementalDecoder(codecs.IncrementalDecoder): def decode(self, input, final=False): return codecs.charmap_decode(input,self.errors,decoding_table)[0] class StreamWriter(Codec,codecs.StreamWriter): pass class StreamReader(Codec,codecs.StreamReader): pass ### encodings module API def getregentry(): return codecs.CodecInfo( name='cp1258', encode=Codec().encode, decode=Codec().decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter, ) ### Decoding Table decoding_table = ( u'\x00' # 0x00 -> NULL u'\x01' # 0x01 -> START OF HEADING u'\x02' # 0x02 -> START OF TEXT u'\x03' # 0x03 -> END OF TEXT u'\x04' # 0x04 -> END OF TRANSMISSION u'\x05' # 0x05 -> ENQUIRY u'\x06' # 0x06 -> ACKNOWLEDGE u'\x07' # 0x07 -> BELL u'\x08' # 0x08 -> BACKSPACE u'\t' # 0x09 -> HORIZONTAL TABULATION u'\n' # 0x0A -> LINE FEED u'\x0b' # 0x0B -> VERTICAL TABULATION u'\x0c' # 0x0C -> FORM FEED u'\r' # 0x0D -> CARRIAGE RETURN u'\x0e' # 0x0E -> SHIFT OUT u'\x0f' # 0x0F -> SHIFT IN u'\x10' # 0x10 -> DATA LINK ESCAPE u'\x11' # 0x11 -> DEVICE CONTROL ONE u'\x12' # 0x12 -> DEVICE CONTROL TWO u'\x13' # 0x13 -> DEVICE CONTROL THREE u'\x14' # 0x14 -> DEVICE CONTROL FOUR u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE u'\x16' # 0x16 -> SYNCHRONOUS IDLE u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK u'\x18' # 0x18 -> CANCEL u'\x19' # 0x19 -> END OF MEDIUM u'\x1a' # 0x1A -> SUBSTITUTE u'\x1b' # 0x1B -> ESCAPE 
u'\x1c' # 0x1C -> FILE SEPARATOR u'\x1d' # 0x1D -> GROUP SEPARATOR u'\x1e' # 0x1E -> RECORD SEPARATOR u'\x1f' # 0x1F -> UNIT SEPARATOR u' ' # 0x20 -> SPACE u'!' # 0x21 -> EXCLAMATION MARK u'"' # 0x22 -> QUOTATION MARK u'#' # 0x23 -> NUMBER SIGN u'$' # 0x24 -> DOLLAR SIGN u'%' # 0x25 -> PERCENT SIGN u'&' # 0x26 -> AMPERSAND u"'" # 0x27 -> APOSTROPHE u'(' # 0x28 -> LEFT PARENTHESIS u')' # 0x29 -> RIGHT PARENTHESIS u'*' # 0x2A -> ASTERISK u'+' # 0x2B -> PLUS SIGN u',' # 0x2C -> COMMA u'-' # 0x2D -> HYPHEN-MINUS u'.' # 0x2E -> FULL STOP u'/' # 0x2F -> SOLIDUS u'0' # 0x30 -> DIGIT ZERO u'1' # 0x31 -> DIGIT ONE u'2' # 0x32 -> DIGIT TWO u'3' # 0x33 -> DIGIT THREE u'4' # 0x34 -> DIGIT FOUR u'5' # 0x35 -> DIGIT FIVE u'6' # 0x36 -> DIGIT SIX u'7' # 0x37 -> DIGIT SEVEN u'8' # 0x38 -> DIGIT EIGHT u'9' # 0x39 -> DIGIT NINE u':' # 0x3A -> COLON u';' # 0x3B -> SEMICOLON u'<' # 0x3C -> LESS-THAN SIGN u'=' # 0x3D -> EQUALS SIGN u'>' # 0x3E -> GREATER-THAN SIGN u'?' # 0x3F -> QUESTION MARK u'@' # 0x40 -> COMMERCIAL AT u'A' # 0x41 -> LATIN CAPITAL LETTER A u'B' # 0x42 -> LATIN CAPITAL LETTER B u'C' # 0x43 -> LATIN CAPITAL LETTER C u'D' # 0x44 -> LATIN CAPITAL LETTER D u'E' # 0x45 -> LATIN CAPITAL LETTER E u'F' # 0x46 -> LATIN CAPITAL LETTER F u'G' # 0x47 -> LATIN CAPITAL LETTER G u'H' # 0x48 -> LATIN CAPITAL LETTER H u'I' # 0x49 -> LATIN CAPITAL LETTER I u'J' # 0x4A -> LATIN CAPITAL LETTER J u'K' # 0x4B -> LATIN CAPITAL LETTER K u'L' # 0x4C -> LATIN CAPITAL LETTER L u'M' # 0x4D -> LATIN CAPITAL LETTER M u'N' # 0x4E -> LATIN CAPITAL LETTER N u'O' # 0x4F -> LATIN CAPITAL LETTER O u'P' # 0x50 -> LATIN CAPITAL LETTER P u'Q' # 0x51 -> LATIN CAPITAL LETTER Q u'R' # 0x52 -> LATIN CAPITAL LETTER R u'S' # 0x53 -> LATIN CAPITAL LETTER S u'T' # 0x54 -> LATIN CAPITAL LETTER T u'U' # 0x55 -> LATIN CAPITAL LETTER U u'V' # 0x56 -> LATIN CAPITAL LETTER V u'W' # 0x57 -> LATIN CAPITAL LETTER W u'X' # 0x58 -> LATIN CAPITAL LETTER X u'Y' # 0x59 -> LATIN CAPITAL LETTER Y u'Z' # 0x5A -> LATIN CAPITAL 
LETTER Z u'[' # 0x5B -> LEFT SQUARE BRACKET u'\\' # 0x5C -> REVERSE SOLIDUS u']' # 0x5D -> RIGHT SQUARE BRACKET u'^' # 0x5E -> CIRCUMFLEX ACCENT u'_' # 0x5F -> LOW LINE u'`' # 0x60 -> GRAVE ACCENT u'a' # 0x61 -> LATIN SMALL LETTER A u'b' # 0x62 -> LATIN SMALL LETTER B u'c' # 0x63 -> LATIN SMALL LETTER C u'd' # 0x64 -> LATIN SMALL LETTER D u'e' # 0x65 -> LATIN SMALL LETTER E u'f' # 0x66 -> LATIN SMALL LETTER F u'g' # 0x67 -> LATIN SMALL LETTER G u'h' # 0x68 -> LATIN SMALL LETTER H u'i' # 0x69 -> LATIN SMALL LETTER I u'j' # 0x6A -> LATIN SMALL LETTER J u'k' # 0x6B -> LATIN SMALL LETTER K u'l' # 0x6C -> LATIN SMALL LETTER L u'm' # 0x6D -> LATIN SMALL LETTER M u'n' # 0x6E -> LATIN SMALL LETTER N u'o' # 0x6F -> LATIN SMALL LETTER O u'p' # 0x70 -> LATIN SMALL LETTER P u'q' # 0x71 -> LATIN SMALL LETTER Q u'r' # 0x72 -> LATIN SMALL LETTER R u's' # 0x73 -> LATIN SMALL LETTER S u't' # 0x74 -> LATIN SMALL LETTER T u'u' # 0x75 -> LATIN SMALL LETTER U u'v' # 0x76 -> LATIN SMALL LETTER V u'w' # 0x77 -> LATIN SMALL LETTER W u'x' # 0x78 -> LATIN SMALL LETTER X u'y' # 0x79 -> LATIN SMALL LETTER Y u'z' # 0x7A -> LATIN SMALL LETTER Z u'{' # 0x7B -> LEFT CURLY BRACKET u'|' # 0x7C -> VERTICAL LINE u'}' # 0x7D -> RIGHT CURLY BRACKET u'~' # 0x7E -> TILDE u'\x7f' # 0x7F -> DELETE u'\u20ac' # 0x80 -> EURO SIGN u'\ufffe' # 0x81 -> UNDEFINED u'\u201a' # 0x82 -> SINGLE LOW-9 QUOTATION MARK u'\u0192' # 0x83 -> LATIN SMALL LETTER F WITH HOOK u'\u201e' # 0x84 -> DOUBLE LOW-9 QUOTATION MARK u'\u2026' # 0x85 -> HORIZONTAL ELLIPSIS u'\u2020' # 0x86 -> DAGGER u'\u2021' # 0x87 -> DOUBLE DAGGER u'\u02c6' # 0x88 -> MODIFIER LETTER CIRCUMFLEX ACCENT u'\u2030' # 0x89 -> PER MILLE SIGN u'\ufffe' # 0x8A -> UNDEFINED u'\u2039' # 0x8B -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK u'\u0152' # 0x8C -> LATIN CAPITAL LIGATURE OE u'\ufffe' # 0x8D -> UNDEFINED u'\ufffe' # 0x8E -> UNDEFINED u'\ufffe' # 0x8F -> UNDEFINED u'\ufffe' # 0x90 -> UNDEFINED u'\u2018' # 0x91 -> LEFT SINGLE QUOTATION MARK u'\u2019' # 0x92 -> 
RIGHT SINGLE QUOTATION MARK u'\u201c' # 0x93 -> LEFT DOUBLE QUOTATION MARK u'\u201d' # 0x94 -> RIGHT DOUBLE QUOTATION MARK u'\u2022' # 0x95 -> BULLET u'\u2013' # 0x96 -> EN DASH u'\u2014' # 0x97 -> EM DASH u'\u02dc' # 0x98 -> SMALL TILDE u'\u2122' # 0x99 -> TRADE MARK SIGN u'\ufffe' # 0x9A -> UNDEFINED u'\u203a' # 0x9B -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK u'\u0153' # 0x9C -> LATIN SMALL LIGATURE OE u'\ufffe' # 0x9D -> UNDEFINED u'\ufffe' # 0x9E -> UNDEFINED u'\u0178' # 0x9F -> LATIN CAPITAL LETTER Y WITH DIAERESIS u'\xa0' # 0xA0 -> NO-BREAK SPACE u'\xa1' # 0xA1 -> INVERTED EXCLAMATION MARK u'\xa2' # 0xA2 -> CENT SIGN u'\xa3' # 0xA3 -> POUND SIGN u'\xa4' # 0xA4 -> CURRENCY SIGN u'\xa5' # 0xA5 -> YEN SIGN u'\xa6' # 0xA6 -> BROKEN BAR u'\xa7' # 0xA7 -> SECTION SIGN u'\xa8' # 0xA8 -> DIAERESIS u'\xa9' # 0xA9 -> COPYRIGHT SIGN u'\xaa' # 0xAA -> FEMININE ORDINAL INDICATOR u'\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK u'\xac' # 0xAC -> NOT SIGN u'\xad' # 0xAD -> SOFT HYPHEN u'\xae' # 0xAE -> REGISTERED SIGN u'\xaf' # 0xAF -> MACRON u'\xb0' # 0xB0 -> DEGREE SIGN u'\xb1' # 0xB1 -> PLUS-MINUS SIGN u'\xb2' # 0xB2 -> SUPERSCRIPT TWO u'\xb3' # 0xB3 -> SUPERSCRIPT THREE u'\xb4' # 0xB4 -> ACUTE ACCENT u'\xb5' # 0xB5 -> MICRO SIGN u'\xb6' # 0xB6 -> PILCROW SIGN u'\xb7' # 0xB7 -> MIDDLE DOT u'\xb8' # 0xB8 -> CEDILLA u'\xb9' # 0xB9 -> SUPERSCRIPT ONE u'\xba' # 0xBA -> MASCULINE ORDINAL INDICATOR u'\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK u'\xbc' # 0xBC -> VULGAR FRACTION ONE QUARTER u'\xbd' # 0xBD -> VULGAR FRACTION ONE HALF u'\xbe' # 0xBE -> VULGAR FRACTION THREE QUARTERS u'\xbf' # 0xBF -> INVERTED QUESTION MARK u'\xc0' # 0xC0 -> LATIN CAPITAL LETTER A WITH GRAVE u'\xc1' # 0xC1 -> LATIN CAPITAL LETTER A WITH ACUTE u'\xc2' # 0xC2 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX u'\u0102' # 0xC3 -> LATIN CAPITAL LETTER A WITH BREVE u'\xc4' # 0xC4 -> LATIN CAPITAL LETTER A WITH DIAERESIS u'\xc5' # 0xC5 -> LATIN CAPITAL LETTER A WITH RING ABOVE u'\xc6' 
# 0xC6 -> LATIN CAPITAL LETTER AE u'\xc7' # 0xC7 -> LATIN CAPITAL LETTER C WITH CEDILLA u'\xc8' # 0xC8 -> LATIN CAPITAL LETTER E WITH GRAVE u'\xc9' # 0xC9 -> LATIN CAPITAL LETTER E WITH ACUTE u'\xca' # 0xCA -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX u'\xcb' # 0xCB -> LATIN CAPITAL LETTER E WITH DIAERESIS u'\u0300' # 0xCC -> COMBINING GRAVE ACCENT u'\xcd' # 0xCD -> LATIN CAPITAL LETTER I WITH ACUTE u'\xce' # 0xCE -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX u'\xcf' # 0xCF -> LATIN CAPITAL LETTER I WITH DIAERESIS u'\u0110' # 0xD0 -> LATIN CAPITAL LETTER D WITH STROKE u'\xd1' # 0xD1 -> LATIN CAPITAL LETTER N WITH TILDE u'\u0309' # 0xD2 -> COMBINING HOOK ABOVE u'\xd3' # 0xD3 -> LATIN CAPITAL LETTER O WITH ACUTE u'\xd4' # 0xD4 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX u'\u01a0' # 0xD5 -> LATIN CAPITAL LETTER O WITH HORN u'\xd6' # 0xD6 -> LATIN CAPITAL LETTER O WITH DIAERESIS u'\xd7' # 0xD7 -> MULTIPLICATION SIGN u'\xd8' # 0xD8 -> LATIN CAPITAL LETTER O WITH STROKE u'\xd9' # 0xD9 -> LATIN CAPITAL LETTER U WITH GRAVE u'\xda' # 0xDA -> LATIN CAPITAL LETTER U WITH ACUTE u'\xdb' # 0xDB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX u'\xdc' # 0xDC -> LATIN CAPITAL LETTER U WITH DIAERESIS u'\u01af' # 0xDD -> LATIN CAPITAL LETTER U WITH HORN u'\u0303' # 0xDE -> COMBINING TILDE u'\xdf' # 0xDF -> LATIN SMALL LETTER SHARP S u'\xe0' # 0xE0 -> LATIN SMALL LETTER A WITH GRAVE u'\xe1' # 0xE1 -> LATIN SMALL LETTER A WITH ACUTE u'\xe2' # 0xE2 -> LATIN SMALL LETTER A WITH CIRCUMFLEX u'\u0103' # 0xE3 -> LATIN SMALL LETTER A WITH BREVE u'\xe4' # 0xE4 -> LATIN SMALL LETTER A WITH DIAERESIS u'\xe5' # 0xE5 -> LATIN SMALL LETTER A WITH RING ABOVE u'\xe6' # 0xE6 -> LATIN SMALL LETTER AE u'\xe7' # 0xE7 -> LATIN SMALL LETTER C WITH CEDILLA u'\xe8' # 0xE8 -> LATIN SMALL LETTER E WITH GRAVE u'\xe9' # 0xE9 -> LATIN SMALL LETTER E WITH ACUTE u'\xea' # 0xEA -> LATIN SMALL LETTER E WITH CIRCUMFLEX u'\xeb' # 0xEB -> LATIN SMALL LETTER E WITH DIAERESIS u'\u0301' # 0xEC -> COMBINING ACUTE ACCENT u'\xed' # 0xED -> 
LATIN SMALL LETTER I WITH ACUTE u'\xee' # 0xEE -> LATIN SMALL LETTER I WITH CIRCUMFLEX u'\xef' # 0xEF -> LATIN SMALL LETTER I WITH DIAERESIS u'\u0111' # 0xF0 -> LATIN SMALL LETTER D WITH STROKE u'\xf1' # 0xF1 -> LATIN SMALL LETTER N WITH TILDE u'\u0323' # 0xF2 -> COMBINING DOT BELOW u'\xf3' # 0xF3 -> LATIN SMALL LETTER O WITH ACUTE u'\xf4' # 0xF4 -> LATIN SMALL LETTER O WITH CIRCUMFLEX u'\u01a1' # 0xF5 -> LATIN SMALL LETTER O WITH HORN u'\xf6' # 0xF6 -> LATIN SMALL LETTER O WITH DIAERESIS u'\xf7' # 0xF7 -> DIVISION SIGN u'\xf8' # 0xF8 -> LATIN SMALL LETTER O WITH STROKE u'\xf9' # 0xF9 -> LATIN SMALL LETTER U WITH GRAVE u'\xfa' # 0xFA -> LATIN SMALL LETTER U WITH ACUTE u'\xfb' # 0xFB -> LATIN SMALL LETTER U WITH CIRCUMFLEX u'\xfc' # 0xFC -> LATIN SMALL LETTER U WITH DIAERESIS u'\u01b0' # 0xFD -> LATIN SMALL LETTER U WITH HORN u'\u20ab' # 0xFE -> DONG SIGN u'\xff' # 0xFF -> LATIN SMALL LETTER Y WITH DIAERESIS ) ### Encoding table encoding_table=codecs.charmap_build(decoding_table)
mit
VanirAOSP/kernel_oppo_n1
arch/ia64/scripts/unwcheck.py
13143
1714
#!/usr/bin/python # # Usage: unwcheck.py FILE # # This script checks the unwind info of each function in file FILE # and verifies that the sum of the region-lengths matches the total # length of the function. # # Based on a shell/awk script originally written by Harish Patil, # which was converted to Perl by Matthew Chapman, which was converted # to Python by David Mosberger. # import os import re import sys if len(sys.argv) != 2: print "Usage: %s FILE" % sys.argv[0] sys.exit(2) readelf = os.getenv("READELF", "readelf") start_pattern = re.compile("<([^>]*)>: \[0x([0-9a-f]+)-0x([0-9a-f]+)\]") rlen_pattern = re.compile(".*rlen=([0-9]+)") def check_func (func, slots, rlen_sum): if slots != rlen_sum: global num_errors num_errors += 1 if not func: func = "[%#x-%#x]" % (start, end) print "ERROR: %s: %lu slots, total region length = %lu" % (func, slots, rlen_sum) return num_funcs = 0 num_errors = 0 func = False slots = 0 rlen_sum = 0 for line in os.popen("%s -u %s" % (readelf, sys.argv[1])): m = start_pattern.match(line) if m: check_func(func, slots, rlen_sum) func = m.group(1) start = long(m.group(2), 16) end = long(m.group(3), 16) slots = 3 * (end - start) / 16 rlen_sum = 0L num_funcs += 1 else: m = rlen_pattern.match(line) if m: rlen_sum += long(m.group(1)) check_func(func, slots, rlen_sum) if num_errors == 0: print "No errors detected in %u functions." % num_funcs else: if num_errors > 1: err="errors" else: err="error" print "%u %s detected in %u functions." % (num_errors, err, num_funcs) sys.exit(1)
gpl-2.0
karan1276/servo
tests/wpt/web-platform-tests/conformance-checkers/tools/dl.py
107
4915
# -*- coding: utf-8 -*- import os ccdir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) template = """<!DOCTYPE html> <meta charset=utf-8> """ errors = { "dl-in-p": "<p><dl><dt>text<dd>text</dl></p>", "header-in-dt": "<dl><dt><header>text</header><dd>text</dl>", "footer-in-dt": "<dl><dt><footer>text</footer><dd>text</dl>", "article-in-dt": "<dl><dt><article><h2>text</h2></article><dd>text</dl>", "aside-in-dt": "<dl><dt><aside><h2>text</h2></aside><dd>text</dl>", "nav-in-dt": "<dl><dt><nav><h2>text</h2></nav><dd>text</dl>", "section-in-dt": "<dl><dt><section><h2>text</h2></section><dd>text</dl>", "h1-in-dt": "<dl><dt><h1>text</h1><dd>text</dl>", "h2-in-dt": "<dl><dt><h2>text</h2><dd>text</dl>", "h3-in-dt": "<dl><dt><h3>text</h3><dd>text</dl>", "h4-in-dt": "<dl><dt><h4>text</h4><dd>text</dl>", "h5-in-dt": "<dl><dt><h5>text</h5><dd>text</dl>", "h6-in-dt": "<dl><dt><h6>text</h6><dd>text</dl>", "hgroup-in-dt": "<dl><dt><hgroup><h1>text</h1></hgroup><dd>text</dl>", "only-dt": "<dl><dt>1</dl>", "only-dd": "<dl><dd>a</dl>", "first-dd": "<dl><dd>a<dt>2<dd>b</dl>", "last-dt": "<dl><dt>1<dd>a<dt>2</dl>", "dd-in-template": "<dl><dt>1</dt><template><dd>a</dd></template></dl>", "dt-in-template": "<dl><template><dt>1</dt></template><dd>a</dl>", "dl-contains-text": "<dl><dt>1</dt>x</dl>", "dl-contains-text-2": "<dl><dt>1<dd>a</dd>x</dl>", "dl-contains-dl": "<dl><dt>1<dd>a</dd><dl></dl></dl>", # div "empty-div": "<dl><div></div></dl>", "empty-div-2": "<dl><div></div><div><dt>2<dd>b</div></dl>", "mixed-dt-dd-div": "<dl><dt>1<dd>a</dd><div><dt>2<dd>b</div></dl>", "mixed-div-dt-dd": "<dl><div><dt>1<dd>a</div><dt>2<dd>b</dd></dl>", "nested-divs": "<dl><div><div><dt>1<dd>a</div></div></dl>", "div-splitting-groups": "<dl><div><dt>1</div><div><dd>a</div></dl>", "div-splitting-groups-2": "<dl><div><dt>1<dd>a</div><div><dd>b</div></dl>", "div-splitting-groups-3": "<dl><div><dt>1</div><div><dt>2<dd>b</div></dl>", "div-contains-text": "<dl><div>x</div><dt>2<dd>b</div></dl>", 
"div-contains-dl": "<dl><div><dl></dl></div><dt>2<dd>b</div></dl>", "div-multiple-groups": "<dl><div><dt>1<dd>a<dt>2<dd>a<dd>b<dt>3<dt>4<dt>5<dd>a</div></dl>", } non_errors_in_head = { "parent-template-in-head": "<template><dl><dt>text<dd>text</dl></template>", } non_errors = { "basic": "<dl><dt>text<dd>text</dl>", "empty": "<dl></dl>", "empty-dt-dd": "<dl><dt><dd></dl>", "multiple-groups": "<dl><dt>1<dd>a<dt>2<dd>a<dd>b<dt>3<dt>4<dt>5<dd>a</dl>", "header-in-dd": "<dl><dt>text<dd><header>text</header></dl>", "footer-in-dd": "<dl><dt>text<dd><footer>text</footer></dl>", "article-in-dd": "<dl><dt>text<dd><article><h2>text</h2></article></dl>", "aside-in-dd": "<dl><dt>text<dd><aside><h2>text</h2></aside></dl>", "nav-in-dd": "<dl><dt>text<dd><nav><h2>text</h2></nav></dl>", "section-in-dd": "<dl><dt>text<dd><section><h2>text</h2></section></dl>", "h1-in-dd": "<dl><dt>text<dd><h1>text</h1></dl>", "h2-in-dd": "<dl><dt>text<dd><h2>text</h2></dl>", "h3-in-dd": "<dl><dt>text<dd><h3>text</h3></dl>", "h4-in-dd": "<dl><dt>text<dd><h4>text</h4></dl>", "h5-in-dd": "<dl><dt>text<dd><h5>text</h5></dl>", "h6-in-dd": "<dl><dt>text<dd><h6>text</h6></dl>", "p-in-dt": "<dl><dt><p>1<p>1<dd>a</dl>", "dl-in-dt": "<dl><dt><dl><dt>1<dd>a</dl><dd>b</dl>", "dl-in-dd": "<dl><dt>1<dd><dl><dt>2<dd>a</dl></dl>", "interactive": "<dl><dt><a href='#'>1</a><dd><a href='#'>a</a></dl>", "script": "<dl><script></script></dl>", "dt-script-dd": "<dl><dt>1</dt><script></script><dd>a</dl>", "dt-template-dd": "<dl><dt>1</dt><template></template><dd>a</dl>", # div "div-basic": "<dl><div><dt>1<dd>a</div></dl>", "div-script": "<dl><div><dt>1<dd>a</div><script></script></dl>", "div-script-2": "<dl><div><dt>1</dt><script></script><dd>a</div></dl>", "div-template": "<dl><div><dt>1<dd>a</div><template></template></dl>", "div-template-2": "<dl><div><dt>1</dt><template></template><dd>a</div></dl>", "div-multiple-groups": "<dl><div><dt>1<dd>a</div><div><dt>2<dd>a<dd>b</div><div><dt>3<dt>4<dt>5<dd>a</div></dl>", } for 
key in errors.keys(): template_error = template template_error += '<title>invalid %s</title>\n' % key template_error += errors[key] file = open(os.path.join(ccdir, "html/elements/dl/%s-novalid.html" % key), 'wb') file.write(template_error) file.close() file = open(os.path.join(ccdir, "html/elements/dl/dl-isvalid.html"), 'wb') file.write(template + '<title>valid dl</title>\n') for key in non_errors_in_head.keys(): file.write('%s <!-- %s -->\n' % (non_errors_in_head[key], key)) file.write('<body>\n') for key in non_errors.keys(): file.write('%s <!-- %s -->\n' % (non_errors[key], key)) file.close() # vim: ts=4:sw=4
mpl-2.0
danielsunzhongyuan/my_leetcode_in_python
binary_tree_preorder_traversal_144.py
1
1028
# Definition for a binary tree node.
# class TreeNode(object):
#     def __init__(self, x):
#         self.val = x
#         self.left = None
#         self.right = None


class Solution(object):
    def preorderTraversal(self, root):
        """Recursive preorder walk: root, then left subtree, then right.

        :type root: TreeNode
        :rtype: List[int]
        """
        if not root:
            return []
        return ([root.val]
                + self.preorderTraversal(root.left)
                + self.preorderTraversal(root.right))


class Solution2(object):
    def preorderTraversal(self, root):
        """Iterative preorder walk using an explicit stack.

        :type root: TreeNode
        :rtype: List[int]
        """
        if not root:
            return []
        values = []
        pending = [root]
        while pending:
            node = pending.pop()
            values.append(node.val)
            # Push right before left so the left child is popped (and
            # therefore visited) first.
            for child in (node.right, node.left):
                if child:
                    pending.append(child)
        return values
apache-2.0
poljeff/odoo
addons/account/wizard/account_vat.py
378
2896
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp.osv import fields, osv


class account_vat_declaration(osv.osv_memory):
    """Transient wizard that launches the VAT declaration report for a
    chosen chart of taxes, based on either invoices or payments."""
    _name = 'account.vat.declaration'
    _description = 'Account Vat Declaration'
    _inherit = "account.common.report"
    _columns = {
        'based_on': fields.selection([('invoices', 'Invoices'),
                                      ('payments', 'Payments'),], 'Based on', required=True),
        # Root tax code only (parent_id must be unset), per the domain below.
        'chart_tax_id': fields.many2one('account.tax.code', 'Chart of Tax',
                                        help='Select Charts of Taxes', required=True,
                                        domain = [('parent_id','=', False)]),
        'display_detail': fields.boolean('Display Detail'),
    }

    def _get_tax(self, cr, uid, context=None):
        """Default chart of tax: the first root tax code belonging to the
        current user's company, or False if none exists."""
        user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
        taxes = self.pool.get('account.tax.code').search(cr, uid, [('parent_id', '=', False), ('company_id', '=', user.company_id.id)], limit=1)
        # `search` returns a list of ids; take the first, or False when empty.
        return taxes and taxes[0] or False

    _defaults = {
        'based_on': 'invoices',
        'chart_tax_id': _get_tax
    }

    def create_vat(self, cr, uid, ids, context=None):
        """Collect the wizard's form values and trigger the
        'account.report_vat' report action for the selected chart of taxes.

        Returns the report action dictionary produced by the report pool.
        """
        if context is None:
            context = {}
        datas = {'ids': context.get('active_ids', [])}
        datas['model'] = 'account.tax.code'
        datas['form'] = self.read(cr, uid, ids, context=context)[0]
        # many2one fields read back as (id, name) tuples; keep only the id.
        for field in datas['form'].keys():
            if isinstance(datas['form'][field], tuple):
                datas['form'][field] = datas['form'][field][0]
        taxcode_obj = self.pool.get('account.tax.code')
        taxcode_id = datas['form']['chart_tax_id']
        taxcode = taxcode_obj.browse(cr, uid, [taxcode_id], context=context)[0]
        # The report needs the company owning the selected chart of taxes.
        datas['form']['company_id'] = taxcode.company_id.id
        return self.pool['report'].get_action(cr, uid, [], 'account.report_vat', data=datas, context=context)

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
GauravButola/nereid-project
tag.py
3
4787
# -*- coding: utf-8 -*-
"""
    tag

    :copyright: (c) 2012-2014 by Openlabs Technologies & Consulting (P) Limited
    :license: GPLv3, see LICENSE for more details.
"""
from nereid import (
    request, login_required, url_for, redirect, flash, jsonify, route
)
from trytond.model import ModelView, ModelSQL, fields
from trytond.pool import Pool, PoolMeta
from trytond.transaction import Transaction
from trytond import backend

__all__ = ['Tag', 'TaskTags']
__metaclass__ = PoolMeta


class Tag(ModelSQL, ModelView):
    "Tags"
    __name__ = "project.work.tag"

    name = fields.Char('Name', required=True)
    color = fields.Char('Color Code', required=True)
    project = fields.Many2One(
        'project.work', 'Project', required=True,
        domain=[('type', '=', 'project')], ondelete='CASCADE',
    )

    @classmethod
    def __setup__(cls):
        super(Tag, cls).__setup__()
        # A tag name may only be used once per project.
        cls._sql_constraints += [
            ('unique_name_project', 'UNIQUE(name, project)', 'Duplicate Tag')
        ]

    @staticmethod
    def default_color():
        """Default color code used when none is supplied."""
        return "#999"

    def serialize(self, purpose=None):
        """Return a JSON-serializable dictionary describing this tag."""
        task_list_url = url_for(
            'project.work.render_task_list',
            project_id=self.project.id, state="opened", tag=self.id
        )
        return {
            'id': self.id,
            'displayName': self.name,
            'objectType': self.__name__,
            'create_date': self.create_date.isoformat(),
            "url": task_list_url,
        }

    @classmethod
    @route('/project-<int:project_id>/tag/-new', methods=['GET', 'POST'])
    @login_required
    def create_tag(cls, project_id):
        """Create a new tag for the specific project

        :params project_id: Project id for which need to be created
        """
        Project = Pool().get('project.work')
        Activity = Pool().get('nereid.activity')

        project = Project.get_project(project_id)

        # Only project admins may manage tags.
        if not request.nereid_user.is_admin_of_project(project):
            flash(
                "Sorry! You are not allowed to create new tags."
                " Contact your project admin for the same."
            )
            return redirect(request.referrer)

        if request.method != 'POST':
            flash("Could not create tag. Try Again")
            return redirect(request.referrer)

        tag, = cls.create([{
            'name': request.form['name'],
            'color': request.form['color'],
            'project': project.id
        }])
        # Record the creation in the project's activity stream.
        Activity.create([{
            'actor': request.nereid_user.id,
            'object_': 'project.work.tag, %d' % tag.id,
            'verb': 'created_tag',
            'target': 'project.work, %d' % project.id,
            'project': project.id,
        }])
        flash("Successfully created tag")
        return redirect(request.referrer)

    @login_required
    @route('/tag-<int:active_id>/-delete', methods=['GET', 'POST'])
    def delete_tag(self):
        """Delete this tag from its project (XHR POST only)."""
        is_post = request.method == 'POST'

        # Only project admins may manage tags.
        if is_post and not \
                request.nereid_user.is_admin_of_project(self.project):
            flash(
                "Sorry! You are not allowed to delete tags."
                " Contact your project admin for the same."
            )
            return redirect(request.referrer)

        if is_post and request.is_xhr:
            self.delete([self])
            return jsonify({
                'success': True,
            })

        flash("Could not delete tag! Try again.")
        return redirect(request.referrer)


class TaskTags(ModelSQL):
    'Task Tags'
    __name__ = 'project.work-project.work.tag'

    task = fields.Many2One(
        'project.work', 'Project', ondelete='CASCADE',
        select=1, required=True, domain=[('type', '=', 'task')]
    )
    tag = fields.Many2One(
        'project.work.tag', 'Tag', select=1, required=True,
        ondelete='CASCADE',
    )

    @classmethod
    def __register__(cls, module_name):
        """Register the model, renaming the legacy relation table if found."""
        cursor = Transaction().cursor
        TableHandler = backend.get('TableHandler')
        handler = TableHandler(cursor, cls, module_name)

        super(TaskTags, cls).__register__(module_name)

        # Migration: the relation table used to be named project_work_tag_rel.
        if handler.table_exist(cursor, 'project_work_tag_rel'):
            handler.table_rename(
                cursor, 'project_work_tag_rel', 'project_work-project_work_tag'
            )
gpl-3.0
dsgouda/autorest
src/generator/AutoRest.Python.Azure.Tests/Expected/AcceptanceTests/Paging/fixtures/acceptancetestspaging/operations/paging_operations.py
14
33608
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------

from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
import uuid

from .. import models


class PagingOperations(object):
    """PagingOperations operations.

    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self.config = config

    def _request_page(self, url, query_parameters, extra_headers,
                      custom_headers, operation_config):
        """Send the GET request for a single page and return the response.

        Shared by every paging operation below: builds the standard headers
        (content type, generated client request id, accept-language), merges
        caller-supplied ``custom_headers`` and operation-specific
        ``extra_headers``, sends the request, and raises
        :class:`CloudError` on any non-200 status.

        :param str url: fully formatted request URL.
        :param dict query_parameters: query string parameters.
        :param extra_headers: pre-serialized operation-specific headers
         (client-request-id, maxresults, timeout) or None.
        :param dict custom_headers: headers supplied by the caller.
        :param dict operation_config: per-operation config overrides.
        :rtype: the transport response object.
        """
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header(
                "self.config.accept_language",
                self.config.accept_language, 'str')
        if extra_headers:
            # Operation-specific headers win over same-named custom headers,
            # matching the order the generated code applied them in.
            header_parameters.update(extra_headers)

        request = self._client.get(url, query_parameters)
        response = self._client.send(
            request, header_parameters, **operation_config)

        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        return response

    def _paged_result(self, internal_paging, paged_type, raw):
        """Wrap a page-fetching callable in the given paged collection type.

        :param internal_paging: callable(next_link=None, raw=False) fetching
         one page.
        :param paged_type: the models.*Paged class to instantiate.
        :param bool raw: when True, also return a raw (header-carrying)
         instance, as the generated code did.
        """
        deserialized = paged_type(
            internal_paging, self._deserialize.dependencies)

        if raw:
            header_dict = {}
            client_raw_response = paged_type(
                internal_paging, self._deserialize.dependencies, header_dict)
            return client_raw_response

        return deserialized

    def _page_options_headers(self, client_request_id, maxresults, timeout):
        """Serialize the optional paging headers, skipping unset values."""
        headers = {}
        if client_request_id is not None:
            headers['client-request-id'] = self._serialize.header(
                "client_request_id", client_request_id, 'str')
        if maxresults is not None:
            headers['maxresults'] = self._serialize.header(
                "maxresults", maxresults, 'int')
        if timeout is not None:
            headers['timeout'] = self._serialize.header(
                "timeout", timeout, 'int')
        return headers

    def get_single_pages(
            self, custom_headers=None, raw=False, **operation_config):
        """A paging operation that finishes on the first call without a
        nextlink.

        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`ProductPaged
         <fixtures.acceptancetestspaging.models.ProductPaged>`
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        def internal_paging(next_link=None, raw=False):
            url = next_link or '/paging/single'
            return self._request_page(
                url, {}, None, custom_headers, operation_config)

        return self._paged_result(internal_paging, models.ProductPaged, raw)

    def get_multiple_pages(
            self, client_request_id=None, paging_get_multiple_pages_options=None, custom_headers=None, raw=False, **operation_config):
        """A paging operation that includes a nextLink that has 10 pages.

        :param client_request_id:
        :type client_request_id: str
        :param paging_get_multiple_pages_options: Additional parameters for
         the operation
        :type paging_get_multiple_pages_options:
         :class:`PagingGetMultiplePagesOptions
         <fixtures.acceptancetestspaging.models.PagingGetMultiplePagesOptions>`
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`ProductPaged
         <fixtures.acceptancetestspaging.models.ProductPaged>`
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        maxresults = None
        timeout = None
        if paging_get_multiple_pages_options is not None:
            maxresults = paging_get_multiple_pages_options.maxresults
            timeout = paging_get_multiple_pages_options.timeout

        extra_headers = self._page_options_headers(
            client_request_id, maxresults, timeout)

        def internal_paging(next_link=None, raw=False):
            url = next_link or '/paging/multiple'
            return self._request_page(
                url, {}, extra_headers, custom_headers, operation_config)

        return self._paged_result(internal_paging, models.ProductPaged, raw)

    def get_odata_multiple_pages(
            self, client_request_id=None, paging_get_odata_multiple_pages_options=None, custom_headers=None, raw=False, **operation_config):
        """A paging operation that includes a nextLink in odata format that
        has 10 pages.

        :param client_request_id:
        :type client_request_id: str
        :param paging_get_odata_multiple_pages_options: Additional parameters
         for the operation
        :type paging_get_odata_multiple_pages_options:
         :class:`PagingGetOdataMultiplePagesOptions
         <fixtures.acceptancetestspaging.models.PagingGetOdataMultiplePagesOptions>`
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`ProductPaged1
         <fixtures.acceptancetestspaging.models.ProductPaged1>`
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        maxresults = None
        timeout = None
        if paging_get_odata_multiple_pages_options is not None:
            maxresults = paging_get_odata_multiple_pages_options.maxresults
            timeout = paging_get_odata_multiple_pages_options.timeout

        extra_headers = self._page_options_headers(
            client_request_id, maxresults, timeout)

        def internal_paging(next_link=None, raw=False):
            url = next_link or '/paging/multiple/odata'
            return self._request_page(
                url, {}, extra_headers, custom_headers, operation_config)

        return self._paged_result(internal_paging, models.ProductPaged1, raw)

    def get_multiple_pages_with_offset(
            self, paging_get_multiple_pages_with_offset_options, client_request_id=None, custom_headers=None, raw=False, **operation_config):
        """A paging operation that includes a nextLink that has 10 pages.

        :param paging_get_multiple_pages_with_offset_options: Additional
         parameters for the operation
        :type paging_get_multiple_pages_with_offset_options:
         :class:`PagingGetMultiplePagesWithOffsetOptions
         <fixtures.acceptancetestspaging.models.PagingGetMultiplePagesWithOffsetOptions>`
        :param client_request_id:
        :type client_request_id: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`ProductPaged
         <fixtures.acceptancetestspaging.models.ProductPaged>`
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        maxresults = None
        offset = None
        timeout = None
        if paging_get_multiple_pages_with_offset_options is not None:
            maxresults = paging_get_multiple_pages_with_offset_options.maxresults
            offset = paging_get_multiple_pages_with_offset_options.offset
            timeout = paging_get_multiple_pages_with_offset_options.timeout

        extra_headers = self._page_options_headers(
            client_request_id, maxresults, timeout)

        def internal_paging(next_link=None, raw=False):
            if next_link:
                url = next_link
            else:
                # Only the first request carries the offset path segment;
                # the service returns complete next links afterwards.
                path_format_arguments = {
                    'offset': self._serialize.url("offset", offset, 'int')
                }
                url = self._client.format_url(
                    '/paging/multiple/withpath/{offset}',
                    **path_format_arguments)
            return self._request_page(
                url, {}, extra_headers, custom_headers, operation_config)

        return self._paged_result(internal_paging, models.ProductPaged, raw)

    def get_multiple_pages_retry_first(
            self, custom_headers=None, raw=False, **operation_config):
        """A paging operation that fails on the first call with 500 and then
        retries and then get a response including a nextLink that has 10
        pages.

        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`ProductPaged
         <fixtures.acceptancetestspaging.models.ProductPaged>`
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        def internal_paging(next_link=None, raw=False):
            url = next_link or '/paging/multiple/retryfirst'
            return self._request_page(
                url, {}, None, custom_headers, operation_config)

        return self._paged_result(internal_paging, models.ProductPaged, raw)

    def get_multiple_pages_retry_second(
            self, custom_headers=None, raw=False, **operation_config):
        """A paging operation that includes a nextLink that has 10 pages, of
        which the 2nd call fails first with 500. The client should retry and
        finish all 10 pages eventually.

        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`ProductPaged
         <fixtures.acceptancetestspaging.models.ProductPaged>`
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        def internal_paging(next_link=None, raw=False):
            url = next_link or '/paging/multiple/retrysecond'
            return self._request_page(
                url, {}, None, custom_headers, operation_config)

        return self._paged_result(internal_paging, models.ProductPaged, raw)

    def get_single_pages_failure(
            self, custom_headers=None, raw=False, **operation_config):
        """A paging operation that receives a 400 on the first call.

        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`ProductPaged
         <fixtures.acceptancetestspaging.models.ProductPaged>`
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        def internal_paging(next_link=None, raw=False):
            url = next_link or '/paging/single/failure'
            return self._request_page(
                url, {}, None, custom_headers, operation_config)

        return self._paged_result(internal_paging, models.ProductPaged, raw)

    def get_multiple_pages_failure(
            self, custom_headers=None, raw=False, **operation_config):
        """A paging operation that receives a 400 on the second call.

        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`ProductPaged
         <fixtures.acceptancetestspaging.models.ProductPaged>`
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        def internal_paging(next_link=None, raw=False):
            url = next_link or '/paging/multiple/failure'
            return self._request_page(
                url, {}, None, custom_headers, operation_config)

        return self._paged_result(internal_paging, models.ProductPaged, raw)

    def get_multiple_pages_failure_uri(
            self, custom_headers=None, raw=False, **operation_config):
        """A paging operation that receives an invalid nextLink.

        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`ProductPaged
         <fixtures.acceptancetestspaging.models.ProductPaged>`
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        def internal_paging(next_link=None, raw=False):
            url = next_link or '/paging/multiple/failureuri'
            return self._request_page(
                url, {}, None, custom_headers, operation_config)

        return self._paged_result(internal_paging, models.ProductPaged, raw)

    def get_multiple_pages_fragment_next_link(
            self, api_version, tenant, custom_headers=None, raw=False, **operation_config):
        """A paging operation that doesn't return a full URL, just a
        fragment.

        :param api_version: Sets the api version to use.
        :type api_version: str
        :param tenant: Sets the tenant to use.
        :type tenant: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`ProductPaged1
         <fixtures.acceptancetestspaging.models.ProductPaged1>`
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        def internal_paging(next_link=None, raw=False):
            if not next_link:
                url = '/paging/multiple/fragment/{tenant}'
                path_format_arguments = {
                    'tenant': self._serialize.url("tenant", tenant, 'str')
                }
            else:
                # The service returns only a URL fragment; rebuild the full
                # path around it.
                url = '/paging/multiple/fragment/{tenant}/{nextLink}'
                path_format_arguments = {
                    'tenant': self._serialize.url("tenant", tenant, 'str'),
                    'nextLink': self._serialize.url(
                        "next_link", next_link, 'str', skip_quote=True)
                }
            url = self._client.format_url(url, **path_format_arguments)
            # NOTE: the literal query key 'api_version' (underscore) is what
            # the service expects; do not normalize it to 'api-version'.
            query_parameters = {}
            query_parameters['api_version'] = self._serialize.query(
                "api_version", api_version, 'str')
            return self._request_page(
                url, query_parameters, None, custom_headers, operation_config)

        return self._paged_result(internal_paging, models.ProductPaged1, raw)

    def get_multiple_pages_fragment_with_grouping_next_link(
            self, custom_parameter_group, custom_headers=None, raw=False, **operation_config):
        """A paging operation that doesn't return a full URL, just a fragment
        with parameters grouped.

        :param custom_parameter_group: Additional parameters for the
         operation
        :type custom_parameter_group: :class:`CustomParameterGroup
         <fixtures.acceptancetestspaging.models.CustomParameterGroup>`
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`ProductPaged1
         <fixtures.acceptancetestspaging.models.ProductPaged1>`
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        api_version = None
        tenant = None
        if custom_parameter_group is not None:
            api_version = custom_parameter_group.api_version
            tenant = custom_parameter_group.tenant

        def internal_paging(next_link=None, raw=False):
            if not next_link:
                url = '/paging/multiple/fragmentwithgrouping/{tenant}'
                path_format_arguments = {
                    'tenant': self._serialize.url("tenant", tenant, 'str')
                }
            else:
                # The service returns only a URL fragment; rebuild the full
                # path around it.
                url = '/paging/multiple/fragmentwithgrouping/{tenant}/{nextLink}'
                path_format_arguments = {
                    'tenant': self._serialize.url("tenant", tenant, 'str'),
                    'nextLink': self._serialize.url(
                        "next_link", next_link, 'str', skip_quote=True)
                }
            url = self._client.format_url(url, **path_format_arguments)
            # NOTE: the literal query key 'api_version' (underscore) is what
            # the service expects; do not normalize it to 'api-version'.
            query_parameters = {}
            query_parameters['api_version'] = self._serialize.query(
                "api_version", api_version, 'str')
            return self._request_page(
                url, query_parameters, None, custom_headers, operation_config)

        return self._paged_result(internal_paging, models.ProductPaged1, raw)
mit
Elettronik/SickRage
lib/pgi/foreign/__init__.py
19
1931
# Copyright 2013 Christoph Reiter # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. """Foreign structs make it possible for gi to interact with other python libraries. For example it allows functions to take cairo structs created with cairocffi. """ import importlib from ._base import ForeignStruct, ForeignError _MODULES = {} def get_foreign_module(namespace): """Returns the module or raises ForeignError""" if namespace not in _MODULES: try: module = importlib.import_module("." + namespace, __package__) except ImportError: module = None _MODULES[namespace] = module module = _MODULES.get(namespace) if module is None: raise ForeignError("Foreign %r structs not supported" % namespace) return module def get_foreign_struct(namespace, name): """Returns a ForeignStruct implementation or raises ForeignError""" get_foreign_module(namespace) try: return ForeignStruct.get(namespace, name) except KeyError: raise ForeignError("Foreign %s.%s not supported" % (namespace, name)) def require_foreign(namespace, symbol=None): """Raises ImportError if the specified foreign module isn't supported or the needed dependencies aren't installed. e.g.: check_foreign('cairo', 'Context') """ try: if symbol is None: get_foreign_module(namespace) else: get_foreign_struct(namespace, symbol) except ForeignError as e: raise ImportError(e) def get_foreign(namespace, name): """Returns a ForeignStruct instance or None""" try: return get_foreign_struct(namespace, name) except ForeignError: return None
gpl-3.0
MihaiMoldovanu/ansible
lib/ansible/modules/cloud/google/gcp_url_map.py
99
17159
#!/usr/bin/python # Copyright 2017 Google Inc. # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: gcp_url_map version_added: "2.4" short_description: Create, Update or Destory a Url_Map. description: - Create, Update or Destory a Url_Map. See U(https://cloud.google.com/compute/docs/load-balancing/http/url-map) for an overview. More details on the Url_Map API can be found at U(https://cloud.google.com/compute/docs/reference/latest/urlMaps#resource). requirements: - "python >= 2.6" - "google-api-python-client >= 1.6.2" - "google-auth >= 0.9.0" - "google-auth-httplib2 >= 0.0.2" notes: - Only supports global Backend Services. - Url_Map tests are not currently supported. author: - "Tom Melendez (@supertom) <tom@supertom.com>" options: url_map_name: description: - Name of the Url_Map. required: true default_service: description: - Default Backend Service if no host rules match. required: true host_rules: description: - The list of HostRules to use against the URL. Contains a list of hosts and an associated path_matcher. - The 'hosts' parameter is a list of host patterns to match. They must be valid hostnames, except * will match any string of ([a-z0-9-.]*). In that case, * must be the first character and must be followed in the pattern by either - or .. - The 'path_matcher' parameter is name of the PathMatcher to use to match the path portion of the URL if the hostRule matches the URL's host portion. required: false path_matchers: description: - The list of named PathMatchers to use against the URL. Contains path_rules, which is a list of paths and an associated service. A default_service can also be specified for each path_matcher. - The 'name' parameter to which this path_matcher is referred by the host_rule. 
- The 'default_service' parameter is the name of the BackendService resource. This will be used if none of the path_rules defined by this path_matcher is matched by the URL's path portion. - The 'path_rules' parameter is a list of dictionaries containing a list of paths and a service to direct traffic to. Each path item must start with / and the only place a * is allowed is at the end following a /. The string fed to the path matcher does not include any text after the first ? or #, and those chars are not allowed here. required: false ''' EXAMPLES = ''' - name: Create Minimal Url_Map gcp_url_map: service_account_email: "{{ service_account_email }}" credentials_file: "{{ credentials_file }}" project_id: "{{ project_id }}" url_map_name: my-url_map default_service: my-backend-service state: present - name: Create UrlMap with pathmatcher gcp_url_map: service_account_email: "{{ service_account_email }}" credentials_file: "{{ credentials_file }}" project_id: "{{ project_id }}" url_map_name: my-url-map-pm default_service: default-backend-service path_matchers: - name: 'path-matcher-one' description: 'path matcher one' default_service: 'bes-pathmatcher-one-default' path_rules: - service: 'my-one-bes' paths: - '/data' - '/aboutus' host_rules: - hosts: - '*.' path_matcher: 'path-matcher-one' state: "present" ''' RETURN = ''' host_rules: description: List of HostRules. returned: If specified. type: dict sample: [ { hosts: ["*."], "path_matcher": "my-pm" } ] path_matchers: description: The list of named PathMatchers to use against the URL. returned: If specified. type: dict sample: [ { "name": "my-pm", "path_rules": [ { "paths": [ "/data" ] } ], "service": "my-service" } ] state: description: state of the Url_Map returned: Always. type: str sample: present updated_url_map: description: True if the url_map has been updated. Will not appear on initial url_map creation. returned: if the url_map has been updated. 
type: bool sample: true url_map_name: description: Name of the Url_Map returned: Always type: str sample: my-url-map url_map: description: GCP Url_Map dictionary returned: Always. Refer to GCP documentation for detailed field descriptions. type: dict sample: { "name": "my-url-map", "hostRules": [...], "pathMatchers": [...] } ''' try: from ast import literal_eval HAS_PYTHON26 = True except ImportError: HAS_PYTHON26 = False from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.gcp import check_params, get_google_api_client, GCPUtils from ansible.module_utils.six import string_types USER_AGENT_PRODUCT = 'ansible-url_map' USER_AGENT_VERSION = '0.0.1' def _validate_params(params): """ Validate url_map params. This function calls _validate_host_rules_params to verify the host_rules-specific parameters. This function calls _validate_path_matchers_params to verify the path_matchers-specific parameters. :param params: Ansible dictionary containing configuration. :type params: ``dict`` :return: True or raises ValueError :rtype: ``bool`` or `class:ValueError` """ fields = [ {'name': 'default_service', 'type': str, 'required': True}, {'name': 'host_rules', 'type': list}, {'name': 'path_matchers', 'type': list}, ] try: check_params(params, fields) if 'path_matchers' in params and params['path_matchers'] is not None: _validate_path_matcher_params(params['path_matchers']) if 'host_rules' in params and params['host_rules'] is not None: _validate_host_rules_params(params['host_rules']) except: raise return (True, '') def _validate_path_matcher_params(path_matchers): """ Validate configuration for path_matchers. :param path_matchers: Ansible dictionary containing path_matchers configuration (only). 
:type path_matchers: ``dict`` :return: True or raises ValueError :rtype: ``bool`` or `class:ValueError` """ fields = [ {'name': 'name', 'type': str, 'required': True}, {'name': 'default_service', 'type': str, 'required': True}, {'name': 'path_rules', 'type': list, 'required': True}, {'name': 'max_rate', 'type': int}, {'name': 'max_rate_per_instance', 'type': float}, ] pr_fields = [ {'name': 'service', 'type': str, 'required': True}, {'name': 'paths', 'type': list, 'required': True}, ] if not path_matchers: raise ValueError(('path_matchers should be a list. %s (%s) provided' % (path_matchers, type(path_matchers)))) for pm in path_matchers: try: check_params(pm, fields) for pr in pm['path_rules']: check_params(pr, pr_fields) for path in pr['paths']: if not path.startswith('/'): raise ValueError("path for %s must start with /" % ( pm['name'])) except: raise return (True, '') def _validate_host_rules_params(host_rules): """ Validate configuration for host_rules. :param host_rules: Ansible dictionary containing host_rules configuration (only). :type host_rules ``dict`` :return: True or raises ValueError :rtype: ``bool`` or `class:ValueError` """ fields = [ {'name': 'path_matcher', 'type': str, 'required': True}, ] if not host_rules: raise ValueError('host_rules should be a list.') for hr in host_rules: try: check_params(hr, fields) for host in hr['hosts']: if not isinstance(host, string_types): raise ValueError("host in hostrules must be a string") elif '*' in host: if host.index('*') != 0: raise ValueError("wildcard must be first char in host, %s" % ( host)) else: if host[1] not in ['.', '-', ]: raise ValueError("wildcard be followed by a '.' or '-', %s" % ( host)) except: raise return (True, '') def _build_path_matchers(path_matcher_list, project_id): """ Reformat services in path matchers list. Specifically, builds out URLs. :param path_matcher_list: The GCP project ID. :type path_matcher_list: ``list`` of ``dict`` :param project_id: The GCP project ID. 
:type project_id: ``str`` :return: list suitable for submission to GCP UrlMap API Path Matchers list. :rtype ``list`` of ``dict`` """ url = '' if project_id: url = GCPUtils.build_googleapi_url(project_id) for pm in path_matcher_list: if 'defaultService' in pm: pm['defaultService'] = '%s/global/backendServices/%s' % (url, pm['defaultService']) if 'pathRules' in pm: for rule in pm['pathRules']: if 'service' in rule: rule['service'] = '%s/global/backendServices/%s' % (url, rule['service']) return path_matcher_list def _build_url_map_dict(params, project_id=None): """ Reformat services in Ansible Params. :param params: Params from AnsibleModule object :type params: ``dict`` :param project_id: The GCP project ID. :type project_id: ``str`` :return: dictionary suitable for submission to GCP UrlMap API. :rtype ``dict`` """ url = '' if project_id: url = GCPUtils.build_googleapi_url(project_id) gcp_dict = GCPUtils.params_to_gcp_dict(params, 'url_map_name') if 'defaultService' in gcp_dict: gcp_dict['defaultService'] = '%s/global/backendServices/%s' % (url, gcp_dict['defaultService']) if 'pathMatchers' in gcp_dict: gcp_dict['pathMatchers'] = _build_path_matchers(gcp_dict['pathMatchers'], project_id) return gcp_dict def get_url_map(client, name, project_id=None): """ Get a Url_Map from GCP. :param client: An initialized GCE Compute Disovery resource. :type client: :class: `googleapiclient.discovery.Resource` :param name: Name of the Url Map. :type name: ``str`` :param project_id: The GCP project ID. :type project_id: ``str`` :return: A dict resp from the respective GCP 'get' request. :rtype: ``dict`` """ try: req = client.urlMaps().get(project=project_id, urlMap=name) return GCPUtils.execute_api_client_req(req, raise_404=False) except: raise def create_url_map(client, params, project_id): """ Create a new Url_Map. :param client: An initialized GCE Compute Disovery resource. 
:type client: :class: `googleapiclient.discovery.Resource` :param params: Dictionary of arguments from AnsibleModule. :type params: ``dict`` :return: Tuple with changed status and response dict :rtype: ``tuple`` in the format of (bool, dict) """ gcp_dict = _build_url_map_dict(params, project_id) try: req = client.urlMaps().insert(project=project_id, body=gcp_dict) return_data = GCPUtils.execute_api_client_req(req, client, raw=False) if not return_data: return_data = get_url_map(client, name=params['url_map_name'], project_id=project_id) return (True, return_data) except: raise def delete_url_map(client, name, project_id): """ Delete a Url_Map. :param client: An initialized GCE Compute Disover resource. :type client: :class: `googleapiclient.discovery.Resource` :param name: Name of the Url Map. :type name: ``str`` :param project_id: The GCP project ID. :type project_id: ``str`` :return: Tuple with changed status and response dict :rtype: ``tuple`` in the format of (bool, dict) """ try: req = client.urlMaps().delete(project=project_id, urlMap=name) return_data = GCPUtils.execute_api_client_req(req, client) return (True, return_data) except: raise def update_url_map(client, url_map, params, name, project_id): """ Update a Url_Map. If the url_map has not changed, the update will not occur. :param client: An initialized GCE Compute Disovery resource. :type client: :class: `googleapiclient.discovery.Resource` :param url_map: Name of the Url Map. :type url_map: ``dict`` :param params: Dictionary of arguments from AnsibleModule. :type params: ``dict`` :param name: Name of the Url Map. :type name: ``str`` :param project_id: The GCP project ID. 
:type project_id: ``str`` :return: Tuple with changed status and response dict :rtype: ``tuple`` in the format of (bool, dict) """ gcp_dict = _build_url_map_dict(params, project_id) ans = GCPUtils.are_params_equal(url_map, gcp_dict) if ans: return (False, 'no update necessary') gcp_dict['fingerprint'] = url_map['fingerprint'] try: req = client.urlMaps().update(project=project_id, urlMap=name, body=gcp_dict) return_data = GCPUtils.execute_api_client_req(req, client=client, raw=False) return (True, return_data) except: raise def main(): module = AnsibleModule(argument_spec=dict( url_map_name=dict(required=True), state=dict(choices=['absent', 'present'], default='present'), default_service=dict(required=True), path_matchers=dict(type='list', required=False), host_rules=dict(type='list', required=False), service_account_email=dict(), service_account_permissions=dict(type='list'), pem_file=dict(), credentials_file=dict(), project_id=dict(), ), required_together=[ ['path_matchers', 'host_rules'], ]) client, conn_params = get_google_api_client(module, 'compute', user_agent_product=USER_AGENT_PRODUCT, user_agent_version=USER_AGENT_VERSION) params = {} params['state'] = module.params.get('state') params['url_map_name'] = module.params.get('url_map_name') params['default_service'] = module.params.get('default_service') if module.params.get('path_matchers'): params['path_matchers'] = module.params.get('path_matchers') if module.params.get('host_rules'): params['host_rules'] = module.params.get('host_rules') try: _validate_params(params) except Exception as e: module.fail_json(msg=e.message, changed=False) changed = False json_output = {'state': params['state']} url_map = get_url_map(client, name=params['url_map_name'], project_id=conn_params['project_id']) if not url_map: if params['state'] == 'absent': # Doesn't exist in GCE, and state==absent. 
changed = False module.fail_json( msg="Cannot delete unknown url_map: %s" % (params['url_map_name'])) else: # Create changed, json_output['url_map'] = create_url_map(client, params=params, project_id=conn_params['project_id']) elif params['state'] == 'absent': # Delete changed, json_output['url_map'] = delete_url_map(client, name=params['url_map_name'], project_id=conn_params['project_id']) else: changed, json_output['url_map'] = update_url_map(client, url_map=url_map, params=params, name=params['url_map_name'], project_id=conn_params['project_id']) json_output['updated_url_map'] = changed json_output['changed'] = changed json_output.update(params) module.exit_json(**json_output) if __name__ == '__main__': main()
gpl-3.0
stonekyx/binary
vendor/scons-local-2.3.4/SCons/CacheDir.py
9
8083
# # Copyright (c) 2001 - 2014 The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
# __revision__ = "src/engine/SCons/CacheDir.py 2014/09/27 12:51:43 garyo" __doc__ = """ CacheDir support """ import os.path import stat import sys import SCons.Action cache_enabled = True cache_debug = False cache_force = False cache_show = False cache_readonly = False def CacheRetrieveFunc(target, source, env): t = target[0] fs = t.fs cd = env.get_CacheDir() cachedir, cachefile = cd.cachepath(t) if not fs.exists(cachefile): cd.CacheDebug('CacheRetrieve(%s): %s not in cache\n', t, cachefile) return 1 cd.CacheDebug('CacheRetrieve(%s): retrieving from %s\n', t, cachefile) if SCons.Action.execute_actions: if fs.islink(cachefile): fs.symlink(fs.readlink(cachefile), t.path) else: env.copy_from_cache(cachefile, t.path) st = fs.stat(cachefile) fs.chmod(t.path, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE) return 0 def CacheRetrieveString(target, source, env): t = target[0] fs = t.fs cd = env.get_CacheDir() cachedir, cachefile = cd.cachepath(t) if t.fs.exists(cachefile): return "Retrieved `%s' from cache" % t.path return None CacheRetrieve = SCons.Action.Action(CacheRetrieveFunc, CacheRetrieveString) CacheRetrieveSilent = SCons.Action.Action(CacheRetrieveFunc, None) def CachePushFunc(target, source, env): if cache_readonly: return t = target[0] if t.nocache: return fs = t.fs cd = env.get_CacheDir() cachedir, cachefile = cd.cachepath(t) if fs.exists(cachefile): # Don't bother copying it if it's already there. Note that # usually this "shouldn't happen" because if the file already # existed in cache, we'd have retrieved the file from there, # not built it. This can happen, though, in a race, if some # other person running the same build pushes their copy to # the cache after we decide we need to build it but before our # build completes. cd.CacheDebug('CachePush(%s): %s already exists in cache\n', t, cachefile) return cd.CacheDebug('CachePush(%s): pushing to %s\n', t, cachefile) tempfile = cachefile+'.tmp'+str(os.getpid()) errfmt = "Unable to copy %s to cache. 
Cache file is %s" if not fs.isdir(cachedir): try: fs.makedirs(cachedir) except EnvironmentError: # We may have received an exception because another process # has beaten us creating the directory. if not fs.isdir(cachedir): msg = errfmt % (str(target), cachefile) raise SCons.Errors.EnvironmentError(msg) try: if fs.islink(t.path): fs.symlink(fs.readlink(t.path), tempfile) else: fs.copy2(t.path, tempfile) fs.rename(tempfile, cachefile) st = fs.stat(t.path) fs.chmod(cachefile, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE) except EnvironmentError: # It's possible someone else tried writing the file at the # same time we did, or else that there was some problem like # the CacheDir being on a separate file system that's full. # In any case, inability to push a file to cache doesn't affect # the correctness of the build, so just print a warning. msg = errfmt % (str(target), cachefile) SCons.Warnings.warn(SCons.Warnings.CacheWriteErrorWarning, msg) CachePush = SCons.Action.Action(CachePushFunc, None) class CacheDir(object): def __init__(self, path): try: import hashlib except ImportError: msg = "No hashlib or MD5 module available, CacheDir() not supported" SCons.Warnings.warn(SCons.Warnings.NoMD5ModuleWarning, msg) self.path = None else: self.path = path self.current_cache_debug = None self.debugFP = None def CacheDebug(self, fmt, target, cachefile): if cache_debug != self.current_cache_debug: if cache_debug == '-': self.debugFP = sys.stdout elif cache_debug: self.debugFP = open(cache_debug, 'w') else: self.debugFP = None self.current_cache_debug = cache_debug if self.debugFP: self.debugFP.write(fmt % (target, os.path.split(cachefile)[1])) def is_enabled(self): return (cache_enabled and not self.path is None) def is_readonly(self): return cache_readonly def cachepath(self, node): """ """ if not self.is_enabled(): return None, None sig = node.get_cachedir_bsig() subdir = sig[0].upper() dir = os.path.join(self.path, subdir) return dir, os.path.join(dir, sig) def 
retrieve(self, node): """ This method is called from multiple threads in a parallel build, so only do thread safe stuff here. Do thread unsafe stuff in built(). Note that there's a special trick here with the execute flag (one that's not normally done for other actions). Basically if the user requested a no_exec (-n) build, then SCons.Action.execute_actions is set to 0 and when any action is called, it does its showing but then just returns zero instead of actually calling the action execution operation. The problem for caching is that if the file does NOT exist in cache then the CacheRetrieveString won't return anything to show for the task, but the Action.__call__ won't call CacheRetrieveFunc; instead it just returns zero, which makes the code below think that the file *was* successfully retrieved from the cache, therefore it doesn't do any subsequent building. However, the CacheRetrieveString didn't print anything because it didn't actually exist in the cache, and no more build actions will be performed, so the user just sees nothing. The fix is to tell Action.__call__ to always execute the CacheRetrieveFunc and then have the latter explicitly check SCons.Action.execute_actions itself. """ if not self.is_enabled(): return False env = node.get_build_env() if cache_show: if CacheRetrieveSilent(node, [], env, execute=1) == 0: node.build(presub=0, execute=0) return True else: if CacheRetrieve(node, [], env, execute=1) == 0: return True return False def push(self, node): if self.is_readonly() or not self.is_enabled(): return return CachePush(node, [], node.get_build_env()) def push_if_forced(self, node): if cache_force: return self.push(node) # Local Variables: # tab-width:4 # indent-tabs-mode:nil # End: # vim: set expandtab tabstop=4 shiftwidth=4:
gpl-3.0
bsipocz/pyspeckit
pyspeckit/cubes/mapplot.py
2
16597
""" MapPlot ------- Make plots of the cube and interactively connect them to spectrum plotting. This is really an interactive component of the package; nothing in here is meant for publication-quality plots, but more for user interactive analysis. That said, the plotter makes use of `APLpy <https://github.com/aplpy/aplpy>`_, so it is possible to make publication-quality plots. :author: Adam Ginsburg :date: 03/17/2011 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ """ import matplotlib import matplotlib.pyplot import matplotlib.figure import numpy as np import copy import itertools from pyspeckit.specwarnings import warn try: import astropy.wcs as pywcs import astropy.io.fits as pyfits pywcsOK = True except ImportError: try: import pyfits import pywcs pywcsOK = True except ImportError: pywcsOK = False import cubes try: import aplpy icanhasaplpy = True except: # aplpy fails with generic exceptions instead of ImportError icanhasaplpy = False try: import coords icanhascoords = True except ImportError: icanhascoords = False class MapPlotter(object): """ Class to plot a spectrum See `mapplot` for use documentation; this docstring is only for initialization. 
""" def __init__(self, Cube=None, figure=None, doplot=False, **kwargs): """ Create a map figure for future plotting """ # figure out where to put the plot if isinstance(figure,matplotlib.figure.Figure): self.figure = figure elif type(figure) is int: self.figure = matplotlib.pyplot.figure(figure) else: self.figure = None self.axis = None self.FITSFigure = None self._click_marks = [] self._circles = [] self._clickX = None self._clickY = None self.overplot_colorcycle = itertools.cycle(['b', 'g', 'r', 'c', 'm', 'y']) self.overplot_linestyle = '-' self.Cube = Cube if self.Cube is not None: self.header = cubes.flatten_header(self.Cube.header, delete=True) if pywcsOK: self.wcs = pywcs.WCS(self.header) if doplot: self.mapplot(**kwargs) def __call__(self, **kwargs): """ see mapplot """ return self.mapplot(**kwargs) def mapplot(self, convention='calabretta', colorbar=True, useaplpy=True, vmin=None, vmax=None, cmap=None, plotkwargs={}, **kwargs): """ Plot up a map based on an input data cube. The map to be plotted is selected using `makeplane`. The `estimator` keyword argument is passed to that function. The plotted map, once shown, is interactive. You can click on it with any of the three mouse buttons. Button 1 or keyboard '1': Plot the selected pixel's spectrum in another window. Mark the clicked pixel with an 'x' Button 2 or keyboard 'o': Overplot a second (or third, fourth, fifth...) spectrum in the external plot window Button 3: Disconnect the interactive viewer You can also click-and-drag with button 1 to average over a circular region. This same effect can be achieved by using the 'c' key to set the /c/enter of a circle and the 'r' key to set its /r/adius (i.e., hover over the center and press 'c', then hover some distance away and press 'r'). Parameters ---------- convention : 'calabretta' or 'griesen' The default projection to assume for Galactic data when plotting with aplpy. 
colorbar : bool Whether to show a colorbar plotkwargs : dict, optional A dictionary of keyword arguments to pass to aplpy.show_colorscale or matplotlib.pyplot.imshow useaplpy : bool Use aplpy if a FITS header is available vmin, vmax: float or None Override values for the vmin/vmax values. Will be automatically determined if left as None .. todo: Allow mapplot in subfigure """ if self.figure is None: self.figure = matplotlib.pyplot.figure() else: self._disconnect() self.figure.clf() # this is where the map is created; everything below this is just plotting self.makeplane(**kwargs) # have tot pop out estimator so that kwargs can be passed to imshow if 'estimator' in kwargs: kwargs.pop('estimator') # Below here is all plotting stuff if vmin is None: vmin = self.plane[self.plane==self.plane].min() if vmax is None: vmax = self.plane[self.plane==self.plane].max() if icanhasaplpy and useaplpy: self.fitsfile = pyfits.PrimaryHDU(data=self.plane,header=self.header) self.FITSFigure = aplpy.FITSFigure(self.fitsfile,figure=self.figure,convention=convention) self.FITSFigure.show_colorscale(vmin=vmin, vmax=vmax, cmap=cmap, **plotkwargs) self.axis = self.FITSFigure._ax1 if colorbar: try: self.FITSFigure.add_colorbar() except Exception as ex: print "ERROR: Could not create colorbar! Error was %s" % str(ex) self._origin = 0 # FITS convention # TODO: set _origin to 1 if using PIXEL units, not real wcs else: self.axis = self.figure.add_subplot(111) if hasattr(self,'colorbar') and self.colorbar is not None: if self.colorbar.ax in self.axis.figure.axes: self.axis.figure.delaxes(self.colorbar.ax) self.axis.imshow(self.plane, vmin=vmin, vmax=vmax, cmap=cmap, **plotkwargs) if colorbar: try: self.colorbar = matplotlib.pyplot.colorbar(self.axis.images[0]) except Exception as ex: print "ERROR: Could not create colorbar! 
Error was %s" % str(ex) self._origin = 0 # normal convention self.canvas = self.axis.figure.canvas self._connect() def _connect(self): """ Connect click, click up (release click), and key press to events """ self.clickid = self.canvas.callbacks.connect('button_press_event',self.click) self.clickupid = self.canvas.callbacks.connect('button_release_event',self.plot_spectrum) self.keyid = self.canvas.callbacks.connect('key_press_event',self.plot_spectrum) def _disconnect(self): """ Disconnect click, click up (release click), and key press from events """ if hasattr(self,'canvas'): self.canvas.mpl_disconnect(self.clickid) self.canvas.mpl_disconnect(self.clickupid) self.canvas.mpl_disconnect(self.keyid) def makeplane(self, estimator=np.mean): """ Create a "plane" view of the cube, either by slicing or projecting it or by showing a slice from the best-fit model parameter cube. Parameters ---------- estimator : [ function | 'max' | 'int' | FITS filename | integer | slice ] A non-pythonic, non-duck-typed variable. If it's a function, apply that function along the cube's spectral axis to obtain an estimate (e.g., mean, min, max, etc.). 'max' will do the same thing as passing np.max 'int' will attempt to integrate the image (which is why I didn't duck-type) (integrate means sum and multiply by dx) a .fits filename will be read using pyfits (so you can make your own cover figure) an integer will get the n'th slice in the parcube if it exists If it's a slice, slice the input data cube along the Z-axis with this slice """ # THIS IS A HACK!!! isinstance(a function, function) must be a thing... 
FUNCTION = type(np.max) # estimator is NOT duck-typed if type(estimator) is FUNCTION: self.plane = estimator(self.Cube.cube,axis=0) elif type(estimator) is str: if estimator == 'max': self.plane = self.Cube.cube.max(axis=0) elif estimator == 'int': dx = np.abs(self.Cube.xarr[1:] - self.Cube.xarr[:-1]) dx = np.concatenate([dx,[dx[-1]]]) self.plane = (self.Cube.cube * dx[:,np.newaxis,np.newaxis]).sum(axis=0) elif estimator[-5:] == ".fits": self.plane = pyfits.getdata(estimator) elif type(estimator) is slice: self.plane = self.Cube.cube[estimator,:,:] elif type(estimator) is int: if hasattr(self.Cube,'parcube'): self.plane = self.Cube.parcube[estimator,:,:] if self.plane is None: raise ValueError("Invalid estimator %s" % (str(estimator))) if np.sum(np.isfinite(self.plane)) == 0: raise ValueError("Map is all NaNs or infs. Check your estimator or your input cube.") def click(self,event): """ Record location of downclick """ if event.inaxes: self._clickX = np.round(event.xdata) - self._origin self._clickY = np.round(event.ydata) - self._origin def plot_spectrum(self, event, plot_fit=True): """ Connects map cube to Spectrum... 
""" self.event = event if event.inaxes: clickX = np.round(event.xdata) - self._origin clickY = np.round(event.ydata) - self._origin # grab toolbar info so that we don't do anything if a tool is selected tb = self.canvas.toolbar if tb.mode != '': return elif event.key is not None: if event.key == 'c': self._center = (clickX-1,clickY-1) self._remove_circle() self._add_click_mark(clickX,clickY,clear=True) elif event.key == 'r': x,y = self._center self._add_circle(x,y,clickX,clickY) self.circle(x,y,clickX-1,clickY-1) elif event.key == 'o': clickX,clickY = round(clickX),round(clickY) print "OverPlotting spectrum from point %i,%i" % (clickX-1,clickY-1) color=self.overplot_colorcycle.next() self._add_click_mark(clickX,clickY,clear=False, color=color) self.Cube.plot_spectrum(clickX-1,clickY-1,clear=False, color=color, linestyle=self.overplot_linestyle) elif event.key in ('1','2'): event.button = int(event.key) event.key = None self.plot_spectrum(event) elif (hasattr(event,'button') and event.button in (1,2) and not (self._clickX == clickX and self._clickY == clickY)): if event.button == 1: self._remove_circle() clear=True color = 'k' linestyle = 'steps-mid' else: color = self.overplot_colorcycle.next() linestyle = self.overplot_linestyle clear=False rad = ( (self._clickX-clickX)**2 + (self._clickY-clickY)**2 )**0.5 print "Plotting circle from point %i,%i to %i,%i (r=%f)" % (self._clickX-1,self._clickY-1,clickX-1,clickY-1,rad) self._add_circle(self._clickX,self._clickY,clickX,clickY) self.circle(self._clickX-1,self._clickY-1,clickX-1,clickY-1,clear=clear,linestyle=linestyle,color=color) elif hasattr(event,'button') and event.button is not None: if event.button==1: clickX,clickY = round(clickX),round(clickY) print "Plotting spectrum from point %i,%i" % (clickX-1,clickY-1) self._remove_circle() self._add_click_mark(clickX,clickY,clear=True) self.Cube.plot_spectrum(clickX-1,clickY-1,clear=True) if plot_fit: self.Cube.plot_fit(clickX-1, clickY-1, silent=True) elif 
event.button==2: clickX,clickY = round(clickX),round(clickY) print "OverPlotting spectrum from point %i,%i" % (clickX-1,clickY-1) color=self.overplot_colorcycle.next() self._add_click_mark(clickX,clickY,clear=False, color=color) self.Cube.plot_spectrum(clickX-1,clickY-1,clear=False, color=color, linestyle=self.overplot_linestyle) elif event.button==3: print "Disconnecting GAIA-like tool" self._disconnect() else: print "Call failed for some reason: " print "event: ",event else: pass # never really needed... warn("Click outside of axes") def _add_click_mark(self,x,y,clear=False,color='k'): """ Add an X at some position """ if clear: self._clear_click_marks() if self.FITSFigure is not None: label = 'xmark%i' % (len(self._click_marks)+1) x,y = self.FITSFigure.pixel2world(x,y) self.FITSFigure.show_markers(x,y,marker='x',c=color,layer=label) self._click_marks.append( label ) else: self._click_marks.append( self.axis.plot(x,y,'kx') ) self.refresh() def _clear_click_marks(self): """ Remove all marks added by previous clicks """ if self.FITSFigure is not None: for mark in self._click_marks: if mark in self.FITSFigure._layers: self.FITSFigure.remove_layer(mark) else: for mark in self._click_marks: self._click_marks.remove(mark) if mark in self.axis.lines: self.axis.lines.remove(mark) self.refresh() def _add_circle(self,x,y,x2,y2,**kwargs): """ """ if self.FITSFigure is not None: x,y = self.FITSFigure.pixel2world(x,y) x2,y2 = self.FITSFigure.pixel2world(x2,y2) r = (np.linalg.norm(np.array([x,y])-np.array([x2,y2]))) #self.FITSFigure.show_markers(x,y,s=r,marker='o',facecolor='none',edgecolor='black',layer='circle') layername = "circle%02i" % len(self._circles) self.FITSFigure.show_circles(x,y,r,edgecolor='black',facecolor='none',layer=layername,**kwargs) self._circles.append(layername) else: r = np.linalg.norm(np.array([x,y])-np.array([x2,y2])) circle = matplotlib.patches.Circle([x,y],radius=r,**kwargs) self._circles.append( circle ) self.axis.patches.append(circle) 
self.refresh() def _remove_circle(self): """ """ if self.FITSFigure is not None: for layername in self._circles: if layername in self.FITSFigure._layers: self.FITSFigure.remove_layer(layername) else: for circle in self._circles: if circle in self.axis.patches: self.axis.patches.remove(circle) self._circles.remove(circle) self.refresh() def refresh(self): if self.axis is not None: self.axis.figure.canvas.draw() def circle(self,x1,y1,x2,y2,**kwargs): """ Plot the spectrum of a circular aperture """ r = (np.linalg.norm(np.array([x1,y1])-np.array([x2,y2]))) self.Cube.plot_apspec([x1,y1,r],**kwargs) #self.Cube.data = cubes.extract_aperture( self.Cube.cube, [x1,y1,r] , coordsys=None ) #self.Cube.plotter() def copy(self, parent=None): """ Create a copy of the map plotter with blank (uninitialized) axis & figure [ parent ] A spectroscopic axis instance that is the parent of the specfit instance. This needs to be specified at some point, but defaults to None to prevent overwriting a previous plot. """ newmapplot = copy.copy(self) newmapplot.Cube = parent newmapplot.axis = None newmapplot.figure = None return newmapplot
mit
2014c2g2/teamwork
exts/w2/static/Brython2.0.0-20140209-164925/Lib/external_import.py
742
2985
import os from browser import doc import urllib.request ## this module is able to download modules that are external to ## localhost/src ## so we could download from any URL class ModuleFinder: def __init__(self, path_entry): print("external_import here..") #print(path_entry) self._module=None if path_entry.startswith('http://'): self.path_entry=path_entry else: raise ImportError() def __str__(self): return '<%s for "%s">' % (self.__class__.__name__, self.path_entry) def find_module(self, fullname, path=None): path = path or self.path_entry #print('looking for "%s" in %s ...' % (fullname, path)) for _ext in ['js', 'pyj', 'py']: _fp,_url,_headers=urllib.request.urlopen(path + '/' + '%s.%s' % (fullname, _ext)) self._module=_fp.read() _fp.close() if self._module is not None: print("module found at %s:%s" % (path, fullname)) return ModuleLoader(path, fullname, self._module) print('module %s not found' % fullname) raise ImportError() return None class ModuleLoader: """Load source for modules""" def __init__(self, filepath, name, module_source): self._filepath=filepath self._name=name self._module_source=module_source def get_source(self): return self._module_source def is_package(self): return '.' 
in self._name def load_module(self): if self._name in sys.modules: #print('reusing existing module from previous import of "%s"' % fullname) mod = sys.modules[self._name] return mod _src=self.get_source() if self._filepath.endswith('.js'): mod=JSObject(import_js_module(_src, self._filepath, self._name)) elif self._filepath.endswith('.py'): mod=JSObject(import_py_module(_src, self._filepath, self._name)) elif self._filepath.endswith('.pyj'): mod=JSObject(import_pyj_module(_src, self._filepath, self._name)) else: raise ImportError('Invalid Module: %s' % self._filepath) # Set a few properties required by PEP 302 mod.__file__ = self._filepath mod.__name__ = self._name mod.__path__ = os.path.abspath(self._filepath) mod.__loader__ = self mod.__package__ = '.'.join(self._name.split('.')[:-1]) if self.is_package(): print('adding path for package') # Set __path__ for packages # so we can find the sub-modules. mod.__path__ = [ self._filepath ] else: print('imported as regular module') print('creating a new module object for "%s"' % self._name) sys.modules.setdefault(self._name, mod) JSObject(__BRYTHON__.imported)[self._name]=mod return mod
gpl-2.0
jeremyh/agdc
src/waitForProjectUserQueueSlot.py
4
4718
#!/usr/bin/env python #=============================================================================== # Copyright (c) 2014 Geoscience Australia # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither Geoscience Australia nor the names of its contributors may be # used to endorse or promote products derived from this software # without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY # DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
#=============================================================================== '''Wait for a queue slot for the specified user ensuring maxJobsForUser is never exceeded Sleep the specfied number of seconds between tests if queue is "full" usage ./waitForUserQueueSlot.py -q <queuename> -m <maxJobsForUser> -s <sleepTimeSeconds> TODO: Note, queuename is currently ignored, all queues are counted @Author: Steven Ring Modified by Matthew Hardy 27/02/2014 to set a limit on jobs running under a project ''' import os, sys, re, argparse, subprocess, time def waitForQueueSlot(queue, projectCode, maxJobsForUser, maxJobsForProject, sleepTimeSeconds) : while True: [userJobCount,projectJobCount] = countJobsForUserAndProject(os.getenv('LOGNAME'),projectCode) print "User has %d jobs in queue, max is %d\\nProject has %d jobs in queue, max is %d\n" % (userJobCount, maxJobsForUser, projectJobCount, maxJobsForProject) if userJobCount >= maxJobsForUser or projectJobCount >= maxJobsForProject: print "sleeping %d seconds, waiting for queue slot" % sleepTimeSeconds time.sleep(sleepTimeSeconds) else: return def countJobsForUserAndProject(userId, projectCode) : s1 = "Job_Owner = %s@(.*)" % userId s2 = "group_list = %s" % projectCode userPattern = re.compile(s1) projectPattern = re.compile(s2) proc = subprocess.Popen(["/opt/pbs/default/bin/qstat","-f"], stdout=subprocess.PIPE) userCount = 0 projectCount = 0 while True: line = proc.stdout.readline() if line : if userPattern.search(line): userCount += 1 if projectPattern.search(line): projectCount += 1 else: break return [userCount, projectCount] description="" parser = argparse.ArgumentParser(description) parser.add_argument('-q', dest="queue", help="Name of the queue for the water_stacker jobs (default: normal)", default="normal") parser.add_argument('-m', dest="maxUserJobsInQueue", help="Maximum jobs queued under user ID at any instant (default: 10)", default=10) parser.add_argument('-p', dest="maxProjectJobsInQueue", help="Maximum jobs 
queued under project ID at any instant (default: 10)", default=10) parser.add_argument('-c', dest="projectCode", help="string defining project code (default: v10)", default="v10") parser.add_argument('-s', dest="sleepTimeSeconds", help="Sleep time in seconds between slot availablity test (default: 60)", default=60) args = parser.parse_args() queue = args.queue projectCode = args.projectCode # parse and check maxJobsInQueue maxUserJobsInQueue = int(args.maxUserJobsInQueue) maxProjectJobsInQueue = int(args.maxProjectJobsInQueue) if (not maxUserJobsInQueue) or (not maxProjectJobsInQueue): print "maxUserJobsInQueue and maxProjectJobsInQueue must be a positive integer" parser.print_usage() sys.exit(1) waitForQueueSlot(args.queue, projectCode, maxUserJobsInQueue, maxProjectJobsInQueue, int(args.sleepTimeSeconds))
bsd-3-clause
vishnugonela/boto
tests/unit/ec2/test_blockdevicemapping.py
111
6739
from tests.compat import unittest from boto.ec2.connection import EC2Connection from boto.ec2.blockdevicemapping import BlockDeviceType, BlockDeviceMapping from tests.compat import OrderedDict from tests.unit import AWSMockServiceTestCase class BlockDeviceTypeTests(unittest.TestCase): def setUp(self): self.block_device_type = BlockDeviceType() def check_that_attribute_has_been_set(self, name, value, attribute): self.block_device_type.endElement(name, value, None) self.assertEqual(getattr(self.block_device_type, attribute), value) def test_endElement_sets_correct_attributes_with_values(self): for arguments in [("volumeId", 1, "volume_id"), ("virtualName", "some name", "ephemeral_name"), ("snapshotId", 1, "snapshot_id"), ("volumeSize", 1, "size"), ("status", "some status", "status"), ("attachTime", 1, "attach_time"), ("somethingRandom", "somethingRandom", "somethingRandom")]: self.check_that_attribute_has_been_set(arguments[0], arguments[1], arguments[2]) def test_endElement_with_name_NoDevice_value_true(self): self.block_device_type.endElement("NoDevice", 'true', None) self.assertEqual(self.block_device_type.no_device, True) def test_endElement_with_name_NoDevice_value_other(self): self.block_device_type.endElement("NoDevice", 'something else', None) self.assertEqual(self.block_device_type.no_device, False) def test_endElement_with_name_deleteOnTermination_value_true(self): self.block_device_type.endElement("deleteOnTermination", "true", None) self.assertEqual(self.block_device_type.delete_on_termination, True) def test_endElement_with_name_deleteOnTermination_value_other(self): self.block_device_type.endElement("deleteOnTermination", 'something else', None) self.assertEqual(self.block_device_type.delete_on_termination, False) def test_endElement_with_name_encrypted_value_true(self): self.block_device_type.endElement("Encrypted", "true", None) self.assertEqual(self.block_device_type.encrypted, True) def test_endElement_with_name_Encrypted_value_other(self): 
self.block_device_type.endElement("Encrypted", 'something else', None) self.assertEqual(self.block_device_type.encrypted, False) class BlockDeviceMappingTests(unittest.TestCase): def setUp(self): self.block_device_mapping = BlockDeviceMapping() def block_device_type_eq(self, b1, b2): if isinstance(b1, BlockDeviceType) and isinstance(b2, BlockDeviceType): return all([b1.connection == b2.connection, b1.ephemeral_name == b2.ephemeral_name, b1.no_device == b2.no_device, b1.volume_id == b2.volume_id, b1.snapshot_id == b2.snapshot_id, b1.status == b2.status, b1.attach_time == b2.attach_time, b1.delete_on_termination == b2.delete_on_termination, b1.size == b2.size, b1.encrypted == b2.encrypted]) def test_startElement_with_name_ebs_sets_and_returns_current_value(self): retval = self.block_device_mapping.startElement("ebs", None, None) assert self.block_device_type_eq(retval, BlockDeviceType(self.block_device_mapping)) def test_startElement_with_name_virtualName_sets_and_returns_current_value(self): retval = self.block_device_mapping.startElement("virtualName", None, None) assert self.block_device_type_eq(retval, BlockDeviceType(self.block_device_mapping)) def test_endElement_with_name_device_sets_current_name_dev_null(self): self.block_device_mapping.endElement("device", "/dev/null", None) self.assertEqual(self.block_device_mapping.current_name, "/dev/null") def test_endElement_with_name_device_sets_current_name(self): self.block_device_mapping.endElement("deviceName", "some device name", None) self.assertEqual(self.block_device_mapping.current_name, "some device name") def test_endElement_with_name_item_sets_current_name_key_to_current_value(self): self.block_device_mapping.current_name = "some name" self.block_device_mapping.current_value = "some value" self.block_device_mapping.endElement("item", "some item", None) self.assertEqual(self.block_device_mapping["some name"], "some value") class TestLaunchConfiguration(AWSMockServiceTestCase): connection_class = 
EC2Connection def default_body(self): # This is a dummy response return b""" <DescribeLaunchConfigurationsResponse> </DescribeLaunchConfigurationsResponse> """ def test_run_instances_block_device_mapping(self): # Same as the test in ``unit/ec2/autoscale/test_group.py:TestLaunchConfiguration``, # but with modified request parameters (due to a mismatch between EC2 & # Autoscaling). self.set_http_response(status_code=200) dev_sdf = BlockDeviceType(snapshot_id='snap-12345') dev_sdg = BlockDeviceType(snapshot_id='snap-12346', delete_on_termination=True, encrypted=True) class OrderedBlockDeviceMapping(OrderedDict, BlockDeviceMapping): pass bdm = OrderedBlockDeviceMapping() bdm.update(OrderedDict((('/dev/sdf', dev_sdf), ('/dev/sdg', dev_sdg)))) response = self.service_connection.run_instances( image_id='123456', instance_type='m1.large', security_groups=['group1', 'group2'], block_device_map=bdm ) self.assert_request_parameters({ 'Action': 'RunInstances', 'BlockDeviceMapping.1.DeviceName': '/dev/sdf', 'BlockDeviceMapping.1.Ebs.DeleteOnTermination': 'false', 'BlockDeviceMapping.1.Ebs.SnapshotId': 'snap-12345', 'BlockDeviceMapping.2.DeviceName': '/dev/sdg', 'BlockDeviceMapping.2.Ebs.DeleteOnTermination': 'true', 'BlockDeviceMapping.2.Ebs.SnapshotId': 'snap-12346', 'BlockDeviceMapping.2.Ebs.Encrypted': 'true', 'ImageId': '123456', 'InstanceType': 'm1.large', 'MaxCount': 1, 'MinCount': 1, 'SecurityGroup.1': 'group1', 'SecurityGroup.2': 'group2', }, ignore_params_values=[ 'Version', 'AWSAccessKeyId', 'SignatureMethod', 'SignatureVersion', 'Timestamp' ]) if __name__ == "__main__": unittest.main()
mit
wscullin/spack
var/spack/repos/builtin/packages/tmhmm/package.py
3
2341
############################################################################## # Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. # # This file is part of Spack. # Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. # LLNL-CODE-647188 # # For details, see https://github.com/llnl/spack # Please also see the NOTICE and LICENSE files for our notice and the LGPL. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License (as # published by the Free Software Foundation) version 2.1, February 1999. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and # conditions of the GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * import os class Tmhmm(Package): """Transmembrane helices in proteins Note: A manual download is required for TMHMM. Spack will search your current directory for the download file. Alternatively, add this file to a mirror so that Spack can find it. 
For instructions on how to set up a mirror, see http://spack.readthedocs.io/en/latest/mirrors.html""" homepage = "http://www.cbs.dtu.dk/cgi-bin/nph-sw_request?tmhmm" url = "file://{0}/tmhmm-2.0c.Linux.tar.gz".format(os.getcwd()) version('2.0c', '359db0c4ecf84d1ade5786abe844d54e') depends_on('perl', type='run') def patch(self): with working_dir('bin'): tmhmm = FileFilter('tmhmmformat.pl') tmhmm.filter('#!/usr/local/bin/perl -w', '#!/usr/bin/env perl') tmhmm = FileFilter('tmhmm') tmhmm.filter('#!/usr/local/bin/perl', '#!/usr/bin/env perl') def install(self, spec, prefix): install_tree('bin', prefix.bin) install_tree('lib', prefix.lib)
lgpl-2.1
smpanaro/sublime-rdio
sublime_rdio.py
1
17869
import sublime, sublime_plugin from queue import Queue, Empty import threading import json import time from datetime import datetime import random from urllib.error import HTTPError import sys from Rdio.rdio import Rdio ARTIST_TYPE = "artist" ALBUM_TYPE = "album" TRACK_TYPE = "track" RDIO_ARTIST_TYPE = 'r' RDIO_ALBUM_TYPE = 'a' RDIO_TRACK_TYPE = 't' RDIO_API_KEY = None RDIO_API_SECRET = None VALID_API_CREDENTIALS = False try: # ST2 from urllib.request import urlopen from urllib.parse import quote_plus except: # ST3 from urllib2 import urlopen from urllib import quote_plus sublime3 = int(sublime.version()) >= 3000 if sublime3: from Rdio.applescript_rdio_player import AppleScriptRdioPlayer as RdioPlayer from Rdio.status_updater import MusicPlayerStatusUpdater else: from rdio_player import RdioPlayer from status_updater import MusicPlayerStatusUpdater def plugin_loaded(): global RDIO_API_KEY, RDIO_API_SECRET, VALID_API_CREDENTIALS s = sublime.load_settings("Rdio.sublime-settings") RDIO_API_KEY = s.get("rdio_api_key") RDIO_API_SECRET = s.get("rdio_api_secret") # Test to see if the credentials are valid. 
try: response = Rdio((RDIO_API_KEY, RDIO_API_SECRET)).call("get", {"keys":""}) VALID_API_CREDENTIALS = True except HTTPError: VALID_API_CREDENTIALS = False class RdioCommand(sublime_plugin.WindowCommand): def __init__(self, window): self.window = window self.player = RdioPlayer.Instance() if not self.player.status_updater: self.player.status_updater = MusicPlayerStatusUpdater(self.player) class RdioPlayCommand(RdioCommand): def run(self): self.player.play() class RdioPauseCommand(RdioCommand): def run(self): self.player.pause() class RdioNextTrackCommand(RdioCommand): def run(self): self.player.next() class RdioPreviousTrackCommand(RdioCommand): def run(self): self.player.previous() class RdioToggleShuffleCommand(RdioCommand): def run(self): self.player.toggle_shuffle() class RdioNowPlaying(RdioCommand): def run(self): self.player.show_status_message() class RdioSearchCommand(RdioCommand): """ Handle all of the mechanics around searching. This includes taking input, providing and accepting suggestions from the Rdio, and playing the result. """ def __init__(self, window): RdioCommand.__init__(self,window) self.just_opened = True self.typed = "" self.last_content = "" self.suggestion_selector = "→" self.selected_suggestion_index = None self.input_view_width = 0 # Two way producer / consumer self.last_sent_query = "" self.query_q = Queue() self.suggestion_q = Queue() self.suggestions = [] self.END_OF_SUGGESTIONS = '' self.STOP_THREAD_MESSAGE = 'END_OF_THREAD_TIME' # passed as a query to stop the thread settings = sublime.load_settings("Preferences.sublime-settings") self.user_tab_complete_value = settings.get("tab_completion", None) rdio_settings = sublime.load_settings("Rdio.sublime-settings") self.enable_search_suggestions = rdio_settings.get("enable_search_suggestions") def run(self): if not VALID_API_CREDENTIALS: sublime.error_message( "Sorry, search requires a valid API key and secret to work. 
" + "See the Rdio package settings (Preferences -> Package Settings -> Rdio) for more information.") return # Disable tab complete so that we can tab through suggestions. if self.user_tab_complete_value == False: settings = sublime.load_settings("Preferences.sublime-settings") settings.set("tab_completion", False) sublime.save_settings("Preferences.sublime-settings") self.typed = "" self.open_search_panel("") # Start search suggestion thread. if self.enable_search_suggestions: t = threading.Thread(target=self.run_search_suggestion_helper) t.start() def open_search_panel(self, content): tabbed = False self.just_opened = True self.last_content = content v = self.window.show_input_panel("Search Rdio", content, self.on_done, self.on_change, self.on_cancel) # Move cursor to end of query (before the suggestion text). content = v.substr(sublime.Region(0, v.size())) suggestion_start = v.find("Suggestions", 0).begin() if suggestion_start != -1: cursor_x = suggestion_start - 2 else: cursor_x = max(0, v.size()) pt = v.text_point(0, cursor_x) v.sel().clear() v.sel().add(sublime.Region(pt)) v.show(pt) self.input_view_length = v.viewport_extent()[0]//v.em_width() - 1 def on_change(self, content): """ Update the search field with suggestions, if necessary. Specifically, intercept the newly-typed letter. If it is a tab, highlight the next suggestion (if there are any suggestions). Also, submit the current search query to the query_q Queue for processing by the suggestion thread. Finally, display the most recent search suggestion list as retrieved from the suggestion_q Queue. """ # If search suggestions are disabled, we just take text input and wait for a "done" or "cancel" event. 
if not self.enable_search_suggestions: self.typed = content return MIN_QUERY_LENGTH = 2 tabbed = False if self.just_opened: self.just_opened = False return # allows ctrl-a + delete if len(content) == 0: self.typed = "" self.open_search_panel("") return new_c = content.split(" (Suggestions")[0][-1] if len(self.last_content) - 1 == len(content): # then backspace self.typed = self.typed[:-1] elif len(self.last_content) > 0 and len(content) == 1: # then ctrl-a and type a new character self.typed = content elif new_c == '\t': tabbed = True else: self.typed += new_c if len(self.typed) > MIN_QUERY_LENGTH: self.last_sent_query = self.typed self.query_q.put(self.typed) # send query to every two character differences # Fetch the latest suggestions. try: while True: self.suggestions = self.suggestion_q.get_nowait() time.sleep(0.1) except Empty: pass # Try to prevent unhelpful suggestions. if len(self.typed) < MIN_QUERY_LENGTH: self.suggestions = [] # Generate the text to display in the suggestions section. if tabbed and len(self.suggestions) > 0: if self.selected_suggestion_index is None: self.selected_suggestion_index = 0 else: self.selected_suggestion_index += 1 self.selected_suggestion_index %= len(self.suggestions) suggestion_names = list(list(zip(*(self.suggestions)))[0]) # [(a,b),(c,d),...] -> [a,c,...] suggestion_names[self.selected_suggestion_index] = self.suggestion_selector + suggestion_names[self.selected_suggestion_index] comma_separated_suggestions = ", ".join(suggestion_names) elif len(self.suggestions) > 0: suggestion_names = list(list(zip(*(self.suggestions)))[0]) comma_separated_suggestions = ", ".join(suggestion_names) else: comma_separated_suggestions = "" # Reset the highlighted selection if the query is changed. 
if not tabbed: self.selected_suggestion_index = None suggestion_string = "" if len(comma_separated_suggestions) > 0: suggestion_string = " (Suggestions[TAB to select]: {})".format(comma_separated_suggestions, self.END_OF_SUGGESTIONS) self.open_search_panel("{}{}{}".format(self.typed, suggestion_string, self.END_OF_SUGGESTIONS)) def on_done(self, final_query): self.query_q.put(self.STOP_THREAD_MESSAGE) # tell the thread to stop query, key = self.parse_selected_suggestion(final_query) if key == None: self.search('search', {'query':query, 'types':'Artist, Album, Track'}) elif key.startswith(RDIO_ARTIST_TYPE): self.display_artist_options(query, key) elif key.startswith(RDIO_ALBUM_TYPE): self.display_album_options(query, key) elif key.startswith(RDIO_TRACK_TYPE): self.player.play_track(key) self.restore_tab_setting() def on_cancel(self): self.query_q.put(self.STOP_THREAD_MESSAGE) # tell the thread to stop self.restore_tab_setting() def restore_tab_setting(self): """ Restore tab complete settings now that we're done. Only write them if the user explicitly set them to False, since the default is True. """ if self.user_tab_complete_value == False: settings = sublime.load_settings("Preferences.sublime-settings") settings.set("tab_completion", False) sublime.save_settings("Preferences.sublime-settings") def parse_selected_suggestion(self, final_query): """ Returns a tuple (query, key) depending on the contents of self.suggestions. key is a Rdio key or None of the query is just a search query. 
""" query = self.typed key = None if self.suggestion_selector in final_query: query = self.suggestions[self.selected_suggestion_index][0] key = self.suggestions[self.selected_suggestion_index][1] return (query, key) def get_suggestions(self, rdio_results): MAX_TEXT_LENGTH = self.input_view_length - len(self.typed) - len(" (Suggestions[TAB to select]: )") - 2 suggestions = [] for res in rdio_results['result']: if res['type'] == RDIO_ARTIST_TYPE: artist = res.get("name", None) key = res.get("key", None) t = (artist,key) if artist and t not in suggestions: suggestions.append(t) elif res['type'] == RDIO_ALBUM_TYPE: album = res.get("name", None) key = res.get("key", None) t = (album,key) if album and t not in suggestions: suggestions.append(t) elif res['type'] == RDIO_TRACK_TYPE: track = res.get("name", None) key = res.get("key", None) t = (track,key) if track and t not in suggestions: suggestions.append(t) else: continue #ignore other types of results s_list = list(zip(*suggestions))[0] if len(", ".join(s_list)) > MAX_TEXT_LENGTH: suggestions.pop() break return suggestions def run_search_suggestion_helper(self): """ Reads from the self.query_q Queue and searches the Rdio suggestions API. Places the results in the self.suggestions_q Queue in the form of: [("{suggestion}", "{Rdio Key}")] """ rdio = Rdio((RDIO_API_KEY, RDIO_API_SECRET)) new_query = "" last_query = None while True: try: while True: #Empty the queue. new_query = self.query_q.get_nowait() time.sleep(0.1) # very important, else these while True loops hog the cpu. 
except Empty: pass if new_query == self.STOP_THREAD_MESSAGE: break if new_query != last_query: last_query = new_query response = rdio.call('searchSuggestions', {'query':new_query}) suggestions = self.get_suggestions(response) self.suggestion_q.put(suggestions) time.sleep(0.1) def display_artist_options(self, query, key): self.window.show_quick_panel(["Songs by " + query, "Albums by " + query], lambda idx: self.handle_artist_selection(idx, key)) def handle_artist_selection(self, index, key): if index == 0: self.search("getTracksForArtist", {"artist":key, "count":"50"}) if index == 1: self.search("getAlbumsForArtist", {"artist":key, "count":"20"}) def display_album_options(self, query, key): self.window.show_quick_panel(["Play " + query, "Show tracks on " + query], lambda idx: self.handle_album_selection(idx, key, query)) def handle_album_selection(self, index, key, album_name): if index == 0: self.player.play_album(key, album_name) if index == 1: track_thread = ThreadedRdioTrackRequest(key, self) track_thread.setDaemon(True) track_thread.start() def search(self, query, params): url_thread = ThreadedRdioSearchRequest(query, params, self) url_thread.setDaemon(True) url_thread.start() def handle_search_response(self, method, response, error_message): """ Parse the various types of searches and display the results in the quick panel. 
""" MAX_RESULTS = 50 if error_message is not None: sublime.error_message("Unable to search:\n%s" % error_message) return if (method == "search" and response["result"]["number_results"] == 0) or \ (method == "getTracksForArtist" and len(response["result"]) == 0) or \ (method == "getAlbumsForArtist" and len(response["result"]) == 0) or \ (method == "getTracksForAlbum" and len(response) == 0): self.open_search_panel("No results found, try again?") return if method == "search": results = response["result"]["results"] elif method == "getTracksForArtist" or method == "getAlbumsForArtist": results = response["result"] elif method == "getTracksForAlbum": results = response rows = [] self.rdio_keys = [] self.result_names = [] # for use in further dialogs for r in results: if r['type'] == RDIO_TRACK_TYPE: song = r.get("name","") artists = r.get("artist","") album = r.get("album", "") rows.append([u"{0} by {1}".format(song, artists), u"{0}".format(album)]) self.result_names.append(song) self.rdio_keys.append(r.get("key", "")) elif r['type'] == RDIO_ALBUM_TYPE: name = r.get("name","") artists = r.get("artist", "") num_tracks = r.get("length", "") rows.append([u"{0} [Album]".format(name), u"by {0}".format(artists)]) self.result_names.append(name) self.rdio_keys.append(r.get("key", "")) elif r['type'] == RDIO_ARTIST_TYPE: name = r.get("name", "") rows.append([u"{0} [Artist]".format(name),""]) self.result_names.append(name) self.rdio_keys.append(r.get("key", "")) if len(rows) > MAX_RESULTS: break self.window.show_quick_panel(rows, self.handle_search_quick_panel_selection) def handle_search_quick_panel_selection(self, index): if index == -1: return # dialog was cancelled key = self.rdio_keys[index] if key.startswith(RDIO_ALBUM_TYPE): sublime.set_timeout(lambda: self.display_album_options(self.result_names[index], key), 10) elif key.startswith(RDIO_ARTIST_TYPE): sublime.set_timeout(lambda: self.display_artist_options(self.result_names[index], key), 10) else: 
self.player.play_track(key) class ThreadedRdioSearchRequest(threading.Thread): """ Given a Rdio API method and parameters, return the response via a callback. """ def __init__(self, method, params, caller): threading.Thread.__init__(self) self.method = method self.params = params self.caller = caller self.rdio = Rdio((RDIO_API_KEY, RDIO_API_SECRET)) def run(self): error = None try: response = self.rdio.call(self.method, self.params) if response['status'] != 'ok': error = "Rdio internal server error." except e: response = None error = e # Start playing on the main thread. sublime.set_timeout(lambda: self.caller.handle_search_response(self.method, response, error), 10) class ThreadedRdioTrackRequest(threading.Thread): """ Given a Rdio album key (e.g. "a123123") and a caller, returns a list of track information via a callback. """ def __init__(self, album_key, caller): threading.Thread.__init__(self) self.album_key = album_key self.caller = caller self.rdio = Rdio((RDIO_API_KEY, RDIO_API_SECRET)) def run(self): error = None try: album_response = self.rdio.call("get", {"keys":self.album_key}) track_keys = album_response["result"][self.album_key]["trackKeys"] track_response = self.rdio.call("get", {"keys":", ".join(track_keys)}) response = [] for k in track_keys: response.append(track_response["result"][k]) except e: response = None error = e # Start playing on the main thread. sublime.set_timeout(lambda: self.caller.handle_search_response("getTracksForAlbum", response, error), 10)
mit
e-gob/plataforma-kioscos-autoatencion
scripts/ansible-play/.venv/lib/python2.7/site-packages/ansible/modules/network/f5/bigip_node.py
16
16390
#!/usr/bin/python # -*- coding: utf-8 -*- # # (c) 2013, Matt Hite <mhite@hotmail.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: bigip_node short_description: "Manages F5 BIG-IP LTM nodes" description: - "Manages F5 BIG-IP LTM nodes via iControl SOAP API" version_added: "1.4" author: - Matt Hite (@mhite) - Tim Rupp (@caphrim007) notes: - "Requires BIG-IP software version >= 11" - "F5 developed module 'bigsuds' required (see http://devcentral.f5.com)" - "Best run as a local_action in your playbook" requirements: - bigsuds options: state: description: - Pool member state required: true default: present choices: ['present', 'absent'] aliases: [] session_state: description: - Set new session availability status for node version_added: "1.9" required: false default: null choices: ['enabled', 'disabled'] aliases: [] monitor_state: description: - Set monitor availability status for node version_added: "1.9" required: false default: null choices: ['enabled', 'disabled'] aliases: [] partition: description: - Partition required: false default: 'Common' choices: [] aliases: [] name: description: - "Node name" required: false default: null choices: [] monitor_type: description: - Monitor rule type when monitors > 1 version_added: "2.2" required: False 
default: null choices: ['and_list', 'm_of_n'] aliases: [] quorum: description: - Monitor quorum value when monitor_type is m_of_n version_added: "2.2" required: False default: null choices: [] aliases: [] monitors: description: - Monitor template name list. Always use the full path to the monitor. version_added: "2.2" required: False default: null choices: [] aliases: [] host: description: - "Node IP. Required when state=present and node does not exist. Error when state=absent." required: true default: null choices: [] aliases: ['address', 'ip'] description: description: - "Node description." required: false default: null choices: [] extends_documentation_fragment: f5 ''' EXAMPLES = ''' - name: Add node bigip_node: server: "lb.mydomain.com" user: "admin" password: "secret" state: "present" partition: "Common" host: "10.20.30.40" name: "10.20.30.40" # Note that the BIG-IP automatically names the node using the # IP address specified in previous play's host parameter. # Future plays referencing this node no longer use the host # parameter but instead use the name parameter. # Alternatively, you could have specified a name with the # name parameter when state=present. - name: Add node with a single 'ping' monitor bigip_node: server: "lb.mydomain.com" user: "admin" password: "secret" state: "present" partition: "Common" host: "10.20.30.40" name: "mytestserver" monitors: - /Common/icmp delegate_to: localhost - name: Modify node description bigip_node: server: "lb.mydomain.com" user: "admin" password: "secret" state: "present" partition: "Common" name: "10.20.30.40" description: "Our best server yet" delegate_to: localhost - name: Delete node bigip_node: server: "lb.mydomain.com" user: "admin" password: "secret" state: "absent" partition: "Common" name: "10.20.30.40" # The BIG-IP GUI doesn't map directly to the API calls for "Node -> # General Properties -> State". The following states map to API monitor # and session states. 
# # Enabled (all traffic allowed): # monitor_state=enabled, session_state=enabled # Disabled (only persistent or active connections allowed): # monitor_state=enabled, session_state=disabled # Forced offline (only active connections allowed): # monitor_state=disabled, session_state=disabled # # See https://devcentral.f5.com/questions/icontrol-equivalent-call-for-b-node-down - name: Force node offline bigip_node: server: "lb.mydomain.com" user: "admin" password: "mysecret" state: "present" session_state: "disabled" monitor_state: "disabled" partition: "Common" name: "10.20.30.40" ''' def node_exists(api, address): # hack to determine if node exists result = False try: api.LocalLB.NodeAddressV2.get_object_status(nodes=[address]) result = True except bigsuds.OperationFailed as e: if "was not found" in str(e): result = False else: # genuine exception raise return result def create_node_address(api, address, name): try: api.LocalLB.NodeAddressV2.create( nodes=[name], addresses=[address], limits=[0] ) result = True desc = "" except bigsuds.OperationFailed as e: if "already exists" in str(e): result = False desc = "referenced name or IP already in use" else: # genuine exception raise return (result, desc) def get_node_address(api, name): return api.LocalLB.NodeAddressV2.get_address(nodes=[name])[0] def delete_node_address(api, address): try: api.LocalLB.NodeAddressV2.delete_node_address(nodes=[address]) result = True desc = "" except bigsuds.OperationFailed as e: if "is referenced by a member of pool" in str(e): result = False desc = "node referenced by pool" else: # genuine exception raise return (result, desc) def set_node_description(api, name, description): api.LocalLB.NodeAddressV2.set_description(nodes=[name], descriptions=[description]) def get_node_description(api, name): return api.LocalLB.NodeAddressV2.get_description(nodes=[name])[0] def set_node_session_enabled_state(api, name, session_state): session_state = "STATE_%s" % session_state.strip().upper() 
api.LocalLB.NodeAddressV2.set_session_enabled_state(nodes=[name], states=[session_state]) def get_node_session_status(api, name): result = api.LocalLB.NodeAddressV2.get_session_status(nodes=[name])[0] result = result.split("SESSION_STATUS_")[-1].lower() return result def set_node_monitor_state(api, name, monitor_state): monitor_state = "STATE_%s" % monitor_state.strip().upper() api.LocalLB.NodeAddressV2.set_monitor_state(nodes=[name], states=[monitor_state]) def get_node_monitor_status(api, name): result = api.LocalLB.NodeAddressV2.get_monitor_status(nodes=[name])[0] result = result.split("MONITOR_STATUS_")[-1].lower() return result def get_monitors(api, name): result = api.LocalLB.NodeAddressV2.get_monitor_rule(nodes=[name])[0] monitor_type = result['type'].split("MONITOR_RULE_TYPE_")[-1].lower() quorum = result['quorum'] monitor_templates = result['monitor_templates'] return (monitor_type, quorum, monitor_templates) def set_monitors(api, name, monitor_type, quorum, monitor_templates): monitor_type = "MONITOR_RULE_TYPE_%s" % monitor_type.strip().upper() monitor_rule = {'type': monitor_type, 'quorum': quorum, 'monitor_templates': monitor_templates} api.LocalLB.NodeAddressV2.set_monitor_rule(nodes=[name], monitor_rules=[monitor_rule]) def main(): monitor_type_choices = ['and_list', 'm_of_n'] argument_spec = f5_argument_spec() meta_args = dict( session_state=dict(type='str', choices=['enabled', 'disabled']), monitor_state=dict(type='str', choices=['enabled', 'disabled']), name=dict(type='str', required=True), host=dict(type='str', aliases=['address', 'ip']), description=dict(type='str'), monitor_type=dict(type='str', choices=monitor_type_choices), quorum=dict(type='int'), monitors=dict(type='list') ) argument_spec.update(meta_args) module = AnsibleModule( argument_spec=argument_spec, supports_check_mode=True ) if module.params['validate_certs']: import ssl if not hasattr(ssl, 'SSLContext'): module.fail_json( msg='bigsuds does not support verifying certificates with 
python < 2.7.9. Either update python or set validate_certs=False on the task' ) server = module.params['server'] server_port = module.params['server_port'] user = module.params['user'] password = module.params['password'] state = module.params['state'] partition = module.params['partition'] validate_certs = module.params['validate_certs'] session_state = module.params['session_state'] monitor_state = module.params['monitor_state'] host = module.params['host'] name = module.params['name'] address = fq_name(partition, name) description = module.params['description'] monitor_type = module.params['monitor_type'] if monitor_type: monitor_type = monitor_type.lower() quorum = module.params['quorum'] monitors = module.params['monitors'] if monitors: monitors = [] for monitor in module.params['monitors']: monitors.append(fq_name(partition, monitor)) # sanity check user supplied values if state == 'absent' and host is not None: module.fail_json(msg="host parameter invalid when state=absent") if monitors: if len(monitors) == 1: # set default required values for single monitor quorum = 0 monitor_type = 'single' elif len(monitors) > 1: if not monitor_type: module.fail_json(msg="monitor_type required for monitors > 1") if monitor_type == 'm_of_n' and not quorum: module.fail_json(msg="quorum value required for monitor_type m_of_n") if monitor_type != 'm_of_n': quorum = 0 elif monitor_type: # no monitors specified but monitor_type exists module.fail_json(msg="monitor_type require monitors parameter") elif quorum is not None: # no monitors specified but quorum exists module.fail_json(msg="quorum requires monitors parameter") try: api = bigip_api(server, user, password, validate_certs, port=server_port) result = {'changed': False} # default if state == 'absent': if node_exists(api, address): if not module.check_mode: deleted, desc = delete_node_address(api, address) if not deleted: module.fail_json(msg="unable to delete: %s" % desc) else: result = {'changed': True} else: # 
check-mode return value result = {'changed': True} elif state == 'present': if not node_exists(api, address): if host is None: module.fail_json(msg="host parameter required when " "state=present and node does not exist") if not module.check_mode: created, desc = create_node_address(api, address=host, name=address) if not created: module.fail_json(msg="unable to create: %s" % desc) else: result = {'changed': True} if session_state is not None: set_node_session_enabled_state(api, address, session_state) result = {'changed': True} if monitor_state is not None: set_node_monitor_state(api, address, monitor_state) result = {'changed': True} if description is not None: set_node_description(api, address, description) result = {'changed': True} if monitors: set_monitors(api, address, monitor_type, quorum, monitors) else: # check-mode return value result = {'changed': True} else: # node exists -- potentially modify attributes if host is not None: if get_node_address(api, address) != host: module.fail_json(msg="Changing the node address is " "not supported by the API; " "delete and recreate the node.") if session_state is not None: session_status = get_node_session_status(api, address) if session_state == 'enabled' and \ session_status == 'forced_disabled': if not module.check_mode: set_node_session_enabled_state(api, address, session_state) result = {'changed': True} elif session_state == 'disabled' and \ session_status != 'force_disabled': if not module.check_mode: set_node_session_enabled_state(api, address, session_state) result = {'changed': True} if monitor_state is not None: monitor_status = get_node_monitor_status(api, address) if monitor_state == 'enabled' and \ monitor_status == 'forced_down': if not module.check_mode: set_node_monitor_state(api, address, monitor_state) result = {'changed': True} elif monitor_state == 'disabled' and \ monitor_status != 'forced_down': if not module.check_mode: set_node_monitor_state(api, address, monitor_state) result = {'changed': 
True} if description is not None: if get_node_description(api, address) != description: if not module.check_mode: set_node_description(api, address, description) result = {'changed': True} if monitors: t_monitor_type, t_quorum, t_monitor_templates = get_monitors(api, address) if (t_monitor_type != monitor_type) or (t_quorum != quorum) or (set(t_monitor_templates) != set(monitors)): if not module.check_mode: set_monitors(api, address, monitor_type, quorum, monitors) result = {'changed': True} except Exception as e: module.fail_json(msg="received exception: %s" % e) module.exit_json(**result) from ansible.module_utils.basic import * from ansible.module_utils.f5_utils import * if __name__ == '__main__': main()
bsd-3-clause
sbstp/streamlink
tests/test_plugin_tvplayer.py
3
1093
import unittest from streamlink.plugins.tvplayer import TVPlayer class TestPluginTVPlayer(unittest.TestCase): def test_can_handle_url(self): # should match self.assertTrue(TVPlayer.can_handle_url("http://tvplayer.com/watch/")) self.assertTrue(TVPlayer.can_handle_url("http://www.tvplayer.com/watch/")) self.assertTrue(TVPlayer.can_handle_url("http://tvplayer.com/watch")) self.assertTrue(TVPlayer.can_handle_url("http://www.tvplayer.com/watch")) self.assertTrue(TVPlayer.can_handle_url("http://tvplayer.com/watch/dave")) self.assertTrue(TVPlayer.can_handle_url("http://www.tvplayer.com/watch/itv")) self.assertTrue(TVPlayer.can_handle_url("https://www.tvplayer.com/watch/itv")) self.assertTrue(TVPlayer.can_handle_url("https://tvplayer.com/watch/itv")) # shouldn't match self.assertFalse(TVPlayer.can_handle_url("http://www.tvplayer.com/")) self.assertFalse(TVPlayer.can_handle_url("http://www.tvcatchup.com/")) self.assertFalse(TVPlayer.can_handle_url("http://www.youtube.com/"))
bsd-2-clause
mbrukman/libcloud
docs/examples/compute/openstack/hpcloud.py
61
1075
from libcloud.compute.types import Provider from libcloud.compute.providers import get_driver HPCLOUD_AUTH_URL_USWEST = \ 'https://region-a.geo-1.identity.hpcloudsvc.com:35357/v2.0/tokens' HPCLOUD_AUTH_URL_USEAST = \ 'https://region-b.geo-1.identity.hpcloudsvc.com:35357/v2.0/tokens' OpenStack = get_driver(Provider.OPENSTACK) # HP Cloud US West driver = OpenStack('your_auth_username', 'your_auth_password', ex_force_auth_version='2.0_password', ex_force_auth_url=HPCLOUD_AUTH_URL_USWEST, ex_tenant_name='your_tenant_name', ex_force_service_region='region-a.geo-1', ex_force_service_name='Compute') # HP Cloud US East driver = OpenStack('your_auth_username', 'your_auth_password', ex_force_auth_version='2.0_password', ex_force_auth_url=HPCLOUD_AUTH_URL_USEAST, ex_tenant_name='your_tenant_name', ex_force_service_region='region-b.geo-1', ex_force_service_name='Compute')
apache-2.0
Vishwanath17/words-battle
third_party/tools/upload.py
4
86298
#!/usr/bin/python # # Copyright 2007 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tool for uploading diffs from a version control system to the codereview app. Usage summary: upload.py [options] [-- diff_options] [path...] Diff options are passed to the diff command of the underlying system. Supported version control systems: Git Mercurial Subversion Perforce CVS It is important for Git/Mercurial users to specify a tree/node/branch to diff against by using the '--rev' option. """ # This code is derived from appcfg.py in the App Engine SDK (open source), # and from ASPN recipe #146306. import ConfigParser import cookielib import errno import fnmatch import getpass import logging import marshal import mimetypes import optparse import os import re import socket import subprocess import sys import urllib import urllib2 import urlparse # The md5 module was deprecated in Python 2.5. try: from hashlib import md5 except ImportError: from md5 import md5 try: import readline except ImportError: pass try: import keyring except ImportError: keyring = None # The logging verbosity: # 0: Errors only. # 1: Status messages. # 2: Info logs. # 3: Debug logs. verbosity = 1 # The account type used for authentication. # This line could be changed by the review server (see handler for # upload.py). AUTH_ACCOUNT_TYPE = "GOOGLE" # URL of the default review server. As for AUTH_ACCOUNT_TYPE, this line could be # changed by the review server (see handler for upload.py). 
DEFAULT_REVIEW_SERVER = "codereview.appspot.com" # Max size of patch or base file. MAX_UPLOAD_SIZE = 900 * 1024 # Constants for version control names. Used by GuessVCSName. VCS_GIT = "Git" VCS_MERCURIAL = "Mercurial" VCS_SUBVERSION = "Subversion" VCS_PERFORCE = "Perforce" VCS_CVS = "CVS" VCS_UNKNOWN = "Unknown" VCS_ABBREVIATIONS = { VCS_MERCURIAL.lower(): VCS_MERCURIAL, "hg": VCS_MERCURIAL, VCS_SUBVERSION.lower(): VCS_SUBVERSION, "svn": VCS_SUBVERSION, VCS_PERFORCE.lower(): VCS_PERFORCE, "p4": VCS_PERFORCE, VCS_GIT.lower(): VCS_GIT, VCS_CVS.lower(): VCS_CVS, } # The result of parsing Subversion's [auto-props] setting. svn_auto_props_map = None def GetEmail(prompt): """Prompts the user for their email address and returns it. The last used email address is saved to a file and offered up as a suggestion to the user. If the user presses enter without typing in anything the last used email address is used. If the user enters a new address, it is saved for next time we prompt. """ last_email_file_name = os.path.expanduser("~/.last_codereview_email_address") last_email = "" if os.path.exists(last_email_file_name): try: last_email_file = open(last_email_file_name, "r") last_email = last_email_file.readline().strip("\n") last_email_file.close() prompt += " [%s]" % last_email except IOError, e: pass email = raw_input(prompt + ": ").strip() if email: try: last_email_file = open(last_email_file_name, "w") last_email_file.write(email) last_email_file.close() except IOError, e: pass else: email = last_email return email def StatusUpdate(msg): """Print a status message to stdout. If 'verbosity' is greater than 0, print the message. Args: msg: The string to print. 
""" if verbosity > 0: print msg def ErrorExit(msg): """Print an error message to stderr and exit.""" print >>sys.stderr, msg sys.exit(1) class ClientLoginError(urllib2.HTTPError): """Raised to indicate there was an error authenticating with ClientLogin.""" def __init__(self, url, code, msg, headers, args): urllib2.HTTPError.__init__(self, url, code, msg, headers, None) self.args = args self.reason = args["Error"] self.info = args.get("Info", None) class AbstractRpcServer(object): """Provides a common interface for a simple RPC server.""" def __init__(self, host, auth_function, host_override=None, extra_headers={}, save_cookies=False, account_type=AUTH_ACCOUNT_TYPE): """Creates a new HttpRpcServer. Args: host: The host to send requests to. auth_function: A function that takes no arguments and returns an (email, password) tuple when called. Will be called if authentication is required. host_override: The host header to send to the server (defaults to host). extra_headers: A dict of extra headers to append to every request. save_cookies: If True, save the authentication cookies to local disk. If False, use an in-memory cookiejar instead. Subclasses must implement this functionality. Defaults to False. account_type: Account type used for authentication. Defaults to AUTH_ACCOUNT_TYPE. """ self.host = host if (not self.host.startswith("http://") and not self.host.startswith("https://")): self.host = "http://" + self.host self.host_override = host_override self.auth_function = auth_function self.authenticated = False self.extra_headers = extra_headers self.save_cookies = save_cookies self.account_type = account_type self.opener = self._GetOpener() if self.host_override: logging.info("Server: %s; Host: %s", self.host, self.host_override) else: logging.info("Server: %s", self.host) def _GetOpener(self): """Returns an OpenerDirector for making HTTP requests. Returns: A urllib2.OpenerDirector object. 
""" raise NotImplementedError() def _CreateRequest(self, url, data=None): """Creates a new urllib request.""" logging.debug("Creating request for: '%s' with payload:\n%s", url, data) req = urllib2.Request(url, data=data) if self.host_override: req.add_header("Host", self.host_override) for key, value in self.extra_headers.iteritems(): req.add_header(key, value) return req def _GetAuthToken(self, email, password): """Uses ClientLogin to authenticate the user, returning an auth token. Args: email: The user's email address password: The user's password Raises: ClientLoginError: If there was an error authenticating with ClientLogin. HTTPError: If there was some other form of HTTP error. Returns: The authentication token returned by ClientLogin. """ account_type = self.account_type if self.host.endswith(".google.com"): # Needed for use inside Google. account_type = "HOSTED" req = self._CreateRequest( url="https://www.google.com/accounts/ClientLogin", data=urllib.urlencode({ "Email": email, "Passwd": password, "service": "ah", "source": "rietveld-codereview-upload", "accountType": account_type, }), ) try: response = self.opener.open(req) response_body = response.read() response_dict = dict(x.split("=") for x in response_body.split("\n") if x) return response_dict["Auth"] except urllib2.HTTPError, e: if e.code == 403: body = e.read() response_dict = dict(x.split("=", 1) for x in body.split("\n") if x) raise ClientLoginError(req.get_full_url(), e.code, e.msg, e.headers, response_dict) else: raise def _GetAuthCookie(self, auth_token): """Fetches authentication cookies for an authentication token. Args: auth_token: The authentication token returned by ClientLogin. Raises: HTTPError: If there was an error fetching the authentication cookies. """ # This is a dummy value to allow us to identify when we're successful. 
continue_location = "http://localhost/" args = {"continue": continue_location, "auth": auth_token} req = self._CreateRequest("%s/_ah/login?%s" % (self.host, urllib.urlencode(args))) try: response = self.opener.open(req) except urllib2.HTTPError, e: response = e if (response.code != 302 or response.info()["location"] != continue_location): raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg, response.headers, response.fp) self.authenticated = True def _Authenticate(self): """Authenticates the user. The authentication process works as follows: 1) We get a username and password from the user 2) We use ClientLogin to obtain an AUTH token for the user (see http://code.google.com/apis/accounts/AuthForInstalledApps.html). 3) We pass the auth token to /_ah/login on the server to obtain an authentication cookie. If login was successful, it tries to redirect us to the URL we provided. If we attempt to access the upload API without first obtaining an authentication cookie, it returns a 401 response (or a 302) and directs us to authenticate ourselves with ClientLogin. """ for i in range(3): credentials = self.auth_function() try: auth_token = self._GetAuthToken(credentials[0], credentials[1]) except ClientLoginError, e: print >>sys.stderr, '' if e.reason == "BadAuthentication": if e.info == "InvalidSecondFactor": print >>sys.stderr, ( "Use an application-specific password instead " "of your regular account password.\n" "See http://www.google.com/" "support/accounts/bin/answer.py?answer=185833") else: print >>sys.stderr, "Invalid username or password." elif e.reason == "CaptchaRequired": print >>sys.stderr, ( "Please go to\n" "https://www.google.com/accounts/DisplayUnlockCaptcha\n" "and verify you are a human. Then try again.\n" "If you are using a Google Apps account the URL is:\n" "https://www.google.com/a/yourdomain.com/UnlockCaptcha") elif e.reason == "NotVerified": print >>sys.stderr, "Account not verified." 
elif e.reason == "TermsNotAgreed": print >>sys.stderr, "User has not agreed to TOS." elif e.reason == "AccountDeleted": print >>sys.stderr, "The user account has been deleted." elif e.reason == "AccountDisabled": print >>sys.stderr, "The user account has been disabled." break elif e.reason == "ServiceDisabled": print >>sys.stderr, ("The user's access to the service has been " "disabled.") elif e.reason == "ServiceUnavailable": print >>sys.stderr, "The service is not available; try again later." else: # Unknown error. raise print >>sys.stderr, '' continue self._GetAuthCookie(auth_token) return def Send(self, request_path, payload=None, content_type="application/octet-stream", timeout=None, extra_headers=None, **kwargs): """Sends an RPC and returns the response. Args: request_path: The path to send the request to, eg /api/appversion/create. payload: The body of the request, or None to send an empty request. content_type: The Content-Type header to use. timeout: timeout in seconds; default None i.e. no timeout. (Note: for large requests on OS X, the timeout doesn't work right.) extra_headers: Dict containing additional HTTP headers that should be included in the request (string header names mapped to their values), or None to not include any additional headers. kwargs: Any keyword arguments are converted into query string parameters. Returns: The response body, as a string. """ # TODO: Don't require authentication. Let the server say # whether it is necessary. if not self.authenticated: self._Authenticate() old_timeout = socket.getdefaulttimeout() socket.setdefaulttimeout(timeout) try: tries = 0 while True: tries += 1 args = dict(kwargs) url = "%s%s" % (self.host, request_path) if args: url += "?" 
+ urllib.urlencode(args) req = self._CreateRequest(url=url, data=payload) req.add_header("Content-Type", content_type) if extra_headers: for header, value in extra_headers.items(): req.add_header(header, value) try: f = self.opener.open(req) response = f.read() f.close() return response except urllib2.HTTPError, e: if tries > 3: raise elif e.code == 401 or e.code == 302: self._Authenticate() ## elif e.code >= 500 and e.code < 600: ## # Server Error - try again. ## continue elif e.code == 301: # Handle permanent redirect manually. url = e.info()["location"] url_loc = urlparse.urlparse(url) self.host = '%s://%s' % (url_loc[0], url_loc[1]) else: raise finally: socket.setdefaulttimeout(old_timeout) class HttpRpcServer(AbstractRpcServer): """Provides a simplified RPC-style interface for HTTP requests.""" def _Authenticate(self): """Save the cookie jar after authentication.""" super(HttpRpcServer, self)._Authenticate() if self.save_cookies: StatusUpdate("Saving authentication cookies to %s" % self.cookie_file) self.cookie_jar.save() def _GetOpener(self): """Returns an OpenerDirector that supports cookies and ignores redirects. Returns: A urllib2.OpenerDirector object. """ opener = urllib2.OpenerDirector() opener.add_handler(urllib2.ProxyHandler()) opener.add_handler(urllib2.UnknownHandler()) opener.add_handler(urllib2.HTTPHandler()) opener.add_handler(urllib2.HTTPDefaultErrorHandler()) opener.add_handler(urllib2.HTTPSHandler()) opener.add_handler(urllib2.HTTPErrorProcessor()) if self.save_cookies: self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies") self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file) if os.path.exists(self.cookie_file): try: self.cookie_jar.load() self.authenticated = True StatusUpdate("Loaded authentication cookies from %s" % self.cookie_file) except (cookielib.LoadError, IOError): # Failed to load cookies - just ignore them. 
pass else: # Create an empty cookie file with mode 600 fd = os.open(self.cookie_file, os.O_CREAT, 0600) os.close(fd) # Always chmod the cookie file os.chmod(self.cookie_file, 0600) else: # Don't save cookies across runs of update.py. self.cookie_jar = cookielib.CookieJar() opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar)) return opener class CondensedHelpFormatter(optparse.IndentedHelpFormatter): """Frees more horizontal space by removing indentation from group options and collapsing arguments between short and long, e.g. '-o ARG, --opt=ARG' to -o --opt ARG""" def format_heading(self, heading): return "%s:\n" % heading def format_option(self, option): self.dedent() res = optparse.HelpFormatter.format_option(self, option) self.indent() return res def format_option_strings(self, option): self.set_long_opt_delimiter(" ") optstr = optparse.HelpFormatter.format_option_strings(self, option) optlist = optstr.split(", ") if len(optlist) > 1: if option.takes_value(): # strip METAVAR from all but the last option optlist = [x.split()[0] for x in optlist[:-1]] + optlist[-1:] optstr = " ".join(optlist) return optstr parser = optparse.OptionParser( usage="%prog [options] [-- diff_options] [path...]", add_help_option=False, formatter=CondensedHelpFormatter() ) parser.add_option("-h", "--help", action="store_true", help="Show this help message and exit.") parser.add_option("-y", "--assume_yes", action="store_true", dest="assume_yes", default=False, help="Assume that the answer to yes/no questions is 'yes'.") # Logging group = parser.add_option_group("Logging options") group.add_option("-q", "--quiet", action="store_const", const=0, dest="verbose", help="Print errors only.") group.add_option("-v", "--verbose", action="store_const", const=2, dest="verbose", default=1, help="Print info level logs.") group.add_option("--noisy", action="store_const", const=3, dest="verbose", help="Print all logs.") group.add_option("--print_diffs", dest="print_diffs", 
action="store_true", help="Print full diffs.") # Review server group = parser.add_option_group("Review server options") group.add_option("-s", "--server", action="store", dest="server", default=DEFAULT_REVIEW_SERVER, metavar="SERVER", help=("The server to upload to. The format is host[:port]. " "Defaults to '%default'.")) group.add_option("-e", "--email", action="store", dest="email", metavar="EMAIL", default=None, help="The username to use. Will prompt if omitted.") group.add_option("-H", "--host", action="store", dest="host", metavar="HOST", default=None, help="Overrides the Host header sent with all RPCs.") group.add_option("--no_cookies", action="store_false", dest="save_cookies", default=True, help="Do not save authentication cookies to local disk.") group.add_option("--account_type", action="store", dest="account_type", metavar="TYPE", default=AUTH_ACCOUNT_TYPE, choices=["GOOGLE", "HOSTED"], help=("Override the default account type " "(defaults to '%default', " "valid choices are 'GOOGLE' and 'HOSTED').")) # Issue group = parser.add_option_group("Issue options") group.add_option("-d", "--description", action="store", dest="description", metavar="DESCRIPTION", default=None, help="Optional description when creating an issue.") group.add_option("-f", "--description_file", action="store", dest="description_file", metavar="DESCRIPTION_FILE", default=None, help="Optional path of a file that contains " "the description when creating an issue.") group.add_option("-r", "--reviewers", action="store", dest="reviewers", metavar="REVIEWERS", default=None, help="Add reviewers (comma separated email addresses).") group.add_option("--cc", action="store", dest="cc", metavar="CC", default=None, help="Add CC (comma separated email addresses).") group.add_option("--private", action="store_true", dest="private", default=False, help="Make the issue restricted to reviewers and those CCed") # Upload options group = parser.add_option_group("Patch options") group.add_option("-m", 
"--message", action="store", dest="message", metavar="MESSAGE", default=None, help="A message to identify the patch. " "Will prompt if omitted.") group.add_option("-i", "--issue", type="int", action="store", metavar="ISSUE", default=None, help="Issue number to which to add. Defaults to new issue.") group.add_option("--base_url", action="store", dest="base_url", default=None, help="Base URL path for files (listed as \"Base URL\" when " "viewing issue). If omitted, will be guessed automatically " "for SVN repos and left blank for others.") group.add_option("--download_base", action="store_true", dest="download_base", default=False, help="Base files will be downloaded by the server " "(side-by-side diffs may not work on files with CRs).") group.add_option("--rev", action="store", dest="revision", metavar="REV", default=None, help="Base revision/branch/tree to diff against. Use " "rev1:rev2 range to review already committed changeset.") group.add_option("--send_mail", action="store_true", dest="send_mail", default=False, help="Send notification email to reviewers.") group.add_option("-p", "--send_patch", action="store_true", dest="send_patch", default=False, help="Same as --send_mail, but include diff as an " "attachment, and prepend email subject with 'PATCH:'.") group.add_option("--vcs", action="store", dest="vcs", metavar="VCS", default=None, help=("Version control system (optional, usually upload.py " "already guesses the right VCS).")) group.add_option("--emulate_svn_auto_props", action="store_true", dest="emulate_svn_auto_props", default=False, help=("Emulate Subversion's auto properties feature.")) # Perforce-specific group = parser.add_option_group("Perforce-specific options " "(overrides P4 environment variables)") group.add_option("--p4_port", action="store", dest="p4_port", metavar="P4_PORT", default=None, help=("Perforce server and port (optional)")) group.add_option("--p4_changelist", action="store", dest="p4_changelist", metavar="P4_CHANGELIST", 
default=None, help=("Perforce changelist id")) group.add_option("--p4_client", action="store", dest="p4_client", metavar="P4_CLIENT", default=None, help=("Perforce client/workspace")) group.add_option("--p4_user", action="store", dest="p4_user", metavar="P4_USER", default=None, help=("Perforce user")) def GetRpcServer(server, email=None, host_override=None, save_cookies=True, account_type=AUTH_ACCOUNT_TYPE): """Returns an instance of an AbstractRpcServer. Args: server: String containing the review server URL. email: String containing user's email address. host_override: If not None, string containing an alternate hostname to use in the host header. save_cookies: Whether authentication cookies should be saved to disk. account_type: Account type for authentication, either 'GOOGLE' or 'HOSTED'. Defaults to AUTH_ACCOUNT_TYPE. Returns: A new AbstractRpcServer, on which RPC calls can be made. """ rpc_server_class = HttpRpcServer # If this is the dev_appserver, use fake authentication. host = (host_override or server).lower() if re.match(r'(http://)?localhost([:/]|$)', host): if email is None: email = "test@example.com" logging.info("Using debug user %s. Override with --email" % email) server = rpc_server_class( server, lambda: (email, "password"), host_override=host_override, extra_headers={"Cookie": 'dev_appserver_login="%s:False"' % email}, save_cookies=save_cookies, account_type=account_type) # Don't try to talk to ClientLogin. server.authenticated = True return server def GetUserCredentials(): """Prompts the user for a username and password.""" # Create a local alias to the email variable to avoid Python's crazy # scoping rules. global keyring local_email = email if local_email is None: local_email = GetEmail("Email (login for uploading to %s)" % server) password = None if keyring: try: password = keyring.get_password(host, local_email) except: # Sadly, we have to trap all errors here as # gnomekeyring.IOError inherits from object. 
:/ print "Failed to get password from keyring" keyring = None if password is not None: print "Using password from system keyring." else: password = getpass.getpass("Password for %s: " % local_email) if keyring: answer = raw_input("Store password in system keyring?(y/N) ").strip() if answer == "y": keyring.set_password(host, local_email, password) return (local_email, password) return rpc_server_class(server, GetUserCredentials, host_override=host_override, save_cookies=save_cookies) def EncodeMultipartFormData(fields, files): """Encode form fields for multipart/form-data. Args: fields: A sequence of (name, value) elements for regular form fields. files: A sequence of (name, filename, value) elements for data to be uploaded as files. Returns: (content_type, body) ready for httplib.HTTP instance. Source: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306 """ BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-' CRLF = '\r\n' lines = [] for (key, value) in fields: lines.append('--' + BOUNDARY) lines.append('Content-Disposition: form-data; name="%s"' % key) lines.append('') if isinstance(value, unicode): value = value.encode('utf-8') lines.append(value) for (key, filename, value) in files: lines.append('--' + BOUNDARY) lines.append('Content-Disposition: form-data; name="%s"; filename="%s"' % (key, filename)) lines.append('Content-Type: %s' % GetContentType(filename)) lines.append('') if isinstance(value, unicode): value = value.encode('utf-8') lines.append(value) lines.append('--' + BOUNDARY + '--') lines.append('') body = CRLF.join(lines) content_type = 'multipart/form-data; boundary=%s' % BOUNDARY return content_type, body def GetContentType(filename): """Helper to guess the content-type from the filename.""" return mimetypes.guess_type(filename)[0] or 'application/octet-stream' # Use a shell for subcommands on Windows to get a PATH search. 
use_shell = sys.platform.startswith("win")

def RunShellWithReturnCodeAndStderr(command, print_output=False,
                                    universal_newlines=True,
                                    env=os.environ):
  """Executes a command and returns the output from stdout, stderr and the
  return code.

  Args:
    command: Command to execute.
    print_output: If True, the output is printed to stdout.
                  If False, both stdout and stderr are ignored.
    universal_newlines: Use universal_newlines flag (default: True).
    env: Environment for the child process; it is copied before being
         modified, the caller's mapping is never mutated.

  Returns:
    Tuple (stdout, stderr, return code)
  """
  logging.info("Running %s", command)
  # Run the child under the C locale so its messages are not translated;
  # callers match English text in the output (e.g. SubversionVCS.GetStatus
  # matches "svn: Unable to find repository location ...").
  env = env.copy()
  env['LC_MESSAGES'] = 'C'
  p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                       shell=use_shell, universal_newlines=universal_newlines,
                       env=env)
  if print_output:
    output_array = []
    while True:
      line = p.stdout.readline()
      if not line:
        break
      print line.strip("\n")
      output_array.append(line)
    output = "".join(output_array)
  else:
    output = p.stdout.read()
  p.wait()
  errout = p.stderr.read()
  if print_output and errout:
    print >>sys.stderr, errout
  p.stdout.close()
  p.stderr.close()
  return output, errout, p.returncode

def RunShellWithReturnCode(command, print_output=False,
                           universal_newlines=True,
                           env=os.environ):
  """Executes a command and returns the output from stdout and the return code."""
  out, err, retcode = RunShellWithReturnCodeAndStderr(command, print_output,
                                                      universal_newlines, env)
  return out, retcode

def RunShell(command, silent_ok=False, universal_newlines=True,
             print_output=False, env=os.environ):
  """Executes a command and returns its stdout, exiting the program on error.

  Args:
    command: Command to execute.
    silent_ok: If False (the default), empty stdout is treated as an error.
    universal_newlines: Use universal_newlines flag (default: True).
    print_output: If True, the output is printed to stdout.
    env: Environment for the child process.

  Returns:
    The command's stdout as a string.
  """
  data, retcode = RunShellWithReturnCode(command, print_output,
                                         universal_newlines, env)
  if retcode:
    ErrorExit("Got error status from %s:\n%s" % (command, data))
  if not silent_ok and not data:
    ErrorExit("No output from %s" % command)
  return data


class VersionControlSystem(object):
  """Abstract base class providing an interface to the VCS."""

  def __init__(self, options):
    """Constructor.

    Args:
      options: Command line options.
""" self.options = options def GetGUID(self): """Return string to distinguish the repository from others, for example to query all opened review issues for it""" raise NotImplementedError( "abstract method -- subclass %s must override" % self.__class__) def PostProcessDiff(self, diff): """Return the diff with any special post processing this VCS needs, e.g. to include an svn-style "Index:".""" return diff def GenerateDiff(self, args): """Return the current diff as a string. Args: args: Extra arguments to pass to the diff command. """ raise NotImplementedError( "abstract method -- subclass %s must override" % self.__class__) def GetUnknownFiles(self): """Return a list of files unknown to the VCS.""" raise NotImplementedError( "abstract method -- subclass %s must override" % self.__class__) def CheckForUnknownFiles(self): """Show an "are you sure?" prompt if there are unknown files.""" unknown_files = self.GetUnknownFiles() if unknown_files: print "The following files are not added to version control:" for line in unknown_files: print line prompt = "Are you sure to continue?(y/N) " answer = raw_input(prompt).strip() if answer != "y": ErrorExit("User aborted") def GetBaseFile(self, filename): """Get the content of the upstream version of a file. Returns: A tuple (base_content, new_content, is_binary, status) base_content: The contents of the base file. new_content: For text files, this is empty. For binary files, this is the contents of the new file, since the diff output won't contain information to reconstruct the current file. is_binary: True iff the file is binary. status: The status of the file. """ raise NotImplementedError( "abstract method -- subclass %s must override" % self.__class__) def GetBaseFiles(self, diff): """Helper that calls GetBase file for each file in the patch. Returns: A dictionary that maps from filename to GetBaseFile's tuple. Filenames are retrieved based on lines that start with "Index:" or "Property changes on:". 
""" files = {} for line in diff.splitlines(True): if line.startswith('Index:') or line.startswith('Property changes on:'): unused, filename = line.split(':', 1) # On Windows if a file has property changes its filename uses '\' # instead of '/'. filename = filename.strip().replace('\\', '/') files[filename] = self.GetBaseFile(filename) return files def UploadBaseFiles(self, issue, rpc_server, patch_list, patchset, options, files): """Uploads the base files (and if necessary, the current ones as well).""" def UploadFile(filename, file_id, content, is_binary, status, is_base): """Uploads a file to the server.""" file_too_large = False if is_base: type = "base" else: type = "current" if len(content) > MAX_UPLOAD_SIZE: print ("Not uploading the %s file for %s because it's too large." % (type, filename)) file_too_large = True content = "" checksum = md5(content).hexdigest() if options.verbose > 0 and not file_too_large: print "Uploading %s file for %s" % (type, filename) url = "/%d/upload_content/%d/%d" % (int(issue), int(patchset), file_id) form_fields = [("filename", filename), ("status", status), ("checksum", checksum), ("is_binary", str(is_binary)), ("is_current", str(not is_base)), ] if file_too_large: form_fields.append(("file_too_large", "1")) if options.email: form_fields.append(("user", options.email)) ctype, body = EncodeMultipartFormData(form_fields, [("data", filename, content)]) response_body = rpc_server.Send(url, body, content_type=ctype) if not response_body.startswith("OK"): StatusUpdate(" --> %s" % response_body) sys.exit(1) patches = dict() [patches.setdefault(v, k) for k, v in patch_list] for filename in patches.keys(): base_content, new_content, is_binary, status = files[filename] file_id_str = patches.get(filename) if file_id_str.find("nobase") != -1: base_content = None file_id_str = file_id_str[file_id_str.rfind("_") + 1:] file_id = int(file_id_str) if base_content != None: UploadFile(filename, file_id, base_content, is_binary, status, True) if 
new_content != None: UploadFile(filename, file_id, new_content, is_binary, status, False) def IsImage(self, filename): """Returns true if the filename has an image extension.""" mimetype = mimetypes.guess_type(filename)[0] if not mimetype: return False return mimetype.startswith("image/") def IsBinaryData(self, data): """Returns true if data contains a null byte.""" # Derived from how Mercurial's heuristic, see # http://selenic.com/hg/file/848a6658069e/mercurial/util.py#l229 return bool(data and "\0" in data) class SubversionVCS(VersionControlSystem): """Implementation of the VersionControlSystem interface for Subversion.""" def __init__(self, options): super(SubversionVCS, self).__init__(options) if self.options.revision: match = re.match(r"(\d+)(:(\d+))?", self.options.revision) if not match: ErrorExit("Invalid Subversion revision %s." % self.options.revision) self.rev_start = match.group(1) self.rev_end = match.group(3) else: self.rev_start = self.rev_end = None # Cache output from "svn list -r REVNO dirname". # Keys: dirname, Values: 2-tuple (ouput for start rev and end rev). self.svnls_cache = {} # Base URL is required to fetch files deleted in an older revision. # Result is cached to not guess it over and over again in GetBaseFile(). required = self.options.download_base or self.options.revision is not None self.svn_base = self._GuessBase(required) def GetGUID(self): return self._GetInfo("Repository UUID") def GuessBase(self, required): """Wrapper for _GuessBase.""" return self.svn_base def _GuessBase(self, required): """Returns base URL for current diff. Args: required: If true, exits if the url can't be guessed, otherwise None is returned. 
    """
    url = self._GetInfo("URL")
    if url:
      scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
      guess = ""
      # TODO(anatoli) - repository specific hacks should be handled by server
      if netloc == "svn.python.org" and scheme == "svn+ssh":
        path = "projects" + path
        scheme = "http"
        guess = "Python "
      elif netloc.endswith(".googlecode.com"):
        scheme = "http"
        guess = "Google Code "
      path = path + "/"
      base = urlparse.urlunparse((scheme, netloc, path, params, query,
                                  fragment))
      logging.info("Guessed %sbase = %s", guess, base)
      return base
    if required:
      ErrorExit("Can't find URL in output from svn info")
    return None

  def _GetInfo(self, key):
    """Parses 'svn info' for current dir. Returns value for key or None"""
    for line in RunShell(["svn", "info"]).splitlines():
      if line.startswith(key + ": "):
        return line.split(":", 1)[1].strip()
    # Implicitly returns None when the key is not present in the output.

  def _EscapeFilename(self, filename):
    """Escapes filename for SVN commands."""
    # A trailing "@" stops svn from interpreting an "@" embedded in the name
    # as a peg-revision separator.
    if "@" in filename and not filename.endswith("@"):
      filename = "%s@" % filename
    return filename

  def GenerateDiff(self, args):
    """Returns the output of "svn diff", aborting if it contains no patches.

    Args:
      args: Extra arguments to pass to "svn diff".
    """
    cmd = ["svn", "diff"]
    if self.options.revision:
      cmd += ["-r", self.options.revision]
    cmd.extend(args)
    data = RunShell(cmd)
    count = 0
    for line in data.splitlines():
      if line.startswith("Index:") or line.startswith("Property changes on:"):
        count += 1
        logging.info(line)
    if not count:
      ErrorExit("No valid patches found in output from svn diff")
    return data

  def _CollapseKeywords(self, content, keyword_str):
    """Collapses SVN keywords."""
    # svn cat translates keywords but svn diff doesn't. As a result of this
    # behavior patching.PatchChunks() fails with a chunk mismatch error.
    # This part was originally written by the Review Board development team
    # who had the same problem (http://reviews.review-board.org/r/276/).
# Mapping of keywords to known aliases svn_keywords = { # Standard keywords 'Date': ['Date', 'LastChangedDate'], 'Revision': ['Revision', 'LastChangedRevision', 'Rev'], 'Author': ['Author', 'LastChangedBy'], 'HeadURL': ['HeadURL', 'URL'], 'Id': ['Id'], # Aliases 'LastChangedDate': ['LastChangedDate', 'Date'], 'LastChangedRevision': ['LastChangedRevision', 'Rev', 'Revision'], 'LastChangedBy': ['LastChangedBy', 'Author'], 'URL': ['URL', 'HeadURL'], } def repl(m): if m.group(2): return "$%s::%s$" % (m.group(1), " " * len(m.group(3))) return "$%s$" % m.group(1) keywords = [keyword for name in keyword_str.split(" ") for keyword in svn_keywords.get(name, [])] return re.sub(r"\$(%s):(:?)([^\$]+)\$" % '|'.join(keywords), repl, content) def GetUnknownFiles(self): status = RunShell(["svn", "status", "--ignore-externals"], silent_ok=True) unknown_files = [] for line in status.split("\n"): if line and line[0] == "?": unknown_files.append(line) return unknown_files def ReadFile(self, filename): """Returns the contents of a file.""" file = open(filename, 'rb') result = "" try: result = file.read() finally: file.close() return result def GetStatus(self, filename): """Returns the status of a file.""" if not self.options.revision: status = RunShell(["svn", "status", "--ignore-externals", self._EscapeFilename(filename)]) if not status: ErrorExit("svn status returned no output for %s" % filename) status_lines = status.splitlines() # If file is in a cl, the output will begin with # "\n--- Changelist 'cl_name':\n". See # http://svn.collab.net/repos/svn/trunk/notes/changelist-design.txt if (len(status_lines) == 3 and not status_lines[0] and status_lines[1].startswith("--- Changelist")): status = status_lines[2] else: status = status_lines[0] # If we have a revision to diff against we need to run "svn list" # for the old and the new revision and compare the results to get # the correct status for a file. 
else: dirname, relfilename = os.path.split(filename) if dirname not in self.svnls_cache: cmd = ["svn", "list", "-r", self.rev_start, self._EscapeFilename(dirname) or "."] out, err, returncode = RunShellWithReturnCodeAndStderr(cmd) if returncode: # Directory might not yet exist at start revison # svn: Unable to find repository location for 'abc' in revision nnn if re.match('^svn: Unable to find repository location for .+ in revision \d+', err): old_files = () else: ErrorExit("Failed to get status for %s:\n%s" % (filename, err)) else: old_files = out.splitlines() args = ["svn", "list"] if self.rev_end: args += ["-r", self.rev_end] cmd = args + [self._EscapeFilename(dirname) or "."] out, returncode = RunShellWithReturnCode(cmd) if returncode: ErrorExit("Failed to run command %s" % cmd) self.svnls_cache[dirname] = (old_files, out.splitlines()) old_files, new_files = self.svnls_cache[dirname] if relfilename in old_files and relfilename not in new_files: status = "D " elif relfilename in old_files and relfilename in new_files: status = "M " else: status = "A " return status def GetBaseFile(self, filename): status = self.GetStatus(filename) base_content = None new_content = None # If a file is copied its status will be "A +", which signifies # "addition-with-history". See "svn st" for more information. We need to # upload the original file or else diff parsing will fail if the file was # edited. if status[0] == "A" and status[3] != "+": # We'll need to upload the new content if we're adding a binary file # since diff's output won't contain it. mimetype = RunShell(["svn", "propget", "svn:mime-type", self._EscapeFilename(filename)], silent_ok=True) base_content = "" is_binary = bool(mimetype) and not mimetype.startswith("text/") if is_binary and self.IsImage(filename): new_content = self.ReadFile(filename) elif (status[0] in ("M", "D", "R") or (status[0] == "A" and status[3] == "+") or # Copied file. (status[0] == " " and status[1] == "M")): # Property change. 
      args = []
      if self.options.revision:
        # filename must not be escaped. We already add an at sign ("@"),
        # svn's peg-revision separator, here.
        url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
      else:
        # Don't change filename, it's needed later.
        url = filename
        args += ["-r", "BASE"]
      cmd = ["svn"] + args + ["propget", "svn:mime-type", url]
      mimetype, returncode = RunShellWithReturnCode(cmd)
      if returncode:
        # File does not exist in the requested revision.
        # Reset mimetype, it contains an error message.
        mimetype = ""
      else:
        mimetype = mimetype.strip()
      get_base = False
      # this test for binary is exactly the test prescribed by the
      # official SVN docs at
      # http://subversion.apache.org/faq.html#binary-files
      is_binary = (bool(mimetype) and
        not mimetype.startswith("text/") and
        mimetype not in ("image/x-xbitmap", "image/x-xpixmap"))
      if status[0] == " ":
        # Empty base content just to force an upload.
        base_content = ""
      elif is_binary:
        if self.IsImage(filename):
          get_base = True
          if status[0] == "M":
            if not self.rev_end:
              new_content = self.ReadFile(filename)
            else:
              url = "%s/%s@%s" % (self.svn_base, filename, self.rev_end)
              new_content = RunShell(["svn", "cat", url],
                                     universal_newlines=True, silent_ok=True)
        else:
          # Non-image binary file: upload an empty base to force an upload,
          # the diff output cannot reconstruct it.
          base_content = ""
      else:
        get_base = True

      if get_base:
        # Binary content must be fetched without newline translation.
        if is_binary:
          universal_newlines = False
        else:
          universal_newlines = True
        if self.rev_start:
          # "svn cat -r REV delete_file.txt" doesn't work. cat requires
          # the full URL with "@REV" appended instead of using "-r" option.
          url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
          base_content = RunShell(["svn", "cat", url],
                                  universal_newlines=universal_newlines,
                                  silent_ok=True)
        else:
          base_content, ret_code = RunShellWithReturnCode(
            ["svn", "cat", self._EscapeFilename(filename)],
            universal_newlines=universal_newlines)
          if ret_code and status[0] == "R":
            # It's a replaced file without local history (see issue208).
            # The base file needs to be fetched from the server.
url = "%s/%s" % (self.svn_base, filename) base_content = RunShell(["svn", "cat", url], universal_newlines=universal_newlines, silent_ok=True) elif ret_code: ErrorExit("Got error status from 'svn cat %s'" % filename) if not is_binary: args = [] if self.rev_start: url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start) else: url = filename args += ["-r", "BASE"] cmd = ["svn"] + args + ["propget", "svn:keywords", url] keywords, returncode = RunShellWithReturnCode(cmd) if keywords and not returncode: base_content = self._CollapseKeywords(base_content, keywords) else: StatusUpdate("svn status returned unexpected output: %s" % status) sys.exit(1) return base_content, new_content, is_binary, status[0:5] class GitVCS(VersionControlSystem): """Implementation of the VersionControlSystem interface for Git.""" def __init__(self, options): super(GitVCS, self).__init__(options) # Map of filename -> (hash before, hash after) of base file. # Hashes for "no such file" are represented as None. self.hashes = {} # Map of new filename -> old filename for renames. self.renames = {} def GetGUID(self): revlist = RunShell("git rev-list --parents HEAD".split()).splitlines() # M-A: Return the 1st root hash, there could be multiple when a # subtree is merged. In that case, more analysis would need to # be done to figure out which HEAD is the 'most representative'. for r in revlist: if ' ' not in r: return r def PostProcessDiff(self, gitdiff): """Converts the diff output to include an svn-style "Index:" line as well as record the hashes of the files, so we can upload them along with our diff.""" # Special used by git to indicate "no such content". NULL_HASH = "0"*40 def IsFileNew(filename): return filename in self.hashes and self.hashes[filename][0] is None def AddSubversionPropertyChange(filename): """Add svn's property change information into the patch if given file is new file. We use Subversion's auto-props setting to retrieve its property. 
See http://svnbook.red-bean.com/en/1.1/ch07.html#svn-ch-7-sect-1.3.2 for Subversion's [auto-props] setting. """ if self.options.emulate_svn_auto_props and IsFileNew(filename): svnprops = GetSubversionPropertyChanges(filename) if svnprops: svndiff.append("\n" + svnprops + "\n") svndiff = [] filecount = 0 filename = None for line in gitdiff.splitlines(): match = re.match(r"diff --git a/(.*) b/(.*)$", line) if match: # Add auto property here for previously seen file. if filename is not None: AddSubversionPropertyChange(filename) filecount += 1 # Intentionally use the "after" filename so we can show renames. filename = match.group(2) svndiff.append("Index: %s\n" % filename) if match.group(1) != match.group(2): self.renames[match.group(2)] = match.group(1) else: # The "index" line in a git diff looks like this (long hashes elided): # index 82c0d44..b2cee3f 100755 # We want to save the left hash, as that identifies the base file. match = re.match(r"index (\w+)\.\.(\w+)", line) if match: before, after = (match.group(1), match.group(2)) if before == NULL_HASH: before = None if after == NULL_HASH: after = None self.hashes[filename] = (before, after) svndiff.append(line + "\n") if not filecount: ErrorExit("No valid patches found in output from git diff") # Add auto property for the last seen file. assert filename is not None AddSubversionPropertyChange(filename) return "".join(svndiff) def GenerateDiff(self, extra_args): extra_args = extra_args[:] if self.options.revision: if ":" in self.options.revision: extra_args = self.options.revision.split(":", 1) + extra_args else: extra_args = [self.options.revision] + extra_args # --no-ext-diff is broken in some versions of Git, so try to work around # this by overriding the environment (but there is still a problem if the # git config key "diff.external" is used). 
env = os.environ.copy() if 'GIT_EXTERNAL_DIFF' in env: del env['GIT_EXTERNAL_DIFF'] return RunShell(["git", "diff", "--no-ext-diff", "--full-index", "--ignore-submodules", "-M"] + extra_args, env=env) def GetUnknownFiles(self): status = RunShell(["git", "ls-files", "--exclude-standard", "--others"], silent_ok=True) return status.splitlines() def GetFileContent(self, file_hash, is_binary): """Returns the content of a file identified by its git hash.""" data, retcode = RunShellWithReturnCode(["git", "show", file_hash], universal_newlines=not is_binary) if retcode: ErrorExit("Got error status from 'git show %s'" % file_hash) return data def GetBaseFile(self, filename): hash_before, hash_after = self.hashes.get(filename, (None,None)) base_content = None new_content = None status = None if filename in self.renames: status = "A +" # Match svn attribute name for renames. if filename not in self.hashes: # If a rename doesn't change the content, we never get a hash. base_content = RunShell(["git", "show", "HEAD:" + filename]) elif not hash_before: status = "A" base_content = "" elif not hash_after: status = "D" else: status = "M" is_binary = self.IsBinaryData(base_content) is_image = self.IsImage(filename) # Grab the before/after content if we need it. # We should include file contents if it's text or it's an image. if not is_binary or is_image: # Grab the base content if we don't have it already. if base_content is None and hash_before: base_content = self.GetFileContent(hash_before, is_binary) # Only include the "after" file if it's an image; otherwise it # it is reconstructed from the diff. 
if is_image and hash_after: new_content = self.GetFileContent(hash_after, is_binary) return (base_content, new_content, is_binary, status) class CVSVCS(VersionControlSystem): """Implementation of the VersionControlSystem interface for CVS.""" def __init__(self, options): super(CVSVCS, self).__init__(options) def GetGUID(self): """For now we don't know how to get repository ID for CVS""" return def GetOriginalContent_(self, filename): RunShell(["cvs", "up", filename], silent_ok=True) # TODO need detect file content encoding content = open(filename).read() return content.replace("\r\n", "\n") def GetBaseFile(self, filename): base_content = None new_content = None status = "A" output, retcode = RunShellWithReturnCode(["cvs", "status", filename]) if retcode: ErrorExit("Got error status from 'cvs status %s'" % filename) if output.find("Status: Locally Modified") != -1: status = "M" temp_filename = "%s.tmp123" % filename os.rename(filename, temp_filename) base_content = self.GetOriginalContent_(filename) os.rename(temp_filename, filename) elif output.find("Status: Locally Added"): status = "A" base_content = "" elif output.find("Status: Needs Checkout"): status = "D" base_content = self.GetOriginalContent_(filename) return (base_content, new_content, self.IsBinaryData(base_content), status) def GenerateDiff(self, extra_args): cmd = ["cvs", "diff", "-u", "-N"] if self.options.revision: cmd += ["-r", self.options.revision] cmd.extend(extra_args) data, retcode = RunShellWithReturnCode(cmd) count = 0 if retcode in [0, 1]: for line in data.splitlines(): if line.startswith("Index:"): count += 1 logging.info(line) if not count: ErrorExit("No valid patches found in output from cvs diff") return data def GetUnknownFiles(self): data, retcode = RunShellWithReturnCode(["cvs", "diff"]) if retcode not in [0, 1]: ErrorExit("Got error status from 'cvs diff':\n%s" % (data,)) unknown_files = [] for line in data.split("\n"): if line and line[0] == "?": unknown_files.append(line) return 
unknown_files class MercurialVCS(VersionControlSystem): """Implementation of the VersionControlSystem interface for Mercurial.""" def __init__(self, options, repo_dir): super(MercurialVCS, self).__init__(options) # Absolute path to repository (we can be in a subdir) self.repo_dir = os.path.normpath(repo_dir) # Compute the subdir cwd = os.path.normpath(os.getcwd()) assert cwd.startswith(self.repo_dir) self.subdir = cwd[len(self.repo_dir):].lstrip(r"\/") if self.options.revision: self.base_rev = self.options.revision else: self.base_rev = RunShell(["hg", "parent", "-q"]).split(':')[1].strip() def GetGUID(self): # See chapter "Uniquely identifying a repository" # http://hgbook.red-bean.com/read/customizing-the-output-of-mercurial.html info = RunShell("hg log -r0 --template {node}".split()) return info.strip() def _GetRelPath(self, filename): """Get relative path of a file according to the current directory, given its logical path in the repo.""" assert filename.startswith(self.subdir), (filename, self.subdir) return filename[len(self.subdir):].lstrip(r"\/") def GenerateDiff(self, extra_args): cmd = ["hg", "diff", "--git", "-r", self.base_rev] + extra_args data = RunShell(cmd, silent_ok=True) svndiff = [] filecount = 0 for line in data.splitlines(): m = re.match("diff --git a/(\S+) b/(\S+)", line) if m: # Modify line to make it look like as it comes from svn diff. # With this modification no changes on the server side are required # to make upload.py work with Mercurial repos. # NOTE: for proper handling of moved/copied files, we have to use # the second filename. 
filename = m.group(2) svndiff.append("Index: %s" % filename) svndiff.append("=" * 67) filecount += 1 logging.info(line) else: svndiff.append(line) if not filecount: ErrorExit("No valid patches found in output from hg diff") return "\n".join(svndiff) + "\n" def GetUnknownFiles(self): """Return a list of files unknown to the VCS.""" args = [] status = RunShell(["hg", "status", "--rev", self.base_rev, "-u", "."], silent_ok=True) unknown_files = [] for line in status.splitlines(): st, fn = line.split(" ", 1) if st == "?": unknown_files.append(fn) return unknown_files def GetBaseFile(self, filename): # "hg status" and "hg cat" both take a path relative to the current subdir # rather than to the repo root, but "hg diff" has given us the full path # to the repo root. base_content = "" new_content = None is_binary = False oldrelpath = relpath = self._GetRelPath(filename) # "hg status -C" returns two lines for moved/copied files, one otherwise out = RunShell(["hg", "status", "-C", "--rev", self.base_rev, relpath]) out = out.splitlines() # HACK: strip error message about missing file/directory if it isn't in # the working copy if out[0].startswith('%s: ' % relpath): out = out[1:] status, _ = out[0].split(' ', 1) if len(out) > 1 and status == "A": # Moved/copied => considered as modified, use old filename to # retrieve base contents oldrelpath = out[1].strip() status = "M" if ":" in self.base_rev: base_rev = self.base_rev.split(":", 1)[0] else: base_rev = self.base_rev if status != "A": base_content = RunShell(["hg", "cat", "-r", base_rev, oldrelpath], silent_ok=True) is_binary = self.IsBinaryData(base_content) if status != "R": new_content = open(relpath, "rb").read() is_binary = is_binary or self.IsBinaryData(new_content) if is_binary and base_content: # Fetch again without converting newlines base_content = RunShell(["hg", "cat", "-r", base_rev, oldrelpath], silent_ok=True, universal_newlines=False) if not is_binary or not self.IsImage(relpath): new_content = None return 
base_content, new_content, is_binary, status class PerforceVCS(VersionControlSystem): """Implementation of the VersionControlSystem interface for Perforce.""" def __init__(self, options): def ConfirmLogin(): # Make sure we have a valid perforce session while True: data, retcode = self.RunPerforceCommandWithReturnCode( ["login", "-s"], marshal_output=True) if not data: ErrorExit("Error checking perforce login") if not retcode and (not "code" in data or data["code"] != "error"): break print "Enter perforce password: " self.RunPerforceCommandWithReturnCode(["login"]) super(PerforceVCS, self).__init__(options) self.p4_changelist = options.p4_changelist if not self.p4_changelist: ErrorExit("A changelist id is required") if (options.revision): ErrorExit("--rev is not supported for perforce") self.p4_port = options.p4_port self.p4_client = options.p4_client self.p4_user = options.p4_user ConfirmLogin() if not options.message: description = self.RunPerforceCommand(["describe", self.p4_changelist], marshal_output=True) if description and "desc" in description: # Rietveld doesn't support multi-line descriptions raw_message = description["desc"].strip() lines = raw_message.splitlines() if len(lines): options.message = lines[0] def GetGUID(self): """For now we don't know how to get repository ID for Perforce""" return def RunPerforceCommandWithReturnCode(self, extra_args, marshal_output=False, universal_newlines=True): args = ["p4"] if marshal_output: # -G makes perforce format its output as marshalled python objects args.extend(["-G"]) if self.p4_port: args.extend(["-p", self.p4_port]) if self.p4_client: args.extend(["-c", self.p4_client]) if self.p4_user: args.extend(["-u", self.p4_user]) args.extend(extra_args) data, retcode = RunShellWithReturnCode( args, print_output=False, universal_newlines=universal_newlines) if marshal_output and data: data = marshal.loads(data) return data, retcode def RunPerforceCommand(self, extra_args, marshal_output=False, 
universal_newlines=True): # This might be a good place to cache call results, since things like # describe or fstat might get called repeatedly. data, retcode = self.RunPerforceCommandWithReturnCode( extra_args, marshal_output, universal_newlines) if retcode: ErrorExit("Got error status from %s:\n%s" % (extra_args, data)) return data def GetFileProperties(self, property_key_prefix = "", command = "describe"): description = self.RunPerforceCommand(["describe", self.p4_changelist], marshal_output=True) changed_files = {} file_index = 0 # Try depotFile0, depotFile1, ... until we don't find a match while True: file_key = "depotFile%d" % file_index if file_key in description: filename = description[file_key] change_type = description[property_key_prefix + str(file_index)] changed_files[filename] = change_type file_index += 1 else: break return changed_files def GetChangedFiles(self): return self.GetFileProperties("action") def GetUnknownFiles(self): # Perforce doesn't detect new files, they have to be explicitly added return [] def IsBaseBinary(self, filename): base_filename = self.GetBaseFilename(filename) return self.IsBinaryHelper(base_filename, "files") def IsPendingBinary(self, filename): return self.IsBinaryHelper(filename, "describe") def IsBinaryHelper(self, filename, command): file_types = self.GetFileProperties("type", command) if not filename in file_types: ErrorExit("Trying to check binary status of unknown file %s." % filename) # This treats symlinks, macintosh resource files, temporary objects, and # unicode as binary. 
See the Perforce docs for more details: # http://www.perforce.com/perforce/doc.current/manuals/cmdref/o.ftypes.html return not file_types[filename].endswith("text") def GetFileContent(self, filename, revision, is_binary): file_arg = filename if revision: file_arg += "#" + revision # -q suppresses the initial line that displays the filename and revision return self.RunPerforceCommand(["print", "-q", file_arg], universal_newlines=not is_binary) def GetBaseFilename(self, filename): actionsWithDifferentBases = [ "move/add", # p4 move "branch", # p4 integrate (to a new file), similar to hg "add" "add", # p4 integrate (to a new file), after modifying the new file ] # We only see a different base for "add" if this is a downgraded branch # after a file was branched (integrated), then edited. if self.GetAction(filename) in actionsWithDifferentBases: # -Or shows information about pending integrations/moves fstat_result = self.RunPerforceCommand(["fstat", "-Or", filename], marshal_output=True) baseFileKey = "resolveFromFile0" # I think it's safe to use only file0 if baseFileKey in fstat_result: return fstat_result[baseFileKey] return filename def GetBaseRevision(self, filename): base_filename = self.GetBaseFilename(filename) have_result = self.RunPerforceCommand(["have", base_filename], marshal_output=True) if "haveRev" in have_result: return have_result["haveRev"] def GetLocalFilename(self, filename): where = self.RunPerforceCommand(["where", filename], marshal_output=True) if "path" in where: return where["path"] def GenerateDiff(self, args): class DiffData: def __init__(self, perforceVCS, filename, action): self.perforceVCS = perforceVCS self.filename = filename self.action = action self.base_filename = perforceVCS.GetBaseFilename(filename) self.file_body = None self.base_rev = None self.prefix = None self.working_copy = True self.change_summary = None def GenerateDiffHeader(diffData): header = [] header.append("Index: %s" % diffData.filename) header.append("=" * 67) if 
diffData.base_filename != diffData.filename: if diffData.action.startswith("move"): verb = "rename" else: verb = "copy" header.append("%s from %s" % (verb, diffData.base_filename)) header.append("%s to %s" % (verb, diffData.filename)) suffix = "\t(revision %s)" % diffData.base_rev header.append("--- " + diffData.base_filename + suffix) if diffData.working_copy: suffix = "\t(working copy)" header.append("+++ " + diffData.filename + suffix) if diffData.change_summary: header.append(diffData.change_summary) return header def GenerateMergeDiff(diffData, args): # -du generates a unified diff, which is nearly svn format diffData.file_body = self.RunPerforceCommand( ["diff", "-du", diffData.filename] + args) diffData.base_rev = self.GetBaseRevision(diffData.filename) diffData.prefix = "" # We have to replace p4's file status output (the lines starting # with +++ or ---) to match svn's diff format lines = diffData.file_body.splitlines() first_good_line = 0 while (first_good_line < len(lines) and not lines[first_good_line].startswith("@@")): first_good_line += 1 diffData.file_body = "\n".join(lines[first_good_line:]) return diffData def GenerateAddDiff(diffData): fstat = self.RunPerforceCommand(["fstat", diffData.filename], marshal_output=True) if "headRev" in fstat: diffData.base_rev = fstat["headRev"] # Re-adding a deleted file else: diffData.base_rev = "0" # Brand new file diffData.working_copy = False rel_path = self.GetLocalFilename(diffData.filename) diffData.file_body = open(rel_path, 'r').read() # Replicate svn's list of changed lines line_count = len(diffData.file_body.splitlines()) diffData.change_summary = "@@ -0,0 +1" if line_count > 1: diffData.change_summary += ",%d" % line_count diffData.change_summary += " @@" diffData.prefix = "+" return diffData def GenerateDeleteDiff(diffData): diffData.base_rev = self.GetBaseRevision(diffData.filename) is_base_binary = self.IsBaseBinary(diffData.filename) # For deletes, base_filename == filename diffData.file_body = 
self.GetFileContent(diffData.base_filename, None, is_base_binary) # Replicate svn's list of changed lines line_count = len(diffData.file_body.splitlines()) diffData.change_summary = "@@ -1" if line_count > 1: diffData.change_summary += ",%d" % line_count diffData.change_summary += " +0,0 @@" diffData.prefix = "-" return diffData changed_files = self.GetChangedFiles() svndiff = [] filecount = 0 for (filename, action) in changed_files.items(): svn_status = self.PerforceActionToSvnStatus(action) if svn_status == "SKIP": continue diffData = DiffData(self, filename, action) # Is it possible to diff a branched file? Stackoverflow says no: # http://stackoverflow.com/questions/1771314/in-perforce-command-line-how-to-diff-a-file-reopened-for-add if svn_status == "M": diffData = GenerateMergeDiff(diffData, args) elif svn_status == "A": diffData = GenerateAddDiff(diffData) elif svn_status == "D": diffData = GenerateDeleteDiff(diffData) else: ErrorExit("Unknown file action %s (svn action %s)." % \ (action, svn_status)) svndiff += GenerateDiffHeader(diffData) for line in diffData.file_body.splitlines(): svndiff.append(diffData.prefix + line) filecount += 1 if not filecount: ErrorExit("No valid patches found in output from p4 diff") return "\n".join(svndiff) + "\n" def PerforceActionToSvnStatus(self, status): # Mirroring the list at http://permalink.gmane.org/gmane.comp.version-control.mercurial.devel/28717 # Is there something more official? return { "add" : "A", "branch" : "A", "delete" : "D", "edit" : "M", # Also includes changing file types. "integrate" : "M", "move/add" : "M", "move/delete": "SKIP", "purge" : "D", # How does a file's status become "purge"? }[status] def GetAction(self, filename): changed_files = self.GetChangedFiles() if not filename in changed_files: ErrorExit("Trying to get base version of unknown file %s." 
% filename) return changed_files[filename] def GetBaseFile(self, filename): base_filename = self.GetBaseFilename(filename) base_content = "" new_content = None status = self.PerforceActionToSvnStatus(self.GetAction(filename)) if status != "A": revision = self.GetBaseRevision(base_filename) if not revision: ErrorExit("Couldn't find base revision for file %s" % filename) is_base_binary = self.IsBaseBinary(base_filename) base_content = self.GetFileContent(base_filename, revision, is_base_binary) is_binary = self.IsPendingBinary(filename) if status != "D" and status != "SKIP": relpath = self.GetLocalFilename(filename) if is_binary and self.IsImage(relpath): new_content = open(relpath, "rb").read() return base_content, new_content, is_binary, status # NOTE: The SplitPatch function is duplicated in engine.py, keep them in sync. def SplitPatch(data): """Splits a patch into separate pieces for each file. Args: data: A string containing the output of svn diff. Returns: A list of 2-tuple (filename, text) where text is the svn diff output pertaining to filename. """ patches = [] filename = None diff = [] for line in data.splitlines(True): new_filename = None if line.startswith('Index:'): unused, new_filename = line.split(':', 1) new_filename = new_filename.strip() elif line.startswith('Property changes on:'): unused, temp_filename = line.split(':', 1) # When a file is modified, paths use '/' between directories, however # when a property is modified '\' is used on Windows. Make them the same # otherwise the file shows up twice. temp_filename = temp_filename.strip().replace('\\', '/') if temp_filename != filename: # File has property changes but no modifications, create a new diff. 
new_filename = temp_filename if new_filename: if filename and diff: patches.append((filename, ''.join(diff))) filename = new_filename diff = [line] continue if diff is not None: diff.append(line) if filename and diff: patches.append((filename, ''.join(diff))) return patches def UploadSeparatePatches(issue, rpc_server, patchset, data, options): """Uploads a separate patch for each file in the diff output. Returns a list of [patch_key, filename] for each file. """ patches = SplitPatch(data) rv = [] for patch in patches: if len(patch[1]) > MAX_UPLOAD_SIZE: print ("Not uploading the patch for " + patch[0] + " because the file is too large.") continue form_fields = [("filename", patch[0])] if not options.download_base: form_fields.append(("content_upload", "1")) files = [("data", "data.diff", patch[1])] ctype, body = EncodeMultipartFormData(form_fields, files) url = "/%d/upload_patch/%d" % (int(issue), int(patchset)) print "Uploading patch for " + patch[0] response_body = rpc_server.Send(url, body, content_type=ctype) lines = response_body.splitlines() if not lines or lines[0] != "OK": StatusUpdate(" --> %s" % response_body) sys.exit(1) rv.append([lines[1], patch[0]]) return rv def GuessVCSName(options): """Helper to guess the version control system. This examines the current directory, guesses which VersionControlSystem we're using, and returns an string indicating which VCS is detected. Returns: A pair (vcs, output). vcs is a string indicating which VCS was detected and is one of VCS_GIT, VCS_MERCURIAL, VCS_SUBVERSION, VCS_PERFORCE, VCS_CVS, or VCS_UNKNOWN. Since local perforce repositories can't be easily detected, this method will only guess VCS_PERFORCE if any perforce options have been specified. output is a string containing any interesting output from the vcs detection routine, or None if there is nothing interesting. 
""" for attribute, value in options.__dict__.iteritems(): if attribute.startswith("p4") and value != None: return (VCS_PERFORCE, None) def RunDetectCommand(vcs_type, command): """Helper to detect VCS by executing command. Returns: A pair (vcs, output) or None. Throws exception on error. """ try: out, returncode = RunShellWithReturnCode(command) if returncode == 0: return (vcs_type, out.strip()) except OSError, (errcode, message): if errcode != errno.ENOENT: # command not found code raise # Mercurial has a command to get the base directory of a repository # Try running it, but don't die if we don't have hg installed. # NOTE: we try Mercurial first as it can sit on top of an SVN working copy. res = RunDetectCommand(VCS_MERCURIAL, ["hg", "root"]) if res != None: return res # Subversion has a .svn in all working directories. if os.path.isdir('.svn'): logging.info("Guessed VCS = Subversion") return (VCS_SUBVERSION, None) # Git has a command to test if you're in a git tree. # Try running it, but don't die if we don't have git installed. res = RunDetectCommand(VCS_GIT, ["git", "rev-parse", "--is-inside-work-tree"]) if res != None: return res # detect CVS repos use `cvs status && $? == 0` rules res = RunDetectCommand(VCS_CVS, ["cvs", "status"]) if res != None: return res return (VCS_UNKNOWN, None) def GuessVCS(options): """Helper to guess the version control system. This verifies any user-specified VersionControlSystem (by command line or environment variable). If the user didn't specify one, this examines the current directory, guesses which VersionControlSystem we're using, and returns an instance of the appropriate class. Exit with an error if we can't figure it out. Returns: A VersionControlSystem instance. Exits if the VCS can't be guessed. """ vcs = options.vcs if not vcs: vcs = os.environ.get("CODEREVIEW_VCS") if vcs: v = VCS_ABBREVIATIONS.get(vcs.lower()) if v is None: ErrorExit("Unknown version control system %r specified." 
% vcs) (vcs, extra_output) = (v, None) else: (vcs, extra_output) = GuessVCSName(options) if vcs == VCS_MERCURIAL: if extra_output is None: extra_output = RunShell(["hg", "root"]).strip() return MercurialVCS(options, extra_output) elif vcs == VCS_SUBVERSION: return SubversionVCS(options) elif vcs == VCS_PERFORCE: return PerforceVCS(options) elif vcs == VCS_GIT: return GitVCS(options) elif vcs == VCS_CVS: return CVSVCS(options) ErrorExit(("Could not guess version control system. " "Are you in a working copy directory?")) def CheckReviewer(reviewer): """Validate a reviewer -- either a nickname or an email addres. Args: reviewer: A nickname or an email address. Calls ErrorExit() if it is an invalid email address. """ if "@" not in reviewer: return # Assume nickname parts = reviewer.split("@") if len(parts) > 2: ErrorExit("Invalid email address: %r" % reviewer) assert len(parts) == 2 if "." not in parts[1]: ErrorExit("Invalid email address: %r" % reviewer) def LoadSubversionAutoProperties(): """Returns the content of [auto-props] section of Subversion's config file as a dictionary. Returns: A dictionary whose key-value pair corresponds the [auto-props] section's key-value pair. In following cases, returns empty dictionary: - config file doesn't exist, or - 'enable-auto-props' is not set to 'true-like-value' in [miscellany]. 
""" if os.name == 'nt': subversion_config = os.environ.get("APPDATA") + "\\Subversion\\config" else: subversion_config = os.path.expanduser("~/.subversion/config") if not os.path.exists(subversion_config): return {} config = ConfigParser.ConfigParser() config.read(subversion_config) if (config.has_section("miscellany") and config.has_option("miscellany", "enable-auto-props") and config.getboolean("miscellany", "enable-auto-props") and config.has_section("auto-props")): props = {} for file_pattern in config.options("auto-props"): props[file_pattern] = ParseSubversionPropertyValues( config.get("auto-props", file_pattern)) return props else: return {} def ParseSubversionPropertyValues(props): """Parse the given property value which comes from [auto-props] section and returns a list whose element is a (svn_prop_key, svn_prop_value) pair. See the following doctest for example. >>> ParseSubversionPropertyValues('svn:eol-style=LF') [('svn:eol-style', 'LF')] >>> ParseSubversionPropertyValues('svn:mime-type=image/jpeg') [('svn:mime-type', 'image/jpeg')] >>> ParseSubversionPropertyValues('svn:eol-style=LF;svn:executable') [('svn:eol-style', 'LF'), ('svn:executable', '*')] """ key_value_pairs = [] for prop in props.split(";"): key_value = prop.split("=") assert len(key_value) <= 2 if len(key_value) == 1: # If value is not given, use '*' as a Subversion's convention. key_value_pairs.append((key_value[0], "*")) else: key_value_pairs.append((key_value[0], key_value[1])) return key_value_pairs def GetSubversionPropertyChanges(filename): """Return a Subversion's 'Property changes on ...' string, which is used in the patch file. Args: filename: filename whose property might be set by [auto-props] config. Returns: A string like 'Property changes on |filename| ...' if given |filename| matches any entries in [auto-props] section. None, otherwise. 
""" global svn_auto_props_map if svn_auto_props_map is None: svn_auto_props_map = LoadSubversionAutoProperties() all_props = [] for file_pattern, props in svn_auto_props_map.items(): if fnmatch.fnmatch(filename, file_pattern): all_props.extend(props) if all_props: return FormatSubversionPropertyChanges(filename, all_props) return None def FormatSubversionPropertyChanges(filename, props): """Returns Subversion's 'Property changes on ...' strings using given filename and properties. Args: filename: filename props: A list whose element is a (svn_prop_key, svn_prop_value) pair. Returns: A string which can be used in the patch file for Subversion. See the following doctest for example. >>> print FormatSubversionPropertyChanges('foo.cc', [('svn:eol-style', 'LF')]) Property changes on: foo.cc ___________________________________________________________________ Added: svn:eol-style + LF <BLANKLINE> """ prop_changes_lines = [ "Property changes on: %s" % filename, "___________________________________________________________________"] for key, value in props: prop_changes_lines.append("Added: " + key) prop_changes_lines.append(" + " + value) return "\n".join(prop_changes_lines) + "\n" def RealMain(argv, data=None): """The real main function. Args: argv: Command line arguments. data: Diff contents. If None (default) the diff is generated by the VersionControlSystem implementation returned by GuessVCS(). Returns: A 2-tuple (issue id, patchset id). The patchset id is None if the base files are not uploaded by this script (applies only to SVN checkouts). """ options, args = parser.parse_args(argv[1:]) if options.help: if options.verbose < 2: # hide Perforce options parser.epilog = "Use '--help -v' to show additional Perforce options." 
parser.option_groups.remove(parser.get_option_group('--p4_port')) parser.print_help() sys.exit(0) global verbosity verbosity = options.verbose if verbosity >= 3: logging.getLogger().setLevel(logging.DEBUG) elif verbosity >= 2: logging.getLogger().setLevel(logging.INFO) vcs = GuessVCS(options) base = options.base_url if isinstance(vcs, SubversionVCS): # Guessing the base field is only supported for Subversion. # Note: Fetching base files may become deprecated in future releases. guessed_base = vcs.GuessBase(options.download_base) if base: if guessed_base and base != guessed_base: print "Using base URL \"%s\" from --base_url instead of \"%s\"" % \ (base, guessed_base) else: base = guessed_base if not base and options.download_base: options.download_base = True logging.info("Enabled upload of base file") if not options.assume_yes: vcs.CheckForUnknownFiles() if data is None: data = vcs.GenerateDiff(args) data = vcs.PostProcessDiff(data) if options.print_diffs: print "Rietveld diff start:*****" print data print "Rietveld diff end:*****" files = vcs.GetBaseFiles(data) if verbosity >= 1: print "Upload server:", options.server, "(change with -s/--server)" if options.issue: prompt = "Message describing this patch set: " else: prompt = "New issue subject: " message = options.message or raw_input(prompt).strip() if not message: ErrorExit("A non-empty message is required") rpc_server = GetRpcServer(options.server, options.email, options.host, options.save_cookies, options.account_type) form_fields = [("subject", message)] repo_guid = vcs.GetGUID() if repo_guid: form_fields.append(("repo_guid", repo_guid)) if base: b = urlparse.urlparse(base) username, netloc = urllib.splituser(b.netloc) if username: logging.info("Removed username from base URL") base = urlparse.urlunparse((b.scheme, netloc, b.path, b.params, b.query, b.fragment)) form_fields.append(("base", base)) if options.issue: form_fields.append(("issue", str(options.issue))) if options.email: form_fields.append(("user", 
options.email)) if options.reviewers: for reviewer in options.reviewers.split(','): CheckReviewer(reviewer) form_fields.append(("reviewers", options.reviewers)) if options.cc: for cc in options.cc.split(','): CheckReviewer(cc) form_fields.append(("cc", options.cc)) description = options.description if options.description_file: if options.description: ErrorExit("Can't specify description and description_file") file = open(options.description_file, 'r') description = file.read() file.close() if description: form_fields.append(("description", description)) # Send a hash of all the base file so the server can determine if a copy # already exists in an earlier patchset. base_hashes = "" for file, info in files.iteritems(): if not info[0] is None: checksum = md5(info[0]).hexdigest() if base_hashes: base_hashes += "|" base_hashes += checksum + ":" + file form_fields.append(("base_hashes", base_hashes)) if options.private: if options.issue: print "Warning: Private flag ignored when updating an existing issue." else: form_fields.append(("private", "1")) if options.send_patch: options.send_mail = True # If we're uploading base files, don't send the email before the uploads, so # that it contains the file status. if options.send_mail and options.download_base: form_fields.append(("send_mail", "1")) if not options.download_base: form_fields.append(("content_upload", "1")) if len(data) > MAX_UPLOAD_SIZE: print "Patch is large, so uploading file patches separately." 
uploaded_diff_file = [] form_fields.append(("separate_patches", "1")) else: uploaded_diff_file = [("data", "data.diff", data)] ctype, body = EncodeMultipartFormData(form_fields, uploaded_diff_file) response_body = rpc_server.Send("/upload", body, content_type=ctype) patchset = None if not options.download_base or not uploaded_diff_file: lines = response_body.splitlines() if len(lines) >= 2: msg = lines[0] patchset = lines[1].strip() patches = [x.split(" ", 1) for x in lines[2:]] else: msg = response_body else: msg = response_body StatusUpdate(msg) if not response_body.startswith("Issue created.") and \ not response_body.startswith("Issue updated."): sys.exit(0) issue = msg[msg.rfind("/")+1:] if not uploaded_diff_file: result = UploadSeparatePatches(issue, rpc_server, patchset, data, options) if not options.download_base: patches = result if not options.download_base: vcs.UploadBaseFiles(issue, rpc_server, patches, patchset, options, files) if options.send_mail: payload = "" if options.send_patch: payload=urllib.urlencode({"attach_patch": "yes"}) rpc_server.Send("/" + issue + "/mail", payload=payload) return issue, patchset def main(): try: logging.basicConfig(format=("%(asctime).19s %(levelname)s %(filename)s:" "%(lineno)s %(message)s ")) os.environ['LC_ALL'] = 'C' RealMain(sys.argv) except KeyboardInterrupt: print StatusUpdate("Interrupted.") sys.exit(1) if __name__ == "__main__": main()
bsd-3-clause
MattsFleaMarket/python-for-android
python3-alpha/python3-src/Lib/turtledemo/forest.py
65
2988
#!/usr/bin/env python3
"""      turtlegraphics-example-suite:

             tdemo_forest.py

Displays a 'forest' of 3 'breadth-first-trees'
similar to the one from example tree.
For further remarks see xtx_tree.py

This example is a 'breadth-first'-rewrite of
a Logo program written by Erich Neuwirth. See:
http://homepage.univie.ac.at/erich.neuwirth/
"""
from turtle import Turtle, colormode, tracer, mainloop
from random import randrange
# FIX: time.clock() was deprecated since 3.3 and removed in Python 3.8;
# perf_counter() is the documented replacement for wall-clock benchmarking.
from time import perf_counter


def symRandom(n):
    """Return a random integer drawn from the symmetric range [-n, n]."""
    return randrange(-n, n + 1)


def randomize(branchlist, angledist, sizedist):
    """Return a jittered copy of *branchlist*.

    Each (angle, sizefactor) pair gets its angle shifted by up to
    +/- angledist and its size factor scaled by 1.01**k for a random
    k in [-sizedist, sizedist].
    """
    return [(angle + symRandom(angledist),
             sizefactor * 1.01 ** symRandom(sizedist))
            for angle, sizefactor in branchlist]


def randomfd(t, distance, parts, angledist):
    """Move turtle *t* forward *distance* units in *parts* wiggly segments."""
    for i in range(parts):
        t.left(symRandom(angledist))
        t.forward((1.0 * distance) / parts)


def tree(tlist, size, level, widthfactor, branchlists, angledist=10, sizedist=5):
    """Breadth-first tree generator.

    Uses a list of turtles and a list of branch lists, one branch list
    per turtle.  Yields once per drawing step so several trees can be
    drawn concurrently.  (Original comment translated from German.)
    """
    if level > 0:
        lst = []
        brs = []
        for t, branchlist in list(zip(tlist, branchlists)):
            t.pensize(size * widthfactor)
            t.pencolor(255 - (180 - 11 * level + symRandom(15)),
                       180 - 11 * level + symRandom(15),
                       0)
            t.pendown()
            randomfd(t, size, level, angledist)
            yield 1
            for angle, sizefactor in branchlist:
                t.left(angle)
                lst.append(t.clone())
                brs.append(randomize(branchlist, angledist, sizedist))
                t.right(angle)
        # NOTE: `sizefactor` deliberately leaks out of the loop above, exactly
        # as in the original demo: the last branch's factor scales the next
        # recursion level.  Kept to preserve the original visual output.
        for x in tree(lst, size * sizefactor, level - 1, widthfactor, brs,
                      angledist, sizedist):
            yield None


def start(t, x, y):
    """Reset and hide turtle *t*, point it upward and move the pen to (x, y)."""
    colormode(255)
    t.reset()
    t.speed(0)
    t.hideturtle()
    t.left(90)
    t.penup()
    t.setpos(x, y)
    t.pendown()


def doit1(level, pen):
    """Return the generator for the first tree (three branches per node)."""
    pen.hideturtle()
    start(pen, 20, -208)
    t = tree([pen], 80, level, 0.1,
             [[(45, 0.69), (0, 0.65), (-45, 0.71)]])
    return t


def doit2(level, pen):
    """Return the generator for the second tree (two branches per node)."""
    pen.hideturtle()
    start(pen, -135, -130)
    t = tree([pen], 120, level, 0.1,
             [[(45, 0.69), (-45, 0.71)]])
    return t


def doit3(level, pen):
    """Return the generator for the third tree (three branches per node)."""
    pen.hideturtle()
    start(pen, 190, -90)
    t = tree([pen], 100, level, 0.1,
             [[(45, 0.7), (0, 0.72), (-45, 0.65)]])
    return t


# Here the 3 tree generators:
def main():
    """Draw the three trees concurrently and return a runtime message."""
    p = Turtle()
    p.ht()
    tracer(75, 0)
    u = doit1(6, Turtle(undobuffersize=1))
    s = doit2(7, Turtle(undobuffersize=1))
    t = doit3(5, Turtle(undobuffersize=1))
    a = perf_counter()
    while True:
        done = 0
        for b in u, s, t:
            try:
                next(b)
            # FIX: was a bare `except:` which also hid real errors; only
            # generator exhaustion is expected here.
            except StopIteration:
                done += 1
        if done == 3:
            break
    tracer(1, 10)
    b = perf_counter()
    return "runtime: %.2f sec." % (b - a)


if __name__ == '__main__':
    msg = main()
    print(msg)
    mainloop()
apache-2.0
Karosuo/Linux_tools
xls_handlers/xls_sum_venv/lib/python3.6/site-packages/xlsxwriter/chart_doughnut.py
1
2721
###############################################################################
#
# ChartDoughnut - A class for writing the Excel XLSX Doughnut charts.
#
# Copyright 2013-2019, John McNamara, jmcnamara@cpan.org
#

from warnings import warn
from . import chart_pie


class ChartDoughnut(chart_pie.ChartPie):
    """
    A class for writing the Excel XLSX Doughnut charts.

    """

    ###########################################################################
    #
    # Public API.
    #
    ###########################################################################

    def __init__(self, options=None):
        """
        Constructor.

        """
        super(ChartDoughnut, self).__init__()

        if options is None:
            options = {}

        # Doughnut-specific defaults.
        self.vary_data_color = 1
        self.rotation = 0
        self.hole_size = 50

    def set_hole_size(self, size):
        """
        Set the Doughnut chart hole size.

        Args:
            size: 10 <= size <= 90.

        Returns:
            Nothing.

        """
        if size is None:
            return

        # Sizes outside Excel's allowed band are ignored with a warning.
        if not 10 <= size <= 90:
            warn("Chart hole size %d outside Excel range: 10 <= size <= 90"
                 % size)
            return

        self.hole_size = int(size)

    ###########################################################################
    #
    # Private API.
    #
    ###########################################################################

    def _write_chart_type(self, args):
        # Chart-specific override of the virtual superclass method: delegate
        # to the doughnut chart writer.
        self._write_doughnut_chart(args)

    ###########################################################################
    #
    # XML methods.
    #
    ###########################################################################

    def _write_doughnut_chart(self, args):
        # Write the <c:doughnutChart> element.  Over-ridden to drop the
        # axis-id code: Doughnut charts don't require val and cat axes.
        self._xml_start_tag('c:doughnutChart')

        # Vary the slice colors.
        self._write_vary_colors()

        # One c:ser element per data series.
        for series_data in self.series:
            self._write_ser(series_data)

        # Rotation and hole size of the doughnut.
        self._write_first_slice_ang()
        self._write_c_hole_size()

        self._xml_end_tag('c:doughnutChart')

    def _write_c_hole_size(self):
        # Write the <c:holeSize> element.
        self._xml_empty_tag('c:holeSize', [('val', self.hole_size)])
gpl-3.0
ggiscan/Interactor
core/commons.py
4
1384
'''
Created on 4 Apr 2014

@author: geo
'''
from collections import namedtuple
import logging

# Lightweight (code, message) record returned by every status helper below.
error_type = namedtuple('error', ['code', 'message'])

# Application status codes, numbered 400-404 in HTTP-error style.
(ERR_INVALID_PRODUCT, WARN_ALREADY_REGISTERED, WARN_EXISTING_CATEGORY,
 ERR_USERNOTREGISTERED, ERR_RETRY_LATER) = range(400, 405)

ERROR_MESSAGES = {
    ERR_INVALID_PRODUCT: 'Invalid product for registration',
    WARN_ALREADY_REGISTERED: 'Already registered for this product',
    WARN_EXISTING_CATEGORY: 'The category is already created',
    ERR_USERNOTREGISTERED: 'The user is not registered for product {}',
    ERR_RETRY_LATER: 'Unable to process your request at this time. Please try again later',
}


def user_not_registered():
    """Return the canonical 'user not registered' error tuple (code 403)."""
    return error(ERR_USERNOTREGISTERED)


def retry_later():
    """Return the canonical 'retry later' error tuple (code 404)."""
    return error(ERR_RETRY_LATER)


def success():
    """Return the success tuple (code 0, message "Success")."""
    return error_type(0, "Success")


def error(error_code):
    """Build an error tuple for *error_code* from ERROR_MESSAGES.

    Raises KeyError for codes not present in ERROR_MESSAGES.
    """
    return error_type(error_code, ERROR_MESSAGES[error_code])


def feedback(message):
    """Print *message* as placeholder user feedback and return it unchanged."""
    # FIX: was a Python-2-only `print` statement; the parenthesized
    # single-argument form behaves identically on Python 2 and also runs
    # on Python 3.
    print("Feedback to be implemented: %s" % message)
    return message


class Context:
    """Holds per-request state; currently only the session object."""
    def __init__(self, session):
        self.session = session


def init_logging():
    """Configure root logging to append DEBUG+ records to global.log."""
    logging.basicConfig(filename='global.log',
                        format='%(asctime)s %(levelname)s %(message)s:',
                        level=logging.DEBUG)
gpl-2.0
benhc123/cjdns
node_build/dependencies/libuv/build/gyp/test/mac/gyptest-postbuild-fail.py
165
2190
#!/usr/bin/env python # Copyright (c) 2011 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Verifies that a failing postbuild step lets the build fail. """ import TestGyp import sys if sys.platform == 'darwin': # set |match| to ignore build stderr output. test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'], match = lambda a, b: True) test.run_gyp('test.gyp', chdir='postbuild-fail') build_error_code = { 'xcode': [1, 65], # 1 for xcode 3, 65 for xcode 4 (see `man sysexits`) 'make': 2, 'ninja': 1, }[test.format] # If a postbuild fails, all postbuilds should be re-run on the next build. # In Xcode 3, even if the first postbuild fails the other postbuilds were # still executed. In Xcode 4, postbuilds are stopped after the first # failing postbuild. This test checks for the Xcode 4 behavior. # Ignore this test on Xcode 3. import subprocess job = subprocess.Popen(['xcodebuild', '-version'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT) out, err = job.communicate() if job.returncode != 0: print out raise Exception('Error %d running xcodebuild' % job.returncode) if out.startswith('Xcode 3.'): test.pass_test() # Non-bundles test.build('test.gyp', 'nonbundle', chdir='postbuild-fail', status=build_error_code) test.built_file_must_not_exist('static_touch', chdir='postbuild-fail') # Check for non-up-to-date-ness by checking if building again produces an # error. test.build('test.gyp', 'nonbundle', chdir='postbuild-fail', status=build_error_code) # Bundles test.build('test.gyp', 'bundle', chdir='postbuild-fail', status=build_error_code) test.built_file_must_not_exist('dynamic_touch', chdir='postbuild-fail') # Check for non-up-to-date-ness by checking if building again produces an # error. test.build('test.gyp', 'bundle', chdir='postbuild-fail', status=build_error_code) test.pass_test()
gpl-3.0
TheTincho/nemu
src/nemu/__init__.py
1
2303
# vim:ts=4:sw=4:et:ai:sts=4
# -*- coding: utf-8 -*-

# Copyright 2010, 2011 INRIA
# Copyright 2011 Martín Ferrari <martin.ferrari@gmail.com>
#
# This file is part of Nemu.
#
# Nemu is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License version 2, as published by the Free
# Software Foundation.
#
# Nemu is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# Nemu.  If not, see <http://www.gnu.org/licenses/>.

"""Nemu package.

Nemu (Netwok EMUlator) is a small Python library to create emulated networks
and run and test programs in them.
"""

# pylint: disable=W0401,R0903

import os, pwd
from nemu.node import *
from nemu.interface import *

class _Config(object):
    """Global configuration singleton for Nemu."""
    def __init__(self):
        # Fall back to the conventional "nobody" UID if the name is missing.
        self._run_as = 65534
        try:
            pwd.getpwnam('nobody')
            self._run_as = 'nobody'
        # FIX: was a bare `except:`; pwd.getpwnam raises KeyError when the
        # user does not exist, and a bare clause would also swallow
        # KeyboardInterrupt/SystemExit.
        except KeyError:
            pass # User not found.

    def _set_run_as(self, user):
        """Setter for `run_as'.

        Accepts a numeric UID or a user name; rejects unknown users and
        root (raises AttributeError in both cases).
        """
        if str(user).isdigit():
            uid = int(user)
            try:
                # Existence check only; pwd.getpwuid raises KeyError if absent.
                pwd.getpwuid(uid)
            # FIX: narrowed from a bare `except:` (see __init__); also
            # dropped the unused `_user` local.
            except KeyError:
                raise AttributeError("UID %d does not exist" % int(user))
            run_as = int(user)
        else:
            try:
                uid = pwd.getpwnam(str(user))[2]
            except KeyError:
                raise AttributeError("User %s does not exist" % str(user))
            run_as = str(user)
        if uid == 0:
            raise AttributeError("Cannot run as root by default")
        self._run_as = run_as
        return run_as

    def _get_run_as(self):
        # FIX: docstring previously (incorrectly) said "Setter".
        """Getter for `run_as'."""
        return self._run_as

    run_as = property(_get_run_as, _set_run_as, None,
            "Default user to run applications as")

config = _Config() # pylint: disable=C0103

# FIXME: set atfork hooks
# http://code.google.com/p/python-atfork/source/browse/atfork/__init__.py
#def set_cleanup_hooks(on_exit = False, on_signals = []):
#    pass
gpl-2.0
deKupini/erp
addons/mass_mailing/models/mail_mail.py
7
5375
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2013-Today OpenERP SA (<http://www.openerp.com>)
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>
#
##############################################################################

import urllib
import urlparse

from openerp import tools
from openerp import SUPERUSER_ID
from openerp.osv import osv, fields


class MailMail(osv.Model):
    """Add the mass mailing campaign data to mail"""
    _name = 'mail.mail'
    _inherit = ['mail.mail']

    _columns = {
        # Link back to the mass mailing this mail belongs to.
        'mailing_id': fields.many2one('mail.mass_mailing', 'Mass Mailing'),
        # Per-mail statistics records (opens, bounces, ...).
        'statistics_ids': fields.one2many(
            'mail.mail.statistics', 'mail_mail_id',
            string='Statistics',
        ),
    }

    def create(self, cr, uid, values, context=None):
        """ Override mail_mail creation to create an entry in mail.mail.statistics """
        # TDE note: should be after 'all values computed', to have values (FIXME after merging other branch holding create refactoring)
        mail_id = super(MailMail, self).create(cr, uid, values, context=context)
        if values.get('statistics_ids'):
            # Re-browse as superuser so statistics are updated regardless of
            # the creating user's access rights.
            mail = self.browse(cr, SUPERUSER_ID, mail_id, context=context)
            for stat in mail.statistics_ids:
                self.pool['mail.mail.statistics'].write(cr, uid, [stat.id], {'message_id': mail.message_id, 'state': 'outgoing'}, context=context)
        return mail_id

    def _get_tracking_url(self, cr, uid, mail, partner=None, context=None):
        """Return an <img> HTML snippet pointing at the open-tracking pixel
        for *mail* (a 1x1 blank.gif served by the mail/track controller)."""
        base_url = self.pool.get('ir.config_parameter').get_param(cr, uid, 'web.base.url')
        track_url = urlparse.urljoin(
            base_url, 'mail/track/%(mail_id)s/blank.gif?%(params)s' % {
                'mail_id': mail.id,
                'params': urllib.urlencode({'db': cr.dbname})
            }
        )
        return '<img src="%s" alt=""/>' % track_url

    def _get_unsubscribe_url(self, cr, uid, mail, email_to, context=None):
        """Build the absolute unsubscribe URL for *email_to* on the mass
        mailing that *mail* belongs to."""
        base_url = self.pool.get('ir.config_parameter').get_param(cr, uid, 'web.base.url')
        url = urlparse.urljoin(
            base_url, 'mail/mailing/%(mailing_id)s/unsubscribe?%(params)s' % {
                'mailing_id': mail.mailing_id.id,
                'params': urllib.urlencode({'db': cr.dbname, 'res_id': mail.res_id, 'email': email_to})
            }
        )
        return url

    def send_get_mail_body(self, cr, uid, mail, partner=None, context=None):
        """ Override to add the tracking URL to the body. """
        body = super(MailMail, self).send_get_mail_body(cr, uid, mail, partner=partner, context=context)
        # prepend <base> tag for images using absolute urls
        domain = self.pool.get("ir.config_parameter").get_param(cr, uid, "web.base.url", context=context)
        base = "<base href='%s'>" % domain
        body = tools.append_content_to_html(base, body, plaintext=False, container_tag='div')
        # generate tracking URL
        # Only mails tracked by a statistics record get the pixel appended.
        if mail.statistics_ids:
            tracking_url = self._get_tracking_url(cr, uid, mail, partner, context=context)
            if tracking_url:
                body = tools.append_content_to_html(body, tracking_url, plaintext=False, container_tag='div')
        return body

    def send_get_email_dict(self, cr, uid, mail, partner=None, context=None):
        """Override: substitute the '/unsubscribe_from_list' placeholder link
        in the outgoing body with a personalized unsubscribe URL."""
        res = super(MailMail, self).send_get_email_dict(cr, uid, mail, partner, context=context)
        base_url = self.pool.get('ir.config_parameter').get_param(cr, uid, 'web.base.url')
        if mail.mailing_id and res.get('body') and res.get('email_to'):
            # Use only the first recipient address for personalization.
            emails = tools.email_split(res.get('email_to')[0])
            email_to = emails and emails[0] or False
            unsubscribe_url = self._get_unsubscribe_url(cr, uid, mail, email_to, context=context)
            link_to_replace = base_url + '/unsubscribe_from_list'
            if link_to_replace in res['body']:
                res['body'] = res['body'].replace(link_to_replace, unsubscribe_url if unsubscribe_url else '#')
        return res

    def _postprocess_sent_message(self, cr, uid, mail, context=None, mail_sent=True):
        """Override: stamp the related statistics records with a 'sent' or
        'exception' timestamp depending on the delivery outcome."""
        if mail_sent is True and mail.statistics_ids:
            self.pool['mail.mail.statistics'].write(cr, uid, [s.id for s in mail.statistics_ids], {'sent': fields.datetime.now()}, context=context)
        elif mail_sent is False and mail.statistics_ids:
            self.pool['mail.mail.statistics'].write(cr, uid, [s.id for s in mail.statistics_ids], {'exception': fields.datetime.now()}, context=context)
        return super(MailMail, self)._postprocess_sent_message(cr, uid, mail, context=context, mail_sent=mail_sent)
agpl-3.0
holyglenn/bosen
app/dml/script/run_local.py
13
2246
#!/usr/bin/env python """ This script starts a process locally, using <client-id> <hostfile> as inputs. """ import os from os.path import dirname from os.path import join import time import sys if len(sys.argv) != 3: print "usage: %s <client-id> <hostfile>" % sys.argv[0] sys.exit(1) # Please set the FULL app dir path here app_dir = "/home/user/bosen/app/dml" client_id = sys.argv[1] hostfile = sys.argv[2] proj_dir = dirname(dirname(app_dir)) params = { "staleness": 10 , "parafile": join(app_dir, "datasets/dml_para.txt") #, "parafile": "hdfs://hdfs-domain/user/bosen/dataset/dml/datasets/dml_para.txt" , "feature_file": join(app_dir, "datasets/mnist_petuum/minist_reformatted.txt") #, "feature_file": "hdfs://hdfs-domain/user/bosen/dataset/dml/datasets/mnist_petuum/minist_reformatted.txt" , "simi_pairs_file": join(app_dir, "datasets/mnist_petuum/mnist_simi_pairs.txt") #, "simi_pairs_file": "hdfs://hdfs-domain/user/bosen/dataset/dml/datasets/mnist_petuum/mnist_simi_pairs.txt" , "diff_pairs_file": join(app_dir, "datasets/mnist_petuum/mnist_diff_pairs.txt") #, "diff_pairs_file": "hdfs://hdfs-domain/user/bosen/dataset/dml/datasets/mnist_petuum/mnist_diff_pairs.txt" , "model_weight_file": join(app_dir, "datasets/dismat.txt") #, "model_weight_file": "hdfs://hdfs-domain/user/bosen/dataset/dml/datasets/dismat.txt" } petuum_params = { "hostfile": hostfile, "num_worker_threads": 4 } prog_name = "DML" prog_path = join(app_dir, "bin", prog_name) hadoop_path = os.popen('hadoop classpath --glob').read() env_params = ( "GLOG_logtostderr=true " "GLOG_v=-1 " "GLOG_minloglevel=0 " ) # Get host IPs with open(hostfile, "r") as f: hostlines = f.read().splitlines() host_ips = [line.split()[1] for line in hostlines] petuum_params["num_clients"] = len(host_ips) cmd = "killall -q " + prog_name # os.system is synchronous call. 
os.system(cmd) print "Done killing" cmd = "export CLASSPATH=`hadoop classpath --glob`:$CLASSPATH; " cmd += env_params + prog_path petuum_params["client_id"] = client_id cmd += "".join([" --%s=%s" % (k,v) for k,v in petuum_params.items()]) cmd += "".join([" --%s=%s" % (k,v) for k,v in params.items()]) print cmd os.system(cmd)
bsd-3-clause
cathyyul/sumo-0.18
docs/tutorial/output_parsing/runner.py
4
1280
#!/usr/bin/env python """ @file runner.py @author Michael Behrisch @date 2012-12-09 @version $Id: runner.py 13146 2012-12-10 12:01:07Z behrisch $ This script is a test runner for the output_parsing tutorial. SUMO, Simulation of Urban MObility; see http://sumo.sourceforge.net/ Copyright (C) 2008-2012 DLR (http://www.dlr.de/) and contributors All rights reserved """ import os,subprocess,sys,time import shutil sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..', '..', '..', "tools")) sys.path.append(os.path.join(os.environ.get("SUMO_HOME", os.path.join(os.path.dirname(__file__), "..", "..", "..")), "tools")) from sumolib import checkBinary netconvertBinary = checkBinary('netconvert') sumoBinary = checkBinary('sumo') # build/check network retcode = subprocess.call([netconvertBinary, "-c", "data/circular.netccfg"], stdout=sys.stdout, stderr=sys.stderr) try: shutil.copy("data/circular.net.xml", "net.net.xml") except: print "Missing 'circular.net.xml'" print ">> Netbuilding closed with status %s" % retcode sys.stdout.flush() # run simulation retcode = subprocess.call([sumoBinary, "-c", "data/output_file.sumocfg","--no-step-log"], stdout=sys.stdout, stderr=sys.stderr) print ">> Simulation closed with status %s" % retcode sys.stdout.flush()
gpl-3.0
JonZeolla/metron
metron-deployment/packaging/ambari/metron-mpack/src/main/resources/common-services/METRON/CURRENT/package/scripts/profiler_master.py
9
4117
""" Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import metron_security import metron_service from metron_security import storm_security_setup from profiler_commands import ProfilerCommands from resource_management.core.exceptions import ComponentIsNotRunning from resource_management.core.logger import Logger from resource_management.core.resources.system import File from resource_management.core.source import Template from resource_management.libraries.functions.format import format from resource_management.libraries.script import Script class Profiler(Script): __configured = False def install(self, env): from params import params env.set_params(params) self.install_packages(env) def configure(self, env, upgrade_type=None, config_dir=None): from params import params env.set_params(params) Logger.info("Running profiler configure") File(format("{metron_config_path}/profiler.properties"), content=Template("profiler.properties.j2"), owner=params.metron_user, group=params.metron_group ) if not metron_service.is_zk_configured(params): metron_service.init_zk_config(params) metron_service.set_zk_configured(params) metron_service.refresh_configs(params) commands = ProfilerCommands(params) if not commands.is_hbase_configured(): commands.create_hbase_tables() if 
params.security_enabled and not commands.is_hbase_acl_configured(): commands.set_hbase_acls() if params.security_enabled and not commands.is_acl_configured(): commands.init_kafka_acls() commands.set_acl_configured() Logger.info("Calling security setup") storm_security_setup(params) if not commands.is_configured(): commands.set_configured() def start(self, env, upgrade_type=None): from params import params env.set_params(params) self.configure(env) commands = ProfilerCommands(params) if params.security_enabled: metron_security.kinit(params.kinit_path_local, params.metron_keytab_path, params.metron_principal_name, execute_user=params.metron_user) if params.security_enabled and not commands.is_hbase_acl_configured(): commands.set_hbase_acls() if params.security_enabled and not commands.is_acl_configured(): commands.init_kafka_acls() commands.set_acl_configured() commands.start_profiler_topology(env) def stop(self, env, upgrade_type=None): from params import params env.set_params(params) commands = ProfilerCommands(params) commands.stop_profiler_topology(env) def status(self, env): from params import status_params env.set_params(status_params) commands = ProfilerCommands(status_params) if not commands.is_topology_active(env): raise ComponentIsNotRunning() def restart(self, env): from params import params env.set_params(params) self.configure(env) commands = ProfilerCommands(params) commands.restart_profiler_topology(env) if __name__ == "__main__": Profiler().execute()
apache-2.0
boundarydevices/android_external_chromium_org
build/android/pylib/forwarder.py
8
13644
# Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # pylint: disable=W0212 import fcntl import logging import os import psutil from pylib import cmd_helper from pylib import constants from pylib import valgrind_tools # TODO(jbudorick) Remove once telemetry gets switched over. import pylib.android_commands import pylib.device.device_utils def _GetProcessStartTime(pid): return psutil.Process(pid).create_time class _FileLock(object): """With statement-aware implementation of a file lock. File locks are needed for cross-process synchronization when the multiprocessing Python module is used. """ def __init__(self, path): self._fd = -1 self._path = path def __enter__(self): self._fd = os.open(self._path, os.O_RDONLY | os.O_CREAT) if self._fd < 0: raise Exception('Could not open file %s for reading' % self._path) fcntl.flock(self._fd, fcntl.LOCK_EX) def __exit__(self, _exception_type, _exception_value, traceback): fcntl.flock(self._fd, fcntl.LOCK_UN) os.close(self._fd) class Forwarder(object): """Thread-safe class to manage port forwards from the device to the host.""" _DEVICE_FORWARDER_FOLDER = (constants.TEST_EXECUTABLE_DIR + '/forwarder/') _DEVICE_FORWARDER_PATH = (constants.TEST_EXECUTABLE_DIR + '/forwarder/device_forwarder') _LOCK_PATH = '/tmp/chrome.forwarder.lock' _MULTIPROCESSING_ENV_VAR = 'CHROME_FORWARDER_USE_MULTIPROCESSING' # Defined in host_forwarder_main.cc _HOST_FORWARDER_LOG = '/tmp/host_forwarder_log' _instance = None @staticmethod def UseMultiprocessing(): """Tells the forwarder that multiprocessing is used.""" os.environ[Forwarder._MULTIPROCESSING_ENV_VAR] = '1' @staticmethod def Map(port_pairs, device, tool=None): """Runs the forwarder. Args: port_pairs: A list of tuples (device_port, host_port) to forward. Note that you can specify 0 as a device_port, in which case a port will by dynamically assigned on the device. 
You can get the number of the assigned port using the DevicePortForHostPort method. device: A DeviceUtils instance. tool: Tool class to use to get wrapper, if necessary, for executing the forwarder (see valgrind_tools.py). Raises: Exception on failure to forward the port. """ # TODO(jbudorick) Remove once telemetry gets switched over. if isinstance(device, pylib.android_commands.AndroidCommands): device = pylib.device.device_utils.DeviceUtils(device) if not tool: tool = valgrind_tools.CreateTool(None, device) with _FileLock(Forwarder._LOCK_PATH): instance = Forwarder._GetInstanceLocked(tool) instance._InitDeviceLocked(device, tool) device_serial = device.old_interface.Adb().GetSerialNumber() redirection_commands = [ ['--serial-id=' + device_serial, '--map', str(device), str(host)] for device, host in port_pairs] logging.info('Forwarding using commands: %s', redirection_commands) for redirection_command in redirection_commands: try: (exit_code, output) = cmd_helper.GetCmdStatusAndOutput( [instance._host_forwarder_path] + redirection_command) except OSError as e: if e.errno == 2: raise Exception('Unable to start host forwarder. Make sure you have' ' built host_forwarder.') else: raise if exit_code != 0: raise Exception('%s exited with %d:\n%s' % ( instance._host_forwarder_path, exit_code, '\n'.join(output))) tokens = output.split(':') if len(tokens) != 2: raise Exception(('Unexpected host forwarder output "%s", ' + 'expected "device_port:host_port"') % output) device_port = int(tokens[0]) host_port = int(tokens[1]) serial_with_port = (device_serial, device_port) instance._device_to_host_port_map[serial_with_port] = host_port instance._host_to_device_port_map[host_port] = serial_with_port logging.info('Forwarding device port: %d to host port: %d.', device_port, host_port) @staticmethod def UnmapDevicePort(device_port, device): """Unmaps a previously forwarded device port. Args: device: A DeviceUtils instance. device_port: A previously forwarded port (through Map()). 
""" # TODO(jbudorick) Remove once telemetry gets switched over. if isinstance(device, pylib.android_commands.AndroidCommands): device = pylib.device.device_utils.DeviceUtils(device) with _FileLock(Forwarder._LOCK_PATH): Forwarder._UnmapDevicePortLocked(device_port, device) @staticmethod def UnmapAllDevicePorts(device): """Unmaps all the previously forwarded ports for the provided device. Args: device: A DeviceUtils instance. port_pairs: A list of tuples (device_port, host_port) to unmap. """ # TODO(jbudorick) Remove once telemetry gets switched over. if isinstance(device, pylib.android_commands.AndroidCommands): device = pylib.device.device_utils.DeviceUtils(device) with _FileLock(Forwarder._LOCK_PATH): if not Forwarder._instance: return adb_serial = device.old_interface.Adb().GetSerialNumber() if adb_serial not in Forwarder._instance._initialized_devices: return port_map = Forwarder._GetInstanceLocked( None)._device_to_host_port_map for (device_serial, device_port) in port_map.keys(): if adb_serial == device_serial: Forwarder._UnmapDevicePortLocked(device_port, device) # There are no more ports mapped, kill the device_forwarder. tool = valgrind_tools.CreateTool(None, device) Forwarder._KillDeviceLocked(device, tool) Forwarder._instance._initialized_devices.remove(adb_serial) @staticmethod def DevicePortForHostPort(host_port): """Returns the device port that corresponds to a given host port.""" with _FileLock(Forwarder._LOCK_PATH): (_device_serial, device_port) = Forwarder._GetInstanceLocked( None)._host_to_device_port_map.get(host_port) return device_port @staticmethod def RemoveHostLog(): if os.path.exists(Forwarder._HOST_FORWARDER_LOG): os.unlink(Forwarder._HOST_FORWARDER_LOG) @staticmethod def GetHostLog(): if not os.path.exists(Forwarder._HOST_FORWARDER_LOG): return '' with file(Forwarder._HOST_FORWARDER_LOG, 'r') as f: return f.read() @staticmethod def _GetInstanceLocked(tool): """Returns the singleton instance. 
Note that the global lock must be acquired before calling this method. Args: tool: Tool class to use to get wrapper, if necessary, for executing the forwarder (see valgrind_tools.py). """ if not Forwarder._instance: Forwarder._instance = Forwarder(tool) return Forwarder._instance def __init__(self, tool): """Constructs a new instance of Forwarder. Note that Forwarder is a singleton therefore this constructor should be called only once. Args: tool: Tool class to use to get wrapper, if necessary, for executing the forwarder (see valgrind_tools.py). """ assert not Forwarder._instance self._tool = tool self._initialized_devices = set() self._device_to_host_port_map = dict() self._host_to_device_port_map = dict() self._host_forwarder_path = os.path.join( constants.GetOutDirectory(), 'host_forwarder') assert os.path.exists(self._host_forwarder_path), 'Please build forwarder2' self._device_forwarder_path_on_host = os.path.join( constants.GetOutDirectory(), 'forwarder_dist') self._InitHostLocked() @staticmethod def _UnmapDevicePortLocked(device_port, device): """Internal method used by UnmapDevicePort(). Note that the global lock must be acquired before calling this method. 
""" instance = Forwarder._GetInstanceLocked(None) serial = device.old_interface.Adb().GetSerialNumber() serial_with_port = (serial, device_port) if not serial_with_port in instance._device_to_host_port_map: logging.error('Trying to unmap non-forwarded port %d' % device_port) return redirection_command = ['--serial-id=' + serial, '--unmap', str(device_port)] (exit_code, output) = cmd_helper.GetCmdStatusAndOutput( [instance._host_forwarder_path] + redirection_command) if exit_code != 0: logging.error('%s exited with %d:\n%s' % ( instance._host_forwarder_path, exit_code, '\n'.join(output))) host_port = instance._device_to_host_port_map[serial_with_port] del instance._device_to_host_port_map[serial_with_port] del instance._host_to_device_port_map[host_port] @staticmethod def _GetPidForLock(): """Returns the PID used for host_forwarder initialization. In case multi-process sharding is used, the PID of the "sharder" is used. The "sharder" is the initial process that forks that is the parent process. By default, multi-processing is not used. In that case the PID of the current process is returned. """ use_multiprocessing = Forwarder._MULTIPROCESSING_ENV_VAR in os.environ return os.getppid() if use_multiprocessing else os.getpid() def _InitHostLocked(self): """Initializes the host forwarder daemon. Note that the global lock must be acquired before calling this method. This method kills any existing host_forwarder process that could be stale. """ # See if the host_forwarder daemon was already initialized by a concurrent # process or thread (in case multi-process sharding is not used). 
pid_for_lock = Forwarder._GetPidForLock() fd = os.open(Forwarder._LOCK_PATH, os.O_RDWR | os.O_CREAT) with os.fdopen(fd, 'r+') as pid_file: pid_with_start_time = pid_file.readline() if pid_with_start_time: (pid, process_start_time) = pid_with_start_time.split(':') if pid == str(pid_for_lock): if process_start_time == str(_GetProcessStartTime(pid_for_lock)): return self._KillHostLocked() pid_file.seek(0) pid_file.write( '%s:%s' % (pid_for_lock, str(_GetProcessStartTime(pid_for_lock)))) def _InitDeviceLocked(self, device, tool): """Initializes the device_forwarder daemon for a specific device (once). Note that the global lock must be acquired before calling this method. This method kills any existing device_forwarder daemon on the device that could be stale, pushes the latest version of the daemon (to the device) and starts it. Args: device: A DeviceUtils instance. tool: Tool class to use to get wrapper, if necessary, for executing the forwarder (see valgrind_tools.py). """ device_serial = device.old_interface.Adb().GetSerialNumber() if device_serial in self._initialized_devices: return Forwarder._KillDeviceLocked(device, tool) device.old_interface.PushIfNeeded( self._device_forwarder_path_on_host, Forwarder._DEVICE_FORWARDER_FOLDER) cmd = '%s %s' % (tool.GetUtilWrapper(), Forwarder._DEVICE_FORWARDER_PATH) (exit_code, output) = device.old_interface.GetAndroidToolStatusAndOutput( cmd, lib_path=Forwarder._DEVICE_FORWARDER_FOLDER) if exit_code != 0: raise Exception( 'Failed to start device forwarder:\n%s' % '\n'.join(output)) self._initialized_devices.add(device_serial) def _KillHostLocked(self): """Kills the forwarder process running on the host. Note that the global lock must be acquired before calling this method. 
""" logging.info('Killing host_forwarder.') (exit_code, output) = cmd_helper.GetCmdStatusAndOutput( [self._host_forwarder_path, '--kill-server']) if exit_code != 0: (exit_code, output) = cmd_helper.GetCmdStatusAndOutput( ['pkill', '-9', 'host_forwarder']) if exit_code != 0: raise Exception('%s exited with %d:\n%s' % ( self._host_forwarder_path, exit_code, '\n'.join(output))) @staticmethod def _KillDeviceLocked(device, tool): """Kills the forwarder process running on the device. Note that the global lock must be acquired before calling this method. Args: device: Instance of DeviceUtils for talking to the device. tool: Wrapper tool (e.g. valgrind) that can be used to execute the device forwarder (see valgrind_tools.py). """ logging.info('Killing device_forwarder.') if not device.old_interface.FileExistsOnDevice( Forwarder._DEVICE_FORWARDER_PATH): return cmd = '%s %s --kill-server' % (tool.GetUtilWrapper(), Forwarder._DEVICE_FORWARDER_PATH) device.old_interface.GetAndroidToolStatusAndOutput( cmd, lib_path=Forwarder._DEVICE_FORWARDER_FOLDER) # TODO(pliard): Remove the following call to KillAllBlocking() when we are # sure that the old version of device_forwarder (not supporting # 'kill-server') is not running on the bots anymore. timeout_sec = 5 processes_killed = device.old_interface.KillAllBlocking( 'device_forwarder', timeout_sec) if not processes_killed: pids = device.old_interface.ExtractPid('device_forwarder') if pids: raise Exception('Timed out while killing device_forwarder')
bsd-3-clause
andrewcmyers/tensorflow
tensorflow/python/training/session_run_hook.py
23
10423
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """A SessionRunHook extends `session.run()` calls for the `MonitoredSession`. SessionRunHooks are useful to track training, report progress, request early stopping and more. SessionRunHooks use the observer pattern and notify at the following points: - when a session starts being used - before a call to the `session.run()` - after a call to the `session.run()` - when the session closed A SessionRunHook encapsulates a piece of reusable/composable computation that can piggyback a call to `MonitoredSession.run()`. A hook can add any ops-or-tensor/feeds to the run call, and when the run call finishes with success gets the outputs it requested. Hooks are allowed to add ops to the graph in `hook.begin()`. The graph is finalized after the `begin()` method is called. There are a few pre-defined monitors: - StopAtStepHook: Request stop based on global_step - CheckpointSaverHook: saves checkpoint - LoggingTensorHook: outputs one or more tensor values to log - NanTensorHook: Request stop if given `Tensor` contains Nans. - SummarySaverHook: saves summaries to a summary writer For more specific needs, you can create custom hooks: class ExampleHook(SessionRunHook): def begin(self): # You can add ops to the graph here. print('Starting the session.') self.your_tensor = ... 
def after_create_session(self, session, coord): # When this is called, the graph is finalized and # ops can no longer be added to the graph. print('Session created.') def before_run(self, run_context): print('Before calling session.run().') return SessionRunArgs(self.your_tensor) def after_run(self, run_context, run_values): print('Done running one step. The value of my tensor: %s', run_values.results) if you-need-to-stop-loop: run_context.request_stop() def end(self, session): print('Done with the session.') To understand how hooks interact with calls to `MonitoredSession.run()`, look at following code: with MonitoredTrainingSession(hooks=your_hooks, ...) as sess: while not sess.should_stop(): sess.run(your_fetches) Above user code leads to following execution: call hooks.begin() sess = tf.Session() call hooks.after_create_session() while not stop is requested: call hooks.before_run() try: results = sess.run(merged_fetches, feed_dict=merged_feeds) except (errors.OutOfRangeError, StopIteration): break call hooks.after_run() call hooks.end() sess.close() Note that if sess.run() raises OutOfRangeError or StopIteration then hooks.after_run() will not be called but hooks.end() will still be called. If sess.run() raises any other exception then neither hooks.after_run() nor hooks.end() will be called. @@SessionRunHook @@SessionRunArgs @@SessionRunContext @@SessionRunValues """ from __future__ import absolute_import from __future__ import division from __future__ import print_function import collections class SessionRunHook(object): """Hook to extend calls to MonitoredSession.run().""" def begin(self): """Called once before using the session. When called, the default graph is the one that will be launched in the session. The hook can modify the graph by adding new operations to it. After the `begin()` call the graph will be finalized and the other callbacks can not modify the graph anymore. Second call of `begin()` on the same graph, should not change the graph. 
""" pass def after_create_session(self, session, coord): # pylint: disable=unused-argument """Called when new TensorFlow session is created. This is called to signal the hooks that a new session has been created. This has two essential differences with the situation in which `begin` is called: * When this is called, the graph is finalized and ops can no longer be added to the graph. * This method will also be called as a result of recovering a wrapped session, not only at the beginning of the overall session. Args: session: A TensorFlow Session that has been created. coord: A Coordinator object which keeps track of all threads. """ pass def before_run(self, run_context): # pylint: disable=unused-argument """Called before each call to run(). You can return from this call a `SessionRunArgs` object indicating ops or tensors to add to the upcoming `run()` call. These ops/tensors will be run together with the ops/tensors originally passed to the original run() call. The run args you return can also contain feeds to be added to the run() call. The `run_context` argument is a `SessionRunContext` that provides information about the upcoming `run()` call: the originally requested op/tensors, the TensorFlow Session. At this point graph is finalized and you can not add ops. Args: run_context: A `SessionRunContext` object. Returns: None or a `SessionRunArgs` object. """ return None def after_run(self, run_context, # pylint: disable=unused-argument run_values): # pylint: disable=unused-argument """Called after each call to run(). The `run_values` argument contains results of requested ops/tensors by `before_run()`. The `run_context` argument is the same one send to `before_run` call. `run_context.request_stop()` can be called to stop the iteration. If `session.run()` raises any exceptions then `after_run()` is not called. Args: run_context: A `SessionRunContext` object. run_values: A SessionRunValues object. 
""" pass def end(self, session): # pylint: disable=unused-argument """Called at the end of session. The `session` argument can be used in case the hook wants to run final ops, such as saving a last checkpoint. If `session.run()` raises exception other than OutOfRangeError or StopIteration then `end()` is not called. Note the difference between `end()` and `after_run()` behavior when `session.run()` raises OutOfRangeError or StopIteration. In that case `end()` is called but `after_run()` is not called. Args: session: A TensorFlow Session that will be soon closed. """ pass class SessionRunArgs( collections.namedtuple("SessionRunArgs", ["fetches", "feed_dict", "options"])): """Represents arguments to be added to a `Session.run()` call. Args: fetches: Exactly like the 'fetches' argument to Session.Run(). Can be a single tensor or op, a list of 'fetches' or a dictionary of fetches. For example: fetches = global_step_tensor fetches = [train_op, summary_op, global_step_tensor] fetches = {'step': global_step_tensor, 'summ': summary_op} Note that this can recurse as expected: fetches = {'step': global_step_tensor, 'ops': [train_op, check_nan_op]} feed_dict: Exactly like the `feed_dict` argument to `Session.Run()` options: Exactly like the `options` argument to `Session.run()`, i.e., a config_pb2.RunOptions proto. """ def __new__(cls, fetches, feed_dict=None, options=None): return super(SessionRunArgs, cls).__new__(cls, fetches, feed_dict, options) class SessionRunContext(object): """Provides information about the `session.run()` call being made. Provides information about original request to `Session.Run()` function. SessionRunHook objects can stop the loop by calling `request_stop()` of `run_context`. In the future we may use this object to add more information about run without changing the Hook API. 
""" def __init__(self, original_args, session): """Initializes SessionRunContext.""" self._original_args = original_args self._session = session self._stop_requested = False @property def original_args(self): """A `SessionRunArgs` object holding the original arguments of `run()`. If user called `MonitoredSession.run(fetches=a, feed_dict=b)`, then this field is equal to SessionRunArgs(a, b). Returns: A `SessionRunArgs` object """ return self._original_args @property def session(self): """A TensorFlow session object which will execute the `run`.""" return self._session @property def stop_requested(self): """Returns whether a stop is requested or not. If true, `MonitoredSession` stops iterations. Returns: A `bool` """ return self._stop_requested def request_stop(self): """Sets stop requested field. Hooks can use this function to request stop of iterations. `MonitoredSession` checks whether this is called or not. """ self._stop_requested = True class SessionRunValues( collections.namedtuple("SessionRunValues", ["results", "options", "run_metadata"])): """Contains the results of `Session.run()`. In the future we may use this object to add more information about result of run without changing the Hook API. Args: results: The return values from `Session.run()` corresponding to the fetches attribute returned in the RunArgs. Note that this has the same shape as the RunArgs fetches. For example: fetches = global_step_tensor => results = nparray(int) fetches = [train_op, summary_op, global_step_tensor] => results = [None, nparray(string), nparray(int)] fetches = {'step': global_step_tensor, 'summ': summary_op} => results = {'step': nparray(int), 'summ': nparray(string)} options: `RunOptions` from the `Session.run()` call. run_metadata: `RunMetadata` from the `Session.run()` call. """
apache-2.0
kallewoof/bitcoin
test/functional/rpc_whitelist.py
40
5082
#!/usr/bin/env python3 # Copyright (c) 2017-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """ A test for RPC users with restricted permissions """ from test_framework.test_framework import BitcoinTestFramework import os from test_framework.util import ( get_datadir_path, assert_equal, str_to_b64str ) import http.client import urllib.parse def rpccall(node, user, method): url = urllib.parse.urlparse(node.url) headers = {"Authorization": "Basic " + str_to_b64str('{}:{}'.format(user[0], user[3]))} conn = http.client.HTTPConnection(url.hostname, url.port) conn.connect() conn.request('POST', '/', '{"method": "' + method + '"}', headers) resp = conn.getresponse() conn.close() return resp class RPCWhitelistTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 def setup_chain(self): super().setup_chain() # 0 => Username # 1 => Password (Hashed) # 2 => Permissions # 3 => Password Plaintext self.users = [ ["user1", "50358aa884c841648e0700b073c32b2e$b73e95fff0748cc0b517859d2ca47d9bac1aa78231f3e48fa9222b612bd2083e", "getbestblockhash,getblockcount,", "12345"], ["user2", "8650ba41296f62092377a38547f361de$4620db7ba063ef4e2f7249853e9f3c5c3592a9619a759e3e6f1c63f2e22f1d21", "getblockcount", "54321"] ] # For exceptions self.strange_users = [ # Test empty ["strangedude", "62d67dffec03836edd698314f1b2be62$c2fb4be29bb0e3646298661123cf2d8629640979cabc268ef05ea613ab54068d", ":", "s7R4nG3R7H1nGZ"], ["strangedude2", "575c012c7fe4b1e83b9d809412da3ef7$09f448d0acfc19924dd62ecb96004d3c2d4b91f471030dfe43c6ea64a8f658c1", "", "s7R4nG3R7H1nGZ"], # Test trailing comma ["strangedude3", "23189c561b5975a56f4cf94030495d61$3a2f6aac26351e2257428550a553c4c1979594e36675bbd3db692442387728c0", ":getblockcount,", "s7R4nG3R7H1nGZ"], # Test overwrite ["strangedude4", "990c895760a70df83949e8278665e19a$8f0906f20431ff24cb9e7f5b5041e4943bdf2a5c02a19ef4960dcf45e72cde1c", 
":getblockcount, getbestblockhash", "s7R4nG3R7H1nGZ"], ["strangedude4", "990c895760a70df83949e8278665e19a$8f0906f20431ff24cb9e7f5b5041e4943bdf2a5c02a19ef4960dcf45e72cde1c", ":getblockcount", "s7R4nG3R7H1nGZ"], # Testing the same permission twice ["strangedude5", "d12c6e962d47a454f962eb41225e6ec8$2dd39635b155536d3c1a2e95d05feff87d5ba55f2d5ff975e6e997a836b717c9", ":getblockcount,getblockcount", "s7R4nG3R7H1nGZ"] ] # These commands shouldn't be allowed for any user to test failures self.never_allowed = ["getnetworkinfo"] with open(os.path.join(get_datadir_path(self.options.tmpdir, 0), "bitcoin.conf"), 'a', encoding='utf8') as f: f.write("\nrpcwhitelistdefault=0\n") for user in self.users: f.write("rpcauth=" + user[0] + ":" + user[1] + "\n") f.write("rpcwhitelist=" + user[0] + ":" + user[2] + "\n") # Special cases for strangedude in self.strange_users: f.write("rpcauth=" + strangedude[0] + ":" + strangedude[1] + "\n") f.write("rpcwhitelist=" + strangedude[0] + strangedude[2] + "\n") def run_test(self): for user in self.users: permissions = user[2].replace(" ", "").split(",") # Pop all empty items i = 0 while i < len(permissions): if permissions[i] == '': permissions.pop(i) i += 1 for permission in permissions: self.log.info("[" + user[0] + "]: Testing a permitted permission (" + permission + ")") assert_equal(200, rpccall(self.nodes[0], user, permission).status) for permission in self.never_allowed: self.log.info("[" + user[0] + "]: Testing a non permitted permission (" + permission + ")") assert_equal(403, rpccall(self.nodes[0], user, permission).status) # Now test the strange users for permission in self.never_allowed: self.log.info("Strange test 1") assert_equal(403, rpccall(self.nodes[0], self.strange_users[0], permission).status) for permission in self.never_allowed: self.log.info("Strange test 2") assert_equal(403, rpccall(self.nodes[0], self.strange_users[1], permission).status) self.log.info("Strange test 3") assert_equal(200, rpccall(self.nodes[0], 
self.strange_users[2], "getblockcount").status) self.log.info("Strange test 4") assert_equal(403, rpccall(self.nodes[0], self.strange_users[3], "getbestblockhash").status) self.log.info("Strange test 5") assert_equal(200, rpccall(self.nodes[0], self.strange_users[4], "getblockcount").status) if __name__ == "__main__": RPCWhitelistTest().main()
mit
MostafaGazar/tensorflow
tensorflow/python/framework/test_util.py
23
21081
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== # pylint: disable=invalid-name """Test utils for tensorflow.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import contextlib import math import random import re import sys import threading import numpy as np import six from google.protobuf import text_format from tensorflow.core.framework import graph_pb2 from tensorflow.core.protobuf import config_pb2 from tensorflow.python import pywrap_tensorflow from tensorflow.python.client import session from tensorflow.python.framework import device as pydev from tensorflow.python.framework import errors from tensorflow.python.framework import ops from tensorflow.python.framework import random_seed from tensorflow.python.framework import versions from tensorflow.python.platform import googletest from tensorflow.python.platform import tf_logging as logging from tensorflow.python.util import compat from tensorflow.python.util.protobuf import compare def assert_ops_in_graph(expected_ops, graph): """Assert all expected operations are found. Args: expected_ops: `dict<string, string>` of op name to op type. graph: Graph to check. Returns: `dict<string, node>` of node name to node. Raises: ValueError: If the expected ops are not present in the graph. 
""" actual_ops = {} gd = graph.as_graph_def() for node in gd.node: if node.name in expected_ops: if expected_ops[node.name] != node.op: raise ValueError( "Expected op for node %s is different. %s vs %s" % ( node.name, expected_ops[node.name], node.op)) actual_ops[node.name] = node if set(expected_ops.keys()) != set(actual_ops.keys()): raise ValueError( "Not all expected ops are present. Expected %s, found %s" % ( expected_ops.keys(), actual_ops.keys())) return actual_ops def assert_equal_graph_def(actual, expected): """Asserts that two `GraphDef`s are (mostly) the same. Compares two `GraphDef` protos for equality, ignoring versions and ordering of nodes, attrs, and control inputs. Node names are used to match up nodes between the graphs, so the naming of nodes must be consistent. Args: actual: The `GraphDef` we have. expected: The `GraphDef` we expected. Raises: AssertionError: If the `GraphDef`s do not match. TypeError: If either argument is not a `GraphDef`. """ if not isinstance(actual, graph_pb2.GraphDef): raise TypeError("Expected tf.GraphDef for actual, got %s" % type(actual).__name__) if not isinstance(expected, graph_pb2.GraphDef): raise TypeError("Expected tf.GraphDef for expected, got %s" % type(expected).__name__) diff = pywrap_tensorflow.EqualGraphDefWrapper(actual.SerializeToString(), expected.SerializeToString()) if diff: raise AssertionError(compat.as_str(diff)) def IsGoogleCudaEnabled(): return pywrap_tensorflow.IsGoogleCudaEnabled() def CudaSupportsHalfMatMulAndConv(): return pywrap_tensorflow.CudaSupportsHalfMatMulAndConv() class TensorFlowTestCase(googletest.TestCase): """Base class for tests that need to test TensorFlow. 
""" def __init__(self, methodName="runTest"): super(TensorFlowTestCase, self).__init__(methodName) self._threads = [] self._tempdir = None self._cached_session = None def setUp(self): self._ClearCachedSession() random.seed(random_seed.DEFAULT_GRAPH_SEED) np.random.seed(random_seed.DEFAULT_GRAPH_SEED) ops.reset_default_graph() ops.get_default_graph().seed = random_seed.DEFAULT_GRAPH_SEED def tearDown(self): for thread in self._threads: self.assertFalse(thread.is_alive(), "A checkedThread did not terminate") self._ClearCachedSession() def _ClearCachedSession(self): if self._cached_session is not None: self._cached_session.close() self._cached_session = None def get_temp_dir(self): if not self._tempdir: self._tempdir = googletest.GetTempDir() return self._tempdir def _AssertProtoEquals(self, a, b): """Asserts that a and b are the same proto. Uses ProtoEq() first, as it returns correct results for floating point attributes, and then use assertProtoEqual() in case of failure as it provides good error messages. Args: a: a proto. b: another proto. """ if not compare.ProtoEq(a, b): compare.assertProtoEqual(self, a, b, normalize_numbers=True) def assertProtoEquals(self, expected_message_maybe_ascii, message): """Asserts that message is same as parsed expected_message_ascii. Creates another prototype of message, reads the ascii message into it and then compares them using self._AssertProtoEqual(). 
Args: expected_message_maybe_ascii: proto message in original or ascii form message: the message to validate """ if type(expected_message_maybe_ascii) == type(message): expected_message = expected_message_maybe_ascii self._AssertProtoEquals(expected_message, message) elif isinstance(expected_message_maybe_ascii, str): expected_message = type(message)() text_format.Merge(expected_message_maybe_ascii, expected_message) self._AssertProtoEquals(expected_message, message) else: assert False, ("Can't compare protos of type %s and %s" % (type(expected_message_maybe_ascii), type(message))) def assertProtoEqualsVersion( self, expected, actual, producer=versions.GRAPH_DEF_VERSION, min_consumer=versions.GRAPH_DEF_VERSION_MIN_CONSUMER): expected = "versions { producer: %d min_consumer: %d };\n%s" % ( producer, min_consumer, expected) self.assertProtoEquals(expected, actual) def assertStartsWith(self, actual, expected_start, msg=None): """Assert that actual.startswith(expected_start) is True. Args: actual: str expected_start: str msg: Optional message to report on failure. """ if not actual.startswith(expected_start): fail_msg = "%r does not start with %r" % (actual, expected_start) fail_msg += " : %r" % (msg) if msg else "" self.fail(fail_msg) # pylint: disable=g-doc-return-or-yield @contextlib.contextmanager def test_session(self, graph=None, config=None, use_gpu=False, force_gpu=False): """Returns a TensorFlow Session for use in executing tests. This method should be used for all functional tests. Use the `use_gpu` and `force_gpu` options to control where ops are run. If `force_gpu` is True, all ops are pinned to `/gpu:0`. Otherwise, if `use_gpu` is True, TensorFlow tries to run as many ops on the GPU as possible. If both `force_gpu and `use_gpu` are False, all ops are pinned to the CPU. 
Example: class MyOperatorTest(test_util.TensorFlowTestCase): def testMyOperator(self): with self.test_session(use_gpu=True): valid_input = [1.0, 2.0, 3.0, 4.0, 5.0] result = MyOperator(valid_input).eval() self.assertEqual(result, [1.0, 2.0, 3.0, 5.0, 8.0] invalid_input = [-1.0, 2.0, 7.0] with self.assertRaisesOpError("negative input not supported"): MyOperator(invalid_input).eval() Args: graph: Optional graph to use during the returned session. config: An optional config_pb2.ConfigProto to use to configure the session. use_gpu: If True, attempt to run as many ops as possible on GPU. force_gpu: If True, pin all ops to `/gpu:0`. Returns: A Session object that should be used as a context manager to surround the graph building and execution code in a test case. """ if self.id().endswith(".test_session"): self.skipTest("Not a test.") def prepare_config(config): if config is None: config = config_pb2.ConfigProto() config.allow_soft_placement = not force_gpu config.gpu_options.per_process_gpu_memory_fraction = 0.3 elif force_gpu and config.allow_soft_placement: config = config_pb2.ConfigProto().CopyFrom(config) config.allow_soft_placement = False # Don't perform optimizations for tests so we don't inadvertently run # gpu ops on cpu config.graph_options.optimizer_options.opt_level = -1 return config if graph is None: if self._cached_session is None: self._cached_session = session.Session(graph=None, config=prepare_config(config)) sess = self._cached_session with sess.graph.as_default(), sess.as_default(): if force_gpu: with sess.graph.device("/gpu:0"): yield sess elif use_gpu: yield sess else: with sess.graph.device("/cpu:0"): yield sess else: with session.Session(graph=graph, config=prepare_config(config)) as sess: if force_gpu: with sess.graph.device("/gpu:0"): yield sess elif use_gpu: yield sess else: with sess.graph.device("/cpu:0"): yield sess # pylint: enable=g-doc-return-or-yield class _CheckedThread(object): """A wrapper class for Thread that asserts successful 
completion. This class should be created using the TensorFlowTestCase.checkedThread() method. """ def __init__(self, testcase, target, args=None, kwargs=None): """Constructs a new instance of _CheckedThread. Args: testcase: The TensorFlowTestCase for which this thread is being created. target: A callable object representing the code to be executed in the thread. args: A tuple of positional arguments that will be passed to target. kwargs: A dictionary of keyword arguments that will be passed to target. """ self._testcase = testcase self._target = target self._args = () if args is None else args self._kwargs = {} if kwargs is None else kwargs self._thread = threading.Thread(target=self._protected_run) self._exception = None def _protected_run(self): """Target for the wrapper thread. Sets self._exception on failure.""" try: self._target(*self._args, **self._kwargs) except Exception as e: # pylint: disable=broad-except self._exception = e def start(self): """Starts the thread's activity. This must be called at most once per _CheckedThread object. It arranges for the object's target to be invoked in a separate thread of control. """ self._thread.start() def join(self): """Blocks until the thread terminates. Raises: self._testcase.failureException: If the thread terminates with due to an exception. """ self._thread.join() if self._exception is not None: self._testcase.fail( "Error in checkedThread: %s" % str(self._exception)) def is_alive(self): """Returns whether the thread is alive. This method returns True just before the run() method starts until just after the run() method terminates. Returns: True if the thread is alive, otherwise False. """ return self._thread.is_alive() def checkedThread(self, target, args=None, kwargs=None): """Returns a Thread wrapper that asserts 'target' completes successfully. 
This method should be used to create all threads in test cases, as otherwise there is a risk that a thread will silently fail, and/or assertions made in the thread will not be respected. Args: target: A callable object to be executed in the thread. args: The argument tuple for the target invocation. Defaults to (). kwargs: A dictionary of keyword arguments for the target invocation. Defaults to {}. Returns: A wrapper for threading.Thread that supports start() and join() methods. """ ret = TensorFlowTestCase._CheckedThread(self, target, args, kwargs) self._threads.append(ret) return ret # pylint: enable=invalid-name def assertNear(self, f1, f2, err, msg=None): """Asserts that two floats are near each other. Checks that |f1 - f2| < err and asserts a test failure if not. Args: f1: A float value. f2: A float value. err: A float value. msg: An optional string message to append to the failure message. """ self.assertTrue(math.fabs(f1 - f2) <= err, "%f != %f +/- %f%s" % ( f1, f2, err, " (%s)" % msg if msg is not None else "")) def assertArrayNear(self, farray1, farray2, err): """Asserts that two float arrays are near each other. Checks that for all elements of farray1 and farray2 |f1 - f2| < err. Asserts a test failure if not. Args: farray1: a list of float values. farray2: a list of float values. err: a float value. """ self.assertEqual(len(farray1), len(farray2)) for f1, f2 in zip(farray1, farray2): self.assertNear(float(f1), float(f2), err) def _NDArrayNear(self, ndarray1, ndarray2, err): return np.linalg.norm(ndarray1 - ndarray2) < err def assertNDArrayNear(self, ndarray1, ndarray2, err): """Asserts that two numpy arrays have near values. Args: ndarray1: a numpy ndarray. ndarray2: a numpy ndarray. err: a float. The maximum absolute difference allowed. 
""" self.assertTrue(self._NDArrayNear(ndarray1, ndarray2, err)) def _GetNdArray(self, a): if not isinstance(a, np.ndarray): a = np.array(a) return a def assertAllClose(self, a, b, rtol=1e-6, atol=1e-6): """Asserts that two numpy arrays have near values. Args: a: a numpy ndarray or anything can be converted to one. b: a numpy ndarray or anything can be converted to one. rtol: relative tolerance atol: absolute tolerance """ a = self._GetNdArray(a) b = self._GetNdArray(b) self.assertEqual( a.shape, b.shape, "Shape mismatch: expected %s, got %s." % (a.shape, b.shape)) if not np.allclose(a, b, rtol=rtol, atol=atol): # Prints more details than np.testing.assert_allclose. # # NOTE: numpy.allclose (and numpy.testing.assert_allclose) # checks whether two arrays are element-wise equal within a # tolerance. The relative difference (rtol * abs(b)) and the # absolute difference atol are added together to compare against # the absolute difference between a and b. Here, we want to # print out which elements violate such conditions. cond = np.logical_or( np.abs(a - b) > atol + rtol * np.abs(b), np.isnan(a) != np.isnan(b)) if a.ndim: x = a[np.where(cond)] y = b[np.where(cond)] print("not close where = ", np.where(cond)) else: # np.where is broken for scalars x, y = a, b print("not close lhs = ", x) print("not close rhs = ", y) print("not close dif = ", np.abs(x - y)) print("not close tol = ", atol + rtol * np.abs(y)) print("dtype = %s, shape = %s" % (a.dtype, a.shape)) np.testing.assert_allclose(a, b, rtol=rtol, atol=atol) def assertAllCloseAccordingToType(self, a, b, rtol=1e-6, atol=1e-6): """Like assertAllClose, but also suitable for comparing fp16 arrays. In particular, the tolerance is reduced to 1e-3 if at least one of the arguments is of type float16. Args: a: a numpy ndarray or anything can be converted to one. b: a numpy ndarray or anything can be converted to one. 
rtol: relative tolerance atol: absolute tolerance """ a = self._GetNdArray(a) b = self._GetNdArray(b) if a.dtype == np.float16 or b.dtype == np.float16: rtol = max(rtol, 1e-3) atol = max(atol, 1e-3) self.assertAllClose(a, b, rtol=rtol, atol=atol) def assertAllEqual(self, a, b): """Asserts that two numpy arrays have the same values. Args: a: a numpy ndarray or anything can be converted to one. b: a numpy ndarray or anything can be converted to one. """ a = self._GetNdArray(a) b = self._GetNdArray(b) self.assertEqual( a.shape, b.shape, "Shape mismatch: expected %s, got %s." % (a.shape, b.shape)) same = (a == b) if a.dtype == np.float32 or a.dtype == np.float64: same = np.logical_or(same, np.logical_and(np.isnan(a), np.isnan(b))) if not np.all(same): # Prints more details than np.testing.assert_array_equal. diff = np.logical_not(same) if a.ndim: x = a[np.where(diff)] y = b[np.where(diff)] print("not equal where = ", np.where(diff)) else: # np.where is broken for scalars x, y = a, b print("not equal lhs = ", x) print("not equal rhs = ", y) np.testing.assert_array_equal(a, b) # pylint: disable=g-doc-return-or-yield @contextlib.contextmanager def assertRaisesWithPredicateMatch(self, exception_type, expected_err_re_or_predicate): """Returns a context manager to enclose code expected to raise an exception. If the exception is an OpError, the op stack is also included in the message predicate search. Args: exception_type: The expected type of exception that should be raised. expected_err_re_or_predicate: If this is callable, it should be a function of one argument that inspects the passed-in exception and returns True (success) or False (please fail the test). Otherwise, the error message is expected to match this regular expression partially. Returns: A context manager to surround code that is expected to raise an exception. 
""" if callable(expected_err_re_or_predicate): predicate = expected_err_re_or_predicate else: def predicate(e): err_str = e.message if isinstance(e, errors.OpError) else str(e) op = e.op if isinstance(e, errors.OpError) else None while op is not None: err_str += "\nCaused by: " + op.name op = op._original_op logging.info("Searching within error strings: '%s' within '%s'", expected_err_re_or_predicate, err_str) return re.search(expected_err_re_or_predicate, err_str) try: yield self.fail(exception_type.__name__ + " not raised") except Exception as e: # pylint: disable=broad-except if not isinstance(e, exception_type) or not predicate(e): raise AssertionError("Exception of type %s: %s" % (str(type(e)), str(e))) # pylint: enable=g-doc-return-or-yield def assertRaisesOpError(self, expected_err_re_or_predicate): return self.assertRaisesWithPredicateMatch(errors.OpError, expected_err_re_or_predicate) def assertShapeEqual(self, np_array, tf_tensor): """Asserts that a Numpy ndarray and a TensorFlow tensor have the same shape. Args: np_array: A Numpy ndarray or Numpy scalar. tf_tensor: A Tensor. Raises: TypeError: If the arguments have the wrong type. """ if not isinstance(np_array, (np.ndarray, np.generic)): raise TypeError("np_array must be a Numpy ndarray or Numpy scalar") if not isinstance(tf_tensor, ops.Tensor): raise TypeError("tf_tensor must be a Tensor") self.assertAllEqual(np_array.shape, tf_tensor.get_shape().as_list()) def assertDeviceEqual(self, device1, device2): """Asserts that the two given devices are the same. Args: device1: A string device name or TensorFlow `DeviceSpec` object. device2: A string device name or TensorFlow `DeviceSpec` object. 
""" device1 = pydev.canonical_name(device1) device2 = pydev.canonical_name(device2) self.assertEqual(device1, device2, "Devices %s and %s are not equal" % (device1, device2)) # Fix Python 3 compatibility issues if six.PY3: # Silence a deprecation warning assertRaisesRegexp = googletest.TestCase.assertRaisesRegex # assertItemsEqual is assertCountEqual as of 3.2. assertItemsEqual = googletest.TestCase.assertCountEqual
apache-2.0
joshowen/django-allauth
allauth/socialaccount/providers/facebook/locale.py
20
2395
# Default locale mapping for the Facebook JS SDK # The list of supported locales is at # https://www.facebook.com/translations/FacebookLocales.xml import os from django.utils.translation import get_language, to_locale def _build_locale_table(filename_or_file): """ Parses the FacebookLocales.xml file and builds a dict relating every available language ('en, 'es, 'zh', ...) with a list of available regions for that language ('en' -> 'US', 'EN') and an (arbitrary) default region. """ # Require the XML parser module only if we want the default mapping from xml.dom.minidom import parse dom = parse(filename_or_file) reps = dom.getElementsByTagName('representation') locs = map(lambda r: r.childNodes[0].data, reps) locale_map = {} for loc in locs: lang, _, reg = loc.partition('_') lang_map = locale_map.setdefault(lang, {'regs': [], 'default': reg}) lang_map['regs'].append(reg) # Default region overrides (arbitrary) locale_map['en']['default'] = 'US' # Special case: Use es_ES for Spain and es_LA for everything else locale_map['es']['default'] = 'LA' locale_map['zh']['default'] = 'CN' locale_map['fr']['default'] = 'FR' locale_map['pt']['default'] = 'PT' return locale_map def get_default_locale_callable(): """ Wrapper function so that the default mapping is only built when needed """ exec_dir = os.path.dirname(os.path.realpath(__file__)) xml_path = os.path.join(exec_dir, 'data', 'FacebookLocales.xml') fb_locales = _build_locale_table(xml_path) def default_locale(request): """ Guess an appropiate FB locale based on the active Django locale. If the active locale is available, it is returned. Otherwise, it tries to return another locale with the same language. If there isn't one avaible, 'en_US' is returned. 
""" chosen = 'en_US' language = get_language() if language: locale = to_locale(language) lang, _, reg = locale.partition('_') lang_map = fb_locales.get(lang) if lang_map is not None: if reg in lang_map['regs']: chosen = lang + '_' + reg else: chosen = lang + '_' + lang_map['default'] return chosen return default_locale
mit
harmy/kbengine
kbe/src/lib/python/Lib/xml/etree/ElementTree.py
46
57023
# # ElementTree # $Id: ElementTree.py 3440 2008-07-18 14:45:01Z fredrik $ # # light-weight XML support for Python 2.3 and later. # # history (since 1.2.6): # 2005-11-12 fl added tostringlist/fromstringlist helpers # 2006-07-05 fl merged in selected changes from the 1.3 sandbox # 2006-07-05 fl removed support for 2.1 and earlier # 2007-06-21 fl added deprecation/future warnings # 2007-08-25 fl added doctype hook, added parser version attribute etc # 2007-08-26 fl added new serializer code (better namespace handling, etc) # 2007-08-27 fl warn for broken /tag searches on tree level # 2007-09-02 fl added html/text methods to serializer (experimental) # 2007-09-05 fl added method argument to tostring/tostringlist # 2007-09-06 fl improved error handling # 2007-09-13 fl added itertext, iterfind; assorted cleanups # 2007-12-15 fl added C14N hooks, copy method (experimental) # # Copyright (c) 1999-2008 by Fredrik Lundh. All rights reserved. # # fredrik@pythonware.com # http://www.pythonware.com # # -------------------------------------------------------------------- # The ElementTree toolkit is # # Copyright (c) 1999-2008 by Fredrik Lundh # # By obtaining, using, and/or copying this software and/or its # associated documentation, you agree that you have read, understood, # and will comply with the following terms and conditions: # # Permission to use, copy, modify, and distribute this software and # its associated documentation for any purpose and without fee is # hereby granted, provided that the above copyright notice appears in # all copies, and that both that copyright notice and this permission # notice appear in supporting documentation, and that the name of # Secret Labs AB or the author not be used in advertising or publicity # pertaining to distribution of the software without specific, written # prior permission. 
# # SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD # TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT- # ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR # BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY # DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, # WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS # ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE # OF THIS SOFTWARE. # -------------------------------------------------------------------- # Licensed to PSF under a Contributor Agreement. # See http://www.python.org/psf/license for licensing details. __all__ = [ # public symbols "Comment", "dump", "Element", "ElementTree", "fromstring", "fromstringlist", "iselement", "iterparse", "parse", "ParseError", "PI", "ProcessingInstruction", "QName", "SubElement", "tostring", "tostringlist", "TreeBuilder", "VERSION", "XML", "XMLParser", "XMLTreeBuilder", ] VERSION = "1.3.0" ## # The <b>Element</b> type is a flexible container object, designed to # store hierarchical data structures in memory. The type can be # described as a cross between a list and a dictionary. # <p> # Each element has a number of properties associated with it: # <ul> # <li>a <i>tag</i>. This is a string identifying what kind of data # this element represents (the element type, in other words).</li> # <li>a number of <i>attributes</i>, stored in a Python dictionary.</li> # <li>a <i>text</i> string.</li> # <li>an optional <i>tail</i> string.</li> # <li>a number of <i>child elements</i>, stored in a Python sequence</li> # </ul> # # To create an element instance, use the {@link #Element} constructor # or the {@link #SubElement} factory function. # <p> # The {@link #ElementTree} class can be used to wrap an element # structure, and convert it from and to XML. 
## import sys import re import warnings class _SimpleElementPath: # emulate pre-1.2 find/findtext/findall behaviour def find(self, element, tag, namespaces=None): for elem in element: if elem.tag == tag: return elem return None def findtext(self, element, tag, default=None, namespaces=None): elem = self.find(element, tag) if elem is None: return default return elem.text or "" def iterfind(self, element, tag, namespaces=None): if tag[:3] == ".//": for elem in element.iter(tag[3:]): yield elem for elem in element: if elem.tag == tag: yield elem def findall(self, element, tag, namespaces=None): return list(self.iterfind(element, tag, namespaces)) try: from . import ElementPath except ImportError: ElementPath = _SimpleElementPath() ## # Parser error. This is a subclass of <b>SyntaxError</b>. # <p> # In addition to the exception value, an exception instance contains a # specific exception code in the <b>code</b> attribute, and the line and # column of the error in the <b>position</b> attribute. class ParseError(SyntaxError): pass # -------------------------------------------------------------------- ## # Checks if an object appears to be a valid element object. # # @param An element instance. # @return A true value if this is an element object. # @defreturn flag def iselement(element): # FIXME: not sure about this; might be a better idea to look # for tag/attrib/text attributes return isinstance(element, Element) or hasattr(element, "tag") ## # Element class. This class defines the Element interface, and # provides a reference implementation of this interface. # <p> # The element name, attribute names, and attribute values can be # either ASCII strings (ordinary Python strings containing only 7-bit # ASCII characters) or Unicode strings. # # @param tag The element name. # @param attrib An optional dictionary, containing element attributes. # @param **extra Additional attributes, given as keyword arguments. 
# @see Element
# @see SubElement
# @see Comment
# @see ProcessingInstruction

class Element:
    # Reference implementation of the element interface: a tag, a dict
    # of attributes, optional text/tail strings, and an ordered list of
    # subelements kept in self._children.

    # <tag attrib>text<child/>...</tag>tail

    ##
    # (Attribute) Element tag.

    tag = None

    ##
    # (Attribute) Element attribute dictionary.  Where possible, use
    # {@link #Element.get},
    # {@link #Element.set},
    # {@link #Element.keys}, and
    # {@link #Element.items} to access
    # element attributes.

    attrib = None

    ##
    # (Attribute) Text before first subelement.  This is either a
    # string or the value None.  Note that if there was no text, this
    # attribute may be either None or an empty string, depending on
    # the parser.

    text = None

    ##
    # (Attribute) Text after this element's end tag, but before the
    # next sibling element's start tag.  This is either a string or
    # the value None.  Note that if there was no text, this attribute
    # may be either None or an empty string, depending on the parser.

    tail = None # text after end tag, if any

    # constructor

    def __init__(self, tag, attrib={}, **extra):
        # NOTE: the shared mutable default is safe here -- it is copied
        # before the keyword attributes are merged in.
        attrib = attrib.copy()
        attrib.update(extra)
        self.tag = tag
        self.attrib = attrib
        self._children = []

    def __repr__(self):
        return "<Element %s at 0x%x>" % (repr(self.tag), id(self))

    ##
    # Creates a new element object of the same type as this element.
    #
    # @param tag Element tag.
    # @param attrib Element attributes, given as a dictionary.
    # @return A new element instance.

    def makeelement(self, tag, attrib):
        return self.__class__(tag, attrib)

    ##
    # (Experimental) Copies the current element.  This creates a
    # shallow copy; subelements will be shared with the original tree.
    #
    # @return A new element instance.

    def copy(self):
        elem = self.makeelement(self.tag, self.attrib)
        elem.text = self.text
        elem.tail = self.tail
        # slice assignment shares the child elements, not the list
        elem[:] = self
        return elem

    ##
    # Returns the number of subelements.  Note that this only counts
    # full elements; to check if there's any content in an element, you
    # have to check both the length and the <b>text</b> attribute.
    #
    # @return The number of subelements.

    def __len__(self):
        return len(self._children)

    def __bool__(self):
        # an element with no children is falsy -- deprecated behaviour
        warnings.warn(
            "The behavior of this method will change in future versions. "
            "Use specific 'len(elem)' or 'elem is not None' test instead.",
            FutureWarning, stacklevel=2
            )
        return len(self._children) != 0 # emulate old behaviour, for now

    ##
    # Returns the given subelement, by index.
    #
    # @param index What subelement to return.
    # @return The given subelement.
    # @exception IndexError If the given element does not exist.

    def __getitem__(self, index):
        return self._children[index]

    ##
    # Replaces the given subelement, by index.
    #
    # @param index What subelement to replace.
    # @param element The new element value.
    # @exception IndexError If the given element does not exist.

    def __setitem__(self, index, element):
        # if isinstance(index, slice):
        #     for elt in element:
        #         assert iselement(elt)
        # else:
        #     assert iselement(element)
        self._children[index] = element

    ##
    # Deletes the given subelement, by index.
    #
    # @param index What subelement to delete.
    # @exception IndexError If the given element does not exist.

    def __delitem__(self, index):
        del self._children[index]

    ##
    # Adds a subelement to the end of this element.  In document order,
    # the new element will appear after the last existing subelement (or
    # directly after the text, if it's the first subelement), but before
    # the end tag for this element.
    #
    # @param element The element to add.

    def append(self, element):
        # assert iselement(element)
        self._children.append(element)

    ##
    # Appends subelements from a sequence.
    #
    # @param elements A sequence object with zero or more elements.
    # @since 1.3

    def extend(self, elements):
        # for element in elements:
        #     assert iselement(element)
        self._children.extend(elements)

    ##
    # Inserts a subelement at the given position in this element.
    #
    # @param index Where to insert the new subelement.

    def insert(self, index, element):
        # assert iselement(element)
        self._children.insert(index, element)

    ##
    # Removes a matching subelement.  Unlike the <b>find</b> methods,
    # this method compares elements based on identity, not on tag
    # value or contents.  To remove subelements by other means, the
    # easiest way is often to use a list comprehension to select what
    # elements to keep, and use slice assignment to update the parent
    # element.
    #
    # @param element What element to remove.
    # @exception ValueError If a matching element could not be found.

    def remove(self, element):
        # assert iselement(element)
        self._children.remove(element)

    ##
    # (Deprecated) Returns all subelements.  The elements are returned
    # in document order.
    #
    # @return A list of subelements.
    # @defreturn list of Element instances

    def getchildren(self):
        warnings.warn(
            "This method will be removed in future versions. "
            "Use 'list(elem)' or iteration over elem instead.",
            DeprecationWarning, stacklevel=2
            )
        return self._children

    ##
    # Finds the first matching subelement, by tag name or path.
    #
    # @param path What element to look for.
    # @keyparam namespaces Optional namespace prefix map.
    # @return The first matching element, or None if no element was found.
    # @defreturn Element or None

    def find(self, path, namespaces=None):
        return ElementPath.find(self, path, namespaces)

    ##
    # Finds text for the first matching subelement, by tag name or path.
    #
    # @param path What element to look for.
    # @param default What to return if the element was not found.
    # @keyparam namespaces Optional namespace prefix map.
    # @return The text content of the first matching element, or the
    #     default value if no element was found.  Note that if the element
    #     is found, but has no text content, this method returns an
    #     empty string.
    # @defreturn string

    def findtext(self, path, default=None, namespaces=None):
        return ElementPath.findtext(self, path, default, namespaces)

    ##
    # Finds all matching subelements, by tag name or path.
    #
    # @param path What element to look for.
    # @keyparam namespaces Optional namespace prefix map.
    # @return A list or other sequence containing all matching elements,
    #     in document order.
    # @defreturn list of Element instances

    def findall(self, path, namespaces=None):
        return ElementPath.findall(self, path, namespaces)

    ##
    # Finds all matching subelements, by tag name or path.
    #
    # @param path What element to look for.
    # @keyparam namespaces Optional namespace prefix map.
    # @return An iterator or sequence containing all matching elements,
    #     in document order.
    # @defreturn a generated sequence of Element instances

    def iterfind(self, path, namespaces=None):
        return ElementPath.iterfind(self, path, namespaces)

    ##
    # Resets an element.  This function removes all subelements, clears
    # all attributes, and sets the <b>text</b> and <b>tail</b> attributes
    # to None.

    def clear(self):
        self.attrib.clear()
        self._children = []
        self.text = self.tail = None

    ##
    # Gets an element attribute.  Equivalent to <b>attrib.get</b>, but
    # some implementations may handle this a bit more efficiently.
    #
    # @param key What attribute to look for.
    # @param default What to return if the attribute was not found.
    # @return The attribute value, or the default value, if the
    #     attribute was not found.
    # @defreturn string or None

    def get(self, key, default=None):
        return self.attrib.get(key, default)

    ##
    # Sets an element attribute.  Equivalent to <b>attrib[key] = value</b>,
    # but some implementations may handle this a bit more efficiently.
    #
    # @param key What attribute to set.
    # @param value The attribute value.

    def set(self, key, value):
        self.attrib[key] = value

    ##
    # Gets a list of attribute names.  The names are returned in an
    # arbitrary order (just like for an ordinary Python dictionary).
    # Equivalent to <b>attrib.keys()</b>.
    #
    # @return A list of element attribute names.
    # @defreturn list of strings

    def keys(self):
        return self.attrib.keys()

    ##
    # Gets element attributes, as a sequence.  The attributes are
    # returned in an arbitrary order.  Equivalent to <b>attrib.items()</b>.
    #
    # @return A list of (name, value) tuples for all attributes.
    # @defreturn list of (string, string) tuples

    def items(self):
        return self.attrib.items()

    ##
    # Creates a tree iterator.  The iterator loops over this element
    # and all subelements, in document order, and returns all elements
    # with a matching tag.
    # <p>
    # If the tree structure is modified during iteration, new or removed
    # elements may or may not be included.  To get a stable set, use the
    # list() function on the iterator, and loop over the resulting list.
    #
    # @param tag What tags to look for (default is to return all elements).
    # @return An iterator containing all the matching elements.
    # @defreturn iterator

    def iter(self, tag=None):
        if tag == "*":
            tag = None
        if tag is None or self.tag == tag:
            yield self
        # recurse depth-first into each child subtree
        for e in self._children:
            for e in e.iter(tag):
                yield e

    # compatibility
    def getiterator(self, tag=None):
        # Change for a DeprecationWarning in 1.4
        warnings.warn(
            "This method will be removed in future versions. "
            "Use 'elem.iter()' or 'list(elem.iter())' instead.",
            PendingDeprecationWarning, stacklevel=2
        )
        return list(self.iter(tag))

    ##
    # Creates a text iterator.  The iterator loops over this element
    # and all subelements, in document order, and returns all inner
    # text.
    #
    # @return An iterator containing all inner text.
    # @defreturn iterator

    def itertext(self):
        tag = self.tag
        # skip comments and processing instructions (function tags)
        if not isinstance(tag, str) and tag is not None:
            return
        if self.text:
            yield self.text
        for e in self:
            for s in e.itertext():
                yield s
            if e.tail:
                yield e.tail

# compatibility
_Element = _ElementInterface = Element

##
# Subelement factory.  This function creates an element instance, and
# appends it to an existing element.
# <p>
# The element name, attribute names, and attribute values can be
# either 8-bit ASCII strings or Unicode strings.
#
# @param parent The parent element.
# @param tag The subelement name.
# @param attrib An optional dictionary, containing element attributes.
# @param **extra Additional attributes, given as keyword arguments.
# @return An element instance.
# @defreturn Element

def SubElement(parent, tag, attrib={}, **extra):
    attrib = attrib.copy()
    attrib.update(extra)
    element = parent.makeelement(tag, attrib)
    parent.append(element)
    return element

##
# Comment element factory.  This factory function creates a special
# element that will be serialized as an XML comment by the standard
# serializer.
# <p>
# The comment string can be either an 8-bit ASCII string or a Unicode
# string.
#
# @param text A string containing the comment string.
# @return An element instance, representing a comment.
# @defreturn Element

def Comment(text=None):
    # the Comment function itself is used as the tag marker
    element = Element(Comment)
    element.text = text
    return element

##
# PI element factory.  This factory function creates a special element
# that will be serialized as an XML processing instruction by the standard
# serializer.
#
# @param target A string containing the PI target.
# @param text A string containing the PI contents, if any.
# @return An element instance, representing a PI.
# @defreturn Element

def ProcessingInstruction(target, text=None):
    # the ProcessingInstruction function itself is used as the tag marker
    element = Element(ProcessingInstruction)
    element.text = target
    if text:
        element.text = element.text + " " + text
    return element

PI = ProcessingInstruction

##
# QName wrapper.  This can be used to wrap a QName attribute value, in
# order to get proper namespace handling on output.
#
# @param text A string containing the QName value, in the form {uri}local,
#     or, if the tag argument is given, the URI part of a QName.
# @param tag Optional tag.  If given, the first argument is interpreted as
#     an URI, and this argument is interpreted as a local name.
# @return An opaque object, representing the QName.
class QName:
    # Wraps a "{uri}local" string so the serializer can map it to a
    # prefixed name; compares and hashes like its wrapped string.

    def __init__(self, text_or_uri, tag=None):
        # with a tag, the first argument is a URI: build the
        # "{uri}local" form; otherwise store the value as given
        self.text = "{%s}%s" % (text_or_uri, tag) if tag else text_or_uri

    def __str__(self):
        return self.text

    def __repr__(self):
        return '<QName %r>' % (self.text,)

    def __hash__(self):
        return hash(self.text)

    # all comparisons delegate to the wrapped string: another QName is
    # compared by its .text, any other value is compared directly

    def _cmp_value(self, other):
        return other.text if isinstance(other, QName) else other

    def __le__(self, other):
        return self.text <= self._cmp_value(other)

    def __lt__(self, other):
        return self.text < self._cmp_value(other)

    def __ge__(self, other):
        return self.text >= self._cmp_value(other)

    def __gt__(self, other):
        return self.text > self._cmp_value(other)

    def __eq__(self, other):
        return self.text == self._cmp_value(other)

    def __ne__(self, other):
        return self.text != self._cmp_value(other)

# --------------------------------------------------------------------

##
# ElementTree wrapper class.  This class represents an entire element
# hierarchy, and adds some extra support for serialization to and from
# standard XML.
#
# @param element Optional root element.
# @keyparam file Optional file handle or file name.  If given, the
#     tree is initialized with the contents of this XML file.

class ElementTree:

    def __init__(self, element=None, file=None):
        # assert element is None or iselement(element)
        self._root = element # first node
        if file:
            # load the initial contents from the given file
            self.parse(file)

    ##
    # Gets the root element for this tree.
    #
    # @return An element instance.
    # @defreturn Element

    def getroot(self):
        return self._root

    ##
    # Replaces the root element for this tree.  This discards the
    # current contents of the tree, and replaces it with the given
    # element.  Use with care.
    #
    # @param element An element instance.

    def _setroot(self, element):
        # assert iselement(element)
        self._root = element

    ##
    # Loads an external XML document into this element tree.
    #
    # @param source A file name or file object.  If a file object is
    #     given, it only has to implement a <b>read(n)</b> method.
    # @keyparam parser An optional parser instance.  If not given, the
    #     standard {@link XMLParser} parser is used.
    # @return The document root element.
    # @defreturn Element
    # @exception ParseError If the parser fails to parse the document.

    def parse(self, source, parser=None):
        close_source = False
        if not hasattr(source, "read"):
            # a file name was given; open it (and remember to close it)
            source = open(source, "rb")
            close_source = True
        try:
            if not parser:
                parser = XMLParser(target=TreeBuilder())
            # feed the parser in 64k chunks
            while 1:
                data = source.read(65536)
                if not data:
                    break
                parser.feed(data)
            self._root = parser.close()
            return self._root
        finally:
            if close_source:
                source.close()

    ##
    # Creates a tree iterator for the root element.  The iterator loops
    # over all elements in this tree, in document order.
    #
    # @param tag What tags to look for (default is to return all elements)
    # @return An iterator.
    # @defreturn iterator

    def iter(self, tag=None):
        # assert self._root is not None
        return self._root.iter(tag)

    # compatibility
    def getiterator(self, tag=None):
        # Change for a DeprecationWarning in 1.4
        warnings.warn(
            "This method will be removed in future versions. "
            "Use 'tree.iter()' or 'list(tree.iter())' instead.",
            PendingDeprecationWarning, stacklevel=2
        )
        return list(self.iter(tag))

    ##
    # Finds the first toplevel element with given tag.
    # Same as getroot().find(path).
    #
    # @param path What element to look for.
    # @keyparam namespaces Optional namespace prefix map.
    # @return The first matching element, or None if no element was found.
    # @defreturn Element or None

    def find(self, path, namespaces=None):
        # assert self._root is not None
        if path[:1] == "/":
            # absolute paths are rewritten relative to the root, with a
            # warning, for backwards compatibility
            path = "." + path
            warnings.warn(
                "This search is broken in 1.3 and earlier, and will be "
                "fixed in a future version. If you rely on the current "
                "behaviour, change it to %r" % path,
                FutureWarning, stacklevel=2
                )
        return self._root.find(path, namespaces)

    ##
    # Finds the element text for the first toplevel element with given
    # tag.  Same as getroot().findtext(path).
    #
    # @param path What toplevel element to look for.
    # @param default What to return if the element was not found.
    # @keyparam namespaces Optional namespace prefix map.
    # @return The text content of the first matching element, or the
    #     default value if no element was found.  Note that if the element
    #     is found, but has no text content, this method returns an
    #     empty string.
    # @defreturn string

    def findtext(self, path, default=None, namespaces=None):
        # assert self._root is not None
        if path[:1] == "/":
            path = "." + path
            warnings.warn(
                "This search is broken in 1.3 and earlier, and will be "
                "fixed in a future version. If you rely on the current "
                "behaviour, change it to %r" % path,
                FutureWarning, stacklevel=2
                )
        return self._root.findtext(path, default, namespaces)

    ##
    # Finds all toplevel elements with the given tag.
    # Same as getroot().findall(path).
    #
    # @param path What element to look for.
    # @keyparam namespaces Optional namespace prefix map.
    # @return A list or iterator containing all matching elements,
    #     in document order.
    # @defreturn list of Element instances

    def findall(self, path, namespaces=None):
        # assert self._root is not None
        if path[:1] == "/":
            path = "." + path
            warnings.warn(
                "This search is broken in 1.3 and earlier, and will be "
                "fixed in a future version. If you rely on the current "
                "behaviour, change it to %r" % path,
                FutureWarning, stacklevel=2
                )
        return self._root.findall(path, namespaces)

    ##
    # Finds all matching subelements, by tag name or path.
    # Same as getroot().iterfind(path).
    #
    # @param path What element to look for.
    # @keyparam namespaces Optional namespace prefix map.
    # @return An iterator or sequence containing all matching elements,
    # in document order.
# @defreturn a generated sequence of Element instances def iterfind(self, path, namespaces=None): # assert self._root is not None if path[:1] == "/": path = "." + path warnings.warn( "This search is broken in 1.3 and earlier, and will be " "fixed in a future version. If you rely on the current " "behaviour, change it to %r" % path, FutureWarning, stacklevel=2 ) return self._root.iterfind(path, namespaces) ## # Writes the element tree to a file, as XML. # # @def write(file, **options) # @param file A file name, or a file object opened for writing. # @param **options Options, given as keyword arguments. # @keyparam encoding Optional output encoding (default is US-ASCII). # Use "unicode" to return a Unicode string. # @keyparam method Optional output method ("xml", "html", "text" or # "c14n"; default is "xml"). # @keyparam xml_declaration Controls if an XML declaration should # be added to the file. Use False for never, True for always, # None for only if not US-ASCII or UTF-8 or Unicode. None is default. def write(self, file_or_filename, # keyword arguments encoding=None, xml_declaration=None, default_namespace=None, method=None): # assert self._root is not None if not method: method = "xml" elif method not in _serialize: # FIXME: raise an ImportError for c14n if ElementC14N is missing? raise ValueError("unknown method %r" % method) if not encoding: if method == "c14n": encoding = "utf-8" else: encoding = "us-ascii" elif encoding == str: # lxml.etree compatibility. 
encoding = "unicode" else: encoding = encoding.lower() if hasattr(file_or_filename, "write"): file = file_or_filename else: if encoding != "unicode": file = open(file_or_filename, "wb") else: file = open(file_or_filename, "w") if encoding != "unicode": def write(text): try: return file.write(text.encode(encoding, "xmlcharrefreplace")) except (TypeError, AttributeError): _raise_serialization_error(text) else: write = file.write if method == "xml" and (xml_declaration or (xml_declaration is None and encoding not in ("utf-8", "us-ascii", "unicode"))): declared_encoding = encoding if encoding == "unicode": # Retrieve the default encoding for the xml declaration import locale declared_encoding = locale.getpreferredencoding() write("<?xml version='1.0' encoding='%s'?>\n" % declared_encoding) if method == "text": _serialize_text(write, self._root) else: qnames, namespaces = _namespaces(self._root, default_namespace) serialize = _serialize[method] serialize(write, self._root, qnames, namespaces) if file_or_filename is not file: file.close() def write_c14n(self, file): # lxml.etree compatibility. use output method instead return self.write(file, method="c14n") # -------------------------------------------------------------------- # serialization support def _namespaces(elem, default_namespace=None): # identify namespaces used in this tree # maps qnames to *encoded* prefix:local names qnames = {None: None} # maps uri:s to prefixes namespaces = {} if default_namespace: namespaces[default_namespace] = "" def add_qname(qname): # calculate serialized qname representation try: if qname[:1] == "{": uri, tag = qname[1:].rsplit("}", 1) prefix = namespaces.get(uri) if prefix is None: prefix = _namespace_map.get(uri) if prefix is None: prefix = "ns%d" % len(namespaces) if prefix != "xml": namespaces[uri] = prefix if prefix: qnames[qname] = "%s:%s" % (prefix, tag) else: qnames[qname] = tag # default element else: if default_namespace: # FIXME: can this be handled in XML 1.0? 
raise ValueError( "cannot use non-qualified names with " "default_namespace option" ) qnames[qname] = qname except TypeError: _raise_serialization_error(qname) # populate qname and namespaces table try: iterate = elem.iter except AttributeError: iterate = elem.getiterator # cET compatibility for elem in iterate(): tag = elem.tag if isinstance(tag, QName): if tag.text not in qnames: add_qname(tag.text) elif isinstance(tag, str): if tag not in qnames: add_qname(tag) elif tag is not None and tag is not Comment and tag is not PI: _raise_serialization_error(tag) for key, value in elem.items(): if isinstance(key, QName): key = key.text if key not in qnames: add_qname(key) if isinstance(value, QName) and value.text not in qnames: add_qname(value.text) text = elem.text if isinstance(text, QName) and text.text not in qnames: add_qname(text.text) return qnames, namespaces def _serialize_xml(write, elem, qnames, namespaces): tag = elem.tag text = elem.text if tag is Comment: write("<!--%s-->" % text) elif tag is ProcessingInstruction: write("<?%s?>" % text) else: tag = qnames[tag] if tag is None: if text: write(_escape_cdata(text)) for e in elem: _serialize_xml(write, e, qnames, None) else: write("<" + tag) items = list(elem.items()) if items or namespaces: if namespaces: for v, k in sorted(namespaces.items(), key=lambda x: x[1]): # sort on prefix if k: k = ":" + k write(" xmlns%s=\"%s\"" % ( k, _escape_attrib(v) )) for k, v in sorted(items): # lexical order if isinstance(k, QName): k = k.text if isinstance(v, QName): v = qnames[v.text] else: v = _escape_attrib(v) write(" %s=\"%s\"" % (qnames[k], v)) if text or len(elem): write(">") if text: write(_escape_cdata(text)) for e in elem: _serialize_xml(write, e, qnames, None) write("</" + tag + ">") else: write(" />") if elem.tail: write(_escape_cdata(elem.tail)) HTML_EMPTY = ("area", "base", "basefont", "br", "col", "frame", "hr", "img", "input", "isindex", "link", "meta" "param") try: HTML_EMPTY = set(HTML_EMPTY) except 
NameError: pass def _serialize_html(write, elem, qnames, namespaces): tag = elem.tag text = elem.text if tag is Comment: write("<!--%s-->" % _escape_cdata(text)) elif tag is ProcessingInstruction: write("<?%s?>" % _escape_cdata(text)) else: tag = qnames[tag] if tag is None: if text: write(_escape_cdata(text)) for e in elem: _serialize_html(write, e, qnames, None) else: write("<" + tag) items = list(elem.items()) if items or namespaces: if namespaces: for v, k in sorted(namespaces.items(), key=lambda x: x[1]): # sort on prefix if k: k = ":" + k write(" xmlns%s=\"%s\"" % ( k, _escape_attrib(v) )) for k, v in sorted(items): # lexical order if isinstance(k, QName): k = k.text if isinstance(v, QName): v = qnames[v.text] else: v = _escape_attrib_html(v) # FIXME: handle boolean attributes write(" %s=\"%s\"" % (qnames[k], v)) write(">") tag = tag.lower() if text: if tag == "script" or tag == "style": write(text) else: write(_escape_cdata(text)) for e in elem: _serialize_html(write, e, qnames, None) if tag not in HTML_EMPTY: write("</" + tag + ">") if elem.tail: write(_escape_cdata(elem.tail)) def _serialize_text(write, elem): for part in elem.itertext(): write(part) if elem.tail: write(elem.tail) _serialize = { "xml": _serialize_xml, "html": _serialize_html, "text": _serialize_text, # this optional method is imported at the end of the module # "c14n": _serialize_c14n, } ## # Registers a namespace prefix. The registry is global, and any # existing mapping for either the given prefix or the namespace URI # will be removed. # # @param prefix Namespace prefix. # @param uri Namespace uri. Tags and attributes in this namespace # will be serialized with the given prefix, if at all possible. # @exception ValueError If the prefix is reserved, or is otherwise # invalid. 
def register_namespace(prefix, uri): if re.match("ns\d+$", prefix): raise ValueError("Prefix format reserved for internal use") for k, v in list(_namespace_map.items()): if k == uri or v == prefix: del _namespace_map[k] _namespace_map[uri] = prefix _namespace_map = { # "well-known" namespace prefixes "http://www.w3.org/XML/1998/namespace": "xml", "http://www.w3.org/1999/xhtml": "html", "http://www.w3.org/1999/02/22-rdf-syntax-ns#": "rdf", "http://schemas.xmlsoap.org/wsdl/": "wsdl", # xml schema "http://www.w3.org/2001/XMLSchema": "xs", "http://www.w3.org/2001/XMLSchema-instance": "xsi", # dublin core "http://purl.org/dc/elements/1.1/": "dc", } def _raise_serialization_error(text): raise TypeError( "cannot serialize %r (type %s)" % (text, type(text).__name__) ) def _escape_cdata(text): # escape character data try: # it's worth avoiding do-nothing calls for strings that are # shorter than 500 character, or so. assume that's, by far, # the most common case in most applications. if "&" in text: text = text.replace("&", "&amp;") if "<" in text: text = text.replace("<", "&lt;") if ">" in text: text = text.replace(">", "&gt;") return text except (TypeError, AttributeError): _raise_serialization_error(text) def _escape_attrib(text): # escape attribute value try: if "&" in text: text = text.replace("&", "&amp;") if "<" in text: text = text.replace("<", "&lt;") if ">" in text: text = text.replace(">", "&gt;") if "\"" in text: text = text.replace("\"", "&quot;") if "\n" in text: text = text.replace("\n", "&#10;") return text except (TypeError, AttributeError): _raise_serialization_error(text) def _escape_attrib_html(text): # escape attribute value try: if "&" in text: text = text.replace("&", "&amp;") if ">" in text: text = text.replace(">", "&gt;") if "\"" in text: text = text.replace("\"", "&quot;") return text except (TypeError, AttributeError): _raise_serialization_error(text) # -------------------------------------------------------------------- ## # Generates a string 
representation of an XML element, including all # subelements. If encoding is "unicode", the return type is a string; # otherwise it is a bytes array. # # @param element An Element instance. # @keyparam encoding Optional output encoding (default is US-ASCII). # Use "unicode" to return a Unicode string. # @keyparam method Optional output method ("xml", "html", "text" or # "c14n"; default is "xml"). # @return An (optionally) encoded string containing the XML data. # @defreturn string def tostring(element, encoding=None, method=None): class dummy: pass data = [] file = dummy() file.write = data.append ElementTree(element).write(file, encoding, method=method) if encoding in (str, "unicode"): return "".join(data) else: return b"".join(data) ## # Generates a string representation of an XML element, including all # subelements. If encoding is False, the string is returned as a # sequence of string fragments; otherwise it is a sequence of # bytestrings. # # @param element An Element instance. # @keyparam encoding Optional output encoding (default is US-ASCII). # Use "unicode" to return a Unicode string. # @keyparam method Optional output method ("xml", "html", "text" or # "c14n"; default is "xml"). # @return A sequence object containing the XML data. # @defreturn sequence # @since 1.3 def tostringlist(element, encoding=None, method=None): class dummy: pass data = [] file = dummy() file.write = data.append ElementTree(element).write(file, encoding, method=method) # FIXME: merge small fragments into larger parts return data ## # Writes an element tree or element structure to sys.stdout. This # function should be used for debugging only. # <p> # The exact output format is implementation dependent. In this # version, it's written as an ordinary XML file. # # @param elem An element tree or an individual element. 
def dump(elem):
    # debugging
    # accept either a tree or a bare element; wrap elements on the fly
    if not isinstance(elem, ElementTree):
        elem = ElementTree(elem)
    elem.write(sys.stdout, encoding="unicode")
    tail = elem.getroot().tail
    # make sure the output ends with exactly one trailing newline
    if not tail or tail[-1] != "\n":
        sys.stdout.write("\n")

# --------------------------------------------------------------------
# parsing

##
# Parses an XML document into an element tree.
#
# @param source A filename or file object containing XML data.
# @param parser An optional parser instance.  If not given, the
#     standard {@link XMLParser} parser is used.
# @return An ElementTree instance

def parse(source, parser=None):
    tree = ElementTree()
    tree.parse(source, parser)
    return tree

##
# Parses an XML document into an element tree incrementally, and reports
# what's going on to the user.
#
# @param source A filename or file object containing XML data.
# @param events A list of events to report back.  If omitted, only "end"
#     events are reported.
# @param parser An optional parser instance.  If not given, the
#     standard {@link XMLParser} parser is used.
# @return A (event, elem) iterator.
def iterparse(source, events=None, parser=None):
    # If we were handed a path rather than a file object we own the
    # file and must close it once iteration finishes.
    close_source = False
    if not hasattr(source, "read"):
        source = open(source, "rb")
        close_source = True
    if not parser:
        parser = XMLParser(target=TreeBuilder())
    return _IterParseIterator(source, events, parser, close_source)


class _IterParseIterator:
    # Iterator returned by iterparse().  Feeds the underlying expat
    # parser in 16k chunks and yields (event, elem) pairs as the
    # registered handlers record them in self._events.

    def __init__(self, source, events, parser, close_source=False):
        self._file = source
        self._close_file = close_source
        self._events = []   # buffered (event, elem) pairs not yet yielded
        self._index = 0     # read position inside self._events
        self.root = self._root = None
        self._parser = parser
        # wire up the parser for event reporting
        parser = self._parser._parser
        append = self._events.append
        if events is None:
            events = ["end"]
        for event in events:
            if event == "start":
                try:
                    # Prefer the flat-list attribute protocol when this
                    # expat build supports it (avoids a dict round-trip).
                    parser.ordered_attributes = 1
                    parser.specified_attributes = 1
                    def handler(tag, attrib_in, event=event, append=append,
                                start=self._parser._start_list):
                        append((event, start(tag, attrib_in)))
                    parser.StartElementHandler = handler
                except AttributeError:
                    def handler(tag, attrib_in, event=event, append=append,
                                start=self._parser._start):
                        append((event, start(tag, attrib_in)))
                    parser.StartElementHandler = handler
            elif event == "end":
                def handler(tag, event=event, append=append,
                            end=self._parser._end):
                    append((event, end(tag)))
                parser.EndElementHandler = handler
            elif event == "start-ns":
                def handler(prefix, uri, event=event, append=append):
                    append((event, (prefix or "", uri or "")))
                parser.StartNamespaceDeclHandler = handler
            elif event == "end-ns":
                def handler(prefix, event=event, append=append):
                    append((event, None))
                parser.EndNamespaceDeclHandler = handler
            else:
                raise ValueError("unknown event %r" % event)

    def __next__(self):
        # Drain the buffered events first; only read/feed more input
        # when the buffer is exhausted.
        while 1:
            try:
                item = self._events[self._index]
            except IndexError:
                if self._parser is None:
                    # Parser already closed: expose the root and stop.
                    self.root = self._root
                    if self._close_file:
                        self._file.close()
                    raise StopIteration
                # load event buffer
                del self._events[:]
                self._index = 0
                data = self._file.read(16384)
                if data:
                    self._parser.feed(data)
                else:
                    # End of input: finish parsing, remember the root.
                    self._root = self._parser.close()
                    self._parser = None
            else:
                self._index = self._index + 1
                return item

    def __iter__(self):
        return self

##
# Parses an XML document from a string constant.  This function can
# be used to embed "XML literals" in Python code.
#
# @param source A string containing XML data.
# @param parser An optional parser instance.  If not given, the
#     standard {@link XMLParser} parser is used.
# @return An Element instance.
# @defreturn Element

def XML(text, parser=None):
    if not parser:
        parser = XMLParser(target=TreeBuilder())
    parser.feed(text)
    return parser.close()

##
# Parses an XML document from a string constant, and also returns
# a dictionary which maps from element id:s to elements.
#
# @param source A string containing XML data.
# @param parser An optional parser instance.  If not given, the
#     standard {@link XMLParser} parser is used.
# @return A tuple containing an Element instance and a dictionary.
# @defreturn (Element, dictionary)

def XMLID(text, parser=None):
    if not parser:
        parser = XMLParser(target=TreeBuilder())
    parser.feed(text)
    tree = parser.close()
    # Collect every element carrying an "id" attribute.
    ids = {}
    for elem in tree.iter():
        id = elem.get("id")
        if id:
            ids[id] = elem
    return tree, ids

##
# Parses an XML document from a string constant.  Same as {@link #XML}.
#
# @def fromstring(text)
# @param source A string containing XML data.
# @return An Element instance.
# @defreturn Element

fromstring = XML

##
# Parses an XML document from a sequence of string fragments.
#
# @param sequence A list or other sequence containing XML data fragments.
# @param parser An optional parser instance.  If not given, the
#     standard {@link XMLParser} parser is used.
# @return An Element instance.
# @defreturn Element
# @since 1.3

def fromstringlist(sequence, parser=None):
    if not parser:
        parser = XMLParser(target=TreeBuilder())
    for text in sequence:
        parser.feed(text)
    return parser.close()

# --------------------------------------------------------------------

##
# Generic element structure builder.
# This builder converts a sequence
# of {@link #TreeBuilder.start}, {@link #TreeBuilder.data}, and {@link
# #TreeBuilder.end} method calls to a well-formed element structure.
# <p>
# You can use this class to build an element structure using a custom XML
# parser, or a parser for some other XML-like format.
#
# @param element_factory Optional element factory.  This factory
#     is called to create new Element instances, as necessary.

class TreeBuilder:

    def __init__(self, element_factory=None):
        self._data = [] # data collector
        self._elem = [] # element stack
        self._last = None # last element
        self._tail = None # true if we're after an end tag
        if element_factory is None:
            element_factory = Element
        self._factory = element_factory

    ##
    # Flushes the builder buffers, and returns the toplevel document
    # element.
    #
    # @return An Element instance.
    # @defreturn Element

    def close(self):
        assert len(self._elem) == 0, "missing end tags"
        assert self._last is not None, "missing toplevel element"
        return self._last

    def _flush(self):
        # Join buffered character data and attach it as text of the
        # current element, or as tail of the last closed element.
        if self._data:
            if self._last is not None:
                text = "".join(self._data)
                if self._tail:
                    assert self._last.tail is None, "internal error (tail)"
                    self._last.tail = text
                else:
                    assert self._last.text is None, "internal error (text)"
                    self._last.text = text
            self._data = []

    ##
    # Adds text to the current element.
    #
    # @param data A string.  This should be either an 8-bit string
    #     containing ASCII text, or a Unicode string.

    def data(self, data):
        self._data.append(data)

    ##
    # Opens a new element.
    #
    # @param tag The element name.
    # @param attrib A dictionary containing element attributes.
    # @return The opened element.
    # @defreturn Element

    def start(self, tag, attrs):
        self._flush()
        self._last = elem = self._factory(tag, attrs)
        if self._elem:
            self._elem[-1].append(elem)
        self._elem.append(elem)
        self._tail = 0
        return elem

    ##
    # Closes the current element.
    #
    # @param tag The element name.
    # @return The closed element.
    # @defreturn Element

    def end(self, tag):
        self._flush()
        self._last = self._elem.pop()
        assert self._last.tag == tag,\
               "end tag mismatch (expected %s, got %s)" % (
                   self._last.tag, tag)
        self._tail = 1
        return self._last

##
# Element structure builder for XML source data, based on the
# <b>expat</b> parser.
#
# @keyparam target Target object.  If omitted, the builder uses an
#     instance of the standard {@link #TreeBuilder} class.
# @keyparam html Predefine HTML entities.  This flag is not supported
#     by the current implementation.
# @keyparam encoding Optional encoding.  If given, the value overrides
#     the encoding specified in the XML file.
# @see #ElementTree
# @see #TreeBuilder

class XMLParser:

    def __init__(self, html=0, target=None, encoding=None):
        try:
            from xml.parsers import expat
        except ImportError:
            try:
                import pyexpat as expat
            except ImportError:
                raise ImportError(
                    "No module named expat; use SimpleXMLTreeBuilder instead"
                    )
        # The "}" namespace separator makes expat report qualified
        # names as "uri}local"; _fixname() turns that into "{uri}local".
        parser = expat.ParserCreate(encoding, "}")
        if target is None:
            target = TreeBuilder()
        # underscored names are provided for compatibility only
        self.parser = self._parser = parser
        self.target = self._target = target
        self._error = expat.error
        self._names = {} # name memo cache
        # callbacks
        parser.DefaultHandlerExpand = self._default
        parser.StartElementHandler = self._start
        parser.EndElementHandler = self._end
        parser.CharacterDataHandler = self._data
        # optional callbacks
        parser.CommentHandler = self._comment
        parser.ProcessingInstructionHandler = self._pi
        # let expat do the buffering, if supported
        try:
            self._parser.buffer_text = 1
        except AttributeError:
            pass
        # use new-style attribute handling, if supported
        try:
            self._parser.ordered_attributes = 1
            self._parser.specified_attributes = 1
            parser.StartElementHandler = self._start_list
        except AttributeError:
            pass
        self._doctype = None
        self.entity = {}
        try:
            self.version = "Expat %d.%d.%d" % expat.version_info
        except AttributeError:
            pass # unknown

    def _raiseerror(self, value):
        # Convert an expat error into a ParseError carrying the same
        # error code and (line, column) position.
        err = ParseError(value)
        err.code = value.code
        err.position = value.lineno, value.offset
        raise err

    def _fixname(self, key):
        # expand qname, and convert name string to ascii, if possible
        try:
            name = self._names[key]
        except KeyError:
            name = key
            if "}" in name:
                name = "{" + name
            self._names[key] = name
        return name

    def _start(self, tag, attrib_in):
        # Dict-based attribute protocol (fallback path).
        fixname = self._fixname
        tag = fixname(tag)
        attrib = {}
        for key, value in attrib_in.items():
            attrib[fixname(key)] = value
        return self.target.start(tag, attrib)

    def _start_list(self, tag, attrib_in):
        # Flat [name, value, name, value, ...] attribute protocol, used
        # when expat supports ordered_attributes.
        fixname = self._fixname
        tag = fixname(tag)
        attrib = {}
        if attrib_in:
            for i in range(0, len(attrib_in), 2):
                attrib[fixname(attrib_in[i])] = attrib_in[i+1]
        return self.target.start(tag, attrib)

    def _data(self, text):
        return self.target.data(text)

    def _end(self, tag):
        return self.target.end(self._fixname(tag))

    def _comment(self, data):
        # Forward comments only if the target implements comment().
        try:
            comment = self.target.comment
        except AttributeError:
            pass
        else:
            return comment(data)

    def _pi(self, target, data):
        # Forward processing instructions only if the target has pi().
        try:
            pi = self.target.pi
        except AttributeError:
            pass
        else:
            return pi(target, data)

    def _default(self, text):
        prefix = text[:1]
        if prefix == "&":
            # deal with undefined entities
            try:
                self.target.data(self.entity[text[1:-1]])
            except KeyError:
                from xml.parsers import expat
                err = expat.error(
                    "undefined entity %s: line %d, column %d" %
                    (text, self._parser.ErrorLineNumber,
                    self._parser.ErrorColumnNumber)
                    )
                err.code = 11 # XML_ERROR_UNDEFINED_ENTITY
                err.lineno = self._parser.ErrorLineNumber
                err.offset = self._parser.ErrorColumnNumber
                raise err
        elif prefix == "<" and text[:9] == "<!DOCTYPE":
            self._doctype = [] # inside a doctype declaration
        elif self._doctype is not None:
            # parse doctype contents
            if prefix == ">":
                self._doctype = None
                return
            text = text.strip()
            if not text:
                return
            self._doctype.append(text)
            n = len(self._doctype)
            if n > 2:
                type = self._doctype[1]
                if type == "PUBLIC" and n == 4:
                    name, type, pubid, system = self._doctype
                elif type == "SYSTEM" and n == 3:
                    name, type, system = self._doctype
                    pubid = None
                else:
                    return
                if pubid:
                    pubid = pubid[1:-1]
                if hasattr(self.target, "doctype"):
                    # Preferred path: the builder target handles doctype.
                    self.target.doctype(name, pubid, system[1:-1])
                elif self.doctype is not self._XMLParser__doctype:
                    # warn about deprecated call
                    self._XMLParser__doctype(name, pubid, system[1:-1])
                    self.doctype(name, pubid, system[1:-1])
                self._doctype = None

    ##
    # (Deprecated) Handles a doctype declaration.
    #
    # @param name Doctype name.
    # @param pubid Public identifier.
    # @param system System identifier.

    def doctype(self, name, pubid, system):
        """This method of XMLParser is deprecated."""
        warnings.warn(
            "This method of XMLParser is deprecated. Define doctype() "
            "method on the TreeBuilder target.",
            DeprecationWarning,
            )

    # sentinel, if doctype is redefined in a subclass
    __doctype = doctype

    ##
    # Feeds data to the parser.
    #
    # @param data Encoded data.

    def feed(self, data):
        try:
            self._parser.Parse(data, 0)
        except self._error as v:
            self._raiseerror(v)

    ##
    # Finishes feeding data to the parser.
    #
    # @return An element structure.
    # @defreturn Element

    def close(self):
        try:
            self._parser.Parse("", 1) # end of data
        except self._error as v:
            self._raiseerror(v)
        tree = self.target.close()
        del self.target, self._parser # get rid of circular references
        return tree

# compatibility
XMLTreeBuilder = XMLParser

# workaround circular import.
try:
    from ElementC14N import _serialize_c14n
    _serialize["c14n"] = _serialize_c14n
except ImportError:
    pass
lgpl-3.0
vipmike007/virt-test
virttest/version.py
18
3404
#!/usr/bin/python
"""
Based on work from Douglas Creager <dcreager@dcreager.net>

Gets the current version number. If possible, this is the output
of "git describe", modified to conform to the versioning scheme
that setuptools uses. If "git describe" returns an error (most
likely because we're in an unpacked copy of a release tarball,
rather than in a git working copy), then we fall back on reading the
contents of the RELEASE-VERSION file.
"""
__all__ = ("get_git_version", "get_version", "get_top_commit",
           "get_current_branch", "get_pretty_version_info")

import os
import sys

import common
from autotest.client import utils
from autotest.client.shared import error

import data_dir

_ROOT_PATH = data_dir.get_root_dir()
RELEASE_VERSION_PATH = os.path.join(_ROOT_PATH, 'RELEASE-VERSION')

# Per-process caches so the (relatively expensive) git commands run at
# most once.  NOTE: the original module-level ``global`` statements were
# no-ops and have been removed; the names themselves are kept for
# backward compatibility with code that may inspect them.
_GIT_VERSION_CACHE = None
_VERSION_CACHE = None
_TOP_COMMIT_CACHE = None
_CURRENT_BRANCH_CACHE = None
_PRETTY_VERSION_CACHE = None


def _execute_git_command(command):
    """
    Run a git command from the repository root and return its output.

    git is sensitive to the $CWD, so change to the top dir first and
    always restore the previous working directory.

    :param command: Git command to be executed.
    :return: Stripped command output, or 'unknown' if the command fails.
    """
    cwd = os.getcwd()
    os.chdir(_ROOT_PATH)
    try:
        try:
            return utils.system_output(command).strip()
        finally:
            os.chdir(cwd)
    except error.CmdError:
        return 'unknown'


def get_git_version(abbrev=4):
    """
    Return the output of ``git describe`` (cached), or 'unknown'.

    :param abbrev: Minimum number of SHA1 digits in the describe output.
    """
    global _GIT_VERSION_CACHE
    if _GIT_VERSION_CACHE is not None:
        return _GIT_VERSION_CACHE

    _GIT_VERSION_CACHE = _execute_git_command('git describe --abbrev=%d'
                                              % abbrev)
    return _GIT_VERSION_CACHE


def get_top_commit():
    """Return the SHA1 of the topmost commit (cached), or 'unknown'."""
    global _TOP_COMMIT_CACHE
    if _TOP_COMMIT_CACHE is not None:
        return _TOP_COMMIT_CACHE

    _TOP_COMMIT_CACHE = _execute_git_command(
        "git show --summary --pretty='%H' | head -1")
    return _TOP_COMMIT_CACHE


def get_current_branch():
    """Return the name of the current git branch (cached), or 'unknown'."""
    global _CURRENT_BRANCH_CACHE
    if _CURRENT_BRANCH_CACHE is not None:
        return _CURRENT_BRANCH_CACHE

    _CURRENT_BRANCH_CACHE = _execute_git_command('git rev-parse '
                                                 '--abbrev-ref HEAD')
    return _CURRENT_BRANCH_CACHE


def _read_release_version():
    """Return the version recorded in RELEASE-VERSION, or 'unknown'."""
    try:
        version_file = open(RELEASE_VERSION_PATH, "r")
        try:
            # The version string is the first line of the file.
            return version_file.readlines()[0].strip()
        finally:
            version_file.close()
    # Original code used a bare ``except:``; catch only what can
    # actually happen: missing/unreadable file, or an empty file.
    except (EnvironmentError, IndexError):
        return 'unknown'


def _write_release_version(version):
    """Persist *version* to the RELEASE-VERSION file."""
    version_file = open(RELEASE_VERSION_PATH, "w")
    try:
        version_file.write("%s\n" % version)
    finally:
        version_file.close()


def get_version(abbrev=4):
    """
    Return the best available version string.

    Prefers ``git describe``; falls back to the RELEASE-VERSION file.
    Keeps RELEASE-VERSION in sync so release tarballs (which lack git
    metadata) still report the right version.
    """
    release_version = _read_release_version()
    version = get_git_version(abbrev)
    # BUGFIX: the original used ``version is 'unknown'`` which relies
    # on CPython string interning; use equality instead.
    if version == 'unknown':
        version = release_version
    if version == 'unknown':
        return version
    if version != release_version:
        _write_release_version(version)
    return version


def get_pretty_version_info():
    """Return a human-readable version/branch/commit summary string."""
    return ("Virt Test '%s', Branch '%s', SHA1 '%s'" %
            (get_version(), get_current_branch(), get_top_commit()))


if __name__ == "__main__":
    # Parenthesised form works on both Python 2 and 3 for a single arg.
    print(get_pretty_version_info())
gpl-2.0
xpansa/server-tools
base_suspend_security/base_suspend_security.py
29
1308
# -*- coding: utf-8 -*- ############################################################################## # # This module copyright (C) 2015 Therp BV <http://therp.nl>. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## class BaseSuspendSecurityUid(int): def __int__(self): return self def __eq__(self, other): if isinstance(other, (int, long)): return False return super(BaseSuspendSecurityUid, self).__int__() == other def __iter__(self): yield super(BaseSuspendSecurityUid, self).__int__() SUSPEND_METHOD = 'suspend_security'
agpl-3.0
newswangerd/ansible
test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/module_utils/network/vyos/utils/utils.py
47
5828
# -*- coding: utf-8 -*-
# Copyright 2019 Red Hat
# GNU General Public License v3.0+
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# utils
from __future__ import absolute_import, division, print_function

__metaclass__ = type

from ansible.module_utils.six import iteritems
from ansible_collections.ansible.netcommon.plugins.module_utils.compat import (
    ipaddress,
)


def search_obj_in_list(name, lst, key="name"):
    """Return the first dict in *lst* whose *key* equals *name*.

    :param name: value to match
    :param lst: list of dicts to search
    :param key: dict key to compare against (default "name")
    :return: the matching dict, or None if no entry matches
    """
    for item in lst:
        if item[key] == name:
            return item
    return None


def get_interface_type(interface):
    """Map an interface name prefix to its VyOS interface type.

    Returns None (implicitly) for unrecognised prefixes.
    """
    if interface.startswith("eth"):
        return "ethernet"
    elif interface.startswith("bond"):
        return "bonding"
    elif interface.startswith("vti"):
        return "vti"
    elif interface.startswith("lo"):
        return "loopback"


def dict_delete(base, comparable):
    """ This function generates a dict containing key, value pairs for keys
    that are present in the `base` dict but not present in the `comparable`
    dict. Nested dicts are compared recursively.

    :param base: dict object to base the diff on
    :param comparable: dict object to compare against base
    :returns: new dict object with key, value pairs that needs to be deleted.

    """
    to_delete = dict()

    for key in base:
        if isinstance(base[key], dict):
            sub_diff = dict_delete(base[key], comparable.get(key, {}))
            if sub_diff:
                to_delete[key] = sub_diff
        else:
            if key not in comparable:
                to_delete[key] = base[key]

    return to_delete


def diff_list_of_dicts(want, have):
    """Return the dicts present in *want* but not in *have*.

    Dicts are compared by their full (key, value) item sets, so all
    values must be hashable.
    """
    diff = []
    set_w = set(tuple(d.items()) for d in want)
    set_h = set(tuple(d.items()) for d in have)
    difference = set_w.difference(set_h)

    for element in difference:
        diff.append(dict((x, y) for x, y in element))

    return diff


def get_lst_diff_for_dicts(want, have, lst):
    """
    This function generates a list containing values
    that are only in want and not in list in have dict
    :param want: dict object to want
    :param have: dict object to have
    :param lst: list the diff on
    :return: new list object with values which are only in want.
    """
    if not have:
        diff = want.get(lst) or []

    else:
        want_elements = want.get(lst) or {}
        have_elements = have.get(lst) or {}
        diff = list_diff_want_only(want_elements, have_elements)
    return diff


def get_lst_same_for_dicts(want, have, lst):
    """
    This function generates a list containing values
    that are common for list in want and list in have dict
    :param want: dict object to want
    :param have: dict object to have
    :param lst: list the comparison on
    :return: new list object with values which are common in want and have.
    """
    diff = None
    if want and have:
        want_list = want.get(lst) or {}
        have_list = have.get(lst) or {}
        # NOTE: ``want_list and have_list`` short-circuits to have_list
        # when want_list is truthy, and to the empty value otherwise.
        diff = [
            i
            for i in want_list and have_list
            if i in have_list and i in want_list
        ]
    return diff


def list_diff_have_only(want_list, have_list):
    """
    This function generated the list containing values
    that are only in have list.
    :param want_list: list of "wanted" values
    :param have_list: list of "existing" values
    :return: new list with values which are only in have list
        (None when have_list is empty/None).
    """
    if have_list and not want_list:
        diff = have_list
    elif not have_list:
        diff = None
    else:
        diff = [
            i
            for i in have_list + want_list
            if i in have_list and i not in want_list
        ]
    return diff


def list_diff_want_only(want_list, have_list):
    """
    This function generated the list containing values
    that are only in want list.
    :param want_list: list of "wanted" values
    :param have_list: list of "existing" values
    :return: new list with values which are only in want list
        (None when have_list is truthy but want_list is empty/None).
    """
    if have_list and not want_list:
        diff = None
    elif not have_list:
        diff = want_list
    else:
        diff = [
            i
            for i in have_list + want_list
            if i in want_list and i not in have_list
        ]
    return diff


def search_dict_tv_in_list(d_val1, d_val2, lst, key1, key2):
    """
    This function return the dict object if it exist in list.
    :param d_val1: value expected under *key1*
    :param d_val2: value expected under *key2*
    :param lst: list of dicts to search
    :param key1: first dict key to compare
    :param key2: second dict key to compare
    :return: the first dict matching both pairs, or None
    """
    obj = next(
        (
            item
            for item in lst
            if item[key1] == d_val1 and item[key2] == d_val2
        ),
        None,
    )
    if obj:
        return obj
    else:
        return None


def key_value_in_dict(have_key, have_value, want_dict):
    """
    This function checks whether the key and values exist in dict
    :param have_key: key to look for
    :param have_value: value that must be stored under that key
    :param want_dict: dict to inspect
    :return: True if the (key, value) pair is present, else False
    """
    for key, value in iteritems(want_dict):
        if key == have_key and value == have_value:
            return True
    return False


def is_dict_element_present(dict, key):
    """
    This function checks whether the key is present in dict.
    NOTE(review): the parameter shadows the ``dict`` builtin; it is kept
    for backward compatibility with keyword callers.
    :param dict: dict whose keys are inspected
    :param key: key to look for
    :return: True if *key* is a key of *dict*, else False
    """
    for item in dict:
        if item == key:
            return True
    return False


def get_ip_address_version(address):
    """
    This function returns the version of IP address
    :param address: IP address (optionally with a "/prefix" suffix,
        which is stripped before parsing)
    :return: 4 or 6
    """
    try:
        # Python 2: normalise to unicode for the ipaddress module.
        address = unicode(address)
    except NameError:
        # Python 3: ``unicode`` does not exist; str is already text.
        address = str(address)
    version = ipaddress.ip_address(address.split("/")[0]).version
    return version


def get_route_type(address):
    """
    This function returns the route type based on IP address
    :param address: IP address or prefix
    :return: "route6" for IPv6, "route" for IPv4
    """
    version = get_ip_address_version(address)
    if version == 6:
        return "route6"
    elif version == 4:
        return "route"
gpl-3.0
liam-middlebrook/yaml-cpp
test/gmock-1.7.0/gtest/xcode/Scripts/versiongenerate.py
3088
4536
#!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""A script to prepare version information for use in the gtest Info.plist.

This script extracts the version information from the configure.ac file and
uses it to generate a header file containing the same information. The
#defines in this header file will be included in during the generation of
the Info.plist of the framework, giving the correct value to the version
shown in the Finder.

This script makes the following assumptions (these are faults of the
script, not problems with the Autoconf):
  1. The AC_INIT macro will be contained within the first 1024 characters
     of configure.ac
  2. The version string will be 3 integers separated by periods and will be
     surrounded by square brackets, "[" and "]" (e.g. [1.0.1]). The first
     segment represents the major version, the second represents the minor
     version and the third represents the fix version.
  3. No ")" character exists between the opening "(" and closing ")" of
     AC_INIT, including in comments and character strings.
"""

import re
import sys

# Extract three integers separated by periods and surrounded by square
# brackets (e.g. "[1.0.1]") between "AC_INIT(" and ")".  Non-greedy
# matching (*?) keeps the match inside a single AC_INIT(...) call
# instead of spanning from the first "(" to the last ")" in the file.
version_expression = re.compile(r"AC_INIT\(.*?\[(\d+)\.(\d+)\.(\d+)\].*?\)",
                                re.DOTALL)


def main():
    """Read <input_dir>/configure.ac and write <output_dir>/Version.h."""
    # Read the command line arguments (input dir and output dir).
    if len(sys.argv) < 3:
        # print() with a single argument works on both Python 2 and 3;
        # the original "print x" statement was Python-2-only.
        print("Usage: versiongenerate.py input_dir output_dir")
        sys.exit(1)
    input_dir = sys.argv[1]
    output_dir = sys.argv[2]

    # Read the first 1024 characters of configure.ac; AC_INIT is assumed
    # to appear within them (assumption 1 above).  The original left the
    # file open on error; "with" guarantees it is closed.
    with open("%s/configure.ac" % input_dir, "r") as config_file:
        opening_string = config_file.read(1024)

    version_values = version_expression.search(opening_string)
    if version_values is None:
        # Fail with a clear message instead of an AttributeError.
        print("versiongenerate.py: no AC_INIT version found in configure.ac")
        sys.exit(1)
    major_version = version_values.group(1)
    minor_version = version_values.group(2)
    fix_version = version_values.group(3)

    # Write the version information to a header file to be included in
    # the Info.plist file.  The header text below is emitted verbatim.
    file_data = """//
// DO NOT MODIFY THIS FILE (but you can delete it)
//
// This file is autogenerated by the versiongenerate.py script. This script
// is executed in a "Run Script" build phase when creating gtest.framework. This
// header file is not used during compilation of C-source. Rather, it simply
// defines some version strings for substitution in the Info.plist. Because of
// this, we are not not restricted to C-syntax nor are we using include guards.
//

#define GTEST_VERSIONINFO_SHORT %s.%s
#define GTEST_VERSIONINFO_LONG %s.%s.%s

""" % (major_version, minor_version,
       major_version, minor_version, fix_version)
    with open("%s/Version.h" % output_dir, "w") as version_file:
        version_file.write(file_data)


if __name__ == "__main__":
    main()
mit
leomoon-studios/blender-light-studio
light_profiles.py
1
16362
import bpy from bpy.props import BoolProperty, StringProperty, PointerProperty, FloatProperty, EnumProperty import os, sys, subprocess from . common import * from . light_data import * from itertools import chain from . operators.modal import close_control_panel from . import light_list _ = os.sep class ListItem(bpy.types.PropertyGroup): """ Group of properties representing an item in the list """ def update_name(self, context): print("{} : {}".format(repr(self.name), repr(context))) name: StringProperty( name="Profile Name", default="Untitled") empty_name: StringProperty( name="Name of Empty that holds the profile", description="", default="") class LIST_OT_NewItem(bpy.types.Operator): bl_idname = "lls_list.new_profile" bl_label = "Add a new profile" bl_options = {"INTERNAL"} handle: BoolProperty(default=True) def execute(self, context): props = context.scene.LLStudio item = props.profile_list.add() lls_collection = get_lls_collection(context) # unlink existing profiles for profile in (prof for prof in context.scene.objects if prof.name.startswith('LLS_PROFILE.') and isFamily(prof)): profile_collection = profile.users_collection[0] lls_collection.children.unlink(profile_collection) # idx = 0 for id in (i.name.split('Profile ')[1] for i in props.profile_list if i.name.startswith('Profile ')): try: id = int(id) except ValueError: continue if id > idx: idx = id item.name = 'Profile '+str(idx+1) ''' Add Hierarchy stuff ''' # before A = set(bpy.data.objects[:]) script_file = os.path.realpath(__file__) dir = os.path.dirname(script_file) bpy.ops.wm.append(filepath=_+'LLS4.blend'+_+'Object'+_, directory=os.path.join(dir,"LLS4.blend"+_+"Object"+_), filename="LLS_PROFILE.000", active_collection=True) # after operation B = set(bpy.data.objects[:]) # whats the difference profile = (A ^ B).pop() profile.parent = [ob for ob in context.scene.objects if ob.name.startswith('LEOMOON_LIGHT_STUDIO')][0] profile.use_fake_user = True profile_collection = 
bpy.data.collections.new(profile.name) profile_collection.use_fake_user = True lls_collection = [c for c in context.scene.collection.children if c.name.startswith('LLS')][0] lls_collection.children.link(profile_collection) replace_link(profile, profile.name) item.empty_name = profile.name handle = None if self.handle: bpy.ops.object.empty_add() handle = context.active_object handle.name = "LLS_HANDLE" handle.empty_display_type = 'SPHERE' handle.parent = profile handle.protected = True handle.use_fake_user = True replace_link(handle, profile.name) props.last_empty = profile.name props.list_index = len(props.profile_list)-1 light_list.update_light_list_set(context) return{'FINISHED'} class LIST_OT_DeleteItem(bpy.types.Operator): bl_idname = "lls_list.delete_profile" bl_label = "Delete the selected profile" bl_options = {"INTERNAL"} @classmethod def poll(self, context): """ Enable if there's something in the list """ return len(context.scene.LLStudio.profile_list) def execute(self, context): props = context.scene.LLStudio index = props.list_index props.profile_list.remove(index) ''' Delete/Switch Hierarchy stuff ''' #delete objects from current profile obsToRemove = family(context.scene.objects[props.last_empty]) collectionsToRemove = set() for ob in obsToRemove: collectionsToRemove.update(ob.users_collection) ob.use_fake_user = False bpy.ops.object.delete({"selected_objects": obsToRemove}, use_global=True) for c in collectionsToRemove: if c.name.startswith('LLS_'): bpy.data.collections.remove(c) # update index if index > 0: index = index - 1 props.list_index = index light_list.update_light_list_set(context) return{'FINISHED'} class LIST_OT_CopyItem(bpy.types.Operator): bl_idname = "lls_list.copy_profile" bl_label = "Copy profile" bl_options = {"INTERNAL"} @classmethod def poll(self, context): """ Enable if there's something in the list. 
""" return len(context.scene.LLStudio.profile_list) def execute(self, context): props = context.scene.LLStudio list = props.profile_list lls_collection, profile_collection = llscol_profilecol(context) profile_copy = duplicate_collection(profile_collection, None) profile = [ob for ob in profile_copy.objects if ob.name.startswith('LLS_PROFILE')][0] handle = [ob for ob in profile.children if ob.name.startswith('LLS_HANDLE')][0] for l in [lm for lc in profile_copy.children if lc.name.startswith('LLS_Light') for lm in lc.objects if lm.name.startswith('LLS_LIGHT_MESH')]: l.constraints['Copy Location'].target = handle new_list_item = props.profile_list.add() new_list_item.empty_name = profile_copy.name_full new_list_item.name = props.profile_list[props.list_index].name + ' Copy' # place copied profile next to source profile lastItemId = len(props.profile_list)-1 while lastItemId > props.list_index+1: list.move(lastItemId-1, lastItemId) lastItemId -= 1 return{'FINISHED'} class LIST_OT_MoveItem(bpy.types.Operator): bl_idname = "lls_list.move_profile" bl_label = "Move profile" bl_options = {"INTERNAL"} direction: bpy.props.EnumProperty( items=( ('UP', 'Up', ""), ('DOWN', 'Down', ""),)) @classmethod def poll(self, context): """ Enable if there's something in the list. """ return len(context.scene.LLStudio.profile_list) def move_index(self, context): """ Move index of an item render queue while clamping it. 
""" props = context.scene.LLStudio index = props.list_index list_length = len(props.profile_list) - 1 # (index starts at 0) new_index = 0 if self.direction == 'UP': new_index = index - 1 elif self.direction == 'DOWN': new_index = index + 1 new_index = max(0, min(new_index, list_length)) props.list_index = new_index def execute(self, context): props = context.scene.LLStudio list = props.profile_list index = props.list_index if self.direction == 'DOWN': neighbor = index + 1 list.move(index,neighbor) elif self.direction == 'UP': neighbor = index - 1 list.move(neighbor, index) else: return{'CANCELLED'} self.move_index(context) return{'FINISHED'} def update_list_index(self, context): props = context.scene.LLStudio if len(props.profile_list) == 0: return selected_profile = props.profile_list[self.list_index] if selected_profile.empty_name == props.last_empty: return print('Index update {}'.format(self.list_index)) #unlink current profile lls_collection = get_lls_collection(context) profile_collection = [c for c in lls_collection.children if c.name.startswith('LLS_PROFILE')] profile_collection = profile_collection[0] if profile_collection else None if profile_collection: lls_collection.children.unlink(profile_collection) #link selected profile lls_collection.children.link(bpy.data.collections[selected_profile.empty_name]) props.last_empty = selected_profile.empty_name from . operators.modal import update_light_sets, panel_global if panel_global: update_light_sets(panel_global, bpy.context, always=True) light_list.update_light_list_set(context) # import/export import json, time script_file = os.path.realpath(__file__) dir = os.path.dirname(script_file) VERSION = 3 from . 
import light_operators def parse_profile(context, props, profiles, version=VERSION, internal_copy=False): plist = props.profile_list for profile in profiles: if VERBOSE: print('_'*5, 'Parse profile', '_'*5) print(json.dumps(profile, indent=4, separators=(',', ': '))) bpy.ops.lls_list.new_profile() props.list_index = len(plist)-1 plist[-1].name = profile["name"] if not internal_copy: date = time.localtime() plist[-1].name += ' {}-{:02}-{:02} {:02}:{:02}'.format(str(date.tm_year)[-2:], date.tm_mon, date.tm_mday, date.tm_hour, date.tm_min) profile_empty = context.scene.objects[plist[-1].empty_name] if version > 1: handle = getProfileHandle(profile_empty) handle.location.x = profile['handle_position'][0] handle.location.y = profile['handle_position'][1] handle.location.z = profile['handle_position'][2] for light in profile["lights"]: if version < 3: # most of light settings are moved to advanced sub dict. copy whole dict for the simplicity sake light['advanced'] = light.copy() light_operators.light_from_dict(light, profile_empty.users_collection[0]) class ImportProfiles(bpy.types.Operator): bl_idname = "lls_list.import_profiles" bl_label = "Import profiles" bl_description = "Import profiles from file" #bl_options = {"INTERNAL"} filepath: bpy.props.StringProperty(default="*.lls", subtype="FILE_PATH") @classmethod def poll(self, context): return True def execute(self, context): props = context.scene.LLStudio with open(self.filepath, 'r') as f: file = f.read() f.closed file = json.loads(file) parse_profile(context, props, file["profiles"], float(file["version"])) light_list.update_light_list_set(context) return{'FINISHED'} def invoke(self, context, event): context.window_manager.fileselect_add(self) return {'RUNNING_MODAL'} def compose_profile(list_index): props = bpy.context.scene.LLStudio profile_dict = {} profile_dict['name'] = props.profile_list[list_index].name profile_dict['lights']= [] profile = bpy.data.objects[props.profile_list[list_index].empty_name] 
profile_collection = get_collection(profile) handle = getProfileHandle(profile) profile_dict['handle_position'] = [handle.location.x, handle.location.y, handle.location.z] for light_collection in profile_collection.children: light = salvage_data(light_collection) profile_dict['lights'].append(light.dict) profile_dict['lights'].sort(key=lambda x: x["order_index"]) # import json # print(json.dumps(profile_dict, indent=4, separators=(',', ': '))) return profile_dict class ExportProfiles(bpy.types.Operator): bl_idname = "lls_list.export_profiles" bl_label = "Export profiles to file" bl_description = "Export profile(s) to file" #bl_options = {"INTERNAL"} filepath: bpy.props.StringProperty(default="profile.lls", subtype="FILE_PATH") all: bpy.props.BoolProperty(default=False, name="Export All Profiles") @classmethod def poll(self, context): """ Enable if there's something in the list """ return len(context.scene.LLStudio.profile_list) def execute(self, context): props = context.scene.LLStudio index = props.list_index export_file = {} date = time.localtime() export_file['date'] = '{}-{:02}-{:02} {:02}:{:02}'.format(date.tm_year, date.tm_mon, date.tm_mday, date.tm_hour, date.tm_min) export_file['version'] = VERSION profiles_to_export = export_file['profiles'] = [] if self.all: for p in range(len(props.profile_list)): try: profiles_to_export.append(compose_profile(p)) except Exception: self.report({'WARNING'}, 'Malformed profile %s. Omitting.' % props.profile_list[p].name) else: try: profiles_to_export.append(compose_profile(index)) except Exception: self.report({'WARNING'}, 'Malformed profile %s. Omitting.' 
% props.profile_list[index].name) with open(self.filepath, 'w') as f: f.write(json.dumps(export_file, indent=4)) f.closed return{'FINISHED'} def invoke(self, context, event): self.filepath = "profile.lls" context.window_manager.fileselect_add(self) return {'RUNNING_MODAL'} class FindMissingTextures(bpy.types.Operator): bl_idname = "lls.find_missing_textures" bl_label = "Find Missing Textures" bl_description = "Find missing light textures" #bl_options = {"INTERNAL"} @classmethod def poll(self, context): """ Enable if there's something in the list """ return len(context.scene.LLStudio.profile_list) def execute(self, context): bpy.ops.file.find_missing_files(directory=os.path.join(dir, "textures_real_lights")) bpy.context.scene.frame_current = bpy.context.scene.frame_current return{'FINISHED'} class OpenTexturesFolder(bpy.types.Operator): bl_idname = "lls.open_textures_folder" bl_label = "Open Textures Folder" bl_description = "Open textures folder" #bl_options = {"INTERNAL"} #@classmethod #def poll(self, context): # """ Enable if there's something in the list """ # return len(context.scene.LLStudio.profile_list) def execute(self, context): path = os.path.join(dir, "textures_real_lights") if sys.platform == 'darwin': subprocess.Popen(["open", path]) elif sys.platform == 'linux2': subprocess.Popen(["xdg-open", path]) elif sys.platform == 'win32': subprocess.Popen(["explorer", path]) return{'FINISHED'} class CopyProfileToScene(bpy.types.Operator): """ Copy Light Profile to Scene """ bl_idname = "lls_list.copy_profile_to_scene" bl_label = "Copy Profile to Scene" bl_property = "sceneprop" def get_scenes(self, context): return ((s.name, s.name, "Scene name") for i,s in enumerate(bpy.data.scenes))#global_vars["scenes"] sceneprop: EnumProperty(items = get_scenes) @classmethod def poll(self, context): """ Enable if there's something in the list """ return len(context.scene.LLStudio.profile_list) def execute(self, context): props = context.scene.LLStudio index = 
props.list_index profiles = [compose_profile(index),] context.window.scene = bpy.data.scenes[self.sceneprop] context.scene.render.engine = 'CYCLES' if not context.scene.LLStudio.initialized: bpy.ops.scene.create_leomoon_light_studio() parse_profile(context, context.scene.LLStudio, profiles, internal_copy=True) close_control_panel() return{'FINISHED'} def invoke(self, context, event): wm = context.window_manager wm.invoke_search_popup(self) return {'FINISHED'} class CopyProfileMenu(bpy.types.Operator): bl_idname = "lls_list.copy_profile_menu" bl_label = "Copy selected profile" @classmethod def poll(self, context): """ Enable if there's something in the list """ return len(context.scene.LLStudio.profile_list) def execute(self, context): wm = context.window_manager def draw(self, context): layout = self.layout layout.operator_context='INVOKE_AREA' col = layout.column(align=True) col.operator('lls_list.copy_profile') col.operator('lls_list.copy_profile_to_scene') wm.popup_menu(draw, title="Copy Profile") return {'FINISHED'}
gpl-3.0
stanford-ssi/balloons-VALBAL
utils/parser.py
1
11921
#!/usr/bin/env python # Stanford Student Space Initiative # Balloons | VALBAL | May 2017 # Davy Ragland | dragland@stanford.edu # File: parser.py # -------------------------- # Server side script to parse incoming messages # from RockBLOCK #******************************* SETUP *************************************** import math #****************************** GLOBALS ************************************** #binary is the string we get from RockBLOCK binary = "0000000000010100010010110101001110101011000101001000111101101100000110010001010000011000110010101111111111101001000011000000000000000000000000000000000000000000000000000000000000000001010011100000011101011100011011101000000101011100000000000000000011110010011000100111110000010110010001100100110001001110001011100110011111111000000011100000000001010000101000000011100000011101111101000000000000000000000000001111111100001001111000000000000100111100000000000000001000000001111011000000110000110011111000001110011000001011011011011011001100011111111000110100001010001010000001000001001100110000000000000000010011000100000010000010011001100000000000000000000000000000"; #Regex is what is pasted into "advanced parser" regex = """lengthBits += compressVariable(data.TIME / 1000, 0, 3000000, 20, lengthBits); // time lengthBits += compressVariable(data.LAT_GPS, -90, 90, 21, lengthBits); // latitude lengthBits += compressVariable(data.LONG_GPS, -180, 180, 22, lengthBits); // longitude lengthBits += compressVariable(data.ALTITUDE_BAROMETER, -2000, 40000, 16, lengthBits); // altitude_barometer lengthBits += compressVariable(data.ALTITUDE_GPS, -2000, 40000, 14, lengthBits); lengthBits += compressVariable(data.ASCENT_RATE, -10, 10, 11, lengthBits); lengthBits += compressVariable(data.VALVE_INCENTIVE, -50, 10, 12, lengthBits); lengthBits += compressVariable(data.BALLAST_INCENTIVE, -50, 10, 12, lengthBits); lengthBits += compressVariable(data.VALVE_STATE, 0, 1, 1, lengthBits); lengthBits += compressVariable(data.BALLAST_STATE, 0, 1, 
1, lengthBits); lengthBits += compressVariable(data.VALVE_QUEUE / 1000, 0, 1023, 10, lengthBits); lengthBits += compressVariable(data.BALLAST_QUEUE / 1000, 0, 1023, 10, lengthBits); lengthBits += compressVariable(data.VALVE_TIME_TOTAL / 1000, 0, 16383, 13, lengthBits); // valve time total lengthBits += compressVariable(data.BALLAST_TIME_TOTAL / 1000, 0, 16383, 13, lengthBits); // ballast time total lengthBits += compressVariable(data.VALVE_NUM_ACTIONS, 0, 63, 6, lengthBits); lengthBits += compressVariable(data.BALLAST_NUM_ACTIONS, 0, 63, 6, lengthBits); lengthBits += compressVariable(data.VALVE_NUM_ATTEMPTS, 0, 63, 6, lengthBits); lengthBits += compressVariable(data.BALLAST_NUM_ATTEMPTS, 0, 63, 6, lengthBits); lengthBits += compressVariable(data.BALLAST_NUM_OVERCURRENTS, 0, 63, 6, lengthBits); lengthBits += compressVariable(data.CUTDOWN_STATE, 0, 1, 1, lengthBits); lengthBits += compressVariable(data.TEMP_INT, -85, 65, 9, lengthBits); lengthBits += compressVariable(data.JOULES_TOTAL, 0, 1572863, 18, lengthBits); lengthBits += compressVariable(data.VOLTAGE_PRIMARY, 0, 6, 9, lengthBits); lengthBits += compressVariable(data.VOLTAGE_5V, 4, 6, 7, lengthBits); lengthBits += compressVariable(data.CURRENT_TOTAL_AVG, 0, 4095, 12, lengthBits); lengthBits += compressVariable(data.CURRENT_TOTAL_MIN, 0, 4095, 12, lengthBits); lengthBits += compressVariable(data.CURRENT_TOTAL_MAX, 0, 4095, 12, lengthBits); lengthBits += compressVariable(data.CURRENT_RB_AVG, 0, 1023, 8, lengthBits); lengthBits += compressVariable(data.CURRENT_RB_MAX, 0, 1023, 8, lengthBits); lengthBits += compressVariable(data.CURRENT_MOTOR_VALVE_AVG, 0, 1023, 8, lengthBits); lengthBits += compressVariable(data.CURRENT_MOTOR_VALVE_MAX, 0, 1023, 8, lengthBits); lengthBits += compressVariable(data.CURRENT_MOTOR_BALLAST_AVG, 0, 1023, 8, lengthBits); lengthBits += compressVariable(data.CURRENT_MOTOR_BALLAST_MAX, 0, 1023, 8, lengthBits); lengthBits += compressVariable(data.CURRENT_PAYLOAD_AVG, 0, 1023, 8, lengthBits); 
lengthBits += compressVariable(data.CURRENT_PAYLOAD_MAX, 0, 1023, 8, lengthBits); lengthBits += compressVariable(data.TEMP_EXT, -100, 30, 8, lengthBits); lengthBits += compressVariable(data.LOOP_TIME_MAX, 0, 10239, 10, lengthBits); lengthBits += compressVariable(data.RB_SENT_COMMS, 0, 8191, 13, lengthBits); lengthBits += compressVariable(data.RB_SLEEP_FAILS, 0, 8191, 13, lengthBits); lengthBits += compressVariable(data.MANUAL_MODE, 0, 1, 1, lengthBits); lengthBits += compressVariable(data.REPORT_MODE, 0, 2, 2, lengthBits); lengthBits += compressVariable(data.SHOULD_REPORT, 0, 1, 1, lengthBits); if (data.SHOULD_REPORT || data.REPORT_MODE != 0) { lengthBits += compressVariable(data.POWER_STATE_LED, 0, 1, 1, lengthBits); // LED Power state lengthBits += compressVariable(data.POWER_STATE_RB, 0, 1, 1, lengthBits); // RB Power State lengthBits += compressVariable(data.POWER_STATE_GPS, 0, 1, 1, lengthBits); // GPS Power State lengthBits += compressVariable(data.POWER_STATE_PAYLOAD, 0, 1, 1, lengthBits); // Payload Power State lengthBits += compressVariable(data.NUM_SATS_GPS, 0, 15, 3, lengthBits); lengthBits += compressVariable(data.INCENTIVE_NOISE, 0, 4, 8, lengthBits); lengthBits += compressVariable(data.RE_ARM_CONSTANT, 0, 4, 8, lengthBits); lengthBits += compressVariable(data.VALVE_ALT_LAST, -2000, 50000, 11, lengthBits); // Altitude During Last Venting Event lengthBits += compressVariable(data.BALLAST_ALT_LAST, -2000, 50000, 11, lengthBits); // Altitude During Last Ballast Event lengthBits += compressVariable(data.DEBUG_STATE, 0, 1, 1, lengthBits); lengthBits += compressVariable(data.FORCE_VALVE, 0, 1, 1, lengthBits); lengthBits += compressVariable(data.FORCE_BALLAST, 0, 1, 1, lengthBits); lengthBits += compressVariable(data.BMP_1_ENABLE, 0, 1, 1, lengthBits); lengthBits += compressVariable(data.BMP_2_ENABLE, 0, 1, 1, lengthBits); lengthBits += compressVariable(data.BMP_3_ENABLE, 0, 1, 1, lengthBits); lengthBits += compressVariable(data.BMP_4_ENABLE, 0, 1, 1, 
lengthBits); lengthBits += compressVariable(log2(data.BMP_1_REJECTIONS + 1), 0, 6, 4, lengthBits); // sensor_1_logrejections lengthBits += compressVariable(log2(data.BMP_2_REJECTIONS + 1), 0, 6, 4, lengthBits); // sensor_2_logrejections lengthBits += compressVariable(log2(data.BMP_3_REJECTIONS + 1), 0, 6, 4, lengthBits); // sensor_3_logrejections lengthBits += compressVariable(log2(data.BMP_4_REJECTIONS + 1), 0, 6, 4, lengthBits); // sensor_4_logrejections lengthBits += compressVariable(data.BLACK_BODY_TEMP, -100, 30, 8, lengthBits); } if (data.SHOULD_REPORT || data.REPORT_MODE == 2) { lengthBits += compressVariable(data.RB_INTERVAL / 1000, 0, 1023, 10, lengthBits); // RB communication interval lengthBits += compressVariable(data.GPS_INTERVAL / 1000, 0, 1023, 10, lengthBits); // GPS communication interval lengthBits += compressVariable(data.RB_SHOULD_SLEEP, 0, 1, 1, lengthBits); lengthBits += compressVariable(data.PRESS_BASELINE, 0, 131071, 17, lengthBits); // Pressure baseline lengthBits += compressVariable(data.INCENTIVE_THRESHOLD, 0, 4, 3, lengthBits); lengthBits += compressVariable(data.BALLAST_ARM_ALT, -2000, 40000, 16, lengthBits); // Ballast Arming Altitude lengthBits += compressVariable(data.BALLAST_REVERSE_INTERVAL / 1000, 0, 1599, 4, lengthBits); // Ballast reverse interval lengthBits += compressVariable(data.VALVE_LEAK_INTERVAL / 1000, 0, 1599, 4, lengthBits); lengthBits += compressVariable(data.BALLAST_STALL_CURRENT, 0, 511, 4, lengthBits); lengthBits += compressVariable(data.VALVE_OPENING_DURATION / 1000, 0, 10, 5, lengthBits); lengthBits += compressVariable(data.VALVE_CLOSING_DURATION / 1000, 0, 10, 5, lengthBits); lengthBits += compressVariable(data.VALVE_SETPOINT, -2000, 50000, 11, lengthBits); lengthBits += compressVariable(data.VALVE_VENT_DURATION / 1000, 0, 1023, 6, lengthBits); lengthBits += compressVariable(data.VALVE_FORCE_DURATION / 1000, 0, 1023, 6, lengthBits); lengthBits += compressVariable(data.VALVE_VELOCITY_CONSTANT, 0, 5, 8, 
lengthBits); // Valve Speed Constant lengthBits += compressVariable(1.0 / data.VALVE_ALTITUDE_DIFF_CONSTANT, 0, 4095, 8, lengthBits); // Valve Altitude Difference Constant lengthBits += compressVariable(1.0 / data.VALVE_LAST_ACTION_CONSTANT, 0, 4095, 8, lengthBits); // Valve last action constant lengthBits += compressVariable(data.BALLAST_SETPOINT, -2000, 50000, 11, lengthBits); lengthBits += compressVariable(data.BALLAST_DROP_DURATION / 1000, 0, 1023, 6, lengthBits); lengthBits += compressVariable(data.BALLAST_FORCE_DURATION / 1000, 0, 1023, 6, lengthBits); lengthBits += compressVariable(data.BALLAST_VELOCITY_CONSTANT, 0, 5, 8, lengthBits); // Ballast Speed Constant lengthBits += compressVariable(1.0 / data.BALLAST_ALTITUDE_DIFF_CONSTANT,0, 4095, 8, lengthBits); // Ballast Altitude Difference Constant lengthBits += compressVariable(1.0 / data.BALLAST_LAST_ACTION_CONSTANT, 0, 4095, 8, lengthBits); // Ballast last action constant""" names = [] mins = [] maxs = [] bits = [] #****************************** HELPERS ************************************* def setupREGEX(): for line in regex.split('\n'): csv = line.split(",") names.append(csv[0].split("data.")[1]) mins.append(int(csv[1].replace(" ", ""))) maxs.append(int(csv[2].replace(" ", ""))) bits.append(int(csv[3].replace(" ", ""))) def parseMessage(message): curr = 0 for i in range(len(names)): num = message[curr:(curr + bits[i])] curr = curr + bits[i] adc = int(num, 2) value = mins[i] + adc * ((maxs[i] - mins[i]) / (math.pow(2, bits[i]) - 1)) print(names[i] + ":" + str(value)) #******************************** MAIN *************************************** setupREGEX() parseMessage(binary)
gpl-3.0
osvalr/odoo
addons/sale_analytic_plans/sale_analytic_plans.py
381
1765
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp.osv import fields, osv class sale_order_line(osv.osv): _inherit = 'sale.order.line' _columns = { 'analytics_id': fields.many2one('account.analytic.plan.instance', 'Analytic Distribution'), } def invoice_line_create(self, cr, uid, ids, context=None): if context is None: context = {} line_obj = self.pool.get('account.invoice.line') create_ids = super(sale_order_line, self).invoice_line_create(cr, uid, ids, context=context) i = 0 for line in self.browse(cr, uid, ids, context=context): line_obj.write(cr, uid, [create_ids[i]], {'analytics_id': line.analytics_id.id}) i = i + 1 return create_ids # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
shhui/nova
nova/api/openstack/compute/plugins/v3/keypairs.py
10
6309
# Copyright 2011 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Keypair management extension.""" import webob import webob.exc from nova.api.openstack.compute.schemas.v3 import keypairs from nova.api.openstack import extensions from nova.api.openstack import wsgi from nova.api import validation from nova.compute import api as compute_api from nova import exception from nova.openstack.common.gettextutils import _ ALIAS = 'keypairs' authorize = extensions.extension_authorizer('compute', 'v3:' + ALIAS) soft_authorize = extensions.soft_extension_authorizer('compute', 'v3:' + ALIAS) class KeypairController(object): """Keypair API controller for the OpenStack API.""" def __init__(self): self.api = compute_api.KeypairAPI() def _filter_keypair(self, keypair, **attrs): clean = { 'name': keypair.name, 'public_key': keypair.public_key, 'fingerprint': keypair.fingerprint, } for attr in attrs: clean[attr] = keypair[attr] return clean @extensions.expected_errors((400, 409, 413)) @wsgi.response(201) @validation.schema(keypairs.create) def create(self, req, body): """Create or import keypair. Sending name will generate a key and return private_key and fingerprint. 
You can send a public_key to add an existing ssh key params: keypair object with: name (required) - string public_key (optional) - string """ context = req.environ['nova.context'] authorize(context, action='create') params = body['keypair'] name = params['name'] try: if 'public_key' in params: keypair = self.api.import_key_pair(context, context.user_id, name, params['public_key']) keypair = self._filter_keypair(keypair, user_id=True) else: keypair, private_key = self.api.create_key_pair( context, context.user_id, name) keypair = self._filter_keypair(keypair, user_id=True) keypair['private_key'] = private_key return {'keypair': keypair} except exception.KeypairLimitExceeded: msg = _("Quota exceeded, too many key pairs.") raise webob.exc.HTTPRequestEntityTooLarge( explanation=msg, headers={'Retry-After': 0}) except exception.InvalidKeypair as exc: raise webob.exc.HTTPBadRequest(explanation=exc.format_message()) except exception.KeyPairExists as exc: raise webob.exc.HTTPConflict(explanation=exc.format_message()) @wsgi.response(204) @extensions.expected_errors(404) def delete(self, req, id): """Delete a keypair with a given name.""" context = req.environ['nova.context'] authorize(context, action='delete') try: self.api.delete_key_pair(context, context.user_id, id) except exception.KeypairNotFound: raise webob.exc.HTTPNotFound() @extensions.expected_errors(404) def show(self, req, id): """Return data for the given key name.""" context = req.environ['nova.context'] authorize(context, action='show') try: keypair = self.api.get_key_pair(context, context.user_id, id) except exception.KeypairNotFound: raise webob.exc.HTTPNotFound() return {'keypair': self._filter_keypair(keypair)} @extensions.expected_errors(()) def index(self, req): """List of keypairs for a user.""" context = req.environ['nova.context'] authorize(context, action='index') key_pairs = self.api.get_key_pairs(context, context.user_id) rval = [] for key_pair in key_pairs: rval.append({'keypair': 
self._filter_keypair(key_pair)}) return {'keypairs': rval} class Controller(wsgi.Controller): def _add_key_name(self, req, servers): for server in servers: db_server = req.get_db_instance(server['id']) # server['id'] is guaranteed to be in the cache due to # the core API adding it in its 'show'/'detail' methods. server['key_name'] = db_server['key_name'] def _show(self, req, resp_obj): if 'server' in resp_obj.obj: server = resp_obj.obj['server'] self._add_key_name(req, [server]) @wsgi.extends def show(self, req, resp_obj, id): context = req.environ['nova.context'] if soft_authorize(context): self._show(req, resp_obj) @wsgi.extends def detail(self, req, resp_obj): context = req.environ['nova.context'] if 'servers' in resp_obj.obj and soft_authorize(context): servers = resp_obj.obj['servers'] self._add_key_name(req, servers) class Keypairs(extensions.V3APIExtensionBase): """Keypair Support.""" name = "Keypairs" alias = ALIAS version = 1 def get_resources(self): resources = [ extensions.ResourceExtension('keypairs', KeypairController())] return resources def get_controller_extensions(self): controller = Controller() extension = extensions.ControllerExtension(self, 'servers', controller) return [extension] # use nova.api.extensions.server.extensions entry point to modify # server create kwargs def server_create(self, server_dict, create_kwargs): create_kwargs['key_name'] = server_dict.get('key_name')
apache-2.0
maithreyee/python-koans
python3/koans/about_packages.py
66
1910
#!/usr/bin/env python # -*- coding: utf-8 -*- # # This is very different to AboutModules in Ruby Koans # Our AboutMultipleInheritance class is a little more comparable # from runner.koan import * # # Package hierarchy of Python Koans project: # # contemplate_koans.py # koans/ # __init__.py # about_asserts.py # about_attribute_access.py # about_class_attributes.py # about_classes.py # ... # a_package_folder/ # __init__.py # a_module.py class AboutPackages(Koan): def test_subfolders_can_form_part_of_a_module_package(self): # Import ./a_package_folder/a_module.py from .a_package_folder.a_module import Duck duck = Duck() self.assertEqual(__, duck.name) def test_subfolders_become_modules_if_they_have_an_init_module(self): # Import ./a_package_folder/__init__.py from .a_package_folder import an_attribute self.assertEqual(__, an_attribute) # ------------------------------------------------------------------ def test_use_absolute_imports_to_import_upper_level_modules(self): # Import /contemplate_koans.py import contemplate_koans self.assertEqual(__, contemplate_koans.__name__) # contemplate_koans.py is the root module in this package because its # the first python module called in koans. # # If contemplate_koans.py was based in a_package_folder that would be # the root folder, which would make reaching the koans folder # almost impossible. So always leave the starting python script in # a folder which can reach everything else. def test_import_a_module_in_a_subfolder_folder_using_an_absolute_path(self): # Import contemplate_koans.py/koans/a_package_folder/a_module.py from koans.a_package_folder.a_module import Duck self.assertEqual(__, Duck.__module__)
mit
hainm/mdtraj
mdtraj/geometry/internal.py
15
20595
############################################################################## # MDTraj: A Python Library for Loading, Saving, and Manipulating # Molecular Dynamics Trajectories. # Copyright 2012-2013 Stanford University and the Authors # # Authors: Robert McGibbon # Contributors: Kyle A. Beauchamp # # MDTraj is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 2.1 # of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with MDTraj. If not, see <http://www.gnu.org/licenses/>. ############################################################################## """Methods to calculate internal coordinates from the cartesian coordinates This code is new and should be considered __unstable__ """ ############################################################################## # Imports ############################################################################## from __future__ import print_function, division import numpy as np from itertools import combinations from mdtraj.utils.six.moves import filter, xrange # itertools import logging from mdtraj.geometry.distance import compute_distances from mdtraj.geometry.dihedral import compute_dihedrals from mdtraj.geometry.angle import compute_angles from mdtraj.utils import import_ # these are covalent radii taken from the crystalographic data in nm # Dalton Trans., 2008, 2832-2838, DOI: 10.1039/B801115J # http://pubs.rsc.org/en/Content/ArticleLanding/2008/DT/b801115j COVALENT_RADII = {'C': 0.0762, 'N': 0.0706, 'O': 0.0661, 'H': 0.031, 'S': 0.105} logger = 
logging.getLogger(__name__) __all__ = ['get_redundant_internal_coordinates', 'get_nonredundant_internal_coordinates', 'get_connectivity', 'get_bond_connectivity', 'get_angle_connectivity', 'get_dihedral_connectivity', 'get_wilson_B', 'get_bond_derivs', 'get_angle_derivs', 'get_dihedral_derivs'] ################################################################################ # Get actual coordinates ################################################################################ def get_redundant_internal_coordinates(trajectory, **kwargs): """Compute internal coordinates from the cartesian coordinates This extracts all of the bond lengths, bond angles and dihedral angles from every frame in a trajectory. Parameters ---------- trajectory : mdtraj.Trajectory Trajectory object containing the internal coordinates Other Parameters ---------------- ibonds : np.ndarray, optional, shape[n_bonds, 2], dtype=int Each row gives the indices of two atoms involved in a bond iangles : np.ndarray, optional shape[n_angles, 3], dtype=int Each row gives the indices of three atoms which together make an angle idihedrals : np.ndarray, optional, shape[n_dihedrals, 4], dtype=int Each row gives the indices of the four atoms which together make a dihedral Notes ----- ibonds, iangles, and idihedrals will be computed usig the first frame in the trajectory, if not supplied Returns ------- internal_coords : np.ndarray, shape=[n_frames, n_bonds+n_angles+n_dihedrals] All of the internal coordinates collected into a big array, such that internal_coords[i,j] gives the jth coordinate for the ith frame. """ if 'ibonds' in kwargs and 'iangles' in kwargs and 'idihedrals' in kwargs: ibonds = kwargs['ibonds'] iangles = kwargs['iangles'] idihedrals = kwargs['idihedrals'] else: ibonds, iangles, idihedrals = get_connectivity(trajectory) # convert everything to the right shape and C ordering, since # all of these methods are in C and are going to need things to be # the right type. 
The methods will all do a copy for things that # aren't the right type, but hopefully we can only do the copy once # instead of three times if xyzlist really does need to be reordered # in memory xyzlist = np.array(trajectory.xyz, dtype=np.float32, order='c') ibonds = np.array(ibonds, dtype=np.int32, order='c') iangles = np.array(iangles, dtype=np.int32, order='c') idihedrals = np.array(idihedrals, dtype=np.int32, order='c') b = compute_distances(xyzlist, ibonds) a = compute_angles(xyzlist, iangles) d = compute_dihedrals(xyzlist, idihedrals, degrees=False) return np.hstack((b, a, d)) def get_nonredundant_internal_coordinates(trajectory, conformation, get_operator=False): """Compute nonredudant delocalized internal coordinates from the cartesian coordinates These are basically a set of 3N-6 linear combinations of bond lengths, bond angles and dihedral angles that span the full space of internal coordinates without being redundant. The procedure to generate them involves collecting a bunch of "primative" internal coordinates and then and then taking linear combinations correspondong to eigenvectors with nonzero corresponding eigenvalues of G=B*B.T, where B is the so called "Wilson B matrix" which relates small displacements in cartesian space to small displacements in the internal coordinate space. Notes ----- The projection operator from the redundant coordinate space into the active or nonredudant subspace is formed from the geometery in `conformation`, but is then applied unformly to all of the frames in trajectory. Parameters ---------- trajectory : mdtraj.Trajectory Trajectory object containing the cartesian coordinates of every frame in the dataset conformation : mdtraj.Trajectory Trajectort object containing a single frame (the first) to be used as the reference for defining the projection operator into the active space. 
get_operator : boolean Retreive the information necessary to define the cartesian -> nonredundant internal coordinates projection operator, including both the indices for generating the redudant internal coordinates and the linear operator that removes the redundant subspace. Returns ------- internal_coordinates : np.ndarray, shape[n_frames, 3*N-6], dtype=float The position of each frame in the trajectory, represented in internal coordinates (if get_operator == True) activespace : np.ndarray, shape[n_redundant, n_nonredundant], dtype=float The linear projection operator ibonds : np.ndarray, shape=[n_bonds, 2], dtype=int n_bonds x 2 array of indices, where each row is the index of two atom who participate in a bond. iangles : np.ndarray, shape[n_angles, 3], dtype=int n_angles x 3 array of indices, where each row is the index of three atoms m,n,o such that n is bonded to both m and o. idihedrals : np.ndarray, shape[n_dihedrals, 4], dtype=int All sets of 4 atoms A,B,C,D such that A is bonded to B, B is bonded to C, and C is bonded to D References ---------- Baker, Kessi, Delley J. Chem. Phys. 
105, 192 (1996); doi: 10.1063/1.471864 """ import scipy.linalg ibonds, iangles, idihedrals = get_connectivity(conformation) B = get_wilson_B(conformation, ibonds=ibonds, iangles=iangles, idihedrals=idihedrals) # reshape from (n_redundant, n_atoms, 3) to (n_redundant, n_atoms*3) B = B.reshape((B.shape[0], B.shape[1] * B.shape[2])) G = np.dot(B, B.T) eigenvalues, eigenvectors = scipy.linalg.eigh(G) # only the eigenvectors with nonzero eigenvalues # note: there should be 3N-6 of them activespace = eigenvectors[:, np.where(eigenvalues > 1e-10)[0]] if activespace.shape[1] != 3 * trajectory.xyz.shape[1] - 6: logger.error('Active eigenspace is %dd, but 3*N - 6 = %d', activespace.shape[1], 3 * trajectory.xyz.shape[1] - 6) redundant = get_redundant_internal_coordinates(trajectory, ibonds=ibonds, iangles=iangles, idihedrals=idihedrals) if get_operator: return np.dot(redundant, activespace), activespace, ibonds, iangles, idihedrals else: return np.dot(redundant, activespace) ################################################################################ # Compte the connectivity, getting lists of atom indices which form bonds, bond # angles and dihedrals ################################################################################ def get_connectivity(conf): """Get the indices of all the bonds/angles/dihedrals Parameters ---------- conf : MDTraj.Trajectory An MDTraj trajectory, only the first frame will be used. Returns ------- ibonds : np.ndarray, shape=[n_bonds, 2], dtype=int n_bonds x 2 array of indices, where each row is the index of two atom who participate in a bond. iangles : np.ndarray, shape[n_angles, 3], dtype=int n_angles x 3 array of indices, where each row is the index of three atoms m,n,o such that n is bonded to both m and o. 
idihedrals : np.ndarray, shape[n_dihedrals, 4], dtype=int All sets of 4 atoms A,B,C,D such that A is bonded to B, B is bonded to C, and C is bonded to D """ ibonds = get_bond_connectivity(conf) iangles = get_angle_connectivity(ibonds) idihedrals = get_dihedral_connectivity(ibonds) return ibonds, iangles, idihedrals def get_bond_connectivity(conf): """Get a list of all the bonds in a conformation Parameters ---------- conf : MDTraj.Trajectory An MDTraj trajectory, only the first frame will be used. Returns ------- ibonds : np.ndarray, shape=[n_bonds, 2], dtype=int n_bonds x 2 array of indices, where each row is the index of two atom who participate in a bond. Notes ----- Regular bonds are assigned to all pairs of atoms where the interatomic distance is less than or equal to 1.3 times the sum of their respective covalent radii. References ---------- Bakken and Helgaker, JCP Vol. 117, Num. 20 22 Nov. 2002 http://folk.uio.no/helgaker/reprints/2002/JCP117b_GeoOpt.pdf """ from scipy.spatial.distance import squareform, pdist xyz = conf.xyz[0, :, :] n_atoms = xyz.shape[0] elements = np.zeros(n_atoms, dtype='S1') atom_names = [a.name for a in conf.top.atoms()] for i in xrange(n_atoms): # name of the element that is atom[i] # take the first character of the AtomNames string, # after stripping off any digits elements[i] = atom_names[i].strip('123456789 ')[0] if not elements[i] in COVALENT_RADII.keys(): raise ValueError("I don't know about this AtomName: {}".format( atom_names[i])) distance_mtx = squareform(pdist(xyz)) connectivity = [] for i in xrange(n_atoms): for j in xrange(i + 1, n_atoms): # Regular bonds are assigned to all pairs of atoms where # the interatomic distance is less than or equal to 1.3 times the # sum of their respective covalent radii. 
d = distance_mtx[i, j] if d < 1.3 * (COVALENT_RADII[elements[i]] + COVALENT_RADII[elements[j]]): connectivity.append((i, j)) return np.array(connectivity) def get_angle_connectivity(ibonds): """Given the bonds, get the indices of the atoms defining all the bond angles Parameters ---------- ibonds : np.ndarray, shape=[n_bonds, 2], dtype=int n_bonds x 2 array of indices, where each row is the index of two atom who participate in a bond. Returns ------- iangles : np.ndarray, shape[n_angles, 3], dtype=int n_angles x 3 array of indices, where each row is the index of three atoms m,n,o such that n is bonded to both m and o. """ nx = import_('networkx') graph = nx.from_edgelist(ibonds) n_atoms = graph.number_of_nodes() iangles = [] for i in xrange(n_atoms): for (m, n) in combinations(graph.neighbors(i), 2): # so now the there is a bond angle m-i-n iangles.append((m, i, n)) return np.array(iangles) def get_dihedral_connectivity(ibonds): """Given the bonds, get the indices of the atoms defining all the dihedral angles Parameters ---------- ibonds : np.ndarray, shape=[n_bonds, 2], dtype=int n_bonds x 2 array of indices, where each row is the index of two atom who participate in a bond. Returns ------- idihedrals : np.ndarray, shape[n_dihedrals, 4], dtype=int All sets of 4 atoms A,B,C,D such that A is bonded to B, B is bonded to C, and C is bonded to D """ nx = import_('networkx') graph = nx.from_edgelist(ibonds) n_atoms = graph.number_of_nodes() idihedrals = [] # TODO: CHECK FOR DIHEDRAL ANGLES THAT ARE 180 and recover # conf : msmbuilder.Trajectory # An msmbuilder trajectory, only the first frame will be used. This # is used purely to make the check for angle(ABC) != 180. 
for a in xrange(n_atoms): for b in graph.neighbors(a): for c in filter(lambda c: c not in [a, b], graph.neighbors(b)): for d in filter(lambda d: d not in [a, b, c], graph.neighbors(c)): idihedrals.append((a, b, c, d)) return np.array(idihedrals) ################################################################################ # Compute derivatives of internal coordinates w.r.t to cartesian coordinates # these methods only operate on a single frame ################################################################################ def get_wilson_B(conformation, **kwargs): """Calculate the Wilson B matrix, which collects the derivatives of the redundant internal coordinates w/r/t the cartesian coordinates. .. math:: B_{ij} = \frac{\partial q_i}{\partial x_j} where :math:`q_i` are the internal coorindates and the :math:`x_j` are the Cartesian displacement coordinates of the atoms. BUT NOTE: THE RETURN VALUE IS ACTUALLY 3D Parameters ---------- conformation : mdtraj.Trajectory Only the first frame is used Other Parameters ---------------- ibonds : np.ndarray, optional shape[n_bonds, 2], dtype=int Each row gives the indices of two atoms involved in a bond iangles : np.ndarray, optional, shape[n_angles, 3], dtype=int Each row gives the indices of three atoms which together make an angle idihedrals : np.ndarray, optional, shape[n_dihedrals, 4], dtype=int Each row gives the indices of the four atoms which together make a dihedral Returns ------- B : np.ndarray, shape=[n_internal_coordinates, n_atoms, 3] The layout here is 3 dimensional, where B[i,j,k] is the derivative of internal coordinate`q_i` with respect the cartesian coordinate which is the `k`-th dimension (xyz) of the `j`-th atom. 
""" if 'ibonds' in kwargs and 'iangles' in kwargs and 'idihedrals' in kwargs: ibonds = kwargs['ibonds'] iangles = kwargs['iangles'] idihedrals = kwargs['idihedrals'] else: ibonds, iangles, idihedrals = get_connectivity(conformation) xyz = conformation.xyz[0] bd = get_bond_derivs(xyz, ibonds) ad = get_angle_derivs(xyz, iangles) dd = get_dihedral_derivs(xyz, idihedrals) return np.vstack((bd, ad, dd)) def get_bond_derivs(xyz, ibonds): """Derivatives of the bond lengths with respect to cartesian coordinates Parameters ---------- xyz : np.ndarray, shape=(n_atoms, 3) The cartesian coordinates of the atomic positions for a single frame ibonds : np.ndarray, optional, shape[n_bonds, 2], dtype=int Each row gives the indices of two atoms involved in a bond Returns ------- derivs : np.ndarray, shape=(n_bonds, n_atoms, 3) The gradient of the bond lengths w.r.t. each atomic position References ---------- Bakken and Helgaker, JCP Vol. 117, Num. 20 22 Nov. 2002 http://folk.uio.no/helgaker/reprints/2002/JCP117b_GeoOpt.pdf """ n_atoms, n_bonds = xyz.shape[0], len(ibonds) derivatives = np.zeros((n_bonds, n_atoms, 3)) for b, (m, n) in enumerate(ibonds): u = (xyz[m] - xyz[n]) / np.linalg.norm(xyz[m] - xyz[n]) derivatives[b, m, :] = u derivatives[b, n, :] = -u return derivatives def get_angle_derivs(xyz, iangles): """ Derivatives of the bond angles with respect to cartesian coordinates Parameters ---------- xyz : np.ndarray, shape=(n_atoms, 3) The cartesian coordinates of the atomic positions for a single frame iangles : np.ndarray, optional shape[n_angles, 3], dtype=int Each row gives the indices of three atoms which together make an angle Returns ------- derivs : np.ndarray, shape=(n_bonds, n_atoms, 3) The gradient of the bond angles w.r.t. each atomic position References ---------- Bakken and Helgaker, JCP Vol. 117, Num. 20 22 Nov. 
2002 http://folk.uio.no/helgaker/reprints/2002/JCP117b_GeoOpt.pdf """ n_atoms, n_angles = xyz.shape[0], len(iangles) derivatives = np.zeros((n_angles, n_atoms, 3)) vector1 = np.array([1, -1, 1]) / np.sqrt(3) vector2 = np.array([-1, 1, 1]) / np.sqrt(3) for a, (m, o, n) in enumerate(iangles): u_prime = (xyz[m] - xyz[o]) u_norm = np.linalg.norm(u_prime) v_prime = (xyz[n] - xyz[o]) v_norm = np.linalg.norm(v_prime) u = u_prime / u_norm v = v_prime / v_norm if np.linalg.norm(u + v) < 1e-10 or np.linalg.norm(u - v) < 1e-10: # if they're parallel if np.linalg.norm(u + vector1) < 1e-10 or np.linalg.norm(u - vector1) < 1e-10: # and they're parallel o [1, -1, 1] w_prime = np.cross(u, vector2) else: w_prime = np.cross(u, vector1) else: w_prime = np.cross(u, v) w = w_prime / np.linalg.norm(w_prime) derivatives[a, m, :] = np.cross(u, w) / u_norm derivatives[a, n, :] = np.cross(w, v) / v_norm derivatives[a, o, :] = -np.cross(u, w) / u_norm - np.cross(w, v) / v_norm return derivatives def get_dihedral_derivs(xyz, idihedrals): """ Derivatives of the dihedral angles with respect to cartesian coordinates Parameters ---------- xyz : np.ndarray, shape=(n_atoms, 3) The cartesian coordinates of the atomic positions for a single frame idihedrals : np.ndarray, optional, shape[n_dihedrals, 4], dtype=int Each row gives the indices of the four atoms which together make a dihedral Returns ------- derivs : np.ndarray, shape=(n_dihedrals, n_atoms, 3) The gradient of the dihedral angles w.r.t. each atomic position References ---------- Bakken and Helgaker, JCP Vol. 117, Num. 20 22 Nov. 
2002 http://folk.uio.no/helgaker/reprints/2002/JCP117b_GeoOpt.pdf """ n_atoms, n_dihedrals = xyz.shape[0], len(idihedrals) derivatives = np.zeros((n_dihedrals, n_atoms, 3)) for d, (m, o, p, n) in enumerate(idihedrals): u_prime = (xyz[m] - xyz[o]) w_prime = (xyz[p] - xyz[o]) v_prime = (xyz[n] - xyz[p]) u_norm = np.linalg.norm(u_prime) w_norm = np.linalg.norm(w_prime) v_norm = np.linalg.norm(v_prime) u = u_prime / u_norm w = w_prime / w_norm v = v_prime / v_norm term1 = np.cross(u, w) / (u_norm * (1 - np.dot(u, w) ** 2)) term2 = np.cross(v, w) / (v_norm * (1 - np.dot(v, w) ** 2)) term3 = np.cross(u, w) * np.dot(u, w) / (w_norm * (1 - np.dot(u, w) ** 2)) term4 = np.cross(v, w) * -np.dot(v, w) / (w_norm * (1 - np.dot(v, w) ** 2)) derivatives[d, m, :] = term1 derivatives[d, n, :] = -term2 derivatives[d, o, :] = -term1 + term3 - term4 derivatives[d, p, :] = term2 - term3 + term4 return derivatives
lgpl-2.1
SofiaReis/django-cms
docs/conf.py
13
7806
# -*- coding: utf-8 -*- # # django cms documentation build configuration file, created by # sphinx-quickstart on Tue Sep 15 10:47:03 2009. # # This file is execfile()d with the current directory set to its containing # dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out serve # to show the default. import os import sys # If extensions (or modules to document with autodoc) are in another # directory, add these directories to sys.path here. If the directory is # relative to the documentation root, use os.path.abspath to make it absolute, # like shown here. sys.path.append(os.path.abspath('.')) sys.path.append(os.path.abspath('..')) sys.path.append(os.path.join(os.path.abspath('.'), '_ext')) # -- General configuration ----------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. #extensions = ['sphinx.ext.autodoc'] extensions = ['djangocms', 'sphinx.ext.intersphinx'] intersphinx_mapping = { 'python': ('http://docs.python.org/2.6', None), 'django': ('http://readthedocs.org/docs/django/en/latest/', None), 'classytags': ('http://readthedocs.org/docs/django-classy-tags/en/latest/', None), 'sekizai': ('http://readthedocs.org/docs/django-sekizai/en/latest/', None), } # Add any paths that contain templates here, relative to this directory. #templates_path = ['templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. source_encoding = 'utf-8' # The master toctree document. master_doc = 'index' # General information about the project. project = u'django cms' copyright = u'2009-2015, Patrick Lauber' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. 
# # The short X.Y version. path = os.path.split(os.path.dirname(__file__))[0] path = os.path.split(path)[0] sys.path.insert(0, path) import cms version = cms.__version__ # The full version, including alpha/beta/rc tags. release = cms.__version__ # The language for content autogenerated by Sphinx. Refer to documentation for # a list of supported languages. language = "en" # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of documents that shouldn't be included in the build. #unused_docs = [] # List of directories, relative to source directory, that shouldn't be # searched for source files. exclude_trees = ['build'] # The reST default role (used for this markup: `text`) to use for all # documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. add_function_parentheses = True # If true, the current module name will be prepended to all description unit # titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # on_rtd is whether we are on readthedocs.org on_rtd = os.environ.get('READTHEDOCS', None) == 'True' if not on_rtd: # only import and set the theme if we're building docs locally try: import sphinx_rtd_theme html_theme = 'sphinx_rtd_theme' html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] except: html_theme = 'default' # The theme to use for HTML and HTML Help pages. Major themes that come with # Sphinx are currently 'default' and 'sphinxdoc'. 
# html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_use_modindex = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. 
#html_show_sourcelink = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = '' # Output file base name for HTML help builder. htmlhelp_basename = 'djangocmsdoc' # -- Options for LaTeX output -------------------------------------------------- # The paper size ('letter' or 'a4'). latex_paper_size = 'a4' # The font size ('10pt', '11pt' or '12pt'). #latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'djangocms.tex', u'django cms Documentation', u'Patrick Lauber', 'manual'), ] # The name of an image file (relative to this directory) to place at the top # of the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # Additional stuff for the LaTeX preamble. #latex_preamble = '' # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_use_modindex = True # -- Options for LaTeX output -------------------------------------------------- # Spelling check needs an additional module that is not installed by default. # Add it only if spelling check is requested so docs can be generated without it. if 'spelling' in sys.argv: extensions.append("sphinxcontrib.spelling") # Spelling language. spelling_lang = 'en_GB' # Location of word list. spelling_word_list_filename = 'spelling_wordlist' spelling_ignore_pypi_package_names = True
bsd-3-clause
yaqiyang/autorest
src/generator/AutoRest.Python.Azure.Tests/Expected/AcceptanceTests/StorageManagementClient/storagemanagementclient/models/usage.py
4
1728
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from msrest.serialization import Model class Usage(Model): """Describes Storage Resource Usage. :param unit: Gets the unit of measurement. Possible values include: 'Count', 'Bytes', 'Seconds', 'Percent', 'CountsPerSecond', 'BytesPerSecond' :type unit: str or :class:`UsageUnit <fixtures.acceptancetestsstoragemanagementclient.models.UsageUnit>` :param current_value: Gets the current count of the allocated resources in the subscription. :type current_value: int :param limit: Gets the maximum count of the resources that can be allocated in the subscription. :type limit: int :param name: Gets the name of the type of usage. :type name: :class:`UsageName <fixtures.acceptancetestsstoragemanagementclient.models.UsageName>` """ _attribute_map = { 'unit': {'key': 'unit', 'type': 'UsageUnit'}, 'current_value': {'key': 'currentValue', 'type': 'int'}, 'limit': {'key': 'limit', 'type': 'int'}, 'name': {'key': 'name', 'type': 'UsageName'}, } def __init__(self, unit=None, current_value=None, limit=None, name=None): self.unit = unit self.current_value = current_value self.limit = limit self.name = name
mit
lammps/lammps
doc/utils/converters/lammpsdoc/rst_anchor_check.py
5
2188
#!/usr/bin/env python3 # LAMMPS Documentation Utilities # # Scan for duplicate anchor labels in documentation files # # Copyright (C) 2017 Richard Berger # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import re import sys import argparse def main(): parser = argparse.ArgumentParser(description='scan for duplicate anchor labels in documentation files') parser.add_argument('files', metavar='file', nargs='+', help='one or more files to scan') parsed_args = parser.parse_args() anchor_pattern = re.compile(r'^\.\. _(.*):$') anchors = {} for filename in parsed_args.files: #print("filename: %s" % filename) with open(filename, 'rt') as f: for line_number, line in enumerate(f): m = anchor_pattern.match(line) if m: label = m.group(1) #print("found label: %s" % label) if label in anchors: anchors[label].append((filename, line_number+1)) else: anchors[label] = [(filename, line_number+1)] print("Found %d anchor labels" % len(anchors)) count = 0 for label in sorted(anchors.keys()): if len(anchors[label]) > 1: print(label) count += 1 for filename, line_number in anchors[label]: print(" - %s:%d" % (filename, line_number)) if count > 0: print("Found %d anchor label errors." % count) sys.exit(1) else: print("No anchor label errors.") if __name__ == "__main__": main()
gpl-2.0
nekulin/arangodb
3rdParty/V8-4.3.61/third_party/python_26/Lib/site-packages/win32comext/mapi/mapiutil.py
17
5090
# General utilities for MAPI and MAPI objects. from types import TupleType, ListType, IntType, StringType from pywintypes import UnicodeType, TimeType import pythoncom import mapi, mapitags # Pre 2.2.1 compat. try: True, False except NameError: True = 1==1; False = 1==0 prTable = {} def GetPropTagName(pt): if not prTable: for name, value in mapitags.__dict__.items(): if name[:3] == 'PR_': # Store both the full ID (including type) and just the ID. # This is so PR_FOO_A and PR_FOO_W are still differentiated, # but should we get a PT_FOO with PT_ERROR set, we fallback # to the ID. prTable[value] = name prTable[mapitags.PROP_ID(value)] = name try: try: return prTable[pt] except KeyError: # Can't find it exactly - see if the raw ID exists. return prTable[mapitags.PROP_ID(pt)] except KeyError: # god-damn bullshit hex() warnings: I don't see a way to get the # old behaviour without a warning!! ret = hex(long(pt)) # -0x8000000L -> 0x80000000 if ret[0]=='-': ret = ret[1:] if ret[-1]=='L': ret = ret[:-1] return ret mapiErrorTable = {} def GetScodeString(hr): if not mapiErrorTable: for name, value in mapi.__dict__.items(): if name[:7] in ['MAPI_E_', 'MAPI_W_']: mapiErrorTable[value] = name return mapiErrorTable.get(hr, pythoncom.GetScodeString(hr)) ptTable = {} def GetMapiTypeName(propType): """Given a mapi type flag, return a string description of the type""" if not ptTable: for name, value in mapitags.__dict__.items(): if name[:3] == 'PT_': ptTable[value] = name rawType = propType & ~mapitags.MV_FLAG return ptTable.get(rawType, str(hex(rawType))) def GetProperties(obj, propList): """Given a MAPI object and a list of properties, return a list of property values. Allows a single property to be passed, and the result is a single object. Each request property can be an integer or a string. Of a string, it is automatically converted to an integer via the GetIdsFromNames function. If the property fetch fails, the result is None. 
""" bRetList = 1 if type(propList) not in [TupleType, ListType]: bRetList = 0 propList = (propList,) realPropList = [] rc = [] for prop in propList: if type(prop)!=IntType: # Integer props = ( (mapi.PS_PUBLIC_STRINGS, prop), ) propIds = obj.GetIDsFromNames(props, 0) prop = mapitags.PROP_TAG( mapitags.PT_UNSPECIFIED, mapitags.PROP_ID(propIds[0])) realPropList.append(prop) hr, data = obj.GetProps(realPropList,0) if hr != 0: data = None return None if bRetList: return map( lambda(v): v[1], data ) else: return data[0][1] def GetAllProperties(obj, make_tag_names = True): tags = obj.GetPropList(0) hr, data = obj.GetProps(tags) ret = [] for tag, val in data: if make_tag_names: hr, tags, array = obj.GetNamesFromIDs( (tag,) ) if type(array[0][1])==type(u''): name = array[0][1] else: name = GetPropTagName(tag) else: name = tag ret.append((name, val)) return ret _MapiTypeMap = { type(0.0): mapitags.PT_DOUBLE, type(0): mapitags.PT_I4, type(''): mapitags.PT_STRING8, type(u''): mapitags.PT_UNICODE, type(None): mapitags.PT_UNSPECIFIED, # In Python 2.2.2, bool isn't a distinct type (type(1==1) is type(0)). } def SetPropertyValue(obj, prop, val): if type(prop)!=IntType: props = ( (mapi.PS_PUBLIC_STRINGS, prop), ) propIds = obj.GetIDsFromNames(props, mapi.MAPI_CREATE) if val == (1==1) or val == (1==0): type_tag = mapitags.PT_BOOLEAN else: type_tag = _MapiTypeMap.get(type(val)) if type_tag is None: raise ValueError, "Don't know what to do with '%r' ('%s')" % (val, type(val)) prop = mapitags.PROP_TAG( type_tag, mapitags.PROP_ID(propIds[0])) if val is None: # Delete the property obj.DeleteProps((prop,)) else: obj.SetProps(((prop,val),)) def SetProperties( msg, propDict): """ Given a Python dictionary, set the objects properties. If the dictionary key is a string, then a property ID is queried otherwise the ID is assumed native. 
Coded for maximum efficiency wrt server calls - ie, maximum of 2 calls made to the object, regardless of the dictionary contents (only 1 if dictionary full of int keys) """ newProps = [] # First pass over the properties we should get IDs for. for key, val in propDict.items(): if type(key) in [StringType, UnicodeType]: newProps.append((mapi.PS_PUBLIC_STRINGS, key)) # Query for the new IDs if newProps: newIds = msg.GetIDsFromNames(newProps, mapi.MAPI_CREATE) newIdNo = 0 newProps = [] for key, val in propDict.items(): if type(key) in [StringType, UnicodeType]: type_val=type(val) if type_val in [StringType, pywintypes.UnicodeType]: tagType = mapitags.PT_UNICODE elif type_val==IntType: tagType = mapitags.PT_I4 elif type_val==TimeType: tagType = mapitags.PT_SYSTIME else: raise ValueError, "The type of object %s(%s) can not be written" % (`val`,type_val) key = mapitags.PROP_TAG(tagType, mapitags.PROP_ID(newIds[newIdNo])) newIdNo = newIdNo + 1 newProps.append( (key, val) ) msg.SetProps(newProps)
apache-2.0
ouya/ouya_1_1-kernel
arch/ia64/scripts/unwcheck.py
13143
1714
#!/usr/bin/python # # Usage: unwcheck.py FILE # # This script checks the unwind info of each function in file FILE # and verifies that the sum of the region-lengths matches the total # length of the function. # # Based on a shell/awk script originally written by Harish Patil, # which was converted to Perl by Matthew Chapman, which was converted # to Python by David Mosberger. # import os import re import sys if len(sys.argv) != 2: print "Usage: %s FILE" % sys.argv[0] sys.exit(2) readelf = os.getenv("READELF", "readelf") start_pattern = re.compile("<([^>]*)>: \[0x([0-9a-f]+)-0x([0-9a-f]+)\]") rlen_pattern = re.compile(".*rlen=([0-9]+)") def check_func (func, slots, rlen_sum): if slots != rlen_sum: global num_errors num_errors += 1 if not func: func = "[%#x-%#x]" % (start, end) print "ERROR: %s: %lu slots, total region length = %lu" % (func, slots, rlen_sum) return num_funcs = 0 num_errors = 0 func = False slots = 0 rlen_sum = 0 for line in os.popen("%s -u %s" % (readelf, sys.argv[1])): m = start_pattern.match(line) if m: check_func(func, slots, rlen_sum) func = m.group(1) start = long(m.group(2), 16) end = long(m.group(3), 16) slots = 3 * (end - start) / 16 rlen_sum = 0L num_funcs += 1 else: m = rlen_pattern.match(line) if m: rlen_sum += long(m.group(1)) check_func(func, slots, rlen_sum) if num_errors == 0: print "No errors detected in %u functions." % num_funcs else: if num_errors > 1: err="errors" else: err="error" print "%u %s detected in %u functions." % (num_errors, err, num_funcs) sys.exit(1)
gpl-2.0
geodynamics/burnman
setup.py
2
1353
from __future__ import absolute_import import re versionstuff = dict( re.findall("(.+) = '(.+)'\n", open('burnman/version.py').read())) metadata = dict(name='burnman', version=versionstuff['version'], description='a thermoelastic and thermodynamic toolkit for Earth and planetary sciences', url='http://burnman.org', author='The BurnMan Team', author_email='bob.myhill@bristol.ac.uk', license='GPL', long_description='BurnMan is a Python library for generating thermodynamic and thermoelastic models of planetary interiors.', packages=['burnman', 'burnman.minerals', 'burnman.eos'], package_data={'burnman': ['data/input_*/*']}, classifiers=[ 'License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4'], ) # Try to use setuptools in order to check dependencies. # if the system does not have setuptools, fall back on # distutils. try: from setuptools import setup metadata['install_requires'] = ['numpy', 'matplotlib', 'scipy', 'sympy'] except ImportError: from distutils.core import setup setup(**metadata)
gpl-2.0
gcblue/gcblue
scripts/PointDefense.py
1
2779
from UnitCommands import * from GroupCommands import * import math import random def GetPointDefenseLaunchers(UI): # build list of all launcher info launcher_list = [] nLaunchers = UI.GetLauncherCount() for n in range(0, nLaunchers): launcher_info = UI.GetLauncherInfo(n) if ((launcher_info.Status == 0) and (launcher_info.TargetFlags & 0x0008) and (launcher_info.MinRange_km < 1.0)): launcher_list.append(launcher_info) return launcher_list def GetPointDefenseTargets(UI): class_mask = 0x0060 # missiles and aircraft range_km = 8.0 affiliation = 3 # hostile track_list = UI.GetTrackList(class_mask, range_km, affiliation) targets = [] current_time = UI.GetTime() max_engaged_count = 10 nTracks = track_list.Size() for n in range(0, nTracks): track_info = track_list.GetTrack(n) track_id = track_info.ID staleness = current_time - track_info.Time is_destroyed = track_info.IsDestroyed() bearing_only = track_info.IsBearingOnly() engaged_count = track_info.GetEngagedCount() is_air_target = track_info.IsAir() or track_info.IsMissile() if ((engaged_count < max_engaged_count) and (staleness < 15.0) and (not bearing_only) and (not is_destroyed) and is_air_target): targets.append(track_info) return targets # script to augment EngageAll for better point defense reaction def PointDefense(UI): target_list = GetPointDefenseTargets(UI) nTargets = len(target_list) if (nTargets == 0): return launcher_list = GetPointDefenseLaunchers(UI) for k in range(0, len(launcher_list)): launcher_info = launcher_list[k] launcher_idx = launcher_info.Launcher # engage first target in random target order target_order = random.sample(range(0, nTargets), nTargets) selected_target = -1 for n in target_order: track_info = target_list[n] target_range = UI.GetRangeToTrack(track_info) launch_range = launcher_info.Range_km # reference max range, use for launch decision #UI.DisplayMessage('Best launcher %d' % launcher_idx) if (launcher_idx != -1): target_range = UI.GetRangeToTarget() launch_range = 
launcher_info.Range_km # reference max range, use for launch decision if ((selected_target == -1) and (target_range < launch_range)): selected_target = track_info.ID break if (selected_target != -1): UI.SendTargetToLauncher(selected_target, launcher_idx) UI.Launch(launcher_idx, 1)
bsd-3-clause
vicky2135/lucious
oscar/lib/python2.7/site-packages/django/template/backends/django.py
119
4406
# Since this package contains a "django" module, this is required on Python 2. from __future__ import absolute_import import sys from importlib import import_module from pkgutil import walk_packages from django.apps import apps from django.conf import settings from django.template import TemplateDoesNotExist from django.template.context import make_context from django.template.engine import Engine from django.template.library import InvalidTemplateLibrary from django.utils import six from .base import BaseEngine class DjangoTemplates(BaseEngine): app_dirname = 'templates' def __init__(self, params): params = params.copy() options = params.pop('OPTIONS').copy() options.setdefault('autoescape', True) options.setdefault('debug', settings.DEBUG) options.setdefault('file_charset', settings.FILE_CHARSET) libraries = options.get('libraries', {}) options['libraries'] = self.get_templatetag_libraries(libraries) super(DjangoTemplates, self).__init__(params) self.engine = Engine(self.dirs, self.app_dirs, **options) def from_string(self, template_code): return Template(self.engine.from_string(template_code), self) def get_template(self, template_name): try: return Template(self.engine.get_template(template_name), self) except TemplateDoesNotExist as exc: reraise(exc, self) def get_templatetag_libraries(self, custom_libraries): """ Return a collation of template tag libraries from installed applications and the supplied custom_libraries argument. 
""" libraries = get_installed_libraries() libraries.update(custom_libraries) return libraries class Template(object): def __init__(self, template, backend): self.template = template self.backend = backend @property def origin(self): return self.template.origin def render(self, context=None, request=None): context = make_context(context, request, autoescape=self.backend.engine.autoescape) try: return self.template.render(context) except TemplateDoesNotExist as exc: reraise(exc, self.backend) def copy_exception(exc, backend=None): """ Create a new TemplateDoesNotExist. Preserve its declared attributes and template debug data but discard __traceback__, __context__, and __cause__ to make this object suitable for keeping around (in a cache, for example). """ backend = backend or exc.backend new = exc.__class__(*exc.args, tried=exc.tried, backend=backend, chain=exc.chain) if hasattr(exc, 'template_debug'): new.template_debug = exc.template_debug return new def reraise(exc, backend): """ Reraise TemplateDoesNotExist while maintaining template debug information. """ new = copy_exception(exc, backend) six.reraise(exc.__class__, new, sys.exc_info()[2]) def get_installed_libraries(): """ Return the built-in template tag libraries and those from installed applications. Libraries are stored in a dictionary where keys are the individual module names, not the full module paths. Example: django.templatetags.i18n is stored as i18n. """ libraries = {} candidates = ['django.templatetags'] candidates.extend( '%s.templatetags' % app_config.name for app_config in apps.get_app_configs()) for candidate in candidates: try: pkg = import_module(candidate) except ImportError: # No templatetags package defined. This is safe to ignore. continue if hasattr(pkg, '__path__'): for name in get_package_libraries(pkg): libraries[name[len(candidate) + 1:]] = name return libraries def get_package_libraries(pkg): """ Recursively yield template tag libraries defined in submodules of a package. 
""" for entry in walk_packages(pkg.__path__, pkg.__name__ + '.'): try: module = import_module(entry[1]) except ImportError as e: raise InvalidTemplateLibrary( "Invalid template library specified. ImportError raised when " "trying to load '%s': %s" % (entry[1], e) ) if hasattr(module, 'register'): yield entry[1]
bsd-3-clause
darktears/chromium-crosswalk
third_party/WebKit/LayoutTests/http/tests/websocket/reserved-bits_wsh.py
45
1709
import re

from mod_pywebsocket import common
# BUG FIX: msgutil was referenced in web_socket_do_extra_handshake without
# being imported, which turned the "bad query" error path into a NameError.
from mod_pywebsocket import msgutil
from mod_pywebsocket import stream
from mod_pywebsocket.extensions import DeflateFrameExtensionProcessor

# Reserved-bit number (1 = RSV1, 2 = RSV2, 3 = RSV3) parsed from the
# handshake query string; read later by web_socket_transfer_data when
# building the outgoing frame.
bit = 0


def _get_deflate_frame_extension_processor(request):
    """Return the negotiated deflate-frame extension processor, or None."""
    for extension_processor in request.ws_extension_processors:
        if isinstance(extension_processor, DeflateFrameExtensionProcessor):
            return extension_processor
    return None


def web_socket_do_extra_handshake(request):
    """Parse ?compressed=...&bitNumber=... and choose extension processors.

    Sends a FAIL message and aborts if the query string does not match the
    expected shape; otherwise records the requested reserved-bit number and
    restricts the extension response to at most the deflate-frame processor.
    """
    match = re.search(r'\?compressed=(true|false)&bitNumber=(\d)$',
                      request.ws_resource)
    if match is None:
        msgutil.send_message(request,
                             'FAIL: Query value is incorrect or missing')
        return

    global bit
    compressed = match.group(1)
    bit = int(match.group(2))

    if compressed == 'false':
        request.ws_extension_processors = []  # using no extension response
    else:
        processor = _get_deflate_frame_extension_processor(request)
        if processor is None:
            request.ws_extension_processors = []  # using no extension response
        else:
            request.ws_extension_processors = [processor]  # avoid conflict


def web_socket_transfer_data(request):
    """Send one text frame with the requested reserved bit set.

    A conforming client must fail the connection on an unexpected RSV bit,
    hence the payload text saying the message should be ignored.
    """
    text = 'This message should be ignored.'
    opcode = common.OPCODE_TEXT
    # create_header(opcode, length, fin, rsv1, rsv2, rsv3, mask)
    if bit == 1:
        frame = stream.create_header(opcode, len(text), 1, 1, 0, 0, 0) + text
    elif bit == 2:
        frame = stream.create_header(opcode, len(text), 1, 0, 1, 0, 0) + text
    elif bit == 3:
        frame = stream.create_header(opcode, len(text), 1, 0, 0, 1, 0) + text
    else:
        frame = stream.create_text_frame('FAIL: Invalid bit number: %d' % bit)
    request.connection.write(frame)
bsd-3-clause
NewHorizonsAU/ibuild
docs/source/conf.py
1
8697
# -*- coding: utf-8 -*-
# Sphinx configuration for the IBuild documentation.
#
# This file is based upon the file generated by sphinx-quickstart. However,
# where sphinx-quickstart hardcodes values in this file that you input, this
# file has been changed to pull from your module's metadata module.
#
# This file is execfile()d with the current directory set to its containing
# dir.
#
# Not all possible configuration values are present here; commented-out
# entries show the available option and its default.

import os
import sys

# Make the project package importable for autodoc: the docs live two levels
# below the repository root (docs/source/), hence '../..'.
sys.path.insert(0, os.path.abspath('../..'))

# Project metadata (name, version, authors, ...) is kept in one place and
# pulled in here rather than being hardcoded by sphinx-quickstart.
from IBuild import metadata

# -- General configuration ----------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Sphinx extension modules, as strings. They can be extensions coming with
# Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx',
              'sphinx.ext.todo', 'sphinx.ext.coverage',
              'sphinx.ext.viewcode']

# show todos
todo_include_todos = True

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project (sourced from the metadata module).
project = metadata.project
copyright = metadata.copyright

# The version info for the project you're documenting, acts as replacement
# for |version| and |release|, also used in various other places throughout
# the built documents.
#
# The short X.Y version.
version = metadata.version
# The full version, including alpha/beta/rc tags.
release = metadata.version

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []

# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []

# -- Options for HTML output --------------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'nature'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None

# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_domain_indices = True

# If false, no index is generated.
#html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None

# Output file base name for HTML help builder.
htmlhelp_basename = metadata.project_no_spaces + 'doc'

# -- Options for LaTeX output -------------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #'preamble': '',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author,
#  documentclass [howto/manual]).
latex_documents = [
    ('index', metadata.project_no_spaces + '.tex',
     metadata.project + ' Documentation',
     metadata.authors_string, 'manual'),
]

# The name of an image file (relative to this directory) to place at the top
# of the title page.
#latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False

# If true, show page references after internal links.
#latex_show_pagerefs = False

# If true, show URL addresses after external links.
#latex_show_urls = False

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_domain_indices = True

# -- Options for manual page output -------------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', metadata.package, metadata.project + ' Documentation',
     metadata.authors_string, 1)
]

# If true, show URL addresses after external links.
#man_show_urls = False

# -- Options for Texinfo output -----------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    ('index', metadata.project_no_spaces, metadata.project + ' Documentation',
     metadata.authors_string, metadata.project_no_spaces,
     metadata.description, 'Miscellaneous'),
]

# Documents to append as an appendix to all manuals.
#texinfo_appendices = []

# If false, no module index is generated.
#texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'

# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
    'python': ('http://docs.python.org/', None),
}

# Extra local configuration. This is useful for placing the class description
# in the class docstring and the __init__ parameter documentation in the
# __init__ docstring. See
# <http://sphinx-doc.org/ext/autodoc.html#confval-autoclass_content> for more
# information.
autoclass_content = 'both'
mit
FlipperPA/wagtail
wagtail/images/tests/test_models.py
2
20435
import unittest from django.contrib.auth.models import Group, Permission from django.core.cache import caches from django.core.files.uploadedfile import SimpleUploadedFile from django.db.utils import IntegrityError from django.test import TestCase from django.test.utils import override_settings from django.urls import reverse from willow.image import Image as WillowImage from wagtail.core.models import Collection, GroupCollectionPermission, Page from wagtail.images.models import Rendition, SourceImageIOError from wagtail.images.rect import Rect from wagtail.tests.testapp.models import EventPage, EventPageCarouselItem from wagtail.tests.utils import WagtailTestUtils from .utils import Image, get_test_image_file class TestImage(TestCase): def setUp(self): # Create an image for running tests on self.image = Image.objects.create( title="Test image", file=get_test_image_file(colour='white'), ) def test_is_portrait(self): self.assertFalse(self.image.is_portrait()) def test_is_landscape(self): self.assertTrue(self.image.is_landscape()) def test_get_rect(self): self.assertTrue(self.image.get_rect(), Rect(0, 0, 640, 480)) def test_get_focal_point(self): self.assertEqual(self.image.get_focal_point(), None) # Add a focal point to the image self.image.focal_point_x = 100 self.image.focal_point_y = 200 self.image.focal_point_width = 50 self.image.focal_point_height = 20 # Get it self.assertEqual(self.image.get_focal_point(), Rect(75, 190, 125, 210)) def test_has_focal_point(self): self.assertFalse(self.image.has_focal_point()) # Add a focal point to the image self.image.focal_point_x = 100 self.image.focal_point_y = 200 self.image.focal_point_width = 50 self.image.focal_point_height = 20 self.assertTrue(self.image.has_focal_point()) def test_set_focal_point(self): self.assertEqual(self.image.focal_point_x, None) self.assertEqual(self.image.focal_point_y, None) self.assertEqual(self.image.focal_point_width, None) self.assertEqual(self.image.focal_point_height, None) 
self.image.set_focal_point(Rect(100, 150, 200, 350)) self.assertEqual(self.image.focal_point_x, 150) self.assertEqual(self.image.focal_point_y, 250) self.assertEqual(self.image.focal_point_width, 100) self.assertEqual(self.image.focal_point_height, 200) self.image.set_focal_point(None) self.assertEqual(self.image.focal_point_x, None) self.assertEqual(self.image.focal_point_y, None) self.assertEqual(self.image.focal_point_width, None) self.assertEqual(self.image.focal_point_height, None) def test_is_stored_locally(self): self.assertTrue(self.image.is_stored_locally()) @override_settings(DEFAULT_FILE_STORAGE='wagtail.tests.dummy_external_storage.DummyExternalStorage') def test_is_stored_locally_with_external_storage(self): self.assertFalse(self.image.is_stored_locally()) def test_get_file_size(self): file_size = self.image.get_file_size() self.assertIsInstance(file_size, int) self.assertGreater(file_size, 0) def test_get_file_size_on_missing_file_raises_sourceimageioerror(self): self.image.file.delete(save=False) with self.assertRaises(SourceImageIOError): self.image.get_file_size() class TestImageQuerySet(TestCase): def test_search_method(self): # Create an image for running tests on image = Image.objects.create( title="Test image", file=get_test_image_file(), ) # Search for it results = Image.objects.search("Test") self.assertEqual(list(results), [image]) def test_operators(self): aaa_image = Image.objects.create( title="AAA Test image", file=get_test_image_file(), ) zzz_image = Image.objects.create( title="ZZZ Test image", file=get_test_image_file(), ) results = Image.objects.search("aaa test", operator='and') self.assertEqual(list(results), [aaa_image]) results = Image.objects.search("aaa test", operator='or') sorted_results = sorted(results, key=lambda img: img.title) self.assertEqual(sorted_results, [aaa_image, zzz_image]) def test_custom_ordering(self): aaa_image = Image.objects.create( title="AAA Test image", file=get_test_image_file(), ) zzz_image = 
Image.objects.create( title="ZZZ Test image", file=get_test_image_file(), ) results = Image.objects.order_by('title').search("Test") self.assertEqual(list(results), [aaa_image, zzz_image]) results = Image.objects.order_by('-title').search("Test") self.assertEqual(list(results), [zzz_image, aaa_image]) def test_search_indexing_prefetches_tags(self): for i in range(0, 10): image = Image.objects.create( title="Test image %d" % i, file=get_test_image_file(), ) image.tags.add('aardvark', 'artichoke', 'armadillo') with self.assertNumQueries(2): results = { image.title: [tag.name for tag in image.tags.all()] for image in Image.get_indexed_objects() } self.assertTrue('aardvark' in results['Test image 0']) class TestImagePermissions(TestCase, WagtailTestUtils): def setUp(self): # Create some user accounts for testing permissions self.user = self.create_user(username='user', email='user@email.com', password='password') self.owner = self.create_user(username='owner', email='owner@email.com', password='password') self.editor = self.create_user(username='editor', email='editor@email.com', password='password') self.editor.groups.add(Group.objects.get(name='Editors')) self.administrator = self.create_superuser( username='administrator', email='administrator@email.com', password='password' ) # Owner user must have the add_image permission image_adders_group = Group.objects.create(name="Image adders") GroupCollectionPermission.objects.create( group=image_adders_group, collection=Collection.get_first_root_node(), permission=Permission.objects.get(codename='add_image'), ) self.owner.groups.add(image_adders_group) # Create an image for running tests on self.image = Image.objects.create( title="Test image", uploaded_by_user=self.owner, file=get_test_image_file(), ) def test_administrator_can_edit(self): self.assertTrue(self.image.is_editable_by_user(self.administrator)) def test_editor_can_edit(self): self.assertTrue(self.image.is_editable_by_user(self.editor)) def 
test_owner_can_edit(self): self.assertTrue(self.image.is_editable_by_user(self.owner)) def test_user_cant_edit(self): self.assertFalse(self.image.is_editable_by_user(self.user)) class TestRenditions(TestCase): def setUp(self): # Create an image for running tests on self.image = Image.objects.create( title="Test image", file=get_test_image_file(), ) def test_get_rendition_model(self): self.assertIs(Image.get_rendition_model(), Rendition) def test_minification(self): rendition = self.image.get_rendition('width-400') # Check size self.assertEqual(rendition.width, 400) self.assertEqual(rendition.height, 300) # check that the rendition has been recorded under the correct filter, # via the Rendition.filter_spec attribute (in active use as of Wagtail 1.8) self.assertEqual(rendition.filter_spec, 'width-400') def test_resize_to_max(self): rendition = self.image.get_rendition('max-100x100') # Check size self.assertEqual(rendition.width, 100) self.assertEqual(rendition.height, 75) def test_resize_to_min(self): rendition = self.image.get_rendition('min-120x120') # Check size self.assertEqual(rendition.width, 160) self.assertEqual(rendition.height, 120) def test_resize_to_original(self): rendition = self.image.get_rendition('original') # Check size self.assertEqual(rendition.width, 640) self.assertEqual(rendition.height, 480) def test_cache(self): # Get two renditions with the same filter first_rendition = self.image.get_rendition('width-400') second_rendition = self.image.get_rendition('width-400') # Check that they are the same object self.assertEqual(first_rendition, second_rendition) def test_alt_attribute(self): rendition = self.image.get_rendition('width-400') self.assertEqual(rendition.alt, "Test image") @override_settings( CACHES={ 'renditions': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', }, }, ) def test_renditions_cache_backend(self): cache = caches['renditions'] rendition = self.image.get_rendition('width-500') rendition_cache_key = 
"image-{}-{}-{}".format( rendition.image.id, rendition.focal_point_key, rendition.filter_spec ) # Check rendition is saved to cache self.assertEqual(cache.get(rendition_cache_key), rendition) # Mark a rendition to check it comes from cache rendition._from_cache = 'original' cache.set(rendition_cache_key, rendition) # Check if get_rendition returns the rendition from cache with self.assertNumQueries(0): new_rendition = self.image.get_rendition('width-500') self.assertEqual(new_rendition._from_cache, 'original') # changing the image file should invalidate the cache self.image.file = get_test_image_file(colour='green') self.image.save() # deleting renditions would normally happen within the 'edit' view on file change - # we're bypassing that here, so have to do it manually self.image.renditions.all().delete() new_rendition = self.image.get_rendition('width-500') self.assertFalse(hasattr(new_rendition, '_from_cache')) # changing it back should also generate a new rendition and not re-use # the original one (because that file has now been deleted in the change) self.image.file = get_test_image_file(colour='white') self.image.save() self.image.renditions.all().delete() new_rendition = self.image.get_rendition('width-500') self.assertFalse(hasattr(new_rendition, '_from_cache')) class TestUsageCount(TestCase): fixtures = ['test.json'] def setUp(self): self.image = Image.objects.create( title="Test image", file=get_test_image_file(), ) @override_settings(WAGTAIL_USAGE_COUNT_ENABLED=True) def test_unused_image_usage_count(self): self.assertEqual(self.image.get_usage().count(), 0) @override_settings(WAGTAIL_USAGE_COUNT_ENABLED=True) def test_used_image_document_usage_count(self): page = EventPage.objects.get(id=4) event_page_carousel_item = EventPageCarouselItem() event_page_carousel_item.page = page event_page_carousel_item.image = self.image event_page_carousel_item.save() self.assertEqual(self.image.get_usage().count(), 1) class TestGetUsage(TestCase): fixtures = 
['test.json'] def setUp(self): self.image = Image.objects.create( title="Test image", file=get_test_image_file(), ) def test_image_get_usage_not_enabled(self): self.assertEqual(list(self.image.get_usage()), []) @override_settings(WAGTAIL_USAGE_COUNT_ENABLED=True) def test_unused_image_get_usage(self): self.assertEqual(list(self.image.get_usage()), []) @override_settings(WAGTAIL_USAGE_COUNT_ENABLED=True) def test_used_image_document_get_usage(self): page = EventPage.objects.get(id=4) event_page_carousel_item = EventPageCarouselItem() event_page_carousel_item.page = page event_page_carousel_item.image = self.image event_page_carousel_item.save() self.assertTrue(issubclass(Page, type(self.image.get_usage()[0]))) class TestGetWillowImage(TestCase): fixtures = ['test.json'] def setUp(self): self.image = Image.objects.create( title="Test image", file=get_test_image_file(), ) def test_willow_image_object_returned(self): with self.image.get_willow_image() as willow_image: self.assertIsInstance(willow_image, WillowImage) def test_with_missing_image(self): # Image id=1 in test fixtures has a missing image file bad_image = Image.objects.get(id=1) # Attempting to get the Willow image for images without files # should raise a SourceImageIOError with self.assertRaises(SourceImageIOError): with bad_image.get_willow_image(): self.fail() # Shouldn't get here def test_closes_image(self): # This tests that willow closes images after use with self.image.get_willow_image(): self.assertFalse(self.image.file.closed) self.assertTrue(self.image.file.closed) def test_closes_image_on_exception(self): # This tests that willow closes images when the with is exited with an exception try: with self.image.get_willow_image(): self.assertFalse(self.image.file.closed) raise ValueError("Something went wrong!") except ValueError: pass self.assertTrue(self.image.file.closed) def test_doesnt_close_open_image(self): # This tests that when the image file is already open, get_willow_image doesn't close it 
(#1256) self.image.file.open('rb') with self.image.get_willow_image(): pass self.assertFalse(self.image.file.closed) self.image.file.close() class TestIssue573(TestCase): """ This tests for a bug which causes filename limit on Renditions to be reached when the Image has a long original filename and a big focal point key """ def test_issue_573(self): # Create an image with a big filename and focal point image = Image.objects.create( title="Test image", file=get_test_image_file( 'thisisaverylongfilename-abcdefghijklmnopqrstuvwxyz-supercalifragilisticexpialidocious.png' ), focal_point_x=1000, focal_point_y=1000, focal_point_width=1000, focal_point_height=1000, ) # Try creating a rendition from that image # This would crash if the bug is present image.get_rendition('fill-800x600') @override_settings(_WAGTAILSEARCH_FORCE_AUTO_UPDATE=['elasticsearch']) class TestIssue613(TestCase, WagtailTestUtils): def get_elasticsearch_backend(self): from django.conf import settings from wagtail.search.backends import get_search_backend if 'elasticsearch' not in settings.WAGTAILSEARCH_BACKENDS: raise unittest.SkipTest("No elasticsearch backend active") return get_search_backend('elasticsearch') def setUp(self): self.search_backend = self.get_elasticsearch_backend() self.login() def add_image(self, **params): post_data = { 'title': "Test image", 'file': SimpleUploadedFile('test.png', get_test_image_file().file.getvalue()), } post_data.update(params) response = self.client.post(reverse('wagtailimages:add'), post_data) # Should redirect back to index self.assertRedirects(response, reverse('wagtailimages:index')) # Check that the image was created images = Image.objects.filter(title="Test image") self.assertEqual(images.count(), 1) # Test that size was populated correctly image = images.first() self.assertEqual(image.width, 640) self.assertEqual(image.height, 480) return image def edit_image(self, **params): # Create an image to edit self.image = Image.objects.create( title="Test image", 
file=get_test_image_file(), ) # Edit it post_data = { 'title': "Edited", } post_data.update(params) response = self.client.post(reverse('wagtailimages:edit', args=(self.image.id,)), post_data) # Should redirect back to index self.assertRedirects(response, reverse('wagtailimages:index')) # Check that the image was edited image = Image.objects.get(id=self.image.id) self.assertEqual(image.title, "Edited") return image def test_issue_613_on_add(self): # Reset the search index self.search_backend.reset_index() self.search_backend.add_type(Image) # Add an image with some tags image = self.add_image(tags="hello") self.search_backend.refresh_index() # Search for it by tag results = self.search_backend.search("hello", Image) # Check self.assertEqual(len(results), 1) self.assertEqual(results[0].id, image.id) def test_issue_613_on_edit(self): # Reset the search index self.search_backend.reset_index() self.search_backend.add_type(Image) # Add an image with some tags image = self.edit_image(tags="hello") self.search_backend.refresh_index() # Search for it by tag results = self.search_backend.search("hello", Image) # Check self.assertEqual(len(results), 1) self.assertEqual(results[0].id, image.id) class TestIssue312(TestCase): def test_duplicate_renditions(self): # Create an image image = Image.objects.create( title="Test image", file=get_test_image_file(), ) # Get two renditions and check that they're the same rend1 = image.get_rendition('fill-100x100') rend2 = image.get_rendition('fill-100x100') self.assertEqual(rend1, rend2) # Now manually duplicate the renditon and check that the database blocks it self.assertRaises( IntegrityError, Rendition.objects.create, image=rend1.image, filter_spec=rend1.filter_spec, width=rend1.width, height=rend1.height, focal_point_key=rend1.focal_point_key, ) class TestFilenameReduction(TestCase): """ This tests for a bug which results in filenames without extensions causing an infinite loop """ def test_filename_reduction_no_ext(self): # Create 
an image with a big filename and no extension image = Image.objects.create( title="Test image", file=get_test_image_file( 'thisisaverylongfilename-abcdefghijklmnopqrstuvwxyz-supercalifragilisticexpialidocioussuperlong' ) ) # Saving file will result in infinite loop when bug is present image.save() self.assertEqual("original_images/thisisaverylongfilename-abcdefghijklmnopqrstuvwxyz-supercalifragilisticexpiali", image.file.name) # Test for happy path. Long filename with extension def test_filename_reduction_ext(self): # Create an image with a big filename and extensions image = Image.objects.create( title="Test image", file=get_test_image_file( 'thisisaverylongfilename-abcdefghijklmnopqrstuvwxyz-supercalifragilisticexpialidocioussuperlong.png' ) ) image.save() self.assertEqual("original_images/thisisaverylongfilename-abcdefghijklmnopqrstuvwxyz-supercalifragilisticexp.png", image.file.name)
bsd-3-clause
reyoung/Paddle
python/paddle/fluid/tests/unittests/test_reduce_op.py
1
9990
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import print_function import unittest import numpy as np from op_test import OpTest class TestSumOp(OpTest): def setUp(self): self.op_type = "reduce_sum" self.inputs = {'X': np.random.random((5, 6, 10)).astype("float64")} self.outputs = {'Out': self.inputs['X'].sum(axis=0)} def test_check_output(self): self.check_output() def test_check_grad(self): self.check_grad(['X'], 'Out') class TestMeanOp(OpTest): def setUp(self): self.op_type = "reduce_mean" self.inputs = {'X': np.random.random((5, 6, 2, 10)).astype("float64")} self.attrs = {'dim': [1]} self.outputs = { 'Out': self.inputs['X'].mean(axis=tuple(self.attrs['dim'])) } def test_check_output(self): self.check_output() def test_check_grad(self): self.check_grad(['X'], 'Out') class TestMaxOp(OpTest): """Remove Max with subgradient from gradient check to confirm the success of CI.""" def setUp(self): self.op_type = "reduce_max" self.inputs = {'X': np.random.random((5, 6, 10)).astype("float64")} self.attrs = {'dim': [-1]} self.outputs = { 'Out': self.inputs['X'].max(axis=tuple(self.attrs['dim'])) } def test_check_output(self): self.check_output() class TestMinOp(OpTest): """Remove Min with subgradient from gradient check to confirm the success of CI.""" def setUp(self): self.op_type = "reduce_min" self.inputs = {'X': np.random.random((5, 6, 10)).astype("float64")} self.attrs = {'dim': [2]} self.outputs = { 
'Out': self.inputs['X'].min(axis=tuple(self.attrs['dim'])) } def test_check_output(self): self.check_output() class TestProdOp(OpTest): def setUp(self): self.op_type = "reduce_prod" self.inputs = {'X': np.random.random((5, 6, 10)).astype("float64")} self.outputs = {'Out': self.inputs['X'].prod(axis=0)} def test_check_output(self): self.check_output() def test_check_grad(self): self.check_grad(['X'], 'Out') class Test1DReduce(OpTest): def setUp(self): self.op_type = "reduce_sum" self.inputs = {'X': np.random.random(20).astype("float64")} self.outputs = {'Out': self.inputs['X'].sum(axis=0)} def test_check_output(self): self.check_output() def test_check_grad(self): self.check_grad(['X'], 'Out') class Test2DReduce0(Test1DReduce): def setUp(self): self.op_type = "reduce_sum" self.attrs = {'dim': [0]} self.inputs = {'X': np.random.random((20, 10)).astype("float64")} self.outputs = {'Out': self.inputs['X'].sum(axis=0)} class Test2DReduce1(Test1DReduce): def setUp(self): self.op_type = "reduce_sum" self.attrs = {'dim': [1]} self.inputs = {'X': np.random.random((20, 10)).astype("float64")} self.outputs = { 'Out': self.inputs['X'].sum(axis=tuple(self.attrs['dim'])) } class Test3DReduce0(Test1DReduce): def setUp(self): self.op_type = "reduce_sum" self.attrs = {'dim': [1]} self.inputs = {'X': np.random.random((5, 6, 7)).astype("float64")} self.outputs = { 'Out': self.inputs['X'].sum(axis=tuple(self.attrs['dim'])) } class Test3DReduce1(Test1DReduce): def setUp(self): self.op_type = "reduce_sum" self.attrs = {'dim': [2]} self.inputs = {'X': np.random.random((5, 6, 7)).astype("float64")} self.outputs = { 'Out': self.inputs['X'].sum(axis=tuple(self.attrs['dim'])) } class Test3DReduce2(Test1DReduce): def setUp(self): self.op_type = "reduce_sum" self.attrs = {'dim': [-2]} self.inputs = {'X': np.random.random((5, 6, 7)).astype("float64")} self.outputs = { 'Out': self.inputs['X'].sum(axis=tuple(self.attrs['dim'])) } class Test3DReduce3(Test1DReduce): def setUp(self): self.op_type = 
"reduce_sum" self.attrs = {'dim': [1, 2]} self.inputs = {'X': np.random.random((5, 6, 7)).astype("float64")} self.outputs = { 'Out': self.inputs['X'].sum(axis=tuple(self.attrs['dim'])) } class TestKeepDimReduce(Test1DReduce): def setUp(self): self.op_type = "reduce_sum" self.inputs = {'X': np.random.random((5, 6, 10)).astype("float64")} self.attrs = {'dim': [1], 'keep_dim': True} self.outputs = { 'Out': self.inputs['X'].sum(axis=tuple(self.attrs['dim']), keepdims=self.attrs['keep_dim']) } class TestReduceAll(Test1DReduce): def setUp(self): self.op_type = "reduce_sum" self.inputs = {'X': np.random.random((5, 6, 2, 10)).astype("float64")} self.attrs = {'reduce_all': True} self.outputs = {'Out': self.inputs['X'].sum()} ## reduction in multi dims class TestReduceMeanOpMultiAxises(OpTest): def setUp(self): self.op_type = "reduce_mean" self.inputs = {'X': np.random.random((5, 6, 2, 10)).astype("float64")} self.attrs = {'dim': [1, 2]} self.outputs = {'Out': self.inputs['X'].mean(axis=(1, 2))} def test_check_output(self): self.check_output() def test_check_grad(self): self.check_grad(['X'], 'Out') class TestReduceMaxOpMultiAxises(OpTest): """Remove Max with subgradient from gradient check to confirm the success of CI.""" def setUp(self): self.op_type = "reduce_max" self.inputs = {'X': np.random.random((5, 6, 10)).astype("float64")} self.attrs = {'dim': [-2, -1]} self.outputs = { 'Out': self.inputs['X'].max(axis=tuple(self.attrs['dim'])) } def test_check_output(self): self.check_output() class TestReduceMinOpMultiAxises(OpTest): """Remove Min with subgradient from gradient check to confirm the success of CI.""" def setUp(self): self.op_type = "reduce_min" self.inputs = {'X': np.random.random((5, 6, 10)).astype("float64")} self.attrs = {'dim': [1, 2]} self.outputs = { 'Out': self.inputs['X'].min(axis=tuple(self.attrs['dim'])) } def test_check_output(self): self.check_output() class TestKeepDimReduceSumMultiAxises(OpTest): def setUp(self): self.op_type = "reduce_sum" 
self.inputs = {'X': np.random.random((5, 6, 10)).astype("float64")} self.attrs = {'dim': [-2, -1], 'keep_dim': True} self.outputs = { 'Out': self.inputs['X'].sum(axis=tuple(self.attrs['dim']), keepdims=True) } def test_check_output(self): self.check_output() def test_check_grad(self): self.check_grad(['X'], 'Out') class TestReduceSumWithDimOne(OpTest): def setUp(self): self.op_type = "reduce_sum" self.inputs = {'X': np.random.random((10, 1, 1)).astype("float64")} self.attrs = {'dim': [1, 2], 'keep_dim': True} self.outputs = { 'Out': self.inputs['X'].sum(axis=tuple(self.attrs['dim']), keepdims=True) } def test_check_output(self): self.check_output() def test_check_grad(self): self.check_grad(['X'], 'Out') class TestReduceSumWithNumelOne(OpTest): def setUp(self): self.op_type = "reduce_sum" self.inputs = {'X': np.random.random((1, 1)).astype("float64")} self.attrs = {'dim': [1], 'keep_dim': False} self.outputs = { 'Out': self.inputs['X'].sum(axis=tuple(self.attrs['dim']), keepdims=False) } def test_check_output(self): self.check_output() def test_check_grad(self): self.check_grad(['X'], 'Out') class TestReduceMeanWithDimOne(OpTest): def setUp(self): self.op_type = "reduce_mean" self.inputs = {'X': np.random.random((10, 1, 1)).astype("float64")} self.attrs = {'dim': [1], 'keep_dim': False} self.outputs = { 'Out': self.inputs['X'].mean( axis=tuple(self.attrs['dim']), keepdims=False) } def test_check_output(self): self.check_output() def test_check_grad(self): self.check_grad(['X'], 'Out') class TestReduceMeanWithNumelOne(OpTest): def setUp(self): self.op_type = "reduce_mean" self.inputs = {'X': np.random.random((1, 1)).astype("float64")} self.attrs = {'dim': [1], 'keep_dim': True} self.outputs = { 'Out': self.inputs['X'].mean( axis=tuple(self.attrs['dim']), keepdims=True) } def test_check_output(self): self.check_output() def test_check_grad(self): self.check_grad(['X'], 'Out') class TestReduceAll(OpTest): def setUp(self): self.op_type = "reduce_sum" self.inputs = 
{'X': np.random.random((1, 1, 1)).astype("float64")} self.attrs = {'reduce_all': True, 'keep_dim': False} self.outputs = {'Out': self.inputs['X'].sum()} def test_check_output(self): self.check_output() def test_check_grad(self): self.check_grad(['X'], 'Out') if __name__ == '__main__': unittest.main()
apache-2.0
Mazecreator/tensorflow
tensorflow/python/debug/examples/debug_errors.py
150
2655
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Example of debugging TensorFlow runtime errors using tfdbg.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import argparse import sys import numpy as np import tensorflow as tf from tensorflow.python import debug as tf_debug def main(_): sess = tf.Session() # Construct the TensorFlow network. 
ph_float = tf.placeholder(tf.float32, name="ph_float") x = tf.transpose(ph_float, name="x") v = tf.Variable(np.array([[-2.0], [-3.0], [6.0]], dtype=np.float32), name="v") m = tf.constant( np.array([[0.0, 1.0, 2.0], [-4.0, -1.0, 0.0]]), dtype=tf.float32, name="m") y = tf.matmul(m, x, name="y") z = tf.matmul(m, v, name="z") if FLAGS.debug: sess = tf_debug.LocalCLIDebugWrapperSession(sess, ui_type=FLAGS.ui_type) if FLAGS.error == "shape_mismatch": print(sess.run(y, feed_dict={ph_float: np.array([[0.0], [1.0], [2.0]])})) elif FLAGS.error == "uninitialized_variable": print(sess.run(z)) elif FLAGS.error == "no_error": print(sess.run(y, feed_dict={ph_float: np.array([[0.0, 1.0, 2.0]])})) else: raise ValueError("Unrecognized error type: " + FLAGS.error) if __name__ == "__main__": parser = argparse.ArgumentParser() parser.register("type", "bool", lambda v: v.lower() == "true") parser.add_argument( "--error", type=str, default="shape_mismatch", help="""\ Type of the error to generate (shape_mismatch | uninitialized_variable | no_error).\ """) parser.add_argument( "--ui_type", type=str, default="curses", help="Command-line user interface type (curses | readline)") parser.add_argument( "--debug", type="bool", nargs="?", const=True, default=False, help="Use debugger to track down bad values during training") FLAGS, unparsed = parser.parse_known_args() tf.app.run(main=main, argv=[sys.argv[0]] + unparsed)
apache-2.0
Mazecreator/tensorflow
tensorflow/python/kernel_tests/gradient_correctness_test.py
118
1669
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for tensorflow.ops.argmax_op.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.ops import gradients_impl from tensorflow.python.ops import math_ops from tensorflow.python.platform import test class GradientCorrectnessTest(test.TestCase): def testMultipleOutputChainedGradients(self): with self.test_session() as sess: x = constant_op.constant(1.0, dtype=dtypes.float32) yexp = math_ops.exp(x) yexplog = math_ops.log(yexp) grads = gradients_impl.gradients([yexp, yexplog], [x]) grad_vals = sess.run(grads) exp1_plus_one = (1.0 + np.exp(1.0)).astype(np.float32) # [dexp(x)/dx + d(log(exp(x)))/dx] @ x=1 == exp(1) + 1 self.assertAllClose(grad_vals[0], exp1_plus_one) if __name__ == '__main__': test.main()
apache-2.0
verpoorten/immobilier
main/tests/views/test_view_utils.py
1
1476
############################################################################## # # Immobilier it's an application # designed to manage the core business of property management, buildings, # rental agreement and so on. # # Copyright (C) 2016-2017 Verpoorten Leïla # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # A copy of this license - GNU General Public License - is available # at the root of the source code of this program. If not, # see http://www.gnu.org/licenses/. # ############################################################################## from main import views_utils from django.test import TestCase class ViewUtilsTest(TestCase): def test_get_key_none(self): self.assertIsNone(views_utils.get_key(None)) self.assertIsNone(views_utils.get_key("-")) self.assertIsNone(views_utils.get_key("None")) def test_get_key_int(self): self.assertEqual(views_utils.get_key("2"),2) self.assertEqual(views_utils.get_key(2),2)
agpl-3.0
agrimaldi/RandomIO
RandomIO/__init__.py
5
1221
# # The MIT License (MIT) # # Copyright (c) 2014 William T. James for Storj Labs # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. from .version import __version__ # NOQA from .RandomIO import RandomIO # NOQA
mit
gaddman/ansible
test/units/modules/network/f5/test_bigip_sys_global.py
21
4145
# -*- coding: utf-8 -*- # # Copyright (c) 2017 F5 Networks Inc. # GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import (absolute_import, division, print_function) __metaclass__ = type import os import json import pytest import sys if sys.version_info < (2, 7): pytestmark = pytest.mark.skip("F5 Ansible modules require Python >= 2.7") from ansible.module_utils.basic import AnsibleModule try: from library.modules.bigip_sys_global import ApiParameters from library.modules.bigip_sys_global import ModuleParameters from library.modules.bigip_sys_global import ModuleManager from library.modules.bigip_sys_global import ArgumentSpec # In Ansible 2.8, Ansible changed import paths. from test.units.compat import unittest from test.units.compat.mock import Mock from test.units.compat.mock import patch from test.units.modules.utils import set_module_args except ImportError: from ansible.modules.network.f5.bigip_sys_global import ApiParameters from ansible.modules.network.f5.bigip_sys_global import ModuleParameters from ansible.modules.network.f5.bigip_sys_global import ModuleManager from ansible.modules.network.f5.bigip_sys_global import ArgumentSpec # Ansible 2.8 imports from units.compat import unittest from units.compat.mock import Mock from units.compat.mock import patch from units.modules.utils import set_module_args fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures') fixture_data = {} def load_fixture(name): path = os.path.join(fixture_path, name) if path in fixture_data: return fixture_data[path] with open(path) as f: data = f.read() try: data = json.loads(data) except Exception: pass fixture_data[path] = data return data class TestParameters(unittest.TestCase): def test_module_parameters(self): args = dict( banner_text='this is a banner', console_timeout=100, gui_setup='yes', lcd_display='yes', mgmt_dhcp='yes', net_reboot='yes', quiet_boot='yes', security_banner='yes', ) p = 
ModuleParameters(params=args) assert p.banner_text == 'this is a banner' assert p.console_timeout == 100 assert p.gui_setup == 'yes' assert p.lcd_display == 'yes' assert p.mgmt_dhcp == 'yes' assert p.net_reboot == 'yes' assert p.quiet_boot == 'yes' assert p.security_banner == 'yes' def test_api_parameters(self): args = load_fixture('load_sys_global_settings.json') p = ApiParameters(params=args) assert 'Welcome to the BIG-IP Configuration Utility' in p.banner_text assert p.console_timeout == 0 assert p.gui_setup == 'no' assert p.lcd_display == 'yes' assert p.mgmt_dhcp == 'yes' assert p.net_reboot == 'no' assert p.quiet_boot == 'yes' assert p.security_banner == 'yes' class TestManager(unittest.TestCase): def setUp(self): self.spec = ArgumentSpec() def test_update(self, *args): set_module_args(dict( banner_text='this is a banner', console_timeout=100, password='admin', server='localhost', user='admin', state='present' )) # Configure the parameters that would be returned by querying the # remote device current = ApiParameters(params=load_fixture('load_sys_global_settings.json')) module = AnsibleModule( argument_spec=self.spec.argument_spec, supports_check_mode=self.spec.supports_check_mode ) mm = ModuleManager(module=module) # Override methods to force specific logic in the module to happen mm.exists = Mock(return_value=False) mm.read_current_from_device = Mock(return_value=current) mm.update_on_device = Mock(return_value=True) results = mm.exec_module() assert results['changed'] is True
gpl-3.0
peterjc/bioconda-recipes
recipes/biopet-vcfstats/1.0/biopet-vcfstats.py
44
3367
#!/usr/bin/env python
#
# Wrapper script for starting the biopet-vcfstats JAR package
#
# This script is written for use with the Conda package manager and is copied
# from the peptide-shaker wrapper. Only the parameters are changed.
# (https://github.com/bioconda/bioconda-recipes/blob/master/recipes/peptide-shaker/peptide-shaker.py)
#
# This file was automatically generated by the sbt-bioconda plugin.

import os
import subprocess
import sys
import shutil

from os import access
from os import getenv
from os import X_OK

# Name of the bundled assembly jar launched by this wrapper.
jar_file = 'VcfStats-assembly-1.0.jar'

# JVM memory options applied when the caller supplies none and
# _JAVA_OPTIONS is unset (see jvm_opts below).
default_jvm_mem_opts = []

# !!! End of parameter section. No user-serviceable code below this line !!!


def real_dirname(path):
    """Return the symlink-resolved, canonicalized directory-portion of path."""
    return os.path.dirname(os.path.realpath(path))


def java_executable():
    """Return the executable name of the Java interpreter.

    Prefers $JAVA_HOME/bin/java when it exists and is executable;
    otherwise falls back to whatever ``java`` resolves to on the PATH.
    """
    java_home = getenv('JAVA_HOME')
    java_bin = os.path.join('bin', 'java')

    if java_home and access(os.path.join(java_home, java_bin), X_OK):
        return os.path.join(java_home, java_bin)
    else:
        return 'java'


def jvm_opts(argv):
    """Construct list of Java arguments based on our argument list.

    The argument list passed in argv must not include the script name.
    The return value is a 4-tuple of the form:
      (memory_options, prop_options, passthrough_options, exec_dir)
    where the first three elements are lists of strings and exec_dir is
    either None or the directory extracted from a --exec_dir= option.
    (The original docstring incorrectly described a 3-tuple.)
    """
    mem_opts = []
    prop_opts = []
    pass_args = []
    exec_dir = None

    for arg in argv:
        if arg.startswith('-D'):
            prop_opts.append(arg)
        elif arg.startswith('-XX'):
            prop_opts.append(arg)
        elif arg.startswith('-Xm'):
            mem_opts.append(arg)
        elif arg.startswith('--exec_dir='):
            exec_dir = arg.split('=')[1].strip('"').strip("'")
            if not os.path.exists(exec_dir):
                # First use of this exec_dir: replicate the distribution
                # (jar, lib, resources) next to it so relative paths work.
                shutil.copytree(real_dirname(sys.argv[0]), exec_dir,
                                symlinks=False, ignore=None)
        else:
            pass_args.append(arg)

    # In the original shell script the test coded below read:
    #   if [ "$jvm_mem_opts" == "" ] && [ -z ${_JAVA_OPTIONS+x} ]
    # To reproduce the behaviour of the above shell code fragment
    # it is important to explicitly check for equality with None
    # in the second condition, so a null envar value counts as True!
    if mem_opts == [] and getenv('_JAVA_OPTIONS') is None:
        mem_opts = default_jvm_mem_opts

    return (mem_opts, prop_opts, pass_args, exec_dir)


def main():
    """Launch the JVM with the wrapped jar and the caller's arguments.

    The tool updates files relative to the path of the jar file. In a
    multiuser setting, the option --exec_dir="exec_dir" can be used as
    the location for the distribution. If exec_dir does not exist, the
    jar file, lib, and resources are copied to the exec_dir directory.
    """
    java = java_executable()
    (mem_opts, prop_opts, pass_args, exec_dir) = jvm_opts(sys.argv[1:])
    jar_dir = exec_dir if exec_dir else real_dirname(sys.argv[0])

    # A fully qualified main class (e.g. "eu...") runs via the classpath;
    # otherwise the jar's manifest main class is executed directly.
    if pass_args != [] and pass_args[0].startswith('eu'):
        jar_arg = '-cp'
    else:
        jar_arg = '-jar'

    jar_path = os.path.join(jar_dir, jar_file)

    java_args = [java] + mem_opts + prop_opts + [jar_arg] + [jar_path] + pass_args

    sys.exit(subprocess.call(java_args))


if __name__ == '__main__':
    main()
mit
freakynit/kaggle-ndsb
configurations/bagging_07_convroll4_doublescale_fs5.py
6
7063
import numpy as np

import theano
import theano.tensor as T

import lasagne as nn

import data
import load
import nn_plankton
import dihedral
import dihedral_fast
import tmp_dnn
import tta


validation_split_path = "splits/bagging_split_7.pkl"

# Two input scales: a large size-normalized patch and a smaller fixed-scale one.
patch_sizes = [(95, 95), (47, 47)]
augmentation_params = {
    'zoom_range': (1 / 1.6, 1.6),
    'rotation_range': (0, 360),
    'shear_range': (-20, 20),
    'translation_range': (-10, 10),
    'do_flip': True,
    'allow_stretch': 1.3,
}

batch_size = 128 // 4
chunk_size = 32768 // 4
num_chunks_train = 840

momentum = 0.9
learning_rate_schedule = {
    0: 0.0015,
    700: 0.00015,
    800: 0.000015,
}

validate_every = 20
save_every = 20


def estimate_scale(img):
    """Per-image rescale factor: longest side normalized to 85 px."""
    return np.maximum(img.shape[0], img.shape[1]) / 85.0


scale_factors = [estimate_scale, 5.0]  # combine size-based rescaling + fixed rescaling

# Quasi-random test-time augmentation transforms.
augmentation_transforms_test = tta.build_quasirandom_transforms(70, **{
    'zoom_range': (1 / 1.4, 1.4),
    'rotation_range': (0, 360),
    'shear_range': (-10, 10),
    'translation_range': (-8, 8),
    'do_flip': True,
    'allow_stretch': 1.2,
})

data_loader = load.ZmuvMultiscaleDataLoader(
    scale_factors=scale_factors, num_chunks_train=num_chunks_train,
    patch_sizes=patch_sizes, chunk_size=chunk_size,
    augmentation_params=augmentation_params,
    augmentation_transforms_test=augmentation_transforms_test,
    validation_split_path=validation_split_path)

Conv2DLayer = tmp_dnn.Conv2DDNNLayer
MaxPool2DLayer = tmp_dnn.MaxPool2DDNNLayer


def build_model():
    """Build the two-branch (variable-scale + fixed-scale) convroll network.

    Returns ([input_layer_variable, input_layer_fixed], output_layer).
    """
    # --- variable-scale branch -------------------------------------------
    l0_variable = nn.layers.InputLayer((batch_size, 1, patch_sizes[0][0], patch_sizes[0][1]))
    l0c = dihedral.CyclicSliceLayer(l0_variable)

    l1a = Conv2DLayer(l0c, num_filters=32, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu)
    l1b = Conv2DLayer(l1a, num_filters=16, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu)
    l1 = MaxPool2DLayer(l1b, ds=(3, 3), strides=(2, 2))
    l1r = dihedral_fast.CyclicConvRollLayer(l1)

    l2a = Conv2DLayer(l1r, num_filters=64, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu)
    l2b = Conv2DLayer(l2a, num_filters=32, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu)
    l2 = MaxPool2DLayer(l2b, ds=(3, 3), strides=(2, 2))
    l2r = dihedral_fast.CyclicConvRollLayer(l2)

    l3a = Conv2DLayer(l2r, num_filters=128, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu)
    l3b = Conv2DLayer(l3a, num_filters=128, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu)
    l3c = Conv2DLayer(l3b, num_filters=64, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu)
    l3 = MaxPool2DLayer(l3c, ds=(3, 3), strides=(2, 2))
    l3r = dihedral_fast.CyclicConvRollLayer(l3)

    l4a = Conv2DLayer(l3r, num_filters=256, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu)
    l4b = Conv2DLayer(l4a, num_filters=256, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu)
    l4c = Conv2DLayer(l4b, num_filters=128, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu)
    l4 = MaxPool2DLayer(l4c, ds=(3, 3), strides=(2, 2))
    l4r = dihedral_fast.CyclicConvRollLayer(l4)
    l4f = nn.layers.flatten(l4r)

    l5 = nn.layers.DenseLayer(nn.layers.dropout(l4f, p=0.5), num_units=256, W=nn_plankton.Orthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu)
    l5r = dihedral_fast.CyclicRollLayer(l5)

    l6 = nn.layers.DenseLayer(nn.layers.dropout(l5r, p=0.5), num_units=256, W=nn_plankton.Orthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu)

    l_variable = dihedral.CyclicPoolLayer(l6, pool_function=nn_plankton.rms)

    # --- fixed-scale branch (default nonlinearities) ---------------------
    l0_fixed = nn.layers.InputLayer((batch_size, 1, patch_sizes[1][0], patch_sizes[1][1]))
    l0c = dihedral.CyclicSliceLayer(l0_fixed)

    l1a = Conv2DLayer(l0c, num_filters=16, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1))
    l1b = Conv2DLayer(l1a, num_filters=8, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1))
    l1 = MaxPool2DLayer(l1b, ds=(3, 3), strides=(2, 2))
    l1r = dihedral_fast.CyclicConvRollLayer(l1)

    l2a = Conv2DLayer(l1r, num_filters=32, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1))
    l2b = Conv2DLayer(l2a, num_filters=16, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1))
    l2 = MaxPool2DLayer(l2b, ds=(3, 3), strides=(2, 2))
    l2r = dihedral_fast.CyclicConvRollLayer(l2)

    l3a = Conv2DLayer(l2r, num_filters=64, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1))
    l3b = Conv2DLayer(l3a, num_filters=64, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1))
    l3c = Conv2DLayer(l3b, num_filters=32, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1))
    l3 = MaxPool2DLayer(l3c, ds=(3, 3), strides=(2, 2))
    l3r = dihedral_fast.CyclicConvRollLayer(l3)
    l3f = nn.layers.flatten(l3r)

    l4 = nn.layers.DenseLayer(nn.layers.dropout(l3f, p=0.5), num_units=128, W=nn_plankton.Orthogonal(1.0), b=nn.init.Constant(0.1))
    l4r = dihedral_fast.CyclicRollLayer(l4)

    l5 = nn.layers.DenseLayer(nn.layers.dropout(l4r, p=0.5), num_units=128, W=nn_plankton.Orthogonal(1.0), b=nn.init.Constant(0.1))

    l_fixed = dihedral.CyclicPoolLayer(l5, pool_function=nn_plankton.rms)

    # --- merge the parts --------------------------------------------------
    l_merged = nn.layers.concat([l_variable, l_fixed])

    l7 = nn.layers.DenseLayer(nn.layers.dropout(l_merged, p=0.5), num_units=data.num_classes, nonlinearity=T.nnet.softmax, W=nn_plankton.Orthogonal(1.0))

    return [l0_variable, l0_fixed], l7
mit
tobiasjakobi/mpv
bootstrap.py
7
1482
#!/usr/bin/env python # This script simply downloads waf to the current directory from __future__ import print_function import os, sys, stat, hashlib, subprocess WAFRELEASE = "waf-1.9.8" WAFURLS = ["https://waf.io/" + WAFRELEASE, "http://www.freehackers.org/~tnagy/release/" + WAFRELEASE] SHA256HASH = "167dc42bab6d5bd823b798af195420319cb5c9b571e00db7d83df2a0fe1f4dbf" if os.path.exists("waf"): wafver = subprocess.check_output([sys.executable, './waf', '--version']).decode() if WAFRELEASE.split('-')[1] == wafver.split(' ')[1]: print("Found 'waf', skipping download.") sys.exit(0) try: from urllib.request import urlopen, URLError except: from urllib2 import urlopen, URLError waf = None for WAFURL in WAFURLS: try: print("Downloading {}...".format(WAFURL)) waf = urlopen(WAFURL).read() break except URLError: print("Download failed.") if not waf: print("Could not download {}.".format(WAFRELEASE)) sys.exit(1) if SHA256HASH == hashlib.sha256(waf).hexdigest(): with open("waf", "wb") as wf: wf.write(waf) os.chmod("waf", os.stat("waf").st_mode | stat.S_IXUSR) print("Checksum verified.") else: print("The checksum of the downloaded file does not match!") print(" - got: {}".format(hashlib.sha256(waf).hexdigest())) print(" - expected: {}".format(SHA256HASH)) print("Please download and verify the file manually.") sys.exit(1)
gpl-2.0
Pathel/deuterium
src/server/amp.py
2
20482
""" Contains the protocols, commands, and client factory needed for the Server and Portal to communicate with each other, letting Portal work as a proxy. Both sides use this same protocol. The separation works like this: Portal - (AMP client) handles protocols. It contains a list of connected sessions in a dictionary for identifying the respective player connected. If it looses the AMP connection it will automatically try to reconnect. Server - (AMP server) Handles all mud operations. The server holds its own list of sessions tied to player objects. This is synced against the portal at startup and when a session connects/disconnects """ # imports needed on both server and portal side import os from collections import defaultdict try: import cPickle as pickle except ImportError: import pickle from twisted.protocols import amp from twisted.internet import protocol from twisted.internet.defer import Deferred from src.utils.utils import to_str, variable_from_module # communication bits PCONN = chr(1) # portal session connect PDISCONN = chr(2) # portal session disconnect PSYNC = chr(3) # portal session sync SLOGIN = chr(4) # server session login SDISCONN = chr(5) # server session disconnect SDISCONNALL = chr(6) # server session disconnect all SSHUTD = chr(7) # server shutdown SSYNC = chr(8) # server session sync SCONN = chr(9) # server creating new connectiong (for irc/imc2 bots etc) PCONNSYNC = chr(10) # portal post-syncing a session MAXLEN = 65535 # max allowed data length in AMP protocol _MSGBUFFER = defaultdict(list) def get_restart_mode(restart_file): """ Parse the server/portal restart status """ if os.path.exists(restart_file): flag = open(restart_file, 'r').read() return flag == "True" return False class AmpServerFactory(protocol.ServerFactory): """ This factory creates the Server as a new AMPProtocol instance for accepting connections from the Portal. 
""" def __init__(self, server): """ server: The Evennia server service instance protocol: The protocol the factory creates instances of. """ self.server = server self.protocol = AMPProtocol def buildProtocol(self, addr): """ Start a new connection, and store it on the service object """ #print "Evennia Server connected to Portal at %s." % addr self.server.amp_protocol = AMPProtocol() self.server.amp_protocol.factory = self return self.server.amp_protocol class AmpClientFactory(protocol.ReconnectingClientFactory): """ This factory creates an instance of the Portal, an AMPProtocol instances to use to connect """ # Initial reconnect delay in seconds. initialDelay = 1 factor = 1.5 maxDelay = 1 def __init__(self, portal): self.portal = portal self.protocol = AMPProtocol def startedConnecting(self, connector): """ Called when starting to try to connect to the MUD server. """ pass #print 'AMP started to connect:', connector def buildProtocol(self, addr): """ Creates an AMPProtocol instance when connecting to the server. """ #print "Portal connected to Evennia server at %s." % addr self.resetDelay() self.portal.amp_protocol = AMPProtocol() self.portal.amp_protocol.factory = self return self.portal.amp_protocol def clientConnectionLost(self, connector, reason): """ Called when the AMP connection to the MUD server is lost. """ if hasattr(self, "server_restart_mode"): self.maxDelay = 1 else: # Don't translate this; avoid loading django on portal side. self.maxDelay = 10 self.portal.sessions.announce_all(" ... Portal lost connection to Server.") protocol.ReconnectingClientFactory.clientConnectionLost(self, connector, reason) def clientConnectionFailed(self, connector, reason): """ Called when an AMP connection attempt to the MUD server fails. 
""" if hasattr(self, "server_restart_mode"): self.maxDelay = 1 else: self.maxDelay = 10 self.portal.sessions.announce_all(" ...") protocol.ReconnectingClientFactory.clientConnectionFailed(self, connector, reason) # AMP Communication Command types class MsgPortal2Server(amp.Command): """ Message portal -> server """ key = "MsgPortal2Server" arguments = [('sessid', amp.Integer()), ('ipart', amp.Integer()), ('nparts', amp.Integer()), ('msg', amp.String()), ('data', amp.String())] errors = [(Exception, 'EXCEPTION')] response = [] class MsgServer2Portal(amp.Command): """ Message server -> portal """ key = "MsgServer2Portal" arguments = [('sessid', amp.Integer()), ('ipart', amp.Integer()), ('nparts', amp.Integer()), ('msg', amp.String()), ('data', amp.String())] errors = [(Exception, 'EXCEPTION')] response = [] class ServerAdmin(amp.Command): """ Portal -> Server Sent when the portal needs to perform admin operations on the server, such as when a new session connects or resyncs """ key = "ServerAdmin" arguments = [('sessid', amp.Integer()), ('ipart', amp.Integer()), ('nparts', amp.Integer()), ('operation', amp.String()), ('data', amp.String())] errors = [(Exception, 'EXCEPTION')] response = [] class PortalAdmin(amp.Command): """ Server -> Portal Sent when the server needs to perform admin operations on the portal. """ key = "PortalAdmin" arguments = [('sessid', amp.Integer()), ('ipart', amp.Integer()), ('nparts', amp.Integer()), ('operation', amp.String()), ('data', amp.String())] errors = [(Exception, 'EXCEPTION')] response = [] class FunctionCall(amp.Command): """ Bidirectional Sent when either process needs to call an arbitrary function in the other. 
""" key = "FunctionCall" arguments = [('module', amp.String()), ('function', amp.String()), ('args', amp.String()), ('kwargs', amp.String())] errors = [(Exception, 'EXCEPTION')] response = [('result', amp.String())] # Helper functions dumps = lambda data: to_str(pickle.dumps(data, pickle.HIGHEST_PROTOCOL)) loads = lambda data: pickle.loads(to_str(data)) # multipart message store #------------------------------------------------------------ # Core AMP protocol for communication Server <-> Portal #------------------------------------------------------------ class AMPProtocol(amp.AMP): """ This is the protocol that the MUD server and the proxy server communicate to each other with. AMP is a bi-directional protocol, so both the proxy and the MUD use the same commands and protocol. AMP specifies responder methods here and connect them to amp.Command subclasses that specify the datatypes of the input/output of these methods. """ # helper methods def connectionMade(self): """ This is called when a connection is established between server and portal. AMP calls it on both sides, so we need to make sure to only trigger resync from the portal side. """ if hasattr(self.factory, "portal"): # only the portal has the 'portal' property, so we know we are # on the portal side and can initialize the connection. sessdata = self.factory.portal.sessions.get_all_sync_data() self.call_remote_ServerAdmin(0, PSYNC, data=sessdata) self.factory.portal.sessions.at_server_connection() if hasattr(self.factory, "server_restart_mode"): del self.factory.server_restart_mode # Error handling def errback(self, e, info): "error handler, to avoid dropping connections on server tracebacks." f = e.trap(Exception) print "AMP Error for %(info)s: %(e)s" % {'info': info, 'e': e.getErrorMessage()} def safe_send(self, command, sessid, **kwargs): """ This helper method splits the sending of a message into multiple parts with a maxlength of MAXLEN. This is to avoid repetition in two sending commands. 
when calling this the maximum length has already been exceeded. The max-length will be checked for all kwargs and these will be used as argument to the command. The command type must have keywords ipart and nparts to track the parts and put them back together on the other side. Returns a deferred or a list of such """ to_send = [(key, [string[i:i+MAXLEN] for i in range(0, len(string), MAXLEN)]) for key, string in kwargs.items()] nparts_max = max(len(part[1]) for part in to_send) if nparts_max == 1: # first try to send directly return self.callRemote(command, sessid=sessid, ipart=0, nparts=1, **kwargs).addErrback(self.errback, command.key) else: # one or more parts were too long for MAXLEN. #print "TooLong triggered!" deferreds = [] for ipart in range(nparts_max): part_kwargs = {} for key, str_part in to_send: try: part_kwargs[key] = str_part[ipart] except IndexError: # means this kwarg needed fewer splits part_kwargs[key] = "" # send this part #print "amp safe sending:", ipart, nparts_max, str_part deferreds.append(self.callRemote( command, sessid=sessid, ipart=ipart, nparts=nparts_max, **part_kwargs).addErrback(self.errback, command.key)) return deferreds def safe_recv(self, command, sessid, ipart, nparts, **kwargs): """ Safely decode potentially split data coming over the wire. No decoding or parsing is done here, only merging of data split with safe_send(). If the data stream is not yet complete, this method will return None, otherwise it will return a dictionary of the (possibly merged) properties. 
""" global _MSGBUFFER if nparts == 1: # the most common case return kwargs else: # part of a multi-part send hashid = "%s_%s" % (command.key, sessid) #print "amp safe receive:", ipart, nparts-1, kwargs if ipart < nparts-1: # not yet complete _MSGBUFFER[hashid].append(kwargs) return else: # all parts in place, put them back together buf = _MSGBUFFER.pop(hashid) + [kwargs] recv_kwargs = dict((key, "".join(kw[key] for kw in buf)) for key in kwargs) return recv_kwargs # Message definition + helper methods to call/create each message type # Portal -> Server Msg def amp_msg_portal2server(self, sessid, ipart, nparts, msg, data): """ Relays message to server. This method is executed on the Server. Since AMP has a limit of 65355 bytes per message, it's possible the data comes in multiple chunks; if so (nparts>1) we buffer the data and wait for the remaining parts to arrive before continuing. """ #print "msg portal -> server (server side):", sessid, msg, data ret = self.safe_recv(MsgPortal2Server, sessid, ipart, nparts, text=msg, data=data) if ret is not None: self.factory.server.sessions.data_in(sessid, text=ret["text"], **loads(ret["data"])) return {} MsgPortal2Server.responder(amp_msg_portal2server) def call_remote_MsgPortal2Server(self, sessid, msg, data=""): """ Access method called by the Portal and executed on the Portal. """ #print "msg portal->server (portal side):", sessid, msg, data return self.safe_send(MsgPortal2Server, sessid, msg=msg if msg is not None else "", data=dumps(data)) # Server -> Portal message def amp_msg_server2portal(self, sessid, ipart, nparts, msg, data): """ Relays message to Portal. This method is executed on the Portal. 
""" #print "msg server->portal (portal side):", sessid, msg ret = self.safe_recv(MsgServer2Portal, sessid, ipart, nparts, text=msg, data=data) if ret is not None: self.factory.portal.sessions.data_out(sessid, text=ret["text"], **loads(ret["data"])) return {} MsgServer2Portal.responder(amp_msg_server2portal) def call_remote_MsgServer2Portal(self, sessid, msg, data=""): """ Access method called by the Server and executed on the Server. """ #print "msg server->portal (server side):", sessid, msg, data return self.safe_send(MsgServer2Portal, sessid, msg=msg if msg is not None else "", data=dumps(data)) # Server administration from the Portal side def amp_server_admin(self, sessid, ipart, nparts, operation, data): """ This allows the portal to perform admin operations on the server. This is executed on the Server. """ ret = self.safe_recv(ServerAdmin, sessid, ipart, nparts, operation=operation, data=data) if ret is not None: data = loads(ret["data"]) operation = ret["operation"] server_sessionhandler = self.factory.server.sessions #print "serveradmin (server side):", sessid, ord(operation), data if operation == PCONN: # portal_session_connect # create a new session and sync it server_sessionhandler.portal_connect(data) elif operation == PCONNSYNC: #portal_session_sync server_sessionhandler.portal_session_sync(data) elif operation == PDISCONN: # portal_session_disconnect # session closed from portal side self.factory.server.sessions.portal_disconnect(sessid) elif operation == PSYNC: # portal_session_sync # force a resync of sessions when portal reconnects to # server (e.g. after a server reboot) the data kwarg # contains a dict {sessid: {arg1:val1,...}} # representing the attributes to sync for each # session. server_sessionhandler.portal_sessions_sync(data) else: raise Exception("operation %(op)s not recognized." 
% {'op': operation}) return {} ServerAdmin.responder(amp_server_admin) def call_remote_ServerAdmin(self, sessid, operation="", data=""): """ Access method called by the Portal and Executed on the Portal. """ #print "serveradmin (portal side):", sessid, ord(operation), data data = dumps(data) return self.safe_send(ServerAdmin, sessid, operation=operation, data=data) # Portal administraton from the Server side def amp_portal_admin(self, sessid, ipart, nparts, operation, data): """ This allows the server to perform admin operations on the portal. This is executed on the Portal. """ #print "portaladmin (portal side):", sessid, ord(operation), data ret = self.safe_recv(PortalAdmin, sessid, ipart, nparts, operation=operation, data=data) if ret is not None: data = loads(data) portal_sessionhandler = self.factory.portal.sessions if operation == SLOGIN: # server_session_login # a session has authenticated; sync it. portal_sessionhandler.server_logged_in(sessid, data) elif operation == SDISCONN: # server_session_disconnect # the server is ordering to disconnect the session portal_sessionhandler.server_disconnect(sessid, reason=data) elif operation == SDISCONNALL: # server_session_disconnect_all # server orders all sessions to disconnect portal_sessionhandler.server_disconnect_all(reason=data) elif operation == SSHUTD: # server_shutdown # the server orders the portal to shut down self.factory.portal.shutdown(restart=False) elif operation == SSYNC: # server_session_sync # server wants to save session data to the portal, # maybe because it's about to shut down. portal_sessionhandler.server_session_sync(data) # set a flag in case we are about to shut down soon self.factory.server_restart_mode = True elif operation == SCONN: # server_force_connection (for irc/imc2 etc) portal_sessionhandler.server_connect(**data) else: raise Exception("operation %(op)s not recognized." 
% {'op': operation}) return {} PortalAdmin.responder(amp_portal_admin) def call_remote_PortalAdmin(self, sessid, operation="", data=""): """ Access method called by the server side. """ self.safe_send(PortalAdmin, sessid, operation=operation, data=dumps(data)) # Extra functions def amp_function_call(self, module, function, args, **kwargs): """ This allows Portal- and Server-process to call an arbitrary function in the other process. It is intended for use by plugin modules. """ args = loads(args) kwargs = loads(kwargs) # call the function (don't catch tracebacks here) result = variable_from_module(module, function)(*args, **kwargs) if isinstance(result, Deferred): # if result is a deferred, attach handler to properly # wrap the return value result.addCallback(lambda r: {"result": dumps(r)}) return result else: return {'result': dumps(result)} FunctionCall.responder(amp_function_call) def call_remote_FunctionCall(self, modulepath, functionname, *args, **kwargs): """ Access method called by either process. This will call an arbitrary function on the other process (On Portal if calling from Server and vice versa). Inputs: modulepath (str) - python path to module holding function to call functionname (str) - name of function in given module *args, **kwargs will be used as arguments/keyword args for the remote function call Returns: A deferred that fires with the return value of the remote function call """ return self.callRemote(FunctionCall, module=modulepath, function=functionname, args=dumps(args), kwargs=dumps(kwargs)).addCallback(lambda r: loads(r["result"])).addErrback(self.errback, "FunctionCall")
bsd-3-clause
Endika/edx-platform
openedx/core/lib/api/tests/test_permissions.py
6
4368
""" Tests for API permissions classes. """
import ddt
from django.test import TestCase, RequestFactory

from student.roles import CourseStaffRole, CourseInstructorRole
from openedx.core.lib.api.permissions import IsStaffOrOwner, IsCourseInstructor
from student.tests.factories import UserFactory
from opaque_keys.edx.keys import CourseKey


class TestObject(object):
    """ Fake class for object permission tests. """
    def __init__(self, user=None, course_id=None):
        self.user = user
        self.course_id = course_id


class IsCourseInstructorTests(TestCase):
    """ Test for IsCourseInstructor permission class. """

    def setUp(self):
        super(IsCourseInstructorTests, self).setUp()
        self.permission = IsCourseInstructor()
        self.request = RequestFactory().get('/')
        self.course_key = CourseKey.from_string('edx/test123/run')
        self.obj = TestObject(course_id=self.course_key)

    def test_course_staff_has_no_access(self):
        """ The course *staff* role alone does not grant instructor access. """
        user = UserFactory.create()
        self.request.user = user
        CourseStaffRole(course_key=self.course_key).add_users(user)
        self.assertFalse(
            self.permission.has_object_permission(self.request, None, self.obj))

    def test_course_instructor_has_access(self):
        """ Users holding the course instructor role are permitted. """
        user = UserFactory.create()
        self.request.user = user
        CourseInstructorRole(course_key=self.course_key).add_users(user)
        self.assertTrue(
            self.permission.has_object_permission(self.request, None, self.obj))

    def test_anonymous_has_no_access(self):
        """ A request with no authenticated user is denied. """
        self.assertFalse(
            self.permission.has_object_permission(self.request, None, self.obj))


@ddt.ddt
class IsStaffOrOwnerTests(TestCase):
    """ Tests for IsStaffOrOwner permission class. """

    def setUp(self):
        super(IsStaffOrOwnerTests, self).setUp()
        self.permission = IsStaffOrOwner()
        self.request = RequestFactory().get('/')
        self.obj = TestObject()

    def assert_user_has_object_permission(self, user, permitted):
        """
        Asserts whether or not the user has permission to access an object.

        Arguments
            user (User)
            permitted (boolean)
        """
        self.request.user = user
        self.assertEqual(self.permission.has_object_permission(self.request, None, self.obj), permitted)

    def test_staff_user(self):
        """ Staff users should be permitted. """
        user = UserFactory.create(is_staff=True)
        self.assert_user_has_object_permission(user, True)

    def test_owner(self):
        """ Owners should be permitted. """
        user = UserFactory.create()
        self.obj.user = user
        self.assert_user_has_object_permission(user, True)

    # Renamed from the garbled ``test_non_staff_test_non_owner_or_staff_user``:
    # the body exercises a user who is neither staff nor the object's owner.
    def test_non_staff_non_owner_user(self):
        """ Non-staff and non-owner users should not be permitted. """
        user = UserFactory.create()
        self.assert_user_has_object_permission(user, False)

    def test_has_permission_as_staff(self):
        """ Staff users always have permission. """
        self.request.user = UserFactory.create(is_staff=True)
        self.assertTrue(self.permission.has_permission(self.request, None))

    def test_has_permission_as_owner_with_get(self):
        """ Owners always have permission to make GET actions. """
        user = UserFactory.create()
        request = RequestFactory().get('/?username={}'.format(user.username))
        request.user = user
        self.assertTrue(self.permission.has_permission(request, None))

    @ddt.data('patch', 'post', 'put')
    def test_has_permission_as_owner_with_edit(self, action):
        """ Owners always have permission to edit. """
        user = UserFactory.create()

        data = {'username': user.username}
        request = getattr(RequestFactory(), action)('/', data, format='json')
        request.user = user
        request.data = data  # Note (CCB): This is a hack that should be fixed. (ECOM-3171)

        self.assertTrue(self.permission.has_permission(request, None))

    def test_has_permission_as_non_owner(self):
        """ Non-owners should not have permission. """
        user = UserFactory.create()
        request = RequestFactory().get('/?username={}'.format(user.username))
        request.user = UserFactory.create()
        self.assertFalse(self.permission.has_permission(request, None))
agpl-3.0
shrimpboyho/git.js
emscript/python/2.7.5.1_32bit/Lib/UserDict.py
358
5811
"""A more or less complete user-defined wrapper around dictionary objects."""

# NOTE: This is Python 2 code — it relies on the three-way cmp() builtin,
# the ``raise Exception, message`` statement form, and the py2 iterator
# protocol (``.next()``), none of which exist in Python 3.

class UserDict:
    """Dict-like wrapper keeping the real mapping in ``self.data``.

    Predates the ability to subclass ``dict`` directly; user code
    subclasses this and overrides methods as needed.
    """
    def __init__(self, dict=None, **kwargs):
        # ``dict`` intentionally shadows the builtin: it is part of the
        # historic public signature of this class and cannot be renamed.
        self.data = {}
        if dict is not None:
            self.update(dict)
        if len(kwargs):
            self.update(kwargs)
    def __repr__(self): return repr(self.data)
    def __cmp__(self, dict):
        # Py2 three-way comparison; unwraps another UserDict so that two
        # wrappers compare by their underlying mappings.
        if isinstance(dict, UserDict):
            return cmp(self.data, dict.data)
        else:
            return cmp(self.data, dict)
    __hash__ = None # Avoid Py3k warning
    def __len__(self): return len(self.data)
    def __getitem__(self, key):
        # Mirrors dict behavior: fall back to the ``__missing__`` hook
        # (looked up on the class) before raising KeyError.
        if key in self.data:
            return self.data[key]
        if hasattr(self.__class__, "__missing__"):
            return self.__class__.__missing__(self, key)
        raise KeyError(key)
    def __setitem__(self, key, item): self.data[key] = item
    def __delitem__(self, key): del self.data[key]
    def clear(self): self.data.clear()
    def copy(self):
        if self.__class__ is UserDict:
            # Exact class: a shallow copy of the backing dict suffices.
            return UserDict(self.data.copy())
        import copy
        # Subclass: temporarily swap out ``data`` so copy.copy() does not
        # share the backing dict, then repopulate the copy from self.
        data = self.data
        try:
            self.data = {}
            c = copy.copy(self)
        finally:
            self.data = data
        c.update(self)
        return c
    def keys(self): return self.data.keys()
    def items(self): return self.data.items()
    def iteritems(self): return self.data.iteritems()
    def iterkeys(self): return self.data.iterkeys()
    def itervalues(self): return self.data.itervalues()
    def values(self): return self.data.values()
    def has_key(self, key): return key in self.data
    def update(self, dict=None, **kwargs):
        # Accepts another UserDict, a real dict, any mapping with items(),
        # or nothing; keyword arguments are merged in last.
        if dict is None:
            pass
        elif isinstance(dict, UserDict):
            self.data.update(dict.data)
        elif isinstance(dict, type({})) or not hasattr(dict, 'items'):
            self.data.update(dict)
        else:
            for k, v in dict.items():
                self[k] = v
        if len(kwargs):
            self.data.update(kwargs)
    def get(self, key, failobj=None):
        if key not in self:
            return failobj
        return self[key]
    def setdefault(self, key, failobj=None):
        if key not in self:
            self[key] = failobj
        return self[key]
    def pop(self, key, *args):
        return self.data.pop(key, *args)
    def popitem(self):
        return self.data.popitem()
    def __contains__(self, key):
        return key in self.data
    @classmethod
    def fromkeys(cls, iterable, value=None):
        # Alternate constructor mirroring dict.fromkeys(); builds an
        # instance of whatever subclass it is invoked on.
        d = cls()
        for key in iterable:
            d[key] = value
        return d

class IterableUserDict(UserDict):
    """UserDict variant that supports direct iteration over its keys."""
    def __iter__(self):
        return iter(self.data)

import _abcoll
# Register as a virtual MutableMapping so isinstance() checks against the
# collections ABCs succeed for IterableUserDict instances.
_abcoll.MutableMapping.register(IterableUserDict)

class DictMixin:
    # Mixin defining all dictionary methods for classes that already have
    # a minimum dictionary interface including getitem, setitem, delitem,
    # and keys. Without knowledge of the subclass constructor, the mixin
    # does not define __init__() or copy(). In addition to the four base
    # methods, progressively more efficiency comes with defining
    # __contains__(), __iter__(), and iteritems().

    # second level definitions support higher levels
    def __iter__(self):
        for k in self.keys():
            yield k
    def has_key(self, key):
        # EAFP: probe with __getitem__ rather than scanning keys().
        try:
            self[key]
        except KeyError:
            return False
        return True
    def __contains__(self, key):
        return self.has_key(key)

    # third level takes advantage of second level definitions
    def iteritems(self):
        for k in self:
            yield (k, self[k])
    def iterkeys(self):
        return self.__iter__()

    # fourth level uses definitions from lower levels
    def itervalues(self):
        for _, v in self.iteritems():
            yield v
    def values(self):
        return [v for _, v in self.iteritems()]
    def items(self):
        return list(self.iteritems())
    def clear(self):
        for key in self.keys():
            del self[key]
    def setdefault(self, key, default=None):
        try:
            return self[key]
        except KeyError:
            self[key] = default
        return default
    def pop(self, key, *args):
        # ``*args`` carries the optional default, matching dict.pop().
        if len(args) > 1:
            raise TypeError, "pop expected at most 2 arguments, got "\
                              + repr(1 + len(args))
        try:
            value = self[key]
        except KeyError:
            if args:
                return args[0]
            raise
        del self[key]
        return value
    def popitem(self):
        try:
            # Py2 iterator protocol: .next() raises StopIteration when empty.
            k, v = self.iteritems().next()
        except StopIteration:
            raise KeyError, 'container is empty'
        del self[k]
        return (k, v)
    def update(self, other=None, **kwargs):
        # Make progressively weaker assumptions about "other"
        if other is None:
            pass
        elif hasattr(other, 'iteritems'):  # iteritems saves memory and lookups
            for k, v in other.iteritems():
                self[k] = v
        elif hasattr(other, 'keys'):
            for k in other.keys():
                self[k] = other[k]
        else:
            # Last resort: assume an iterable of (key, value) pairs.
            for k, v in other:
                self[k] = v
        if kwargs:
            self.update(kwargs)
    def get(self, key, default=None):
        try:
            return self[key]
        except KeyError:
            return default
    def __repr__(self):
        return repr(dict(self.iteritems()))
    def __cmp__(self, other):
        # Py2 three-way comparison via a materialized dict snapshot.
        if other is None:
            return 1
        if isinstance(other, DictMixin):
            other = dict(other.iteritems())
        return cmp(dict(self.iteritems()), other)
    def __len__(self):
        return len(self.keys())
gpl-2.0
buntyke/Flask
microblog/flask/lib/python2.7/site-packages/openid/consumer/html_parse.py
167
7161
"""
This module implements a VERY limited parser that finds <link> tags in
the head of HTML or XHTML documents and parses out their attributes
according to the OpenID spec. It is a liberal parser, but it requires
these things from the data in order to work:

 - There must be an open <html> tag

 - There must be an open <head> tag inside of the <html> tag

 - Only <link>s that are found inside of the <head> tag are parsed
   (this is by design)

 - The parser follows the OpenID specification in resolving the
   attributes of the link tags. This means that the attributes DO NOT
   get resolved as they would by an XML or HTML parser. In particular,
   only certain entities get replaced, and href attributes do not get
   resolved relative to a base URL.

From http://openid.net/specs.bml#linkrel:

 - The openid.server URL MUST be an absolute URL. OpenID consumers
   MUST NOT attempt to resolve relative URLs.

 - The openid.server URL MUST NOT include entities other than &amp;,
   &lt;, &gt;, and &quot;.

The parser ignores SGML comments and <![CDATA[blocks]]>. Both kinds of
quoting are allowed for attributes.

The parser deals with invalid markup in these ways:

 - Tag names are not case-sensitive

 - The <html> tag is accepted even when it is not at the top level

 - The <head> tag is accepted even when it is not a direct child of
   the <html> tag, but a <html> tag must be an ancestor of the <head>
   tag

 - <link> tags are accepted even when they are not direct children of
   the <head> tag, but a <head> tag must be an ancestor of the <link>
   tag

 - If there is no closing tag for an open <html> or <head> tag, the
   remainder of the document is viewed as being inside of the tag. If
   there is no closing tag for a <link> tag, the link tag is treated
   as a short tag. Exceptions to this rule are that <html> closes
   <html> and <body> or <head> closes <head>

 - Attributes of the <link> tag are not required to be quoted.

 - In the case of duplicated attribute names, the attribute coming
   last in the tag will be the value returned.

 - Any text that does not parse as an attribute within a link tag will
   be ignored. (e.g. <link pumpkin rel='openid.server' /> will ignore
   pumpkin)

 - If there are more than one <html> or <head> tag, the parser only
   looks inside of the first one.

 - The contents of <script> tags are ignored entirely, except unclosed
   <script> tags. Unclosed <script> tags are ignored.

 - Any other invalid markup is ignored, including unclosed SGML
   comments and unclosed <![CDATA[blocks.
"""

__all__ = ['parseLinkAttrs']

import re

# Shared flags for every pattern in this module; with re.VERBOSE the
# whitespace and '#' comments inside the pattern strings are ignored.
flags = ( re.DOTALL # Match newlines with '.'
        | re.IGNORECASE
        | re.VERBOSE # Allow comments and whitespace in patterns
        | re.UNICODE # Make \b respect Unicode word boundaries
        )

# Stuff to remove before we start looking for tags
removed_re = re.compile(r'''
  # Comments
  <!--.*?-->

  # CDATA blocks
| <!\[CDATA\[.*?\]\]>

  # script blocks
| <script\b

  # make sure script is not an XML namespace
  (?!:)

  [^>]*>.*?</script>
''', flags)

# %-template for tagMatcher(): %(tag_name)s and %(closers)s are filled in
# before compilation. Captures the tag's attribute text in 'attrs' and,
# for full (non-short) tags, the enclosed text in 'contents'.
tag_expr = r'''
# Starts with the tag name at a word boundary, where the tag name is
# not a namespace
<%(tag_name)s\b(?!:)

# All of the stuff up to a ">", hopefully attributes.
(?P<attrs>[^>]*?)

(?: # Match a short tag
    />

|   # Match a full tag
    >

    (?P<contents>.*?)

    # Closed by
    (?: # One of the specified close tags
        </?%(closers)s\s*>

        # End of the string
    |   \Z

    )

)
'''

def tagMatcher(tag_name, *close_tags):
    """Compile a regex matching <tag_name>...</tag_name>, where the region
    may also be terminated by any of *close_tags or end-of-string
    (substituted into the tag_expr template above)."""
    if close_tags:
        options = '|'.join((tag_name,) + close_tags)
        closers = '(?:%s)' % (options,)
    else:
        closers = tag_name

    expr = tag_expr % locals()
    return re.compile(expr, flags)

# Must contain at least an open html and an open head tag
html_find = tagMatcher('html')
head_find = tagMatcher('head', 'body')
link_find = re.compile(r'<link\b(?!:)', flags)

# Matches one name=value attribute (quoted or unquoted), or captures the
# '<'/'>' that ends the link tag in the 'end_link' group.
attr_find = re.compile(r'''
# Must start with a sequence of word-characters, followed by an equals sign
(?P<attr_name>\w+)=

# Then either a quoted or unquoted attribute
(?:

 # Match everything that\'s between matching quote marks
 (?P<qopen>["\'])(?P<q_val>.*?)(?P=qopen)
|

 # If the value is not quoted, match up to whitespace
 (?P<unq_val>(?:[^\s<>/]|/(?!>))+)
)

|

(?P<end_link>[<>])
''', flags)

# Entity replacement:
# Only the four entities the OpenID spec allows (see module docstring).
replacements = {
    'amp':'&',
    'lt':'<',
    'gt':'>',
    'quot':'"',
    }

ent_replace = re.compile(r'&(%s);' % '|'.join(replacements.keys()))
def replaceEnt(mo):
    "Replace the entities that are specified by OpenID"
    return replacements.get(mo.group(1), mo.group())

def parseLinkAttrs(html):
    """Find all link tags in a string representing a HTML document and
    return a list of their attributes.

    @param html: the text to parse
    @type html: str or unicode

    @return: A list of dictionaries of attributes, one for each link tag
    @rtype: [[(type(html), type(html))]]
    """
    # Strip comments, CDATA and script blocks before any tag matching.
    stripped = removed_re.sub('', html)
    html_mo = html_find.search(stripped)
    if html_mo is None or html_mo.start('contents') == -1:
        return []

    start, end = html_mo.span('contents')
    head_mo = head_find.search(stripped, start, end)
    if head_mo is None or head_mo.start('contents') == -1:
        return []

    start, end = head_mo.span('contents')
    link_mos = link_find.finditer(stripped, head_mo.start(), head_mo.end())

    matches = []
    for link_mo in link_mos:
        # Skip past the 5 characters of '<link' to the attribute text.
        start = link_mo.start() + 5
        link_attrs = {}
        for attr_mo in attr_find.finditer(stripped, start):
            if attr_mo.lastgroup == 'end_link':
                break

            # Either q_val or unq_val must be present, but not both
            # unq_val is a True (non-empty) value if it is present
            attr_name, q_val, unq_val = attr_mo.group(
                'attr_name', 'q_val', 'unq_val')
            attr_val = ent_replace.sub(replaceEnt, unq_val or q_val)

            # Later duplicates overwrite earlier ones (per module docstring).
            link_attrs[attr_name] = attr_val

        matches.append(link_attrs)

    return matches

def relMatches(rel_attr, target_rel):
    """Does this target_rel appear in the rel_str?"""
    # XXX: TESTME
    # rel is a whitespace-separated list of relationships, compared
    # case-insensitively.
    rels = rel_attr.strip().split()
    for rel in rels:
        rel = rel.lower()
        if rel == target_rel:
            return 1

    return 0

def linkHasRel(link_attrs, target_rel):
    """Does this link have target_rel as a relationship?"""
    # XXX: TESTME
    rel_attr = link_attrs.get('rel')
    return rel_attr and relMatches(rel_attr, target_rel)

def findLinksRel(link_attrs_list, target_rel):
    """Filter the list of link attributes on whether it has target_rel
    as a relationship."""
    # XXX: TESTME
    # NOTE: relies on Python 2 filter() returning a list (findFirstHref
    # below indexes and truth-tests the result).
    matchesTarget = lambda attrs: linkHasRel(attrs, target_rel)
    return filter(matchesTarget, link_attrs_list)

def findFirstHref(link_attrs_list, target_rel):
    """Return the value of the href attribute for the first link tag
    in the list that has target_rel as a relationship."""
    # XXX: TESTME
    matches = findLinksRel(link_attrs_list, target_rel)
    if not matches:
        return None
    first = matches[0]
    return first.get('href')
mit
PeterDaveHello/eden
modules/s3db/event.py
4
116628
# -*- coding: utf-8 -*- """ Sahana Eden Event Model @copyright: 2009-2015 (c) Sahana Software Foundation @license: MIT Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
""" __all__ = ("S3EventModel", "S3IncidentModel", "S3IncidentReportModel", "S3IncidentReportOrganisationGroupModel", "S3IncidentTypeModel", "S3IncidentTypeTagModel", "S3EventActivityModel", "S3EventAlertModel", "S3EventAssetModel", "S3EventCMSModel", "S3EventHRModel", "S3EventImpactModel", #"S3EventIReportModel", "S3EventMapModel", "S3EventOrganisationModel", #"S3EventRequestModel", "S3EventResourceModel", "S3EventSiteModel", "S3EventSitRepModel", "S3EventTaskModel", "S3EventShelterModel", "event_notification_dispatcher", "event_incident_list_layout", "event_rheader", ) from gluon import * from gluon.storage import Storage from ..s3 import * from s3layouts import S3AddResourceLink # ============================================================================= class S3EventModel(S3Model): """ Event Model http://eden.sahanafoundation.org/wiki/BluePrintScenario Events are a high-level term, such as a 'Disaster' Link tables are in separate classes to increase performance & allow the system to be more modular """ names = ("event_event_type", "event_type_id", "event_event", "event_event_id", "event_event_location", "event_event_tag", ) def model(self): T = current.T db = current.db s3 = current.response.s3 configure = self.configure crud_strings = s3.crud_strings define_table = self.define_table settings = current.deployment_settings messages = current.messages NONE = messages["NONE"] AUTOCOMPLETE_HELP = messages.AUTOCOMPLETE_HELP hierarchical_event_types = settings.get_event_types_hierarchical() # --------------------------------------------------------------------- # Event Types / Disaster Types # tablename = "event_event_type" define_table(tablename, Field("name", notnull=True, length=64, label = T("Name"), ), Field("parent", "reference event_event_type", # This form of hierarchy may not work on all Databases label = T("SubType of"), ondelete = "RESTRICT", readable = hierarchical_event_types, writable = hierarchical_event_types, ), s3_comments(), *s3_meta_fields()) 
type_represent = S3Represent(lookup=tablename, translate=True) if hierarchical_event_types: hierarchy = "parent" # Can't be defined in-line as otherwise get a circular reference table = db[tablename] table.parent.represent = type_represent table.parent.requires = IS_EMPTY_OR( IS_ONE_OF(db, "event_event_type.id", type_represent, # If limiting to just 1 level of parent #filterby="parent", #filter_opts=(None,), orderby="event_event_type.name")) event_type_widget = S3HierarchyWidget(lookup = "event_event_type", represent = type_represent, multiple = False, leafonly = True, ) event_type_comment = None else: hierarchy = None event_type_widget = None event_type_comment = None # Uncomment these to use an Autocomplete & not a Dropdown #event_type_widget = S3AutocompleteWidget() #event_typecomment = DIV(_class="tooltip", # _title="%s|%s" % (T("Event Type"), # AUTOCOMPLETE_HELP)) crud_strings[tablename] = Storage( label_create = T("Create Event Type"), title_display = T("Event Type Details"), title_list = T("Event Types"), title_update = T("Edit Event Type"), title_upload = T("Import Event Types"), label_list_button = T("List Event Types"), label_delete_button = T("Delete Event Type"), msg_record_created = T("Event Type added"), msg_record_modified = T("Event Type updated"), msg_record_deleted = T("Event Type removed"), msg_list_empty = T("No Event Types currently registered") ) event_type_id = S3ReusableField("event_type_id", "reference %s" % tablename, label = T("Event Type"), ondelete = "RESTRICT", represent = type_represent, requires = IS_EMPTY_OR( IS_ONE_OF(db, "event_event_type.id", type_represent, orderby="event_event_type.name", sort=True)), sortby = "name", widget = event_type_widget, comment = event_type_comment, ) configure(tablename, deduplicate = self.event_type_duplicate, hierarchy = hierarchy, ) # --------------------------------------------------------------------- # Events / Disasters # # Events can be a way of grouping related Incidents or used standalone # 
# --------------------------------------------------------------------- tablename = "event_event" define_table(tablename, Field("name", # Name could be a code length = 64, # Mayon compatibility label = T("Name"), requires = IS_NOT_EMPTY(), ), event_type_id(), self.org_organisation_id( comment = DIV(_class="tooltip", _title="%s|%s" % (T("Organization"), T("The organization managing this event"))), # Enable in the template if-required readable = False, writable = False, ), Field("exercise", "boolean", default = False, label = T("Exercise?"), represent = lambda opt: "√" if opt else NONE, #comment = DIV(_class="tooltip", # _title="%s|%s" % (T("Exercise"), # Should! # T("Exercises mean all screens have a watermark & all notifications have a prefix."))), ), s3_datetime("start_date", default = "now", label = T("Start Date"), represent = "date", widget = "date", set_min = "#event_event_end_date", ), s3_datetime("end_date", label = T("End Date"), represent = "date", widget = "date", set_max = "#event_event_start_date", ), Field.Method("year", self.event_event_year), Field("closed", "boolean", default = False, label = T("Closed"), represent = s3_yes_no_represent, ), s3_comments(), *s3_meta_fields()) # CRUD strings ADD_EVENT = T("New Event") crud_strings[tablename] = Storage( label_create = ADD_EVENT, title_display = T("Event Details"), title_list = T("Events"), title_update = T("Edit Event"), label_list_button = T("List Events"), label_delete_button = T("Delete Event"), msg_record_created = T("Event added"), msg_record_modified = T("Event updated"), msg_record_deleted = T("Event deleted"), msg_list_empty = T("No Events currently registered")) represent = S3Represent(lookup=tablename) event_id = S3ReusableField("event_id", "reference %s" % tablename, sortby="name", requires = IS_EMPTY_OR( IS_ONE_OF(db, "event_event.id", represent, filterby="closed", filter_opts=(False,), orderby="event_event.name", sort=True)), represent = represent, label = T("Event"), ondelete = "CASCADE", 
                                   # Uncomment these to use an Autocomplete & not a Dropdown
                                   #widget = S3AutocompleteWidget()
                                   #comment = DIV(_class="tooltip",
                                   #              _title="%s|%s" % (T("Event"),
                                   #                                AUTOCOMPLETE_HELP))
                                   )

        # Which levels of Hierarchy are we using?
        levels = current.gis.get_relevant_hierarchy_levels()

        # Filters: hierarchical widget when event types form a hierarchy,
        # otherwise a flat options filter
        if hierarchical_event_types:
            filter_widgets = [S3HierarchyFilter("event_type_id",
                                                label = T("Type"),
                                                #multiple = False,
                                                ),
                              ]
        else:
            filter_widgets = [S3OptionsFilter("event_type_id",
                                              label = T("Type"),
                                              multiple = False,
                                              #options = lambda: \
                                              #    get_s3_filter_opts("event_event_type",
                                              #                       translate = True)
                                              ),
                              ]
        filter_widgets.extend((S3LocationFilter("event_location.location_id",
                                                levels = levels,
                                                label = T("Location"),
                                                ),
                               # @ToDo: Filter for any event which starts or ends within a date range
                               S3DateFilter("start_date",
                                            label = None,
                                            hide_time = True,
                                            input_labels = {"ge": "From", "le": "To"}
                                            ),
                               ))

        # Report axes: event type + each configured location hierarchy level + year
        report_fields = ["event_type_id",
                         ]
        rappend = report_fields.append
        for level in levels:
            rappend("event_location.location_id$%s" % level)
        rappend((T("Year"), "year"))

        report_options = Storage(
            rows = report_fields,
            cols = report_fields,
            fact = [(T("Number of Disasters"), "count(id)")],
            defaults = Storage(
                rows = "event_type_id",
                cols = "event_location.location_id$%s" % levels[0], # Highest-level of hierarchy
                fact = "count(id)",
                totals = True,
                chart = "breakdown:rows",
                table = "collapse",
                ),
            )

        configure(tablename,
                  context = {"location": "event_location.location_id",
                             },
                  deduplicate = self.event_duplicate,
                  # "start_date" needed by the event_event_year virtual field
                  extra_fields = ["start_date"],
                  filter_widgets = filter_widgets,
                  list_fields = ["id",
                                 "name",
                                 "event_type_id$name",
                                 (T("Location"), "location.name"),
                                 "start_date",
                                 "exercise",
                                 "closed",
                                 "comments",
                                 ],
                  list_orderby = "event_event.start_date desc",
                  orderby = "event_event.start_date desc",
                  report_options = report_options,
                  update_onaccept = self.event_update_onaccept,
                  )

        # Components
        self.add_components(tablename,
                            event_incident = "event_id",
                            gis_location = {"link": "event_event_location",
                                            "joinby": "event_id",
                                            "key": "location_id",
                                            "actuate": "hide",
                                            },
                            event_event_location = "event_id",
                            event_post = "event_id",
                            event_event_tag = {"name": "tag",
                                               "joinby": "event_id",
                                               },
                            req_req = "event_id",
                            stats_impact = {"link": "event_event_impact",
                                            "joinby": "event_id",
                                            "key": "impact_id",
                                            #"actuate": "hide",
                                            },
                            event_event_impact = "event_id",
                            )

        # Custom method: send notifications about this Event
        self.set_method("event", "event",
                        method = "dispatch",
                        action = event_notification_dispatcher)

        # ---------------------------------------------------------------------
        # Event Locations (link table)
        #
        tablename = "event_event_location"
        define_table(tablename,
                     event_id(),
                     self.gis_location_id(
                        widget = S3LocationAutocompleteWidget(),
                        requires = IS_LOCATION(),
                        represent = self.gis_LocationRepresent(sep=", "),
                        comment = S3AddResourceLink(c="gis",
                                                    f="location",
                                                    label = T("Create Location"),
                                                    title=T("Location"),
                                                    tooltip=AUTOCOMPLETE_HELP),
                        ),
                     *s3_meta_fields())

        # ---------------------------------------------------------------------
        # Event Tags
        # - Key-Value extensions
        # - can be used to identify a Source
        # - can be used to add extra attributes (e.g. Area, Population)
        # - can link Events to other Systems, such as:
        #   * GLIDE (http://glidenumber.net/glide/public/about.jsp)
        #   * OCHA Financial Tracking System, for HXL (http://fts.unocha.org/api/v1/emergency/year/2013.xml)
        #   * Mayon
        #   * WebEOC
        # - can be a Triple Store for Semantic Web support
        #
        tablename = "event_event_tag"
        define_table(tablename,
                     event_id(),
                     # key is a reserved word in MySQL
                     Field("tag", label=T("Key")),
                     Field("value", label=T("Value")),
                     s3_comments(),
                     *s3_meta_fields())

        configure(tablename,
                  deduplicate = self.event_event_tag_deduplicate,
                  )

        # ---------------------------------------------------------------------
        # Pass names back to global scope (s3.*)
        #
        return dict(event_type_id = event_type_id,
                    event_event_id = event_id,
                    )

    # -------------------------------------------------------------------------
    @staticmethod
    def defaults():
        """
            Return safe defaults in case the model has been deactivated.
        """

        dummy = S3ReusableField("dummy_id", "integer",
                                readable = False,
                                writable = False)

        return dict(event_event_id = lambda **attr: dummy("event_id"),
                    event_type_id = lambda **attr: dummy("event_type_id"),
                    )

    # =============================================================================
    @staticmethod
    def event_event_year(row):
        """
            Virtual field for event_event - returns the year of this entry
            used for report.

            Requires "start_date" to be in extra_fields

            @param row: the Row
        """

        try:
            thisdate = row["event_event.start_date"]
        except AttributeError:
            # start_date not loaded into this Row
            return current.messages["NONE"]
        if not thisdate:
            return current.messages["NONE"]

        return thisdate.year

    # -------------------------------------------------------------------------
    @staticmethod
    def event_update_onaccept(form):
        """
            When an Event is updated, check for closure
        """

        form_vars = form.vars
        if form_vars.closed:
            event = form_vars.id
            # Ensure this event isn't active in the session
            s3 = current.session.s3
            if s3.event == event:
                s3.event = None

            # @ToDo: Hide the Event from the Map menu
            #gis = current.gis
            #config = gis.get_config()
            #if config == config.config_id:
            #    # Reset to the Default Map
            #    gis.set_config(0)

            # Expire all related Posts
            db = current.db
            ltable = current.s3db.event_post
            table = db.cms_post
            rows = db(ltable.event_id == event).select(ltable.post_id)
            for row in rows:
                db(table.id == row.post_id).update(expired=True)

    # -------------------------------------------------------------------------
    @staticmethod
    def event_duplicate(item):
        """
            Deduplication of Events
        """

        table = item.table
        data = item.data
        query = None
        # Mandatory checks: Name &/or Start Date
        for field in ("name", "start_date"):
            value = data.get(field, None)
            if value:
                q = (table[field] == value)
                if query:
                    query &= q
                else:
                    query = q
        if not query:
            # Neither Name nor Start Date available: cannot deduplicate
            return

        # Optional check: Include Type
        event_type_id = data.get("event_type_id", None)
        if event_type_id:
            query &= (table.event_type_id == event_type_id)

        duplicate = current.db(query).select(table.id,
                                             limitby=(0, 1)).first()
        if duplicate:
            item.id = duplicate.id
            item.method = item.METHOD.UPDATE

    # -------------------------------------------------------------------------
    @staticmethod
    def event_type_duplicate(item):
        """
            Deduplication of Event Types
        """

        data = item.data
        name = data.get("name", None)
        if not name:
            return

        table = item.table
        query = (table.name == name)
        duplicate = current.db(query).select(table.id,
                                             limitby=(0, 1)).first()
        if duplicate:
            item.id = duplicate.id
            item.method = item.METHOD.UPDATE

    # -------------------------------------------------------------------------
    @staticmethod
    def event_event_tag_deduplicate(item):
        """
            Deduplication of Event Tags (match on tag + event, case-insensitive)
        """

        data = item.data
        tag = data.get("tag", None)
        event = data.get("event_id", None)
        if not tag or not event:
            return

        table = item.table
        query = (table.tag.lower() == tag.lower()) & \
                (table.event_id == event)
        duplicate = current.db(query).select(table.id,
                                             limitby=(0, 1)).first()
        if duplicate:
            item.id = duplicate.id
            item.method = item.METHOD.UPDATE

# =============================================================================
class S3IncidentModel(S3Model):
    """
        Incidents
         - the primary unit at which things are managed:
            Scenarios are designed
            Resources are assigned
            Situation Reports are made
    """

    names = ("event_incident",
             "event_incident_id",
             )

    def model(self):

        T = current.T
        db = current.db
        settings = current.deployment_settings
        set_method = self.set_method

        # ---------------------------------------------------------------------
        # Incidents
        #
        #  Incidents are the unit at which responses are managed.
        #  They can be Exercises or real Incidents.
        #  They can be instantiated from Scenario Templates.
        #
        tablename = "event_incident"
        self.define_table(tablename,
                          self.super_link("doc_id", "doc_entity"),
                          # Enable in template if-required
                          self.event_event_id(readable = False,
                                              writable = False,
                                              ),
                          self.event_incident_type_id(),
                          self.scenario_scenario_id(),
                          Field("name", notnull=True, # Name could be a code
                                length=64,
                                label = T("Name"),
                                ),
                          Field("exercise", "boolean",
                                label = T("Exercise?"),
                                represent = lambda opt: "√" if opt else None,
                                #comment = DIV(_class="tooltip",
                                #              _title="%s|%s" % (T("Exercise"),
                                                                 # Should!
# T("Exercises mean all screens have a watermark & all notifications have a prefix."))), ), s3_datetime(name="zero_hour", default = "now", label = T("Zero Hour"), comment = DIV(_class="tooltip", _title="%s|%s" % (T("Zero Hour"), T("The time at which the Incident started."))), ), Field("closed", "boolean", default = False, label = T("Closed"), represent = s3_yes_no_represent, ), # Enable this field in templates if-required self.org_organisation_id(label = T("Lead Organization"), # Lead Responder readable = False, writable = False, ), self.gis_location_id(), s3_comments(), *s3_meta_fields()) current.response.s3.crud_strings[tablename] = Storage( label_create = T("Create Incident"), title_display = T("Incident Details"), title_list = T("Incidents"), title_update = T("Edit Incident"), label_list_button = T("List Incidents"), label_delete_button = T("Delete Incident"), msg_record_created = T("Incident added"), msg_record_modified = T("Incident updated"), msg_record_deleted = T("Incident removed"), msg_list_empty = T("No Incidents currently registered")) represent = S3Represent(lookup=tablename) incident_id = S3ReusableField("incident_id", "reference %s" % tablename, label = T("Incident"), ondelete = "RESTRICT", represent = represent, requires = IS_EMPTY_OR( IS_ONE_OF(db, "event_incident.id", represent, filterby="closed", filter_opts=(False,), orderby="event_incident.name", sort=True)), sortby = "name", # Uncomment these to use an Autocomplete & not a Dropdown #widget = S3AutocompleteWidget() #comment = DIV(_class="tooltip", # _title="%s|%s" % (T("Incident"), # current.messages.AUTOCOMPLETE_HELP)) ) # @ToDo: Move this workflow into Templates? 
# - or useful to have good defaults if settings.has_module("project"): create_next_url = URL(args=["[id]", "task"]) elif settings.has_module("hrm"): create_next_url = URL(args=["[id]", "human_resource"]) elif settings.has_module("asset"): create_next_url = URL(args=["[id]", "asset"]) else: create_next_url = URL(args=["[id]", "site"]) self.configure(tablename, create_next = create_next_url, create_onaccept = self.incident_create_onaccept, deduplicate = self.incident_duplicate, list_fields = ["id", "name", "incident_type_id", "exercise", "closed", "comments", ], list_layout = event_incident_list_layout, # Most recent Incident first orderby = "event_incident.zero_hour desc", super_entity = "doc_entity", update_onaccept = self.incident_update_onaccept, ) # Components self.add_components(tablename, event_asset = "incident_id", asset_asset = {"link": "event_asset", "joinby": "incident_id", "key": "asset_id", #"actuate": "embed", "actuate": "hide", #"autocomplete": "number", "autodelete": False, }, event_human_resource = "incident_id", hrm_human_resource = ({"link": "event_human_resource", "joinby": "incident_id", "key": "human_resource_id", "actuate": "hide", "autodelete": False, }, {"name": "assign", "link": "event_human_resource", "joinby": "incident_id", "key": "human_resource_id", "actuate": "hide", "autodelete": False, }, ), event_organisation = "incident_id", org_organisation = {"link": "event_organisation", "joinby": "incident_id", "key": "organisation_id", #"actuate": "embed", "actuate": "hide", #"autocomplete": "name", "autodelete": False, }, event_post = "incident_id", event_site = "incident_id", event_sitrep = {"name": "incident_sitrep", "joinby": "incident_id", }, doc_sitrep = {"link": "event_sitrep", "joinby": "incident_id", "key": "sitrep_id", "actuate": "replace", #"autocomplete": "name", "autodelete": True, }, event_task = {"name": "incident_task", "joinby": "incident_id", }, project_task = {"link": "event_task", "joinby": "incident_id", "key": "task_id", 
                                            "actuate": "replace",
                                            #"autocomplete": "name",
                                            "autodelete": True,
                                            },
                            gis_config = {"link": "event_config",
                                          "joinby": "incident_id",
                                          "multiple": False,
                                          "key": "config_id",
                                          "actuate": "replace",
                                          "autocomplete": "name",
                                          "autodelete": True,
                                          },
                            stats_impact = {"link": "event_event_impact",
                                            "joinby": "incident_id",
                                            "key": "impact_id",
                                            "actuate": "replace",
                                            "autodelete": True,
                                            },
                            )

        # Custom Method to Assign HRs
        set_method("event", "incident",
                   method = "assign",
                   action = self.hrm_AssignMethod(component="human_resource"))

        # Custom Method to Dispatch HRs
        set_method("event", "incident",
                   method = "dispatch",
                   action = event_notification_dispatcher)

        # Pass names back to global scope (s3.*)
        return dict(event_incident_id = incident_id,
                    )

    # -------------------------------------------------------------------------
    @staticmethod
    def defaults():
        """
            Return safe defaults in case the model has been deactivated.
        """

        dummy = S3ReusableField("dummy_id", "integer",
                                readable = False,
                                writable = False)

        return dict(event_incident_id = lambda **attr: dummy("incident_id"),
                    )

    # ---------------------------------------------------------------------
    @staticmethod
    def incident_create_onaccept(form):
        """
            When an Incident is instantiated, populate defaults:
            - remember Incident/Event in the session
            - copy Sites/HRs/Assets/Tasks from a source Scenario (if any)
            - create & activate a Map Config for the Incident
        """

        form_vars = form.vars
        incident = form_vars.get("id", None)
        if incident:
            # Set the Incident in the session
            current.session.s3.incident = incident
        event = form_vars.get("event_id", None)
        if event:
            # Set the Event in the session
            current.session.s3.event = event
        s3db = current.s3db
        db = current.db
        ctable = s3db.gis_config
        mapconfig = None
        scenario = form_vars.get("scenario_id", None)
        if scenario:
            # We have been instantiated from a Scenario, so
            # copy all resources from the Scenario to the Incident

            # Read the source resource tables
            table = db.scenario_scenario
            otable = s3db.scenario_organisation
            stable = s3db.scenario_site
            mtable = s3db.scenario_config
            query = (table.id == scenario)
            squery = query & (stable.scenario_id == table.id)
            mquery = query & (mtable.scenario_id == table.id) & \
                             (ctable.id == mtable.config_id)
            facilities = db(squery).select(stable.site_id)
            mapconfig = db(mquery).select(ctable.ALL).first()

            # Write them to their respective destination tables
            stable = s3db.event_site
            for row in facilities:
                stable.insert(incident_id=incident,
                              site_id=row.site_id)

            # Modules which can be disabled
            htable = s3db.table("scenario_human_resource", None) # @ToDo: Change to Positions
            if htable:
                hquery = query & (htable.scenario_id == table.id)
                hrms = db(hquery).select(htable.human_resource_id)
                htable = s3db.event_human_resource
                for row in hrms:
                    htable.insert(incident_id=incident,
                                  human_resource_id=row.human_resource_id)

            atable = s3db.table("scenario_asset", None)
            if atable:
                aquery = query & (atable.scenario_id == table.id)
                assets = db(aquery).select(atable.asset_id)
                atable = s3db.event_asset
                for row in assets:
                    atable.insert(incident_id=incident,
                                  asset_id=row.asset_id)

            ttable = s3db.table("scenario_task", None)
            if ttable:
                tquery = query & (ttable.scenario_id == table.id)
                tasks = db(tquery).select(ttable.task_id)
                ttable = s3db.event_task
                for row in tasks:
                    ttable.insert(incident_id=incident,
                                  task_id=row.task_id)

        if mapconfig:
            # Incident's Map Config is a copy of the Default / Scenario's
            # so that it can be changed within the Incident without
            # contaminating the base one
            del mapconfig["id"]
            del mapconfig["uuid"]
            mapconfig["name"] = form_vars.name
            config = ctable.insert(**mapconfig.as_dict())
            mtable = db.event_config
            mtable.insert(incident_id=incident,
                          config_id=config)
            # Activate this config
            current.gis.set_config(config)
            # @ToDo: Add to GIS Menu? Separate Menu?
        else:
            # We have been created without a Scenario or from a Scenario without a Map Config
            # Create a new Map Config
            config = ctable.insert(name = form_vars.name)
            mtable = db.event_config
            mtable.insert(incident_id=incident,
                          config_id=config)
            # Activate this config
            current.gis.set_config(config)
            # Viewport can be saved from the Map's toolbar
            # @ToDo: Add to GIS Menu? Separate Menu?

    # -------------------------------------------------------------------------
    @staticmethod
    def incident_update_onaccept(form):
        """
            When an Incident is updated, check for closure
        """

        form_vars = form.vars
        if form_vars.closed:
            incident = form_vars.id
            # Ensure this incident isn't active in the session
            s3 = current.session.s3
            if s3.incident == incident:
                s3.incident = None

            # @ToDo: Hide the Incident from the Map menu
            #gis = current.gis
            #config = gis.get_config()
            #if config == config.config_id:
            #    # Reset to the Default Map
            #    gis.set_config(0)

            # Expire all related Posts
            db = current.db
            ltable = current.s3db.event_post
            table = db.cms_post
            rows = db(ltable.incident_id == incident).select(ltable.post_id)
            for row in rows:
                db(table.id == row.post_id).update(expired=True)

    # -------------------------------------------------------------------------
    @staticmethod
    def incident_duplicate(item):
        """
            Deduplication of Incidents
            - match on name; when an event_id is present, also accept records
              which have the same event or no event at all
        """

        data = item.data
        name = data.get("name", None)
        event_id = data.get("event_id", None)

        table = item.table
        query = (table.name == name)
        if event_id:
            query = query & ((table.event_id == event_id) | \
                             (table.event_id == None))

        duplicate = current.db(query).select(table.id,
                                             limitby=(0, 1)).first()
        if duplicate:
            item.id = duplicate.id
            item.method = item.METHOD.UPDATE

# =============================================================================
class S3IncidentReportModel(S3Model):
    """
        Incident Reports
         - reports about incidents
         - useful for busy call centres which may receive many reports about a
           single incident and may receive calls which need logging but don't
           get responded to as an Incident (e.g. Out of Scope)

        @ToDo: Deprecate IRS module by porting functionality here
    """

    names = ("event_incident_report",
             )

    def model(self):

        T = current.T

        # ---------------------------------------------------------------------
        # Incident Reports
        #
        tablename = "event_incident_report"
        self.define_table(tablename,
                          self.super_link("doc_id", "doc_entity"),
                          # @ToDo: Use link tables?
                          #self.event_event_id(ondelete = "CASCADE"),
                          #self.event_incident_id(ondelete = "CASCADE"),
                          s3_datetime(default="now"),
                          Field("name", notnull=True,
                                label = T("Title"),
                                ),
                          self.event_incident_type_id(),
                          self.gis_location_id(),
                          self.pr_person_id(label = T("Reported By"),
                                            ),
                          Field("closed", "boolean",
                                default = False,
                                label = T("Closed"),
                                represent = s3_yes_no_represent,
                                ),
                          s3_comments(),
                          *s3_meta_fields())

        current.response.s3.crud_strings[tablename] = Storage(
            label_create = T("Create Incident Report"),
            title_display = T("Incident Report Details"),
            title_list = T("Incident Reports"),
            title_update = T("Edit Incident Report"),
            label_list_button = T("List Incident Reports"),
            label_delete_button = T("Remove Incident Report from this event"),
            msg_record_created = T("Incident Report added"),
            msg_record_modified = T("Incident Report updated"),
            msg_record_deleted = T("Incident Report removed"),
            msg_list_empty = T("No Incident Reports currently registered for this event"))

        filter_widgets = [S3OptionsFilter("incident_type_id",
                                          label = T("Type"),
                                          ),
                          ]

        self.configure(tablename,
                       filter_widgets = filter_widgets,
                       super_entity = "doc_entity",
                       )

        # Components
        self.add_components(tablename,
                            # Coalitions
                            org_group = {"link": "event_incident_report_group",
                                         "joinby": "incident_report_id",
                                         "key": "group_id",
                                         "actuate": "hide",
                                         },
                            # Format for InlineComponent/filter_widget
                            event_incident_report_group = "incident_report_id",
                            )

        # Pass names back to global scope (s3.*)
        return {}

# =============================================================================
class S3EventResourceModel(S3Model):
    """
        Resources Assigned to
        Events/Incidents
        - depends on Stats module

        Whilst there is a Quantity option, this is envisaged to usually be 1
        - these are typically named, trackable resources

        @ToDo: Optional link to org_resource to e.g. mark resources as assigned
    """

    names = ("event_resource",)

    def model(self):

        if not current.deployment_settings.has_module("stats"):
            current.log.warning("Event Resource Model needs Stats module enabling")
            return {}

        T = current.T
        super_link = self.super_link

        status_opts = {1: T("Available"),
                       2: T("Assigned"),
                       3: T("En Route"),
                       }

        # ---------------------------------------------------------------------
        # Resources
        #
        tablename = "event_resource"
        self.define_table(tablename,
                          # Instance
                          super_link("data_id", "stats_data"),
                          super_link("track_id", "sit_trackable"),
                          # Resources are normally managed at the Incident level
                          #self.event_event_id(ondelete = "CASCADE",
                          #                    # enable in template if-required
                          #                    readable = False,
                          #                    writable = False,
                          #                    ),
                          self.event_incident_id(ondelete = "CASCADE"),
                          # This is a component, so needs to be a super_link
                          # - can't override field name, ondelete or requires
                          super_link("parameter_id", "stats_parameter",
                                     empty = False,
                                     instance_types = ("org_resource_type",),
                                     label = T("Resource Type"),
                                     represent = S3Represent(lookup="stats_parameter",
                                                             translate=True),
                                     readable = True,
                                     writable = True,
                                     comment = S3AddResourceLink(c="org",
                                                                 f="resource_type",
                                                                 vars = dict(child = "parameter_id"),
                                                                 title=T("Create Resource Type")),
                                     ),
                          Field("status", "integer",
                                label = T("Status"),
                                represent = lambda opt: \
                                    status_opts.get(opt) or current.messages.UNKNOWN_OPT,
                                requires = IS_IN_SET(status_opts),
                                ),
                          Field("name",
                                label = T("Name"),
                                ),
                          Field("value", "integer",
                                default = 1,
                                label = T("Quantity"),
                                requires = IS_INT_IN_RANGE(0, 999),
                                ),
                          self.org_organisation_id(),
                          self.pr_person_id(label = T("Contact")),
                          # @ToDo: Make use of S3Track:
                          # Base Location: Enable field only in Create form
                          self.gis_location_id(#readable = False,
                                               #writable = False,
                                               ),
                          #Field.Method("location",
                          #             lambda row: self.sit_location(row, tablename)),
                          # @ToDo: Deprecate once we start using S3Track
                          s3_datetime(default = "now"),
                          s3_comments(),
                          *s3_meta_fields())

        # CRUD strings
        current.response.s3.crud_strings[tablename] = Storage(
            label_create=T("Add Resource"),
            title_display=T("Resource Details"),
            title_list=T("Resources"),
            title_update=T("Edit Resource"),
            title_map=T("Map of Resources"),
            title_upload=T("Import Resources"),
            label_list_button=T("List Resources"),
            label_delete_button=T("Delete Resource"),
            msg_record_created=T("Resource added"),
            msg_record_modified=T("Resource updated"),
            msg_record_deleted=T("Resource deleted"),
            msg_list_empty=T("No Resources assigned to Incident"))

        # Custom Methods
        #self.set_method("event", "resource",
        #                method = "check-in",
        #                action = S3CheckInMethod())

        # List Fields
        #list_fields = ["id",
        #               "incident_id",
        #               "parameter_id",
        #               "status",
        #               "name",
        #               "value",
        #               "organisation_id",
        #               "person_id",
        #               "location_id",
        #               #(T("Location"), "location"),
        #               "comments",
        #               ]

        # Filter Widgets
        filter_widgets = [S3TextFilter(["organisation_id$name",
                                        "location_id",
                                        "parameter_id$name",
                                        "comments",
                                        ],
                                       label = T("Search"),
                                       ),
                          S3OptionsFilter("parameter_id",
                                          label = T("Type"),
                                          ),
                          ]

        # Report options
        report_fields = ["incident_id",
                         "organisation_id",
                         "parameter_id",
                         ]

        report_options = Storage(rows = report_fields,
                                 cols = report_fields,
                                 fact = [(T("Total Number of Resources"), "sum(value)"),
                                         (T("Number of Resources"), "count(value)"),
                                         ],
                                 defaults = Storage(rows = "incident_id",
                                                    cols = "parameter_id",
                                                    fact = "sum(value)",
                                                    totals = True,
                                                    chart = "barchart:rows",
                                                    #table = "collapse",
                                                    )
                                 )

        self.configure(tablename,
                       context = {#"event": "event_id",
                                  "incident": "incident_id",
                                  "location": "location_id",
                                  "organisation": "organisation_id",
                                  },
                       filter_widgets = filter_widgets,
                       #list_fields = list_fields,
                       list_layout = event_resource_list_layout,
                       orderby = "event_resource.date desc",
                       report_options = report_options,
                       super_entity = ("stats_data", "sit_trackable"),
                       )

        # Pass names back to global scope (s3.*)
        return {}

# =============================================================================
class S3IncidentReportOrganisationGroupModel(S3Model):
    """
        Links between Incident Reports & Organisation Groups
    """

    names = ("event_incident_report_group",
             )

    def model(self):

        represent = S3Represent(lookup="event_incident_report")

        # ---------------------------------------------------------------------
        # Incident Reports <> Coalitions link table
        #
        tablename = "event_incident_report_group"
        self.define_table(tablename,
                          Field("incident_report_id", self.event_incident_report,
                                represent = represent,
                                requires = IS_ONE_OF(current.db, "event_incident_report.id",
                                                     represent,
                                                     sort=True,
                                                     ),
                                ),
                          self.org_group_id(empty=False),
                          *s3_meta_fields())

        # Pass names back to global scope (s3.*)
        return {}

# =============================================================================
class S3IncidentTypeModel(S3Model):
    """
        Incident Types
    """

    names = ("event_incident_type",
             "event_incident_type_id",
             )

    def model(self):

        T = current.T
        db = current.db

        hierarchical_incident_types = \
            current.deployment_settings.get_incident_types_hierarchical()

        # ---------------------------------------------------------------------
        # Incident Types
        #
        tablename = "event_incident_type"
        self.define_table(tablename,
                          Field("name", notnull=True,
                                length=64,
                                label = T("Name"),
                                ),
                          Field("parent", "reference event_incident_type", # This form of hierarchy may not work on all Databases
                                label = T("SubType of"),
                                ondelete = "RESTRICT",
                                readable = hierarchical_incident_types,
                                writable = hierarchical_incident_types,
                                ),
                          s3_comments(),
                          *s3_meta_fields())

        type_represent = S3Represent(lookup=tablename, translate=True)

        if hierarchical_incident_types:
            hierarchy = "parent"
            # Can't be defined in-line as otherwise get a circular reference
            table = db[tablename]
            table.parent.represent = type_represent
            table.parent.requires = IS_EMPTY_OR(
                                        IS_ONE_OF(db, "event_incident_type.id",
                                                  type_represent,
                                                  # If limiting to just 1 level of parent
                                                  #filterby="parent",
                                                  #filter_opts=(None,),
                                                  orderby="event_incident_type.name"))
            incident_type_widget = S3HierarchyWidget(lookup = "event_incident_type",
                                                     represent = type_represent,
                                                     multiple = False,
                                                     leafonly = True,
                                                     )
            incident_type_comment = None
        else:
            hierarchy = None
            incident_type_widget = None
            incident_type_comment = None
            # Uncomment these to use an Autocomplete & not a Dropdown
            #incident_type_widget = S3AutocompleteWidget()
            #incident_type_comment = DIV(_class="tooltip",
            #                            _title="%s|%s" % (T("Event Type"),
            #                                              AUTOCOMPLETE_HELP))

        current.response.s3.crud_strings[tablename] = Storage(
            label_create = T("Create Incident Type"),
            title_display = T("Incident Type Details"),
            title_list = T("Incident Types"),
            title_update = T("Edit Incident Type"),
            title_upload = T("Import Incident Types"),
            label_list_button = T("List Incident Types"),
            label_delete_button = T("Remove Incident Type from this event"),
            msg_record_created = T("Incident Type added"),
            msg_record_modified = T("Incident Type updated"),
            msg_record_deleted = T("Incident Type removed"),
            #msg_list_empty = T("No Incident Types currently registered in this event")
            msg_list_empty = T("No Incident Types currently registered")
            )

        incident_type_id = S3ReusableField("incident_type_id", "reference %s" % tablename,
                                           label = T("Incident Type"),
                                           ondelete = "RESTRICT",
                                           represent = type_represent,
                                           requires = IS_EMPTY_OR(
                                                        IS_ONE_OF(db, "event_incident_type.id",
                                                                  type_represent,
                                                                  orderby="event_incident_type.name",
                                                                  sort=True)),
                                           sortby = "name",
                                           widget = incident_type_widget,
                                           comment = incident_type_comment,
                                           )
        self.configure(tablename,
                       deduplicate = self.incident_type_duplicate,
                       hierarchy = hierarchy,
                       )

        # Pass names back to global scope (s3.*)
        return dict(event_incident_type_id = incident_type_id,
                    )

    # -------------------------------------------------------------------------
    @staticmethod
    def defaults():
        """
            Return safe defaults in case the model has been deactivated.
""" dummy = S3ReusableField("dummy_id", "integer", readable = False, writable = False) return dict(event_incident_type_id = lambda **attr: dummy("incident_type_id"), ) # --------------------------------------------------------------------- @staticmethod def incident_type_duplicate(item): """ Deduplication of Incident Types """ data = item.data name = data.get("name", None) if not name: return table = item.table query = (table.name.lower() == name.lower()) duplicate = current.db(query).select(table.id, limitby=(0, 1)).first() if duplicate: item.id = duplicate.id item.method = item.METHOD.UPDATE # ============================================================================= class S3IncidentTypeTagModel(S3Model): """ Incident Type Tags - Key-Value extensions - can be used to provide conversions to external systems, such as: * CAP * NIMS - can be a Triple Store for Semantic Web support """ names = ("event_incident_type_tag",) def model(self): T = current.T # --------------------------------------------------------------------- # Incident Type Tags # tablename = "event_incident_type_tag" self.define_table(tablename, self.event_incident_type_id(), # key is a reserved word in MySQL Field("tag", label=T("Key")), Field("value", label=T("Value")), s3_comments(), *s3_meta_fields()) # Pass names back to global scope (s3.*) return {} # ============================================================================= class S3EventActivityModel(S3Model): """ Link Project Activities to Events """ names = ("event_activity", ) def model(self): if not current.deployment_settings.has_module("project"): return None tablename = "event_activity" self.define_table(tablename, self.event_event_id(empty = False, ondelete = "CASCADE"), #self.event_incident_id(ondelete = "CASCADE"), self.project_activity_id(empty = False), *s3_meta_fields()) # Pass names back to global scope (s3.*) return {} # ============================================================================= class 
S3EventAlertModel(S3Model): """ Alerts for Events/Incidents @ToDo: Optional links to CAP broker & Messaging Push: like deploy_alert? via deploy_alert? Pull: Subscription/Notification """ names = ("event_alert", ) def model(self): T = current.T crud_strings = current.response.s3.crud_strings define_table = self.define_table tablename = "event_alert" define_table(tablename, # PE representing its Recipients self.super_link("pe_id", "pr_pentity"), self.event_event_id(ondelete = "CASCADE"), #self.event_incident_id(ondelete = "CASCADE"), Field("name", comment = T("This isn't visible to the recipients"), label = T("Name"), ), Field("subject", length=78, # RFC 2822 comment = T("The subject of the alert (optional)"), label = T("Subject"), ), Field("body", "text", label = T("Message"), requires = IS_NOT_EMPTY(), represent = lambda v: v or current.messages["NONE"], ), # Link to the Message once sent self.msg_message_id(readable = False), *s3_meta_fields()) # CRUD Strings crud_strings[tablename] = Storage( label_create = T("Create Alert"), title_display = T("Alert Details"), title_list = T("Alerts"), title_update = T("Edit Alert Details"), title_upload = T("Import Recipients"), label_list_button = T("List Alerts"), label_delete_button = T("Delete Alert"), msg_record_created = T("Alert added"), msg_record_modified = T("Alert Details updated"), msg_record_deleted = T("Alert deleted"), msg_list_empty = T("No Alerts currently defined")) # Custom method to send alerts #self.set_method("event", "alert", # method = "send", # action = self.event_alert_send) # Reusable field represent = S3Represent(lookup=tablename) alert_id = S3ReusableField("alert_id", "reference %s" % tablename, label = T("Alert"), ondelete = "CASCADE", represent = represent, requires = IS_ONE_OF(db, "event_alert.id", represent), ) # --------------------------------------------------------------------- # Recipients of the Alert # tablename = "event_alert_recipient" define_table(tablename, alert_id(), 
                     self.pr_person_id(empty = False,
                                       label = T("Recipient")),
                     *s3_meta_fields())

        # CRUD Strings
        crud_strings[tablename] = Storage(
            label_create = T("Add Recipient"),
            title_display = T("Recipient Details"),
            title_list = T("Recipients"),
            title_update = T("Edit Recipient Details"),
            title_upload = T("Import Recipients"),
            label_list_button = T("List Recipients"),
            label_delete_button = T("Delete Recipient"),
            msg_record_created = T("Recipient added"),
            msg_record_modified = T("Recipient Details updated"),
            msg_record_deleted = T("Recipient deleted"),
            msg_list_empty = T("No Recipients currently defined"))

        # Pass names back to global scope (s3.*)
        return {}

# =============================================================================
class S3EventAssetModel(S3Model):
    """
        Link Assets to Incidents
    """

    names = ("event_asset",
             )

    def model(self):

        T = current.T

        status_opts = {1: T("Alerted"),
                       2: T("Standing By"),
                       3: T("Active"),
                       4: T("Deactivated"),
                       5: T("Unable to activate"),
                       }

        # ---------------------------------------------------------------------
        # Assets
        # @ToDo: Search Widget

        tablename = "event_asset"
        self.define_table(tablename,
                          # Instance table
                          self.super_link("cost_item_id", "budget_cost_item"),
                          self.event_incident_id(empty = False,
                                                 ondelete = "CASCADE"),
                          # @ToDo: Notification
                          self.asset_asset_id(empty = False,
                                              ondelete = "RESTRICT",
                                              ),
                          Field("status", "integer",
                                default = 1,
                                represent = lambda opt: \
                                    status_opts.get(opt, current.messages.UNKNOWN_OPT),
                                requires = IS_IN_SET(status_opts),
                                ),
                          *s3_meta_fields())

        current.response.s3.crud_strings[tablename] = Storage(
            label_create = T("Assign Asset"),
            title_display = T("Asset Details"),
            title_list = T("Assets"),
            title_update = T("Edit Asset"),
            label_list_button = T("List Assets"),
            label_delete_button = T("Remove Asset from this incident"),
            msg_record_created = T("Asset added"),
            msg_record_modified = T("Asset updated"),
            msg_record_deleted = T("Asset removed"),
            msg_list_empty = T("No Assets currently registered in this incident"))

        # Inline Budget allocation only when the budget module is enabled
        if current.deployment_settings.has_module("budget"):
            crud_form = S3SQLCustomForm("incident_id",
                                        "asset_id",
                                        "status",
                                        S3SQLInlineComponent("allocation",
                                                             label = T("Budget"),
                                                             fields = ["budget_id",
                                                                       "start_date",
                                                                       "end_date",
                                                                       "daily_cost",
                                                                       ],
                                                             ),
                                        )
        else:
            crud_form = None

        self.configure(tablename,
                       crud_form = crud_form,
                       deduplicate = self.event_asset_duplicate,
                       list_fields = [#"incident_id", # Not being dropped in component view
                                      "asset_id",
                                      "status",
                                      "allocation.budget_id",
                                      "allocation.start_date",
                                      "allocation.end_date",
                                      "allocation.daily_cost",
                                      ],
                       super_entity = "budget_cost_item",
                       )

        # Pass names back to global scope (s3.*)
        return {}

    # -------------------------------------------------------------------------
    @staticmethod
    def event_asset_duplicate(item):
        """
            Import item de-duplication (match on incident + asset)
        """

        data = item.data
        incident_id = data.get("incident_id")
        asset_id = data.get("asset_id")
        if incident_id and asset_id:
            table = item.table
            query = (table.incident_id == incident_id) & \
                    (table.asset_id == asset_id)

            duplicate = current.db(query).select(table.id,
                                                 limitby=(0, 1)).first()
            if duplicate:
                item.id = duplicate.id
                item.method = item.METHOD.UPDATE
        return

# =============================================================================
class S3EventCMSModel(S3Model):
    """
        Link CMS Posts to Events &/or Incidents
    """

    names = ("event_post",
             "event_post_incident_type",
             )

    def model(self):

        #T = current.T

        # ---------------------------------------------------------------------
        # Link table between Posts & Events/Incidents
        tablename = "event_post"
        self.define_table(tablename,
                          self.event_event_id(ondelete = "CASCADE"),
                          self.event_incident_id(ondelete = "CASCADE"),
                          self.cms_post_id(empty = False,
                                           ondelete = "CASCADE",
                                           ),
                          *s3_meta_fields())

        #current.response.s3.crud_strings[tablename] = Storage(
        #    label_create = T("Tag Post"),
        #    title_display = T("Tag Details"),
        #    title_list = T("Tags"),
        #    title_update = T("Edit Tag"),
        #    label_list_button = T("List Tags"),
        #    label_delete_button = T("Remove Tag for this Event from this Post"),
        #    msg_record_created = T("Tag added"),
        #    msg_record_modified = T("Tag updated"),
        #    msg_record_deleted = T("Tag removed"),
        #    msg_list_empty = T("No Posts currently tagged to this event"))

        # ---------------------------------------------------------------------
        # Link table between Posts & Incident Types
        tablename = "event_post_incident_type"
        self.define_table(tablename,
                          self.cms_post_id(empty = False,
                                           ondelete = "CASCADE",
                                           ),
                          self.event_incident_type_id(empty = False,
                                                      ondelete = "CASCADE",
                                                      ),
                          *s3_meta_fields())

        # Pass names back to global scope (s3.*)
        return {}

# =============================================================================
class S3EventHRModel(S3Model):
    """
        Link Human Resources to Events/Incidents
        @ToDo: Replace with Deployment module
    """

    names = ("event_human_resource",
             )

    def model(self):

        T = current.T

        status_opts = {1: T("Alerted"),
                       2: T("Standing By"),
                       3: T("Active"),
                       4: T("Deactivated"),
                       5: T("Unable to activate"),
                       }

        # ---------------------------------------------------------------------
        # Staff/Volunteers
        # @ToDo: Use Positions, not individual HRs
        # @ToDo: Search Widget

        tablename = "event_human_resource"
        self.define_table(tablename,
                          # Instance table
                          self.super_link("cost_item_id", "budget_cost_item"),
                          #self.event_event_id(ondelete = "CASCADE",
                          #                    # Enable in template if-desired
                          #                    readable = False,
                          #                    writable = False,
                          #                    ),
                          self.event_incident_id(ondelete = "CASCADE"),
                          # @ToDo: Add Warning?
self.hrm_human_resource_id(empty = False, ondelete = "RESTRICT", ), Field("status", "integer", default = 1, represent = lambda opt: \ status_opts.get(opt, current.messages.UNKNOWN_OPT), requires = IS_IN_SET(status_opts), ), *s3_meta_fields()) current.response.s3.crud_strings[tablename] = Storage( label_create = T("Assign Human Resource"), title_display = T("Human Resource Details"), title_list = T("Assigned Human Resources"), title_update = T("Edit Human Resource"), label_list_button = T("List Assigned Human Resources"), label_delete_button = T("Remove Human Resource from this incident"), msg_record_created = T("Human Resource assigned"), msg_record_modified = T("Human Resource Assignment updated"), msg_record_deleted = T("Human Resource unassigned"), msg_list_empty = T("No Human Resources currently assigned to this incident")) if current.deployment_settings.has_module("budget"): crud_form = S3SQLCustomForm("incident_id", "human_resource_id", "status", S3SQLInlineComponent("allocation", label = T("Budget"), fields = ["budget_id", "start_date", "end_date", "daily_cost", ], ), ) else: crud_form = None self.configure(tablename, crud_form = crud_form, deduplicate = self.event_human_resource_duplicate, list_fields = [#"incident_id", # Not being dropped in component view "human_resource_id", "status", "allocation.budget_id", "allocation.start_date", "allocation.end_date", "allocation.daily_cost", ], super_entity = "budget_cost_item", ) # Pass names back to global scope (s3.*) return {} # ------------------------------------------------------------------------- @staticmethod def event_human_resource_duplicate(item): """ Import item de-duplication """ data = item.data incident_id = data.get("incident_id") human_resource_id = data.get("human_resource_id") if incident_id and human_resource_id: table = item.table query = (table.incident_id == incident_id) & \ (table.human_resource_id == human_resource_id) duplicate = current.db(query).select(table.id, limitby=(0, 1)).first() 
if duplicate: item.id = duplicate.id item.method = item.METHOD.UPDATE return # ============================================================================= class S3EventImpactModel(S3Model): """ Link Events &/or Incidents with Impacts """ names = ("event_event_impact", ) def model(self): if not current.deployment_settings.has_module("stats"): current.log.warning("Event Impact Model needs Stats module enabling") return {} #T = current.T # --------------------------------------------------------------------- # Event Impact tablename = "event_event_impact" self.define_table(tablename, self.event_event_id(ondelete = "CASCADE"), self.event_incident_id(ondelete = "CASCADE"), self.stats_impact_id(empty = False, ondelete = "CASCADE", ), *s3_meta_fields()) # Table configuration self.configure(tablename, onaccept = self.event_impact_onaccept, ) # Pass names back to global scope (s3.*) return {} # ------------------------------------------------------------------------- @staticmethod def event_impact_onaccept(form): """ Onaccept-routine for event_impact links: - populate event_id from incident if empty """ try: formvars = form.vars record_id = formvars.id except KeyError: return if not record_id: return db = current.db s3db = current.s3db table = s3db.event_event_impact # Make sure we have both keys if any(f not in formvars for f in ("event_id", "incident_id")): query = (table.id == record_id) record = db(query).select(table.id, table.event_id, table.incident_id, limitby=(0, 1)).first() if not record: return else: record = formvars # If event_id is empty - populate it from the incident if not record.event_id and record.incident_id: itable = s3db.event_incident query = (itable.id == record.incident_id) incident = db(query).select(itable.event_id, limitby=(0, 1)).first() if incident: db(table.id == record_id).update(event_id=incident.event_id) # ============================================================================= class S3EventIReportModel(S3Model): """ Link Incident 
Reports to Incidents @ToDo: Deprecate """ names = ("event_ireport", ) def model(self): T = current.T # --------------------------------------------------------------------- # Incident Reports tablename = "event_ireport" self.define_table(tablename, self.event_incident_id(empty = False, ondelete = "CASCADE", ), self.irs_ireport_id(empty = False, ondelete = "CASCADE", ), *s3_meta_fields()) current.response.s3.crud_strings[tablename] = Storage( label_create = T("Create Incident Report"), title_display = T("Incident Report Details"), title_list = T("Incident Reports"), title_update = T("Edit Incident Report"), label_list_button = T("List Incident Reports"), label_delete_button = T("Remove Incident Report from this incident"), msg_record_created = T("Incident Report added"), msg_record_modified = T("Incident Report updated"), msg_record_deleted = T("Incident Report removed"), msg_list_empty = T("No Incident Reports currently registered in this incident")) # Pass names back to global scope (s3.*) return {} # ============================================================================= class S3EventMapModel(S3Model): """ Link Map Configs to Incidents """ names = ("event_config", ) def model(self): T = current.T # --------------------------------------------------------------------- # Map Config tablename = "event_config" self.define_table(tablename, self.event_incident_id(empty = False, ondelete = "CASCADE", ), self.gis_config_id(empty = False, ondelete = "CASCADE", ), *s3_meta_fields()) current.response.s3.crud_strings[tablename] = Storage( label_create = T("Create Map Profile"), title_display = T("Map Profile Details"), title_list = T("Map Profiles"), title_update = T("Edit Map Profile"), label_list_button = T("List Map Profiles"), label_delete_button = T("Remove Map Profile from this incident"), msg_record_created = T("Map Profile added"), msg_record_modified = T("Map Profile updated"), msg_record_deleted = T("Map Profile removed"), msg_list_empty = T("No Map Profiles 
currently registered in this incident")) # Pass names back to global scope (s3.*) return {} # ============================================================================= class S3EventOrganisationModel(S3Model): """ Link Organisations to Incidents """ names = ("event_organisation", ) def model(self): T = current.T status_opts = {1: T("Alerted"), 2: T("Standing By"), 3: T("Active"), 4: T("Deactivated"), 5: T("Unable to activate"), } # --------------------------------------------------------------------- # Organisations linked to this Incident # tablename = "event_organisation" self.define_table(tablename, #self.event_event_id(), self.event_incident_id(empty = False, ondelete = "CASCADE", ), self.org_organisation_id(empty = False, ondelete = "CASCADE", ), Field("status", "integer", default = 1, represent = lambda opt: \ status_opts.get(opt, current.messages.UNKNOWN_OPT), requires = IS_IN_SET(status_opts), ), # @ToDo: Role? *s3_meta_fields()) current.response.s3.crud_strings[tablename] = Storage( label_create = T("Add Organization"), title_display = T("Organization Details"), title_list = T("Organizations"), title_update = T("Edit Organization"), label_list_button = T("List Organizations"), label_delete_button = T("Remove Organization from this incident"), msg_record_created = T("Organization added"), msg_record_modified = T("Organization updated"), msg_record_deleted = T("Organization removed"), msg_list_empty = T("No Organizations currently registered in this incident")) # Pass names back to global scope (s3.*) return {} # ============================================================================= class S3EventSiteModel(S3Model): """ Link Sites (Facilities) to Incidents """ names = ("event_site", ) def model(self): T = current.T super_link = self.super_link status_opts = {1: T("Alerted"), 2: T("Standing By"), 3: T("Active"), 4: T("Deactivated"), 5: T("Unable to activate"), } SITE_LABEL = current.deployment_settings.get_org_site_label() # 
--------------------------------------------------------------------- # Facilities # @ToDo: Filter Widgets tablename = "event_site" self.define_table(tablename, # Instance table super_link("cost_item_id", "budget_cost_item"), self.event_incident_id(empty = False, ondelete = "CASCADE", ), # This is a component, so needs to be a super_link # - can't override field name, ondelete or requires super_link("site_id", "org_site", #default = auth.user.site_id if auth.is_logged_in() else None, empty = False, label = SITE_LABEL, ondelete = "CASCADE", represent = self.org_site_represent, readable = True, writable = True, # Comment these to use a Dropdown & not an Autocomplete #widget = S3SiteAutocompleteWidget(), #comment = DIV(_class="tooltip", # _title="%s|%s" % (SITE_LABEL, # messages.AUTOCOMPLETE_HELP)), ), Field("status", "integer", default = 1, represent = lambda opt: \ status_opts.get(opt, current.messages.UNKNOWN_OPT), requires = IS_IN_SET(status_opts), ), *s3_meta_fields()) current.response.s3.crud_strings[tablename] = Storage( label_create = T("Assign Facility"), title_display = T("Facility Details"), title_list = T("Facilities"), title_update = T("Edit Facility"), label_list_button = T("List Facilities"), label_delete_button = T("Remove Facility from this incident"), msg_record_created = T("Facility added"), msg_record_modified = T("Facility updated"), msg_record_deleted = T("Facility removed"), msg_list_empty = T("No Facilities currently registered in this incident")) if current.deployment_settings.has_module("budget"): crud_form = S3SQLCustomForm("incident_id", "site_id", "status", S3SQLInlineComponent("allocation", label = T("Budget"), fields = ["budget_id", "start_date", "end_date", "daily_cost", ], ), ) else: crud_form = None self.configure(tablename, crud_form = crud_form, deduplicate = self.event_site_duplicate, list_fields = [#"incident_id", # Not being dropped in component view "site_id", "status", "allocation.budget_id", "allocation.start_date", 
"allocation.end_date", "allocation.daily_cost", ], super_entity = "budget_cost_item", ) # Pass names back to global scope (s3.*) return {} # ------------------------------------------------------------------------- @staticmethod def event_site_duplicate(item): """ Import item de-duplication """ data = item.data incident_id = data.get("incident_id") site_id = data.get("site_id") if incident_id and site_id: table = item.table query = (table.incident_id == incident_id) & \ (table.site_id == site_id) duplicate = current.db(query).select(table.id, limitby=(0, 1)).first() if duplicate: item.id = duplicate.id item.method = item.METHOD.UPDATE return # ============================================================================= class S3EventSitRepModel(S3Model): """ Link Incidents to SitReps """ names = ("event_sitrep", "event_sitrep_id", ) def model(self): T = current.T # --------------------------------------------------------------------- # SitReps # tablename = "event_sitrep" self.define_table(tablename, #self.event_event_id(ondelete = "CASCADE"), self.event_incident_id(empty = False, ondelete = "CASCADE", ), self.doc_sitrep_id(empty = False, ondelete = "CASCADE", ), *s3_meta_fields()) # Not used as we actuate = replace #current.response.s3.crud_strings[tablename] = Storage( # label_create = T("Create SitRep"), # title_display = T("SitRep Details"), # title_list = T("SitReps"), # title_update = T("Edit Task"), # label_list_button = T("List SitReps"), # label_delete_button = T("Remove SitRep from this incident"), # msg_record_created = T("SitRep added"), # msg_record_modified = T("SitRep updated"), # msg_record_deleted = T("SitRep removed"), # msg_list_empty = T("No SitReps currently registered in this incident")) self.configure(tablename, deduplicate = self.event_sitrep_duplicate, ) # Pass names back to global scope (s3.*) return {} # ------------------------------------------------------------------------- @staticmethod def event_sitrep_duplicate(item): """ Import 
item de-duplication """ data = item.data incident_id = data.get("incident_id") sitrep_id = data.get("sitrep_id") if incident_id and sitrep_id: table = item.table query = (table.incident_id == incident_id) & \ (table.sitrep_id == sitrep_id) duplicate = current.db(query).select(table.id, limitby=(0, 1)).first() if duplicate: item.id = duplicate.id item.method = item.METHOD.UPDATE return # ============================================================================= class S3EventTaskModel(S3Model): """ Link Tasks to Incidents """ names = ("event_task", ) def model(self): T = current.T # --------------------------------------------------------------------- # Tasks # Tasks are to be assigned to resources managed by this EOC # - we manage in detail # @ToDo: Task Templates tablename = "event_task" self.define_table(tablename, #self.event_event_id(ondelete = "CASCADE"), self.event_incident_id(empty = False, ondelete = "CASCADE", ), self.project_task_id(empty = False, ondelete = "CASCADE", ), *s3_meta_fields()) # Not used as we actuate = replace #current.response.s3.crud_strings[tablename] = Storage( # label_create = T("Create Task"), # title_display = T("Task Details"), # title_list = T("Tasks"), # title_update = T("Edit Task"), # label_list_button = T("List Tasks"), # label_delete_button = T("Remove Task from this incident"), # msg_record_created = T("Task added"), # msg_record_modified = T("Task updated"), # msg_record_deleted = T("Task removed"), # msg_list_empty = T("No Tasks currently registered in this incident")) self.configure(tablename, deduplicate = self.event_task_duplicate, ) # Pass names back to global scope (s3.*) return {} # ------------------------------------------------------------------------- @staticmethod def event_task_duplicate(item): """ Import item de-duplication """ data = item.data incident_id = data.get("incident_id") task_id = data.get("task_id") if incident_id and task_id: table = item.table query = (table.incident_id == incident_id) & \ 
(table.task_id == task_id) duplicate = current.db(query).select(table.id, limitby=(0, 1)).first() if duplicate: item.id = duplicate.id item.method = item.METHOD.UPDATE return # ============================================================================= class S3EventShelterModel(S3Model): """ Link Shelters to Events / Incidents """ names = ("event_event_shelter", ) def model(self): T = current.T # --------------------------------------------------------------------- # Shelters # Link table for cr_shelter <> event_event tablename = "event_event_shelter" self.define_table(tablename, self.event_event_id(ondelete = "CASCADE"), #self.event_incident_id(ondelete = "CASCADE"), self.cr_shelter_id(empty = False, ondelete = "CASCADE", ), *s3_meta_fields() ) function = current.request.function if function == "event": current.response.s3.crud_strings[tablename] = Storage( label_create = T("Add Shelter"), title_display = T("Shelter Details"), title_list = T("Shelters"), title_update = T("Edit Shelter"), label_list_button = T("List Shelters"), label_delete_button = T("Remove Shelter for this Event"), msg_record_created = T("Shelter added"), msg_record_modified = T("Shelter updated"), msg_record_deleted = T("Shelter removed"), msg_list_empty = T("No Shelters currently tagged to this event") ) elif function == "shelter": current.response.s3.crud_strings[tablename] = Storage( label_create = T("Associate Event"), title_display = T("Event Details"), title_list = T("Events"), title_update = T("Edit Event"), label_list_button = T("List Events"), label_delete_button = T("Remove Event for this Shelter"), msg_record_created = T("Event added"), msg_record_modified = T("Event updated"), msg_record_deleted = T("Event removed"), msg_list_empty = T("No Events currently tagged to this Shelter") ) # Pass names back to global scope (s3.*) return {} # ============================================================================= def event_notification_dispatcher(r, **attr): """ Send a Dispatch 
notice from an Incident Report - this will be formatted as an OpenGeoSMS """ if r.representation == "html" and \ r.id and not r.component: T = current.T msg = current.msg s3db = current.s3db ctable = s3db.pr_contact itable = s3db.event_incident etable = s3db.event_event message = "" text = "" if r.name == "event": record = r.record id = record.id eventName = record.name startDate = record.start_date exercise = record.exercise status = record.closed text += "************************************************" text += "\n%s " % T("Automatic Message") text += "\n%s: %s, " % (T("Event ID"), id) text += " %s: %s" % (T("Event name"), eventName) text += "\n%s: %s " % (T("Event started"), startDate) text += "\n%s= %s, " % (T("Exercise"), exercise) text += "%s= %s" % (T("Status open"), exercise) text += "\n************************************************\n" # URL to redirect to after message sent url = URL(c="event", f="event", args=r.id) if r.name == "incident": record = r.record id = record.id incName = record.name zeroHour = record.zero_hour exercise = record.exercise event_id = record.event_id closed = record.closed if event_id != None: event = current.db(itable.id == event_id).select(etable.name, limitby=(0, 1) ).first() eventName = event.name else: eventName = T("Not Defined") text += "************************************************" text += "\n%s " % T("Automatic Message") text += "\n%s: %s, " % (T("Incident ID"), id) text += " %s: %s" % (T("Incident name"), incName) text += "\n%s: %s " % (T("Related event"), eventName) text += "\n%s: %s " % (T("Incident started"), zeroHour) text += "\n%s %s, " % (T("Exercise?"), exercise) text += "%s %s" % (T("Closed?"), closed) text += "\n************************************************\n" url = URL(c="event", f="incident", args=r.id) # Create the form opts = dict(type="SMS", # @ToDo: deployment_setting subject = T("Deployment Request"), message = message + text, url = url, ) #query = (ctable.pe_id == id) #recipients = 
current.db(query).select(ctable.pe_id) #if not recipients: # # Provide an Autocomplete the select the person to send the notice to # opts["recipient_type"] = "pr_person" #elif len(recipients) == 1: # # Send to this person # opts["recipient"] = recipients.first()["pr_person"].pe_id #else: # # Send to the Incident Commander # ic = False # for row in recipients: # if row["irs_ireport_human_resource"].incident_commander == True: # opts["recipient"] = row["pr_person"].pe_id # ic = True # break # if not ic: # # Provide an Autocomplete to select the person to send the notice to # opts["recipient_type"] = "pr_person" output = msg.compose(**opts) # Maintain RHeader for consistency if attr.get("rheader"): rheader = attr["rheader"](r) if rheader: output["rheader"] = rheader output["title"] = T("Send Event Update") current.response.view = "msg/compose.html" return output else: raise HTTP(501, current.messages.BADMETHOD) # ============================================================================= def event_incident_list_layout(list_id, item_id, resource, rfields, record, icon="incident"): """ Default dataList item renderer for Projects on Profile pages @param list_id: the HTML ID of the list @param item_id: the HTML ID of the item @param resource: the S3Resource to render @param rfields: the S3ResourceFields to render @param record: the record as dict """ record_id = record["event_incident.id"] item_class = "thumbnail" raw = record._row author = record["event_incident.modified_by"] date = record["event_incident.modified_on"] name = record["event_incident.name"] description = record["event_incident.comments"] zero_hour = record["event_incident.zero_hour"] organisation = record["event_incident.organisation_id"] organisation_id = raw["event_incident.organisation_id"] location = record["event_incident.location_id"] location_id = raw["event_incident.location_id"] comments = raw["event_incident.comments"] org_url = URL(c="org", f="organisation", args=[organisation_id, "profile"]) 
org_logo = raw["org_organisation.logo"] if org_logo: org_logo = A(IMG(_src=URL(c="default", f="download", args=[org_logo]), _class="media-object", ), _href=org_url, _class="pull-left", ) else: # @ToDo: use a dummy logo image org_logo = A(IMG(_class="media-object"), _href=org_url, _class="pull-left", ) # Edit Bar # @ToDo: Consider using S3NavigationItem to hide the auth-related parts permit = current.auth.s3_has_permission table = current.db.event_incident if permit("update", table, record_id=record_id): edit_btn = A(ICON("edit"), _href=URL(c="event", f="incident", args=[record_id, "update.popup"], vars={"refresh": list_id, "record": record_id}, ), _class="s3_modal", _title=current.response.s3.crud_strings.event_incident.title_update, ) else: edit_btn = "" if permit("delete", table, record_id=record_id): delete_btn = A(ICON("delete"), _class="dl-item-delete", _title=current.response.s3.crud_strings.event_incident.label_delete_button, ) else: delete_btn = "" edit_bar = DIV(edit_btn, delete_btn, _class="edit-bar fright", ) # Render the item item = DIV(DIV(ICON(icon), SPAN(location, _class="location-title"), SPAN(zero_hour, _class="date-title"), edit_bar, _class="card-header", ), DIV(DIV(A(name, _href=URL(c="event", f="incident", args=[record_id, "profile"])), _class="card-title"), DIV(DIV((description or ""), DIV(author or "", " - ", A(organisation, _href=org_url, _class="card-organisation", ), _class="card-person", ), _class="media", ), _class="media-body", ), _class="media", ), #docs, _class=item_class, _id=item_id, ) return item # ============================================================================= def event_resource_list_layout(list_id, item_id, resource, rfields, record): """ Default dataList item renderer for Resources on Profile pages @param list_id: the HTML ID of the list @param item_id: the HTML ID of the item @param resource: the S3Resource to render @param rfields: the S3ResourceFields to render @param record: the record as dict """ record_id = 
record["event_resource.id"] item_class = "thumbnail" raw = record._row author = record["event_resource.modified_by"] date = record["event_resource.date"] quantity = record["event_resource.value"] resource_type = record["event_resource.parameter_id"] comments = raw["event_resource.comments"] organisation = record["event_resource.organisation_id"] organisation_id = raw["event_resource.organisation_id"] location = record["event_resource.location_id"] location_id = raw["event_resource.location_id"] location_url = URL(c="gis", f="location", args=[location_id, "profile"]) org_url = URL(c="event", f="organisation", args=[organisation_id, "profile"]) logo = raw["org_organisation.logo"] if logo: logo = A(IMG(_src=URL(c="default", f="download", args=[logo]), _class="media-object", ), _href=org_url, _class="pull-left", ) else: # @ToDo: use a dummy logo image logo = A(IMG(_class="media-object"), _href=org_url, _class="pull-left", ) # Edit Bar permit = current.auth.s3_has_permission table = current.db.event_resource if permit("update", table, record_id=record_id): vars = {"refresh": list_id, "record": record_id, } f = current.request.function if f == "organisation" and organisation_id: vars["(organisation)"] = organisation_id elif f == "location" and location_id: vars["(location)"] = location_id edit_btn = A(ICON("edit"), _href=URL(c="event", f="resource", args=[record_id, "update.popup"], vars=vars), _class="s3_modal", _title=current.response.s3.crud_strings.event_resource.title_update, ) else: edit_btn = "" if permit("delete", table, record_id=record_id): delete_btn = A(ICON("delete"), _class="dl-item-delete", ) else: delete_btn = "" edit_bar = DIV(edit_btn, delete_btn, _class="edit-bar fright", ) # Render the item avatar = logo item = DIV(DIV(SPAN(A(location, _href=location_url, ), _class="location-title", ), SPAN(date, _class="date-title", ), edit_bar, _class="card-header", ), DIV(#avatar, DIV("%s %s" % (quantity, current.T(resource_type)), _class="card-title"), 
DIV(DIV(comments, DIV(author or "" , " - ", A(organisation, _href=org_url, _class="card-organisation", ), _class="card-person", ), _class="media", ), _class="media-body", ), _class="media", ), #docs, _class=item_class, _id=item_id, ) return item # ============================================================================= def event_rheader(r): """ Resource headers for component views """ rheader = None if r.representation == "html": T = current.T settings = current.deployment_settings if r.name == "event": # Event Controller tabs = [(T("Event Details"), None), ] if settings.has_module("cr"): tabs.append((T("Shelters"), "event_shelter")) #if settings.has_module("req"): # tabs.append((T("Requests"), "req")) if settings.has_module("msg"): tabs.append((T("Send Notification"), "dispatch")) rheader_tabs = s3_rheader_tabs(r, tabs) event = r.record if event: if event.exercise: exercise = TH(T("EXERCISE")) else: exercise = TH() if event.closed: closed = TH(T("CLOSED")) else: closed = TH() table = r.table rheader = DIV(TABLE(TR(exercise), TR(TH("%s: " % table.name.label), event.name), TR(TH("%s: " % table.comments.label), event.comments), TR(TH("%s: " % table.start_date.label), table.start_date.represent(event.start_date)), TR(closed), ), rheader_tabs) if r.name == "incident": # Incident Controller tabs = [(T("Incident Details"), None)] append = tabs.append # Impact tab if settings.get_event_incident_impact_tab(): append((T("Impact"), "impact")) # Tasks tab if settings.has_module("project"): append((T("Tasks"), "task")) # Staff tab if settings.has_module("hrm"): STAFF = settings.get_hrm_staff_label() append((STAFF, "human_resource")) if current.auth.s3_has_permission("create", "event_human_resource"): append((T("Assign %(staff)s") % dict(staff=STAFF), "assign")) # Asset tab if settings.has_module("asset"): append((T("Assets"), "asset")) # Other tabs tabs.extend(((T("Facilities"), "site"), # Inc Shelters (T("Organizations"), "organisation"), (T("SitReps"), "sitrep"), 
(T("Map Profile"), "config"), )) # Messaging tab if settings.has_module("msg"): append((T("Send Notification"), "dispatch")) rheader_tabs = s3_rheader_tabs(r, tabs) record = r.record if record: if record.exercise: exercise = TH(T("EXERCISE")) else: exercise = TH() if record.closed: closed = TH(T("CLOSED")) else: closed = TH() table = r.table rheader = DIV(TABLE(TR(exercise), TR(TH("%s: " % table.name.label), record.name), TR(TH("%s: " % table.comments.label), record.comments), TR(TH("%s: " % table.zero_hour.label), table.zero_hour.represent(record.zero_hour)), TR(closed), ), rheader_tabs) return rheader # END =========================================================================
mit
maxwell-demon/grpc
src/python/grpcio/grpc/framework/core/_protocol.py
18
7044
# Copyright 2015, Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """State and behavior for passing protocol objects in an operation.""" import collections import enum from grpc.framework.core import _constants from grpc.framework.core import _interfaces from grpc.framework.core import _utilities from grpc.framework.foundation import callable_util from grpc.framework.interfaces.base import base _EXCEPTION_LOG_MESSAGE = 'Exception delivering protocol object!' 
_LOCAL_FAILURE_OUTCOME = _utilities.Outcome( base.Outcome.Kind.LOCAL_FAILURE, None, None) class _Awaited( collections.namedtuple('_Awaited', ('kind', 'value',))): @enum.unique class Kind(enum.Enum): NOT_YET_ARRIVED = 'not yet arrived' ARRIVED = 'arrived' _NOT_YET_ARRIVED = _Awaited(_Awaited.Kind.NOT_YET_ARRIVED, None) _ARRIVED_AND_NONE = _Awaited(_Awaited.Kind.ARRIVED, None) class _Transitory( collections.namedtuple('_Transitory', ('kind', 'value',))): @enum.unique class Kind(enum.Enum): NOT_YET_SEEN = 'not yet seen' PRESENT = 'present' GONE = 'gone' _NOT_YET_SEEN = _Transitory(_Transitory.Kind.NOT_YET_SEEN, None) _GONE = _Transitory(_Transitory.Kind.GONE, None) class _ProtocolManager(_interfaces.ProtocolManager): """An implementation of _interfaces.ExpirationManager.""" def __init__( self, protocol_receiver, lock, pool, termination_manager, transmission_manager, expiration_manager): """Constructor. Args: protocol_receiver: An _Awaited wrapping of the base.ProtocolReceiver to which protocol objects should be passed during the operation. May be of kind _Awaited.Kind.NOT_YET_ARRIVED if the customer's subscription is not yet known and may be of kind _Awaited.Kind.ARRIVED but with a value of None if the customer's subscription did not include a ProtocolReceiver. lock: The operation-wide lock. pool: A thread pool. termination_manager: The _interfaces.TerminationManager for the operation. transmission_manager: The _interfaces.TransmissionManager for the operation. expiration_manager: The _interfaces.ExpirationManager for the operation. 
""" self._lock = lock self._pool = pool self._termination_manager = termination_manager self._transmission_manager = transmission_manager self._expiration_manager = expiration_manager self._protocol_receiver = protocol_receiver self._context = _NOT_YET_SEEN def _abort_and_notify(self, outcome): if self._termination_manager.outcome is None: self._termination_manager.abort(outcome) self._transmission_manager.abort(outcome) self._expiration_manager.terminate() def _deliver(self, behavior, value): def deliver(): delivery_outcome = callable_util.call_logging_exceptions( behavior, _EXCEPTION_LOG_MESSAGE, value) if delivery_outcome.kind is callable_util.Outcome.Kind.RAISED: with self._lock: self._abort_and_notify(_LOCAL_FAILURE_OUTCOME) self._pool.submit( callable_util.with_exceptions_logged( deliver, _constants.INTERNAL_ERROR_LOG_MESSAGE)) def set_protocol_receiver(self, protocol_receiver): """See _interfaces.ProtocolManager.set_protocol_receiver for spec.""" self._protocol_receiver = _Awaited(_Awaited.Kind.ARRIVED, protocol_receiver) if (self._context.kind is _Transitory.Kind.PRESENT and protocol_receiver is not None): self._deliver(protocol_receiver.context, self._context.value) self._context = _GONE def accept_protocol_context(self, protocol_context): """See _interfaces.ProtocolManager.accept_protocol_context for spec.""" if self._protocol_receiver.kind is _Awaited.Kind.ARRIVED: if self._protocol_receiver.value is not None: self._deliver(self._protocol_receiver.value.context, protocol_context) self._context = _GONE else: self._context = _Transitory(_Transitory.Kind.PRESENT, protocol_context) def invocation_protocol_manager( subscription, lock, pool, termination_manager, transmission_manager, expiration_manager): """Creates an _interfaces.ProtocolManager for invocation-side use. Args: subscription: The local customer's subscription to the operation. lock: The operation-wide lock. pool: A thread pool. 
termination_manager: The _interfaces.TerminationManager for the operation. transmission_manager: The _interfaces.TransmissionManager for the operation. expiration_manager: The _interfaces.ExpirationManager for the operation. """ if subscription.kind is base.Subscription.Kind.FULL: awaited_protocol_receiver = _Awaited( _Awaited.Kind.ARRIVED, subscription.protocol_receiver) else: awaited_protocol_receiver = _ARRIVED_AND_NONE return _ProtocolManager( awaited_protocol_receiver, lock, pool, termination_manager, transmission_manager, expiration_manager) def service_protocol_manager( lock, pool, termination_manager, transmission_manager, expiration_manager): """Creates an _interfaces.ProtocolManager for service-side use. Args: lock: The operation-wide lock. pool: A thread pool. termination_manager: The _interfaces.TerminationManager for the operation. transmission_manager: The _interfaces.TransmissionManager for the operation. expiration_manager: The _interfaces.ExpirationManager for the operation. """ return _ProtocolManager( _NOT_YET_ARRIVED, lock, pool, termination_manager, transmission_manager, expiration_manager)
bsd-3-clause
racker/cloud-init-debian-pkg
cloudinit/config/cc_yum_add_repo.py
6
3944
# vi: ts=4 expandtab # # Copyright (C) 2012 Yahoo! Inc. # # Author: Joshua Harlow <harlowja@yahoo-inc.com> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3, as # published by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import os from cloudinit import util import configobj def _canonicalize_id(repo_id): repo_id = repo_id.lower().replace("-", "_") repo_id = repo_id.replace(" ", "_") return repo_id def _format_repo_value(val): if isinstance(val, (bool)): # Seems like yum prefers 1/0 return str(int(val)) if isinstance(val, (list, tuple)): # Can handle 'lists' in certain cases # See: http://bit.ly/Qqrf1t return "\n ".join([_format_repo_value(v) for v in val]) if not isinstance(val, (basestring, str)): return str(val) return val ## TODO(harlowja): move to distro? 
# See man yum.conf def _format_repository_config(repo_id, repo_config): to_be = configobj.ConfigObj() to_be[repo_id] = {} # Do basic translation of the items -> values for (k, v) in repo_config.items(): # For now assume that people using this know # the format of yum and don't verify keys/values further to_be[repo_id][k] = _format_repo_value(v) lines = to_be.write() lines.insert(0, "# Created by cloud-init on %s" % (util.time_rfc2822())) return "\n".join(lines) def handle(name, cfg, _cloud, log, _args): repos = cfg.get('yum_repos') if not repos: log.debug(("Skipping module named %s," " no 'yum_repos' configuration found"), name) return repo_base_path = util.get_cfg_option_str(cfg, 'yum_repo_dir', '/etc/yum.repos.d/') repo_locations = {} repo_configs = {} for (repo_id, repo_config) in repos.items(): canon_repo_id = _canonicalize_id(repo_id) repo_fn_pth = os.path.join(repo_base_path, "%s.repo" % (canon_repo_id)) if os.path.exists(repo_fn_pth): log.info("Skipping repo %s, file %s already exists!", repo_id, repo_fn_pth) continue elif canon_repo_id in repo_locations: log.info("Skipping repo %s, file %s already pending!", repo_id, repo_fn_pth) continue if not repo_config: repo_config = {} # Do some basic sanity checks/cleaning n_repo_config = {} for (k, v) in repo_config.items(): k = k.lower().strip().replace("-", "_") if k: n_repo_config[k] = v repo_config = n_repo_config missing_required = 0 for req_field in ['baseurl']: if not req_field in repo_config: log.warn(("Repository %s does not contain a %s" " configuration 'required' entry"), repo_id, req_field) missing_required += 1 if not missing_required: repo_configs[canon_repo_id] = repo_config repo_locations[canon_repo_id] = repo_fn_pth else: log.warn("Repository %s is missing %s required fields, skipping!", repo_id, missing_required) for (c_repo_id, path) in repo_locations.items(): repo_blob = _format_repository_config(c_repo_id, repo_configs.get(c_repo_id)) util.write_file(path, repo_blob)
gpl-3.0
FreeAgent/djangoappengine-starter
django/utils/daemonize.py
452
1907
import os import sys if os.name == 'posix': def become_daemon(our_home_dir='.', out_log='/dev/null', err_log='/dev/null', umask=022): "Robustly turn into a UNIX daemon, running in our_home_dir." # First fork try: if os.fork() > 0: sys.exit(0) # kill off parent except OSError, e: sys.stderr.write("fork #1 failed: (%d) %s\n" % (e.errno, e.strerror)) sys.exit(1) os.setsid() os.chdir(our_home_dir) os.umask(umask) # Second fork try: if os.fork() > 0: os._exit(0) except OSError, e: sys.stderr.write("fork #2 failed: (%d) %s\n" % (e.errno, e.strerror)) os._exit(1) si = open('/dev/null', 'r') so = open(out_log, 'a+', 0) se = open(err_log, 'a+', 0) os.dup2(si.fileno(), sys.stdin.fileno()) os.dup2(so.fileno(), sys.stdout.fileno()) os.dup2(se.fileno(), sys.stderr.fileno()) # Set custom file descriptors so that they get proper buffering. sys.stdout, sys.stderr = so, se else: def become_daemon(our_home_dir='.', out_log=None, err_log=None, umask=022): """ If we're not running under a POSIX system, just simulate the daemon mode by doing redirections and directory changing. """ os.chdir(our_home_dir) os.umask(umask) sys.stdin.close() sys.stdout.close() sys.stderr.close() if err_log: sys.stderr = open(err_log, 'a', 0) else: sys.stderr = NullDevice() if out_log: sys.stdout = open(out_log, 'a', 0) else: sys.stdout = NullDevice() class NullDevice: "A writeable object that writes to nowhere -- like /dev/null." def write(self, s): pass
bsd-3-clause
jdramani/servo
tests/wpt/harness/wptrunner/browsers/base.py
195
4635
# This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this file, # You can obtain one at http://mozilla.org/MPL/2.0/. import os import platform import socket from abc import ABCMeta, abstractmethod from ..wptcommandline import require_arg here = os.path.split(__file__)[0] def cmd_arg(name, value=None): prefix = "-" if platform.system() == "Windows" else "--" rv = prefix + name if value is not None: rv += "=" + value return rv def get_free_port(start_port, exclude=None): """Get the first port number after start_port (inclusive) that is not currently bound. :param start_port: Integer port number at which to start testing. :param exclude: Set of port numbers to skip""" port = start_port while True: if exclude and port in exclude: port += 1 continue s = socket.socket() try: s.bind(("127.0.0.1", port)) except socket.error: port += 1 else: return port finally: s.close() def browser_command(binary, args, debug_info): if debug_info: if debug_info.requiresEscapedArgs: args = [item.replace("&", "\\&") for item in args] debug_args = [debug_info.path] + debug_info.args else: debug_args = [] command = [binary] + args return debug_args, command class BrowserError(Exception): pass class Browser(object): __metaclass__ = ABCMeta process_cls = None init_timeout = 30 def __init__(self, logger): """Abstract class serving as the basis for Browser implementations. The Browser is used in the TestRunnerManager to start and stop the browser process, and to check the state of that process. This class also acts as a context manager, enabling it to do browser-specific setup at the start of the testrun and cleanup after the run is complete. :param logger: Structured logger to use for output. 
""" self.logger = logger def __enter__(self): self.setup() return self def __exit__(self, *args, **kwargs): self.cleanup() def setup(self): """Used for browser-specific setup that happens at the start of a test run""" pass @abstractmethod def start(self): """Launch the browser object and get it into a state where is is ready to run tests""" pass @abstractmethod def stop(self): """Stop the running browser process.""" pass @abstractmethod def pid(self): """pid of the browser process or None if there is no pid""" pass @abstractmethod def is_alive(self): """Boolean indicating whether the browser process is still running""" pass def setup_ssl(self, hosts): """Return a certificate to use for tests requiring ssl that will be trusted by the browser""" raise NotImplementedError("ssl testing not supported") def cleanup(self): """Browser-specific cleanup that is run after the testrun is finished""" pass def executor_browser(self): """Returns the ExecutorBrowser subclass for this Browser subclass and the keyword arguments with which it should be instantiated""" return ExecutorBrowser, {} def log_crash(self, process, test): """Return a list of dictionaries containing information about crashes that happend in the browser, or an empty list if no crashes occurred""" self.logger.crash(process, test) class NullBrowser(Browser): def start(self): """No-op browser to use in scenarios where the TestRunnerManager shouldn't actually own the browser process (e.g. Servo where we start one browser per test)""" pass def stop(self): pass def pid(self): return None def is_alive(self): return True def on_output(self, line): raise NotImplementedError class ExecutorBrowser(object): def __init__(self, **kwargs): """View of the Browser used by the Executor object. This is needed because the Executor runs in a child process and we can't ship Browser instances between processes on Windows. 
Typically this will have a few product-specific properties set, but in some cases it may have more elaborate methods for setting up the browser from the runner process. """ for k, v in kwargs.iteritems(): setattr(self, k, v)
mpl-2.0
Zhongqilong/mykbengineer
kbe/src/lib/python/Lib/threading.py
61
48900
"""Thread module emulating a subset of Java's threading model.""" import sys as _sys import _thread try: from time import monotonic as _time except ImportError: from time import time as _time from traceback import format_exc as _format_exc from _weakrefset import WeakSet from itertools import islice as _islice try: from _collections import deque as _deque except ImportError: from collections import deque as _deque # Note regarding PEP 8 compliant names # This threading model was originally inspired by Java, and inherited # the convention of camelCase function and method names from that # language. Those original names are not in any imminent danger of # being deprecated (even for Py3k),so this module provides them as an # alias for the PEP 8 compliant names # Note that using the new PEP 8 compliant names facilitates substitution # with the multiprocessing module, which doesn't provide the old # Java inspired names. __all__ = ['active_count', 'Condition', 'current_thread', 'enumerate', 'Event', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Thread', 'Barrier', 'Timer', 'ThreadError', 'setprofile', 'settrace', 'local', 'stack_size'] # Rename some stuff so "from threading import *" is safe _start_new_thread = _thread.start_new_thread _allocate_lock = _thread.allocate_lock _set_sentinel = _thread._set_sentinel get_ident = _thread.get_ident ThreadError = _thread.error try: _CRLock = _thread.RLock except AttributeError: _CRLock = None TIMEOUT_MAX = _thread.TIMEOUT_MAX del _thread # Support for profile and trace hooks _profile_hook = None _trace_hook = None def setprofile(func): """Set a profile function for all threads started from the threading module. The func will be passed to sys.setprofile() for each thread, before its run() method is called. """ global _profile_hook _profile_hook = func def settrace(func): """Set a trace function for all threads started from the threading module. 
The func will be passed to sys.settrace() for each thread, before its run() method is called. """ global _trace_hook _trace_hook = func # Synchronization classes Lock = _allocate_lock def RLock(*args, **kwargs): """Factory function that returns a new reentrant lock. A reentrant lock must be released by the thread that acquired it. Once a thread has acquired a reentrant lock, the same thread may acquire it again without blocking; the thread must release it once for each time it has acquired it. """ if _CRLock is None: return _PyRLock(*args, **kwargs) return _CRLock(*args, **kwargs) class _RLock: """This class implements reentrant lock objects. A reentrant lock must be released by the thread that acquired it. Once a thread has acquired a reentrant lock, the same thread may acquire it again without blocking; the thread must release it once for each time it has acquired it. """ def __init__(self): self._block = _allocate_lock() self._owner = None self._count = 0 def __repr__(self): owner = self._owner try: owner = _active[owner].name except KeyError: pass return "<%s owner=%r count=%d>" % ( self.__class__.__name__, owner, self._count) def acquire(self, blocking=True, timeout=-1): """Acquire a lock, blocking or non-blocking. When invoked without arguments: if this thread already owns the lock, increment the recursion level by one, and return immediately. Otherwise, if another thread owns the lock, block until the lock is unlocked. Once the lock is unlocked (not owned by any thread), then grab ownership, set the recursion level to one, and return. If more than one thread is blocked waiting until the lock is unlocked, only one at a time will be able to grab ownership of the lock. There is no return value in this case. When invoked with the blocking argument set to true, do the same thing as when called without arguments, and return true. When invoked with the blocking argument set to false, do not block. 
If a call without an argument would block, return false immediately; otherwise, do the same thing as when called without arguments, and return true. When invoked with the floating-point timeout argument set to a positive value, block for at most the number of seconds specified by timeout and as long as the lock cannot be acquired. Return true if the lock has been acquired, false if the timeout has elapsed. """ me = get_ident() if self._owner == me: self._count += 1 return 1 rc = self._block.acquire(blocking, timeout) if rc: self._owner = me self._count = 1 return rc __enter__ = acquire def release(self): """Release a lock, decrementing the recursion level. If after the decrement it is zero, reset the lock to unlocked (not owned by any thread), and if any other threads are blocked waiting for the lock to become unlocked, allow exactly one of them to proceed. If after the decrement the recursion level is still nonzero, the lock remains locked and owned by the calling thread. Only call this method when the calling thread owns the lock. A RuntimeError is raised if this method is called when the lock is unlocked. There is no return value. """ if self._owner != get_ident(): raise RuntimeError("cannot release un-acquired lock") self._count = count = self._count - 1 if not count: self._owner = None self._block.release() def __exit__(self, t, v, tb): self.release() # Internal methods used by condition variables def _acquire_restore(self, state): self._block.acquire() self._count, self._owner = state def _release_save(self): if self._count == 0: raise RuntimeError("cannot release un-acquired lock") count = self._count self._count = 0 owner = self._owner self._owner = None self._block.release() return (count, owner) def _is_owned(self): return self._owner == get_ident() _PyRLock = _RLock class Condition: """Class that implements a condition variable. A condition variable allows one or more threads to wait until they are notified by another thread. 
If the lock argument is given and not None, it must be a Lock or RLock object, and it is used as the underlying lock. Otherwise, a new RLock object is created and used as the underlying lock. """ def __init__(self, lock=None): if lock is None: lock = RLock() self._lock = lock # Export the lock's acquire() and release() methods self.acquire = lock.acquire self.release = lock.release # If the lock defines _release_save() and/or _acquire_restore(), # these override the default implementations (which just call # release() and acquire() on the lock). Ditto for _is_owned(). try: self._release_save = lock._release_save except AttributeError: pass try: self._acquire_restore = lock._acquire_restore except AttributeError: pass try: self._is_owned = lock._is_owned except AttributeError: pass self._waiters = _deque() def __enter__(self): return self._lock.__enter__() def __exit__(self, *args): return self._lock.__exit__(*args) def __repr__(self): return "<Condition(%s, %d)>" % (self._lock, len(self._waiters)) def _release_save(self): self._lock.release() # No state to save def _acquire_restore(self, x): self._lock.acquire() # Ignore saved state def _is_owned(self): # Return True if lock is owned by current_thread. # This method is called only if __lock doesn't have _is_owned(). if self._lock.acquire(0): self._lock.release() return False else: return True def wait(self, timeout=None): """Wait until notified or until a timeout occurs. If the calling thread has not acquired the lock when this method is called, a RuntimeError is raised. This method releases the underlying lock, and then blocks until it is awakened by a notify() or notify_all() call for the same condition variable in another thread, or until the optional timeout occurs. Once awakened or timed out, it re-acquires the lock and returns. When the timeout argument is present and not None, it should be a floating point number specifying a timeout for the operation in seconds (or fractions thereof). 
When the underlying lock is an RLock, it is not released using its release() method, since this may not actually unlock the lock when it was acquired multiple times recursively. Instead, an internal interface of the RLock class is used, which really unlocks it even when it has been recursively acquired several times. Another internal interface is then used to restore the recursion level when the lock is reacquired. """ if not self._is_owned(): raise RuntimeError("cannot wait on un-acquired lock") waiter = _allocate_lock() waiter.acquire() self._waiters.append(waiter) saved_state = self._release_save() gotit = False try: # restore state no matter what (e.g., KeyboardInterrupt) if timeout is None: waiter.acquire() gotit = True else: if timeout > 0: gotit = waiter.acquire(True, timeout) else: gotit = waiter.acquire(False) return gotit finally: self._acquire_restore(saved_state) if not gotit: try: self._waiters.remove(waiter) except ValueError: pass def wait_for(self, predicate, timeout=None): """Wait until a condition evaluates to True. predicate should be a callable which result will be interpreted as a boolean value. A timeout may be provided giving the maximum time to wait. """ endtime = None waittime = timeout result = predicate() while not result: if waittime is not None: if endtime is None: endtime = _time() + waittime else: waittime = endtime - _time() if waittime <= 0: break self.wait(waittime) result = predicate() return result def notify(self, n=1): """Wake up one or more threads waiting on this condition, if any. If the calling thread has not acquired the lock when this method is called, a RuntimeError is raised. This method wakes up at most n of the threads waiting for the condition variable; it is a no-op if no threads are waiting. 
""" if not self._is_owned(): raise RuntimeError("cannot notify on un-acquired lock") all_waiters = self._waiters waiters_to_notify = _deque(_islice(all_waiters, n)) if not waiters_to_notify: return for waiter in waiters_to_notify: waiter.release() try: all_waiters.remove(waiter) except ValueError: pass def notify_all(self): """Wake up all threads waiting on this condition. If the calling thread has not acquired the lock when this method is called, a RuntimeError is raised. """ self.notify(len(self._waiters)) notifyAll = notify_all class Semaphore: """This class implements semaphore objects. Semaphores manage a counter representing the number of release() calls minus the number of acquire() calls, plus an initial value. The acquire() method blocks if necessary until it can return without making the counter negative. If not given, value defaults to 1. """ # After Tim Peters' semaphore class, but not quite the same (no maximum) def __init__(self, value=1): if value < 0: raise ValueError("semaphore initial value must be >= 0") self._cond = Condition(Lock()) self._value = value def acquire(self, blocking=True, timeout=None): """Acquire a semaphore, decrementing the internal counter by one. When invoked without arguments: if the internal counter is larger than zero on entry, decrement it by one and return immediately. If it is zero on entry, block, waiting until some other thread has called release() to make it larger than zero. This is done with proper interlocking so that if multiple acquire() calls are blocked, release() will wake exactly one of them up. The implementation may pick one at random, so the order in which blocked threads are awakened should not be relied on. There is no return value in this case. When invoked with blocking set to true, do the same thing as when called without arguments, and return true. When invoked with blocking set to false, do not block. 
If a call without an argument would block, return false immediately; otherwise, do the same thing as when called without arguments, and return true. When invoked with a timeout other than None, it will block for at most timeout seconds. If acquire does not complete successfully in that interval, return false. Return true otherwise. """ if not blocking and timeout is not None: raise ValueError("can't specify timeout for non-blocking acquire") rc = False endtime = None with self._cond: while self._value == 0: if not blocking: break if timeout is not None: if endtime is None: endtime = _time() + timeout else: timeout = endtime - _time() if timeout <= 0: break self._cond.wait(timeout) else: self._value -= 1 rc = True return rc __enter__ = acquire def release(self): """Release a semaphore, incrementing the internal counter by one. When the counter is zero on entry and another thread is waiting for it to become larger than zero again, wake up that thread. """ with self._cond: self._value += 1 self._cond.notify() def __exit__(self, t, v, tb): self.release() class BoundedSemaphore(Semaphore): """Implements a bounded semaphore. A bounded semaphore checks to make sure its current value doesn't exceed its initial value. If it does, ValueError is raised. In most situations semaphores are used to guard resources with limited capacity. If the semaphore is released too many times it's a sign of a bug. If not given, value defaults to 1. Like regular semaphores, bounded semaphores manage a counter representing the number of release() calls minus the number of acquire() calls, plus an initial value. The acquire() method blocks if necessary until it can return without making the counter negative. If not given, value defaults to 1. """ def __init__(self, value=1): Semaphore.__init__(self, value) self._initial_value = value def release(self): """Release a semaphore, incrementing the internal counter by one. 
When the counter is zero on entry and another thread is waiting for it to become larger than zero again, wake up that thread. If the number of releases exceeds the number of acquires, raise a ValueError. """ with self._cond: if self._value >= self._initial_value: raise ValueError("Semaphore released too many times") self._value += 1 self._cond.notify() class Event: """Class implementing event objects. Events manage a flag that can be set to true with the set() method and reset to false with the clear() method. The wait() method blocks until the flag is true. The flag is initially false. """ # After Tim Peters' event class (without is_posted()) def __init__(self): self._cond = Condition(Lock()) self._flag = False def _reset_internal_locks(self): # private! called by Thread._reset_internal_locks by _after_fork() self._cond.__init__() def is_set(self): """Return true if and only if the internal flag is true.""" return self._flag isSet = is_set def set(self): """Set the internal flag to true. All threads waiting for it to become true are awakened. Threads that call wait() once the flag is true will not block at all. """ self._cond.acquire() try: self._flag = True self._cond.notify_all() finally: self._cond.release() def clear(self): """Reset the internal flag to false. Subsequently, threads calling wait() will block until set() is called to set the internal flag to true again. """ self._cond.acquire() try: self._flag = False finally: self._cond.release() def wait(self, timeout=None): """Block until the internal flag is true. If the internal flag is true on entry, return immediately. Otherwise, block until another thread calls set() to set the flag to true, or until the optional timeout occurs. When the timeout argument is present and not None, it should be a floating point number specifying a timeout for the operation in seconds (or fractions thereof). 
This method returns the internal flag on exit, so it will always return True except if a timeout is given and the operation times out. """ self._cond.acquire() try: signaled = self._flag if not signaled: signaled = self._cond.wait(timeout) return signaled finally: self._cond.release() # A barrier class. Inspired in part by the pthread_barrier_* api and # the CyclicBarrier class from Java. See # http://sourceware.org/pthreads-win32/manual/pthread_barrier_init.html and # http://java.sun.com/j2se/1.5.0/docs/api/java/util/concurrent/ # CyclicBarrier.html # for information. # We maintain two main states, 'filling' and 'draining' enabling the barrier # to be cyclic. Threads are not allowed into it until it has fully drained # since the previous cycle. In addition, a 'resetting' state exists which is # similar to 'draining' except that threads leave with a BrokenBarrierError, # and a 'broken' state in which all threads get the exception. class Barrier: """Implements a Barrier. Useful for synchronizing a fixed number of threads at known synchronization points. Threads block on 'wait()' and are simultaneously once they have all made that call. """ def __init__(self, parties, action=None, timeout=None): """Create a barrier, initialised to 'parties' threads. 'action' is a callable which, when supplied, will be called by one of the threads after they have all entered the barrier and just prior to releasing them all. If a 'timeout' is provided, it is uses as the default for all subsequent 'wait()' calls. """ self._cond = Condition(Lock()) self._action = action self._timeout = timeout self._parties = parties self._state = 0 #0 filling, 1, draining, -1 resetting, -2 broken self._count = 0 def wait(self, timeout=None): """Wait for the barrier. When the specified number of threads have started waiting, they are all simultaneously awoken. If an 'action' was provided for the barrier, one of the threads will have executed that callback prior to returning. 
Returns an individual index number from 0 to 'parties-1'. """ if timeout is None: timeout = self._timeout with self._cond: self._enter() # Block while the barrier drains. index = self._count self._count += 1 try: if index + 1 == self._parties: # We release the barrier self._release() else: # We wait until someone releases us self._wait(timeout) return index finally: self._count -= 1 # Wake up any threads waiting for barrier to drain. self._exit() # Block until the barrier is ready for us, or raise an exception # if it is broken. def _enter(self): while self._state in (-1, 1): # It is draining or resetting, wait until done self._cond.wait() #see if the barrier is in a broken state if self._state < 0: raise BrokenBarrierError assert self._state == 0 # Optionally run the 'action' and release the threads waiting # in the barrier. def _release(self): try: if self._action: self._action() # enter draining state self._state = 1 self._cond.notify_all() except: #an exception during the _action handler. Break and reraise self._break() raise # Wait in the barrier until we are relased. Raise an exception # if the barrier is reset or broken. def _wait(self, timeout): if not self._cond.wait_for(lambda : self._state != 0, timeout): #timed out. Break the barrier self._break() raise BrokenBarrierError if self._state < 0: raise BrokenBarrierError assert self._state == 1 # If we are the last thread to exit the barrier, signal any threads # waiting for the barrier to drain. def _exit(self): if self._count == 0: if self._state in (-1, 1): #resetting or draining self._state = 0 self._cond.notify_all() def reset(self): """Reset the barrier to the initial state. Any threads currently waiting will get the BrokenBarrier exception raised. 
""" with self._cond: if self._count > 0: if self._state == 0: #reset the barrier, waking up threads self._state = -1 elif self._state == -2: #was broken, set it to reset state #which clears when the last thread exits self._state = -1 else: self._state = 0 self._cond.notify_all() def abort(self): """Place the barrier into a 'broken' state. Useful in case of error. Any currently waiting threads and threads attempting to 'wait()' will have BrokenBarrierError raised. """ with self._cond: self._break() def _break(self): # An internal error was detected. The barrier is set to # a broken state all parties awakened. self._state = -2 self._cond.notify_all() @property def parties(self): """Return the number of threads required to trip the barrier.""" return self._parties @property def n_waiting(self): """Return the number of threads currently waiting at the barrier.""" # We don't need synchronization here since this is an ephemeral result # anyway. It returns the correct value in the steady state. if self._state == 0: return self._count return 0 @property def broken(self): """Return True if the barrier is in a broken state.""" return self._state == -2 # exception raised by the Barrier class class BrokenBarrierError(RuntimeError): pass # Helper to generate new thread names _counter = 0 def _newname(template="Thread-%d"): global _counter _counter += 1 return template % _counter # Active thread administration _active_limbo_lock = _allocate_lock() _active = {} # maps thread id to Thread object _limbo = {} _dangling = WeakSet() # Main class for threads class Thread: """A class that represents a thread of control. This class can be safely subclassed in a limited fashion. There are two ways to specify the activity: by passing a callable object to the constructor, or by overriding the run() method in a subclass. """ __initialized = False # Need to store a reference to sys.exc_info for printing # out exceptions when a thread tries to use a global var. during interp. 
# shutdown and thus raises an exception about trying to perform some # operation on/with a NoneType __exc_info = _sys.exc_info # Keep sys.exc_clear too to clear the exception just before # allowing .join() to return. #XXX __exc_clear = _sys.exc_clear def __init__(self, group=None, target=None, name=None, args=(), kwargs=None, *, daemon=None): """This constructor should always be called with keyword arguments. Arguments are: *group* should be None; reserved for future extension when a ThreadGroup class is implemented. *target* is the callable object to be invoked by the run() method. Defaults to None, meaning nothing is called. *name* is the thread name. By default, a unique name is constructed of the form "Thread-N" where N is a small decimal number. *args* is the argument tuple for the target invocation. Defaults to (). *kwargs* is a dictionary of keyword arguments for the target invocation. Defaults to {}. If a subclass overrides the constructor, it must make sure to invoke the base class constructor (Thread.__init__()) before doing anything else to the thread. """ assert group is None, "group argument must be None for now" if kwargs is None: kwargs = {} self._target = target self._name = str(name or _newname()) self._args = args self._kwargs = kwargs if daemon is not None: self._daemonic = daemon else: self._daemonic = current_thread().daemon self._ident = None self._tstate_lock = None self._started = Event() self._is_stopped = False self._initialized = True # sys.stderr is not stored in the class like # sys.exc_info since it can be changed between instances self._stderr = _sys.stderr # For debugging and _after_fork() _dangling.add(self) def _reset_internal_locks(self, is_alive): # private! Called by _after_fork() to reset our internal locks as # they may be in an invalid state leading to a deadlock or crash. self._started._reset_internal_locks() if is_alive: self._set_tstate_lock() else: # The thread isn't alive after fork: it doesn't have a tstate # anymore. 
self._is_stopped = True self._tstate_lock = None def __repr__(self): assert self._initialized, "Thread.__init__() was not called" status = "initial" if self._started.is_set(): status = "started" self.is_alive() # easy way to get ._is_stopped set when appropriate if self._is_stopped: status = "stopped" if self._daemonic: status += " daemon" if self._ident is not None: status += " %s" % self._ident return "<%s(%s, %s)>" % (self.__class__.__name__, self._name, status) def start(self): """Start the thread's activity. It must be called at most once per thread object. It arranges for the object's run() method to be invoked in a separate thread of control. This method will raise a RuntimeError if called more than once on the same thread object. """ if not self._initialized: raise RuntimeError("thread.__init__() not called") if self._started.is_set(): raise RuntimeError("threads can only be started once") with _active_limbo_lock: _limbo[self] = self try: _start_new_thread(self._bootstrap, ()) except Exception: with _active_limbo_lock: del _limbo[self] raise self._started.wait() def run(self): """Method representing the thread's activity. You may override this method in a subclass. The standard run() method invokes the callable object passed to the object's constructor as the target argument, if any, with sequential and keyword arguments taken from the args and kwargs arguments, respectively. """ try: if self._target: self._target(*self._args, **self._kwargs) finally: # Avoid a refcycle if the thread is running a function with # an argument that has a member that points to the thread. del self._target, self._args, self._kwargs def _bootstrap(self): # Wrapper around the real bootstrap code that ignores # exceptions during interpreter cleanup. Those typically # happen when a daemon thread wakes up at an unfortunate # moment, finds the world around it destroyed, and raises some # random exception *** while trying to report the exception in # _bootstrap_inner() below ***. 
Those random exceptions # don't help anybody, and they confuse users, so we suppress # them. We suppress them only when it appears that the world # indeed has already been destroyed, so that exceptions in # _bootstrap_inner() during normal business hours are properly # reported. Also, we only suppress them for daemonic threads; # if a non-daemonic encounters this, something else is wrong. try: self._bootstrap_inner() except: if self._daemonic and _sys is None: return raise def _set_ident(self): self._ident = get_ident() def _set_tstate_lock(self): """ Set a lock object which will be released by the interpreter when the underlying thread state (see pystate.h) gets deleted. """ self._tstate_lock = _set_sentinel() self._tstate_lock.acquire() def _bootstrap_inner(self): try: self._set_ident() self._set_tstate_lock() self._started.set() with _active_limbo_lock: _active[self._ident] = self del _limbo[self] if _trace_hook: _sys.settrace(_trace_hook) if _profile_hook: _sys.setprofile(_profile_hook) try: self.run() except SystemExit: pass except: # If sys.stderr is no more (most likely from interpreter # shutdown) use self._stderr. Otherwise still use sys (as in # _sys) in case sys.stderr was redefined since the creation of # self. if _sys: _sys.stderr.write("Exception in thread %s:\n%s\n" % (self.name, _format_exc())) else: # Do the best job possible w/o a huge amt. 
of code to # approximate a traceback (code ideas from # Lib/traceback.py) exc_type, exc_value, exc_tb = self._exc_info() try: print(( "Exception in thread " + self.name + " (most likely raised during interpreter shutdown):"), file=self._stderr) print(( "Traceback (most recent call last):"), file=self._stderr) while exc_tb: print(( ' File "%s", line %s, in %s' % (exc_tb.tb_frame.f_code.co_filename, exc_tb.tb_lineno, exc_tb.tb_frame.f_code.co_name)), file=self._stderr) exc_tb = exc_tb.tb_next print(("%s: %s" % (exc_type, exc_value)), file=self._stderr) # Make sure that exc_tb gets deleted since it is a memory # hog; deleting everything else is just for thoroughness finally: del exc_type, exc_value, exc_tb finally: # Prevent a race in # test_threading.test_no_refcycle_through_target when # the exception keeps the target alive past when we # assert that it's dead. #XXX self.__exc_clear() pass finally: with _active_limbo_lock: try: # We don't call self._delete() because it also # grabs _active_limbo_lock. del _active[get_ident()] except: pass def _stop(self): # After calling ._stop(), .is_alive() returns False and .join() returns # immediately. ._tstate_lock must be released before calling ._stop(). # # Normal case: C code at the end of the thread's life # (release_sentinel in _threadmodule.c) releases ._tstate_lock, and # that's detected by our ._wait_for_tstate_lock(), called by .join() # and .is_alive(). Any number of threads _may_ call ._stop() # simultaneously (for example, if multiple threads are blocked in # .join() calls), and they're not serialized. That's harmless - # they'll just make redundant rebindings of ._is_stopped and # ._tstate_lock. Obscure: we rebind ._tstate_lock last so that the # "assert self._is_stopped" in ._wait_for_tstate_lock() always works # (the assert is executed only if ._tstate_lock is None). # # Special case: _main_thread releases ._tstate_lock via this # module's _shutdown() function. 
lock = self._tstate_lock if lock is not None: assert not lock.locked() self._is_stopped = True self._tstate_lock = None def _delete(self): "Remove current thread from the dict of currently running threads." # Notes about running with _dummy_thread: # # Must take care to not raise an exception if _dummy_thread is being # used (and thus this module is being used as an instance of # dummy_threading). _dummy_thread.get_ident() always returns -1 since # there is only one thread if _dummy_thread is being used. Thus # len(_active) is always <= 1 here, and any Thread instance created # overwrites the (if any) thread currently registered in _active. # # An instance of _MainThread is always created by 'threading'. This # gets overwritten the instant an instance of Thread is created; both # threads return -1 from _dummy_thread.get_ident() and thus have the # same key in the dict. So when the _MainThread instance created by # 'threading' tries to clean itself up when atexit calls this method # it gets a KeyError if another Thread instance was created. # # This all means that KeyError from trying to delete something from # _active if dummy_threading is being used is a red herring. But # since it isn't if dummy_threading is *not* being used then don't # hide the exception. try: with _active_limbo_lock: del _active[get_ident()] # There must not be any python code between the previous line # and after the lock is released. Otherwise a tracing function # could try to acquire the lock again in the same thread, (in # current_thread()), and would block. except KeyError: if 'dummy_threading' not in _sys.modules: raise def join(self, timeout=None): """Wait until the thread terminates. This blocks the calling thread until the thread whose join() method is called terminates -- either normally or through an unhandled exception or until the optional timeout occurs. 
When the timeout argument is present and not None, it should be a floating point number specifying a timeout for the operation in seconds (or fractions thereof). As join() always returns None, you must call isAlive() after join() to decide whether a timeout happened -- if the thread is still alive, the join() call timed out. When the timeout argument is not present or None, the operation will block until the thread terminates. A thread can be join()ed many times. join() raises a RuntimeError if an attempt is made to join the current thread as that would cause a deadlock. It is also an error to join() a thread before it has been started and attempts to do so raises the same exception. """ if not self._initialized: raise RuntimeError("Thread.__init__() not called") if not self._started.is_set(): raise RuntimeError("cannot join thread before it is started") if self is current_thread(): raise RuntimeError("cannot join current thread") if timeout is None: self._wait_for_tstate_lock() else: # the behavior of a negative timeout isn't documented, but # historically .join(timeout=x) for x<0 has acted as if timeout=0 self._wait_for_tstate_lock(timeout=max(timeout, 0)) def _wait_for_tstate_lock(self, block=True, timeout=-1): # Issue #18808: wait for the thread state to be gone. # At the end of the thread's life, after all knowledge of the thread # is removed from C data structures, C code releases our _tstate_lock. # This method passes its arguments to _tstate_lock.aquire(). # If the lock is acquired, the C code is done, and self._stop() is # called. That sets ._is_stopped to True, and ._tstate_lock to None. lock = self._tstate_lock if lock is None: # already determined that the C code is done assert self._is_stopped elif lock.acquire(block, timeout): lock.release() self._stop() @property def name(self): """A string used for identification purposes only. It has no semantics. Multiple threads may be given the same name. The initial name is set by the constructor. 
""" assert self._initialized, "Thread.__init__() not called" return self._name @name.setter def name(self, name): assert self._initialized, "Thread.__init__() not called" self._name = str(name) @property def ident(self): """Thread identifier of this thread or None if it has not been started. This is a nonzero integer. See the thread.get_ident() function. Thread identifiers may be recycled when a thread exits and another thread is created. The identifier is available even after the thread has exited. """ assert self._initialized, "Thread.__init__() not called" return self._ident def is_alive(self): """Return whether the thread is alive. This method returns True just before the run() method starts until just after the run() method terminates. The module function enumerate() returns a list of all alive threads. """ assert self._initialized, "Thread.__init__() not called" if self._is_stopped or not self._started.is_set(): return False self._wait_for_tstate_lock(False) return not self._is_stopped isAlive = is_alive @property def daemon(self): """A boolean value indicating whether this thread is a daemon thread. This must be set before start() is called, otherwise RuntimeError is raised. Its initial value is inherited from the creating thread; the main thread is not a daemon thread and therefore all threads created in the main thread default to daemon = False. The entire Python program exits when no alive non-daemon threads are left. 
""" assert self._initialized, "Thread.__init__() not called" return self._daemonic @daemon.setter def daemon(self, daemonic): if not self._initialized: raise RuntimeError("Thread.__init__() not called") if self._started.is_set(): raise RuntimeError("cannot set daemon status of active thread") self._daemonic = daemonic def isDaemon(self): return self.daemon def setDaemon(self, daemonic): self.daemon = daemonic def getName(self): return self.name def setName(self, name): self.name = name # The timer class was contributed by Itamar Shtull-Trauring class Timer(Thread): """Call a function after a specified number of seconds: t = Timer(30.0, f, args=None, kwargs=None) t.start() t.cancel() # stop the timer's action if it's still waiting """ def __init__(self, interval, function, args=None, kwargs=None): Thread.__init__(self) self.interval = interval self.function = function self.args = args if args is not None else [] self.kwargs = kwargs if kwargs is not None else {} self.finished = Event() def cancel(self): """Stop the timer if it hasn't finished yet.""" self.finished.set() def run(self): self.finished.wait(self.interval) if not self.finished.is_set(): self.function(*self.args, **self.kwargs) self.finished.set() # Special thread class to represent the main thread # This is garbage collected through an exit handler class _MainThread(Thread): def __init__(self): Thread.__init__(self, name="MainThread", daemon=False) self._set_tstate_lock() self._started.set() self._set_ident() with _active_limbo_lock: _active[self._ident] = self # Dummy thread class to represent threads not started here. # These aren't garbage collected when they die, nor can they be waited for. # If they invoke anything in threading.py that calls current_thread(), they # leave an entry in the _active dict forever after. # Their purpose is to return *something* from current_thread(). # They are marked as daemon threads so we won't wait for them # when we exit (conform previous semantics). 
class _DummyThread(Thread): def __init__(self): Thread.__init__(self, name=_newname("Dummy-%d"), daemon=True) self._started.set() self._set_ident() with _active_limbo_lock: _active[self._ident] = self def _stop(self): pass def join(self, timeout=None): assert False, "cannot join a dummy thread" # Global API functions def current_thread(): """Return the current Thread object, corresponding to the caller's thread of control. If the caller's thread of control was not created through the threading module, a dummy thread object with limited functionality is returned. """ try: return _active[get_ident()] except KeyError: return _DummyThread() currentThread = current_thread def active_count(): """Return the number of Thread objects currently alive. The returned count is equal to the length of the list returned by enumerate(). """ with _active_limbo_lock: return len(_active) + len(_limbo) activeCount = active_count def _enumerate(): # Same as enumerate(), but without the lock. Internal use only. return list(_active.values()) + list(_limbo.values()) def enumerate(): """Return a list of all Thread objects currently alive. The list includes daemonic threads, dummy thread objects created by current_thread(), and the main thread. It excludes terminated threads and threads that have not yet been started. """ with _active_limbo_lock: return list(_active.values()) + list(_limbo.values()) from _thread import stack_size # Create the main thread object, # and make it available for the interpreter # (Py_Main) as threading._shutdown. _main_thread = _MainThread() def _shutdown(): # Obscure: other threads may be waiting to join _main_thread. That's # dubious, but some code does it. We can't wait for C code to release # the main thread's tstate_lock - that won't happen until the interpreter # is nearly dead. So we release it here. Note that just calling _stop() # isn't enough: other threads may already be waiting on _tstate_lock. 
tlock = _main_thread._tstate_lock # The main thread isn't finished yet, so its thread state lock can't have # been released. assert tlock is not None assert tlock.locked() tlock.release() _main_thread._stop() t = _pickSomeNonDaemonThread() while t: t.join() t = _pickSomeNonDaemonThread() _main_thread._delete() def _pickSomeNonDaemonThread(): for t in enumerate(): if not t.daemon and t.is_alive(): return t return None def main_thread(): """Return the main thread object. In normal conditions, the main thread is the thread from which the Python interpreter was started. """ return _main_thread # get thread-local implementation, either from the thread # module, or from the python fallback try: from _thread import _local as local except ImportError: from _threading_local import local def _after_fork(): # This function is called by Python/ceval.c:PyEval_ReInitThreads which # is called from PyOS_AfterFork. Here we cleanup threading module state # that should not exist after a fork. # Reset _active_limbo_lock, in case we forked while the lock was held # by another (non-forked) thread. http://bugs.python.org/issue874900 global _active_limbo_lock, _main_thread _active_limbo_lock = _allocate_lock() # fork() only copied the current thread; clear references to others. new_active = {} current = current_thread() _main_thread = current with _active_limbo_lock: # Dangling thread instances must still have their locks reset, # because someone may join() them. threads = set(_enumerate()) threads.update(_dangling) for thread in threads: # Any lock/condition variable may be currently locked or in an # invalid state, so we reinitialize them. if thread is current: # There is only one active thread. We reset the ident to # its new value since it can have changed. thread._reset_internal_locks(True) ident = get_ident() thread._ident = ident new_active[ident] = thread else: # All the others are already stopped. 
thread._reset_internal_locks(False) thread._stop() _limbo.clear() _active.clear() _active.update(new_active) assert len(_active) == 1
lgpl-3.0
hephaex/kernel
tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/SchedGui.py
12980
5411
# SchedGui.py - Python extension for perf script, basic GUI code for
# traces drawing and overview.
#
# Copyright (C) 2010 by Frederic Weisbecker <fweisbec@gmail.com>
#
# This software is distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.

try:
	import wx
except ImportError:
	# Python 2 raise syntax: this script targets the Python 2 interpreter
	# embedded in perf.
	raise ImportError, "You need to install the wxpython lib for this script"


class RootFrame(wx.Frame):
	"""Top-level window that draws a scheduler trace as horizontal lanes.

	One lane per entity reported by sched_tracer; the x axis is time.
	Supports scrolling, zooming with the +/- keys, and click-to-inspect
	(clicks are forwarded back to the tracer).
	"""

	Y_OFFSET = 100             # px above the first lane
	RECT_HEIGHT = 100          # lane height in px
	RECT_SPACE = 50            # vertical gap between lanes in px
	EVENT_MARKING_WIDTH = 5    # px strip at the top of a lane for event marks

	def __init__(self, sched_tracer, title, parent = None, id = -1):
		wx.Frame.__init__(self, parent, id, title)

		(self.screen_width, self.screen_height) = wx.GetDisplaySize()
		self.screen_width -= 10
		self.screen_height -= 10
		self.zoom = 0.5
		self.scroll_scale = 20
		self.sched_tracer = sched_tracer
		# Register ourselves so the tracer can push drawing requests back.
		self.sched_tracer.set_root_win(self)
		(self.ts_start, self.ts_end) = sched_tracer.interval()
		self.update_width_virtual()
		self.nr_rects = sched_tracer.nr_rectangles() + 1
		self.height_virtual = RootFrame.Y_OFFSET + (self.nr_rects * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE))

		# whole window panel
		self.panel = wx.Panel(self, size=(self.screen_width, self.screen_height))

		# scrollable container
		self.scroll = wx.ScrolledWindow(self.panel)
		self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale)
		self.scroll.EnableScrolling(True, True)
		self.scroll.SetFocus()

		# scrollable drawing area
		self.scroll_panel = wx.Panel(self.scroll, size=(self.screen_width - 15, self.screen_height / 2))
		self.scroll_panel.Bind(wx.EVT_PAINT, self.on_paint)
		self.scroll_panel.Bind(wx.EVT_KEY_DOWN, self.on_key_press)
		self.scroll_panel.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down)
		self.scroll.Bind(wx.EVT_PAINT, self.on_paint)
		self.scroll.Bind(wx.EVT_KEY_DOWN, self.on_key_press)
		self.scroll.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down)

		self.scroll.Fit()
		self.Fit()

		self.scroll_panel.SetDimensions(-1, -1, self.width_virtual, self.height_virtual, wx.SIZE_USE_EXISTING)

		self.txt = None    # summary wx.StaticText, created lazily by update_summary()

		self.Show(True)

	def us_to_px(self, val):
		# NOTE(review): divides by 10**3 despite the "us" name, so the input
		# looks like nanoseconds -- confirm against sched_tracer.interval().
		return val / (10 ** 3) * self.zoom

	def px_to_us(self, val):
		# Inverse of us_to_px().
		return (val / self.zoom) * (10 ** 3)

	def scroll_start(self):
		# Current scroll origin in pixels (GetViewStart() is in scroll units).
		(x, y) = self.scroll.GetViewStart()
		return (x * self.scroll_scale, y * self.scroll_scale)

	def scroll_start_us(self):
		# Current horizontal scroll origin converted to time units.
		(x, y) = self.scroll_start()
		return self.px_to_us(x)

	def paint_rectangle_zone(self, nr, color, top_color, start, end):
		"""Draw one rectangle on lane *nr* spanning [start, end] (absolute time).

		If *top_color* is not None, a thin EVENT_MARKING_WIDTH strip of that
		color is drawn along the top edge and the main rectangle is shrunk to
		fit below it. Both colors are (r, g, b) tuples.
		"""
		offset_px = self.us_to_px(start - self.ts_start)
		width_px = self.us_to_px(end - self.ts_start)

		offset_py = RootFrame.Y_OFFSET + (nr * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE))
		width_py = RootFrame.RECT_HEIGHT

		dc = self.dc

		if top_color is not None:
			(r, g, b) = top_color
			top_color = wx.Colour(r, g, b)
			brush = wx.Brush(top_color, wx.SOLID)
			dc.SetBrush(brush)
			dc.DrawRectangle(offset_px, offset_py, width_px, RootFrame.EVENT_MARKING_WIDTH)
			width_py -= RootFrame.EVENT_MARKING_WIDTH
			offset_py += RootFrame.EVENT_MARKING_WIDTH

		(r ,g, b) = color
		color = wx.Colour(r, g, b)
		brush = wx.Brush(color, wx.SOLID)
		dc.SetBrush(brush)
		dc.DrawRectangle(offset_px, offset_py, width_px, width_py)

	def update_rectangles(self, dc, start, end):
		# Ask the tracer to repaint the visible time window; it calls back
		# into paint_rectangle_zone() for each rectangle.
		start += self.ts_start
		end += self.ts_start
		self.sched_tracer.fill_zone(start, end)

	def on_paint(self, event):
		# Repaint only the currently visible time window.
		dc = wx.PaintDC(self.scroll_panel)
		self.dc = dc

		width = min(self.width_virtual, self.screen_width)
		(x, y) = self.scroll_start()
		start = self.px_to_us(x)
		end = self.px_to_us(x + width)
		self.update_rectangles(dc, start, end)

	def rect_from_ypixel(self, y):
		# Map a y pixel coordinate to a lane index, or -1 if it falls
		# outside every lane (above, below, or in an inter-lane gap).
		# Relies on Python 2 integer division for the lane index.
		y -= RootFrame.Y_OFFSET
		rect = y / (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)
		height = y % (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)

		if rect < 0 or rect > self.nr_rects - 1 or height > RootFrame.RECT_HEIGHT:
			return -1

		return rect

	def update_summary(self, txt):
		# Replace the summary text shown below the trace area.
		if self.txt:
			self.txt.Destroy()
		self.txt = wx.StaticText(self.panel, -1, txt, (0, (self.screen_height / 2) + 50))

	def on_mouse_down(self, event):
		# Forward a click to the tracer as (lane index, absolute timestamp).
		(x, y) = event.GetPositionTuple()
		rect = self.rect_from_ypixel(y)
		if rect == -1:
			return

		t = self.px_to_us(x) + self.ts_start

		self.sched_tracer.mouse_down(rect, t)

	def update_width_virtual(self):
		# Total drawable width of the whole trace at the current zoom level.
		self.width_virtual = self.us_to_px(self.ts_end - self.ts_start)

	def __zoom(self, x):
		# Re-apply the scrollbar geometry after a zoom change, keeping the
		# time instant *x* at the left edge of the view.
		self.update_width_virtual()
		(xpos, ypos) = self.scroll.GetViewStart()
		xpos = self.us_to_px(x) / self.scroll_scale
		self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale, xpos, ypos)
		self.Refresh()

	def zoom_in(self):
		x = self.scroll_start_us()
		self.zoom *= 2
		self.__zoom(x)

	def zoom_out(self):
		x = self.scroll_start_us()
		self.zoom /= 2
		self.__zoom(x)

	def on_key_press(self, event):
		# +/- zoom in/out; arrow keys scroll one scroll unit per press.
		key = event.GetRawKeyCode()
		if key == ord("+"):
			self.zoom_in()
			return
		if key == ord("-"):
			self.zoom_out()
			return

		key = event.GetKeyCode()
		(x, y) = self.scroll.GetViewStart()
		if key == wx.WXK_RIGHT:
			self.scroll.Scroll(x + 1, y)
		elif key == wx.WXK_LEFT:
			self.scroll.Scroll(x - 1, y)
		elif key == wx.WXK_DOWN:
			self.scroll.Scroll(x, y + 1)
		elif key == wx.WXK_UP:
			self.scroll.Scroll(x, y - 1)
gpl-2.0
sdgathman/cjdns
node_build/dependencies/libuv/build/gyp/test/win/gyptest-cl-buffer-security-check.py
344
1612
#!/usr/bin/env python

# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""
Make sure buffer security check setting is extracted properly.
"""

import TestGyp

import sys

if sys.platform == 'win32':
  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])

  CHDIR = 'compiler-flags'
  test.run_gyp('buffer-security-check.gyp', chdir=CHDIR)
  test.build('buffer-security-check.gyp', chdir=CHDIR)

  def GetDisassemblyOfMain(exe):
    # The standard library uses buffer security checks independently of our
    # buffer security settings, so only the disassembly of our own code
    # (i.e. main()) is extracted for comparison.
    binary = test.built_file_path(exe, chdir=CHDIR)
    listing = test.run_dumpbin('/disasm', binary)
    main_body = []
    seen_main = False
    for asm_line in listing.splitlines():
      if not seen_main:
        # Skip everything up to and including the '_main:' label itself.
        seen_main = (asm_line == '_main:')
      elif asm_line.startswith('_'):
        # Disassembly of the next function starts here.
        break
      else:
        main_body.append(asm_line)
    return '\n'.join(main_body)

  # Buffer security checks are on by default, so 'security_cookie' must be
  # referenced when the setting is unset or explicitly on, and must be
  # absent from our code when it is explicitly off.
  for exe, expect_cookie in (('test_bsc_unset.exe', True),
                             ('test_bsc_on.exe', True),
                             ('test_bsc_off.exe', False)):
    has_cookie = 'security_cookie' in GetDisassemblyOfMain(exe)
    if has_cookie != expect_cookie:
      test.fail_test()

  test.pass_test()
gpl-3.0
Gogolook-Inc/GogoMonkeyRun
demo_runner.py
1
2068
# Demo replay script for the legacy monkeyrunner tool (Jython).
# Kept for reference only -- it is not used by the current pipeline.
import sys,traceback
from com.android.monkeyrunner import MonkeyRunner, MonkeyDevice, MonkeyImage

# Hard-coded parameters recorded from a manual session.
DEVICE_ID = "CB5A1LY5YK"
STEP_DELAY = 4.0              # seconds to wait after each gesture
SNAPSHOT_PATH = '/Users/spring60569/Documents/screenshot_0.png'

def tap(x, y):
    # Log the gesture in the pipe-delimited trace format, then replay it
    # on the device and pause so the UI can settle.
    print("TOUCH|{'x':%d,'y':%d,'type':'downAndUp',}" % (x, y))
    device.touch(x, y, MonkeyDevice.DOWN_AND_UP)
    MonkeyRunner.sleep(STEP_DELAY)

def press_back():
    # Same trace-then-replay pattern for the hardware BACK key.
    print("PRESS|{'name':'BACK','type':'downAndUp',}")
    device.press('KEYCODE_BACK', MonkeyDevice.DOWN_AND_UP)
    MonkeyRunner.sleep(STEP_DELAY)

def startSteps():
    # Replay the recorded UI session: six taps, one screenshot, then back
    # out of the app with three BACK presses.
    print("start monkey runner.")
    # ui_debug.mr
    # add START_FROM_DESKTOP at the 'first' line of code
    # to start app from desktop.
    # add END_BACK_TO_DESKTOP at the 'last' line of code.
    # to end app back to desktop.
    for x, y in ((87, 888), (666, 88), (445, 672),
                 (650, 181), (175, 874), (63, 1146)):
        tap(x, y)
    print("TAKE SNAPSHOT")
    result = device.takeSnapshot()
    print("Writes the screenshot to a file")
    result.writeToFile(SNAPSHOT_PATH, 'png')
    press_back()
    press_back()
    press_back()
    print("end monkey runner.")

print("Connects to the current device, returning a MonkeyDevice object")
device = MonkeyRunner.waitForConnection(5, DEVICE_ID)
if not device:
    print("device connect...fail")
    sys.exit(1)
else:
    print("device connect...success")
startSteps()
device.shell("stop")
sys.exit(0)
apache-2.0
suyashphadtare/propshikhari-frappe
frappe/email/doctype/email_alert/test_email_alert.py
37
3222
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors # See license.txt from __future__ import unicode_literals import frappe, frappe.utils, frappe.utils.scheduler import unittest test_records = frappe.get_test_records('Email Alert') class TestEmailAlert(unittest.TestCase): def setUp(self): frappe.db.sql("""delete from `tabBulk Email`""") frappe.set_user("test1@example.com") def tearDown(self): frappe.set_user("Administrator") def test_new_and_save(self): comment = frappe.new_doc("Comment") comment.comment = "test" comment.insert(ignore_permissions=True) self.assertTrue(frappe.db.get_value("Bulk Email", {"reference_doctype": "Comment", "reference_name": comment.name, "status":"Not Sent"})) frappe.db.sql("""delete from `tabBulk Email`""") comment.description = "test" comment.save() self.assertTrue(frappe.db.get_value("Bulk Email", {"reference_doctype": "Comment", "reference_name": comment.name, "status":"Not Sent"})) def test_condition(self): event = frappe.new_doc("Event") event.subject = "test", event.event_type = "Private" event.starts_on = "2014-06-06 12:00:00" event.insert() self.assertFalse(frappe.db.get_value("Bulk Email", {"reference_doctype": "Event", "reference_name": event.name, "status":"Not Sent"})) event.event_type = "Public" event.save() self.assertTrue(frappe.db.get_value("Bulk Email", {"reference_doctype": "Event", "reference_name": event.name, "status":"Not Sent"})) def test_value_changed(self): event = frappe.new_doc("Event") event.subject = "test", event.event_type = "Private" event.starts_on = "2014-06-06 12:00:00" event.insert() self.assertFalse(frappe.db.get_value("Bulk Email", {"reference_doctype": "Event", "reference_name": event.name, "status":"Not Sent"})) event.subject = "test 1" event.save() self.assertFalse(frappe.db.get_value("Bulk Email", {"reference_doctype": "Event", "reference_name": event.name, "status":"Not Sent"})) event.description = "test" event.save() self.assertTrue(frappe.db.get_value("Bulk Email", 
{"reference_doctype": "Event", "reference_name": event.name, "status":"Not Sent"})) def test_date_changed(self): event = frappe.new_doc("Event") event.subject = "test", event.event_type = "Private" event.starts_on = "2014-01-01 12:00:00" event.insert() self.assertFalse(frappe.db.get_value("Bulk Email", {"reference_doctype": "Event", "reference_name": event.name, "status":"Not Sent"})) frappe.utils.scheduler.trigger(frappe.local.site, "daily", now=True) # not today, so no alert self.assertFalse(frappe.db.get_value("Bulk Email", {"reference_doctype": "Event", "reference_name": event.name, "status":"Not Sent"})) event.starts_on = frappe.utils.add_days(frappe.utils.nowdate(), 2) + " 12:00:00" event.save() self.assertFalse(frappe.db.get_value("Bulk Email", {"reference_doctype": "Event", "reference_name": event.name, "status":"Not Sent"})) frappe.utils.scheduler.trigger(frappe.local.site, "daily", now=True) # today so show alert self.assertTrue(frappe.db.get_value("Bulk Email", {"reference_doctype": "Event", "reference_name": event.name, "status":"Not Sent"}))
mit