repo_name
stringlengths
5
100
ref
stringlengths
12
67
path
stringlengths
4
244
copies
stringlengths
1
8
content
stringlengths
0
1.05M
GunoH/intellij-community
refs/heads/master
python/testData/inspections/PySingleQuotedDocstringInspection/test.py
83
<weak_warning descr="Triple double-quoted strings should be used for docstrings.">'''</weak_warning>package docstring<weak_warning descr="Triple double-quoted strings should be used for docstrings.">'''</weak_warning> def foo(): <weak_warning descr="Triple double-quoted strings should be used for docstrings.">"</weak_warning>foo docstring<weak_warning descr="Triple double-quoted strings should be used for docstrings.">"</weak_warning> pass class Klass: <weak_warning descr="Triple double-quoted strings should be used for docstrings.">'</weak_warning>class docstring\ second line<weak_warning descr="Triple double-quoted strings should be used for docstrings.">'</weak_warning> pass def bar(): """ bar docstring """ pass a = '''some string''' '''another string'''
cydenix/OpenGLCffi
refs/heads/master
OpenGLCffi/GLES2/EXT/EXT/multi_draw_indirect.py
1
from OpenGLCffi.GLES2 import params @params(api='gles2', prms=['mode', 'indirect', 'drawcount', 'stride']) def glMultiDrawArraysIndirectEXT(mode, indirect, drawcount, stride): pass @params(api='gles2', prms=['mode', 'type', 'indirect', 'drawcount', 'stride']) def glMultiDrawElementsIndirectEXT(mode, type, indirect, drawcount, stride): pass
obi-two/Rebelion
refs/heads/master
data/scripts/templates/object/building/faction_perk/hq/shared_hq_s05_fp.py
2
#### NOTICE: THIS FILE IS AUTOGENERATED #### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY #### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES from swgpy.object import * def create(kernel): result = Building() result.template = "object/building/faction_perk/hq/shared_hq_s05_fp.iff" result.attribute_template_id = -1 result.stfName("faction_perk_n","hq_s05") #### BEGIN MODIFICATIONS #### #### END MODIFICATIONS #### return result
RapidApplicationDevelopment/tensorflow
refs/heads/master
tensorflow/contrib/learn/python/learn/tests/dataframe/feeding_queue_runner_test.py
30
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests `FeedingQueueRunner` using arrays and `DataFrames`.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np import tensorflow as tf import tensorflow.contrib.learn.python.learn.dataframe.queues.feeding_functions as ff # pylint: disable=g-import-not-at-top try: import pandas as pd HAS_PANDAS = True except ImportError: HAS_PANDAS = False def get_rows(array, row_indices): rows = [array[i] for i in row_indices] return np.vstack(rows) class FeedingQueueRunnerTestCase(tf.test.TestCase): """Tests for `FeedingQueueRunner`.""" def testArrayFeeding(self): with tf.Graph().as_default(): array = np.arange(32).reshape([16, 2]) q = ff.enqueue_data(array, capacity=100) batch_size = 3 dq_op = q.dequeue_many(batch_size) with tf.Session() as sess: coord = tf.train.Coordinator() threads = tf.train.start_queue_runners(sess=sess, coord=coord) for i in range(100): indices = [j % array.shape[0] for j in range(batch_size * i, batch_size * (i + 1))] expected_dq = get_rows(array, indices) dq = sess.run(dq_op) np.testing.assert_array_equal(indices, dq[0]) np.testing.assert_array_equal(expected_dq, dq[1]) coord.request_stop() coord.join(threads) def testArrayFeedingMultiThread(self): with tf.Graph().as_default(): array = 
np.arange(256).reshape([128, 2]) q = ff.enqueue_data(array, capacity=128, num_threads=8, shuffle=True) batch_size = 3 dq_op = q.dequeue_many(batch_size) with tf.Session() as sess: coord = tf.train.Coordinator() threads = tf.train.start_queue_runners(sess=sess, coord=coord) for _ in range(100): dq = sess.run(dq_op) indices = dq[0] expected_dq = get_rows(array, indices) np.testing.assert_array_equal(expected_dq, dq[1]) coord.request_stop() coord.join(threads) def testPandasFeeding(self): if not HAS_PANDAS: return with tf.Graph().as_default(): array1 = np.arange(32) array2 = np.arange(32, 64) df = pd.DataFrame({"a": array1, "b": array2}, index=np.arange(64, 96)) q = ff.enqueue_data(df, capacity=100) batch_size = 5 dq_op = q.dequeue_many(5) with tf.Session() as sess: coord = tf.train.Coordinator() threads = tf.train.start_queue_runners(sess=sess, coord=coord) for i in range(100): indices = [j % array1.shape[0] for j in range(batch_size * i, batch_size * (i + 1))] expected_df_indices = df.index[indices] expected_rows = df.iloc[indices] dq = sess.run(dq_op) np.testing.assert_array_equal(expected_df_indices, dq[0]) for col_num, col in enumerate(df.columns): np.testing.assert_array_equal(expected_rows[col].values, dq[col_num + 1]) coord.request_stop() coord.join(threads) def testPandasFeedingMultiThread(self): if not HAS_PANDAS: return with tf.Graph().as_default(): array1 = np.arange(128, 256) array2 = 2 * array1 df = pd.DataFrame({"a": array1, "b": array2}, index=np.arange(128)) q = ff.enqueue_data(df, capacity=128, num_threads=8, shuffle=True) batch_size = 5 dq_op = q.dequeue_many(batch_size) with tf.Session() as sess: coord = tf.train.Coordinator() threads = tf.train.start_queue_runners(sess=sess, coord=coord) for _ in range(100): dq = sess.run(dq_op) indices = dq[0] expected_rows = df.iloc[indices] for col_num, col in enumerate(df.columns): np.testing.assert_array_equal(expected_rows[col].values, dq[col_num + 1]) coord.request_stop() coord.join(threads) if __name__ == 
"__main__": tf.test.main()
aferr/LatticeMemCtl
refs/heads/master
src/arch/x86/isa/insts/simd64/integer/logical/exclusive_or.py
91
# Copyright (c) 2007 The Hewlett-Packard Development Company # All rights reserved. # # The license below extends only to copyright in the software and shall # not be construed as granting a license to any other intellectual # property including but not limited to intellectual property relating # to a hardware implementation of the functionality of the software # licensed hereunder. You may use the software subject to the license # terms below provided that you ensure that this notice is replicated # unmodified and in its entirety in all distributions of the software, # modified or unmodified, in source code or in binary form. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer; # redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution; # neither the name of the copyright holders nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # # Authors: Gabe Black microcode = ''' def macroop PXOR_MMX_MMX { mxor mmx, mmx, mmxm }; def macroop PXOR_MMX_M { ldfp ufp1, seg, sib, disp, dataSize=8 mxor mmx, mmx, ufp1 }; def macroop PXOR_MMX_P { rdip t7 ldfp ufp1, seg, riprel, disp, dataSize=8 mxor mmx, mmx, ufp1 }; '''
showgood/YCM_windows
refs/heads/master
third_party/bottle/test/test_contextlocals.py
50
# -*- coding: utf-8 -*- ''' Some objects are context-local, meaning that they have different values depending on the context they are accessed from. A context is currently defined as a thread. ''' import unittest import bottle import threading def run_thread(func): t = threading.Thread(target=func) t.start() t.join() class TestThreadLocals(unittest.TestCase): def test_request(self): e1 = {'PATH_INFO': '/t1'} e2 = {'PATH_INFO': '/t2'} def run(): bottle.request.bind(e2) self.assertEqual(bottle.request.path, '/t2') bottle.request.bind(e1) self.assertEqual(bottle.request.path, '/t1') run_thread(run) self.assertEqual(bottle.request.path, '/t1') def test_response(self): def run(): bottle.response.bind() bottle.response.content_type='test/thread' self.assertEqual(bottle.response.headers['Content-Type'], 'test/thread') bottle.response.bind() bottle.response.content_type='test/main' self.assertEqual(bottle.response.headers['Content-Type'], 'test/main') run_thread(run) self.assertEqual(bottle.response.headers['Content-Type'], 'test/main') if __name__ == '__main__': #pragma: no cover unittest.main()
MalloyPower/parsing-python
refs/heads/master
front-end/testsuite-python-lib/Python-2.0/Lib/dos-8x3/test_err.py
27
#! /usr/bin/env python """Test the errno module Roger E. Masse """ import errno from test_support import verbose errors = ['E2BIG', 'EACCES', 'EADDRINUSE', 'EADDRNOTAVAIL', 'EADV', 'EAFNOSUPPORT', 'EAGAIN', 'EALREADY', 'EBADE', 'EBADF', 'EBADFD', 'EBADMSG', 'EBADR', 'EBADRQC', 'EBADSLT', 'EBFONT', 'EBUSY', 'ECHILD', 'ECHRNG', 'ECOMM', 'ECONNABORTED', 'ECONNREFUSED', 'ECONNRESET', 'EDEADLK', 'EDEADLOCK', 'EDESTADDRREQ', 'EDOM', 'EDQUOT', 'EEXIST', 'EFAULT', 'EFBIG', 'EHOSTDOWN', 'EHOSTUNREACH', 'EIDRM', 'EILSEQ', 'EINPROGRESS', 'EINTR', 'EINVAL', 'EIO', 'EISCONN', 'EISDIR', 'EL2HLT', 'EL2NSYNC', 'EL3HLT', 'EL3RST', 'ELIBACC', 'ELIBBAD', 'ELIBEXEC', 'ELIBMAX', 'ELIBSCN', 'ELNRNG', 'ELOOP', 'EMFILE', 'EMLINK', 'EMSGSIZE', 'EMULTIHOP', 'ENAMETOOLONG', 'ENETDOWN', 'ENETRESET', 'ENETUNREACH', 'ENFILE', 'ENOANO', 'ENOBUFS', 'ENOCSI', 'ENODATA', 'ENODEV', 'ENOENT', 'ENOEXEC', 'ENOLCK', 'ENOLINK', 'ENOMEM', 'ENOMSG', 'ENONET', 'ENOPKG', 'ENOPROTOOPT', 'ENOSPC', 'ENOSR', 'ENOSTR', 'ENOSYS', 'ENOTBLK', 'ENOTCONN', 'ENOTDIR', 'ENOTEMPTY', 'ENOTOBACCO', 'ENOTSOCK', 'ENOTTY', 'ENOTUNIQ', 'ENXIO', 'EOPNOTSUPP', 'EOVERFLOW', 'EPERM', 'EPFNOSUPPORT', 'EPIPE', 'EPROTO', 'EPROTONOSUPPORT', 'EPROTOTYPE', 'ERANGE', 'EREMCHG', 'EREMOTE', 'ERESTART', 'EROFS', 'ESHUTDOWN', 'ESOCKTNOSUPPORT', 'ESPIPE', 'ESRCH', 'ESRMNT', 'ESTALE', 'ESTRPIPE', 'ETIME', 'ETIMEDOUT', 'ETOOMANYREFS', 'ETXTBSY', 'EUNATCH', 'EUSERS', 'EWOULDBLOCK', 'EXDEV', 'EXFULL'] # # This is is a wee bit bogus since the module only conditionally adds # errno constants if they have been defined by errno.h However, this # test seems to work on SGI, Sparc & intel Solaris, and linux. # for error in errors: try: a = getattr(errno, error) except AttributeError: if verbose: print '%s: not found' % error else: if verbose: print '%s: %d' % (error, a)
fengbaicanhe/intellij-community
refs/heads/master
python/lib/Lib/site-packages/django/contrib/gis/db/backends/spatialite/adapter.py
624
from django.db.backends.sqlite3.base import Database from django.contrib.gis.db.backends.adapter import WKTAdapter class SpatiaLiteAdapter(WKTAdapter): "SQLite adaptor for geometry objects." def __conform__(self, protocol): if protocol is Database.PrepareProtocol: return str(self)
lepricon49/CouchPotatoServer
refs/heads/master
libs/html5lib/treebuilders/_base.py
715
from __future__ import absolute_import, division, unicode_literals from six import text_type from ..constants import scopingElements, tableInsertModeElements, namespaces # The scope markers are inserted when entering object elements, # marquees, table cells, and table captions, and are used to prevent formatting # from "leaking" into tables, object elements, and marquees. Marker = None listElementsMap = { None: (frozenset(scopingElements), False), "button": (frozenset(scopingElements | set([(namespaces["html"], "button")])), False), "list": (frozenset(scopingElements | set([(namespaces["html"], "ol"), (namespaces["html"], "ul")])), False), "table": (frozenset([(namespaces["html"], "html"), (namespaces["html"], "table")]), False), "select": (frozenset([(namespaces["html"], "optgroup"), (namespaces["html"], "option")]), True) } class Node(object): def __init__(self, name): """Node representing an item in the tree. name - The tag name associated with the node parent - The parent of the current node (or None for the document node) value - The value of the current node (applies to text nodes and comments attributes - a dict holding name, value pairs for attributes of the node childNodes - a list of child nodes of the current node. 
This must include all elements but not necessarily other node types _flags - A list of miscellaneous flags that can be set on the node """ self.name = name self.parent = None self.value = None self.attributes = {} self.childNodes = [] self._flags = [] def __str__(self): attributesStr = " ".join(["%s=\"%s\"" % (name, value) for name, value in self.attributes.items()]) if attributesStr: return "<%s %s>" % (self.name, attributesStr) else: return "<%s>" % (self.name) def __repr__(self): return "<%s>" % (self.name) def appendChild(self, node): """Insert node as a child of the current node """ raise NotImplementedError def insertText(self, data, insertBefore=None): """Insert data as text in the current node, positioned before the start of node insertBefore or to the end of the node's text. """ raise NotImplementedError def insertBefore(self, node, refNode): """Insert node as a child of the current node, before refNode in the list of child nodes. Raises ValueError if refNode is not a child of the current node""" raise NotImplementedError def removeChild(self, node): """Remove node from the children of the current node """ raise NotImplementedError def reparentChildren(self, newParent): """Move all the children of the current node to newParent. This is needed so that trees that don't store text as nodes move the text in the correct way """ # XXX - should this method be made more general? for child in self.childNodes: newParent.appendChild(child) self.childNodes = [] def cloneNode(self): """Return a shallow copy of the current node i.e. 
a node with the same name and attributes but with no parent or child nodes """ raise NotImplementedError def hasContent(self): """Return true if the node has children or text, false otherwise """ raise NotImplementedError class ActiveFormattingElements(list): def append(self, node): equalCount = 0 if node != Marker: for element in self[::-1]: if element == Marker: break if self.nodesEqual(element, node): equalCount += 1 if equalCount == 3: self.remove(element) break list.append(self, node) def nodesEqual(self, node1, node2): if not node1.nameTuple == node2.nameTuple: return False if not node1.attributes == node2.attributes: return False return True class TreeBuilder(object): """Base treebuilder implementation documentClass - the class to use for the bottommost node of a document elementClass - the class to use for HTML Elements commentClass - the class to use for comments doctypeClass - the class to use for doctypes """ # Document class documentClass = None # The class to use for creating a node elementClass = None # The class to use for creating comments commentClass = None # The class to use for creating doctypes doctypeClass = None # Fragment class fragmentClass = None def __init__(self, namespaceHTMLElements): if namespaceHTMLElements: self.defaultNamespace = "http://www.w3.org/1999/xhtml" else: self.defaultNamespace = None self.reset() def reset(self): self.openElements = [] self.activeFormattingElements = ActiveFormattingElements() # XXX - rename these to headElement, formElement self.headPointer = None self.formPointer = None self.insertFromTable = False self.document = self.documentClass() def elementInScope(self, target, variant=None): # If we pass a node in we match that. 
if we pass a string # match any node with that name exactNode = hasattr(target, "nameTuple") listElements, invert = listElementsMap[variant] for node in reversed(self.openElements): if (node.name == target and not exactNode or node == target and exactNode): return True elif (invert ^ (node.nameTuple in listElements)): return False assert False # We should never reach this point def reconstructActiveFormattingElements(self): # Within this algorithm the order of steps described in the # specification is not quite the same as the order of steps in the # code. It should still do the same though. # Step 1: stop the algorithm when there's nothing to do. if not self.activeFormattingElements: return # Step 2 and step 3: we start with the last element. So i is -1. i = len(self.activeFormattingElements) - 1 entry = self.activeFormattingElements[i] if entry == Marker or entry in self.openElements: return # Step 6 while entry != Marker and entry not in self.openElements: if i == 0: # This will be reset to 0 below i = -1 break i -= 1 # Step 5: let entry be one earlier in the list. entry = self.activeFormattingElements[i] while True: # Step 7 i += 1 # Step 8 entry = self.activeFormattingElements[i] clone = entry.cloneNode() # Mainly to get a new copy of the attributes # Step 9 element = self.insertElement({"type": "StartTag", "name": clone.name, "namespace": clone.namespace, "data": clone.attributes}) # Step 10 self.activeFormattingElements[i] = element # Step 11 if element == self.activeFormattingElements[-1]: break def clearActiveFormattingElements(self): entry = self.activeFormattingElements.pop() while self.activeFormattingElements and entry != Marker: entry = self.activeFormattingElements.pop() def elementInActiveFormattingElements(self, name): """Check if an element exists between the end of the active formatting elements and the last marker. 
If it does, return it, else return false""" for item in self.activeFormattingElements[::-1]: # Check for Marker first because if it's a Marker it doesn't have a # name attribute. if item == Marker: break elif item.name == name: return item return False def insertRoot(self, token): element = self.createElement(token) self.openElements.append(element) self.document.appendChild(element) def insertDoctype(self, token): name = token["name"] publicId = token["publicId"] systemId = token["systemId"] doctype = self.doctypeClass(name, publicId, systemId) self.document.appendChild(doctype) def insertComment(self, token, parent=None): if parent is None: parent = self.openElements[-1] parent.appendChild(self.commentClass(token["data"])) def createElement(self, token): """Create an element but don't insert it anywhere""" name = token["name"] namespace = token.get("namespace", self.defaultNamespace) element = self.elementClass(name, namespace) element.attributes = token["data"] return element def _getInsertFromTable(self): return self._insertFromTable def _setInsertFromTable(self, value): """Switch the function used to insert an element from the normal one to the misnested table one and back again""" self._insertFromTable = value if value: self.insertElement = self.insertElementTable else: self.insertElement = self.insertElementNormal insertFromTable = property(_getInsertFromTable, _setInsertFromTable) def insertElementNormal(self, token): name = token["name"] assert isinstance(name, text_type), "Element %s not unicode" % name namespace = token.get("namespace", self.defaultNamespace) element = self.elementClass(name, namespace) element.attributes = token["data"] self.openElements[-1].appendChild(element) self.openElements.append(element) return element def insertElementTable(self, token): """Create an element and insert it into the tree""" element = self.createElement(token) if self.openElements[-1].name not in tableInsertModeElements: return self.insertElementNormal(token) 
else: # We should be in the InTable mode. This means we want to do # special magic element rearranging parent, insertBefore = self.getTableMisnestedNodePosition() if insertBefore is None: parent.appendChild(element) else: parent.insertBefore(element, insertBefore) self.openElements.append(element) return element def insertText(self, data, parent=None): """Insert text data.""" if parent is None: parent = self.openElements[-1] if (not self.insertFromTable or (self.insertFromTable and self.openElements[-1].name not in tableInsertModeElements)): parent.insertText(data) else: # We should be in the InTable mode. This means we want to do # special magic element rearranging parent, insertBefore = self.getTableMisnestedNodePosition() parent.insertText(data, insertBefore) def getTableMisnestedNodePosition(self): """Get the foster parent element, and sibling to insert before (or None) when inserting a misnested table node""" # The foster parent element is the one which comes before the most # recently opened table element # XXX - this is really inelegant lastTable = None fosterParent = None insertBefore = None for elm in self.openElements[::-1]: if elm.name == "table": lastTable = elm break if lastTable: # XXX - we should really check that this parent is actually a # node here if lastTable.parent: fosterParent = lastTable.parent insertBefore = lastTable else: fosterParent = self.openElements[ self.openElements.index(lastTable) - 1] else: fosterParent = self.openElements[0] return fosterParent, insertBefore def generateImpliedEndTags(self, exclude=None): name = self.openElements[-1].name # XXX td, th and tr are not actually needed if (name in frozenset(("dd", "dt", "li", "option", "optgroup", "p", "rp", "rt")) and name != exclude): self.openElements.pop() # XXX This is not entirely what the specification says. We should # investigate it more closely. 
self.generateImpliedEndTags(exclude) def getDocument(self): "Return the final tree" return self.document def getFragment(self): "Return the final fragment" # assert self.innerHTML fragment = self.fragmentClass() self.openElements[0].reparentChildren(fragment) return fragment def testSerializer(self, node): """Serialize the subtree of node in the format required by unit tests node - the node from which to start serializing""" raise NotImplementedError
j00bar/ansible
refs/heads/devel
contrib/inventory/ssh_config.py
41
#!/usr/bin/env python # (c) 2014, Tomas Karasek <tomas.karasek@digile.fi> # # This file is part of Ansible. # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # Dynamic inventory script which lets you use aliases from ~/.ssh/config. # # There were some issues with various Paramiko versions. I took a deeper look # and tested heavily. Now, ansible parses this alright with Paramiko versions # 1.7.2 to 1.15.2. # # It prints inventory based on parsed ~/.ssh/config. You can refer to hosts # with their alias, rather than with the IP or hostname. It takes advantage # of the ansible_ssh_{host,port,user,private_key_file}. 
# # If you have in your .ssh/config: # Host git # HostName git.domain.org # User tkarasek # IdentityFile /home/tomk/keys/thekey # # You can do # $ ansible git -m ping # # Example invocation: # ssh_config.py --list # ssh_config.py --host <alias> import argparse import os.path import sys import paramiko try: import json except ImportError: import simplejson as json SSH_CONF = '~/.ssh/config' _key = 'ssh_config' _ssh_to_ansible = [('user', 'ansible_ssh_user'), ('hostname', 'ansible_ssh_host'), ('identityfile', 'ansible_ssh_private_key_file'), ('port', 'ansible_ssh_port')] def get_config(): if not os.path.isfile(os.path.expanduser(SSH_CONF)): return {} with open(os.path.expanduser(SSH_CONF)) as f: cfg = paramiko.SSHConfig() cfg.parse(f) ret_dict = {} for d in cfg._config: if type(d['host']) is list: alias = d['host'][0] else: alias = d['host'] if ('?' in alias) or ('*' in alias): continue _copy = dict(d) del _copy['host'] if 'config' in _copy: ret_dict[alias] = _copy['config'] else: ret_dict[alias] = _copy return ret_dict def print_list(): cfg = get_config() meta = {'hostvars': {}} for alias, attributes in cfg.items(): tmp_dict = {} for ssh_opt, ans_opt in _ssh_to_ansible: if ssh_opt in attributes: # If the attribute is a list, just take the first element. # Private key is returned in a list for some reason. 
attr = attributes[ssh_opt] if type(attr) is list: attr = attr[0] tmp_dict[ans_opt] = attr if tmp_dict: meta['hostvars'][alias] = tmp_dict print(json.dumps({_key: list(set(meta['hostvars'].keys())), '_meta': meta})) def print_host(host): cfg = get_config() print(json.dumps(cfg[host])) def get_args(args_list): parser = argparse.ArgumentParser( description='ansible inventory script parsing .ssh/config') mutex_group = parser.add_mutually_exclusive_group(required=True) help_list = 'list all hosts from .ssh/config inventory' mutex_group.add_argument('--list', action='store_true', help=help_list) help_host = 'display variables for a host' mutex_group.add_argument('--host', help=help_host) return parser.parse_args(args_list) def main(args_list): args = get_args(args_list) if args.list: print_list() if args.host: print_host(args.host) if __name__ == '__main__': main(sys.argv[1:])
fresskarma/tinyos-1.x
refs/heads/master
contrib/handhelds/swtest/RadioTest/test_uart.py
2
#!/usr/bin/python ''' Copyright (c) 2005 Hewlett-Packard Company All rights reserved Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the Hewlett-Packard Company nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. Test the UART Andrew Christian <andrew.christian@hp.com> May 2005 ''' import sys,struct,time,signal from twisted.internet import protocol from twisted.internet.serialport import SerialPort from twisted.internet import reactor VERBOSE = 0 ########################################################################################### class BasicMessageReceiver(protocol.Protocol): """A receiver for Basic Message strings A basic message is terminated by 0x7e. 
All 0x7e and 0x7d characters are replaced by 0x7d 0x20^byte """ MAX_LENGTH = 256 def __init__(self): self.recvd = "" self.synchronized = False self.start_time = time.time() def msgReceived(self,msg): 'Override this' raise NotImplementedError def dataReceived(self,recvd): if VERBOSE > 1: print 'received', ["%02x" % ord(x) for x in recvd] self.recvd = self.recvd + recvd while len(self.recvd): index = self.recvd.find(chr(0x7e)) if index < 0: break msg = self.recvd[:index] self.recvd = self.recvd[index+1:] if not self.synchronized: if len(msg) and VERBOSE: print 'Synchronizing', len(msg), 'characters', ["%02x" % ord(x) for x in msg] self.synchronized = True elif len(msg): vlist = msg.split(chr(0x7d)) if VERBOSE > 2: print 'vlist', vlist try: msg = vlist[0] + ''.join([chr(ord(v[0]) ^ 0x20) + v[1:] for v in vlist[1:]]) self.msgReceived(msg) except Exception, e: # A bad packet can have two 'x7d' bytes in a row if VERBOSE: print 'Decoding error', e, vlist if len(self.recvd) > BasicMessageReceiver.MAX_LENGTH: self.recvd = '' self.synchronized = False print 'Lost synchronization' def sendMessage(self,data): 'Send an encoded string' if VERBOSE: print 'Raw send:', ":".join(["%02x" % ord(x) for x in data]) msg = chr(0x7e) escape_list = (chr(0x7d), chr(0x7e)) for d in data: if d in escape_list: msg += chr(0x7d) + chr(ord(d) ^ 0x20) else: msg += d msg += chr(0x7e) if VERBOSE > 1: print 'Writing', ":".join(["%02x" % ord(x) for x in msg]) self.transport.write(msg) ########################################################################################### def to_hex(data): return ":".join(["%02x" % ord(x) for x in data]) class Transmitter(BasicMessageReceiver): def __init__(self,config,pkt): BasicMessageReceiver.__init__(self) self.pkt = pkt self._sent = 0 self._received = 0 self._bad = 0 def dump(self): print "Lost %d of %d received [%d sent]" % (self._bad, self._received, self._sent) def send_packet(self): self._sent += 1 self.sendMessage(self.pkt) self.transport.flushOutput() if 
VERBOSE: print 'Sent message', to_hex(self.pkt) def msgReceived(self,msg): if len(msg) <= 0: return self._received += 1 try: if VERBOSE: print to_hex(msg) if msg != self.pkt: self._bad += 1 print 'Error: ', to_hex(msg), "(%d of %d [%d])" % (self._bad, self._received, self._sent) except Exception, e: print e self.send_packet() def connectionMade(self): self.send_packet() def connectionLost(self): print 'Lost' ########################################################################################### def usage(dict): print """ Usage: test_uart.py [OPTIONS] Valid options are: -v, --verbose May be repeated for more detail -h, --help This help -d, --dev=DEVICE Use TTY device DEVICE (%(dev)s) -b, --baud=RATE Set baud rate to RATE (%(baudrate)d) """ % dict sys.exit(0) if __name__ == '__main__': import getopt config = { 'dev' : '/dev/ttyUSB0', 'baudrate': 57600 } try: (options, argv) = getopt.getopt(sys.argv[1:], 'vhd:b:', ['verbose', 'help', 'dev=', 'baudrate=']) except Exception, e: print e usage(config) for (k,v) in options: if k in ('-v', '--verbose'): VERBOSE += 1 elif k in ('-h', '--help'): usage(config) elif k in ('-d', '--dev'): config['dev'] = v elif k in ('-b', '--baudrate'): config['baudrate'] = int(v) else: usage(config) pkt = "1234567890" t = Transmitter(config,pkt) SerialPort(t, config['dev'], reactor, baudrate=config['baudrate']) reactor.run() t.dump()
codilime/cloudify-plugins-common
refs/heads/master
cloudify/tests/test_lifecycle_retry.py
2
######## # Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # * See the License for the specific language governing permissions and # * limitations under the License. import testtools from cloudify import decorators from cloudify import context from cloudify import ctx from cloudify.test_utils import workflow_test invocations = [] @decorators.operation def operation(descriptor, **_): current_op = ctx.operation.name.split('.')[-1] if ctx.type == context.NODE_INSTANCE: node = ctx.node instance = ctx.instance else: node = ctx.source.node instance = ctx.target.instance runtime_properties = instance.runtime_properties invocations.append((node.id, current_op)) state = runtime_properties.get('state', {}) if node.id not in state: state[node.id] = {} node_state = state[node.id] if current_op not in node_state: node_state[current_op] = 0 op_state = node_state[current_op] node_descriptor = descriptor.get(node.id, {}) op_descriptor = node_descriptor.get(current_op, 0) if op_state < op_descriptor: ctx.operation.retry() op_state += 1 node_state[current_op] = op_state runtime_properties['state'] = state class TaskLifecycleRetryTests(testtools.TestCase): blueprint_path = 'resources/blueprints/test-lifecycle-retry-blueprint.yaml' inputs = lambda node, op, count: {'descriptor': {node: {op: count}}} def setUp(self): super(TaskLifecycleRetryTests, self).setUp() self.addCleanup(self.cleanup) def cleanup(self): global invocations invocations = [] def _run(self, env, subgraph_retries=0, workflow='install'): env.execute(workflow, 
task_retries=1, task_retry_interval=0, subgraph_retries=subgraph_retries) @workflow_test(blueprint_path, inputs=inputs('node1', 'configure', 2)) def test_retry_lifecycle(self, env): self._run(env, subgraph_retries=1) self.assertEqual(invocations, [ ('node1', 'create'), ('node1', 'configure'), ('node1', 'configure'), ('node1', 'stop'), ('node1', 'delete'), ('node1', 'create'), ('node1', 'configure'), ('node1', 'start'), ('node2', 'create'), ('node2', 'preconfigure'), ('node2', 'configure'), ('node2', 'postconfigure'), ('node2', 'start'), ('node2', 'establish'), ]) @workflow_test(blueprint_path, inputs=inputs('node2', 'postconfigure', 2)) def test_retry_lifecycle_2(self, env): self._run(env, subgraph_retries=1) self.assertEqual(invocations, [ ('node1', 'create'), ('node1', 'configure'), ('node1', 'start'), ('node2', 'create'), ('node2', 'preconfigure'), ('node2', 'configure'), ('node2', 'postconfigure'), ('node2', 'postconfigure'), ('node2', 'stop'), ('node2', 'unlink'), ('node2', 'delete'), ('node2', 'create'), ('node2', 'preconfigure'), ('node2', 'configure'), ('node2', 'postconfigure'), ('node2', 'start'), ('node2', 'establish'), ]) @workflow_test(blueprint_path, inputs=inputs('node1', 'configure', 4)) def test_retry_lifecycle3(self, env): self._run(env, subgraph_retries=2) self.assertEqual(invocations, [ ('node1', 'create'), ('node1', 'configure'), ('node1', 'configure'), ('node1', 'stop'), ('node1', 'delete'), ('node1', 'create'), ('node1', 'configure'), ('node1', 'configure'), ('node1', 'stop'), ('node1', 'delete'), ('node1', 'create'), ('node1', 'configure'), ('node1', 'start'), ('node2', 'create'), ('node2', 'preconfigure'), ('node2', 'configure'), ('node2', 'postconfigure'), ('node2', 'start'), ('node2', 'establish'), ]) @workflow_test(blueprint_path, inputs=inputs('node1', 'configure', 4)) def test_retry_lifecycle_failure(self, env): e = self.assertRaises(RuntimeError, self._run, env, subgraph_retries=1) self.assertIn('test_lifecycle_retry.operation', 
str(e)) self.assertEqual(invocations, [ ('node1', 'create'), ('node1', 'configure'), ('node1', 'configure'), ('node1', 'stop'), ('node1', 'delete'), ('node1', 'create'), ('node1', 'configure'), ('node1', 'configure'), ]) @workflow_test(blueprint_path, inputs=inputs('node1', 'configure', 6)) def test_retry_lifecycle_failure2(self, env): e = self.assertRaises(RuntimeError, self._run, env, subgraph_retries=2) self.assertIn('test_lifecycle_retry.operation', str(e)) self.assertEqual(invocations, [ ('node1', 'create'), ('node1', 'configure'), ('node1', 'configure'), ('node1', 'stop'), ('node1', 'delete'), ('node1', 'create'), ('node1', 'configure'), ('node1', 'configure'), ('node1', 'stop'), ('node1', 'delete'), ('node1', 'create'), ('node1', 'configure'), ('node1', 'configure'), ]) @workflow_test(blueprint_path, inputs=inputs('node2', 'stop', 6)) def test_retry_lifecycle_in_uninstall(self, env): e = self.assertRaises(RuntimeError, self._run, env, subgraph_retries=0, workflow='uninstall') self.assertIn('test_lifecycle_retry.operation', str(e)) self.assertEqual(invocations, [ ('node2', 'stop'), ('node2', 'stop'), ]) @workflow_test(blueprint_path, inputs=inputs('node2', 'stop', 6)) def test_retry_lifecycle_in_uninstall_2(self, env): e = self.assertRaises(RuntimeError, self._run, env, subgraph_retries=1, workflow='uninstall') self.assertIn('test_lifecycle_retry.operation', str(e)) self.assertEqual(invocations, [ ('node2', 'stop'), ('node2', 'stop'), ])
jalavik/invenio
refs/heads/master
invenio/modules/jsonalchemy/jsonext/engines/mongodb_pymongo.py
14
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2013, 2014 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.

"""MongoDB Storage engine implementation."""

import pymongo

from itertools import imap

from invenio.modules.jsonalchemy.storage import Storage


class MongoDBStorage(Storage):

    """Storage engine for MongoDB using the driver pymongo.

    Each instance maps one JSONAlchemy *model* to one MongoDB collection of
    the same name; the documents' ``_id`` field doubles as the storage id.
    """

    def __init__(self, model, **kwards):
        """Open a MongoDB connection and select the model's collection.

        See also
        :meth:`~invenio.modules.jsonalchemy.storage.Storage.__init__`.

        :param model: model name; also used as the collection name.
        :param kwards: optional connection settings — ``host`` (default
            ``'localhost'``), ``port`` (default ``27017``) and ``database``
            (default ``'invenio'``).
        """
        # NOTE(review): 'kwards' looks like a typo for 'kwargs'; left
        # unchanged because renaming it is a code change, not a doc change.
        self.model = model
        host = kwards.get('host', 'localhost')
        port = kwards.get('port', 27017)
        database = kwards.get('database', 'invenio')
        self.__connection = pymongo.MongoClient(host=host, port=port)
        self.__database = self.__connection[database]
        # One collection per model; every document of this storage lives here.
        self.__collection = self.__database[model]

    def save_one(self, json, id=None):
        """Insert one document, optionally forcing its ``_id``.

        See :meth:`~invenio.modules.jsonalchemy.storage.Storage.save_one`.
        """
        if id is not None:
            json['_id'] = id
        return self.__collection.insert(json)

    def save_many(self, jsons, ids=None):
        """Insert many documents, optionally zipping in explicit ids.

        See :meth:`~invenio.modules.jsonalchemy.storage.Storage.save_many`.
        """
        if ids is not None:
            # Copy each id into its document's '_id' before insertion.
            def add_id(t):
                t[0]['_id'] = t[1]
                return t[0]
            jsons = imap(add_id, zip(jsons, ids))
        # continue_on_error: keep inserting the rest even if one document
        # fails (e.g. duplicate key).
        return self.__collection.insert(jsons, continue_on_error=True)

    def update_one(self, json, id=None):
        """Replace one full document (upsert via pymongo ``save``).

        See :meth:`~invenio.modules.jsonalchemy.storage.Storage.update_one`.
        """
        # FIXME: what if we only get the fields that have changed? 'save'
        # replaces the whole document rather than patching it.
        if id is not None:
            json['_id'] = id
        return self.__collection.save(json)

    def update_many(self, jsons, ids=None):
        """Replace many full documents, optionally zipping in explicit ids.

        See :meth:`~invenio.modules.jsonalchemy.storage.Storage.update_many`.
        """
        if ids is not None:
            def add_id(t):
                t[0]['_id'] = t[1]
                return t[0]
            jsons = imap(add_id, zip(jsons, ids))
        return map(self.__collection.save, jsons)

    def get_one(self, id):
        """Fetch one document by its ``_id``.

        See :meth:`~invenio.modules.jsonalchemy.storage.Storage.get_one`.
        """
        return self.__collection.find_one(id)

    def get_many(self, ids):
        """Fetch all documents whose ``_id`` is in *ids* (returns a cursor).

        See :meth:`~invenio.modules.jsonalchemy.storage.Storage.get_many`.
        """
        return self.__collection.find({'_id': {'$in': ids}})

    def get_field_values(self, ids, field, repetitive_values=True, count=False,
                         include_id=False, split_by=0):
        """Not implemented for this engine.

        See
        :meth:`~invenio.modules.jsonalchemy.storage.Storage.get_field_values`.
        """
        raise NotImplementedError()

    def get_fields_values(self, ids, fields, repetitive_values=True,
                          count=False, include_id=False, split_by=0):
        """Not implemented for this engine.

        See
        :meth:`~invenio.modules.jsonalchemy.storage.Storage.get_fields_values`.
        """
        raise NotImplementedError()

    def search(self, query):
        """Run a raw MongoDB query against the collection (returns a cursor).

        See :meth:`~invenio.modules.jsonalchemy.storage.Storage.search`.
        """
        return self.__collection.find(query)

    def create(self):
        """Verify the storage starts empty.

        See :meth:`~invenio.modules.jsonalchemy.storage.Storage.create`.
        """
        # NOTE(review): MongoDB creates collections lazily, so there is
        # nothing to create here; the assert only checks emptiness and is
        # stripped under ``python -O`` — confirm this is intentional.
        assert self.__collection.count() == 0

    def drop(self):
        """Drop the model's collection and all of its documents.

        See :meth:`~invenio.modules.jsonalchemy.storage.Storage.drop`.
        """
        self.__collection.drop()
vigilv/scikit-learn
refs/heads/master
examples/linear_model/plot_sparse_recovery.py
243
""" ============================================================ Sparse recovery: feature selection for sparse linear models ============================================================ Given a small number of observations, we want to recover which features of X are relevant to explain y. For this :ref:`sparse linear models <l1_feature_selection>` can outperform standard statistical tests if the true model is sparse, i.e. if a small fraction of the features are relevant. As detailed in :ref:`the compressive sensing notes <compressive_sensing>`, the ability of L1-based approach to identify the relevant variables depends on the sparsity of the ground truth, the number of samples, the number of features, the conditioning of the design matrix on the signal subspace, the amount of noise, and the absolute value of the smallest non-zero coefficient [Wainwright2006] (http://statistics.berkeley.edu/tech-reports/709.pdf). Here we keep all parameters constant and vary the conditioning of the design matrix. For a well-conditioned design matrix (small mutual incoherence) we are exactly in compressive sensing conditions (i.i.d Gaussian sensing matrix), and L1-recovery with the Lasso performs very well. For an ill-conditioned matrix (high mutual incoherence), regressors are very correlated, and the Lasso randomly selects one. However, randomized-Lasso can recover the ground truth well. In each situation, we first vary the alpha parameter setting the sparsity of the estimated model and look at the stability scores of the randomized Lasso. This analysis, knowing the ground truth, shows an optimal regime in which relevant features stand out from the irrelevant ones. If alpha is chosen too small, non-relevant variables enter the model. On the opposite, if alpha is selected too large, the Lasso is equivalent to stepwise regression, and thus brings no advantage over a univariate F-test. 
In a second time, we set alpha and compare the performance of different feature selection methods, using the area under curve (AUC) of the precision-recall. """ print(__doc__) # Author: Alexandre Gramfort and Gael Varoquaux # License: BSD 3 clause import warnings import matplotlib.pyplot as plt import numpy as np from scipy import linalg from sklearn.linear_model import (RandomizedLasso, lasso_stability_path, LassoLarsCV) from sklearn.feature_selection import f_regression from sklearn.preprocessing import StandardScaler from sklearn.metrics import auc, precision_recall_curve from sklearn.ensemble import ExtraTreesRegressor from sklearn.utils.extmath import pinvh from sklearn.utils import ConvergenceWarning def mutual_incoherence(X_relevant, X_irelevant): """Mutual incoherence, as defined by formula (26a) of [Wainwright2006]. """ projector = np.dot(np.dot(X_irelevant.T, X_relevant), pinvh(np.dot(X_relevant.T, X_relevant))) return np.max(np.abs(projector).sum(axis=1)) for conditioning in (1, 1e-4): ########################################################################### # Simulate regression data with a correlated design n_features = 501 n_relevant_features = 3 noise_level = .2 coef_min = .2 # The Donoho-Tanner phase transition is around n_samples=25: below we # will completely fail to recover in the well-conditioned case n_samples = 25 block_size = n_relevant_features rng = np.random.RandomState(42) # The coefficients of our model coef = np.zeros(n_features) coef[:n_relevant_features] = coef_min + rng.rand(n_relevant_features) # The correlation of our design: variables correlated by blocs of 3 corr = np.zeros((n_features, n_features)) for i in range(0, n_features, block_size): corr[i:i + block_size, i:i + block_size] = 1 - conditioning corr.flat[::n_features + 1] = 1 corr = linalg.cholesky(corr) # Our design X = rng.normal(size=(n_samples, n_features)) X = np.dot(X, corr) # Keep [Wainwright2006] (26c) constant X[:n_relevant_features] /= np.abs( 
linalg.svdvals(X[:n_relevant_features])).max() X = StandardScaler().fit_transform(X.copy()) # The output variable y = np.dot(X, coef) y /= np.std(y) # We scale the added noise as a function of the average correlation # between the design and the output variable y += noise_level * rng.normal(size=n_samples) mi = mutual_incoherence(X[:, :n_relevant_features], X[:, n_relevant_features:]) ########################################################################### # Plot stability selection path, using a high eps for early stopping # of the path, to save computation time alpha_grid, scores_path = lasso_stability_path(X, y, random_state=42, eps=0.05) plt.figure() # We plot the path as a function of alpha/alpha_max to the power 1/3: the # power 1/3 scales the path less brutally than the log, and enables to # see the progression along the path hg = plt.plot(alpha_grid[1:] ** .333, scores_path[coef != 0].T[1:], 'r') hb = plt.plot(alpha_grid[1:] ** .333, scores_path[coef == 0].T[1:], 'k') ymin, ymax = plt.ylim() plt.xlabel(r'$(\alpha / \alpha_{max})^{1/3}$') plt.ylabel('Stability score: proportion of times selected') plt.title('Stability Scores Path - Mutual incoherence: %.1f' % mi) plt.axis('tight') plt.legend((hg[0], hb[0]), ('relevant features', 'irrelevant features'), loc='best') ########################################################################### # Plot the estimated stability scores for a given alpha # Use 6-fold cross-validation rather than the default 3-fold: it leads to # a better choice of alpha: # Stop the user warnings outputs- they are not necessary for the example # as it is specifically set up to be challenging. 
with warnings.catch_warnings(): warnings.simplefilter('ignore', UserWarning) warnings.simplefilter('ignore', ConvergenceWarning) lars_cv = LassoLarsCV(cv=6).fit(X, y) # Run the RandomizedLasso: we use a paths going down to .1*alpha_max # to avoid exploring the regime in which very noisy variables enter # the model alphas = np.linspace(lars_cv.alphas_[0], .1 * lars_cv.alphas_[0], 6) clf = RandomizedLasso(alpha=alphas, random_state=42).fit(X, y) trees = ExtraTreesRegressor(100).fit(X, y) # Compare with F-score F, _ = f_regression(X, y) plt.figure() for name, score in [('F-test', F), ('Stability selection', clf.scores_), ('Lasso coefs', np.abs(lars_cv.coef_)), ('Trees', trees.feature_importances_), ]: precision, recall, thresholds = precision_recall_curve(coef != 0, score) plt.semilogy(np.maximum(score / np.max(score), 1e-4), label="%s. AUC: %.3f" % (name, auc(recall, precision))) plt.plot(np.where(coef != 0)[0], [2e-4] * n_relevant_features, 'mo', label="Ground truth") plt.xlabel("Features") plt.ylabel("Score") # Plot only the 100 first coefficients plt.xlim(0, 100) plt.legend(loc='best') plt.title('Feature selection scores - Mutual incoherence: %.1f' % mi) plt.show()
xpol/gyp
refs/heads/master
test/library/gyptest-shared-obj-install-path.py
53
#!/usr/bin/env python # Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Verifies that .so files that are order only dependencies are specified by their install location rather than by their alias. """ # Python 2.5 needs this for the with statement. from __future__ import with_statement import os import TestGyp test = TestGyp.TestGyp(formats=['make']) test.run_gyp('shared_dependency.gyp', chdir='src') test.relocate('src', 'relocate/src') test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src') makefile_path = 'relocate/src/Makefile' with open(makefile_path) as makefile: make_contents = makefile.read() # If we remove the code to generate lib1, Make should still be able # to build lib2 since lib1.so already exists. make_contents = make_contents.replace('include lib1.target.mk', '') with open(makefile_path, 'w') as makefile: makefile.write(make_contents) test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src') test.pass_test()
mislavcimpersak/django-daguerre
refs/heads/master
daguerre/migrations/__init__.py
12133432
rasjani/xbmc-moontvfi
refs/heads/master
resources/__init__.py
12133432
remind101/stacker_blueprints
refs/heads/master
tests/__init__.py
12133432
Orav/kbengine
refs/heads/master
kbe/res/scripts/common/Lib/test/test_ctypes.py
2
import unittest

# Re-exporting ``load_tests`` at module level hooks unittest's load_tests
# protocol: when this module is run, test discovery is delegated to the
# ``ctypes.test`` package, which collects the full ctypes test suite.
from ctypes.test import load_tests

if __name__ == "__main__":
    unittest.main()
qzio/compose
refs/heads/master
tests/integration/state_test.py
27
from __future__ import unicode_literals import tempfile import shutil import os from compose import config from compose.project import Project from compose.const import LABEL_CONFIG_HASH from .testcases import DockerClientTestCase class ProjectTestCase(DockerClientTestCase): def run_up(self, cfg, **kwargs): kwargs.setdefault('timeout', 1) project = self.make_project(cfg) project.up(**kwargs) return set(project.containers(stopped=True)) def make_project(self, cfg): return Project.from_dicts( name='composetest', client=self.client, service_dicts=config.load(config.ConfigDetails(cfg, 'working_dir', None)) ) class BasicProjectTest(ProjectTestCase): def setUp(self): super(BasicProjectTest, self).setUp() self.cfg = { 'db': {'image': 'busybox:latest'}, 'web': {'image': 'busybox:latest'}, } def test_no_change(self): old_containers = self.run_up(self.cfg) self.assertEqual(len(old_containers), 2) new_containers = self.run_up(self.cfg) self.assertEqual(len(new_containers), 2) self.assertEqual(old_containers, new_containers) def test_partial_change(self): old_containers = self.run_up(self.cfg) old_db = [c for c in old_containers if c.name_without_project == 'db_1'][0] old_web = [c for c in old_containers if c.name_without_project == 'web_1'][0] self.cfg['web']['command'] = '/bin/true' new_containers = self.run_up(self.cfg) self.assertEqual(len(new_containers), 2) preserved = list(old_containers & new_containers) self.assertEqual(preserved, [old_db]) removed = list(old_containers - new_containers) self.assertEqual(removed, [old_web]) created = list(new_containers - old_containers) self.assertEqual(len(created), 1) self.assertEqual(created[0].name_without_project, 'web_1') self.assertEqual(created[0].get('Config.Cmd'), ['/bin/true']) def test_all_change(self): old_containers = self.run_up(self.cfg) self.assertEqual(len(old_containers), 2) self.cfg['web']['command'] = '/bin/true' self.cfg['db']['command'] = '/bin/true' new_containers = self.run_up(self.cfg) 
self.assertEqual(len(new_containers), 2) unchanged = old_containers & new_containers self.assertEqual(len(unchanged), 0) new = new_containers - old_containers self.assertEqual(len(new), 2) class ProjectWithDependenciesTest(ProjectTestCase): def setUp(self): super(ProjectWithDependenciesTest, self).setUp() self.cfg = { 'db': { 'image': 'busybox:latest', 'command': 'tail -f /dev/null', }, 'web': { 'image': 'busybox:latest', 'command': 'tail -f /dev/null', 'links': ['db'], }, 'nginx': { 'image': 'busybox:latest', 'command': 'tail -f /dev/null', 'links': ['web'], }, } def test_up(self): containers = self.run_up(self.cfg) self.assertEqual( set(c.name_without_project for c in containers), set(['db_1', 'web_1', 'nginx_1']), ) def test_change_leaf(self): old_containers = self.run_up(self.cfg) self.cfg['nginx']['environment'] = {'NEW_VAR': '1'} new_containers = self.run_up(self.cfg) self.assertEqual( set(c.name_without_project for c in new_containers - old_containers), set(['nginx_1']), ) def test_change_middle(self): old_containers = self.run_up(self.cfg) self.cfg['web']['environment'] = {'NEW_VAR': '1'} new_containers = self.run_up(self.cfg) self.assertEqual( set(c.name_without_project for c in new_containers - old_containers), set(['web_1', 'nginx_1']), ) def test_change_root(self): old_containers = self.run_up(self.cfg) self.cfg['db']['environment'] = {'NEW_VAR': '1'} new_containers = self.run_up(self.cfg) self.assertEqual( set(c.name_without_project for c in new_containers - old_containers), set(['db_1', 'web_1', 'nginx_1']), ) def test_change_root_no_recreate(self): old_containers = self.run_up(self.cfg) self.cfg['db']['environment'] = {'NEW_VAR': '1'} new_containers = self.run_up(self.cfg, allow_recreate=False) self.assertEqual(new_containers - old_containers, set()) def converge(service, allow_recreate=True, force_recreate=False, do_build=True): """ If a container for this service doesn't exist, create and start one. 
If there are any, stop them, create+start new ones, and remove the old containers. """ plan = service.convergence_plan( allow_recreate=allow_recreate, force_recreate=force_recreate, ) return service.execute_convergence_plan( plan, do_build=do_build, timeout=1, ) class ServiceStateTest(DockerClientTestCase): """Test cases for Service.convergence_plan.""" def test_trigger_create(self): web = self.create_service('web') self.assertEqual(('create', []), web.convergence_plan()) def test_trigger_noop(self): web = self.create_service('web') container = web.create_container() web.start() web = self.create_service('web') self.assertEqual(('noop', [container]), web.convergence_plan()) def test_trigger_start(self): options = dict(command=["top"]) web = self.create_service('web', **options) web.scale(2) containers = web.containers(stopped=True) containers[0].stop() containers[0].inspect() self.assertEqual([c.is_running for c in containers], [False, True]) web = self.create_service('web', **options) self.assertEqual( ('start', containers[0:1]), web.convergence_plan(), ) def test_trigger_recreate_with_config_change(self): web = self.create_service('web', command=["top"]) container = web.create_container() web = self.create_service('web', command=["top", "-d", "1"]) self.assertEqual(('recreate', [container]), web.convergence_plan()) def test_trigger_recreate_with_nonexistent_image_tag(self): web = self.create_service('web', image="busybox:latest") container = web.create_container() web = self.create_service('web', image="nonexistent-image") self.assertEqual(('recreate', [container]), web.convergence_plan()) def test_trigger_recreate_with_image_change(self): repo = 'composetest_myimage' tag = 'latest' image = '{}:{}'.format(repo, tag) image_id = self.client.images(name='busybox')[0]['Id'] self.client.tag(image_id, repository=repo, tag=tag) try: web = self.create_service('web', image=image) container = web.create_container() # update the image c = self.client.create_container(image, 
['touch', '/hello.txt']) self.client.commit(c, repository=repo, tag=tag) self.client.remove_container(c) web = self.create_service('web', image=image) self.assertEqual(('recreate', [container]), web.convergence_plan()) finally: self.client.remove_image(image) def test_trigger_recreate_with_build(self): context = tempfile.mkdtemp() base_image = "FROM busybox\nLABEL com.docker.compose.test_image=true\n" try: dockerfile = os.path.join(context, 'Dockerfile') with open(dockerfile, 'w') as f: f.write(base_image) web = self.create_service('web', build=context) container = web.create_container() with open(dockerfile, 'w') as f: f.write(base_image + 'CMD echo hello world\n') web.build() web = self.create_service('web', build=context) self.assertEqual(('recreate', [container]), web.convergence_plan()) finally: shutil.rmtree(context) class ConfigHashTest(DockerClientTestCase): def test_no_config_hash_when_one_off(self): web = self.create_service('web') container = web.create_container(one_off=True) self.assertNotIn(LABEL_CONFIG_HASH, container.labels) def test_no_config_hash_when_overriding_options(self): web = self.create_service('web') container = web.create_container(environment={'FOO': '1'}) self.assertNotIn(LABEL_CONFIG_HASH, container.labels) def test_config_hash_with_custom_labels(self): web = self.create_service('web', labels={'foo': '1'}) container = converge(web)[0] self.assertIn(LABEL_CONFIG_HASH, container.labels) self.assertIn('foo', container.labels) def test_config_hash_sticks_around(self): web = self.create_service('web', command=["top"]) container = converge(web)[0] self.assertIn(LABEL_CONFIG_HASH, container.labels) web = self.create_service('web', command=["top", "-d", "1"]) container = converge(web)[0] self.assertIn(LABEL_CONFIG_HASH, container.labels)
cgwalters/anaconda
refs/heads/master
tests/pyanaconda_tests/iutil_test.py
7
# -*- coding: utf-8 -*- # # Copyright (C) 2013 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # # Red Hat Author(s): Vratislav Podzimek <vpodzime@redhat.com> # Martin Kolman <mkolman@redhat.com> from pyanaconda import iutil import unittest import os import tempfile import signal import shutil from test_constants import ANACONDA_TEST_DIR from timer import timer class UpcaseFirstLetterTests(unittest.TestCase): def setUp(self): # create the directory used for file/folder tests if not os.path.exists(ANACONDA_TEST_DIR): os.makedirs(ANACONDA_TEST_DIR) def tearDown(self): # remove the testing directory shutil.rmtree(ANACONDA_TEST_DIR) def upcase_first_letter_test(self): """Upcasing first letter should work as expected.""" # no change self.assertEqual(iutil.upcase_first_letter("Czech RePuBliC"), "Czech RePuBliC") # simple case self.assertEqual(iutil.upcase_first_letter("czech"), "Czech") # first letter only self.assertEqual(iutil.upcase_first_letter("czech republic"), "Czech republic") # no lowercase self.assertEqual(iutil.upcase_first_letter("czech Republic"), "Czech Republic") class 
RunProgramTests(unittest.TestCase): def run_program_test(self): """Test the _run_program method.""" # correct calling should return rc==0 self.assertEqual(iutil._run_program(['ls'])[0], 0) # incorrect calling should return rc!=0 self.assertNotEqual(iutil._run_program(['ls', '--asdasd'])[0], 0) # check if an int is returned for bot success and error self.assertIsInstance(iutil._run_program(['ls'])[0], int) self.assertIsInstance(iutil._run_program(['ls', '--asdasd'])[0], int) # error should raise OSError with self.assertRaises(OSError): iutil._run_program(['asdasdadasd']) def exec_with_redirect_test(self): """Test execWithRedirect.""" # correct calling should return rc==0 self.assertEqual(iutil.execWithRedirect('ls', []), 0) # incorrect calling should return rc!=0 self.assertNotEqual(iutil.execWithRedirect('ls', ['--asdasd']), 0) def exec_with_capture_test(self): """Test execWithCapture.""" # check some output is returned self.assertGreater(len(iutil.execWithCapture('ls', ['--help'])), 0) # check no output is returned self.assertEqual(len(iutil.execWithCapture('true', [])), 0) def exec_with_capture_no_stderr_test(self): """Test execWithCapture with no stderr""" with tempfile.NamedTemporaryFile() as testscript: testscript.write("""#!/bin/sh echo "output" echo "error" >&2 """) testscript.flush() # check that only the output is captured self.assertEqual( iutil.execWithCapture("/bin/sh", [testscript.name], filter_stderr=True), "output\n") # check that both output and error are captured self.assertEqual(iutil.execWithCapture("/bin/sh", [testscript.name]), "output\nerror\n") def exec_readlines_test(self): """Test execReadlines.""" # test no lines are returned self.assertEqual(list(iutil.execReadlines("true", [])), []) # test some lines are returned self.assertGreater(len(list(iutil.execReadlines("ls", ["--help"]))), 0) # check that it always returns an iterator for both # if there is some output and if there isn't any self.assertTrue(hasattr(iutil.execReadlines("ls", 
["--help"]), "__iter__")) self.assertTrue(hasattr(iutil.execReadlines("true", []), "__iter__")) def exec_readlines_test_normal_output(self): """Test the output of execReadlines.""" # Test regular-looking output with tempfile.NamedTemporaryFile() as testscript: testscript.write("""#!/bin/sh echo "one" echo "two" echo "three" exit 0 """) testscript.flush() with timer(5): rl_iterator = iutil.execReadlines("/bin/sh", [testscript.name]) self.assertEqual(rl_iterator.next(), "one") self.assertEqual(rl_iterator.next(), "two") self.assertEqual(rl_iterator.next(), "three") self.assertRaises(StopIteration, rl_iterator.next) # Test output with no end of line with tempfile.NamedTemporaryFile() as testscript: testscript.write("""#!/bin/sh echo "one" echo "two" echo -n "three" exit 0 """) testscript.flush() with timer(5): rl_iterator = iutil.execReadlines("/bin/sh", [testscript.name]) self.assertEqual(rl_iterator.next(), "one") self.assertEqual(rl_iterator.next(), "two") self.assertEqual(rl_iterator.next(), "three") self.assertRaises(StopIteration, rl_iterator.next) def exec_readlines_test_exits(self): """Test execReadlines in different child exit situations.""" # Tests that exit on signal will raise OSError once output # has been consumed, otherwise the test will exit normally. 
# Test a normal, non-0 exit with tempfile.NamedTemporaryFile() as testscript: testscript.write("""#!/bin/sh echo "one" echo "two" echo "three" exit 1 """) testscript.flush() with timer(5): rl_iterator = iutil.execReadlines("/bin/sh", [testscript.name]) self.assertEqual(rl_iterator.next(), "one") self.assertEqual(rl_iterator.next(), "two") self.assertEqual(rl_iterator.next(), "three") self.assertRaises(OSError, rl_iterator.next) # Test exit on signal with tempfile.NamedTemporaryFile() as testscript: testscript.write("""#!/bin/sh echo "one" echo "two" echo "three" kill -TERM $$ """) testscript.flush() with timer(5): rl_iterator = iutil.execReadlines("/bin/sh", [testscript.name]) self.assertEqual(rl_iterator.next(), "one") self.assertEqual(rl_iterator.next(), "two") self.assertEqual(rl_iterator.next(), "three") self.assertRaises(OSError, rl_iterator.next) # Repeat the above two tests, but exit before a final newline with tempfile.NamedTemporaryFile() as testscript: testscript.write("""#!/bin/sh echo "one" echo "two" echo -n "three" exit 1 """) testscript.flush() with timer(5): rl_iterator = iutil.execReadlines("/bin/sh", [testscript.name]) self.assertEqual(rl_iterator.next(), "one") self.assertEqual(rl_iterator.next(), "two") self.assertEqual(rl_iterator.next(), "three") self.assertRaises(OSError, rl_iterator.next) with tempfile.NamedTemporaryFile() as testscript: testscript.write("""#!/bin/sh echo "one" echo "two" echo -n "three" kill -TERM $$ """) testscript.flush() with timer(5): rl_iterator = iutil.execReadlines("/bin/sh", [testscript.name]) self.assertEqual(rl_iterator.next(), "one") self.assertEqual(rl_iterator.next(), "two") self.assertEqual(rl_iterator.next(), "three") self.assertRaises(OSError, rl_iterator.next) def exec_readlines_test_signals(self): """Test execReadlines and signal receipt.""" # ignored signal old_HUP_handler = signal.signal(signal.SIGHUP, signal.SIG_IGN) try: with tempfile.NamedTemporaryFile() as testscript: testscript.write("""#!/bin/sh 
echo "one" kill -HUP $PPID echo "two" echo -n "three" exit 0 """) testscript.flush() with timer(5): rl_iterator = iutil.execReadlines("/bin/sh", [testscript.name]) self.assertEqual(rl_iterator.next(), "one") self.assertEqual(rl_iterator.next(), "two") self.assertEqual(rl_iterator.next(), "three") self.assertRaises(StopIteration, rl_iterator.next) finally: signal.signal(signal.SIGHUP, old_HUP_handler) # caught signal def _hup_handler(signum, frame): pass old_HUP_handler = signal.signal(signal.SIGHUP, _hup_handler) try: with tempfile.NamedTemporaryFile() as testscript: testscript.write("""#!/bin/sh echo "one" kill -HUP $PPID echo "two" echo -n "three" exit 0 """) testscript.flush() with timer(5): rl_iterator = iutil.execReadlines("/bin/sh", [testscript.name]) self.assertEqual(rl_iterator.next(), "one") self.assertEqual(rl_iterator.next(), "two") self.assertEqual(rl_iterator.next(), "three") self.assertRaises(StopIteration, rl_iterator.next) finally: signal.signal(signal.SIGHUP, old_HUP_handler) def start_program_preexec_fn_test(self): """Test passing preexec_fn to startProgram.""" marker_text = "yo wassup man" # Create a temporary file that will be written before exec with tempfile.NamedTemporaryFile() as testfile: # Write something to testfile to show this method was run def preexec(): # Open a copy of the file here since close_fds has already closed the descriptor testcopy = open(testfile.name, 'w') testcopy.write(marker_text) testcopy.close() with timer(5): # Start a program that does nothing, with a preexec_fn proc = iutil.startProgram(["/bin/true"], preexec_fn=preexec) proc.communicate() # Rewind testfile and look for the text testfile.seek(0, os.SEEK_SET) self.assertEqual(testfile.read(), marker_text) def start_program_stdout_test(self): """Test redirecting stdout with startProgram.""" marker_text = "yo wassup man" # Create a temporary file that will be written by the program with tempfile.NamedTemporaryFile() as testfile: # Open a new copy of the file so that 
the child doesn't close and # delete the NamedTemporaryFile stdout = open(testfile.name, 'w') with timer(5): proc = iutil.startProgram(["/bin/echo", marker_text], stdout=stdout) proc.communicate() # Rewind testfile and look for the text testfile.seek(0, os.SEEK_SET) self.assertEqual(testfile.read().strip(), marker_text) def start_program_reset_handlers_test(self): """Test the reset_handlers parameter of startProgram.""" with tempfile.NamedTemporaryFile() as testscript: testscript.write("""#!/bin/sh # Just hang out and do nothing, forever while true ; do sleep 1 ; done """) testscript.flush() # Start a program with reset_handlers proc = iutil.startProgram(["/bin/sh", testscript.name]) with timer(5): # Kill with SIGPIPE and check that the python's SIG_IGN was not inheritted # The process should die on the signal. proc.send_signal(signal.SIGPIPE) proc.communicate() self.assertEqual(proc.returncode, -(signal.SIGPIPE)) # Start another copy without reset_handlers proc = iutil.startProgram(["/bin/sh", testscript.name], reset_handlers=False) with timer(5): # Kill with SIGPIPE, then SIGTERM, and make sure SIGTERM was the one # that worked. 
proc.send_signal(signal.SIGPIPE) proc.terminate() proc.communicate() self.assertEqual(proc.returncode, -(signal.SIGTERM)) def exec_readlines_auto_kill_test(self): """Test execReadlines with reading only part of the output""" with tempfile.NamedTemporaryFile() as testscript: testscript.write("""#!/bin/sh # Output forever while true; do echo hey done """) testscript.flush() with timer(5): rl_iterator = iutil.execReadlines("/bin/sh", [testscript.name]) # Save the process context proc = rl_iterator._proc # Read two lines worth self.assertEqual(rl_iterator.next(), "hey") self.assertEqual(rl_iterator.next(), "hey") # Delete the iterator and wait for the process to be killed del rl_iterator proc.communicate() # Check that the process is gone self.assertIsNotNone(proc.poll()) def watch_process_test(self): """Test watchProcess""" def test_still_running(): with timer(5): # Run something forever so we can kill it proc = iutil.startProgram(["/bin/sh", "-c", "while true; do sleep 1; done"]) iutil.watchProcess(proc, "test1") proc.kill() # Wait for the SIGCHLD signal.pause() self.assertRaises(iutil.ExitError, test_still_running) # Make sure watchProcess checks that the process has not already exited with timer(5): proc = iutil.startProgram(["true"]) proc.communicate() self.assertRaises(iutil.ExitError, iutil.watchProcess, proc, "test2") class MiscTests(unittest.TestCase): def get_dir_size_test(self): """Test the getDirSize.""" # dev null should have a size == 0 self.assertEqual(iutil.getDirSize('/dev/null'), 0) # incorrect path should also return 0 self.assertEqual(iutil.getDirSize('/dev/null/foo'), 0) # check if an int is always returned self.assertIsInstance(iutil.getDirSize('/dev/null'), int) self.assertIsInstance(iutil.getDirSize('/dev/null/foo'), int) # TODO: mock some dirs and check if their size is # computed correctly def mkdir_chain_test(self): """Test mkdirChain.""" # don't fail if directory path already exists iutil.mkdirChain('/dev/null') iutil.mkdirChain('/') 
iutil.mkdirChain('/tmp') # create a path and test it exists test_folder = "test_mkdir_chain" test_paths = [ "foo", "foo/bar/baz", u"foo/bar/baz", "", "čřščščřščř", u"čřščščřščř", "asdasd asdasd", "! spam" ] # join with the toplevel test folder and the folder for this # test test_paths = [os.path.join(ANACONDA_TEST_DIR, test_folder, p) for p in test_paths] def create_return(path): iutil.mkdirChain(path) return path # create the folders and check that they exist for p in test_paths: self.assertTrue(os.path.exists(create_return(p))) # try to create them again - all the paths should already exist # and the mkdirChain function needs to handle that # without a traceback for p in test_paths: iutil.mkdirChain(p) def get_active_console_test(self): """Test get_active_console.""" # at least check if a string is returned self.assertIsInstance(iutil.get_active_console(), str) def is_console_on_vt_test(self): """Test isConsoleOnVirtualTerminal.""" # at least check if a bool is returned self.assertIsInstance(iutil.isConsoleOnVirtualTerminal(), bool) def parse_nfs_url_test(self): """Test parseNfsUrl.""" # empty NFS url should return 3 blanks self.assertEqual(iutil.parseNfsUrl(""), ("", "", "")) # the string is delimited by :, there is one prefix and 3 parts, # the prefix is discarded and all parts after the 3th part # are also discarded self.assertEqual(iutil.parseNfsUrl("discard:options:host:path"), ("options", "host", "path")) self.assertEqual(iutil.parseNfsUrl("discard:options:host:path:foo:bar"), ("options", "host", "path")) self.assertEqual(iutil.parseNfsUrl(":options:host:path::"), ("options", "host", "path")) self.assertEqual(iutil.parseNfsUrl(":::::"), ("", "", "")) # if there is only prefix & 2 parts, # the two parts are host and path self.assertEqual(iutil.parseNfsUrl("prefix:host:path"), ("", "host", "path")) self.assertEqual(iutil.parseNfsUrl(":host:path"), ("", "host", "path")) self.assertEqual(iutil.parseNfsUrl("::"), ("", "", "")) # if there is only a prefix and 
single part, # the part is the host self.assertEqual(iutil.parseNfsUrl("prefix:host"), ("", "host", "")) self.assertEqual(iutil.parseNfsUrl(":host"), ("", "host", "")) self.assertEqual(iutil.parseNfsUrl(":"), ("", "", "")) def vt_activate_test(self): """Test vtActivate.""" # pylint: disable=no-member def raise_os_error(*args, **kwargs): raise OSError _execWithRedirect = iutil.vtActivate.func_globals['execWithRedirect'] try: # chvt does not exist on all platforms # and the function needs to correctly survie that iutil.vtActivate.func_globals['execWithRedirect'] = raise_os_error self.assertEqual(iutil.vtActivate(2), False) finally: iutil.vtActivate.func_globals['execWithRedirect'] = _execWithRedirect def get_deep_attr_test(self): """Test getdeepattr.""" # pylint: disable=attribute-defined-outside-init class O(object): pass a = O() a.b = O() a.b1 = 1 a.b.c = 2 a.b.c1 = "ř" self.assertEqual(iutil.getdeepattr(a, "b1"), 1) self.assertEqual(iutil.getdeepattr(a, "b.c"), 2) self.assertEqual(iutil.getdeepattr(a, "b.c1"), "ř") # be consistent with getattr and throw # AttributeError if non-existent attribute is requested with self.assertRaises(AttributeError): iutil.getdeepattr(a, "") with self.assertRaises(AttributeError): iutil.getdeepattr(a, "b.c.d") def set_deep_attr_test(self): """Test setdeepattr.""" # pylint: disable=attribute-defined-outside-init # pylint: disable=no-member class O(object): pass a = O() a.b = O() a.b1 = 1 a.b.c = O() a.b.c1 = "ř" # set to a new attribute iutil.setdeepattr(a, "b.c.d", True) self.assertEqual(a.b.c.d, True) # override existing attribute iutil.setdeepattr(a, "b.c", 1234) self.assertEqual(a.b.c, 1234) # "" is actually a valid attribute name # that can be only accessed by getattr iutil.setdeepattr(a, "", 1234) self.assertEqual(getattr(a, ""), 1234) iutil.setdeepattr(a, "b.", 123) self.assertEqual(iutil.getdeepattr(a, "b."), 123) # error should raise AttributeError with self.assertRaises(AttributeError): iutil.setdeepattr(a, "b.c.d.e.f.g.h", 
1234) def strip_accents_test(self): """Test strip_accents.""" # string needs to be Unicode, # otherwise TypeError is raised with self.assertRaises(TypeError): iutil.strip_accents("") with self.assertRaises(TypeError): iutil.strip_accents("abc") with self.assertRaises(TypeError): iutil.strip_accents("ěščřžýáíé") # empty Unicode string self.assertEquals(iutil.strip_accents(u""), u"") # some Czech accents self.assertEquals(iutil.strip_accents(u"ěščřžýáíéúů"), u"escrzyaieuu") self.assertEquals(iutil.strip_accents(u"v češtině"), u"v cestine") self.assertEquals(iutil.strip_accents(u"měšťánek rozšíří HÁČKY"), u"mestanek rozsiri HACKY") self.assertEquals(iutil.strip_accents(u"nejneobhospodařovávatelnějšímu"), u"nejneobhospodarovavatelnejsimu") # some German umlauts self.assertEquals(iutil.strip_accents(u"Lärmüberhörer"), u"Larmuberhorer") self.assertEquals(iutil.strip_accents(u"Heizölrückstoßabdämpfung"), u"Heizolrucksto\xdfabdampfung") # some Japanese self.assertEquals(iutil.strip_accents(u"日本語"), u"\u65e5\u672c\u8a9e") self.assertEquals(iutil.strip_accents(u"アナコンダ"), # Anaconda u"\u30a2\u30ca\u30b3\u30f3\u30bf") # combined input_string = u"ASCI měšťánek アナコンダ Heizölrückstoßabdämpfung" output_string =u"ASCI mestanek \u30a2\u30ca\u30b3\u30f3\u30bf Heizolrucksto\xdfabdampfung" self.assertEquals(iutil.strip_accents(input_string), output_string) def cmp_obj_attrs_test(self): """Test cmp_obj_attrs.""" # pylint: disable=attribute-defined-outside-init class O(object): pass a = O() a.b = 1 a.c = 2 a1 = O() a1.b = 1 a1.c = 2 b = O() b.b = 1 b.c = 3 # a class should have it's own attributes self.assertTrue(iutil.cmp_obj_attrs(a, a, ["b", "c"])) self.assertTrue(iutil.cmp_obj_attrs(a1, a1, ["b", "c"])) self.assertTrue(iutil.cmp_obj_attrs(b, b, ["b", "c"])) # a and a1 should have the same attributes self.assertTrue(iutil.cmp_obj_attrs(a, a1, ["b", "c"])) self.assertTrue(iutil.cmp_obj_attrs(a1, a, ["b", "c"])) self.assertTrue(iutil.cmp_obj_attrs(a1, a, ["c", "b"])) # missing attributes 
are considered a mismatch self.assertFalse(iutil.cmp_obj_attrs(a, a1, ["b", "c", "d"])) # empty attribute list is not a mismatch self.assertTrue(iutil.cmp_obj_attrs(a, b, [])) # attributes of a and b differ self.assertFalse(iutil.cmp_obj_attrs(a, b, ["b", "c"])) self.assertFalse(iutil.cmp_obj_attrs(b, a, ["b", "c"])) self.assertFalse(iutil.cmp_obj_attrs(b, a, ["c", "b"])) def to_ascii_test(self): """Test _toASCII.""" # works with strings only, chokes on Unicode strings with self.assertRaises(ValueError): iutil._toASCII(u" ") with self.assertRaises(ValueError): iutil._toASCII(u"ABC") with self.assertRaises(ValueError): iutil._toASCII(u"Heizölrückstoßabdämpfung") # but empty Unicode string is fine :) iutil._toASCII(u"") # check some conversions self.assertEqual(iutil._toASCII(""), "") self.assertEqual(iutil._toASCII(" "), " ") self.assertEqual(iutil._toASCII("&@`'łŁ!@#$%^&*{}[]$'<>*"), "&@`'\xc5\x82\xc5\x81!@#$%^&*{}[]$'<>*") self.assertEqual(iutil._toASCII("ABC"), "ABC") self.assertEqual(iutil._toASCII("aBC"), "aBC") _out = "Heiz\xc3\xb6lr\xc3\xbccksto\xc3\x9fabd\xc3\xa4mpfung" self.assertEqual(iutil._toASCII("Heizölrückstoßabdämpfung"), _out) def upper_ascii_test(self): """Test upperASCII.""" self.assertEqual(iutil.upperASCII(""),"") self.assertEqual(iutil.upperASCII("a"),"A") self.assertEqual(iutil.upperASCII("A"),"A") self.assertEqual(iutil.upperASCII("aBc"),"ABC") self.assertEqual(iutil.upperASCII("_&*'@#$%^aBcžčŘ"), "_&*'@#$%^ABC\xc5\xbe\xc4\x8d\xc5\x98") _out = "HEIZ\xc3\xb6LR\xc3\xbcCKSTO\xc3\x9fABD\xc3\xa4MPFUNG" self.assertEqual(iutil.upperASCII("Heizölrückstoßabdämpfung"), _out) def lower_ascii_test(self): """Test lowerASCII.""" self.assertEqual(iutil.lowerASCII(""),"") self.assertEqual(iutil.lowerASCII("A"),"a") self.assertEqual(iutil.lowerASCII("a"),"a") self.assertEqual(iutil.lowerASCII("aBc"),"abc") self.assertEqual(iutil.lowerASCII("_&*'@#$%^aBcžčŘ"), "_&*'@#$%^abc\xc5\xbe\xc4\x8d\xc5\x98") _out = "heiz\xc3\xb6lr\xc3\xbccksto\xc3\x9fabd\xc3\xa4mpfung" 
self.assertEqual(iutil.lowerASCII("Heizölrückstoßabdämpfung"), _out) def have_word_match_test(self): """Test have_word_match.""" self.assertTrue(iutil.have_word_match("word1 word2", "word1 word2 word3")) self.assertTrue(iutil.have_word_match("word1 word2", "word2 word1 word3")) self.assertTrue(iutil.have_word_match("word2 word1", "word3 word1 word2")) self.assertTrue(iutil.have_word_match("word1", "word1 word2")) self.assertTrue(iutil.have_word_match("word1 word2", "word2word1 word3")) self.assertTrue(iutil.have_word_match("word2 word1", "word3 word1word2")) self.assertTrue(iutil.have_word_match("word1", "word1word2")) self.assertTrue(iutil.have_word_match("", "word1")) self.assertFalse(iutil.have_word_match("word3 word1", "word1")) self.assertFalse(iutil.have_word_match("word1 word3", "word1 word2")) self.assertFalse(iutil.have_word_match("word3 word2", "word1 word2")) self.assertFalse(iutil.have_word_match("word1word2", "word1 word2 word3")) self.assertFalse(iutil.have_word_match("word1", "")) self.assertFalse(iutil.have_word_match("word1", None)) self.assertFalse(iutil.have_word_match(None, "word1")) self.assertFalse(iutil.have_word_match("", None)) self.assertFalse(iutil.have_word_match(None, "")) self.assertFalse(iutil.have_word_match(None, None)) # Compare unicode and str and make sure nothing crashes self.assertTrue(iutil.have_word_match("fête", u"fête champêtre")) self.assertTrue(iutil.have_word_match(u"fête", "fête champêtre")) def parent_dir_test(self): """Test the parent_dir function""" dirs = [("", ""), ("/", ""), ("/home/", ""), ("/home/bcl", "/home"), ("home/bcl", "home"), ("/home/bcl/", "/home"), ("/home/extra/bcl", "/home/extra"), ("/home/extra/bcl/", "/home/extra"), ("/home/extra/../bcl/", "/home")] for d, r in dirs: self.assertEquals(iutil.parent_dir(d), r)
roderickmackenzie/opvdm
refs/heads/master
gui/debug.py
1
# Organic Photovoltaic Device Model - a drift diffusion base/Shockley-Read-Hall
# model for organic solar cells.
# Copyright (C) 2012 Roderick C. I. MacKenzie
#
#	roderick.mackenzie@nottingham.ac.uk
#	www.opvdm.com
#	Room B86 Coates, University Park, Nottingham, NG7 2RD, UK
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License v2.0, as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import sys
import os
from os.path import expanduser
from cal_path import get_bin_path


def debug_mode():
	"""Return True when opvdm should run in debug mode.

	Debug mode is enabled by the presence of a marker file named
	``debug_mode`` in the binary directory reported by get_bin_path();
	the file's contents are ignored, only its existence matters.
	"""
	# os.path.isfile already returns a bool, so no if/else ladder is needed.
	return os.path.isfile(os.path.join(get_bin_path(), "debug_mode"))
dya2/python-for-android
refs/heads/master
python3-alpha/python3-src/Lib/sqlite3/test/userfunctions.py
46
#-*- coding: ISO-8859-1 -*- # pysqlite2/test/userfunctions.py: tests for user-defined functions and # aggregates. # # Copyright (C) 2005-2007 Gerhard Häring <gh@ghaering.de> # # This file is part of pysqlite. # # This software is provided 'as-is', without any express or implied # warranty. In no event will the authors be held liable for any damages # arising from the use of this software. # # Permission is granted to anyone to use this software for any purpose, # including commercial applications, and to alter it and redistribute it # freely, subject to the following restrictions: # # 1. The origin of this software must not be misrepresented; you must not # claim that you wrote the original software. If you use this software # in a product, an acknowledgment in the product documentation would be # appreciated but is not required. # 2. Altered source versions must be plainly marked as such, and must not be # misrepresented as being the original software. # 3. This notice may not be removed or altered from any source distribution. 
import unittest
import sqlite3 as sqlite


# --- Scalar helper functions registered with the connection in FunctionTests.

def func_returntext():
    return "foo"

def func_returnunicode():
    return "bar"

def func_returnint():
    return 42

def func_returnfloat():
    return 3.14

def func_returnnull():
    return None

def func_returnblob():
    return b"blob"

def func_raiseexception():
    # Deliberately raises ZeroDivisionError so error propagation out of a
    # user-defined function can be tested.
    5/0

def func_isstring(v):
    return type(v) is str

def func_isint(v):
    return type(v) is int

def func_isfloat(v):
    return type(v) is float

def func_isnone(v):
    return type(v) is type(None)

def func_isblob(v):
    return isinstance(v, (bytes, memoryview))


# --- Aggregate classes; most are intentionally broken so that error handling
# --- in the aggregate machinery can be tested.

class AggrNoStep:
    # Missing step() method on purpose.
    def __init__(self):
        pass

    def finalize(self):
        return 1

class AggrNoFinalize:
    # Missing finalize() method on purpose.
    def __init__(self):
        pass

    def step(self, x):
        pass

class AggrExceptionInInit:
    # Raises ZeroDivisionError during construction on purpose.
    def __init__(self):
        5/0

    def step(self, x):
        pass

    def finalize(self):
        pass

class AggrExceptionInStep:
    # Raises ZeroDivisionError in step() on purpose.
    def __init__(self):
        pass

    def step(self, x):
        5/0

    def finalize(self):
        return 42

class AggrExceptionInFinalize:
    # Raises ZeroDivisionError in finalize() on purpose.
    def __init__(self):
        pass

    def step(self, x):
        pass

    def finalize(self):
        5/0

class AggrCheckType:
    # finalize() returns 1 when the stepped value had the named Python type.
    def __init__(self):
        self.val = None

    def step(self, whichType, val):
        theType = {"str": str, "int": int, "float": float, "None": type(None),
                   "blob": bytes}
        self.val = int(theType[whichType] is type(val))

    def finalize(self):
        return self.val

class AggrSum:
    # A working aggregate: sums its input values.
    def __init__(self):
        self.val = 0.0

    def step(self, val):
        self.val += val

    def finalize(self):
        return self.val

class FunctionTests(unittest.TestCase):
    """Tests for user-defined scalar functions."""

    def setUp(self):
        self.con = sqlite.connect(":memory:")

        self.con.create_function("returntext", 0, func_returntext)
        self.con.create_function("returnunicode", 0, func_returnunicode)
        self.con.create_function("returnint", 0, func_returnint)
        self.con.create_function("returnfloat", 0, func_returnfloat)
        self.con.create_function("returnnull", 0, func_returnnull)
        self.con.create_function("returnblob", 0, func_returnblob)
        self.con.create_function("raiseexception", 0, func_raiseexception)

        self.con.create_function("isstring", 1, func_isstring)
        self.con.create_function("isint", 1, func_isint)
        self.con.create_function("isfloat", 1, func_isfloat)
        self.con.create_function("isnone", 1, func_isnone)
        self.con.create_function("isblob", 1, func_isblob)

    def tearDown(self):
        self.con.close()

    def CheckFuncErrorOnCreate(self):
        try:
            self.con.create_function("bla", -100, lambda x: 2*x)
            self.fail("should have raised an OperationalError")
        except sqlite.OperationalError:
            pass

    def CheckFuncRefCount(self):
        def getfunc():
            def f():
                return 1
            return f
        f = getfunc()
        globals()["foo"] = f
        # self.con.create_function("reftest", 0, getfunc())
        self.con.create_function("reftest", 0, f)
        cur = self.con.cursor()
        cur.execute("select reftest()")

    def CheckFuncReturnText(self):
        cur = self.con.cursor()
        cur.execute("select returntext()")
        val = cur.fetchone()[0]
        self.assertEqual(type(val), str)
        self.assertEqual(val, "foo")

    def CheckFuncReturnUnicode(self):
        cur = self.con.cursor()
        cur.execute("select returnunicode()")
        val = cur.fetchone()[0]
        self.assertEqual(type(val), str)
        self.assertEqual(val, "bar")

    def CheckFuncReturnInt(self):
        cur = self.con.cursor()
        cur.execute("select returnint()")
        val = cur.fetchone()[0]
        self.assertEqual(type(val), int)
        self.assertEqual(val, 42)

    def CheckFuncReturnFloat(self):
        cur = self.con.cursor()
        cur.execute("select returnfloat()")
        val = cur.fetchone()[0]
        self.assertEqual(type(val), float)
        # Avoid exact float comparison; accept a tight interval around 3.14.
        if val < 3.139 or val > 3.141:
            self.fail("wrong value")

    def CheckFuncReturnNull(self):
        cur = self.con.cursor()
        cur.execute("select returnnull()")
        val = cur.fetchone()[0]
        self.assertEqual(type(val), type(None))
        self.assertEqual(val, None)

    def CheckFuncReturnBlob(self):
        cur = self.con.cursor()
        cur.execute("select returnblob()")
        val = cur.fetchone()[0]
        self.assertEqual(type(val), bytes)
        self.assertEqual(val, b"blob")

    def CheckFuncException(self):
        cur = self.con.cursor()
        try:
            cur.execute("select raiseexception()")
            cur.fetchone()
            self.fail("should have raised OperationalError")
        except sqlite.OperationalError as e:
            self.assertEqual(e.args[0], 'user-defined function raised exception')

    def CheckParamString(self):
        cur = self.con.cursor()
        cur.execute("select isstring(?)", ("foo",))
        val = cur.fetchone()[0]
        self.assertEqual(val, 1)

    def CheckParamInt(self):
        cur = self.con.cursor()
        cur.execute("select isint(?)", (42,))
        val = cur.fetchone()[0]
        self.assertEqual(val, 1)

    def CheckParamFloat(self):
        cur = self.con.cursor()
        cur.execute("select isfloat(?)", (3.14,))
        val = cur.fetchone()[0]
        self.assertEqual(val, 1)

    def CheckParamNone(self):
        cur = self.con.cursor()
        cur.execute("select isnone(?)", (None,))
        val = cur.fetchone()[0]
        self.assertEqual(val, 1)

    def CheckParamBlob(self):
        cur = self.con.cursor()
        cur.execute("select isblob(?)", (memoryview(b"blob"),))
        val = cur.fetchone()[0]
        self.assertEqual(val, 1)

class AggregateTests(unittest.TestCase):
    """Tests for user-defined aggregate functions."""

    def setUp(self):
        self.con = sqlite.connect(":memory:")
        cur = self.con.cursor()
        cur.execute("""
            create table test(
                t text,
                i integer,
                f float,
                n,
                b blob
                )
            """)
        cur.execute("insert into test(t, i, f, n, b) values (?, ?, ?, ?, ?)",
            ("foo", 5, 3.14, None, memoryview(b"blob"),))

        self.con.create_aggregate("nostep", 1, AggrNoStep)
        self.con.create_aggregate("nofinalize", 1, AggrNoFinalize)
        self.con.create_aggregate("excInit", 1, AggrExceptionInInit)
        self.con.create_aggregate("excStep", 1, AggrExceptionInStep)
        self.con.create_aggregate("excFinalize", 1, AggrExceptionInFinalize)
        self.con.create_aggregate("checkType", 2, AggrCheckType)
        self.con.create_aggregate("mysum", 1, AggrSum)

    def tearDown(self):
        # Close the in-memory connection so each test case releases its
        # resources (the original left this as a no-op, leaking connections).
        self.con.close()

    def CheckAggrErrorOnCreate(self):
        try:
            # Must use create_aggregate here: this test is about aggregate
            # registration (the original mistakenly called create_function,
            # so the aggregate code path was never exercised).
            self.con.create_aggregate("bla", -100, AggrSum)
            self.fail("should have raised an OperationalError")
        except sqlite.OperationalError:
            pass

    def CheckAggrNoStep(self):
        cur = self.con.cursor()
        try:
            cur.execute("select nostep(t) from test")
            self.fail("should have raised an AttributeError")
        except AttributeError as e:
            self.assertEqual(e.args[0], "'AggrNoStep' object has no attribute 'step'")

    def CheckAggrNoFinalize(self):
        cur = self.con.cursor()
        try:
            cur.execute("select nofinalize(t) from test")
            val = cur.fetchone()[0]
            self.fail("should have raised an OperationalError")
        except sqlite.OperationalError as e:
            self.assertEqual(e.args[0], "user-defined aggregate's 'finalize' method raised error")

    def CheckAggrExceptionInInit(self):
        cur = self.con.cursor()
        try:
            cur.execute("select excInit(t) from test")
            val = cur.fetchone()[0]
            self.fail("should have raised an OperationalError")
        except sqlite.OperationalError as e:
            self.assertEqual(e.args[0], "user-defined aggregate's '__init__' method raised error")

    def CheckAggrExceptionInStep(self):
        cur = self.con.cursor()
        try:
            cur.execute("select excStep(t) from test")
            val = cur.fetchone()[0]
            self.fail("should have raised an OperationalError")
        except sqlite.OperationalError as e:
            self.assertEqual(e.args[0], "user-defined aggregate's 'step' method raised error")

    def CheckAggrExceptionInFinalize(self):
        cur = self.con.cursor()
        try:
            cur.execute("select excFinalize(t) from test")
            val = cur.fetchone()[0]
            self.fail("should have raised an OperationalError")
        except sqlite.OperationalError as e:
            self.assertEqual(e.args[0], "user-defined aggregate's 'finalize' method raised error")

    def CheckAggrCheckParamStr(self):
        cur = self.con.cursor()
        cur.execute("select checkType('str', ?)", ("foo",))
        val = cur.fetchone()[0]
        self.assertEqual(val, 1)

    def CheckAggrCheckParamInt(self):
        cur = self.con.cursor()
        cur.execute("select checkType('int', ?)", (42,))
        val = cur.fetchone()[0]
        self.assertEqual(val, 1)

    def CheckAggrCheckParamFloat(self):
        cur = self.con.cursor()
        cur.execute("select checkType('float', ?)", (3.14,))
        val = cur.fetchone()[0]
        self.assertEqual(val, 1)

    def CheckAggrCheckParamNone(self):
        cur = self.con.cursor()
        cur.execute("select checkType('None', ?)", (None,))
        val = cur.fetchone()[0]
        self.assertEqual(val, 1)

    def CheckAggrCheckParamBlob(self):
        cur = self.con.cursor()
        cur.execute("select checkType('blob', ?)", (memoryview(b"blob"),))
        val = cur.fetchone()[0]
        self.assertEqual(val, 1)

    def CheckAggrCheckAggrSum(self):
        cur = self.con.cursor()
        cur.execute("delete from test")
        cur.executemany("insert into test(i) values (?)", [(10,), (20,), (30,)])
        cur.execute("select mysum(i) from test")
        val = cur.fetchone()[0]
        self.assertEqual(val, 60)

def authorizer_cb(action, arg1, arg2, dbname, source):
    # Deny everything except SELECT, and additionally deny any access to
    # column c2 or table t2.
    if action != sqlite.SQLITE_SELECT:
        return sqlite.SQLITE_DENY
    if arg2 == 'c2' or arg1 == 't2':
        return sqlite.SQLITE_DENY
    return sqlite.SQLITE_OK

class AuthorizerTests(unittest.TestCase):
    """Tests for the connection authorizer callback."""

    def setUp(self):
        self.con = sqlite.connect(":memory:")
        self.con.executescript("""
            create table t1 (c1, c2);
            create table t2 (c1, c2);
            insert into t1 (c1, c2) values (1, 2);
            insert into t2 (c1, c2) values (4, 5);
            """)

        # For our security test:
        self.con.execute("select c2 from t2")

        self.con.set_authorizer(authorizer_cb)

    def tearDown(self):
        # Release the in-memory database (the original left this as a no-op).
        self.con.close()

    def CheckTableAccess(self):
        try:
            self.con.execute("select * from t2")
        except sqlite.DatabaseError as e:
            if not e.args[0].endswith("prohibited"):
                self.fail("wrong exception text: %s" % e.args[0])
            return
        self.fail("should have raised an exception due to missing privileges")

    def CheckColumnAccess(self):
        try:
            self.con.execute("select c2 from t1")
        except sqlite.DatabaseError as e:
            if not e.args[0].endswith("prohibited"):
                self.fail("wrong exception text: %s" % e.args[0])
            return
        self.fail("should have raised an exception due to missing privileges")

def suite():
    function_suite = unittest.makeSuite(FunctionTests, "Check")
    aggregate_suite = unittest.makeSuite(AggregateTests, "Check")
    authorizer_suite = unittest.makeSuite(AuthorizerTests, "Check")
    return unittest.TestSuite((function_suite, aggregate_suite, authorizer_suite))

def test():
    runner = unittest.TextTestRunner()
    runner.run(suite())

if __name__ == "__main__":
    test()
piagarwal11/GDriveLinuxClient
refs/heads/master
src/watchdog-0.8.2/src/watchdog/observers/polling.py
17
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2011 Yesudeep Mangalapilly <yesudeep@gmail.com> # Copyright 2012 Google, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ :module: watchdog.observers.polling :synopsis: Polling emitter implementation. :author: yesudeep@google.com (Yesudeep Mangalapilly) Classes ------- .. autoclass:: PollingObserver :members: :show-inheritance: .. autoclass:: PollingObserverVFS :members: :show-inheritance: :special-members: """ from __future__ import with_statement import os import threading from functools import partial from watchdog.utils import stat as default_stat from watchdog.utils.dirsnapshot import DirectorySnapshot, DirectorySnapshotDiff from watchdog.observers.api import ( EventEmitter, BaseObserver, DEFAULT_OBSERVER_TIMEOUT, DEFAULT_EMITTER_TIMEOUT ) from watchdog.events import ( DirMovedEvent, DirDeletedEvent, DirCreatedEvent, DirModifiedEvent, FileMovedEvent, FileDeletedEvent, FileCreatedEvent, FileModifiedEvent ) class PollingEmitter(EventEmitter): """ Platform-independent emitter that polls a directory to detect file system changes. 
""" def __init__(self, event_queue, watch, timeout=DEFAULT_EMITTER_TIMEOUT, stat=default_stat, listdir=os.listdir): EventEmitter.__init__(self, event_queue, watch, timeout) self._snapshot = None self._lock = threading.Lock() self._take_snapshot = lambda: DirectorySnapshot( self.watch.path, self.watch.is_recursive, stat=stat, listdir=listdir) def on_thread_start(self): self._snapshot = self._take_snapshot() def queue_events(self, timeout): # We don't want to hit the disk continuously. # timeout behaves like an interval for polling emitters. if self.stopped_event.wait(timeout): return with self._lock: if not self.should_keep_running(): return # Get event diff between fresh snapshot and previous snapshot. # Update snapshot. new_snapshot = self._take_snapshot() events = DirectorySnapshotDiff(self._snapshot, new_snapshot) self._snapshot = new_snapshot # Files. for src_path in events.files_deleted: self.queue_event(FileDeletedEvent(src_path)) for src_path in events.files_modified: self.queue_event(FileModifiedEvent(src_path)) for src_path in events.files_created: self.queue_event(FileCreatedEvent(src_path)) for src_path, dest_path in events.files_moved: self.queue_event(FileMovedEvent(src_path, dest_path)) # Directories. for src_path in events.dirs_deleted: self.queue_event(DirDeletedEvent(src_path)) for src_path in events.dirs_modified: self.queue_event(DirModifiedEvent(src_path)) for src_path in events.dirs_created: self.queue_event(DirCreatedEvent(src_path)) for src_path, dest_path in events.dirs_moved: self.queue_event(DirMovedEvent(src_path, dest_path)) class PollingObserver(BaseObserver): """ Platform-independent observer that polls a directory to detect file system changes. """ def __init__(self, timeout=DEFAULT_OBSERVER_TIMEOUT): BaseObserver.__init__(self, emitter_class=PollingEmitter, timeout=timeout) class PollingObserverVFS(BaseObserver): """ File system independent observer that polls a directory to detect changes. 
""" def __init__(self, stat, listdir, polling_interval=1): """ :param stat: stat function. See ``os.stat`` for details. :param listdir: listdir function. See ``os.listdir`` for details. :type polling_interval: float :param polling_interval: interval in seconds between polling the file system. """ emitter_cls = partial(PollingEmitter, stat=stat, listdir=listdir) BaseObserver.__init__(self, emitter_class=emitter_cls, timeout=polling_interval)
wefner/w2pfooty
refs/heads/develop
__init__.py
386048
digris/openbroadcast.org
refs/heads/development
website/apps/pushy/urls.py
386048
Microsoft/PTVS
refs/heads/master
Python/Tests/TestData/InfoBar/InfoBarEnvYml/main.py
386048
reubano/csvkit
refs/heads/master
tests/test_convert/test_geojson.py
21
#!/usr/bin/env python

try:
    import unittest2 as unittest
except ImportError:
    import unittest

from csvkit.convert import geojs


class TestGeoJSON(unittest.TestCase):

    def test_geojson(self):
        """geojson2csv output contains the header and the geometry columns."""
        with open('examples/test_geojson.json', 'rt') as source:
            csv_text = geojs.geojson2csv(source)

        # Every expected fragment must appear somewhere in the converted CSV.
        expected_fragments = (
            'id,prop0,prop1,geojson',
            '""coordinates"": [102.0, 0.5]',
            '""coordinates"": [[102.0, 0.0], [103.0, 1.0], [104.0, 0.0], [105.0, 1.0]]',
        )
        for fragment in expected_fragments:
            self.assertIn(fragment, csv_text)
metalspring/android_kernel_htc_msm8974
refs/heads/kk44
scripts/gcc-wrapper.py
1276
#! /usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2011-2012, The Linux Foundation. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of The Linux Foundation nor # the names of its contributors may be used to endorse or promote # products derived from this software without specific prior written # permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NON-INFRINGEMENT ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; # OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, # WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR # OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF # ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # Invoke gcc, looking for warnings, and causing a failure if there are # non-whitelisted warnings. import errno import re import os import sys import subprocess # Note that gcc uses unicode, which may depend on the locale. TODO: # force LANG to be set to en_US.UTF-8 to get consistent warnings. 
allowed_warnings = set([ "return_address.c:62", ]) # Capture the name of the object file, can find it. ofile = None warning_re = re.compile(r'''(.*/|)([^/]+\.[a-z]+:\d+):(\d+:)? warning:''') def interpret_warning(line): """Decode the message from gcc. The messages we care about have a filename, and a warning""" line = line.rstrip('\n') m = warning_re.match(line) if m and m.group(2) not in allowed_warnings: print "error, forbidden warning:", m.group(2) # If there is a warning, remove any object if it exists. if ofile: try: os.remove(ofile) except OSError: pass sys.exit(1) def run_gcc(): args = sys.argv[1:] # Look for -o try: i = args.index('-o') global ofile ofile = args[i+1] except (ValueError, IndexError): pass compiler = sys.argv[0] try: proc = subprocess.Popen(args, stderr=subprocess.PIPE) for line in proc.stderr: print line, interpret_warning(line) result = proc.wait() except OSError as e: result = e.errno if result == errno.ENOENT: print args[0] + ':',e.strerror print 'Is your PATH set correctly?' else: print ' '.join(args), str(e) return result if __name__ == '__main__': status = run_gcc() sys.exit(status)
samdoran/ansible
refs/heads/devel
lib/ansible/modules/cloud/amazon/ec2_vpc_subnet_facts.py
63
#!/usr/bin/python
#
# This is a free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This Ansible library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this library.  If not, see <http://www.gnu.org/licenses/>.

ANSIBLE_METADATA = {'metadata_version': '1.0',
                    'status': ['stableinterface'],
                    'supported_by': 'curated'}

DOCUMENTATION = '''
---
module: ec2_vpc_subnet_facts
short_description: Gather facts about ec2 VPC subnets in AWS
description:
    - Gather facts about ec2 VPC subnets in AWS
version_added: "2.1"
author: "Rob White (@wimnat)"
options:
  filters:
    description:
      - A dict of filters to apply. Each dict item consists of a filter key and a filter value.
        See U(http://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeSubnets.html) for possible filters.
    required: false
    default: null
extends_documentation_fragment:
    - aws
    - ec2
'''

EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.

# Gather facts about all VPC subnets
- ec2_vpc_subnet_facts:

# Gather facts about a particular VPC subnet using ID
- ec2_vpc_subnet_facts:
    filters:
      subnet-id: subnet-00112233

# Gather facts about any VPC subnet with a tag key Name and value Example
- ec2_vpc_subnet_facts:
    filters:
      "tag:Name": Example

# Gather facts about any VPC subnet within VPC with ID vpc-abcdef00
- ec2_vpc_subnet_facts:
    filters:
      vpc-id: vpc-abcdef00

# Gather facts about a set of VPC subnets, publicA, publicB and publicC within a
# VPC with ID vpc-abcdef00 and then use the jinja map function to return the
# subnet_ids as a list.
- ec2_vpc_subnet_facts:
    filters:
      vpc-id: vpc-abcdef00
      "tag:Name": "{{ item }}"
  with_items:
    - publicA
    - publicB
    - publicC
  register: subnet_facts

- set_fact:
    subnet_ids: "{{ subnet_facts.results|map(attribute='subnets.0.id')|list }}"
'''

try:
    import boto.vpc
    from boto.exception import BotoServerError
    HAS_BOTO = True
except ImportError:
    HAS_BOTO = False

from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import AnsibleAWSError, connect_to_aws, ec2_argument_spec, get_aws_connection_info


def get_subnet_info(subnet):
    """Map a boto Subnet object to a plain fact dict returned to the user."""
    return {'id': subnet.id,
            'availability_zone': subnet.availability_zone,
            'available_ip_address_count': subnet.available_ip_address_count,
            'cidr_block': subnet.cidr_block,
            'default_for_az': subnet.defaultForAz,
            'map_public_ip_on_launch': subnet.mapPublicIpOnLaunch,
            'state': subnet.state,
            'tags': subnet.tags,
            'vpc_id': subnet.vpc_id}


def list_ec2_vpc_subnets(connection, module):
    """Describe subnets matching the module's 'filters' and exit with facts.

    Calls module.exit_json on success and module.fail_json on an API error;
    does not return normally.
    """
    filters = module.params.get("filters")

    try:
        all_subnets = connection.get_all_subnets(filters=filters)
    except BotoServerError as e:
        # str(e) rather than e.message: BaseException.message is deprecated
        # (removed in Python 3) and str(e) matches the handling in main().
        module.fail_json(msg=str(e))

    module.exit_json(subnets=[get_subnet_info(subnet) for subnet in all_subnets])


def main():
    """Module entry point: parse arguments, connect to EC2, gather facts."""
    argument_spec = ec2_argument_spec()
    argument_spec.update(
        dict(
            filters=dict(default=None, type='dict')
        )
    )

    module = AnsibleModule(argument_spec=argument_spec,
                           supports_check_mode=True)

    if not HAS_BOTO:
        module.fail_json(msg='boto required for this module')

    region, ec2_url, aws_connect_params = get_aws_connection_info(module)

    if region:
        try:
            connection = connect_to_aws(boto.vpc, region, **aws_connect_params)
        except (boto.exception.NoAuthHandlerFound, AnsibleAWSError) as e:
            module.fail_json(msg=str(e))
    else:
        module.fail_json(msg="region must be specified")

    list_ec2_vpc_subnets(connection, module)


if __name__ == '__main__':
    main()
tzonghao/influxdb-python
refs/heads/master
influxdb/tests/influxdb08/client_test.py
4
# -*- coding: utf-8 -*-
"""Client unit tests."""
import json
import socket
import sys
import unittest
import random
import warnings

import mock
import requests
import requests.exceptions
import requests_mock

from nose.tools import raises
from mock import patch

from influxdb.influxdb08 import InfluxDBClient
from influxdb.influxdb08.client import session

# u() normalizes unicode-escape handling across Python 2 and 3.
if sys.version < '3':
    import codecs

    def u(x):
        """Test codec."""
        return codecs.unicode_escape_decode(x)[0]
else:
    def u(x):
        """Test codec."""
        return x


def _build_response_object(status_code=200, content=""):
    """Build a requests.Response with the given status code and body."""
    resp = requests.Response()
    resp.status_code = status_code
    resp._content = content.encode("utf8")
    return resp


def _mocked_session(method="GET", status_code=200, content=""):
    """Patch the client's HTTP session so requests return canned responses.

    Asserts the expected HTTP method (and, for POST, that the sent data is
    a JSON string matching `content`).
    """
    method = method.upper()

    def request(*args, **kwargs):
        """Define a request for the _mocked_session."""
        c = content

        # Check method
        assert method == kwargs.get('method', 'GET')

        if method == 'POST':
            data = kwargs.get('data', None)

            if data is not None:
                # Data must be a string
                assert isinstance(data, str)

                # Data must be a JSON string
                assert c == json.loads(data, strict=True)

                c = data

        # Anyway, Content must be a JSON string (or empty string)
        if not isinstance(c, str):
            c = json.dumps(c)

        return _build_response_object(status_code=status_code, content=c)

    mocked = patch.object(
        session,
        'request',
        side_effect=request
    )

    return mocked


class TestInfluxDBClient(unittest.TestCase):
    """Define a TestInfluxDBClient object."""

    def setUp(self):
        """Set up a TestInfluxDBClient object."""
        # By default, raise exceptions on warnings
        warnings.simplefilter('error', FutureWarning)

        self.dummy_points = [
            {
                "points": [
                    ["1", 1, 1.0],
                    ["2", 2, 2.0]
                ],
                "name": "foo",
                "columns": ["column_one", "column_two", "column_three"]
            }
        ]

        self.dsn_string = 'influxdb://uSr:pWd@host:1886/db'

    def test_scheme(self):
        """Test database scheme for TestInfluxDBClient object."""
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'database')
        self.assertEqual(cli._baseurl, 'http://host:8086')

        cli = InfluxDBClient(
            'host', 8086, 'username', 'password', 'database', ssl=True
        )
        self.assertEqual(cli._baseurl, 'https://host:8086')

    def test_dsn(self):
        """Test datasource name for TestInfluxDBClient object."""
        cli = InfluxDBClient.from_dsn(self.dsn_string)
        self.assertEqual('http://host:1886', cli._baseurl)
        self.assertEqual('uSr', cli._username)
        self.assertEqual('pWd', cli._password)
        self.assertEqual('db', cli._database)
        self.assertFalse(cli._use_udp)

        cli = InfluxDBClient.from_dsn('udp+' + self.dsn_string)
        self.assertTrue(cli._use_udp)

        cli = InfluxDBClient.from_dsn('https+' + self.dsn_string)
        self.assertEqual('https://host:1886', cli._baseurl)

        cli = InfluxDBClient.from_dsn('https+' + self.dsn_string,
                                      **{'ssl': False})
        self.assertEqual('http://host:1886', cli._baseurl)

    def test_switch_database(self):
        """Test switch database for TestInfluxDBClient object."""
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'database')
        cli.switch_database('another_database')
        self.assertEqual(cli._database, 'another_database')

    @raises(FutureWarning)
    def test_switch_db_deprecated(self):
        """Test deprecated switch database for TestInfluxDBClient object."""
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'database')
        cli.switch_db('another_database')
        self.assertEqual(cli._database, 'another_database')

    def test_switch_user(self):
        """Test switch user for TestInfluxDBClient object."""
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'database')
        cli.switch_user('another_username', 'another_password')
        self.assertEqual(cli._username, 'another_username')
        self.assertEqual(cli._password, 'another_password')

    def test_write(self):
        """Test write to database for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/write"
            )
            cli = InfluxDBClient(database='db')
            cli.write(
                {"database": "mydb",
                 "retentionPolicy": "mypolicy",
                 "points": [{"name": "cpu_load_short",
                             "tags": {"host": "server01",
                                      "region": "us-west"},
                             "timestamp": "2009-11-10T23:00:00Z",
                             "values": {"value": 0.64}}]}
            )

            self.assertEqual(
                json.loads(m.last_request.body),
                {"database": "mydb",
                 "retentionPolicy": "mypolicy",
                 "points": [{"name": "cpu_load_short",
                             "tags": {"host": "server01",
                                      "region": "us-west"},
                             "timestamp": "2009-11-10T23:00:00Z",
                             "values": {"value": 0.64}}]}
            )

    def test_write_points(self):
        """Test write points for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/db/db/series"
            )
            cli = InfluxDBClient(database='db')
            cli.write_points(
                self.dummy_points
            )

            self.assertListEqual(
                json.loads(m.last_request.body),
                self.dummy_points
            )

    def test_write_points_string(self):
        """Test write string points for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/db/db/series"
            )
            cli = InfluxDBClient(database='db')
            cli.write_points(
                str(json.dumps(self.dummy_points))
            )

            self.assertListEqual(
                json.loads(m.last_request.body),
                self.dummy_points
            )

    def test_write_points_batch(self):
        """Test write batch points for TestInfluxDBClient object."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/db/db/series")
            cli = InfluxDBClient('localhost', 8086,
                                 'username', 'password', 'db')
            cli.write_points(data=self.dummy_points, batch_size=2)
        self.assertEqual(1, m.call_count)

    def test_write_points_batch_invalid_size(self):
        """Test write batch points invalid size for TestInfluxDBClient."""
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/db/db/series")
            cli = InfluxDBClient('localhost', 8086,
                                 'username', 'password', 'db')
            cli.write_points(data=self.dummy_points, batch_size=-2)
        self.assertEqual(1, m.call_count)

    def test_write_points_batch_multiple_series(self):
        """Test write points batch multiple series."""
        dummy_points = [
            {"points": [["1", 1, 1.0], ["2", 2, 2.0], ["3", 3, 3.0],
                        ["4", 4, 4.0], ["5", 5, 5.0]],
             "name": "foo",
             "columns": ["val1", "val2", "val3"]},
            {"points": [["1", 1, 1.0], ["2", 2, 2.0], ["3", 3, 3.0],
                        ["4", 4, 4.0], ["5", 5, 5.0], ["6", 6, 6.0],
                        ["7", 7, 7.0], ["8", 8, 8.0]],
             "name": "bar",
             "columns": ["val1", "val2", "val3"]},
        ]
        expected_last_body = [{'points': [['7', 7, 7.0], ['8', 8, 8.0]],
                               'name': 'bar',
                               'columns': ['val1', 'val2', 'val3']}]
        with requests_mock.Mocker() as m:
            m.register_uri(requests_mock.POST,
                           "http://localhost:8086/db/db/series")
            cli = InfluxDBClient('localhost', 8086,
                                 'username', 'password', 'db')
            cli.write_points(data=dummy_points, batch_size=3)
        self.assertEqual(m.call_count, 5)
        self.assertEqual(expected_last_body, m.request_history[4].json())

    def test_write_points_udp(self):
        """Test write points UDP for TestInfluxDBClient object."""
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        # NOTE(review): random free-port pick can collide; assumed acceptable
        # for this test environment.
        port = random.randint(4000, 8000)
        s.bind(('0.0.0.0', port))

        cli = InfluxDBClient(
            'localhost', 8086, 'root', 'root',
            'test', use_udp=True, udp_port=port
        )
        cli.write_points(self.dummy_points)

        received_data, addr = s.recvfrom(1024)

        self.assertEqual(self.dummy_points,
                         json.loads(received_data.decode(), strict=True))

    def test_write_bad_precision_udp(self):
        """Test write UDP w/bad precision."""
        cli = InfluxDBClient(
            'localhost', 8086, 'root', 'root',
            'test', use_udp=True, udp_port=4444
        )

        with self.assertRaisesRegexp(
                Exception,
                "InfluxDB only supports seconds precision for udp writes"
        ):
            cli.write_points(
                self.dummy_points,
                time_precision='ms'
            )

    @raises(Exception)
    def test_write_points_fails(self):
        """Test failed write points for TestInfluxDBClient object."""
        with _mocked_session('post', 500):
            cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
            cli.write_points([])

    def test_write_points_with_precision(self):
        """Test write points with precision."""
        with _mocked_session('post', 200, self.dummy_points):
            cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
            self.assertTrue(cli.write_points(self.dummy_points))

    def test_write_points_bad_precision(self):
        """Test write points with bad precision."""
        cli = InfluxDBClient()
        with self.assertRaisesRegexp(
                Exception,
                "Invalid time precision is given. \(use 's', 'm', 'ms' or 'u'\)"
        ):
            cli.write_points(
                self.dummy_points,
                time_precision='g'
            )

    @raises(Exception)
    def test_write_points_with_precision_fails(self):
        """Test write points where precision fails."""
        with _mocked_session('post', 500):
            cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
            cli.write_points_with_precision([])

    def test_delete_points(self):
        """Test delete points for TestInfluxDBClient object."""
        with _mocked_session('delete', 204) as mocked:
            cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
            self.assertTrue(cli.delete_points("foo"))

            self.assertEqual(len(mocked.call_args_list), 1)
            args, kwds = mocked.call_args_list[0]

            self.assertEqual(kwds['params'],
                             {'u': 'username', 'p': 'password'})
            self.assertEqual(kwds['url'], 'http://host:8086/db/db/series/foo')

    @raises(Exception)
    def test_delete_points_with_wrong_name(self):
        """Test delete points with wrong name."""
        with _mocked_session('delete', 400):
            cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
            cli.delete_points("nonexist")

    @raises(NotImplementedError)
    def test_create_scheduled_delete(self):
        """Test create scheduled deletes."""
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        cli.create_scheduled_delete([])

    @raises(NotImplementedError)
    def test_get_list_scheduled_delete(self):
        """Test get schedule list of deletes TestInfluxDBClient."""
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        cli.get_list_scheduled_delete()

    @raises(NotImplementedError)
    def test_remove_scheduled_delete(self):
        """Test remove scheduled delete TestInfluxDBClient."""
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        cli.remove_scheduled_delete(1)

    def test_query(self):
        """Test query for TestInfluxDBClient object."""
        data = [
            {
                "name": "foo",
                "columns": ["time", "sequence_number", "column_one"],
                "points": [
                    [1383876043, 16, "2"], [1383876043, 15, "1"],
                    [1383876035, 14, "2"], [1383876035, 13, "1"]
                ]
            }
        ]
        with _mocked_session('get', 200, data):
            cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
            result = cli.query('select column_one from foo;')
            self.assertEqual(len(result[0]['points']), 4)

    def test_query_chunked(self):
        """Test chunked query for TestInfluxDBClient object."""
        cli = InfluxDBClient(database='db')
        example_object = {
            'points': [
                [1415206250119, 40001, 667],
                [1415206244555, 30001, 7],
                [1415206228241, 20001, 788],
                [1415206212980, 10001, 555],
                [1415197271586, 10001, 23]
            ],
            'name': 'foo',
            'columns': [
                'time',
                'sequence_number',
                'val'
            ]
        }
        # Chunked responses are concatenated JSON documents.
        example_response = \
            json.dumps(example_object) + json.dumps(example_object)

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/db/db/series",
                text=example_response
            )

            self.assertListEqual(
                cli.query('select * from foo', chunked=True),
                [example_object, example_object]
            )

    def test_query_chunked_unicode(self):
        """Test unicode chunked query for TestInfluxDBClient object."""
        cli = InfluxDBClient(database='db')
        example_object = {
            'points': [
                [1415206212980, 10001, u('unicode-\xcf\x89')],
                [1415197271586, 10001, u('more-unicode-\xcf\x90')]
            ],
            'name': 'foo',
            'columns': [
                'time',
                'sequence_number',
                'val'
            ]
        }
        example_response = \
            json.dumps(example_object) + json.dumps(example_object)

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/db/db/series",
                text=example_response
            )

            self.assertListEqual(
                cli.query('select * from foo', chunked=True),
                [example_object, example_object]
            )

    @raises(Exception)
    def test_query_fail(self):
        """Test failed query for TestInfluxDBClient."""
        with _mocked_session('get', 401):
            cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
            cli.query('select column_one from foo;')

    def test_query_bad_precision(self):
        """Test query with bad precision for TestInfluxDBClient."""
        cli = InfluxDBClient()
        with self.assertRaisesRegexp(
                Exception,
                "Invalid time precision is given. \(use 's', 'm', 'ms' or 'u'\)"
        ):
            cli.query('select column_one from foo', time_precision='g')

    def test_create_database(self):
        """Test create database for TestInfluxDBClient."""
        with _mocked_session('post', 201, {"name": "new_db"}):
            cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
            self.assertTrue(cli.create_database('new_db'))

    @raises(Exception)
    def test_create_database_fails(self):
        """Test failed create database for TestInfluxDBClient."""
        with _mocked_session('post', 401):
            cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
            cli.create_database('new_db')

    def test_delete_database(self):
        """Test delete database for TestInfluxDBClient."""
        with _mocked_session('delete', 204):
            cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
            self.assertTrue(cli.delete_database('old_db'))

    @raises(Exception)
    def test_delete_database_fails(self):
        """Test failed delete database for TestInfluxDBClient."""
        with _mocked_session('delete', 401):
            cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
            cli.delete_database('old_db')

    def test_get_list_database(self):
        """Test get list of databases for TestInfluxDBClient."""
        data = [
            {"name": "a_db"}
        ]
        with _mocked_session('get', 200, data):
            cli = InfluxDBClient('host', 8086, 'username', 'password')
            self.assertEqual(len(cli.get_list_database()), 1)
            self.assertEqual(cli.get_list_database()[0]['name'], 'a_db')

    @raises(Exception)
    def test_get_list_database_fails(self):
        """Test failed get list of databases for TestInfluxDBClient."""
        with _mocked_session('get', 401):
            cli = InfluxDBClient('host', 8086, 'username', 'password')
            cli.get_list_database()

    @raises(FutureWarning)
    def test_get_database_list_deprecated(self):
        """Test deprecated get database list for TestInfluxDBClient."""
        data = [
            {"name": "a_db"}
        ]
        with _mocked_session('get', 200, data):
            cli = InfluxDBClient('host', 8086, 'username', 'password')
            self.assertEqual(len(cli.get_database_list()), 1)
            self.assertEqual(cli.get_database_list()[0]['name'], 'a_db')

    def test_delete_series(self):
        """Test delete series for TestInfluxDBClient."""
        with _mocked_session('delete', 204):
            cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
            cli.delete_series('old_series')

    @raises(Exception)
    def test_delete_series_fails(self):
        """Test failed delete series for TestInfluxDBClient."""
        with _mocked_session('delete', 401):
            cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
            cli.delete_series('old_series')

    def test_get_series_list(self):
        """Test get list of series for TestInfluxDBClient."""
        cli = InfluxDBClient(database='db')

        with requests_mock.Mocker() as m:
            example_response = \
                '[{"name":"list_series_result","columns":' \
                '["time","name"],"points":[[0,"foo"],[0,"bar"]]}]'

            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/db/db/series",
                text=example_response
            )

            self.assertListEqual(
                cli.get_list_series(),
                ['foo', 'bar']
            )

    def test_get_continuous_queries(self):
        """Test get continuous queries for TestInfluxDBClient."""
        cli = InfluxDBClient(database='db')

        with requests_mock.Mocker() as m:
            # Tip: put this in a json linter!
            example_response = '[ { "name": "continuous queries", "columns"' \
                               ': [ "time", "id", "query" ], "points": [ [ ' \
                               '0, 1, "select foo(bar,95) from \\"foo_bar' \
                               's\\" group by time(5m) into response_times.' \
                               'percentiles.5m.95" ], [ 0, 2, "select perce' \
                               'ntile(value,95) from \\"response_times\\" g' \
                               'roup by time(5m) into response_times.percen' \
                               'tiles.5m.95" ] ] } ]'

            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/db/db/series",
                text=example_response
            )

            self.assertListEqual(
                cli.get_list_continuous_queries(),
                [
                    'select foo(bar,95) from "foo_bars" group '
                    'by time(5m) into response_times.percentiles.5m.95',
                    'select percentile(value,95) from "response_times" group '
                    'by time(5m) into response_times.percentiles.5m.95'
                ]
            )

    def test_get_list_cluster_admins(self):
        """Test get list of cluster admins, not implemented."""
        pass

    def test_add_cluster_admin(self):
        """Test add cluster admin for TestInfluxDBClient."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/cluster_admins"
            )

            cli = InfluxDBClient(database='db')
            cli.add_cluster_admin(
                new_username='paul',
                new_password='laup'
            )

            self.assertDictEqual(
                json.loads(m.last_request.body),
                {
                    'name': 'paul',
                    'password': 'laup'
                }
            )

    def test_update_cluster_admin_password(self):
        """Test update cluster admin pass for TestInfluxDBClient."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/cluster_admins/paul"
            )

            cli = InfluxDBClient(database='db')
            cli.update_cluster_admin_password(
                username='paul',
                new_password='laup'
            )

            self.assertDictEqual(
                json.loads(m.last_request.body),
                {'password': 'laup'}
            )

    def test_delete_cluster_admin(self):
        """Test delete cluster admin for TestInfluxDBClient."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.DELETE,
                "http://localhost:8086/cluster_admins/paul",
                status_code=200,
            )

            cli = InfluxDBClient(database='db')
            cli.delete_cluster_admin(username='paul')

            self.assertIsNone(m.last_request.body)

    def test_set_database_admin(self):
        """Test set database admin for TestInfluxDBClient."""
        pass

    def test_unset_database_admin(self):
        """Test unset database admin for TestInfluxDBClient."""
        pass

    def test_alter_database_admin(self):
        """Test alter database admin for TestInfluxDBClient."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/db/db/users/paul"
            )

            cli = InfluxDBClient(database='db')
            cli.alter_database_admin(
                username='paul',
                is_admin=False
            )

            self.assertDictEqual(
                json.loads(m.last_request.body),
                {
                    'admin': False
                }
            )

    @raises(NotImplementedError)
    def test_get_list_database_admins(self):
        """Test get list of database admins for TestInfluxDBClient."""
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        cli.get_list_database_admins()

    @raises(NotImplementedError)
    def test_add_database_admin(self):
        """Test add database admins for TestInfluxDBClient."""
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        cli.add_database_admin('admin', 'admin_secret_password')

    @raises(NotImplementedError)
    def test_update_database_admin_password(self):
        """Test update database admin pass for TestInfluxDBClient."""
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        cli.update_database_admin_password('admin', 'admin_secret_password')

    @raises(NotImplementedError)
    def test_delete_database_admin(self):
        """Test delete database admin for TestInfluxDBClient."""
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        cli.delete_database_admin('admin')

    def test_get_database_users(self):
        """Test get database users for TestInfluxDBClient."""
        cli = InfluxDBClient('localhost', 8086, 'username', 'password', 'db')

        example_response = \
            '[{"name":"paul","isAdmin":false,"writeTo":".*","readFrom":".*"},'\
            '{"name":"bobby","isAdmin":false,"writeTo":".*","readFrom":".*"}]'

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.GET,
                "http://localhost:8086/db/db/users",
                text=example_response
            )

            users = cli.get_database_users()

            self.assertEqual(json.loads(example_response), users)

    def test_add_database_user(self):
        """Test add database user for TestInfluxDBClient."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/db/db/users"
            )
            cli = InfluxDBClient(database='db')
            cli.add_database_user(
                new_username='paul',
                new_password='laup',
                permissions=('.*', '.*')
            )

            self.assertDictEqual(
                json.loads(m.last_request.body),
                {
                    'writeTo': '.*',
                    'password': 'laup',
                    'readFrom': '.*',
                    'name': 'paul'
                }
            )

    def test_add_database_user_bad_permissions(self):
        """Test add database user with bad perms for TestInfluxDBClient."""
        cli = InfluxDBClient()

        with self.assertRaisesRegexp(
                Exception,
                "'permissions' must be \(readFrom, writeTo\) tuple"
        ):
            cli.add_database_user(
                new_password='paul',
                new_username='paul',
                permissions=('hello', 'hello', 'hello')
            )

    def test_alter_database_user_password(self):
        """Test alter database user pass for TestInfluxDBClient."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/db/db/users/paul"
            )

            cli = InfluxDBClient(database='db')
            cli.alter_database_user(
                username='paul',
                password='n3wp4ss!'
            )

            self.assertDictEqual(
                json.loads(m.last_request.body),
                {
                    'password': 'n3wp4ss!'
                }
            )

    def test_alter_database_user_permissions(self):
        """Test alter database user perms for TestInfluxDBClient."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/db/db/users/paul"
            )

            cli = InfluxDBClient(database='db')
            cli.alter_database_user(
                username='paul',
                permissions=('^$', '.*')
            )

            self.assertDictEqual(
                json.loads(m.last_request.body),
                {
                    'readFrom': '^$',
                    'writeTo': '.*'
                }
            )

    def test_alter_database_user_password_and_permissions(self):
        """Test alter database user pass and perms for TestInfluxDBClient."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/db/db/users/paul"
            )

            cli = InfluxDBClient(database='db')
            cli.alter_database_user(
                username='paul',
                password='n3wp4ss!',
                permissions=('^$', '.*')
            )

            self.assertDictEqual(
                json.loads(m.last_request.body),
                {
                    'password': 'n3wp4ss!',
                    'readFrom': '^$',
                    'writeTo': '.*'
                }
            )

    def test_update_database_user_password_current_user(self):
        """Test update database user pass for TestInfluxDBClient."""
        cli = InfluxDBClient(
            username='root',
            password='hello',
            database='database'
        )

        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.POST,
                "http://localhost:8086/db/database/users/root"
            )

            cli.update_database_user_password(
                username='root',
                new_password='bye'
            )

            self.assertEqual(cli._password, 'bye')

    def test_delete_database_user(self):
        """Test delete database user for TestInfluxDBClient."""
        with requests_mock.Mocker() as m:
            m.register_uri(
                requests_mock.DELETE,
                "http://localhost:8086/db/db/users/paul"
            )

            cli = InfluxDBClient(database='db')
            cli.delete_database_user(username='paul')

            self.assertIsNone(m.last_request.body)

    @raises(NotImplementedError)
    def test_update_permission(self):
        """Test update permission for TestInfluxDBClient."""
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        cli.update_permission('admin', [])

    @mock.patch('requests.Session.request')
    def test_request_retry(self, mock_request):
        """Test that two connection errors will be handled."""
        class CustomMock(object):
            """Define CustomMock object."""

            def __init__(self):
                self.i = 0

            def connection_error(self, *args, **kwargs):
                """Test connection error in CustomMock."""
                self.i += 1

                if self.i < 3:
                    raise requests.exceptions.ConnectionError
                else:
                    r = requests.Response()
                    r.status_code = 200
                    return r

        mock_request.side_effect = CustomMock().connection_error

        cli = InfluxDBClient(database='db')
        cli.write_points(
            self.dummy_points
        )

    @mock.patch('requests.Session.request')
    def test_request_retry_raises(self, mock_request):
        """Test that three connection errors will not be handled."""
        class CustomMock(object):
            """Define CustomMock object."""

            def __init__(self):
                """Initialize the object."""
                self.i = 0

            def connection_error(self, *args, **kwargs):
                """Test the connection error for CustomMock."""
                self.i += 1

                if self.i < 4:
                    raise requests.exceptions.ConnectionError
                else:
                    r = requests.Response()
                    r.status_code = 200
                    return r

        mock_request.side_effect = CustomMock().connection_error

        cli = InfluxDBClient(database='db')

        with self.assertRaises(requests.exceptions.ConnectionError):
            cli.write_points(self.dummy_points)
edureis95/xbmc
refs/heads/master
lib/gtest/test/gtest_filter_unittest.py
2826
#!/usr/bin/env python
#
# Copyright 2005 Google Inc. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Unit test for Google Test test filters.

A user can specify which test(s) in a Google Test program to run via either
the GTEST_FILTER environment variable or the --gtest_filter flag.
This script tests such functionality by invoking
gtest_filter_unittest_ (a program written with Google Test) with different
environments and command line flags.

Note that test sharding may also influence which tests are filtered. Therefore,
we test that here also.
"""

__author__ = 'wan@google.com (Zhanyong Wan)'

import os
import re
# NOTE(review): 'sets' is a Python-2-only module (removed in Python 3);
# this script is written for Python 2 (see the print statements in the
# subprocess probe strings below).
import sets
import sys

import gtest_test_utils

# Constants.

# Checks if this platform can pass empty environment variables to child
# processes.  We set an env variable to an empty string and invoke a python
# script in a subprocess to print whether the variable is STILL in
# os.environ.  We then use 'eval' to parse the child's output so that an
# exception is thrown if the input is anything other than 'True' nor 'False'.
os.environ['EMPTY_VAR'] = ''
child = gtest_test_utils.Subprocess(
    [sys.executable, '-c', 'import os; print \'EMPTY_VAR\' in os.environ'])
CAN_PASS_EMPTY_ENV = eval(child.output)

# Check if this platform can unset environment variables in child processes.
# We set an env variable to a non-empty string, unset it, and invoke
# a python script in a subprocess to print whether the variable
# is NO LONGER in os.environ.
# We use 'eval' to parse the child's output so that an exception
# is thrown if the input is neither 'True' nor 'False'.
os.environ['UNSET_VAR'] = 'X'
del os.environ['UNSET_VAR']
child = gtest_test_utils.Subprocess(
    [sys.executable, '-c', 'import os; print \'UNSET_VAR\' not in os.environ'])
CAN_UNSET_ENV = eval(child.output)

# Checks if we should test with an empty filter. This doesn't
# make sense on platforms that cannot pass empty env variables (Win32)
# and on platforms that cannot unset variables (since we cannot tell
# the difference between "" and NULL -- Borland and Solaris < 5.10)
CAN_TEST_EMPTY_FILTER = (CAN_PASS_EMPTY_ENV and CAN_UNSET_ENV)

# The environment variable for specifying the test filters.
FILTER_ENV_VAR = 'GTEST_FILTER'

# The environment variables for test sharding.
TOTAL_SHARDS_ENV_VAR = 'GTEST_TOTAL_SHARDS'
SHARD_INDEX_ENV_VAR = 'GTEST_SHARD_INDEX'
SHARD_STATUS_FILE_ENV_VAR = 'GTEST_SHARD_STATUS_FILE'

# The command line flag for specifying the test filters.
FILTER_FLAG = 'gtest_filter'

# The command line flag for including disabled tests.
# NOTE(review): constant name misspells "DISABLED" ("DISABED"); the flag
# string itself is correct.  Renaming would require touching all users.
ALSO_RUN_DISABED_TESTS_FLAG = 'gtest_also_run_disabled_tests'

# Command to run the gtest_filter_unittest_ program.
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_filter_unittest_')

# Regex for determining whether parameterized tests are enabled in the binary.
PARAM_TEST_REGEX = re.compile(r'/ParamTest')

# Regex for parsing test case names from Google Test's output.
TEST_CASE_REGEX = re.compile(r'^\[\-+\] \d+ tests? from (\w+(/\w+)?)')

# Regex for parsing test names from Google Test's output.
TEST_REGEX = re.compile(r'^\[\s*RUN\s*\].*\.(\w+(/\w+)?)')

# The command line flag to tell Google Test to output the list of tests it
# will run.
LIST_TESTS_FLAG = '--gtest_list_tests'

# Indicates whether Google Test supports death tests.
SUPPORTS_DEATH_TESTS = 'HasDeathTest' in gtest_test_utils.Subprocess(
    [COMMAND, LIST_TESTS_FLAG]).output

# Full names of all tests in gtest_filter_unittests_.
PARAM_TESTS = [
    'SeqP/ParamTest.TestX/0',
    'SeqP/ParamTest.TestX/1',
    'SeqP/ParamTest.TestY/0',
    'SeqP/ParamTest.TestY/1',
    'SeqQ/ParamTest.TestX/0',
    'SeqQ/ParamTest.TestX/1',
    'SeqQ/ParamTest.TestY/0',
    'SeqQ/ParamTest.TestY/1',
    ]

DISABLED_TESTS = [
    'BarTest.DISABLED_TestFour',
    'BarTest.DISABLED_TestFive',
    'BazTest.DISABLED_TestC',
    'DISABLED_FoobarTest.Test1',
    'DISABLED_FoobarTest.DISABLED_Test2',
    'DISABLED_FoobarbazTest.TestA',
    ]

if SUPPORTS_DEATH_TESTS:
  DEATH_TESTS = [
    'HasDeathTest.Test1',
    'HasDeathTest.Test2',
    ]
else:
  DEATH_TESTS = []

# All the non-disabled tests.
ACTIVE_TESTS = [
    'FooTest.Abc',
    'FooTest.Xyz',

    'BarTest.TestOne',
    'BarTest.TestTwo',
    'BarTest.TestThree',

    'BazTest.TestOne',
    'BazTest.TestA',
    'BazTest.TestB',
    ] + DEATH_TESTS + PARAM_TESTS

# Set lazily once the binary has been listed; None means "not probed yet".
param_tests_present = None

# Utilities.
environ = os.environ.copy() def SetEnvVar(env_var, value): """Sets the env variable to 'value'; unsets it when 'value' is None.""" if value is not None: environ[env_var] = value elif env_var in environ: del environ[env_var] def RunAndReturnOutput(args = None): """Runs the test program and returns its output.""" return gtest_test_utils.Subprocess([COMMAND] + (args or []), env=environ).output def RunAndExtractTestList(args = None): """Runs the test program and returns its exit code and a list of tests run.""" p = gtest_test_utils.Subprocess([COMMAND] + (args or []), env=environ) tests_run = [] test_case = '' test = '' for line in p.output.split('\n'): match = TEST_CASE_REGEX.match(line) if match is not None: test_case = match.group(1) else: match = TEST_REGEX.match(line) if match is not None: test = match.group(1) tests_run.append(test_case + '.' + test) return (tests_run, p.exit_code) def InvokeWithModifiedEnv(extra_env, function, *args, **kwargs): """Runs the given function and arguments in a modified environment.""" try: original_env = environ.copy() environ.update(extra_env) return function(*args, **kwargs) finally: environ.clear() environ.update(original_env) def RunWithSharding(total_shards, shard_index, command): """Runs a test program shard and returns exit code and a list of tests run.""" extra_env = {SHARD_INDEX_ENV_VAR: str(shard_index), TOTAL_SHARDS_ENV_VAR: str(total_shards)} return InvokeWithModifiedEnv(extra_env, RunAndExtractTestList, command) # The unit test. class GTestFilterUnitTest(gtest_test_utils.TestCase): """Tests the env variable or the command line flag to filter tests.""" # Utilities. 
  def AssertSetEqual(self, lhs, rhs):
    """Asserts that two sets are equal."""

    # Checked as mutual containment so the failure message names the
    # offending element rather than just dumping both collections.
    for elem in lhs:
      self.assert_(elem in rhs, '%s in %s' % (elem, rhs))

    for elem in rhs:
      self.assert_(elem in lhs, '%s in %s' % (elem, lhs))

  def AssertPartitionIsValid(self, set_var, list_of_sets):
    """Asserts that list_of_sets is a valid partition of set_var."""

    full_partition = []
    for slice_var in list_of_sets:
      full_partition.extend(slice_var)
    # Same length and same membership together imply no duplicates across
    # slices.  (Uses the Python-2-era 'sets' module imported by this file.)
    self.assertEqual(len(set_var), len(full_partition))
    self.assertEqual(sets.Set(set_var), sets.Set(full_partition))

  def AdjustForParameterizedTests(self, tests_to_run):
    """Adjust tests_to_run in case value parameterized tests are disabled."""

    global param_tests_present
    if not param_tests_present:
      return list(sets.Set(tests_to_run) - sets.Set(PARAM_TESTS))
    else:
      return tests_to_run

  def RunAndVerify(self, gtest_filter, tests_to_run):
    """Checks that the binary runs correct set of tests for a given filter."""

    tests_to_run = self.AdjustForParameterizedTests(tests_to_run)

    # First, tests using the environment variable.

    # Windows removes empty variables from the environment when passing it
    # to a new process.  This means it is impossible to pass an empty filter
    # into a process using the environment variable.  However, we can still
    # test the case when the variable is not supplied (i.e., gtest_filter is
    # None).
    # pylint: disable-msg=C6403
    if CAN_TEST_EMPTY_FILTER or gtest_filter != '':
      SetEnvVar(FILTER_ENV_VAR, gtest_filter)
      tests_run = RunAndExtractTestList()[0]
      SetEnvVar(FILTER_ENV_VAR, None)
      self.AssertSetEqual(tests_run, tests_to_run)
    # pylint: enable-msg=C6403

    # Next, tests using the command line flag.

    if gtest_filter is None:
      args = []
    else:
      args = ['--%s=%s' % (FILTER_FLAG, gtest_filter)]

    tests_run = RunAndExtractTestList(args)[0]
    self.AssertSetEqual(tests_run, tests_to_run)

  def RunAndVerifyWithSharding(self, gtest_filter, total_shards, tests_to_run,
                               args=None, check_exit_0=False):
    """Checks that binary runs correct tests for the given filter and shard.
    Runs all shards of gtest_filter_unittest_ with the given filter, and
    verifies that the right set of tests were run. The union of tests run
    on each shard should be identical to tests_to_run, without duplicates.

    Args:
      gtest_filter: A filter to apply to the tests.
      total_shards: A total number of shards to split test run into.
      tests_to_run: A set of tests expected to run.
      args:         Arguments to pass to the test binary.
      check_exit_0: When set to a true value, make sure that all shards
                    return 0.
    """

    tests_to_run = self.AdjustForParameterizedTests(tests_to_run)

    # Windows removes empty variables from the environment when passing it
    # to a new process.  This means it is impossible to pass an empty filter
    # into a process using the environment variable.  However, we can still
    # test the case when the variable is not supplied (i.e., gtest_filter is
    # None).
    # pylint: disable-msg=C6403
    if CAN_TEST_EMPTY_FILTER or gtest_filter != '':
      SetEnvVar(FILTER_ENV_VAR, gtest_filter)
      partition = []
      for i in range(0, total_shards):
        (tests_run, exit_code) = RunWithSharding(total_shards, i, args)
        if check_exit_0:
          self.assertEqual(0, exit_code)
        partition.append(tests_run)

      # The shards together must cover tests_to_run exactly once.
      self.AssertPartitionIsValid(tests_to_run, partition)
      SetEnvVar(FILTER_ENV_VAR, None)
    # pylint: enable-msg=C6403

  def RunAndVerifyAllowingDisabled(self, gtest_filter, tests_to_run):
    """Checks that the binary runs correct set of tests for the given filter.

    Runs gtest_filter_unittest_ with the given filter, and enables
    disabled tests. Verifies that the right set of tests were run.

    Args:
      gtest_filter: A filter to apply to the tests.
      tests_to_run: A set of tests expected to run.
    """

    tests_to_run = self.AdjustForParameterizedTests(tests_to_run)

    # Construct the command line.
    args = ['--%s' % ALSO_RUN_DISABED_TESTS_FLAG]
    if gtest_filter is not None:
      args.append('--%s=%s' % (FILTER_FLAG, gtest_filter))

    tests_run = RunAndExtractTestList(args)[0]
    self.AssertSetEqual(tests_run, tests_to_run)

  def setUp(self):
    """Sets up test case.
    Determines whether value-parameterized tests are enabled in the binary and
    sets the flags accordingly.
    """

    # Probe the binary only once; the result is cached module-wide.
    global param_tests_present
    if param_tests_present is None:
      param_tests_present = PARAM_TEST_REGEX.search(
          RunAndReturnOutput()) is not None

  def testDefaultBehavior(self):
    """Tests the behavior of not specifying the filter."""

    self.RunAndVerify(None, ACTIVE_TESTS)

  def testDefaultBehaviorWithShards(self):
    """Tests the behavior without the filter, with sharding enabled."""

    # Exercise fewer shards than tests, exactly as many, and more.
    self.RunAndVerifyWithSharding(None, 1, ACTIVE_TESTS)
    self.RunAndVerifyWithSharding(None, 2, ACTIVE_TESTS)
    self.RunAndVerifyWithSharding(None, len(ACTIVE_TESTS) - 1, ACTIVE_TESTS)
    self.RunAndVerifyWithSharding(None, len(ACTIVE_TESTS), ACTIVE_TESTS)
    self.RunAndVerifyWithSharding(None, len(ACTIVE_TESTS) + 1, ACTIVE_TESTS)

  def testEmptyFilter(self):
    """Tests an empty filter."""

    self.RunAndVerify('', [])
    self.RunAndVerifyWithSharding('', 1, [])
    self.RunAndVerifyWithSharding('', 2, [])

  def testBadFilter(self):
    """Tests a filter that matches nothing."""

    self.RunAndVerify('BadFilter', [])
    self.RunAndVerifyAllowingDisabled('BadFilter', [])

  def testFullName(self):
    """Tests filtering by full name."""

    self.RunAndVerify('FooTest.Xyz', ['FooTest.Xyz'])
    self.RunAndVerifyAllowingDisabled('FooTest.Xyz', ['FooTest.Xyz'])
    self.RunAndVerifyWithSharding('FooTest.Xyz', 5, ['FooTest.Xyz'])

  def testUniversalFilters(self):
    """Tests filters that match everything."""

    self.RunAndVerify('*', ACTIVE_TESTS)
    self.RunAndVerify('*.*', ACTIVE_TESTS)
    self.RunAndVerifyWithSharding('*.*', len(ACTIVE_TESTS) - 3, ACTIVE_TESTS)
    self.RunAndVerifyAllowingDisabled('*', ACTIVE_TESTS + DISABLED_TESTS)
    self.RunAndVerifyAllowingDisabled('*.*', ACTIVE_TESTS + DISABLED_TESTS)

  def testFilterByTestCase(self):
    """Tests filtering by test case name."""

    self.RunAndVerify('FooTest.*', ['FooTest.Abc', 'FooTest.Xyz'])

    BAZ_TESTS = ['BazTest.TestOne', 'BazTest.TestA', 'BazTest.TestB']
    self.RunAndVerify('BazTest.*', BAZ_TESTS)
    self.RunAndVerifyAllowingDisabled('BazTest.*',
                                      BAZ_TESTS + ['BazTest.DISABLED_TestC'])

  def testFilterByTest(self):
    """Tests filtering by test name."""

    self.RunAndVerify('*.TestOne', ['BarTest.TestOne', 'BazTest.TestOne'])

  def testFilterDisabledTests(self):
    """Select only the disabled tests to run."""

    self.RunAndVerify('DISABLED_FoobarTest.Test1', [])
    self.RunAndVerifyAllowingDisabled('DISABLED_FoobarTest.Test1',
                                      ['DISABLED_FoobarTest.Test1'])

    self.RunAndVerify('*DISABLED_*', [])
    self.RunAndVerifyAllowingDisabled('*DISABLED_*', DISABLED_TESTS)

    # 'X.DISABLED_*' matches disabled tests in enabled-or-disabled cases...
    self.RunAndVerify('*.DISABLED_*', [])
    self.RunAndVerifyAllowingDisabled('*.DISABLED_*', [
        'BarTest.DISABLED_TestFour',
        'BarTest.DISABLED_TestFive',
        'BazTest.DISABLED_TestC',
        'DISABLED_FoobarTest.DISABLED_Test2',
        ])

    # ...while 'DISABLED_*' matches whole disabled test cases.
    self.RunAndVerify('DISABLED_*', [])
    self.RunAndVerifyAllowingDisabled('DISABLED_*', [
        'DISABLED_FoobarTest.Test1',
        'DISABLED_FoobarTest.DISABLED_Test2',
        'DISABLED_FoobarbazTest.TestA',
        ])

  def testWildcardInTestCaseName(self):
    """Tests using wildcard in the test case name."""

    self.RunAndVerify('*a*.*', [
        'BarTest.TestOne',
        'BarTest.TestTwo',
        'BarTest.TestThree',

        'BazTest.TestOne',
        'BazTest.TestA',
        'BazTest.TestB', ] + DEATH_TESTS + PARAM_TESTS)

  def testWildcardInTestName(self):
    """Tests using wildcard in the test name."""

    self.RunAndVerify('*.*A*', ['FooTest.Abc', 'BazTest.TestA'])

  def testFilterWithoutDot(self):
    """Tests a filter that has no '.'
    in it."""
    self.RunAndVerify('*z*', [
        'FooTest.Xyz',

        'BazTest.TestOne',
        'BazTest.TestA',
        'BazTest.TestB',
        ])

  def testTwoPatterns(self):
    """Tests filters that consist of two patterns."""

    self.RunAndVerify('Foo*.*:*A*', [
        'FooTest.Abc',
        'FooTest.Xyz',

        'BazTest.TestA',
        ])

    # An empty pattern + a non-empty one
    self.RunAndVerify(':*A*', ['FooTest.Abc', 'BazTest.TestA'])

  def testThreePatterns(self):
    """Tests filters that consist of three patterns."""

    self.RunAndVerify('*oo*:*A*:*One', [
        'FooTest.Abc',
        'FooTest.Xyz',

        'BarTest.TestOne',

        'BazTest.TestOne',
        'BazTest.TestA',
        ])

    # The 2nd pattern is empty.
    self.RunAndVerify('*oo*::*One', [
        'FooTest.Abc',
        'FooTest.Xyz',

        'BarTest.TestOne',

        'BazTest.TestOne',
        ])

    # The last 2 patterns are empty.
    self.RunAndVerify('*oo*::', [
        'FooTest.Abc',
        'FooTest.Xyz',
        ])

  def testNegativeFilters(self):
    """Tests filters with negative (excluded) patterns after '-'."""
    self.RunAndVerify('*-BazTest.TestOne', [
        'FooTest.Abc',
        'FooTest.Xyz',

        'BarTest.TestOne',
        'BarTest.TestTwo',
        'BarTest.TestThree',

        'BazTest.TestA',
        'BazTest.TestB',
        ] + DEATH_TESTS + PARAM_TESTS)

    self.RunAndVerify('*-FooTest.Abc:BazTest.*', [
        'FooTest.Xyz',

        'BarTest.TestOne',
        'BarTest.TestTwo',
        'BarTest.TestThree',
        ] + DEATH_TESTS + PARAM_TESTS)

    self.RunAndVerify('BarTest.*-BarTest.TestOne', [
        'BarTest.TestTwo',
        'BarTest.TestThree',
        ])

    # Tests without leading '*'.
    self.RunAndVerify('-FooTest.Abc:FooTest.Xyz:BazTest.*', [
        'BarTest.TestOne',
        'BarTest.TestTwo',
        'BarTest.TestThree',
        ] + DEATH_TESTS + PARAM_TESTS)

    # Value parameterized tests.
    self.RunAndVerify('*/*', PARAM_TESTS)

    # Value parameterized tests filtering by the sequence name.
    self.RunAndVerify('SeqP/*', [
        'SeqP/ParamTest.TestX/0',
        'SeqP/ParamTest.TestX/1',
        'SeqP/ParamTest.TestY/0',
        'SeqP/ParamTest.TestY/1',
        ])

    # Value parameterized tests filtering by the test name.
    self.RunAndVerify('*/0', [
        'SeqP/ParamTest.TestX/0',
        'SeqP/ParamTest.TestY/0',
        'SeqQ/ParamTest.TestX/0',
        'SeqQ/ParamTest.TestY/0',
        ])

  def testFlagOverridesEnvVar(self):
    """Tests that the filter flag overrides the filtering env.
    variable."""

    # Env var says 'Foo*' but the flag says '*One'; the flag must win.
    SetEnvVar(FILTER_ENV_VAR, 'Foo*')
    args = ['--%s=%s' % (FILTER_FLAG, '*One')]
    tests_run = RunAndExtractTestList(args)[0]
    SetEnvVar(FILTER_ENV_VAR, None)

    self.AssertSetEqual(tests_run, ['BarTest.TestOne', 'BazTest.TestOne'])

  def testShardStatusFileIsCreated(self):
    """Tests that the shard file is created if specified in the environment."""

    shard_status_file = os.path.join(gtest_test_utils.GetTempDir(),
                                     'shard_status_file')
    self.assert_(not os.path.exists(shard_status_file))

    extra_env = {SHARD_STATUS_FILE_ENV_VAR: shard_status_file}
    try:
      InvokeWithModifiedEnv(extra_env, RunAndReturnOutput)
    finally:
      self.assert_(os.path.exists(shard_status_file))
      os.remove(shard_status_file)

  def testShardStatusFileIsCreatedWithListTests(self):
    """Tests that the shard file is created with the "list_tests" flag."""

    shard_status_file = os.path.join(gtest_test_utils.GetTempDir(),
                                     'shard_status_file2')
    self.assert_(not os.path.exists(shard_status_file))

    extra_env = {SHARD_STATUS_FILE_ENV_VAR: shard_status_file}
    try:
      output = InvokeWithModifiedEnv(extra_env,
                                     RunAndReturnOutput,
                                     [LIST_TESTS_FLAG])
    finally:
      # This assertion ensures that Google Test enumerated the tests as
      # opposed to running them.
      self.assert_('[==========]' not in output,
                   'Unexpected output during test enumeration.\n'
                   'Please ensure that LIST_TESTS_FLAG is assigned the\n'
                   'correct flag value for listing Google Test tests.')

      self.assert_(os.path.exists(shard_status_file))
      os.remove(shard_status_file)

  # Death tests only exist in the binary when SUPPORTS_DEATH_TESTS was
  # detected at import time, so this test is defined conditionally.
  if SUPPORTS_DEATH_TESTS:
    def testShardingWorksWithDeathTests(self):
      """Tests integration with death tests and sharding."""

      gtest_filter = 'HasDeathTest.*:SeqP/*'
      expected_tests = [
          'HasDeathTest.Test1',
          'HasDeathTest.Test2',

          'SeqP/ParamTest.TestX/0',
          'SeqP/ParamTest.TestX/1',
          'SeqP/ParamTest.TestY/0',
          'SeqP/ParamTest.TestY/1',
          ]

      # Check both death-test styles under two shard counts.
      for flag in ['--gtest_death_test_style=threadsafe',
                   '--gtest_death_test_style=fast']:
        self.RunAndVerifyWithSharding(gtest_filter, 3, expected_tests,
                                      check_exit_0=True, args=[flag])
        self.RunAndVerifyWithSharding(gtest_filter, 5, expected_tests,
                                      check_exit_0=True, args=[flag])

if __name__ == '__main__':
  gtest_test_utils.Main()
ccnmtl/lettuce
refs/heads/master
tests/integration/lib/Django-1.3/django/conf/locale/uk/__init__.py
12133432
Salat-Cx65/python-for-android
refs/heads/master
python3-alpha/python3-src/Lib/codeop.py
187
r"""Utilities to compile possibly incomplete Python source code.

This module provides two interfaces, broadly similar to the builtin
function compile(), which take program text, a filename and a 'mode'
and:

- Return code object if the command is complete and valid
- Return None if the command is incomplete
- Raise SyntaxError, ValueError or OverflowError if the command is a
  syntax error (OverflowError and ValueError can be produced by
  malformed literals).

Approach:

First, check if the source consists entirely of blank lines and
comments; if so, replace it with 'pass', because the built-in
parser doesn't always do the right thing for these.

Compile three times: as is, with \n, and with \n\n appended.  If it
compiles as is, it's complete.  If it compiles with one \n appended,
we expect more.  If it doesn't compile either way, we compare the
error we get when compiling with \n or \n\n appended.  If the errors
are the same, the code is broken.  But if the errors are different, we
expect more.  Not intuitive; not even guaranteed to hold in future
releases; but this matches the compiler's behavior from Python 1.4
through 2.2, at least.

Caveat:

It is possible (but not likely) that the parser stops parsing with a
successful outcome before reaching the end of the source; in this
case, trailing symbols may be ignored instead of causing an error.
For example, a backslash followed by two newlines may be followed by
arbitrary garbage.  This will be fixed once the API for the parser is
better.

The two interfaces are:

compile_command(source, filename, symbol):

    Compiles a single command in the manner described above.

CommandCompiler():

    Instances of this class have __call__ methods identical in
    signature to compile_command; the difference is that if the
    instance compiles program text containing a __future__ statement,
    the instance 'remembers' and compiles all subsequent program texts
    with the statement in force.

The module also provides another class:

Compile():

    Instances of this class act like the built-in function compile,
    but with 'memory' in the sense described above.
"""

import __future__

# All feature objects declared by the __future__ module, used to detect
# which compiler flags a compiled code object turned on.
_features = [getattr(__future__, fname)
             for fname in __future__.all_feature_names]

__all__ = ["compile_command", "Compile", "CommandCompiler"]

PyCF_DONT_IMPLY_DEDENT = 0x200          # Matches pythonrun.h

def _maybe_compile(compiler, source, filename, symbol):
    """Compile 'source' with 'compiler'; return None if it looks incomplete.

    Implements the three-way compile heuristic described in the module
    docstring.  Falling off the end (implicitly returning None) signals
    "incomplete input, expect more".
    """
    # Check for source consisting of only blank lines and comments
    for line in source.split("\n"):
        line = line.strip()
        if line and line[0] != '#':
            break               # Leave it alone
    else:
        if symbol != "eval":
            source = "pass"     # Replace it with a 'pass' statement

    err = err1 = err2 = None
    code = code1 = code2 = None

    try:
        code = compiler(source, filename, symbol)
    except SyntaxError as err:
        pass

    try:
        code1 = compiler(source + "\n", filename, symbol)
    except SyntaxError as e:
        err1 = e

    try:
        code2 = compiler(source + "\n\n", filename, symbol)
    except SyntaxError as e:
        err2 = e

    if code:
        return code
    # Identical errors with one and two newlines appended mean the input is
    # genuinely broken, not merely unfinished.
    if not code1 and repr(err1) == repr(err2):
        raise err1

def _compile(source, filename, symbol):
    # PyCF_DONT_IMPLY_DEDENT keeps the compiler from treating EOF as an
    # implicit dedent, which is what lets incomplete blocks raise.
    return compile(source, filename, symbol, PyCF_DONT_IMPLY_DEDENT)

def compile_command(source, filename="<input>", symbol="single"):
    r"""Compile a command and determine whether it is incomplete.

    Arguments:

    source -- the source string; may contain \n characters
    filename -- optional filename from which source was read; default
                "<input>"
    symbol -- optional grammar start symbol; "single" (default) or "eval"

    Return value / exceptions raised:

    - Return a code object if the command is complete and valid
    - Return None if the command is incomplete
    - Raise SyntaxError, ValueError or OverflowError if the command is a
      syntax error (OverflowError and ValueError can be produced by
      malformed literals).
    """
    return _maybe_compile(_compile, source, filename, symbol)

class Compile:
    """Instances of this class behave much like the built-in compile
    function, but if one is used to compile text containing a future
    statement, it "remembers" and compiles all subsequent program texts
    with the statement in force."""
    def __init__(self):
        self.flags = PyCF_DONT_IMPLY_DEDENT

    def __call__(self, source, filename, symbol):
        codeob = compile(source, filename, symbol, self.flags, 1)
        # Accumulate any __future__ flags the compiled code enabled so they
        # stay in force for subsequent compiles.
        for feature in _features:
            if codeob.co_flags & feature.compiler_flag:
                self.flags |= feature.compiler_flag
        return codeob

class CommandCompiler:
    """Instances of this class have __call__ methods identical in
    signature to compile_command; the difference is that if the
    instance compiles program text containing a __future__ statement,
    the instance 'remembers' and compiles all subsequent program texts
    with the statement in force."""

    def __init__(self,):
        self.compiler = Compile()

    def __call__(self, source, filename="<input>", symbol="single"):
        r"""Compile a command and determine whether it is incomplete.

        Arguments:

        source -- the source string; may contain \n characters
        filename -- optional filename from which source was read;
                    default "<input>"
        symbol -- optional grammar start symbol; "single" (default) or
                  "eval"

        Return value / exceptions raised:

        - Return a code object if the command is complete and valid
        - Return None if the command is incomplete
        - Raise SyntaxError, ValueError or OverflowError if the command is a
          syntax error (OverflowError and ValueError can be produced by
          malformed literals).
        """
        return self.compiler(source, filename, symbol)
tensorflow/lingvo
refs/heads/master
lingvo/tasks/milan/score_functions.py
1
# Lint as: python3
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Implementation of combination functions for dual-encoder models."""

from lingvo import compat as tf
from lingvo.core import base_layer


class DotProductScoreFunction(base_layer.BaseLayer):
  """Performs dot product combination between two encoded vectors."""

  @classmethod
  def Params(cls):
    """Returns layer hyperparams with a default layer name filled in."""
    params = super().Params()
    params.name = 'dot_product_score_function'
    return params

  def FProp(self, theta, x, y):
    """Computes pair-wise dot product similarity.

    Args:
      theta: NestedMap of variables belonging to this layer and its children.
      x: batch of encoded representations from modality x. A float32 Tensor
        of shape [x_batch_size, encoded_dim]
      y: batch of encoded representations from modality y. A float32 Tensor
        of shape [y_batch_size, encoded_dim]

    Returns:
      Pairwise dot products. A float32 Tensor with shape
      `[x_batch_size, y_batch_size]`.
    """
    # scores[i, j] = dot(x[i], y[j]); transpose_b contracts both operands
    # over their shared trailing encoded_dim axis.
    scores = tf.matmul(x, y, transpose_b=True)
    return scores
ruitian/firefly
refs/heads/master
firefly/views/api/__init__.py
9
# coding=utf-8
"""Registers all REST API resources on the ``/api`` blueprint.

NOTE(review): the encoding declaration above was previously placed on the
second line, *after* the ``__future__`` import.  PEP 263 requires the
magic comment to appear before any code line (line 1, or line 2 after a
shebang/comment), so it was being ignored by the tokenizer.  Moving it to
line 1 makes it effective; runtime behavior is otherwise unchanged.
"""
from __future__ import absolute_import

from flask import Blueprint
from flask_restful import Api

from .category import CategoryApi, CategoryListApi
from .comment import ReplyApi
from .user import FollowUserApi, BlockUserApi

# Single blueprint for the whole JSON API, mounted under /api.
bp = Blueprint('api', __name__, url_prefix='/api')
api = Api(bp)

# URL routing table: rule -> flask-restful resource class.
api.add_resource(CategoryListApi, '/categories')
api.add_resource(CategoryApi, '/categories/<slug>')
api.add_resource(FollowUserApi, '/users/<id>/follow')
api.add_resource(BlockUserApi, '/users/<id>/block')
api.add_resource(ReplyApi, '/posts/<int:id>/replies')
birkestroem/NougatUI
refs/heads/master
tools/bin/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py
217
# This file comes from
#   https://github.com/martine/ninja/blob/master/misc/ninja_syntax.py
# Do not edit! Edit the upstream one instead.

"""Python module for generating .ninja files.

Note that this is emphatically not a required piece of Ninja; it's
just a helpful utility for build-file-generation systems that already
use Python.
"""

import textwrap
import re

def escape_path(word):
    # '$ ' must be escaped first so the '$' added for plain spaces is not
    # itself re-escaped by the following replacements.
    return word.replace('$ ','$$ ').replace(' ','$ ').replace(':', '$:')

class Writer(object):
    """Emits well-formed, width-wrapped ninja syntax to a file-like object."""

    def __init__(self, output, width=78):
        self.output = output
        self.width = width

    def newline(self):
        self.output.write('\n')

    def comment(self, text):
        # Wrap at width - 2 to leave room for the leading '# '.
        for line in textwrap.wrap(text, self.width - 2):
            self.output.write('# ' + line + '\n')

    def variable(self, key, value, indent=0):
        if value is None:
            return
        if isinstance(value, list):
            value = ' '.join(filter(None, value))  # Filter out empty strings.
        self._line('%s = %s' % (key, value), indent)

    def rule(self, name, command, description=None, depfile=None,
             generator=False, restat=False, rspfile=None,
             rspfile_content=None):
        # Emit the rule header, then each optional rule variable that was set.
        self._line('rule %s' % name)
        self.variable('command', command, indent=1)
        if description:
            self.variable('description', description, indent=1)
        if depfile:
            self.variable('depfile', depfile, indent=1)
        if generator:
            self.variable('generator', '1', indent=1)
        if restat:
            self.variable('restat', '1', indent=1)
        if rspfile:
            self.variable('rspfile', rspfile, indent=1)
        if rspfile_content:
            self.variable('rspfile_content', rspfile_content, indent=1)

    def build(self, outputs, rule, inputs=None, implicit=None, order_only=None,
              variables=None):
        """Emits a build statement; returns the (listified) outputs."""
        outputs = self._as_list(outputs)
        all_inputs = self._as_list(inputs)[:]
        out_outputs = list(map(escape_path, outputs))
        all_inputs = list(map(escape_path, all_inputs))

        # Implicit deps follow a '|' separator, order-only deps follow '||'.
        if implicit:
            implicit = map(escape_path, self._as_list(implicit))
            all_inputs.append('|')
            all_inputs.extend(implicit)
        if order_only:
            order_only = map(escape_path, self._as_list(order_only))
            all_inputs.append('||')
            all_inputs.extend(order_only)

        self._line('build %s: %s %s' % (' '.join(out_outputs),
                                        rule,
                                        ' '.join(all_inputs)))

        if variables:
            if isinstance(variables, dict):
                # NOTE(review): iteritems() is Python-2-only; this module
                # predates Python 3 support.
                iterator = variables.iteritems()
            else:
                iterator = iter(variables)

            for key, val in iterator:
                self.variable(key, val, indent=1)

        return outputs

    def include(self, path):
        self._line('include %s' % path)

    def subninja(self, path):
        self._line('subninja %s' % path)

    def default(self, paths):
        self._line('default %s' % ' '.join(self._as_list(paths)))

    def _count_dollars_before_index(self, s, i):
        """Returns the number of '$' characters right in front of s[i]."""
        dollar_count = 0
        dollar_index = i - 1
        while dollar_index > 0 and s[dollar_index] == '$':
            dollar_count += 1
            dollar_index -= 1
        return dollar_count

    def _line(self, text, indent=0):
        """Write 'text' word-wrapped at self.width characters."""
        leading_space = ' ' * indent
        while len(leading_space) + len(text) > self.width:
            # The text is too wide; wrap if possible.

            # Find the rightmost space that would obey our width constraint and
            # that's not an escaped space.  An even number of preceding '$'
            # characters means the space itself is unescaped.
            available_space = self.width - len(leading_space) - len(' $')
            space = available_space
            while True:
                space = text.rfind(' ', 0, space)
                if space < 0 or \
                   self._count_dollars_before_index(text, space) % 2 == 0:
                    break

            if space < 0:
                # No such space; just use the first unescaped space we can find.
                space = available_space - 1
                while True:
                    space = text.find(' ', space + 1)
                    if space < 0 or \
                       self._count_dollars_before_index(text, space) % 2 == 0:
                        break
            if space < 0:
                # Give up on breaking.
                break

            self.output.write(leading_space + text[0:space] + ' $\n')
            text = text[space+1:]

            # Subsequent lines are continuations, so indent them.
            leading_space = ' ' * (indent+2)

        self.output.write(leading_space + text + '\n')

    def _as_list(self, input):
        # Normalizes None -> [], scalar -> [scalar], list -> list.
        if input is None:
            return []
        if isinstance(input, list):
            return input
        return [input]


def escape(string):
    """Escape a string such that it can be embedded into a Ninja file without
    further interpretation."""
    assert '\n' not in string, 'Ninja syntax does not allow newlines'
    # We only have one special metacharacter: '$'.
    return string.replace('$', '$$')
savoirfairelinux/odoo
refs/heads/master
addons/purchase/res_config.py
357
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Business Applications
#    Copyright (C) 2004-2012 OpenERP S.A. (<http://openerp.com>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp.osv import fields, osv
from openerp.tools.translate import _


# Transient settings model backing the Purchases configuration screen.
# Fields prefixed 'module_' install/uninstall the named module; fields with
# an 'implied_group' toggle membership of that security group.
class purchase_config_settings(osv.osv_memory):
    _name = 'purchase.config.settings'
    _inherit = 'res.config.settings'

    _columns = {
        'default_invoice_method': fields.selection(
            [('manual', 'Based on purchase order lines'),
             ('picking', 'Based on incoming shipments'),
             ('order', 'Pre-generate draft invoices based on purchase orders'),
            ], 'Default invoicing control method', required=True, default_model='purchase.order'),
        'group_purchase_pricelist':fields.boolean("Manage pricelist per supplier",
            implied_group='product.group_purchase_pricelist',
            help='Allows to manage different prices based on rules per category of Supplier.\n'
                 'Example: 10% for retailers, promotion of 5 EUR on this product, etc.'),
        'group_uom':fields.boolean("Manage different units of measure for products",
            implied_group='product.group_uom',
            help="""Allows you to select and maintain different units of measure for products."""),
        'group_costing_method':fields.boolean("Use 'Real Price' or 'Average' costing methods.",
            implied_group='stock_account.group_inventory_valuation',
            help="""Allows you to compute product cost price based on average cost."""),
        'module_warning': fields.boolean("Alerts by products or supplier",
            help='Allow to configure notification on products and trigger them when a user wants to purchase a given product or a given supplier.\n'
                 'Example: Product: this product is deprecated, do not purchase more than 5.\n'
                 'Supplier: don\'t forget to ask for an express delivery.'),
        'module_purchase_double_validation': fields.boolean("Force two levels of approvals",
            help='Provide a double validation mechanism for purchases exceeding minimum amount.\n'
                 '-This installs the module purchase_double_validation.'),
        'module_purchase_requisition': fields.boolean("Manage calls for bids",
            help="""Calls for bids are used when you want to generate requests for quotations to several suppliers for a given set of products.
                    You can configure per product if you directly do a Request for Quotation
                    to one supplier or if you want a Call for Bids to compare offers from several suppliers."""),
        'group_advance_purchase_requisition': fields.boolean("Choose from several bids in a call for bids",
            implied_group='purchase.group_advance_bidding',
            help="""In the process of a public bidding, you can compare the bid lines and choose for each requested product from which bid you
                    buy which quantity"""),
        'module_purchase_analytic_plans': fields.boolean('Use multiple analytic accounts on purchase orders',
            help='Allows the user to maintain several analysis plans. These let you split lines on a purchase order between several accounts and analytic plans.\n'
                 '-This installs the module purchase_analytic_plans.'),
        'group_analytic_account_for_purchases': fields.boolean('Analytic accounting for purchases',
            implied_group='purchase.group_analytic_accounting',
            help="Allows you to specify an analytic account on purchase orders."),
        'module_stock_dropshipping': fields.boolean("Manage dropshipping",
            help='\nCreates the dropship route and add more complex tests'
                 '-This installs the module stock_dropshipping.'),
    }

    _defaults = {
        'default_invoice_method': 'order',
    }

    def onchange_purchase_analytic_plans(self, cr, uid, ids, module_purchase_analytic_plans, context=None):
        """ change group_analytic_account_for_purchases following module_purchase_analytic_plans """
        if not module_purchase_analytic_plans:
            return {}
        # Enabling analytic plans implies enabling analytic accounting.
        return {'value': {'group_analytic_account_for_purchases': module_purchase_analytic_plans}}


# Mirrors the two purchase-related analytic settings on the Accounting
# configuration screen; kept in sync via the same onchange handler.
class account_config_settings(osv.osv_memory):
    _inherit = 'account.config.settings'
    _columns = {
        'module_purchase_analytic_plans': fields.boolean('Use multiple analytic accounts on orders',
            help='Allows the user to maintain several analysis plans. These let you split lines on a purchase order between several accounts and analytic plans.\n'
                 '-This installs the module purchase_analytic_plans.'),
        'group_analytic_account_for_purchases': fields.boolean('Analytic accounting for purchases',
            implied_group='purchase.group_analytic_accounting',
            help="Allows you to specify an analytic account on purchase orders."),
    }

    def onchange_purchase_analytic_plans(self, cr, uid, ids, module_purchase_analytic_plans, context=None):
        """ change group_analytic_account_for_purchases following module_purchase_analytic_plans """
        if not module_purchase_analytic_plans:
            return {}
        return {'value': {'group_analytic_account_for_purchases': module_purchase_analytic_plans}}

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
GolovanovSrg/au-linux-kernel-spring-2016
refs/heads/master
linux/tools/perf/scripts/python/export-to-postgresql.py
238
# export-to-postgresql.py: export perf data to a postgresql database # Copyright (c) 2014, Intel Corporation. # # This program is free software; you can redistribute it and/or modify it # under the terms and conditions of the GNU General Public License, # version 2, as published by the Free Software Foundation. # # This program is distributed in the hope it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or # FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for # more details. import os import sys import struct import datetime # To use this script you will need to have installed package python-pyside which # provides LGPL-licensed Python bindings for Qt. You will also need the package # libqt4-sql-psql for Qt postgresql support. # # The script assumes postgresql is running on the local machine and that the # user has postgresql permissions to create databases. Examples of installing # postgresql and adding such a user are: # # fedora: # # $ sudo yum install postgresql postgresql-server python-pyside qt-postgresql # $ sudo su - postgres -c initdb # $ sudo service postgresql start # $ sudo su - postgres # $ createuser <your user id here> # Shall the new role be a superuser? (y/n) y # # ubuntu: # # $ sudo apt-get install postgresql # $ sudo su - postgres # $ createuser <your user id here> # Shall the new role be a superuser? (y/n) y # # An example of using this script with Intel PT: # # $ perf record -e intel_pt//u ls # $ perf script -s ~/libexec/perf-core/scripts/python/export-to-postgresql.py pt_example branches calls # 2015-05-29 12:49:23.464364 Creating database... # 2015-05-29 12:49:26.281717 Writing to intermediate files... # 2015-05-29 12:49:27.190383 Copying to database... # 2015-05-29 12:49:28.140451 Removing intermediate files... 
# 2015-05-29 12:49:28.147451 Adding primary keys # 2015-05-29 12:49:28.655683 Adding foreign keys # 2015-05-29 12:49:29.365350 Done # # To browse the database, psql can be used e.g. # # $ psql pt_example # pt_example=# select * from samples_view where id < 100; # pt_example=# \d+ # pt_example=# \d+ samples_view # pt_example=# \q # # An example of using the database is provided by the script # call-graph-from-postgresql.py. Refer to that script for details. # # Tables: # # The tables largely correspond to perf tools' data structures. They are largely self-explanatory. # # samples # # 'samples' is the main table. It represents what instruction was executing at a point in time # when something (a selected event) happened. The memory address is the instruction pointer or 'ip'. # # calls # # 'calls' represents function calls and is related to 'samples' by 'call_id' and 'return_id'. # 'calls' is only created when the 'calls' option to this script is specified. # # call_paths # # 'call_paths' represents all the call stacks. Each 'call' has an associated record in 'call_paths'. # 'calls_paths' is only created when the 'calls' option to this script is specified. # # branch_types # # 'branch_types' provides descriptions for each type of branch. # # comm_threads # # 'comm_threads' shows how 'comms' relates to 'threads'. # # comms # # 'comms' contains a record for each 'comm' - the name given to the executable that is running. # # dsos # # 'dsos' contains a record for each executable file or library. # # machines # # 'machines' can be used to distinguish virtual machines if virtualization is supported. # # selected_events # # 'selected_events' contains a record for each kind of event that has been sampled. # # symbols # # 'symbols' contains a record for each symbol. Only symbols that have samples are present. # # threads # # 'threads' contains a record for each thread. # # Views: # # Most of the tables have views for more friendly display. 
The views are: # # calls_view # call_paths_view # comm_threads_view # dsos_view # machines_view # samples_view # symbols_view # threads_view # # More examples of browsing the database with psql: # Note that some of the examples are not the most optimal SQL query. # Note that call information is only available if the script's 'calls' option has been used. # # Top 10 function calls (not aggregated by symbol): # # SELECT * FROM calls_view ORDER BY elapsed_time DESC LIMIT 10; # # Top 10 function calls (aggregated by symbol): # # SELECT symbol_id,(SELECT name FROM symbols WHERE id = symbol_id) AS symbol, # SUM(elapsed_time) AS tot_elapsed_time,SUM(branch_count) AS tot_branch_count # FROM calls_view GROUP BY symbol_id ORDER BY tot_elapsed_time DESC LIMIT 10; # # Note that the branch count gives a rough estimation of cpu usage, so functions # that took a long time but have a relatively low branch count must have spent time # waiting. # # Find symbols by pattern matching on part of the name (e.g. names containing 'alloc'): # # SELECT * FROM symbols_view WHERE name LIKE '%alloc%'; # # Top 10 function calls for a specific symbol (e.g. whose symbol_id is 187): # # SELECT * FROM calls_view WHERE symbol_id = 187 ORDER BY elapsed_time DESC LIMIT 10; # # Show function calls made by function in the same context (i.e. same call path) (e.g. one with call_path_id 254): # # SELECT * FROM calls_view WHERE parent_call_path_id = 254; # # Show branches made during a function call (e.g. where call_id is 29357 and return_id is 29370 and tid is 29670) # # SELECT * FROM samples_view WHERE id >= 29357 AND id <= 29370 AND tid = 29670 AND event LIKE 'branches%'; # # Show transactions: # # SELECT * FROM samples_view WHERE event = 'transactions'; # # Note transaction start has 'in_tx' true whereas, transaction end has 'in_tx' false. 
# Transaction aborts have branch_type_name 'transaction abort' # # Show transaction aborts: # # SELECT * FROM samples_view WHERE event = 'transactions' AND branch_type_name = 'transaction abort'; # # To print a call stack requires walking the call_paths table. For example this python script: # #!/usr/bin/python2 # # import sys # from PySide.QtSql import * # # if __name__ == '__main__': # if (len(sys.argv) < 3): # print >> sys.stderr, "Usage is: printcallstack.py <database name> <call_path_id>" # raise Exception("Too few arguments") # dbname = sys.argv[1] # call_path_id = sys.argv[2] # db = QSqlDatabase.addDatabase('QPSQL') # db.setDatabaseName(dbname) # if not db.open(): # raise Exception("Failed to open database " + dbname + " error: " + db.lastError().text()) # query = QSqlQuery(db) # print " id ip symbol_id symbol dso_id dso_short_name" # while call_path_id != 0 and call_path_id != 1: # ret = query.exec_('SELECT * FROM call_paths_view WHERE id = ' + str(call_path_id)) # if not ret: # raise Exception("Query failed: " + query.lastError().text()) # if not query.next(): # raise Exception("Query failed") # print "{0:>6} {1:>10} {2:>9} {3:<30} {4:>6} {5:<30}".format(query.value(0), query.value(1), query.value(2), query.value(3), query.value(4), query.value(5)) # call_path_id = query.value(6) from PySide.QtSql import * # Need to access PostgreSQL C library directly to use COPY FROM STDIN from ctypes import * libpq = CDLL("libpq.so.5") PQconnectdb = libpq.PQconnectdb PQconnectdb.restype = c_void_p PQfinish = libpq.PQfinish PQstatus = libpq.PQstatus PQexec = libpq.PQexec PQexec.restype = c_void_p PQresultStatus = libpq.PQresultStatus PQputCopyData = libpq.PQputCopyData PQputCopyData.argtypes = [ c_void_p, c_void_p, c_int ] PQputCopyEnd = libpq.PQputCopyEnd PQputCopyEnd.argtypes = [ c_void_p, c_void_p ] sys.path.append(os.environ['PERF_EXEC_PATH'] + \ '/scripts/python/Perf-Trace-Util/lib/Perf/Trace') # These perf imports are not used at present #from perf_trace_context 
import * #from Core import * perf_db_export_mode = True perf_db_export_calls = False def usage(): print >> sys.stderr, "Usage is: export-to-postgresql.py <database name> [<columns>] [<calls>]" print >> sys.stderr, "where: columns 'all' or 'branches'" print >> sys.stderr, " calls 'calls' => create calls table" raise Exception("Too few arguments") if (len(sys.argv) < 2): usage() dbname = sys.argv[1] if (len(sys.argv) >= 3): columns = sys.argv[2] else: columns = "all" if columns not in ("all", "branches"): usage() branches = (columns == "branches") if (len(sys.argv) >= 4): if (sys.argv[3] == "calls"): perf_db_export_calls = True else: usage() output_dir_name = os.getcwd() + "/" + dbname + "-perf-data" os.mkdir(output_dir_name) def do_query(q, s): if (q.exec_(s)): return raise Exception("Query failed: " + q.lastError().text()) print datetime.datetime.today(), "Creating database..." db = QSqlDatabase.addDatabase('QPSQL') query = QSqlQuery(db) db.setDatabaseName('postgres') db.open() try: do_query(query, 'CREATE DATABASE ' + dbname) except: os.rmdir(output_dir_name) raise query.finish() query.clear() db.close() db.setDatabaseName(dbname) db.open() query = QSqlQuery(db) do_query(query, 'SET client_min_messages TO WARNING') do_query(query, 'CREATE TABLE selected_events (' 'id bigint NOT NULL,' 'name varchar(80))') do_query(query, 'CREATE TABLE machines (' 'id bigint NOT NULL,' 'pid integer,' 'root_dir varchar(4096))') do_query(query, 'CREATE TABLE threads (' 'id bigint NOT NULL,' 'machine_id bigint,' 'process_id bigint,' 'pid integer,' 'tid integer)') do_query(query, 'CREATE TABLE comms (' 'id bigint NOT NULL,' 'comm varchar(16))') do_query(query, 'CREATE TABLE comm_threads (' 'id bigint NOT NULL,' 'comm_id bigint,' 'thread_id bigint)') do_query(query, 'CREATE TABLE dsos (' 'id bigint NOT NULL,' 'machine_id bigint,' 'short_name varchar(256),' 'long_name varchar(4096),' 'build_id varchar(64))') do_query(query, 'CREATE TABLE symbols (' 'id bigint NOT NULL,' 'dso_id bigint,' 
'sym_start bigint,' 'sym_end bigint,' 'binding integer,' 'name varchar(2048))') do_query(query, 'CREATE TABLE branch_types (' 'id integer NOT NULL,' 'name varchar(80))') if branches: do_query(query, 'CREATE TABLE samples (' 'id bigint NOT NULL,' 'evsel_id bigint,' 'machine_id bigint,' 'thread_id bigint,' 'comm_id bigint,' 'dso_id bigint,' 'symbol_id bigint,' 'sym_offset bigint,' 'ip bigint,' 'time bigint,' 'cpu integer,' 'to_dso_id bigint,' 'to_symbol_id bigint,' 'to_sym_offset bigint,' 'to_ip bigint,' 'branch_type integer,' 'in_tx boolean)') else: do_query(query, 'CREATE TABLE samples (' 'id bigint NOT NULL,' 'evsel_id bigint,' 'machine_id bigint,' 'thread_id bigint,' 'comm_id bigint,' 'dso_id bigint,' 'symbol_id bigint,' 'sym_offset bigint,' 'ip bigint,' 'time bigint,' 'cpu integer,' 'to_dso_id bigint,' 'to_symbol_id bigint,' 'to_sym_offset bigint,' 'to_ip bigint,' 'period bigint,' 'weight bigint,' 'transaction bigint,' 'data_src bigint,' 'branch_type integer,' 'in_tx boolean)') if perf_db_export_calls: do_query(query, 'CREATE TABLE call_paths (' 'id bigint NOT NULL,' 'parent_id bigint,' 'symbol_id bigint,' 'ip bigint)') do_query(query, 'CREATE TABLE calls (' 'id bigint NOT NULL,' 'thread_id bigint,' 'comm_id bigint,' 'call_path_id bigint,' 'call_time bigint,' 'return_time bigint,' 'branch_count bigint,' 'call_id bigint,' 'return_id bigint,' 'parent_call_path_id bigint,' 'flags integer)') do_query(query, 'CREATE VIEW machines_view AS ' 'SELECT ' 'id,' 'pid,' 'root_dir,' 'CASE WHEN id=0 THEN \'unknown\' WHEN pid=-1 THEN \'host\' ELSE \'guest\' END AS host_or_guest' ' FROM machines') do_query(query, 'CREATE VIEW dsos_view AS ' 'SELECT ' 'id,' 'machine_id,' '(SELECT host_or_guest FROM machines_view WHERE id = machine_id) AS host_or_guest,' 'short_name,' 'long_name,' 'build_id' ' FROM dsos') do_query(query, 'CREATE VIEW symbols_view AS ' 'SELECT ' 'id,' 'name,' '(SELECT short_name FROM dsos WHERE id=dso_id) AS dso,' 'dso_id,' 'sym_start,' 'sym_end,' 'CASE WHEN 
binding=0 THEN \'local\' WHEN binding=1 THEN \'global\' ELSE \'weak\' END AS binding' ' FROM symbols') do_query(query, 'CREATE VIEW threads_view AS ' 'SELECT ' 'id,' 'machine_id,' '(SELECT host_or_guest FROM machines_view WHERE id = machine_id) AS host_or_guest,' 'process_id,' 'pid,' 'tid' ' FROM threads') do_query(query, 'CREATE VIEW comm_threads_view AS ' 'SELECT ' 'comm_id,' '(SELECT comm FROM comms WHERE id = comm_id) AS command,' 'thread_id,' '(SELECT pid FROM threads WHERE id = thread_id) AS pid,' '(SELECT tid FROM threads WHERE id = thread_id) AS tid' ' FROM comm_threads') if perf_db_export_calls: do_query(query, 'CREATE VIEW call_paths_view AS ' 'SELECT ' 'c.id,' 'to_hex(c.ip) AS ip,' 'c.symbol_id,' '(SELECT name FROM symbols WHERE id = c.symbol_id) AS symbol,' '(SELECT dso_id FROM symbols WHERE id = c.symbol_id) AS dso_id,' '(SELECT dso FROM symbols_view WHERE id = c.symbol_id) AS dso_short_name,' 'c.parent_id,' 'to_hex(p.ip) AS parent_ip,' 'p.symbol_id AS parent_symbol_id,' '(SELECT name FROM symbols WHERE id = p.symbol_id) AS parent_symbol,' '(SELECT dso_id FROM symbols WHERE id = p.symbol_id) AS parent_dso_id,' '(SELECT dso FROM symbols_view WHERE id = p.symbol_id) AS parent_dso_short_name' ' FROM call_paths c INNER JOIN call_paths p ON p.id = c.parent_id') do_query(query, 'CREATE VIEW calls_view AS ' 'SELECT ' 'calls.id,' 'thread_id,' '(SELECT pid FROM threads WHERE id = thread_id) AS pid,' '(SELECT tid FROM threads WHERE id = thread_id) AS tid,' '(SELECT comm FROM comms WHERE id = comm_id) AS command,' 'call_path_id,' 'to_hex(ip) AS ip,' 'symbol_id,' '(SELECT name FROM symbols WHERE id = symbol_id) AS symbol,' 'call_time,' 'return_time,' 'return_time - call_time AS elapsed_time,' 'branch_count,' 'call_id,' 'return_id,' 'CASE WHEN flags=1 THEN \'no call\' WHEN flags=2 THEN \'no return\' WHEN flags=3 THEN \'no call/return\' ELSE \'\' END AS flags,' 'parent_call_path_id' ' FROM calls INNER JOIN call_paths ON call_paths.id = call_path_id') do_query(query, 
'CREATE VIEW samples_view AS ' 'SELECT ' 'id,' 'time,' 'cpu,' '(SELECT pid FROM threads WHERE id = thread_id) AS pid,' '(SELECT tid FROM threads WHERE id = thread_id) AS tid,' '(SELECT comm FROM comms WHERE id = comm_id) AS command,' '(SELECT name FROM selected_events WHERE id = evsel_id) AS event,' 'to_hex(ip) AS ip_hex,' '(SELECT name FROM symbols WHERE id = symbol_id) AS symbol,' 'sym_offset,' '(SELECT short_name FROM dsos WHERE id = dso_id) AS dso_short_name,' 'to_hex(to_ip) AS to_ip_hex,' '(SELECT name FROM symbols WHERE id = to_symbol_id) AS to_symbol,' 'to_sym_offset,' '(SELECT short_name FROM dsos WHERE id = to_dso_id) AS to_dso_short_name,' '(SELECT name FROM branch_types WHERE id = branch_type) AS branch_type_name,' 'in_tx' ' FROM samples') file_header = struct.pack("!11sii", "PGCOPY\n\377\r\n\0", 0, 0) file_trailer = "\377\377" def open_output_file(file_name): path_name = output_dir_name + "/" + file_name file = open(path_name, "w+") file.write(file_header) return file def close_output_file(file): file.write(file_trailer) file.close() def copy_output_file_direct(file, table_name): close_output_file(file) sql = "COPY " + table_name + " FROM '" + file.name + "' (FORMAT 'binary')" do_query(query, sql) # Use COPY FROM STDIN because security may prevent postgres from accessing the files directly def copy_output_file(file, table_name): conn = PQconnectdb("dbname = " + dbname) if (PQstatus(conn)): raise Exception("COPY FROM STDIN PQconnectdb failed") file.write(file_trailer) file.seek(0) sql = "COPY " + table_name + " FROM STDIN (FORMAT 'binary')" res = PQexec(conn, sql) if (PQresultStatus(res) != 4): raise Exception("COPY FROM STDIN PQexec failed") data = file.read(65536) while (len(data)): ret = PQputCopyData(conn, data, len(data)) if (ret != 1): raise Exception("COPY FROM STDIN PQputCopyData failed, error " + str(ret)) data = file.read(65536) ret = PQputCopyEnd(conn, None) if (ret != 1): raise Exception("COPY FROM STDIN PQputCopyEnd failed, error " + 
str(ret)) PQfinish(conn) def remove_output_file(file): name = file.name file.close() os.unlink(name) evsel_file = open_output_file("evsel_table.bin") machine_file = open_output_file("machine_table.bin") thread_file = open_output_file("thread_table.bin") comm_file = open_output_file("comm_table.bin") comm_thread_file = open_output_file("comm_thread_table.bin") dso_file = open_output_file("dso_table.bin") symbol_file = open_output_file("symbol_table.bin") branch_type_file = open_output_file("branch_type_table.bin") sample_file = open_output_file("sample_table.bin") if perf_db_export_calls: call_path_file = open_output_file("call_path_table.bin") call_file = open_output_file("call_table.bin") def trace_begin(): print datetime.datetime.today(), "Writing to intermediate files..." # id == 0 means unknown. It is easier to create records for them than replace the zeroes with NULLs evsel_table(0, "unknown") machine_table(0, 0, "unknown") thread_table(0, 0, 0, -1, -1) comm_table(0, "unknown") dso_table(0, 0, "unknown", "unknown", "") symbol_table(0, 0, 0, 0, 0, "unknown") sample_table(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) if perf_db_export_calls: call_path_table(0, 0, 0, 0) unhandled_count = 0 def trace_end(): print datetime.datetime.today(), "Copying to database..." copy_output_file(evsel_file, "selected_events") copy_output_file(machine_file, "machines") copy_output_file(thread_file, "threads") copy_output_file(comm_file, "comms") copy_output_file(comm_thread_file, "comm_threads") copy_output_file(dso_file, "dsos") copy_output_file(symbol_file, "symbols") copy_output_file(branch_type_file, "branch_types") copy_output_file(sample_file, "samples") if perf_db_export_calls: copy_output_file(call_path_file, "call_paths") copy_output_file(call_file, "calls") print datetime.datetime.today(), "Removing intermediate files..." 
remove_output_file(evsel_file) remove_output_file(machine_file) remove_output_file(thread_file) remove_output_file(comm_file) remove_output_file(comm_thread_file) remove_output_file(dso_file) remove_output_file(symbol_file) remove_output_file(branch_type_file) remove_output_file(sample_file) if perf_db_export_calls: remove_output_file(call_path_file) remove_output_file(call_file) os.rmdir(output_dir_name) print datetime.datetime.today(), "Adding primary keys" do_query(query, 'ALTER TABLE selected_events ADD PRIMARY KEY (id)') do_query(query, 'ALTER TABLE machines ADD PRIMARY KEY (id)') do_query(query, 'ALTER TABLE threads ADD PRIMARY KEY (id)') do_query(query, 'ALTER TABLE comms ADD PRIMARY KEY (id)') do_query(query, 'ALTER TABLE comm_threads ADD PRIMARY KEY (id)') do_query(query, 'ALTER TABLE dsos ADD PRIMARY KEY (id)') do_query(query, 'ALTER TABLE symbols ADD PRIMARY KEY (id)') do_query(query, 'ALTER TABLE branch_types ADD PRIMARY KEY (id)') do_query(query, 'ALTER TABLE samples ADD PRIMARY KEY (id)') if perf_db_export_calls: do_query(query, 'ALTER TABLE call_paths ADD PRIMARY KEY (id)') do_query(query, 'ALTER TABLE calls ADD PRIMARY KEY (id)') print datetime.datetime.today(), "Adding foreign keys" do_query(query, 'ALTER TABLE threads ' 'ADD CONSTRAINT machinefk FOREIGN KEY (machine_id) REFERENCES machines (id),' 'ADD CONSTRAINT processfk FOREIGN KEY (process_id) REFERENCES threads (id)') do_query(query, 'ALTER TABLE comm_threads ' 'ADD CONSTRAINT commfk FOREIGN KEY (comm_id) REFERENCES comms (id),' 'ADD CONSTRAINT threadfk FOREIGN KEY (thread_id) REFERENCES threads (id)') do_query(query, 'ALTER TABLE dsos ' 'ADD CONSTRAINT machinefk FOREIGN KEY (machine_id) REFERENCES machines (id)') do_query(query, 'ALTER TABLE symbols ' 'ADD CONSTRAINT dsofk FOREIGN KEY (dso_id) REFERENCES dsos (id)') do_query(query, 'ALTER TABLE samples ' 'ADD CONSTRAINT evselfk FOREIGN KEY (evsel_id) REFERENCES selected_events (id),' 'ADD CONSTRAINT machinefk FOREIGN KEY (machine_id) 
REFERENCES machines (id),' 'ADD CONSTRAINT threadfk FOREIGN KEY (thread_id) REFERENCES threads (id),' 'ADD CONSTRAINT commfk FOREIGN KEY (comm_id) REFERENCES comms (id),' 'ADD CONSTRAINT dsofk FOREIGN KEY (dso_id) REFERENCES dsos (id),' 'ADD CONSTRAINT symbolfk FOREIGN KEY (symbol_id) REFERENCES symbols (id),' 'ADD CONSTRAINT todsofk FOREIGN KEY (to_dso_id) REFERENCES dsos (id),' 'ADD CONSTRAINT tosymbolfk FOREIGN KEY (to_symbol_id) REFERENCES symbols (id)') if perf_db_export_calls: do_query(query, 'ALTER TABLE call_paths ' 'ADD CONSTRAINT parentfk FOREIGN KEY (parent_id) REFERENCES call_paths (id),' 'ADD CONSTRAINT symbolfk FOREIGN KEY (symbol_id) REFERENCES symbols (id)') do_query(query, 'ALTER TABLE calls ' 'ADD CONSTRAINT threadfk FOREIGN KEY (thread_id) REFERENCES threads (id),' 'ADD CONSTRAINT commfk FOREIGN KEY (comm_id) REFERENCES comms (id),' 'ADD CONSTRAINT call_pathfk FOREIGN KEY (call_path_id) REFERENCES call_paths (id),' 'ADD CONSTRAINT callfk FOREIGN KEY (call_id) REFERENCES samples (id),' 'ADD CONSTRAINT returnfk FOREIGN KEY (return_id) REFERENCES samples (id),' 'ADD CONSTRAINT parent_call_pathfk FOREIGN KEY (parent_call_path_id) REFERENCES call_paths (id)') do_query(query, 'CREATE INDEX pcpid_idx ON calls (parent_call_path_id)') if (unhandled_count): print datetime.datetime.today(), "Warning: ", unhandled_count, " unhandled events" print datetime.datetime.today(), "Done" def trace_unhandled(event_name, context, event_fields_dict): global unhandled_count unhandled_count += 1 def sched__sched_switch(*x): pass def evsel_table(evsel_id, evsel_name, *x): n = len(evsel_name) fmt = "!hiqi" + str(n) + "s" value = struct.pack(fmt, 2, 8, evsel_id, n, evsel_name) evsel_file.write(value) def machine_table(machine_id, pid, root_dir, *x): n = len(root_dir) fmt = "!hiqiii" + str(n) + "s" value = struct.pack(fmt, 3, 8, machine_id, 4, pid, n, root_dir) machine_file.write(value) def thread_table(thread_id, machine_id, process_id, pid, tid, *x): value = 
struct.pack("!hiqiqiqiiii", 5, 8, thread_id, 8, machine_id, 8, process_id, 4, pid, 4, tid) thread_file.write(value) def comm_table(comm_id, comm_str, *x): n = len(comm_str) fmt = "!hiqi" + str(n) + "s" value = struct.pack(fmt, 2, 8, comm_id, n, comm_str) comm_file.write(value) def comm_thread_table(comm_thread_id, comm_id, thread_id, *x): fmt = "!hiqiqiq" value = struct.pack(fmt, 3, 8, comm_thread_id, 8, comm_id, 8, thread_id) comm_thread_file.write(value) def dso_table(dso_id, machine_id, short_name, long_name, build_id, *x): n1 = len(short_name) n2 = len(long_name) n3 = len(build_id) fmt = "!hiqiqi" + str(n1) + "si" + str(n2) + "si" + str(n3) + "s" value = struct.pack(fmt, 5, 8, dso_id, 8, machine_id, n1, short_name, n2, long_name, n3, build_id) dso_file.write(value) def symbol_table(symbol_id, dso_id, sym_start, sym_end, binding, symbol_name, *x): n = len(symbol_name) fmt = "!hiqiqiqiqiii" + str(n) + "s" value = struct.pack(fmt, 6, 8, symbol_id, 8, dso_id, 8, sym_start, 8, sym_end, 4, binding, n, symbol_name) symbol_file.write(value) def branch_type_table(branch_type, name, *x): n = len(name) fmt = "!hiii" + str(n) + "s" value = struct.pack(fmt, 2, 4, branch_type, n, name) branch_type_file.write(value) def sample_table(sample_id, evsel_id, machine_id, thread_id, comm_id, dso_id, symbol_id, sym_offset, ip, time, cpu, to_dso_id, to_symbol_id, to_sym_offset, to_ip, period, weight, transaction, data_src, branch_type, in_tx, *x): if branches: value = struct.pack("!hiqiqiqiqiqiqiqiqiqiqiiiqiqiqiqiiiB", 17, 8, sample_id, 8, evsel_id, 8, machine_id, 8, thread_id, 8, comm_id, 8, dso_id, 8, symbol_id, 8, sym_offset, 8, ip, 8, time, 4, cpu, 8, to_dso_id, 8, to_symbol_id, 8, to_sym_offset, 8, to_ip, 4, branch_type, 1, in_tx) else: value = struct.pack("!hiqiqiqiqiqiqiqiqiqiqiiiqiqiqiqiqiqiqiqiiiB", 21, 8, sample_id, 8, evsel_id, 8, machine_id, 8, thread_id, 8, comm_id, 8, dso_id, 8, symbol_id, 8, sym_offset, 8, ip, 8, time, 4, cpu, 8, to_dso_id, 8, to_symbol_id, 8, 
to_sym_offset, 8, to_ip, 8, period, 8, weight, 8, transaction, 8, data_src, 4, branch_type, 1, in_tx) sample_file.write(value) def call_path_table(cp_id, parent_id, symbol_id, ip, *x): fmt = "!hiqiqiqiq" value = struct.pack(fmt, 4, 8, cp_id, 8, parent_id, 8, symbol_id, 8, ip) call_path_file.write(value) def call_return_table(cr_id, thread_id, comm_id, call_path_id, call_time, return_time, branch_count, call_id, return_id, parent_call_path_id, flags, *x): fmt = "!hiqiqiqiqiqiqiqiqiqiqii" value = struct.pack(fmt, 11, 8, cr_id, 8, thread_id, 8, comm_id, 8, call_path_id, 8, call_time, 8, return_time, 8, branch_count, 8, call_id, 8, return_id, 8, parent_call_path_id, 4, flags) call_file.write(value)
adamchainz/ansible
refs/heads/devel
lib/ansible/modules/cloud/amazon/ec2_group.py
7
#!/usr/bin/python # -*- coding: utf-8 -*- # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. ANSIBLE_METADATA = {'metadata_version': '1.0', 'status': ['stableinterface'], 'supported_by': 'curated'} DOCUMENTATION = ''' --- module: ec2_group author: "Andrew de Quincey (@adq)" version_added: "1.3" short_description: maintain an ec2 VPC security group. description: - maintains ec2 security groups. This module has a dependency on python-boto >= 2.5 options: name: description: - Name of the security group. required: true description: description: - Description of the security group. Required when C(state) is C(present). required: false vpc_id: description: - ID of the VPC to create the group in. required: false rules: description: - List of firewall inbound rules to enforce in this group (see example). If none are supplied, no inbound rules will be enabled. Rules list may include its own name in `group_name`. This allows idempotent loopback additions (e.g. allow group to access itself). Rule sources list support was added in version 2.4. This allows to define multiple sources per source type as well as multiple source types per rule. Prior to 2.4 an individual source is allowed. required: false rules_egress: description: - List of firewall outbound rules to enforce in this group (see example). If none are supplied, a default all-out rule is assumed. 
If an empty list is supplied, no outbound rules will be enabled. Rule Egress sources list support was added in version 2.4. required: false version_added: "1.6" state: version_added: "1.4" description: - Create or delete a security group required: false default: 'present' choices: [ "present", "absent" ] aliases: [] purge_rules: version_added: "1.8" description: - Purge existing rules on security group that are not found in rules required: false default: 'true' aliases: [] purge_rules_egress: version_added: "1.8" description: - Purge existing rules_egress on security group that are not found in rules_egress required: false default: 'true' aliases: [] extends_documentation_fragment: - aws - ec2 notes: - If a rule declares a group_name and that group doesn't exist, it will be automatically created. In that case, group_desc should be provided as well. The module will refuse to create a depended-on group without a description. ''' EXAMPLES = ''' - name: example ec2 group ec2_group: name: example description: an example EC2 group vpc_id: 12345 region: eu-west-1 aws_secret_key: SECRET aws_access_key: ACCESS rules: - proto: tcp from_port: 80 to_port: 80 cidr_ip: 0.0.0.0/0 - proto: tcp from_port: 22 to_port: 22 cidr_ip: 10.0.0.0/8 - proto: tcp from_port: 443 to_port: 443 group_id: amazon-elb/sg-87654321/amazon-elb-sg - proto: tcp from_port: 3306 to_port: 3306 group_id: 123412341234/sg-87654321/exact-name-of-sg - proto: udp from_port: 10050 to_port: 10050 cidr_ip: 10.0.0.0/8 - proto: udp from_port: 10051 to_port: 10051 group_id: sg-12345678 - proto: icmp from_port: 8 # icmp type, -1 = any type to_port: -1 # icmp subtype, -1 = any subtype cidr_ip: 10.0.0.0/8 - proto: all # the containing group name may be specified here group_name: example rules_egress: - proto: tcp from_port: 80 to_port: 80 cidr_ip: 0.0.0.0/0 group_name: example-other # description to use if example-other needs to be created group_desc: other example EC2 group - name: example2 ec2 group ec2_group: name: 
example2 description: an example2 EC2 group vpc_id: 12345 region: eu-west-1 rules: # 'ports' rule keyword was introduced in version 2.4. It accepts a single port value or a list of values including ranges (from_port-to_port). - proto: tcp ports: 22 group_name: example-vpn - proto: tcp ports: - 80 - 443 - 8080-8099 cidr_ip: 0.0.0.0/0 # Rule sources list support was added in version 2.4. This allows to define multiple sources per source type as well as multiple source types per rule. - proto: tcp ports: - 6379 - 26379 group_name: - example-vpn - example-redis - proto: tcp ports: 5665 group_name: example-vpn cidr_ip: - 172.16.1.0/24 - 172.16.17.0/24 group_id: - sg-edcd9784 ''' import json import re import time from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.ec2 import ec2_connect, ec2_argument_spec try: import boto.ec2 from boto.ec2.securitygroup import SecurityGroup from boto.exception import BotoServerError HAS_BOTO = True except ImportError: HAS_BOTO = False import traceback def deduplicate_rules_args(rules): """Returns unique rules""" if rules is None: return None return list(dict(zip((json.dumps(r, sort_keys=True) for r in rules), rules)).values()) def make_rule_key(prefix, rule, group_id, cidr_ip): """Creates a unique key for an individual group rule""" if isinstance(rule, dict): proto, from_port, to_port = [rule.get(x, None) for x in ('proto', 'from_port', 'to_port')] # fix for 11177 if proto not in ['icmp', 'tcp', 'udp'] and from_port == -1 and to_port == -1: from_port = 'none' to_port = 'none' else: # isinstance boto.ec2.securitygroup.IPPermissions proto, from_port, to_port = [getattr(rule, x, None) for x in ('ip_protocol', 'from_port', 'to_port')] key = "%s-%s-%s-%s-%s-%s" % (prefix, proto, from_port, to_port, group_id, cidr_ip) return key.lower().replace('-none', '-None') def addRulesToLookup(rules, prefix, rules_dict): for rule in rules: for grant in rule.grants: rules_dict[make_rule_key(prefix, rule, grant.group_id, 
grant.cidr_ip)] = (rule, grant) def validate_rule(module, rule): VALID_PARAMS = ('cidr_ip', 'group_id', 'group_name', 'group_desc', 'proto', 'from_port', 'to_port') if not isinstance(rule, dict): module.fail_json(msg='Invalid rule parameter type [%s].' % type(rule)) for k in rule: if k not in VALID_PARAMS: module.fail_json(msg='Invalid rule parameter \'{}\''.format(k)) if 'group_id' in rule and 'cidr_ip' in rule: module.fail_json(msg='Specify group_id OR cidr_ip, not both') elif 'group_name' in rule and 'cidr_ip' in rule: module.fail_json(msg='Specify group_name OR cidr_ip, not both') elif 'group_id' in rule and 'group_name' in rule: module.fail_json(msg='Specify group_id OR group_name, not both') def get_target_from_rule(module, ec2, rule, name, group, groups, vpc_id): """ Returns tuple of (group_id, ip) after validating rule params. rule: Dict describing a rule. name: Name of the security group being managed. groups: Dict of all available security groups. AWS accepts an ip range or a security group as target of a rule. This function validate the rule specification and return either a non-None group_id or a non-None ip range. """ FOREIGN_SECURITY_GROUP_REGEX = '^(\S+)/(sg-\S+)/(\S+)' group_id = None group_name = None ip = None target_group_created = False if 'group_id' in rule and 'cidr_ip' in rule: module.fail_json(msg="Specify group_id OR cidr_ip, not both") elif 'group_name' in rule and 'cidr_ip' in rule: module.fail_json(msg="Specify group_name OR cidr_ip, not both") elif 'group_id' in rule and 'group_name' in rule: module.fail_json(msg="Specify group_id OR group_name, not both") elif 'group_id' in rule and re.match(FOREIGN_SECURITY_GROUP_REGEX, rule['group_id']): # this is a foreign Security Group. 
Since you can't fetch it you must create an instance of it owner_id, group_id, group_name = re.match(FOREIGN_SECURITY_GROUP_REGEX, rule['group_id']).groups() group_instance = SecurityGroup(owner_id=owner_id, name=group_name, id=group_id) groups[group_id] = group_instance groups[group_name] = group_instance elif 'group_id' in rule: group_id = rule['group_id'] elif 'group_name' in rule: group_name = rule['group_name'] if group_name == name: group_id = group.id groups[group_id] = group groups[group_name] = group elif group_name in groups and (vpc_id is None or groups[group_name].vpc_id == vpc_id): group_id = groups[group_name].id else: if not rule.get('group_desc', '').strip(): module.fail_json(msg="group %s will be automatically created by rule %s and no description was provided" % (group_name, rule)) if not module.check_mode: auto_group = ec2.create_security_group(group_name, rule['group_desc'], vpc_id=vpc_id) group_id = auto_group.id groups[group_id] = auto_group groups[group_name] = auto_group target_group_created = True elif 'cidr_ip' in rule: ip = rule['cidr_ip'] return group_id, ip, target_group_created def ports_expand(ports): # takes a list of ports and returns a list of (port_from, port_to) ports_expanded = [] for port in ports: if not isinstance(port, str): ports_expanded.append((port,) * 2) elif '-' in port: ports_expanded.append(tuple(p.strip() for p in port.split('-', 1))) else: ports_expanded.append((port.strip(),) * 2) return ports_expanded def rule_expand_ports(rule): # takes a rule dict and returns a list of expanded rule dicts if 'ports' not in rule: return [rule] ports = rule['ports'] if isinstance(rule['ports'], list) else [rule['ports']] rule_expanded = [] for from_to in ports_expand(ports): temp_rule = rule.copy() del temp_rule['ports'] temp_rule['from_port'], temp_rule['to_port'] = from_to rule_expanded.append(temp_rule) return rule_expanded def rules_expand_ports(rules): # takes a list of rules and expands it based on 'ports' if not rules: 
return rules return [rule for rule_complex in rules for rule in rule_expand_ports(rule_complex)] def rule_expand_source(rule, source_type): # takes a rule dict and returns a list of expanded rule dicts for specified source_type sources = rule[source_type] if isinstance(rule[source_type], list) else [rule[source_type]] source_types_all = ('cidr_ip', 'group_id', 'group_name') rule_expanded = [] for source in sources: temp_rule = rule.copy() for s in source_types_all: temp_rule.pop(s, None) temp_rule[source_type] = source rule_expanded.append(temp_rule) return rule_expanded def rule_expand_sources(rule): # takes a rule dict and returns a list of expanded rule discts source_types = (stype for stype in ('cidr_ip', 'group_id', 'group_name') if stype in rule) return [r for stype in source_types for r in rule_expand_source(rule, stype)] def rules_expand_sources(rules): # takes a list of rules and expands it based on 'cidr_ip', 'group_id', 'group_name' if not rules: return rules return [rule for rule_complex in rules for rule in rule_expand_sources(rule_complex)] def main(): argument_spec = ec2_argument_spec() argument_spec.update(dict( name=dict(type='str', required=True), description=dict(type='str', required=False), vpc_id=dict(type='str'), rules=dict(type='list'), rules_egress=dict(type='list'), state=dict(default='present', type='str', choices=['present', 'absent']), purge_rules=dict(default=True, required=False, type='bool'), purge_rules_egress=dict(default=True, required=False, type='bool'), ) ) module = AnsibleModule( argument_spec=argument_spec, supports_check_mode=True, ) if not HAS_BOTO: module.fail_json(msg='boto required for this module') name = module.params['name'] description = module.params['description'] vpc_id = module.params['vpc_id'] rules = deduplicate_rules_args(rules_expand_sources(rules_expand_ports(module.params['rules']))) rules_egress = deduplicate_rules_args(rules_expand_sources(rules_expand_ports(module.params['rules_egress']))) state = 
module.params.get('state') purge_rules = module.params['purge_rules'] purge_rules_egress = module.params['purge_rules_egress'] if state == 'present' and not description: module.fail_json(msg='Must provide description when state is present.') changed = False ec2 = ec2_connect(module) # find the group if present group = None groups = {} try: security_groups = ec2.get_all_security_groups() except BotoServerError as e: module.fail_json(msg="Error in get_all_security_groups: %s" % e.message, exception=traceback.format_exc()) for curGroup in security_groups: groups[curGroup.id] = curGroup if curGroup.name in groups: # Prioritise groups from the current VPC if vpc_id is None or curGroup.vpc_id == vpc_id: groups[curGroup.name] = curGroup else: groups[curGroup.name] = curGroup if curGroup.name == name and (vpc_id is None or curGroup.vpc_id == vpc_id): group = curGroup # Ensure requested group is absent if state == 'absent': if group: # found a match, delete it try: if not module.check_mode: group.delete() except BotoServerError as e: module.fail_json(msg="Unable to delete security group '%s' - %s" % (group, e.message), exception=traceback.format_exc()) else: group = None changed = True else: # no match found, no changes required pass # Ensure requested group is present elif state == 'present': if group: # existing group if group.description != description: module.fail_json(msg="Group description does not match existing group. ec2_group does not support this case.") # if the group doesn't exist, create it now else: # no match found, create it if not module.check_mode: group = ec2.create_security_group(name, description, vpc_id=vpc_id) # When a group is created, an egress_rule ALLOW ALL # to 0.0.0.0/0 is added automatically but it's not # reflected in the object returned by the AWS API # call. 
We re-read the group for getting an updated object # amazon sometimes takes a couple seconds to update the security group so wait till it exists while len(ec2.get_all_security_groups(filters={'group_id': group.id})) == 0: time.sleep(0.1) group = ec2.get_all_security_groups(group_ids=(group.id,))[0] changed = True else: module.fail_json(msg="Unsupported state requested: %s" % state) # create a lookup for all existing rules on the group if group: # Manage ingress rules groupRules = {} addRulesToLookup(group.rules, 'in', groupRules) # Now, go through all provided rules and ensure they are there. if rules is not None: for rule in rules: validate_rule(module, rule) group_id, ip, target_group_created = get_target_from_rule(module, ec2, rule, name, group, groups, vpc_id) if target_group_created: changed = True if rule['proto'] in ('all', '-1', -1): rule['proto'] = -1 rule['from_port'] = None rule['to_port'] = None # Convert ip to list we can iterate over if not isinstance(ip, list): ip = [ip] # If rule already exists, don't later delete it for thisip in ip: ruleId = make_rule_key('in', rule, group_id, thisip) if ruleId not in groupRules: grantGroup = None if group_id: grantGroup = groups[group_id] if not module.check_mode: group.authorize(rule['proto'], rule['from_port'], rule['to_port'], thisip, grantGroup) changed = True else: del groupRules[ruleId] # Finally, remove anything left in the groupRules -- these will be defunct rules if purge_rules: for (rule, grant) in groupRules.values(): grantGroup = None if grant.group_id: if grant.owner_id != group.owner_id: # this is a foreign Security Group. 
Since you can't fetch it you must create an instance of it group_instance = SecurityGroup(owner_id=grant.owner_id, name=grant.name, id=grant.group_id) groups[grant.group_id] = group_instance groups[grant.name] = group_instance grantGroup = groups[grant.group_id] if not module.check_mode: group.revoke(rule.ip_protocol, rule.from_port, rule.to_port, grant.cidr_ip, grantGroup) changed = True # Manage egress rules groupRules = {} addRulesToLookup(group.rules_egress, 'out', groupRules) # Now, go through all provided rules and ensure they are there. if rules_egress is not None: for rule in rules_egress: validate_rule(module, rule) group_id, ip, target_group_created = get_target_from_rule(module, ec2, rule, name, group, groups, vpc_id) if target_group_created: changed = True if rule['proto'] in ('all', '-1', -1): rule['proto'] = -1 rule['from_port'] = None rule['to_port'] = None # Convert ip to list we can iterate over if not isinstance(ip, list): ip = [ip] # If rule already exists, don't later delete it for thisip in ip: ruleId = make_rule_key('out', rule, group_id, thisip) if ruleId in groupRules: del groupRules[ruleId] # Otherwise, add new rule else: grantGroup = None if group_id: grantGroup = groups[group_id].id if not module.check_mode: ec2.authorize_security_group_egress( group_id=group.id, ip_protocol=rule['proto'], from_port=rule['from_port'], to_port=rule['to_port'], src_group_id=grantGroup, cidr_ip=thisip) changed = True else: # when no egress rules are specified, # we add in a default allow all out rule, which was the # default behavior before egress rules were added default_egress_rule = 'out--1-None-None-None-0.0.0.0/0' if default_egress_rule not in groupRules: if not module.check_mode: ec2.authorize_security_group_egress( group_id=group.id, ip_protocol=-1, from_port=None, to_port=None, src_group_id=None, cidr_ip='0.0.0.0/0' ) changed = True else: # make sure the default egress rule is not removed del groupRules[default_egress_rule] # Finally, remove anything 
left in the groupRules -- these will be defunct rules if purge_rules_egress: for (rule, grant) in groupRules.values(): grantGroup = None if grant.group_id: grantGroup = groups[grant.group_id].id if not module.check_mode: ec2.revoke_security_group_egress( group_id=group.id, ip_protocol=rule.ip_protocol, from_port=rule.from_port, to_port=rule.to_port, src_group_id=grantGroup, cidr_ip=grant.cidr_ip) changed = True if group: module.exit_json(changed=changed, group_id=group.id) else: module.exit_json(changed=changed, group_id=None) if __name__ == '__main__': main()
hlamer/enki
refs/heads/master
enki/core/actionmanager.py
3
""" actionmanager --- Provides text path based access to main menu actions ====================================================================== Use this module for adding own actions to the main menu Shortcuts are configured by appshortcuts plugin """ from PyQt5.QtCore import pyqtSignal, QObject, pyqtSlot from PyQt5.QtWidgets import QMenu, QMenuBar, QAction from PyQt5.QtGui import QIcon, QKeySequence from enki.core.core import core class ActionMenuBar(QMenuBar): """Menu bar implementation. Contains actions, managed by ActionManager. Instance is created by MainWindow """ def __init__(self, parent, actionManager): QMenuBar.__init__(self, parent) self._manager = actionManager for action in self._manager.allActions(): self._onActionInserted(action) self._manager.actionInserted.connect(self._onActionInserted) self._manager.actionRemoved.connect(self._onActionRemoved) @pyqtSlot(QAction) def _onActionInserted(self, action): parent = self._manager.parentAction(action) if parent is None and action.menu(): self.addMenu(action.menu()) @pyqtSlot(QAction) def _onActionRemoved(self, action): parent = self._manager.parentAction(action) if parent is None and action.menu(): self.removeAction(action) class ActionManager(QObject): """Class provides text path based access to main menu actions """ actionInserted = pyqtSignal(QAction) """ actionInserted(action) **Signal** emitted, when new action has been inserted to the menu """ # pylint: disable=W0105 actionChanged = pyqtSignal(QAction) """ actionChanged(action) **Signal** emitted, when some action datahas been changed """ # pylint: disable=W0105 actionRemoved = pyqtSignal(QAction) """ actionRemoved(action) **Signal** emitted, when action has been removed from the menu """ # pylint: disable=W0105 def __init__(self, parent=None): QObject.__init__(self, parent) self._pathToAction = {} def terminate(self): if self._pathToAction: assert 0, 'ActionManager: you have to delete all actions before destroying actions model. 
' + \ 'Existing actions: ' + ', '.join(self._pathToAction.keys()) def action(self, path): """Get action by its path. i.e. actionManager.action("mFile/mClose/aAll") """ return self._pathToAction.get(path, None) def menu(self, path): """Get action by its path. i.e. actionManager.action("mFile/mClose/aAll") """ if path in self._pathToAction: return self._pathToAction[path].menu() else: return None def path(self, action): """Get action path by reference to action """ return action.path def allActions(self): """Reqursive list of existing actions """ return iter(self._pathToAction.values()) @staticmethod def _parentPath(path): return '/'.join(path.split('/')[0: -1]) def addAction(self, path, action, icon=QIcon(), shortcut=None): """Add new action to the menu. Returns created QAction object. ``action`` might be string text or QAction instance. """ subPath = self._parentPath(path) parentAction = self.action(subPath) if parentAction is None: assert False, "Menu path not found: " + subPath if isinstance(action, str): action = QAction(icon, action, parentAction) else: action.setParent(parentAction) if shortcut is not None: action.setShortcut(shortcut) parentAction.menu().addAction(action) self._pathToAction[path] = action action.path = path action.changed.connect(self._onActionChanged) """ On Ubuntu 14.04 keyboard shortcuts doesn't work without this line http://stackoverflow.com/questions/23916623/ qt5-doesnt-recognised-shortcuts-unless-actions-are-added-to-a-toolbar """ core.mainWindow().addAction(action) self.actionInserted.emit(action) return action def removeAction(self, pathOrAction): """Remove action from the menu """ return self.removeMenu(pathOrAction) def addMenu(self, path, text, icon=QIcon()): """Add menu to the main menu or submenu of main menu """ action = self.action(path) if action is not None: if action.menu(): return action else: assert 0 # not a menu! 
parentMenuPath = self._parentPath(path) if parentMenuPath: parentAction = self.action(parentMenuPath) else: parentAction = None menu = QMenu() action = menu.menuAction() action._menu = menu # avoid deleting menu by the garbadge collectors action.setIcon(icon) action.setText(text) if parentAction is not None: action.setParent(parentAction) parentAction.menu().addMenu(menu) else: action.setParent(self) self._pathToAction[path] = action action.path = path action.changed.connect(self._onActionChanged) self.actionInserted.emit(action) return action def removeMenu(self, action): """Remove menu. If removeEmptyPath is True - remove also empty parent menus """ if isinstance(action, str): action = self.action(action) assert action is not None self._removeAction(action) return True def _removeAction(self, action): """Remove action by reference to it """ parent = self.parentAction(action) if parent is not None: parent.menu().removeAction(action) path = action.path del self._pathToAction[path] action.changed.disconnect(self._onActionChanged) action.setParent(None) core.mainWindow().removeAction(action) self.actionRemoved.emit(action) def _removeCompleteEmptyPathNode(self, action): """Remove empty menu and empty parent menus """ if not self.children(action): parentAction = self.parentAction(action) self._removeAction(action) self._removeCompleteEmptyPathNode(parentAction) def parentAction(self, action): """Parent action of the action """ if action is None: return None parentObject = action.parent() if parentObject != self: return parentObject else: return None def children(self, action): """List of children of action """ if action is None: return [object for object in QObject.children(self) if isinstance(object, QAction) and object in iter(self._pathToAction.values())] else: return [object for object in action.children() if object in iter(self._pathToAction.values())] def defaultShortcut(self, action): """Get actions default shortcut """ if isinstance(action, str): action = 
self.action(action) if action is not None: if hasattr(action, 'defaultShortcut'): return action.defaultShortcut return QKeySequence() def setDefaultShortcut(self, action, shortcut): """Set actions default shortcut """ if isinstance(action, str): action = self.action(action) if isinstance(shortcut, str): shortcut = QKeySequence(shortcut) action.defaultShortcut = shortcut if not action.shortcut(): action.setShortcut(shortcut) @pyqtSlot() def _onActionChanged(self): """Action changed handler. Retransmit signal with reference to the action """ action = self.sender() self.actionChanged.emit(action)
Eliminater74/LGD851_G3_L_Kernel_v20E
refs/heads/master
tools/perf/scripts/python/net_dropmonitor.py
4235
# Monitor the system for dropped packets and proudce a report of drop locations and counts import os import sys sys.path.append(os.environ['PERF_EXEC_PATH'] + \ '/scripts/python/Perf-Trace-Util/lib/Perf/Trace') from perf_trace_context import * from Core import * from Util import * drop_log = {} kallsyms = [] def get_kallsyms_table(): global kallsyms try: f = open("/proc/kallsyms", "r") linecount = 0 for line in f: linecount = linecount+1 f.seek(0) except: return j = 0 for line in f: loc = int(line.split()[0], 16) name = line.split()[2] j = j +1 if ((j % 100) == 0): print "\r" + str(j) + "/" + str(linecount), kallsyms.append({ 'loc': loc, 'name' : name}) print "\r" + str(j) + "/" + str(linecount) kallsyms.sort() return def get_sym(sloc): loc = int(sloc) for i in kallsyms: if (i['loc'] >= loc): return (i['name'], i['loc']-loc) return (None, 0) def print_drop_table(): print "%25s %25s %25s" % ("LOCATION", "OFFSET", "COUNT") for i in drop_log.keys(): (sym, off) = get_sym(i) if sym == None: sym = i print "%25s %25s %25s" % (sym, off, drop_log[i]) def trace_begin(): print "Starting trace (Ctrl-C to dump results)" def trace_end(): print "Gathering kallsyms data" get_kallsyms_table() print_drop_table() # called from perf, when it finds a correspoinding event def skb__kfree_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr, protocol, location): slocation = str(location) try: drop_log[slocation] = drop_log[slocation] + 1 except: drop_log[slocation] = 1
guzru/django-avatar
refs/heads/master
avatar/templatetags/__init__.py
12133432
denisenkom/django
refs/heads/master
django/conf/locale/zh_CN/__init__.py
12133432
NL66278/odoo
refs/heads/8.0
addons/decimal_precision/decimal_precision.py
34
# -*- encoding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import openerp from openerp import SUPERUSER_ID from openerp import tools from openerp.osv import orm, fields from openerp.modules.registry import RegistryManager class decimal_precision(orm.Model): _name = 'decimal.precision' _columns = { 'name': fields.char('Usage', select=True, required=True), 'digits': fields.integer('Digits', required=True), } _defaults = { 'digits': 2, } _sql_constraints = [ ('name_uniq', 'unique (name)', """Only one value can be defined for each given usage!"""), ] @tools.ormcache(skiparg=3) def precision_get(self, cr, uid, application): cr.execute('select digits from decimal_precision where name=%s', (application,)) res = cr.fetchone() return res[0] if res else 2 def clear_cache(self, cr): """clear cache and update models. 
Notify other workers to restart their registry.""" self.precision_get.clear_cache(self) env = openerp.api.Environment(cr, SUPERUSER_ID, {}) for model in self.pool.values(): for field in model._fields.values(): if field.type == 'float': field._setup_digits(env) RegistryManager.signal_registry_change(cr.dbname) def create(self, cr, uid, data, context=None): res = super(decimal_precision, self).create(cr, uid, data, context=context) self.clear_cache(cr) return res def unlink(self, cr, uid, ids, context=None): res = super(decimal_precision, self).unlink(cr, uid, ids, context=context) self.clear_cache(cr) return res def write(self, cr, uid, ids, data, *args, **argv): res = super(decimal_precision, self).write(cr, uid, ids, data, *args, **argv) self.clear_cache(cr) return res def get_precision(application): def change_digit(cr): decimal_precision = openerp.registry(cr.dbname)['decimal.precision'] res = decimal_precision.precision_get(cr, SUPERUSER_ID, application) return (16, res) return change_digit class DecimalPrecisionFloat(orm.AbstractModel): """ Override qweb.field.float to add a `decimal_precision` domain option and use that instead of the column's own value if it is specified """ _inherit = 'ir.qweb.field.float' def precision(self, cr, uid, field, options=None, context=None): dp = options and options.get('decimal_precision') if dp: return self.pool['decimal.precision'].precision_get( cr, uid, dp) return super(DecimalPrecisionFloat, self).precision( cr, uid, field, options=options, context=context) class DecimalPrecisionTestModel(orm.Model): _name = 'decimal.precision.test' _columns = { 'float': fields.float(), 'float_2': fields.float(digits=(16, 2)), 'float_4': fields.float(digits=(16, 4)), } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
jcnelson/syndicate
refs/heads/master
old/ms/django_ag/tests.py
6666
""" This file demonstrates writing tests using the unittest module. These will pass when you run "manage.py test". Replace this with more appropriate tests for your application. """ from django.test import TestCase class SimpleTest(TestCase): def test_basic_addition(self): """ Tests that 1 + 1 always equals 2. """ self.assertEqual(1 + 1, 2)
ingadhoc/openerp-travel
refs/heads/master
travel_rental_service/wizard/travel_rental_service_import.py
2
# -*- encoding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # This module copyright (C) 2010 - 2014 Savoir-faire Linux # (<http://www.savoirfairelinux.com>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp.osv import fields, orm from openerp.osv.osv import except_osv from openerp.tools.translate import _ class travel_rental_service_import(orm.TransientModel): """Import data from other passengers""" _name = "travel.rental.service.import" _description = "Service Rental information import" _columns = { 'travel_id': fields.many2one('travel.travel'), 'cur_passenger_id': fields.many2one('travel.passenger'), 'passenger_id': fields.many2one( 'travel.passenger', string='Import Service Rental information from', help='Other passengers on the same journey.'), } def data_import(self, cr, uid, ids, context=None): """ Import service rental information from other passenger """ trsi_pool = self.pool.get('travel.rental.service.import') trs_pool = self.pool.get('travel.service.rental') for tsri_obj in trsi_pool.browse(cr, uid, ids, context=context): cur_passenger_obj = tsri_obj.cur_passenger_id other_passenger_obj = tsri_obj.passenger_id if not other_passenger_obj: raise except_osv(_('Error'), _('No source 
passenger selected.')) passenger_id = cur_passenger_obj.id for rental_obj in other_passenger_obj.rental_service_ids: new_rental_id = trs_pool.copy( cr, uid, rental_obj.id, context=context) trs_pool.write( cr, uid, new_rental_id, {'passenger_id': cur_passenger_obj.id}, context=context) return { 'name': 'Passengers', 'res_model': 'travel.passenger', 'view_mode': 'form', 'type': 'ir.actions.act_window', 'target': 'new', 'res_id': passenger_id, 'context': context, } def cancel(self, cr, uid, ids, context=None): trsi_pool = self.pool.get('travel.rental.service.import') passenger_id = False for obj in trsi_pool.browse(cr, uid, ids, context=context): passenger_id = obj.cur_passenger_id.id return { 'name': 'Passengers', 'res_model': 'travel.passenger', 'view_mode': 'form', 'type': 'ir.actions.act_window', 'target': 'new', 'res_id': passenger_id, 'context': context, }
youdonghai/intellij-community
refs/heads/master
plugins/hg4idea/testData/bin/mercurial/windows.py
93
# windows.py - Windows utility function implementations for Mercurial # # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from i18n import _ import osutil, encoding import errno, msvcrt, os, re, stat, sys, _winreg import win32 executablepath = win32.executablepath getuser = win32.getuser hidewindow = win32.hidewindow makedir = win32.makedir nlinks = win32.nlinks oslink = win32.oslink samedevice = win32.samedevice samefile = win32.samefile setsignalhandler = win32.setsignalhandler spawndetached = win32.spawndetached split = os.path.split termwidth = win32.termwidth testpid = win32.testpid unlink = win32.unlink umask = 0022 # wrap osutil.posixfile to provide friendlier exceptions def posixfile(name, mode='r', buffering=-1): try: return osutil.posixfile(name, mode, buffering) except WindowsError, err: raise IOError(err.errno, '%s: %s' % (name, err.strerror)) posixfile.__doc__ = osutil.posixfile.__doc__ class winstdout(object): '''stdout on windows misbehaves if sent through a pipe''' def __init__(self, fp): self.fp = fp def __getattr__(self, key): return getattr(self.fp, key) def close(self): try: self.fp.close() except IOError: pass def write(self, s): try: # This is workaround for "Not enough space" error on # writing large size of data to console. 
limit = 16000 l = len(s) start = 0 self.softspace = 0 while start < l: end = start + limit self.fp.write(s[start:end]) start = end except IOError, inst: if inst.errno != 0: raise self.close() raise IOError(errno.EPIPE, 'Broken pipe') def flush(self): try: return self.fp.flush() except IOError, inst: if inst.errno != errno.EINVAL: raise self.close() raise IOError(errno.EPIPE, 'Broken pipe') sys.__stdout__ = sys.stdout = winstdout(sys.stdout) def _is_win_9x(): '''return true if run on windows 95, 98 or me.''' try: return sys.getwindowsversion()[3] == 1 except AttributeError: return 'command' in os.environ.get('comspec', '') def openhardlinks(): return not _is_win_9x() def parsepatchoutput(output_line): """parses the output produced by patch and returns the filename""" pf = output_line[14:] if pf[0] == '`': pf = pf[1:-1] # Remove the quotes return pf def sshargs(sshcmd, host, user, port): '''Build argument list for ssh or Plink''' pflag = 'plink' in sshcmd.lower() and '-P' or '-p' args = user and ("%s@%s" % (user, host)) or host return port and ("%s %s %s" % (args, pflag, port)) or args def setflags(f, l, x): pass def copymode(src, dst, mode=None): pass def checkexec(path): return False def checklink(path): return False def setbinary(fd): # When run without console, pipes may expose invalid # fileno(), usually set to -1. fno = getattr(fd, 'fileno', None) if fno is not None and fno() >= 0: msvcrt.setmode(fno(), os.O_BINARY) def pconvert(path): return path.replace(os.sep, '/') def localpath(path): return path.replace('/', '\\') def normpath(path): return pconvert(os.path.normpath(path)) def normcase(path): return encoding.upper(path) def realpath(path): ''' Returns the true, canonical file system path equivalent to the given path. ''' # TODO: There may be a more clever way to do this that also handles other, # less common file systems. 
return os.path.normpath(normcase(os.path.realpath(path))) def samestat(s1, s2): return False # A sequence of backslashes is special iff it precedes a double quote: # - if there's an even number of backslashes, the double quote is not # quoted (i.e. it ends the quoted region) # - if there's an odd number of backslashes, the double quote is quoted # - in both cases, every pair of backslashes is unquoted into a single # backslash # (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx ) # So, to quote a string, we must surround it in double quotes, double # the number of backslashes that precede double quotes and add another # backslash before every double quote (being careful with the double # quote we've appended to the end) _quotere = None def shellquote(s): global _quotere if _quotere is None: _quotere = re.compile(r'(\\*)("|\\$)') return '"%s"' % _quotere.sub(r'\1\1\\\2', s) def quotecommand(cmd): """Build a command string suitable for os.popen* calls.""" if sys.version_info < (2, 7, 1): # Python versions since 2.7.1 do this extra quoting themselves return '"' + cmd + '"' return cmd def popen(command, mode='r'): # Work around "popen spawned process may not write to stdout # under windows" # http://bugs.python.org/issue1366 command += " 2> %s" % os.devnull return os.popen(quotecommand(command), mode) def explainexit(code): return _("exited with status %d") % code, code # if you change this stub into a real check, please try to implement the # username and groupname functions above, too. def isowner(st): return True def findexe(command): '''Find executable for command searching like cmd.exe does. If command is a basename then PATH is searched for command. PATH isn't searched if command is an absolute or relative path. An extension from PATHEXT is found and added if not present. 
If command isn't found None is returned.''' pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD') pathexts = [ext for ext in pathext.lower().split(os.pathsep)] if os.path.splitext(command)[1].lower() in pathexts: pathexts = [''] def findexisting(pathcommand): 'Will append extension (if needed) and return existing file' for ext in pathexts: executable = pathcommand + ext if os.path.exists(executable): return executable return None if os.sep in command: return findexisting(command) for path in os.environ.get('PATH', '').split(os.pathsep): executable = findexisting(os.path.join(path, command)) if executable is not None: return executable return findexisting(os.path.expanduser(os.path.expandvars(command))) _wantedkinds = set([stat.S_IFREG, stat.S_IFLNK]) def statfiles(files): '''Stat each file in files. Yield each stat, or None if a file does not exist or has a type we don't care about. Cluster and cache stat per directory to minimize number of OS stat calls.''' dircache = {} # dirname -> filename -> status | None if file does not exist getkind = stat.S_IFMT for nf in files: nf = normcase(nf) dir, base = os.path.split(nf) if not dir: dir = '.' cache = dircache.get(dir, None) if cache is None: try: dmap = dict([(normcase(n), s) for n, k, s in osutil.listdir(dir, True) if getkind(s.st_mode) in _wantedkinds]) except OSError, err: # handle directory not found in Python version prior to 2.5 # Python <= 2.4 returns native Windows code 3 in errno # Python >= 2.5 returns ENOENT and adds winerror field # EINVAL is raised if dir is not a directory. if err.errno not in (3, errno.ENOENT, errno.EINVAL, errno.ENOTDIR): raise dmap = {} cache = dircache.setdefault(dir, dmap) yield cache.get(base, None) def username(uid=None): """Return the name of the user with the given uid. If uid is None, return the name of the current user.""" return None def groupname(gid=None): """Return the name of the group with the given gid. 
If gid is None, return the name of the current group.""" return None def _removedirs(name): """special version of os.removedirs that does not remove symlinked directories or junction points if they actually contain files""" if osutil.listdir(name): return os.rmdir(name) head, tail = os.path.split(name) if not tail: head, tail = os.path.split(head) while head and tail: try: if osutil.listdir(head): return os.rmdir(head) except (ValueError, OSError): break head, tail = os.path.split(head) def unlinkpath(f, ignoremissing=False): """unlink and remove the directory if it is empty""" try: unlink(f) except OSError, e: if not (ignoremissing and e.errno == errno.ENOENT): raise # try removing directories that might now be empty try: _removedirs(os.path.dirname(f)) except OSError: pass def rename(src, dst): '''atomically rename file src to dst, replacing dst if it exists''' try: os.rename(src, dst) except OSError, e: if e.errno != errno.EEXIST: raise unlink(dst) os.rename(src, dst) def gethgcmd(): return [sys.executable] + sys.argv[:1] def groupmembers(name): # Don't support groups on Windows for now raise KeyError def isexec(f): return False class cachestat(object): def __init__(self, path): pass def cacheable(self): return False def lookupreg(key, valname=None, scope=None): ''' Look up a key/value name in the Windows registry. valname: value name. If unspecified, the default value for the key is used. scope: optionally specify scope for registry lookup, this can be a sequence of scopes to look up in order. Default (CURRENT_USER, LOCAL_MACHINE). 
''' if scope is None: scope = (_winreg.HKEY_CURRENT_USER, _winreg.HKEY_LOCAL_MACHINE) elif not isinstance(scope, (list, tuple)): scope = (scope,) for s in scope: try: val = _winreg.QueryValueEx(_winreg.OpenKey(s, key), valname)[0] # never let a Unicode string escape into the wild return encoding.tolocal(val.encode('UTF-8')) except EnvironmentError: pass expandglobs = True def statislink(st): '''check whether a stat result is a symlink''' return False def statisexec(st): '''check whether a stat result is an executable file''' return False
smmribeiro/intellij-community
refs/heads/master
python/testData/resolve/LookAhead.py
83
def f(): return f<ref>oo foo = 1
AutorestCI/azure-sdk-for-python
refs/heads/master
azure-mgmt-devtestlabs/azure/mgmt/devtestlabs/models/gallery_image_paged.py
2
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from msrest.paging import Paged class GalleryImagePaged(Paged): """ A paging container for iterating over a list of :class:`GalleryImage <azure.mgmt.devtestlabs.models.GalleryImage>` object """ _attribute_map = { 'next_link': {'key': 'nextLink', 'type': 'str'}, 'current_page': {'key': 'value', 'type': '[GalleryImage]'} } def __init__(self, *args, **kwargs): super(GalleryImagePaged, self).__init__(*args, **kwargs)
heynemann/pyvows
refs/heads/master
pyvows/reporting/xunit.py
1
# -*- coding: utf-8 -*-
'''Provides the `XUnitReporter` class, which creates XML reports after testing.
'''

# pyVows testing engine
# https://github.com/{heynemann,truemped}/pyvows
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 Daniel Truemper truemped@googlemail.com

import codecs
from datetime import datetime
import socket
import traceback
from xml.dom.minidom import Document

import re

# Characters that may not appear in an XML 1.0 document: the ASCII control
# characters other than tab (\x09), newline (\x0A) and carriage return (\x0D).
# They must be stripped before text is embedded in a CDATA section, otherwise
# the generated report is not well-formed.
#
# NOTE: the original code immediately re-assigned this name with a NUL-only
# pattern (r"[\000]"), which silently disabled the broader filter and let
# invalid control characters through; the single pattern below restores the
# intended full range.
INVALID_CHARACTERS = re.compile(r"[\000-\010\013\014\016-\037]")


class XUnitReporter(object):
    '''Turns `VowsResult` objects into XUnit-style reports.'''

    def __init__(self, result):
        # Summarize once at construction; `to_xml`/`write_report` reuse it.
        self.result_summary = self.summarize_results(result)

    def write_report(self, filename, encoding=None):
        '''Serialize the report and write it to `filename` using `encoding`.

        Undecodable characters are replaced rather than raising.
        '''
        with codecs.open(filename, 'w', encoding, 'replace') as output_file:
            output_file.write(self.to_xml(encoding))

    def to_xml(self, encoding='utf-8'):
        '''Return the full report as an XML string in `encoding`.'''
        document = self.create_report_document()
        return document.toxml(encoding=encoding)

    def summarize_results(self, result):
        '''Collect testsuite-level attributes from a `VowsResult`.

        Returns a dict with totals, timestamp, hostname, elapsed time and the
        raw context list used to build the individual <testcase> elements.
        '''
        result_summary = {
            'total': result.total_test_count,
            # NOTE(review): "errors" is hard-coded to 0 and errored tests are
            # reported under "failures" — preserved for report compatibility.
            'errors': 0,
            'skip': result.skipped_tests,
            'failures': result.errored_tests,
            'ts': datetime.now().strftime("%Y-%m-%dT%H:%M:%S"),
            'hostname': socket.gethostname(),
            'elapsed': result.elapsed_time,
            'contexts': result.contexts
        }
        return result_summary

    def create_report_document(self):
        '''Build and return the `xml.dom.minidom.Document` for the report.'''
        result_summary = self.result_summary

        document = Document()
        testsuite_node = document.createElement('testsuite')
        testsuite_node.setAttribute('name', 'pyvows')
        testsuite_node.setAttribute('tests', str(result_summary['total']))
        testsuite_node.setAttribute('errors', str(result_summary['errors']))
        testsuite_node.setAttribute('failures', str(result_summary['failures']))
        testsuite_node.setAttribute('skip', str(result_summary['skip']))
        testsuite_node.setAttribute('timestamp', str(result_summary['ts']))
        testsuite_node.setAttribute('hostname', str(result_summary['hostname']))
        testsuite_node.setAttribute('time', '{elapsed:.3f}'.format(elapsed=result_summary['elapsed']))

        document.appendChild(testsuite_node)

        for context in result_summary['contexts']:
            self.create_test_case_elements(document, testsuite_node, context)

        return document

    def _safe_cdata(self, text):
        '''Strip characters that are not allowed inside an XML CDATA section.'''
        # Renamed parameter (was `str`) to avoid shadowing the builtin.
        return INVALID_CHARACTERS.sub('', text)

    def create_test_case_elements(self, document, parent_node, context):
        '''Append <testcase> elements for `context`, its tests and its
        child contexts (recursively) to `parent_node`.
        '''
        # One synthetic "topic" testcase per context, carrying the context's
        # captured stdout/stderr and any topic-level error or skip.
        topic_node = document.createElement('testcase')
        topic_node.setAttribute('classname', context['name'])
        topic_node.setAttribute('name', 'topic')
        topic_node.setAttribute('time', '0.0')

        stdOutNode = document.createElement('system-out')
        stdOutText = document.createCDATASection(self._safe_cdata(context['stdout']))
        stdOutNode.appendChild(stdOutText)

        stdErrNode = document.createElement('system-err')
        stdErrText = document.createCDATASection(self._safe_cdata(context['stderr']))
        stdErrNode.appendChild(stdErrText)

        topic_node.appendChild(stdOutNode)
        topic_node.appendChild(stdErrNode)

        if context.get('error', None):
            e = context['error']
            error_msg = 'Error in {0!s}: {1!s}'.format(e.source, e.exc_info[1])
            error_tb = traceback.format_exception(*e.exc_info)

            failure_node = document.createElement('failure')
            failure_node.setAttribute('type', e.exc_info[0].__name__)
            failure_node.setAttribute('message', error_msg)
            failure_text = document.createTextNode(''.join(error_tb))
            failure_node.appendChild(failure_text)
            topic_node.appendChild(failure_node)

        if context.get('skip', None):
            skip_node = document.createElement('skipped')
            skip_node.setAttribute('message', str(context['skip']))
            topic_node.appendChild(skip_node)

        parent_node.appendChild(topic_node)

        # One <testcase> per vow in this context.
        for test in context['tests']:
            test_stats = {
                'context': context['name'],
                'name': test['name'],
                # Per-test timing is not tracked by the result object here.
                'taken': 0.0
            }

            testcase_node = document.createElement('testcase')
            testcase_node.setAttribute('classname', str(test_stats['context']))
            testcase_node.setAttribute('name', str(test_stats['name']))
            testcase_node.setAttribute('time', '{time:.3f}'.format(time=test_stats['taken']))

            stdOutNode = document.createElement('system-out')
            stdOutText = document.createCDATASection(self._safe_cdata(test['stdout']))
            stdOutNode.appendChild(stdOutText)

            stdErrNode = document.createElement('system-err')
            stdErrText = document.createCDATASection(self._safe_cdata(test['stderr']))
            stdErrNode.appendChild(stdErrText)

            testcase_node.appendChild(stdOutNode)
            testcase_node.appendChild(stdErrNode)

            parent_node.appendChild(testcase_node)

            if test.get('error', None):
                error = test['error']
                error_msg = traceback.format_exception(
                    error['type'],
                    error['value'],
                    error['traceback']
                )
                error_data = {
                    'errtype': error['type'].__name__,
                    'msg': error['value'],
                    'tb': ''.join(error_msg)
                }

                failure_node = document.createElement('failure')
                failure_node.setAttribute('type', str(error_data['errtype']))
                failure_node.setAttribute('message', str(error_data['msg']))
                failure_text = document.createTextNode(str(error_data['tb']))
                failure_node.appendChild(failure_text)
                testcase_node.appendChild(failure_node)

            if test.get('skip', None):
                skip_node = document.createElement('skipped')
                skip_node.setAttribute('message', str(test['skip']))
                testcase_node.appendChild(skip_node)

        # Recurse into nested contexts; their testcases attach to the same
        # parent (flat XUnit layout), not to this topic node.
        for ctx in context['contexts']:
            self.create_test_case_elements(document, parent_node, ctx)
douglaskastle/mezzanine
refs/heads/master
mezzanine/core/migrations/__init__.py
12133432
tfroehlich82/erpnext
refs/heads/develop
erpnext/education/report/absent_student_report/__init__.py
12133432
poiati/django
refs/heads/master
tests/migrations/migrations_test_apps/migrated_app/models.py
12133432
Partoo/scrapy
refs/heads/master
scrapy/templates/project/module/__init__.py
12133432
chirilo/mozillians
refs/heads/master
vendor-local/lib/python/tablib/packages/yaml/reader.py
114
# This module contains abstractions for the input stream. You don't have to # looks further, there are no pretty code. # # We define two classes here. # # Mark(source, line, column) # It's just a record and its only use is producing nice error messages. # Parser does not use it for any other purposes. # # Reader(source, data) # Reader determines the encoding of `data` and converts it to unicode. # Reader provides the following methods and attributes: # reader.peek(length=1) - return the next `length` characters # reader.forward(length=1) - move the current position to `length` characters. # reader.index - the number of the current character. # reader.line, stream.column - the line and the column of the current character. __all__ = ['Reader', 'ReaderError'] from error import YAMLError, Mark import codecs, re # Unfortunately, codec functions in Python 2.3 does not support the `finish` # arguments, so we have to write our own wrappers. try: codecs.utf_8_decode('', 'strict', False) from codecs import utf_8_decode, utf_16_le_decode, utf_16_be_decode except TypeError: def utf_16_le_decode(data, errors, finish=False): if not finish and len(data) % 2 == 1: data = data[:-1] return codecs.utf_16_le_decode(data, errors) def utf_16_be_decode(data, errors, finish=False): if not finish and len(data) % 2 == 1: data = data[:-1] return codecs.utf_16_be_decode(data, errors) def utf_8_decode(data, errors, finish=False): if not finish: # We are trying to remove a possible incomplete multibyte character # from the suffix of the data. # The first byte of a multi-byte sequence is in the range 0xc0 to 0xfd. # All further bytes are in the range 0x80 to 0xbf. # UTF-8 encoded UCS characters may be up to six bytes long. 
count = 0 while count < 5 and count < len(data) \ and '\x80' <= data[-count-1] <= '\xBF': count -= 1 if count < 5 and count < len(data) \ and '\xC0' <= data[-count-1] <= '\xFD': data = data[:-count-1] return codecs.utf_8_decode(data, errors) class ReaderError(YAMLError): def __init__(self, name, position, character, encoding, reason): self.name = name self.character = character self.position = position self.encoding = encoding self.reason = reason def __str__(self): if isinstance(self.character, str): return "'%s' codec can't decode byte #x%02x: %s\n" \ " in \"%s\", position %d" \ % (self.encoding, ord(self.character), self.reason, self.name, self.position) else: return "unacceptable character #x%04x: %s\n" \ " in \"%s\", position %d" \ % (self.character, self.reason, self.name, self.position) class Reader(object): # Reader: # - determines the data encoding and converts it to unicode, # - checks if characters are in allowed range, # - adds '\0' to the end. # Reader accepts # - a `str` object, # - a `unicode` object, # - a file-like object with its `read` method returning `str`, # - a file-like object with its `read` method returning `unicode`. # Yeah, it's ugly and slow. 
def __init__(self, stream): self.name = None self.stream = None self.stream_pointer = 0 self.eof = True self.buffer = u'' self.pointer = 0 self.raw_buffer = None self.raw_decode = None self.encoding = None self.index = 0 self.line = 0 self.column = 0 if isinstance(stream, unicode): self.name = "<unicode string>" self.check_printable(stream) self.buffer = stream+u'\0' elif isinstance(stream, str): self.name = "<string>" self.raw_buffer = stream self.determine_encoding() else: self.stream = stream self.name = getattr(stream, 'name', "<file>") self.eof = False self.raw_buffer = '' self.determine_encoding() def peek(self, index=0): try: return self.buffer[self.pointer+index] except IndexError: self.update(index+1) return self.buffer[self.pointer+index] def prefix(self, length=1): if self.pointer+length >= len(self.buffer): self.update(length) return self.buffer[self.pointer:self.pointer+length] def forward(self, length=1): if self.pointer+length+1 >= len(self.buffer): self.update(length+1) while length: ch = self.buffer[self.pointer] self.pointer += 1 self.index += 1 if ch in u'\n\x85\u2028\u2029' \ or (ch == u'\r' and self.buffer[self.pointer] != u'\n'): self.line += 1 self.column = 0 elif ch != u'\uFEFF': self.column += 1 length -= 1 def get_mark(self): if self.stream is None: return Mark(self.name, self.index, self.line, self.column, self.buffer, self.pointer) else: return Mark(self.name, self.index, self.line, self.column, None, None) def determine_encoding(self): while not self.eof and len(self.raw_buffer) < 2: self.update_raw() if not isinstance(self.raw_buffer, unicode): if self.raw_buffer.startswith(codecs.BOM_UTF16_LE): self.raw_decode = utf_16_le_decode self.encoding = 'utf-16-le' elif self.raw_buffer.startswith(codecs.BOM_UTF16_BE): self.raw_decode = utf_16_be_decode self.encoding = 'utf-16-be' else: self.raw_decode = utf_8_decode self.encoding = 'utf-8' self.update(1) NON_PRINTABLE = re.compile(u'[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\uD7FF\uE000-\uFFFD]') def 
check_printable(self, data): match = self.NON_PRINTABLE.search(data) if match: character = match.group() position = self.index+(len(self.buffer)-self.pointer)+match.start() raise ReaderError(self.name, position, ord(character), 'unicode', "special characters are not allowed") def update(self, length): if self.raw_buffer is None: return self.buffer = self.buffer[self.pointer:] self.pointer = 0 while len(self.buffer) < length: if not self.eof: self.update_raw() if self.raw_decode is not None: try: data, converted = self.raw_decode(self.raw_buffer, 'strict', self.eof) except UnicodeDecodeError, exc: character = exc.object[exc.start] if self.stream is not None: position = self.stream_pointer-len(self.raw_buffer)+exc.start else: position = exc.start raise ReaderError(self.name, position, character, exc.encoding, exc.reason) else: data = self.raw_buffer converted = len(data) self.check_printable(data) self.buffer += data self.raw_buffer = self.raw_buffer[converted:] if self.eof: self.buffer += u'\0' self.raw_buffer = None break def update_raw(self, size=1024): data = self.stream.read(size) if data: self.raw_buffer += data self.stream_pointer += len(data) else: self.eof = True #try: # import psyco # psyco.bind(Reader) #except ImportError: # pass
shssoichiro/servo
refs/heads/master
tests/wpt/web-platform-tests/tools/pytest/_pytest/vendored_packages/pluggy.py
178
""" PluginManager, basic initialization and tracing. pluggy is the cristallized core of plugin management as used by some 150 plugins for pytest. Pluggy uses semantic versioning. Breaking changes are only foreseen for Major releases (incremented X in "X.Y.Z"). If you want to use pluggy in your project you should thus use a dependency restriction like "pluggy>=0.1.0,<1.0" to avoid surprises. pluggy is concerned with hook specification, hook implementations and hook calling. For any given hook specification a hook call invokes up to N implementations. A hook implementation can influence its position and type of execution: if attributed "tryfirst" or "trylast" it will be tried to execute first or last. However, if attributed "hookwrapper" an implementation can wrap all calls to non-hookwrapper implementations. A hookwrapper can thus execute some code ahead and after the execution of other hooks. Hook specification is done by way of a regular python function where both the function name and the names of all its arguments are significant. Each hook implementation function is verified against the original specification function, including the names of all its arguments. To allow for hook specifications to evolve over the livetime of a project, hook implementations can accept less arguments. One can thus add new arguments and semantics to a hook specification by adding another argument typically without breaking existing hook implementations. The chosen approach is meant to let a hook designer think carefuly about which objects are needed by an extension writer. By contrast, subclass-based extension mechanisms often expose a lot more state and behaviour than needed, thus restricting future developments. Pluggy currently consists of functionality for: - a way to register new hook specifications. Without a hook specification no hook calling can be performed. - a registry of plugins which contain hook implementation functions. 
It is possible to register plugins for which a hook specification is not yet known and validate all hooks when the system is in a more referentially consistent state. Setting an "optionalhook" attribution to a hook implementation will avoid PluginValidationError's if a specification is missing. This allows to have optional integration between plugins. - a "hook" relay object from which you can launch 1:N calls to registered hook implementation functions - a mechanism for ordering hook implementation functions - mechanisms for two different type of 1:N calls: "firstresult" for when the call should stop when the first implementation returns a non-None result. And the other (default) way of guaranteeing that all hook implementations will be called and their non-None result collected. - mechanisms for "historic" extension points such that all newly registered functions will receive all hook calls that happened before their registration. - a mechanism for discovering plugin objects which are based on setuptools based entry points. - a simple tracing mechanism, including tracing of plugin calls and their arguments. """ import sys import inspect __version__ = '0.3.1' __all__ = ["PluginManager", "PluginValidationError", "HookspecMarker", "HookimplMarker"] _py3 = sys.version_info > (3, 0) class HookspecMarker: """ Decorator helper class for marking functions as hook specifications. You can instantiate it with a project_name to get a decorator. Calling PluginManager.add_hookspecs later will discover all marked functions if the PluginManager uses the same project_name. """ def __init__(self, project_name): self.project_name = project_name def __call__(self, function=None, firstresult=False, historic=False): """ if passed a function, directly sets attributes on the function which will make it discoverable to add_hookspecs(). If passed no function, returns a decorator which can be applied to a function later using the attributes supplied. 
If firstresult is True the 1:N hook call (N being the number of registered hook implementation functions) will stop at I<=N when the I'th function returns a non-None result. If historic is True calls to a hook will be memorized and replayed on later registered plugins. """ def setattr_hookspec_opts(func): if historic and firstresult: raise ValueError("cannot have a historic firstresult hook") setattr(func, self.project_name + "_spec", dict(firstresult=firstresult, historic=historic)) return func if function is not None: return setattr_hookspec_opts(function) else: return setattr_hookspec_opts class HookimplMarker: """ Decorator helper class for marking functions as hook implementations. You can instantiate with a project_name to get a decorator. Calling PluginManager.register later will discover all marked functions if the PluginManager uses the same project_name. """ def __init__(self, project_name): self.project_name = project_name def __call__(self, function=None, hookwrapper=False, optionalhook=False, tryfirst=False, trylast=False): """ if passed a function, directly sets attributes on the function which will make it discoverable to register(). If passed no function, returns a decorator which can be applied to a function later using the attributes supplied. If optionalhook is True a missing matching hook specification will not result in an error (by default it is an error if no matching spec is found). If tryfirst is True this hook implementation will run as early as possible in the chain of N hook implementations for a specfication. If trylast is True this hook implementation will run as late as possible in the chain of N hook implementations. If hookwrapper is True the hook implementations needs to execute exactly one "yield". The code before the yield is run early before any non-hookwrapper function is run. The code after the yield is run after all non-hookwrapper function have run. 
The yield receives an ``_CallOutcome`` object representing the exception or result outcome of the inner calls (including other hookwrapper calls). """ def setattr_hookimpl_opts(func): setattr(func, self.project_name + "_impl", dict(hookwrapper=hookwrapper, optionalhook=optionalhook, tryfirst=tryfirst, trylast=trylast)) return func if function is None: return setattr_hookimpl_opts else: return setattr_hookimpl_opts(function) def normalize_hookimpl_opts(opts): opts.setdefault("tryfirst", False) opts.setdefault("trylast", False) opts.setdefault("hookwrapper", False) opts.setdefault("optionalhook", False) class _TagTracer: def __init__(self): self._tag2proc = {} self.writer = None self.indent = 0 def get(self, name): return _TagTracerSub(self, (name,)) def format_message(self, tags, args): if isinstance(args[-1], dict): extra = args[-1] args = args[:-1] else: extra = {} content = " ".join(map(str, args)) indent = " " * self.indent lines = [ "%s%s [%s]\n" % (indent, content, ":".join(tags)) ] for name, value in extra.items(): lines.append("%s %s: %s\n" % (indent, name, value)) return lines def processmessage(self, tags, args): if self.writer is not None and args: lines = self.format_message(tags, args) self.writer(''.join(lines)) try: self._tag2proc[tags](tags, args) except KeyError: pass def setwriter(self, writer): self.writer = writer def setprocessor(self, tags, processor): if isinstance(tags, str): tags = tuple(tags.split(":")) else: assert isinstance(tags, tuple) self._tag2proc[tags] = processor class _TagTracerSub: def __init__(self, root, tags): self.root = root self.tags = tags def __call__(self, *args): self.root.processmessage(self.tags, args) def setmyprocessor(self, processor): self.root.setprocessor(self.tags, processor) def get(self, name): return self.__class__(self.root, self.tags + (name,)) def _raise_wrapfail(wrap_controller, msg): co = wrap_controller.gi_code raise RuntimeError("wrap_controller at %r %s:%d %s" % (co.co_name, co.co_filename, 
co.co_firstlineno, msg)) def _wrapped_call(wrap_controller, func): """ Wrap calling to a function with a generator which needs to yield exactly once. The yield point will trigger calling the wrapped function and return its _CallOutcome to the yield point. The generator then needs to finish (raise StopIteration) in order for the wrapped call to complete. """ try: next(wrap_controller) # first yield except StopIteration: _raise_wrapfail(wrap_controller, "did not yield") call_outcome = _CallOutcome(func) try: wrap_controller.send(call_outcome) _raise_wrapfail(wrap_controller, "has second yield") except StopIteration: pass return call_outcome.get_result() class _CallOutcome: """ Outcome of a function call, either an exception or a proper result. Calling the ``get_result`` method will return the result or reraise the exception raised when the function was called. """ excinfo = None def __init__(self, func): try: self.result = func() except BaseException: self.excinfo = sys.exc_info() def force_result(self, result): self.result = result self.excinfo = None def get_result(self): if self.excinfo is None: return self.result else: ex = self.excinfo if _py3: raise ex[1].with_traceback(ex[2]) _reraise(*ex) # noqa if not _py3: exec(""" def _reraise(cls, val, tb): raise cls, val, tb """) class _TracedHookExecution: def __init__(self, pluginmanager, before, after): self.pluginmanager = pluginmanager self.before = before self.after = after self.oldcall = pluginmanager._inner_hookexec assert not isinstance(self.oldcall, _TracedHookExecution) self.pluginmanager._inner_hookexec = self def __call__(self, hook, hook_impls, kwargs): self.before(hook.name, hook_impls, kwargs) outcome = _CallOutcome(lambda: self.oldcall(hook, hook_impls, kwargs)) self.after(outcome, hook.name, hook_impls, kwargs) return outcome.get_result() def undo(self): self.pluginmanager._inner_hookexec = self.oldcall class PluginManager(object): """ Core Pluginmanager class which manages registration of plugin 
objects and 1:N hook calling. You can register new hooks by calling ``addhooks(module_or_class)``. You can register plugin objects (which contain hooks) by calling ``register(plugin)``. The Pluginmanager is initialized with a prefix that is searched for in the names of the dict of registered plugin objects. An optional excludefunc allows to blacklist names which are not considered as hooks despite a matching prefix. For debugging purposes you can call ``enable_tracing()`` which will subsequently send debug information to the trace helper. """ def __init__(self, project_name, implprefix=None): """ if implprefix is given implementation functions will be recognized if their name matches the implprefix. """ self.project_name = project_name self._name2plugin = {} self._plugin2hookcallers = {} self._plugin_distinfo = [] self.trace = _TagTracer().get("pluginmanage") self.hook = _HookRelay(self.trace.root.get("hook")) self._implprefix = implprefix self._inner_hookexec = lambda hook, methods, kwargs: \ _MultiCall(methods, kwargs, hook.spec_opts).execute() def _hookexec(self, hook, methods, kwargs): # called from all hookcaller instances. # enable_tracing will set its own wrapping function at self._inner_hookexec return self._inner_hookexec(hook, methods, kwargs) def register(self, plugin, name=None): """ Register a plugin and return its canonical name or None if the name is blocked from registering. Raise a ValueError if the plugin is already registered. 
""" plugin_name = name or self.get_canonical_name(plugin) if plugin_name in self._name2plugin or plugin in self._plugin2hookcallers: if self._name2plugin.get(plugin_name, -1) is None: return # blocked plugin, return None to indicate no registration raise ValueError("Plugin already registered: %s=%s\n%s" % (plugin_name, plugin, self._name2plugin)) # XXX if an error happens we should make sure no state has been # changed at point of return self._name2plugin[plugin_name] = plugin # register matching hook implementations of the plugin self._plugin2hookcallers[plugin] = hookcallers = [] for name in dir(plugin): hookimpl_opts = self.parse_hookimpl_opts(plugin, name) if hookimpl_opts is not None: normalize_hookimpl_opts(hookimpl_opts) method = getattr(plugin, name) hookimpl = HookImpl(plugin, plugin_name, method, hookimpl_opts) hook = getattr(self.hook, name, None) if hook is None: hook = _HookCaller(name, self._hookexec) setattr(self.hook, name, hook) elif hook.has_spec(): self._verify_hook(hook, hookimpl) hook._maybe_apply_history(hookimpl) hook._add_hookimpl(hookimpl) hookcallers.append(hook) return plugin_name def parse_hookimpl_opts(self, plugin, name): method = getattr(plugin, name) res = getattr(method, self.project_name + "_impl", None) if res is not None and not isinstance(res, dict): # false positive res = None elif res is None and self._implprefix and name.startswith(self._implprefix): res = {} return res def unregister(self, plugin=None, name=None): """ unregister a plugin object and all its contained hook implementations from internal data structures. 
""" if name is None: assert plugin is not None, "one of name or plugin needs to be specified" name = self.get_name(plugin) if plugin is None: plugin = self.get_plugin(name) # if self._name2plugin[name] == None registration was blocked: ignore if self._name2plugin.get(name): del self._name2plugin[name] for hookcaller in self._plugin2hookcallers.pop(plugin, []): hookcaller._remove_plugin(plugin) return plugin def set_blocked(self, name): """ block registrations of the given name, unregister if already registered. """ self.unregister(name=name) self._name2plugin[name] = None def is_blocked(self, name): """ return True if the name blogs registering plugins of that name. """ return name in self._name2plugin and self._name2plugin[name] is None def add_hookspecs(self, module_or_class): """ add new hook specifications defined in the given module_or_class. Functions are recognized if they have been decorated accordingly. """ names = [] for name in dir(module_or_class): spec_opts = self.parse_hookspec_opts(module_or_class, name) if spec_opts is not None: hc = getattr(self.hook, name, None) if hc is None: hc = _HookCaller(name, self._hookexec, module_or_class, spec_opts) setattr(self.hook, name, hc) else: # plugins registered this hook without knowing the spec hc.set_specification(module_or_class, spec_opts) for hookfunction in (hc._wrappers + hc._nonwrappers): self._verify_hook(hc, hookfunction) names.append(name) if not names: raise ValueError("did not find any %r hooks in %r" % (self.project_name, module_or_class)) def parse_hookspec_opts(self, module_or_class, name): method = getattr(module_or_class, name) return getattr(method, self.project_name + "_spec", None) def get_plugins(self): """ return the set of registered plugins. """ return set(self._plugin2hookcallers) def is_registered(self, plugin): """ Return True if the plugin is already registered. 
""" return plugin in self._plugin2hookcallers def get_canonical_name(self, plugin): """ Return canonical name for a plugin object. Note that a plugin may be registered under a different name which was specified by the caller of register(plugin, name). To obtain the name of an registered plugin use ``get_name(plugin)`` instead.""" return getattr(plugin, "__name__", None) or str(id(plugin)) def get_plugin(self, name): """ Return a plugin or None for the given name. """ return self._name2plugin.get(name) def get_name(self, plugin): """ Return name for registered plugin or None if not registered. """ for name, val in self._name2plugin.items(): if plugin == val: return name def _verify_hook(self, hook, hookimpl): if hook.is_historic() and hookimpl.hookwrapper: raise PluginValidationError( "Plugin %r\nhook %r\nhistoric incompatible to hookwrapper" % (hookimpl.plugin_name, hook.name)) for arg in hookimpl.argnames: if arg not in hook.argnames: raise PluginValidationError( "Plugin %r\nhook %r\nargument %r not available\n" "plugin definition: %s\n" "available hookargs: %s" % (hookimpl.plugin_name, hook.name, arg, _formatdef(hookimpl.function), ", ".join(hook.argnames))) def check_pending(self): """ Verify that all hooks which have not been verified against a hook specification are optional, otherwise raise PluginValidationError""" for name in self.hook.__dict__: if name[0] != "_": hook = getattr(self.hook, name) if not hook.has_spec(): for hookimpl in (hook._wrappers + hook._nonwrappers): if not hookimpl.optionalhook: raise PluginValidationError( "unknown hook %r in plugin %r" % (name, hookimpl.plugin)) def load_setuptools_entrypoints(self, entrypoint_name): """ Load modules from querying the specified setuptools entrypoint name. Return the number of loaded plugins. """ from pkg_resources import iter_entry_points, DistributionNotFound for ep in iter_entry_points(entrypoint_name): # is the plugin registered or blocked? 
if self.get_plugin(ep.name) or self.is_blocked(ep.name): continue try: plugin = ep.load() except DistributionNotFound: continue self.register(plugin, name=ep.name) self._plugin_distinfo.append((plugin, ep.dist)) return len(self._plugin_distinfo) def list_plugin_distinfo(self): """ return list of distinfo/plugin tuples for all setuptools registered plugins. """ return list(self._plugin_distinfo) def list_name_plugin(self): """ return list of name/plugin pairs. """ return list(self._name2plugin.items()) def get_hookcallers(self, plugin): """ get all hook callers for the specified plugin. """ return self._plugin2hookcallers.get(plugin) def add_hookcall_monitoring(self, before, after): """ add before/after tracing functions for all hooks and return an undo function which, when called, will remove the added tracers. ``before(hook_name, hook_impls, kwargs)`` will be called ahead of all hook calls and receive a hookcaller instance, a list of HookImpl instances and the keyword arguments for the hook call. ``after(outcome, hook_name, hook_impls, kwargs)`` receives the same arguments as ``before`` but also a :py:class:`_CallOutcome`` object which represents the result of the overall hook call. """ return _TracedHookExecution(self, before, after).undo def enable_tracing(self): """ enable tracing of hook calls and return an undo function. """ hooktrace = self.hook._trace def before(hook_name, methods, kwargs): hooktrace.root.indent += 1 hooktrace(hook_name, kwargs) def after(outcome, hook_name, methods, kwargs): if outcome.excinfo is None: hooktrace("finish", hook_name, "-->", outcome.result) hooktrace.root.indent -= 1 return self.add_hookcall_monitoring(before, after) def subset_hook_caller(self, name, remove_plugins): """ Return a new _HookCaller instance for the named method which manages calls to all registered plugins except the ones from remove_plugins. 
""" orig = getattr(self.hook, name) plugins_to_remove = [plug for plug in remove_plugins if hasattr(plug, name)] if plugins_to_remove: hc = _HookCaller(orig.name, orig._hookexec, orig._specmodule_or_class, orig.spec_opts) for hookimpl in (orig._wrappers + orig._nonwrappers): plugin = hookimpl.plugin if plugin not in plugins_to_remove: hc._add_hookimpl(hookimpl) # we also keep track of this hook caller so it # gets properly removed on plugin unregistration self._plugin2hookcallers.setdefault(plugin, []).append(hc) return hc return orig class _MultiCall: """ execute a call into multiple python functions/methods. """ # XXX note that the __multicall__ argument is supported only # for pytest compatibility reasons. It was never officially # supported there and is explicitly deprecated since 2.8 # so we can remove it soon, allowing to avoid the below recursion # in execute() and simplify/speed up the execute loop. def __init__(self, hook_impls, kwargs, specopts={}): self.hook_impls = hook_impls self.kwargs = kwargs self.kwargs["__multicall__"] = self self.specopts = specopts def execute(self): all_kwargs = self.kwargs self.results = results = [] firstresult = self.specopts.get("firstresult") while self.hook_impls: hook_impl = self.hook_impls.pop() args = [all_kwargs[argname] for argname in hook_impl.argnames] if hook_impl.hookwrapper: return _wrapped_call(hook_impl.function(*args), self.execute) res = hook_impl.function(*args) if res is not None: if firstresult: return res results.append(res) if not firstresult: return results def __repr__(self): status = "%d meths" % (len(self.hook_impls),) if hasattr(self, "results"): status = ("%d results, " % len(self.results)) + status return "<_MultiCall %s, kwargs=%r>" % (status, self.kwargs) def varnames(func, startindex=None): """ return argument name tuple for a function, method, class or callable. In case of a class, its "__init__" method is considered. 
For methods the "self" parameter is not included unless you are passing an unbound method with Python3 (which has no supports for unbound methods) """ cache = getattr(func, "__dict__", {}) try: return cache["_varnames"] except KeyError: pass if inspect.isclass(func): try: func = func.__init__ except AttributeError: return () startindex = 1 else: if not inspect.isfunction(func) and not inspect.ismethod(func): func = getattr(func, '__call__', func) if startindex is None: startindex = int(inspect.ismethod(func)) try: rawcode = func.__code__ except AttributeError: return () try: x = rawcode.co_varnames[startindex:rawcode.co_argcount] except AttributeError: x = () else: defaults = func.__defaults__ if defaults: x = x[:-len(defaults)] try: cache["_varnames"] = x except TypeError: pass return x class _HookRelay: """ hook holder object for performing 1:N hook calls where N is the number of registered plugins. """ def __init__(self, trace): self._trace = trace class _HookCaller(object): def __init__(self, name, hook_execute, specmodule_or_class=None, spec_opts=None): self.name = name self._wrappers = [] self._nonwrappers = [] self._hookexec = hook_execute if specmodule_or_class is not None: assert spec_opts is not None self.set_specification(specmodule_or_class, spec_opts) def has_spec(self): return hasattr(self, "_specmodule_or_class") def set_specification(self, specmodule_or_class, spec_opts): assert not self.has_spec() self._specmodule_or_class = specmodule_or_class specfunc = getattr(specmodule_or_class, self.name) argnames = varnames(specfunc, startindex=inspect.isclass(specmodule_or_class)) assert "self" not in argnames # sanity check self.argnames = ["__multicall__"] + list(argnames) self.spec_opts = spec_opts if spec_opts.get("historic"): self._call_history = [] def is_historic(self): return hasattr(self, "_call_history") def _remove_plugin(self, plugin): def remove(wrappers): for i, method in enumerate(wrappers): if method.plugin == plugin: del wrappers[i] return 
True if remove(self._wrappers) is None: if remove(self._nonwrappers) is None: raise ValueError("plugin %r not found" % (plugin,)) def _add_hookimpl(self, hookimpl): if hookimpl.hookwrapper: methods = self._wrappers else: methods = self._nonwrappers if hookimpl.trylast: methods.insert(0, hookimpl) elif hookimpl.tryfirst: methods.append(hookimpl) else: # find last non-tryfirst method i = len(methods) - 1 while i >= 0 and methods[i].tryfirst: i -= 1 methods.insert(i + 1, hookimpl) def __repr__(self): return "<_HookCaller %r>" % (self.name,) def __call__(self, **kwargs): assert not self.is_historic() return self._hookexec(self, self._nonwrappers + self._wrappers, kwargs) def call_historic(self, proc=None, kwargs=None): self._call_history.append((kwargs or {}, proc)) # historizing hooks don't return results self._hookexec(self, self._nonwrappers + self._wrappers, kwargs) def call_extra(self, methods, kwargs): """ Call the hook with some additional temporarily participating methods using the specified kwargs as call parameters. """ old = list(self._nonwrappers), list(self._wrappers) for method in methods: opts = dict(hookwrapper=False, trylast=False, tryfirst=False) hookimpl = HookImpl(None, "<temp>", method, opts) self._add_hookimpl(hookimpl) try: return self(**kwargs) finally: self._nonwrappers, self._wrappers = old def _maybe_apply_history(self, method): if self.is_historic(): for kwargs, proc in self._call_history: res = self._hookexec(self, [method], kwargs) if res and proc is not None: proc(res[0]) class HookImpl: def __init__(self, plugin, plugin_name, function, hook_impl_opts): self.function = function self.argnames = varnames(self.function) self.plugin = plugin self.opts = hook_impl_opts self.plugin_name = plugin_name self.__dict__.update(hook_impl_opts) class PluginValidationError(Exception): """ plugin failed validation. 
""" if hasattr(inspect, 'signature'): def _formatdef(func): return "%s%s" % ( func.__name__, str(inspect.signature(func)) ) else: def _formatdef(func): return "%s%s" % ( func.__name__, inspect.formatargspec(*inspect.getargspec(func)) )
jalavik/invenio-records
refs/heads/master
invenio_records/recordext/__init__.py
634
# -*- coding: utf-8 -*- # # This file is part of Invenio. # Copyright (C) 2013 CERN. # # Invenio is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License as # published by the Free Software Foundation; either version 2 of the # License, or (at your option) any later version. # # Invenio is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Invenio; if not, write to the Free Software Foundation, Inc., # 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
ProfessionalIT/professionalit-webiste
refs/heads/master
sdk/google_appengine/lib/django-1.5/django/contrib/sitemaps/management/__init__.py
12133432
simone/django-gb
refs/heads/master
tests/migrations/faulty_migrations/__init__.py
12133432
igsr/igsr_analysis
refs/heads/master
p3/__init__.py
12133432
chrys87/fenrir
refs/heads/master
src/fenrirscreenreader/commands/onHeartBeat/__init__.py
12133432
jt6562/XX-Net
refs/heads/master
python27/1.0/lib/darwin/gevent/_semaphore.py
6
def __bootstrap__():
    """Load the compiled C extension backing this module.

    Locates the platform-specific shared object (``_semaphore.so``)
    bundled inside the package and re-initialises this module in place
    from it, so importers see the extension's contents instead of this
    pure-Python stub.
    """
    global __bootstrap__, __loader__, __file__
    import pkg_resources, imp
    # Resolve the .so shipped with the package (works from eggs/zips too).
    __file__ = pkg_resources.resource_filename(__name__,'_semaphore.so')
    # Drop the bootstrap machinery before loading so these names do not
    # linger in (or shadow anything in) the extension's namespace.
    __loader__ = None; del __bootstrap__, __loader__
    # Replace this module's contents with the compiled extension's.
    imp.load_dynamic(__name__,__file__)
try:
    __bootstrap__()
except (ImportError, LookupError):
    # Normalise any load failure into a plain ImportError for callers.
    raise ImportError('No module named %s' % __name__)
cathook/PTTArticleRecommender
refs/heads/master
src/miner/modules/mining.py
2
import bs4 #import urllib2 import re import time import logging import json from datetime import datetime from optparse import OptionParser from optparse import Option, OptionValueError import urllib.request as urllib2 import os, sys import importlib importlib.reload(sys) LOGNAME = 'log' VERSION = '0.2' globvar = 0 #arr = [] class BBSCrawler(object): ''' @author: Paul Yang @note: This prog is to fetch the ptt's content based on the board name like car and the fetched files will be stored under the directory "./fetched/BOARDNAME/" @since: 2014/8/2, v0.2 ''' def __init__(self, board_name = 'car', myPageNum = 10, fetch_path = './', toNum = 0,debugFlag = False, forAll = False): ''' Constructor ''' self.useHeader = False ## debug flag to enable debug - not finished yet. self.debugFlag = debugFlag self.board_name = board_name ## put the cookie header for the board like Gossiping to pass around the limit of 18 age if self.board_name == 'Gossiping': self.initHeader() self.useHeader = True self.myPageNum = myPageNum ## if forAll is on, iterate the total number of pages for the board by getAllPagesInTheBoard() self.forAll = forAll self.toNum = toNum self.fetch_path = fetch_path self.path = os.path.join(self.fetch_path, self.board_name) self.ESPECIAL_URL = 'http://www.ptt.cc/bbs/' + self.board_name + '/index' + '.html' self.post_url = lambda id: 'http://www.ptt.cc/bbs/' + self.board_name + '/' + id + '.html' self.page_url = lambda n: 'http://www.ptt.cc/bbs/' + self.board_name + '/index' + str(n) + '.html' self.initLogging() self.statisticDic = dict() self.num_pushes = dict() self.metadic = dict() if not os.path.exists(self.path): os.makedirs(self.path) os.chdir(self.path) sys.stderr.write('Crawling "%s" ...\n' % self.board_name) self.logger.info('Crawling "%s" ...\n' % self.board_name) ## for over 18 content, need to put the header def initHeader(self): self.headers = dict() self.headers['Cookie'] = str('over18=1; 
__utma=156441338.1052450315.1398943535.1398943535.1398943535.1; __utmb=156441338.2.10.1398943535; __utmc=156441338; __utmz=156441338.1398943535.1.1.utmcsr=(direct)|utmccn=(direct)|utmcmd=(none)' ) def initLogging(self): ''' initializing logging function and put to /bbsCrawler.log ''' print ("initializing the logging .......") myLogPath = os.path.join(self.path, LOGNAME) try: os.makedirs(myLogPath) except: sys.stderr.write('Warning: "%s" already existed\n' % myLogPath) LOGPATH = myLogPath + '/bbsCrawler.log' #logger.warn('Warning: "%s" already existed\n' % myLogPath) self.logger = logging.getLogger('bbs crawler') self.logger.setLevel(logging.DEBUG) formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s') hdlr = logging.FileHandler(LOGPATH) hdlr.setFormatter(formatter) self.logger.addHandler(hdlr) #logger.setLevel(logging.DEBUG) self.logger.info('bbs crawler started') def remove_html_tags(self, data): p = re.compile(r'<.*?>') return p.sub('', data) def closeLogging(self): self.logger.info('closing logging') handlers = self.logger.handlers[:] for handler in handlers: handler.close() self.logger.removeHandler(handler) def getAllPagesInTheBoard(self): self.logger.info('getting all pages number from "%s" ...\n' % self.board_name) try: if (self.useHeader): request = urllib2.Request(self.ESPECIAL_URL, headers=self.headers) indexPage = bs4.BeautifulSoup(urllib2.urlopen(request).read(), "lxml") else: indexPage = bs4.BeautifulSoup(urllib2.urlopen(self.ESPECIAL_URL).read(), "lxml") ## filter '/bbs/car/index1275.html' to number only "1275" self.allPageNums = int(re.sub(r'[^0-9]+', '', indexPage.find_all("a", class_="btn wide")[1].get('href'))) sys.stderr.write('Total number of pages: %d\n' % self.allPageNums) self.logger.error('Total number of pages: %d\n' % self.allPageNums) global globvar globvar = self.allPageNums except: sys.stderr.write('can not get the number of pages') self.logger.error('cannot get the number of pages \n') def getContent(self): start_time = 
time.time() if (not self.forAll): ##use self.myPageNum for designate page pagesToRun = self.myPageNum else: ##use all number got from getAllPagesInTheBoard() pagesToRun = self.allPageNums if (self.toNum != 0 and not self.forAll): ## to suport 2 arguments start 3 -> 100 startIndex = pagesToRun endIndex = self.toNum else: startIndex = 1 endIndex = pagesToRun ## add the index to record the total number that has processed, the number of failure and the number of success self.statisticDic['indexFailure'] = 0 self.statisticDic['totalPostNum'] = 0 self.statisticDic['fetchFailureNum'] = 0 ID = 1 self.arr = [] with open('Metadata_json','w+') as metaDataFp: ## iterate through index page like "www.ptt.cc/bbs/car/index.html" to get each POST ID #for indexP in xrange(1, pagesToRun + 1): for indexP in range(startIndex, endIndex): sys.stderr.write('start from index %s ...\n' % indexP) self.logger.debug('start from index %s ...\n' % indexP) try: if (self.useHeader): ## if the page require header request = urllib2.Request(self.page_url(indexP), headers=self.headers) page = bs4.BeautifulSoup(urllib2.urlopen(request).read(), "lxml") else: page = bs4.BeautifulSoup(urllib2.urlopen(self.page_url(indexP)).read(), "lxml") except: sys.stderr.write('Error occured while fetching %s\n' % self.page_url(indexP)) self.logger.error('Error occured while fetching %s\n' % self.page_url(indexP)) ## how many index has failed self.statisticDic['indexFailure'] += 1 continue ## iterate through posts on this page for link in page.find_all(class_='r-ent'): try: ## For instance: "M.1368632629.A.AF7" post_id = link.a.get('href').split('/')[-1][:-5] """ ## Record the number of pushes from <div class="nrec">, which is an integer from -100 to 100 if (link.span): if link.span.contents[0] == u'爆' : self.num_pushes[post_id] = 100 elif link.span.contents[0] == "X1" : self.num_pushes[post_id] = -10 elif link.span.contents[0] == "X2" : self.num_pushes[post_id] = -20 elif link.span.contents[0] == "X3" : 
self.num_pushes[post_id] = -30 elif link.span.contents[0] == "X4" : self.num_pushes[post_id] = -40 elif link.span.contents[0] == "X5" : self.num_pushes[post_id] = -50 elif link.span.contents[0] == "X6" : self.num_pushes[post_id] = -60 elif link.span.contents[0] == "X7" : self.num_pushes[post_id] = -70 elif link.span.contents[0] == "X8" : self.num_pushes[post_id] = -80 elif link.span.contents[0] == "X9" : self.num_pushes[post_id] = -90 elif link.span.contents[0] == "XX" : self.num_pushes[post_id] = -100 else: self.num_pushes[post_id] = int(link.span.contents[0]) ## if can't find push, set 0 push else: self.num_pushes[post_id] = 0 """ except: sys.stderr.write('Error occured while fetching2 %s\n' % post_id) self.logger.error('Error occured while fetching %s\n' % post_id) continue ## Fetch the post content via post id, ex. http://www.ptt.cc/bbs/car/M.1400136465.A.DD5.html self.statisticDic['totalPostNum'] += 1 try: sys.stderr.write('Fetching %s ...\n' % post_id) self.logger.info('Fetching %s ...\n' % post_id) if (self.useHeader): ## if the page require header request = urllib2.Request(self.post_url(post_id), headers=self.headers) post = bs4.BeautifulSoup(urllib2.urlopen(request).read(), "lxml") else: post = bs4.BeautifulSoup(urllib2.urlopen(self.post_url(post_id)).read(), "lxml") except: sys.stderr.write('Error occured while fetching %s\n' % self.post_url(post_id)) self.logger.error('Error occured while fetching %s\n' % self.post_url(post_id)) ##self.fetchFailureNum += 1 self.statisticDic['fetchFailureNum'] += 1 continue with open(post_id, 'w') as contentFile_fp: try: strr = self.remove_html_tags(str(post.find(id='main-container'))) s = '發信站: 批踢踢實業坊(ptt.cc)' List = strr[strr.find(s)+len(s):].split('\n') # --> ['Line 1', 'Line 2', 'Line 3'] like = 0 dislike = 0 arrow = 0 LikeChinese = '推' DislikeChinese = '噓' ArrowChinese = '→' for replyString in List: if len(replyString) != 0: if replyString[0] == LikeChinese : like = like + 1 elif replyString[0] == ArrowChinese: arrow 
= arrow + 1 elif replyString[0] == DislikeChinese: dislike = dislike + 1 #print([like,arrow,dislike]) #if s in strr: #contentFile_fp.write(strr[0:strr.find(s)-5]) #else: #contentFile_fp.write(strr) contentFile_fp.write(strr[strr[1:].find('\n')+2:]) #contentFile_fp.write(strr) contentFile_fp.write('\n') #contentFile_fp.write(self.remove_html_tags(str(post.find(id='main-container')))) spans = post.find_all('span', {'class' : 'article-meta-value'}) count = 1 #self.num_pushes[post_id] = int(link.span.contents[0]) metaID= ID metaName= str(post_id) metaPush = [like, arrow, dislike] #contentFile_fp.write(str(post_id)+"\n") #contentFile_fp.write(str(link.span.contents[0])+"\n") for span in spans: #print(span.string) if count == 1: metaAuthor = span.string[0:span.string.find('(')-1] if count == 2: metaBoard = span.string if count == 3: if span.string.find("Re:") == -1: metaTitle = span.string else: metaTitle = span.string[4:] if count == 4: date_object = datetime.strptime(span.string[4:], '%b %d %H:%M:%S %Y') #contentFile_fp.write(str(date_object) + '\n') ## write title in a first line metaTime = str(date_object) #else: #contentFile_fp.write(span.string) ## write title in a first line #contentFile_fp.write('\n') count = count + 1 except: sys.stderr.write('Error occured while fetching4 %s\n' % metaName) continue contentFile_fp.close() os.chdir(self.fetch_path) # delay for a little while in fear of getting blocked #time.sleep(0.1) # json.dump({'Id':metaID,'Name':metaName, 'Push':metaPush, 'Author':metaAuthor, 'Board':metaBoard, 'Title':metaTitle,'Time':metaTime}, metaDataFp, indent=7, ensure_ascii=False) self.arr.append({'Id':metaID,'Name':metaName, 'Push':metaPush, 'Author':metaAuthor, 'Board':metaBoard, 'Title':metaTitle,'Time':metaTime}) ID = ID + 1 json.dump(self.arr, metaDataFp, indent=7, ensure_ascii=False) metaDataFp.close() #time.sleep(0.2) ## dump the number of pushes mapping to the file 'num_pushes_json' """ with open('num_pushes_json', 'w') as numPushesFp, 
open('metadata_dic_json', 'w') as metadataDicFp: self.logger.info('Saving the metadata dic and push mapping into JSON') #numPushesFp = open('num_pushes_json', 'w') #metadataDicFp = open('metadata_dic_json', 'w') json.dump(self.num_pushes, numPushesFp) json.dump(self.metadic, metadataDicFp) numPushesFp.close() metadataDicFp.close() """ ## do the final logging and printing all numbers self.logger.info('Ending crawling "%s" ... !! \n' % self.board_name) self.logger.info('\n') self.logger.info('Statistic: \n') self.logger.info('indexFailure number: "%s" \n' % self.statisticDic['indexFailure']) self.logger.info('totalPost number: "%s" \n' % self.statisticDic['totalPostNum']) self.logger.info('fetchFailure number: "%s" \n' % self.statisticDic['fetchFailureNum']) self.logger.info('\n') os.chdir(self.fetch_path) print( "the dir is: %s" %os.listdir(os.getcwd())) self.closeLogging() #os.rename(self.board_name,self.board_name + "_" + str(self.myPageNum) + "_" + str(self.toNum)) elapsed_time = time.time() - start_time print ("the dir is: %s" %os.listdir(os.getcwd())) print ("the total post num: %s" % self.statisticDic['totalPostNum']) print ("elapsed time: %s" % elapsed_time) class MultipleOption(Option): ACTIONS = Option.ACTIONS + ("extend",) STORE_ACTIONS = Option.STORE_ACTIONS + ("extend",) TYPED_ACTIONS = Option.TYPED_ACTIONS + ("extend",) ALWAYS_TYPED_ACTIONS = Option.ALWAYS_TYPED_ACTIONS + ("extend",) def take_action(self, action, dest, opt, value, values, parser): if action == "extend": values.ensure_value(dest, []).append(value) else: Option.take_action(self, action, dest, opt, value, values, parser)
bobobox/ansible
refs/heads/devel
lib/ansible/modules/windows/win_service.py
17
#!/usr/bin/python # -*- coding: utf-8 -*- # (c) 2014, Chris Hoffman <choffman@chathamfinancial.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # this is a windows documentation stub. actual code lives in the .ps1 # file of the same name ANSIBLE_METADATA = {'status': ['stableinterface'], 'supported_by': 'core', 'version': '1.0'} DOCUMENTATION = r''' --- module: win_service version_added: "1.7" short_description: Manages Windows services description: - Manages Windows services options: name: description: - Name of the service required: true default: null aliases: [] start_mode: description: - Set the startup type for the service required: false choices: - auto - manual - disabled state: description: - C(started)/C(stopped) are idempotent actions that will not run commands unless necessary. C(restarted) will always bounce the service. required: false choices: - started - stopped - restarted default: null aliases: [] author: "Chris Hoffman (@chrishoffman)" ''' EXAMPLES = r''' - name: Restart a service win_service: name: spooler state: restarted - name: Set service startup mode to auto and ensure it is started win_service: name: spooler start_mode: auto state: started '''
Yokan-Study/study
refs/heads/master
2018/02.07/python/jya.members.py
1
import requests

# NOTE(review): '{key}' is a placeholder -- a real API key must be
# substituted here before any request can succeed.  TODO confirm the
# expected header format with the Gabia API docs.
headers = {'Authorization': '{key}'}


def getMember(id):
    """Return one member's name for *id*, or None if it cannot be found.

    Queries the Gabia members API and extracts the ``hanadmin`` field
    from the ``client_info`` section of the JSON response.

    Previously this indexed the response directly (``j['client_info']
    ['hanadmin']``) and raised KeyError on error responses, which
    defeated the None check in getMembers().  Using .get() makes the
    documented "None on failure" contract actually hold.
    """
    r = requests.get(
        'https://gapi.gabia.com/members?user_id={0}'.format(id),
        headers=headers,
    )
    j = r.json()
    client_info = j.get('client_info')
    if client_info is None:
        return None
    return client_info.get('hanadmin')


def getMembers(user_ids):
    """Return the names for every id in *user_ids*, skipping unknown ids.

    Ids whose lookup yields None (missing member, API error) are
    silently omitted from the returned list.
    """
    return [name
            for name in (getMember(id) for id in user_ids)
            if name is not None]
BeyondTheClouds/enoslib
refs/heads/master
enoslib/service/k3s/k3s.py
1
from enoslib.api import play_on, run_command
from typing import List

from ..service import Service
from enoslib.objects import Host


class K3s(Service):
    def __init__(self, master: List[Host], agent: List[Host]):
        """Deploy a K3s cluster.

        Reference:
        https://rancher.com/docs/k3s/latest/en/quick-start/

        This is a basic setup for now. Let us know if something is needed
        here: |chat|

        For instance
        - automatic deployment of the dashboard
        - private registry configuration (e.g G5k registry)
        - ...

        Examples:

            .. literalinclude:: examples/k3s.py
                :language: python
                :linenos:
        """
        self.master = master
        self.agent = agent
        # Roles used to target hosts in the ansible plays below.
        self.roles = dict(master=self.master, agent=self.agent)

    def deploy(self):
        # curl is needed on every host to fetch the k3s installer.
        with play_on(roles=self.roles) as p:
            p.apt(name="curl", state="present")
        # Install the k3s server on all master hosts.
        with play_on(pattern_hosts="master", roles=self.roles, gather_facts=False) as p:
            p.shell("curl -sfL https://get.k3s.io | sh")
        # Getting the token
        result = run_command(
            "cat /var/lib/rancher/k3s/server/node-token",
            pattern_hosts="master",
            roles=self.roles,
        )
        # NOTE(review): only the first master's token/address is used to
        # join agents -- assumes a single-master setup; confirm before
        # deploying with several masters.
        token = result["ok"][self.master[0].alias]["stdout"]
        # Install the agent on every agent host and join it to the master.
        with play_on(pattern_hosts="agent", roles=self.roles, gather_facts=False) as p:
            p.shell((
                f"curl -sfL https://get.k3s.io |"
                f"K3S_URL=https://{self.master[0].address}:6443 K3S_TOKEN={token} sh"
            ))

    def destroy(self):
        # Not implemented yet.
        pass

    def backup(self):
        # Not implemented yet.
        pass
crmccreary/openerp_server
refs/heads/master
openerp/addons/document_webdav/nodes.py
9
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from document import nodes from tools.safe_eval import safe_eval as eval import time import urllib import uuid try: from tools.dict_tools import dict_filter except ImportError: from document.dict_tools import dict_filter class node_acl_mixin(object): def _get_dav_owner(self, cr): return self.uuser def _get_dav_group(self, cr): return self.ugroup def _get_dav_supported_privilege_set(self, cr): return '' # TODO def _get_dav_current_user_privilege_set(self, cr): return '' # TODO def _get_dav_props_hlpr(self, cr, par_class, prop_model, prop_ref_field, res_id): """ Helper for dav properties, usable in subclasses @param par_class The parent class @param prop_model The name of the orm model holding the properties @param prop_ref_field The name of the field at prop_model pointing to us @param res_id the id of self in the corresponing orm table, that should match prop_model.prop_ref_field """ ret = par_class.get_dav_props(self, cr) if prop_model: propobj = self.context._dirobj.pool.get(prop_model) uid = self.context.uid ctx = self.context.context.copy() 
ctx.update(self.dctx) # Not really needed because we don't do eval here: # ctx.update({'uid': uid, 'dbname': self.context.dbname }) # dict_filter(self.context.extra_ctx, ['username', 'groupname', 'webdav_path'], ctx) sdomain = [(prop_ref_field, '=', False),] if res_id: sdomain = ['|', (prop_ref_field, '=', res_id)] + sdomain prop_ids = propobj.search(cr, uid, sdomain, context=ctx) if prop_ids: ret = ret.copy() for pbro in propobj.browse(cr, uid, prop_ids, context=ctx): ret[pbro.namespace] = ret.get(pbro.namespace, ()) + \ (pbro.name,) # Note that we cannot have properties to conditionally appear # on the context, yet. return ret def _get_dav_eprop_hlpr(self, cr, ns, prop, par_class, prop_model, prop_ref_field, res_id): """ Helper for get dav eprop, usable in subclasses @param namespace the one to search for @param name Name to search for @param par_class The parent class @param prop_model The name of the orm model holding the properties @param prop_ref_field The name of the field at prop_model pointing to us @param res_id the id of self in the corresponing orm table, that should match prop_model.prop_ref_field """ ret = par_class.get_dav_eprop(self, cr, ns, prop) if ret is not None: return ret if prop_model: propobj = self.context._dirobj.pool.get(prop_model) uid = self.context.uid ctx = self.context.context.copy() ctx.update(self.dctx) ctx.update({'uid': uid, 'dbname': self.context.dbname }) ctx['node_classname'] = "%s.%s" % (self.__class__.__module__, self.__class__.__name__) dict_filter(self.context.extra_ctx, ['username', 'groupname', 'webdav_path'], ctx) sdomain = [(prop_ref_field, '=', False),('namespace', '=', ns), ('name','=', prop)] if res_id: sdomain = ['|', (prop_ref_field, '=', res_id)] + sdomain prop_ids = propobj.search(cr, uid, sdomain, context=ctx) if prop_ids: pbro = propobj.browse(cr, uid, prop_ids[0], context=ctx) val = pbro.value if pbro.do_subst: if val.startswith("('") and val.endswith(")"): glbls = { 'urlquote': urllib.quote, } val = 
eval(val, glbls, ctx) else: val = val % ctx return val return None def _dav_lock_hlpr(self, cr, lock_data, par_class, prop_model, prop_ref_field, res_id): """ Helper, which uses the dav properties table for placing locks @param lock_data a dictionary of input to this function. @return list of tuples, DAV:activelock _contents_ structure. See webdav.py:class Prop2Xml() for semantics Note: although the DAV response shall be an <activelock/>, this function will only return the elements inside the activelock, because the calling function needs to append the <lockroot/> in it. See webdav.py:mk_lock_response() In order to reuse code, this function can be called with lock_data['unlock_mode']=True, in order to unlock. @return bool in unlock mode, (davstruct, prop_id, token) in lock/refresh, or (False, prop_id, token) if already locked, or (False, False, False) if lock not found to refresh """ assert prop_model assert res_id assert isinstance(lock_data, dict), '%r' % lock_data propobj = self.context._dirobj.pool.get(prop_model) uid = self.context.uid ctx = self.context.context.copy() ctx.update(self.dctx) ctx.update({'uid': uid, 'dbname': self.context.dbname }) ctx['node_classname'] = "%s.%s" % (self.__class__.__module__, self.__class__.__name__) dict_filter(self.context.extra_ctx, ['username', 'groupname', 'webdav_path'], ctx) sdomain = [(prop_ref_field, '=', res_id), ('namespace', '=', 'DAV:'), ('name','=', 'lockdiscovery')] props_to_delete = [] lock_found = False lock_val = None tmout2 = int(lock_data.get('timeout', 3*3600)) prop_ids = propobj.search(cr, uid, sdomain, context=ctx) if prop_ids: for pbro in propobj.browse(cr, uid, prop_ids, context=ctx): val = pbro.value if pbro.do_subst: if val.startswith("('") and val.endswith(")"): glbls = { 'urlquote': urllib.quote, } val = eval(val, glbls, ctx) else: # all locks should be at "subst" format continue if not (val and isinstance(val, tuple) and val[0:2] == ( 'activelock','DAV:')): # print "Value is not activelock:", val 
continue old_token = False old_owner = False try: # discover the timeout. If anything goes wrong, delete # the lock (cleanup) tmout = False for parm in val[2]: if parm[1] != 'DAV:': continue if parm[0] == 'timeout': if isinstance(parm[2], basestring) \ and parm[2].startswith('Second-'): tmout = int(parm[2][7:]) elif parm[0] == 'locktoken': if isinstance(parm[2], basestring): old_token = parm[2] elif isinstance(parm[2], tuple) and \ parm[2][0:2] == ('href','DAV:'): old_token = parm[2][2] else: # print "Mangled token in DAV property: %r" % parm[2] props_to_delete.append(pbro.id) continue elif parm[0] == 'owner': old_owner = parm[2] # not used yet if tmout: mdate = pbro.write_date or pbro.create_date mdate = time.mktime(time.strptime(mdate,'%Y-%m-%d %H:%M:%S')) if mdate + tmout < time.time(): props_to_delete.append(pbro.id) continue else: props_to_delete.append(pbro.id) continue except ValueError: props_to_delete.append(pbro.id) continue # A valid lock is found here if lock_data.get('refresh', False): if old_token != lock_data.get('token'): continue # refresh mode. Just touch anything and the ORM will update # the write uid+date, won't it? # Note: we don't update the owner, because incoming refresh # wouldn't have a body, anyway. propobj.write(cr, uid, [pbro.id,], { 'name': 'lockdiscovery'}) elif lock_data.get('unlock_mode', False): if old_token != lock_data.get('token'): continue props_to_delete.append(pbro.id) lock_found = pbro.id lock_val = val if tmout2 > 3*3600: # 3 hours maximum tmout2 = 3*3600 elif tmout2 < 300: # 5 minutes minimum, but an unlock request can always # break it at any time. Ensures no negative values, either. tmout2 = 300 if props_to_delete: # explicitly delete, as admin, any of the ids we have identified. propobj.unlink(cr, 1, props_to_delete) if lock_data.get('unlock_mode', False): return lock_found and True elif (not lock_found) and not (lock_data.get('refresh', False)): # Create a new lock, attach and return it. 
new_token = uuid.uuid4().urn lock_val = ('activelock', 'DAV:', [ ('locktype', 'DAV:', (lock_data.get('locktype',False) or 'write','DAV:')), ('lockscope', 'DAV:', (lock_data.get('lockscope',False) or 'exclusive','DAV:')), # ? ('depth', 'DAV:', lock_data.get('depth','0') ), ('timeout','DAV:', 'Second-%d' % tmout2), ('locktoken', 'DAV:', ('href', 'DAV:', new_token)), # ('lockroot', 'DAV: ..., we don't store that, appended by caller ]) new_owner = lock_data.get('lockowner',False) or ctx.get('username', False) if new_owner: lock_val[2].append( ('owner', 'DAV:', new_owner) ) prop_id = propobj.create(cr, uid, { prop_ref_field: res_id, 'namespace': 'DAV:', 'name': 'lockdiscovery', 'do_subst': True, 'value': repr(lock_val) }) return (lock_val[2], prop_id, new_token ) elif not lock_found: # and refresh return (False, False, False) elif lock_found and not lock_data.get('refresh', False): # already locked return (False, lock_found, old_token) else: return (lock_val[2], lock_found, old_token ) class node_dir(node_acl_mixin, nodes.node_dir): """ override node_dir and add DAV functionality """ DAV_PROPS = { "DAV:": ('owner', 'group', 'supported-privilege-set', 'current-user-privilege-set'), } DAV_M_NS = { "DAV:" : '_get_dav',} http_options = { 'DAV': ['access-control',] } def get_dav_resourcetype(self, cr): return ('collection', 'DAV:') def get_dav_props(self, cr): return self._get_dav_props_hlpr(cr, nodes.node_dir, 'document.webdav.dir.property', 'dir_id', self.dir_id) def get_dav_eprop(self, cr, ns, prop): return self._get_dav_eprop_hlpr(cr, ns, prop, nodes.node_dir, 'document.webdav.dir.property', 'dir_id', self.dir_id) class node_file(node_acl_mixin, nodes.node_file): DAV_PROPS = { "DAV:": ('owner', 'group', 'supported-privilege-set', 'current-user-privilege-set', ), } DAV_M_NS = { "DAV:" : '_get_dav',} http_options = { 'DAV': ['access-control', ] } pass def get_dav_resourcetype(self, cr): return '' def get_dav_props(self, cr): return self._get_dav_props_hlpr(cr, 
nodes.node_file, 'document.webdav.file.property', 'file_id', self.file_id) def dav_lock(self, cr, lock_data): """ Locks or unlocks the node, using DAV semantics. Unlocking will be done when lock_data['unlock_mode'] == True See _dav_lock_hlpr() for calling details. It is fundamentally OK to use this function from non-DAV endpoints, but they will all have to emulate the tuple-in-list structure of the DAV lock data. RFC if this translation should be done inside the _dav_lock_hlpr (to ease other protocols). """ return self._dav_lock_hlpr(cr, lock_data, nodes.node_file, 'document.webdav.file.property', 'file_id', self.file_id) def dav_unlock(self, cr, token): """Releases the token lock held for the node This is a utility complement of dav_lock() """ lock_data = { 'token': token, 'unlock_mode': True } return self._dav_lock_hlpr(cr, lock_data, nodes.node_file, 'document.webdav.file.property', 'file_id', self.file_id) def get_dav_eprop(self, cr, ns, prop): if ns == 'DAV:' and prop == 'supportedlock': return [ ('lockentry', 'DAV:', [ ('lockscope','DAV:', ('shared', 'DAV:')), ('locktype','DAV:', ('write', 'DAV:')), ]), ('lockentry', 'DAV:', [ ('lockscope','DAV:', ('exclusive', 'DAV:')), ('locktype','DAV:', ('write', 'DAV:')), ] ) ] return self._get_dav_eprop_hlpr(cr, ns, prop, nodes.node_file, 'document.webdav.file.property', 'file_id', self.file_id) class node_database(nodes.node_database): def get_dav_resourcetype(self, cr): return ('collection', 'DAV:') def get_dav_props(self, cr): return self._get_dav_props_hlpr(cr, nodes.node_database, 'document.webdav.dir.property', 'dir_id', False) def get_dav_eprop(self, cr, ns, prop): return self._get_dav_eprop_hlpr(cr, nodes.node_database, ns, prop, 'document.webdav.dir.property', 'dir_id', False) class node_res_obj(node_acl_mixin, nodes.node_res_obj): DAV_PROPS = { "DAV:": ('owner', 'group', 'supported-privilege-set', 'current-user-privilege-set'), } DAV_M_NS = { "DAV:" : '_get_dav',} http_options = { 'DAV': ['access-control',] } 
    # NOTE(review): the first three methods below belong to class node_res_obj,
    # whose header (with DAV_PROPS / DAV_M_NS / http_options) appears on the
    # preceding line of this file.

    def get_dav_resourcetype(self, cr):
        # Resource-object nodes are directory-like, hence a DAV 'collection'.
        return ('collection', 'DAV:')

    def get_dav_props(self, cr):
        # Delegate to the shared helper, keyed on this node's dir_id in the
        # 'document.webdav.dir.property' model.
        return self._get_dav_props_hlpr(cr, nodes.node_res_obj,
                'document.webdav.dir.property', 'dir_id', self.dir_id)

    def get_dav_eprop(self, cr, ns, prop):
        # Look up a single extended DAV property (namespace + name) for this node.
        return self._get_dav_eprop_hlpr(cr, ns, prop, nodes.node_res_obj,
                'document.webdav.dir.property', 'dir_id', self.dir_id)


class node_res_dir(node_acl_mixin, nodes.node_res_dir):
    """WebDAV-enabled variant of nodes.node_res_dir.

    Mixes in ACL handling (node_acl_mixin) and advertises DAV properties and
    the 'access-control' HTTP option, mirroring node_res_obj above.
    """
    DAV_PROPS = {
        "DAV:": ('owner', 'group',
                 'supported-privilege-set', 'current-user-privilege-set'),
    }
    DAV_M_NS = { "DAV:" : '_get_dav',}
    http_options = { 'DAV': ['access-control',] }
    # Children of this directory node are materialized as node_res_obj.
    res_obj_class = node_res_obj

    def get_dav_resourcetype(self, cr):
        # Directory nodes are DAV collections.
        return ('collection', 'DAV:')

    def get_dav_props(self, cr):
        return self._get_dav_props_hlpr(cr, nodes.node_res_dir,
                'document.webdav.dir.property', 'dir_id', self.dir_id)

    def get_dav_eprop(self, cr, ns, prop):
        return self._get_dav_eprop_hlpr(cr, ns, prop, nodes.node_res_dir,
                'document.webdav.dir.property', 'dir_id', self.dir_id)

# Some copies, so that this module can replace 'from document import nodes'
get_node_context = nodes.get_node_context
node_context = nodes.node_context
node_class = nodes.node_class
node_descriptor = nodes.node_descriptor

#eof

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
krintoxi/NoobSec-Toolkit
refs/heads/master
NoobSecToolkit - MAC OSX/tools/sqli/tamper/randomcase.py
10
#!/usr/bin/env python

"""
Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

import re

from lib.core.common import randomRange
from lib.core.data import kb
from lib.core.enums import PRIORITY

__priority__ = PRIORITY.NORMAL

def dependencies():
    # This tamper script has no external prerequisites to check.
    pass

def tamper(payload, **kwargs):
    """
    Replaces each keyword character with random case value

    Tested against:
        * Microsoft SQL Server 2005
        * MySQL 4, 5.0 and 5.5
        * Oracle 10g
        * PostgreSQL 8.3, 8.4, 9.0

    Notes:
        * Useful to bypass very weak and bespoke web application firewalls
          that has poorly written permissive regular expressions
        * This tamper script should work against all (?) databases

    >>> import random
    >>> random.seed(0)
    >>> tamper('INSERT')
    'INseRt'
    """

    retVal = payload

    if payload:
        for match in re.finditer(r"[A-Za-z_]+", retVal):
            word = match.group()

            if word.upper() in kb.keywords:
                # FIX: a keyword that cannot yield a mixed-case variant
                # (a single character, or one made only of case-less
                # characters such as '_') would make the re-roll loop
                # below spin forever, because the break condition
                # len(_) > 1 and _ not in (_.lower(), _.upper()) could
                # never hold. Leave such words untouched.
                if len(word) < 2 or word.upper() == word.lower():
                    continue

                while True:
                    _ = ""

                    # Re-roll until the result is genuinely mixed-case, so
                    # the rewritten keyword differs from both its all-lower
                    # and all-upper forms.
                    for i in xrange(len(word)):
                        _ += word[i].upper() if randomRange(0, 1) else word[i].lower()

                    if len(_) > 1 and _ not in (_.lower(), _.upper()):
                        break

                # NOTE: str.replace rewrites every occurrence of the word in
                # the payload, matching the original behavior.
                retVal = retVal.replace(word, _)

    return retVal
ian-garrett/meetMe
refs/heads/master
env/lib/python3.4/site-packages/setuptools/site-patch.py
720
def __boot():
    """Locate and load the *real* 'site' module, then re-apply PYTHONPATH.

    This file shadows the stdlib 'site' module; __boot walks the standard
    (non-PYTHONPATH) part of sys.path to find and execute the genuine one,
    then re-adds PYTHONPATH entries via addsitedir so .pth files there are
    honored, and finally reorders sys.path so new entries land just before
    the first 'system' path entry.
    """
    import sys
    import os
    PYTHONPATH = os.environ.get('PYTHONPATH')
    # On Windows an empty PYTHONPATH means "unset"; normalize to a list.
    if PYTHONPATH is None or (sys.platform=='win32' and not PYTHONPATH):
        PYTHONPATH = []
    else:
        PYTHONPATH = PYTHONPATH.split(os.pathsep)

    pic = getattr(sys,'path_importer_cache',{})
    # stdpath: sys.path minus the leading PYTHONPATH-derived entries.
    stdpath = sys.path[len(PYTHONPATH):]
    mydir = os.path.dirname(__file__)
    #print "searching",stdpath,sys.path

    for item in stdpath:
        if item==mydir or not item:
            continue    # skip if current dir. on Windows, or my own directory
        # Prefer a cached PEP 302 importer for this path entry, if any.
        importer = pic.get(item)
        if importer is not None:
            loader = importer.find_module('site')
            if loader is not None:
                # This should actually reload the current module
                loader.load_module('site')
                break
        else:
            try:
                import imp # Avoid import loop in Python >= 3.3
                stream, path, descr = imp.find_module('site',[item])
            except ImportError:
                continue
            if stream is None:
                continue
            try:
                # This should actually reload the current module
                imp.load_module('site',stream,path,descr)
            finally:
                stream.close()
            break
    else:
        raise ImportError("Couldn't find the real 'site' module")

    #print "loaded", __file__

    # NOTE(review): makepath/addsitedir below come from the real 'site'
    # module loaded into this module's namespace by the reload above —
    # they are not defined in this file.
    known_paths = dict([(makepath(item)[1],1) for item in sys.path]) # 2.2 comp

    oldpos = getattr(sys,'__egginsert',0)   # save old insertion position
    sys.__egginsert = 0                     # and reset the current one

    for item in PYTHONPATH:
        addsitedir(item)

    sys.__egginsert += oldpos           # restore effective old position

    d, nd = makepath(stdpath[0])
    insert_at = None
    new_path = []

    for item in sys.path:
        p, np = makepath(item)

        if np==nd and insert_at is None:
            # We've hit the first 'system' path entry, so added entries go here
            insert_at = len(new_path)

        if np in known_paths or insert_at is None:
            new_path.append(item)
        else:
            # new path after the insert point, back-insert it
            new_path.insert(insert_at, item)
            insert_at += 1

    sys.path[:] = new_path

if __name__=='site':
    __boot()
    del __boot
hsharsha/depot_tools
refs/heads/master
third_party/boto/pyami/copybot.py
102
# Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/ # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. 
#
import boto
from boto.pyami.scriptbase import ScriptBase
import os, StringIO


class CopyBot(ScriptBase):
    """Pyami script that copies every key from one S3 bucket to another.

    Configuration is read from the boto config section named after this
    script: ``src_bucket``, ``dst_bucket``, ``replace_dst`` (overwrite
    existing destination keys), ``copy_acls``, optional destination
    credentials (``dest_aws_access_key_id`` / ``dest_aws_secret_access_key``)
    and ``exit_on_completion``.
    """

    def __init__(self):
        ScriptBase.__init__(self)
        self.wdir = boto.config.get('Pyami', 'working_dir')
        self.log_file = '%s.log' % self.instance_id
        self.log_path = os.path.join(self.wdir, self.log_file)
        boto.set_file_logger(self.name, self.log_path)
        self.src_name = boto.config.get(self.name, 'src_bucket')
        self.dst_name = boto.config.get(self.name, 'dst_bucket')
        self.replace = boto.config.getbool(self.name, 'replace_dst', True)
        s3 = boto.connect_s3()
        self.src = s3.lookup(self.src_name)
        if not self.src:
            boto.log.error('Source bucket does not exist: %s' % self.src_name)
        # Optionally use a different set of credentials for the destination.
        dest_access_key = boto.config.get(self.name, 'dest_aws_access_key_id', None)
        if dest_access_key:
            dest_secret_key = boto.config.get(self.name, 'dest_aws_secret_access_key', None)
            # BUG FIX: the original called boto.connect(...), which does not
            # exist; boto.connect_s3 is the S3 connection factory.
            s3 = boto.connect_s3(dest_access_key, dest_secret_key)
        self.dst = s3.lookup(self.dst_name)
        if not self.dst:
            self.dst = s3.create_bucket(self.dst_name)

    def copy_bucket_acl(self):
        """Copy the bucket-level ACL from src to dst (if copy_acls is set)."""
        if boto.config.get(self.name, 'copy_acls', True):
            acl = self.src.get_xml_acl()
            self.dst.set_xml_acl(acl)

    def copy_key_acl(self, src, dst):
        """Copy one key's ACL from src key to dst key (if copy_acls is set)."""
        if boto.config.get(self.name, 'copy_acls', True):
            acl = src.get_xml_acl()
            dst.set_xml_acl(acl)

    def copy_keys(self):
        """Copy every key from the source bucket to the destination bucket.

        Each key is downloaded to the working directory, uploaded under the
        same name, has its ACL copied, and the temp file is removed. Errors
        are logged (best effort) rather than raised.
        """
        boto.log.info('src=%s' % self.src.name)
        boto.log.info('dst=%s' % self.dst.name)
        key = None  # keep a reference for the error message below
        try:
            for key in self.src:
                if not self.replace:
                    exists = self.dst.lookup(key.name)
                    if exists:
                        boto.log.info('key=%s already exists in %s, skipping' % (key.name, self.dst.name))
                        continue
                boto.log.info('copying %d bytes from key=%s' % (key.size, key.name))
                prefix, base = os.path.split(key.name)
                path = os.path.join(self.wdir, base)
                key.get_contents_to_filename(path)
                new_key = self.dst.new_key(key.name)
                new_key.set_contents_from_filename(path)
                self.copy_key_acl(key, new_key)
                os.unlink(path)
        except Exception:
            # Was a bare 'except:', which also swallowed KeyboardInterrupt /
            # SystemExit; keep the deliberate best-effort logging otherwise.
            # Guard against 'key' never having been bound (iteration failed
            # before the first key), which previously raised NameError here.
            boto.log.exception('Error copying key: %s' % (key.name if key is not None else '<none>'))

    def copy_log(self):
        """Upload this run's log file into the destination bucket."""
        key = self.dst.new_key(self.log_file)
        key.set_contents_from_filename(self.log_path)

    def main(self):
        """Entry point: copy keys, upload the log, notify, then terminate."""
        fp = StringIO.StringIO()
        boto.config.dump_safe(fp)
        self.notify('%s (%s) Starting' % (self.name, self.instance_id), fp.getvalue())
        if self.src and self.dst:
            self.copy_keys()
        if self.dst:
            self.copy_log()
        self.notify('%s (%s) Stopping' % (self.name, self.instance_id),
                    'Copy Operation Complete')
        if boto.config.getbool(self.name, 'exit_on_completion', True):
            ec2 = boto.connect_ec2()
            ec2.terminate_instances([self.instance_id])
shaufi/odoo
refs/heads/8.0
addons/mail/tests/test_mail_features.py
172
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Business Applications # Copyright (c) 2012-TODAY OpenERP S.A. <http://openerp.com> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from ..mail_mail import mail_mail from ..mail_thread import mail_thread from .common import TestMail from openerp.tools import mute_logger, email_split, html2plaintext from openerp.tools.mail import html_sanitize class test_mail(TestMail): def test_000_alias_setup(self): """ Test basic mail.alias setup works, before trying to use them for routing """ cr, uid = self.cr, self.uid self.user_valentin_id = self.res_users.create(cr, uid, {'name': 'Valentin Cognito', 'email': 'valentin.cognito@gmail.com', 'login': 'valentin.cognito', 'alias_name': 'valentin.cognito'}) self.user_valentin = self.res_users.browse(cr, uid, self.user_valentin_id) self.assertEquals(self.user_valentin.alias_name, self.user_valentin.login, "Login should be used as alias") self.user_pagan_id = self.res_users.create(cr, uid, {'name': 'Pagan Le Marchant', 'email': 'plmarchant@gmail.com', 'login': 'plmarchant@gmail.com', 'alias_name': 'plmarchant@gmail.com'}) self.user_pagan = self.res_users.browse(cr, uid, self.user_pagan_id) self.assertEquals(self.user_pagan.alias_name, 
'plmarchant', "If login is an email, the alias should keep only the local part") self.user_barty_id = self.res_users.create(cr, uid, {'name': 'Bartholomew Ironside', 'email': 'barty@gmail.com', 'login': 'b4r+_#_R3wl$$', 'alias_name': 'b4r+_#_R3wl$$'}) self.user_barty = self.res_users.browse(cr, uid, self.user_barty_id) self.assertEquals(self.user_barty.alias_name, 'b4r+_-_r3wl-', 'Disallowed chars should be replaced by hyphens') def test_00_followers_function_field(self): """ Tests designed for the many2many function field 'follower_ids'. We will test to perform writes using the many2many commands 0, 3, 4, 5 and 6. """ cr, uid, user_admin, partner_bert_id, group_pigs = self.cr, self.uid, self.user_admin, self.partner_bert_id, self.group_pigs # Data: create 'disturbing' values in mail.followers: same res_id, other res_model; same res_model, other res_id group_dummy_id = self.mail_group.create(cr, uid, {'name': 'Dummy group'}, {'mail_create_nolog': True}) self.mail_followers.create(cr, uid, {'res_model': 'mail.thread', 'res_id': self.group_pigs_id, 'partner_id': partner_bert_id}) self.mail_followers.create(cr, uid, {'res_model': 'mail.group', 'res_id': group_dummy_id, 'partner_id': partner_bert_id}) # Pigs just created: should be only Admin as follower follower_ids = set([follower.id for follower in group_pigs.message_follower_ids]) self.assertEqual(follower_ids, set([user_admin.partner_id.id]), 'Admin should be the only Pigs fan') # Subscribe Bert through a '4' command group_pigs.write({'message_follower_ids': [(4, partner_bert_id)]}) group_pigs.refresh() follower_ids = set([follower.id for follower in group_pigs.message_follower_ids]) self.assertEqual(follower_ids, set([partner_bert_id, user_admin.partner_id.id]), 'Bert and Admin should be the only Pigs fans') # Unsubscribe Bert through a '3' command group_pigs.write({'message_follower_ids': [(3, partner_bert_id)]}) group_pigs.refresh() follower_ids = set([follower.id for follower in 
group_pigs.message_follower_ids]) self.assertEqual(follower_ids, set([user_admin.partner_id.id]), 'Admin should be the only Pigs fan') # Set followers through a '6' command group_pigs.write({'message_follower_ids': [(6, 0, [partner_bert_id])]}) group_pigs.refresh() follower_ids = set([follower.id for follower in group_pigs.message_follower_ids]) self.assertEqual(follower_ids, set([partner_bert_id]), 'Bert should be the only Pigs fan') # Add a follower created on the fly through a '0' command group_pigs.write({'message_follower_ids': [(0, 0, {'name': 'Patrick Fiori'})]}) partner_patrick_id = self.res_partner.search(cr, uid, [('name', '=', 'Patrick Fiori')])[0] group_pigs.refresh() follower_ids = set([follower.id for follower in group_pigs.message_follower_ids]) self.assertEqual(follower_ids, set([partner_bert_id, partner_patrick_id]), 'Bert and Patrick should be the only Pigs fans') # Finally, unlink through a '5' command group_pigs.write({'message_follower_ids': [(5, 0)]}) group_pigs.refresh() follower_ids = set([follower.id for follower in group_pigs.message_follower_ids]) self.assertFalse(follower_ids, 'Pigs group should not have fans anymore') # Test dummy data has not been altered fol_obj_ids = self.mail_followers.search(cr, uid, [('res_model', '=', 'mail.thread'), ('res_id', '=', self.group_pigs_id)]) follower_ids = set([follower.partner_id.id for follower in self.mail_followers.browse(cr, uid, fol_obj_ids)]) self.assertEqual(follower_ids, set([partner_bert_id]), 'Bert should be the follower of dummy mail.thread data') fol_obj_ids = self.mail_followers.search(cr, uid, [('res_model', '=', 'mail.group'), ('res_id', '=', group_dummy_id)]) follower_ids = set([follower.partner_id.id for follower in self.mail_followers.browse(cr, uid, fol_obj_ids)]) self.assertEqual(follower_ids, set([partner_bert_id, user_admin.partner_id.id]), 'Bert and Admin should be the followers of dummy mail.group data') def test_05_message_followers_and_subtypes(self): """ Tests designed for 
the subscriber API as well as message subtypes """ cr, uid, user_admin, user_raoul, group_pigs = self.cr, self.uid, self.user_admin, self.user_raoul, self.group_pigs # Data: message subtypes self.mail_message_subtype.create(cr, uid, {'name': 'mt_mg_def', 'default': True, 'res_model': 'mail.group'}) self.mail_message_subtype.create(cr, uid, {'name': 'mt_other_def', 'default': True, 'res_model': 'crm.lead'}) self.mail_message_subtype.create(cr, uid, {'name': 'mt_all_def', 'default': True, 'res_model': False}) mt_mg_nodef = self.mail_message_subtype.create(cr, uid, {'name': 'mt_mg_nodef', 'default': False, 'res_model': 'mail.group'}) mt_all_nodef = self.mail_message_subtype.create(cr, uid, {'name': 'mt_all_nodef', 'default': False, 'res_model': False}) default_group_subtypes = self.mail_message_subtype.search(cr, uid, [('default', '=', True), '|', ('res_model', '=', 'mail.group'), ('res_model', '=', False)]) # ---------------------------------------- # CASE1: test subscriptions with subtypes # ---------------------------------------- # Do: subscribe Raoul, should have default subtypes group_pigs.message_subscribe_users([user_raoul.id]) group_pigs.refresh() # Test: 2 followers (Admin and Raoul) follower_ids = [follower.id for follower in group_pigs.message_follower_ids] self.assertEqual(set(follower_ids), set([user_raoul.partner_id.id, user_admin.partner_id.id]), 'message_subscribe: Admin and Raoul should be the only 2 Pigs fans') # Raoul follows default subtypes fol_ids = self.mail_followers.search(cr, uid, [ ('res_model', '=', 'mail.group'), ('res_id', '=', self.group_pigs_id), ('partner_id', '=', user_raoul.partner_id.id) ]) fol_obj = self.mail_followers.browse(cr, uid, fol_ids)[0] fol_subtype_ids = set([subtype.id for subtype in fol_obj.subtype_ids]) self.assertEqual(set(fol_subtype_ids), set(default_group_subtypes), 'message_subscribe: Raoul subscription subtypes are incorrect, should be all default ones') # Do: subscribe Raoul with specified new subtypes 
group_pigs.message_subscribe_users([user_raoul.id], subtype_ids=[mt_mg_nodef]) # Test: 2 followers (Admin and Raoul) follower_ids = [follower.id for follower in group_pigs.message_follower_ids] self.assertEqual(set(follower_ids), set([user_raoul.partner_id.id, user_admin.partner_id.id]), 'message_subscribe: Admin and Raoul should be the only 2 Pigs fans') # Test: 2 lines in mail.followers (no duplicate for Raoul) fol_ids = self.mail_followers.search(cr, uid, [ ('res_model', '=', 'mail.group'), ('res_id', '=', self.group_pigs_id), ]) self.assertEqual(len(fol_ids), 2, 'message_subscribe: subscribing an already-existing follower should not create new entries in mail.followers') # Test: Raoul follows only specified subtypes fol_ids = self.mail_followers.search(cr, uid, [ ('res_model', '=', 'mail.group'), ('res_id', '=', self.group_pigs_id), ('partner_id', '=', user_raoul.partner_id.id) ]) fol_obj = self.mail_followers.browse(cr, uid, fol_ids)[0] fol_subtype_ids = set([subtype.id for subtype in fol_obj.subtype_ids]) self.assertEqual(set(fol_subtype_ids), set([mt_mg_nodef]), 'message_subscribe: Raoul subscription subtypes are incorrect, should be only specified') # Do: Subscribe Raoul without specified subtypes: should not erase existing subscription subtypes group_pigs.message_subscribe_users([user_raoul.id, user_raoul.id]) group_pigs.message_subscribe_users([user_raoul.id]) group_pigs.refresh() # Test: 2 followers (Admin and Raoul) follower_ids = [follower.id for follower in group_pigs.message_follower_ids] self.assertEqual(set(follower_ids), set([user_raoul.partner_id.id, user_admin.partner_id.id]), 'message_subscribe: Admin and Raoul should be the only 2 Pigs fans') # Test: Raoul follows default subtypes fol_ids = self.mail_followers.search(cr, uid, [ ('res_model', '=', 'mail.group'), ('res_id', '=', self.group_pigs_id), ('partner_id', '=', user_raoul.partner_id.id) ]) fol_obj = self.mail_followers.browse(cr, uid, fol_ids)[0] fol_subtype_ids = set([subtype.id for 
subtype in fol_obj.subtype_ids]) self.assertEqual(set(fol_subtype_ids), set([mt_mg_nodef]), 'message_subscribe: Raoul subscription subtypes are incorrect, should be only specified') # Do: Unsubscribe Raoul twice through message_unsubscribe_users group_pigs.message_unsubscribe_users([user_raoul.id, user_raoul.id]) group_pigs.refresh() # Test: 1 follower (Admin) follower_ids = [follower.id for follower in group_pigs.message_follower_ids] self.assertEqual(follower_ids, [user_admin.partner_id.id], 'Admin must be the only Pigs fan') # Test: 1 lines in mail.followers (no duplicate for Raoul) fol_ids = self.mail_followers.search(cr, uid, [ ('res_model', '=', 'mail.group'), ('res_id', '=', self.group_pigs_id) ]) self.assertEqual(len(fol_ids), 1, 'message_subscribe: group should have only 1 entry in mail.follower for 1 follower') # Do: subscribe Admin with subtype_ids group_pigs.message_subscribe_users([uid], [mt_mg_nodef, mt_all_nodef]) fol_ids = self.mail_followers.search(cr, uid, [('res_model', '=', 'mail.group'), ('res_id', '=', self.group_pigs_id), ('partner_id', '=', user_admin.partner_id.id)]) fol_obj = self.mail_followers.browse(cr, uid, fol_ids)[0] fol_subtype_ids = set([subtype.id for subtype in fol_obj.subtype_ids]) self.assertEqual(set(fol_subtype_ids), set([mt_mg_nodef, mt_all_nodef]), 'subscription subtypes are incorrect') # ---------------------------------------- # CASE2: test mail_thread fields # ---------------------------------------- subtype_data = group_pigs._get_subscription_data(None, None)[group_pigs.id]['message_subtype_data'] self.assertEqual(set(subtype_data.keys()), set(['Discussions', 'mt_mg_def', 'mt_all_def', 'mt_mg_nodef', 'mt_all_nodef']), 'mail.group available subtypes incorrect') self.assertFalse(subtype_data['Discussions']['followed'], 'Admin should not follow Discussions in pigs') self.assertTrue(subtype_data['mt_mg_nodef']['followed'], 'Admin should follow mt_mg_nodef in pigs') self.assertTrue(subtype_data['mt_all_nodef']['followed'], 
'Admin should follow mt_all_nodef in pigs') def test_11_notification_url(self): """ Tests designed to test the URL added in notification emails. """ cr, uid, group_pigs = self.cr, self.uid, self.group_pigs # Test URL formatting base_url = self.registry('ir.config_parameter').get_param(cr, uid, 'web.base.url') # Partner data partner_raoul = self.res_partner.browse(cr, uid, self.partner_raoul_id) partner_bert_id = self.res_partner.create(cr, uid, {'name': 'bert'}) partner_bert = self.res_partner.browse(cr, uid, partner_bert_id) # Mail data mail_mail_id = self.mail_mail.create(cr, uid, {'state': 'exception'}) mail = self.mail_mail.browse(cr, uid, mail_mail_id) # Test: link for nobody -> None url = mail_mail._get_partner_access_link(self.mail_mail, cr, uid, mail) self.assertEqual(url, None, 'notification email: mails not send to a specific partner should not have any URL') # Test: link for partner -> None url = mail_mail._get_partner_access_link(self.mail_mail, cr, uid, mail, partner=partner_bert) self.assertEqual(url, None, 'notification email: mails send to a not-user partner should not have any URL') # Test: link for user -> signin url = mail_mail._get_partner_access_link(self.mail_mail, cr, uid, mail, partner=partner_raoul) self.assertIn(base_url, url, 'notification email: link should contain web.base.url') self.assertIn('db=%s' % cr.dbname, url, 'notification email: link should contain database name') self.assertIn('action=mail.action_mail_redirect', url, 'notification email: link should contain the redirect action') self.assertIn('login=%s' % partner_raoul.user_ids[0].login, url, 'notification email: link should contain the user login') # Test: link for user -> with model and res_id mail_mail_id = self.mail_mail.create(cr, uid, {'model': 'mail.group', 'res_id': group_pigs.id}) mail = self.mail_mail.browse(cr, uid, mail_mail_id) url = mail_mail._get_partner_access_link(self.mail_mail, cr, uid, mail, partner=partner_raoul) self.assertIn(base_url, url, 'notification 
email: link should contain web.base.url') self.assertIn('db=%s' % cr.dbname, url, 'notification email: link should contain database name') self.assertIn('action=mail.action_mail_redirect', url, 'notification email: link should contain the redirect action') self.assertIn('login=%s' % partner_raoul.user_ids[0].login, url, 'notification email: link should contain the user login') self.assertIn('model=mail.group', url, 'notification email: link should contain the model when having not notification email on a record') self.assertIn('res_id=%s' % group_pigs.id, url, 'notification email: link should contain the res_id when having not notification email on a record') # Test: link for user -> with model and res_id mail_mail_id = self.mail_mail.create(cr, uid, {'notification': True, 'model': 'mail.group', 'res_id': group_pigs.id}) mail = self.mail_mail.browse(cr, uid, mail_mail_id) url = mail_mail._get_partner_access_link(self.mail_mail, cr, uid, mail, partner=partner_raoul) self.assertIn(base_url, url, 'notification email: link should contain web.base.url') self.assertIn('db=%s' % cr.dbname, url, 'notification email: link should contain database name') self.assertIn('action=mail.action_mail_redirect', url, 'notification email: link should contain the redirect action') self.assertIn('login=%s' % partner_raoul.user_ids[0].login, url, 'notification email: link should contain the user login') self.assertIn('message_id=%s' % mail.mail_message_id.id, url, 'notification email: link based on message should contain the mail_message id') self.assertNotIn('model=mail.group', url, 'notification email: link based on message should not contain model') self.assertNotIn('res_id=%s' % group_pigs.id, url, 'notification email: link based on message should not contain res_id') @mute_logger('openerp.addons.mail.mail_thread', 'openerp.models') def test_12_inbox_redirection(self): """ Tests designed to test the inbox redirection of emails notification URLs. 
""" cr, uid, user_admin, group_pigs = self.cr, self.uid, self.user_admin, self.group_pigs model, act_id = self.ir_model_data.get_object_reference(cr, uid, 'mail', 'action_mail_inbox_feeds') # Data: post a message on pigs msg_id = self.group_pigs.message_post(body='My body', partner_ids=[self.partner_bert_id], type='comment', subtype='mail.mt_comment') # No specific parameters -> should redirect to Inbox action = mail_thread.message_redirect_action(self.mail_thread, cr, self.user_raoul_id, {'params': {}}) self.assertEqual( action.get('type'), 'ir.actions.client', 'URL redirection: action without parameters should redirect to client action Inbox' ) self.assertEqual( action.get('id'), act_id, 'URL redirection: action without parameters should redirect to client action Inbox' ) # Raoul has read access to Pigs -> should redirect to form view of Pigs action = mail_thread.message_redirect_action(self.mail_thread, cr, self.user_raoul_id, {'params': {'message_id': msg_id}}) self.assertEqual( action.get('type'), 'ir.actions.act_window', 'URL redirection: action with message_id for read-accredited user should redirect to Pigs' ) self.assertEqual( action.get('res_id'), group_pigs.id, 'URL redirection: action with message_id for read-accredited user should redirect to Pigs' ) action = mail_thread.message_redirect_action(self.mail_thread, cr, self.user_raoul_id, {'params': {'model': 'mail.group', 'res_id': group_pigs.id}}) self.assertEqual( action.get('type'), 'ir.actions.act_window', 'URL redirection: action with message_id for read-accredited user should redirect to Pigs' ) self.assertEqual( action.get('res_id'), group_pigs.id, 'URL redirection: action with message_id for read-accredited user should redirect to Pigs' ) # Bert has no read access to Pigs -> should redirect to Inbox action = mail_thread.message_redirect_action(self.mail_thread, cr, self.user_bert_id, {'params': {'message_id': msg_id}}) self.assertEqual( action.get('type'), 'ir.actions.client', 'URL redirection: 
action without parameters should redirect to client action Inbox' ) self.assertEqual( action.get('id'), act_id, 'URL redirection: action without parameters should redirect to client action Inbox' ) action = mail_thread.message_redirect_action(self.mail_thread, cr, self.user_bert_id, {'params': {'model': 'mail.group', 'res_id': group_pigs.id}}) self.assertEqual( action.get('type'), 'ir.actions.client', 'URL redirection: action without parameters should redirect to client action Inbox' ) self.assertEqual( action.get('id'), act_id, 'URL redirection: action without parameters should redirect to client action Inbox' ) def test_20_message_post(self): """ Tests designed for message_post. """ cr, uid, user_raoul, group_pigs = self.cr, self.uid, self.user_raoul, self.group_pigs # -------------------------------------------------- # Data creation # -------------------------------------------------- # 0 - Update existing users-partners self.res_users.write(cr, uid, [uid], {'email': 'a@a', 'notify_email': 'always'}) self.res_users.write(cr, uid, [self.user_raoul_id], {'email': 'r@r'}) # 1 - Bert Tartopoils, with email, should receive emails for comments and emails p_b_id = self.res_partner.create(cr, uid, {'name': 'Bert Tartopoils', 'email': 'b@b'}) # 2 - Carine Poilvache, with email, should receive emails for emails p_c_id = self.res_partner.create(cr, uid, {'name': 'Carine Poilvache', 'email': 'c@c', 'notify_email': 'none'}) # 3 - Dédé Grosbedon, without email, to test email verification; should receive emails for every message p_d_id = self.res_partner.create(cr, uid, {'name': 'Dédé Grosbedon', 'email': 'd@d', 'notify_email': 'always'}) # 4 - Attachments attach1_id = self.ir_attachment.create(cr, user_raoul.id, { 'name': 'Attach1', 'datas_fname': 'Attach1', 'datas': 'bWlncmF0aW9uIHRlc3Q=', 'res_model': 'mail.compose.message', 'res_id': 0}) attach2_id = self.ir_attachment.create(cr, user_raoul.id, { 'name': 'Attach2', 'datas_fname': 'Attach2', 'datas': 
'bWlncmF0aW9uIHRlc3Q=', 'res_model': 'mail.compose.message', 'res_id': 0}) attach3_id = self.ir_attachment.create(cr, user_raoul.id, { 'name': 'Attach3', 'datas_fname': 'Attach3', 'datas': 'bWlncmF0aW9uIHRlc3Q=', 'res_model': 'mail.compose.message', 'res_id': 0}) # 5 - Mail data _subject = 'Pigs' _mail_subject = 'Re: %s' % (group_pigs.name) _body1 = '<p>Pigs rules</p>' _body2 = '<html>Pigs rocks</html>' _attachments = [ ('List1', 'My first attachment'), ('List2', 'My second attachment') ] # -------------------------------------------------- # CASE1: post comment + partners + attachments # -------------------------------------------------- # Data: set alias_domain to see emails with alias self.registry('ir.config_parameter').set_param(self.cr, self.uid, 'mail.catchall.domain', 'schlouby.fr') # Data: change Pigs name to test reply_to self.mail_group.write(cr, uid, [self.group_pigs_id], {'name': '"Pigs" !ù $%-'}) # Do: subscribe Raoul new_follower_ids = [self.partner_raoul_id] group_pigs.message_subscribe(new_follower_ids) # Test: group followers = Raoul + uid group_fids = [follower.id for follower in group_pigs.message_follower_ids] test_fids = new_follower_ids + [self.partner_admin_id] self.assertEqual(set(test_fids), set(group_fids), 'message_subscribe: incorrect followers after subscribe') # Do: Raoul message_post on Pigs self._init_mock_build_email() msg1_id = self.mail_group.message_post(cr, user_raoul.id, self.group_pigs_id, body=_body1, subject=_subject, partner_ids=[p_b_id, p_c_id], attachment_ids=[attach1_id, attach2_id], attachments=_attachments, type='comment', subtype='mt_comment') msg = self.mail_message.browse(cr, uid, msg1_id) msg_message_id = msg.message_id msg_pids = [partner.id for partner in msg.notified_partner_ids] msg_aids = [attach.id for attach in msg.attachment_ids] sent_emails = self._build_email_kwargs_list # Test: mail_message: subject and body not modified self.assertEqual(_subject, msg.subject, 'message_post: mail.message subject 
incorrect') self.assertEqual(_body1, msg.body, 'message_post: mail.message body incorrect') # Test: mail_message: notified_partner_ids = group followers + partner_ids - author test_pids = set([self.partner_admin_id, p_b_id, p_c_id]) self.assertEqual(test_pids, set(msg_pids), 'message_post: mail.message notified partners incorrect') # Test: mail_message: attachments (4, attachment_ids + attachments) test_aids = set([attach1_id, attach2_id]) msg_attach_names = set([attach.name for attach in msg.attachment_ids]) test_attach_names = set(['Attach1', 'Attach2', 'List1', 'List2']) self.assertEqual(len(msg_aids), 4, 'message_post: mail.message wrong number of attachments') self.assertEqual(msg_attach_names, test_attach_names, 'message_post: mail.message attachments incorrectly added') self.assertTrue(test_aids.issubset(set(msg_aids)), 'message_post: mail.message attachments duplicated') for attach in msg.attachment_ids: self.assertEqual(attach.res_model, 'mail.group', 'message_post: mail.message attachments were not linked to the document') self.assertEqual(attach.res_id, group_pigs.id, 'message_post: mail.message attachments were not linked to the document') if 'List' in attach.name: self.assertIn((attach.name, attach.datas.decode('base64')), _attachments, 'message_post: mail.message attachment name / data incorrect') dl_attach = self.mail_message.download_attachment(cr, user_raoul.id, id_message=msg.id, attachment_id=attach.id) self.assertIn((dl_attach['filename'], dl_attach['base64'].decode('base64')), _attachments, 'message_post: mail.message download_attachment is incorrect') # Test: followers: same as before (author was already subscribed) group_pigs.refresh() group_fids = [follower.id for follower in group_pigs.message_follower_ids] test_fids = new_follower_ids + [self.partner_admin_id] self.assertEqual(set(test_fids), set(group_fids), 'message_post: wrong followers after posting') # Test: mail_mail: notifications have been deleted 
self.assertFalse(self.mail_mail.search(cr, uid, [('mail_message_id', '=', msg1_id)]), 'message_post: mail.mail notifications should have been auto-deleted!') # Test: notifications emails: to a and b, c is email only, r is author test_emailto = ['Administrator <a@a>', 'Bert Tartopoils <b@b>'] # test_emailto = ['"Followers of -Pigs-" <a@a>', '"Followers of -Pigs-" <b@b>'] self.assertEqual(len(sent_emails), 2, 'message_post: notification emails wrong number of send emails') self.assertEqual(set([m['email_to'][0] for m in sent_emails]), set(test_emailto), 'message_post: notification emails wrong recipients (email_to)') for sent_email in sent_emails: self.assertEqual(sent_email['email_from'], 'Raoul Grosbedon <raoul@schlouby.fr>', 'message_post: notification email wrong email_from: should use alias of sender') self.assertEqual(len(sent_email['email_to']), 1, 'message_post: notification email sent to more than one email address instead of a precise partner') self.assertIn(sent_email['email_to'][0], test_emailto, 'message_post: notification email email_to incorrect') self.assertEqual(sent_email['reply_to'], u'"YourCompany \\"Pigs\\" !ù $%-" <group+pigs@schlouby.fr>', 'message_post: notification email reply_to incorrect') self.assertEqual(_subject, sent_email['subject'], 'message_post: notification email subject incorrect') self.assertIn(_body1, sent_email['body'], 'message_post: notification email body incorrect') self.assertIn('Pigs rules', sent_email['body_alternative'], 'message_post: notification email body alternative should contain the body') self.assertNotIn('<p>', sent_email['body_alternative'], 'message_post: notification email body alternative still contains html') self.assertFalse(sent_email['references'], 'message_post: references should be False when sending a message that is not a reply') # Test: notification linked to this message = group followers = notified_partner_ids notif_ids = self.mail_notification.search(cr, uid, [('message_id', '=', msg1_id)]) 
notif_pids = set([notif.partner_id.id for notif in self.mail_notification.browse(cr, uid, notif_ids)]) self.assertEqual(notif_pids, test_pids, 'message_post: mail.message created mail.notification incorrect') # Data: Pigs name back to normal self.mail_group.write(cr, uid, [self.group_pigs_id], {'name': 'Pigs'}) # -------------------------------------------------- # CASE2: reply + parent_id + parent notification # -------------------------------------------------- # Data: remove alias_domain to see emails with alias param_ids = self.registry('ir.config_parameter').search(cr, uid, [('key', '=', 'mail.catchall.domain')]) self.registry('ir.config_parameter').unlink(cr, uid, param_ids) # Do: Raoul message_post on Pigs self._init_mock_build_email() msg2_id = self.mail_group.message_post(cr, user_raoul.id, self.group_pigs_id, body=_body2, type='email', subtype='mt_comment', partner_ids=[p_d_id], parent_id=msg1_id, attachment_ids=[attach3_id], context={'mail_post_autofollow': True}) msg = self.mail_message.browse(cr, uid, msg2_id) msg_pids = [partner.id for partner in msg.notified_partner_ids] msg_aids = [attach.id for attach in msg.attachment_ids] sent_emails = self._build_email_kwargs_list # Test: mail_message: subject is False, body, parent_id is msg_id self.assertEqual(msg.subject, False, 'message_post: mail.message subject incorrect') self.assertEqual(msg.body, html_sanitize(_body2), 'message_post: mail.message body incorrect') self.assertEqual(msg.parent_id.id, msg1_id, 'message_post: mail.message parent_id incorrect') # Test: mail_message: notified_partner_ids = group followers test_pids = [self.partner_admin_id, p_d_id] self.assertEqual(set(test_pids), set(msg_pids), 'message_post: mail.message partners incorrect') # Test: mail_message: notifications linked to this message = group followers = notified_partner_ids notif_ids = self.mail_notification.search(cr, uid, [('message_id', '=', msg2_id)]) notif_pids = [notif.partner_id.id for notif in 
self.mail_notification.browse(cr, uid, notif_ids)] self.assertEqual(set(test_pids), set(notif_pids), 'message_post: mail.message notification partners incorrect') # Test: mail_mail: notifications deleted self.assertFalse(self.mail_mail.search(cr, uid, [('mail_message_id', '=', msg2_id)]), 'mail.mail notifications should have been auto-deleted!') # Test: emails send by server (to a, b, c, d) test_emailto = [u'Administrator <a@a>', u'Bert Tartopoils <b@b>', u'Carine Poilvache <c@c>', u'D\xe9d\xe9 Grosbedon <d@d>'] # test_emailto = [u'"Followers of Pigs" <a@a>', u'"Followers of Pigs" <b@b>', u'"Followers of Pigs" <c@c>', u'"Followers of Pigs" <d@d>'] # self.assertEqual(len(sent_emails), 3, 'sent_email number of sent emails incorrect') for sent_email in sent_emails: self.assertEqual(sent_email['email_from'], 'Raoul Grosbedon <r@r>', 'message_post: notification email wrong email_from: should use email of sender when no alias domain set') self.assertEqual(len(sent_email['email_to']), 1, 'message_post: notification email sent to more than one email address instead of a precise partner') self.assertIn(sent_email['email_to'][0], test_emailto, 'message_post: notification email email_to incorrect') self.assertEqual(email_split(sent_email['reply_to']), ['r@r'], # was '"Followers of Pigs" <r@r>', but makes no sense 'message_post: notification email reply_to incorrect: should have raoul email') self.assertEqual(_mail_subject, sent_email['subject'], 'message_post: notification email subject incorrect') self.assertIn(html_sanitize(_body2), sent_email['body'], 'message_post: notification email does not contain the body') self.assertIn('Pigs rocks', sent_email['body_alternative'], 'message_post: notification email body alternative should contain the body') self.assertNotIn('<p>', sent_email['body_alternative'], 'message_post: notification email body alternative still contains html') self.assertIn(msg_message_id, sent_email['references'], 'message_post: notification email references 
lacks parent message message_id') # Test: attachments + download for attach in msg.attachment_ids: self.assertEqual(attach.res_model, 'mail.group', 'message_post: mail.message attachment res_model incorrect') self.assertEqual(attach.res_id, self.group_pigs_id, 'message_post: mail.message attachment res_id incorrect') # Test: Dédé has been notified -> should also have been notified of the parent message msg = self.mail_message.browse(cr, uid, msg1_id) msg_pids = set([partner.id for partner in msg.notified_partner_ids]) test_pids = set([self.partner_admin_id, p_b_id, p_c_id, p_d_id]) self.assertEqual(test_pids, msg_pids, 'message_post: mail.message parent notification not created') # Do: reply to last message msg3_id = self.mail_group.message_post(cr, user_raoul.id, self.group_pigs_id, body='Test', parent_id=msg2_id) msg = self.mail_message.browse(cr, uid, msg3_id) # Test: check that its parent will be the first message self.assertEqual(msg.parent_id.id, msg1_id, 'message_post did not flatten the thread structure') def test_25_message_compose_wizard(self): """ Tests designed for the mail.compose.message wizard. 
""" cr, uid, user_raoul, group_pigs = self.cr, self.uid, self.user_raoul, self.group_pigs mail_compose = self.registry('mail.compose.message') # -------------------------------------------------- # Data creation # -------------------------------------------------- # 0 - Update existing users-partners self.res_users.write(cr, uid, [uid], {'email': 'a@a'}) self.res_users.write(cr, uid, [self.user_raoul_id], {'email': 'r@r'}) # 1 - Bert Tartopoils, with email, should receive emails for comments and emails p_b_id = self.res_partner.create(cr, uid, {'name': 'Bert Tartopoils', 'email': 'b@b'}) # 2 - Carine Poilvache, with email, should receive emails for emails p_c_id = self.res_partner.create(cr, uid, {'name': 'Carine Poilvache', 'email': 'c@c', 'notify_email': 'always'}) # 3 - Dédé Grosbedon, without email, to test email verification; should receive emails for every message p_d_id = self.res_partner.create(cr, uid, {'name': 'Dédé Grosbedon', 'email': 'd@d', 'notify_email': 'always'}) # 4 - Create a Bird mail.group, that will be used to test mass mailing group_bird_id = self.mail_group.create(cr, uid, { 'name': 'Bird', 'description': 'Bird resistance', }, context={'mail_create_nolog': True}) group_bird = self.mail_group.browse(cr, uid, group_bird_id) # 5 - Mail data _subject = 'Pigs' _body = 'Pigs <b>rule</b>' _reply_subject = 'Re: %s' % _subject _attachments = [ {'name': 'First', 'datas_fname': 'first.txt', 'datas': 'My first attachment'.encode('base64')}, {'name': 'Second', 'datas_fname': 'second.txt', 'datas': 'My second attachment'.encode('base64')} ] _attachments_test = [('first.txt', 'My first attachment'), ('second.txt', 'My second attachment')] # 6 - Subscribe Bert to Pigs group_pigs.message_subscribe([p_b_id]) # -------------------------------------------------- # CASE1: wizard + partners + context keys # -------------------------------------------------- # Do: Raoul wizard-composes on Pigs with auto-follow for partners, not for author compose_id = 
mail_compose.create(cr, user_raoul.id, { 'subject': _subject, 'body': _body, 'partner_ids': [(4, p_c_id), (4, p_d_id)], }, context={ 'default_composition_mode': 'comment', 'default_model': 'mail.group', 'default_res_id': self.group_pigs_id, }) compose = mail_compose.browse(cr, uid, compose_id) # Test: mail.compose.message: composition_mode, model, res_id self.assertEqual(compose.composition_mode, 'comment', 'compose wizard: mail.compose.message incorrect composition_mode') self.assertEqual(compose.model, 'mail.group', 'compose wizard: mail.compose.message incorrect model') self.assertEqual(compose.res_id, self.group_pigs_id, 'compose wizard: mail.compose.message incorrect res_id') # Do: Post the comment mail_compose.send_mail(cr, user_raoul.id, [compose_id], {'mail_post_autofollow': True, 'mail_create_nosubscribe': True}) group_pigs.refresh() message = group_pigs.message_ids[0] # Test: mail.group: followers (c and d added by auto follow key; raoul not added by nosubscribe key) pigs_pids = [p.id for p in group_pigs.message_follower_ids] test_pids = [self.partner_admin_id, p_b_id, p_c_id, p_d_id] self.assertEqual(set(pigs_pids), set(test_pids), 'compose wizard: mail_post_autofollow and mail_create_nosubscribe context keys not correctly taken into account') # Test: mail.message: subject, body inside p self.assertEqual(message.subject, _subject, 'compose wizard: mail.message incorrect subject') self.assertEqual(message.body, '<p>%s</p>' % _body, 'compose wizard: mail.message incorrect body') # Test: mail.message: notified_partner_ids = admin + bert (followers) + c + d (recipients) msg_pids = [partner.id for partner in message.notified_partner_ids] test_pids = [self.partner_admin_id, p_b_id, p_c_id, p_d_id] self.assertEqual(set(msg_pids), set(test_pids), 'compose wizard: mail.message notified_partner_ids incorrect') # -------------------------------------------------- # CASE2: reply + attachments # -------------------------------------------------- # Do: Reply with 
attachments compose_id = mail_compose.create(cr, user_raoul.id, { 'attachment_ids': [(0, 0, _attachments[0]), (0, 0, _attachments[1])] }, context={ 'default_composition_mode': 'comment', 'default_res_id': self.group_pigs_id, 'default_parent_id': message.id }) compose = mail_compose.browse(cr, uid, compose_id) # Test: mail.compose.message: model, res_id, parent_id self.assertEqual(compose.model, 'mail.group', 'compose wizard: mail.compose.message incorrect model') self.assertEqual(compose.res_id, self.group_pigs_id, 'compose wizard: mail.compose.message incorrect res_id') self.assertEqual(compose.parent_id.id, message.id, 'compose wizard: mail.compose.message incorrect parent_id') # Test: mail.compose.message: subject as Re:.., body, parent_id self.assertEqual(compose.subject, _reply_subject, 'compose wizard: mail.compose.message incorrect subject') self.assertFalse(compose.body, 'compose wizard: mail.compose.message body should not contain parent message body') self.assertEqual(compose.parent_id and compose.parent_id.id, message.id, 'compose wizard: mail.compose.message parent_id incorrect') # Test: mail.compose.message: attachments for attach in compose.attachment_ids: self.assertIn((attach.datas_fname, attach.datas.decode('base64')), _attachments_test, 'compose wizard: mail.message attachment name / data incorrect') # -------------------------------------------------- # CASE3: mass_mail on Pigs and Bird # -------------------------------------------------- # Do: Compose in mass_mail_mode on pigs and bird compose_id = mail_compose.create( cr, user_raoul.id, { 'subject': _subject, 'body': '${object.description}', 'partner_ids': [(4, p_c_id), (4, p_d_id)], }, context={ 'default_composition_mode': 'mass_mail', 'default_model': 'mail.group', 'default_res_id': False, 'active_ids': [self.group_pigs_id, group_bird_id], }) compose = mail_compose.browse(cr, uid, compose_id) # Do: Post the comment, get created message for each group mail_compose.send_mail(cr, user_raoul.id, 
[compose_id], context={ 'default_res_id': -1, 'active_ids': [self.group_pigs_id, group_bird_id] }) # check mail_mail mail_mail_ids = self.mail_mail.search(cr, uid, [('subject', '=', _subject)]) for mail_mail in self.mail_mail.browse(cr, uid, mail_mail_ids): self.assertEqual(set([p.id for p in mail_mail.recipient_ids]), set([p_c_id, p_d_id]), 'compose wizard: mail_mail mass mailing: mail.mail in mass mail incorrect recipients') # check logged messages group_pigs.refresh() group_bird.refresh() message1 = group_pigs.message_ids[0] message2 = group_bird.message_ids[0] # Test: Pigs and Bird did receive their message test_msg_ids = self.mail_message.search(cr, uid, [], limit=2) self.assertIn(message1.id, test_msg_ids, 'compose wizard: Pigs did not receive its mass mailing message') self.assertIn(message2.id, test_msg_ids, 'compose wizard: Bird did not receive its mass mailing message') # Test: mail.message: subject, body, subtype, notified partners (nobody + specific recipients) self.assertEqual(message1.subject, _subject, 'compose wizard: message_post: mail.message in mass mail subject incorrect') self.assertEqual(message1.body, '<p>%s</p>' % group_pigs.description, 'compose wizard: message_post: mail.message in mass mail body incorrect') # self.assertEqual(set([p.id for p in message1.notified_partner_ids]), set([p_c_id, p_d_id]), # 'compose wizard: message_post: mail.message in mass mail incorrect notified partners') self.assertEqual(message2.subject, _subject, 'compose wizard: message_post: mail.message in mass mail subject incorrect') self.assertEqual(message2.body, '<p>%s</p>' % group_bird.description, 'compose wizard: message_post: mail.message in mass mail body incorrect') # self.assertEqual(set([p.id for p in message2.notified_partner_ids]), set([p_c_id, p_d_id]), # 'compose wizard: message_post: mail.message in mass mail incorrect notified partners') # Test: mail.group followers: author not added as follower in mass mail mode pigs_pids = [p.id for p in 
group_pigs.message_follower_ids] test_pids = [self.partner_admin_id, p_b_id, p_c_id, p_d_id] self.assertEqual(set(pigs_pids), set(test_pids), 'compose wizard: mail_post_autofollow and mail_create_nosubscribe context keys not correctly taken into account') bird_pids = [p.id for p in group_bird.message_follower_ids] test_pids = [self.partner_admin_id] self.assertEqual(set(bird_pids), set(test_pids), 'compose wizard: mail_post_autofollow and mail_create_nosubscribe context keys not correctly taken into account') # Do: Compose in mass_mail, coming from list_view, we have an active_domain that should be supported compose_id = mail_compose.create(cr, user_raoul.id, { 'subject': _subject, 'body': '${object.description}', 'partner_ids': [(4, p_c_id), (4, p_d_id)], }, context={ 'default_composition_mode': 'mass_mail', 'default_model': 'mail.group', 'default_res_id': False, 'active_ids': [self.group_pigs_id], 'active_domain': [('name', 'in', ['Pigs', 'Bird'])], }) compose = mail_compose.browse(cr, uid, compose_id) # Do: Post the comment, get created message for each group mail_compose.send_mail( cr, user_raoul.id, [compose_id], context={ 'default_res_id': -1, 'active_ids': [self.group_pigs_id, group_bird_id] }) group_pigs.refresh() group_bird.refresh() message1 = group_pigs.message_ids[0] message2 = group_bird.message_ids[0] # Test: Pigs and Bird did receive their message test_msg_ids = self.mail_message.search(cr, uid, [], limit=2) self.assertIn(message1.id, test_msg_ids, 'compose wizard: Pigs did not receive its mass mailing message') self.assertIn(message2.id, test_msg_ids, 'compose wizard: Bird did not receive its mass mailing message') def test_30_needaction(self): """ Tests for mail.message needaction. 
""" cr, uid, user_admin, user_raoul, group_pigs = self.cr, self.uid, self.user_admin, self.user_raoul, self.group_pigs na_admin_base = self.mail_message._needaction_count(cr, uid, domain=[]) na_demo_base = self.mail_message._needaction_count(cr, user_raoul.id, domain=[]) # Test: number of unread notification = needaction on mail.message notif_ids = self.mail_notification.search(cr, uid, [ ('partner_id', '=', user_admin.partner_id.id), ('is_read', '=', False) ]) na_count = self.mail_message._needaction_count(cr, uid, domain=[]) self.assertEqual(len(notif_ids), na_count, 'unread notifications count does not match needaction count') # Do: post 2 message on group_pigs as admin, 3 messages as demo user for dummy in range(2): group_pigs.message_post(body='My Body', subtype='mt_comment') raoul_pigs = group_pigs.sudo(user_raoul) for dummy in range(3): raoul_pigs.message_post(body='My Demo Body', subtype='mt_comment') # Test: admin has 3 new notifications (from demo), and 3 new needaction notif_ids = self.mail_notification.search(cr, uid, [ ('partner_id', '=', user_admin.partner_id.id), ('is_read', '=', False) ]) self.assertEqual(len(notif_ids), na_admin_base + 3, 'Admin should have 3 new unread notifications') na_admin = self.mail_message._needaction_count(cr, uid, domain=[]) na_admin_group = self.mail_message._needaction_count(cr, uid, domain=[('model', '=', 'mail.group'), ('res_id', '=', self.group_pigs_id)]) self.assertEqual(na_admin, na_admin_base + 3, 'Admin should have 3 new needaction') self.assertEqual(na_admin_group, 3, 'Admin should have 3 needaction related to Pigs') # Test: demo has 0 new notifications (not a follower, not receiving its own messages), and 0 new needaction notif_ids = self.mail_notification.search(cr, uid, [ ('partner_id', '=', user_raoul.partner_id.id), ('is_read', '=', False) ]) self.assertEqual(len(notif_ids), na_demo_base + 0, 'Demo should have 0 new unread notifications') na_demo = self.mail_message._needaction_count(cr, user_raoul.id, 
domain=[]) na_demo_group = self.mail_message._needaction_count(cr, user_raoul.id, domain=[('model', '=', 'mail.group'), ('res_id', '=', self.group_pigs_id)]) self.assertEqual(na_demo, na_demo_base + 0, 'Demo should have 0 new needaction') self.assertEqual(na_demo_group, 0, 'Demo should have 0 needaction related to Pigs') def test_40_track_field(self): """ Testing auto tracking of fields. """ def _strip_string_spaces(body): return body.replace(' ', '').replace('\n', '') # Data: subscribe Raoul to Pigs, because he will change the public attribute and may loose access to the record cr, uid = self.cr, self.uid self.mail_group.message_subscribe_users(cr, uid, [self.group_pigs_id], [self.user_raoul_id]) # Data: res.users.group, to test group_public_id automatic logging group_system_ref = self.registry('ir.model.data').get_object_reference(cr, uid, 'base', 'group_system') group_system_id = group_system_ref and group_system_ref[1] or False # Data: custom subtypes mt_private_id = self.mail_message_subtype.create(cr, uid, {'name': 'private', 'description': 'Private public'}) self.ir_model_data.create(cr, uid, {'name': 'mt_private', 'model': 'mail.message.subtype', 'module': 'mail', 'res_id': mt_private_id}) mt_name_supername_id = self.mail_message_subtype.create(cr, uid, {'name': 'name_supername', 'description': 'Supername name'}) self.ir_model_data.create(cr, uid, {'name': 'mt_name_supername', 'model': 'mail.message.subtype', 'module': 'mail', 'res_id': mt_name_supername_id}) mt_group_public_set_id = self.mail_message_subtype.create(cr, uid, {'name': 'group_public_set', 'description': 'Group set'}) self.ir_model_data.create(cr, uid, {'name': 'mt_group_public_set', 'model': 'mail.message.subtype', 'module': 'mail', 'res_id': mt_group_public_set_id}) mt_group_public_id = self.mail_message_subtype.create(cr, uid, {'name': 'group_public', 'description': 'Group changed'}) self.ir_model_data.create(cr, uid, {'name': 'mt_group_public', 'model': 'mail.message.subtype', 'module': 
'mail', 'res_id': mt_group_public_id}) # Data: alter mail_group model for testing purposes (test on classic, selection and many2one fields) cls = type(self.mail_group) self.assertNotIn('_track', cls.__dict__) cls._track = { 'public': { 'mail.mt_private': lambda self, cr, uid, obj, ctx=None: obj.public == 'private', }, 'name': { 'mail.mt_name_supername': lambda self, cr, uid, obj, ctx=None: obj.name == 'supername', }, 'group_public_id': { 'mail.mt_group_public_set': lambda self, cr, uid, obj, ctx=None: obj.group_public_id, 'mail.mt_group_public': lambda self, cr, uid, obj, ctx=None: True, }, } visibility = {'public': 'onchange', 'name': 'always', 'group_public_id': 'onchange'} for key in visibility: self.assertFalse(hasattr(getattr(cls, key), 'track_visibility')) getattr(cls, key).track_visibility = visibility[key] @self.addCleanup def cleanup(): delattr(cls, '_track') for key in visibility: del getattr(cls, key).track_visibility # Test: change name -> always tracked, not related to a subtype self.mail_group.write(cr, self.user_raoul_id, [self.group_pigs_id], {'public': 'public'}) self.group_pigs.refresh() self.assertEqual(len(self.group_pigs.message_ids), 1, 'tracked: a message should have been produced') # Test: first produced message: no subtype, name change tracked last_msg = self.group_pigs.message_ids[-1] self.assertFalse(last_msg.subtype_id, 'tracked: message should not have been linked to a subtype') self.assertIn(u'SelectedGroupOnly\u2192Public', _strip_string_spaces(last_msg.body), 'tracked: message body incorrect') self.assertIn('Pigs', _strip_string_spaces(last_msg.body), 'tracked: message body does not hold always tracked field') # Test: change name as supername, public as private -> 2 subtypes self.mail_group.write(cr, self.user_raoul_id, [self.group_pigs_id], {'name': 'supername', 'public': 'private'}) self.group_pigs.refresh() self.assertEqual(len(self.group_pigs.message_ids), 3, 'tracked: two messages should have been produced') # Test: first 
produced message: mt_name_supername last_msg = self.group_pigs.message_ids[-2] self.assertEqual(last_msg.subtype_id.id, mt_private_id, 'tracked: message should be linked to mt_private subtype') self.assertIn('Private public', last_msg.body, 'tracked: message body does not hold the subtype description') self.assertIn(u'Pigs\u2192supername', _strip_string_spaces(last_msg.body), 'tracked: message body incorrect') # Test: second produced message: mt_name_supername last_msg = self.group_pigs.message_ids[-3] self.assertEqual(last_msg.subtype_id.id, mt_name_supername_id, 'tracked: message should be linked to mt_name_supername subtype') self.assertIn('Supername name', last_msg.body, 'tracked: message body does not hold the subtype description') self.assertIn(u'Public\u2192Private', _strip_string_spaces(last_msg.body), 'tracked: message body incorrect') self.assertIn(u'Pigs\u2192supername', _strip_string_spaces(last_msg.body), 'tracked feature: message body does not hold always tracked field') # Test: change public as public, group_public_id -> 2 subtypes, name always tracked self.mail_group.write(cr, self.user_raoul_id, [self.group_pigs_id], {'public': 'public', 'group_public_id': group_system_id}) self.group_pigs.refresh() self.assertEqual(len(self.group_pigs.message_ids), 5, 'tracked: one message should have been produced') # Test: first produced message: mt_group_public_set_id, with name always tracked, public tracked on change last_msg = self.group_pigs.message_ids[-4] self.assertEqual(last_msg.subtype_id.id, mt_group_public_set_id, 'tracked: message should be linked to mt_group_public_set_id') self.assertIn('Group set', last_msg.body, 'tracked: message body does not hold the subtype description') self.assertIn(u'Private\u2192Public', _strip_string_spaces(last_msg.body), 'tracked: message body does not hold changed tracked field') self.assertIn(u'HumanResources/Employee\u2192Administration/Settings', _strip_string_spaces(last_msg.body), 'tracked: message body does not 
hold always tracked field') # Test: second produced message: mt_group_public_id, with name always tracked, public tracked on change last_msg = self.group_pigs.message_ids[-5] self.assertEqual(last_msg.subtype_id.id, mt_group_public_id, 'tracked: message should be linked to mt_group_public_id') self.assertIn('Group changed', last_msg.body, 'tracked: message body does not hold the subtype description') self.assertIn(u'Private\u2192Public', _strip_string_spaces(last_msg.body), 'tracked: message body does not hold changed tracked field') self.assertIn(u'HumanResources/Employee\u2192Administration/Settings', _strip_string_spaces(last_msg.body), 'tracked: message body does not hold always tracked field') # Test: change group_public_id to False -> 1 subtype, name always tracked self.mail_group.write(cr, self.user_raoul_id, [self.group_pigs_id], {'group_public_id': False}) self.group_pigs.refresh() self.assertEqual(len(self.group_pigs.message_ids), 6, 'tracked: one message should have been produced') # Test: first produced message: mt_group_public_set_id, with name always tracked, public tracked on change last_msg = self.group_pigs.message_ids[-6] self.assertEqual(last_msg.subtype_id.id, mt_group_public_id, 'tracked: message should be linked to mt_group_public_id') self.assertIn('Group changed', last_msg.body, 'tracked: message body does not hold the subtype description') self.assertIn(u'Administration/Settings\u2192', _strip_string_spaces(last_msg.body), 'tracked: message body does not hold always tracked field') # Test: change not tracked field, no tracking message self.mail_group.write(cr, self.user_raoul_id, [self.group_pigs_id], {'description': 'Dummy'}) self.group_pigs.refresh() self.assertEqual(len(self.group_pigs.message_ids), 6, 'tracked: No message should have been produced')
bartscheers/tkp
refs/heads/master
tests/test_database/test_alchemy.py
1
import unittest
import logging

import tkp.db
import tkp.db.model
import tkp.db.alchemy
from tkp.testutil.alchemy import gen_band, gen_dataset, gen_skyregion,\
    gen_lightcurve

logging.basicConfig(level=logging.INFO)
logging.getLogger('sqlalchemy.engine').setLevel(logging.WARNING)


class TestApi(unittest.TestCase):
    """Exercise the SQLAlchemy query helpers in ``tkp.db.alchemy``.

    Fixture layout (built in :meth:`setUp`): two datasets, each with one
    lightcurve per band for two bands, so any per-dataset query that groups
    by band should see exactly two rows.
    """

    @classmethod
    def setUpClass(cls):
        cls.db = tkp.db.Database()
        cls.db.connect()

    def setUp(self):
        self.session = self.db.Session()

        # NOTE(review): 150**6 / 160**6 look as if they were meant to be
        # 150e6 / 160e6 (Hz) -- confirm against gen_band's contract.
        band_a = gen_band(central=150**6)
        band_b = gen_band(central=160**6)
        self.dataset1 = gen_dataset('sqlalchemy test')
        self.dataset2 = gen_dataset('sqlalchemy test')
        region1 = gen_skyregion(self.dataset1)
        region2 = gen_skyregion(self.dataset2)

        # One lightcurve per (dataset, band) pair; insertion order matches
        # the original fixture: ds1/band_a, ds1/band_b, ds2/band_a, ds2/band_b.
        records = []
        for dataset, region in ((self.dataset1, region1),
                                (self.dataset2, region2)):
            for band in (band_a, band_b):
                records += gen_lightcurve(band, dataset, region)
        self.session.add_all(records)
        self.session.flush()
        self.session.commit()

    def test_last_assoc_timestamps(self):
        subquery = tkp.db.alchemy._last_assoc_timestamps(self.session,
                                                         self.dataset1)
        rows = self.session.query(subquery).all()
        self.assertEqual(len(rows), 2)  # one row per band

    def test_last_assoc_per_band(self):
        subquery = tkp.db.alchemy._last_assoc_per_band(self.session,
                                                       self.dataset1)
        rows = self.session.query(subquery).all()
        self.assertEqual(len(rows), 2)  # one row per band

    def test_last_ts_fmax(self):
        subquery = tkp.db.alchemy._last_ts_fmax(self.session, self.dataset1)
        first_row = self.session.query(subquery).all()[0]
        self.assertEqual(first_row.max_flux, 0.01)

    def test_newsrc_trigger(self):
        # Smoke test only: the query must build and execute without error.
        subquery = tkp.db.alchemy._newsrc_trigger(self.session, self.dataset1)
        self.session.query(subquery).all()

    def test_combined(self):
        subquery = tkp.db.alchemy._combined(self.session, self.dataset1)
        first_row = list(self.session.query(subquery).all()[0])
        # Ignore the columns at these positions (presumably ids/keys whose
        # values vary per run -- unverifiable from this test alone).
        skip = (0, 5, 6, 10, 11, 16)
        values = [v for i, v in enumerate(first_row) if i not in skip]
        expected = [1.0, 1.0, 1.0, 1.0, 1, 0.0, 0.0, None, None, 0.01, 0.01]
        self.assertEqual(values, expected)

    def test_transient(self):
        rows = tkp.db.alchemy.transients(self.session, self.dataset1).all()
        self.assertEqual(len(rows), 2)

    def test_transient_region(self):
        """Ra & Decl filtering"""
        inside = tkp.db.alchemy.transients(self.session, self.dataset1,
                                           ra_range=(0, 2),
                                           decl_range=(0, 2)).all()
        self.assertEqual(len(inside), 2)

        outside = tkp.db.alchemy.transients(self.session, self.dataset1,
                                            ra_range=(20, 22),
                                            decl_range=(20, 22)).all()
        self.assertEqual(len(outside), 0)

    def test_transient_cutoff(self):
        """V_int & eta_int filtering"""
        unfiltered = tkp.db.alchemy.transients(self.session, self.dataset1,
                                               v_int_min=0,
                                               eta_int_min=0).all()
        self.assertEqual(len(unfiltered), 2)

        high_v = tkp.db.alchemy.transients(self.session, self.dataset1,
                                           v_int_min=1000,
                                           eta_int_min=0)
        self.assertEqual(len(high_v.all()), 0)

        high_eta = tkp.db.alchemy.transients(self.session, self.dataset1,
                                             v_int_min=0,
                                             eta_int_min=1000).all()
        self.assertEqual(len(high_eta), 0)

    def test_transient_newsource(self):
        """Check if we can filter on new source transients only"""
        rows = tkp.db.alchemy.transients(self.session, self.dataset1,
                                         new_src_only=True).all()
        self.assertEqual(len(rows), 2)
sidzan/netforce
refs/heads/master
netforce_general/netforce_general/controllers/inline_help_body.py
4
# Copyright (c) 2012-2015 Netforce Co. Ltd.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.

from netforce.controller import Controller
from netforce.template import render
from netforce.model import get_model
from netforce.database import get_connection, get_active_db
from netforce.access import set_active_user


class InlineHelpBody(Controller):
    """Serve the raw body of the inline help page attached to an action.

    Handles ``GET /inline_help_body?action=<action_name>`` by looking up the
    ``inline.help`` record whose ``action`` field matches and writing its
    ``content`` to the response.
    """
    _path = "/inline_help_body"

    def get(self):
        db = get_connection()
        try:
            # Look up the help page as the admin user (uid 1) so access
            # rules don't hide it from the requesting user.
            set_active_user(1)
            action = self.get_argument("action")
            res = get_model("inline.help").search([["action", "=", action]])
            if not res:
                raise Exception("Inline help not found for action '%s'" % action)
            page_id = res[0]
            page = get_model("inline.help").browse(page_id)
            self.write(page.content)
            db.commit()
        except Exception:
            # Narrowed from a bare ``except:`` so KeyboardInterrupt and
            # SystemExit are no longer swallowed.  The original best-effort
            # behavior is kept: log the traceback, roll back, and return an
            # empty response rather than crashing the request.
            import traceback
            traceback.print_exc()
            db.rollback()


InlineHelpBody.register()
ihsanudin/odoo
refs/heads/8.0
addons/hw_escpos/controllers/main.py
26
# -*- coding: utf-8 -*- import commands import logging import simplejson import os import os.path import io import base64 import openerp import time import random import math import md5 import openerp.addons.hw_proxy.controllers.main as hw_proxy import pickle import re import subprocess import traceback try: from .. escpos import * from .. escpos.exceptions import * from .. escpos.printer import Usb except ImportError: escpos = printer = None from threading import Thread, Lock from Queue import Queue, Empty try: import usb.core except ImportError: usb = None from PIL import Image from openerp import http from openerp.http import request from openerp.tools.translate import _ _logger = logging.getLogger(__name__) # workaround https://bugs.launchpad.net/openobject-server/+bug/947231 # related to http://bugs.python.org/issue7980 from datetime import datetime datetime.strptime('2012-01-01', '%Y-%m-%d') class EscposDriver(Thread): def __init__(self): Thread.__init__(self) self.queue = Queue() self.lock = Lock() self.status = {'status':'connecting', 'messages':[]} def supported_devices(self): if not os.path.isfile('escpos_devices.pickle'): return supported_devices.device_list else: try: f = open('escpos_devices.pickle','r') return pickle.load(f) f.close() except Exception as e: self.set_status('error',str(e)) return supported_devices.device_list def add_supported_device(self,device_string): r = re.compile('[0-9A-Fa-f]{4}:[0-9A-Fa-f]{4}'); match = r.search(device_string) if match: match = match.group().split(':') vendor = int(match[0],16) product = int(match[1],16) name = device_string.split('ID') if len(name) >= 2: name = name[1] else: name = name[0] _logger.info('ESC/POS: adding support for device: '+match[0]+':'+match[1]+' '+name) device_list = supported_devices.device_list[:] if os.path.isfile('escpos_devices.pickle'): try: f = open('escpos_devices.pickle','r') device_list = pickle.load(f) f.close() except Exception as e: self.set_status('error',str(e)) 
device_list.append({ 'vendor': vendor, 'product': product, 'name': name, }) try: f = open('escpos_devices.pickle','w+') f.seek(0) pickle.dump(device_list,f) f.close() except Exception as e: self.set_status('error',str(e)) def connected_usb_devices(self): connected = [] for device in self.supported_devices(): if usb.core.find(idVendor=device['vendor'], idProduct=device['product']) != None: connected.append(device) return connected def lockedstart(self): with self.lock: if not self.isAlive(): self.daemon = True self.start() def get_escpos_printer(self): printers = self.connected_usb_devices() if len(printers) > 0: self.set_status('connected','Connected to '+printers[0]['name']) return Usb(printers[0]['vendor'], printers[0]['product']) else: self.set_status('disconnected','Printer Not Found') return None def get_status(self): self.push_task('status') return self.status def open_cashbox(self,printer): printer.cashdraw(2) printer.cashdraw(5) def set_status(self, status, message = None): _logger.info(status+' : '+ (message or 'no message')) if status == self.status['status']: if message != None and (len(self.status['messages']) == 0 or message != self.status['messages'][-1]): self.status['messages'].append(message) else: self.status['status'] = status if message: self.status['messages'] = [message] else: self.status['messages'] = [] if status == 'error' and message: _logger.error('ESC/POS Error: '+message) elif status == 'disconnected' and message: _logger.warning('ESC/POS Device Disconnected: '+message) def run(self): printer = None if not escpos: _logger.error('ESC/POS cannot initialize, please verify system dependencies.') return while True: try: error = True timestamp, task, data = self.queue.get(True) printer = self.get_escpos_printer() if printer == None: if task != 'status': self.queue.put((timestamp,task,data)) error = False time.sleep(5) continue elif task == 'receipt': if timestamp >= time.time() - 1 * 60 * 60: self.print_receipt_body(printer,data) 
printer.cut() elif task == 'xml_receipt': if timestamp >= time.time() - 1 * 60 * 60: printer.receipt(data) elif task == 'cashbox': if timestamp >= time.time() - 12: self.open_cashbox(printer) elif task == 'printstatus': self.print_status(printer) elif task == 'status': pass error = False except NoDeviceError as e: print "No device found %s" %str(e) except HandleDeviceError as e: print "Impossible to handle the device due to previous error %s" % str(e) except TicketNotPrinted as e: print "The ticket does not seems to have been fully printed %s" % str(e) except NoStatusError as e: print "Impossible to get the status of the printer %s" % str(e) except Exception as e: self.set_status('error', str(e)) errmsg = str(e) + '\n' + '-'*60+'\n' + traceback.format_exc() + '-'*60 + '\n' _logger.error(errmsg); finally: if error: self.queue.put((timestamp, task, data)) if printer: printer.close() def push_task(self,task, data = None): self.lockedstart() self.queue.put((time.time(),task,data)) def print_status(self,eprint): localips = ['0.0.0.0','127.0.0.1','127.0.1.1'] ips = [ c.split(':')[1].split(' ')[0] for c in commands.getoutput("/sbin/ifconfig").split('\n') if 'inet addr' in c ] ips = [ ip for ip in ips if ip not in localips ] eprint.text('\n\n') eprint.set(align='center',type='b',height=2,width=2) eprint.text('PosBox Status\n') eprint.text('\n') eprint.set(align='center') if len(ips) == 0: eprint.text('ERROR: Could not connect to LAN\n\nPlease check that the PosBox is correc-\ntly connected with a network cable,\n that the LAN is setup with DHCP, and\nthat network addresses are available') elif len(ips) == 1: eprint.text('IP Address:\n'+ips[0]+'\n') else: eprint.text('IP Addresses:\n') for ip in ips: eprint.text(ip+'\n') if len(ips) >= 1: eprint.text('\nHomepage:\nhttp://'+ips[0]+':8069\n') eprint.text('\n\n') eprint.cut() def print_receipt_body(self,eprint,receipt): def check(string): return string != True and bool(string) and string.strip() def price(amount): return 
("{0:."+str(receipt['precision']['price'])+"f}").format(amount) def money(amount): return ("{0:."+str(receipt['precision']['money'])+"f}").format(amount) def quantity(amount): if math.floor(amount) != amount: return ("{0:."+str(receipt['precision']['quantity'])+"f}").format(amount) else: return str(amount) def printline(left, right='', width=40, ratio=0.5, indent=0): lwidth = int(width * ratio) rwidth = width - lwidth lwidth = lwidth - indent left = left[:lwidth] if len(left) != lwidth: left = left + ' ' * (lwidth - len(left)) right = right[-rwidth:] if len(right) != rwidth: right = ' ' * (rwidth - len(right)) + right return ' ' * indent + left + right + '\n' def print_taxes(): taxes = receipt['tax_details'] for tax in taxes: eprint.text(printline(tax['tax']['name'],price(tax['amount']), width=40,ratio=0.6)) # Receipt Header if receipt['company']['logo']: eprint.set(align='center') eprint.print_base64_image(receipt['company']['logo']) eprint.text('\n') else: eprint.set(align='center',type='b',height=2,width=2) eprint.text(receipt['company']['name'] + '\n') eprint.set(align='center',type='b') if check(receipt['company']['contact_address']): eprint.text(receipt['company']['contact_address'] + '\n') if check(receipt['company']['phone']): eprint.text('Tel:' + receipt['company']['phone'] + '\n') if check(receipt['company']['vat']): eprint.text('VAT:' + receipt['company']['vat'] + '\n') if check(receipt['company']['email']): eprint.text(receipt['company']['email'] + '\n') if check(receipt['company']['website']): eprint.text(receipt['company']['website'] + '\n') if check(receipt['header']): eprint.text(receipt['header']+'\n') if check(receipt['cashier']): eprint.text('-'*32+'\n') eprint.text('Served by '+receipt['cashier']+'\n') # Orderlines eprint.text('\n\n') eprint.set(align='center') for line in receipt['orderlines']: pricestr = price(line['price_display']) if line['discount'] == 0 and line['unit_name'] == 'Unit(s)' and line['quantity'] == 1: 
eprint.text(printline(line['product_name'],pricestr,ratio=0.6)) else: eprint.text(printline(line['product_name'],ratio=0.6)) if line['discount'] != 0: eprint.text(printline('Discount: '+str(line['discount'])+'%', ratio=0.6, indent=2)) if line['unit_name'] == 'Unit(s)': eprint.text( printline( quantity(line['quantity']) + ' x ' + price(line['price']), pricestr, ratio=0.6, indent=2)) else: eprint.text( printline( quantity(line['quantity']) + line['unit_name'] + ' x ' + price(line['price']), pricestr, ratio=0.6, indent=2)) # Subtotal if the taxes are not included taxincluded = True if money(receipt['subtotal']) != money(receipt['total_with_tax']): eprint.text(printline('','-------')); eprint.text(printline(_('Subtotal'),money(receipt['subtotal']),width=40, ratio=0.6)) print_taxes() #eprint.text(printline(_('Taxes'),money(receipt['total_tax']),width=40, ratio=0.6)) taxincluded = False # Total eprint.text(printline('','-------')); eprint.set(align='center',height=2) eprint.text(printline(_(' TOTAL'),money(receipt['total_with_tax']),width=40, ratio=0.6)) eprint.text('\n\n'); # Paymentlines eprint.set(align='center') for line in receipt['paymentlines']: eprint.text(printline(line['journal'], money(line['amount']), ratio=0.6)) eprint.text('\n'); eprint.set(align='center',height=2) eprint.text(printline(_(' CHANGE'),money(receipt['change']),width=40, ratio=0.6)) eprint.set(align='center') eprint.text('\n'); # Extra Payment info if receipt['total_discount'] != 0: eprint.text(printline(_('Discounts'),money(receipt['total_discount']),width=40, ratio=0.6)) if taxincluded: print_taxes() #eprint.text(printline(_('Taxes'),money(receipt['total_tax']),width=40, ratio=0.6)) # Footer if check(receipt['footer']): eprint.text('\n'+receipt['footer']+'\n\n') eprint.text(receipt['name']+'\n') eprint.text( str(receipt['date']['date']).zfill(2) +'/'+ str(receipt['date']['month']+1).zfill(2) +'/'+ str(receipt['date']['year']).zfill(4) +' '+ str(receipt['date']['hour']).zfill(2) +':'+ 
str(receipt['date']['minute']).zfill(2) ) driver = EscposDriver() driver.push_task('printstatus') hw_proxy.drivers['escpos'] = driver class EscposProxy(hw_proxy.Proxy): @http.route('/hw_proxy/open_cashbox', type='json', auth='none', cors='*') def open_cashbox(self): _logger.info('ESC/POS: OPEN CASHBOX') driver.push_task('cashbox') @http.route('/hw_proxy/print_receipt', type='json', auth='none', cors='*') def print_receipt(self, receipt): _logger.info('ESC/POS: PRINT RECEIPT') driver.push_task('receipt',receipt) @http.route('/hw_proxy/print_xml_receipt', type='json', auth='none', cors='*') def print_xml_receipt(self, receipt): _logger.info('ESC/POS: PRINT XML RECEIPT') driver.push_task('xml_receipt',receipt) @http.route('/hw_proxy/escpos/add_supported_device', type='http', auth='none', cors='*') def add_supported_device(self, device_string): _logger.info('ESC/POS: ADDED NEW DEVICE:'+device_string) driver.add_supported_device(device_string) return "The device:\n"+device_string+"\n has been added to the list of supported devices.<br/><a href='/hw_proxy/status'>Ok</a>" @http.route('/hw_proxy/escpos/reset_supported_devices', type='http', auth='none', cors='*') def reset_supported_devices(self): try: os.remove('escpos_devices.pickle') except Exception as e: pass return 'The list of supported devices has been reset to factory defaults.<br/><a href="/hw_proxy/status">Ok</a>'
leansoft/edx-platform
refs/heads/master
common/djangoapps/student/migrations/0025_auto__add_field_courseenrollmentallowed_auto_enroll.py
114
# -*- coding: utf-8 -*-
# South schema migration (auto-generated): adds the boolean `auto_enroll`
# column to student_courseenrollmentallowed, defaulting to False.
# The `models` dict below is South's frozen ORM snapshot — do not hand-edit.
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):

    def forwards(self, orm):
        # Adding field 'CourseEnrollmentAllowed.auto_enroll'
        db.add_column('student_courseenrollmentallowed', 'auto_enroll',
                      self.gf('django.db.models.fields.BooleanField')(default=False),
                      keep_default=False)

    def backwards(self, orm):
        # Deleting field 'CourseEnrollmentAllowed.auto_enroll'
        db.delete_column('student_courseenrollmentallowed', 'auto_enroll')

    # Frozen ORM state at the time this migration was generated.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'student.courseenrollment': {
            'Meta': {'unique_together': "(('user', 'course_id'),)", 'object_name': 'CourseEnrollment'},
            'course_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        },
        # Target model of this migration: note the new auto_enroll field.
        'student.courseenrollmentallowed': {
            'Meta': {'unique_together': "(('email', 'course_id'),)", 'object_name': 'CourseEnrollmentAllowed'},
            'auto_enroll': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'course_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
            'email': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        },
        'student.pendingemailchange': {
            'Meta': {'object_name': 'PendingEmailChange'},
            'activation_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'new_email': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
        },
        'student.pendingnamechange': {
            'Meta': {'object_name': 'PendingNameChange'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'new_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'rationale': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
        },
        'student.registration': {
            'Meta': {'object_name': 'Registration', 'db_table': "'auth_registration'"},
            'activation_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'})
        },
        'student.testcenterregistration': {
            'Meta': {'object_name': 'TestCenterRegistration'},
            'accommodation_code': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
            'accommodation_request': ('django.db.models.fields.CharField', [], {'db_index': 'False', 'max_length': '1024', 'blank': 'True'}),
            'authorization_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_index': 'True'}),
            'client_authorization_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '20', 'db_index': 'True'}),
            'confirmed_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
            'course_id': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
            'eligibility_appointment_date_first': ('django.db.models.fields.DateField', [], {'db_index': 'True'}),
            'eligibility_appointment_date_last': ('django.db.models.fields.DateField', [], {'db_index': 'True'}),
            'exam_series_code': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'processed_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
            'testcenter_user': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['student.TestCenterUser']"}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
            'upload_error_message': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
            'upload_status': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '20', 'blank': 'True'}),
            'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
            'user_updated_at': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'})
        },
        'student.testcenteruser': {
            'Meta': {'object_name': 'TestCenterUser'},
            'address_1': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
            'address_2': ('django.db.models.fields.CharField', [], {'max_length': '40', 'blank': 'True'}),
            'address_3': ('django.db.models.fields.CharField', [], {'max_length': '40', 'blank': 'True'}),
            'candidate_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_index': 'True'}),
            'city': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
            'client_candidate_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
            'company_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'blank': 'True'}),
            'confirmed_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
            'country': ('django.db.models.fields.CharField', [], {'max_length': '3', 'db_index': 'True'}),
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
            'extension': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '8', 'blank': 'True'}),
            'fax': ('django.db.models.fields.CharField', [], {'max_length': '35', 'blank': 'True'}),
            'fax_country_code': ('django.db.models.fields.CharField', [], {'max_length': '3', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'db_index': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
            'middle_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'phone': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
            'phone_country_code': ('django.db.models.fields.CharField', [], {'max_length': '3', 'db_index': 'True'}),
            'postal_code': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '16', 'blank': 'True'}),
            'processed_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
            'salutation': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '20', 'blank': 'True'}),
            'suffix': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
            'upload_error_message': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
            'upload_status': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '20', 'blank': 'True'}),
            'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['auth.User']", 'unique': 'True'}),
            'user_updated_at': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'})
        },
        'student.userprofile': {
            'Meta': {'object_name': 'UserProfile', 'db_table': "'auth_userprofile'"},
            'allow_certificate': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'courseware': ('django.db.models.fields.CharField', [], {'default': "'course.xml'", 'max_length': '255', 'blank': 'True'}),
            'gender': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '6', 'null': 'True', 'blank': 'True'}),
            'goals': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'language': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
            'level_of_education': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '6', 'null': 'True', 'blank': 'True'}),
            'location': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
            'mailing_address': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'meta': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'profile'", 'unique': 'True', 'to': "orm['auth.User']"}),
            'year_of_birth': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'})
        },
        'student.usertestgroup': {
            'Meta': {'object_name': 'UserTestGroup'},
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
            'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'db_index': 'True', 'symmetrical': 'False'})
        }
    }

    complete_apps = ['student']
DirtyUnicorns/android_external_chromium-org
refs/heads/kitkat
tools/telemetry/telemetry/page/page_test_runner.py
25
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys

from telemetry import test as test_module
from telemetry.core import browser_options
from telemetry.core import discover
from telemetry.page import page_test as page_test_module
from telemetry.page import page_runner
from telemetry.page import page_set


def Main(base_dir, page_set_filenames):
  """Turns a PageTest into a command-line program.

  Args:
    base_dir: Path to directory containing tests and ProfileCreators.
  """
  runner = PageTestRunner()
  sys.exit(runner.Run(base_dir, page_set_filenames))


class PageTestRunner(object):
  """Discovers tests under a directory, parses the command line, and runs
  the selected test against a page set via page_runner."""

  def __init__(self):
    # populated by ParseCommandLine()
    self._parser = None
    self._options = None
    self._args = None

  @property
  def test_class(self):
    # Base class used for discovery; subclasses may narrow this.
    return page_test_module.PageTest

  @property
  def test_class_name(self):
    # Human-readable noun used in usage/error messages.
    return 'test'

  def Run(self, base_dir, page_set_filenames):
    """Parse argv, run the test over its page set, and return an exit code
    (number of failures + errors, capped at 255)."""
    test, ps, expectations = self.ParseCommandLine(sys.argv, base_dir,
                                                   page_set_filenames)
    results = page_runner.Run(test, ps, expectations, self._options)
    results.PrintSummary()
    return min(255, len(results.failures + results.errors))

  def FindTestConstructors(self, base_dir):
    # Look for both Tests and PageTests, but Tests get priority, because
    # the update() below lets Test entries overwrite PageTest entries that
    # share the same name.
    test_constructors = discover.DiscoverClasses(
        base_dir, base_dir, self.test_class)
    test_constructors.update(discover.DiscoverClasses(
        base_dir, base_dir, test_module.Test, index_by_class_name=True))
    return test_constructors

  def FindTestName(self, test_constructors, args):
    """Find the test name in an arbitrary argument list.

    We can't use the optparse parser, because the test may add its own
    command-line options. If the user passed in any of those, the
    optparse parsing will fail.

    Returns:
      test_name or None
    """
    test_name = None
    # scan every arg; the last one matching a known test wins
    for arg in [self.GetModernizedTestName(a) for a in args]:
      if arg in test_constructors:
        test_name = arg

    return test_name

  def GetModernizedTestName(self, arg):
    """Sometimes tests change names but buildbots keep calling the old name.

    If arg matches an old test name, return the new test name instead.
    Otherwise, return the arg.
    """
    # Identity by default; subclasses override to map renamed tests.
    return arg

  def GetPageSet(self, test, page_set_filenames):
    """Resolve the page set for `test`: the test's own page set if it has
    one, otherwise build one from a URL, a .json file, or a file/dir path
    taken from the command-line arguments."""
    ps = test.CreatePageSet(self._args, self._options)
    if ps:
      return ps

    if len(self._args) < 2:
      page_set_list = ',\n'.join(
          sorted([os.path.relpath(f) for f in page_set_filenames]))
      self.PrintParseError(
          'No page set, file, or URL specified.\n'
          'Available page sets:\n'
          '%s' % page_set_list)

    # args[0] is the test name; args[1] is the page set / URL / path
    page_set_arg = self._args[1]

    # We've been given a URL. Create a page set with just that URL.
    if (page_set_arg.startswith('http://') or
        page_set_arg.startswith('https://')):
      self._options.allow_live_sites = True
      return page_set.PageSet.FromDict({
          'pages': [{'url': page_set_arg}]
          }, os.path.dirname(__file__))

    # We've been given a page set JSON. Load it.
    if page_set_arg.endswith('.json'):
      return page_set.PageSet.FromFile(page_set_arg)

    # We've been given a file or directory. Create a page set containing it.
    if os.path.exists(page_set_arg):
      page_set_dict = {'pages': []}

      def _AddFile(file_path):
        page_set_dict['pages'].append({'url': 'file://' + file_path})

      def _AddDir(dir_path):
        # recurse through the directory tree via _AddPath
        for path in os.listdir(dir_path):
          path = os.path.join(dir_path, path)
          _AddPath(path)

      def _AddPath(path):
        if os.path.isdir(path):
          _AddDir(path)
        else:
          _AddFile(path)

      _AddPath(page_set_arg)
      return page_set.PageSet.FromDict(page_set_dict, os.getcwd() + os.sep)

    raise Exception('Did not understand "%s". Pass a page set, file or URL.'
                    % page_set_arg)

  def ParseCommandLine(self, args, base_dir, page_set_filenames):
    """Locate the requested test in `args`, register its options, then parse
    the command line (optparse reads sys.argv here, not `args`).

    Returns:
      (page_test, page_set, expectations) tuple ready for page_runner.Run.
    """
    self._options = browser_options.BrowserFinderOptions()
    self._parser = self._options.CreateParser(
        '%%prog [options] %s page_set' % self.test_class_name)

    test_constructors = self.FindTestConstructors(base_dir)
    test_name = self.FindTestName(test_constructors, args)
    test = None
    if test_name:
      test = test_constructors[test_name]()
      if isinstance(test, test_module.Test):
        # a Test wraps a PageTest; unwrap it for option registration
        page_test = test.test()
      else:
        page_test = test
      # must happen before parse_args() so the test's flags are recognized
      page_test.AddCommandLineOptions(self._parser)

    page_runner.AddCommandLineOptions(self._parser)
    _, self._args = self._parser.parse_args()

    if len(self._args) < 1:
      error_message = 'No %s specified.\nAvailable %ss:\n' % (
          self.test_class_name, self.test_class_name)
      test_list_string = ',\n'.join(sorted(test_constructors.keys()))
      self.PrintParseError(error_message + test_list_string)

    if not test:
      error_message = 'No %s named %s.\nAvailable %ss:\n' % (
          self.test_class_name, self._args[0], self.test_class_name)
      test_list_string = ',\n'.join(sorted(test_constructors.keys()))
      self.PrintParseError(error_message + test_list_string)

    if isinstance(test, test_module.Test):
      # Tests create their page set from options only
      ps = test.CreatePageSet(self._options)
      expectations = test.CreateExpectations(ps)
    else:
      ps = self.GetPageSet(test, page_set_filenames)
      expectations = test.CreateExpectations(ps)

    if len(self._args) > 2:
      self.PrintParseError('Too many arguments.')

    return page_test, ps, expectations

  def PrintParseError(self, message):
    # optparse's error() prints usage + message and exits the process
    self._parser.error(message)
AnTAVR/aai2
refs/heads/master
src/modules/base/m_main.py
1
import logging
from gettext import gettext as _
from typing import Tuple, List

from aai_framework.dial import ColorTxt
from aai_framework.interface import ModuleInterface, DialogTestInterface
from libs.db import DbDomains, DbTimezones, DbLocales, DbKeymaps, DbFonts, DbFontMaps, DbFontUnimaps
from libs.pacman import Pkg

from .l_base import MirrorList
from .main import vars_, Options, Vars

logger = logging.getLogger(__name__)


class BaseGroupPkg(Pkg):
    """Package group for the base system.

    ``install`` is deliberately a no-op here; the base group is presumably
    installed by other machinery -- TODO confirm against Pkg's contract.
    """

    @classmethod
    def install(cls):
        # Intentionally does nothing.
        return


class Module(ModuleInterface, DialogTestInterface):
    """Installer module that interactively collects base-system settings.

    Walks the user through a chain of dialogs (country/domain, timezone,
    locale, keymap, console font, hostname, pacman mirrorlist) and stores
    the answers in ``self.opti_`` (an Options instance).
    """

    ID = 'base'
    # NOTE(review): presumably a size/step estimate used for install
    # progress reporting -- confirm where LEN_INSTALL is consumed.
    LEN_INSTALL = 1165

    # Collected user choices; replaced with a fresh Options() in run().
    opti_: Options = None

    # Fallback domain offered when no country has been chosen yet.
    DEFAULT_COUNTRY = 'RU'
    # Sentinel menu entry meaning "no selection" for optional choices.
    DEFAULT_ITEM = 'none'

    @property
    def vars_(self) -> Vars:
        """Module-level shared state imported from .main."""
        return vars_

    def __init__(self):
        super().__init__()
        self.opti_ = Options()
        # Register this module in its own conflict set (framework convention).
        self.conflicts.add(self)

    @property
    def is_run(self) -> bool:
        """True once the module's settings have been successfully collected."""
        return self.vars_.is_ok

    def _head_txt(self, help_txt_new: List[str] = None, all_: bool = False) -> List[str]:
        """Build the summary header shown above every dialog.

        Each tuple is (format template, option attribute name, current
        value, include-when-not-all flag); rendering is delegated to
        ``format_head_txt`` from the framework. ``help_txt_new`` lines, if
        given, are prepended.
        """
        tmp = [
            (_('домен ({}): {}'), 'country', self.opti_.country, all_),
            (_('часовой пояс ({}): {}'), 'timezone', self.opti_.timezone, all_),
            (_('localtime ({}): {}'), 'localtime', self.opti_.localtime, all_),
            (_('локаль ({}): {}'), 'locale', self.opti_.locale, all_),
            (_('раскладка клавиатуры ({}): {}'), 'keymap', self.opti_.keymap, all_),
            (_('доп. раскладка клавиатуры ({}): {}'), 'keymap_toggle', self.opti_.keymap_toggle, False),
            (_('Шрифт ({}): {}'), 'font', self.opti_.font, all_),
            (_('карта шрифта ({}): {}'), 'font_map', self.opti_.font_map, False),
            (_('unicode карта шрифта ({}): {}'), 'font_unimap', self.opti_.font_unimap, False),
            (_('имя компьютера ({}): {}'), 'hostname', self.opti_.hostname, all_),
            (_('mirrorlist ({}): {}'), 'mirrorlist', self.opti_.mirrorlist, all_),
        ]
        help_txt = self.format_head_txt(tmp)
        if help_txt_new is not None:
            help_txt = help_txt_new + help_txt
        return help_txt

    @property
    def name(self) -> str:
        """Human-readable module title shown in the main menu."""
        return _('Базовая система')

    @property
    def menu_item(self) -> Tuple[str, str]:
        """Menu entry with a colored DONE / REQUIRED status suffix."""
        text = [ColorTxt('(' + _('ВЫПОЛНЕНО') + ')').green.bold
                if self.is_run else
                ColorTxt('(' + _('ОБЯЗАТЕЛЬНО!!!') + ')').red.bold]
        return super().get_menu_item(text)

    def dialog_country(self) -> Tuple[str, str]:
        """Ask for the country domain; returns the (code, value) dialog pair."""
        domains = DbDomains().read()
        items = domains.menu_items
        help_txt = self._head_txt()
        help_txt += ['', _('Выберите домен вашей страны')]
        default = self.opti_.country or self.DEFAULT_COUNTRY
        return self._dialog_menu(items, default, help_txt)

    def dialog_timezone(self) -> Tuple[str, str]:
        """Ask for the timezone, defaulting from the chosen country's record."""
        domains = DbDomains().read()
        timezones = DbTimezones().read()
        items = timezones.menu_items
        help_txt = self._head_txt()
        help_txt += ['', _('Выберите часовой пояс')]
        country = domains.get(self.opti_.country)
        default = ''
        if country:
            default = country.default_timezone
        else:
            # No default timezone recorded for this domain: warn in the help text.
            help_txt.append('')
            help_txt.append(ColorTxt(_('В базе {} нет часового пояса по умолчанию для домена <{}> !!!').format(
                domains.db_file_name, self.opti_.country)).red.bold)
        return self._dialog_menu(items, default, help_txt)

    def dialog_localtime(self) -> Tuple[str, str]:
        """Ask whether the hardware clock keeps UTC or local time."""
        items = [('UTC', _('Всемирное координированное время')),
                 ('LOCAL', _('Местное время') + ' ' + ColorTxt(_('(КАТЕГОРИЧЕСКИ НЕ РЕКОМЕНДУЕТСЯ)')).red.bold)]
        help_txt = self._head_txt()
        help_txt += ['', _('Выберите тип аппаратных часов')]
        default = 'UTC'
        return self._dialog_menu(items, default, help_txt)

    def dialog_locale(self) -> Tuple[str, str]:
        """Ask for the locale, defaulting from the chosen timezone's record."""
        timezones = DbTimezones().read()
        locales = DbLocales().read()
        items = locales.menu_items
        help_txt = self._head_txt()
        help_txt += ['', _('Выберите локаль')]
        timezone = timezones.get(self.opti_.timezone, 'timezone')
        default = ''
        if timezone:
            default = timezone.default_locale
        else:
            help_txt.append('')
            help_txt.append(ColorTxt(_('В базе {} нет локали по умолчанию для часового пояса <{}> !!!').format(
                timezones.db_file_name, self.opti_.timezone)).red.bold)
        return self._dialog_menu(items, default, help_txt)

    def dialog_keymap(self) -> Tuple[str, str]:
        """Ask for the console keymap, defaulting from the chosen locale."""
        locales = DbLocales().read()
        keymaps = DbKeymaps().read()
        items = keymaps.menu_items
        help_txt = self._head_txt()
        help_txt += ['', _('Выберите раскладку клавиатуры')]
        locale = locales.get(self.opti_.locale, 'locale')
        default = ''
        if locale:
            default = locale.default_keymap
        else:
            help_txt.append('')
            help_txt.append(ColorTxt(_('В базе {} нет раскладки по умолчанию для локали <{}> !!!').format(
                locales.db_file_name, self.opti_.locale)).red.bold)
        return self._dialog_menu(items, default, help_txt)

    def dialog_keymap_toggle(self) -> Tuple[str, str]:
        """Ask for an optional secondary keymap; 'none' disables it."""
        keymaps = DbKeymaps().read()
        items = keymaps.menu_items
        items.insert(0, (self.DEFAULT_ITEM, self.DEFAULT_ITEM))
        help_txt = self._head_txt()
        help_txt += ['', _('Выберите дополнительную раскладку клавиатуры')]
        default = self.DEFAULT_ITEM
        return self._dialog_menu(items, default, help_txt)

    def dialog_font(self) -> Tuple[str, str]:
        """Ask for the console font, defaulting from the chosen locale."""
        locales = DbLocales().read()
        fonts = DbFonts().read()
        items = fonts.menu_items
        help_txt = self._head_txt()
        help_txt += ['', _('Выберите шрифт')]
        locale = locales.get(self.opti_.locale, 'locale')
        default = ''
        if locale:
            default = locale.default_font
        else:
            help_txt.append('')
            help_txt.append(ColorTxt(_('В базе {} нет шрифта по умолчанию для локали <{}> !!!').format(
                locales.db_file_name, self.opti_.locale)).red.bold)
        return self._dialog_menu(items, default, help_txt)

    def dialog_font_map(self) -> Tuple[str, str]:
        """Ask for the font translation map; 'none' when the font has no default."""
        fonts = DbFonts().read()
        fontmaps = DbFontMaps().read()
        items = fontmaps.menu_items
        items.insert(0, (self.DEFAULT_ITEM, self.DEFAULT_ITEM))
        help_txt = self._head_txt()
        help_txt += ['', _('Выберите карту шрифта')]
        font = fonts.get(self.opti_.font)
        default = ''
        if font:
            default = font.font_map
        else:
            help_txt.append('')
            help_txt.append(ColorTxt(_('В базе {} нет карты шрифта по умолчанию для шрифта <{}> !!!').format(
                fonts.db_file_name, self.opti_.font)).red.bold)
        if not default:
            default = self.DEFAULT_ITEM
        return self._dialog_menu(items, default, help_txt)

    def dialog_font_unimap(self) -> Tuple[str, str]:
        """Ask for the font unicode map; 'none' when the font has no default."""
        fonts = DbFonts().read()
        fontunimaps = DbFontUnimaps().read()
        items = fontunimaps.menu_items
        items.insert(0, (self.DEFAULT_ITEM, self.DEFAULT_ITEM))
        help_txt = self._head_txt()
        help_txt += ['', _('Выберите unicode карту шрифта')]
        font = fonts.get(self.opti_.font)
        default = ''
        if font:
            default = font.font_unimap
        else:
            help_txt.append('')
            help_txt.append(ColorTxt(_('В базе {} нет unicode карты шрифта по умолчанию для шрифта <{}> !!!').format(
                fonts.db_file_name, self.opti_.font)).red.bold)
        if not default:
            default = self.DEFAULT_ITEM
        return self._dialog_menu(items, default, help_txt)

    def dialog_mirrorlist(self) -> Tuple[str, List[str]]:
        """Ask which pacman mirrors to enable (checklist, multiple selection)."""
        mirrorlist = MirrorList(self.opti_.country).read_raw()
        # Sort by the second tuple field first (presumably country/region),
        # then by the first -- TODO confirm the choice tuple layout.
        choices: List[Tuple[str, str, str]] = sorted(mirrorlist.choices, key=lambda x: (x[1], x[0]))
        default: Tuple[str, ...] = tuple(mirrorlist.default_items)
        help_txt = self._head_txt()
        help_txt += ['', _('Выберите зеркала (от 1 до 6 шт.)')]
        return self._dialog_checklist(choices, default, help_txt)

    def dialog_hostname(self) -> Tuple[str, str]:
        """Ask for the machine hostname, suggesting 'hostname.<country>'."""
        demo_text = 'hostname.domain.org'
        demo_text = ColorTxt(demo_text).blue.bold
        help_txt = self._head_txt()
        help_txt += ['', _('Введите имя компьютера'), demo_text]
        default = 'hostname.' + self.opti_.country.lower()
        return self._dialog_inputbox(default, help_txt)

    # @ModuleInterface.decor_can_not_perform
    def run(self) -> bool:
        """Drive the dialog sequence; returns False on Esc/Cancel, True on OK.

        Falls through (implicitly returning None) when the final confirmation
        dialog returns anything other than OK. Most intermediate dialogs are
        currently disabled (commented out) -- only country and mirrorlist
        are active.
        """
        self.opti_ = Options()

        code, value = self.dialog_country()
        if code in self.my_dialog.ESC_CANCEL:
            return False
        # Fall back to the class-level default when the dialog returns empty.
        self.opti_.country = value or self.opti_.__class__.country

        # code, value = self.dialog_timezone()
        # if code in self.my_dialog.ESC_CANCEL:
        #     return False
        # self.opti_.timezone = value or self.opti_.__class__.timezone
        #
        # code, value = self.dialog_localtime()
        # if code in self.my_dialog.ESC_CANCEL:
        #     return False
        # self.opti_.localtime = value or self.opti_.__class__.localtime
        #
        # code, value = self.dialog_locale()
        # if code in self.my_dialog.ESC_CANCEL:
        #     return False
        # self.opti_.locale = value or self.opti_.__class__.locale
        #
        # code, value = self.dialog_keymap()
        # if code in self.my_dialog.ESC_CANCEL:
        #     return False
        # self.opti_.keymap = value or self.opti_.__class__.keymap
        #
        # code, value = self.dialog_keymap_toggle()
        # if code in self.my_dialog.ESC_CANCEL:
        #     return False
        # if value == self.DEFAULT_ITEM:
        #     value = None
        # self.opti_.keymap_toggle = value or self.opti_.__class__.keymap_toggle
        #
        # code, value = self.dialog_font()
        # if code in self.my_dialog.ESC_CANCEL:
        #     return False
        # self.opti_.font = value or self.opti_.__class__.font
        #
        # code, value = self.dialog_font_map()
        # if code in self.my_dialog.ESC_CANCEL:
        #     return False
        # if value == self.DEFAULT_ITEM:
        #     value = None
        # self.opti_.font_map = value or self.opti_.__class__.font_map
        #
        # code, value = self.dialog_font_unimap()
        # if code in self.my_dialog.ESC_CANCEL:
        #     return False
        # if value == self.DEFAULT_ITEM:
        #     value = None
        # self.opti_.font_unimap = value or self.opti_.__class__.font_unimap
        #
        # code, value = self.dialog_hostname()
        # if code in self.my_dialog.ESC_CANCEL:
        #     return False
        # self.opti_.hostname = value or self.opti_.__class__.hostname

        code, value = self.dialog_mirrorlist()
        if code in self.my_dialog.ESC_CANCEL:
            return False
        self.opti_.mirrorlist = value or self.opti_.__class__.mirrorlist

        code = self.dialog_test()
        if code == self.my_dialog.OK:
            return True
marioyc/RL-algorithms
refs/heads/master
train_atari.py
1
import json
import logging
import numpy as np
import os
import random
import time
from tqdm import tqdm

# atari learning environment imports
from ale_python_interface import ALEInterface

# custom imports
import common.feature_extractors as feature_extractors
import common.file_utils as file_utils
import common.learning_agents as learning_agents

# load config file (close the handle instead of leaking it)
FEATURES = 'basic'
with open('config.json') as config_file:
    config = json.load(config_file)[FEATURES]

# training parameters
GAME = 'space_invaders'
LOAD_WEIGHTS = False
LOAD_WEIGHTS_FILENAME = ''
NUM_EPISODES_AVERAGE_OVER = 30
TEST_INTERVAL = 50
NUM_EPISODES_TEST_OVER = 30
RECORD_BEST = True

random.seed(42)


def run_episode(ale, agent, train=True):
    """Play one episode and return (initial_value, total_reward, num_frames, frames).

    ale: ALE interface positioned at the start of an episode.
    agent: learning agent exposing startEpisode/getValue/getAction/
        incorporateFeedback.
    train: when True the agent learns from feedback; when False it only
        acts greedily and extracts features.

    frames is the list of RGB screens (only populated when RECORD_BEST).
    """
    total_reward = 0
    num_frames = 0
    # Seed with a random action; on-policy agents return the next action
    # from incorporateFeedback, off-policy agents return None.
    newAction = random.choice(agent.actions)
    frames = []
    screen = ale.getScreen()
    state = {"screen": screen}
    agent.startEpisode(state)
    initial_value = agent.getValue()
    while not ale.game_over():
        # newAction is None => off-policy algorithm: always query the agent.
        if not train or newAction is None:
            action = agent.getAction()
        else:
            action = newAction
        reward = ale.act(action)
        total_reward += reward
        if not ale.game_over():
            new_screen = ale.getScreen()
            if RECORD_BEST:
                frames.append(ale.getScreenRGB())
            new_state = {"screen": new_screen}
        else:
            # Terminal transition: no successor state.
            new_state = None
        if train:
            newAction = agent.incorporateFeedback(state, action, reward, new_state)
        elif new_state is not None:
            # Evaluation mode still needs features extracted for getAction.
            agent.featureExtractor.extractFeatures(new_state)
        state = new_state
        num_frames += 1
    ale.reset_game()
    return initial_value, total_reward, num_frames, frames


def train_agent(ale, agent):
    """Train `agent` on `ale` for config['train_episodes'] episodes.

    Tracks per-episode statistics, periodically evaluates the greedy policy
    (every TEST_INTERVAL episodes, averaged over NUM_EPISODES_TEST_OVER runs),
    plots the statistics, and saves weights/videos for new best episodes.
    """
    screen_dims = ale.getScreenDims()
    assert screen_dims[0] == 160 and screen_dims[1] == 210

    # statistics
    stats = {
        "average_interval": NUM_EPISODES_AVERAGE_OVER,
        "rewards": [],
        "rewards_average_all": [],
        "rewards_average_partial": [],
        "initial_value": [],
        "frames": [],
        "frames_average_all": [],
        "frames_average_partial": [],
        "features": [],
        "feature_weights_min": [],
        "feature_weights_max": [],
        "feature_weights_average": [],
        "test_interval": TEST_INTERVAL,
        "test_mean": [],
        "test_std": [],
    }
    best_reward = 0
    logging.info('Starting training')
    # NOTE: if config['train_episodes'] == 0 the final save below would fail
    # (no `filename` bound); assumed to always be > 0.
    for episode in tqdm(range(config['train_episodes'])):
        start = time.time()
        initial_value, total_reward, num_frames, frames = run_episode(ale, agent)
        end = time.time()
        logging.info('episode: %d, score: %d, number of frames: %d, time: %.4fm',
                     episode, total_reward, num_frames, (end - start) / 60)

        filename_prefix = "{}-{}-{}".format(GAME, agent.name, FEATURES)
        filename = "{}-{}".format(filename_prefix, episode)

        if total_reward > best_reward:
            best_reward = total_reward
            logging.info('Best reward: %d', total_reward)
            if RECORD_BEST:
                file_utils.save_videos(frames, screen_dims, filename)
                file_utils.save_weights(agent.weights, filename)

        # update and plot statistics of current episode
        stats["rewards"].append(total_reward)
        stats["rewards_average_all"].append(np.mean(stats["rewards"]))
        stats["rewards_average_partial"].append(np.mean(stats["rewards"][-NUM_EPISODES_AVERAGE_OVER:]))
        stats["initial_value"].append(initial_value)
        stats["frames"].append(num_frames)
        stats["frames_average_all"].append(np.mean(stats["frames"]))
        stats["frames_average_partial"].append(np.mean(stats["frames"][-NUM_EPISODES_AVERAGE_OVER:]))
        stats["features"].append(len(agent.weights))
        # was: [v for k, v in agent.weights.iteritems()] -- Python-2-only
        weights = list(agent.weights.values())
        stats["feature_weights_min"].append(min(weights))
        stats["feature_weights_max"].append(max(weights))
        stats["feature_weights_average"].append(np.mean(weights))

        if (episode + 1) % TEST_INTERVAL == 0:
            # Periodic evaluation of the greedy (non-learning) policy.
            test_results = np.zeros(NUM_EPISODES_TEST_OVER)
            for test_episode in range(NUM_EPISODES_TEST_OVER):
                initial_value, total_reward, num_frames, frames = run_episode(ale, agent, train=False)
                logging.info('test episode: %d, score: %d, number of frames: %d',
                             test_episode, total_reward, num_frames)
                test_results[test_episode] = total_reward
            stats["test_mean"].append(np.mean(test_results))
            stats["test_std"].append(np.std(test_results))

        file_utils.plot_stats(stats, filename_prefix)
    logging.info('Ending training')
    file_utils.save_weights(agent.weights, filename)


if __name__ == '__main__':
    game = GAME + '.bin'
    gamepath = os.path.join('roms', game)

    # load the ale interface to interact with
    ale = ALEInterface()
    ale.setInt('random_seed', 42)
    ale.setFloat("repeat_action_probability", 0.00)
    ale.setInt("frame_skip", config['frame_skip'])
    #ale.setBool("color_averaging", True)
    ale.loadROM(gamepath)

    feature_extractor = feature_extractors.AtariFeatureExtractor(
        mode=FEATURES, background=file_utils.load_background(GAME))
    agent = learning_agents.SARSALambdaLearningAlgorithm(
        actions=ale.getMinimalActionSet(),
        featureExtractor=feature_extractor,
        discount=config['gamma'],
        explorationProb=config['exploration_probability'],
        stepSize=config['step'],
        decay=config['lambda'] * config['gamma'],
        threshold=config['elegibility_traces_threshold'])

    logging.basicConfig(filename='logs/{}-{}-{}.log'.format(GAME, agent.name, FEATURES),
                        format='%(asctime)s %(message)s', level=logging.INFO)

    if LOAD_WEIGHTS:
        # BUG FIX: referenced undefined WEIGHTS_FILENAME (NameError);
        # the configured constant is LOAD_WEIGHTS_FILENAME.
        agent.weights = file_utils.load_weights(LOAD_WEIGHTS_FILENAME)

    train_agent(ale, agent)
ESS-LLP/frappe
refs/heads/develop
frappe/desk/page/backups/backups.py
14
import os
import datetime

import frappe
from frappe import _
from frappe.utils import get_site_path, cint, get_url
from frappe.utils.data import convert_utc_to_user_timezone


def get_context(context):
    """Build the backups page context: list downloadable `.sql.gz` backups.

    Prunes old backups beyond the configured limit first, then returns
    {'files': [(url, mtime-string, size-string), ...]} sorted newest-first.
    """
    def get_time(path):
        # mtime formatted in the user's timezone as 'YYYY-MM-DD HH:MM'.
        dt = os.path.getmtime(path)
        return convert_utc_to_user_timezone(datetime.datetime.utcfromtimestamp(dt)).strftime('%Y-%m-%d %H:%M')

    def get_size(path):
        # Human-readable size: megabytes above 1 MiB, kilobytes otherwise.
        size = os.path.getsize(path)
        if size > 1048576:
            return "{0:.1f}M".format(float(size) / 1048576)
        else:
            return "{0:.1f}K".format(float(size) / 1024)

    path = get_site_path('private', 'backups')
    files = [x for x in os.listdir(path) if os.path.isfile(os.path.join(path, x))]
    backup_limit = get_scheduled_backup_limit()

    if len(files) > backup_limit:
        # cleanup_old_backups also removes the deleted names from `files`.
        cleanup_old_backups(path, files, backup_limit)

    files = [('/backups/' + _file,
              get_time(os.path.join(path, _file)),
              get_size(os.path.join(path, _file)))
             for _file in files if _file.endswith('sql.gz')]
    files.sort(key=lambda x: x[1], reverse=True)

    return {"files": files}


def get_scheduled_backup_limit():
    """Return the configured number of backups to keep (System Settings)."""
    backup_limit = frappe.db.get_singles_value('System Settings', 'backup_limit')
    return cint(backup_limit)


def cleanup_old_backups(site_path, files, limit):
    """Delete the oldest `.sql.gz` backups so at most `limit` remain.

    Mutates `files` in place so callers see the surviving names only.
    """
    backup_paths = []
    for f in files:
        if f.endswith('sql.gz'):
            _path = os.path.abspath(os.path.join(site_path, f))
            backup_paths.append(_path)

    # Oldest first (by creation time) so the earliest backups are removed.
    backup_paths = sorted(backup_paths, key=os.path.getctime)
    files_to_delete = len(backup_paths) - limit
    for idx in range(0, files_to_delete):
        f = os.path.basename(backup_paths[idx])
        files.remove(f)
        os.remove(backup_paths[idx])


def delete_downloadable_backups():
    """Prune downloadable backups beyond the configured limit."""
    path = get_site_path('private', 'backups')
    files = [x for x in os.listdir(path) if os.path.isfile(os.path.join(path, x))]
    backup_limit = get_scheduled_backup_limit()

    if len(files) > backup_limit:
        cleanup_old_backups(path, files, backup_limit)


@frappe.whitelist()
def schedule_files_backup(user_email):
    """Queue a files backup job unless one is already queued for this site."""
    from frappe.utils.background_jobs import enqueue, get_jobs
    queued_jobs = get_jobs(site=frappe.local.site, queue="long")
    method = 'frappe.desk.page.backups.backups.backup_files_and_notify_user'

    # ROBUSTNESS FIX: get_jobs may not contain an entry for this site at
    # all (previously queued_jobs[frappe.local.site] raised KeyError when
    # the long queue was empty).
    if method not in (queued_jobs.get(frappe.local.site) or []):
        # CONSISTENCY FIX: reuse `method` instead of repeating the dotted path.
        enqueue(method, queue='long', user_email=user_email)
        frappe.msgprint(_("Queued for backup. You will receive an email with the download link"))
    else:
        frappe.msgprint(_("Backup job is already queued. You will receive an email with the download link"))


def backup_files_and_notify_user(user_email=None):
    """Run a with-files backup and e-mail download links to `user_email`."""
    from frappe.utils.backups import backup
    backup_files = backup(with_files=True)
    get_downloadable_links(backup_files)

    subject = _("File backup is ready")
    frappe.sendmail(
        recipients=[user_email],
        subject=subject,
        template="file_backup_notification",
        args=backup_files,
        header=[subject, 'green']
    )


def get_downloadable_links(backup_files):
    """Rewrite local backup paths in `backup_files` into absolute site URLs."""
    for key in ['backup_path_files', 'backup_path_private_files']:
        path = backup_files[key]
        backup_files[key] = get_url('/'.join(path.split('/')[-2:]))
314r/joliebulle
refs/heads/master
joliebulle/base.py
1
#!/usr/bin/python
# -*- coding: utf-8 -*-
import PyQt5
import sys
import logging
from PyQt5 import QtCore
import json
import xml.etree.ElementTree as ET
from globals import *
from model.fermentable import *
from model.hop import *
from model.yeast import *
from model.misc import *
from model.mash import *
from model.constants import *
from operator import attrgetter
from singleton import Singleton

logger = logging.getLogger(__name__)


class ImportBase(object, metaclass=Singleton):
    """Singleton holding the ingredient database loaded from BeerXML.

    Parses `database_file` once, keeps the fermentables/hops/yeasts/miscs
    lists sorted by name, and persists any add/delete back to the XML file.
    """

    def __init__(self):
        logger.debug("Import %s", database_file)
        fichierBeerXML = database_file
        self.arbre = ET.parse(fichierBeerXML)

        fermentables = self.arbre.findall('.//FERMENTABLE')
        hops = self.arbre.findall('.//HOP')
        levures = self.arbre.findall('.//YEAST')
        misc = self.arbre.findall('.//MISC')

        self.listeFermentables = list()
        self.listeHops = list()
        self.listeYeasts = list()
        self.listeMiscs = list()

        # Fermentable ingredients
        for element in fermentables:
            self.listeFermentables.append(Fermentable.parse(element))
        self.listeFermentables = sorted(self.listeFermentables, key=attrgetter('name'))
        logger.debug("%s fermentables in database, using %s bytes in memory",
                     len(self.listeFermentables), sys.getsizeof(self.listeFermentables))

        # Hops
        for element in hops:
            self.listeHops.append(Hop.parse(element))
        self.listeHops = sorted(self.listeHops, key=attrgetter('name'))
        logger.debug("%s hops in database, using %s bytes in memory",
                     len(self.listeHops), sys.getsizeof(self.listeHops))

        # Yeasts
        for element in levures:
            self.listeYeasts.append(Yeast.parse(element))
        self.listeYeasts = sorted(self.listeYeasts, key=attrgetter('name'))
        logger.debug("%s yeasts in database, using %s bytes in memory",
                     len(self.listeYeasts), sys.getsizeof(self.listeYeasts))

        # Miscellaneous ingredients
        for element in misc:
            self.listeMiscs.append(Misc.parse(element))
        self.listeMiscs = sorted(self.listeMiscs, key=attrgetter('name'))
        logger.debug("%s miscs in database, using %s bytes in memory",
                     len(self.listeMiscs), sys.getsizeof(self.listeMiscs))
        logger.debug("Import %s terminé", database_file)

        # Import mash profiles from the separate mash file.
        logger.debug("Import %s", mash_file)
        arbre = ET.parse(mash_file)
        mash = arbre.findall('.//MASH')
        self.listeMashes = list()
        for element in mash:
            self.listeMashes.append(Mash.parse(element))
        logger.debug("%s mash in database, using %s bytes in memory",
                     len(self.listeMashes), sys.getsizeof(self.listeMashes))
        logger.debug("Import %s terminé", mash_file)

    @staticmethod
    def __indent(elem, level=0):
        """Recursively indent `elem` in place for human-readable XML output."""
        i = "\n" + level*" "
        if len(elem):
            if not elem.text or not elem.text.strip():
                elem.text = i + " "
            if not elem.tail or not elem.tail.strip():
                elem.tail = i
            for elem in elem:
                ImportBase.__indent(elem, level+1)
            if not elem.tail or not elem.tail.strip():
                elem.tail = i
        else:
            if level and (not elem.tail or not elem.tail.strip()):
                elem.tail = i

    @staticmethod
    def save(root_node):
        """Pretty-print `root_node` and write it to the database file."""
        with open(database_file, 'wb') as database_xml:
            ImportBase.__indent(root_node)
            ET.ElementTree(root_node).write(database_xml, encoding="utf-8")

    @staticmethod
    def addFermentable(f):
        """Add a fermentable to the in-memory list and persist it."""
        ImportBase().listeFermentables.append(f)
        ImportBase().listeFermentables = sorted(ImportBase().listeFermentables, key=attrgetter('name'))
        root = ImportBase().arbre.getroot()
        root.append(f.toXml())
        ImportBase.save(root)

    @staticmethod
    def delFermentable(f):
        """Remove a fermentable from the list and from the XML database."""
        ImportBase().listeFermentables.remove(f)
        root = ImportBase().arbre.getroot()
        iterator = root.iter("FERMENTABLE")
        item = None
        # Match on the identifying fields, not object identity, because the
        # XML elements were re-parsed into fresh objects.
        for elem in iterator:
            tempF = Fermentable.parse(elem)
            if f.name == tempF.name and f.type == tempF.type and f.color == tempF.color \
                    and f.recommendMash == tempF.recommendMash and f.fyield == tempF.fyield:
                item = elem
        if item is not None:
            root.remove(item)
        ImportBase.save(root)

    @staticmethod
    def addHop(h):
        """Add a hop to the in-memory list and persist it.

        CONSISTENCY FIX: previously wrote the file by hand (unindented,
        via the private ElementTree._setroot); now persists through
        ImportBase.save like every other add*/del* method.
        """
        ImportBase().listeHops.append(h)
        ImportBase().listeHops = sorted(ImportBase().listeHops, key=attrgetter('name'))
        root = ImportBase().arbre.getroot()
        root.append(h.toXml())
        ImportBase.save(root)

    @staticmethod
    def delHop(h):
        """Remove a hop from the list and from the XML database."""
        ImportBase().listeHops.remove(h)
        root = ImportBase().arbre.getroot()
        iterator = root.iter("HOP")
        item = None
        for elem in iterator:
            tempHop = Hop.parse(elem)
            if h.name == tempHop.name and h.form == tempHop.form and h.alpha == tempHop.alpha:
                item = elem
        if item is not None:
            root.remove(item)
        ImportBase.save(root)

    @staticmethod
    def addMisc(m):
        """Add a misc ingredient to the in-memory list and persist it."""
        ImportBase().listeMiscs.append(m)
        ImportBase().listeMiscs = sorted(ImportBase().listeMiscs, key=attrgetter('name'))
        root = ImportBase().arbre.getroot()
        root.append(m.toXml())
        ImportBase.save(root)

    @staticmethod
    def delMisc(m):
        """Remove a misc ingredient from the list and from the XML database."""
        ImportBase().listeMiscs.remove(m)
        root = ImportBase().arbre.getroot()
        iterator = root.iter("MISC")
        item = None
        for elem in iterator:
            tempMisc = Misc.parse(elem)
            if m.name == tempMisc.name and m.type == tempMisc.type:
                item = elem
        if item is not None:
            root.remove(item)
        ImportBase.save(root)

    @staticmethod
    def addYeast(y):
        """Add a yeast to the in-memory list and persist it."""
        ImportBase().listeYeasts.append(y)
        ImportBase().listeYeasts = sorted(ImportBase().listeYeasts, key=attrgetter('name'))
        root = ImportBase().arbre.getroot()
        root.append(y.toXml())
        ImportBase.save(root)

    @staticmethod
    def delYeast(y):
        """Remove a yeast from the list and from the XML database."""
        ImportBase().listeYeasts.remove(y)
        root = ImportBase().arbre.getroot()
        iterator = root.iter("YEAST")
        item = None
        for elem in iterator:
            tempYeast = Yeast.parse(elem)
            if y.name == tempYeast.name and y.form == tempYeast.form and y.labo == tempYeast.labo \
                    and y.productId == tempYeast.productId and y.attenuation == tempYeast.attenuation:
                item = elem
        if item is not None:
            root.remove(item)
        ImportBase.save(root)

    @staticmethod
    def exportjson():
        """Serialize the ingredient database to a JSON string."""
        # data = []
        dic = {}

        fermentables = []
        for f in ImportBase().listeFermentables:
            fermentable = {}
            fermentable['name'] = f.name
            fermentable['color'] = f.color
            fermentable['type'] = f.type
            fermentable['fyield'] = f.fyield
            fermentables.append(fermentable)
        dic['fermentables'] = fermentables

        hops = []
        for h in ImportBase().listeHops:
            hop = {}
            hop['name'] = h.name
            hop['alpha'] = h.alpha
            # Map the internal form constant to its display string.
            if h.form == HOP_FORM_PELLET:
                hop['form'] = "Pellet"
            elif h.form == HOP_FORM_LEAF:
                hop['form'] = "Leaf"
            elif h.form == HOP_FORM_PLUG:
                hop['form'] = "Plug"
            hops.append(hop)
        dic['hops'] = hops

        miscs = []
        for m in ImportBase().listeMiscs:
            misc = {}
            misc['name'] = m.name
            misc['type'] = m.type
            misc['use'] = m.use
            miscs.append(misc)
        dic['miscs'] = miscs

        yeasts = []
        for y in ImportBase().listeYeasts:
            yeast = {}
            yeast['name'] = y.name
            yeast['product_id'] = y.productId
            yeast['labo'] = y.labo
            yeast['form'] = y.form
            yeast['attenuation'] = y.attenuation
            yeasts.append(yeast)
        dic['yeasts'] = yeasts

        # data.append(dic)
        dic = json.dumps(dic)
        # dic = dic.replace("'","&#39;")
        return dic
kmee/odoo-brazil-hr
refs/heads/8.0-develop
l10n_br_hr_vacation/models/hr_payslip.py
2
# -*- coding: utf-8 -*- # Copyright 2017 KMEE Hendrix Costa # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). from openerp import fields, models class HrPayslip(models.Model): _inherit = 'hr.payslip' periodo_aquisitivo_provisao = fields.Char( string=u'Período Aquisitivo do Calculo', readonly=True, help=u'Campo apenas para informação do período aquisitivo', )
datasciencebr/serenata-toolbox
refs/heads/master
tests/journey/test_chamber_of_deputies_deputies_dataset.py
1
import os
from unittest import main, skipIf, TestCase

import numpy as np

from serenata_toolbox.chamber_of_deputies.deputies_dataset import DeputiesDataset


class TestDeputiesDataset(TestCase):
    """Journey test: fetch the deputies dataset and sanity-check its shape."""

    def setUp(self):
        self.subject = DeputiesDataset()

    def test_fetch(self):
        df = self.subject.fetch()

        # The fetched frame must expose exactly the documented columns.
        expected_columns = {
            'congressperson_id',
            'budget_id',
            'condition',
            'congressperson_document',
            'civil_name',
            'congressperson_name',
            'picture_url',
            'gender',
            'state',
            'party',
            'phone_number',
            'email',
        }
        self.assertEqual(expected_columns, set(df.columns))

        # Categorical columns carry only the known values.
        self.assertEqual({'male', 'female'}, set(df.gender.unique()))
        self.assertEqual({'Substitute', 'Holder'}, set(df.condition.unique()))


if __name__ == '__main__':
    main()
kadamski/certmaster
refs/heads/master
certmaster/CommonErrors.py
4
# This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Library General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. # # Copyright 2005 Dan Williams <dcbw@redhat.com> and Red Hat, Inc. from exceptions import Exception def canIgnoreSSLError(e): """ Identify common network errors that mean we cannot connect to the server """ # This is a bit complicated by the fact that different versions of # M2Crypto & OpenSSL seem to return different error codes for the # same type of error s = "%s" % e if e[0] == 104: # Connection refused return True elif e[0] == 111: # Connection reset by peer return True elif e[0] == 61: # Connection refused return True elif e[0] == 54: # Connection reset by peer return True elif s == "no certificate returned": return True elif s == "wrong version number": return True elif s == "unexpected eof": return True return False def canIgnoreSocketError(e): """ Identify common network errors that mean we cannot connect to the server """ try: if e[0] == 111: # Connection refused return True elif e[0] == 104: # Connection reset by peer return True elif e[0] == 61: # Connection refused return True except IndexError: return True return False # FIXME: is anything using this? remove underscores class CertMaster_Client_Exception(Exception): def __init__(self, value=None): Exception.__init__(self) self.value = value def __str__(self): return "%s" %(self.value,)
nemmeviu/hosts-footprint
refs/heads/master
global/general/management/commands/global_2_el.py
2
#encoding=utf8 import sys #from __future__ import print_function from django.core.management.base import BaseCommand, CommandError from general.models import * def main(options): # total tiendas all_locals = Local.objects.all() print(len(all_locals)) for x in all_locals: if options['action'] == 'save': x_obj = { 'country': x.city.country.name, 'city': x.city.city, 'businessunit': x.flag.businessunit.businessunit, 'flag': x.flag.flag, 'local_id': x.local_id, 'local_address': x.local_address, 'local_desc': x.local_desc, 'geo_point': { 'lat': x.lat, 'lon': x.lon } } el = ElsSave('locals') el.els_save(x_obj) else: print(""" country: %s \n city: %s \n businessunit: %s \n flag: %s \n local_id: %s \n local_address: %s \n local_desc: %s \n lat: %s \n lon: %s \n """ % ( x.city.country, x.city.city, x.flag.businessunit.businessunit, x.flag, x.local_id, x.local_address, x.local_desc, x.lat, x.lon) ) class Command(BaseCommand): help = 'create global object and save on elasticsearch' def add_arguments(self, parser): parser.add_argument('-a', '--action', required=False, \ help=u'save or console') def handle(self, *args, **options): main(options)
nagyistoce/edx-platform
refs/heads/master
openedx/core/lib/cache_utils.py
75
""" Utilities related to caching. """ import functools from xblock.core import XBlock def memoize_in_request_cache(request_cache_attr_name=None): """ Memoize a method call's results in the request_cache if there's one. Creates the cache key by joining the unicode of all the args with &; so, if your arg may use the default &, it may have false hits. Arguments: request_cache_attr_name - The name of the field or property in this method's containing class that stores the request_cache. """ def _decorator(func): """Outer method decorator.""" @functools.wraps(func) def _wrapper(self, *args, **kwargs): """ Wraps a method to memoize results. """ request_cache = getattr(self, request_cache_attr_name, None) if request_cache: cache_key = '&'.join([hashvalue(arg) for arg in args]) if cache_key in request_cache.data.setdefault(func.__name__, {}): return request_cache.data[func.__name__][cache_key] result = func(self, *args, **kwargs) request_cache.data[func.__name__][cache_key] = result return result else: return func(self, *args, **kwargs) return _wrapper return _decorator def hashvalue(arg): """ If arg is an xblock, use its location. otherwise just turn it into a string """ if isinstance(arg, XBlock): return unicode(arg.location) else: return unicode(arg)
egabancho/invenio-accounts
refs/heads/master
tests/test_accounts_user.py
1
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2006, 2007, 2008, 2010, 2011, 2013, 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.

"""Unit tests for the user handling library."""

from invenio.testsuite import InvenioTestCase


class UserTestCase(InvenioTestCase):
    """Test User class."""

    @property
    def config(self):
        """Extend the base config with the packages these tests need."""
        cfg = super(UserTestCase, self).config
        cfg['PACKAGES'] = ['invenio.base', 'invenio_accounts']
        return cfg

    def test_note_is_converted_to_string(self):
        from invenio_accounts.models import User
        user = User(email="test@test.pl", password="")
        user.note = 2
        # Assigning an int must come back as a str.
        self.assertTrue(isinstance(user.note, str))

    def test_verify_email_works_with_numbers_and_strings(self):
        from invenio_accounts.models import User
        first = User(email="test@test.pl", password="")
        first.note = 2
        self.assertTrue(first.verify_email())

        second = User(email="test2@test2.pl", password="")
        second.note = "2"
        self.assertTrue(second.verify_email())
dwillcox/nucplotlib
refs/heads/master
nucplot.py
1
""" nucplot is a script for plotting the results of a nuclear reaction network calculation from, e.g. Frank Timmes' TORCH code. execute 'python nucplot.py --help' for command line options. TORCH commonly produces lots of files with a subset of the total number of isotopes in each file for the same calculation. Before using these files as input, you should concatenate them into one file using the readcat and writecat functions defined in the Torch class. For an example, see the catTorch.py script provided. Use the concatenated file as the input for this script. Copyright 2015 Donald E. Willcox This file is part of nucplotlib. nucplotlib is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. nucplotlib is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with nucplotlib. If not, see <http://www.gnu.org/licenses/>. """ from __future__ import print_function import numpy as np import matplotlib as mpl import matplotlib.cm as cm from matplotlib import pyplot as plt from Nuclides import * from Torch import TorchZND import argparse # Parse command line parameters parser = argparse.ArgumentParser() parser.add_argument("input_file",type=str, help="Name of the input file containing time and nuclide tracks.") parser.add_argument("-o","--outputprefix",type=str,help="Prefix of the output plots (default: 'nuclides'). 
The time step will be appended with the appropriate file extension.") parser.add_argument("-ts","--starttime",type=float,help="Start plotting at given simulation time (default: plot all times in dataset).") parser.add_argument("-te","--endtime",type=float,help="End plotting at given simulation time (default: plot all times in dataset).") parser.add_argument("-n","--numplots",type=int,help="Generates the given number of plots evenly spaced in time. Cannot be used along with --stride. Note that if you specify numplots so high that the time domain must be subdivided more finely than the data points in the dataset, data points will not be duplicated in the output, so the number of plots saved will be less than numplots.") parser.add_argument("-s","--stride",type=int,help="Generates plots every specified number of time points (not necessarily evenly spaced!!). Cannot be used along with --numplots. Default stride is 1.") parser.add_argument("-cm","--colormap",type=str,help="Specifies the case-sensitive matplotlib colormap with which to represent abundances.") parser.add_argument("-cmin","--colormin",type=float,help="Specify the minimum value of the colormap. If --logabundances is set, you should specify the minimum value in logarithmic form. (default: 1.0e-5)") parser.add_argument("-cmax","--colormax",type=float,help="Specify the maximum value of the colormap. If --logabundances is set, you should specify the maximum value in logarithmic form. (default: 1.0)") parser.add_argument("-title","--title",type=str,help="Use the specified plot title text with the time corresponding to the plot. 
(Default is 'TORCH Nuclides').") # Haven't developed the code to do maxabundances yet parser.add_argument("-max","--maxabundances",action="store_true",help="Shade each cell by its maximum value within the time range specified by --starttime and --endtime or, by default, the whole time range.") parser.add_argument("-logx","--logabundances",action="store_true",help="Use base-10 logarithm of abundances for setting the color mapping.") parser.add_argument("-logt","--logtime",action="store_true",help="Use evenly spaced points in logarithmic time (if the option --numplots is specified). (Default is evenly spaced points in linear time.)") parser.add_argument("-png","--png",action="store_true",help="Save plots as pngs. (Default is eps).") parser.add_argument("-dpi","--resolution",type=int,help="Use the specified dpi and output pngs. (Default is eps).") parser.add_argument("-pdf","--pdf",action="store_true",help="Save plots as pdf. (Default is eps).") parser.add_argument("-eps","--eps",action="store_true",help="Save plots as eps. 
(In addition to other output options).") args = parser.parse_args() # Quickly sanity check the input arguments if args.stride and args.numplots: print('Error: please use only one of either the --numplots or --stride options!') exit() elif args.stride and args.maxabundances: print('Error: --maxabundances and --stride may not be used together!') exit() elif args.numplots and args.maxabundances: print('Error: --maxabundances and --numplots may not be used together!') exit() # Plotting parameters #n_range = [-0.5,100] #z_range = [-0.5,100] box_widths = 1 # Set color limits # Default cmin: 1.0e-5 # Default cmax: 1.0 cmap_cmin = 1.0e-5 cmap_cmax = 1.0 if args.colormin: if args.logabundances: cmap_cmin = np.power(10.0,args.colormin) else: cmap_cmin = args.colormin if args.colormax: if args.logabundances: cmap_cmax = np.power(10.0,args.colormax) else: cmap_cmax = args.colormax #cm.set_clim(cmap_cmin,cmap_cmax) if args.colormap: # Need to add try-except clause to handle incorrect colormap errors. 
cmap = cm.get_cmap(args.colormap) else: cmap = cm.get_cmap('Blues') #if args.logabundances: # cmap = cmap.LogNorm(cmap_cmin,cmap_cmax) # Data handling classes tznd = TorchZND() nucs = Nuclides() # Read the TORCH dataset and fill the Nuclides data structure tznd.readcat([args.input_file]) data = tznd.get_numpy_data() data['logtime'] = np.zeros(len(data['time']),dtype=np.float64) data['logtime'][1:] = np.log10(data['time'][1:]) data['logtime'][0] = np.log10(0.5*(data['time'][1]+data['time'][0])) nucs.load_dataset(data) # Get plot range from dataset nz_range = nucs.get_range_nz() n_range = nz_range['n'] n_range[1] = n_range[1] + 1 z_range = nz_range['z'] z_range[1] = z_range[1] + 1 if n_range[0]==0: n_range[0]=-0.5 if z_range[0]==0: z_range[0]=-0.5 # Set up the iteration over the dataset by interpreting arguments def round_time_index(d,t,n): # Round the time index down if necessary if n == 0: return 0 elif n == len(d['time']): return n-1 elif (t-d['time'][n-1] < d['time'][n]-t): return n-1 else: return n if args.starttime: plt_itime_begin = np.searchsorted(data['time'],args.starttime) plt_itime_begin = round_time_index(data,args.starttime,plt_itime_begin) else: plt_itime_begin = 0 if args.endtime: plt_itime_end = np.searchsorted(data['time'],args.endtime) plt_itime_end = round_time_index(data,args.endtime,plt_itime_end) else: plt_itime_end = len(data['time'])-1 if args.numplots: if args.logtime: plt_time_indices = np.zeros(args.numplots,dtype=int) if plt_itime_begin == 0: plt_log_itime_begin = plt_itime_begin+1 else: plt_log_itime_begin = plt_itime_begin plt_time_values = np.logspace(data['logtime'][plt_log_itime_begin],data['logtime'][plt_itime_end],args.numplots) #print(plt_time_values) plt_time_indices = np.searchsorted(data['time'],plt_time_values) #print(plt_time_indices) for pti in xrange(len(plt_time_indices)): if (plt_time_values[pti]-data['time'][plt_time_indices[pti]-1])<(data['time'][plt_time_indices[pti]]-plt_time_values[pti]): plt_time_indices[pti] -= 1 
#print(plt_time_indices) #tv = [] #for pti in plt_time_indices: # tv.append(data['time'][pti]) #print(tv) else: plt_time_values = np.linspace(data['time'][plt_itime_begin],data['time'][plt_itime_end],args.numplots) plt_time_indices = np.searchsorted(data['time'],plt_time_values) for pt in range(len(plt_time_values)): plt_time_indices[pt] = round_time_index(data,plt_time_values[pt],plt_time_indices[pt]) elif args.stride: plt_time_indices = range(plt_itime_begin,plt_itime_end,args.stride) plt_time_indices.append(plt_itime_end) else: plt_time_indices = range(plt_itime_begin,plt_itime_end) plt_time_indices.append(plt_itime_end) if args.outputprefix: plt_prefix = args.outputprefix else: plt_prefix = 'nuclides' if args.title: plt_title = args.title else: plt_title = 'TORCH Nuclides' def x2clog(x,cmin=cmap_cmin,cmax=cmap_cmax): y = (np.log10(x)-np.log10(cmin))/(np.log10(cmax)-np.log10(cmin)) if y > 1.0: return 1.0 elif y < 0: return 0.0 else: return y plt_dec_places = int(np.floor(np.log10(plt_time_indices[-1])))+1 # Iterate over the time steps in the dataset and plot for t_n in plt_time_indices: # Setup the plot fig = plt.gcf() fig.clf() axes = fig.add_axes([0.1,0.1,0.8,0.8]) # Plot isotope grid for i in nucs.nucdata: if args.logabundances: square = plt.Rectangle((i.n-0.5*box_widths,i.z-0.5*box_widths), box_widths,box_widths,facecolor=cmap(x2clog(i.x[t_n])), #box_widths,box_widths,facecolor=cmap(np.log10(i.x[t_n])), edgecolor='black') else: square = plt.Rectangle((i.n-0.5*box_widths,i.z-0.5*box_widths), box_widths,box_widths,facecolor=cmap(i.x[t_n]), edgecolor='black') axes.add_patch(square) # Set Plot Limits plt.ylim(z_range) plt.xlim(n_range) # Labels/Titles and Show/Save plt.title(plt_title + ', t = '+'{0:0.6e}'.format(data['time'][t_n]) + ' s') plt.xlabel('N') plt.ylabel('Z') # Setup colorbar axcb = mpl.colorbar.make_axes(axes,fraction=0.05)[0] cbar = mpl.colorbar.ColorbarBase(axcb, cmap=cmap, norm=mpl.colors.LogNorm(vmin=cmap_cmin, vmax=cmap_cmax)) #plt.show() 
t_n_spec = '{0:>0' + str(plt_dec_places) + '}' t_n_filename = t_n_spec.format(t_n) if args.resolution: plt.savefig(plt_prefix + '_'+t_n_filename+'.png',dpi=args.resolution) elif args.png: plt.savefig(plt_prefix + '_'+t_n_filename+'.png',dpi=300) if args.pdf: plt.savefig(plt_prefix + '_'+t_n_filename+'.pdf') if args.eps or not (args.resolution or args.png or args.pdf): plt.savefig(plt_prefix + '_'+t_n_filename+'.eps') print("Plotting complete!") print("If you would like to combine pngs into a video file (e.g. mp4), ") print("and have ffmpeg installed, consider using a command such as:") print("./ffmpeg -framerate 30 -pattern_type glob -i 'nuclides_*.png' -c:v libx264 -r 30 -pix_fmt yuv420p nuclides.mp4")
asm-products/movie-database-service
refs/heads/master
ani/lib/python2.7/site-packages/django/conf/locale/sr_Latn/__init__.py
12133432
Ghost-script/dyno-chat
refs/heads/master
kickchat/apps/pulsar/utils/settings/__init__.py
12133432
rhertzog/django
refs/heads/master
tests/migrations/test_migrations_no_changes/__init__.py
12133432
popazerty/test-1
refs/heads/master
lib/python/Plugins/SystemPlugins/CommonInterfaceAssignment/__init__.py
12133432
manassolanki/erpnext
refs/heads/develop
erpnext/patches/v8_0/update_student_groups_from_student_batches.py
40
# Copyright (c) 2017, Frappe and Contributors
# License: GNU General Public License v3. See license.txt

from __future__ import unicode_literals
import frappe
from frappe.model.utils.rename_field import *
from frappe.model.mapper import get_mapped_doc

def execute():
	"""Copy students and instructors from old Student Batch records into
	the Student Groups that replaced them, skipping entries already
	present on the group."""
	# Nothing to migrate on sites that never had the old doctype.
	if not frappe.db.table_exists("Student Batch"):
		return

	for batch in frappe.db.sql('''select name from `tabStudent Batch`''', as_dict=1):
		# Only migrate batches that have a same-named Student Group.
		if not frappe.db.exists("Student Group", batch.get("name")):
			continue
		group = frappe.get_doc("Student Group", batch.get("name"))

		if frappe.db.table_exists("Student Batch Student"):
			existing_students = frappe.db.sql_list('''select student from `tabStudent Group Student`
				where parent=%s''', (group.name))
			batch_students = frappe.db.sql_list('''select student from `tabStudent Batch Student`
				where parent=%s''', (group.name))
			# Append only the students not already on the group.
			missing_students = list(set(batch_students) - set(existing_students))
			if missing_students:
				group.extend("students", [{"student": d} for d in missing_students])

		if frappe.db.table_exists("Student Batch Instructor"):
			existing_instructors = frappe.db.sql_list('''select instructor from `tabStudent Group Instructor`
				where parent=%s''', (group.name))
			batch_instructors = frappe.db.sql_list('''select instructor from `tabStudent Batch Instructor`
				where parent=%s''', (group.name))
			# Append only the instructors not already on the group.
			missing_instructors = list(set(batch_instructors) - set(existing_instructors))
			if missing_instructors:
				group.extend("instructors", [{"instructor": d} for d in missing_instructors])

		group.save()
SergeyMakarenko/fbthrift
refs/heads/master
thrift/compiler/py/generate/__init__.py
5
#!/usr/local/bin/python2.6 -tt # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # import t_generator import t_cpp_generator import t_schema_generator __all__ = [t_cpp_generator, t_schema_generator, t_generator]
nopjmp/SickRage
refs/heads/master
sickbeard/name_parser/regexes.py
11
# coding=utf-8 # Author: Nic Wolfe <nic@wolfeden.ca> # URL: https://sickrage.github.io # # This file is part of SickRage. # # SickRage is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # SickRage is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with SickRage. If not, see <http://www.gnu.org/licenses/>. # all regexes are case insensitive from __future__ import print_function, unicode_literals normal_regexes = [ ('standard_repeat', # Show.Name.S01E02.S01E03.Source.Quality.Etc-Group # Show Name - S01E02 - S01E03 - S01E04 - Ep Name r''' ^(?P<series_name>.+?)[. _-]+ # Show_Name and separator s(?P<season_num>\d+)[. _-]* # S01 and optional separator e(?P<ep_num>\d+) # E02 and separator ([. _-]+s(?P=season_num)[. _-]* # S01 and optional separator e(?P<extra_ep_num>\d+))+ # E03/etc and separator [. _-]*((?P<extra_info>.+?) # Source_Quality_Etc- ((?<![. _-])(?<!WEB) # Make sure this is really the release group -(?P<release_group>[^ -]+([. _-]\[.*\])?))?)?$ # Group '''), ('fov_repeat', # Show.Name.1x02.1x03.Source.Quality.Etc-Group # Show Name - 1x02 - 1x03 - 1x04 - Ep Name r''' ^(?P<series_name>.+?)[. _-]+ # Show_Name and separator (?P<season_num>\d+)x # 1x (?P<ep_num>\d+) # 02 and separator ([. _-]+(?P=season_num)x # 1x (?P<extra_ep_num>\d+))+ # 03/etc and separator [. _-]*((?P<extra_info>.+?) # Source_Quality_Etc- ((?<![. _-])(?<!WEB) # Make sure this is really the release group -(?P<release_group>[^ -]+([. 
_-]\[.*\])?))?)?$ # Group '''), ('standard', # Show.Name.S01E02.Source.Quality.Etc-Group # Show Name - S01E02 - My Ep Name # Show.Name.S01.E03.My.Ep.Name # Show.Name.S01E02E03.Source.Quality.Etc-Group # Show Name - S01E02-03 - My Ep Name # Show.Name.S01.E02.E03 r''' ^((?P<series_name>.+?)[. _-]+)? # Show_Name and separator \(?s(?P<season_num>\d+)[. _-]* # S01 and optional separator e(?P<ep_num>\d+)\)? # E02 and separator (([. _-]*e|-) # linking e/- char (?P<extra_ep_num>(?!(1080|720|480)[pi])\d+)(\))?)* # additional E03/etc ([. _,-]+((?P<extra_info>.+?) # Source_Quality_Etc- ((?<![. _-])(?<!WEB) # Make sure this is really the release group -(?P<release_group>[^ -]+([. _-]\[.*\])?))?)?)?$ # Group '''), ('newpct', # American Horror Story - Temporada 4 HDTV x264[Cap.408_409]SPANISH AUDIO -NEWPCT # American Horror Story - Temporada 4 [HDTV][Cap.408][Espanol Castellano] # American Horror Story - Temporada 4 HDTV x264[Cap.408]SPANISH AUDIO –NEWPCT) r''' (?P<series_name>.+?).-.+\d{1,2}[ ,.] # Show name: American Horror Story (?P<extra_info>.+)\[Cap\. # Quality: HDTV x264, [HDTV], HDTV x264 (?P<season_num>\d{1,2}) # Season Number: 4 (?P<ep_num>\d{2}) # Episode Number: 08 ((_\d{1,2}(?P<extra_ep_num>\d{2}))|.*\]) # Episode number2: 09 '''), ('fov', # Show_Name.1x02.Source_Quality_Etc-Group # Show Name - 1x02 - My Ep Name # Show_Name.1x02x03x04.Source_Quality_Etc-Group # Show Name - 1x02-03-04 - My Ep Name r''' ^((?!\[.+?\])(?P<series_name>.+?)[\[. _-]+)? # Show_Name and separator if no brackets group (?P<season_num>\d+)x # 1x (?P<ep_num>\d+) # 02 and separator (([. _-]*x|-) # linking x/- char (?P<extra_ep_num> (?!(1080|720|480)[pi])(?!(?<=x)264) # ignore obviously wrong multi-eps \d+))* # additional x03/etc [\]. _-]*((?P<extra_info>.+?) # Source_Quality_Etc- ((?<![. _-])(?<!WEB) # Make sure this is really the release group -(?P<release_group>[^ -]+([. 
_-]\[.*\])?))?)?$ # Group '''), ('scene_date_format', # Show.Name.2010.11.23.Source.Quality.Etc-Group # Show Name - 2010-11-23 - Ep Name r''' ^((?P<series_name>.+?)[. _-]+)? # Show_Name and separator (?P<air_date>(\d+[. _-]\d+[. _-]\d+)|(\d+\w+[. _-]\w+[. _-]\d+)) [. _-]*((?P<extra_info>.+?) # Source_Quality_Etc- ((?<![. _-])(?<!WEB) # Make sure this is really the release group -(?P<release_group>[^ -]+([. _-]\[.*\])?))?)?$ # Group '''), ('scene_sports_format', # Show.Name.100.Event.2010.11.23.Source.Quality.Etc-Group # Show.Name.2010.11.23.Source.Quality.Etc-Group # Show Name - 2010-11-23 - Ep Name r''' ^(?P<series_name>.*?(UEFA|MLB|ESPN|WWE|MMA|UFC|TNA|EPL|NASCAR|NBA|NFL|NHL|NRL|PGA|SUPER LEAGUE|FORMULA|FIFA|NETBALL|MOTOGP).*?)[. _-]+ ((?P<series_num>\d{1,3})[. _-]+)? (?P<air_date>(\d+[. _-]\d+[. _-]\d+)|(\d+\w+[. _-]\w+[. _-]\d+))[. _-]+ ((?P<extra_info>.+?)((?<![. _-]) (?<!WEB)-(?P<release_group>[^ -]+([. _-]\[.*\])?))?)?$ '''), ('stupid_with_denotative', # aaf-sns03e09 # flhd-supernaturals07e02-1080p r''' (?P<release_group>.+?)(?<!WEB)-(?P<series_name>\w*)(?<!\d)[\. ]? # aaf-sn (?!264) # don't count x264 s(?P<season_num>\d{1,2}) # s03 e(?P<ep_num>\d{2})(?:(rp|-(1080p|720p)))?$ # e09 '''), ('stupid', # tpz-abc102 r''' (?P<release_group>.+?)(?<!WEB)-(?P<series_name>\w*)(?<!\d)[\. ]? # tpz-abc (?!264) # don't count x264 (?P<season_num>\d{1,2}) # 1 (?P<ep_num>\d{2})$ # 02 '''), ('verbose', # Show Name Season 1 Episode 2 Ep Name r''' ^(?P<series_name>.+?)[. _-]+ # Show Name and separator (season|series)[. _-]+ # season and separator (?P<season_num>\d+)[. _-]+ # 1 episode[. _-]+ # episode and separator (?P<ep_num>\d+)[. _-]+ # 02 and separator (?P<extra_info>.+)$ # Source_Quality_Etc- '''), ('season_only', # Show.Name.S01.Source.Quality.Etc-Group r''' ^((?P<series_name>.+?)[. _-]+)? # Show_Name and separator s(eason[. _-])? # S01/Season 01 (?P<season_num>\d+)[. _-]* # S01 and optional separator [. _-]*((?P<extra_info>.+?) # Source_Quality_Etc- ((?<![. 
_-])(?<!WEB) # Make sure this is really the release group -(?P<release_group>[^ -]+([. _-]\[.*\])?))?)?$ # Group '''), ('no_season_multi_ep', # Show.Name.E02-03 # Show.Name.E02.2010 r''' ^((?P<series_name>.+?)[. _-]+)? # Show_Name and separator (e(p(isode)?)?|part|pt)[. _-]? # e, ep, episode, or part (?P<ep_num>(\d+|(?<!e)[ivx]+)) # first ep num ((([. _-]+(and|&|to)[. _-]+)|-) # and/&/to joiner (?P<extra_ep_num>(?!(1080|720|480)[pi])(\d+|(?<!e)[ivx]+))[. _-]) # second ep num ([. _-]*(?P<extra_info>.+?) # Source_Quality_Etc- ((?<![. _-])(?<!WEB) # Make sure this is really the release group -(?P<release_group>[^ -]+([. _-]\[.*\])?))?)?$ # Group '''), ('no_season_general', # Show.Name.E23.Test # Show.Name.Part.3.Source.Quality.Etc-Group # Show.Name.Part.1.and.Part.2.Blah-Group r''' ^((?P<series_name>.+?)[. _-]+)? # Show_Name and separator (e(p(isode)?)?|part|pt)[. _-]? # e, ep, episode, or part (?P<ep_num>(\d+|((?<!e)[ivx]+(?=[. _-])))) # first ep num ([. _-]+((and|&|to)[. _-]+)? # and/&/to joiner ((e(p(isode)?)?|part|pt)[. _-]?) # e, ep, episode, or part (?P<extra_ep_num>(?!(1080|720|480)[pi]) (\d+|((?<!e)[ivx]+(?=[. _-]))))[. _-])* # second ep num ([. _-]*(?P<extra_info>.+?) # Source_Quality_Etc- ((?<![. _-])(?<!WEB) # Make sure this is really the release group -(?P<release_group>[^ -]+([. _-]\[.*\])?))?)?$ # Group '''), ('bare', # Show.Name.102.Source.Quality.Etc-Group r''' ^(?P<series_name>.+?)[. _-]+ # Show_Name and separator (?P<season_num>\d{1,2}) # 1 (e?) # Optional episode separator (?P<ep_num>\d{2}) # 02 and separator ([. _-]+(?P<extra_info>(?!\d{3}[. _-]+)[^-]+) # Source_Quality_Etc- (-(?P<release_group>[^ -]+([. _-]\[.*\])?))?)?$ # Group '''), ('no_season', # Show Name - 01 - Ep Name # 01 - Ep Name # 01 - Ep Name r''' ^((?P<series_name>.+?)(?:[. _-]{2,}|[. _]))? # Show_Name and separator (?P<ep_num>\d{1,3}) # 02 (?:-(?P<extra_ep_num>\d{1,3}))* # -03-04-05 etc (\s*(?:of)?\s*\d{1,3})? # of joiner (with or without spaces) and series total ep [. 
_-]+((?P<extra_info>.+?) # Source_Quality_Etc- ((?<![. _-])(?<!WEB) # Make sure this is really the release group -(?P<release_group>[^ -]+([. _-]\[.*\])?))?)?$ # Group '''), ] anime_regexes = [ ('anime_horriblesubs', # [HorribleSubs] Maria the Virgin Witch - 01 [720p].mkv r''' ^(?:\[(?P<release_group>HorribleSubs)\][\s\.]) (?:(?P<series_name>.+?)[\s\.]-[\s\.]) (?P<ep_ab_num>((?!(1080|720|480)[pi]))\d{1,3}) (-(?P<extra_ab_ep_num>((?!(1080|720|480)[pi])|(?![hx].?264))\d{1,3}))? (?:v(?P<version>[0-9]))? (?:[\w\.\s]*) (?:(?:(?:[\[\(])(?P<extra_info>\d{3,4}[xp]?\d{0,4}[\.\w\s-]*)(?:[\]\)]))|(?:\d{3,4}[xp])) .*? '''), ('anime_ultimate', r''' ^(?:\[(?P<release_group>.+?)\][ ._-]*) (?P<series_name>.+?)[ ._-]+ (?P<ep_ab_num>((?!(1080|720|480)[pi])|(?![hx].?264))\d{1,3}) (-(?P<extra_ab_ep_num>((?!(1080|720|480)[pi])|(?![hx].?264))\d{1,3}))?[ ._-]+? (?:v(?P<version>[0-9]))? (?:[\w\.]*) (?:(?:(?:[\[\(])(?P<extra_info>\d{3,4}[xp]?\d{0,4}[\.\w\s-]*)(?:[\]\)]))|(?:\d{3,4}[xp])) (?:[ ._]?\[(?P<crc>\w+)\])? .*? '''), ('anime_french_fansub', # [Kaerizaki-Fansub]_One_Piece_727_[VOSTFR][HD_1280x720].mp4 # [Titania-Fansub]_Fairy_Tail_269_[VOSTFR]_[720p]_[1921E00C].mp4 # [ISLAND]One_Piece_726_[VOSTFR]_[V1]_[8bit]_[720p]_[2F7B3FA2].mp4 # Naruto Shippuden 445 VOSTFR par Fansub-Resistance (1280*720) - version MQ # Dragon Ball Super 015 VOSTFR par Fansub-Resistance (1280x720) - HQ version # [Mystic.Z-Team].Dragon.Ball.Super.-.épisode.36.VOSTFR.720p # [Z-Team][DBSuper.pw] Dragon Ball Super - 028 (VOSTFR)(720p AAC)(MP4) # [SnF] Shokugeki no Souma - 24 VOSTFR [720p][41761A60].mkv # [Y-F] Ao no Kanata no Four Rhythm - 03 Vostfr HD 8bits # Phantasy Star Online 2 - The Animation 04 vostfr FHD # Detective Conan 804 vostfr HD # Active Raid 04 vostfr [1080p] # Sekko Boys 04 vostfr [720p] r''' ^(\[(?P<release_group>.+?)\][ ._-]*)? # Release Group and separator (Optional) ((\[|\().+?(\]|\))[ ._-]*)? 
# Extra info (Optionnal) (?P<series_name>.+?)[ ._-]+ # Show_Name and separator ((épisode|episode|Episode)[ ._-]+)? # Sentence for special fansub (Optionnal) (?P<ep_ab_num>\d{1,3})[ ._-]+ # Episode number and separator (((\[|\())?(VOSTFR|vostfr|Vostfr|VostFR|vostFR)((\]|\)))?([ ._-])*)+ # Subtitle Language and separator (par Fansub-Resistance)? # Sentence for special fansub (Optionnal) (\[((v|V)(?P<version>[0-9]))\]([ ._-])*)? # Version and separator (Optional) ((\[(8|10)(Bits|bits|Bit|bit)\])?([ ._-])*)? # Colour resolution and separator (Optional) ((\[|\()((FHD|HD|SD)*([ ._-])*((?P<extra_info>\d{3,4}[xp*]?\d{0,4}[\.\w\s-]*)))(\]|\)))? # Source_Quality_Etc- ([ ._-]*\[(?P<crc>\w{8})\])? # CRC (Optional) .* # Separator and EOL '''), ('anime_standard', # [Group Name] Show Name.13-14 # [Group Name] Show Name - 13-14 # Show Name 13-14 # [Group Name] Show Name.13 # [Group Name] Show Name - 13 # Show Name 13 r''' ^(\[(?P<release_group>.+?)\][ ._-]*)? # Release Group and separator (?P<series_name>.+?)[ ._-]+ # Show_Name and separator (?P<ep_ab_num>((?!(1080|720|480)[pi])|(?![hx].?264))\d{1,3}) # E01 (-(?P<extra_ab_ep_num>((?!(1080|720|480)[pi])|(?![hx].?264))\d{1,3}))? # E02 (v(?P<version>[0-9]))? # version [ ._-]+\[(?P<extra_info>\d{3,4}[xp]?\d{0,4}[\.\w\s-]*)\] # Source_Quality_Etc- (\[(?P<crc>\w{8})\])? # CRC .*? # Separator and EOL '''), ('anime_standard_round', # [Stratos-Subs]_Infinite_Stratos_-_12_(1280x720_H.264_AAC)_[379759DB] # [ShinBunBu-Subs] Bleach - 02-03 (CX 1280x720 x264 AAC) r''' ^(\[(?P<release_group>.+?)\][ ._-]*)? # Release Group and separator (?P<series_name>.+?)[ ._-]+ # Show_Name and separator (?P<ep_ab_num>((?!(1080|720|480)[pi])|(?![hx].?264))\d{1,3}) # E01 (-(?P<extra_ab_ep_num>((?!(1080|720|480)[pi])|(?![hx].?264))\d{1,3}))? # E02 (v(?P<version>[0-9]))? # version [ ._-]+\((?P<extra_info>(CX[ ._-]?)?\d{3,4}[xp]?\d{0,4}[\.\w\s-]*)\) # Source_Quality_Etc- (\[(?P<crc>\w{8})\])? # CRC .*? 
# Separator and EOL '''), ('anime_slash', # [SGKK] Bleach 312v1 [720p/MKV] r''' ^(\[(?P<release_group>.+?)\][ ._-]*)? # Release Group and separator (?P<series_name>.+?)[ ._-]+ # Show_Name and separator (?P<ep_ab_num>((?!(1080|720|480)[pi])|(?![hx].?264))\d{1,3}) # E01 (-(?P<extra_ab_ep_num>((?!(1080|720|480)[pi])|(?![hx].?264))\d{1,3}))? # E02 (v(?P<version>[0-9]))? # version [ ._-]+\[(?P<extra_info>\d{3,4}p) # Source_Quality_Etc- (\[(?P<crc>\w{8})\])? # CRC .*? # Separator and EOL '''), ('anime_standard_codec', # [Ayako]_Infinite_Stratos_-_IS_-_07_[H264][720p][EB7838FC] # [Ayako] Infinite Stratos - IS - 07v2 [H264][720p][44419534] # [Ayako-Shikkaku] Oniichan no Koto Nanka Zenzen Suki Janain Dakara ne - 10 [LQ][h264][720p] [8853B21C] r''' ^(\[(?P<release_group>.+?)\][ ._-]*)? # Release Group and separator (?P<series_name>.+?)[ ._]* # Show_Name and separator ([ ._-]+-[ ._-]+[A-Z]+[ ._-]+)?[ ._-]+ # funny stuff, this is sooo nuts ! this will kick me in the butt one day (?P<ep_ab_num>((?!(1080|720|480)[pi])|(?![hx].?264))\d{1,3}) # E01 (-(?P<extra_ab_ep_num>((?!(1080|720|480)[pi])|(?![hx].?264))\d{1,3}))? # E02 (v(?P<version>[0-9]))? # version ([ ._-](\[\w{1,2}\])?\[[a-z][.]?\w{2,4}\])? #codec [ ._-]*\[(?P<extra_info>(\d{3,4}[xp]?\d{0,4})?[\.\w\s-]*)\] # Source_Quality_Etc- (\[(?P<crc>\w{8})\])? .*? # Separator and EOL '''), ('anime_codec_crc', r''' ^(?:\[(?P<release_group>.*?)\][ ._-]*)? (?:(?P<series_name>.*?)[ ._-]*)? (?:(?P<ep_ab_num>(((?!(1080|720|480)[pi])|(?![hx].?264))\d{1,3}))[ ._-]*).+? (?:\[(?P<codec>.*?)\][ ._-]*) (?:\[(?P<crc>\w{8})\])? .*? '''), ('anime SxEE', # Show_Name.1x02.Source_Quality_Etc-Group # Show Name - 1x02 - My Ep Name # Show_Name.1x02x03x04.Source_Quality_Etc-Group # Show Name - 1x02-03-04 - My Ep Name r''' ^((?!\[.+?\])(?P<series_name>.+?)[\[. _-]+)? # Show_Name and separator if no brackets group (?P<season_num>\d+)x # 1x (?P<ep_num>\d+) # 02 and separator (([. 
_-]*x|-) # linking x/- char (?P<extra_ep_num> (?!(1080|720|480)[pi])(?!(?<=x)264) # ignore obviously wrong multi-eps \d+))* # additional x03/etc [\]. _-]*((?P<extra_info>.+?) # Source_Quality_Etc- ((?<![. _-])(?<!WEB) # Make sure this is really the release group -(?P<release_group>[^ -]+([. _-]\[.*\])?))?)?$ # Group '''), ('anime_SxxExx', # Show.Name.S01E02.Source.Quality.Etc-Group # Show Name - S01E02 - My Ep Name # Show.Name.S01.E03.My.Ep.Name # Show.Name.S01E02E03.Source.Quality.Etc-Group # Show Name - S01E02-03 - My Ep Name # Show.Name.S01.E02.E03 # Show Name - S01E02 # Show Name - S01E02-03 r''' ^((?P<series_name>.+?)[. _-]+)? # Show_Name and separator (\()?s(?P<season_num>\d+)[. _-]* # S01 and optional separator e(?P<ep_num>\d+)(\))? # E02 and separator (([. _-]*e|-) # linking e/- char (?P<extra_ep_num>(?!(1080|720|480)[pi])\d+)(\))?)* # additional E03/etc ([. _-]+((?P<extra_info>.+?))? # Source_Quality_Etc- ((?<![. _-])(?<!WEB) # Make sure this is really the release group -(?P<release_group>[^ -]+([. _-]\[.*\])?))?)?$ # Group '''), ('anime_and_normal', # Bleach - s16e03-04 - 313-314 # Bleach.s16e03-04.313-314 # Bleach s16e03e04 313-314 r''' ^(?P<series_name>.+?)[ ._-]+ # start of string and series name and non optinal separator [sS](?P<season_num>\d+)[. _-]* # S01 and optional separator [eE](?P<ep_num>\d+) # epipisode E02 (([. _-]*e|-) # linking e/- char (?P<extra_ep_num>\d+))* # additional E03/etc ([ ._-]{2,}|[ ._]+) # if "-" is used to separate at least something else has to be there(->{2,}) "s16e03-04-313-314" would make sens any way ((?P<ep_ab_num>((?!(1080|720|480)[pi])|(?![hx].?264))\d{1,3}))? # absolute number (-(?P<extra_ab_ep_num>((?!(1080|720|480)[pi])|(?![hx].?264))\d{1,3}))? # "-" as separator and anditional absolute number, all optinal (v(?P<version>[0-9]))? # the version e.g. "v2" .*? 
'''), ('anime_and_normal_x', # Bleach - s16e03-04 - 313-314 # Bleach.s16e03-04.313-314 # Bleach s16e03e04 313-314 r''' ^(?P<series_name>.+?)[ ._-]+ # start of string and series name and non optinal separator (?P<season_num>\d+)[. _-]* # S01 and optional separator [xX](?P<ep_num>\d+) # epipisode E02 (([. _-]*e|-) # linking e/- char (?P<extra_ep_num>\d+))* # additional E03/etc ([ ._-]{2,}|[ ._]+) # if "-" is used to separate at least something else has to be there(->{2,}) "s16e03-04-313-314" would make sens any way ((?P<ep_ab_num>((?!(1080|720|480)[pi])|(?![hx].?264))\d{1,3}))? # absolute number (-(?P<extra_ab_ep_num>((?!(1080|720|480)[pi])|(?![hx].?264))\d{1,3}))? # "-" as separator and anditional absolute number, all optinal (v(?P<version>[0-9]))? # the version e.g. "v2" .*? '''), ('anime_and_normal_reverse', # Bleach - 313-314 - s16e03-04 r''' ^(?P<series_name>.+?)[ ._-]+ # start of string and series name and non optinal separator (?P<ep_ab_num>((?!(1080|720|480)[pi])|(?![hx].?264))\d{1,3}) # absolute number (-(?P<extra_ab_ep_num>((?!(1080|720|480)[pi])|(?![hx].?264))\d{1,3}))? # "-" as separator and anditional absolute number, all optinal (v(?P<version>[0-9]))? # the version e.g. "v2" ([ ._-]{2,}|[ ._]+) # if "-" is used to separate at least something else has to be there(->{2,}) "s16e03-04-313-314" would make sens any way [sS](?P<season_num>\d+)[. _-]* # S01 and optional separator [eE](?P<ep_num>\d+) # epipisode E02 (([. _-]*e|-) # linking e/- char (?P<extra_ep_num>\d+))* # additional E03/etc .*? '''), ('anime_and_normal_front', # 165.Naruto Shippuuden.s08e014 r''' ^(?P<ep_ab_num>((?!(1080|720|480)[pi])|(?![hx].?264))\d{1,3}) # start of string and absolute number (-(?P<extra_ab_ep_num>((?!(1080|720|480)[pi])|(?![hx].?264))\d{1,3}))? # "-" as separator and anditional absolute number, all optinal (v(?P<version>[0-9]))?[ ._-]+ # the version e.g. "v2" (?P<series_name>.+?)[ ._-]+ [sS](?P<season_num>\d+)[. _-]* # S01 and optional separator [eE](?P<ep_num>\d+) (([. 
_-]*e|-) # linking e/- char (?P<extra_ep_num>\d+))* # additional E03/etc .*? '''), ('anime_ep_name', r''' ^(?:\[(?P<release_group>.+?)\][ ._-]*) (?P<series_name>.+?)[ ._-]+ (?P<ep_ab_num>((?!(1080|720|480)[pi])|(?![hx].?264))\d{1,3}) (-(?P<extra_ab_ep_num>((?!(1080|720|480)[pi])|(?![hx].?264))\d{1,3}))?[ ._-]*? (?:v(?P<version>[0-9])[ ._-]+?)? (?:.+?[ ._-]+?)? \[(?P<extra_info>\w+)\][ ._-]? (?:\[(?P<crc>\w{8})\])? .*? '''), ('anime_WarB3asT', # 003. Show Name - Ep Name.ext # 003-004. Show Name - Ep Name.ext r''' ^(?P<ep_ab_num>\d{3,4})(-(?P<extra_ab_ep_num>\d{3,4}))?\.\s+(?P<series_name>.+?)\s-\s.* '''), ('anime_bare', # One Piece - 102 # [ACX]_Wolf's_Spirit_001.mkv r''' ^(\[(?P<release_group>.+?)\][ ._-]*)? (?P<series_name>.+?)[ ._-]+ # Show_Name and separator (?P<ep_ab_num>((?!(1080|720|480)[pi])|(?![hx].?264))\d{1,3}) # E01 (-(?P<extra_ab_ep_num>((?!(1080|720|480)[pi])|(?![hx].?264))\d{1,3}))? # E02 (v(?P<version>[0-9]))? # v2 .*? # Separator and EOL '''), ]
rbrito/pkg-youtube-dl
refs/heads/master
youtube_dl/extractor/ora.py
87
# coding: utf-8
from __future__ import unicode_literals

import re

from .common import InfoExtractor
from ..compat import compat_urlparse
from ..utils import (
    get_element_by_attribute,
    qualities,
    unescapeHTML,
)


class OraTVIE(InfoExtractor):
    """Extractor for video pages on ora.tv and unsafespeech.com."""

    _VALID_URL = r'https?://(?:www\.)?(?:ora\.tv|unsafespeech\.com)/([^/]+/)*(?P<id>[^/\?#]+)'
    _TESTS = [{
        'url': 'https://www.ora.tv/larrykingnow/2015/12/16/vine-youtube-stars-zach-king-king-bach-on-their-viral-videos-0_36jupg6090pq',
        'md5': 'fa33717591c631ec93b04b0e330df786',
        'info_dict': {
            'id': '50178',
            'ext': 'mp4',
            'title': 'Vine & YouTube Stars Zach King & King Bach On Their Viral Videos!',
            'description': 'md5:ebbc5b1424dd5dba7be7538148287ac1',
        }
    }, {
        'url': 'http://www.unsafespeech.com/video/2016/5/10/student-self-censorship-and-the-thought-police-on-university-campuses-0_6622bnkppw4d',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        """Extract video metadata and formats from an ora.tv page.

        Prefers the HLS stream embedded in the page's JSON blob; if no
        HLS URL is present, delegates to the Youtube extractor via the
        embedded youtube_id instead.
        """
        display_id = self._match_id(url)
        webpage = self._download_webpage(url, display_id)
        # Pull the inline JSON object describing the current video.
        video_data = self._search_regex(
            r'"(?:video|current)"\s*:\s*({[^}]+?})', webpage, 'current video')
        # HLS manifest URL; optional (None default) — absence means the
        # page is a Youtube embed handled in the else branch below.
        m3u8_url = self._search_regex(
            r'hls_stream"?\s*:\s*"([^"]+)', video_data, 'm3u8 url', None)
        if m3u8_url:
            formats = self._extract_m3u8_formats(
                m3u8_url, display_id, 'mp4', 'm3u8_native',
                m3u8_id='hls', fatal=False)
            # similar to GameSpotIE: derive direct HTTP progressive URLs
            # from the quality list encoded in the m3u8 path.
            m3u8_path = compat_urlparse.urlparse(m3u8_url).path
            QUALITIES_RE = r'((,[a-z]+\d+)+,?)'
            # e.g. ",mobile400,basic600," -> ['mobile400', 'basic600']
            available_qualities = self._search_regex(
                QUALITIES_RE, m3u8_path, 'qualities').strip(',').split(',')
            # Drop the leading path component, then substitute each
            # quality name into the template to build per-quality URLs.
            http_path = m3u8_path[1:].split('/', 1)[1]
            http_template = re.sub(QUALITIES_RE, r'%s', http_path)
            http_template = http_template.replace('.csmil/master.m3u8', '')
            http_template = compat_urlparse.urljoin(
                'http://videocdn-pmd.ora.tv/', http_template)
            # Rank known quality labels from worst to best.
            preference = qualities(
                ['mobile400', 'basic400', 'basic600', 'sd900', 'sd1200',
                 'sd1500', 'hd720', 'hd1080'])
            for q in available_qualities:
                formats.append({
                    'url': http_template % q,
                    'format_id': q,
                    'preference': preference(q),
                })
            self._sort_formats(formats)
        else:
            # No HLS stream: page embeds a Youtube video instead.
            return self.url_result(self._search_regex(
                r'"youtube_id"\s*:\s*"([^"]+)', webpage, 'youtube id'), 'Youtube')
        return {
            'id': self._search_regex(
                r'"id"\s*:\s*(\d+)', video_data, 'video id', default=display_id),
            'display_id': display_id,
            'title': unescapeHTML(self._og_search_title(webpage)),
            'description': get_element_by_attribute(
                'class', 'video_txt_decription', webpage),
            'thumbnail': self._proto_relative_url(self._search_regex(
                r'"thumb"\s*:\s*"([^"]+)', video_data, 'thumbnail', None)),
            'formats': formats,
        }
mdavid/horizon
refs/heads/master
openstack_dashboard/test/test_plugins/panel_group_tests.py
44
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import copy

from django.conf import settings
from django.test.utils import override_settings

import horizon

from openstack_dashboard.test import helpers as test
from openstack_dashboard.test.test_panels.plugin_panel \
    import panel as plugin_panel
from openstack_dashboard.test.test_panels.second_panel \
    import panel as second_panel
import openstack_dashboard.test.test_plugins.panel_group_config
from openstack_dashboard.utils import settings as util_settings


# Slugs of the two panel groups registered by the plugin config module.
PANEL_GROUP_SLUG = 'plugin_panel_group'
SECOND_PANEL_GROUP_SLUG = 'second_panel_group'

# Work on deep copies so the plugin registration below does not mutate the
# global Django settings shared with other test modules.
HORIZON_CONFIG = copy.deepcopy(settings.HORIZON_CONFIG)
INSTALLED_APPS = list(settings.INSTALLED_APPS)

# NOTE: Ensure dashboards and default_dashboard are not included in
# HORIZON_CONFIG to ensure warning messages from update_dashboards below.
HORIZON_CONFIG.pop('dashboards', None)
HORIZON_CONFIG.pop('default_dashboard', None)

# Register the test plugin's panel groups/panels into the copied config.
util_settings.update_dashboards([
    openstack_dashboard.test.test_plugins.panel_group_config,
], HORIZON_CONFIG, INSTALLED_APPS)


@override_settings(HORIZON_CONFIG=HORIZON_CONFIG,
                   INSTALLED_APPS=INSTALLED_APPS)
class PanelGroupPluginTests(test.PluginTestCase):
    """Verify that plugin-defined panel groups and panels are registered
    on the "admin" dashboard via the pluggable-settings mechanism.
    """

    def test_add_panel_group(self):
        # The first plugin panel group must be registered on the dashboard.
        dashboard = horizon.get_dashboard("admin")
        self.assertIsNotNone(dashboard.get_panel_group(PANEL_GROUP_SLUG))

    def test_add_second_panel_group(self):
        # Check that the second panel group was added to the dashboard.
        dashboard = horizon.get_dashboard("admin")
        self.assertIsNotNone(
            dashboard.get_panel_group(SECOND_PANEL_GROUP_SLUG))

    def test_add_panel(self):
        # Check that the panel is in its configured dashboard and panel group.
        dashboard = horizon.get_dashboard("admin")
        panel_group = dashboard.get_panel_group(PANEL_GROUP_SLUG)
        self.assertIn(plugin_panel.PluginPanel,
                      [p.__class__ for p in dashboard.get_panels()])
        self.assertIn(plugin_panel.PluginPanel,
                      [p.__class__ for p in panel_group])

    def test_add_second_panel(self):
        # Check that the second panel is in its configured dashboard and panel
        # group.
        dashboard = horizon.get_dashboard("admin")
        second_panel_group = dashboard.get_panel_group(SECOND_PANEL_GROUP_SLUG)
        self.assertIn(second_panel.SecondPanel,
                      [p.__class__ for p in dashboard.get_panels()])
        self.assertIn(second_panel.SecondPanel,
                      [p.__class__ for p in second_panel_group])

    def test_unregistered_panel_group(self):
        # Looking up a slug that no plugin registered must return None.
        dashboard = horizon.get_dashboard("admin")
        self.assertIsNone(dashboard.get_panel_group("nonexistent_panel"))
sarthfrey/Texty
refs/heads/master
lib/httplib2/__init__.py
106
from __future__ import generators
"""
httplib2

A caching http interface that supports ETags and gzip
to conserve bandwidth.

Requires Python 2.3 or later

Changelog:
2007-08-18, Rick: Modified so it's able to use a socks proxy if needed.

"""

__author__ = "Joe Gregorio (joe@bitworking.org)"
__copyright__ = "Copyright 2006, Joe Gregorio"
__contributors__ = ["Thomas Broyer (t.broyer@ltgt.net)",
    "James Antill",
    "Xavier Verges Farrero",
    "Jonathan Feinberg",
    "Blair Zajac",
    "Sam Ruby",
    "Louis Nyffenegger"]
__license__ = "MIT"
__version__ = "0.9.2"

# NOTE(review): this is the Python 2 line of httplib2 — StringIO, httplib,
# urlparse, print statements and has_key are all Python-2-only.
import re
import sys
import email
import email.Utils
import email.Message
import email.FeedParser
import StringIO
import gzip
import zlib
import httplib
import urlparse
import urllib
import base64
import os
import copy
import calendar
import time
import random
import errno
try:
    from hashlib import sha1 as _sha, md5 as _md5
except ImportError:
    # prior to Python 2.5, these were separate modules
    import sha
    import md5
    _sha = sha.new
    _md5 = md5.new
import hmac
from gettext import gettext as _
import socket

# Optional SOCKS proxy support: prefer the bundled copy, fall back to a
# system-wide module, and degrade to None (proxying disabled) if absent.
try:
    from httplib2 import socks
except ImportError:
    try:
        import socks
    except (ImportError, AttributeError):
        socks = None

# Build the appropriate socket wrapper for ssl
try:
    import ssl # python 2.6
    ssl_SSLError = ssl.SSLError
    def _ssl_wrap_socket(sock, key_file, cert_file,
                         disable_validation, ca_certs):
        """Wrap *sock* in SSL, optionally validating against *ca_certs*."""
        if disable_validation:
            cert_reqs = ssl.CERT_NONE
        else:
            cert_reqs = ssl.CERT_REQUIRED
        # We should be specifying SSL version 3 or TLS v1, but the ssl module
        # doesn't expose the necessary knobs. So we need to go with the default
        # of SSLv23.
        return ssl.wrap_socket(sock, keyfile=key_file, certfile=cert_file,
                               cert_reqs=cert_reqs, ca_certs=ca_certs)
except (AttributeError, ImportError):
    # Pre-2.6 fallback: socket.ssl offers no certificate validation at all,
    # so validation must be explicitly disabled by the caller.
    ssl_SSLError = None
    def _ssl_wrap_socket(sock, key_file, cert_file,
                         disable_validation, ca_certs):
        """Wrap *sock* using the legacy socket.ssl API (no validation)."""
        if not disable_validation:
            raise CertificateValidationUnsupported(
                    "SSL certificate validation is not supported without "
                    "the ssl module installed. To avoid this error, install "
                    "the ssl module, or explicity disable validation.")
        ssl_sock = socket.ssl(sock, key_file, cert_file)
        return httplib.FakeSocket(sock, ssl_sock)


if sys.version_info >= (2,3):
    from iri2uri import iri2uri
else:
    def iri2uri(uri):
        # Pre-2.3 fallback: pass IRIs through unchanged.
        return uri

def has_timeout(timeout): # python 2.6
    """Return True if *timeout* is a real value (not None or the 2.6
    socket module's global-default sentinel)."""
    if hasattr(socket, '_GLOBAL_DEFAULT_TIMEOUT'):
        return (timeout is not None and timeout is not socket._GLOBAL_DEFAULT_TIMEOUT)
    return (timeout is not None)

__all__ = [
    'Http', 'Response', 'ProxyInfo', 'HttpLib2Error', 'RedirectMissingLocation',
    'RedirectLimit', 'FailedToDecompressContent',
    'UnimplementedDigestAuthOptionError',
    'UnimplementedHmacDigestAuthOptionError',
    'debuglevel', 'ProxiesUnavailableError']

# The httplib debug level, set to a non-zero value to get debug output
debuglevel = 0

# A request will be tried 'RETRIES' times if it fails at the socket/connection level.
RETRIES = 2

# Python 2.3 support
if sys.version_info < (2,4):
    def sorted(seq):
        # In-place sort shim; unlike the 2.4 builtin this mutates *seq*.
        seq.sort()
        return seq

# Python 2.3 support
def HTTPResponse__getheaders(self):
    """Return list of (header, value) tuples."""
    if self.msg is None:
        raise httplib.ResponseNotReady()
    return self.msg.items()

# Monkey-patch getheaders onto httplib.HTTPResponse where it is missing.
if not hasattr(httplib.HTTPResponse, 'getheaders'):
    httplib.HTTPResponse.getheaders = HTTPResponse__getheaders

# All exceptions raised here derive from HttpLib2Error
class HttpLib2Error(Exception): pass

# Some exceptions can be caught and optionally
# be turned back into responses.
class HttpLib2ErrorWithResponse(HttpLib2Error):
    """Base for errors that carry the (response, content) pair that
    triggered them, so callers can turn them back into responses."""
    def __init__(self, desc, response, content):
        self.response = response
        self.content = content
        HttpLib2Error.__init__(self, desc)

class RedirectMissingLocation(HttpLib2ErrorWithResponse): pass

class RedirectLimit(HttpLib2ErrorWithResponse): pass

class FailedToDecompressContent(HttpLib2ErrorWithResponse): pass

class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse): pass

class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse): pass

class MalformedHeader(HttpLib2Error): pass

class RelativeURIError(HttpLib2Error): pass

class ServerNotFoundError(HttpLib2Error): pass

class ProxiesUnavailableError(HttpLib2Error): pass

class CertificateValidationUnsupported(HttpLib2Error): pass

class SSLHandshakeError(HttpLib2Error): pass

class NotSupportedOnThisPlatform(HttpLib2Error): pass

class CertificateHostnameMismatch(SSLHandshakeError):
    """SSL handshake failure where the certificate did not match the
    requested host; keeps the offending host and cert for inspection."""
    def __init__(self, desc, host, cert):
        HttpLib2Error.__init__(self, desc)
        self.host = host
        self.cert = cert

# Open Items:
# -----------
# Proxy support
# Are we removing the cached content too soon on PUT (only delete on 200 Maybe?)

# Pluggable cache storage (supports storing the cache in
#   flat files by default. We need a plug-in architecture
#   that can support Berkeley DB and Squid)

# == Known Issues ==
# Does not handle a resource that uses conneg and Last-Modified but no ETag as a cache validator.
# Does not handle Cache-Control: max-stale
# Does not use Age: headers when calculating cache freshness.


# The number of redirections to follow before giving up.
# Note that only GET redirects are automatically followed.
# Will also honor 301 requests by saving that info and never
# requesting that URI again.
DEFAULT_MAX_REDIRECTS = 5

try:
    # Users can optionally provide a module that tells us where the CA_CERTS
    # are located.
    import ca_certs_locater
    CA_CERTS = ca_certs_locater.get()
except ImportError:
    # Default CA certificates file bundled with httplib2.
    CA_CERTS = os.path.join(
        os.path.dirname(os.path.abspath(__file__ )), "cacerts.txt")

# Which headers are hop-by-hop headers by default
HOP_BY_HOP = ['connection', 'keep-alive', 'proxy-authenticate', 'proxy-authorization', 'te', 'trailers', 'transfer-encoding', 'upgrade']

def _get_end2end_headers(response):
    """Return the end-to-end header names of *response*, i.e. everything
    that is not hop-by-hop (per HOP_BY_HOP plus the Connection header)."""
    hopbyhop = list(HOP_BY_HOP)
    hopbyhop.extend([x.strip() for x in response.get('connection', '').split(',')])
    return [header for header in response.keys() if header not in hopbyhop]

URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")

def parse_uri(uri):
    """Parses a URI using the regex given in Appendix B of RFC 3986.

        (scheme, authority, path, query, fragment) = parse_uri(uri)
    """
    groups = URI.match(uri).groups()
    return (groups[1], groups[3], groups[4], groups[6], groups[8])

def urlnorm(uri):
    """Normalize an absolute URI and return
    (scheme, authority, request_uri, defragmented_uri).

    Lowercases scheme and authority, defaults an empty path to "/",
    and drops any fragment. Raises RelativeURIError for relative URIs.
    """
    (scheme, authority, path, query, fragment) = parse_uri(uri)
    if not scheme or not authority:
        raise RelativeURIError("Only absolute URIs are allowed. uri = %s" % uri)
    authority = authority.lower()
    scheme = scheme.lower()
    if not path:
        path = "/"
    # Could do syntax based normalization of the URI before
    # computing the digest. See Section 6.2.2 of Std 66.
    request_uri = query and "?".join([path, query]) or path
    scheme = scheme.lower()
    defrag_uri = scheme + "://" + authority + request_uri
    return scheme, authority, request_uri, defrag_uri


# Cache filename construction (original borrowed from Venus http://intertwingly.net/code/venus/)
re_url_scheme = re.compile(r'^\w+://')
re_slash = re.compile(r'[?/:|]+')

def safename(filename):
    """Return a filename suitable for the cache.

    Strips dangerous and common characters to create a filename we
    can use to store the cache in.
    """
    try:
        # IDNA-encode URL-like names so non-ASCII hosts get stable names.
        if re_url_scheme.match(filename):
            if isinstance(filename, str):
                filename = filename.decode('utf-8')
                filename = filename.encode('idna')
            else:
                filename = filename.encode('idna')
    except UnicodeError:
        pass
    if isinstance(filename, unicode):
        filename = filename.encode('utf-8')
    # Append an MD5 of the full name so truncation below cannot collide.
    filemd5 = _md5(filename).hexdigest()
    filename = re_url_scheme.sub("", filename)
    filename = re_slash.sub(",", filename)

    # limit length of filename
    if len(filename) > 200:
        filename = filename[:200]
    return ",".join((filename, filemd5))

NORMALIZE_SPACE = re.compile(r'(?:\r\n)?[ \t]+')
def _normalize_headers(headers):
    """Return *headers* with lowercased keys and collapsed whitespace in
    values.

    NOTE(review): the argument order ``NORMALIZE_SPACE.sub(value, ' ')``
    looks swapped (pattern.sub(repl, string) treats *value* as the
    replacement). It happens to return *value* because ' ' is a single
    match, but backslashes in *value* would be expanded as escapes —
    confirm against upstream before changing.
    """
    return dict([(key.lower(), NORMALIZE_SPACE.sub(value, ' ').strip()) for (key, value) in headers.iteritems()])

def _parse_cache_control(headers):
    """Parse the Cache-Control header (if any) of *headers* into a dict
    mapping each lowercase directive to its value (or 1 if valueless)."""
    retval = {}
    if headers.has_key('cache-control'):
        parts = headers['cache-control'].split(',')
        parts_with_args = [tuple([x.strip().lower() for x in part.split("=", 1)]) for part in parts if -1 != part.find("=")]
        parts_wo_args = [(name.strip().lower(), 1) for name in parts if -1 == name.find("=")]
        retval = dict(parts_with_args + parts_wo_args)
    return retval

# Whether to use a strict mode to parse WWW-Authenticate headers
# Might lead to bad results in case of ill-formed header value,
# so disabled by default, falling back to relaxed parsing.
# Set to true to turn on, usefull for testing servers.
USE_WWW_AUTH_STRICT_PARSING = 0

# In regex below:
#    [^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+             matches a "token" as defined by HTTP
#    "(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?"    matches a "quoted-string" as defined by HTTP, when LWS have already been replaced by a single space
# Actually, as an auth-param value can be either a token or a quoted-string, they are combined in a single pattern which matches both:
#    \"?((?<=\")(?:[^\0-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x08\x0A-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?
WWW_AUTH_STRICT = re.compile(r"^(?:\s*(?:,\s*)?([^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+)\s*=\s*\"?((?<=\")(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?)(.*)$")
WWW_AUTH_RELAXED = re.compile(r"^(?:\s*(?:,\s*)?([^ \t\r\n=]+)\s*=\s*\"?((?<=\")(?:[^\\\"]|\\.)*?(?=\")|(?<!\")[^ \t\r\n,]+(?!\"))\"?)(.*)$")
UNQUOTE_PAIRS = re.compile(r'\\(.)')
def _parse_www_authenticate(headers, headername='www-authenticate'):
    """Returns a dictionary of dictionaries, one dict
    per auth_scheme."""
    retval = {}
    if headers.has_key(headername):
        try:
            authenticate = headers[headername].strip()
            www_auth = USE_WWW_AUTH_STRICT_PARSING and WWW_AUTH_STRICT or WWW_AUTH_RELAXED
            while authenticate:
                # Break off the scheme at the beginning of the line
                if headername == 'authentication-info':
                    (auth_scheme, the_rest) = ('digest', authenticate)
                else:
                    (auth_scheme, the_rest) = authenticate.split(" ", 1)
                # Now loop over all the key value pairs that come after the scheme,
                # being careful not to roll into the next scheme
                match = www_auth.search(the_rest)
                auth_params = {}
                while match:
                    if match and len(match.groups()) == 3:
                        (key, value, the_rest) = match.groups()
                        # Unescape backslash-escaped pairs in quoted-strings.
                        auth_params[key.lower()] = UNQUOTE_PAIRS.sub(r'\1', value) # '\\'.join([x.replace('\\', '') for x in value.split('\\\\')])
                    match = www_auth.search(the_rest)
                retval[auth_scheme.lower()] = auth_params
                authenticate = the_rest.strip()
        except ValueError:
            raise MalformedHeader("WWW-Authenticate")
    return retval


def _entry_disposition(response_headers, request_headers):
    """Determine freshness from the Date, Expires and Cache-Control headers.

    We don't handle the following:

    1. Cache-Control: max-stale
    2. Age: headers are not used in the calculations.

    Not that this algorithm is simpler than you might think
    because we are operating as a private (non-shared) cache.
    This lets us ignore 's-maxage'. We can also ignore
    'proxy-invalidate' since we aren't a proxy.
    We will never return a stale document as
    fresh as a design decision, and thus the non-implementation
    of 'max-stale'. This also lets us safely ignore 'must-revalidate'
    since we operate as if every server has sent 'must-revalidate'.
    Since we are private we get to ignore both 'public' and
    'private' parameters. We also ignore 'no-transform' since
    we don't do any transformations.
    The 'no-store' parameter is handled at a higher level.
    So the only Cache-Control parameters we look at are:

    no-cache
    only-if-cached
    max-age
    min-fresh

    Returns one of "FRESH", "STALE" or "TRANSPARENT".
    """

    retval = "STALE"
    cc = _parse_cache_control(request_headers)
    cc_response = _parse_cache_control(response_headers)

    if request_headers.has_key('pragma') and request_headers['pragma'].lower().find('no-cache') != -1:
        retval = "TRANSPARENT"
        if 'cache-control' not in request_headers:
            request_headers['cache-control'] = 'no-cache'
    elif cc.has_key('no-cache'):
        retval = "TRANSPARENT"
    elif cc_response.has_key('no-cache'):
        retval = "STALE"
    elif cc.has_key('only-if-cached'):
        retval = "FRESH"
    elif response_headers.has_key('date'):
        date = calendar.timegm(email.Utils.parsedate_tz(response_headers['date']))
        now = time.time()
        current_age = max(0, now - date)
        if cc_response.has_key('max-age'):
            try:
                freshness_lifetime = int(cc_response['max-age'])
            except ValueError:
                freshness_lifetime = 0
        elif response_headers.has_key('expires'):
            expires = email.Utils.parsedate_tz(response_headers['expires'])
            if None == expires:
                freshness_lifetime = 0
            else:
                freshness_lifetime = max(0, calendar.timegm(expires) - date)
        else:
            freshness_lifetime = 0
        # A request max-age overrides whatever the response allowed.
        if cc.has_key('max-age'):
            try:
                freshness_lifetime = int(cc['max-age'])
            except ValueError:
                freshness_lifetime = 0
        if cc.has_key('min-fresh'):
            try:
                min_fresh = int(cc['min-fresh'])
            except ValueError:
                min_fresh = 0
            current_age += min_fresh
        if freshness_lifetime > current_age:
            retval = "FRESH"
    return retval

def _decompressContent(response, new_content):
    """Decompress gzip/deflate *new_content* in place according to the
    response's Content-Encoding; raises FailedToDecompressContent on error."""
    content = new_content
    try:
        encoding = response.get('content-encoding', None)
        if encoding in ['gzip', 'deflate']:
            if encoding == 'gzip':
                content = gzip.GzipFile(fileobj=StringIO.StringIO(new_content)).read()
            if encoding == 'deflate':
                content = zlib.decompress(content)
            response['content-length'] = str(len(content))
            # Record the historical presence of the encoding in a way the won't interfere.
            response['-content-encoding'] = response['content-encoding']
            del response['content-encoding']
    except IOError:
        content = ""
        raise FailedToDecompressContent(_("Content purported to be compressed with %s but failed to decompress.") % response.get('content-encoding'), response, content)
    return content

def _updateCache(request_headers, response_headers, content, cache, cachekey):
    """Store (or purge, on no-store) the response under *cachekey*, as a
    serialized header block followed by the body."""
    if cachekey:
        cc = _parse_cache_control(request_headers)
        cc_response = _parse_cache_control(response_headers)
        if cc.has_key('no-store') or cc_response.has_key('no-store'):
            cache.delete(cachekey)
        else:
            info = email.Message.Message()
            for key, value in response_headers.iteritems():
                if key not in ['status', 'content-encoding', 'transfer-encoding']:
                    info[key] = value

            # Add annotations to the cache to indicate what headers
            # are variant for this request.
            vary = response_headers.get('vary', None)
            if vary:
                vary_headers = vary.lower().replace(' ', '').split(',')
                for header in vary_headers:
                    key = '-varied-%s' % header
                    try:
                        info[key] = request_headers[header]
                    except KeyError:
                        pass

            # A 304 means the cached 200 entry is still valid; store as 200.
            status = response_headers.status
            if status == 304:
                status = 200

            status_header = 'status: %d\r\n' % status

            header_str = info.as_string()

            # Normalize all line endings to CRLF before persisting.
            header_str = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", header_str)
            text = "".join([status_header, header_str, content])

            cache.set(cachekey, text)

def _cnonce():
    """Return a 16-hex-char client nonce derived from time and randomness."""
    dig = _md5("%s:%s" % (time.ctime(), ["0123456789"[random.randrange(0, 9)] for i in range(20)])).hexdigest()
    return dig[:16]

def _wsse_username_token(cnonce, iso_now, password):
    """Return the base64 WSSE PasswordDigest for the given nonce/time/pw."""
    return base64.b64encode(_sha("%s%s%s" % (cnonce, iso_now, password)).digest()).strip()


# For credentials we need two things, first
# a pool of credential to try (not necesarily tied to BAsic, Digest, etc.)
# Then we also need a list of URIs that have already demanded authentication
# That list is tricky since sub-URIs can take the same auth, or the
# auth scheme may change as you descend the tree.
# So we also need each Auth instance to be able to tell us
# how close to the 'top' it is.

class Authentication(object):
    """Base class for one authenticated (host, path) scope."""

    def __init__(self, credentials, host, request_uri, headers, response, content, http):
        (scheme, authority, path, query, fragment) = parse_uri(request_uri)
        self.path = path
        self.host = host
        self.credentials = credentials
        self.http = http

    def depth(self, request_uri):
        # Distance of *request_uri* below this auth scope's root path.
        (scheme, authority, path, query, fragment) = parse_uri(request_uri)
        return request_uri[len(self.path):].count("/")

    def inscope(self, host, request_uri):
        # XXX Should we normalize the request_uri?
        (scheme, authority, path, query, fragment) = parse_uri(request_uri)
        return (host == self.host) and path.startswith(self.path)

    def request(self, method, request_uri, headers, content):
        """Modify the request headers to add the appropriate
        Authorization header. Over-ride this in sub-classes."""
        pass

    def response(self, response, content):
        """Gives us a chance to update with new nonces
        or such returned from the last authorized response.
        Over-rise this in sub-classes if necessary.

        Return TRUE is the request is to be retried, for
        example Digest may return stale=true.
        """
        return False


class BasicAuthentication(Authentication):
    """HTTP Basic auth (RFC 2617): base64 of "user:password"."""

    def __init__(self, credentials, host, request_uri, headers, response, content, http):
        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)

    def request(self, method, request_uri, headers, content):
        """Modify the request headers to add the appropriate
        Authorization header."""
        headers['authorization'] = 'Basic ' + base64.b64encode("%s:%s" % self.credentials).strip()


class DigestAuthentication(Authentication):
    """Only do qop='auth' and MD5, since that
    is all Apache currently implements"""
    def __init__(self, credentials, host, request_uri, headers, response, content, http):
        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
        challenge = _parse_www_authenticate(response, 'www-authenticate')
        self.challenge = challenge['digest']
        qop = self.challenge.get('qop', 'auth')
        self.challenge['qop'] = ('auth' in [x.strip() for x in qop.split()]) and 'auth' or None
        if self.challenge['qop'] is None:
            raise UnimplementedDigestAuthOptionError(_("Unsupported value for qop: %s." % qop))
        self.challenge['algorithm'] = self.challenge.get('algorithm', 'MD5').upper()
        if self.challenge['algorithm'] != 'MD5':
            raise UnimplementedDigestAuthOptionError(_("Unsupported value for algorithm: %s." % self.challenge['algorithm']))
        # A1 per RFC 2617: user:realm:password.
        self.A1 = "".join([self.credentials[0], ":", self.challenge['realm'], ":", self.credentials[1]])
        self.challenge['nc'] = 1

    def request(self, method, request_uri, headers, content, cnonce=None):
        """Modify the request headers"""
        H = lambda x: _md5(x).hexdigest()
        KD = lambda s, d: H("%s:%s" % (s, d))
        A2 = "".join([method, ":", request_uri])
        self.challenge['cnonce'] = cnonce or _cnonce()
        request_digest = '"%s"' % KD(H(self.A1), "%s:%s:%s:%s:%s" % (
                self.challenge['nonce'],
                '%08x' % self.challenge['nc'],
                self.challenge['cnonce'],
                self.challenge['qop'], H(A2)))
        headers['authorization'] = 'Digest username="%s", realm="%s", nonce="%s", uri="%s", algorithm=%s, response=%s, qop=%s, nc=%08x, cnonce="%s"' % (
                self.credentials[0],
                self.challenge['realm'],
                self.challenge['nonce'],
                request_uri,
                self.challenge['algorithm'],
                request_digest,
                self.challenge['qop'],
                self.challenge['nc'],
                self.challenge['cnonce'])
        if self.challenge.get('opaque'):
            headers['authorization'] += ', opaque="%s"' % self.challenge['opaque']
        # Nonce count increments for every request under the same nonce.
        self.challenge['nc'] += 1

    def response(self, response, content):
        if not response.has_key('authentication-info'):
            challenge = _parse_www_authenticate(response, 'www-authenticate').get('digest', {})
            if 'true' == challenge.get('stale'):
                # Stale nonce: refresh it and retry the request.
                self.challenge['nonce'] = challenge['nonce']
                self.challenge['nc'] = 1
                return True
        else:
            updated_challenge = _parse_www_authenticate(response, 'authentication-info').get('digest', {})

            if updated_challenge.has_key('nextnonce'):
                self.challenge['nonce'] = updated_challenge['nextnonce']
                self.challenge['nc'] = 1
        return False


class HmacDigestAuthentication(Authentication):
    """Adapted from Robert Sayre's code and DigestAuthentication above."""
    __author__ = "Thomas Broyer (t.broyer@ltgt.net)"

    def __init__(self, credentials, host, request_uri, headers, response, content, http):
        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
        challenge = _parse_www_authenticate(response, 'www-authenticate')
        self.challenge = challenge['hmacdigest']
        # TODO: self.challenge['domain']
        self.challenge['reason'] = self.challenge.get('reason', 'unauthorized')
        if self.challenge['reason'] not in ['unauthorized', 'integrity']:
            self.challenge['reason'] = 'unauthorized'
        self.challenge['salt'] = self.challenge.get('salt', '')
        if not self.challenge.get('snonce'):
            raise UnimplementedHmacDigestAuthOptionError(_("The challenge doesn't contain a server nonce, or this one is empty."))
        self.challenge['algorithm'] = self.challenge.get('algorithm', 'HMAC-SHA-1')
        if self.challenge['algorithm'] not in ['HMAC-SHA-1', 'HMAC-MD5']:
            raise UnimplementedHmacDigestAuthOptionError(_("Unsupported value for algorithm: %s." % self.challenge['algorithm']))
        self.challenge['pw-algorithm'] = self.challenge.get('pw-algorithm', 'SHA-1')
        if self.challenge['pw-algorithm'] not in ['SHA-1', 'MD5']:
            raise UnimplementedHmacDigestAuthOptionError(_("Unsupported value for pw-algorithm: %s." % self.challenge['pw-algorithm']))
        if self.challenge['algorithm'] == 'HMAC-MD5':
            self.hashmod = _md5
        else:
            self.hashmod = _sha
        if self.challenge['pw-algorithm'] == 'MD5':
            self.pwhashmod = _md5
        else:
            self.pwhashmod = _sha
        # Derived key: hash of "user:hash(password+salt):realm".
        self.key = "".join([self.credentials[0], ":",
                    self.pwhashmod.new("".join([self.credentials[1], self.challenge['salt']])).hexdigest().lower(),
                    ":", self.challenge['realm']])
        self.key = self.pwhashmod.new(self.key).hexdigest().lower()

    def request(self, method, request_uri, headers, content):
        """Modify the request headers"""
        keys = _get_end2end_headers(headers)
        keylist = "".join(["%s " % k for k in keys])
        headers_val = "".join([headers[k] for k in keys])
        created = time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime())
        cnonce = _cnonce()
        request_digest = "%s:%s:%s:%s:%s" % (method, request_uri, cnonce, self.challenge['snonce'], headers_val)
        request_digest = hmac.new(self.key, request_digest, self.hashmod).hexdigest().lower()
        headers['authorization'] = 'HMACDigest username="%s", realm="%s", snonce="%s", cnonce="%s", uri="%s", created="%s", response="%s", headers="%s"' % (
                self.credentials[0],
                self.challenge['realm'],
                self.challenge['snonce'],
                cnonce,
                request_uri,
                created,
                request_digest,
                keylist)

    def response(self, response, content):
        challenge = _parse_www_authenticate(response, 'www-authenticate').get('hmacdigest', {})
        if challenge.get('reason') in ['integrity', 'stale']:
            return True
        return False


class WsseAuthentication(Authentication):
    """This is thinly tested and should not be relied upon.
    At this time there isn't any third party server to test against.
    Blogger and TypePad implemented this algorithm at one point
    but Blogger has since switched to Basic over HTTPS and
    TypePad has implemented it wrong, by never issuing a 401
    challenge but instead requiring your client to telepathically know that
    their endpoint is expecting WSSE profile="UsernameToken"."""
    def __init__(self, credentials, host, request_uri, headers, response, content, http):
        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)

    def request(self, method, request_uri, headers, content):
        """Modify the request headers to add the appropriate
        Authorization header."""
        headers['authorization'] = 'WSSE profile="UsernameToken"'
        iso_now = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
        cnonce = _cnonce()
        password_digest = _wsse_username_token(cnonce, iso_now, self.credentials[1])
        headers['X-WSSE'] = 'UsernameToken Username="%s", PasswordDigest="%s", Nonce="%s", Created="%s"' % (
                self.credentials[0],
                password_digest,
                cnonce,
                iso_now)

class GoogleLoginAuthentication(Authentication):
    """Legacy Google ClientLogin auth: trades credentials for an Auth
    token at construction time, then sends it on every request."""

    def __init__(self, credentials, host, request_uri, headers, response, content, http):
        from urllib import urlencode
        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
        challenge = _parse_www_authenticate(response, 'www-authenticate')
        service = challenge['googlelogin'].get('service', 'xapi')
        # Bloggger actually returns the service in the challenge
        # For the rest we guess based on the URI
        if service == 'xapi' and request_uri.find("calendar") > 0:
            service = "cl"
        # No point in guessing Base or Spreadsheet
        #elif request_uri.find("spreadsheets") > 0:
        #    service = "wise"

        auth = dict(Email=credentials[0], Passwd=credentials[1], service=service, source=headers['user-agent'])
        resp, content = self.http.request("https://www.google.com/accounts/ClientLogin", method="POST", body=urlencode(auth), headers={'Content-Type': 'application/x-www-form-urlencoded'})
        lines = content.split('\n')
        d = dict([tuple(line.split("=", 1)) for line in lines if line])
        if resp.status == 403:
            self.Auth = ""
        else:
            self.Auth = d['Auth']

    def request(self, method, request_uri, headers, content):
        """Modify the request headers to add the appropriate
        Authorization header."""
        headers['authorization'] = 'GoogleLogin Auth=' + self.Auth


# Registry of supported schemes and the order in which to try them
# (strongest first).
AUTH_SCHEME_CLASSES = {
    "basic": BasicAuthentication,
    "wsse": WsseAuthentication,
    "digest": DigestAuthentication,
    "hmacdigest": HmacDigestAuthentication,
    "googlelogin": GoogleLoginAuthentication
}

AUTH_SCHEME_ORDER = ["hmacdigest", "googlelogin", "digest", "wsse", "basic"]

class FileCache(object):
    """Uses a local directory as a store for cached files.
    Not really safe to use if multiple threads or processes are going to
    be running on the same cache.
    """
    def __init__(self, cache, safe=safename): # use safe=lambda x: md5.new(x).hexdigest() for the old behavior
        self.cache = cache
        self.safe = safe
        if not os.path.exists(cache):
            os.makedirs(self.cache)

    def get(self, key):
        """Return the cached bytes for *key*, or None if absent/unreadable."""
        retval = None
        cacheFullPath = os.path.join(self.cache, self.safe(key))
        try:
            f = file(cacheFullPath, "rb")
            retval = f.read()
            f.close()
        except IOError:
            pass
        return retval

    def set(self, key, value):
        """Write *value* to the cache file derived from *key*."""
        cacheFullPath = os.path.join(self.cache, self.safe(key))
        f = file(cacheFullPath, "wb")
        f.write(value)
        f.close()

    def delete(self, key):
        """Remove the cache entry for *key* if it exists."""
        cacheFullPath = os.path.join(self.cache, self.safe(key))
        if os.path.exists(cacheFullPath):
            os.remove(cacheFullPath)

class Credentials(object):
    """Pool of (domain, name, password) credentials to try."""

    def __init__(self):
        self.credentials = []

    def add(self, name, password, domain=""):
        self.credentials.append((domain.lower(), name, password))

    def clear(self):
        self.credentials = []

    def iter(self, domain):
        # Yield (name, password) pairs valid for *domain*; an empty stored
        # domain matches every host.
        for (cdomain, name, password) in self.credentials:
            if cdomain == "" or domain == cdomain:
                yield (name, password)

class KeyCerts(Credentials):
    """Identical to Credentials except that
    name/password are mapped to key/cert."""
    pass

class AllHosts(object):
    # Sentinel: assigning this to ProxyInfo.bypass_hosts bypasses the
    # proxy for every host.
    pass

class ProxyInfo(object):
    """Collect information required to use a proxy."""
    bypass_hosts = ()

    def __init__(self, proxy_type, proxy_host, proxy_port,
                 proxy_rdns=True, proxy_user=None, proxy_pass=None):
        """
        Args:

          proxy_type: The type of proxy server.  This must be set to one of
          socks.PROXY_TYPE_XXX constants.  For example:

            p = ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP,
              proxy_host='localhost', proxy_port=8000)

          proxy_host: The hostname or IP address of the proxy server.

          proxy_port: The port that the proxy server is running on.

          proxy_rdns: If True (default), DNS queries will not be performed
          locally, and instead, handed to the proxy to resolve.  This is useful
          if the network does not allow resolution of non-local names. In
          httplib2 0.9 and earlier, this defaulted to False.

          proxy_user: The username used to authenticate with the proxy server.

          proxy_pass: The password used to authenticate with the proxy server.
        """
        self.proxy_type = proxy_type
        self.proxy_host = proxy_host
        self.proxy_port = proxy_port
        self.proxy_rdns = proxy_rdns
        self.proxy_user = proxy_user
        self.proxy_pass = proxy_pass

    def astuple(self):
        """Return all fields as a tuple, in constructor order."""
        return (self.proxy_type, self.proxy_host, self.proxy_port,
                self.proxy_rdns, self.proxy_user, self.proxy_pass)

    def isgood(self):
        """True when both a host and a port are configured."""
        return (self.proxy_host != None) and (self.proxy_port != None)

    def applies_to(self, hostname):
        return not self.bypass_host(hostname)

    def bypass_host(self, hostname):
        """Has this host been excluded from the proxy config"""
        if self.bypass_hosts is AllHosts:
            return True

        bypass = False
        for domain in self.bypass_hosts:
            # Suffix match, so ".example.com" covers all subdomains.
            if hostname.endswith(domain):
                bypass = True

        return bypass


def proxy_info_from_environment(method='http'):
    """
    Read proxy info from the environment variables.

    Consults <method>_proxy (and its uppercase variant) plus
    no_proxy/NO_PROXY; returns a ProxyInfo or None.
    """
    if method not in ['http', 'https']:
        return

    env_var = method + '_proxy'
    url = os.environ.get(env_var, os.environ.get(env_var.upper()))
    if not url:
        return
    pi = proxy_info_from_url(url, method)

    no_proxy = os.environ.get('no_proxy', os.environ.get('NO_PROXY', ''))
    bypass_hosts = []
    if no_proxy:
        bypass_hosts = no_proxy.split(',')
    # special case, no_proxy=* means all hosts bypassed
    if no_proxy == '*':
        bypass_hosts = AllHosts

    pi.bypass_hosts = bypass_hosts
    return pi

def proxy_info_from_url(url, method='http'):
    """
    Construct a ProxyInfo from a URL (such as http_proxy env var)
    """
    url = urlparse.urlparse(url)
    username = None
    password = None
    port = None
    # Split optional "user:pass@" userinfo off the netloc.
    if '@' in url[1]:
        ident, host_port = url[1].split('@', 1)
        if ':' in ident:
            username, password = ident.split(':', 1)
        else:
            password = ident
    else:
        host_port = url[1]
    if ':' in host_port:
        host, port = host_port.split(':', 1)
    else:
        host = host_port

    if port:
        port = int(port)
    else:
        # Default port by scheme when the URL does not carry one.
        port = dict(https=443, http=80)[method]

    proxy_type = 3 # socks.PROXY_TYPE_HTTP
    return ProxyInfo(
        proxy_type = proxy_type,
        proxy_host = host,
        proxy_port = port,
        proxy_user = username or None,
        proxy_pass = password or None,
    )


class HTTPConnectionWithTimeout(httplib.HTTPConnection):
    """
    HTTPConnection subclass that supports timeouts

    All timeouts are in seconds. If None is passed for timeout then
    Python's default timeout for sockets will be used. See for example
    the docs of socket.setdefaulttimeout():
    http://docs.python.org/library/socket.html#socket.setdefaulttimeout
    """

    def __init__(self, host, port=None, strict=None, timeout=None, proxy_info=None):
        httplib.HTTPConnection.__init__(self, host, port, strict)
        self.timeout = timeout
        self.proxy_info = proxy_info

    def connect(self):
        """Connect to the host and port specified in __init__."""
        # Mostly verbatim from httplib.py.
        if self.proxy_info and socks is None:
            raise ProxiesUnavailableError(
                'Proxy support missing but proxy use was requested!')
        msg = "getaddrinfo returns an empty list"
        if self.proxy_info and self.proxy_info.isgood():
            use_proxy = True
            proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass = self.proxy_info.astuple()

            host = proxy_host
            port = proxy_port
        else:
            use_proxy = False

            host = self.host
            port = self.port

        # Try each resolved address in turn until one connects.
        for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
            af, socktype, proto, canonname, sa = res
            try:
                if use_proxy:
                    self.sock = socks.socksocket(af, socktype, proto)

                    self.sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)
                else:
                    self.sock = socket.socket(af, socktype, proto)
                    self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
                # Different from httplib: support timeouts.
                if has_timeout(self.timeout):
                    self.sock.settimeout(self.timeout)
                    # End of difference from httplib.
                if self.debuglevel > 0:
                    print "connect: (%s, %s) ************" % (self.host, self.port)
                    if use_proxy:
                        print "proxy: %s ************" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))

                self.sock.connect((self.host, self.port) + sa[2:])
            except socket.error, msg:
                if self.debuglevel > 0:
                    print "connect fail: (%s, %s)" % (self.host, self.port)
                    if use_proxy:
                        print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))
                if self.sock:
                    self.sock.close()
                self.sock = None
                continue
            break
        if not self.sock:
            # All addresses failed; re-raise the last socket error.
            raise socket.error, msg

class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
    """
    This class allows communication via SSL.

    All timeouts are in seconds. If None is passed for timeout then
    Python's default timeout for sockets will be used. See for example
    the docs of socket.setdefaulttimeout():
    http://docs.python.org/library/socket.html#socket.setdefaulttimeout
    """
    def __init__(self, host, port=None, key_file=None, cert_file=None,
                 strict=None, timeout=None, proxy_info=None,
                 ca_certs=None, disable_ssl_certificate_validation=False):
        httplib.HTTPSConnection.__init__(self, host, port=port,
                                         key_file=key_file,
                                         cert_file=cert_file, strict=strict)
        self.timeout = timeout
        self.proxy_info = proxy_info
        if ca_certs is None:
            ca_certs = CA_CERTS
        self.ca_certs = ca_certs
        self.disable_ssl_certificate_validation = \
                disable_ssl_certificate_validation

    # The following two methods were adapted from https_wrapper.py, released
    # with the Google Appengine SDK at
    # http://googleappengine.googlecode.com/svn-history/r136/trunk/python/google/appengine/tools/https_wrapper.py
    # under the following license:
    #
    # Copyright 2007 Google Inc.
    #
    # Licensed under the Apache License, Version 2.0 (the "License");
    # you may not use this file except in compliance with the License.
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # def _GetValidHostsForCert(self, cert): """Returns a list of valid host globs for an SSL certificate. Args: cert: A dictionary representing an SSL certificate. Returns: list: A list of valid host globs. """ if 'subjectAltName' in cert: return [x[1] for x in cert['subjectAltName'] if x[0].lower() == 'dns'] else: return [x[0][1] for x in cert['subject'] if x[0][0].lower() == 'commonname'] def _ValidateCertificateHostname(self, cert, hostname): """Validates that a given hostname is valid for an SSL certificate. Args: cert: A dictionary representing an SSL certificate. hostname: The hostname to test. Returns: bool: Whether or not the hostname is valid for this certificate. """ hosts = self._GetValidHostsForCert(cert) for host in hosts: host_re = host.replace('.', '\.').replace('*', '[^.]*') if re.search('^%s$' % (host_re,), hostname, re.I): return True return False def connect(self): "Connect to a host on a given (SSL) port." 
msg = "getaddrinfo returns an empty list" if self.proxy_info and self.proxy_info.isgood(): use_proxy = True proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass = self.proxy_info.astuple() host = proxy_host port = proxy_port else: use_proxy = False host = self.host port = self.port address_info = socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM) for family, socktype, proto, canonname, sockaddr in address_info: try: if use_proxy: sock = socks.socksocket(family, socktype, proto) sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass) else: sock = socket.socket(family, socktype, proto) sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) if has_timeout(self.timeout): sock.settimeout(self.timeout) sock.connect((self.host, self.port)) self.sock =_ssl_wrap_socket( sock, self.key_file, self.cert_file, self.disable_ssl_certificate_validation, self.ca_certs) if self.debuglevel > 0: print "connect: (%s, %s)" % (self.host, self.port) if use_proxy: print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)) if not self.disable_ssl_certificate_validation: cert = self.sock.getpeercert() hostname = self.host.split(':', 0)[0] if not self._ValidateCertificateHostname(cert, hostname): raise CertificateHostnameMismatch( 'Server presented certificate that does not match ' 'host %s: %s' % (hostname, cert), hostname, cert) except ssl_SSLError, e: if sock: sock.close() if self.sock: self.sock.close() self.sock = None # Unfortunately the ssl module doesn't seem to provide any way # to get at more detailed error information, in particular # whether the error is due to certificate validation or # something else (such as SSL protocol mismatch). 
if e.errno == ssl.SSL_ERROR_SSL: raise SSLHandshakeError(e) else: raise except (socket.timeout, socket.gaierror): raise except socket.error, msg: if self.debuglevel > 0: print "connect fail: (%s, %s)" % (self.host, self.port) if use_proxy: print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)) if self.sock: self.sock.close() self.sock = None continue break if not self.sock: raise socket.error, msg SCHEME_TO_CONNECTION = { 'http': HTTPConnectionWithTimeout, 'https': HTTPSConnectionWithTimeout } # Use a different connection object for Google App Engine try: try: from google.appengine.api import apiproxy_stub_map if apiproxy_stub_map.apiproxy.GetStub('urlfetch') is None: raise ImportError # Bail out; we're not actually running on App Engine. from google.appengine.api.urlfetch import fetch from google.appengine.api.urlfetch import InvalidURLError except (ImportError, AttributeError): from google3.apphosting.api import apiproxy_stub_map if apiproxy_stub_map.apiproxy.GetStub('urlfetch') is None: raise ImportError # Bail out; we're not actually running on App Engine. from google3.apphosting.api.urlfetch import fetch from google3.apphosting.api.urlfetch import InvalidURLError def _new_fixed_fetch(validate_certificate): def fixed_fetch(url, payload=None, method="GET", headers={}, allow_truncated=False, follow_redirects=True, deadline=None): if deadline is None: deadline = socket.getdefaulttimeout() or 5 return fetch(url, payload=payload, method=method, headers=headers, allow_truncated=allow_truncated, follow_redirects=follow_redirects, deadline=deadline, validate_certificate=validate_certificate) return fixed_fetch class AppEngineHttpConnection(httplib.HTTPConnection): """Use httplib on App Engine, but compensate for its weirdness. The parameters key_file, cert_file, proxy_info, ca_certs, and disable_ssl_certificate_validation are all dropped on the ground. 
""" def __init__(self, host, port=None, key_file=None, cert_file=None, strict=None, timeout=None, proxy_info=None, ca_certs=None, disable_ssl_certificate_validation=False): httplib.HTTPConnection.__init__(self, host, port=port, strict=strict, timeout=timeout) class AppEngineHttpsConnection(httplib.HTTPSConnection): """Same as AppEngineHttpConnection, but for HTTPS URIs.""" def __init__(self, host, port=None, key_file=None, cert_file=None, strict=None, timeout=None, proxy_info=None, ca_certs=None, disable_ssl_certificate_validation=False): httplib.HTTPSConnection.__init__(self, host, port=port, key_file=key_file, cert_file=cert_file, strict=strict, timeout=timeout) self._fetch = _new_fixed_fetch( not disable_ssl_certificate_validation) # Update the connection classes to use the Googel App Engine specific ones. SCHEME_TO_CONNECTION = { 'http': AppEngineHttpConnection, 'https': AppEngineHttpsConnection } except (ImportError, AttributeError): pass class Http(object): """An HTTP client that handles: - all methods - caching - ETags - compression, - HTTPS - Basic - Digest - WSSE and more. """ def __init__(self, cache=None, timeout=None, proxy_info=proxy_info_from_environment, ca_certs=None, disable_ssl_certificate_validation=False): """If 'cache' is a string then it is used as a directory name for a disk cache. Otherwise it must be an object that supports the same interface as FileCache. All timeouts are in seconds. If None is passed for timeout then Python's default timeout for sockets will be used. See for example the docs of socket.setdefaulttimeout(): http://docs.python.org/library/socket.html#socket.setdefaulttimeout `proxy_info` may be: - a callable that takes the http scheme ('http' or 'https') and returns a ProxyInfo instance per request. By default, uses proxy_nfo_from_environment. - a ProxyInfo instance (static proxy config). - None (proxy disabled). ca_certs is the path of a file containing root CA certificates for SSL server certificate validation. 
By default, a CA cert file bundled with httplib2 is used. If disable_ssl_certificate_validation is true, SSL cert validation will not be performed. """ self.proxy_info = proxy_info self.ca_certs = ca_certs self.disable_ssl_certificate_validation = \ disable_ssl_certificate_validation # Map domain name to an httplib connection self.connections = {} # The location of the cache, for now a directory # where cached responses are held. if cache and isinstance(cache, basestring): self.cache = FileCache(cache) else: self.cache = cache # Name/password self.credentials = Credentials() # Key/cert self.certificates = KeyCerts() # authorization objects self.authorizations = [] # If set to False then no redirects are followed, even safe ones. self.follow_redirects = True # Which HTTP methods do we apply optimistic concurrency to, i.e. # which methods get an "if-match:" etag header added to them. self.optimistic_concurrency_methods = ["PUT", "PATCH"] # If 'follow_redirects' is True, and this is set to True then # all redirecs are followed, including unsafe ones. self.follow_all_redirects = False self.ignore_etag = False self.force_exception_to_status_code = False self.timeout = timeout # Keep Authorization: headers on a redirect. self.forward_authorization_headers = False def __getstate__(self): state_dict = copy.copy(self.__dict__) # In case request is augmented by some foreign object such as # credentials which handle auth if 'request' in state_dict: del state_dict['request'] if 'connections' in state_dict: del state_dict['connections'] return state_dict def __setstate__(self, state): self.__dict__.update(state) self.connections = {} def _auth_from_challenge(self, host, request_uri, headers, response, content): """A generator that creates Authorization objects that can be applied to requests. 
""" challenges = _parse_www_authenticate(response, 'www-authenticate') for cred in self.credentials.iter(host): for scheme in AUTH_SCHEME_ORDER: if challenges.has_key(scheme): yield AUTH_SCHEME_CLASSES[scheme](cred, host, request_uri, headers, response, content, self) def add_credentials(self, name, password, domain=""): """Add a name and password that will be used any time a request requires authentication.""" self.credentials.add(name, password, domain) def add_certificate(self, key, cert, domain): """Add a key and cert that will be used any time a request requires authentication.""" self.certificates.add(key, cert, domain) def clear_credentials(self): """Remove all the names and passwords that are used for authentication""" self.credentials.clear() self.authorizations = [] def _conn_request(self, conn, request_uri, method, body, headers): i = 0 seen_bad_status_line = False while i < RETRIES: i += 1 try: if hasattr(conn, 'sock') and conn.sock is None: conn.connect() conn.request(method, request_uri, body, headers) except socket.timeout: raise except socket.gaierror: conn.close() raise ServerNotFoundError("Unable to find the server at %s" % conn.host) except ssl_SSLError: conn.close() raise except socket.error, e: err = 0 if hasattr(e, 'args'): err = getattr(e, 'args')[0] else: err = e.errno if err in (errno.ENETUNREACH, errno.EADDRNOTAVAIL) and i < RETRIES: continue # retry on potentially transient socket errors raise except httplib.HTTPException: # Just because the server closed the connection doesn't apparently mean # that the server didn't send a response. if hasattr(conn, 'sock') and conn.sock is None: if i < RETRIES-1: conn.close() conn.connect() continue else: conn.close() raise if i < RETRIES-1: conn.close() conn.connect() continue try: response = conn.getresponse() except httplib.BadStatusLine: # If we get a BadStatusLine on the first try then that means # the connection just went stale, so retry regardless of the # number of RETRIES set. 
if not seen_bad_status_line and i == 1: i = 0 seen_bad_status_line = True conn.close() conn.connect() continue else: conn.close() raise except (socket.error, httplib.HTTPException): if i < RETRIES-1: conn.close() conn.connect() continue else: conn.close() raise else: content = "" if method == "HEAD": conn.close() else: content = response.read() response = Response(response) if method != "HEAD": content = _decompressContent(response, content) break return (response, content) def _request(self, conn, host, absolute_uri, request_uri, method, body, headers, redirections, cachekey): """Do the actual request using the connection object and also follow one level of redirects if necessary""" auths = [(auth.depth(request_uri), auth) for auth in self.authorizations if auth.inscope(host, request_uri)] auth = auths and sorted(auths)[0][1] or None if auth: auth.request(method, request_uri, headers, body) (response, content) = self._conn_request(conn, request_uri, method, body, headers) if auth: if auth.response(response, body): auth.request(method, request_uri, headers, body) (response, content) = self._conn_request(conn, request_uri, method, body, headers ) response._stale_digest = 1 if response.status == 401: for authorization in self._auth_from_challenge(host, request_uri, headers, response, content): authorization.request(method, request_uri, headers, body) (response, content) = self._conn_request(conn, request_uri, method, body, headers, ) if response.status != 401: self.authorizations.append(authorization) authorization.response(response, body) break if (self.follow_all_redirects or (method in ["GET", "HEAD"]) or response.status == 303): if self.follow_redirects and response.status in [300, 301, 302, 303, 307]: # Pick out the location header and basically start from the beginning # remembering first to strip the ETag header and decrement our 'depth' if redirections: if not response.has_key('location') and response.status != 300: raise RedirectMissingLocation( 
_("Redirected but the response is missing a Location: header."), response, content) # Fix-up relative redirects (which violate an RFC 2616 MUST) if response.has_key('location'): location = response['location'] (scheme, authority, path, query, fragment) = parse_uri(location) if authority == None: response['location'] = urlparse.urljoin(absolute_uri, location) if response.status == 301 and method in ["GET", "HEAD"]: response['-x-permanent-redirect-url'] = response['location'] if not response.has_key('content-location'): response['content-location'] = absolute_uri _updateCache(headers, response, content, self.cache, cachekey) if headers.has_key('if-none-match'): del headers['if-none-match'] if headers.has_key('if-modified-since'): del headers['if-modified-since'] if 'authorization' in headers and not self.forward_authorization_headers: del headers['authorization'] if response.has_key('location'): location = response['location'] old_response = copy.deepcopy(response) if not old_response.has_key('content-location'): old_response['content-location'] = absolute_uri redirect_method = method if response.status in [302, 303]: redirect_method = "GET" body = None (response, content) = self.request( location, method=redirect_method, body=body, headers=headers, redirections=redirections - 1) response.previous = old_response else: raise RedirectLimit("Redirected more times than rediection_limit allows.", response, content) elif response.status in [200, 203] and method in ["GET", "HEAD"]: # Don't cache 206's since we aren't going to handle byte range requests if not response.has_key('content-location'): response['content-location'] = absolute_uri _updateCache(headers, response, content, self.cache, cachekey) return (response, content) def _normalize_headers(self, headers): return _normalize_headers(headers) # Need to catch and rebrand some exceptions # Then need to optionally turn all exceptions into status codes # including all socket.* and httplib.* exceptions. 
def request(self, uri, method="GET", body=None, headers=None, redirections=DEFAULT_MAX_REDIRECTS, connection_type=None): """ Performs a single HTTP request. The 'uri' is the URI of the HTTP resource and can begin with either 'http' or 'https'. The value of 'uri' must be an absolute URI. The 'method' is the HTTP method to perform, such as GET, POST, DELETE, etc. There is no restriction on the methods allowed. The 'body' is the entity body to be sent with the request. It is a string object. Any extra headers that are to be sent with the request should be provided in the 'headers' dictionary. The maximum number of redirect to follow before raising an exception is 'redirections. The default is 5. The return value is a tuple of (response, content), the first being and instance of the 'Response' class, the second being a string that contains the response entity body. """ try: if headers is None: headers = {} else: headers = self._normalize_headers(headers) if not headers.has_key('user-agent'): headers['user-agent'] = "Python-httplib2/%s (gzip)" % __version__ uri = iri2uri(uri) (scheme, authority, request_uri, defrag_uri) = urlnorm(uri) domain_port = authority.split(":")[0:2] if len(domain_port) == 2 and domain_port[1] == '443' and scheme == 'http': scheme = 'https' authority = domain_port[0] proxy_info = self._get_proxy_info(scheme, authority) conn_key = scheme+":"+authority if conn_key in self.connections: conn = self.connections[conn_key] else: if not connection_type: connection_type = SCHEME_TO_CONNECTION[scheme] certs = list(self.certificates.iter(authority)) if scheme == 'https': if certs: conn = self.connections[conn_key] = connection_type( authority, key_file=certs[0][0], cert_file=certs[0][1], timeout=self.timeout, proxy_info=proxy_info, ca_certs=self.ca_certs, disable_ssl_certificate_validation= self.disable_ssl_certificate_validation) else: conn = self.connections[conn_key] = connection_type( authority, timeout=self.timeout, proxy_info=proxy_info, 
ca_certs=self.ca_certs, disable_ssl_certificate_validation= self.disable_ssl_certificate_validation) else: conn = self.connections[conn_key] = connection_type( authority, timeout=self.timeout, proxy_info=proxy_info) conn.set_debuglevel(debuglevel) if 'range' not in headers and 'accept-encoding' not in headers: headers['accept-encoding'] = 'gzip, deflate' info = email.Message.Message() cached_value = None if self.cache: cachekey = defrag_uri.encode('utf-8') cached_value = self.cache.get(cachekey) if cached_value: # info = email.message_from_string(cached_value) # # Need to replace the line above with the kludge below # to fix the non-existent bug not fixed in this # bug report: http://mail.python.org/pipermail/python-bugs-list/2005-September/030289.html try: info, content = cached_value.split('\r\n\r\n', 1) feedparser = email.FeedParser.FeedParser() feedparser.feed(info) info = feedparser.close() feedparser._parse = None except (IndexError, ValueError): self.cache.delete(cachekey) cachekey = None cached_value = None else: cachekey = None if method in self.optimistic_concurrency_methods and self.cache and info.has_key('etag') and not self.ignore_etag and 'if-match' not in headers: # http://www.w3.org/1999/04/Editing/ headers['if-match'] = info['etag'] if method not in ["GET", "HEAD"] and self.cache and cachekey: # RFC 2616 Section 13.10 self.cache.delete(cachekey) # Check the vary header in the cache to see if this request # matches what varies in the cache. if method in ['GET', 'HEAD'] and 'vary' in info: vary = info['vary'] vary_headers = vary.lower().replace(' ', '').split(',') for header in vary_headers: key = '-varied-%s' % header value = info[key] if headers.get(header, None) != value: cached_value = None break if cached_value and method in ["GET", "HEAD"] and self.cache and 'range' not in headers: if info.has_key('-x-permanent-redirect-url'): # Should cached permanent redirects be counted in our redirection count? For now, yes. 
if redirections <= 0: raise RedirectLimit("Redirected more times than rediection_limit allows.", {}, "") (response, new_content) = self.request( info['-x-permanent-redirect-url'], method='GET', headers=headers, redirections=redirections - 1) response.previous = Response(info) response.previous.fromcache = True else: # Determine our course of action: # Is the cached entry fresh or stale? # Has the client requested a non-cached response? # # There seems to be three possible answers: # 1. [FRESH] Return the cache entry w/o doing a GET # 2. [STALE] Do the GET (but add in cache validators if available) # 3. [TRANSPARENT] Do a GET w/o any cache validators (Cache-Control: no-cache) on the request entry_disposition = _entry_disposition(info, headers) if entry_disposition == "FRESH": if not cached_value: info['status'] = '504' content = "" response = Response(info) if cached_value: response.fromcache = True return (response, content) if entry_disposition == "STALE": if info.has_key('etag') and not self.ignore_etag and not 'if-none-match' in headers: headers['if-none-match'] = info['etag'] if info.has_key('last-modified') and not 'last-modified' in headers: headers['if-modified-since'] = info['last-modified'] elif entry_disposition == "TRANSPARENT": pass (response, new_content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey) if response.status == 304 and method == "GET": # Rewrite the cache entry with the new end-to-end headers # Take all headers that are in response # and overwrite their values in info. # unless they are hop-by-hop, or are listed in the connection header. 
for key in _get_end2end_headers(response): info[key] = response[key] merged_response = Response(info) if hasattr(response, "_stale_digest"): merged_response._stale_digest = response._stale_digest _updateCache(headers, merged_response, content, self.cache, cachekey) response = merged_response response.status = 200 response.fromcache = True elif response.status == 200: content = new_content else: self.cache.delete(cachekey) content = new_content else: cc = _parse_cache_control(headers) if cc.has_key('only-if-cached'): info['status'] = '504' response = Response(info) content = "" else: (response, content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey) except Exception, e: if self.force_exception_to_status_code: if isinstance(e, HttpLib2ErrorWithResponse): response = e.response content = e.content response.status = 500 response.reason = str(e) elif isinstance(e, socket.timeout): content = "Request Timeout" response = Response({ "content-type": "text/plain", "status": "408", "content-length": len(content) }) response.reason = "Request Timeout" else: content = str(e) response = Response({ "content-type": "text/plain", "status": "400", "content-length": len(content) }) response.reason = "Bad Request" else: raise return (response, content) def _get_proxy_info(self, scheme, authority): """Return a ProxyInfo instance (or None) based on the scheme and authority. """ hostname, port = urllib.splitport(authority) proxy_info = self.proxy_info if callable(proxy_info): proxy_info = proxy_info(scheme) if (hasattr(proxy_info, 'applies_to') and not proxy_info.applies_to(hostname)): proxy_info = None return proxy_info class Response(dict): """An object more like email.Message than httplib.HTTPResponse.""" """Is this response from our local cache""" fromcache = False """HTTP protocol version used by server. 10 for HTTP/1.0, 11 for HTTP/1.1. """ version = 11 "Status code returned by server. 
" status = 200 """Reason phrase returned by server.""" reason = "Ok" previous = None def __init__(self, info): # info is either an email.Message or # an httplib.HTTPResponse object. if isinstance(info, httplib.HTTPResponse): for key, value in info.getheaders(): self[key.lower()] = value self.status = info.status self['status'] = str(self.status) self.reason = info.reason self.version = info.version elif isinstance(info, email.Message.Message): for key, value in info.items(): self[key.lower()] = value self.status = int(self['status']) else: for key, value in info.iteritems(): self[key.lower()] = value self.status = int(self.get('status', self.status)) self.reason = self.get('reason', self.reason) def __getattr__(self, name): if name == 'dict': return self else: raise AttributeError, name
tcole98/dinorunio
refs/heads/master
app/models.py
1
import datetime from app import db class User(db.Model): id = db.Column(db.Integer, primary_key=True) nickname = db.Column(db.String(64), index=True, unique=False) score = db.Column(db.Integer, index=True) date = db.Column(db.DateTime, default=datetime.datetime.utcnow) def __init__(self, nickname, score): self.nickname = nickname self.score = score def __repr__(self): return '<User %r>' % (self.nickname)
scotwk/cloud-custodian
refs/heads/master
tools/zerodark/zerodark/resolver.py
7
# Copyright 2017-2018 Capital One Services, LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import json import ipaddress import six import sqlite3 from .utils import row_factory class IPResolver(object): """Resolve as much info as we can about a given ip. Typically this is a two level lookup. - ip address -> eni info - eni info -> resource info (w/ app) On failure to lookup eni info we consider some additional possibilities: - aws service ip - TODO: lookup in vpc/subnet tables. """ # TODO: needs region and account id in queries resource_query = { 'ec2': 'select * from ec2 where instance_id = ?', 'elb': 'select * from elbs where name = ?'} def __init__(self, ipdb_path, cmdb_path, aws_cidrs_path=None): self.ipdb = ipdb_path and sqlite3.connect(ipdb_path) or None if self.ipdb: self.ipdb.row_factory = row_factory self.ipdb_cursor = self.ipdb.cursor() self.cmdb = cmdb_path and sqlite3.connect(cmdb_path) or None if self.cmdb: self.cmdb.row_factory = row_factory self.cmdb_cursor = self.cmdb.cursor() # TODO see if we can do some ip caching self.resource_cache = {} # Service -> list of service cidrs self.aws_cidrs = {} if not aws_cidrs_path: return with open(aws_cidrs_path) as fh: ipranges = json.load(fh) for r in ipranges.get('prefixes', ()): if r['service'] in ('S3', 'AMAZON'): self.aws_cidrs.setdefault( r['service'].lower(), []).append( ipaddress.IPv4Network(r['ip_prefix'])) def resolve(self, ips, start, end): results = {} if not self.ipdb: return results for ip in ips: # TODO: see 
if we need to expand the time window # the use of config for ip info creates some lag # on capture, and also some potential gaps for # short lived resources. self.ipdb_cursor.execute( '''select * from enis where ip_address = ? and start < ? and (end > ? or end is null)''', (ip, end.strftime('%Y-%m-%dT%H:%M'), start.strftime('%Y-%m-%dT%H:%M'))) info = list(self.ipdb_cursor) # TODO: assert on number of records found if info: eni_info = info.pop() # TODO: this a bit speculative wrt to ip usage # specific to an enterprise network setup, where in # non resolved ips are typically aws services via # classic vpc endpoints using public ips. Might need # to revisit. also potentially an option on ip string # prefix match as a sanity base. elif not info: n = ipaddress.IPv4Address(six.text_type(ip)) found = False for service, cidr_set in self.aws_cidrs.items(): for cidr in cidr_set: if n in cidr: results[ip] = {'app': 'aws s3', 'env': 'aws s3'} found = True break if found: break if found: break continue results[ip] = self.resolve_resource(eni_info) return results def resolve_resource(self, eni_info): # TODO region, account id in cache key ri = self.resource_cache.get(eni_info['resource_type']) if ri is not None: return ri service_query = self.resource_query.get(eni_info['resource_id']) if service_query is None: return eni_info self.cmdb_cursor.execute(service_query, (eni_info['resource_type'],)) ri = self.cmdb_cursor.fetchone() if ri is not None: ri['type'] = eni_info['resource_id'] return ri else: return eni_info