repo_name
stringlengths
5
100
ref
stringlengths
12
67
path
stringlengths
4
244
copies
stringlengths
1
8
content
stringlengths
0
1.05M
n-west/gnuradio
refs/heads/maint
gnuradio-runtime/lib/math/gen_sine_table.py
79
#!/usr/bin/env python # # Copyright 2004 Free Software Foundation, Inc. # # This file is part of GNU Radio # # GNU Radio is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 3, or (at your option) # any later version. # # GNU Radio is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with GNU Radio; see the file COPYING. If not, write to # the Free Software Foundation, Inc., 51 Franklin Street, # Boston, MA 02110-1301, USA. # import math import sys def wrap (x): if x >= 2**31: return x - 2**32 return x def gen_approx_table (f, nentries, min_x, max_x): """return a list of nentries containing tuples of the form: (m, c, abs_error). min_x and max_x specify the domain of the table. 
""" r = [] incx = float (max_x - min_x) / nentries for i in range (nentries): a = (i * incx) + min_x b = ((i + 1) * incx) + min_x m = (f(b)-f(a))/(b-a) c = (3*a+b)*(f(a)-f(b))/(4*(b-a)) + (f((a+b)/2) + f(a))/2 abs_error = c+m*a-f(a) r.append ((m, c, abs_error)) return r def scaled_sine (x): return math.sin (x * math.pi / 2**31) def gen_sine_table (): nbits = 10 nentries = 2**nbits # min_x = -2**31 # max_x = 2**31-1 min_x = 0 max_x = 2**32-1 t = gen_approx_table (scaled_sine, nentries, min_x, max_x) max_error = 0 for e in t: max_error = max (max_error, abs (e[2])) # sys.stdout.write ('static const int WORDBITS = 32;\n') # sys.stdout.write ('static const int NBITS = %d;\n' % (nbits,)) sys.stdout.write (' // max_error = %22.15e\n' % (max_error,)) # sys.stdout.write ('static const double sine_table[%d][2] = {\n'% (nentries,)) for e in t: sys.stdout.write (' { %22.15e, %22.15e },\n' % (2 * e[0], e[1])) # sys.stdout.write ('};\n') if __name__ == '__main__': gen_sine_table ()
fidencio/sssd
refs/heads/master
src/tests/intg/ent_test.py
2
# # ent.py module tests # # Copyright (c) 2015 Red Hat, Inc. # Author: Nikolai Kondrashov <Nikolai.Kondrashov@redhat.com> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # import re import os import io import pytest import ent from util import * @pytest.fixture(scope="module") def passwd_path(request): name = "NSS_WRAPPER_PASSWD" request.addfinalizer(lambda: restore_envvar_file(name)) return backup_envvar_file(name) @pytest.fixture(scope="module") def group_path(request): name = "NSS_WRAPPER_GROUP" request.addfinalizer(lambda: restore_envvar_file(name)) return backup_envvar_file(name) USER1 = dict(name="user1", passwd="x", uid=1001, gid=2001, gecos="User 1", dir="/home/user1", shell="/bin/bash") USER2 = dict(name="user2", passwd="x", uid=1002, gid=2002, gecos="User 2", dir="/home/user2", shell="/bin/bash") USER_LIST = [USER1, USER2] USER_NAME_DICT = dict((u["name"], u) for u in USER_LIST) USER_UID_DICT = dict((u["uid"], u) for u in USER_LIST) EMPTY_GROUP = dict(name="empty_group", passwd="x", gid=2000, mem=ent.contains_only()) GROUP1 = dict(name="group1", passwd="x", gid=2001, mem=ent.contains_only()) GROUP2 = dict(name="group2", passwd="x", gid=2002, mem=ent.contains_only()) ONE_USER_GROUP1 = dict(name="one_user_group1", passwd="x", gid=2011, mem=ent.contains_only("user1")) ONE_USER_GROUP2 = dict(name="one_user_group2", passwd="x", gid=2012, mem=ent.contains_only("user2")) TWO_USER_GROUP = 
dict(name="two_user_group", passwd="x", gid=2020, mem=ent.contains_only("user1", "user2")) GROUP_LIST = [EMPTY_GROUP, GROUP1, GROUP2, ONE_USER_GROUP1, ONE_USER_GROUP2, TWO_USER_GROUP] GROUP_NAME_DICT = dict((g["name"], g) for g in GROUP_LIST) GROUP_GID_DICT = dict((g["gid"], g) for g in GROUP_LIST) @pytest.fixture(scope="module") def users_and_groups(request, passwd_path, group_path): passwd_contents = "".join([ "{name}:{passwd}:{uid}:{gid}:{gecos}:{dir}:{shell}\n".format(**u) for u in USER_LIST ]) group_contents = "".join([ "%s:%s:%s:%s\n" % (g["name"], g["passwd"], g["gid"], ",".join(g["mem"])) for g in GROUP_LIST ]) with open(passwd_path, "a") as f: f.write(passwd_contents) with open(group_path, "a") as f: f.write(group_contents) def test_assert_passwd_by_name(users_and_groups): ent.assert_passwd_by_name("user1", {}) ent.assert_passwd_by_name("user1", dict(name="user1", uid=1001)) ent.assert_passwd_by_name("user1", USER1) try: ent.assert_passwd_by_name("user3", {}) assert False except AssertionError as e: assert str(e) == "'getpwnam(): name not found: user3'" try: ent.assert_passwd_by_name("user2", dict(name="user1")) assert False except AssertionError as e: assert str(e) == "'name' mismatch: 'user1' != 'user2'" def test_assert_passwd_by_uid(users_and_groups): ent.assert_passwd_by_uid(1001, {}) ent.assert_passwd_by_uid(1001, dict(name="user1", uid=1001)) ent.assert_passwd_by_uid(1001, USER1) try: ent.assert_passwd_by_uid(1003, {}) assert False except AssertionError as e: assert str(e) == "'getpwuid(): uid not found: 1003'" try: ent.assert_passwd_by_uid(1002, dict(name="user1")) assert False except AssertionError as e: assert str(e) == "'name' mismatch: 'user1' != 'user2'" def test_assert_passwd_list(users_and_groups): ent.assert_passwd_list(ent.contains()) ent.assert_passwd_list(ent.contains(USER1)) ent.assert_passwd_list(ent.contains_only(*USER_LIST)) try: ent.assert_passwd_list(ent.contains_only()) assert False except AssertionError as e: assert not 
re.search("expected users not found:", str(e)) assert re.search("unexpected users found:", str(e)) try: ent.assert_passwd_list(ent.contains(dict(name="non_existent"))) assert False except AssertionError as e: assert re.search("expected users not found:", str(e)) assert not re.search("unexpected users found:", str(e)) def test_assert_each_passwd_by_name(users_and_groups): ent.assert_each_passwd_by_name({}) ent.assert_each_passwd_by_name(dict(user1=USER1)) ent.assert_each_passwd_by_name(USER_NAME_DICT) try: ent.assert_each_passwd_by_name(dict(user3={})) assert False except AssertionError as e: assert str(e) == "'getpwnam(): name not found: user3'" try: ent.assert_each_passwd_by_name(dict(user1=dict(name="user2"))) assert False except AssertionError as e: assert str(e) == \ "user 'user1' mismatch: 'name' mismatch: 'user2' != 'user1'" def test_assert_each_passwd_by_uid(users_and_groups): ent.assert_each_passwd_by_uid({}) ent.assert_each_passwd_by_uid({1001: USER1}) ent.assert_each_passwd_by_uid(USER_UID_DICT) try: ent.assert_each_passwd_by_uid({1003: {}}) assert False except AssertionError as e: assert str(e) == "'getpwuid(): uid not found: 1003'" try: ent.assert_each_passwd_by_uid({1001: dict(uid=1002)}) assert False except AssertionError as e: assert str(e) == \ "user 1001 mismatch: 'uid' mismatch: 1002 != 1001" def test_assert_each_passwd_with_name(users_and_groups): ent.assert_each_passwd_with_name([]) ent.assert_each_passwd_with_name([USER1]) ent.assert_each_passwd_with_name(USER_LIST) try: ent.assert_each_passwd_with_name([dict(name="user3")]) assert False except AssertionError as e: assert str(e) == "'getpwnam(): name not found: user3'" try: ent.assert_each_passwd_with_name([dict(name="user1", uid=1002)]) assert False except AssertionError as e: assert str(e) == \ "user 'user1' mismatch: 'uid' mismatch: 1002 != 1001" def test_assert_each_passwd_with_uid(users_and_groups): ent.assert_each_passwd_with_uid([]) ent.assert_each_passwd_with_uid([USER1]) 
ent.assert_each_passwd_with_uid(USER_LIST) try: ent.assert_each_passwd_with_uid([dict(uid=1003)]) assert False except AssertionError as e: assert str(e) == "'getpwuid(): uid not found: 1003'" try: ent.assert_each_passwd_with_uid([dict(name="user2", uid=1001)]) assert False except AssertionError as e: assert str(e) == \ "user 1001 mismatch: 'name' mismatch: 'user2' != 'user1'" def test_assert_passwd(users_and_groups): ent.assert_passwd(ent.contains()) ent.assert_passwd(ent.contains(USER1)) ent.assert_passwd(ent.contains_only(*USER_LIST)) try: ent.assert_passwd(ent.contains(dict(name="user3", uid=1003))) assert False except AssertionError as e: assert re.search("list mismatch:", str(e)) assert re.search("expected users not found:", str(e)) assert not re.search("unexpected users found:", str(e)) try: ent.assert_passwd(ent.contains_only(USER1)) assert False except AssertionError as e: assert re.search("list mismatch:", str(e)) assert not re.search("expected users not found:", str(e)) assert re.search("unexpected users found:", str(e)) def test_group_member_matching(users_and_groups): ent.assert_group_by_name("empty_group", dict(mem=ent.contains())) ent.assert_group_by_name("empty_group", dict(mem=ent.contains_only())) try: ent.assert_group_by_name("empty_group", dict(mem=ent.contains("user1"))) except AssertionError as e: assert re.search("member list mismatch:", str(e)) assert re.search("expected members not found:", str(e)) ent.assert_group_by_name("one_user_group1", dict(mem=ent.contains())) ent.assert_group_by_name("one_user_group1", dict(mem=ent.contains("user1"))) ent.assert_group_by_name("one_user_group1", dict(mem=ent.contains_only("user1"))) try: ent.assert_group_by_name("one_user_group1", dict(mem=ent.contains_only())) except AssertionError as e: assert re.search("member list mismatch:", str(e)) assert re.search("unexpected members found:", str(e)) assert not re.search("expected members not found:", str(e)) try: ent.assert_group_by_name("one_user_group1", 
dict(mem=ent.contains_only("user3"))) except AssertionError as e: assert re.search("member list mismatch:", str(e)) assert re.search("unexpected members found:", str(e)) assert re.search("expected members not found:", str(e)) try: ent.assert_group_by_name("one_user_group1", dict(mem=ent.contains("user3"))) except AssertionError as e: assert re.search("member list mismatch:", str(e)) assert not re.search("unexpected members found:", str(e)) assert re.search("expected members not found:", str(e)) ent.assert_group_by_name("two_user_group", dict(mem=ent.contains())) ent.assert_group_by_name("two_user_group", dict(mem=ent.contains("user1"))) ent.assert_group_by_name("two_user_group", dict(mem=ent.contains("user1", "user2"))) ent.assert_group_by_name("two_user_group", dict(mem=ent.contains_only("user1", "user2"))) try: ent.assert_group_by_name("two_user_group", dict(mem=ent.contains_only("user1"))) except AssertionError as e: assert re.search("member list mismatch:", str(e)) assert re.search("unexpected members found:", str(e)) assert not re.search("expected members not found:", str(e)) def test_assert_group_by_name(users_and_groups): ent.assert_group_by_name("group1", {}) ent.assert_group_by_name("group1", dict(name="group1", gid=2001)) ent.assert_group_by_name("group1", GROUP1) try: ent.assert_group_by_name("group3", {}) assert False except AssertionError as e: assert str(e) == "'getgrnam(): name not found: group3'" try: ent.assert_group_by_name("group2", dict(name="group1")) assert False except AssertionError as e: assert str(e) == "'name' mismatch: 'group1' != 'group2'" def test_assert_group_by_gid(users_and_groups): ent.assert_group_by_gid(2001, {}) ent.assert_group_by_gid(2001, dict(name="group1", gid=2001)) ent.assert_group_by_gid(2001, GROUP1) try: ent.assert_group_by_gid(2003, {}) assert False except AssertionError as e: assert str(e) == "'getgrgid(): gid not found: 2003'" try: ent.assert_group_by_gid(2002, dict(name="group1")) assert False except AssertionError 
as e: assert str(e) == "'name' mismatch: 'group1' != 'group2'" def test_assert_group_list(users_and_groups): ent.assert_group_list(ent.contains()) ent.assert_group_list(ent.contains(GROUP1)) ent.assert_group_list(ent.contains_only(*GROUP_LIST)) try: ent.assert_group_list(ent.contains_only()) assert False except AssertionError as e: assert not re.search("expected groups not found:", str(e)) assert re.search("unexpected groups found:", str(e)) try: ent.assert_group_list(ent.contains(dict(name="non_existent"))) assert False except AssertionError as e: assert re.search("expected groups not found:", str(e)) assert not re.search("unexpected groups found:", str(e)) def test_assert_each_group_by_name(users_and_groups): ent.assert_each_group_by_name({}) ent.assert_each_group_by_name(dict(group1=GROUP1)) ent.assert_each_group_by_name(GROUP_NAME_DICT) try: ent.assert_each_group_by_name(dict(group3={})) assert False except AssertionError as e: assert str(e) == "'getgrnam(): name not found: group3'" try: ent.assert_each_group_by_name(dict(group1=dict(name="group2"))) assert False except AssertionError as e: assert str(e) == "group 'group1' mismatch: " + \ "'name' mismatch: 'group2' != 'group1'" def test_assert_each_group_by_gid(users_and_groups): ent.assert_each_group_by_gid({}) ent.assert_each_group_by_gid({2001: GROUP1}) ent.assert_each_group_by_gid(GROUP_GID_DICT) try: ent.assert_each_group_by_gid({2003: {}}) assert False except AssertionError as e: assert str(e) == "'getgrgid(): gid not found: 2003'" try: ent.assert_each_group_by_gid({2001: dict(gid=2002)}) assert False except AssertionError as e: assert str(e) == \ "group 2001 mismatch: 'gid' mismatch: 2002 != 2001" def test_assert_each_group_with_name(users_and_groups): ent.assert_each_group_with_name([]) ent.assert_each_group_with_name([GROUP1]) ent.assert_each_group_with_name(GROUP_LIST) try: ent.assert_each_group_with_name([dict(name="group3")]) assert False except AssertionError as e: assert str(e) == "'getgrnam(): 
name not found: group3'" try: ent.assert_each_group_with_name([dict(name="group1", gid=2002)]) assert False except AssertionError as e: assert str(e) == \ "group 'group1' mismatch: 'gid' mismatch: 2002 != 2001" def test_assert_each_group_with_gid(users_and_groups): ent.assert_each_group_with_gid([]) ent.assert_each_group_with_gid([GROUP1]) ent.assert_each_group_with_gid(GROUP_LIST) try: ent.assert_each_group_with_gid([dict(gid=2003)]) assert False except AssertionError as e: assert str(e) == "'getgrgid(): gid not found: 2003'" try: ent.assert_each_group_with_gid([dict(name="group2", gid=2001)]) assert False except AssertionError as e: assert str(e) == \ "group 2001 mismatch: 'name' mismatch: 'group2' != 'group1'" def test_assert_group(users_and_groups): ent.assert_group(ent.contains()) ent.assert_group(ent.contains(GROUP1)) ent.assert_group(ent.contains_only(*GROUP_LIST)) try: ent.assert_group(ent.contains(dict(name="group3", gid=2003))) assert False except AssertionError as e: assert re.search("list mismatch:", str(e)) assert re.search("expected groups not found:", str(e)) assert not re.search("unexpected groups found:", str(e)) try: ent.assert_group(ent.contains_only(GROUP1)) assert False except AssertionError as e: assert re.search("list mismatch:", str(e)) assert not re.search("expected groups not found:", str(e)) assert re.search("unexpected groups found:", str(e))
snakeleon/YouCompleteMe-x64
refs/heads/master
python/ycm/client/command_request.py
2
# Copyright (C) 2013 Google Inc. # # This file is part of YouCompleteMe. # # YouCompleteMe is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # YouCompleteMe is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with YouCompleteMe. If not, see <http://www.gnu.org/licenses/>. from ycm.client.base_request import BaseRequest, BuildRequestData from ycm import vimsupport DEFAULT_BUFFER_COMMAND = 'same-buffer' def _EnsureBackwardsCompatibility( arguments ): if arguments and arguments[ 0 ] == 'GoToDefinitionElseDeclaration': arguments[ 0 ] = 'GoTo' return arguments class CommandRequest( BaseRequest ): def __init__( self, arguments, extra_data = None, silent = False ): super( CommandRequest, self ).__init__() self._arguments = _EnsureBackwardsCompatibility( arguments ) self._command = arguments and arguments[ 0 ] self._extra_data = extra_data self._response = None self._request_data = None self._response_future = None self._silent = silent def Start( self ): self._request_data = BuildRequestData() if self._extra_data: self._request_data.update( self._extra_data ) self._request_data.update( { 'command_arguments': self._arguments } ) self._response_future = self.PostDataToHandlerAsync( self._request_data, 'run_completer_command' ) def Done( self ): return bool( self._response_future ) and self._response_future.done() def Response( self ): if self._response is None and self._response_future is not None: # Block self._response = self.HandleFuture( self._response_future, display_message = not self._silent ) return self._response def 
RunPostCommandActionsIfNeeded( self, modifiers, buffer_command = DEFAULT_BUFFER_COMMAND ): # This is a blocking call if not Done() self.Response() if self._response is None: # An exception was raised and handled. return # If not a dictionary or a list, the response is necessarily a # scalar: boolean, number, string, etc. In this case, we print # it to the user. if not isinstance( self._response, ( dict, list ) ): return self._HandleBasicResponse() if 'fixits' in self._response: return self._HandleFixitResponse() if 'message' in self._response: return self._HandleMessageResponse() if 'detailed_info' in self._response: return self._HandleDetailedInfoResponse() # The only other type of response we understand is GoTo, and that is the # only one that we can't detect just by inspecting the response (it should # either be a single location or a list) return self._HandleGotoResponse( buffer_command, modifiers ) def StringResponse( self ): # Retuns a supporable public API version of the response. The reason this # exists is that the ycmd API here is wonky as it originally only supported # text-responses and now has things like fixits and such. # # The supportable public API is basically any text-only response. All other # response types are returned as empty strings # This is a blocking call if not Done() self.Response() # Completer threw an error ? if self._response is None: return "" # If not a dictionary or a list, the response is necessarily a # scalar: boolean, number, string, etc. In this case, we print # it to the user. if not isinstance( self._response, ( dict, list ) ): return str( self._response ) if 'message' in self._response: return self._response[ 'message' ] if 'detailed_info' in self._response: return self._response[ 'detailed_info' ] # The only other type of response we understand is 'fixits' and GoTo. We # don't provide string versions of them. 
return "" def _HandleGotoResponse( self, buffer_command, modifiers ): if isinstance( self._response, list ): vimsupport.SetQuickFixList( [ vimsupport.BuildQfListItem( x ) for x in self._response ] ) vimsupport.OpenQuickFixList( focus = True, autoclose = True ) else: vimsupport.JumpToLocation( self._response[ 'filepath' ], self._response[ 'line_num' ], self._response[ 'column_num' ], modifiers, buffer_command ) def _HandleFixitResponse( self ): if not len( self._response[ 'fixits' ] ): vimsupport.PostVimMessage( 'No fixits found for current line', warning = False ) else: try: fixit_index = 0 # If there is more than one fixit, we need to ask the user which one # should be applied. # # If there's only one, triggered by the FixIt subcommand (as opposed to # `RefactorRename`, for example) and whose `kind` is not `quicfix`, we # still need to as the user for confirmation. fixits = self._response[ 'fixits' ] if ( len( fixits ) > 1 or ( len( fixits ) == 1 and self._command == 'FixIt' and fixits[ 0 ].get( 'kind' ) != 'quickfix' ) ): fixit_index = vimsupport.SelectFromList( "FixIt suggestion(s) available at this location. 
" "Which one would you like to apply?", [ fixit[ 'text' ] for fixit in fixits ] ) chosen_fixit = fixits[ fixit_index ] if chosen_fixit[ 'resolve' ]: self._request_data.update( { 'fixit': chosen_fixit } ) response = self.PostDataToHandler( self._request_data, 'resolve_fixit' ) if response is None: return fixits = response[ 'fixits' ] assert len( fixits ) == 1 chosen_fixit = fixits[ 0 ] vimsupport.ReplaceChunks( chosen_fixit[ 'chunks' ], silent = self._command == 'Format' ) except RuntimeError as e: vimsupport.PostVimMessage( str( e ) ) def _HandleBasicResponse( self ): vimsupport.PostVimMessage( self._response, warning = False ) def _HandleMessageResponse( self ): vimsupport.PostVimMessage( self._response[ 'message' ], warning = False ) def _HandleDetailedInfoResponse( self ): vimsupport.WriteToPreviewWindow( self._response[ 'detailed_info' ] ) def SendCommandRequestAsync( arguments, extra_data = None, silent = True ): request = CommandRequest( arguments, extra_data = extra_data, silent = silent ) request.Start() # Don't block return request def SendCommandRequest( arguments, modifiers, buffer_command = DEFAULT_BUFFER_COMMAND, extra_data = None ): request = SendCommandRequestAsync( arguments, extra_data = extra_data, silent = False ) # Block here to get the response request.RunPostCommandActionsIfNeeded( modifiers, buffer_command ) return request.Response() def GetCommandResponse( arguments, extra_data = None ): request = SendCommandRequestAsync( arguments, extra_data = extra_data, silent = True ) # Block here to get the response return request.StringResponse()
Eficent/odoomrp-wip
refs/heads/8.0
procurement_orderpoint_no_confirm/__openerp__.py
27
# -*- encoding: utf-8 -*- ############################################################################## # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see http://www.gnu.org/licenses/. # ############################################################################## { "name": "Procurement Orderpoint No Confirm", "version": "1.0", "depends": ["stock"], "author": "OdooMRP team," "AvanzOSC," "Serv. Tecnol. Avanzados - Pedro M. Baeza", "contributors": [ "Ainara Galdona <ainaragaldona@avanzosc.es>", "Pedro M. Baeza <pedro.baeza@serviciosbaeza.com>", "Ana Juaristi <anajuaristi@avanzosc.es>", ], "category": "Procurement", 'installable': True, 'auto_install': False, }
jisqyv/p2pool
refs/heads/master
nattraverso/utils.py
288
""" Various utility functions used in the nattraverso package. @author: Raphael Slinckx @copyright: Copyright 2005 @license: LGPL @contact: U{raphael@slinckx.net<mailto:raphael@slinckx.net>} @version: 0.1.0 """ __revision__ = "$id" def is_rfc1918_ip(ip): """ Checks if the given ip address is a rfc1918 one. @param ip: The ip address to test @type ip: a string "x.x.x.x" @return: True if it's a LAN address, False otherwise """ if isinstance(ip, basestring): ip = _ip_to_number(ip) for net, mask in _nets: if ip&mask == net: return True return False def is_bogus_ip(ip): """ Checks if the given ip address is bogus, i.e. 0.0.0.0 or 127.0.0.1. @param ip: The ip address to test @type ip: a string "x.x.x.x" @return: True if it's bogus, False otherwise """ return ip.startswith('0.') or ip.startswith('127.') def _ip_to_number(ipstr): """ Translate a string ip address to a packed number. @param ipstr: the ip address to transform @type ipstr: a string "x.x.x.x" @return: an int32 number representing the ip address """ net = [ int(digit) for digit in ipstr.split('.') ] + [ 0, 0, 0 ] net = net[:4] return ((((((0L+net[0])<<8) + net[1])<<8) + net[2])<<8) +net[3] # List of rfc1918 net/mask _rfc1918_networks = [('127', 8), ('192.168', 16), ('10', 8), ('172.16', 12)] # Machine readable form of the above _nets = [(_ip_to_number(net), (2L**32 -1)^(2L**(32-mask)-1)) for net, mask in _rfc1918_networks]
OfficialMan/Sark
refs/heads/master
plugins/function_flow.py
4
import networkx as nx import idaapi from sark import get_codeblock, get_flowchart, get_block_start, get_nx_graph try: from sark.ui import ActionHandler use_new_ui = True except: use_new_ui = False COLOR_REACHABLE = 0x66EE11 COLOR_UNREACHABLE = 0x6611EE COLOR_REACHING = 0x11EE66 COLOR_NOT_REACHING = 0x1166EE COLOR_SOURCE = 0xEE6611 COLOR_NONE = 0xFFFFFFFF COLOR_EXIT = 0x000048 def iter_exit_nodes(ea): for block in get_flowchart(ea): # Check if there are successors for successor in block.next: break else: yield block def clear_func(ea): for block in get_flowchart(ea): block.color = COLOR_NONE def mark_not_reaching_nodes(ea, source_color=COLOR_SOURCE, other_color=COLOR_NOT_REACHING): graph = get_nx_graph(ea) graph = graph.reverse() block_ea = get_block_start(ea) reaching = nx.descendants(graph, block_ea) for node_ea in graph.nodes_iter(): if node_ea not in reaching: get_codeblock(node_ea).color = other_color get_codeblock(ea).color = source_color def mark_reaching_nodes(ea, source_color=COLOR_SOURCE, other_color=COLOR_REACHING): graph = get_nx_graph(ea) graph = graph.reverse() block_ea = get_block_start(ea) for descendant in nx.descendants(graph, block_ea): get_codeblock(descendant).color = other_color get_codeblock(ea).color = source_color def mark_unreachable_nodes(ea, source_color=COLOR_SOURCE, other_color=COLOR_UNREACHABLE): graph = get_nx_graph(ea) block_ea = get_block_start(ea) descendants = nx.descendants(graph, block_ea) for block in get_flowchart(ea): if block.startEA not in descendants: block.color = other_color get_codeblock(ea).color = source_color def mark_reachable_nodes(ea, source_color=COLOR_SOURCE, other_color=COLOR_REACHABLE): graph = get_nx_graph(ea) block_ea = get_block_start(ea) for descendant in nx.descendants(graph, block_ea): get_codeblock(descendant).color = other_color get_codeblock(ea).color = source_color def mark_exit_nodes(ea, node_color=COLOR_EXIT): for block in iter_exit_nodes(ea): block.color = node_color if use_new_ui: 
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # Start of IDA >= 6.7 Code class MarkReachableNodesHandler(ActionHandler): TEXT = "Reachable" def _activate(self, ctx): clear_func(ctx.cur_ea) mark_reachable_nodes(ctx.cur_ea) class MarkUnReachableNodesHandler(ActionHandler): TEXT = "Unreachable" def _activate(self, ctx): clear_func(ctx.cur_ea) mark_unreachable_nodes(ctx.cur_ea) class MarkReachingNodesHandler(ActionHandler): TEXT = "Reaching" def _activate(self, ctx): clear_func(ctx.cur_ea) mark_reaching_nodes(ctx.cur_ea) class MarkNotReachingNodesHandler(ActionHandler): TEXT = "Not Reaching" def _activate(self, ctx): clear_func(ctx.cur_ea) mark_not_reaching_nodes(ctx.cur_ea) class MarkClearHandler(ActionHandler): TEXT = "Clear" def _activate(self, ctx): clear_func(ctx.cur_ea) class MarkExits(ActionHandler): TEXT = "Exits" def _activate(self, ctx): clear_func(ctx.cur_ea) mark_exit_nodes(ctx.cur_ea) idaapi.msg("\n" * 2) for block in iter_exit_nodes(ctx.cur_ea): idaapi.msg("Exit at 0x{:08X}\n".format(block.startEA)) class Hooks(idaapi.UI_Hooks): def populating_tform_popup(self, form, popup): # You can attach here. pass def finish_populating_tform_popup(self, form, popup): # Or here, after the popup is done being populated by its owner. 
if idaapi.get_tform_type(form) == idaapi.BWN_DISASM: idaapi.attach_action_to_popup(form, popup, MarkReachableNodesHandler.get_name(), "Mark/") idaapi.attach_action_to_popup(form, popup, MarkUnReachableNodesHandler.get_name(), "Mark/") idaapi.attach_action_to_popup(form, popup, MarkReachingNodesHandler.get_name(), "Mark/") idaapi.attach_action_to_popup(form, popup, MarkNotReachingNodesHandler.get_name(), "Mark/") idaapi.attach_action_to_popup(form, popup, MarkExits.get_name(), "Mark/") idaapi.attach_action_to_popup(form, popup, MarkClearHandler.get_name(), "Mark/") class FunctionFlow(idaapi.plugin_t): flags = idaapi.PLUGIN_PROC comment = "Show Flow in Functions" help = "Show code flow inside functions" wanted_name = "Function Flow" wanted_hotkey = "" def init(self): MarkReachableNodesHandler.register() MarkUnReachableNodesHandler.register() MarkReachingNodesHandler.register() MarkNotReachingNodesHandler.register() MarkExits.register() MarkClearHandler.register() self.hooks = Hooks() self.hooks.hook() return idaapi.PLUGIN_KEEP def term(self): MarkReachableNodesHandler.unregister() MarkUnReachableNodesHandler.unregister() MarkReachingNodesHandler.unregister() MarkNotReachingNodesHandler.unregister() MarkExits.unregister() MarkClearHandler.unregister() def run(self, arg): pass # End of IDA >= 6.7 Code #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ else: # Old (< 6.7) ui code #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # Start of IDA < 6.7 Code def mark_reachable(): ea = idaapi.get_screen_ea() clear_func(ea) mark_reachable_nodes(ea) def mark_unreachable(): ea = idaapi.get_screen_ea() clear_func(ea) mark_unreachable_nodes(ea) def mark_reaching(): ea = idaapi.get_screen_ea() clear_func(ea) mark_reaching_nodes(ea) def mark_not_reaching(): ea = idaapi.get_screen_ea() clear_func(ea) mark_not_reaching_nodes(ea) def mark_exists(): ea = idaapi.get_screen_ea() clear_func(ea) mark_exit_nodes(ea) idaapi.msg("\n" * 2) for block in 
iter_exit_nodes(ea): idaapi.msg("Exit at 0x{:08X}\n".format(block.startEA)) def mark_clear(): ea = idaapi.get_screen_ea() clear_func(ea) class FunctionFlow(idaapi.plugin_t): flags = idaapi.PLUGIN_PROC comment = "Show Flow in Functions" help = "Show code flow inside functions" wanted_name = "Function Flow" wanted_hotkey = "" def init(self): idaapi.add_menu_item("View/Mark/", "Reachable", None, 0, mark_reachable, tuple()) idaapi.add_menu_item("View/Mark/", "Un-Reachable", None, 0, mark_unreachable, tuple()) idaapi.add_menu_item("View/Mark/", "Reaching", None, 0, mark_reaching, tuple()) idaapi.add_menu_item("View/Mark/", "Not Reaching", None, 0, mark_not_reaching, tuple()) idaapi.add_menu_item("View/Mark/", "Exists", None, 0, mark_exists, tuple()) idaapi.add_menu_item("View/Mark/", "Clear", None, 0, mark_clear, tuple()) return idaapi.PLUGIN_KEEP def term(self): pass def run(self, arg): pass # End of IDA < 6.7 Code #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ def PLUGIN_ENTRY(): return FunctionFlow()
abhishekgahlot/kivy
refs/heads/master
kivy/tools/pep8checker/pep8kivy.py
17
import sys from os import walk from os.path import isdir, join, abspath, dirname import pep8 import time htmlmode = False pep8_ignores = ( 'E125', # continuation line does not # distinguish itself from next logical line 'E126', # continuation line over-indented for hanging indent 'E127', # continuation line over-indented for visual indent 'E128') # continuation line under-indented for visual indent class KivyStyleChecker(pep8.Checker): def __init__(self, filename): pep8.Checker.__init__(self, filename, ignore=pep8_ignores) def report_error(self, line_number, offset, text, check): if htmlmode is False: return pep8.Checker.report_error(self, line_number, offset, text, check) # html generation print('<tr><td>{0}</td><td>{1}</td></tr>'.format(line_number, text)) if __name__ == '__main__': def usage(): print('Usage: python pep8kivy.py [-html] <file_or_folder_to_check>*') print('Folders will be checked recursively.') sys.exit(1) if len(sys.argv) < 2: usage() if sys.argv[1] == '-html': if len(sys.argv) < 3: usage() else: htmlmode = True targets = sys.argv[-1].split() elif sys.argv == 2: targets = sys.argv[-1] else: targets = sys.argv[-1].split() def check(fn): try: checker = KivyStyleChecker(fn) except IOError: # File couldn't be opened, so was deleted apparently. # Don't check deleted files. 
return 0 return checker.check_all() errors = 0 exclude_dirs = ['/lib', '/coverage', '/pep8', '/doc'] exclude_files = ['kivy/gesture.py', 'osx/build.py', 'win32/build.py', 'kivy/tools/stub-gl-debug.py', 'kivy/modules/webdebugger.py'] for target in targets: if isdir(target): if htmlmode: path = join(dirname(abspath(__file__)), 'pep8base.html') print(open(path, 'r').read()) print('''<p>Generated: %s</p><table>''' % (time.strftime('%c'))) for dirpath, dirnames, filenames in walk(target): cont = False for pat in exclude_dirs: if pat in dirpath: cont = True break if cont: continue for filename in filenames: if not filename.endswith('.py'): continue cont = False complete_filename = join(dirpath, filename) for pat in exclude_files: if complete_filename.endswith(pat): cont = True if cont: continue if htmlmode: print('<tr><th colspan="2">%s</td></tr>' \ % complete_filename) errors += check(complete_filename) if htmlmode: print('</div></div></table></body></html>') else: # Got a single file to check for pat in exclude_dirs + exclude_files: if pat in target: break else: if target.endswith('.py'): errors += check(target) # If errors is 0 we return with 0. That's just fine. sys.exit(errors)
mollyproject/mollyproject
refs/heads/master
molly/external_media/models.py
1
from __future__ import division import os.path import random import urllib from datetime import datetime try: from cStringIO import StringIO except ImportError: from StringIO import StringIO from PIL import Image from django.db import models from django.conf import settings from django.core.urlresolvers import reverse class ExternalImage(models.Model): url = models.URLField() etag = models.TextField(null=True) last_modified = models.TextField(null=True) last_updated = models.DateTimeField() # This one is in UTC width = models.PositiveIntegerField(null=True) height = models.PositiveIntegerField(null=True) def save(self, force_insert=False, force_update=False, *args, **kwargs): self.last_updated = datetime.utcnow() super(ExternalImage, self).save(force_insert=False, force_update=False, **kwargs) def get_external_image_dir(): return getattr(settings, 'EXTERNAL_IMAGE_DIR', os.path.join(settings.CACHE_DIR, 'external_images')) class ExternalImageSized(models.Model): external_image = models.ForeignKey(ExternalImage) width = models.PositiveIntegerField() height = models.PositiveIntegerField() slug = models.SlugField() content_type = models.TextField() def get_filename(self): external_image_dir = get_external_image_dir() if not self.slug: while not self.slug or ExternalImageSized.objects.filter(slug=self.slug).count(): self.slug = "%08x" % random.randint(0, 16**8-1) if not os.path.exists(external_image_dir): os.makedirs(external_image_dir) return os.path.join(external_image_dir, self.slug) def get_absolute_url(self): return reverse('external_media:image', args=[self.slug]) def save(self, force_insert=False, force_update=False, *args, **kwargs): if not self.id: response = urllib.urlopen(self.external_image.url) data = StringIO(response.read()) im = Image.open(data) size = im.size ratio = size[1] / size[0] if self.width >= size[0]: resized = im else: try: resized = im.resize((self.width, int(round(self.width*ratio))), Image.ANTIALIAS) except IOError, e: if e.message == 
"cannot read interlaced PNG files": # Ain't nothing can be done until you upgrade PIL to 1.1.7 resized = im else: raise self.width, self.height = resized.size try: resized.save(self.get_filename(), format='jpeg') self.content_type = 'image/jpeg' except IOError, e: try: resized.convert('RGB').save(self.get_filename(), format='jpeg') self.content_type = 'image/jpeg' except IOError: open(self.get_filename(), 'wb').write(data.getvalue()) self.content_type = response.headers['content-type'] self.external_image.width = size[0] self.external_image.height = size[1] super(ExternalImageSized, self).save(force_insert=False, force_update=False, **kwargs) def delete(self): try: os.unlink(self.get_filename()) except OSError: # Ignore errors where we're trying to delete a file that's already # been deleted pass super(ExternalImageSized, self).delete()
Mitali-Sodhi/CodeLingo
refs/heads/master
Dataset/python/test_sentinel.py
40
from __future__ import with_statement import pytest from redis import exceptions from redis.sentinel import (Sentinel, SentinelConnectionPool, MasterNotFoundError, SlaveNotFoundError) from redis._compat import next import redis.sentinel class SentinelTestClient(object): def __init__(self, cluster, id): self.cluster = cluster self.id = id def sentinel_masters(self): self.cluster.connection_error_if_down(self) return {self.cluster.service_name: self.cluster.master} def sentinel_slaves(self, master_name): self.cluster.connection_error_if_down(self) if master_name != self.cluster.service_name: return [] return self.cluster.slaves class SentinelTestCluster(object): def __init__(self, service_name='mymaster', ip='127.0.0.1', port=6379): self.clients = {} self.master = { 'ip': ip, 'port': port, 'is_master': True, 'is_sdown': False, 'is_odown': False, 'num-other-sentinels': 0, } self.service_name = service_name self.slaves = [] self.nodes_down = set() def connection_error_if_down(self, node): if node.id in self.nodes_down: raise exceptions.ConnectionError def client(self, host, port, **kwargs): return SentinelTestClient(self, (host, port)) @pytest.fixture() def cluster(request): def teardown(): redis.sentinel.StrictRedis = saved_StrictRedis cluster = SentinelTestCluster() saved_StrictRedis = redis.sentinel.StrictRedis redis.sentinel.StrictRedis = cluster.client request.addfinalizer(teardown) return cluster @pytest.fixture() def sentinel(request, cluster): return Sentinel([('foo', 26379), ('bar', 26379)]) def test_discover_master(sentinel): address = sentinel.discover_master('mymaster') assert address == ('127.0.0.1', 6379) def test_discover_master_error(sentinel): with pytest.raises(MasterNotFoundError): sentinel.discover_master('xxx') def test_discover_master_sentinel_down(cluster, sentinel): # Put first sentinel 'foo' down cluster.nodes_down.add(('foo', 26379)) address = sentinel.discover_master('mymaster') assert address == ('127.0.0.1', 6379) # 'bar' is now first 
sentinel assert sentinel.sentinels[0].id == ('bar', 26379) def test_master_min_other_sentinels(cluster): sentinel = Sentinel([('foo', 26379)], min_other_sentinels=1) # min_other_sentinels with pytest.raises(MasterNotFoundError): sentinel.discover_master('mymaster') cluster.master['num-other-sentinels'] = 2 address = sentinel.discover_master('mymaster') assert address == ('127.0.0.1', 6379) def test_master_odown(cluster, sentinel): cluster.master['is_odown'] = True with pytest.raises(MasterNotFoundError): sentinel.discover_master('mymaster') def test_master_sdown(cluster, sentinel): cluster.master['is_sdown'] = True with pytest.raises(MasterNotFoundError): sentinel.discover_master('mymaster') def test_discover_slaves(cluster, sentinel): assert sentinel.discover_slaves('mymaster') == [] cluster.slaves = [ {'ip': 'slave0', 'port': 1234, 'is_odown': False, 'is_sdown': False}, {'ip': 'slave1', 'port': 1234, 'is_odown': False, 'is_sdown': False}, ] assert sentinel.discover_slaves('mymaster') == [ ('slave0', 1234), ('slave1', 1234)] # slave0 -> ODOWN cluster.slaves[0]['is_odown'] = True assert sentinel.discover_slaves('mymaster') == [ ('slave1', 1234)] # slave1 -> SDOWN cluster.slaves[1]['is_sdown'] = True assert sentinel.discover_slaves('mymaster') == [] cluster.slaves[0]['is_odown'] = False cluster.slaves[1]['is_sdown'] = False # node0 -> DOWN cluster.nodes_down.add(('foo', 26379)) assert sentinel.discover_slaves('mymaster') == [ ('slave0', 1234), ('slave1', 1234)] def test_master_for(cluster, sentinel): master = sentinel.master_for('mymaster', db=9) assert master.ping() assert master.connection_pool.master_address == ('127.0.0.1', 6379) # Use internal connection check master = sentinel.master_for('mymaster', db=9, check_connection=True) assert master.ping() def test_slave_for(cluster, sentinel): cluster.slaves = [ {'ip': '127.0.0.1', 'port': 6379, 'is_odown': False, 'is_sdown': False}, ] slave = sentinel.slave_for('mymaster', db=9) assert slave.ping() def 
test_slave_for_slave_not_found_error(cluster, sentinel): cluster.master['is_odown'] = True slave = sentinel.slave_for('mymaster', db=9) with pytest.raises(SlaveNotFoundError): slave.ping() def test_slave_round_robin(cluster, sentinel): cluster.slaves = [ {'ip': 'slave0', 'port': 6379, 'is_odown': False, 'is_sdown': False}, {'ip': 'slave1', 'port': 6379, 'is_odown': False, 'is_sdown': False}, ] pool = SentinelConnectionPool('mymaster', sentinel) rotator = pool.rotate_slaves() assert next(rotator) in (('slave0', 6379), ('slave1', 6379)) assert next(rotator) in (('slave0', 6379), ('slave1', 6379)) # Fallback to master assert next(rotator) == ('127.0.0.1', 6379) with pytest.raises(SlaveNotFoundError): next(rotator)
yuanzhao/gpdb
refs/heads/master
gpMgmt/bin/gppylib/db/dbconn.py
14
#!/usr/bin/env python # # Copyright (c) Greenplum Inc 2008. All Rights Reserved. # """ TODO: module docs """ import sys import os import stat try: from pygresql import pgdb from gppylib.commands.unix import UserId except ImportError, e: sys.exit('Error: unable to import module: ' + str(e)) from gppylib import gplog logger = gplog.get_default_logger() class ConnectionError(StandardError): pass class Pgpass(): """ Class for handling .pgpass file. """ entries = [] valid_pgpass = True def __init__(self): HOME = os.getenv('HOME') PGPASSFILE = os.getenv('PGPASSFILE', '%s/.pgpass' % HOME) if not os.path.exists(PGPASSFILE): return st_info = os.stat(PGPASSFILE) mode = str(oct(st_info[stat.ST_MODE] & 0777)) if mode != "0600": print 'WARNING: password file "%s" has group or world access; permissions should be u=rw (0600) or less' % PGPASSFILE self.valid_pgpass = False return try: fp = open(PGPASSFILE, 'r') try: lineno = 1 for line in fp: line = line.strip() if line.startswith('#'): continue try: (hostname, port, database, username, password) = line.strip().split(':') entry = {'hostname': hostname, 'port': port, 'database': database, 'username': username, 'password': password } self.entries.append(entry) except: print 'Invalid line in .pgpass file. Line number %d' % lineno lineno += 1 except IOError: pass finally: if fp: fp.close() except OSError: pass def get_password(self, username, hostname, port, database): for entry in self.entries: if ((entry['hostname'] == hostname or entry['hostname'] == '*') and (entry['port'] == str(port) or entry['port'] == '*') and (entry['database'] == database or entry['database'] == '*') and (entry['username'] == username or entry['username'] == '*')): return entry['password'] return None def pgpass_valid(self): return self.valid_pgpass class DbURL: """ DbURL is used to store all of the data required to get at a PG or GP database. 
""" pghost='foo' pgport=5432 pgdb='template1' pguser='username' pgpass='pass' timeout=None retries=None def __init__(self,hostname=None,port=0,dbname=None,username=None,password=None,timeout=None,retries=None): if hostname is None: self.pghost = os.environ.get('PGHOST', 'localhost') else: self.pghost = hostname if port is 0: self.pgport = int(os.environ.get('PGPORT', '5432')) else: self.pgport = int(port) if dbname is None: self.pgdb = os.environ.get('PGDATABASE', 'template1') else: self.pgdb = dbname if username is None: self.pguser = os.environ.get('PGUSER', os.environ.get('USER', UserId.local('Get uid'))) if self.pguser is None or self.pguser == '': raise Exception('Both $PGUSER and $USER env variables are not set!') else: self.pguser = username if password is None: pgpass = Pgpass() if pgpass.pgpass_valid(): password = pgpass.get_password(self.pguser, self.pghost, self.pgport, self.pgdb) if password: self.pgpass = password else: self.pgpass = os.environ.get('PGPASSWORD', None) else: self.pgpass = password if timeout is not None: self.timeout = int(timeout) if retries is None: self.retries = 1 else: self.retries = int(retries) def __str__(self): # MPP-13617 def canonicalize(s): if ':' not in s: return s return '[' + s + ']' return "%s:%d:%s:%s:%s" % \ (canonicalize(self.pghost),self.pgport,self.pgdb,self.pguser,self.pgpass) def connect(dburl, utility=False, verbose=False, encoding=None, allowSystemTableMods=None, logConn=True): if utility: options = '-c gp_session_role=utility' else: options = '' # MPP-13779, et al if allowSystemTableMods in ['dml']: options += ' -c allow_system_table_mods=' + allowSystemTableMods elif allowSystemTableMods is not None: raise Exception('allowSystemTableMods invalid: %s' % allowSystemTableMods) # bypass pgdb.connect() and instead call pgdb._connect_ # to avoid silly issues with : in ipv6 address names and the url string # dbbase = dburl.pgdb dbhost = dburl.pghost dbport = int(dburl.pgport) dbopt = options dbtty = "1" dbuser = 
dburl.pguser dbpasswd = dburl.pgpass timeout = dburl.timeout cnx = None # All quotation and escaping here are to handle database name containing # special characters like ' and \ and white spaces. # Need to escape backslashes and single quote in db name # Also single quoted the connection string for dbname dbbase = dbbase.replace('\\', '\\\\') dbbase = dbbase.replace('\'', '\\\'') # MPP-14121, use specified connection timeout # Single quote the connection string for dbbase name if timeout is not None: cstr = "dbname='%s' connect_timeout=%s" % (dbbase, timeout) retries = dburl.retries else: cstr = "dbname='%s'" % dbbase retries = 1 # This flag helps to avoid logging the connection string in some special # situations as requested if (logConn == True): (logger.info if timeout is not None else logger.debug)("Connecting to %s" % cstr) for i in range(retries): try: cnx = pgdb._connect_(cstr, dbhost, dbport, dbopt, dbtty, dbuser, dbpasswd) break except pgdb.InternalError, e: if 'timeout expired' in str(e): logger.warning('Timeout expired connecting to %s, attempt %d/%d' % (dbbase, i+1, retries)) continue raise if cnx is None: raise ConnectionError('Failed to connect to %s' % dbbase) conn = pgdb.pgdbCnx(cnx) #by default, libpq will print WARNINGS to stdout if not verbose: cursor=conn.cursor() cursor.execute("SET CLIENT_MIN_MESSAGES='ERROR'") conn.commit() cursor.close() # set client encoding if needed if encoding: cursor=conn.cursor() cursor.execute("SET CLIENT_ENCODING='%s'" % encoding) conn.commit() cursor.close() def __enter__(self): return self def __exit__(self, type, value, traceback): self.close() conn.__class__.__enter__, conn.__class__.__exit__ = __enter__, __exit__ return conn def execSQL(conn,sql): """ If necessary, user must invoke conn.commit(). Do *NOT* violate that API here without considering the existing callers of this function. 
""" cursor=conn.cursor() cursor.execute(sql) return cursor def execSQLForSingletonRow(conn, sql): """ Run SQL that returns exactly one row, and return that one row TODO: Handle like gppylib.system.comfigurationImplGpdb.fetchSingleOutputRow(). In the event of the wrong number of rows/columns, some logging would be helpful... """ cursor=conn.cursor() cursor.execute(sql) if cursor.rowcount != 1 : raise UnexpectedRowsError(1, cursor.rowcount, sql) res = cursor.fetchall()[0] cursor.close() return res class UnexpectedRowsError(Exception): def __init__(self, expected, actual, sql): self.expected, self.actual, self.sql = expected, actual, sql Exception.__init__(self, "SQL retrieved %d rows but %d was expected:\n%s" % \ (self.actual, self.expected, self.sql)) def execSQLForSingleton(conn, sql): """ Run SQL that returns exactly one row and one column, and return that cell TODO: Handle like gppylib.system.comfigurationImplGpdb.fetchSingleOutputRow(). In the event of the wrong number of rows/columns, some logging would be helpful... """ row = execSQLForSingletonRow(conn, sql) if len(row) > 1: raise Exception("SQL retrieved %d columns but 1 was expected:\n%s" % \ (len(row), sql)) return row[0] def executeUpdateOrInsert(conn, sql, expectedRowUpdatesOrInserts): cursor=conn.cursor() cursor.execute(sql) if cursor.rowcount != expectedRowUpdatesOrInserts : raise Exception("SQL affected %s rows but %s were expected:\n%s" % \ (cursor.rowcount, expectedRowUpdatesOrInserts, sql)) return cursor
seocam/django
refs/heads/master
tests/migrations/test_migrations_first/thefirst.py
2995
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): operations = [ migrations.CreateModel( "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=255)), ("slug", models.SlugField(null=True)), ("age", models.IntegerField(default=0)), ("silly_field", models.BooleanField(default=False)), ], ), migrations.CreateModel( "Tribble", [ ("id", models.AutoField(primary_key=True)), ("fluffy", models.BooleanField(default=True)), ], ) ]
oliverlee/sympy
refs/heads/master
sympy/unify/tests/test_rewrite.py
59
from sympy.unify.rewrite import rewriterule from sympy import sin, Basic, Symbol, S from sympy.abc import x, y from sympy.strategies.rl import rebuild from sympy.assumptions import Q p, q = Symbol('p'), Symbol('q') def test_simple(): rl = rewriterule(Basic(p, 1), Basic(p, 2), variables=(p,)) assert list(rl(Basic(3, 1))) == [Basic(3, 2)] p1 = p**2 p2 = p**3 rl = rewriterule(p1, p2, variables=(p,)) expr = x**2 assert list(rl(expr)) == [x**3] def test_simple_variables(): rl = rewriterule(Basic(x, 1), Basic(x, 2), variables=(x,)) assert list(rl(Basic(3, 1))) == [Basic(3, 2)] rl = rewriterule(x**2, x**3, variables=(x,)) assert list(rl(y**2)) == [y**3] def test_moderate(): p1 = p**2 + q**3 p2 = (p*q)**4 rl = rewriterule(p1, p2, (p, q)) expr = x**2 + y**3 assert list(rl(expr)) == [(x*y)**4] def test_sincos(): p1 = sin(p)**2 + sin(p)**2 p2 = 1 rl = rewriterule(p1, p2, (p, q)) assert list(rl(sin(x)**2 + sin(x)**2)) == [1] assert list(rl(sin(y)**2 + sin(y)**2)) == [1] def test_Exprs_ok(): rl = rewriterule(p+q, q+p, (p, q)) next(rl(x+y)).is_commutative str(next(rl(x+y))) def test_condition_simple(): rl = rewriterule(x, x+1, [x], lambda x: x < 10) assert not list(rl(S(15))) assert rebuild(next(rl(S(5)))) == 6 def test_condition_multiple(): rl = rewriterule(x + y, x**y, [x,y], lambda x, y: x.is_integer) a = Symbol('a') b = Symbol('b', integer=True) expr = a + b assert list(rl(expr)) == [b**a] c = Symbol('c', integer=True) d = Symbol('d', integer=True) assert set(rl(c + d)) == set([c**d, d**c]) def test_assumptions(): rl = rewriterule(x + y, x**y, [x, y], assume=Q.integer(x)) a, b = map(Symbol, 'ab') expr = a + b assert list(rl(expr, Q.integer(b))) == [b**a]
tangrams/yaml-cpp
refs/heads/master
test/gtest-1.8.0/googlemock/test/gmock_output_test.py
986
#!/usr/bin/env python # # Copyright 2008, Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """Tests the text output of Google C++ Mocking Framework. SYNOPSIS gmock_output_test.py --build_dir=BUILD/DIR --gengolden # where BUILD/DIR contains the built gmock_output_test_ file. 
gmock_output_test.py --gengolden gmock_output_test.py """ __author__ = 'wan@google.com (Zhanyong Wan)' import os import re import sys import gmock_test_utils # The flag for generating the golden file GENGOLDEN_FLAG = '--gengolden' PROGRAM_PATH = gmock_test_utils.GetTestExecutablePath('gmock_output_test_') COMMAND = [PROGRAM_PATH, '--gtest_stack_trace_depth=0', '--gtest_print_time=0'] GOLDEN_NAME = 'gmock_output_test_golden.txt' GOLDEN_PATH = os.path.join(gmock_test_utils.GetSourceDir(), GOLDEN_NAME) def ToUnixLineEnding(s): """Changes all Windows/Mac line endings in s to UNIX line endings.""" return s.replace('\r\n', '\n').replace('\r', '\n') def RemoveReportHeaderAndFooter(output): """Removes Google Test result report's header and footer from the output.""" output = re.sub(r'.*gtest_main.*\n', '', output) output = re.sub(r'\[.*\d+ tests.*\n', '', output) output = re.sub(r'\[.* test environment .*\n', '', output) output = re.sub(r'\[=+\] \d+ tests .* ran.*', '', output) output = re.sub(r'.* FAILED TESTS\n', '', output) return output def RemoveLocations(output): """Removes all file location info from a Google Test program's output. Args: output: the output of a Google Test program. Returns: output with all file location info (in the form of 'DIRECTORY/FILE_NAME:LINE_NUMBER: 'or 'DIRECTORY\\FILE_NAME(LINE_NUMBER): ') replaced by 'FILE:#: '. 
""" return re.sub(r'.*[/\\](.+)(\:\d+|\(\d+\))\:', 'FILE:#:', output) def NormalizeErrorMarker(output): """Normalizes the error marker, which is different on Windows vs on Linux.""" return re.sub(r' error: ', ' Failure\n', output) def RemoveMemoryAddresses(output): """Removes memory addresses from the test output.""" return re.sub(r'@\w+', '@0x#', output) def RemoveTestNamesOfLeakedMocks(output): """Removes the test names of leaked mock objects from the test output.""" return re.sub(r'\(used in test .+\) ', '', output) def GetLeakyTests(output): """Returns a list of test names that leak mock objects.""" # findall() returns a list of all matches of the regex in output. # For example, if '(used in test FooTest.Bar)' is in output, the # list will contain 'FooTest.Bar'. return re.findall(r'\(used in test (.+)\)', output) def GetNormalizedOutputAndLeakyTests(output): """Normalizes the output of gmock_output_test_. Args: output: The test output. Returns: A tuple (the normalized test output, the list of test names that have leaked mocks). """ output = ToUnixLineEnding(output) output = RemoveReportHeaderAndFooter(output) output = NormalizeErrorMarker(output) output = RemoveLocations(output) output = RemoveMemoryAddresses(output) return (RemoveTestNamesOfLeakedMocks(output), GetLeakyTests(output)) def GetShellCommandOutput(cmd): """Runs a command in a sub-process, and returns its STDOUT in a string.""" return gmock_test_utils.Subprocess(cmd, capture_stderr=False).output def GetNormalizedCommandOutputAndLeakyTests(cmd): """Runs a command and returns its normalized output and a list of leaky tests. Args: cmd: the shell command. """ # Disables exception pop-ups on Windows. 
os.environ['GTEST_CATCH_EXCEPTIONS'] = '1' return GetNormalizedOutputAndLeakyTests(GetShellCommandOutput(cmd)) class GMockOutputTest(gmock_test_utils.TestCase): def testOutput(self): (output, leaky_tests) = GetNormalizedCommandOutputAndLeakyTests(COMMAND) golden_file = open(GOLDEN_PATH, 'rb') golden = golden_file.read() golden_file.close() # The normalized output should match the golden file. self.assertEquals(golden, output) # The raw output should contain 2 leaked mock object errors for # test GMockOutputTest.CatchesLeakedMocks. self.assertEquals(['GMockOutputTest.CatchesLeakedMocks', 'GMockOutputTest.CatchesLeakedMocks'], leaky_tests) if __name__ == '__main__': if sys.argv[1:] == [GENGOLDEN_FLAG]: (output, _) = GetNormalizedCommandOutputAndLeakyTests(COMMAND) golden_file = open(GOLDEN_PATH, 'wb') golden_file.write(output) golden_file.close() else: gmock_test_utils.Main()
zhjunlang/kbengine
refs/heads/master
kbe/res/scripts/common/Lib/glob.py
86
"""Filename globbing utility.""" import os import re import fnmatch __all__ = ["glob", "iglob"] def glob(pathname): """Return a list of paths matching a pathname pattern. The pattern may contain simple shell-style wildcards a la fnmatch. However, unlike fnmatch, filenames starting with a dot are special cases that are not matched by '*' and '?' patterns. """ return list(iglob(pathname)) def iglob(pathname): """Return an iterator which yields the paths matching a pathname pattern. The pattern may contain simple shell-style wildcards a la fnmatch. However, unlike fnmatch, filenames starting with a dot are special cases that are not matched by '*' and '?' patterns. """ dirname, basename = os.path.split(pathname) if not has_magic(pathname): if basename: if os.path.lexists(pathname): yield pathname else: # Patterns ending with a slash should match only directories if os.path.isdir(dirname): yield pathname return if not dirname: yield from glob1(None, basename) return # `os.path.split()` returns the argument itself as a dirname if it is a # drive or UNC path. Prevent an infinite recursion if a drive or UNC path # contains magic characters (i.e. r'\\?\C:'). if dirname != pathname and has_magic(dirname): dirs = iglob(dirname) else: dirs = [dirname] if has_magic(basename): glob_in_dir = glob1 else: glob_in_dir = glob0 for dirname in dirs: for name in glob_in_dir(dirname, basename): yield os.path.join(dirname, name) # These 2 helper functions non-recursively glob inside a literal directory. # They return a list of basenames. `glob1` accepts a pattern while `glob0` # takes a literal basename (so it only has to check for its existence). 
def glob1(dirname, pattern): if not dirname: if isinstance(pattern, bytes): dirname = bytes(os.curdir, 'ASCII') else: dirname = os.curdir try: names = os.listdir(dirname) except OSError: return [] if not _ishidden(pattern): names = [x for x in names if not _ishidden(x)] return fnmatch.filter(names, pattern) def glob0(dirname, basename): if not basename: # `os.path.split()` returns an empty basename for paths ending with a # directory separator. 'q*x/' should match only directories. if os.path.isdir(dirname): return [basename] else: if os.path.lexists(os.path.join(dirname, basename)): return [basename] return [] magic_check = re.compile('([*?[])') magic_check_bytes = re.compile(b'([*?[])') def has_magic(s): if isinstance(s, bytes): match = magic_check_bytes.search(s) else: match = magic_check.search(s) return match is not None def _ishidden(path): return path[0] in ('.', b'.'[0]) def escape(pathname): """Escape all special characters. """ # Escaping is done by wrapping any of "*?[" between square brackets. # Metacharacters do not work in the drive part and shouldn't be escaped. drive, pathname = os.path.splitdrive(pathname) if isinstance(pathname, bytes): pathname = magic_check_bytes.sub(br'[\1]', pathname) else: pathname = magic_check.sub(r'[\1]', pathname) return drive + pathname
NickelMedia/phantomjs
refs/heads/master
src/qt/qtwebkit/Tools/QueueStatusServer/__init__.py
6014
# Required for Python to search this directory for module files
yrobla/pyjuegos
refs/heads/master
pgzero/rect.py
1
# -*- coding: utf-8 -*- """ZRect This is a Python implementation of the pygame Rect class. Its raison d'être is to allow the coordinates to be floating point. All pygame functions which require a rect allow for an object with a "rect" attribute and whose coordinates will be converted to integers implictly. All functions which require a dict will use the flexible constructor to convert from: this (or a subclass); a Pygame Rect; a 4-tuple or a pair of 2-tuples. In addition, they'll recognise any object which has an (optionally callable) .rect attribute whose value will be used instead. """ from pygame.rect import Rect class NoIntersect(Exception): pass class ZRect: _item_mapping = dict(enumerate("xywh")) def __new__(cls, *args): # # If there is only one argument, it should either be a Rect object # (one of these or one from Pygame) or an arbitrary object with a # "rect" attribute. In the former case, create an equivalent Rect; # in the latter case, re-run the check with the object pointed to # by ".rect", calling it first if it is callable. # if len(args) == 1: obj, = args if isinstance(obj, (ZRect, Rect)): args = obj.x, obj.y, obj.w, obj.h elif hasattr(obj, "rect"): rectobj = obj.rect if callable(rectobj): rectobj = rectobj() return cls(rectobj) obj = super().__new__(cls) # # At this point we have one of: # # x, y, w, h # (x, y), (w, h) # (x, y, w, h), # if len(args) == 4: obj.x, obj.y, obj.w, obj.h = args elif len(args) == 2: (obj.x, obj.y), (obj.w, obj.h) = args elif len(args) == 1: obj.x, obj.y, obj.w, obj.h = args[0] else: raise TypeError("%s should be called with one, two or four arguments" % (cls.__name__)) # # To allow interoperation with Pygame, set a rect attribute # to point to this instance. 
# obj.rect = obj return obj def __repr__(self): return "<%s (x: %s, y: %s, w: %s, h: %s)>" % (self.__class__.__name__, self.x, self.y, self.w, self.h) def __reduce__(self): return self.__class__, (self.x, self.y, self.w, self.h) def copy(self): return self.__class__(self.x, self.y, self.w, self.h) __copy__ = copy def __len__(self): return 4 def __getitem__(self, item): try: return getattr(self, self._item_mapping[item]) except KeyError: raise IndexError def __setitem__(self, item, value): try: attribute = self._item_mapping[item] except KeyError: raise IndexError else: setattr(attribute, value) def __bool__(self): return self.w != 0 and self.h != 0 def __iter__(self): yield self.x yield self.y yield self.w yield self.h def __hash__(self): raise TypeError("ZRect instances may not be used as dictionary keys") def __eq__(self, *other): rect = self.__class__(*other) return (self.x, self.y, self.w, self.h) == (rect.x, rect.y, rect.w, rect.h) def __ne__(self, *other): rect = self.__class__(*other) return (self.x, self.y, self.w, self.h) != (rect.x, rect.y, rect.w, rect.h) def __lt__(self, *other): rect = self.__class__(*other) return (self.x, self.y, self.w, self.h) < (rect.x, rect.y, rect.w, rect.h) def __gt__(self, *other): rect = self.__class__(*other) return (self.x, self.y, self.w, self.h) > (rect.x, rect.y, rect.w, rect.h) def __le__(self, *other): rect = self.__class__(*other) return (self.x, self.y, self.w, self.h) <= (rect.x, rect.y, rect.w, rect.h) def __ge__(self, *other): rect = self.__class__(*other) return (self.x, self.y, self.w, self.h) >= (rect.x, rect.y, rect.w, rect.h) def __contains__(self, other): """Test whether a point (x, y) or another rectangle (anything accepted by ZRect) is contained within this ZRect """ if len(other) == 2: return self.collidepoint(*other) else: return self.contains(*other) def _get_width(self): return self.w def _set_width(self, width): self.w = width width = property(_get_width, _set_width) def _get_height(self): return 
self.h def _set_height(self, height): self.h = height height = property(_get_height, _set_height) def _get_top(self): return self.y def _set_top(self, top): self.y = top top = property(_get_top, _set_top) def _get_left(self): return self.x def _set_left(self, left): self.x = left left = property(_get_left, _set_left) def _get_right(self): return self.x + self.w def _set_right(self, right): self.x = right - self.w right = property(_get_right, _set_right) def _get_bottom(self): return self.y + self.h def _set_bottom(self, bottom): self.y = bottom - self.h bottom = property(_get_bottom, _set_bottom) def _get_centerx(self): return self.x + (self.w / 2) def _set_centerx(self, centerx): self.x = centerx - (self.w / 2) centerx = property(_get_centerx, _set_centerx) centrex = centerx def _get_centery(self): return self.y + (self.h / 2) def _set_centery(self, centery): self.y = centery - (self.h / 2) centery = property(_get_centery, _set_centery) centrey = centery def _get_topleft(self): return self.x, self.y def _set_topleft(self, topleft): self.x, self.y = topleft topleft = property(_get_topleft, _set_topleft) def _get_topright(self): return self.x + self.w, self.y def _set_topright(self, topright): x, y = topright self.x = x - self.w self.y = y topright = property(_get_topright, _set_topright) def _get_bottomleft(self): return self.x, self.y + self.h def _set_bottomleft(self, bottomleft): x, y = bottomleft self.x = x self.y = y - self.h bottomleft = property(_get_bottomleft, _set_bottomleft) def _get_bottomright(self): return self.x + self.w, self.y + self.h def _set_bottomright(self, bottomright): x, y = bottomright self.x = x - self.w self.y = y - self.h bottomright = property(_get_bottomright, _set_bottomright) def _get_midtop(self): return self.x + self.w / 2, self.y def _set_midtop(self, midtop): x, y = midtop self.x = x - self.w / 2 self.y = y midtop = property(_get_midtop, _set_midtop) def _get_midleft(self): return self.x, self.y + self.h / 2 def 
_set_midleft(self, midleft): x, y = midleft self.x = x self.y = y - self.h / 2 midleft = property(_get_midleft, _set_midleft) def _get_midbottom(self): return self.x + self.w / 2, self.y + self.h def _set_midbottom(self, midbottom): x, y = midbottom self.x = x - self.w / 2 self.y = y - self.h midbottom = property(_get_midbottom, _set_midbottom) def _get_midright(self): return self.x + self.w, self.y + self.h / 2 def _set_midright(self, midright): x, y = midright self.x = x - self.w self.y = y - self.h / 2 midright = property(_get_midright, _set_midright) def _get_center(self): return self.x + self.w / 2, self.y + self.h / 2 def _set_center(self, center): x, y = center self.x = x - self.w / 2 self.y = y - self.h / 2 center = property(_get_center, _set_center) centre = center def _get_size(self): return self.w, self.h def _set_size(self, size): self.w, self.h = size size = property(_get_size, _set_size) def move(self, x, y): return self.__class__(self.x + x, self.y + y, self.w, self.h) def move_ip(self, x, y): self.x += x self.y += y def _inflated(self, x, y): return self.x - x / 2, self.y - y / 2, self.w + x, self.h + y def inflate(self, x, y): return self.__class__(*self._inflated(x, y)) def inflate_ip(self, x, y): self.x, self.y, self.w, self.h = self._inflated(x, y) def _clamped(self, *other): rect = self.__class__(*other) if self.w >= rect.w: x = rect.x + rect.w / 2 - self.w / 2 elif self.x < rect.x: x = rect.x elif self.x + self.w > rect.x + rect.w: x = rect.x + rect.w - self.w else: x = self.x if self.h >= rect.h: y = rect.y + rect.h / 2 - self.h / 2 elif self.y < rect.y: y = rect.y elif self.y + self.h > rect.y + rect.h: y = rect.y + rect.h - self.h else: y = self.y return x, y def clamp(self, *other): rect = self.__class__(*other) x, y = self._clamped(rect) return self.__class__(x, y, self.w, self.h) def clamp_ip(self, *other): rect = self.__class__(*other) self.x, self.y = self._clamped(rect) def _clipped(self, *other): rect = self.__class__(*other) if 
self.x >= rect.x and self.x < (rect.x + rect.w): x = self.x elif rect.x >= self.x and rect.x < (self.x + self.w): x = rect.x else: raise NoIntersect if (self.x + self.w) > rect.x and (self.x + self.w) <= (rect.x + rect.w): w = self.x + self.w - x elif (rect.x + rect.w) > self.x and (rect.x + rect.w) <= (self.x + self.w): w = rect.x + rect.w - x else: raise NoIntersect if self.y >= rect.y and self.y < (rect.y + rect.h): y = self.y elif rect.y >= self.y and rect.y < (self.y + self.h): y = rect.y else: raise NoIntersect if (self.y + self.h) > rect.y and (self.y + self.h) <= (rect.y + rect.h): h = self.y + self.h - y elif (rect.y + rect.h) > self.y and (rect.y + rect.h) <= (self.y + self.h): h = rect.y + rect.h - y else: raise NoIntersect return x, y, w, h def clip(self, *other): rect = self.__class__(*other) try: x, y, w, h = self._clipped(rect) except NoIntersect: x, y, w, h = self.x, self.y, 0, 0 return self.__class__(x, y, w, h) def clip_ip(self, *other): rect = self.__class__(*other) try: self.x, self.y, self.w, self.h = self._clipped(rect) except NoIntersect: self.x, self.y, self.w, self.h = self.x, self.y, 0, 0 def _unioned(self, *other): rect = self.__class__(*other) x = min(self.x, rect.x) y = min(self.y, rect.y) w = max(self.x + self.w, rect.x + rect.w) - x h = max(self.y + self.h, rect.y + rect.h) - y return x, y, w, h def union(self, *other): rect = self.__class__(*other) return self.__class__(*self._unioned(rect)) def union_ip(self, *other): rect = self.__class__(*other) self.x, self.y, self.w, self.h = self._unioned(rect) def _unionalled(self, others): allrects = [self] + [self.__class__(other) for other in others] x = min(r.x for r in allrects) y = min(r.y for r in allrects) w = max(r.x + r.w for r in allrects) - x h = max(r.y + r.h for r in allrects) - y return x, y, w, h def unionall(self, others): return self.__class__(*self._unionalled(others)) def unionall_ip(self, others): self.x, self.y, self.w, self.h = self._unionalled(others) def fit(self, 
*other): rect = self.__class__(*other) ratio = max(self.w / rect.w, self.h / rect.h) w = self.w / ratio h = self.h / ratio x = rect.x + (rect.w - w) / 2 y = rect.y + (rect.h - h) / 2 return self.__class__(x, y, w, h) def normalize(self): if self.w < 0: self.x += self.w self.w = abs(self.w) if self.h < 0: self.y += self.h self.h = abs(self.h) def contains(self, *other): rect = self.__class__(*other) return ( self.x <= rect.x and self.y <= rect.y and self.x + self.w >= rect.x + rect.w and self.y + self.h >= rect.y + rect.h and self.x + self.w > rect.x and self.y + self.h > rect.y ) def collidepoint(self, *args): if len(args) == 1: x, y = args, else: x, y = args return ( self.x <= x < (self.x + self.w) and self.y <= y < (self.y + self.h) ) def colliderect(self, *other): rect = self.__class__(*other) return ( self.x < rect.x + rect.w and self.y < rect.y + rect.h and self.x + self.w > rect.x and self.y + self.h > rect.y ) def collidelist(self, others): for n, other in enumerate(others): if self.colliderect(other): return n else: return -1 def collidelistall(self, others): return [n for n, other in enumerate(others) if self.colliderect(other)] def collidedict(self, dict, use_values=True): for k, v in dict.items(): if self.colliderect(v if use_values else k): return k, v def collidedictall(self, dict, use_values=True): return [(k, v) for (k, v) in dict.items() if self.colliderect(v if use_values else k)]
opavader/fabric
refs/heads/master
sites/www/conf.py
31
# Obtain shared config values import sys import os from os.path import abspath, join, dirname sys.path.append(abspath(join(dirname(__file__), '..'))) from shared_conf import * # Releases changelog extension extensions.append('releases') releases_github_path = "fabric/fabric" # Intersphinx for referencing API/usage docs extensions.append('sphinx.ext.intersphinx') # Default is 'local' building, but reference the public docs site when building # under RTD. target = join(dirname(__file__), '..', 'docs', '_build') if os.environ.get('READTHEDOCS') == 'True': target = 'http://docs.fabfile.org/en/latest/' intersphinx_mapping = { 'docs': (target, None), } # Sister-site links to API docs html_theme_options['extra_nav_links'] = { "API Docs": 'http://docs.fabfile.org', }
sai16vicky/deepdive
refs/heads/master
examples/tutorial_example/step2-generic-features/udf/ext_people.py
161
#! /usr/bin/env python # Sample input data (piped into STDIN): ''' 118238@10 Sen.~^~Barack~^~Obama~^~and~^~his~^~wife~^~,~^~Michelle~^~Obama~^~,~^~have~^~released~^~eight~^~years~^~of~^~joint~^~returns~^~. O~^~PERSON~^~PERSON~^~O~^~O~^~O~^~O~^~PERSON~^~PERSON~^~O~^~O~^~O~^~DURATION~^~DURATION~^~O~^~O~^~O~^~O 118238@12 During~^~the~^~2004~^~presidential~^~campaign~^~,~^~we~^~urged~^~Teresa~^~Heinz~^~Kerry~^~,~^~the~^~wealthy~^~wife~^~of~^~Sen.~^~John~^~Kerry~^~,~^~to~^~release~^~her~^~tax~^~returns~^~. O~^~O~^~DATE~^~O~^~O~^~O~^~O~^~O~^~PERSON~^~PERSON~^~PERSON~^~O~^~O~^~O~^~O~^~O~^~O~^~PERSON~^~PERSON~^~O~^~O~^~O~^~O~^~O~^~O~^~O ''' import sys ARR_DELIM = '~^~' # For-loop for each row in the input query for row in sys.stdin: # Find phrases that are continuous words tagged with PERSON. sentence_id, words_str, ner_tags_str = row.strip().split('\t') words = words_str.split(ARR_DELIM) ner_tags = ner_tags_str.split(ARR_DELIM) start_index = 0 phrases = [] while start_index < len(words): # Checking if there is a PERSON phrase starting from start_index index = start_index while index < len(words) and ner_tags[index] == "PERSON": index += 1 if index != start_index: # found a person from "start_index" to "index" text = ' '.join(words[start_index:index]) length = index - start_index phrases.append((start_index, length, text)) start_index = index + 1 # Output a tuple for each PERSON phrase for start_position, length, text in phrases: print '\t'.join( [ str(x) for x in [ sentence_id, start_position, # start_position length, # length text, # text '%s_%d' % (sentence_id, start_position) # mention_id ]])
NaPs/Kolekto
refs/heads/master
kolekto/commands/list.py
1
from kolekto.commands import Command from kolekto.printer import printer from kolekto.exceptions import KolektoRuntimeError from kolekto.pattern import parse_pattern from kolekto.datasources import MovieDatasource class ListingFormatWrapper(object): """ A wrapper used to customize how movies attributes are formatted. """ def __init__(self, title, obj): self.title = title self.obj = obj def __unicode__(self): if isinstance(self.obj, bool): if self.obj: return self.title.title() elif isinstance(self.obj, list): if not self.obj: return 'None' # List is empty elif len(self.obj) > 1: return '%s and %s' % (', '.join(self.obj[0:-1]), self.obj[-1]) else: return unicode(self.obj[0]) else: return unicode(self.obj) def __repr__(self): return repr(self.obj) class List(Command): """ List movies in the kolekto tree. """ help = 'list movies' def prepare(self): self.add_arg('listing', metavar='listing', default='default', nargs='?') def _config(self, args, config): """ Get configuration for the current used listing. 
""" listings = dict((x.args, x) for x in config.subsections('listing')) listing = listings.get(args.listing) if listing is None: if args.listing == u'default': return {'pattern': self._profile.list_default_pattern, 'order': self._profile.list_default_order} else: raise KolektoRuntimeError('Unknown listing %r' % args.listing) else: return {'pattern': listing.get('pattern'), 'order': listing.get('order')} def run(self, args, config): mdb = self.get_metadata_db(args.tree) mds = MovieDatasource(config.subsections('datasource'), args.tree, self.profile.object_class) listing = self._config(args, config) def _sorter((movie_hash, movie)): return tuple(movie.get(x) for x in listing['order']) movies = sorted(mdb.itermovies(), key=_sorter) # Get the current used listing: for movie_hash, movie in movies: movie = mds.attach(movie_hash, movie) prepared_env = parse_pattern(listing['pattern'], movie, ListingFormatWrapper) printer.p(u'<inv><b> {hash} </b></inv> ' + listing['pattern'], hash=movie_hash, **prepared_env)
inova-tecnologias/jenova
refs/heads/master
src/jenova/resources/base.py
1
import jwt, base64, json from collections import namedtuple from werkzeug.exceptions import InternalServerError from flask_restful import reqparse, request, Resource, abort from functools import wraps from time import sleep from jenova.models import UserSchema, Scope from jenova.components import Security, InvalidCredentials, CallLogger, JwtInconsistentDataError from jenova.components.tasks import ( update_cos_into_domain_zimbra_task, create_domain_zimbra_task, delete_domain_zimbra_task, create_delegated_zimbra_admin_task ) logger = CallLogger.logger() TASK_TYPES = ['createzimbradomains'] QUERY_FILTER_IDS = ['id', 'client_id', 'authentication_id', 'domain_id', 'service_id'] RESERVED_NAMES = ['inova', 'jenova', 'inovatec', 'jnv', 'all'] DEFAULT_SCOPES = [ 'dns', 'domain', 'service', 'store', 'users', 'zimbra', 'client', 'permissions' ] PERMS = ['write', 'read', 'edit', 'delete'] def abort_if_obj_doesnt_exist(filter_by, target, model_object): if filter_by in QUERY_FILTER_IDS: try: target = int(target) except ValueError, e: raise query = { filter_by : target } result = model_object.query.filter_by(**query).first() if not result: abort(404, message='Could not find object: %s' % target) return result def exception_handler(f): @wraps(f) def decorated(*args, **kwargs): resource = f.__self__ # Check for violation! 
# TODO: This method will query for the resources on the most of the resource classes, # maybe it is possible to pass the model object throughout this decorated method resource.is_forbidden(**kwargs) # Check permissions only if isn't an admin if not resource.is_an_admin: if f.__name__ == 'get': resource.can_read(resource.scope) elif f.__name__ == 'put': resource.can_edit(resource.scope) elif f.__name__ == 'post': resource.can_write(resource.scope) elif f.__name__ == 'delete': resource.can_delete(resource.scope) if request.data and type(request.json) != dict: abort(415, message = 'Expecting JSON') return f(*args, **kwargs) return decorated class BaseResource(Resource): method_decorators = [exception_handler] def __init__(self, filters, default_filter='name', **kwargs): self.logger = logger parser = reqparse.RequestParser() parser.add_argument('filter_by', type=str, location='args') self.filter_by = parser.parse_args().get('filter_by') or default_filter if self.filter_by not in filters: err_message = 'Wrong query filter specified %s. 
Accept only: %s' % (self.filter_by, ', '.join(filters)) abort(400, message=err_message) self.parser = reqparse.RequestParser() self.jwt_payload = self.check_auth() def check_auth(self): auth = request.headers.get('Authorization', None) message = '' if not auth: abort(401, message = 'Authorization header is expected') parts = auth.split() if parts[0].lower() != 'bearer': message = 'Authorization header must start with Bearer' elif len(parts) == 1: message = 'Token not found' elif len(parts) > 2: message = 'Authorization header must be Bearer + \s + token' if message: abort(401, message = message) token = parts[1] try: payload = jwt.decode( token, Security.get_jwt_skey(), algorithms = ['HS256'] ) except jwt.ExpiredSignature: message = 'token is expired' except jwt.InvalidAudienceError: message = 'incorrect audience' except jwt.DecodeError: message = 'token signature is invalid' if message: abort(401, message = message) self.logger.debug('Access granted for %s!' % payload['user']['login']) return payload @property def request_user_login(self): return self.jwt_payload['user']['login'] @property def is_admin(self): return self.jwt_payload['user']['admin'] @property def is_global_admin(self): return self.jwt_payload['user']['global_admin'] @property def is_an_admin(self): return self.is_admin or self.is_global_admin @property def request_user_client_id(self): return self.jwt_payload['user']['client_id'] @property def request_user_reseller_id(self): # It's a reseller admin user if self.jwt_payload['user']['reseller']: return self.jwt_payload['user']['reseller']['id'] return self.jwt_payload['user']['client']['reseller']['id'] @property def request_user_id(self): return self.jwt_payload['user']['id'] ### PERMISSIONS METHODS ### """ # It is possible to override these methods on each Resource classes. # This will give more flexibility by implementing your own behavior. 
# Edit this methods on this class only if you need to change the whole logic, # otherwise, override this methods in resources classes that inherit from this class. # For disabling the behavior, override this method with the pass operator: # def can_read(): # pass """ def can_read(self, scope_name): """ Check if it has permission to read. Has to be evaluated on every GET HTTP methods. """ has_read_perm = False for perm in self.jwt_payload['user']['permissions']: if perm['scope']['name'] == scope_name: has_read_perm = True if not perm.get('read'): has_read_perm = False break if not has_read_perm: abort(403, message = 'Permission denied! Does not have proper permission.') def can_write(self, scope_name): """ Check if it has permission to write. Has to be evaluated on every POST HTTP methods. """ has_write_perm = False for perm in self.jwt_payload['user']['permissions']: if perm['scope']['name'] == scope_name: has_write_perm = True if not perm.get('write'): has_write_perm = False break if not has_write_perm: abort(403, message = 'Permission denied! Does not have proper permission.') def can_edit(self, scope_name): """ Check if it has permission to edit. Has to be evaluated on every PUT/PATCH HTTP methods. """ has_edit_perm = False for perm in self.jwt_payload['user']['permissions']: if perm['scope']['name'] == scope_name: has_edit_perm = True if not perm.get('edit'): has_edit_perm = False break if not has_edit_perm: abort(403, message = 'Permission denied! Does not have proper permission.') def can_delete(self, scope_name): """ Check if it has permission to delete. Has to be evaluated on every DELETE HTTP methods. """ has_del_perm = False for perm in self.jwt_payload['user']['permissions']: if perm['scope']['name'] == scope_name: has_del_perm = True if not perm.get('delete'): has_del_perm = False break if not has_del_perm: abort(403, message = 'Permission denied! 
Does not have proper permission.') def is_forbidden(self, **kwargs): """ Check if the resource is allowed by a global admin user. It must be overrided if the user is not a global admin, the contraints must be evaluated accordingly, must ensure if the request user is the owner of the requested resource. :param kwargs: The resource attributes for validating the contraints """ if not self.is_global_admin: abort(403, message = 'Permission denied! Does not have enough permissions.') class TaskResource(BaseResource): def __init__(self): filters = ['id', 'name'] super(TaskResource, self).__init__(filters) def is_forbidden(self, **kwargs): pass def can_read(self): pass def get(self, task_type, task_id): if task_type == 'createzimbradomains': task = create_domain_zimbra_task.AsyncResult(task_id) elif task_type == 'createdelegatedzimbra': task = create_delegated_zimbra_admin_task.AsyncResult(task_id) else: abort(400, message = 'Wrong task_type specified') try: task_state = task.state task_executed = task.ready() except Exception: task_state = 'ERROR' task_executed = True return { 'response' : { 'task_state' : task_state, 'task_executed' : task_executed } }
hrvoojex/pipni2
refs/heads/master
pipni2.py
1
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Calculate telecomunication costs author: Hrvoje T last edited: February 2017 """ from mainwindow import * import sys import datetime #import string #import re # File for saving the result. Default 'output_pipni2.csv' if not explicitly named OUTPUT_FILE = "output_pipni.csv" class Main(QtWidgets.QMainWindow): def __init__(self): super().__init__() self.init_ui() self.setWindowIcon(QtGui.QIcon('pipni2_icon.png')) def init_ui(self): """Initializing GUI from mainwindow module""" self.ui = Ui_MainWindow() self.ui.setupUi(self) # disable 'Spremi' button if there is no data in textBrowser self.spremi_button_disabled() # disable 'kojibroj_grpoupBox' when app is loaded self.one_or_all() # call a method 'selectfile_Dialog' if one of QLineEdit objects is clicked self.ui.lista_lineEdit.clicked.connect(self.selectfile_Dialog) self.ui.specif_lineEdit.clicked.connect(self.selectfile_Dialog) # lista_lineEdit_2 is adresar QLineEdit widget self.ui.lista_lineEdit_2.clicked.connect(self.selectfile_Dialog) # Close the main windows when 'Exit' menu is triggered self.ui.actionExit.triggered.connect(self.close) # When 'Prikaži' button is clicked, connect 'accepted' signal to method self.ui.prikazi_button.clicked.connect(self.prikazi_button_clicked) # When 'Spremi' button is clicked, connect 'accepted' signal to method self.ui.spremi_button.clicked.connect(self.spremi_button_clicked) # When 'Otkazi' button is clicked, connect 'accepted' signal to method self.ui.otkazi_button.clicked.connect(self.otkazi_button_clicked) # When 'Reset' menu is triggered self.ui.actionReset.triggered.connect(self.otkazi_button_clicked) # disable 'kojibroj_groupBox' if svi_button in checked self.ui.svi_radioButton.clicked.connect(self.one_or_all) # enable 'kojibroj_groupBox' if jedan_button in checked self.ui.jedan_radioButton.clicked.connect(self.one_or_all) def selectfile_Dialog(self, event=None): """ Opens a dialog for choosing a file. 
Takes two positionals arguments 'self' and 'event' because 'mouseReleaseEvent' sends two, when creating new method eg. 'label1.mouseReleaseEvent = self.showText1'. When subclassing QLineEdit as ClickableLineEdit 'event' is None" """ # QFileDialog doesn't use native OS dialog like this one: # 'fname = QFileDialog.getOpenFileName(self, 'Open file', '/home')' # to remember last opening path fname, _ = QtWidgets.QFileDialog.getOpenFileName( self, 'Open File', '', 'Text file (*.csv)', None, QtWidgets.QFileDialog.DontUseNativeDialog) # sender is object that sends the signal sender = self.sender() # write selected file name into that QLineEdit widget 'lista_lineEdit' sender.setText(fname) # set options for combobox only from 'lista_lineEdit' QLineEdit widget if sender.objectName() == "lista_lineEdit": # cals a method fill_combobox self.fill_combobox(fname) def open_file(self, filename): """Reads a file and returns a data. Every line is an item in a list""" try: with open(filename, "r") as fh: # read from a file line by line. Every line is a item in a list data = fh.readlines() # removes escape charachters from items in list 'data' data = [x.strip() for x in data] return data except FileNotFoundError as e: self.ui.textBrowser.setText(str("Greška {}; popuni podatke".format(e))) def fill_combobox(self, filename): """ Set combobox options from lista.csv header. This method is called when first QLineEdit widget is clicked """ # to not allow filename of type Nonetype eg. 
filename="" if filename != "": # removes all previous combobox options self.ui.zeljeni_comboBox.clear() data = self.open_file(filename) # from 0-th item in list 'data' and split where is ';' sign first_row_words = data[0].split(";") index = 0 # removes first default combobox option self.ui.zeljeni_comboBox.removeItem(index) # for every item in first row add it as an combobox option for item in first_row_words: self.ui.zeljeni_comboBox.addItem("") self.ui.zeljeni_comboBox.setItemText(index, first_row_words[index]) index += 1 def write_outputfile(self): """Write everything from textBrowser to a output file""" # reads from adresar QLineEdit to see for output file name self.adresar_changed() # Writes to a global variable defined at the begining of a module global OUTPUT_FILE data = self.ui.textBrowser.toPlainText() with open(OUTPUT_FILE, "w") as f: f.write(data) def display_in_textbox(self): """Display calculation result in QtextBox widget""" self.ui.textBrowser.clear() self.calculations_lista() def cancel_settings(self): """Reset all app settings""" # remove all QLineEdit widgets text self.ui.lista_lineEdit.setText("") self.ui.specif_lineEdit.setText("") self.ui.lista_lineEdit_2.setText("") self.ui.izlazna_lineEdit.setText("") self.ui.broj_lineEdit.setText("") # radio button reset to 'Svi' self.ui.svi_radioButton.setChecked(True) # remove all combobox options self.ui.zeljeni_comboBox.clear() # clear textBrowser text self.ui.textBrowser.setText("") def adresar_changed(self): """Change global OUTPUT_FILE to QLineEdit widget adresar text""" global OUTPUT_FILE # add time in a file name date_string = datetime.datetime.now().strftime("%Y-%m-%d-%H:%M:%S") save_to = self.ui.izlazna_lineEdit.text() if save_to != "": OUTPUT_FILE = date_string + "-" + save_to else: OUTPUT_FILE = date_string + "-output_pipni.csv" def calculations_lista(self): """Method for calculation a logic of a program with lista.csv""" # create empty string for storing information to display display = [] 
file_name = self.ui.lista_lineEdit.text() # open a file that is listed in lista QLineEdit widget and # put every line as a item in working_data list working_data = self.open_file(file_name) index = self.ui.zeljeni_comboBox.currentIndex() for line in working_data: word = line.split(";") # removes ' signs from string and converts numbers into float word[index] = self.clean_and_convert(word[index]) # Don't pass the first non-float line to list display because # strings and floats can't be compared for sorting if type(word[index]) != float: self.ui.header_title = word[index] self.ui.header_ident = word[1] # take another line continue display.append((word[1], word[index])) # Sorts the list my_list = self.sort_tup_from_list(display) # show in a textbrowser if 'Koji broj' is empty if self.ui.broj_lineEdit.text() == "": # Shows the list in a textbrowser self.show_in_textbrowser(my_list) #print(my_list) else: my_newlist = [] my_newlist.append(my_list[0]) print(my_newlist) my_newlist.append(self.which_num(my_list)) self.show_in_textbrowser(my_newlist) def which_num(self, lst): """ Takes an input from 'Koji broj' and 'lst' original result list filed with tuples and filters only that broj_lineEdit phone """ # variable to check if phone number is in addressbook is_here = False try: phone_tup = () # reads a phone number from broj_lineEdit widget phone = int(self.ui.broj_lineEdit.text()) for key, val in lst: if str(phone) in (key,val): # set flag is_here as True if phone number is is addresbook is_here = True if self.clean_quot(key) == str(phone): phone_tup = ((key, val)) if is_here == False: self.ui.textBrowser.append("Broj nije u imeniku") return phone_tup except ValueError as e: self.ui.textBrowser.append( "Nije pravilan broj 'Koji broj: {}".format(sys.exc_info()[0])) def clean_and_convert(self, item): """Remove ' sign from string and convert it to float if it is all numbers""" # Replace ' sign for nothing. 
Just remove it from string item = item.replace("'", "") item = item.replace(",", ".") # If string has all numbers declare it float type if item[0].isdigit(): item = float(item) return item def clean_quot(self, item): """Remove ' sign from string""" # Replace ' sign for nothing. Just remove it from string item = item.replace("'", "") return item def get_key(self, item): """ Returns a key for a sort function. Number 1 means sort by the second item. Item parameter (tmp) is hiden and given by sort() function in sort_tup_from_list() method """ return item[1] def sort_tup_from_list(self, input_list): """Sorts tuples in a list by value""" tmp = [] for key, val in input_list: if type(val) != float: title = val tmp.append((key, val)) tmp.sort(key=self.get_key, reverse=True) # Add header of csv file after sorting by float numbers not to mix # str and float type tmp.insert(0, (self.ui.header_ident, self.ui.header_title)) return tmp def show_in_textbrowser(self, input_list): """Display list of tuples in QTextBrowser""" self.ui.textBrowser.setFontFamily("monospace") try: for tup in input_list: self.ui.textBrowser.append("{:15}{:.>30}".format(tup[0], tup[1])) except: print("No number in addressbook") def one_or_all(self): if self.ui.svi_radioButton.isChecked(): self.ui.kojibroj_groupBox.setDisabled(1) self.ui.broj_lineEdit.setText("") else: self.ui.kojibroj_groupBox.setDisabled(0) def spremi_button_disabled(self): """Disables 'Spremi' button until textBrowser if filled""" data = self.ui.textBrowser.toPlainText() if data == "": self.ui.spremi_button.setDisabled(True) else: self.ui.spremi_button.setDisabled(False) def prikazi_button_clicked(self): """Actions that hapen when OK button is clicked""" self.display_in_textbox() self.spremi_button_disabled() def spremi_button_clicked(self): """Calls a method for saving a result in a file""" self.write_outputfile() def otkazi_button_clicked(self): """Calls a method for canceling app settings""" self.cancel_settings() 
self.spremi_button_disabled() def closeEvent(self, event): """Ask for closing confirmation""" reply = QtWidgets.QMessageBox.question(self, 'Message', "Are you sure to quit?", QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No, QtWidgets.QMessageBox.No) if reply == QtWidgets.QMessageBox.Yes: event.accept() else: event.ignore() def keyPressEvent(self, e): """Action when return or escape is pressed""" if e.key() == QtCore.Qt.Key_Escape: self.close() if __name__ == "__main__": app = QtWidgets.QApplication(sys.argv) instance = Main() instance.show() sys.exit(app.exec_())
Belxjander/Kirito
refs/heads/master
Python-3.5.0-main/Doc/includes/sqlite3/text_factory.py
51
import sqlite3 con = sqlite3.connect(":memory:") cur = con.cursor() AUSTRIA = "\xd6sterreich" # by default, rows are returned as Unicode cur.execute("select ?", (AUSTRIA,)) row = cur.fetchone() assert row[0] == AUSTRIA # but we can make sqlite3 always return bytestrings ... con.text_factory = bytes cur.execute("select ?", (AUSTRIA,)) row = cur.fetchone() assert type(row[0]) is bytes # the bytestrings will be encoded in UTF-8, unless you stored garbage in the # database ... assert row[0] == AUSTRIA.encode("utf-8") # we can also implement a custom text_factory ... # here we implement one that appends "foo" to all strings con.text_factory = lambda x: x.decode("utf-8") + "foo" cur.execute("select ?", ("bar",)) row = cur.fetchone() assert row[0] == "barfoo"
klahnakoski/esShardBalancer
refs/heads/master
jx_python/namespace/normal.py
2
# encoding: utf-8 # # # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this file, # You can obtain one at http://mozilla.org/MPL/2.0/. # # Author: Kyle Lahnakoski (kyle@lahnakoski.com) # from __future__ import absolute_import from __future__ import division from __future__ import unicode_literals from collections import Mapping from copy import copy from mo_logs import Log from mo_dots import coalesce, Null from mo_dots import wrap, listwrap from mo_dots import Data from mo_dots import FlatList from mo_math import Math from jx_python.containers import Container from jx_python.dimensions import Dimension from jx_python.domains import Domain from jx_python.expressions import TRUE_FILTER from jx_python.namespace import Namespace, convert_list from jx_python.query import QueryOp, get_all_vars DEFAULT_LIMIT = 10 class Normal(Namespace): """ UNREMARKABLE NAMESPACE, SIMPLY FOR CONVERTING QUERY TO NORMAL FORM """ def convert(self, expr): if isinstance(expr, Mapping) and expr["from"]: return self._convert_query(expr) return expr def _convert_query(self, query): # if not isinstance(query["from"], Container): # Log.error('Expecting from clause to be a Container') query = wrap(query) output = QueryOp("from", None) output["from"] = self._convert_from(query["from"]) output.format = query.format if query.select: output.select = convert_list(self._convert_select, query.select) else: if query.edges or query.groupby: output.select = {"name": "count", "value": ".", "aggregate": "count", "default": 0} else: output.select = {"name": "__all__", "value": "*", "aggregate": "none"} if query.groupby and query.edges: Log.error("You can not use both the `groupby` and `edges` clauses in the same query!") elif query.edges: output.edges = convert_list(self._convert_edge, query.edges) output.groupby = None elif query.groupby: output.edges = None output.groupby = convert_list(self._convert_group, query.groupby) 
else: output.edges = [] output.groupby = None output.where = self.convert(query.where) output.window = convert_list(self._convert_window, query.window) output.sort = self._convert_sort(query.sort) output.limit = coalesce(query.limit, DEFAULT_LIMIT) if not Math.is_integer(output.limit) or output.limit < 0: Log.error("Expecting limit >= 0") output.isLean = query.isLean # DEPTH ANALYSIS - LOOK FOR COLUMN REFERENCES THAT MAY BE DEEPER THAN # THE from SOURCE IS. vars = get_all_vars(output, exclude_where=True) # WE WILL EXCLUDE where VARIABLES for c in query.columns: if c.name in vars and len(c.nested_path) != 1: Log.error("This query, with variable {{var_name}} is too deep", var_name=c.name) output.having = convert_list(self._convert_having, query.having) return output def _convert_from(self, frum): if isinstance(frum, basestring): return Data(name=frum) elif isinstance(frum, (Container, QueryOp)): return frum else: Log.error("Expecting from clause to be a name, or a container") def _convert_select(self, select): if isinstance(select, basestring): return Data( name=select.rstrip("."), # TRAILING DOT INDICATES THE VALUE, BUT IS INVALID FOR THE NAME value=select, aggregate="none" ) else: select = wrap(select) output = copy(select) if not select.value or isinstance(select.value, basestring): if select.value == ".": output.name = coalesce(select.name, select.aggregate) else: output.name = coalesce(select.name, select.value, select.aggregate) elif not output.name: Log.error("Must give name to each column in select clause") if not output.name: Log.error("expecting select to have a name: {{select}}", select=select) output.aggregate = coalesce(canonical_aggregates.get(select.aggregate), select.aggregate, "none") return output def _convert_edge(self, edge): if isinstance(edge, basestring): return Data( name=edge, value=edge, domain=self._convert_domain() ) else: edge = wrap(edge) if not edge.name and not isinstance(edge.value, basestring): Log.error("You must name compound 
edges: {{edge}}", edge= edge) if isinstance(edge.value, (Mapping, list)) and not edge.domain: # COMPLEX EDGE IS SHORT HAND domain =self._convert_domain() domain.dimension = Data(fields=edge.value) return Data( name=edge.name, allowNulls=False if edge.allowNulls is False else True, domain=domain ) domain = self._convert_domain(edge.domain) return Data( name=coalesce(edge.name, edge.value), value=edge.value, range=edge.range, allowNulls=False if edge.allowNulls is False else True, domain=domain ) def _convert_group(self, column): if isinstance(column, basestring): return wrap({ "name": column, "value": column, "domain": {"type": "default"} }) else: column = wrap(column) if (column.domain and column.domain.type != "default") or column.allowNulls != None: Log.error("groupby does not accept complicated domains") if not column.name and not isinstance(column.value, basestring): Log.error("You must name compound edges: {{edge}}", edge= column) return wrap({ "name": coalesce(column.name, column.value), "value": column.value, "domain": {"type": "default"} }) def _convert_domain(self, domain=None): if not domain: return Domain(type="default") elif isinstance(domain, Dimension): return domain.getDomain() elif isinstance(domain, Domain): return domain if not domain.name: domain = domain.copy() domain.name = domain.type if not isinstance(domain.partitions, list): domain.partitions = list(domain.partitions) return Domain(**domain) def _convert_range(self, range): if range == None: return None return Data( min=range.min, max=range.max ) def _convert_where(self, where): if where == None: return TRUE_FILTER return where def _convert_window(self, window): return Data( name=coalesce(window.name, window.value), value=window.value, edges=[self._convert_edge(e) for e in listwrap(window.edges)], sort=self._convert_sort(window.sort), aggregate=window.aggregate, range=self._convert_range(window.range), where=self._convert_where(window.where) ) def _convert_sort(self, sort): return 
normalize_sort(sort) def normalize_sort(sort=None): """ CONVERT SORT PARAMETERS TO A NORMAL FORM SO EASIER TO USE """ if not sort: return Null output = FlatList() for s in listwrap(sort): if isinstance(s, basestring) or Math.is_integer(s): output.append({"value": s, "sort": 1}) elif not s.field and not s.value and s.sort==None: #ASSUME {name: sort} FORM for n, v in s.items(): output.append({"value": n, "sort": sort_direction[v]}) else: output.append({"value": coalesce(s.field, s.value), "sort": coalesce(sort_direction[s.sort], 1)}) return wrap(output) sort_direction = { "asc": 1, "desc": -1, "none": 0, 1: 1, 0: 0, -1: -1, None: 1, Null: 1 } canonical_aggregates = { "none": "none", "one": "one", "count": "count", "sum": "sum", "add": "sum", "mean": "average", "average": "average", "avg": "average", "min": "minimum", "minimum": "minimum", "max": "maximum", "maximum": "minimum", "X2": "sum_of_squares", "std": "std", "stddev": "std", "std_deviation": "std", "var": "variance", "variance": "variance", "stats": "stats" }
rtrigoso/ghost-somepolymath
refs/heads/master
node_modules/pygmentize-bundled/vendor/pygments/build-3.3/pygments/formatters/html.py
94
# -*- coding: utf-8 -*- """ pygments.formatters.html ~~~~~~~~~~~~~~~~~~~~~~~~ Formatter for HTML output. :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ import os import sys import os.path import io from pygments.formatter import Formatter from pygments.token import Token, Text, STANDARD_TYPES from pygments.util import get_bool_opt, get_int_opt, get_list_opt, bytes try: import ctags except ImportError: ctags = None __all__ = ['HtmlFormatter'] _escape_html_table = { ord('&'): '&amp;', ord('<'): '&lt;', ord('>'): '&gt;', ord('"'): '&quot;', ord("'"): '&#39;', } def escape_html(text, table=_escape_html_table): """Escape &, <, > as well as single and double quotes for HTML.""" return text.translate(table) def get_random_id(): """Return a random id for javascript fields.""" from random import random from time import time try: from hashlib import sha1 as sha except ImportError: import sha sha = sha.new return sha('%s|%s' % (random(), time())).hexdigest() def _get_ttype_class(ttype): fname = STANDARD_TYPES.get(ttype) if fname: return fname aname = '' while fname is None: aname = '-' + ttype[-1] + aname ttype = ttype.parent fname = STANDARD_TYPES.get(ttype) return fname + aname CSSFILE_TEMPLATE = '''\ td.linenos { background-color: #f0f0f0; padding-right: 10px; } span.lineno { background-color: #f0f0f0; padding: 0 5px 0 5px; } pre { line-height: 125%%; } %(styledefs)s ''' DOC_HEADER = '''\ <!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd"> <html> <head> <title>%(title)s</title> <meta http-equiv="content-type" content="text/html; charset=%(encoding)s"> <style type="text/css"> ''' + CSSFILE_TEMPLATE + ''' </style> </head> <body> <h2>%(title)s</h2> ''' DOC_HEADER_EXTERNALCSS = '''\ <!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd"> <html> <head> <title>%(title)s</title> <meta http-equiv="content-type" content="text/html; 
charset=%(encoding)s"> <link rel="stylesheet" href="%(cssfile)s" type="text/css"> </head> <body> <h2>%(title)s</h2> ''' DOC_FOOTER = '''\ </body> </html> ''' class HtmlFormatter(Formatter): r""" Format tokens as HTML 4 ``<span>`` tags within a ``<pre>`` tag, wrapped in a ``<div>`` tag. The ``<div>``'s CSS class can be set by the `cssclass` option. If the `linenos` option is set to ``"table"``, the ``<pre>`` is additionally wrapped inside a ``<table>`` which has one row and two cells: one containing the line numbers and one containing the code. Example: .. sourcecode:: html <div class="highlight" > <table><tr> <td class="linenos" title="click to toggle" onclick="with (this.firstChild.style) { display = (display == '') ? 'none' : '' }"> <pre>1 2</pre> </td> <td class="code"> <pre><span class="Ke">def </span><span class="NaFu">foo</span>(bar): <span class="Ke">pass</span> </pre> </td> </tr></table></div> (whitespace added to improve clarity). Wrapping can be disabled using the `nowrap` option. A list of lines can be specified using the `hl_lines` option to make these lines highlighted (as of Pygments 0.11). With the `full` option, a complete HTML 4 document is output, including the style definitions inside a ``<style>`` tag, or in a separate file if the `cssfile` option is given. When `tagsfile` is set to the path of a ctags index file, it is used to generate hyperlinks from names to their definition. You must enable `anchorlines` and run ctags with the `-n` option for this to work. The `python-ctags` module from PyPI must be installed to use this feature; otherwise a `RuntimeError` will be raised. The `get_style_defs(arg='')` method of a `HtmlFormatter` returns a string containing CSS rules for the CSS classes used by the formatter. The argument `arg` can be used to specify additional CSS selectors that are prepended to the classes. A call `fmter.get_style_defs('td .code')` would result in the following CSS classes: .. 
sourcecode:: css td .code .kw { font-weight: bold; color: #00FF00 } td .code .cm { color: #999999 } ... If you have Pygments 0.6 or higher, you can also pass a list or tuple to the `get_style_defs()` method to request multiple prefixes for the tokens: .. sourcecode:: python formatter.get_style_defs(['div.syntax pre', 'pre.syntax']) The output would then look like this: .. sourcecode:: css div.syntax pre .kw, pre.syntax .kw { font-weight: bold; color: #00FF00 } div.syntax pre .cm, pre.syntax .cm { color: #999999 } ... Additional options accepted: `nowrap` If set to ``True``, don't wrap the tokens at all, not even inside a ``<pre>`` tag. This disables most other options (default: ``False``). `full` Tells the formatter to output a "full" document, i.e. a complete self-contained document (default: ``False``). `title` If `full` is true, the title that should be used to caption the document (default: ``''``). `style` The style to use, can be a string or a Style subclass (default: ``'default'``). This option has no effect if the `cssfile` and `noclobber_cssfile` option are given and the file specified in `cssfile` exists. `noclasses` If set to true, token ``<span>`` tags will not use CSS classes, but inline styles. This is not recommended for larger pieces of code since it increases output size by quite a bit (default: ``False``). `classprefix` Since the token types use relatively short class names, they may clash with some of your own class names. In this case you can use the `classprefix` option to give a string to prepend to all Pygments-generated CSS class names for token types. Note that this option also affects the output of `get_style_defs()`. `cssclass` CSS class for the wrapping ``<div>`` tag (default: ``'highlight'``). If you set this option, the default selector for `get_style_defs()` will be this class. 
*New in Pygments 0.9:* If you select the ``'table'`` line numbers, the wrapping table will have a CSS class of this string plus ``'table'``, the default is accordingly ``'highlighttable'``. `cssstyles` Inline CSS styles for the wrapping ``<div>`` tag (default: ``''``). `prestyles` Inline CSS styles for the ``<pre>`` tag (default: ``''``). *New in Pygments 0.11.* `cssfile` If the `full` option is true and this option is given, it must be the name of an external file. If the filename does not include an absolute path, the file's path will be assumed to be relative to the main output file's path, if the latter can be found. The stylesheet is then written to this file instead of the HTML file. *New in Pygments 0.6.* `noclobber_cssfile` If `cssfile` is given and the specified file exists, the css file will not be overwritten. This allows the use of the `full` option in combination with a user specified css file. Default is ``False``. *New in Pygments 1.1.* `linenos` If set to ``'table'``, output line numbers as a table with two cells, one containing the line numbers, the other the whole code. This is copy-and-paste-friendly, but may cause alignment problems with some browsers or fonts. If set to ``'inline'``, the line numbers will be integrated in the ``<pre>`` tag that contains the code (that setting is *new in Pygments 0.8*). For compatibility with Pygments 0.7 and earlier, every true value except ``'inline'`` means the same as ``'table'`` (in particular, that means also ``True``). The default value is ``False``, which means no line numbers at all. **Note:** with the default ("table") line number mechanism, the line numbers and code can have different line heights in Internet Explorer unless you give the enclosing ``<pre>`` tags an explicit ``line-height`` CSS property (you get the default line spacing with ``line-height: 125%``). `hl_lines` Specify a list of lines to be highlighted. 
*New in Pygments 0.11.* `linenostart` The line number for the first line (default: ``1``). `linenostep` If set to a number n > 1, only every nth line number is printed. `linenospecial` If set to a number n > 0, every nth line number is given the CSS class ``"special"`` (default: ``0``). `nobackground` If set to ``True``, the formatter won't output the background color for the wrapping element (this automatically defaults to ``False`` when there is no wrapping element [eg: no argument for the `get_syntax_defs` method given]) (default: ``False``). *New in Pygments 0.6.* `lineseparator` This string is output between lines of code. It defaults to ``"\n"``, which is enough to break a line inside ``<pre>`` tags, but you can e.g. set it to ``"<br>"`` to get HTML line breaks. *New in Pygments 0.7.* `lineanchors` If set to a nonempty string, e.g. ``foo``, the formatter will wrap each output line in an anchor tag with a ``name`` of ``foo-linenumber``. This allows easy linking to certain lines. *New in Pygments 0.9.* `linespans` If set to a nonempty string, e.g. ``foo``, the formatter will wrap each output line in a span tag with an ``id`` of ``foo-linenumber``. This allows easy access to lines via javascript. *New in Pygments 1.6.* `anchorlinenos` If set to `True`, will wrap line numbers in <a> tags. Used in combination with `linenos` and `lineanchors`. `tagsfile` If set to the path of a ctags file, wrap names in anchor tags that link to their definitions. `lineanchors` should be used, and the tags file should specify line numbers (see the `-n` option to ctags). *New in Pygments 1.6.* `tagurlformat` A string formatting pattern used to generate links to ctags definitions. Available variables are `%(path)s`, `%(fname)s` and `%(fext)s`. Defaults to an empty string, resulting in just `#prefix-number` links. 
*New in Pygments 1.6.* **Subclassing the HTML formatter** *New in Pygments 0.7.* The HTML formatter is now built in a way that allows easy subclassing, thus customizing the output HTML code. The `format()` method calls `self._format_lines()` which returns a generator that yields tuples of ``(1, line)``, where the ``1`` indicates that the ``line`` is a line of the formatted source code. If the `nowrap` option is set, the generator is the iterated over and the resulting HTML is output. Otherwise, `format()` calls `self.wrap()`, which wraps the generator with other generators. These may add some HTML code to the one generated by `_format_lines()`, either by modifying the lines generated by the latter, then yielding them again with ``(1, line)``, and/or by yielding other HTML code before or after the lines, with ``(0, html)``. The distinction between source lines and other code makes it possible to wrap the generator multiple times. The default `wrap()` implementation adds a ``<div>`` and a ``<pre>`` tag. A custom `HtmlFormatter` subclass could look like this: .. sourcecode:: python class CodeHtmlFormatter(HtmlFormatter): def wrap(self, source, outfile): return self._wrap_code(source) def _wrap_code(self, source): yield 0, '<code>' for i, t in source: if i == 1: # it's a line of formatted code t += '<br>' yield i, t yield 0, '</code>' This results in wrapping the formatted lines with a ``<code>`` tag, where the source lines are broken using ``<br>`` tags. After calling `wrap()`, the `format()` method also adds the "line numbers" and/or "full document" wrappers if the respective options are set. Then, all HTML yielded by the wrapped generator is output. 
""" name = 'HTML' aliases = ['html'] filenames = ['*.html', '*.htm'] def __init__(self, **options): Formatter.__init__(self, **options) self.title = self._decodeifneeded(self.title) self.nowrap = get_bool_opt(options, 'nowrap', False) self.noclasses = get_bool_opt(options, 'noclasses', False) self.classprefix = options.get('classprefix', '') self.cssclass = self._decodeifneeded(options.get('cssclass', 'highlight')) self.cssstyles = self._decodeifneeded(options.get('cssstyles', '')) self.prestyles = self._decodeifneeded(options.get('prestyles', '')) self.cssfile = self._decodeifneeded(options.get('cssfile', '')) self.noclobber_cssfile = get_bool_opt(options, 'noclobber_cssfile', False) self.tagsfile = self._decodeifneeded(options.get('tagsfile', '')) self.tagurlformat = self._decodeifneeded(options.get('tagurlformat', '')) if self.tagsfile: if not ctags: raise RuntimeError('The "ctags" package must to be installed ' 'to be able to use the "tagsfile" feature.') self._ctags = ctags.CTags(self.tagsfile) linenos = options.get('linenos', False) if linenos == 'inline': self.linenos = 2 elif linenos: # compatibility with <= 0.7 self.linenos = 1 else: self.linenos = 0 self.linenostart = abs(get_int_opt(options, 'linenostart', 1)) self.linenostep = abs(get_int_opt(options, 'linenostep', 1)) self.linenospecial = abs(get_int_opt(options, 'linenospecial', 0)) self.nobackground = get_bool_opt(options, 'nobackground', False) self.lineseparator = options.get('lineseparator', '\n') self.lineanchors = options.get('lineanchors', '') self.linespans = options.get('linespans', '') self.anchorlinenos = options.get('anchorlinenos', False) self.hl_lines = set() for lineno in get_list_opt(options, 'hl_lines', []): try: self.hl_lines.add(int(lineno)) except ValueError: pass self._create_stylesheet() def _get_css_class(self, ttype): """Return the css class of this token type prefixed with the classprefix option.""" ttypeclass = _get_ttype_class(ttype) if ttypeclass: return self.classprefix + 
ttypeclass return '' def _create_stylesheet(self): t2c = self.ttype2class = {Token: ''} c2s = self.class2style = {} for ttype, ndef in self.style: name = self._get_css_class(ttype) style = '' if ndef['color']: style += 'color: #%s; ' % ndef['color'] if ndef['bold']: style += 'font-weight: bold; ' if ndef['italic']: style += 'font-style: italic; ' if ndef['underline']: style += 'text-decoration: underline; ' if ndef['bgcolor']: style += 'background-color: #%s; ' % ndef['bgcolor'] if ndef['border']: style += 'border: 1px solid #%s; ' % ndef['border'] if style: t2c[ttype] = name # save len(ttype) to enable ordering the styles by # hierarchy (necessary for CSS cascading rules!) c2s[name] = (style[:-2], ttype, len(ttype)) def get_style_defs(self, arg=None): """ Return CSS style definitions for the classes produced by the current highlighting style. ``arg`` can be a string or list of selectors to insert before the token type classes. """ if arg is None: arg = ('cssclass' in self.options and '.'+self.cssclass or '') if isinstance(arg, str): args = [arg] else: args = list(arg) def prefix(cls): if cls: cls = '.' 
+ cls tmp = [] for arg in args: tmp.append((arg and arg + ' ' or '') + cls) return ', '.join(tmp) styles = [(level, ttype, cls, style) for cls, (style, ttype, level) in self.class2style.items() if cls and style] styles.sort() lines = ['%s { %s } /* %s */' % (prefix(cls), style, repr(ttype)[6:]) for (level, ttype, cls, style) in styles] if arg and not self.nobackground and \ self.style.background_color is not None: text_style = '' if Text in self.ttype2class: text_style = ' ' + self.class2style[self.ttype2class[Text]][0] lines.insert(0, '%s { background: %s;%s }' % (prefix(''), self.style.background_color, text_style)) if self.style.highlight_color is not None: lines.insert(0, '%s.hll { background-color: %s }' % (prefix(''), self.style.highlight_color)) return '\n'.join(lines) def _decodeifneeded(self, value): if isinstance(value, bytes): if self.encoding: return value.decode(self.encoding) return value.decode() return value def _wrap_full(self, inner, outfile): if self.cssfile: if os.path.isabs(self.cssfile): # it's an absolute filename cssfilename = self.cssfile else: try: filename = outfile.name if not filename or filename[0] == '<': # pseudo files, e.g. name == '<fdopen>' raise AttributeError cssfilename = os.path.join(os.path.dirname(filename), self.cssfile) except AttributeError: print('Note: Cannot determine output file name, ' \ 'using current directory as base for the CSS file name', file=sys.stderr) cssfilename = self.cssfile # write CSS file only if noclobber_cssfile isn't given as an option. 
try: if not os.path.exists(cssfilename) or not self.noclobber_cssfile: cf = open(cssfilename, "w") cf.write(CSSFILE_TEMPLATE % {'styledefs': self.get_style_defs('body')}) cf.close() except IOError as err: err.strerror = 'Error writing CSS file: ' + err.strerror raise yield 0, (DOC_HEADER_EXTERNALCSS % dict(title = self.title, cssfile = self.cssfile, encoding = self.encoding)) else: yield 0, (DOC_HEADER % dict(title = self.title, styledefs = self.get_style_defs('body'), encoding = self.encoding)) for t, line in inner: yield t, line yield 0, DOC_FOOTER def _wrap_tablelinenos(self, inner): dummyoutfile = io.StringIO() lncount = 0 for t, line in inner: if t: lncount += 1 dummyoutfile.write(line) fl = self.linenostart mw = len(str(lncount + fl - 1)) sp = self.linenospecial st = self.linenostep la = self.lineanchors aln = self.anchorlinenos nocls = self.noclasses if sp: lines = [] for i in range(fl, fl+lncount): if i % st == 0: if i % sp == 0: if aln: lines.append('<a href="#%s-%d" class="special">%*d</a>' % (la, i, mw, i)) else: lines.append('<span class="special">%*d</span>' % (mw, i)) else: if aln: lines.append('<a href="#%s-%d">%*d</a>' % (la, i, mw, i)) else: lines.append('%*d' % (mw, i)) else: lines.append('') ls = '\n'.join(lines) else: lines = [] for i in range(fl, fl+lncount): if i % st == 0: if aln: lines.append('<a href="#%s-%d">%*d</a>' % (la, i, mw, i)) else: lines.append('%*d' % (mw, i)) else: lines.append('') ls = '\n'.join(lines) # in case you wonder about the seemingly redundant <div> here: since the # content in the other cell also is wrapped in a div, some browsers in # some configurations seem to mess up the formatting... 
if nocls: yield 0, ('<table class="%stable">' % self.cssclass + '<tr><td><div class="linenodiv" ' 'style="background-color: #f0f0f0; padding-right: 10px">' '<pre style="line-height: 125%">' + ls + '</pre></div></td><td class="code">') else: yield 0, ('<table class="%stable">' % self.cssclass + '<tr><td class="linenos"><div class="linenodiv"><pre>' + ls + '</pre></div></td><td class="code">') yield 0, dummyoutfile.getvalue() yield 0, '</td></tr></table>' def _wrap_inlinelinenos(self, inner): # need a list of lines since we need the width of a single number :( lines = list(inner) sp = self.linenospecial st = self.linenostep num = self.linenostart mw = len(str(len(lines) + num - 1)) if self.noclasses: if sp: for t, line in lines: if num%sp == 0: style = 'background-color: #ffffc0; padding: 0 5px 0 5px' else: style = 'background-color: #f0f0f0; padding: 0 5px 0 5px' yield 1, '<span style="%s">%*s</span> ' % ( style, mw, (num%st and ' ' or num)) + line num += 1 else: for t, line in lines: yield 1, ('<span style="background-color: #f0f0f0; ' 'padding: 0 5px 0 5px">%*s</span> ' % ( mw, (num%st and ' ' or num)) + line) num += 1 elif sp: for t, line in lines: yield 1, '<span class="lineno%s">%*s</span> ' % ( num%sp == 0 and ' special' or '', mw, (num%st and ' ' or num)) + line num += 1 else: for t, line in lines: yield 1, '<span class="lineno">%*s</span> ' % ( mw, (num%st and ' ' or num)) + line num += 1 def _wrap_lineanchors(self, inner): s = self.lineanchors i = self.linenostart - 1 # subtract 1 since we have to increment i # *before* yielding for t, line in inner: if t: i += 1 yield 1, '<a name="%s-%d"></a>' % (s, i) + line else: yield 0, line def _wrap_linespans(self, inner): s = self.linespans i = self.linenostart - 1 for t, line in inner: if t: i += 1 yield 1, '<span id="%s-%d">%s</span>' % (s, i, line) else: yield 0, line def _wrap_div(self, inner): style = [] if (self.noclasses and not self.nobackground and self.style.background_color is not None): 
style.append('background: %s' % (self.style.background_color,)) if self.cssstyles: style.append(self.cssstyles) style = '; '.join(style) yield 0, ('<div' + (self.cssclass and ' class="%s"' % self.cssclass) + (style and (' style="%s"' % style)) + '>') for tup in inner: yield tup yield 0, '</div>\n' def _wrap_pre(self, inner): style = [] if self.prestyles: style.append(self.prestyles) if self.noclasses: style.append('line-height: 125%') style = '; '.join(style) yield 0, ('<pre' + (style and ' style="%s"' % style) + '>') for tup in inner: yield tup yield 0, '</pre>' def _format_lines(self, tokensource): """ Just format the tokens, without any wrapping tags. Yield individual lines. """ nocls = self.noclasses lsep = self.lineseparator # for <span style=""> lookup only getcls = self.ttype2class.get c2s = self.class2style escape_table = _escape_html_table tagsfile = self.tagsfile lspan = '' line = '' for ttype, value in tokensource: if nocls: cclass = getcls(ttype) while cclass is None: ttype = ttype.parent cclass = getcls(ttype) cspan = cclass and '<span style="%s">' % c2s[cclass][0] or '' else: cls = self._get_css_class(ttype) cspan = cls and '<span class="%s">' % cls or '' parts = value.translate(escape_table).split('\n') if tagsfile and ttype in Token.Name: filename, linenumber = self._lookup_ctag(value) if linenumber: base, filename = os.path.split(filename) if base: base += '/' filename, extension = os.path.splitext(filename) url = self.tagurlformat % {'path': base, 'fname': filename, 'fext': extension} parts[0] = "<a href=\"%s#%s-%d\">%s" % \ (url, self.lineanchors, linenumber, parts[0]) parts[-1] = parts[-1] + "</a>" # for all but the last line for part in parts[:-1]: if line: if lspan != cspan: line += (lspan and '</span>') + cspan + part + \ (cspan and '</span>') + lsep else: # both are the same line += part + (lspan and '</span>') + lsep yield 1, line line = '' elif part: yield 1, cspan + part + (cspan and '</span>') + lsep else: yield 1, lsep # for the last 
line if line and parts[-1]: if lspan != cspan: line += (lspan and '</span>') + cspan + parts[-1] lspan = cspan else: line += parts[-1] elif parts[-1]: line = cspan + parts[-1] lspan = cspan # else we neither have to open a new span nor set lspan if line: yield 1, line + (lspan and '</span>') + lsep def _lookup_ctag(self, token): entry = ctags.TagEntry() if self._ctags.find(entry, token, 0): return entry['file'], entry['lineNumber'] else: return None, None def _highlight_lines(self, tokensource): """ Highlighted the lines specified in the `hl_lines` option by post-processing the token stream coming from `_format_lines`. """ hls = self.hl_lines for i, (t, value) in enumerate(tokensource): if t != 1: yield t, value if i + 1 in hls: # i + 1 because Python indexes start at 0 if self.noclasses: style = '' if self.style.highlight_color is not None: style = (' style="background-color: %s"' % (self.style.highlight_color,)) yield 1, '<span%s>%s</span>' % (style, value) else: yield 1, '<span class="hll">%s</span>' % value else: yield 1, value def wrap(self, source, outfile): """ Wrap the ``source``, which is a generator yielding individual lines, in custom generators. See docstring for `format`. Can be overridden. """ return self._wrap_div(self._wrap_pre(source)) def format_unencoded(self, tokensource, outfile): """ The formatting process uses several nested generators; which of them are used is determined by the user's options. Each generator should take at least one argument, ``inner``, and wrap the pieces of text generated by this. Always yield 2-tuples: (code, text). If "code" is 1, the text is part of the original tokensource being highlighted, if it's 0, the text is some piece of wrapping. This makes it possible to use several different wrappers that process the original source linewise, e.g. line number generators. 
""" source = self._format_lines(tokensource) if self.hl_lines: source = self._highlight_lines(source) if not self.nowrap: if self.linenos == 2: source = self._wrap_inlinelinenos(source) if self.lineanchors: source = self._wrap_lineanchors(source) if self.linespans: source = self._wrap_linespans(source) source = self.wrap(source, outfile) if self.linenos == 1: source = self._wrap_tablelinenos(source) if self.full: source = self._wrap_full(source, outfile) for t, piece in source: outfile.write(piece)
adlius/osf.io
refs/heads/develop
admin_tests/osf_groups/test_views.py
10
from admin.osf_groups.views import ( OSFGroupsView, OSFGroupsListView, OSFGroupsFormView ) from admin_tests.utilities import setup_log_view from nose import tools as nt from django.test import RequestFactory from tests.base import AdminTestCase from osf_tests.factories import UserFactory, ProjectFactory, OSFGroupFactory from osf.utils.permissions import WRITE from admin.osf_groups.serializers import serialize_node_for_groups class TestOSFGroupsView(AdminTestCase): def setUp(self): super(TestOSFGroupsView, self).setUp() self.user = UserFactory() self.user_two = UserFactory() self.project = ProjectFactory() self.group = OSFGroupFactory(name='test', creator=self.user) self.group.make_member(self.user_two) self.group.add_group_to_node(self.project) self.group.save() self.request = RequestFactory().post('/fake_path') def test_get_object(self): view = OSFGroupsView() view = setup_log_view(view, self.request, id=self.group._id) group = view.get_object() nt.assert_equal(self.group.name, group['name']) nt.assert_equal(self.user.fullname, group['creator']['name']) nt.assert_equal(len(group['members']), 1) nt.assert_equal(group['members'][0]['name'], self.user_two.fullname) nt.assert_equal(group['members'][0]['id'], self.user_two._id) nt.assert_equal(len(group['managers']), 1) nt.assert_equal(group['managers'][0]['name'], self.user.fullname) nt.assert_equal(group['managers'][0]['id'], self.user._id) nt.assert_equal([serialize_node_for_groups(self.project, self.group)], group['nodes']) nt.assert_equal(group['nodes'][0]['title'], self.project.title) nt.assert_equal(group['nodes'][0]['permission'], WRITE) class TestOSFGroupsListView(AdminTestCase): def setUp(self): super(TestOSFGroupsListView, self).setUp() self.user = UserFactory() self.group = OSFGroupFactory(name='Brian Dawkins', creator=self.user) self.group2 = OSFGroupFactory(name='Brian Westbrook', creator=self.user) self.group3 = OSFGroupFactory(name='Darren Sproles', creator=self.user) self.request = 
RequestFactory().post('/fake_path') self.view = OSFGroupsListView() def test_get_default_queryset(self): view = setup_log_view(self.view, self.request) queryset = view.get_queryset() nt.assert_equal(len(queryset), 3) nt.assert_in(self.group, queryset) nt.assert_in(self.group2, queryset) nt.assert_in(self.group3, queryset) def test_get_queryset_by_name(self): request = RequestFactory().post('/fake_path/?name=Brian') view = setup_log_view(self.view, request) queryset = view.get_queryset() nt.assert_equal(len(queryset), 2) nt.assert_in(self.group, queryset) nt.assert_in(self.group2, queryset) class TestOSFGroupsFormView(AdminTestCase): def setUp(self): super(TestOSFGroupsFormView, self).setUp() self.user = UserFactory() self.group = OSFGroupFactory(name='Brian Dawkins', creator=self.user) self.group2 = OSFGroupFactory(name='Brian Westbrook', creator=self.user) self.view = OSFGroupsFormView() def test_post_id(self): request = RequestFactory().post('/fake_path', data={'id': self.group._id, 'name': ''}) view = setup_log_view(self.view, request) redirect = view.post(request) assert redirect.url == '/osf_groups/{}/'.format(self.group._id) def test_post_name(self): request = RequestFactory().post('/fake_path', data={'id': '', 'name': 'Brian'}) view = setup_log_view(self.view, request) redirect = view.post(request) assert redirect.url == '/osf_groups/?name=Brian'
jobscore/sync-engine
refs/heads/master
inbox/util/concurrency.py
3
import sys import functools import random import gevent from backports import ssl from gevent import socket from redis import TimeoutError import _mysql_exceptions from sqlalchemy.exc import StatementError from inbox.models import Account from inbox.models.session import session_scope from nylas.logging import get_logger, create_error_log_context from nylas.logging.sentry import log_uncaught_errors log = get_logger() BACKOFF_DELAY = 30 # seconds to wait before retrying after a failure TRANSIENT_NETWORK_ERRS = ( socket.timeout, TimeoutError, socket.error, ssl.SSLError) TRANSIENT_MYSQL_MESSAGES = ( "try restarting transaction", "Too many connections", "Lost connection to MySQL server", "MySQL server has gone away", "Can't connect to MySQL server", "Max connect timeout reached") def retry(func, retry_classes=None, fail_classes=None, exc_callback=None, backoff_delay=BACKOFF_DELAY): """ Executes the callable func, retrying on uncaught exceptions matching the class filters. Arguments --------- func : function exc_callback : function, optional Function to execute if an exception is raised within func. The exception is passed as the first argument. (e.g., log something) retry_classes: list of Exception subclasses, optional Configures what to retry on. If specified, func is retried only if one of these exceptions is raised. Default is to retry on all exceptions. fail_classes: list of Exception subclasses, optional Configures what not to retry on. If specified, func is /not/ retried if one of these exceptions is raised. 
""" if (fail_classes and retry_classes and set(fail_classes).intersection(retry_classes)): raise ValueError("Can't include exception classes in both fail_on and " "retry_on") def should_retry_on(exc): if fail_classes and isinstance(exc, tuple(fail_classes)): return False if retry_classes and not isinstance(exc, tuple(retry_classes)): return False return True @functools.wraps(func) def wrapped(*args, **kwargs): while True: try: return func(*args, **kwargs) except gevent.GreenletExit as e: # GreenletExit isn't actually a subclass of Exception. # This is also considered to be a successful execution # (somebody intentionally killed the greenlet). raise except Exception as e: if not should_retry_on(e): raise if exc_callback is not None: exc_callback(e) # Sleep a bit so that we don't poll too quickly and re-encounter # the error. Also add a random delay to prevent herding effects. gevent.sleep(backoff_delay + int(random.uniform(1, 10))) return wrapped def retry_with_logging(func, logger=None, retry_classes=None, fail_classes=None, account_id=None, provider=None, backoff_delay=BACKOFF_DELAY): # Sharing the network_errs counter between invocations of callback by # placing it inside an array: # http://stackoverflow.com/questions/7935966/python-overwriting-variables-in-nested-functions occurrences = [0] def callback(e): is_transient = isinstance(e, TRANSIENT_NETWORK_ERRS) mysql_error = None log = logger or get_logger() if isinstance(e, _mysql_exceptions.OperationalError): mysql_error = e elif isinstance(e, StatementError) and isinstance(e.orig, _mysql_exceptions.OperationalError): mysql_error = e.orig if mysql_error: for msg in TRANSIENT_MYSQL_MESSAGES: if msg in mysql_error.message: is_transient = True if is_transient: occurrences[0] += 1 if occurrences[0] < 20: return else: occurrences[0] = 1 if account_id: try: with session_scope(account_id) as db_session: account = db_session.query(Account).get(account_id) sync_error = account.sync_error if not sync_error or 
isinstance(sync_error, basestring): account.update_sync_error(e) db_session.commit() except: log.error('Error saving sync_error to account object', account_id=account_id, **create_error_log_context(sys.exc_info())) log_uncaught_errors(logger, account_id=account_id, provider=provider, occurrences=occurrences[0]) return retry(func, exc_callback=callback, retry_classes=retry_classes, fail_classes=fail_classes, backoff_delay=backoff_delay)()
emartonline/newfies-dialer
refs/heads/master
newfies/voice_app/migrations/0001_initialschema_voice_app.py
7
# -*- coding: utf-8 -*- import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding model 'VoiceApp' db.create_table(u'voice_app', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('name', self.gf('django.db.models.fields.CharField')(max_length=90)), ('description', self.gf('django.db.models.fields.TextField')(null=True, blank=True)), ('type', self.gf('django.db.models.fields.IntegerField')(default='1', max_length=20, null=True, blank=True)), ('gateway', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['dialer_gateway.Gateway'], null=True, blank=True)), ('data', self.gf('django.db.models.fields.CharField')(max_length=500, blank=True)), ('user', self.gf('django.db.models.fields.related.ForeignKey')(related_name='VoIP App owner', to=orm['auth.User'])), ('created_date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)), ('updated_date', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)), )) db.send_create_signal('voice_app', ['VoiceApp']) def backwards(self, orm): # Deleting model 'VoiceApp' db.delete_table(u'voice_app') models = { 'auth.group': { 'Meta': {'object_name': 'Group'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, 'auth.permission': { 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'auth.user': { 'Meta': {'object_name': 'User'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, 'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'dialer_gateway.gateway': { 'Meta': {'object_name': 
'Gateway', 'db_table': "u'dialer_gateway'"}, 'addparameter': ('django.db.models.fields.CharField', [], {'max_length': '360', 'blank': 'True'}), 'addprefix': ('django.db.models.fields.CharField', [], {'max_length': '60', 'blank': 'True'}), 'count_call': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}), 'count_in_use': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}), 'created_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'failover': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'Failover Gateway'", 'null': 'True', 'to': "orm['dialer_gateway.Gateway']"}), 'gateway_codecs': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}), 'gateway_retries': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}), 'gateway_timeouts': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}), 'gateways': ('django.db.models.fields.CharField', [], {'max_length': '500'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'maximum_call': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), 'originate_dial_string': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}), 'removeprefix': ('django.db.models.fields.CharField', [], {'max_length': '60', 'blank': 'True'}), 'secondused': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}), 'status': ('django.db.models.fields.IntegerField', [], {'default': "'1'", 'null': 'True', 'blank': 'True'}), 'updated_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}) }, 'voice_app.voiceapp': { 'Meta': {'object_name': 
'VoiceApp', 'db_table': "u'voice_app'"}, 'created_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'data': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}), 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'gateway': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['dialer_gateway.Gateway']", 'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '90'}), 'type': ('django.db.models.fields.IntegerField', [], {'default': "'1'", 'max_length': '20', 'null': 'True', 'blank': 'True'}), 'updated_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'VoIP App owner'", 'to': "orm['auth.User']"}) } } complete_apps = ['voice_app']
antoviaque/edx-platform
refs/heads/master
lms/djangoapps/shoppingcart/processors/__init__.py
215
""" Public API for payment processor implementations. The specific implementation is determined at runtime using Django settings: CC_PROCESSOR_NAME: The name of the Python module (in `shoppingcart.processors`) to use. CC_PROCESSOR: Dictionary of configuration options for specific processor implementations, keyed to processor names. """ from django.conf import settings # Import the processor implementation, using `CC_PROCESSOR_NAME` # as the name of the Python module in `shoppingcart.processors` PROCESSOR_MODULE = __import__( 'shoppingcart.processors.' + settings.CC_PROCESSOR_NAME, fromlist=[ 'render_purchase_form_html', 'process_postpay_callback', 'get_purchase_endpoint', 'get_signed_purchase_params', ] ) def render_purchase_form_html(cart, **kwargs): """ Render an HTML form with POSTs to the hosted payment processor. Args: cart (Order): The order model representing items in the user's cart. Returns: unicode: the rendered HTML form """ return PROCESSOR_MODULE.render_purchase_form_html(cart, **kwargs) def process_postpay_callback(params, **kwargs): """ Handle a response from the payment processor. Concrete implementations should: 1) Verify the parameters and determine if the payment was successful. 2) If successful, mark the order as purchased and call `purchased_callbacks` of the cart items. 3) If unsuccessful, try to figure out why and generate a helpful error message. 4) Return a dictionary of the form: {'success': bool, 'order': Order, 'error_html': str} Args: params (dict): Dictionary of parameters received from the payment processor. Keyword Args: Can be used to provide additional information to concrete implementations. Returns: dict """ return PROCESSOR_MODULE.process_postpay_callback(params, **kwargs) def get_purchase_endpoint(): """ Return the URL of the current payment processor's endpoint. 
Returns: unicode """ return PROCESSOR_MODULE.get_purchase_endpoint() def get_signed_purchase_params(cart, **kwargs): """ Return the parameters to send to the current payment processor. Args: cart (Order): The order model representing items in the user's cart. Keyword Args: Can be used to provide additional information to concrete implementations. Returns: dict """ return PROCESSOR_MODULE.get_signed_purchase_params(cart, **kwargs)
monospaced/paperback
refs/heads/master
melk/bs4/tests/test_htmlparser.py
433
"""Tests to ensure that the html.parser tree builder generates good trees.""" from bs4.testing import SoupTest, HTMLTreeBuilderSmokeTest from bs4.builder import HTMLParserTreeBuilder class HTMLParserTreeBuilderSmokeTest(SoupTest, HTMLTreeBuilderSmokeTest): @property def default_builder(self): return HTMLParserTreeBuilder() def test_namespaced_system_doctype(self): # html.parser can't handle namespaced doctypes, so skip this one. pass def test_namespaced_public_doctype(self): # html.parser can't handle namespaced doctypes, so skip this one. pass
rajsadho/django
refs/heads/master
django/contrib/messages/storage/__init__.py
827
from django.conf import settings from django.utils.module_loading import import_string def default_storage(request): """ Callable with the same interface as the storage classes. This isn't just default_storage = import_string(settings.MESSAGE_STORAGE) to avoid accessing the settings at the module level. """ return import_string(settings.MESSAGE_STORAGE)(request)
glandium/git-cinnabar
refs/heads/master
cinnabar/cmd/__init__.py
1
from __future__ import absolute_import from .data import data # noqa: F401 from .fsck import fsck # noqa: F401 from .upgrade import upgrade # noqa: F401 from .reclone import reclone # noqa: F401 from .fetch import fetch # noqa: F401 from .convert import ( # noqa: F401 hg2git, git2hg, ) from .bundle import bundle # noqa: F401 from .rollback import rollback # noqa: F401 from .python import python # noqa: F401 from .download import download # noqa: F401 from .util import CLI # noqa: F401
lubao/uju
refs/heads/master
Apps/GenForm/views.py
1
# Create your views here. from django.core.context_processors import csrf from django.http import HttpResponse from django import template from django.shortcuts import render_to_response from django.template.loader import render_to_string def show_op_form(request): if request.method == 'GET' : my_csrf = {} my_csrf.update(csrf(request)) app_form_path = 'template/opForm.html' return render_to_response (app_form_path, my_csrf) def gen_op_form(request): if request.method == "POST": # res = HttpResponse() # for data in request.raw_post_data.split("&") : # res.write("<div>{0}</div>".format(data)) num_of_schemas = 0 current_schema_id = 0 form_dict = {} data_list = request.raw_post_data.replace('+',' ').split("&") csrf_token = data_list.pop(0) form_dict['appId'] = data_list.pop(0).split("=")[1] form_dict['formId'] = data_list.pop(0).split("=")[1] form_dict['formName'] = data_list.pop(0).split("=")[1] schemaId = data_list.pop(0).split("=")[1] op = data_list.pop(0).split("=")[1] if (op == 'Create') : form_dict['opId'] = 0 elif (op == 'Update') : form_dict['opId'] = 1 elif (op == 'Destroy') : form_dict['opId'] = 2 elif (op == 'Fetch') : form_dict['opId'] = 3 elif (op == 'Adv Fetch') : form_dict['opId'] = 4 elif (op == 'Count') : form_dict['opId'] = 5 form_dict['schemas'] = [] while data_list : if form_dict['opId'] < 3 : current_schema = {'fields':[], 'schemaId':schemaId, } num_of_fields = 0 while data_list : num_of_fields += 1 current_schema['fields'].append( {'fieldId':data_list.pop(0).split('=')[1]}) current_schema['numOfFields'] = num_of_fields form_dict['schemas'].append(current_schema) elif form_dict['opId'] == 3 or form_dict['opId'] == 5: num_of_fields = 0 num_of_fetch = 0 current_schema = {'fields':[], 'schemaId':schemaId, 'fetch':[],'logical':'AND', } while data_list and not data_list[0].startswith('fetch'): num_of_fields += 1 current_schema['fields'].append( {'fieldId':data_list.pop(0).split('=')[1] ,'operator':data_list.pop(0).split('=')[1].replace('+',' ')}) 
current_schema['numOfFields'] = num_of_fields if form_dict['opId'] == 3: while data_list : num_of_fetch += 1 current_schema['fetch'].append( data_list.pop(0).split('=')[1]) current_schema['numOfFetch'] = num_of_fetch form_dict['schemas'].append(current_schema) # form_dict = gen_fake_form_dict() doc = render_to_string ('template/form_description.xml', form_dict) open("/tmp/{0}.xml".format(form_dict['formName']),"w").write(doc) tmp = open('/tmp/{0}.xml'.format(form_dict['formName']),'rb').read() res = HttpResponse(tmp, mimetype="application/xml") res['Content-Disposition'] = 'attachment; filename={0}.xml'.format( form_dict['formName'] ) return res else: return HttpResponse("WHF") def gen_fake_form_dict(): # This function will return a fake dictionary # to present the data. # This function is designed to verify app_description.xml return { 'appId':'1', 'formName':'fakeformName', 'formId':'1', 'opId':'3', 'numOfSchemas':'1', 'schemas':[ { 'schemaId':'1', 'numOfFields':'1', 'logical':'AND', 'fields': [{'fieldId':'0', 'operator':'Greator Than', } ],'numOfFetch':'2','fetch':['1', '2', ]}]}
naglis/plugin.audio.ausis
refs/heads/master
resources/lib/__init__.py
14224
# -*- coding: utf-8 -*-
RO-ny9/python-for-android
refs/heads/master
python-modules/twisted/twisted/python/monkey.py
61
# -*- test-case-name: twisted.test.test_monkey -*- # Copyright (c) 2007 Twisted Matrix Laboratories. # See LICENSE for details. class MonkeyPatcher(object): """ Cover up attributes with new objects. Neat for monkey-patching things for unit-testing purposes. """ def __init__(self, *patches): # List of patches to apply in (obj, name, value). self._patchesToApply = [] # List of the original values for things that have been patched. # (obj, name, value) format. self._originals = [] for patch in patches: self.addPatch(*patch) def addPatch(self, obj, name, value): """ Add a patch so that the attribute C{name} on C{obj} will be assigned to C{value} when C{patch} is called or during C{runWithPatches}. You can restore the original values with a call to restore(). """ self._patchesToApply.append((obj, name, value)) def _alreadyPatched(self, obj, name): """ Has the C{name} attribute of C{obj} already been patched by this patcher? """ for o, n, v in self._originals: if (o, n) == (obj, name): return True return False def patch(self): """ Apply all of the patches that have been specified with L{addPatch}. Reverse this operation using L{restore}. """ for obj, name, value in self._patchesToApply: if not self._alreadyPatched(obj, name): self._originals.append((obj, name, getattr(obj, name))) setattr(obj, name, value) def restore(self): """ Restore all original values to any patched objects. """ while self._originals: obj, name, value = self._originals.pop() setattr(obj, name, value) def runWithPatches(self, f, *args, **kw): """ Apply each patch already specified. Then run the function f with the given args and kwargs. Restore everything when done. """ self.patch() try: return f(*args, **kw) finally: self.restore()
triveous/LearnFlask
refs/heads/master
flask/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/contrib/pyopenssl.py
304
'''SSL with SNI_-support for Python 2. Follow these instructions if you would like to verify SSL certificates in Python 2. Note, the default libraries do *not* do certificate checking; you need to do additional work to validate certificates yourself. This needs the following packages installed: * pyOpenSSL (tested with 0.13) * ndg-httpsclient (tested with 0.3.2) * pyasn1 (tested with 0.1.6) You can install them with the following command: pip install pyopenssl ndg-httpsclient pyasn1 To activate certificate checking, call :func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code before you begin making HTTP requests. This can be done in a ``sitecustomize`` module, or at any other time before your application begins using ``urllib3``, like this:: try: import urllib3.contrib.pyopenssl urllib3.contrib.pyopenssl.inject_into_urllib3() except ImportError: pass Now you can use :mod:`urllib3` as you normally would, and it will support SNI when the required modules are installed. Activating this module also has the positive side effect of disabling SSL/TLS encryption in Python 2 (see `CRIME attack`_). If you want to configure the default list of supported cipher suites, you can set the ``urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST`` variable. Module Variables ---------------- :var DEFAULT_SSL_CIPHER_LIST: The list of supported SSL/TLS cipher suites. Default: ``ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES: ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:!aNULL:!MD5:!DSS`` .. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication .. 
_crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit) ''' from ndg.httpsclient.ssl_peer_verification import SUBJ_ALT_NAME_SUPPORT from ndg.httpsclient.subj_alt_name import SubjectAltName as BaseSubjectAltName import OpenSSL.SSL from pyasn1.codec.der import decoder as der_decoder from pyasn1.type import univ, constraint from socket import _fileobject, timeout import ssl import select from cStringIO import StringIO from .. import connection from .. import util __all__ = ['inject_into_urllib3', 'extract_from_urllib3'] # SNI only *really* works if we can read the subjectAltName of certificates. HAS_SNI = SUBJ_ALT_NAME_SUPPORT # Map from urllib3 to PyOpenSSL compatible parameter-values. _openssl_versions = { ssl.PROTOCOL_SSLv23: OpenSSL.SSL.SSLv23_METHOD, ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD, ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD, } _openssl_verify = { ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE, ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER, ssl.CERT_REQUIRED: OpenSSL.SSL.VERIFY_PEER + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT, } # A secure default. # Sources for more information on TLS ciphers: # # - https://wiki.mozilla.org/Security/Server_Side_TLS # - https://www.ssllabs.com/projects/best-practices/index.html # - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/ # # The general intent is: # - Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE), # - prefer ECDHE over DHE for better performance, # - prefer any AES-GCM over any AES-CBC for better performance and security, # - use 3DES as fallback which is secure but slow, # - disable NULL authentication, MD5 MACs and DSS for security reasons. 
DEFAULT_SSL_CIPHER_LIST = "ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:" + \ "ECDH+AES128:DH+AES:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:" + \ "!aNULL:!MD5:!DSS" orig_util_HAS_SNI = util.HAS_SNI orig_connection_ssl_wrap_socket = connection.ssl_wrap_socket def inject_into_urllib3(): 'Monkey-patch urllib3 with PyOpenSSL-backed SSL-support.' connection.ssl_wrap_socket = ssl_wrap_socket util.HAS_SNI = HAS_SNI def extract_from_urllib3(): 'Undo monkey-patching by :func:`inject_into_urllib3`.' connection.ssl_wrap_socket = orig_connection_ssl_wrap_socket util.HAS_SNI = orig_util_HAS_SNI ### Note: This is a slightly bug-fixed version of same from ndg-httpsclient. class SubjectAltName(BaseSubjectAltName): '''ASN.1 implementation for subjectAltNames support''' # There is no limit to how many SAN certificates a certificate may have, # however this needs to have some limit so we'll set an arbitrarily high # limit. sizeSpec = univ.SequenceOf.sizeSpec + \ constraint.ValueSizeConstraint(1, 1024) ### Note: This is a slightly bug-fixed version of same from ndg-httpsclient. 
def get_subj_alt_name(peer_cert): # Search through extensions dns_name = [] if not SUBJ_ALT_NAME_SUPPORT: return dns_name general_names = SubjectAltName() for i in range(peer_cert.get_extension_count()): ext = peer_cert.get_extension(i) ext_name = ext.get_short_name() if ext_name != 'subjectAltName': continue # PyOpenSSL returns extension data in ASN.1 encoded form ext_dat = ext.get_data() decoded_dat = der_decoder.decode(ext_dat, asn1Spec=general_names) for name in decoded_dat: if not isinstance(name, SubjectAltName): continue for entry in range(len(name)): component = name.getComponentByPosition(entry) if component.getName() != 'dNSName': continue dns_name.append(str(component.getComponent())) return dns_name class fileobject(_fileobject): def _wait_for_sock(self): rd, wd, ed = select.select([self._sock], [], [], self._sock.gettimeout()) if not rd: raise timeout() def read(self, size=-1): # Use max, disallow tiny reads in a loop as they are very inefficient. # We never leave read() with any leftover data from a new recv() call # in our internal buffer. rbufsize = max(self._rbufsize, self.default_bufsize) # Our use of StringIO rather than lists of string objects returned by # recv() minimizes memory usage and fragmentation that occurs when # rbufsize is large compared to the typical return value of recv(). buf = self._rbuf buf.seek(0, 2) # seek end if size < 0: # Read until EOF self._rbuf = StringIO() # reset _rbuf. we consume it via buf. while True: try: data = self._sock.recv(rbufsize) except OpenSSL.SSL.WantReadError: self._wait_for_sock() continue if not data: break buf.write(data) return buf.getvalue() else: # Read until size bytes or EOF seen, whichever comes first buf_len = buf.tell() if buf_len >= size: # Already have size bytes in our buffer? Extract and return. buf.seek(0) rv = buf.read(size) self._rbuf = StringIO() self._rbuf.write(buf.read()) return rv self._rbuf = StringIO() # reset _rbuf. we consume it via buf. 
while True: left = size - buf_len # recv() will malloc the amount of memory given as its # parameter even though it often returns much less data # than that. The returned data string is short lived # as we copy it into a StringIO and free it. This avoids # fragmentation issues on many platforms. try: data = self._sock.recv(left) except OpenSSL.SSL.WantReadError: self._wait_for_sock() continue if not data: break n = len(data) if n == size and not buf_len: # Shortcut. Avoid buffer data copies when: # - We have no data in our buffer. # AND # - Our call to recv returned exactly the # number of bytes we were asked to read. return data if n == left: buf.write(data) del data # explicit free break assert n <= left, "recv(%d) returned %d bytes" % (left, n) buf.write(data) buf_len += n del data # explicit free #assert buf_len == buf.tell() return buf.getvalue() def readline(self, size=-1): buf = self._rbuf buf.seek(0, 2) # seek end if buf.tell() > 0: # check if we already have it in our buffer buf.seek(0) bline = buf.readline(size) if bline.endswith('\n') or len(bline) == size: self._rbuf = StringIO() self._rbuf.write(buf.read()) return bline del bline if size < 0: # Read until \n or EOF, whichever comes first if self._rbufsize <= 1: # Speed up unbuffered case buf.seek(0) buffers = [buf.read()] self._rbuf = StringIO() # reset _rbuf. we consume it via buf. data = None recv = self._sock.recv while True: try: while data != "\n": data = recv(1) if not data: break buffers.append(data) except OpenSSL.SSL.WantReadError: self._wait_for_sock() continue break return "".join(buffers) buf.seek(0, 2) # seek end self._rbuf = StringIO() # reset _rbuf. we consume it via buf. 
while True: try: data = self._sock.recv(self._rbufsize) except OpenSSL.SSL.WantReadError: self._wait_for_sock() continue if not data: break nl = data.find('\n') if nl >= 0: nl += 1 buf.write(data[:nl]) self._rbuf.write(data[nl:]) del data break buf.write(data) return buf.getvalue() else: # Read until size bytes or \n or EOF seen, whichever comes first buf.seek(0, 2) # seek end buf_len = buf.tell() if buf_len >= size: buf.seek(0) rv = buf.read(size) self._rbuf = StringIO() self._rbuf.write(buf.read()) return rv self._rbuf = StringIO() # reset _rbuf. we consume it via buf. while True: try: data = self._sock.recv(self._rbufsize) except OpenSSL.SSL.WantReadError: self._wait_for_sock() continue if not data: break left = size - buf_len # did we just receive a newline? nl = data.find('\n', 0, left) if nl >= 0: nl += 1 # save the excess data to _rbuf self._rbuf.write(data[nl:]) if buf_len: buf.write(data[:nl]) break else: # Shortcut. Avoid data copy through buf when returning # a substring of our first recv(). return data[:nl] n = len(data) if n == size and not buf_len: # Shortcut. Avoid data copy through buf when # returning exactly all of our first recv(). 
return data if n >= left: buf.write(data[:left]) self._rbuf.write(data[left:]) break buf.write(data) buf_len += n #assert buf_len == buf.tell() return buf.getvalue() class WrappedSocket(object): '''API-compatibility wrapper for Python OpenSSL's Connection-class.''' def __init__(self, connection, socket): self.connection = connection self.socket = socket def fileno(self): return self.socket.fileno() def makefile(self, mode, bufsize=-1): return fileobject(self.connection, mode, bufsize) def settimeout(self, timeout): return self.socket.settimeout(timeout) def sendall(self, data): return self.connection.sendall(data) def close(self): return self.connection.shutdown() def getpeercert(self, binary_form=False): x509 = self.connection.get_peer_certificate() if not x509: return x509 if binary_form: return OpenSSL.crypto.dump_certificate( OpenSSL.crypto.FILETYPE_ASN1, x509) return { 'subject': ( (('commonName', x509.get_subject().CN),), ), 'subjectAltName': [ ('DNS', value) for value in get_subj_alt_name(x509) ] } def _verify_callback(cnx, x509, err_no, err_depth, return_code): return err_no == 0 def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, ca_certs=None, server_hostname=None, ssl_version=None): ctx = OpenSSL.SSL.Context(_openssl_versions[ssl_version]) if certfile: ctx.use_certificate_file(certfile) if keyfile: ctx.use_privatekey_file(keyfile) if cert_reqs != ssl.CERT_NONE: ctx.set_verify(_openssl_verify[cert_reqs], _verify_callback) if ca_certs: try: ctx.load_verify_locations(ca_certs, None) except OpenSSL.SSL.Error as e: raise ssl.SSLError('bad ca_certs: %r' % ca_certs, e) else: ctx.set_default_verify_paths() # Disable TLS compression to migitate CRIME attack (issue #309) OP_NO_COMPRESSION = 0x20000 ctx.set_options(OP_NO_COMPRESSION) # Set list of supported ciphersuites. 
ctx.set_cipher_list(DEFAULT_SSL_CIPHER_LIST) cnx = OpenSSL.SSL.Connection(ctx, sock) cnx.set_tlsext_host_name(server_hostname) cnx.set_connect_state() while True: try: cnx.do_handshake() except OpenSSL.SSL.WantReadError: select.select([sock], [], []) continue except OpenSSL.SSL.Error as e: raise ssl.SSLError('bad handshake', e) break return WrappedSocket(cnx, sock)
marty331/jakesclock
refs/heads/master
flask/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py
2057
# Compatibility shim: make `CertificateError` and `match_hostname` importable
# everywhere by preferring the stdlib implementation, then the pypi backport,
# and finally our vendored copy. Import order matters; do not reorder.
try:
    # Python 3.2+
    from ssl import CertificateError, match_hostname
except ImportError:
    try:
        # Backport of the function from a pypi module
        from backports.ssl_match_hostname import CertificateError, match_hostname
    except ImportError:
        # Our vendored copy
        from ._implementation import CertificateError, match_hostname

# Not needed, but documenting what we provide.
__all__ = ('CertificateError', 'match_hostname')
threerings/farbot
refs/heads/master
farb/test/test_utils.py
1
# test_utils.py vi:ts=4:sw=4:expandtab: # # Copyright (c) 2006-2008 Three Rings Design, Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # 3. Neither the name of the copyright owner nor the names of contributors # may be used to endorse or promote products derived from this software # without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
""" Misc Utilities Unit Tests """ import os import shutil import unittest from farb import utils # Useful Constants from farb.test import DATA_DIR from farb.test.test_builder import BUILDROOT class CopyRecursiveTestCase(unittest.TestCase): """ Test copyRecursive """ def setUp(self): self.copySrc = os.path.join(BUILDROOT, 'Makefile') self.copyDst = os.path.join(DATA_DIR, 'testcopy') self.copyRecursiveDst = os.path.join(DATA_DIR, 'testrecurse') def tearDown(self): if (os.path.exists(self.copyRecursiveDst)): shutil.rmtree(self.copyRecursiveDst) if (os.path.exists(self.copyDst)): os.unlink(self.copyDst) def test_copyWithOwnership(self): utils.copyWithOwnership(self.copySrc, self.copyDst) # TODO Would need root running this test in order to test # if the ownership copying code works self.assert_(os.path.exists(os.path.join(DATA_DIR, 'testcopy'))) def test_copyRecursive(self): utils.copyRecursive(BUILDROOT, self.copyRecursiveDst) # TODO Would need root running this test in order to test # if the ownership copying code works self.assert_(os.path.exists(os.path.join(self.copyRecursiveDst, 'Makefile')))
nacl-webkit/chrome_deps
refs/heads/master
ppapi/c/documentation/doxy_cleanup.py
177
#!/usr/bin/python # Copyright (c) 2011 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. '''This utility cleans up the html files as emitted by doxygen so that they are suitable for publication on a Google documentation site. ''' import optparse import os import re import shutil import string import sys try: from BeautifulSoup import BeautifulSoup, Tag except (ImportError, NotImplementedError): print ("This tool requires the BeautifulSoup package " "(see http://www.crummy.com/software/BeautifulSoup/).\n" "Make sure that the file BeautifulSoup.py is either in this directory " "or is available in your PYTHON_PATH") raise class HTMLFixer(object): '''This class cleans up the html strings as produced by Doxygen ''' def __init__(self, html): self.soup = BeautifulSoup(html) def FixTableHeadings(self): '''Fixes the doxygen table headings. This includes: - Using bare <h2> title row instead of row embedded in <tr><td> in table - Putting the "name" attribute into the "id" attribute of the <tr> tag. - Splitting up tables into multiple separate tables if a table heading appears in the middle of a table. For example, this html: <table> <tr><td colspan="2"><h2><a name="pub-attribs"></a> Data Fields List</h2></td></tr> ... </table> would be converted to this: <h2>Data Fields List</h2> <table> ... </table> ''' table_headers = [] for tag in self.soup.findAll('tr'): if tag.td and tag.td.h2 and tag.td.h2.a and tag.td.h2.a['name']: #tag['id'] = tag.td.h2.a['name'] tag.string = tag.td.h2.a.next tag.name = 'h2' table_headers.append(tag) # reverse the list so that earlier tags don't delete later tags table_headers.reverse() # Split up tables that have multiple table header (th) rows for tag in table_headers: print "Header tag: %s is %s" % (tag.name, tag.string.strip()) # Is this a heading in the middle of a table? 
if tag.findPreviousSibling('tr') and tag.parent.name == 'table': print "Splitting Table named %s" % tag.string.strip() table = tag.parent table_parent = table.parent table_index = table_parent.contents.index(table) new_table = Tag(self.soup, name='table', attrs=table.attrs) table_parent.insert(table_index + 1, new_table) tag_index = table.contents.index(tag) for index, row in enumerate(table.contents[tag_index:]): new_table.insert(index, row) # Now move the <h2> tag to be in front of the <table> tag assert tag.parent.name == 'table' table = tag.parent table_parent = table.parent table_index = table_parent.contents.index(table) table_parent.insert(table_index, tag) def RemoveTopHeadings(self): '''Removes <div> sections with a header, tabs, or navpath class attribute''' header_tags = self.soup.findAll( name='div', attrs={'class' : re.compile('^(header|tabs[0-9]*|navpath)$')}) [tag.extract() for tag in header_tags] def FixAll(self): self.FixTableHeadings() self.RemoveTopHeadings() def __str__(self): return str(self.soup) def main(): '''Main entry for the doxy_cleanup utility doxy_cleanup takes a list of html files and modifies them in place.''' parser = optparse.OptionParser(usage='Usage: %prog [options] files...') parser.add_option('-m', '--move', dest='move', action='store_true', default=False, help='move html files to "original_html"') options, files = parser.parse_args() if not files: parser.print_usage() return 1 for filename in files: try: with open(filename, 'r') as file: html = file.read() print "Processing %s" % filename fixer = HTMLFixer(html) fixer.FixAll() with open(filename, 'w') as file: file.write(str(fixer)) if options.move: new_directory = os.path.join( os.path.dirname(os.path.dirname(filename)), 'original_html') if not os.path.exists(new_directory): os.mkdir(new_directory) shutil.move(filename, new_directory) except: print "Error while processing %s" % filename raise return 0 if __name__ == '__main__': sys.exit(main())
gandrewstone/BitcoinUnlimited
refs/heads/master
contrib/testgen/base58.py
2139
'''
Bitcoin base58 encoding and decoding.

Based on https://bitcointalk.org/index.php?topic=1026.0 (public domain)
'''
import hashlib

# for compatibility with following code...
class SHA256:
    new = hashlib.sha256

if str != bytes:
    # Python 3.x: bytes iteration already yields ints, and single bytes are
    # built with bytes((n,)), so shim ord/chr accordingly.
    def ord(c):
        return c
    def chr(n):
        return bytes( (n,) )

__b58chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
__b58base = len(__b58chars)
b58chars = __b58chars

def b58encode(v):
    """ encode v, which is a string of bytes, to base58.
    """
    long_value = 0
    for (i, c) in enumerate(v[::-1]):
        long_value += (256**i) * ord(c)

    result = ''
    while long_value >= __b58base:
        div, mod = divmod(long_value, __b58base)
        result = __b58chars[mod] + result
        long_value = div
    result = __b58chars[long_value] + result

    # Bitcoin does a little leading-zero-compression:
    # leading 0-bytes in the input become leading-1s
    nPad = 0
    for c in v:
        if c == '\0':
            nPad += 1
        else:
            break

    return (__b58chars[0]*nPad) + result

def b58decode(v, length = None):
    """ decode v into a string of len bytes, or None if the decoded
    value does not have the requested length.
    """
    long_value = 0
    for (i, c) in enumerate(v[::-1]):
        long_value += __b58chars.find(c) * (__b58base**i)

    result = bytes()
    while long_value >= 256:
        div, mod = divmod(long_value, 256)
        result = chr(mod) + result
        long_value = div
    result = chr(long_value) + result

    # Reverse the leading-zero-compression: leading '1' characters become
    # leading zero bytes.
    nPad = 0
    for c in v:
        if c == __b58chars[0]:
            nPad += 1
        else:
            break

    result = chr(0)*nPad + result
    if length is not None and len(result) != length:
        return None

    return result

def checksum(v):
    """Return 32-bit checksum based on SHA256"""
    return SHA256.new(SHA256.new(v).digest()).digest()[0:4]

def b58encode_chk(v):
    """b58encode a string, with 32-bit checksum"""
    return b58encode(v + checksum(v))

def b58decode_chk(v):
    """decode a base58 string, check and remove checksum.
    Returns None if decoding fails or the checksum does not match."""
    result = b58decode(v)
    if result is None:
        return None
    # Compute the checksum of the payload once and compare with the trailer.
    if result[-4:] == checksum(result[:-4]):
        return result[:-4]
    else:
        return None

def get_bcaddress_version(strAddress):
    """ Returns None if strAddress is invalid.  Otherwise returns integer version of address. """
    addr = b58decode_chk(strAddress)
    if addr is None or len(addr)!=21:
        return None
    version = addr[0]
    return ord(version)

if __name__ == '__main__':
    # Test case (from http://gitorious.org/bitcoin/python-base58.git)
    # Use == rather than `is` for integer comparison: identity on ints is an
    # implementation detail (small-int caching) and warns on Python 3.8+.
    assert get_bcaddress_version('15VjRaDX9zpbA8LVnbrCAFzrVzN7ixHNsC') == 0
    _ohai = 'o hai'.encode('ascii')
    _tmp = b58encode(_ohai)
    assert _tmp == 'DYB3oMS'
    assert b58decode(_tmp, 5) == _ohai
    print("Tests passed")
alexmorozov/django
refs/heads/master
tests/template_tests/filter_tests/test_linebreaksbr.py
331
from django.template.defaultfilters import linebreaksbr
from django.test import SimpleTestCase
from django.utils.safestring import mark_safe

from ..utils import setup


class LinebreaksbrTests(SimpleTestCase):
    """
    The contents in "linebreaksbr" are escaped according to the current
    autoescape setting.
    """

    @setup({'linebreaksbr01': '{{ a|linebreaksbr }} {{ b|linebreaksbr }}'})
    def test_linebreaksbr01(self):
        # The unsafe value is escaped before <br /> insertion; the safe one is not.
        context = {'a': 'x&\ny', 'b': mark_safe('x&\ny')}
        rendered = self.engine.render_to_string('linebreaksbr01', context)
        self.assertEqual(rendered, 'x&amp;<br />y x&<br />y')

    @setup({'linebreaksbr02': '{% autoescape off %}{{ a|linebreaksbr }} {{ b|linebreaksbr }}{% endautoescape %}'})
    def test_linebreaksbr02(self):
        # With autoescape off, neither value is escaped.
        context = {'a': 'x&\ny', 'b': mark_safe('x&\ny')}
        rendered = self.engine.render_to_string('linebreaksbr02', context)
        self.assertEqual(rendered, 'x&<br />y x&<br />y')


class FunctionTests(SimpleTestCase):

    def test_newline(self):
        result = linebreaksbr('line 1\nline 2')
        self.assertEqual(result, 'line 1<br />line 2')

    def test_carriage(self):
        result = linebreaksbr('line 1\rline 2')
        self.assertEqual(result, 'line 1<br />line 2')

    def test_carriage_newline(self):
        result = linebreaksbr('line 1\r\nline 2')
        self.assertEqual(result, 'line 1<br />line 2')

    def test_non_string_input(self):
        # Non-string input is coerced to its string form.
        self.assertEqual(linebreaksbr(123), '123')

    def test_autoescape(self):
        expected = 'foo<br />&lt;a&gt;bar&lt;/a&gt;<br />buz'
        self.assertEqual(linebreaksbr('foo\n<a>bar</a>\nbuz'), expected)

    def test_autoescape_off(self):
        expected = 'foo<br /><a>bar</a><br />buz'
        result = linebreaksbr('foo\n<a>bar</a>\nbuz', autoescape=False)
        self.assertEqual(result, expected)
Gabrielcarvfer/NS3
refs/heads/master
src/point-to-point/bindings/modulegen_customizations.py
376
def post_register_types(root_module):
    """Add the extra ns-3 headers these bindings require to the module."""
    for header in ('"ns3/queue.h"', '"ns3/error-model.h"'):
        root_module.add_include(header)
danieljaouen/ansible
refs/heads/devel
test/sanity/code-smell/replace-urlopen.py
61
#!/usr/bin/env python
"""Sanity check: flag direct use of ``urlopen`` outside whitelisted files."""
import os
import re
import sys


def main():
    """Scan the paths given on argv (or stdin, one per line) for `urlopen`.

    Prints one ``path:line:col`` diagnostic per offending occurrence,
    skipping files that legitimately use urlopen.
    """
    skip = set([
        'test/sanity/code-smell/%s' % os.path.basename(__file__),
        'lib/ansible/module_utils/six/__init__.py',
        'lib/ansible/module_utils/urls.py',
        'test/units/module_utils/urls/test_Request.py',
        'test/units/module_utils/urls/test_fetch_url.py',
    ])

    # Matches 'urlopen' only before any '#' on the line (i.e. not in a
    # trailing comment). Compiled once, outside the per-line loop.
    pattern = re.compile(r'^(?:[^#]*?)(urlopen)')

    for path in sys.argv[1:] or sys.stdin.read().splitlines():
        if path in skip:
            continue

        with open(path, 'r') as path_fd:
            # Iterate the file lazily; enumerate from 1 so reports use
            # 1-based line numbers directly.
            for lineno, text in enumerate(path_fd, start=1):
                match = pattern.search(text)

                if match:
                    print('%s:%d:%d: use `ansible.module_utils.urls.open_url` instead of `urlopen`' % (
                        path, lineno, match.start(1) + 1))


if __name__ == '__main__':
    main()
wiltonlazary/arangodb
refs/heads/devel
3rdParty/boost/1.61.0/tools/build/test/preprocessor.py
58
#!/usr/bin/python # Copyright 2003 Vladimir Prus # Copyright 2011 Steven Watanabe # Distributed under the Boost Software License, Version 1.0. # (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) # Test the C/C++ preprocessor. import BoostBuild t = BoostBuild.Tester() t.write("jamroot.jam", """ project ; preprocessed hello : hello.cpp ; preprocessed a : a.c ; exe hello.exe : hello a : <define>FAIL ; """) t.write("hello.cpp", """ #ifndef __cplusplus #error "This file must be compiled as C++" #endif #ifdef FAIL #error "Not preprocessed?" #endif extern "C" int foo(); int main() { return foo(); } """) t.write("a.c", """ /* This will not compile unless in C mode. */ #ifdef __cplusplus #error "This file must be compiled as C" #endif #ifdef FAIL #error "Not preprocessed?" #endif int foo() { int new = 0; new = (new+1)*7; return new; } """) t.run_build_system() t.expect_addition("bin/$toolset/debug/hello.ii") t.expect_addition("bin/$toolset/debug/a.i") t.expect_addition("bin/$toolset/debug/hello.exe") t.cleanup()
stephentyrone/swift
refs/heads/master
utils/lldb/lldbToolBox.py
4
""" LLDB Helpers for working with the swift compiler. Load into LLDB with 'command script import /path/to/lldbToolBox.py' This will also import LLVM data formatters as well, assuming that llvm is next to the swift checkout. """ import argparse import os import shlex import subprocess import sys import tempfile import lldb REPO_BASE = os.path.abspath(os.path.join(__file__, os.pardir, os.pardir, os.pardir, os.pardir)) SWIFT_REPO = os.path.join(REPO_BASE, "swift") LLVM_REPO = os.path.join(REPO_BASE, "llvm-project") LLVM_DATAFORMATTER_PATH = os.path.join(LLVM_REPO, "llvm", "utils", "lldbDataFormatters.py") SWIFT_DATAFORMATTER_PATH = os.path.join(SWIFT_REPO, "utils", "lldb", "lldbSwiftDataFormatters.py") def import_llvm_dataformatters(debugger): if not os.access(LLVM_DATAFORMATTER_PATH, os.F_OK): print("WARNING! Could not find LLVM data formatters!") return cmd = 'command script import {}'.format(LLVM_DATAFORMATTER_PATH) debugger.HandleCommand(cmd) print("Loaded LLVM data formatters.") def import_swift_dataformatters(debugger): if not os.access(SWIFT_DATAFORMATTER_PATH, os.F_OK): print("WARNING! Could not find Swift data formatters!") return cmd = 'command script import {}'.format(SWIFT_DATAFORMATTER_PATH) debugger.HandleCommand(cmd) print("Loaded Swift data formatters.") VIEWCFG_PATH = os.path.join(SWIFT_REPO, "utils", "viewcfg") BLOCKIFYASM_PATH = os.path.join(SWIFT_REPO, "utils", "dev-scripts", "blockifyasm") def disassemble_asm_cfg(debugger, command, exec_ctx, result, internal_dict): """ This function disassembles the current assembly frame into a temporary file and then uses that temporary file as input to blockifyasm | viewcfg. This will cause a pdf of the cfg to be opened on Darwin. 
""" d = exec_ctx.frame.Disassemble() with tempfile.TemporaryFile() as f: f.write(bytes(d, 'utf-8')) f.flush() f.seek(0) p1 = subprocess.Popen([BLOCKIFYASM_PATH], stdin=f, stdout=subprocess.PIPE) subprocess.Popen([VIEWCFG_PATH], stdin=p1.stdout) p1.stdout.close() # Allow p1 to receive a SIGPIPE if p2 exits. def disassemble_to_file(debugger, command, exec_ctx, result, internal_dict): """This function disassembles the current assembly frame into a file specified by the user. """ parser = argparse.ArgumentParser(prog='disassemble-to-file', description=""" Dump the disassembly of the current frame to the specified file. """) parser.add_argument('file', type=argparse.FileType('w'), default=sys.stdout) args = parser.parse_args(shlex.split(command)) args.file.write(exec_ctx.frame.disassembly) def sequence(debugger, command, exec_ctx, result, internal_dict): """ Combine multiple semicolon separated lldb commands into one command. This command is particularly useful for defining aliases and breakpoint commands. Some examples: # Define an alias that prints rax and also steps one instruction. command alias xs sequence p/x $rax; stepi # Breakpoint command to show the frame's info and arguments. breakpoint command add -o 'seq frame info; reg read arg1 arg2 arg3' # Override `b` to allow a condition to be specified. 
For example: # b someMethod if someVar > 2 command regex b s/(.+) if (.+)/seq _regexp-break %1; break mod -c "%2"/ s/(.*)/_regexp-break %1/ """ interpreter = debugger.GetCommandInterpreter() for subcommand in command.split(';'): subcommand = subcommand.strip() if not subcommand: continue # skip empty commands ret = lldb.SBCommandReturnObject() interpreter.HandleCommand(subcommand, exec_ctx, ret) if ret.GetOutput(): print >>result, ret.GetOutput().strip() if not ret.Succeeded(): result.SetError(ret.GetError()) result.SetStatus(ret.GetStatus()) return def __lldb_init_module(debugger, internal_dict): import_llvm_dataformatters(debugger) import_swift_dataformatters(debugger) debugger.HandleCommand('command script add disassemble-asm-cfg ' '-f lldbToolBox.disassemble_asm_cfg') debugger.HandleCommand('command script add disassemble-to-file ' '-f lldbToolBox.disassemble_to_file') debugger.HandleCommand('command script add sequence ' '-h "Run multiple semicolon separated commands" ' '-f lldbToolBox.sequence')
mehmetkose/django-favorites
refs/heads/master
favorites/admin.py
1
from django.contrib import admin

# Explicit import instead of `from models import *`; only Favorite is
# registered here. (Implicit-relative import kept for the Python 2 layout.)
from models import Favorite

admin.site.register(Favorite)
hainm/scikit-learn
refs/heads/master
sklearn/decomposition/tests/test_fastica.py
272
""" Test the fastica algorithm. """ import itertools import warnings import numpy as np from scipy import stats from nose.tools import assert_raises from sklearn.utils.testing import assert_almost_equal from sklearn.utils.testing import assert_array_almost_equal from sklearn.utils.testing import assert_true from sklearn.utils.testing import assert_less from sklearn.utils.testing import assert_equal from sklearn.utils.testing import assert_warns from sklearn.decomposition import FastICA, fastica, PCA from sklearn.decomposition.fastica_ import _gs_decorrelation from sklearn.externals.six import moves def center_and_norm(x, axis=-1): """ Centers and norms x **in place** Parameters ----------- x: ndarray Array with an axis of observations (statistical units) measured on random variables. axis: int, optional Axis along which the mean and variance are calculated. """ x = np.rollaxis(x, axis) x -= x.mean(axis=0) x /= x.std(axis=0) def test_gs(): # Test gram schmidt orthonormalization # generate a random orthogonal matrix rng = np.random.RandomState(0) W, _, _ = np.linalg.svd(rng.randn(10, 10)) w = rng.randn(10) _gs_decorrelation(w, W, 10) assert_less((w ** 2).sum(), 1.e-10) w = rng.randn(10) u = _gs_decorrelation(w, W, 5) tmp = np.dot(u, W.T) assert_less((tmp[:5] ** 2).sum(), 1.e-10) def test_fastica_simple(add_noise=False): # Test the FastICA algorithm on very simple data. 
rng = np.random.RandomState(0) # scipy.stats uses the global RNG: np.random.seed(0) n_samples = 1000 # Generate two sources: s1 = (2 * np.sin(np.linspace(0, 100, n_samples)) > 0) - 1 s2 = stats.t.rvs(1, size=n_samples) s = np.c_[s1, s2].T center_and_norm(s) s1, s2 = s # Mixing angle phi = 0.6 mixing = np.array([[np.cos(phi), np.sin(phi)], [np.sin(phi), -np.cos(phi)]]) m = np.dot(mixing, s) if add_noise: m += 0.1 * rng.randn(2, 1000) center_and_norm(m) # function as fun arg def g_test(x): return x ** 3, (3 * x ** 2).mean(axis=-1) algos = ['parallel', 'deflation'] nls = ['logcosh', 'exp', 'cube', g_test] whitening = [True, False] for algo, nl, whiten in itertools.product(algos, nls, whitening): if whiten: k_, mixing_, s_ = fastica(m.T, fun=nl, algorithm=algo) assert_raises(ValueError, fastica, m.T, fun=np.tanh, algorithm=algo) else: X = PCA(n_components=2, whiten=True).fit_transform(m.T) k_, mixing_, s_ = fastica(X, fun=nl, algorithm=algo, whiten=False) assert_raises(ValueError, fastica, X, fun=np.tanh, algorithm=algo) s_ = s_.T # Check that the mixing model described in the docstring holds: if whiten: assert_almost_equal(s_, np.dot(np.dot(mixing_, k_), m)) center_and_norm(s_) s1_, s2_ = s_ # Check to see if the sources have been estimated # in the wrong order if abs(np.dot(s1_, s2)) > abs(np.dot(s1_, s1)): s2_, s1_ = s_ s1_ *= np.sign(np.dot(s1_, s1)) s2_ *= np.sign(np.dot(s2_, s2)) # Check that we have estimated the original sources if not add_noise: assert_almost_equal(np.dot(s1_, s1) / n_samples, 1, decimal=2) assert_almost_equal(np.dot(s2_, s2) / n_samples, 1, decimal=2) else: assert_almost_equal(np.dot(s1_, s1) / n_samples, 1, decimal=1) assert_almost_equal(np.dot(s2_, s2) / n_samples, 1, decimal=1) # Test FastICA class _, _, sources_fun = fastica(m.T, fun=nl, algorithm=algo, random_state=0) ica = FastICA(fun=nl, algorithm=algo, random_state=0) sources = ica.fit_transform(m.T) assert_equal(ica.components_.shape, (2, 2)) assert_equal(sources.shape, (1000, 2)) 
assert_array_almost_equal(sources_fun, sources) assert_array_almost_equal(sources, ica.transform(m.T)) assert_equal(ica.mixing_.shape, (2, 2)) for fn in [np.tanh, "exp(-.5(x^2))"]: ica = FastICA(fun=fn, algorithm=algo, random_state=0) assert_raises(ValueError, ica.fit, m.T) assert_raises(TypeError, FastICA(fun=moves.xrange(10)).fit, m.T) def test_fastica_nowhiten(): m = [[0, 1], [1, 0]] # test for issue #697 ica = FastICA(n_components=1, whiten=False, random_state=0) assert_warns(UserWarning, ica.fit, m) assert_true(hasattr(ica, 'mixing_')) def test_non_square_fastica(add_noise=False): # Test the FastICA algorithm on very simple data. rng = np.random.RandomState(0) n_samples = 1000 # Generate two sources: t = np.linspace(0, 100, n_samples) s1 = np.sin(t) s2 = np.ceil(np.sin(np.pi * t)) s = np.c_[s1, s2].T center_and_norm(s) s1, s2 = s # Mixing matrix mixing = rng.randn(6, 2) m = np.dot(mixing, s) if add_noise: m += 0.1 * rng.randn(6, n_samples) center_and_norm(m) k_, mixing_, s_ = fastica(m.T, n_components=2, random_state=rng) s_ = s_.T # Check that the mixing model described in the docstring holds: assert_almost_equal(s_, np.dot(np.dot(mixing_, k_), m)) center_and_norm(s_) s1_, s2_ = s_ # Check to see if the sources have been estimated # in the wrong order if abs(np.dot(s1_, s2)) > abs(np.dot(s1_, s1)): s2_, s1_ = s_ s1_ *= np.sign(np.dot(s1_, s1)) s2_ *= np.sign(np.dot(s2_, s2)) # Check that we have estimated the original sources if not add_noise: assert_almost_equal(np.dot(s1_, s1) / n_samples, 1, decimal=3) assert_almost_equal(np.dot(s2_, s2) / n_samples, 1, decimal=3) def test_fit_transform(): # Test FastICA.fit_transform rng = np.random.RandomState(0) X = rng.random_sample((100, 10)) for whiten, n_components in [[True, 5], [False, None]]: n_components_ = (n_components if n_components is not None else X.shape[1]) ica = FastICA(n_components=n_components, whiten=whiten, random_state=0) Xt = ica.fit_transform(X) assert_equal(ica.components_.shape, (n_components_, 
10)) assert_equal(Xt.shape, (100, n_components_)) ica = FastICA(n_components=n_components, whiten=whiten, random_state=0) ica.fit(X) assert_equal(ica.components_.shape, (n_components_, 10)) Xt2 = ica.transform(X) assert_array_almost_equal(Xt, Xt2) def test_inverse_transform(): # Test FastICA.inverse_transform n_features = 10 n_samples = 100 n1, n2 = 5, 10 rng = np.random.RandomState(0) X = rng.random_sample((n_samples, n_features)) expected = {(True, n1): (n_features, n1), (True, n2): (n_features, n2), (False, n1): (n_features, n2), (False, n2): (n_features, n2)} for whiten in [True, False]: for n_components in [n1, n2]: n_components_ = (n_components if n_components is not None else X.shape[1]) ica = FastICA(n_components=n_components, random_state=rng, whiten=whiten) with warnings.catch_warnings(record=True): # catch "n_components ignored" warning Xt = ica.fit_transform(X) expected_shape = expected[(whiten, n_components_)] assert_equal(ica.mixing_.shape, expected_shape) X2 = ica.inverse_transform(Xt) assert_equal(X.shape, X2.shape) # reversibility test in non-reduction case if n_components == X.shape[1]: assert_array_almost_equal(X, X2)
p0psicles/SickRage
refs/heads/master
sickrage/providers/__init__.py
18
# coding=utf-8 __all__ = []
bgris/ODL_bgris
refs/heads/master
lib/python3.5/site-packages/prompt_toolkit/filters/utils.py
23
from __future__ import unicode_literals
from .base import Always, Never
from .types import SimpleFilter, CLIFilter

__all__ = (
    'to_cli_filter',
    'to_simple_filter',
)

# Shared singleton filters backing the two boolean constants.
_always = Always()
_never = Never()


def to_simple_filter(bool_or_filter):
    """
    Coerce *bool_or_filter* to a :class:`.SimpleFilter`.

    ``True``/``False`` map to the shared ``Always``/``Never`` instances;
    an existing ``SimpleFilter`` is returned unchanged.  Anything else
    raises ``TypeError``.
    """
    if isinstance(bool_or_filter, SimpleFilter):
        return bool_or_filter
    if isinstance(bool_or_filter, bool):
        return _always if bool_or_filter else _never
    raise TypeError('Expecting a bool or a SimpleFilter instance. Got %r' % bool_or_filter)


def to_cli_filter(bool_or_filter):
    """
    Coerce *bool_or_filter* to a :class:`.CLIFilter`.

    ``True``/``False`` map to the shared ``Always``/``Never`` instances;
    an existing ``CLIFilter`` is returned unchanged.  Anything else
    raises ``TypeError``.
    """
    if isinstance(bool_or_filter, CLIFilter):
        return bool_or_filter
    if isinstance(bool_or_filter, bool):
        return _always if bool_or_filter else _never
    raise TypeError('Expecting a bool or a CLIFilter instance. Got %r' % bool_or_filter)
RydrDojo/Ridr
refs/heads/master
pylotVenv/lib/python2.7/site-packages/sqlalchemy_utils/primitives/country.py
5
import six

from .. import i18n
from ..utils import str_coercible


@str_coercible
class Country(object):
    """
    Country class wraps a 2 to 3 letter country code. It provides various
    convenience properties and methods.

    ::

        from babel import Locale
        from sqlalchemy_utils import Country, i18n

        # First lets add a locale getter for testing purposes
        i18n.get_locale = lambda: Locale('en')

        Country('FI').name  # Finland
        Country('FI').code  # FI

        Country(Country('FI')).code  # 'FI'

    Country always validates the given code.

    ::

        Country(None)  # raises TypeError
        Country('UnknownCode')  # raises ValueError

    Country supports equality operators.

    ::

        Country('FI') == Country('FI')
        Country('FI') != Country('US')

    Country objects are hashable.

    ::

        assert hash(Country('FI')) == hash('FI')
    """
    def __init__(self, code_or_country):
        # Accept either another Country (copy its code) or a plain string,
        # which is validated before being stored.
        if isinstance(code_or_country, Country):
            self.code = code_or_country.code
        elif isinstance(code_or_country, six.string_types):
            self.validate(code_or_country)
            self.code = code_or_country
        else:
            raise TypeError(
                "Country() argument must be a string or a country, not '{0}'"
                .format(
                    type(code_or_country).__name__
                )
            )

    @property
    def name(self):
        # Territory name localized via the application's locale getter.
        return i18n.get_locale().territories[self.code]

    @classmethod
    def validate(cls, code):
        """Raise ValueError if *code* is not a known territory code.

        Fix: the first parameter of this classmethod was previously named
        ``self`` even though it receives the class object; renamed to the
        conventional ``cls`` (no caller-visible change).
        """
        try:
            i18n.babel.Locale('en').territories[code]
        except KeyError:
            raise ValueError(
                'Could not convert string to country code: {0}'.format(code)
            )

    def __eq__(self, other):
        if isinstance(other, Country):
            return self.code == other.code
        elif isinstance(other, six.string_types):
            return self.code == other
        else:
            return NotImplemented

    def __hash__(self):
        return hash(self.code)

    def __ne__(self, other):
        # Uses `==` (not __eq__ directly) so the NotImplemented fallback
        # machinery still applies for foreign types.
        return not (self == other)

    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, self.code)

    def __unicode__(self):
        # Presumably used by @str_coercible to provide str() coercion —
        # TODO confirm against ..utils.str_coercible.
        return self.name
gieseke/bufferkdtree
refs/heads/master
bufferkdtree/neighbors/util.py
1
#
# Copyright (C) 2013-2016 Fabian Gieseke <fabian.gieseke@di.ku.dk>
# License: GPL v2
#

import math
import time


def compute_optimal_tree_depth(model, Xtrain, Xtest, target="test", tree_depths=None, verbose=1):
    """ Computes the optimal tree depth.

    Parameters
    ----------
    model : object
        Nearest-neighbor model exposing ``tree_depth``, ``fit(X)`` and
        ``kneighbors(X)`` (and a ``verbose`` attribute for some targets).
    Xtrain, Xtest : array-like
        Training and test patterns.
    target : {'train', 'test', 'both'}
        Which phase(s) to time when comparing depths.
    tree_depths : iterable of int or None
        Candidate depths; defaults to
        ``range(2, floor(log2(len(Xtrain))) - 1)``.
    verbose : int
        Verbosity level (only consulted when ``target == 'both'``).

    Returns
    -------
    opt_height : int
        The optimal tree depth based on the target provided (the depth
        with the smallest measured runtime).
    """
    ALLOWED_TARGETS = ['train', 'test', 'both']
    if target not in ALLOWED_TARGETS:
        # Fix: the original called the Python-2-only builtin `unicode`
        # here, which raises NameError on Python 3; `str` works on both.
        raise Exception("Target is not valid (allowed ones are " +
                        str(ALLOWED_TARGETS) + ": " + str(target))

    if tree_depths is None:
        max_depth = int(math.floor(math.log(len(Xtrain), 2)))
        tree_depths = range(2, max_depth - 1)

    kwargs = {'target': target, 'tree_depths': tree_depths, 'verbose': verbose}
    return _conduct_tree_depths_comparison(model, Xtrain, Xtest, **kwargs)


def _conduct_tree_depths_comparison(model, Xtrain, Xtest, target="test",
                                    tree_depths=None, verbose=1):
    """Time the model at each candidate depth; return the fastest depth.

    Depending on *target*, only querying ('test'), only fitting ('train'),
    or both phases are included in the measured runtime.
    """
    runtimes = {}

    if target == "test":
        for tree_depth in tree_depths:
            #model = copy.deepcopy(model)
            model.tree_depth = tree_depth
            model.fit(Xtrain)
            start = time.time()
            model.kneighbors(Xtest)
            end = time.time()
            # NOTE(review): this branch gates output on model.verbose while
            # the 'both' branch uses the `verbose` parameter — kept as-is to
            # preserve behavior, but the inconsistency looks unintentional.
            if model.verbose:
                print("tree_depth %i -> %f" % (tree_depth, end - start))
            runtimes[tree_depth] = end - start

    elif target == "train":
        for tree_depth in tree_depths:
            #model = copy.deepcopy(model)
            model.tree_depth = tree_depth
            start = time.time()
            model.fit(Xtrain)
            end = time.time()
            if model.verbose:
                print("tree_depth %i -> %f" % (tree_depth, end - start))
            runtimes[tree_depth] = end - start

    elif target == "both":
        for tree_depth in tree_depths:
            #model = copy.deepcopy(model)
            model.tree_depth = tree_depth
            start = time.time()
            model.fit(Xtrain)
            model.kneighbors(Xtest)
            end = time.time()
            if verbose > 0:
                print("tree_depth %i -> %f" % (tree_depth, end - start))
            runtimes[tree_depth] = end - start

    else:
        # Fix: `unicode` -> `str` (Python 3 compatibility), as above.
        raise Exception("Unknown target: " + str(target))

    # Depth with the smallest measured runtime.
    return min(runtimes, key=runtimes.get)
ayesandarmoe/microblog_flask_tutorial
refs/heads/master
flask/lib/python2.7/encodings/mac_roman.py
593
""" Python Character Mapping Codec mac_roman generated from 'MAPPINGS/VENDORS/APPLE/ROMAN.TXT' with gencodec.py. """#" import codecs ### Codec APIs class Codec(codecs.Codec): def encode(self,input,errors='strict'): return codecs.charmap_encode(input,errors,encoding_table) def decode(self,input,errors='strict'): return codecs.charmap_decode(input,errors,decoding_table) class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input, final=False): return codecs.charmap_encode(input,self.errors,encoding_table)[0] class IncrementalDecoder(codecs.IncrementalDecoder): def decode(self, input, final=False): return codecs.charmap_decode(input,self.errors,decoding_table)[0] class StreamWriter(Codec,codecs.StreamWriter): pass class StreamReader(Codec,codecs.StreamReader): pass ### encodings module API def getregentry(): return codecs.CodecInfo( name='mac-roman', encode=Codec().encode, decode=Codec().decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter, ) ### Decoding Table decoding_table = ( u'\x00' # 0x00 -> CONTROL CHARACTER u'\x01' # 0x01 -> CONTROL CHARACTER u'\x02' # 0x02 -> CONTROL CHARACTER u'\x03' # 0x03 -> CONTROL CHARACTER u'\x04' # 0x04 -> CONTROL CHARACTER u'\x05' # 0x05 -> CONTROL CHARACTER u'\x06' # 0x06 -> CONTROL CHARACTER u'\x07' # 0x07 -> CONTROL CHARACTER u'\x08' # 0x08 -> CONTROL CHARACTER u'\t' # 0x09 -> CONTROL CHARACTER u'\n' # 0x0A -> CONTROL CHARACTER u'\x0b' # 0x0B -> CONTROL CHARACTER u'\x0c' # 0x0C -> CONTROL CHARACTER u'\r' # 0x0D -> CONTROL CHARACTER u'\x0e' # 0x0E -> CONTROL CHARACTER u'\x0f' # 0x0F -> CONTROL CHARACTER u'\x10' # 0x10 -> CONTROL CHARACTER u'\x11' # 0x11 -> CONTROL CHARACTER u'\x12' # 0x12 -> CONTROL CHARACTER u'\x13' # 0x13 -> CONTROL CHARACTER u'\x14' # 0x14 -> CONTROL CHARACTER u'\x15' # 0x15 -> CONTROL CHARACTER u'\x16' # 0x16 -> CONTROL CHARACTER u'\x17' # 0x17 -> CONTROL CHARACTER u'\x18' # 0x18 -> CONTROL CHARACTER u'\x19' # 
0x19 -> CONTROL CHARACTER u'\x1a' # 0x1A -> CONTROL CHARACTER u'\x1b' # 0x1B -> CONTROL CHARACTER u'\x1c' # 0x1C -> CONTROL CHARACTER u'\x1d' # 0x1D -> CONTROL CHARACTER u'\x1e' # 0x1E -> CONTROL CHARACTER u'\x1f' # 0x1F -> CONTROL CHARACTER u' ' # 0x20 -> SPACE u'!' # 0x21 -> EXCLAMATION MARK u'"' # 0x22 -> QUOTATION MARK u'#' # 0x23 -> NUMBER SIGN u'$' # 0x24 -> DOLLAR SIGN u'%' # 0x25 -> PERCENT SIGN u'&' # 0x26 -> AMPERSAND u"'" # 0x27 -> APOSTROPHE u'(' # 0x28 -> LEFT PARENTHESIS u')' # 0x29 -> RIGHT PARENTHESIS u'*' # 0x2A -> ASTERISK u'+' # 0x2B -> PLUS SIGN u',' # 0x2C -> COMMA u'-' # 0x2D -> HYPHEN-MINUS u'.' # 0x2E -> FULL STOP u'/' # 0x2F -> SOLIDUS u'0' # 0x30 -> DIGIT ZERO u'1' # 0x31 -> DIGIT ONE u'2' # 0x32 -> DIGIT TWO u'3' # 0x33 -> DIGIT THREE u'4' # 0x34 -> DIGIT FOUR u'5' # 0x35 -> DIGIT FIVE u'6' # 0x36 -> DIGIT SIX u'7' # 0x37 -> DIGIT SEVEN u'8' # 0x38 -> DIGIT EIGHT u'9' # 0x39 -> DIGIT NINE u':' # 0x3A -> COLON u';' # 0x3B -> SEMICOLON u'<' # 0x3C -> LESS-THAN SIGN u'=' # 0x3D -> EQUALS SIGN u'>' # 0x3E -> GREATER-THAN SIGN u'?' 
# 0x3F -> QUESTION MARK u'@' # 0x40 -> COMMERCIAL AT u'A' # 0x41 -> LATIN CAPITAL LETTER A u'B' # 0x42 -> LATIN CAPITAL LETTER B u'C' # 0x43 -> LATIN CAPITAL LETTER C u'D' # 0x44 -> LATIN CAPITAL LETTER D u'E' # 0x45 -> LATIN CAPITAL LETTER E u'F' # 0x46 -> LATIN CAPITAL LETTER F u'G' # 0x47 -> LATIN CAPITAL LETTER G u'H' # 0x48 -> LATIN CAPITAL LETTER H u'I' # 0x49 -> LATIN CAPITAL LETTER I u'J' # 0x4A -> LATIN CAPITAL LETTER J u'K' # 0x4B -> LATIN CAPITAL LETTER K u'L' # 0x4C -> LATIN CAPITAL LETTER L u'M' # 0x4D -> LATIN CAPITAL LETTER M u'N' # 0x4E -> LATIN CAPITAL LETTER N u'O' # 0x4F -> LATIN CAPITAL LETTER O u'P' # 0x50 -> LATIN CAPITAL LETTER P u'Q' # 0x51 -> LATIN CAPITAL LETTER Q u'R' # 0x52 -> LATIN CAPITAL LETTER R u'S' # 0x53 -> LATIN CAPITAL LETTER S u'T' # 0x54 -> LATIN CAPITAL LETTER T u'U' # 0x55 -> LATIN CAPITAL LETTER U u'V' # 0x56 -> LATIN CAPITAL LETTER V u'W' # 0x57 -> LATIN CAPITAL LETTER W u'X' # 0x58 -> LATIN CAPITAL LETTER X u'Y' # 0x59 -> LATIN CAPITAL LETTER Y u'Z' # 0x5A -> LATIN CAPITAL LETTER Z u'[' # 0x5B -> LEFT SQUARE BRACKET u'\\' # 0x5C -> REVERSE SOLIDUS u']' # 0x5D -> RIGHT SQUARE BRACKET u'^' # 0x5E -> CIRCUMFLEX ACCENT u'_' # 0x5F -> LOW LINE u'`' # 0x60 -> GRAVE ACCENT u'a' # 0x61 -> LATIN SMALL LETTER A u'b' # 0x62 -> LATIN SMALL LETTER B u'c' # 0x63 -> LATIN SMALL LETTER C u'd' # 0x64 -> LATIN SMALL LETTER D u'e' # 0x65 -> LATIN SMALL LETTER E u'f' # 0x66 -> LATIN SMALL LETTER F u'g' # 0x67 -> LATIN SMALL LETTER G u'h' # 0x68 -> LATIN SMALL LETTER H u'i' # 0x69 -> LATIN SMALL LETTER I u'j' # 0x6A -> LATIN SMALL LETTER J u'k' # 0x6B -> LATIN SMALL LETTER K u'l' # 0x6C -> LATIN SMALL LETTER L u'm' # 0x6D -> LATIN SMALL LETTER M u'n' # 0x6E -> LATIN SMALL LETTER N u'o' # 0x6F -> LATIN SMALL LETTER O u'p' # 0x70 -> LATIN SMALL LETTER P u'q' # 0x71 -> LATIN SMALL LETTER Q u'r' # 0x72 -> LATIN SMALL LETTER R u's' # 0x73 -> LATIN SMALL LETTER S u't' # 0x74 -> LATIN SMALL LETTER T u'u' # 0x75 -> LATIN SMALL LETTER U u'v' # 0x76 -> 
LATIN SMALL LETTER V u'w' # 0x77 -> LATIN SMALL LETTER W u'x' # 0x78 -> LATIN SMALL LETTER X u'y' # 0x79 -> LATIN SMALL LETTER Y u'z' # 0x7A -> LATIN SMALL LETTER Z u'{' # 0x7B -> LEFT CURLY BRACKET u'|' # 0x7C -> VERTICAL LINE u'}' # 0x7D -> RIGHT CURLY BRACKET u'~' # 0x7E -> TILDE u'\x7f' # 0x7F -> CONTROL CHARACTER u'\xc4' # 0x80 -> LATIN CAPITAL LETTER A WITH DIAERESIS u'\xc5' # 0x81 -> LATIN CAPITAL LETTER A WITH RING ABOVE u'\xc7' # 0x82 -> LATIN CAPITAL LETTER C WITH CEDILLA u'\xc9' # 0x83 -> LATIN CAPITAL LETTER E WITH ACUTE u'\xd1' # 0x84 -> LATIN CAPITAL LETTER N WITH TILDE u'\xd6' # 0x85 -> LATIN CAPITAL LETTER O WITH DIAERESIS u'\xdc' # 0x86 -> LATIN CAPITAL LETTER U WITH DIAERESIS u'\xe1' # 0x87 -> LATIN SMALL LETTER A WITH ACUTE u'\xe0' # 0x88 -> LATIN SMALL LETTER A WITH GRAVE u'\xe2' # 0x89 -> LATIN SMALL LETTER A WITH CIRCUMFLEX u'\xe4' # 0x8A -> LATIN SMALL LETTER A WITH DIAERESIS u'\xe3' # 0x8B -> LATIN SMALL LETTER A WITH TILDE u'\xe5' # 0x8C -> LATIN SMALL LETTER A WITH RING ABOVE u'\xe7' # 0x8D -> LATIN SMALL LETTER C WITH CEDILLA u'\xe9' # 0x8E -> LATIN SMALL LETTER E WITH ACUTE u'\xe8' # 0x8F -> LATIN SMALL LETTER E WITH GRAVE u'\xea' # 0x90 -> LATIN SMALL LETTER E WITH CIRCUMFLEX u'\xeb' # 0x91 -> LATIN SMALL LETTER E WITH DIAERESIS u'\xed' # 0x92 -> LATIN SMALL LETTER I WITH ACUTE u'\xec' # 0x93 -> LATIN SMALL LETTER I WITH GRAVE u'\xee' # 0x94 -> LATIN SMALL LETTER I WITH CIRCUMFLEX u'\xef' # 0x95 -> LATIN SMALL LETTER I WITH DIAERESIS u'\xf1' # 0x96 -> LATIN SMALL LETTER N WITH TILDE u'\xf3' # 0x97 -> LATIN SMALL LETTER O WITH ACUTE u'\xf2' # 0x98 -> LATIN SMALL LETTER O WITH GRAVE u'\xf4' # 0x99 -> LATIN SMALL LETTER O WITH CIRCUMFLEX u'\xf6' # 0x9A -> LATIN SMALL LETTER O WITH DIAERESIS u'\xf5' # 0x9B -> LATIN SMALL LETTER O WITH TILDE u'\xfa' # 0x9C -> LATIN SMALL LETTER U WITH ACUTE u'\xf9' # 0x9D -> LATIN SMALL LETTER U WITH GRAVE u'\xfb' # 0x9E -> LATIN SMALL LETTER U WITH CIRCUMFLEX u'\xfc' # 0x9F -> LATIN SMALL LETTER U WITH 
DIAERESIS u'\u2020' # 0xA0 -> DAGGER u'\xb0' # 0xA1 -> DEGREE SIGN u'\xa2' # 0xA2 -> CENT SIGN u'\xa3' # 0xA3 -> POUND SIGN u'\xa7' # 0xA4 -> SECTION SIGN u'\u2022' # 0xA5 -> BULLET u'\xb6' # 0xA6 -> PILCROW SIGN u'\xdf' # 0xA7 -> LATIN SMALL LETTER SHARP S u'\xae' # 0xA8 -> REGISTERED SIGN u'\xa9' # 0xA9 -> COPYRIGHT SIGN u'\u2122' # 0xAA -> TRADE MARK SIGN u'\xb4' # 0xAB -> ACUTE ACCENT u'\xa8' # 0xAC -> DIAERESIS u'\u2260' # 0xAD -> NOT EQUAL TO u'\xc6' # 0xAE -> LATIN CAPITAL LETTER AE u'\xd8' # 0xAF -> LATIN CAPITAL LETTER O WITH STROKE u'\u221e' # 0xB0 -> INFINITY u'\xb1' # 0xB1 -> PLUS-MINUS SIGN u'\u2264' # 0xB2 -> LESS-THAN OR EQUAL TO u'\u2265' # 0xB3 -> GREATER-THAN OR EQUAL TO u'\xa5' # 0xB4 -> YEN SIGN u'\xb5' # 0xB5 -> MICRO SIGN u'\u2202' # 0xB6 -> PARTIAL DIFFERENTIAL u'\u2211' # 0xB7 -> N-ARY SUMMATION u'\u220f' # 0xB8 -> N-ARY PRODUCT u'\u03c0' # 0xB9 -> GREEK SMALL LETTER PI u'\u222b' # 0xBA -> INTEGRAL u'\xaa' # 0xBB -> FEMININE ORDINAL INDICATOR u'\xba' # 0xBC -> MASCULINE ORDINAL INDICATOR u'\u03a9' # 0xBD -> GREEK CAPITAL LETTER OMEGA u'\xe6' # 0xBE -> LATIN SMALL LETTER AE u'\xf8' # 0xBF -> LATIN SMALL LETTER O WITH STROKE u'\xbf' # 0xC0 -> INVERTED QUESTION MARK u'\xa1' # 0xC1 -> INVERTED EXCLAMATION MARK u'\xac' # 0xC2 -> NOT SIGN u'\u221a' # 0xC3 -> SQUARE ROOT u'\u0192' # 0xC4 -> LATIN SMALL LETTER F WITH HOOK u'\u2248' # 0xC5 -> ALMOST EQUAL TO u'\u2206' # 0xC6 -> INCREMENT u'\xab' # 0xC7 -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK u'\xbb' # 0xC8 -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK u'\u2026' # 0xC9 -> HORIZONTAL ELLIPSIS u'\xa0' # 0xCA -> NO-BREAK SPACE u'\xc0' # 0xCB -> LATIN CAPITAL LETTER A WITH GRAVE u'\xc3' # 0xCC -> LATIN CAPITAL LETTER A WITH TILDE u'\xd5' # 0xCD -> LATIN CAPITAL LETTER O WITH TILDE u'\u0152' # 0xCE -> LATIN CAPITAL LIGATURE OE u'\u0153' # 0xCF -> LATIN SMALL LIGATURE OE u'\u2013' # 0xD0 -> EN DASH u'\u2014' # 0xD1 -> EM DASH u'\u201c' # 0xD2 -> LEFT DOUBLE QUOTATION MARK u'\u201d' # 0xD3 -> RIGHT 
DOUBLE QUOTATION MARK u'\u2018' # 0xD4 -> LEFT SINGLE QUOTATION MARK u'\u2019' # 0xD5 -> RIGHT SINGLE QUOTATION MARK u'\xf7' # 0xD6 -> DIVISION SIGN u'\u25ca' # 0xD7 -> LOZENGE u'\xff' # 0xD8 -> LATIN SMALL LETTER Y WITH DIAERESIS u'\u0178' # 0xD9 -> LATIN CAPITAL LETTER Y WITH DIAERESIS u'\u2044' # 0xDA -> FRACTION SLASH u'\u20ac' # 0xDB -> EURO SIGN u'\u2039' # 0xDC -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK u'\u203a' # 0xDD -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK u'\ufb01' # 0xDE -> LATIN SMALL LIGATURE FI u'\ufb02' # 0xDF -> LATIN SMALL LIGATURE FL u'\u2021' # 0xE0 -> DOUBLE DAGGER u'\xb7' # 0xE1 -> MIDDLE DOT u'\u201a' # 0xE2 -> SINGLE LOW-9 QUOTATION MARK u'\u201e' # 0xE3 -> DOUBLE LOW-9 QUOTATION MARK u'\u2030' # 0xE4 -> PER MILLE SIGN u'\xc2' # 0xE5 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX u'\xca' # 0xE6 -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX u'\xc1' # 0xE7 -> LATIN CAPITAL LETTER A WITH ACUTE u'\xcb' # 0xE8 -> LATIN CAPITAL LETTER E WITH DIAERESIS u'\xc8' # 0xE9 -> LATIN CAPITAL LETTER E WITH GRAVE u'\xcd' # 0xEA -> LATIN CAPITAL LETTER I WITH ACUTE u'\xce' # 0xEB -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX u'\xcf' # 0xEC -> LATIN CAPITAL LETTER I WITH DIAERESIS u'\xcc' # 0xED -> LATIN CAPITAL LETTER I WITH GRAVE u'\xd3' # 0xEE -> LATIN CAPITAL LETTER O WITH ACUTE u'\xd4' # 0xEF -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX u'\uf8ff' # 0xF0 -> Apple logo u'\xd2' # 0xF1 -> LATIN CAPITAL LETTER O WITH GRAVE u'\xda' # 0xF2 -> LATIN CAPITAL LETTER U WITH ACUTE u'\xdb' # 0xF3 -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX u'\xd9' # 0xF4 -> LATIN CAPITAL LETTER U WITH GRAVE u'\u0131' # 0xF5 -> LATIN SMALL LETTER DOTLESS I u'\u02c6' # 0xF6 -> MODIFIER LETTER CIRCUMFLEX ACCENT u'\u02dc' # 0xF7 -> SMALL TILDE u'\xaf' # 0xF8 -> MACRON u'\u02d8' # 0xF9 -> BREVE u'\u02d9' # 0xFA -> DOT ABOVE u'\u02da' # 0xFB -> RING ABOVE u'\xb8' # 0xFC -> CEDILLA u'\u02dd' # 0xFD -> DOUBLE ACUTE ACCENT u'\u02db' # 0xFE -> OGONEK u'\u02c7' # 0xFF -> CARON ) ### Encoding table 
# Build the inverse (character -> byte) mapping from the 256-entry
# decoding table above; used by the charmap encode paths in this codec.
encoding_table=codecs.charmap_build(decoding_table)
StephenWeber/ansible
refs/heads/devel
test/units/modules/cloud/amazon/test_ec2_vpc_nat_gateway.py
41
import pytest import unittest boto3 = pytest.importorskip("boto3") botocore = pytest.importorskip("botocore") from collections import namedtuple from ansible.parsing.dataloader import DataLoader from ansible.vars import VariableManager from ansible.inventory import Inventory from ansible.playbook.play import Play from ansible.executor.task_queue_manager import TaskQueueManager import ansible.modules.cloud.amazon.ec2_vpc_nat_gateway as ng Options = ( namedtuple( 'Options', [ 'connection', 'module_path', 'forks', 'become', 'become_method', 'become_user', 'remote_user', 'private_key_file', 'ssh_common_args', 'sftp_extra_args', 'scp_extra_args', 'ssh_extra_args', 'verbosity', 'check' ] ) ) # initialize needed objects variable_manager = VariableManager() loader = DataLoader() options = ( Options( connection='local', module_path='cloud/amazon', forks=1, become=None, become_method=None, become_user=None, check=True, remote_user=None, private_key_file=None, ssh_common_args=None, sftp_extra_args=None, scp_extra_args=None, ssh_extra_args=None, verbosity=3 ) ) passwords = dict(vault_pass='') aws_region = 'us-west-2' # create inventory and pass to var manager inventory = Inventory(loader=loader, variable_manager=variable_manager, host_list='localhost') variable_manager.set_inventory(inventory) def run(play): tqm = None results = None try: tqm = TaskQueueManager( inventory=inventory, variable_manager=variable_manager, loader=loader, options=options, passwords=passwords, stdout_callback='default', ) results = tqm.run(play) finally: if tqm is not None: tqm.cleanup() return tqm, results class AnsibleVpcNatGatewayTasks(unittest.TestCase): def test_create_gateway_using_allocation_id(self): play_source = dict( name = "Create new nat gateway with eip allocation-id", hosts = 'localhost', gather_facts = 'no', tasks = [ dict( action=dict( module='ec2_vpc_nat_gateway', args=dict( subnet_id='subnet-12345678', allocation_id='eipalloc-12345678', wait='yes', region=aws_region, ) ), 
register='nat_gateway', ), dict( action=dict( module='debug', args=dict( msg='{{nat_gateway}}' ) ) ) ] ) play = Play().load(play_source, variable_manager=variable_manager, loader=loader) tqm, results = run(play) self.failUnless(tqm._stats.ok['localhost'] == 2) self.failUnless(tqm._stats.changed['localhost'] == 1) def test_create_gateway_using_allocation_id_idempotent(self): play_source = dict( name = "Create new nat gateway with eip allocation-id", hosts = 'localhost', gather_facts = 'no', tasks = [ dict( action=dict( module='ec2_vpc_nat_gateway', args=dict( subnet_id='subnet-123456789', allocation_id='eipalloc-1234567', wait='yes', region=aws_region, ) ), register='nat_gateway', ), dict( action=dict( module='debug', args=dict( msg='{{nat_gateway}}' ) ) ) ] ) play = Play().load(play_source, variable_manager=variable_manager, loader=loader) tqm, results = run(play) self.failUnless(tqm._stats.ok['localhost'] == 2) self.assertFalse('localhost' in tqm._stats.changed) def test_create_gateway_using_eip_address(self): play_source = dict( name = "Create new nat gateway with eip address", hosts = 'localhost', gather_facts = 'no', tasks = [ dict( action=dict( module='ec2_vpc_nat_gateway', args=dict( subnet_id='subnet-12345678', eip_address='55.55.55.55', wait='yes', region=aws_region, ) ), register='nat_gateway', ), dict( action=dict( module='debug', args=dict( msg='{{nat_gateway}}' ) ) ) ] ) play = Play().load(play_source, variable_manager=variable_manager, loader=loader) tqm, results = run(play) self.failUnless(tqm._stats.ok['localhost'] == 2) self.failUnless(tqm._stats.changed['localhost'] == 1) def test_create_gateway_using_eip_address_idempotent(self): play_source = dict( name = "Create new nat gateway with eip address", hosts = 'localhost', gather_facts = 'no', tasks = [ dict( action=dict( module='ec2_vpc_nat_gateway', args=dict( subnet_id='subnet-123456789', eip_address='55.55.55.55', wait='yes', region=aws_region, ) ), register='nat_gateway', ), dict( action=dict( 
module='debug', args=dict( msg='{{nat_gateway}}' ) ) ) ] ) play = Play().load(play_source, variable_manager=variable_manager, loader=loader) tqm, results = run(play) self.failUnless(tqm._stats.ok['localhost'] == 2) self.assertFalse('localhost' in tqm._stats.changed) def test_create_gateway_in_subnet_only_if_one_does_not_exist_already(self): play_source = dict( name = "Create new nat gateway only if one does not exist already", hosts = 'localhost', gather_facts = 'no', tasks = [ dict( action=dict( module='ec2_vpc_nat_gateway', args=dict( if_exist_do_not_create='yes', subnet_id='subnet-123456789', wait='yes', region=aws_region, ) ), register='nat_gateway', ), dict( action=dict( module='debug', args=dict( msg='{{nat_gateway}}' ) ) ) ] ) play = Play().load(play_source, variable_manager=variable_manager, loader=loader) tqm, results = run(play) self.failUnless(tqm._stats.ok['localhost'] == 2) self.assertFalse('localhost' in tqm._stats.changed) def test_delete_gateway(self): play_source = dict( name = "Delete Nat Gateway", hosts = 'localhost', gather_facts = 'no', tasks = [ dict( action=dict( module='ec2_vpc_nat_gateway', args=dict( nat_gateway_id='nat-123456789', state='absent', wait='yes', region=aws_region, ) ), register='nat_gateway', ), dict( action=dict( module='debug', args=dict( msg='{{nat_gateway}}' ) ) ) ] ) play = Play().load(play_source, variable_manager=variable_manager, loader=loader) tqm, results = run(play) self.failUnless(tqm._stats.ok['localhost'] == 2) self.assertTrue('localhost' in tqm._stats.changed) class AnsibleEc2VpcNatGatewayFunctions(unittest.TestCase): def test_convert_to_lower(self): example = ng.DRY_RUN_GATEWAY_UNCONVERTED converted_example = ng.convert_to_lower(example[0]) keys = list(converted_example.keys()) keys.sort() for i in range(len(keys)): if i == 0: self.assertEqual(keys[i], 'create_time') if i == 1: self.assertEqual(keys[i], 'nat_gateway_addresses') gw_addresses_keys = list(converted_example[keys[i]][0].keys()) 
gw_addresses_keys.sort() for j in range(len(gw_addresses_keys)): if j == 0: self.assertEqual(gw_addresses_keys[j], 'allocation_id') if j == 1: self.assertEqual(gw_addresses_keys[j], 'network_interface_id') if j == 2: self.assertEqual(gw_addresses_keys[j], 'private_ip') if j == 3: self.assertEqual(gw_addresses_keys[j], 'public_ip') if i == 2: self.assertEqual(keys[i], 'nat_gateway_id') if i == 3: self.assertEqual(keys[i], 'state') if i == 4: self.assertEqual(keys[i], 'subnet_id') if i == 5: self.assertEqual(keys[i], 'vpc_id') def test_get_nat_gateways(self): client = boto3.client('ec2', region_name=aws_region) success, err_msg, stream = ( ng.get_nat_gateways(client, 'subnet-123456789', check_mode=True) ) should_return = ng.DRY_RUN_GATEWAYS self.assertTrue(success) self.assertEqual(stream, should_return) def test_get_nat_gateways_no_gateways_found(self): client = boto3.client('ec2', region_name=aws_region) success, err_msg, stream = ( ng.get_nat_gateways(client, 'subnet-1234567', check_mode=True) ) self.assertTrue(success) self.assertEqual(stream, []) def test_wait_for_status(self): client = boto3.client('ec2', region_name=aws_region) success, err_msg, gws = ( ng.wait_for_status( client, 5, 'nat-123456789', 'available', check_mode=True ) ) should_return = ng.DRY_RUN_GATEWAYS[0] self.assertTrue(success) self.assertEqual(gws, should_return) def test_wait_for_status_to_timeout(self): client = boto3.client('ec2', region_name=aws_region) success, err_msg, gws = ( ng.wait_for_status( client, 2, 'nat-12345678', 'available', check_mode=True ) ) self.assertFalse(success) self.assertEqual(gws, {}) def test_gateway_in_subnet_exists_with_allocation_id(self): client = boto3.client('ec2', region_name=aws_region) gws, err_msg = ( ng.gateway_in_subnet_exists( client, 'subnet-123456789', 'eipalloc-1234567', check_mode=True ) ) should_return = ng.DRY_RUN_GATEWAYS self.assertEqual(gws, should_return) def test_gateway_in_subnet_exists_with_allocation_id_does_not_exist(self): client = 
boto3.client('ec2', region_name=aws_region) gws, err_msg = ( ng.gateway_in_subnet_exists( client, 'subnet-123456789', 'eipalloc-123', check_mode=True ) ) should_return = list() self.assertEqual(gws, should_return) def test_gateway_in_subnet_exists_without_allocation_id(self): client = boto3.client('ec2', region_name=aws_region) gws, err_msg = ( ng.gateway_in_subnet_exists( client, 'subnet-123456789', check_mode=True ) ) should_return = ng.DRY_RUN_GATEWAYS self.assertEqual(gws, should_return) def test_get_eip_allocation_id_by_address(self): client = boto3.client('ec2', region_name=aws_region) allocation_id, _ = ( ng.get_eip_allocation_id_by_address( client, '55.55.55.55', check_mode=True ) ) should_return = 'eipalloc-1234567' self.assertEqual(allocation_id, should_return) def test_get_eip_allocation_id_by_address_does_not_exist(self): client = boto3.client('ec2', region_name=aws_region) allocation_id, err_msg = ( ng.get_eip_allocation_id_by_address( client, '52.52.52.52', check_mode=True ) ) self.assertEqual(err_msg, 'EIP 52.52.52.52 does not exist') self.assertTrue(allocation_id is None) def test_allocate_eip_address(self): client = boto3.client('ec2', region_name=aws_region) success, err_msg, eip_id = ( ng.allocate_eip_address( client, check_mode=True ) ) self.assertTrue(success) def test_release_address(self): client = boto3.client('ec2', region_name=aws_region) success, _ = ( ng.release_address( client, 'eipalloc-1234567', check_mode=True ) ) self.assertTrue(success) def test_create(self): client = boto3.client('ec2', region_name=aws_region) success, changed, err_msg, results = ( ng.create( client, 'subnet-123456', 'eipalloc-1234567', check_mode=True ) ) self.assertTrue(success) self.assertTrue(changed) def test_pre_create(self): client = boto3.client('ec2', region_name=aws_region) success, changed, err_msg, results = ( ng.pre_create( client, 'subnet-123456', check_mode=True ) ) self.assertTrue(success) self.assertTrue(changed) def 
test_pre_create_idemptotent_with_allocation_id(self): client = boto3.client('ec2', region_name=aws_region) success, changed, err_msg, results = ( ng.pre_create( client, 'subnet-123456789', allocation_id='eipalloc-1234567', check_mode=True ) ) self.assertTrue(success) self.assertFalse(changed) def test_pre_create_idemptotent_with_eip_address(self): client = boto3.client('ec2', region_name=aws_region) success, changed, err_msg, results = ( ng.pre_create( client, 'subnet-123456789', eip_address='55.55.55.55', check_mode=True ) ) self.assertTrue(success) self.assertFalse(changed) def test_pre_create_idemptotent_if_exist_do_not_create(self): client = boto3.client('ec2', region_name=aws_region) success, changed, err_msg, results = ( ng.pre_create( client, 'subnet-123456789', if_exist_do_not_create=True, check_mode=True ) ) self.assertTrue(success) self.assertFalse(changed) def test_delete(self): client = boto3.client('ec2', region_name=aws_region) success, changed, err_msg, _ = ( ng.remove( client, 'nat-123456789', check_mode=True ) ) self.assertTrue(success) self.assertTrue(changed) def test_delete_and_release_ip(self): client = boto3.client('ec2', region_name=aws_region) success, changed, err_msg, _ = ( ng.remove( client, 'nat-123456789', release_eip=True, check_mode=True ) ) self.assertTrue(success) self.assertTrue(changed) def test_delete_if_does_not_exist(self): client = boto3.client('ec2', region_name=aws_region) success, changed, err_msg, _ = ( ng.remove( client, 'nat-12345', check_mode=True ) ) self.assertFalse(success) self.assertFalse(changed)
auto-mat/klub
refs/heads/diakonie
local_migrations/migrations_helpdesk/0011_admin_related_improvements.py
4
# -*- coding: utf-8 -*- from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('helpdesk', '0010_remove_queuemembership'), ] operations = [ migrations.AlterField( model_name='queue', name='permission_name', field=models.CharField(editable=False, max_length=50, blank=True, help_text='Name used in the django.contrib.auth permission system', null=True, verbose_name='Django auth permission name'), ), migrations.AlterField( model_name='queue', name='slug', field=models.SlugField(help_text="This slug is used when building ticket ID's. Once set, try not to change it or e-mailing may get messy.", unique=True, verbose_name='Slug'), ), ]
pducks32/intergrala
refs/heads/master
python/sympy/sympy/functions/special/tests/test_error_functions.py
13
from sympy import ( symbols, expand, expand_func, nan, oo, Float, conjugate, diff, re, im, Abs, O, factorial, exp_polar, polar_lift, gruntz, limit, Symbol, I, integrate, S, sqrt, sin, cos, sinh, cosh, exp, log, pi, EulerGamma, erf, erfc, erfi, erf2, erfinv, erfcinv, erf2inv, gamma, uppergamma, loggamma, Ei, expint, E1, li, Li, Si, Ci, Shi, Chi, fresnels, fresnelc, hyper, meijerg) from sympy.functions.special.error_functions import _erfs, _eis from sympy.core.function import ArgumentIndexError from sympy.utilities.pytest import raises x, y, z = symbols('x,y,z') w = Symbol("w", real=True) n = Symbol("n", integer=True) def test_erf(): assert erf(nan) == nan assert erf(oo) == 1 assert erf(-oo) == -1 assert erf(0) == 0 assert erf(I*oo) == oo*I assert erf(-I*oo) == -oo*I assert erf(-2) == -erf(2) assert erf(-x*y) == -erf(x*y) assert erf(-x - y) == -erf(x + y) assert erf(erfinv(x)) == x assert erf(erfcinv(x)) == 1 - x assert erf(erf2inv(0, x)) == x assert erf(erf2inv(0, erf(erfcinv(1 - erf(erfinv(x)))))) == x assert erf(I).is_real is False assert erf(0).is_real is True assert conjugate(erf(z)) == erf(conjugate(z)) assert erf(x).as_leading_term(x) == 2*x/sqrt(pi) assert erf(1/x).as_leading_term(x) == erf(1/x) assert erf(z).rewrite('uppergamma') == sqrt(z**2)*erf(sqrt(z**2))/z assert erf(z).rewrite('erfc') == S.One - erfc(z) assert erf(z).rewrite('erfi') == -I*erfi(I*z) assert erf(z).rewrite('fresnels') == (1 + I)*(fresnelc(z*(1 - I)/sqrt(pi)) - I*fresnels(z*(1 - I)/sqrt(pi))) assert erf(z).rewrite('fresnelc') == (1 + I)*(fresnelc(z*(1 - I)/sqrt(pi)) - I*fresnels(z*(1 - I)/sqrt(pi))) assert erf(z).rewrite('hyper') == 2*z*hyper([S.Half], [3*S.Half], -z**2)/sqrt(pi) assert erf(z).rewrite('meijerg') == z*meijerg([S.Half], [], [0], [-S.Half], z**2)/sqrt(pi) assert erf(z).rewrite('expint') == sqrt(z**2)/z - z*expint(S.Half, z**2)/sqrt(S.Pi) assert limit(exp(x)*exp(x**2)*(erf(x + 1/exp(x)) - erf(x)), x, oo) == \ 2/sqrt(pi) assert limit((1 - erf(z))*exp(z**2)*z, z, oo) == 
1/sqrt(pi) assert limit((1 - erf(x))*exp(x**2)*sqrt(pi)*x, x, oo) == 1 assert limit(((1 - erf(x))*exp(x**2)*sqrt(pi)*x - 1)*2*x**2, x, oo) == -1 assert erf(x).as_real_imag() == \ ((erf(re(x) - I*re(x)*Abs(im(x))/Abs(re(x)))/2 + erf(re(x) + I*re(x)*Abs(im(x))/Abs(re(x)))/2, I*(erf(re(x) - I*re(x)*Abs(im(x))/Abs(re(x))) - erf(re(x) + I*re(x)*Abs(im(x))/Abs(re(x)))) * re(x)*Abs(im(x))/(2*im(x)*Abs(re(x))))) raises(ArgumentIndexError, lambda: erf(x).fdiff(2)) def test_erf_series(): assert erf(x).series(x, 0, 7) == 2*x/sqrt(pi) - \ 2*x**3/3/sqrt(pi) + x**5/5/sqrt(pi) + O(x**7) def test_erf_evalf(): assert abs( erf(Float(2.0)) - 0.995322265 ) < 1E-8 # XXX def test__erfs(): assert _erfs(z).diff(z) == -2/sqrt(S.Pi) + 2*z*_erfs(z) assert _erfs(1/z).series(z) == \ z/sqrt(pi) - z**3/(2*sqrt(pi)) + 3*z**5/(4*sqrt(pi)) + O(z**6) assert expand(erf(z).rewrite('tractable').diff(z).rewrite('intractable')) \ == erf(z).diff(z) assert _erfs(z).rewrite("intractable") == (-erf(z) + 1)*exp(z**2) def test_erfc(): assert erfc(nan) == nan assert erfc(oo) == 0 assert erfc(-oo) == 2 assert erfc(0) == 1 assert erfc(I*oo) == -oo*I assert erfc(-I*oo) == oo*I assert erfc(-x) == S(2) - erfc(x) assert erfc(erfcinv(x)) == x assert erfc(I).is_real is False assert erfc(0).is_real is True assert conjugate(erfc(z)) == erfc(conjugate(z)) assert erfc(x).as_leading_term(x) == S.One assert erfc(1/x).as_leading_term(x) == erfc(1/x) assert erfc(z).rewrite('erf') == 1 - erf(z) assert erfc(z).rewrite('erfi') == 1 + I*erfi(I*z) assert erfc(z).rewrite('fresnels') == 1 - (1 + I)*(fresnelc(z*(1 - I)/sqrt(pi)) - I*fresnels(z*(1 - I)/sqrt(pi))) assert erfc(z).rewrite('fresnelc') == 1 - (1 + I)*(fresnelc(z*(1 - I)/sqrt(pi)) - I*fresnels(z*(1 - I)/sqrt(pi))) assert erfc(z).rewrite('hyper') == 1 - 2*z*hyper([S.Half], [3*S.Half], -z**2)/sqrt(pi) assert erfc(z).rewrite('meijerg') == 1 - z*meijerg([S.Half], [], [0], [-S.Half], z**2)/sqrt(pi) assert erfc(z).rewrite('uppergamma') == 1 - sqrt(z**2)*erf(sqrt(z**2))/z assert 
erfc(z).rewrite('expint') == S.One - sqrt(z**2)/z + z*expint(S.Half, z**2)/sqrt(S.Pi) assert erfc(x).as_real_imag() == \ ((erfc(re(x) - I*re(x)*Abs(im(x))/Abs(re(x)))/2 + erfc(re(x) + I*re(x)*Abs(im(x))/Abs(re(x)))/2, I*(erfc(re(x) - I*re(x)*Abs(im(x))/Abs(re(x))) - erfc(re(x) + I*re(x)*Abs(im(x))/Abs(re(x)))) * re(x)*Abs(im(x))/(2*im(x)*Abs(re(x))))) raises(ArgumentIndexError, lambda: erfc(x).fdiff(2)) def test_erfc_series(): assert erfc(x).series(x, 0, 7) == 1 - 2*x/sqrt(pi) + \ 2*x**3/3/sqrt(pi) - x**5/5/sqrt(pi) + O(x**7) def test_erfc_evalf(): assert abs( erfc(Float(2.0)) - 0.00467773 ) < 1E-8 # XXX def test_erfi(): assert erfi(nan) == nan assert erfi(oo) == S.Infinity assert erfi(-oo) == S.NegativeInfinity assert erfi(0) == S.Zero assert erfi(I*oo) == I assert erfi(-I*oo) == -I assert erfi(-x) == -erfi(x) assert erfi(I*erfinv(x)) == I*x assert erfi(I*erfcinv(x)) == I*(1 - x) assert erfi(I*erf2inv(0, x)) == I*x assert erfi(I).is_real is False assert erfi(0).is_real is True assert conjugate(erfi(z)) == erfi(conjugate(z)) assert erfi(z).rewrite('erf') == -I*erf(I*z) assert erfi(z).rewrite('erfc') == I*erfc(I*z) - I assert erfi(z).rewrite('fresnels') == (1 - I)*(fresnelc(z*(1 + I)/sqrt(pi)) - I*fresnels(z*(1 + I)/sqrt(pi))) assert erfi(z).rewrite('fresnelc') == (1 - I)*(fresnelc(z*(1 + I)/sqrt(pi)) - I*fresnels(z*(1 + I)/sqrt(pi))) assert erfi(z).rewrite('hyper') == 2*z*hyper([S.Half], [3*S.Half], z**2)/sqrt(pi) assert erfi(z).rewrite('meijerg') == z*meijerg([S.Half], [], [0], [-S.Half], -z**2)/sqrt(pi) assert erfi(z).rewrite('uppergamma') == (sqrt(-z**2)/z*(uppergamma(S.Half, -z**2)/sqrt(S.Pi) - S.One)) assert erfi(z).rewrite('expint') == sqrt(-z**2)/z - z*expint(S.Half, -z**2)/sqrt(S.Pi) assert erfi(x).as_real_imag() == \ ((erfi(re(x) - I*re(x)*Abs(im(x))/Abs(re(x)))/2 + erfi(re(x) + I*re(x)*Abs(im(x))/Abs(re(x)))/2, I*(erfi(re(x) - I*re(x)*Abs(im(x))/Abs(re(x))) - erfi(re(x) + I*re(x)*Abs(im(x))/Abs(re(x)))) * re(x)*Abs(im(x))/(2*im(x)*Abs(re(x))))) 
raises(ArgumentIndexError, lambda: erfi(x).fdiff(2)) def test_erfi_series(): assert erfi(x).series(x, 0, 7) == 2*x/sqrt(pi) + \ 2*x**3/3/sqrt(pi) + x**5/5/sqrt(pi) + O(x**7) def test_erfi_evalf(): assert abs( erfi(Float(2.0)) - 18.5648024145756 ) < 1E-13 # XXX def test_erf2(): assert erf2(0, 0) == S.Zero assert erf2(x, x) == S.Zero assert erf2(nan, 0) == nan assert erf2(-oo, y) == erf(y) + 1 assert erf2( oo, y) == erf(y) - 1 assert erf2( x, oo) == 1 - erf(x) assert erf2( x,-oo) == -1 - erf(x) assert erf2(x, erf2inv(x, y)) == y assert erf2(-x, -y) == -erf2(x,y) assert erf2(-x, y) == erf(y) + erf(x) assert erf2( x, -y) == -erf(y) - erf(x) assert erf2(x, y).rewrite('fresnels') == erf(y).rewrite(fresnels)-erf(x).rewrite(fresnels) assert erf2(x, y).rewrite('fresnelc') == erf(y).rewrite(fresnelc)-erf(x).rewrite(fresnelc) assert erf2(x, y).rewrite('hyper') == erf(y).rewrite(hyper)-erf(x).rewrite(hyper) assert erf2(x, y).rewrite('meijerg') == erf(y).rewrite(meijerg)-erf(x).rewrite(meijerg) assert erf2(x, y).rewrite('uppergamma') == erf(y).rewrite(uppergamma) - erf(x).rewrite(uppergamma) assert erf2(x, y).rewrite('expint') == erf(y).rewrite(expint)-erf(x).rewrite(expint) assert erf2(I, 0).is_real is False assert erf2(0, 0).is_real is True #assert conjugate(erf2(x, y)) == erf2(conjugate(x), conjugate(y)) assert erf2(x, y).rewrite('erf') == erf(y) - erf(x) assert erf2(x, y).rewrite('erfc') == erfc(x) - erfc(y) assert erf2(x, y).rewrite('erfi') == I*(erfi(I*x) - erfi(I*y)) raises(ArgumentIndexError, lambda: erfi(x).fdiff(3)) def test_erfinv(): assert erfinv(0) == 0 assert erfinv(1) == S.Infinity assert erfinv(nan) == S.NaN assert erfinv(erf(w)) == w assert erfinv(erf(-w)) == -w assert erfinv(x).diff() == sqrt(pi)*exp(erfinv(x)**2)/2 assert erfinv(z).rewrite('erfcinv') == erfcinv(1-z) def test_erfinv_evalf(): assert abs( erfinv(Float(0.2)) - 0.179143454621292 ) < 1E-13 def test_erfcinv(): assert erfcinv(1) == 0 assert erfcinv(0) == S.Infinity assert erfcinv(nan) == S.NaN assert 
erfcinv(x).diff() == -sqrt(pi)*exp(erfcinv(x)**2)/2 assert erfcinv(z).rewrite('erfinv') == erfinv(1-z) def test_erf2inv(): assert erf2inv(0, 0) == S.Zero assert erf2inv(0, 1) == S.Infinity assert erf2inv(1, 0) == S.One assert erf2inv(0, y) == erfinv(y) assert erf2inv(oo,y) == erfcinv(-y) assert erf2inv(x, y).diff(x) == exp(-x**2 + erf2inv(x, y)**2) assert erf2inv(x, y).diff(y) == sqrt(pi)*exp(erf2inv(x, y)**2)/2 # NOTE we multiply by exp_polar(I*pi) and need this to be on the principal # branch, hence take x in the lower half plane (d=0). def mytn(expr1, expr2, expr3, x, d=0): from sympy.utilities.randtest import verify_numerically, random_complex_number subs = {} for a in expr1.free_symbols: if a != x: subs[a] = random_complex_number() return expr2 == expr3 and verify_numerically(expr1.subs(subs), expr2.subs(subs), x, d=d) def mytd(expr1, expr2, x): from sympy.utilities.randtest import test_derivative_numerically, \ random_complex_number subs = {} for a in expr1.free_symbols: if a != x: subs[a] = random_complex_number() return expr1.diff(x) == expr2 and test_derivative_numerically(expr1.subs(subs), x) def tn_branch(func, s=None): from sympy import I, pi, exp_polar from random import uniform def fn(x): if s is None: return func(x) return func(s, x) c = uniform(1, 5) expr = fn(c*exp_polar(I*pi)) - fn(c*exp_polar(-I*pi)) eps = 1e-15 expr2 = fn(-c + eps*I) - fn(-c - eps*I) return abs(expr.n() - expr2.n()).n() < 1e-10 def test_ei(): pos = Symbol('p', positive=True) neg = Symbol('n', negative=True) assert Ei(-pos) == Ei(polar_lift(-1)*pos) - I*pi assert Ei(neg) == Ei(polar_lift(neg)) - I*pi assert tn_branch(Ei) assert mytd(Ei(x), exp(x)/x, x) assert mytn(Ei(x), Ei(x).rewrite(uppergamma), -uppergamma(0, x*polar_lift(-1)) - I*pi, x) assert mytn(Ei(x), Ei(x).rewrite(expint), -expint(1, x*polar_lift(-1)) - I*pi, x) assert Ei(x).rewrite(expint).rewrite(Ei) == Ei(x) assert Ei(x*exp_polar(2*I*pi)) == Ei(x) + 2*I*pi assert Ei(x*exp_polar(-2*I*pi)) == Ei(x) - 2*I*pi assert 
mytn(Ei(x), Ei(x).rewrite(Shi), Chi(x) + Shi(x), x) assert mytn(Ei(x*polar_lift(I)), Ei(x*polar_lift(I)).rewrite(Si), Ci(x) + I*Si(x) + I*pi/2, x) assert Ei(log(x)).rewrite(li) == li(x) assert Ei(2*log(x)).rewrite(li) == li(x**2) assert gruntz(Ei(x+exp(-x))*exp(-x)*x, x, oo) == 1 assert Ei(x).series(x) == EulerGamma + log(x) + x + x**2/4 + \ x**3/18 + x**4/96 + x**5/600 + O(x**6) def test_expint(): assert mytn(expint(x, y), expint(x, y).rewrite(uppergamma), y**(x - 1)*uppergamma(1 - x, y), x) assert mytd( expint(x, y), -y**(x - 1)*meijerg([], [1, 1], [0, 0, 1 - x], [], y), x) assert mytd(expint(x, y), -expint(x - 1, y), y) assert mytn(expint(1, x), expint(1, x).rewrite(Ei), -Ei(x*polar_lift(-1)) + I*pi, x) assert expint(-4, x) == exp(-x)/x + 4*exp(-x)/x**2 + 12*exp(-x)/x**3 \ + 24*exp(-x)/x**4 + 24*exp(-x)/x**5 assert expint(-S(3)/2, x) == \ exp(-x)/x + 3*exp(-x)/(2*x**2) - 3*sqrt(pi)*erf(sqrt(x))/(4*x**S('5/2')) \ + 3*sqrt(pi)/(4*x**S('5/2')) assert tn_branch(expint, 1) assert tn_branch(expint, 2) assert tn_branch(expint, 3) assert tn_branch(expint, 1.7) assert tn_branch(expint, pi) assert expint(y, x*exp_polar(2*I*pi)) == \ x**(y - 1)*(exp(2*I*pi*y) - 1)*gamma(-y + 1) + expint(y, x) assert expint(y, x*exp_polar(-2*I*pi)) == \ x**(y - 1)*(exp(-2*I*pi*y) - 1)*gamma(-y + 1) + expint(y, x) assert expint(2, x*exp_polar(2*I*pi)) == 2*I*pi*x + expint(2, x) assert expint(2, x*exp_polar(-2*I*pi)) == -2*I*pi*x + expint(2, x) assert expint(1, x).rewrite(Ei).rewrite(expint) == expint(1, x) assert mytn(E1(x), E1(x).rewrite(Shi), Shi(x) - Chi(x), x) assert mytn(E1(polar_lift(I)*x), E1(polar_lift(I)*x).rewrite(Si), -Ci(x) + I*Si(x) - I*pi/2, x) assert mytn(expint(2, x), expint(2, x).rewrite(Ei).rewrite(expint), -x*E1(x) + exp(-x), x) assert mytn(expint(3, x), expint(3, x).rewrite(Ei).rewrite(expint), x**2*E1(x)/2 + (1 - x)*exp(-x)/2, x) assert expint(S(3)/2, z).nseries(z) == \ 2 + 2*z - z**2/3 + z**3/15 - z**4/84 + z**5/540 - \ 2*sqrt(pi)*sqrt(z) + O(z**6) assert 
E1(z).series(z) == -EulerGamma - log(z) + z - \ z**2/4 + z**3/18 - z**4/96 + z**5/600 + O(z**6) assert expint(4, z).series(z) == S(1)/3 - z/2 + z**2/2 + \ z**3*(log(z)/6 - S(11)/36 + EulerGamma/6) - z**4/24 + \ z**5/240 + O(z**6) def test__eis(): assert _eis(z).diff(z) == -_eis(z) + 1/z assert _eis(1/z).series(z) == \ z + z**2 + 2*z**3 + 6*z**4 + 24*z**5 + O(z**6) assert Ei(z).rewrite('tractable') == exp(z)*_eis(z) assert li(z).rewrite('tractable') == z*_eis(log(z)) assert _eis(z).rewrite('intractable') == exp(-z)*Ei(z) assert expand(li(z).rewrite('tractable').diff(z).rewrite('intractable')) \ == li(z).diff(z) assert expand(Ei(z).rewrite('tractable').diff(z).rewrite('intractable')) \ == Ei(z).diff(z) assert _eis(z).series(z, n=3) == EulerGamma + log(z) + z*(-log(z) - \ EulerGamma + 1) + z**2*(log(z)/2 - S(3)/4 + EulerGamma/2) + O(z**3*log(z)) def tn_arg(func): def test(arg, e1, e2): from random import uniform v = uniform(1, 5) v1 = func(arg*x).subs(x, v).n() v2 = func(e1*v + e2*1e-15).n() return abs(v1 - v2).n() < 1e-10 return test(exp_polar(I*pi/2), I, 1) and \ test(exp_polar(-I*pi/2), -I, 1) and \ test(exp_polar(I*pi), -1, I) and \ test(exp_polar(-I*pi), -1, -I) def test_li(): z = Symbol("z") zr = Symbol("z", real=True) zp = Symbol("z", positive=True) zn = Symbol("z", negative=True) assert li(0) == 0 assert li(1) == -oo assert li(oo) == oo assert isinstance(li(z), li) assert diff(li(z), z) == 1/log(z) assert conjugate(li(z)) == li(conjugate(z)) assert conjugate(li(-zr)) == li(-zr) assert conjugate(li(-zp)) == conjugate(li(-zp)) assert conjugate(li(zn)) == conjugate(li(zn)) assert li(z).rewrite(Li) == Li(z) + li(2) assert li(z).rewrite(Ei) == Ei(log(z)) assert li(z).rewrite(uppergamma) == (-log(1/log(z))/2 - log(-log(z)) + log(log(z))/2 - expint(1, -log(z))) assert li(z).rewrite(Si) == (-log(I*log(z)) - log(1/log(z))/2 + log(log(z))/2 + Ci(I*log(z)) + Shi(log(z))) assert li(z).rewrite(Ci) == (-log(I*log(z)) - log(1/log(z))/2 + log(log(z))/2 + Ci(I*log(z)) + 
Shi(log(z))) assert li(z).rewrite(Shi) == (-log(1/log(z))/2 + log(log(z))/2 + Chi(log(z)) - Shi(log(z))) assert li(z).rewrite(Chi) == (-log(1/log(z))/2 + log(log(z))/2 + Chi(log(z)) - Shi(log(z))) assert li(z).rewrite(hyper) ==(log(z)*hyper((1, 1), (2, 2), log(z)) - log(1/log(z))/2 + log(log(z))/2 + EulerGamma) assert li(z).rewrite(meijerg) == (-log(1/log(z))/2 - log(-log(z)) + log(log(z))/2 - meijerg(((), (1,)), ((0, 0), ()), -log(z))) assert gruntz(1/li(z), z, oo) == 0 def test_Li(): assert Li(2) == 0 assert Li(oo) == oo assert isinstance(Li(z), Li) assert diff(Li(z), z) == 1/log(z) assert gruntz(1/Li(z), z, oo) == 0 assert Li(z).rewrite(li) == li(z) - li(2) def test_si(): assert Si(I*x) == I*Shi(x) assert Shi(I*x) == I*Si(x) assert Si(-I*x) == -I*Shi(x) assert Shi(-I*x) == -I*Si(x) assert Si(-x) == -Si(x) assert Shi(-x) == -Shi(x) assert Si(exp_polar(2*pi*I)*x) == Si(x) assert Si(exp_polar(-2*pi*I)*x) == Si(x) assert Shi(exp_polar(2*pi*I)*x) == Shi(x) assert Shi(exp_polar(-2*pi*I)*x) == Shi(x) assert Si(oo) == pi/2 assert Si(-oo) == -pi/2 assert Shi(oo) == oo assert Shi(-oo) == -oo assert mytd(Si(x), sin(x)/x, x) assert mytd(Shi(x), sinh(x)/x, x) assert mytn(Si(x), Si(x).rewrite(Ei), -I*(-Ei(x*exp_polar(-I*pi/2))/2 + Ei(x*exp_polar(I*pi/2))/2 - I*pi) + pi/2, x) assert mytn(Si(x), Si(x).rewrite(expint), -I*(-expint(1, x*exp_polar(-I*pi/2))/2 + expint(1, x*exp_polar(I*pi/2))/2) + pi/2, x) assert mytn(Shi(x), Shi(x).rewrite(Ei), Ei(x)/2 - Ei(x*exp_polar(I*pi))/2 + I*pi/2, x) assert mytn(Shi(x), Shi(x).rewrite(expint), expint(1, x)/2 - expint(1, x*exp_polar(I*pi))/2 - I*pi/2, x) assert tn_arg(Si) assert tn_arg(Shi) assert Si(x).nseries(x, n=8) == \ x - x**3/18 + x**5/600 - x**7/35280 + O(x**9) assert Shi(x).nseries(x, n=8) == \ x + x**3/18 + x**5/600 + x**7/35280 + O(x**9) assert Si(sin(x)).nseries(x, n=5) == x - 2*x**3/9 + 17*x**5/450 + O(x**6) assert Si(x).nseries(x, 1, n=3) == \ Si(1) + (x - 1)*sin(1) + (x - 1)**2*(-sin(1)/2 + cos(1)/2) + O((x - 1)**3, (x, 1)) 
def test_ci(): m1 = exp_polar(I*pi) m1_ = exp_polar(-I*pi) pI = exp_polar(I*pi/2) mI = exp_polar(-I*pi/2) assert Ci(m1*x) == Ci(x) + I*pi assert Ci(m1_*x) == Ci(x) - I*pi assert Ci(pI*x) == Chi(x) + I*pi/2 assert Ci(mI*x) == Chi(x) - I*pi/2 assert Chi(m1*x) == Chi(x) + I*pi assert Chi(m1_*x) == Chi(x) - I*pi assert Chi(pI*x) == Ci(x) + I*pi/2 assert Chi(mI*x) == Ci(x) - I*pi/2 assert Ci(exp_polar(2*I*pi)*x) == Ci(x) + 2*I*pi assert Chi(exp_polar(-2*I*pi)*x) == Chi(x) - 2*I*pi assert Chi(exp_polar(2*I*pi)*x) == Chi(x) + 2*I*pi assert Ci(exp_polar(-2*I*pi)*x) == Ci(x) - 2*I*pi assert Ci(oo) == 0 assert Ci(-oo) == I*pi assert Chi(oo) == oo assert Chi(-oo) == oo assert mytd(Ci(x), cos(x)/x, x) assert mytd(Chi(x), cosh(x)/x, x) assert mytn(Ci(x), Ci(x).rewrite(Ei), Ei(x*exp_polar(-I*pi/2))/2 + Ei(x*exp_polar(I*pi/2))/2, x) assert mytn(Chi(x), Chi(x).rewrite(Ei), Ei(x)/2 + Ei(x*exp_polar(I*pi))/2 - I*pi/2, x) assert tn_arg(Ci) assert tn_arg(Chi) from sympy import O, EulerGamma, log, limit assert Ci(x).nseries(x, n=4) == \ EulerGamma + log(x) - x**2/4 + x**4/96 + O(x**5) assert Chi(x).nseries(x, n=4) == \ EulerGamma + log(x) + x**2/4 + x**4/96 + O(x**5) assert limit(log(x) - Ci(2*x), x, 0) == -log(2) - EulerGamma def test_fresnel(): assert fresnels(0) == 0 assert fresnels(oo) == S.Half assert fresnels(-oo) == -S.Half assert fresnels(z) == fresnels(z) assert fresnels(-z) == -fresnels(z) assert fresnels(I*z) == -I*fresnels(z) assert fresnels(-I*z) == I*fresnels(z) assert conjugate(fresnels(z)) == fresnels(conjugate(z)) assert fresnels(z).diff(z) == sin(pi*z**2/2) assert fresnels(z).rewrite(erf) == (S.One + I)/4 * ( erf((S.One + I)/2*sqrt(pi)*z) - I*erf((S.One - I)/2*sqrt(pi)*z)) assert fresnels(z).rewrite(hyper) == \ pi*z**3/6 * hyper([S(3)/4], [S(3)/2, S(7)/4], -pi**2*z**4/16) assert fresnels(z).series(z, n=15) == \ pi*z**3/6 - pi**3*z**7/336 + pi**5*z**11/42240 + O(z**15) assert fresnels(w).is_real is True assert fresnels(z).as_real_imag() == \ ((fresnels(re(z) - 
I*re(z)*Abs(im(z))/Abs(re(z)))/2 + fresnels(re(z) + I*re(z)*Abs(im(z))/Abs(re(z)))/2, I*(fresnels(re(z) - I*re(z)*Abs(im(z))/Abs(re(z))) - fresnels(re(z) + I*re(z)*Abs(im(z))/Abs(re(z)))) * re(z)*Abs(im(z))/(2*im(z)*Abs(re(z))))) assert fresnels(2 + 3*I).as_real_imag() == ( fresnels(2 + 3*I)/2 + fresnels(2 - 3*I)/2, I*(fresnels(2 - 3*I) - fresnels(2 + 3*I))/2 ) assert expand_func(integrate(fresnels(z), z)) == \ z*fresnels(z) + cos(pi*z**2/2)/pi assert fresnels(z).rewrite(meijerg) == sqrt(2)*pi*z**(S(9)/4) * \ meijerg(((), (1,)), ((S(3)/4,), (S(1)/4, 0)), -pi**2*z**4/16)/(2*(-z)**(S(3)/4)*(z**2)**(S(3)/4)) assert fresnelc(0) == 0 assert fresnelc(oo) == S.Half assert fresnelc(-oo) == -S.Half assert fresnelc(z) == fresnelc(z) assert fresnelc(-z) == -fresnelc(z) assert fresnelc(I*z) == I*fresnelc(z) assert fresnelc(-I*z) == -I*fresnelc(z) assert conjugate(fresnelc(z)) == fresnelc(conjugate(z)) assert fresnelc(z).diff(z) == cos(pi*z**2/2) assert fresnelc(z).rewrite(erf) == (S.One - I)/4 * ( erf((S.One + I)/2*sqrt(pi)*z) + I*erf((S.One - I)/2*sqrt(pi)*z)) assert fresnelc(z).rewrite(hyper) == \ z * hyper([S.One/4], [S.One/2, S(5)/4], -pi**2*z**4/16) assert fresnelc(z).series(z, n=15) == \ z - pi**2*z**5/40 + pi**4*z**9/3456 - pi**6*z**13/599040 + O(z**15) # issue 6510 assert fresnels(z).series(z, S.Infinity) == \ (-1/(pi**2*z**3) + O(z**(-6), (z, oo)))*sin(pi*z**2/2) + \ (3/(pi**3*z**5) - 1/(pi*z) + O(z**(-6), (z, oo)))*cos(pi*z**2/2) + S.Half assert fresnelc(z).series(z, S.Infinity) == \ (-1/(pi**2*z**3) + O(z**(-6), (z, oo)))*cos(pi*z**2/2) + \ (-3/(pi**3*z**5) + 1/(pi*z) + O(z**(-6), (z, oo)))*sin(pi*z**2/2) + S.Half assert fresnels(1/z).series(z) == \ (-z**3/pi**2 + O(z**6))*sin(pi/(2*z**2)) + (-z/pi + 3*z**5/pi**3 + \ O(z**6))*cos(pi/(2*z**2)) + S.Half assert fresnelc(1/z).series(z) == \ (-z**3/pi**2 + O(z**6))*cos(pi/(2*z**2)) + (z/pi - 3*z**5/pi**3 + \ O(z**6))*sin(pi/(2*z**2)) + S.Half assert fresnelc(w).is_real is True assert fresnelc(z).as_real_imag() == \ 
((fresnelc(re(z) - I*re(z)*Abs(im(z))/Abs(re(z)))/2 + fresnelc(re(z) + I*re(z)*Abs(im(z))/Abs(re(z)))/2, I*(fresnelc(re(z) - I*re(z)*Abs(im(z))/Abs(re(z))) - fresnelc(re(z) + I*re(z)*Abs(im(z))/Abs(re(z)))) * re(z)*Abs(im(z))/(2*im(z)*Abs(re(z))))) assert fresnelc(2 + 3*I).as_real_imag() == ( fresnelc(2 - 3*I)/2 + fresnelc(2 + 3*I)/2, I*(fresnelc(2 - 3*I) - fresnelc(2 + 3*I))/2 ) assert expand_func(integrate(fresnelc(z), z)) == \ z*fresnelc(z) - sin(pi*z**2/2)/pi assert fresnelc(z).rewrite(meijerg) == sqrt(2)*pi*z**(S(3)/4) * \ meijerg(((), (1,)), ((S(1)/4,), (S(3)/4, 0)), -pi**2*z**4/16)/(2*(-z)**(S(1)/4)*(z**2)**(S(1)/4)) from sympy.utilities.randtest import verify_numerically verify_numerically(re(fresnels(z)), fresnels(z).as_real_imag()[0], z) verify_numerically(im(fresnels(z)), fresnels(z).as_real_imag()[1], z) verify_numerically(fresnels(z), fresnels(z).rewrite(hyper), z) verify_numerically(fresnels(z), fresnels(z).rewrite(meijerg), z) verify_numerically(re(fresnelc(z)), fresnelc(z).as_real_imag()[0], z) verify_numerically(im(fresnelc(z)), fresnelc(z).as_real_imag()[1], z) verify_numerically(fresnelc(z), fresnelc(z).rewrite(hyper), z) verify_numerically(fresnelc(z), fresnelc(z).rewrite(meijerg), z)
jjas0nn/solvem
refs/heads/master
tensorflow/lib/python2.7/site-packages/pip/_vendor/html5lib/treeadapters/genshi.py
356
from __future__ import absolute_import, division, unicode_literals

from genshi.core import QName, Attrs
from genshi.core import START, END, TEXT, COMMENT, DOCTYPE


def to_genshi(walker):
    """Adapt an html5lib tree-walker token stream to Genshi stream events.

    Yields ``(kind, data, (None, -1, -1))`` tuples; position info is not
    available from the walker, hence the ``(None, -1, -1)`` placeholder.
    """
    # Buffer of consecutive character-data chunks; flushed as one TEXT
    # event so adjacent Characters/SpaceCharacters tokens coalesce.
    text = []
    for token in walker:
        type = token["type"]
        if type in ("Characters", "SpaceCharacters"):
            text.append(token["data"])
        elif text:
            # First non-character token after buffered text: flush it.
            yield TEXT, "".join(text), (None, -1, -1)
            text = []

        if type in ("StartTag", "EmptyTag"):
            if token["namespace"]:
                # Genshi uses Clark notation: {namespace}localname.
                name = "{%s}%s" % (token["namespace"], token["name"])
            else:
                name = token["name"]
            # Attribute keys from html5lib are (namespace, name) pairs;
            # un-namespaced attributes keep their bare name.
            attrs = Attrs([(QName("{%s}%s" % attr if attr[0] is not None else attr[1]), value)
                           for attr, value in token["data"].items()])
            yield (START, (QName(name), attrs), (None, -1, -1))
            if type == "EmptyTag":
                # An empty element is expanded into START followed
                # immediately by END: fall through to the EndTag branch.
                type = "EndTag"

        if type == "EndTag":
            if token["namespace"]:
                name = "{%s}%s" % (token["namespace"], token["name"])
            else:
                name = token["name"]

            yield END, QName(name), (None, -1, -1)

        elif type == "Comment":
            yield COMMENT, token["data"], (None, -1, -1)

        elif type == "Doctype":
            yield DOCTYPE, (token["name"], token["publicId"],
                            token["systemId"]), (None, -1, -1)

        else:
            pass  # FIXME: What to do?

    # Flush any trailing character data left in the buffer.
    if text:
        yield TEXT, "".join(text), (None, -1, -1)
jpn--/pines
refs/heads/master
pines/streamers.py
1
class double_stream:
    """Tee-style writer: everything written goes to a file AND to stdout.

    Usable directly or as a context manager; on ``with``-exit the file is
    flushed and closed.
    """

    def __init__(self, filename, mode='w'):
        # Underlying file sink; default mode truncates/creates the file.
        self.file = open(filename, mode)

    def write(self, *args):
        """Write to the file and echo to stdout.

        NOTE(review): ``file.write`` accepts a single string, while
        ``print(*args, end="")`` would space-join multiple args — callers
        appear to pass one string at a time; verify against call sites.
        """
        self.file.write(*args)
        print(*args, end="")

    def flush(self):
        """Flush the underlying file buffer."""
        self.file.flush()

    def close(self):
        """Close the underlying file."""
        self.file.close()

    def __enter__(self):
        # BUG FIX: the original returned None, so `with double_stream(p) as s:`
        # bound s to None and s.write(...) raised AttributeError.  A context
        # manager's __enter__ must return the managed object.
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Flush then close on scope exit; exceptions are not suppressed
        # (implicit falsy return).
        self.flush()
        self.close()
google/uncertainty-baselines
refs/heads/main
uncertainty_baselines/models/movielens_test.py
1
# coding=utf-8 # Copyright 2021 The Uncertainty Baselines Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Lint as: python3 """Tests for uncertainty_baselines.models.criteo_mlp.""" import tensorflow as tf import uncertainty_baselines as ub class MovieLensTest(tf.test.TestCase): def testCreateModel(self): model = ub.models.movielens(31) self.assertLen(model.layers, 8) if __name__ == '__main__': tf.test.main()
Debian/openjfx
refs/heads/master
modules/web/src/main/native/Tools/Scripts/webkitpy/layout_tests/models/test_run_results.py
2
# Copyright (C) 2010 Google Inc. All rights reserved. # Copyright (C) 2010 Gabor Rapcsanyi (rgabor@inf.u-szeged.hu), University of Szeged # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
import datetime import logging import signal from webkitpy.layout_tests.models import test_expectations from webkitpy.layout_tests.models import test_failures _log = logging.getLogger(__name__) INTERRUPTED_EXIT_STATUS = signal.SIGINT + 128 class TestRunResults(object): def __init__(self, expectations, num_tests): self.total = num_tests self.remaining = self.total self.expectations = expectations self.expected = 0 self.unexpected = 0 self.unexpected_failures = 0 self.unexpected_crashes = 0 self.unexpected_timeouts = 0 self.tests_by_expectation = {} self.tests_by_timeline = {} self.results_by_name = {} # Map of test name to the last result for the test. self.all_results = [] # All results from a run, including every iteration of every test. self.unexpected_results_by_name = {} self.failures_by_name = {} self.total_failures = 0 self.expected_skips = 0 for expectation in test_expectations.TestExpectations.EXPECTATIONS.values(): self.tests_by_expectation[expectation] = set() for timeline in test_expectations.TestExpectations.TIMELINES.values(): self.tests_by_timeline[timeline] = expectations.model().get_tests_with_timeline(timeline) self.slow_tests = set() self.interrupted = False self.keyboard_interrupted = False def add(self, test_result, expected, test_is_slow): self.tests_by_expectation[test_result.type].add(test_result.test_name) self.results_by_name[test_result.test_name] = test_result if test_result.is_other_crash: return if test_result.type != test_expectations.SKIP: self.all_results.append(test_result) self.remaining -= 1 if len(test_result.failures): self.total_failures += 1 self.failures_by_name[test_result.test_name] = test_result.failures if expected: self.expected += 1 if test_result.type == test_expectations.SKIP: self.expected_skips += 1 else: self.unexpected_results_by_name[test_result.test_name] = test_result self.unexpected += 1 if len(test_result.failures): self.unexpected_failures += 1 if test_result.type == test_expectations.CRASH: 
self.unexpected_crashes += 1 elif test_result.type == test_expectations.TIMEOUT: self.unexpected_timeouts += 1 if test_is_slow: self.slow_tests.add(test_result.test_name) def merge(self, test_run_results): if not test_run_results: return self # self.expectations should be the same for both self.total += test_run_results.total self.remaining += test_run_results.remaining self.expected += test_run_results.expected self.unexpected += test_run_results.unexpected self.unexpected_failures += test_run_results.unexpected_failures self.unexpected_crashes += test_run_results.unexpected_crashes self.unexpected_timeouts += test_run_results.unexpected_timeouts self.tests_by_expectation.update(test_run_results.tests_by_expectation) self.tests_by_timeline.update(test_run_results.tests_by_timeline) self.results_by_name.update(test_run_results.results_by_name) self.all_results += test_run_results.all_results self.unexpected_results_by_name.update(test_run_results.unexpected_results_by_name) self.failures_by_name.update(test_run_results.failures_by_name) self.total_failures += test_run_results.total_failures self.expected_skips += test_run_results.expected_skips self.tests_by_expectation.update(test_run_results.tests_by_expectation) self.tests_by_timeline.update(test_run_results.tests_by_timeline) self.slow_tests.update(test_run_results.slow_tests) self.interrupted |= test_run_results.interrupted self.keyboard_interrupted |= test_run_results.keyboard_interrupted return self class RunDetails(object): def __init__(self, exit_code, summarized_results=None, initial_results=None, retry_results=None, enabled_pixel_tests_in_retry=False): self.exit_code = exit_code self.summarized_results = summarized_results self.initial_results = initial_results self.retry_results = retry_results self.enabled_pixel_tests_in_retry = enabled_pixel_tests_in_retry def _interpret_test_failures(failures): test_dict = {} failure_types = [type(failure) for failure in failures] # FIXME: get rid of all this is_* 
values once there is a 1:1 map between # TestFailure type and test_expectations.EXPECTATION. if test_failures.FailureMissingAudio in failure_types: test_dict['is_missing_audio'] = True if test_failures.FailureMissingResult in failure_types: test_dict['is_missing_text'] = True if test_failures.FailureMissingImage in failure_types or test_failures.FailureMissingImageHash in failure_types: test_dict['is_missing_image'] = True if 'image_diff_percent' not in test_dict: for failure in failures: if isinstance(failure, test_failures.FailureImageHashMismatch) or isinstance(failure, test_failures.FailureReftestMismatch): test_dict['image_diff_percent'] = failure.diff_percent return test_dict # These results must match ones in print_unexpected_results() in views/buildbot_results.py. def summarize_results(port_obj, expectations, initial_results, retry_results, enabled_pixel_tests_in_retry, include_passes=False, include_time_and_modifiers=False): """Returns a dictionary containing a summary of the test runs, with the following fields: 'version': a version indicator 'fixable': The number of fixable tests (NOW - PASS) 'skipped': The number of skipped tests (NOW & SKIPPED) 'num_regressions': The number of non-flaky failures 'num_flaky': The number of flaky failures 'num_missing': The number of tests with missing results 'num_passes': The number of unexpected passes 'tests': a dict of tests -> {'expected': '...', 'actual': '...'} 'date': the current date and time """ results = {} results['version'] = 4 tbe = initial_results.tests_by_expectation tbt = initial_results.tests_by_timeline results['fixable'] = len(tbt[test_expectations.NOW] - tbe[test_expectations.PASS]) results['skipped'] = len(tbt[test_expectations.NOW] & tbe[test_expectations.SKIP]) num_passes = 0 num_flaky = 0 num_missing = 0 num_regressions = 0 keywords = {} for expecation_string, expectation_enum in test_expectations.TestExpectations.EXPECTATIONS.iteritems(): keywords[expectation_enum] = expecation_string.upper() 
for modifier_string, modifier_enum in test_expectations.TestExpectations.MODIFIERS.iteritems(): keywords[modifier_enum] = modifier_string.upper() tests = {} other_crashes_dict = {} for test_name, result in initial_results.results_by_name.iteritems(): # Note that if a test crashed in the original run, we ignore # whether or not it crashed when we retried it (if we retried it), # and always consider the result not flaky. expected = expectations.model().get_expectations_string(test_name) result_type = result.type actual = [keywords[result_type]] if result_type == test_expectations.SKIP: continue if result.is_other_crash: other_crashes_dict[test_name] = {} continue test_dict = {} if result.has_stderr: test_dict['has_stderr'] = True if result.reftest_type: test_dict.update(reftest_type=list(result.reftest_type)) if expectations.model().has_modifier(test_name, test_expectations.WONTFIX): test_dict['wontfix'] = True if result_type == test_expectations.PASS: num_passes += 1 # FIXME: include passing tests that have stderr output. 
if expected == 'PASS' and not include_passes: continue elif result_type == test_expectations.CRASH: if test_name in initial_results.unexpected_results_by_name: num_regressions += 1 test_dict['report'] = 'REGRESSION' elif result_type == test_expectations.MISSING: if test_name in initial_results.unexpected_results_by_name: num_missing += 1 test_dict['report'] = 'MISSING' elif test_name in initial_results.unexpected_results_by_name: if retry_results and test_name not in retry_results.unexpected_results_by_name: actual.extend(expectations.model().get_expectations_string(test_name).split(" ")) num_flaky += 1 test_dict['report'] = 'FLAKY' elif retry_results: retry_result_type = retry_results.unexpected_results_by_name[test_name].type if result_type != retry_result_type: if enabled_pixel_tests_in_retry and result_type == test_expectations.TEXT and (retry_result_type == test_expectations.IMAGE_PLUS_TEXT or retry_result_type == test_expectations.MISSING): if retry_result_type == test_expectations.MISSING: num_missing += 1 num_regressions += 1 test_dict['report'] = 'REGRESSION' else: num_flaky += 1 test_dict['report'] = 'FLAKY' actual.append(keywords[retry_result_type]) else: num_regressions += 1 test_dict['report'] = 'REGRESSION' else: num_regressions += 1 test_dict['report'] = 'REGRESSION' test_dict['expected'] = expected test_dict['actual'] = " ".join(actual) if include_time_and_modifiers: test_dict['time'] = round(1000 * result.test_run_time) # FIXME: Fix get_modifiers to return modifiers in new format. test_dict['modifiers'] = ' '.join(expectations.model().get_modifiers(test_name)).replace('BUGWK', 'webkit.org/b/') test_dict.update(_interpret_test_failures(result.failures)) if retry_results: retry_result = retry_results.unexpected_results_by_name.get(test_name) if retry_result: test_dict.update(_interpret_test_failures(retry_result.failures)) # Store test hierarchically by directory. e.g. 
# foo/bar/baz.html: test_dict # foo/bar/baz1.html: test_dict # # becomes # foo: { # bar: { # baz.html: test_dict, # baz1.html: test_dict # } # } parts = test_name.split('/') current_map = tests for i, part in enumerate(parts): if i == (len(parts) - 1): current_map[part] = test_dict break if part not in current_map: current_map[part] = {} current_map = current_map[part] results['tests'] = tests results['num_passes'] = num_passes results['num_flaky'] = num_flaky results['num_missing'] = num_missing results['num_regressions'] = num_regressions results['uses_expectations_file'] = port_obj.uses_test_expectations_file() results['interrupted'] = initial_results.interrupted # Does results.html have enough information to compute this itself? (by checking total number of results vs. total number of tests?) results['layout_tests_dir'] = port_obj.layout_tests_dir() results['has_pretty_patch'] = port_obj.pretty_patch.pretty_patch_available() results['pixel_tests_enabled'] = port_obj.get_option('pixel_tests') results['other_crashes'] = other_crashes_dict results['date'] = datetime.datetime.now().strftime("%I:%M%p on %B %d, %Y") try: # We only use the svn revision for using trac links in the results.html file, # Don't do this by default since it takes >100ms. # FIXME: Do we really need to populate this both here and in the json_results_generator? if port_obj.get_option("builder_name"): port_obj.host.initialize_scm() results['revision'] = port_obj.host.scm().head_svn_revision() except Exception, e: _log.warn("Failed to determine svn revision for checkout (cwd: %s, webkit_base: %s), leaving 'revision' key blank in full_results.json.\n%s" % (port_obj._filesystem.getcwd(), port_obj.path_from_webkit_base(), e)) # Handle cases where we're running outside of version control. import traceback _log.debug('Failed to learn head svn revision:') _log.debug(traceback.format_exc()) results['revision'] = "" return results
geminy/aidear
refs/heads/master
oss/qt/qt-everywhere-opensource-src-5.9.0/qtwebengine/src/3rdparty/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/safebrowsing.py
6
# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import logging from common.chrome_proxy_shared_page_state import ChromeProxySharedPageState from telemetry.core import exceptions from telemetry.page import page as page_module from telemetry import story class SafebrowsingPage(page_module.Page): """ Why: Expect 'malware ahead' page. Use a short navigation timeout because no response will be received. """ def __init__(self, url, page_set, expect_timeout): super(SafebrowsingPage, self).__init__(url=url, page_set=page_set, shared_page_state_class=ChromeProxySharedPageState) self._expect_timeout = expect_timeout def RunNavigateSteps(self, action_runner): try: action_runner.Navigate(self.url, timeout_in_seconds=5) except exceptions.TimeoutException as e: if self._expect_timeout: logging.warning('Navigation timeout on page %s', self.url) else: raise e class SafebrowsingStorySet(story.StorySet): """ Chrome proxy test sites """ def __init__(self, expect_timeout=False): super(SafebrowsingStorySet, self).__init__() self.AddStory( SafebrowsingPage('http://www.ianfette.org/', self, expect_timeout))
harshita-gupta/Harvard-FRSEM-Catalog-2016-17
refs/heads/master
flask/lib/python2.7/site-packages/sqlalchemy/dialects/mysql/zxjdbc.py
55
# mysql/zxjdbc.py # Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # <see AUTHORS file> # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php """ .. dialect:: mysql+zxjdbc :name: zxjdbc for Jython :dbapi: zxjdbc :connectstring: mysql+zxjdbc://<user>:<password>@<hostname>[:<port>]/\ <database> :driverurl: http://dev.mysql.com/downloads/connector/j/ .. note:: Jython is not supported by current versions of SQLAlchemy. The zxjdbc dialect should be considered as experimental. Character Sets -------------- SQLAlchemy zxjdbc dialects pass unicode straight through to the zxjdbc/JDBC layer. To allow multiple character sets to be sent from the MySQL Connector/J JDBC driver, by default SQLAlchemy sets its ``characterEncoding`` connection property to ``UTF-8``. It may be overridden via a ``create_engine`` URL parameter. """ import re from ... import types as sqltypes, util from ...connectors.zxJDBC import ZxJDBCConnector from .base import BIT, MySQLDialect, MySQLExecutionContext class _ZxJDBCBit(BIT): def result_processor(self, dialect, coltype): """Converts boolean or byte arrays from MySQL Connector/J to longs.""" def process(value): if value is None: return value if isinstance(value, bool): return int(value) v = 0 for i in value: v = v << 8 | (i & 0xff) value = v return value return process class MySQLExecutionContext_zxjdbc(MySQLExecutionContext): def get_lastrowid(self): cursor = self.create_cursor() cursor.execute("SELECT LAST_INSERT_ID()") lastrowid = cursor.fetchone()[0] cursor.close() return lastrowid class MySQLDialect_zxjdbc(ZxJDBCConnector, MySQLDialect): jdbc_db_name = 'mysql' jdbc_driver_name = 'com.mysql.jdbc.Driver' execution_ctx_cls = MySQLExecutionContext_zxjdbc colspecs = util.update_copy( MySQLDialect.colspecs, { sqltypes.Time: sqltypes.Time, BIT: _ZxJDBCBit } ) def _detect_charset(self, connection): """Sniff out the character set in use for connection results.""" # 
Prefer 'character_set_results' for the current connection over the # value in the driver. SET NAMES or individual variable SETs will # change the charset without updating the driver's view of the world. # # If it's decided that issuing that sort of SQL leaves you SOL, then # this can prefer the driver value. rs = connection.execute("SHOW VARIABLES LIKE 'character_set%%'") opts = dict((row[0], row[1]) for row in self._compat_fetchall(rs)) for key in ('character_set_connection', 'character_set'): if opts.get(key, None): return opts[key] util.warn("Could not detect the connection character set. " "Assuming latin1.") return 'latin1' def _driver_kwargs(self): """return kw arg dict to be sent to connect().""" return dict(characterEncoding='UTF-8', yearIsDateType='false') def _extract_error_code(self, exception): # e.g.: DBAPIError: (Error) Table 'test.u2' doesn't exist # [SQLCode: 1146], [SQLState: 42S02] 'DESCRIBE `u2`' () m = re.compile(r"\[SQLCode\: (\d+)\]").search(str(exception.args)) c = m.group(1) if c: return int(c) def _get_server_version_info(self, connection): dbapi_con = connection.connection version = [] r = re.compile('[.\-]') for n in r.split(dbapi_con.dbversion): try: version.append(int(n)) except ValueError: version.append(n) return tuple(version) dialect = MySQLDialect_zxjdbc
meabsence/python-for-android
refs/heads/master
python-build/python-libs/gdata/src/gdata/base/service.py
166
#!/usr/bin/python # # Copyright (C) 2006 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """GBaseService extends the GDataService to streamline Google Base operations. GBaseService: Provides methods to query feeds and manipulate items. Extends GDataService. DictionaryToParamList: Function which converts a dictionary into a list of URL arguments (represented as strings). This is a utility function used in CRUD operations. """ __author__ = 'api.jscudder (Jeffrey Scudder)' import urllib import gdata import atom.service import gdata.service import gdata.base import atom # URL to which all batch requests are sent. BASE_BATCH_URL = 'http://www.google.com/base/feeds/items/batch' class Error(Exception): pass class RequestError(Error): pass class GBaseService(gdata.service.GDataService): """Client for the Google Base service.""" def __init__(self, email=None, password=None, source=None, server='base.google.com', api_key=None, additional_headers=None, handler=None, **kwargs): """Creates a client for the Google Base service. Args: email: string (optional) The user's email address, used for authentication. password: string (optional) The user's password. source: string (optional) The name of the user's application. server: string (optional) The name of the server to which a connection will be opened. Default value: 'base.google.com'. api_key: string (optional) The Google Base API key to use. **kwargs: The other parameters to pass to gdata.service.GDataService constructor. 
""" gdata.service.GDataService.__init__( self, email=email, password=password, service='gbase', source=source, server=server, additional_headers=additional_headers, handler=handler, **kwargs) self.api_key = api_key def _SetAPIKey(self, api_key): if not isinstance(self.additional_headers, dict): self.additional_headers = {} self.additional_headers['X-Google-Key'] = api_key def __SetAPIKey(self, api_key): self._SetAPIKey(api_key) def _GetAPIKey(self): if 'X-Google-Key' not in self.additional_headers: return None else: return self.additional_headers['X-Google-Key'] def __GetAPIKey(self): return self._GetAPIKey() api_key = property(__GetAPIKey, __SetAPIKey, doc="""Get or set the API key to be included in all requests.""") def Query(self, uri, converter=None): """Performs a style query and returns a resulting feed or entry. Args: uri: string The full URI which be queried. Examples include '/base/feeds/snippets?bq=digital+camera', 'http://www.google.com/base/feeds/snippets?bq=digital+camera' '/base/feeds/items' I recommend creating a URI using a query class. converter: func (optional) A function which will be executed on the server's response. Examples include GBaseItemFromString, etc. Returns: If converter was specified, returns the results of calling converter on the server's response. If converter was not specified, and the result was an Atom Entry, returns a GBaseItem, by default, the method returns the result of calling gdata.service's Get method. 
""" result = self.Get(uri, converter=converter) if converter: return result elif isinstance(result, atom.Entry): return gdata.base.GBaseItemFromString(result.ToString()) return result def QuerySnippetsFeed(self, uri): return self.Get(uri, converter=gdata.base.GBaseSnippetFeedFromString) def QueryItemsFeed(self, uri): return self.Get(uri, converter=gdata.base.GBaseItemFeedFromString) def QueryAttributesFeed(self, uri): return self.Get(uri, converter=gdata.base.GBaseAttributesFeedFromString) def QueryItemTypesFeed(self, uri): return self.Get(uri, converter=gdata.base.GBaseItemTypesFeedFromString) def QueryLocalesFeed(self, uri): return self.Get(uri, converter=gdata.base.GBaseLocalesFeedFromString) def GetItem(self, uri): return self.Get(uri, converter=gdata.base.GBaseItemFromString) def GetSnippet(self, uri): return self.Get(uri, converter=gdata.base.GBaseSnippetFromString) def GetAttribute(self, uri): return self.Get(uri, converter=gdata.base.GBaseAttributeEntryFromString) def GetItemType(self, uri): return self.Get(uri, converter=gdata.base.GBaseItemTypeEntryFromString) def GetLocale(self, uri): return self.Get(uri, converter=gdata.base.GDataEntryFromString) def InsertItem(self, new_item, url_params=None, escape_params=True, converter=None): """Adds an item to Google Base. Args: new_item: atom.Entry or subclass A new item which is to be added to Google Base. url_params: dict (optional) Additional URL parameters to be included in the insertion request. escape_params: boolean (optional) If true, the url_parameters will be escaped before they are included in the request. converter: func (optional) Function which is executed on the server's response before it is returned. Usually this is a function like GBaseItemFromString which will parse the response and turn it into an object. Returns: If converter is defined, the results of running converter on the server's response. Otherwise, it will be a GBaseItem. 
""" response = self.Post(new_item, '/base/feeds/items', url_params=url_params, escape_params=escape_params, converter=converter) if not converter and isinstance(response, atom.Entry): return gdata.base.GBaseItemFromString(response.ToString()) return response def DeleteItem(self, item_id, url_params=None, escape_params=True): """Removes an item with the specified ID from Google Base. Args: item_id: string The ID of the item to be deleted. Example: 'http://www.google.com/base/feeds/items/13185446517496042648' url_params: dict (optional) Additional URL parameters to be included in the deletion request. escape_params: boolean (optional) If true, the url_parameters will be escaped before they are included in the request. Returns: True if the delete succeeded. """ return self.Delete('%s' % (item_id[len('http://www.google.com'):],), url_params=url_params, escape_params=escape_params) def UpdateItem(self, item_id, updated_item, url_params=None, escape_params=True, converter=gdata.base.GBaseItemFromString): """Updates an existing item. Args: item_id: string The ID of the item to be updated. Example: 'http://www.google.com/base/feeds/items/13185446517496042648' updated_item: atom.Entry, subclass, or string, containing the Atom Entry which will replace the base item which is stored at the item_id. url_params: dict (optional) Additional URL parameters to be included in the update request. escape_params: boolean (optional) If true, the url_parameters will be escaped before they are included in the request. converter: func (optional) Function which is executed on the server's response before it is returned. Usually this is a function like GBaseItemFromString which will parse the response and turn it into an object. Returns: If converter is defined, the results of running converter on the server's response. Otherwise, it will be a GBaseItem. 
""" response = self.Put(updated_item, item_id, url_params=url_params, escape_params=escape_params, converter=converter) if not converter and isinstance(response, atom.Entry): return gdata.base.GBaseItemFromString(response.ToString()) return response def ExecuteBatch(self, batch_feed, converter=gdata.base.GBaseItemFeedFromString): """Sends a batch request feed to the server. Args: batch_feed: gdata.BatchFeed A feed containing BatchEntry elements which contain the desired CRUD operation and any necessary entry data. converter: Function (optional) Function to be executed on the server's response. This function should take one string as a parameter. The default value is GBaseItemFeedFromString which will turn the result into a gdata.base.GBaseItem object. Returns: A gdata.BatchFeed containing the results. """ return self.Post(batch_feed, BASE_BATCH_URL, converter=converter) class BaseQuery(gdata.service.Query): def _GetBaseQuery(self): return self['bq'] def _SetBaseQuery(self, base_query): self['bq'] = base_query bq = property(_GetBaseQuery, _SetBaseQuery, doc="""The bq query parameter""")
hajicj/MUSCIMarker
refs/heads/develop
MUSCIMarker/merge_cropobject_lists.py
1
#!/usr/bin/env python """This is a simple script that merges a number of CropObject list files into one.""" from __future__ import print_function, unicode_literals import argparse import codecs import logging import time from muscima.cropobject import merge_cropobject_lists from muscima.io import parse_cropobject_list, export_cropobject_list __version__ = "0.0.1" __author__ = "Jan Hajic jr." def build_argument_parser(): parser = argparse.ArgumentParser(description=__doc__, add_help=True, formatter_class=argparse.RawDescriptionHelpFormatter) parser.add_argument('-i', '--inputs', nargs='+', required=True, help='Input CropObject lists. Will be appended' ' in the order in which they are given.') parser.add_argument('-o', '--output', required=True, help='Output file for the merged CropObject list.') parser.add_argument('-v', '--verbose', action='store_true', help='Turn on INFO messages.') parser.add_argument('--debug', action='store_true', help='Turn on DEBUG messages.') return parser def main(args): logging.info('Starting main...') _start_time = time.clock() logging.warning('Merging CropObject lists is now very dangerous,' ' becaues of the uid situation.') inputs = [parse_cropobject_list(f) for f in args.inputs] merged = merge_cropobject_lists(*inputs) with codecs.open(args.output, 'w', 'utf-8') as hdl: hdl.write(export_cropobject_list(merged)) hdl.write('\n') _end_time = time.clock() logging.info('merge_cropobject_lists.py done in {0:.3f} s'.format(_end_time - _start_time)) if __name__ == '__main__': parser = build_argument_parser() args = parser.parse_args() if args.verbose: logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO) if args.debug: logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.DEBUG) main(args)
wdzhou/mantid
refs/heads/master
scripts/AbinsModules/CalculatePowder.py
1
from __future__ import (absolute_import, division, print_function) import numpy as np import AbinsModules try: # noinspection PyUnresolvedReferences from pathos.multiprocessing import ProcessPool PATHOS_FOUND = True except ImportError: PATHOS_FOUND = False # noinspection PyMethodMayBeStatic class CalculatePowder(object): """ Class for calculating powder data. """ def __init__(self, filename=None, abins_data=None): """ :param filename: name of input DFT filename :param abins_data: object of type AbinsData with data from input DFT file """ if not isinstance(abins_data, AbinsModules.AbinsData): raise ValueError("Object of AbinsData was expected.") k_data = abins_data.get_kpoints_data().extract() gamma_pkt = AbinsModules.AbinsConstants.GAMMA_POINT self._frequencies = k_data["frequencies"] self._displacements = k_data["atomic_displacements"] self._num_atoms = self._displacements[gamma_pkt].shape[0] self._atoms_data = abins_data.get_atoms_data().extract() self._clerk = AbinsModules.IOmodule(input_filename=filename, group_name=AbinsModules.AbinsParameters.powder_data_group) def _calculate_powder(self): """ Calculates powder data (a_tensors, b_tensors according to aCLIMAX manual). 
""" # define container for powder data powder = AbinsModules.PowderData(num_atoms=self._num_atoms) k_indices = sorted(self._frequencies.keys()) # make sure dictionary keys are in the same order on each machine b_tensors = {} a_tensors = {} if PATHOS_FOUND: threads = AbinsModules.AbinsParameters.threads p_local = ProcessPool(nodes=threads) tensors = p_local.map(self._calculate_powder_k, k_indices) else: tensors = [self._calculate_powder_k(k=k) for k in k_indices] for indx, k in enumerate(k_indices): a_tensors[k] = tensors[indx][0] b_tensors[k] = tensors[indx][1] # fill powder object with powder data powder.set(dict(b_tensors=b_tensors, a_tensors=a_tensors)) return powder def _calculate_powder_k(self, k=None): """ :param k: k index """ # Notation for indices: # num_freq -- number of phonons # num_atoms -- number of atoms # num_k -- number of k-points # dim -- size of displacement vector for one atom (dim = 3) # masses[num_atoms, num_freq] masses = np.asarray([([self._atoms_data["atom_%s" % atom]["mass"]] * self._frequencies[k].size) for atom in range(self._num_atoms)]) # disp[num_atoms, num_freq, dim] disp = self._displacements[k] # factor[num_atoms, num_freq] factor = np.einsum('ij,j->ij', 1.0 / masses, AbinsModules.AbinsConstants.CONSTANT / self._frequencies[k]) # b_tensors[num_atoms, num_freq, dim, dim] b_tensors = np.einsum('ijkl,ij->ijkl', np.einsum('lki, lkj->lkij', disp, disp.conjugate()).real, factor) temp = np.fabs(b_tensors) indices = temp < AbinsModules.AbinsConstants.NUM_ZERO b_tensors[indices] = AbinsModules.AbinsConstants.NUM_ZERO # a_tensors[num_atoms, dim, dim] a_tensors = np.sum(a=b_tensors, axis=1) return a_tensors, b_tensors def get_formatted_data(self): """ Method to obtain data. 
:return: obtained data """ try: self._clerk.check_previous_data() data = self.load_formatted_data() self._report_progress(str(data) + " has been loaded from the HDF file.") except (IOError, ValueError) as err: self._report_progress("Warning: " + str(err) + " Data has to be calculated.") data = self.calculate_data() self._report_progress(str(data) + " has been calculated.") return data def calculate_data(self): """ Calculates mean square displacements. :return: object of type PowderData with mean square displacements. """ data = self._calculate_powder() self._clerk.add_file_attributes() self._clerk.add_data("powder_data", data.extract()) self._clerk.save() return data def load_formatted_data(self): """ Loads mean square displacements. :return: object of type PowderData with mean square displacements. """ data = self._clerk.load(list_of_datasets=["powder_data"]) k_pkt = AbinsModules.AbinsConstants.GAMMA_POINT powder_data = AbinsModules.PowderData(num_atoms=data["datasets"]["powder_data"]["b_tensors"][k_pkt].shape[0]) powder_data.set(data["datasets"]["powder_data"]) return powder_data def _report_progress(self, msg): """ :param msg: message to print out """ # In order to avoid # # RuntimeError: Pickling of "mantid.kernel._kernel.Logger" # instances is not enabled (http://www.boost.org/libs/python/doc/v2/pickle.html) # # logger has to be imported locally from mantid.kernel import logger logger.notice(msg)
azureplus/hue
refs/heads/master
desktop/core/ext-py/pytz-2015.2/pytz/lazy.py
514
from threading import RLock try: from UserDict import DictMixin except ImportError: from collections import Mapping as DictMixin # With lazy loading, we might end up with multiple threads triggering # it at the same time. We need a lock. _fill_lock = RLock() class LazyDict(DictMixin): """Dictionary populated on first use.""" data = None def __getitem__(self, key): if self.data is None: _fill_lock.acquire() try: if self.data is None: self._fill() finally: _fill_lock.release() return self.data[key.upper()] def __contains__(self, key): if self.data is None: _fill_lock.acquire() try: if self.data is None: self._fill() finally: _fill_lock.release() return key in self.data def __iter__(self): if self.data is None: _fill_lock.acquire() try: if self.data is None: self._fill() finally: _fill_lock.release() return iter(self.data) def __len__(self): if self.data is None: _fill_lock.acquire() try: if self.data is None: self._fill() finally: _fill_lock.release() return len(self.data) def keys(self): if self.data is None: _fill_lock.acquire() try: if self.data is None: self._fill() finally: _fill_lock.release() return self.data.keys() class LazyList(list): """List populated on first use.""" _props = [ '__str__', '__repr__', '__unicode__', '__hash__', '__sizeof__', '__cmp__', '__lt__', '__le__', '__eq__', '__ne__', '__gt__', '__ge__', 'append', 'count', 'index', 'extend', 'insert', 'pop', 'remove', 'reverse', 'sort', '__add__', '__radd__', '__iadd__', '__mul__', '__rmul__', '__imul__', '__contains__', '__len__', '__nonzero__', '__getitem__', '__setitem__', '__delitem__', '__iter__', '__reversed__', '__getslice__', '__setslice__', '__delslice__'] def __new__(cls, fill_iter=None): if fill_iter is None: return list() # We need a new class as we will be dynamically messing with its # methods. 
class LazyList(list): pass fill_iter = [fill_iter] def lazy(name): def _lazy(self, *args, **kw): _fill_lock.acquire() try: if len(fill_iter) > 0: list.extend(self, fill_iter.pop()) for method_name in cls._props: delattr(LazyList, method_name) finally: _fill_lock.release() return getattr(list, name)(self, *args, **kw) return _lazy for name in cls._props: setattr(LazyList, name, lazy(name)) new_list = LazyList() return new_list # Not all versions of Python declare the same magic methods. # Filter out properties that don't exist in this version of Python # from the list. LazyList._props = [prop for prop in LazyList._props if hasattr(list, prop)] class LazySet(set): """Set populated on first use.""" _props = ( '__str__', '__repr__', '__unicode__', '__hash__', '__sizeof__', '__cmp__', '__lt__', '__le__', '__eq__', '__ne__', '__gt__', '__ge__', '__contains__', '__len__', '__nonzero__', '__getitem__', '__setitem__', '__delitem__', '__iter__', '__sub__', '__and__', '__xor__', '__or__', '__rsub__', '__rand__', '__rxor__', '__ror__', '__isub__', '__iand__', '__ixor__', '__ior__', 'add', 'clear', 'copy', 'difference', 'difference_update', 'discard', 'intersection', 'intersection_update', 'isdisjoint', 'issubset', 'issuperset', 'pop', 'remove', 'symmetric_difference', 'symmetric_difference_update', 'union', 'update') def __new__(cls, fill_iter=None): if fill_iter is None: return set() class LazySet(set): pass fill_iter = [fill_iter] def lazy(name): def _lazy(self, *args, **kw): _fill_lock.acquire() try: if len(fill_iter) > 0: for i in fill_iter.pop(): set.add(self, i) for method_name in cls._props: delattr(LazySet, method_name) finally: _fill_lock.release() return getattr(set, name)(self, *args, **kw) return _lazy for name in cls._props: setattr(LazySet, name, lazy(name)) new_set = LazySet() return new_set # Not all versions of Python declare the same magic methods. # Filter out properties that don't exist in this version of Python # from the list. 
LazySet._props = [prop for prop in LazySet._props if hasattr(set, prop)]
jandom/rdkit
refs/heads/master
rdkit/Chem/BuildFragmentCatalog.py
1
# $Id$ # # Copyright (C) 2003-2008 Greg Landrum and Rational Discovery LLC # # @@ All Rights Reserved @@ # This file is part of the RDKit. # The contents are covered by the terms of the BSD license # which is included in the file license.txt, found at the root # of the RDKit source tree. # """ command line utility for working with FragmentCatalogs (CASE-type analysis) **Usage** BuildFragmentCatalog [optional args] <filename> filename, the name of a delimited text file containing InData, is required for some modes of operation (see below) **Command Line Arguments** - -n *maxNumMols*: specify the maximum number of molecules to be processed - -b: build the catalog and OnBitLists *requires InData* - -s: score compounds *requires InData and a Catalog, can use OnBitLists* - -g: calculate info gains *requires Scores* - -d: show details about high-ranking fragments *requires a Catalog and Gains* - --catalog=*filename*: filename with the pickled catalog. If -b is provided, this file will be overwritten. - --onbits=*filename*: filename to hold the pickled OnBitLists. If -b is provided, this file will be overwritten - --scores=*filename*: filename to hold the text score data. If -s is provided, this file will be overwritten - --gains=*filename*: filename to hold the text gains data. If -g is provided, this file will be overwritten - --details=*filename*: filename to hold the text details data. If -d is provided, this file will be overwritten. 
- --minPath=2: specify the minimum length for a path - --maxPath=6: specify the maximum length for a path - --smiCol=1: specify which column in the input data file contains SMILES - --actCol=-1: specify which column in the input data file contains activities - --nActs=2: specify the number of possible activity values - --nBits=-1: specify the maximum number of bits to show details for """ from __future__ import print_function import sys, os from rdkit.six.moves import cPickle #@UnresolvedImport #pylint: disable=F0401 from rdkit.six import next from rdkit import Chem from rdkit import RDConfig from rdkit.Chem import FragmentCatalog from rdkit.Dbase.DbConnection import DbConnect import numpy from rdkit.ML import InfoTheory import types _cvsVersion = "$Revision$" idx1 = _cvsVersion.find(':') + 1 idx2 = _cvsVersion.rfind('$') __VERSION_STRING = "%s" % (_cvsVersion[idx1:idx2]) def message(msg, dest=sys.stdout): dest.write(msg) def BuildCatalog(suppl, maxPts=-1, groupFileName=None, minPath=2, maxPath=6, reportFreq=10): """ builds a fragment catalog from a set of molecules in a delimited text block **Arguments** - suppl: a mol supplier - maxPts: (optional) if provided, this will set an upper bound on the number of points to be considered - groupFileName: (optional) name of the file containing functional group information - minPath, maxPath: (optional) names of the minimum and maximum path lengths to be considered - reportFreq: (optional) how often to display status information **Returns** a FragmentCatalog """ if groupFileName is None: groupFileName = os.path.join(RDConfig.RDDataDir, "FunctionalGroups.txt") fpParams = FragmentCatalog.FragCatParams(minPath, maxPath, groupFileName) catalog = FragmentCatalog.FragCatalog(fpParams) fgen = FragmentCatalog.FragCatGenerator() if maxPts > 0: nPts = maxPts else: if hasattr(suppl, '__len__'): nPts = len(suppl) else: nPts = -1 for i, mol in enumerate(suppl): if i == nPts: break if i and not i % reportFreq: if nPts > -1: message('Done 
%d of %d, %d paths\n' % (i, nPts, catalog.GetFPLength())) else: message('Done %d, %d paths\n' % (i, catalog.GetFPLength())) fgen.AddFragsFromMol(mol, catalog) return catalog def ScoreMolecules(suppl, catalog, maxPts=-1, actName='', acts=None, nActs=2, reportFreq=10): """ scores the compounds in a supplier using a catalog **Arguments** - suppl: a mol supplier - catalog: the FragmentCatalog - maxPts: (optional) the maximum number of molecules to be considered - actName: (optional) the name of the molecule's activity property. If this is not provided, the molecule's last property will be used. - acts: (optional) a sequence of activity values (integers). If not provided, the activities will be read from the molecules. - nActs: (optional) number of possible activity values - reportFreq: (optional) how often to display status information **Returns** a 2-tuple: 1) the results table (a 3D array of ints nBits x 2 x nActs) 2) a list containing the on bit lists for each molecule """ nBits = catalog.GetFPLength() resTbl = numpy.zeros((nBits, 2, nActs), numpy.int) obls = [] if not actName and not acts: actName = suppl[0].GetPropNames()[-1] fpgen = FragmentCatalog.FragFPGenerator() suppl.reset() i = 1 for mol in suppl: if i and not i % reportFreq: message('Done %d.\n' % (i)) if mol: if not acts: act = int(mol.GetProp(actName)) else: act = acts[i - 1] fp = fpgen.GetFPForMol(mol, catalog) obls.append([x for x in fp.GetOnBits()]) for j in range(nBits): resTbl[j, 0, act] += 1 for id in obls[i - 1]: resTbl[id - 1, 0, act] -= 1 resTbl[id - 1, 1, act] += 1 else: obls.append([]) i += 1 return resTbl, obls def ScoreFromLists(bitLists, suppl, catalog, maxPts=-1, actName='', acts=None, nActs=2, reportFreq=10): """ similar to _ScoreMolecules()_, but uses pre-calculated bit lists for the molecules (this speeds things up a lot) **Arguments** - bitLists: sequence of on bit sequences for the input molecules - suppl: the input supplier (we read activities from here) - catalog: the 
FragmentCatalog - maxPts: (optional) the maximum number of molecules to be considered - actName: (optional) the name of the molecule's activity property. If this is not provided, the molecule's last property will be used. - nActs: (optional) number of possible activity values - reportFreq: (optional) how often to display status information **Returns** the results table (a 3D array of ints nBits x 2 x nActs) """ nBits = catalog.GetFPLength() if maxPts > 0: nPts = maxPts else: nPts = len(bitLists) resTbl = numpy.zeros((nBits, 2, nActs), numpy.int) if not actName and not acts: actName = suppl[0].GetPropNames()[-1] suppl.reset() for i in range(1, nPts + 1): mol = next(suppl) if not acts: act = int(mol.GetProp(actName)) else: act = acts[i - 1] if i and not i % reportFreq: message('Done %d of %d\n' % (i, nPts)) ids = set() for id in bitLists[i - 1]: ids.add(id - 1) for j in range(nBits): resTbl[j, 0, act] += 1 for id in ids: resTbl[id, 0, act] -= 1 resTbl[id, 1, act] += 1 return resTbl def CalcGains(suppl, catalog, topN=-1, actName='', acts=None, nActs=2, reportFreq=10, biasList=None, collectFps=0): """ calculates info gains by constructing fingerprints *DOC* Returns a 2-tuple: 1) gains matrix 2) list of fingerprints """ nBits = catalog.GetFPLength() if topN < 0: topN = nBits if not actName and not acts: actName = suppl[0].GetPropNames()[-1] gains = [0] * nBits if hasattr(suppl, '__len__'): nMols = len(suppl) else: nMols = -1 fpgen = FragmentCatalog.FragFPGenerator() #ranker = InfoTheory.InfoBitRanker(nBits,nActs,InfoTheory.InfoType.ENTROPY) if biasList: ranker = InfoTheory.InfoBitRanker(nBits, nActs, InfoTheory.InfoType.BIASENTROPY) ranker.SetBiasList(biasList) else: ranker = InfoTheory.InfoBitRanker(nBits, nActs, InfoTheory.InfoType.ENTROPY) i = 0 fps = [] for mol in suppl: if not acts: try: act = int(mol.GetProp(actName)) except KeyError: message('ERROR: Molecule has no property: %s\n' % (actName)) message('\tAvailable properties are: %s\n' % 
(str(mol.GetPropNames()))) raise KeyError(actName) else: act = acts[i] if i and not i % reportFreq: if nMols > 0: message('Done %d of %d.\n' % (i, nMols)) else: message('Done %d.\n' % (i)) fp = fpgen.GetFPForMol(mol, catalog) ranker.AccumulateVotes(fp, act) i += 1 if collectFps: fps.append(fp) gains = ranker.GetTopN(topN) return gains, fps def CalcGainsFromFps(suppl, fps, topN=-1, actName='', acts=None, nActs=2, reportFreq=10, biasList=None): """ calculates info gains from a set of fingerprints *DOC* """ nBits = len(fps[0]) if topN < 0: topN = nBits if not actName and not acts: actName = suppl[0].GetPropNames()[-1] gains = [0] * nBits if hasattr(suppl, '__len__'): nMols = len(suppl) else: nMols = -1 if biasList: ranker = InfoTheory.InfoBitRanker(nBits, nActs, InfoTheory.InfoType.BIASENTROPY) ranker.SetBiasList(biasList) else: ranker = InfoTheory.InfoBitRanker(nBits, nActs, InfoTheory.InfoType.ENTROPY) for i, mol in enumerate(suppl): if not acts: try: act = int(mol.GetProp(actName)) except KeyError: message('ERROR: Molecule has no property: %s\n' % (actName)) message('\tAvailable properties are: %s\n' % (str(mol.GetPropNames()))) raise KeyError(actName) else: act = acts[i] if i and not i % reportFreq: if nMols > 0: message('Done %d of %d.\n' % (i, nMols)) else: message('Done %d.\n' % (i)) fp = fps[i] ranker.AccumulateVotes(fp, act) gains = ranker.GetTopN(topN) return gains def OutputGainsData(outF, gains, cat, nActs=2): actHeaders = ['Act-%d' % (x) for x in range(nActs)] if cat: outF.write('id,Description,Gain,%s\n' % (','.join(actHeaders))) else: outF.write('id,Gain,%s\n' % (','.join(actHeaders))) for entry in gains: id = int(entry[0]) outL = [str(id)] if cat: descr = cat.GetBitDescription(id) outL.append(descr) outL.append('%.6f' % entry[1]) outL += ['%d' % x for x in entry[2:]] outF.write(','.join(outL)) outF.write('\n') def ProcessGainsData(inF, delim=',', idCol=0, gainCol=1): """ reads a list of ids and info gains out of an input file """ res = [] inL = 
inF.readline() for line in inF.xreadlines(): splitL = line.strip().split(delim) res.append((splitL[idCol], float(splitL[gainCol]))) return res def ShowDetails(catalog, gains, nToDo=-1, outF=sys.stdout, idCol=0, gainCol=1, outDelim=','): """ gains should be a sequence of sequences. The idCol entry of each sub-sequence should be a catalog ID. _ProcessGainsData()_ provides suitable input. """ if nToDo < 0: nToDo = len(gains) for i in range(nToDo): id = int(gains[i][idCol]) gain = float(gains[i][gainCol]) descr = catalog.GetFragDescription(id) if descr: outF.write('%s\n' % (outDelim.join((str(id), descr, str(gain))))) def SupplierFromDetails(details): from rdkit.VLib.NodeLib.DbMolSupply import DbMolSupplyNode from rdkit.VLib.NodeLib.SmilesSupply import SmilesSupplyNode if details.dbName: conn = DbConnect(details.dbName, details.tableName) suppl = DbMolSupplyNode(conn.GetData()) else: suppl = SmilesSupplyNode(details.inFileName, delim=details.delim, nameColumn=details.nameCol, smilesColumn=details.smiCol, titleLine=details.hasTitle) if type(details.actCol) == types.IntType: suppl.reset() m = next(suppl) actName = m.GetPropNames()[details.actCol] details.actCol = actName if type(details.nameCol) == types.IntType: suppl.reset() m = next(suppl) nameName = m.GetPropNames()[details.nameCol] details.nameCol = nameName suppl.reset() if type(details.actCol) == types.IntType: suppl.reset() m = next(suppl) actName = m.GetPropNames()[details.actCol] details.actCol = actName if type(details.nameCol) == types.IntType: suppl.reset() m = next(suppl) nameName = m.GetPropNames()[details.nameCol] details.nameCol = nameName suppl.reset() return suppl def Usage(): print("This is BuildFragmentCatalog version %s" % (__VERSION_STRING)) print('usage error') #print(__doc__) sys.exit(-1) class RunDetails(object): numMols = -1 doBuild = 0 doSigs = 0 doScore = 0 doGains = 0 doDetails = 0 catalogName = None onBitsName = None scoresName = None gainsName = None dbName = '' tableName = None 
detailsName = None inFileName = None fpName = None minPath = 2 maxPath = 6 smiCol = 1 actCol = -1 nameCol = -1 hasTitle = 1 nActs = 2 nBits = -1 delim = ',' biasList = None topN = -1 def ParseArgs(details): import getopt try: args, extras = getopt.getopt(sys.argv[1:], 'n:d:cst', ['catalog=', 'onbits=', 'scoresFile=', 'gainsFile=', 'detailsFile=', 'fpFile=', 'minPath=', 'maxPath=', 'smiCol=', 'actCol=', 'nameCol=', 'nActs=', 'nBits=', 'biasList=', 'topN=', 'build', 'sigs', 'gains', 'details', 'score', 'noTitle']) except Exception: sys.stderr.write('Error parsing command line:\n') import traceback traceback.print_exc() Usage() for arg, val in args: if arg == '-n': details.numMols = int(val) elif arg == '-c': details.delim = ',' elif arg == '-s': details.delim = ' ' elif arg == '-t': details.delim = '\t' elif arg == '-d': details.dbName = val elif arg == '--build': details.doBuild = 1 elif arg == '--score': details.doScore = 1 elif arg == '--gains': details.doGains = 1 elif arg == '--sigs': details.doSigs = 1 elif arg == '-details': details.doDetails = 1 elif arg == '--catalog': details.catalogName = val elif arg == '--onbits': details.onBitsName = val elif arg == '--scoresFile': details.scoresName = val elif arg == '--gainsFile': details.gainsName = val elif arg == '--detailsFile': details.detailsName = val elif arg == '--fpFile': details.fpName = val elif arg == '--minPath': details.minPath = int(val) elif arg == '--maxPath': details.maxPath = int(val) elif arg == '--smiCol': try: details.smiCol = int(val) except ValueError: details.smiCol = val elif arg == '--actCol': try: details.actCol = int(val) except ValueError: details.actCol = val elif arg == '--nameCol': try: details.nameCol = int(val) except ValueError: details.nameCol = val elif arg == '--nActs': details.nActs = int(val) elif arg == '--nBits': details.nBits = int(val) elif arg == '--noTitle': details.hasTitle = 0 elif arg == '--biasList': details.biasList = tuple(eval(val)) elif arg == '--topN': 
details.topN = int(val) elif arg == '-h': Usage() sys.exit(0) else: Usage() if len(extras): if details.dbName: details.tableName = extras[0] else: details.inFileName = extras[0] else: Usage() if __name__ == '__main__': import time details = RunDetails() ParseArgs(details) from io import StringIO suppl = SupplierFromDetails(details) cat = None obls = None if details.doBuild: if not suppl: message("We require inData to generate a catalog\n") sys.exit(-2) message("Building catalog\n") t1 = time.time() cat = BuildCatalog(suppl, maxPts=details.numMols, minPath=details.minPath, maxPath=details.maxPath) t2 = time.time() message("\tThat took %.2f seconds.\n" % (t2 - t1)) if details.catalogName: message("Dumping catalog data\n") cPickle.dump(cat, open(details.catalogName, 'wb+')) elif details.catalogName: message("Loading catalog\n") cat = cPickle.load(open(details.catalogName, 'rb')) if details.onBitsName: try: obls = cPickle.load(open(details.onBitsName, 'rb')) except Exception: obls = None else: if len(obls) < (inD.count('\n') - 1): obls = None scores = None if details.doScore: if not suppl: message("We require inData to score molecules\n") sys.exit(-2) if not cat: message("We require a catalog to score molecules\n") sys.exit(-2) message("Scoring compounds\n") if not obls or len(obls) < details.numMols: scores, obls = ScoreMolecules(suppl, cat, maxPts=details.numMols, actName=details.actCol, nActs=details.nActs) if details.scoresName: cPickle.dump(scores, open(details.scoresName, 'wb+')) if details.onBitsName: cPickle.dump(obls, open(details.onBitsName, 'wb+')) else: scores = ScoreFromLists(obls, suppl, cat, maxPts=details.numMols, actName=details.actCol, nActs=details.nActs) elif details.scoresName: scores = cPickle.load(open(details.scoresName, 'rb')) if details.fpName and os.path.exists(details.fpName) and not details.doSigs: message("Reading fingerprints from file.\n") fps = cPickle.load(open(details.fpName, 'rb')) else: fps = [] gains = None if details.doGains: if 
not suppl: message("We require inData to calculate gains\n") sys.exit(-2) if not (cat or fps): message("We require either a catalog or fingerprints to calculate gains\n") sys.exit(-2) message("Calculating Gains\n") t1 = time.time() if details.fpName: collectFps = 1 else: collectFps = 0 if not fps: gains, fps = CalcGains(suppl, cat, topN=details.topN, actName=details.actCol, nActs=details.nActs, biasList=details.biasList, collectFps=collectFps) if details.fpName: message("Writing fingerprint file.\n") tmpF = open(details.fpName, 'wb+') cPickle.dump(fps, tmpF, 1) tmpF.close() else: gains = CalcGainsFromFps(suppl, fps, topN=details.topN, actName=details.actCol, nActs=details.nActs, biasList=details.biasList) t2 = time.time() message("\tThat took %.2f seconds.\n" % (t2 - t1)) if details.gainsName: outF = open(details.gainsName, 'w+') OutputGainsData(outF, gains, cat, nActs=details.nActs) else: if details.gainsName: inF = open(details.gainsName, 'r') gains = ProcessGainsData(inF) if details.doDetails: if not cat: message("We require a catalog to get details\n") sys.exit(-2) if not gains: message("We require gains data to get details\n") sys.exit(-2) io = StringIO() io.write('id,SMILES,gain\n') ShowDetails(cat, gains, nToDo=details.nBits, outF=io) if details.detailsName: open(details.detailsName, 'w+').write(io.getvalue()) else: sys.stderr.write(io.getvalue())
DomainGroupOSS/luigi
refs/heads/master
luigi/mrrunner.py
65
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2012-2015 Spotify AB # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """ The hadoop runner. This module contains the main() method which will be used to run the mapper and reducer on the Hadoop nodes. """ from __future__ import print_function try: import cPickle as pickle except ImportError: import pickle import logging import os import sys import tarfile import traceback class Runner(object): """ Run the mapper or reducer on hadoop nodes. """ def __init__(self, job=None): self.extract_packages_archive() self.job = job or pickle.load(open("job-instance.pickle", "rb")) self.job._setup_remote() def run(self, kind, stdin=sys.stdin, stdout=sys.stdout): if kind == "map": self.job.run_mapper(stdin, stdout) elif kind == "combiner": self.job.run_combiner(stdin, stdout) elif kind == "reduce": self.job.run_reducer(stdin, stdout) else: raise Exception('weird command: %s' % kind) def extract_packages_archive(self): if not os.path.exists("packages.tar"): return tar = tarfile.open("packages.tar") for tarinfo in tar: tar.extract(tarinfo) tar.close() if '' not in sys.path: sys.path.insert(0, '') def print_exception(exc): tb = traceback.format_exc() print('luigi-exc-hex=%s' % tb.encode('hex'), file=sys.stderr) def main(args=None, stdin=sys.stdin, stdout=sys.stdout, print_exception=print_exception): """ Run either the mapper or the reducer from the class instance in the file "job-instance.pickle". 
Arguments: kind -- is either map or reduce """ try: # Set up logging. logging.basicConfig(level=logging.WARN) kind = args is not None and args[1] or sys.argv[1] Runner().run(kind, stdin=stdin, stdout=stdout) except Exception as exc: # Dump encoded data that we will try to fetch using mechanize print_exception(exc) raise if __name__ == '__main__': main()
axbaretto/beam
refs/heads/master
sdks/python/.tox/lint/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.py
640
# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. ''' This module generates ANSI character codes to printing colors to terminals. See: http://en.wikipedia.org/wiki/ANSI_escape_code ''' CSI = '\033[' OSC = '\033]' BEL = '\007' def code_to_chars(code): return CSI + str(code) + 'm' def set_title(title): return OSC + '2;' + title + BEL def clear_screen(mode=2): return CSI + str(mode) + 'J' def clear_line(mode=2): return CSI + str(mode) + 'K' class AnsiCodes(object): def __init__(self): # the subclasses declare class attributes which are numbers. # Upon instantiation we define instance attributes, which are the same # as the class attributes but wrapped with the ANSI escape sequence for name in dir(self): if not name.startswith('_'): value = getattr(self, name) setattr(self, name, code_to_chars(value)) class AnsiCursor(object): def UP(self, n=1): return CSI + str(n) + 'A' def DOWN(self, n=1): return CSI + str(n) + 'B' def FORWARD(self, n=1): return CSI + str(n) + 'C' def BACK(self, n=1): return CSI + str(n) + 'D' def POS(self, x=1, y=1): return CSI + str(y) + ';' + str(x) + 'H' class AnsiFore(AnsiCodes): BLACK = 30 RED = 31 GREEN = 32 YELLOW = 33 BLUE = 34 MAGENTA = 35 CYAN = 36 WHITE = 37 RESET = 39 # These are fairly well supported, but not part of the standard. LIGHTBLACK_EX = 90 LIGHTRED_EX = 91 LIGHTGREEN_EX = 92 LIGHTYELLOW_EX = 93 LIGHTBLUE_EX = 94 LIGHTMAGENTA_EX = 95 LIGHTCYAN_EX = 96 LIGHTWHITE_EX = 97 class AnsiBack(AnsiCodes): BLACK = 40 RED = 41 GREEN = 42 YELLOW = 43 BLUE = 44 MAGENTA = 45 CYAN = 46 WHITE = 47 RESET = 49 # These are fairly well supported, but not part of the standard. LIGHTBLACK_EX = 100 LIGHTRED_EX = 101 LIGHTGREEN_EX = 102 LIGHTYELLOW_EX = 103 LIGHTBLUE_EX = 104 LIGHTMAGENTA_EX = 105 LIGHTCYAN_EX = 106 LIGHTWHITE_EX = 107 class AnsiStyle(AnsiCodes): BRIGHT = 1 DIM = 2 NORMAL = 22 RESET_ALL = 0 Fore = AnsiFore() Back = AnsiBack() Style = AnsiStyle() Cursor = AnsiCursor()
HiLiph/wicd
refs/heads/master
wicd/networking.py
2
#!/usr/bin/env python # -*- coding: utf-8 -*- """ networking - Provides wrappers for common network operations This module provides wrappers of the common network tasks as well as threads to perform the actual connecting to networks. class Controller() -- Parent class to Wireless and Wired class ConnectThread() -- Parent class to WirelessConnectThread and WiredConnectThread class Wireless() -- Wrapper for various wireless functions class Wired() -- Wrapper for various wired functions class WirelessConnectThread() -- Connection thread for wireless interface class WiredConnectThread() -- Connection thread for wired interface """ # # Copyright (C) 2007 - 2009 Adam Blackburn # Copyright (C) 2007 - 2009 Dan O'Reilly # Copyright (C) 2007 - 2009 Byron Hillis # Copyright (C) 2009 Andrew Psaltis # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License Version 2 as # published by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # # # Much thanks to wieman01 for help and support with various types of encyption. # Also thanks to foxy123, yopnono, and the many others who reported bugs helped # and helped keep this project moving. # import re import time import threading import os from signal import SIGTERM # wicd imports import misc import wpath from backend import BackendManager from translations import _ if __name__ == '__main__': wpath.chdir(__file__) BACKEND = None BACKEND_MGR = BackendManager() def abortable(func): """ Mark a method in a ConnectionThread as abortable. 
This decorator runs a check that will abort the connection thread if necessary before running a given method. """ def wrapper(self, *__args, **__kargs): self.abort_if_needed() return func(self, *__args, **__kargs) wrapper.__name__ = func.__name__ wrapper.__dict__ = func.__dict__ wrapper.__doc__ = func.__doc__ wrapper.__module = func.__module__ return wrapper def get_backend_list(): """ Returns a list of available backends. """ if BACKEND_MGR: return BACKEND_MGR.get_available_backends() else: return [""] def get_backend_update_interval(): """ Returns the suggested connection status update interval. """ if BACKEND_MGR: return BACKEND_MGR.get_update_interval() else: return 5 # Seconds, this should never happen though. def get_current_backend(): """ Returns the current backend instance. """ if BACKEND_MGR: return BACKEND_MGR.get_current_backend() else: return None def get_backend_description(backend_name): """ Returns the description of the currently loaded backend. """ return BACKEND_MGR.get_backend_description(backend_name) def get_backend_description_dict(): """ Returns a dict of all available backend descriptions. """ d = {} for be in get_backend_list(): if be: d[be] = get_backend_description(be) return d def expand_script_macros(script, msg, bssid, essid): """Expands any supported macros in a script. Keyword arguments: script -- the script to execute. msg -- the name of the script, %{script} will be expanded to this. bssid -- the bssid of the network we connect to, defaults to 'wired'. essid -- the essid of the network we connect to, defaults to 'wired'. 
""" def repl(match): macro = match.group(1).lower() if macro_dict.has_key(macro): return macro_dict[macro] print 'Warning: found illegal macro %s in %s script' % (macro, msg) return match.group() macro_dict = { 'script' : msg, 'bssid' : bssid, 'essid' : essid } regex = re.compile(r'%\{([a-zA-Z0-9]+)\}') expanded = regex.sub(repl, script) print "Expanded '%s' to '%s'" % (script, expanded) return expanded class Controller(object): """ Parent class for the different interface types. """ def __init__(self, debug=False): """ Initialise the class. """ self.global_dns_1 = None self.global_dns_2 = None self.global_dns_3 = None self.global_dns_dom = None self.global_search_dom = None self._dhcp_client = None self._flush_tool = None self._debug = debug self._backend = None self.connecting_thread = None self.before_script = None self.after_script = None self.pre_disconnect_script = None self.post_disconnect_script = None self.driver = None self.iface = None def get_debug(self): return self._debug def set_debug(self, value): self._debug = value if self.iface: self.iface.SetDebugMode(value) debug = property(get_debug, set_debug) def set_dhcp_client(self, value): self._dhcp_client = value if self.iface: self.iface.DHCP_CLIENT = value def get_dhcp_client(self): return self._dhcp_client dhcp_client = property(get_dhcp_client, set_dhcp_client) def set_flush_tool(self, value): self._flush_tool = value if self.iface: self.iface.flush_tool = value def get_flush_tool(self): return self._flush_tool flush_tool = property(get_flush_tool, set_flush_tool) def LoadBackend(self, backend_name): """ Load the given networking backend. """ global BACKEND if backend_name == self._backend: return self._backend = BACKEND_MGR.load_backend(backend_name) BACKEND = self._backend def NeedsExternalCalls(self): """ Returns true if the loaded backend needs external calls. 
""" if self._backend: return self._backend.NeedsExternalCalls() else: return True def GetIP(self, ifconfig=""): """ Get the IP of the interface. Returns: The IP address of the interface in dotted notation. """ return self.iface.GetIP(ifconfig) def Disconnect(self, nettype, name, mac): """ Disconnect from the network. """ iface = self.iface # mac and name need to be strings if mac in (None, ''): mac = 'X' if name in (None, ''): name = 'X' misc.ExecuteScripts(wpath.predisconnectscripts, self.debug, extra_parameters=(nettype, name, mac)) if self.pre_disconnect_script: print 'Running pre-disconnect script' misc.ExecuteScript(expand_script_macros(self.pre_disconnect_script, 'pre-disconnection', mac, name), self.debug) iface.ReleaseDHCP() iface.SetAddress('0.0.0.0') iface.FlushRoutes() iface.Down() iface.Up() misc.ExecuteScripts(wpath.postdisconnectscripts, self.debug, extra_parameters=(nettype, name, mac)) if self.post_disconnect_script: print 'Running post-disconnect script' misc.ExecuteScript(expand_script_macros(self.post_disconnect_script, 'post-disconnection', mac, name), self.debug) def ReleaseDHCP(self): """ Release the DHCP lease for this interface. """ return self.iface.ReleaseDHCP() def KillDHCP(self): """ Kill the managed DHCP client if its in a connecting state. """ print 'running kill dhcp.' if (self.connecting_thread.is_connecting and self.iface.dhcp_object): if self.iface.dhcp_object.poll() is None: os.kill(self.iface.dhcp_object.pid, SIGTERM) self.iface.dhcp_object = None def IsUp(self): """ Calls the IsUp method for the wired interface. Returns: True if the interface is up, False otherwise. """ return self.iface.IsUp() def EnableInterface(self): """ Puts the interface up. Returns: True if the interface was put up succesfully, False otherwise. """ return self.iface.Up() def DisableInterface(self): """ Puts the interface down. Returns: True if the interface was put down succesfully, False otherwise. 
""" return self.iface.Down() def AppAvailable(self, app): """ Determine if the given application is installed. """ return self.iface.AppAvailable(app) class ConnectThread(threading.Thread): """ A class to perform network connections in a multi-threaded way. Useless on it's own, this class provides the generic functions necessary for connecting using a separate thread. """ is_connecting = None should_die = False lock = threading.Lock() def __init__(self, network, interface_name, before_script, after_script, pre_disconnect_script, post_disconnect_script, gdns1, gdns2, gdns3, gdns_dom, gsearch_dom, iface, debug): """ Initialise the required object variables and the thread. Keyword arguments: network -- the network to connect to wireless -- name of the wireless interface wired -- name of the wired interface before_script -- script to run before bringing up the interface after_script -- script to run after bringing up the interface pre_disconnect_script -- script to run before disconnection post_disconnect_script -- script to run after disconnection gdns1 -- global DNS server 1 gdns2 -- global DNS server 2 gdns3 -- global DNS server 3 debug -- debug mode status """ threading.Thread.__init__(self) self.network = network self.is_connecting = False self.is_aborted = False self.connect_result = None self.before_script = before_script self.after_script = after_script self.pre_disconnect_script = pre_disconnect_script self.post_disconnect_script = post_disconnect_script self._should_die = False self.abort_reason = "" self.connect_result = "" self.global_dns_1 = gdns1 self.global_dns_2 = gdns2 self.global_dns_3 = gdns3 self.global_dns_dom = gdns_dom self.global_search_dom = gsearch_dom self.iface = iface self.connecting_status = None self.debug = debug self.SetStatus('interface_down') def run(self): self.connect_result = "failed" try: self._connect() finally: self.is_connecting = False def set_should_die(self, val): self.lock.acquire() try: self._should_die = val finally: 
self.lock.release() def get_should_die(self): return self._should_die should_die = property(get_should_die, set_should_die) def SetStatus(self, status): """ Set the threads current status message in a thread-safe way. Keyword arguments: status -- the current connection status """ self.lock.acquire() try: self.connecting_status = status finally: self.lock.release() def GetStatus(self): """ Get the threads current status message in a thread-safe way. Returns: The current connection status. """ self.lock.acquire() try: status = self.connecting_status finally: self.lock.release() return status @abortable def reset_ip_addresses(self, iface): """ Resets the IP addresses for both wired/wireless interfaces. Sets a false ip so that when we set the real one, the correct routing entry is created. """ print 'Setting false IP...' self.SetStatus('resetting_ip_address') iface.SetAddress('0.0.0.0') @abortable def put_iface_down(self, iface): """ Puts the given interface down. """ print 'Putting interface down' self.SetStatus('interface_down') iface.Down() @abortable def run_global_scripts_if_needed(self, script_dir, extra_parameters=()): misc.ExecuteScripts(script_dir, verbose=self.debug, extra_parameters=extra_parameters) @abortable def run_script_if_needed(self, script, msg, bssid='wired', essid='wired'): """ Execute a given script if needed. Keyword arguments: script -- the script to execute, or None/'' if there isn't one. msg -- the name of the script to display in the log. """ if script: print 'Executing %s script' % (msg) misc.ExecuteScript(expand_script_macros(script, msg, bssid, essid), self.debug) @abortable def flush_routes(self, iface): """ Flush the routes for both wired/wireless interfaces. """ self.SetStatus('flushing_routing_table') print 'Flushing the routing table...' iface.FlushRoutes() @abortable def set_broadcast_address(self, iface): """ Set the broadcast address for the given interface. 
""" if not self.network.get('broadcast') == None: self.SetStatus('setting_broadcast_address') print 'Setting the broadcast address...' + self.network['broadcast'] iface.SetAddress(broadcast=self.network['broadcast']) @abortable def set_ip_address(self, iface): """ Set the IP address for the given interface. Assigns a static IP if one is requested, otherwise calls DHCP. """ if self.network.get('ip'): self.SetStatus('setting_static_ip') print 'Setting static IP : ' + self.network['ip'] iface.SetAddress(self.network['ip'], self.network['netmask']) if self.network.get('gateway'): print 'Setting default gateway : ' + self.network['gateway'] iface.SetDefaultRoute(self.network['gateway']) else: # Run dhcp... self.SetStatus('running_dhcp') if self.network.get('usedhcphostname') == None: self.network['usedhcphostname'] = False if self.network.get('dhcphostname') == None: self.network['dhcphostname'] = os.uname()[1] if not self.network['usedhcphostname']: hname = os.uname()[1] else: hname = self.network['dhcphostname'] print "Running DHCP with hostname",hname dhcp_status = iface.StartDHCP(hname) if dhcp_status in ['no_dhcp_offers', 'dhcp_failed']: if self.connect_result != "aborted": self.abort_connection(dhcp_status) return @abortable def set_dns_addresses(self, iface): """ Set the DNS address(es). If static DNS servers or global DNS servers are specified, set them. Otherwise do nothing. 
""" if self.network.get('use_global_dns'): iface.SetDNS(misc.Noneify(self.global_dns_1), misc.Noneify(self.global_dns_2), misc.Noneify(self.global_dns_3), misc.Noneify(self.global_dns_dom), misc.Noneify(self.global_search_dom)) elif self.network.get('use_static_dns') and (self.network.get('dns1') or self.network.get('dns2') or self.network.get('dns3')): self.SetStatus('setting_static_dns') iface.SetDNS(self.network.get('dns1'), self.network.get('dns2'), self.network.get('dns3'), self.network.get('dns_domain'), self.network.get('search_domain')) @abortable def release_dhcp_clients(self, iface): """ Release all running dhcp clients. """ print "Releasing DHCP leases..." iface.ReleaseDHCP() def connect_aborted(self, reason): """ Sets the thread status to aborted. """ if self.abort_reason: reason = self.abort_reason self.connecting_status = reason self.is_aborted = True self.connect_result = reason self.is_connecting = False print 'exiting connection thread' def abort_connection(self, reason=""): """ Schedule a connection abortion for the given reason. """ self.abort_reason = reason self.should_die = True def abort_if_needed(self): """ Abort the thread is it has been requested. """ self.lock.acquire() try: if self._should_die: self.connect_aborted('aborted') raise SystemExit finally: self.lock.release() @abortable def stop_wpa(self, iface): """ Stops wpa_supplicant. """ print 'Stopping wpa_supplicant' iface.StopWPA() @abortable def put_iface_up(self, iface): """ Bring up given interface. """ print 'Putting interface up...' self.SetStatus('interface_up') iface.Up() for x in range(0, 5): time.sleep(2) if iface.IsUp(): return self.abort_if_needed() # If we get here, the interface never came up print "WARNING: Timed out waiting for interface to come up" class Wireless(Controller): """ A wrapper for common wireless interface functions. """ def __init__(self, debug=False): """ Initialize the class. 
""" Controller.__init__(self, debug=debug) self._wpa_driver = None self._wireless_interface = None self.wiface = None self.should_verify_ap = True def set_wireless_iface(self, value): self._wireless_interface = value if self.wiface: self.wiface.SetInterface(value) def get_wireless_iface(self): return self._wireless_interface wireless_interface = property(get_wireless_iface, set_wireless_iface) def set_wpa_driver(self, value): self._wpa_driver = value if self.wiface: self.SetWPADriver(value) def get_wpa_driver(self): return self._wpa_driver wpa_driver = property(get_wpa_driver, set_wpa_driver) def set_iface(self, value): self.wiface = value def get_iface(self): return self.wiface iface = property(get_iface, set_iface) def LoadBackend(self, backend): """ Load a given backend. Load up a backend into the backend manager and associate with the networking interface. """ Controller.LoadBackend(self, backend) if self._backend: self.wiface = self._backend.WirelessInterface(self.wireless_interface, self.debug, self.wpa_driver) def Scan(self, essid=None): """ Scan for available wireless networks. Keyword arguments: essid -- The essid of a hidden network Returns: A list of available networks sorted by strength. """ def comp(x, y): if 'quality' in x: key = 'quality' else: key = 'strength' return cmp(x[key], y[key]) if not self.wiface: return [] wiface = self.wiface # Prepare the interface for scanning wiface.Up() # If there is a hidden essid then set it now, so that when it is # scanned it will be recognized. essid = misc.Noneify(essid) if essid is not None: print 'Setting hidden essid' + essid wiface.SetEssid(essid) # sleep for a bit; scanning to fast will result in nothing time.sleep(1) aps = wiface.GetNetworks() aps.sort(cmp=comp, reverse=True) return aps def Connect(self, network, debug=False): """ Spawn a connection thread to connect to the network. 
Keyword arguments: network -- network to connect to """ if not self.wiface: return False self.connecting_thread = WirelessConnectThread(network, self.wireless_interface, self.wpa_driver, self.before_script, self.after_script, self.pre_disconnect_script, self.post_disconnect_script, self.global_dns_1, self.global_dns_2, self.global_dns_3, self.global_dns_dom, self.global_search_dom, self.wiface, self.should_verify_ap, debug) self.connecting_thread.setDaemon(True) self.connecting_thread.start() return True def GetSignalStrength(self, iwconfig=""): """ Get signal strength of the current network. Returns: The current signal strength. """ return self.wiface.GetSignalStrength(iwconfig) def GetDBMStrength(self, iwconfig=""): """ Get the dBm signal strength of the current network. Returns: The current dBm signal strength. """ return self.wiface.GetDBMStrength(iwconfig) def GetCurrentNetwork(self, iwconfig=""): """ Get current network name. Returns: The name of the currently connected network. """ if self.connecting_thread and self.connecting_thread.is_connecting: return self.connecting_thread.network['essid'] return self.wiface.GetCurrentNetwork(iwconfig) def GetBSSID(self): """ Get the BSSID of the current access point. Returns: The MAC Adress of the active access point as a string, or None the BSSID can't be found. """ return self.wiface.GetBSSID() def GetCurrentBitrate(self, iwconfig): """ Get the current bitrate of the interface. Returns: The bitrate of the active access point as a string, or None the bitrate can't be found. """ return self.wiface.GetCurrentBitrate(iwconfig) def GetOperationalMode(self, iwconfig): """ Get the current operational mode of the interface. Returns: The operational mode of the interface as a string, or None if the operational mode can't be found. """ return self.wiface.GetOperationalMode(iwconfig) def GetAvailableAuthMethods(self, iwlistauth): """ Get the available authentication methods for the interface. 
Returns: The available authentication methods of the interface as a string, or None if the auth methods can't be found. """ return self.wiface.GetAvailableAuthMethods(iwlistauth) def GetIwconfig(self): """ Get the out of iwconfig. """ return self.wiface.GetIwconfig() def GetWpaSupplicantDrivers(self): """ Returns all valid wpa_supplicant drivers on the system. """ return BACKEND.GetWpaSupplicantDrivers() def StopWPA(self): return self.wiface.StopWPA() def CreateAdHocNetwork(self, essid, channel, ip, enctype, key, enc_used): """ Create an ad-hoc wireless network. Keyword arguments: essid -- essid of the ad-hoc network channel -- channel of the ad-hoc network ip -- ip of the ad-hoc network enctype -- unused key -- key of the ad-hoc network enc_used -- encrytion enabled on ad-hoc network """ wiface = self.wiface print 'Creating ad-hoc network' print 'Stopping dhcp client and wpa_supplicant' wiface.ReleaseDHCP() wiface.StopWPA() print 'Putting wireless interface down' wiface.Down() print 'Setting mode, channel, and essid' wiface.SetMode('ad-hoc') wiface.SetChannel(channel) wiface.SetEssid(essid) # Right now it just assumes you're using WEP if enc_used: print 'Setting encryption key' wiface.SetKey(key) print 'Putting interface up' wiface.Up() print 'Setting IP address' wiface.SetAddress(ip, '255.255.255.0') def DetectWirelessInterface(self): """ Detect available wireless interfaces. Returns: The first available wireless interface. """ ifaces = BACKEND.GetWirelessInterfaces() if ifaces: return ifaces[0] else: return None def GetKillSwitchStatus(self): """ Get the current status of the Killswitch. Returns: True if the killswitch is on, False otherwise. """ return self.wiface.GetKillSwitchStatus() def SwitchRfKill(self): """ Switches the rfkill on/off for wireless cards. 
""" types = ['wifi', 'wlan', 'wimax', 'wwan'] try: if self.GetRfKillStatus(): action = 'unblock' else: action = 'block' for t in types: cmd = ['rfkill', action, t] print "rfkill: %sing %s" % (action, t) misc.Run(cmd) return True except Exception, e: raise e return False def GetRfKillStatus(self): """ Determines if rfkill switch is active or not. Returns: True if rfkill (soft-)switch is enabled. """ cmd = 'rfkill list' rfkill_out = misc.Run(cmd) soft_blocks = filter(lambda x: x.startswith('Soft'), rfkill_out.split('\t')) for line in map(lambda x: x.strip(), soft_blocks): if line.endswith('yes'): return True return False def Disconnect(self): """ Disconnect the given iface. Executes the disconnect script associated with a given interface, Resets it's IP address, and puts the interface down then up. """ if BACKEND.NeedsExternalCalls(): iwconfig = self.GetIwconfig() else: iwconfig = None bssid = self.wiface.GetBSSID(iwconfig) essid = self.wiface.GetCurrentNetwork(iwconfig) Controller.Disconnect(self, 'wireless', essid, bssid) self.StopWPA() def SetWPADriver(self, driver): """ Sets the wpa_supplicant driver associated with the interface. """ self.wiface.SetWpaDriver(driver) class WirelessConnectThread(ConnectThread): """ A thread class to perform the connection to a wireless network. This thread, when run, will perform the necessary steps to connect to the specified network. """ def __init__(self, network, wireless, wpa_driver, before_script, after_script, pre_disconnect_script, post_disconnect_script, gdns1, gdns2, gdns3, gdns_dom, gsearch_dom, wiface, should_verify_ap, debug=False): """ Initialise the thread with network information. 
Keyword arguments: network -- the network to connect to wireless -- name of the wireless interface wpa_driver -- type of wireless interface before_script -- script to run before bringing up the interface after_script -- script to run after bringing up the interface pre_disconnect_script -- script to run before disconnection post_disconnect_script -- script to run after disconnection gdns1 -- global DNS server 1 gdns2 -- global DNS server 2 gdns3 -- global DNS server 3 """ ConnectThread.__init__(self, network, wireless, before_script, after_script, pre_disconnect_script, post_disconnect_script, gdns1, gdns2, gdns3, gdns_dom, gsearch_dom, wiface, debug) self.wpa_driver = wpa_driver self.should_verify_ap = should_verify_ap def _connect(self): """ The main function of the connection thread. This function performs the necessary calls to connect to the specified network, using the information provided. The following indicates the steps taken. 1. Run pre-connection script. 2. Take down the interface and clean up any previous connections. 3. Generate a PSK if required and authenticate. 4. Associate with the WAP. 5. Get/set IP address and DNS servers. """ wiface = self.iface self.is_connecting = True # Run pre-connection script. self.run_global_scripts_if_needed(wpath.preconnectscripts, extra_parameters=('wireless', self.network['essid'], self.network['bssid']) ) self.run_script_if_needed(self.before_script, 'pre-connection', self.network['bssid'], self.network['essid']) # Take down interface and clean up previous connections. self.put_iface_down(wiface) self.release_dhcp_clients(wiface) self.reset_ip_addresses(wiface) self.stop_wpa(wiface) self.flush_routes(wiface) wiface.SetMode(self.network['mode']) # Put interface up. self.SetStatus('configuring_interface') self.put_iface_up(wiface) # Generate PSK and authenticate if needed. if self.wpa_driver != 'ralink legacy': self.generate_psk_and_authenticate(wiface) # Associate. 
wiface.Associate(self.network['essid'], self.network['channel'], self.network['bssid']) # Authenticate after association for Ralink legacy cards. if self.wpa_driver == 'ralink legacy': if self.network.get('key'): wiface.Authenticate(self.network) # Validate Authentication. if self.network.get('enctype'): self.SetStatus('validating_authentication') if not wiface.ValidateAuthentication(time.time()): print "connect result is %s" % self.connect_result if not self.connect_result or self.connect_result == 'failed': self.abort_connection('bad_pass') # Set up gateway, IP address, and DNS servers. self.set_broadcast_address(wiface) self.set_ip_address(wiface) self.set_dns_addresses(wiface) self.verify_association(wiface) # Run post-connection script. self.run_global_scripts_if_needed(wpath.postconnectscripts, extra_parameters=('wireless', self.network['essid'], self.network['bssid']) ) self.run_script_if_needed(self.after_script, 'post-connection', self.network['bssid'], self.network['essid']) self.SetStatus('done') print 'Connecting thread exiting.' if self.debug: print "IP Address is: " + str(wiface.GetIP()) self.connect_result = "success" self.is_connecting = False @abortable def verify_association(self, iface): """ Verify that our association the AP is valid. Try to ping the gateway we have set to see if we're really associated with it. This is only done if we're using a static IP. """ if self.network.get('gateway') and self.should_verify_ap: self.SetStatus('verifying_association') print "Verifying AP association..." for tries in range(1, 11): print "Attempt %d of 10..." % tries retcode = self.iface.VerifyAPAssociation(self.network['gateway']) if retcode == 0: print "Successfully associated." break time.sleep(1) #TODO this should be in wnettools.py if retcode: print "Connection Failed: Failed to ping the access point!" # Clean up before aborting. 
iface.SetAddress('0.0.0.0') iface.FlushRoutes() if hasattr(iface, "StopWPA"): iface.StopWPA() self.abort_connection('association_failed') else: print 'not verifying' @abortable def generate_psk_and_authenticate(self, wiface): """ Generates a PSK and authenticates if necessary. Generates a PSK, and starts the authentication process if encryption is on. """ # Check to see if we need to generate a PSK (only for non-ralink # cards). if self.debug: print "enctype is %s" % self.network.get('enctype') if self.network.get('key') and 'wpa' in str(self.network.get('enctype')): self.SetStatus('generating_psk') print 'Generating psk...' self.network['psk'] = wiface.GeneratePSK(self.network) if not self.network.get('psk'): self.network['psk'] = self.network['key'] print 'WARNING: PSK generation failed! Falling back to ' + \ 'wireless key.\nPlease report this error to the wicd ' + \ 'developers!' # Generate the wpa_supplicant file... if self.network.get('enctype'): self.SetStatus('generating_wpa_config') print 'Attempting to authenticate...' wiface.Authenticate(self.network) class Wired(Controller): """ A wrapper for common wired interface functions. """ def __init__(self, debug=False): """ Initialise the class. """ Controller.__init__(self, debug=debug) self.wpa_driver = None self._link_detect = None self._wired_interface = None self.liface = None def set_link_detect(self, value): self._link_detect = value if self.liface: self.liface.link_detect = value def get_link_detect(self): return self._link_detect link_detect = property(get_link_detect, set_link_detect) def set_wired_iface(self, value): self._wired_interface = value if self.liface: self.liface.SetInterface(value) def get_wired_iface(self): return self._wired_interface wired_interface = property(get_wired_iface, set_wired_iface) def set_iface(self, value): self.liface = value def get_iface(self): return self.liface iface = property(get_iface, set_iface) def LoadBackend(self, backend): """ Load the backend up. 
""" Controller.LoadBackend(self, backend) if self._backend: self.liface = self._backend.WiredInterface(self.wired_interface, self.debug) def CheckPluggedIn(self): """ Check whether the wired connection is plugged in. Returns: The status of the physical connection link. """ return self.liface.GetPluggedIn() def Connect(self, network, debug=False): """ Spawn a connection thread to connect to the network. Keyword arguments: network -- network to connect to """ if not self.liface: return False self.connecting_thread = WiredConnectThread(network, self.wired_interface, self.before_script, self.after_script, self.pre_disconnect_script, self.post_disconnect_script, self.global_dns_1, self.global_dns_2, self.global_dns_3, self.global_dns_dom, self.global_search_dom, self.liface, debug) self.connecting_thread.setDaemon(True) self.connecting_thread.start() return self.connecting_thread def Disconnect(self): Controller.Disconnect(self, 'wired', 'wired', 'wired') self.StopWPA() def StopWPA(self): self.liface.StopWPA() def DetectWiredInterface(self): """ Attempts to automatically detect a wired interface. """ try: return BACKEND.GetWiredInterfaces()[0] except IndexError: return None class WiredConnectThread(ConnectThread): """ A thread class to perform the connection to a wired network. This thread, when run, will perform the necessary steps to connect to the specified network. """ def __init__(self, network, wired, before_script, after_script, pre_disconnect_script, post_disconnect_script, gdns1, gdns2, gdns3, gdns_dom, gsearch_dom, liface, debug=False): """ Initialise the thread with network information. 
Keyword arguments: network -- the network to connect to wireless -- name of the wireless interface wired -- name of the wired interface before_script -- script to run before bringing up the interface after_script -- script to run after bringing up the interface pre_disconnect_script -- script to run before disconnection post_disconnect_script -- script to run after disconnection gdns1 -- global DNS server 1 gdns2 -- global DNS server 2 gdns3 -- global DNS server 3 """ ConnectThread.__init__(self, network, wired, before_script, after_script, pre_disconnect_script, post_disconnect_script, gdns1, gdns2, gdns3, gdns_dom, gsearch_dom, liface, debug) def _connect(self): """ The main function of the connection thread. This function performs the necessary calls to connect to the specified network, using the information provided. The following indicates the steps taken. 1. Run pre-connection script. 2. Take down the interface and clean up any previous connections. 3. Bring up the interface. 4. Get/set IP address and DNS servers. 5. Run post-connection script. """ liface = self.iface self.is_connecting = True # Run pre-connection script. self.run_global_scripts_if_needed(wpath.preconnectscripts, extra_parameters=('wired', 'wired', self.network['profilename']) ) self.run_script_if_needed(self.before_script, 'pre-connection', 'wired', 'wired') # Take down interface and clean up previous connections. self.put_iface_down(liface) self.release_dhcp_clients(liface) self.reset_ip_addresses(liface) self.stop_wpa(liface) self.flush_routes(liface) # Bring up interface. self.put_iface_up(liface) # Manage encryption. if self.network.get('encryption_enabled'): liface.Authenticate(self.network) # Set gateway, IP adresses, and DNS servers. self.set_broadcast_address(liface) self.set_ip_address(liface) self.set_dns_addresses(liface) # Run post-connection script. 
self.run_global_scripts_if_needed(wpath.postconnectscripts, extra_parameters=('wired', 'wired', self.network['profilename']) ) self.run_script_if_needed(self.after_script, 'post-connection', 'wired', 'wired') self.SetStatus('done') print 'Connecting thread exiting.' if self.debug: print "IP Address is: " + str(liface.GetIP()) self.connect_result = "success" self.is_connecting = False
nistormihai/superdesk-core
refs/heads/master
tests/io/io_tests.py
2
# -*- coding: utf-8; -*- # # This file is part of Superdesk. # # Copyright 2013, 2014 Sourcefabric z.u. and contributors. # # For the full copyright and license information, please see the # AUTHORS and LICENSE files distributed with this source code, or # at https://www.sourcefabric.org/superdesk/license import os import unittest from datetime import timedelta from superdesk.utc import utcnow from superdesk.etree import etree, get_word_count, get_char_count from superdesk.io.registry import registered_feed_parsers from superdesk.io.feed_parsers.newsml_1_2 import NewsMLOneFeedParser from superdesk.io.feed_parsers.newsml_2_0 import NewsMLTwoFeedParser from superdesk.io.feed_parsers.nitf import NITFFeedParser from superdesk.io.feeding_services.file_service import FileFeedingService def get_etree(filename): dirname = os.path.dirname(os.path.realpath(__file__)) with open(os.path.join(dirname, 'fixtures', filename)) as f: return etree.fromstring(f.read().encode('utf-8')) class UtilsTest(unittest.TestCase): def test_get_word_count(self): self.assertEqual(2, get_word_count('plain text'), 'plain text') self.assertEqual(2, get_word_count('<p> html text </p>'), 'paragraph') self.assertEqual(22, get_word_count( '<doc><p xml:lang="en-US">The weather was superb today in Norfolk, Virginia. Made me want to take\n' 'out my boat, manufactured by the <org value="acm" idsrc="iptc.org">Acme Boat Company</org>.</p></doc>')) def test_get_char_count(self): self.assertEqual(10, get_char_count('plain text')) self.assertEqual(11, get_char_count('<p> html text </p>')) self.assertEqual(123, get_char_count( '<doc><p xml:lang="en-US">The weather was superb today in Norfolk, Virginia. 
Made me want to take\n' 'out my boat, manufactured by the <org value="acm" idsrc="iptc.org">Acme Boat Company</org>.</p></doc>')) def test_get_xml_parser_newsmlg2(self): etree = get_etree('snep.xml') self.assertIsInstance(FileFeedingService().get_feed_parser({'feed_parser': 'newsml2'}, etree), NewsMLTwoFeedParser) def test_get_xml_parser_nitf(self): etree = get_etree('nitf-fishing.xml') self.assertIsInstance(FileFeedingService().get_feed_parser({'feed_parser': 'nitf'}, etree), NITFFeedParser) def test_get_xml_parser_newsml12(self): etree = get_etree('afp.xml') self.assertIsInstance(FileFeedingService().get_feed_parser({'feed_parser': 'newsml12'}, etree), NewsMLOneFeedParser) def test_is_old_content(self): service = FileFeedingService() self.assertFalse(service.is_old_content(utcnow())) self.assertTrue(service.is_old_content(utcnow() - timedelta(minutes=11))) class ItemTest(unittest.TestCase): def setUpFixture(self, filename): self.tree = get_etree(filename) provider = {'name': 'Test'} for parser in registered_feed_parsers.values(): if parser.can_parse(self.tree): self.item = parser.parse(self.tree, provider)[0] class TextParserTest(ItemTest): def setUp(self): self.setUpFixture('text.xml') def test_instance(self): self.assertTrue(self.item) def test_parse_id(self): self.assertEqual("tag:reuters.com,0000:newsml_L4N0BT5PJ:263518268", self.item.get('guid')) self.assertEqual('263518268', self.item.get('version')) self.assertEqual(3, self.item.get('priority')) def test_parse_item_meta(self): self.assertEqual("text", self.item.get('type')) self.assertEqual("2013-03-01T15:09:04", self.item.get('versioncreated').isoformat()) self.assertEqual("2013-03-01T15:09:04", self.item.get('firstcreated').isoformat()) self.assertEqual("Editorial Note", self.item.get('ednote')) def test_parse_content_meta(self): self.assertEqual(3, self.item.get('urgency')) self.assertEqual("SOCCER-ENGLAND/CHELSEA-BENITEZ", self.item["slugline"]) self.assertEqual("Soccer-Smiling Benitez pleads for 
support " "after midweek outburst against opponent", self.item["headline"]) self.assertEqual("SOCCER-ENGLAND/CHELSEA-BENITEZ:Soccer-Smiling Benitez pleads for support after " "midweek outburst", self.item.get('description_text')) self.assertEqual("SOCCER-ENGLAND/CHELSEA-BENITEZ:Soccer-Smiling Benitez pleads for support after " "midweek outburst", self.item.get('archive_description')) def test_content_set(self): self.assertEqual("<p>By Toby Davis</p>", self.item.get('body_html')) self.assertEqual(569, self.item.get('word_count')) self.assertIsInstance(self.item.get('body_html'), type('')) def test_language(self): self.assertEqual('en', self.item.get('language')) def test_subject(self): self.assertEqual(2, len(self.item.get('subject'))) self.assertIn({'qcode': '15054000', 'name': 'soccer'}, self.item.get('subject')) def test_pubstatus(self): self.assertEqual('usable', self.item.get('pubstatus')) class PictureParserTest(ItemTest): def setUp(self): self.setUpFixture('picture.xml') def test_type(self): self.assertEqual('picture', self.item.get('type')) def test_content_set(self): self.assertEqual(3, len(self.item.get('renditions'))) self.assertEqual(4, self.item.get('priority')) remote = self.item.get('renditions').get('baseImage') self.assertTrue(remote) self.assertEqual("tag:reuters.com,0000:binary_GM1E9341HD701-BASEIMAGE", remote.get('residRef')) self.assertEqual(772617, remote.get('sizeinbytes')) self.assertEqual("image/jpeg", remote.get('mimetype')) self.assertEqual("http://content.reuters.com/auth-server/content/tag:reuters.com,0000:newsml_GM1E9341HD701:360624134/tag:reuters.com,0000:binary_GM1E9341HD701-BASEIMAGE", remote.get('href')) # noqa def test_byline(self): self.assertEqual('MARKO DJURICA', self.item.get('byline')) def test_place(self): self.assertEqual(2, len(self.item.get('place'))) self.assertIn({'name': 'NAIROBI'}, self.item['place']) self.assertIn({'name': 'Kenya'}, self.item['place']) class SNEPParserTest(ItemTest): def setUp(self): 
self.setUpFixture('snep.xml') def test_content_set(self): self.assertEqual(4, self.item.get('priority')) self.assertEqual(2, len(self.item.get('groups'))) group = self.item.get('groups')[0] self.assertTrue(group) self.assertEqual("root", group.get('id')) self.assertEqual("grpRole:SNEP", group.get('role')) self.assertEqual(1, len(group.get('refs'))) self.assertEqual("main", group.get('refs')[0].get('idRef')) group = self.item.get('groups')[1] self.assertEqual(10, len(group.get('refs'))) self.assertEqual("main", group.get('id')) ref = group.get('refs')[0] self.assertTrue(ref) self.assertEqual("tag:reuters.com,0000:newsml_BRE9220HA:15", ref.get('residRef')) self.assertEqual("application/vnd.iptc.g2.packageitem+xml", ref.get('contentType')) self.assertEqual("icls:composite", ref.get('itemClass')) self.assertEqual("At least 15 killed on Kenya coast on election day", ref.get('headline'))
TheTypoMaster/evennia
refs/heads/master
evennia/contrib/gendersub.py
5
""" Gendersub Griatch 2015 This is a simple gender-aware Character class for allowing users to insert custom markers in their text to indicate gender-aware messaging. It relies on a modified msg() and is meant as an inspiration and starting point to how to do stuff like this. When in use, all messages being sent to the character will make use of the character's gender, for example the echo ``` char.msg("%s falls on {p face with a thud." % char.key) ``` will result in "Tom falls on his|her|its face with a thud" depending on the gender of the object being messaged. Default gender is "neutral". To use, have DefaultCharacter inherit from this, or change setting.DEFAULT_CHARACTER to point to this class. The `@gender` command needs to be added to the default cmdset before it becomes available. """ import re from evennia import DefaultCharacter from evennia import Command # gender maps _GENDER_PRONOUN_MAP = {"male": {"s": "he", "o": "him", "p": "his", "a": "his"}, "female": {"s": "she", "o": "her", "p": "her", "a": "hers"}, "neutral": {"s": "it", "o": "it", "p": "its", "a": "its"}} _RE_GENDER_PRONOUN = re.compile(r'({s|{S|{o|{O|{p|{P|{a|{A)') # in-game command for setting the gender class SetGender(Command): """ Sets gender on yourself Usage: @gender male|female|neutral """ key = "@gender" alias = "@sex" locks = "call:all()" def func(self): """ Implements the command. """ caller = self.caller arg = self.args.strip().lower() if not arg in ("male", "female", "neutral"): caller.msg("Usage: @gender male|female|neutral") return caller.db.gender = arg caller.msg("Your gender was set to %s." % arg) # Gender-aware character class class GenderCharacter(DefaultCharacter): """ This is a Character class aware of gender. """ def at_object_creation(self): """ Called once when the object is created. """ super(GenderCharacter, self).at_object_creation() self.db.gender = "neutral" def _get_pronoun(self, regex_match): """ Get pronoun from the pronoun marker in the text. 
This is used as the callable for the re.sub function. Args: regex_match (MatchObject): the regular expression match. Notes: - `{s`, `{S`: Subjective form: he, she, it, He, She, It - `{o`, `{O`: Objective form: him, her, it, Him, Her, It - `{p`, `{P`: Possessive form: his, her, its, His, Her, Its - `{a`, `{A`: Absolute Possessive form: his, hers, its, His, Hers, Its """ typ = regex_match.group()[1] # "s", "O" etc gender = self.attributes.get("gender", default="neutral") gender = gender if gender in ("male", "female", "neutral") else "neutral" pronoun = _GENDER_PRONOUN_MAP[gender][typ.lower()] return pronoun.capitalize() if typ.isupper() else pronoun def msg(self, text, from_obj=None, sessid=0, **kwargs): """ Emits something to a session attached to the object. Overloads the default msg() implementation to include gender-aware markers in output. Args: text (str, optional): The message to send from_obj (obj, optional): object that is sending. If given, at_msg_send will be called sessid (int or list, optional): sessid or list of sessids to relay to, if any. If set, will force send regardless of MULTISESSION_MODE. Notes: `at_msg_receive` will be called on this Object. All extra kwargs will be passed on to the protocol. """ # pre-process the text before continuing text = _RE_GENDER_PRONOUN.sub(self._get_pronoun, text) super(GenderCharacter, self).msg(text, from_obj=from_obj, sessid=sessid, **kwargs)
elena/django
refs/heads/master
tests/template_tests/syntax_tests/test_width_ratio.py
77
from django.template import TemplateSyntaxError from django.test import SimpleTestCase from ..utils import setup class WidthRatioTagTests(SimpleTestCase): libraries = {'custom': 'template_tests.templatetags.custom'} @setup({'widthratio01': '{% widthratio a b 0 %}'}) def test_widthratio01(self): output = self.engine.render_to_string('widthratio01', {'a': 50, 'b': 100}) self.assertEqual(output, '0') @setup({'widthratio02': '{% widthratio a b 100 %}'}) def test_widthratio02(self): output = self.engine.render_to_string('widthratio02', {'a': 0, 'b': 0}) self.assertEqual(output, '0') @setup({'widthratio03': '{% widthratio a b 100 %}'}) def test_widthratio03(self): output = self.engine.render_to_string('widthratio03', {'a': 0, 'b': 100}) self.assertEqual(output, '0') @setup({'widthratio04': '{% widthratio a b 100 %}'}) def test_widthratio04(self): output = self.engine.render_to_string('widthratio04', {'a': 50, 'b': 100}) self.assertEqual(output, '50') @setup({'widthratio05': '{% widthratio a b 100 %}'}) def test_widthratio05(self): output = self.engine.render_to_string('widthratio05', {'a': 100, 'b': 100}) self.assertEqual(output, '100') @setup({'widthratio06': '{% widthratio a b 100 %}'}) def test_widthratio06(self): """ 62.5 should round to 62 """ output = self.engine.render_to_string('widthratio06', {'a': 50, 'b': 80}) self.assertEqual(output, '62') @setup({'widthratio07': '{% widthratio a b 100 %}'}) def test_widthratio07(self): """ 71.4 should round to 71 """ output = self.engine.render_to_string('widthratio07', {'a': 50, 'b': 70}) self.assertEqual(output, '71') # Raise exception if we don't have 3 args, last one an integer @setup({'widthratio08': '{% widthratio %}'}) def test_widthratio08(self): with self.assertRaises(TemplateSyntaxError): self.engine.get_template('widthratio08') @setup({'widthratio09': '{% widthratio a b %}'}) def test_widthratio09(self): with self.assertRaises(TemplateSyntaxError): self.engine.render_to_string('widthratio09', {'a': 50, 'b': 100}) 
@setup({'widthratio10': '{% widthratio a b 100.0 %}'}) def test_widthratio10(self): output = self.engine.render_to_string('widthratio10', {'a': 50, 'b': 100}) self.assertEqual(output, '50') @setup({'widthratio11': '{% widthratio a b c %}'}) def test_widthratio11(self): """ #10043: widthratio should allow max_width to be a variable """ output = self.engine.render_to_string('widthratio11', {'a': 50, 'c': 100, 'b': 100}) self.assertEqual(output, '50') # #18739: widthratio should handle None args consistently with # non-numerics @setup({'widthratio12a': '{% widthratio a b c %}'}) def test_widthratio12a(self): output = self.engine.render_to_string('widthratio12a', {'a': 'a', 'c': 100, 'b': 100}) self.assertEqual(output, '') @setup({'widthratio12b': '{% widthratio a b c %}'}) def test_widthratio12b(self): output = self.engine.render_to_string('widthratio12b', {'a': None, 'c': 100, 'b': 100}) self.assertEqual(output, '') @setup({'widthratio13a': '{% widthratio a b c %}'}) def test_widthratio13a(self): output = self.engine.render_to_string('widthratio13a', {'a': 0, 'c': 100, 'b': 'b'}) self.assertEqual(output, '') @setup({'widthratio13b': '{% widthratio a b c %}'}) def test_widthratio13b(self): output = self.engine.render_to_string('widthratio13b', {'a': 0, 'c': 100, 'b': None}) self.assertEqual(output, '') @setup({'widthratio14a': '{% widthratio a b c %}'}) def test_widthratio14a(self): with self.assertRaises(TemplateSyntaxError): self.engine.render_to_string('widthratio14a', {'a': 0, 'c': 'c', 'b': 100}) @setup({'widthratio14b': '{% widthratio a b c %}'}) def test_widthratio14b(self): with self.assertRaises(TemplateSyntaxError): self.engine.render_to_string('widthratio14b', {'a': 0, 'c': None, 'b': 100}) @setup({'widthratio15': '{% load custom %}{% widthratio a|noop:"x y" b 0 %}'}) def test_widthratio15(self): """ Test whitespace in filter argument """ output = self.engine.render_to_string('widthratio15', {'a': 50, 'b': 100}) self.assertEqual(output, '0') # Widthratio 
with variable assignment @setup({'widthratio16': '{% widthratio a b 100 as variable %}-{{ variable }}-'}) def test_widthratio16(self): output = self.engine.render_to_string('widthratio16', {'a': 50, 'b': 100}) self.assertEqual(output, '-50-') @setup({'widthratio17': '{% widthratio a b 100 as variable %}-{{ variable }}-'}) def test_widthratio17(self): output = self.engine.render_to_string('widthratio17', {'a': 100, 'b': 100}) self.assertEqual(output, '-100-') @setup({'widthratio18': '{% widthratio a b 100 as %}'}) def test_widthratio18(self): with self.assertRaises(TemplateSyntaxError): self.engine.get_template('widthratio18') @setup({'widthratio19': '{% widthratio a b 100 not_as variable %}'}) def test_widthratio19(self): with self.assertRaises(TemplateSyntaxError): self.engine.get_template('widthratio19') @setup({'widthratio20': '{% widthratio a b 100 %}'}) def test_widthratio20(self): output = self.engine.render_to_string('widthratio20', {'a': float('inf'), 'b': float('inf')}) self.assertEqual(output, '') @setup({'widthratio21': '{% widthratio a b 100 %}'}) def test_widthratio21(self): output = self.engine.render_to_string('widthratio21', {'a': float('inf'), 'b': 2}) self.assertEqual(output, '') @setup({'t': '{% widthratio a b 100 as variable %}-{{ variable }}-'}) def test_zerodivisionerror_as_var(self): output = self.engine.render_to_string('t', {'a': 0, 'b': 0}) self.assertEqual(output, '-0-') @setup({'t': '{% widthratio a b c as variable %}-{{ variable }}-'}) def test_typeerror_as_var(self): output = self.engine.render_to_string('t', {'a': 'a', 'c': 100, 'b': 100}) self.assertEqual(output, '--')
bdh1011/cupeye
refs/heads/master
venv/lib/python2.7/site-packages/pip/_vendor/requests/api.py
160
# -*- coding: utf-8 -*- """ requests.api ~~~~~~~~~~~~ This module implements the Requests API. :copyright: (c) 2012 by Kenneth Reitz. :license: Apache2, see LICENSE for more details. """ from . import sessions def request(method, url, **kwargs): """Constructs and sends a :class:`Request <Request>`. :param method: method for the new :class:`Request` object. :param url: URL for the new :class:`Request` object. :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`. :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. :param json: (optional) json data to send in the body of the :class:`Request`. :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': ('filename', fileobj)}``) for multipart encoding upload. :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth. :param timeout: (optional) How long to wait for the server to send data before giving up, as a float, or a (`connect timeout, read timeout <user/advanced.html#timeouts>`_) tuple. :type timeout: float or tuple :param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed. :type allow_redirects: bool :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy. :param verify: (optional) if ``True``, the SSL cert will be verified. A CA_BUNDLE path can also be provided. :param stream: (optional) if ``False``, the response content will be immediately downloaded. :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair. 
:return: :class:`Response <Response>` object :rtype: requests.Response Usage:: >>> import requests >>> req = requests.request('GET', 'http://httpbin.org/get') <Response [200]> """ session = sessions.Session() response = session.request(method=method, url=url, **kwargs) # By explicitly closing the session, we avoid leaving sockets open which # can trigger a ResourceWarning in some cases, and look like a memory leak # in others. session.close() return response def get(url, **kwargs): """Sends a GET request. :param url: URL for the new :class:`Request` object. :param \*\*kwargs: Optional arguments that ``request`` takes. :return: :class:`Response <Response>` object :rtype: requests.Response """ kwargs.setdefault('allow_redirects', True) return request('get', url, **kwargs) def options(url, **kwargs): """Sends a OPTIONS request. :param url: URL for the new :class:`Request` object. :param \*\*kwargs: Optional arguments that ``request`` takes. :return: :class:`Response <Response>` object :rtype: requests.Response """ kwargs.setdefault('allow_redirects', True) return request('options', url, **kwargs) def head(url, **kwargs): """Sends a HEAD request. :param url: URL for the new :class:`Request` object. :param \*\*kwargs: Optional arguments that ``request`` takes. :return: :class:`Response <Response>` object :rtype: requests.Response """ kwargs.setdefault('allow_redirects', False) return request('head', url, **kwargs) def post(url, data=None, json=None, **kwargs): """Sends a POST request. :param url: URL for the new :class:`Request` object. :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. :param json: (optional) json data to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. 
:return: :class:`Response <Response>` object :rtype: requests.Response """ return request('post', url, data=data, json=json, **kwargs) def put(url, data=None, **kwargs): """Sends a PUT request. :param url: URL for the new :class:`Request` object. :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. :return: :class:`Response <Response>` object :rtype: requests.Response """ return request('put', url, data=data, **kwargs) def patch(url, data=None, **kwargs): """Sends a PATCH request. :param url: URL for the new :class:`Request` object. :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. :return: :class:`Response <Response>` object :rtype: requests.Response """ return request('patch', url, data=data, **kwargs) def delete(url, **kwargs): """Sends a DELETE request. :param url: URL for the new :class:`Request` object. :param \*\*kwargs: Optional arguments that ``request`` takes. :return: :class:`Response <Response>` object :rtype: requests.Response """ return request('delete', url, **kwargs)
llambeau/finitio.py
refs/heads/master
finitio/types/type.py
1
from ..exceptions import DressError, UndressError from ..support.monad import Monad class Type(object): __slots__ = ['metadata'] def __init__(self, metadata): self.metadata = metadata @staticmethod def factor(_from): return _from[list(_from.keys())[0]] def to_factor(self): to = {} to[self._generator] = self return to def include(self, value, world={}): return self._include(value, world) def _include(self, value, world): raise NotImplementedError('`_include` not implemented') def dress(self, value, world={}): monad = self.m_dress(value, Monad(world)) if monad.is_success(): return monad.result else: raise DressError(monad.error) def m_dress(self, value, monad): return self._m_dress(value, monad) def _m_dress(self, value, monad): raise NotImplementedError('`_m_dress` not implemented') def undress(self, value, as_type): return self._undress(value, as_type.true_one()) def _undress(self, value, as_type): if as_type.is_super_type_of(self): return value if as_type.include(value): return value raise UndressError("Unable to undress `{0}` from {1} to `{2}`" .format(value, self, as_type)) def is_super_type_of(self, other): return self._is_super_type_of(other) def _is_super_type_of(self, other): raise NotImplementedError('`_is_super_type_of` not implemented') def _is_sub_type_of(self, other): return False def fetch_type(self): return self def is_fake(self): return False def true_one(self): return self def __eq__(self, other): return isinstance(other, Type) def __ne__(self, other): return not self.__eq__(other)
mhdella/scikit-learn
refs/heads/master
sklearn/linear_model/tests/test_omp.py
272
# Author: Vlad Niculae # Licence: BSD 3 clause import numpy as np from sklearn.utils.testing import assert_raises from sklearn.utils.testing import assert_true from sklearn.utils.testing import assert_equal from sklearn.utils.testing import assert_array_equal from sklearn.utils.testing import assert_array_almost_equal from sklearn.utils.testing import assert_warns from sklearn.utils.testing import ignore_warnings from sklearn.linear_model import (orthogonal_mp, orthogonal_mp_gram, OrthogonalMatchingPursuit, OrthogonalMatchingPursuitCV, LinearRegression) from sklearn.utils import check_random_state from sklearn.datasets import make_sparse_coded_signal n_samples, n_features, n_nonzero_coefs, n_targets = 20, 30, 5, 3 y, X, gamma = make_sparse_coded_signal(n_targets, n_features, n_samples, n_nonzero_coefs, random_state=0) G, Xy = np.dot(X.T, X), np.dot(X.T, y) # this makes X (n_samples, n_features) # and y (n_samples, 3) def test_correct_shapes(): assert_equal(orthogonal_mp(X, y[:, 0], n_nonzero_coefs=5).shape, (n_features,)) assert_equal(orthogonal_mp(X, y, n_nonzero_coefs=5).shape, (n_features, 3)) def test_correct_shapes_gram(): assert_equal(orthogonal_mp_gram(G, Xy[:, 0], n_nonzero_coefs=5).shape, (n_features,)) assert_equal(orthogonal_mp_gram(G, Xy, n_nonzero_coefs=5).shape, (n_features, 3)) def test_n_nonzero_coefs(): assert_true(np.count_nonzero(orthogonal_mp(X, y[:, 0], n_nonzero_coefs=5)) <= 5) assert_true(np.count_nonzero(orthogonal_mp(X, y[:, 0], n_nonzero_coefs=5, precompute=True)) <= 5) def test_tol(): tol = 0.5 gamma = orthogonal_mp(X, y[:, 0], tol=tol) gamma_gram = orthogonal_mp(X, y[:, 0], tol=tol, precompute=True) assert_true(np.sum((y[:, 0] - np.dot(X, gamma)) ** 2) <= tol) assert_true(np.sum((y[:, 0] - np.dot(X, gamma_gram)) ** 2) <= tol) def test_with_without_gram(): assert_array_almost_equal( orthogonal_mp(X, y, n_nonzero_coefs=5), orthogonal_mp(X, y, n_nonzero_coefs=5, precompute=True)) def test_with_without_gram_tol(): assert_array_almost_equal( 
orthogonal_mp(X, y, tol=1.), orthogonal_mp(X, y, tol=1., precompute=True)) def test_unreachable_accuracy(): assert_array_almost_equal( orthogonal_mp(X, y, tol=0), orthogonal_mp(X, y, n_nonzero_coefs=n_features)) assert_array_almost_equal( assert_warns(RuntimeWarning, orthogonal_mp, X, y, tol=0, precompute=True), orthogonal_mp(X, y, precompute=True, n_nonzero_coefs=n_features)) def test_bad_input(): assert_raises(ValueError, orthogonal_mp, X, y, tol=-1) assert_raises(ValueError, orthogonal_mp, X, y, n_nonzero_coefs=-1) assert_raises(ValueError, orthogonal_mp, X, y, n_nonzero_coefs=n_features + 1) assert_raises(ValueError, orthogonal_mp_gram, G, Xy, tol=-1) assert_raises(ValueError, orthogonal_mp_gram, G, Xy, n_nonzero_coefs=-1) assert_raises(ValueError, orthogonal_mp_gram, G, Xy, n_nonzero_coefs=n_features + 1) def test_perfect_signal_recovery(): idx, = gamma[:, 0].nonzero() gamma_rec = orthogonal_mp(X, y[:, 0], 5) gamma_gram = orthogonal_mp_gram(G, Xy[:, 0], 5) assert_array_equal(idx, np.flatnonzero(gamma_rec)) assert_array_equal(idx, np.flatnonzero(gamma_gram)) assert_array_almost_equal(gamma[:, 0], gamma_rec, decimal=2) assert_array_almost_equal(gamma[:, 0], gamma_gram, decimal=2) def test_estimator(): omp = OrthogonalMatchingPursuit(n_nonzero_coefs=n_nonzero_coefs) omp.fit(X, y[:, 0]) assert_equal(omp.coef_.shape, (n_features,)) assert_equal(omp.intercept_.shape, ()) assert_true(np.count_nonzero(omp.coef_) <= n_nonzero_coefs) omp.fit(X, y) assert_equal(omp.coef_.shape, (n_targets, n_features)) assert_equal(omp.intercept_.shape, (n_targets,)) assert_true(np.count_nonzero(omp.coef_) <= n_targets * n_nonzero_coefs) omp.set_params(fit_intercept=False, normalize=False) omp.fit(X, y[:, 0]) assert_equal(omp.coef_.shape, (n_features,)) assert_equal(omp.intercept_, 0) assert_true(np.count_nonzero(omp.coef_) <= n_nonzero_coefs) omp.fit(X, y) assert_equal(omp.coef_.shape, (n_targets, n_features)) assert_equal(omp.intercept_, 0) assert_true(np.count_nonzero(omp.coef_) <= 
n_targets * n_nonzero_coefs) def test_identical_regressors(): newX = X.copy() newX[:, 1] = newX[:, 0] gamma = np.zeros(n_features) gamma[0] = gamma[1] = 1. newy = np.dot(newX, gamma) assert_warns(RuntimeWarning, orthogonal_mp, newX, newy, 2) def test_swapped_regressors(): gamma = np.zeros(n_features) # X[:, 21] should be selected first, then X[:, 0] selected second, # which will take X[:, 21]'s place in case the algorithm does # column swapping for optimization (which is the case at the moment) gamma[21] = 1.0 gamma[0] = 0.5 new_y = np.dot(X, gamma) new_Xy = np.dot(X.T, new_y) gamma_hat = orthogonal_mp(X, new_y, 2) gamma_hat_gram = orthogonal_mp_gram(G, new_Xy, 2) assert_array_equal(np.flatnonzero(gamma_hat), [0, 21]) assert_array_equal(np.flatnonzero(gamma_hat_gram), [0, 21]) def test_no_atoms(): y_empty = np.zeros_like(y) Xy_empty = np.dot(X.T, y_empty) gamma_empty = ignore_warnings(orthogonal_mp)(X, y_empty, 1) gamma_empty_gram = ignore_warnings(orthogonal_mp)(G, Xy_empty, 1) assert_equal(np.all(gamma_empty == 0), True) assert_equal(np.all(gamma_empty_gram == 0), True) def test_omp_path(): path = orthogonal_mp(X, y, n_nonzero_coefs=5, return_path=True) last = orthogonal_mp(X, y, n_nonzero_coefs=5, return_path=False) assert_equal(path.shape, (n_features, n_targets, 5)) assert_array_almost_equal(path[:, :, -1], last) path = orthogonal_mp_gram(G, Xy, n_nonzero_coefs=5, return_path=True) last = orthogonal_mp_gram(G, Xy, n_nonzero_coefs=5, return_path=False) assert_equal(path.shape, (n_features, n_targets, 5)) assert_array_almost_equal(path[:, :, -1], last) def test_omp_return_path_prop_with_gram(): path = orthogonal_mp(X, y, n_nonzero_coefs=5, return_path=True, precompute=True) last = orthogonal_mp(X, y, n_nonzero_coefs=5, return_path=False, precompute=True) assert_equal(path.shape, (n_features, n_targets, 5)) assert_array_almost_equal(path[:, :, -1], last) def test_omp_cv(): y_ = y[:, 0] gamma_ = gamma[:, 0] ompcv = OrthogonalMatchingPursuitCV(normalize=True, 
fit_intercept=False, max_iter=10, cv=5) ompcv.fit(X, y_) assert_equal(ompcv.n_nonzero_coefs_, n_nonzero_coefs) assert_array_almost_equal(ompcv.coef_, gamma_) omp = OrthogonalMatchingPursuit(normalize=True, fit_intercept=False, n_nonzero_coefs=ompcv.n_nonzero_coefs_) omp.fit(X, y_) assert_array_almost_equal(ompcv.coef_, omp.coef_) def test_omp_reaches_least_squares(): # Use small simple data; it's a sanity check but OMP can stop early rng = check_random_state(0) n_samples, n_features = (10, 8) n_targets = 3 X = rng.randn(n_samples, n_features) Y = rng.randn(n_samples, n_targets) omp = OrthogonalMatchingPursuit(n_nonzero_coefs=n_features) lstsq = LinearRegression() omp.fit(X, Y) lstsq.fit(X, Y) assert_array_almost_equal(omp.coef_, lstsq.coef_)
rodrigods/keystone
refs/heads/master
keystone/routers.py
8
# Copyright 2012 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ The only types of routers in this file should be ``ComposingRouters``. The routers for the backends should be in the backend-specific router modules. For example, the ``ComposableRouter`` for ``identity`` belongs in:: keystone.identity.routers """ from keystone.common import wsgi from keystone import controllers class Extension(wsgi.ComposableRouter): def __init__(self, is_admin=True): if is_admin: self.controller = controllers.AdminExtensions() else: self.controller = controllers.PublicExtensions() def add_routes(self, mapper): extensions_controller = self.controller mapper.connect('/extensions', controller=extensions_controller, action='get_extensions_info', conditions=dict(method=['GET'])) mapper.connect('/extensions/{extension_alias}', controller=extensions_controller, action='get_extension_info', conditions=dict(method=['GET'])) class VersionV2(wsgi.ComposableRouter): def __init__(self, description): self.description = description def add_routes(self, mapper): version_controller = controllers.Version(self.description) mapper.connect('/', controller=version_controller, action='get_version_v2') class VersionV3(wsgi.ComposableRouter): def __init__(self, description): self.description = description def add_routes(self, mapper): version_controller = controllers.Version(self.description) mapper.connect('/', controller=version_controller, action='get_version_v3') class 
Versions(wsgi.ComposableRouter): def __init__(self, description): self.description = description def add_routes(self, mapper): version_controller = controllers.Version(self.description) mapper.connect('/', controller=version_controller, action='get_versions')
fh1ch/XMLTestExecutor
refs/heads/master
XMLTestExecutor.py
1
""" The MIT License (MIT) Copyright (c) 2014 fh1ch Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
""" import sys import datetime import StringIO import unittest from xml.sax.saxutils import escape class StdCapture: def __init__(self, fpointer): self._fpointer = fpointer def write(self, string): self._fpointer.write(self.to_unicode(string)) def writelines(self, string): lines = map(self.to_unicode, string) self._fpointer.writelines(lines) def flush(self): self._fpointer.flush() def to_unicode(self, string): try: return unicode(string) except UnicodeDecodeError: return string.decode('unicode_escape') stdOutNew = StdCapture(sys.stdout) stdErrNew = StdCapture(sys.stderr) class _ReportGenerator: def __init__(self): self._report = [] self.statPassed = 0 self.statFailed = 0 self.statError = 0 def addSuccess(self, test, time, stdData): self.statPassed += 1 strValue = "" strValue += self._addSysOut(stdData[0]) strValue += self._addSysErr(stdData[1]) self._report.append("""\t\t<testcase classname="%s.%s" name="%s" status="Passed" time="%s">\n%s </testcase>\n""" % (test.__class__.__module__, test.__class__.__name__, test.id().split('.')[-1], time, strValue)) def addFailure(self, test, err, errStr, time, stdData): self.statFailed += 1 strValue = "" strValue += self._addFailureStr(err, errStr) strValue += self._addSysOut(stdData[0]) strValue += self._addSysErr(stdData[1]) self._report.append("""\t\t<testcase classname="%s.%s" name="%s" status="Failed" time="%s">\n%s </testcase>\n""" % (test.__class__.__module__, test.__class__.__name__, test.id().split('.')[-1], time, strValue)) def addError(self, test, err, errStr, time, stdData): self.statError += 1 strValue = "" strValue += self._addErrorStr(err, errStr) strValue += self._addSysOut(stdData[0]) strValue += self._addSysErr(stdData[1]) self._report.append("""\t\t<testcase classname="%s.%s" name="%s" status="Error" time="%s">\n%s </testcase>\n""" % (test.__class__.__module__, test.__class__.__name__, test.id().split('.')[-1], time, strValue)) def genReport(self, name, time, properties): numTests = self.statError + 
self.statFailed + self.statPassed strXMLOut = """<?xml version="1.0" encoding="UTF-8"?>\n""" strXMLOut += """<testsuites errors="%s" failures="%s" name="%s" tests="%s" time="%s">\n""" % (self.statError, self.statFailed, name, numTests, time) strXMLOut += """\t<testsuite errors="%s" failures="%s" name="%s" tests="%s" >\n""" % (self.statError, self.statFailed, name, numTests) if properties and len(properties) > 0: strXMLOut += """\t\t<properties>\n""" for prop in properties: strXMLOut += """\t\t\t<property name="%s" value="%s"/>\n""" % (prop[0], prop[1]) strXMLOut += """\t\t</properties>\n""" for testcase in self._report: strXMLOut += testcase strXMLOut += """\t</testsuite>\n""" strXMLOut += """</testsuites>\n""" return strXMLOut def _addSysOut(self, sysOut): strSysOut = "" if sysOut != "": strSysOut = """\t\t\t<system-out>%s<system-out/>\n""" % escape(sysOut) return strSysOut def _addSysErr(self, sysErr): strSysErr = "" if sysErr != "": strSysErr = """\t\t\t<system-err>%s<system-err/>\n""" % escape(sysErr) return strSysErr def _addFailureStr(self, err, errStr): return """\t\t\t<failure message="%s" type="%s">%s</failure>\n""" % (err[1], err[0].__name__, escape(errStr[1])) def _addErrorStr(self, err, errStr): return """\t\t\t<error message="%s" type="%s">%s</error>\n""" % (err[1], err[0].__name__, escape(errStr[1])) class _TestCaseResult(unittest.TestResult): def __init__(self): unittest.TestResult.__init__(self) self.result = _ReportGenerator() self._stdOutDef = None self._stdErrDef = None self._stdBuffOut = StringIO.StringIO() self._stdBuffErr = StringIO.StringIO() def startTest(self, test): unittest.TestResult.startTest(self, test) self._execStart = datetime.datetime.now() self._startCaptureStd() def stopTest(self, test): self._emptyCaptureStd() def addSuccess(self, test): unittest.TestResult.addSuccess(self, test) sys.stderr.write("[OK] " + str(test) + "\n") execDur = datetime.datetime.now() - self._execStart stdData = self._emptyCaptureStd() 
self.result.addSuccess(test, execDur, stdData) def addError(self, test, err): unittest.TestResult.addError(self, test, err) sys.stderr.write("[E] " + str(test) + "\n") errStr = self.errors[-1] execDur = datetime.datetime.now() - self._execStart stdData = self._emptyCaptureStd() self.result.addError(test, err, errStr, execDur, stdData) def addFailure(self, test, err): unittest.TestResult.addFailure(self, test, err) sys.stderr.write("[F] " + str(test) + "\n") errStr = self.failures[-1] execDur = datetime.datetime.now() - self._execStart stdData = self._emptyCaptureStd() self.result.addFailure(test, err, errStr, execDur, stdData) def _startCaptureStd(self): self._stdOutDef = sys.stdout self._stdErrDef = sys.stderr stdOutNew.fp = self._stdBuffOut stdErrNew.fp = self._stdBuffErr sys.stdout = stdOutNew sys.stderr = stdErrNew def _emptyCaptureStd(self): if self._stdOutDef: sys.stdout = self._stdOutDef sys.stderr = self._stdErrDef self._stdOutDef = None self._stdErrDef = None return (self._stdBuffOut.getvalue(), self._stdBuffErr.getvalue()) class XMLTestExecutor: DEFAULT_REPORT_NAME = "XMLTestExecutor Report" VERSION = "0.1.1" def __init__(self, stream = sys.stdout, name = None, properties = None): self._stream = stream if name == None: name = self.DEFAULT_REPORT_NAME self._name = name self._properties = properties self._execStart = datetime.datetime.now() def run(self, test): self._genStartupConsole(self._name, self._properties) result = _TestCaseResult() test(result) self._execStop = datetime.datetime.now() self._genReport(result) return result def _genReport(self, result): execDur = self._execStop - self._execStart self._genStatisticConsole(result) output = result.result.genReport(self._name, execDur, self._properties) self._stream.write(output.encode('utf8')) def _genStartupConsole(self, name, properties): sys.stderr.write("\n\n") sys.stderr.write("==============================\n") sys.stderr.write("Version: XMLTestExecutor v%s\n" % self.VERSION) 
sys.stderr.write("Report name: %s\n" % name) if properties: for prop in properties: sys.stderr.write("Property: %s - Value: %s\n" % (prop[0], prop[1])) sys.stderr.write("Start time: %s\n" % datetime.datetime.now()) sys.stderr.write("==============================\n\n") def _genStatisticConsole(self, result): numTests = result.result.statError + result.result.statFailed + result.result.statPassed sys.stderr.write("\n\n") sys.stderr.write("======= run statistics =======\n") sys.stderr.write("Tests passed: %s\n" % result.result.statPassed) sys.stderr.write("Tests failed: %s\n" % result.result.statFailed) sys.stderr.write("Tests errors: %s\n" % result.result.statError) sys.stderr.write("Total tests: %s\n\n" % numTests) sys.stderr.write("Total runtime: %s\n" % (self._execStop - self._execStart)) sys.stderr.write("==============================\n") class TestProgram(unittest.TestProgram): def runTests(self): self.testRunner = XMLTestExecutor() unittest.TestProgram.runTests(self) if __name__ == "__main__": TestProgram(module = None)
Stane1983/xbmc
refs/heads/master
tools/EventClients/lib/python/ps3/__init__.py
909
# -*- coding: utf-8 -*- # File intentionally left blank
spisneha25/django
refs/heads/master
tests/logging_tests/logconfig.py
609
import logging from django.conf import settings from django.core.mail.backends.base import BaseEmailBackend class MyHandler(logging.Handler): def __init__(self): logging.Handler.__init__(self) self.config = settings.LOGGING class MyEmailBackend(BaseEmailBackend): def send_messages(self, email_messages): pass
katrid/django
refs/heads/master
tests/admin_widgets/tests.py
65
# -*- coding: utf-8 -*- from __future__ import unicode_literals import gettext import os from datetime import datetime, timedelta from importlib import import_module from unittest import skipIf from django import forms from django.conf import settings from django.contrib import admin from django.contrib.admin import widgets from django.contrib.admin.tests import AdminSeleniumWebDriverTestCase from django.contrib.auth.models import User from django.core.files.storage import default_storage from django.core.files.uploadedfile import SimpleUploadedFile from django.core.urlresolvers import reverse from django.db.models import CharField, DateField from django.test import SimpleTestCase, TestCase, override_settings from django.utils import six, translation from . import models from .widgetadmin import site as widget_admin_site try: import pytz except ImportError: pytz = None class TestDataMixin(object): @classmethod def setUpTestData(cls): cls.u1 = User.objects.create( pk=100, username='super', first_name='Super', last_name='User', email='super@example.com', password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158', is_active=True, is_superuser=True, is_staff=True, last_login=datetime(2007, 5, 30, 13, 20, 10), date_joined=datetime(2007, 5, 30, 13, 20, 10) ) cls.u2 = User.objects.create( pk=101, username='testser', first_name='Add', last_name='User', email='auser@example.com', password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158', is_active=True, is_superuser=False, is_staff=True, last_login=datetime(2007, 5, 30, 13, 20, 10), date_joined=datetime(2007, 5, 30, 13, 20, 10) ) models.Car.objects.create(id=1, owner=cls.u1, make='Volkswagon', model='Passat') models.Car.objects.create(id=2, owner=cls.u2, make='BMW', model='M3') class SeleniumDataMixin(object): def setUp(self): self.u1 = User.objects.create( pk=100, username='super', first_name='Super', last_name='User', email='super@example.com', password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158', 
is_active=True, is_superuser=True, is_staff=True, last_login=datetime(2007, 5, 30, 13, 20, 10), date_joined=datetime(2007, 5, 30, 13, 20, 10) ) class AdminFormfieldForDBFieldTests(SimpleTestCase): """ Tests for correct behavior of ModelAdmin.formfield_for_dbfield """ def assertFormfield(self, model, fieldname, widgetclass, **admin_overrides): """ Helper to call formfield_for_dbfield for a given model and field name and verify that the returned formfield is appropriate. """ # Override any settings on the model admin class MyModelAdmin(admin.ModelAdmin): pass for k in admin_overrides: setattr(MyModelAdmin, k, admin_overrides[k]) # Construct the admin, and ask it for a formfield ma = MyModelAdmin(model, admin.site) ff = ma.formfield_for_dbfield(model._meta.get_field(fieldname), request=None) # "unwrap" the widget wrapper, if needed if isinstance(ff.widget, widgets.RelatedFieldWidgetWrapper): widget = ff.widget.widget else: widget = ff.widget # Check that we got a field of the right type self.assertTrue( isinstance(widget, widgetclass), "Wrong widget for %s.%s: expected %s, got %s" % ( model.__class__.__name__, fieldname, widgetclass, type(widget), ) ) # Return the formfield so that other tests can continue return ff def test_DateField(self): self.assertFormfield(models.Event, 'start_date', widgets.AdminDateWidget) def test_DateTimeField(self): self.assertFormfield(models.Member, 'birthdate', widgets.AdminSplitDateTime) def test_TimeField(self): self.assertFormfield(models.Event, 'start_time', widgets.AdminTimeWidget) def test_TextField(self): self.assertFormfield(models.Event, 'description', widgets.AdminTextareaWidget) def test_URLField(self): self.assertFormfield(models.Event, 'link', widgets.AdminURLFieldWidget) def test_IntegerField(self): self.assertFormfield(models.Event, 'min_age', widgets.AdminIntegerFieldWidget) def test_CharField(self): self.assertFormfield(models.Member, 'name', widgets.AdminTextInputWidget) def test_EmailField(self): 
self.assertFormfield(models.Member, 'email', widgets.AdminEmailInputWidget) def test_FileField(self): self.assertFormfield(models.Album, 'cover_art', widgets.AdminFileWidget) def test_ForeignKey(self): self.assertFormfield(models.Event, 'main_band', forms.Select) def test_raw_id_ForeignKey(self): self.assertFormfield(models.Event, 'main_band', widgets.ForeignKeyRawIdWidget, raw_id_fields=['main_band']) def test_radio_fields_ForeignKey(self): ff = self.assertFormfield(models.Event, 'main_band', widgets.AdminRadioSelect, radio_fields={'main_band': admin.VERTICAL}) self.assertEqual(ff.empty_label, None) def test_many_to_many(self): self.assertFormfield(models.Band, 'members', forms.SelectMultiple) def test_raw_id_many_to_many(self): self.assertFormfield(models.Band, 'members', widgets.ManyToManyRawIdWidget, raw_id_fields=['members']) def test_filtered_many_to_many(self): self.assertFormfield(models.Band, 'members', widgets.FilteredSelectMultiple, filter_vertical=['members']) def test_formfield_overrides(self): self.assertFormfield(models.Event, 'start_date', forms.TextInput, formfield_overrides={DateField: {'widget': forms.TextInput}}) def test_formfield_overrides_widget_instances(self): """ Test that widget instances in formfield_overrides are not shared between different fields. 
(#19423) """ class BandAdmin(admin.ModelAdmin): formfield_overrides = { CharField: {'widget': forms.TextInput(attrs={'size': '10'})} } ma = BandAdmin(models.Band, admin.site) f1 = ma.formfield_for_dbfield(models.Band._meta.get_field('name'), request=None) f2 = ma.formfield_for_dbfield(models.Band._meta.get_field('style'), request=None) self.assertNotEqual(f1.widget, f2.widget) self.assertEqual(f1.widget.attrs['maxlength'], '100') self.assertEqual(f2.widget.attrs['maxlength'], '20') self.assertEqual(f2.widget.attrs['size'], '10') def test_field_with_choices(self): self.assertFormfield(models.Member, 'gender', forms.Select) def test_choices_with_radio_fields(self): self.assertFormfield(models.Member, 'gender', widgets.AdminRadioSelect, radio_fields={'gender': admin.VERTICAL}) def test_inheritance(self): self.assertFormfield(models.Album, 'backside_art', widgets.AdminFileWidget) def test_m2m_widgets(self): """m2m fields help text as it applies to admin app (#9321).""" class AdvisorAdmin(admin.ModelAdmin): filter_vertical = ['companies'] self.assertFormfield(models.Advisor, 'companies', widgets.FilteredSelectMultiple, filter_vertical=['companies']) ma = AdvisorAdmin(models.Advisor, admin.site) f = ma.formfield_for_dbfield(models.Advisor._meta.get_field('companies'), request=None) self.assertEqual(six.text_type(f.help_text), 'Hold down "Control", or "Command" on a Mac, to select more than one.') @override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'], ROOT_URLCONF='admin_widgets.urls') class AdminFormfieldForDBFieldWithRequestTests(TestDataMixin, TestCase): def test_filter_choices_by_request_user(self): """ Ensure the user can only see their own cars in the foreign key dropdown. 
""" self.client.login(username="super", password="secret") response = self.client.get(reverse('admin:admin_widgets_cartire_add')) self.assertNotContains(response, "BMW M3") self.assertContains(response, "Volkswagon Passat") @override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'], ROOT_URLCONF='admin_widgets.urls') class AdminForeignKeyWidgetChangeList(TestDataMixin, TestCase): def setUp(self): self.client.login(username="super", password="secret") def test_changelist_ForeignKey(self): response = self.client.get(reverse('admin:admin_widgets_car_changelist')) self.assertContains(response, '/auth/user/add/') @override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'], ROOT_URLCONF='admin_widgets.urls') class AdminForeignKeyRawIdWidget(TestDataMixin, TestCase): def setUp(self): self.client.login(username="super", password="secret") def test_nonexistent_target_id(self): band = models.Band.objects.create(name='Bogey Blues') pk = band.pk band.delete() post_data = { "main_band": '%s' % pk, } # Try posting with a non-existent pk in a raw id field: this # should result in an error message, not a server exception. response = self.client.post(reverse('admin:admin_widgets_event_add'), post_data) self.assertContains(response, 'Select a valid choice. That choice is not one of the available choices.') def test_invalid_target_id(self): for test_str in ('Iñtërnâtiônàlizætiøn', "1234'", -1234): # This should result in an error message, not a server exception. response = self.client.post(reverse('admin:admin_widgets_event_add'), {"main_band": test_str}) self.assertContains(response, 'Select a valid choice. 
That choice is not one of the available choices.') def test_url_params_from_lookup_dict_any_iterable(self): lookup1 = widgets.url_params_from_lookup_dict({'color__in': ('red', 'blue')}) lookup2 = widgets.url_params_from_lookup_dict({'color__in': ['red', 'blue']}) self.assertEqual(lookup1, {'color__in': 'red,blue'}) self.assertEqual(lookup1, lookup2) def test_url_params_from_lookup_dict_callable(self): def my_callable(): return 'works' lookup1 = widgets.url_params_from_lookup_dict({'myfield': my_callable}) lookup2 = widgets.url_params_from_lookup_dict({'myfield': my_callable()}) self.assertEqual(lookup1, lookup2) class FilteredSelectMultipleWidgetTest(SimpleTestCase): def test_render(self): # Backslash in verbose_name to ensure it is JavaScript escaped. w = widgets.FilteredSelectMultiple('test\\', False) self.assertHTMLEqual( w.render('test', 'test'), '<select multiple="multiple" name="test" class="selectfilter">\n</select>' '<script type="text/javascript">addEvent(window, "load", function(e) ' '{SelectFilter.init("id_test", "test\\u005C", 0); });</script>\n' ) def test_stacked_render(self): # Backslash in verbose_name to ensure it is JavaScript escaped. w = widgets.FilteredSelectMultiple('test\\', True) self.assertHTMLEqual( w.render('test', 'test'), '<select multiple="multiple" name="test" class="selectfilterstacked">\n</select>' '<script type="text/javascript">addEvent(window, "load", function(e) ' '{SelectFilter.init("id_test", "test\\u005C", 1); });</script>\n' ) class AdminDateWidgetTest(SimpleTestCase): def test_attrs(self): """ Ensure that user-supplied attrs are used. Refs #12073. 
""" w = widgets.AdminDateWidget() self.assertHTMLEqual( w.render('test', datetime(2007, 12, 1, 9, 30)), '<input value="2007-12-01" type="text" class="vDateField" name="test" size="10" />', ) # pass attrs to widget w = widgets.AdminDateWidget(attrs={'size': 20, 'class': 'myDateField'}) self.assertHTMLEqual( w.render('test', datetime(2007, 12, 1, 9, 30)), '<input value="2007-12-01" type="text" class="myDateField" name="test" size="20" />', ) class AdminTimeWidgetTest(SimpleTestCase): def test_attrs(self): """ Ensure that user-supplied attrs are used. Refs #12073. """ w = widgets.AdminTimeWidget() self.assertHTMLEqual( w.render('test', datetime(2007, 12, 1, 9, 30)), '<input value="09:30:00" type="text" class="vTimeField" name="test" size="8" />', ) # pass attrs to widget w = widgets.AdminTimeWidget(attrs={'size': 20, 'class': 'myTimeField'}) self.assertHTMLEqual( w.render('test', datetime(2007, 12, 1, 9, 30)), '<input value="09:30:00" type="text" class="myTimeField" name="test" size="20" />', ) class AdminSplitDateTimeWidgetTest(SimpleTestCase): def test_render(self): w = widgets.AdminSplitDateTime() self.assertHTMLEqual( w.render('test', datetime(2007, 12, 1, 9, 30)), '<p class="datetime">Date: <input value="2007-12-01" type="text" class="vDateField" name="test_0" size="10" /><br />Time: <input value="09:30:00" type="text" class="vTimeField" name="test_1" size="8" /></p>', ) def test_localization(self): w = widgets.AdminSplitDateTime() with self.settings(USE_L10N=True), translation.override('de-at'): w.is_localized = True self.assertHTMLEqual( w.render('test', datetime(2007, 12, 1, 9, 30)), '<p class="datetime">Datum: <input value="01.12.2007" type="text" class="vDateField" name="test_0" size="10" /><br />Zeit: <input value="09:30:00" type="text" class="vTimeField" name="test_1" size="8" /></p>', ) class AdminURLWidgetTest(SimpleTestCase): def test_render(self): w = widgets.AdminURLFieldWidget() self.assertHTMLEqual( w.render('test', ''), '<input class="vURLField" 
name="test" type="url" />' ) self.assertHTMLEqual( w.render('test', 'http://example.com'), '<p class="url">Currently:<a href="http://example.com">http://example.com</a><br />Change:<input class="vURLField" name="test" type="url" value="http://example.com" /></p>' ) def test_render_idn(self): w = widgets.AdminURLFieldWidget() self.assertHTMLEqual( w.render('test', 'http://example-äüö.com'), '<p class="url">Currently: <a href="http://xn--example--7za4pnc.com">http://example-äüö.com</a><br />Change:<input class="vURLField" name="test" type="url" value="http://example-äüö.com" /></p>' ) def test_render_quoting(self): # WARNING: Don't use assertHTMLEqual in that testcase! # assertHTMLEqual will get rid of some escapes which are tested here! w = widgets.AdminURLFieldWidget() self.assertEqual( w.render('test', 'http://example.com/<sometag>some text</sometag>'), '<p class="url">Currently: <a href="http://example.com/%3Csometag%3Esome%20text%3C/sometag%3E">http://example.com/&lt;sometag&gt;some text&lt;/sometag&gt;</a><br />Change: <input class="vURLField" name="test" type="url" value="http://example.com/&lt;sometag&gt;some text&lt;/sometag&gt;" /></p>' ) self.assertEqual( w.render('test', 'http://example-äüö.com/<sometag>some text</sometag>'), '<p class="url">Currently: <a href="http://xn--example--7za4pnc.com/%3Csometag%3Esome%20text%3C/sometag%3E">http://example-äüö.com/&lt;sometag&gt;some text&lt;/sometag&gt;</a><br />Change: <input class="vURLField" name="test" type="url" value="http://example-äüö.com/&lt;sometag&gt;some text&lt;/sometag&gt;" /></p>' ) self.assertEqual( w.render('test', 'http://www.example.com/%C3%A4"><script>alert("XSS!")</script>"'), '<p class="url">Currently: <a href="http://www.example.com/%C3%A4%22%3E%3Cscript%3Ealert(%22XSS!%22)%3C/script%3E%22">http://www.example.com/%C3%A4&quot;&gt;&lt;script&gt;alert(&quot;XSS!&quot;)&lt;/script&gt;&quot;</a><br />Change: <input class="vURLField" name="test" type="url" 
value="http://www.example.com/%C3%A4&quot;&gt;&lt;script&gt;alert(&quot;XSS!&quot;)&lt;/script&gt;&quot;" /></p>' ) @override_settings( PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'], ROOT_URLCONF='admin_widgets.urls', ) class AdminFileWidgetTests(TestDataMixin, TestCase): @classmethod def setUpTestData(cls): super(AdminFileWidgetTests, cls).setUpTestData() band = models.Band.objects.create(name='Linkin Park') cls.album = band.album_set.create( name='Hybrid Theory', cover_art=r'albums\hybrid_theory.jpg' ) def test_render(self): w = widgets.AdminFileWidget() self.assertHTMLEqual( w.render('test', self.album.cover_art), '<p class="file-upload">Currently: <a href="%(STORAGE_URL)salbums/' 'hybrid_theory.jpg">albums\hybrid_theory.jpg</a> ' '<span class="clearable-file-input">' '<input type="checkbox" name="test-clear" id="test-clear_id" /> ' '<label for="test-clear_id">Clear</label></span><br />' 'Change: <input type="file" name="test" /></p>' % { 'STORAGE_URL': default_storage.url(''), }, ) self.assertHTMLEqual( w.render('test', SimpleUploadedFile('test', b'content')), '<input type="file" name="test" />', ) def test_readonly_fields(self): """ File widgets should render as a link when they're marked "read only." 
""" self.client.login(username="super", password="secret") response = self.client.get(reverse('admin:admin_widgets_album_change', args=(self.album.id,))) self.assertContains( response, '<p><a href="%(STORAGE_URL)salbums/hybrid_theory.jpg">' 'albums\hybrid_theory.jpg</a></p>' % {'STORAGE_URL': default_storage.url('')}, html=True, ) self.assertNotContains( response, '<input type="file" name="cover_art" id="id_cover_art" />', html=True, ) response = self.client.get(reverse('admin:admin_widgets_album_add')) self.assertContains( response, '<p></p>', html=True, ) @override_settings(ROOT_URLCONF='admin_widgets.urls') class ForeignKeyRawIdWidgetTest(TestCase): def test_render(self): band = models.Band.objects.create(name='Linkin Park') band.album_set.create( name='Hybrid Theory', cover_art=r'albums\hybrid_theory.jpg' ) rel = models.Album._meta.get_field('band').remote_field w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site) self.assertHTMLEqual( w.render('test', band.pk, attrs={}), ( '<input type="text" name="test" value="%(bandpk)s" class="vForeignKeyRawIdAdminField" />' '<a href="/admin_widgets/band/?_to_field=id" class="related-lookup" id="lookup_id_test" title="Lookup"></a>' '&nbsp;<strong>Linkin Park</strong>' ) % {'bandpk': band.pk} ) def test_relations_to_non_primary_key(self): # Check that ForeignKeyRawIdWidget works with fields which aren't # related to the model's primary key. 
apple = models.Inventory.objects.create(barcode=86, name='Apple') models.Inventory.objects.create(barcode=22, name='Pear') core = models.Inventory.objects.create( barcode=87, name='Core', parent=apple ) rel = models.Inventory._meta.get_field('parent').remote_field w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site) self.assertHTMLEqual( w.render('test', core.parent_id, attrs={}), ( '<input type="text" name="test" value="86" class="vForeignKeyRawIdAdminField" />' '<a href="/admin_widgets/inventory/?_to_field=barcode" class="related-lookup" id="lookup_id_test" title="Lookup">' '</a>&nbsp;<strong>Apple</strong>' ) ) def test_fk_related_model_not_in_admin(self): # FK to a model not registered with admin site. Raw ID widget should # have no magnifying glass link. See #16542 big_honeycomb = models.Honeycomb.objects.create(location='Old tree') big_honeycomb.bee_set.create() rel = models.Bee._meta.get_field('honeycomb').remote_field w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site) self.assertHTMLEqual( w.render('honeycomb_widget', big_honeycomb.pk, attrs={}), '<input type="text" name="honeycomb_widget" value="%(hcombpk)s" />&nbsp;<strong>Honeycomb object</strong>' % {'hcombpk': big_honeycomb.pk} ) def test_fk_to_self_model_not_in_admin(self): # FK to self, not registered with admin site. Raw ID widget should have # no magnifying glass link. 
See #16542 subject1 = models.Individual.objects.create(name='Subject #1') models.Individual.objects.create(name='Child', parent=subject1) rel = models.Individual._meta.get_field('parent').remote_field w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site) self.assertHTMLEqual( w.render('individual_widget', subject1.pk, attrs={}), '<input type="text" name="individual_widget" value="%(subj1pk)s" />&nbsp;<strong>Individual object</strong>' % {'subj1pk': subject1.pk} ) def test_proper_manager_for_label_lookup(self): # see #9258 rel = models.Inventory._meta.get_field('parent').remote_field w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site) hidden = models.Inventory.objects.create( barcode=93, name='Hidden', hidden=True ) child_of_hidden = models.Inventory.objects.create( barcode=94, name='Child of hidden', parent=hidden ) self.assertHTMLEqual( w.render('test', child_of_hidden.parent_id, attrs={}), ( '<input type="text" name="test" value="93" class="vForeignKeyRawIdAdminField" />' '<a href="/admin_widgets/inventory/?_to_field=barcode" class="related-lookup" id="lookup_id_test" title="Lookup">' '</a>&nbsp;<strong>Hidden</strong>' ) ) @override_settings(ROOT_URLCONF='admin_widgets.urls') class ManyToManyRawIdWidgetTest(TestCase): def test_render(self): band = models.Band.objects.create(name='Linkin Park') m1 = models.Member.objects.create(name='Chester') m2 = models.Member.objects.create(name='Mike') band.members.add(m1, m2) rel = models.Band._meta.get_field('members').remote_field w = widgets.ManyToManyRawIdWidget(rel, widget_admin_site) self.assertHTMLEqual( w.render('test', [m1.pk, m2.pk], attrs={}), ( '<input type="text" name="test" value="%(m1pk)s,%(m2pk)s" class="vManyToManyRawIdAdminField" />' '<a href="/admin_widgets/member/" class="related-lookup" id="lookup_id_test" title="Lookup"></a>' ) % dict(m1pk=m1.pk, m2pk=m2.pk) ) self.assertHTMLEqual( w.render('test', [m1.pk]), ( '<input type="text" name="test" value="%(m1pk)s" 
class="vManyToManyRawIdAdminField">' '<a href="/admin_widgets/member/" class="related-lookup" id="lookup_id_test" title="Lookup"></a>' ) % dict(m1pk=m1.pk) ) def test_m2m_related_model_not_in_admin(self): # M2M relationship with model not registered with admin site. Raw ID # widget should have no magnifying glass link. See #16542 consultor1 = models.Advisor.objects.create(name='Rockstar Techie') c1 = models.Company.objects.create(name='Doodle') c2 = models.Company.objects.create(name='Pear') consultor1.companies.add(c1, c2) rel = models.Advisor._meta.get_field('companies').remote_field w = widgets.ManyToManyRawIdWidget(rel, widget_admin_site) self.assertHTMLEqual( w.render('company_widget1', [c1.pk, c2.pk], attrs={}), '<input type="text" name="company_widget1" value="%(c1pk)s,%(c2pk)s" />' % {'c1pk': c1.pk, 'c2pk': c2.pk} ) self.assertHTMLEqual( w.render('company_widget2', [c1.pk]), '<input type="text" name="company_widget2" value="%(c1pk)s" />' % {'c1pk': c1.pk} ) class RelatedFieldWidgetWrapperTests(SimpleTestCase): def test_no_can_add_related(self): rel = models.Individual._meta.get_field('parent').remote_field w = widgets.AdminRadioSelect() # Used to fail with a name error. 
w = widgets.RelatedFieldWidgetWrapper(w, rel, widget_admin_site) self.assertFalse(w.can_add_related) def test_select_multiple_widget_cant_change_delete_related(self): rel = models.Individual._meta.get_field('parent').remote_field widget = forms.SelectMultiple() wrapper = widgets.RelatedFieldWidgetWrapper( widget, rel, widget_admin_site, can_add_related=True, can_change_related=True, can_delete_related=True, ) self.assertTrue(wrapper.can_add_related) self.assertFalse(wrapper.can_change_related) self.assertFalse(wrapper.can_delete_related) def test_on_delete_cascade_rel_cant_delete_related(self): rel = models.Individual._meta.get_field('soulmate').remote_field widget = forms.Select() wrapper = widgets.RelatedFieldWidgetWrapper( widget, rel, widget_admin_site, can_add_related=True, can_change_related=True, can_delete_related=True, ) self.assertTrue(wrapper.can_add_related) self.assertTrue(wrapper.can_change_related) self.assertFalse(wrapper.can_delete_related) @override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'], ROOT_URLCONF='admin_widgets.urls') class DateTimePickerSeleniumFirefoxTests(SeleniumDataMixin, AdminSeleniumWebDriverTestCase): available_apps = ['admin_widgets'] + AdminSeleniumWebDriverTestCase.available_apps webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver' def test_show_hide_date_time_picker_widgets(self): """ Ensure that pressing the ESC key closes the date and time picker widgets. Refs #17064. 
""" from selenium.webdriver.common.keys import Keys self.admin_login(username='super', password='secret', login_url='/') # Open a page that has a date and time picker widgets self.selenium.get('%s%s' % (self.live_server_url, reverse('admin:admin_widgets_member_add'))) # First, with the date picker widget --------------------------------- # Check that the date picker is hidden self.assertEqual( self.get_css_value('#calendarbox0', 'display'), 'none') # Click the calendar icon self.selenium.find_element_by_id('calendarlink0').click() # Check that the date picker is visible self.assertEqual( self.get_css_value('#calendarbox0', 'display'), 'block') # Press the ESC key self.selenium.find_element_by_tag_name('body').send_keys([Keys.ESCAPE]) # Check that the date picker is hidden again self.assertEqual( self.get_css_value('#calendarbox0', 'display'), 'none') # Then, with the time picker widget ---------------------------------- # Check that the time picker is hidden self.assertEqual( self.get_css_value('#clockbox0', 'display'), 'none') # Click the time icon self.selenium.find_element_by_id('clocklink0').click() # Check that the time picker is visible self.assertEqual( self.get_css_value('#clockbox0', 'display'), 'block') self.assertEqual( [ x.text for x in self.selenium.find_elements_by_xpath("//ul[@class='timelist']/li/a") ], ['Now', 'Midnight', '6 a.m.', 'Noon', '6 p.m.'] ) # Press the ESC key self.selenium.find_element_by_tag_name('body').send_keys([Keys.ESCAPE]) # Check that the time picker is hidden again self.assertEqual( self.get_css_value('#clockbox0', 'display'), 'none') def test_calendar_nonday_class(self): """ Ensure cells that are not days of the month have the `nonday` CSS class. Refs #4574. """ self.admin_login(username='super', password='secret', login_url='/') # Open a page that has a date and time picker widgets self.selenium.get('%s%s' % (self.live_server_url, reverse('admin:admin_widgets_member_add'))) # fill in the birth date. 
self.selenium.find_element_by_id('id_birthdate_0').send_keys('2013-06-01') # Click the calendar icon self.selenium.find_element_by_id('calendarlink0').click() # get all the tds within the calendar calendar0 = self.selenium.find_element_by_id('calendarin0') tds = calendar0.find_elements_by_tag_name('td') # make sure the first and last 6 cells have class nonday for td in tds[:6] + tds[-6:]: self.assertEqual(td.get_attribute('class'), 'nonday') def test_calendar_selected_class(self): """ Ensure cell for the day in the input has the `selected` CSS class. Refs #4574. """ self.admin_login(username='super', password='secret', login_url='/') # Open a page that has a date and time picker widgets self.selenium.get('%s%s' % (self.live_server_url, reverse('admin:admin_widgets_member_add'))) # fill in the birth date. self.selenium.find_element_by_id('id_birthdate_0').send_keys('2013-06-01') # Click the calendar icon self.selenium.find_element_by_id('calendarlink0').click() # get all the tds within the calendar calendar0 = self.selenium.find_element_by_id('calendarin0') tds = calendar0.find_elements_by_tag_name('td') # verify the selected cell selected = tds[6] self.assertEqual(selected.get_attribute('class'), 'selected') self.assertEqual(selected.text, '1') def test_calendar_no_selected_class(self): """ Ensure no cells are given the selected class when the field is empty. Refs #4574. 
""" self.admin_login(username='super', password='secret', login_url='/') # Open a page that has a date and time picker widgets self.selenium.get('%s%s' % (self.live_server_url, reverse('admin:admin_widgets_member_add'))) # Click the calendar icon self.selenium.find_element_by_id('calendarlink0').click() # get all the tds within the calendar calendar0 = self.selenium.find_element_by_id('calendarin0') tds = calendar0.find_elements_by_tag_name('td') # verify there are no cells with the selected class selected = [td for td in tds if td.get_attribute('class') == 'selected'] self.assertEqual(len(selected), 0) def test_calendar_show_date_from_input(self): """ Ensure that the calendar show the date from the input field for every locale supported by django. """ self.admin_login(username='super', password='secret', login_url='/') # Enter test data member = models.Member.objects.create(name='Bob', birthdate=datetime(1984, 5, 15), gender='M') # Get month names translations for every locales month_string = 'January February March April May June July August September October November December' path = os.path.join(os.path.dirname(import_module('django.contrib.admin').__file__), 'locale') for language_code, language_name in settings.LANGUAGES: try: catalog = gettext.translation('djangojs', path, [language_code]) except IOError: continue if month_string in catalog._catalog: month_names = catalog._catalog[month_string] else: month_names = month_string # Get the expected caption may_translation = month_names.split(' ')[4] expected_caption = '{0:s} {1:d}'.format(may_translation.upper(), 1984) # Test with every locale with override_settings(LANGUAGE_CODE=language_code, USE_L10N=True): # Open a page that has a date picker widget self.selenium.get('{}{}'.format(self.live_server_url, reverse('admin:admin_widgets_member_change', args=(member.pk,)))) # Click on the calendar icon self.selenium.find_element_by_id('calendarlink0').click() # Get the calendar caption calendar0 = 
self.selenium.find_element_by_id('calendarin0') caption = calendar0.find_element_by_tag_name('caption') # Make sure that the right month and year are displayed self.assertEqual(caption.text, expected_caption) class DateTimePickerSeleniumChromeTests(DateTimePickerSeleniumFirefoxTests): webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver' class DateTimePickerSeleniumIETests(DateTimePickerSeleniumFirefoxTests): webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver' @skipIf(pytz is None, "this test requires pytz") @override_settings(TIME_ZONE='Asia/Singapore') @override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'], ROOT_URLCONF='admin_widgets.urls') class DateTimePickerShortcutsSeleniumFirefoxTests(SeleniumDataMixin, AdminSeleniumWebDriverTestCase): available_apps = ['admin_widgets'] + AdminSeleniumWebDriverTestCase.available_apps webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver' def test_date_time_picker_shortcuts(self): """ Ensure that date/time/datetime picker shortcuts work in the current time zone. Refs #20663. This test case is fairly tricky, it relies on selenium still running the browser in the default time zone "America/Chicago" despite `override_settings` changing the time zone to "Asia/Singapore". """ self.admin_login(username='super', password='secret', login_url='/') error_margin = timedelta(seconds=10) # If we are neighbouring a DST, we add an hour of error margin. tz = pytz.timezone('America/Chicago') utc_now = datetime.now(pytz.utc) tz_yesterday = (utc_now - timedelta(days=1)).astimezone(tz).tzname() tz_tomorrow = (utc_now + timedelta(days=1)).astimezone(tz).tzname() if tz_yesterday != tz_tomorrow: error_margin += timedelta(hours=1) now = datetime.now() self.selenium.get('%s%s' % (self.live_server_url, reverse('admin:admin_widgets_member_add'))) self.selenium.find_element_by_id('id_name').send_keys('test') # Click on the "today" and "now" shortcuts. 
shortcuts = self.selenium.find_elements_by_css_selector( '.field-birthdate .datetimeshortcuts') for shortcut in shortcuts: shortcut.find_element_by_tag_name('a').click() # Check that there is a time zone mismatch warning. # Warning: This would effectively fail if the TIME_ZONE defined in the # settings has the same UTC offset as "Asia/Singapore" because the # mismatch warning would be rightfully missing from the page. self.selenium.find_elements_by_css_selector( '.field-birthdate .timezonewarning') # Submit the form. self.selenium.find_element_by_tag_name('form').submit() self.wait_page_loaded() # Make sure that "now" in javascript is within 10 seconds # from "now" on the server side. member = models.Member.objects.get(name='test') self.assertGreater(member.birthdate, now - error_margin) self.assertLess(member.birthdate, now + error_margin) class DateTimePickerShortcutsSeleniumChromeTests(DateTimePickerShortcutsSeleniumFirefoxTests): webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver' class DateTimePickerShortcutsSeleniumIETests(DateTimePickerShortcutsSeleniumFirefoxTests): webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver' @override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'], ROOT_URLCONF='admin_widgets.urls') class HorizontalVerticalFilterSeleniumFirefoxTests(SeleniumDataMixin, AdminSeleniumWebDriverTestCase): available_apps = ['admin_widgets'] + AdminSeleniumWebDriverTestCase.available_apps webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver' def setUp(self): super(HorizontalVerticalFilterSeleniumFirefoxTests, self).setUp() self.lisa = models.Student.objects.create(name='Lisa') self.john = models.Student.objects.create(name='John') self.bob = models.Student.objects.create(name='Bob') self.peter = models.Student.objects.create(name='Peter') self.jenny = models.Student.objects.create(name='Jenny') self.jason = models.Student.objects.create(name='Jason') self.cliff = 
models.Student.objects.create(name='Cliff') self.arthur = models.Student.objects.create(name='Arthur') self.school = models.School.objects.create(name='School of Awesome') def assertActiveButtons(self, mode, field_name, choose, remove, choose_all=None, remove_all=None): choose_link = '#id_%s_add_link' % field_name choose_all_link = '#id_%s_add_all_link' % field_name remove_link = '#id_%s_remove_link' % field_name remove_all_link = '#id_%s_remove_all_link' % field_name self.assertEqual(self.has_css_class(choose_link, 'active'), choose) self.assertEqual(self.has_css_class(remove_link, 'active'), remove) if mode == 'horizontal': self.assertEqual(self.has_css_class(choose_all_link, 'active'), choose_all) self.assertEqual(self.has_css_class(remove_all_link, 'active'), remove_all) def execute_basic_operations(self, mode, field_name): from_box = '#id_%s_from' % field_name to_box = '#id_%s_to' % field_name choose_link = 'id_%s_add_link' % field_name choose_all_link = 'id_%s_add_all_link' % field_name remove_link = 'id_%s_remove_link' % field_name remove_all_link = 'id_%s_remove_all_link' % field_name # Initial positions --------------------------------------------------- self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jason.id), str(self.jenny.id), str(self.john.id)]) self.assertSelectOptions(to_box, [str(self.lisa.id), str(self.peter.id)]) self.assertActiveButtons(mode, field_name, False, False, True, True) # Click 'Choose all' -------------------------------------------------- if mode == 'horizontal': self.selenium.find_element_by_id(choose_all_link).click() elif mode == 'vertical': # There 's no 'Choose all' button in vertical mode, so individually # select all options and click 'Choose'. 
for option in self.selenium.find_elements_by_css_selector(from_box + ' > option'): option.click() self.selenium.find_element_by_id(choose_link).click() self.assertSelectOptions(from_box, []) self.assertSelectOptions(to_box, [str(self.lisa.id), str(self.peter.id), str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jason.id), str(self.jenny.id), str(self.john.id)]) self.assertActiveButtons(mode, field_name, False, False, False, True) # Click 'Remove all' -------------------------------------------------- if mode == 'horizontal': self.selenium.find_element_by_id(remove_all_link).click() elif mode == 'vertical': # There 's no 'Remove all' button in vertical mode, so individually # select all options and click 'Remove'. for option in self.selenium.find_elements_by_css_selector(to_box + ' > option'): option.click() self.selenium.find_element_by_id(remove_link).click() self.assertSelectOptions(from_box, [str(self.lisa.id), str(self.peter.id), str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jason.id), str(self.jenny.id), str(self.john.id)]) self.assertSelectOptions(to_box, []) self.assertActiveButtons(mode, field_name, False, False, True, False) # Choose some options ------------------------------------------------ from_lisa_select_option = self.get_select_option(from_box, str(self.lisa.id)) # Check the title attribute is there for tool tips: ticket #20821 self.assertEqual(from_lisa_select_option.get_attribute('title'), from_lisa_select_option.get_attribute('text')) from_lisa_select_option.click() self.get_select_option(from_box, str(self.jason.id)).click() self.get_select_option(from_box, str(self.bob.id)).click() self.get_select_option(from_box, str(self.john.id)).click() self.assertActiveButtons(mode, field_name, True, False, True, False) self.selenium.find_element_by_id(choose_link).click() self.assertActiveButtons(mode, field_name, False, False, True, True) self.assertSelectOptions(from_box, [str(self.peter.id), 
str(self.arthur.id), str(self.cliff.id), str(self.jenny.id)]) self.assertSelectOptions(to_box, [str(self.lisa.id), str(self.bob.id), str(self.jason.id), str(self.john.id)]) # Check the tooltip is still there after moving: ticket #20821 to_lisa_select_option = self.get_select_option(to_box, str(self.lisa.id)) self.assertEqual(to_lisa_select_option.get_attribute('title'), to_lisa_select_option.get_attribute('text')) # Remove some options ------------------------------------------------- self.get_select_option(to_box, str(self.lisa.id)).click() self.get_select_option(to_box, str(self.bob.id)).click() self.assertActiveButtons(mode, field_name, False, True, True, True) self.selenium.find_element_by_id(remove_link).click() self.assertActiveButtons(mode, field_name, False, False, True, True) self.assertSelectOptions(from_box, [str(self.peter.id), str(self.arthur.id), str(self.cliff.id), str(self.jenny.id), str(self.lisa.id), str(self.bob.id)]) self.assertSelectOptions(to_box, [str(self.jason.id), str(self.john.id)]) # Choose some more options -------------------------------------------- self.get_select_option(from_box, str(self.arthur.id)).click() self.get_select_option(from_box, str(self.cliff.id)).click() self.selenium.find_element_by_id(choose_link).click() self.assertSelectOptions(from_box, [str(self.peter.id), str(self.jenny.id), str(self.lisa.id), str(self.bob.id)]) self.assertSelectOptions(to_box, [str(self.jason.id), str(self.john.id), str(self.arthur.id), str(self.cliff.id)]) def test_basic(self): self.school.students = [self.lisa, self.peter] self.school.alumni = [self.lisa, self.peter] self.school.save() self.admin_login(username='super', password='secret', login_url='/') self.selenium.get('%s%s' % ( self.live_server_url, reverse('admin:admin_widgets_school_change', args=(self.school.id,)))) self.wait_page_loaded() self.execute_basic_operations('vertical', 'students') self.execute_basic_operations('horizontal', 'alumni') # Save and check that everything is 
properly stored in the database --- self.selenium.find_element_by_xpath('//input[@value="Save"]').click() self.wait_page_loaded() self.school = models.School.objects.get(id=self.school.id) # Reload from database self.assertEqual(list(self.school.students.all()), [self.arthur, self.cliff, self.jason, self.john]) self.assertEqual(list(self.school.alumni.all()), [self.arthur, self.cliff, self.jason, self.john]) def test_filter(self): """ Ensure that typing in the search box filters out options displayed in the 'from' box. """ from selenium.webdriver.common.keys import Keys self.school.students = [self.lisa, self.peter] self.school.alumni = [self.lisa, self.peter] self.school.save() self.admin_login(username='super', password='secret', login_url='/') self.selenium.get( '%s%s' % (self.live_server_url, reverse('admin:admin_widgets_school_change', args=(self.school.id,)))) for field_name in ['students', 'alumni']: from_box = '#id_%s_from' % field_name to_box = '#id_%s_to' % field_name choose_link = '#id_%s_add_link' % field_name remove_link = '#id_%s_remove_link' % field_name input = self.selenium.find_element_by_css_selector('#id_%s_input' % field_name) # Initial values self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jason.id), str(self.jenny.id), str(self.john.id)]) # Typing in some characters filters out non-matching options input.send_keys('a') self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)]) input.send_keys('R') self.assertSelectOptions(from_box, [str(self.arthur.id)]) # Clearing the text box makes the other options reappear input.send_keys([Keys.BACK_SPACE]) self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)]) input.send_keys([Keys.BACK_SPACE]) self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jason.id), str(self.jenny.id), str(self.john.id)]) # 
----------------------------------------------------------------- # Check that choosing a filtered option sends it properly to the # 'to' box. input.send_keys('a') self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)]) self.get_select_option(from_box, str(self.jason.id)).click() self.selenium.find_element_by_css_selector(choose_link).click() self.assertSelectOptions(from_box, [str(self.arthur.id)]) self.assertSelectOptions(to_box, [str(self.lisa.id), str(self.peter.id), str(self.jason.id)]) self.get_select_option(to_box, str(self.lisa.id)).click() self.selenium.find_element_by_css_selector(remove_link).click() self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.lisa.id)]) self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)]) input.send_keys([Keys.BACK_SPACE]) # Clear text box self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jenny.id), str(self.john.id), str(self.lisa.id)]) self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)]) # ----------------------------------------------------------------- # Check that pressing enter on a filtered option sends it properly # to the 'to' box. 
self.get_select_option(to_box, str(self.jason.id)).click() self.selenium.find_element_by_css_selector(remove_link).click() input.send_keys('ja') self.assertSelectOptions(from_box, [str(self.jason.id)]) input.send_keys([Keys.ENTER]) self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)]) input.send_keys([Keys.BACK_SPACE, Keys.BACK_SPACE]) # Save and check that everything is properly stored in the database --- self.selenium.find_element_by_xpath('//input[@value="Save"]').click() self.wait_page_loaded() self.school = models.School.objects.get(id=self.school.id) # Reload from database self.assertEqual(list(self.school.students.all()), [self.jason, self.peter]) self.assertEqual(list(self.school.alumni.all()), [self.jason, self.peter]) class HorizontalVerticalFilterSeleniumChromeTests(HorizontalVerticalFilterSeleniumFirefoxTests): webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver' class HorizontalVerticalFilterSeleniumIETests(HorizontalVerticalFilterSeleniumFirefoxTests): webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver' @override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'], ROOT_URLCONF='admin_widgets.urls') class AdminRawIdWidgetSeleniumFirefoxTests(SeleniumDataMixin, AdminSeleniumWebDriverTestCase): available_apps = ['admin_widgets'] + AdminSeleniumWebDriverTestCase.available_apps webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver' def setUp(self): super(AdminRawIdWidgetSeleniumFirefoxTests, self).setUp() models.Band.objects.create(id=42, name='Bogey Blues') models.Band.objects.create(id=98, name='Green Potatoes') def test_ForeignKey(self): self.admin_login(username='super', password='secret', login_url='/') self.selenium.get( '%s%s' % (self.live_server_url, reverse('admin:admin_widgets_event_add'))) main_window = self.selenium.current_window_handle # No value has been selected yet self.assertEqual( self.selenium.find_element_by_id('id_main_band').get_attribute('value'), '') 
# Open the popup window and click on a band self.selenium.find_element_by_id('lookup_id_main_band').click() self.selenium.switch_to.window('id_main_band') self.wait_page_loaded() link = self.selenium.find_element_by_link_text('Bogey Blues') self.assertIn('/band/42/', link.get_attribute('href')) link.click() # The field now contains the selected band's id self.selenium.switch_to.window(main_window) self.wait_for_value('#id_main_band', '42') # Reopen the popup window and click on another band self.selenium.find_element_by_id('lookup_id_main_band').click() self.selenium.switch_to.window('id_main_band') self.wait_page_loaded() link = self.selenium.find_element_by_link_text('Green Potatoes') self.assertIn('/band/98/', link.get_attribute('href')) link.click() # The field now contains the other selected band's id self.selenium.switch_to.window(main_window) self.wait_for_value('#id_main_band', '98') def test_many_to_many(self): self.admin_login(username='super', password='secret', login_url='/') self.selenium.get( '%s%s' % (self.live_server_url, reverse('admin:admin_widgets_event_add'))) main_window = self.selenium.current_window_handle # No value has been selected yet self.assertEqual( self.selenium.find_element_by_id('id_supporting_bands').get_attribute('value'), '') # Open the popup window and click on a band self.selenium.find_element_by_id('lookup_id_supporting_bands').click() self.selenium.switch_to.window('id_supporting_bands') self.wait_page_loaded() link = self.selenium.find_element_by_link_text('Bogey Blues') self.assertIn('/band/42/', link.get_attribute('href')) link.click() # The field now contains the selected band's id self.selenium.switch_to.window(main_window) self.wait_for_value('#id_supporting_bands', '42') # Reopen the popup window and click on another band self.selenium.find_element_by_id('lookup_id_supporting_bands').click() self.selenium.switch_to.window('id_supporting_bands') self.wait_page_loaded() link = 
self.selenium.find_element_by_link_text('Green Potatoes') self.assertIn('/band/98/', link.get_attribute('href')) link.click() # The field now contains the two selected bands' ids self.selenium.switch_to.window(main_window) self.wait_for_value('#id_supporting_bands', '42,98') class AdminRawIdWidgetSeleniumChromeTests(AdminRawIdWidgetSeleniumFirefoxTests): webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver' class AdminRawIdWidgetSeleniumIETests(AdminRawIdWidgetSeleniumFirefoxTests): webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver' @override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'], ROOT_URLCONF='admin_widgets.urls') class RelatedFieldWidgetSeleniumFirefoxTests(SeleniumDataMixin, AdminSeleniumWebDriverTestCase): available_apps = ['admin_widgets'] + AdminSeleniumWebDriverTestCase.available_apps webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver' def test_ForeignKey_using_to_field(self): self.admin_login(username='super', password='secret', login_url='/') self.selenium.get('%s%s' % ( self.live_server_url, reverse('admin:admin_widgets_profile_add'))) main_window = self.selenium.current_window_handle # Click the Add User button to add new self.selenium.find_element_by_id('add_id_user').click() self.selenium.switch_to.window('id_user') self.wait_for('#id_password') password_field = self.selenium.find_element_by_id('id_password') password_field.send_keys('password') username_field = self.selenium.find_element_by_id('id_username') username_value = 'newuser' username_field.send_keys(username_value) save_button_css_selector = '.submit-row > input[type=submit]' self.selenium.find_element_by_css_selector(save_button_css_selector).click() self.selenium.switch_to.window(main_window) # The field now contains the new user self.wait_for('#id_user option[value="newuser"]') # Click the Change User button to change it self.selenium.find_element_by_id('change_id_user').click() 
self.selenium.switch_to.window('id_user') self.wait_page_loaded() username_field = self.selenium.find_element_by_id('id_username') username_value = 'changednewuser' username_field.clear() username_field.send_keys(username_value) save_button_css_selector = '.submit-row > input[type=submit]' self.selenium.find_element_by_css_selector(save_button_css_selector).click() self.selenium.switch_to.window(main_window) # Wait up to 2 seconds for the new option to show up after clicking save in the popup. self.selenium.implicitly_wait(2) self.selenium.find_element_by_css_selector('#id_user option[value=changednewuser]') self.selenium.implicitly_wait(0) # Go ahead and submit the form to make sure it works self.selenium.find_element_by_css_selector(save_button_css_selector).click() self.wait_for_text('li.success', 'The profile "changednewuser" was added successfully.') profiles = models.Profile.objects.all() self.assertEqual(len(profiles), 1) self.assertEqual(profiles[0].user.username, username_value) class RelatedFieldWidgetSeleniumChromeTests(RelatedFieldWidgetSeleniumFirefoxTests): webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver' class RelatedFieldWidgetSeleniumIETests(RelatedFieldWidgetSeleniumFirefoxTests): webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
jigarkb/Programming
refs/heads/master
LeetCode/480-H-SlidingWindowMedian.py
2
# Median is the middle value in an ordered integer list. If the size of the list is even, there is no middle value. So
# the median is the mean of the two middle value.
#
# Examples:
# [2,3,4] , the median is 3
#
# [2,3], the median is (2 + 3) / 2 = 2.5
#
# Given an array nums, there is a sliding window of size k which is moving from the very left of the array to the very
# right. You can only see the k numbers in the window. Each time the sliding window moves right by one position. Your
# job is to output the median array for each window in the original array.
#
# For example,
# Given nums = [1,3,-1,-3,5,3,6,7], and k = 3, the answer is [1,-1,-1,3,5,6].
#
# Note:
# You may assume k is always valid, ie: k is always smaller than input array's size for non-empty array.

import bisect


class Solution(object):
    def medianSlidingWindow(self, nums, k):
        """
        Return the median of every length-k sliding window of ``nums``.

        :type nums: List[int]
        :type k: int
        :rtype: List[float]

        Maintains a sorted copy of the current window.  For each position:
          * record the median of the sorted window,
          * evict the outgoing element (found with bisect, O(log k) search),
          * insert the incoming element with ``bisect.insort``.
        Still O(n*k) worst case because list insert/delete shift elements,
        but avoids the O(k) linear scan of ``list.remove``.
        """
        window = sorted(nums[:k])
        medians = []
        # Integer division: `k/2` breaks on Python 3 (float list index);
        # `k//2` behaves identically on Python 2.
        half = k // 2
        for i in range(k, len(nums) + 1):
            # window[half] and window[~half] are the two middle elements
            # (they are the same element when k is odd).
            medians.append((window[half] + window[~half]) / 2.0)
            if i < len(nums):
                # Slide the window: evict nums[i - k], admit nums[i].
                window.pop(bisect.bisect_left(window, nums[i - k]))
                bisect.insort(window, nums[i])
        return medians

# Note:
# O(nk) solution
# Can have O(nlogk) similar to 295
gaapt/opencog
refs/heads/master
tests/cython/agents/test_agent_w_load_exception.py
35
# This module deliberately raises at import time.  Judging by its path
# (tests/cython/agents/test_agent_w_load_exception.py), it presumably serves
# as a fixture so tests can verify that the agent loader surfaces an
# exception raised while importing an agent module — TODO confirm against
# the test that loads it.  Do not "fix" this by removing the raise.
raise TypeError
nmercier/linux-cross-gcc
refs/heads/master
linux/lib/python2.7/dist-packages/Crypto/SelfTest/Cipher/test_pkcs1_oaep.py
3
# -*- coding: utf-8 -*- # # SelfTest/Cipher/test_pkcs1_oaep.py: Self-test for PKCS#1 OAEP encryption # # =================================================================== # The contents of this file are dedicated to the public domain. To # the extent that dedication to the public domain is not available, # everyone is granted a worldwide, perpetual, royalty-free, # non-exclusive license to exercise all rights associated with the # contents of this file for any purpose whatsoever. # No rights are reserved. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS # BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN # ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. # =================================================================== from __future__ import nested_scopes __revision__ = "$Id$" import unittest from Crypto.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex from Crypto.Util.py3compat import * from Crypto.PublicKey import RSA from Crypto.Cipher import PKCS1_OAEP as PKCS from Crypto.Hash import MD2,MD5,SHA as SHA1,SHA256,RIPEMD from Crypto import Random def rws(t): """Remove white spaces, tabs, and new lines from a string""" for c in ['\n', '\t', ' ']: t = t.replace(c,'') return t def t2b(t): """Convert a text string with bytes in hex form to a byte string""" clean = rws(t) if len(clean)%2 == 1: raise ValueError("Even number of characters expected") return a2b_hex(clean) class PKCS1_OAEP_Tests(unittest.TestCase): def setUp(self): self.rng = Random.new().read self.key1024 = RSA.generate(1024, self.rng) # List of tuples with test data for PKCS#1 OAEP # Each tuple is made up by: # Item #0: dictionary with RSA key component # Item #1: 
plaintext # Item #2: ciphertext # Item #3: random data (=seed) # Item #4: hash object _testData = ( # # From in oaep-int.txt to be found in # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip # ( # Private key { 'n':'''bb f8 2f 09 06 82 ce 9c 23 38 ac 2b 9d a8 71 f7 36 8d 07 ee d4 10 43 a4 40 d6 b6 f0 74 54 f5 1f b8 df ba af 03 5c 02 ab 61 ea 48 ce eb 6f cd 48 76 ed 52 0d 60 e1 ec 46 19 71 9d 8a 5b 8b 80 7f af b8 e0 a3 df c7 37 72 3e e6 b4 b7 d9 3a 25 84 ee 6a 64 9d 06 09 53 74 88 34 b2 45 45 98 39 4e e0 aa b1 2d 7b 61 a5 1f 52 7a 9a 41 f6 c1 68 7f e2 53 72 98 ca 2a 8f 59 46 f8 e5 fd 09 1d bd cb''', # Public key 'e':'11', # In the test vector, only p and q were given... # d is computed offline as e^{-1} mod (p-1)(q-1) 'd':'''a5dafc5341faf289c4b988db30c1cdf83f31251e0 668b42784813801579641b29410b3c7998d6bc465745e5c3 92669d6870da2c082a939e37fdcb82ec93edac97ff3ad595 0accfbc111c76f1a9529444e56aaf68c56c092cd38dc3bef 5d20a939926ed4f74a13eddfbe1a1cecc4894af9428c2b7b 8883fe4463a4bc85b1cb3c1''' } , # Plaintext '''d4 36 e9 95 69 fd 32 a7 c8 a0 5b bc 90 d3 2c 49''', # Ciphertext '''12 53 e0 4d c0 a5 39 7b b4 4a 7a b8 7e 9b f2 a0 39 a3 3d 1e 99 6f c8 2a 94 cc d3 00 74 c9 5d f7 63 72 20 17 06 9e 52 68 da 5d 1c 0b 4f 87 2c f6 53 c1 1d f8 23 14 a6 79 68 df ea e2 8d ef 04 bb 6d 84 b1 c3 1d 65 4a 19 70 e5 78 3b d6 eb 96 a0 24 c2 ca 2f 4a 90 fe 9f 2e f5 c9 c1 40 e5 bb 48 da 95 36 ad 87 00 c8 4f c9 13 0a de a7 4e 55 8d 51 a7 4d df 85 d8 b5 0d e9 68 38 d6 06 3e 09 55''', # Random '''aa fd 12 f6 59 ca e6 34 89 b4 79 e5 07 6d de c2 f0 6c b5 8f''', # Hash SHA1, ), # # From in oaep-vect.txt to be found in Example 1.1 # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip # ( # Private key { 'n':'''a8 b3 b2 84 af 8e b5 0b 38 70 34 a8 60 f1 46 c4 91 9f 31 87 63 cd 6c 55 98 c8 ae 48 11 a1 e0 ab c4 c7 e0 b0 82 d6 93 a5 e7 fc ed 67 5c f4 66 85 12 77 2c 0c bc 64 a7 42 c6 c6 30 f5 33 c8 cc 72 f6 2a e8 33 c4 0b f2 58 42 e9 84 bb 78 bd bf 97 c0 10 7d 55 bd b6 62 f5 c4 e0 fa b9 84 
5c b5 14 8e f7 39 2d d3 aa ff 93 ae 1e 6b 66 7b b3 d4 24 76 16 d4 f5 ba 10 d4 cf d2 26 de 88 d3 9f 16 fb''', 'e':'''01 00 01''', 'd':'''53 33 9c fd b7 9f c8 46 6a 65 5c 73 16 ac a8 5c 55 fd 8f 6d d8 98 fd af 11 95 17 ef 4f 52 e8 fd 8e 25 8d f9 3f ee 18 0f a0 e4 ab 29 69 3c d8 3b 15 2a 55 3d 4a c4 d1 81 2b 8b 9f a5 af 0e 7f 55 fe 73 04 df 41 57 09 26 f3 31 1f 15 c4 d6 5a 73 2c 48 31 16 ee 3d 3d 2d 0a f3 54 9a d9 bf 7c bf b7 8a d8 84 f8 4d 5b eb 04 72 4d c7 36 9b 31 de f3 7d 0c f5 39 e9 cf cd d3 de 65 37 29 ea d5 d1 ''' } , # Plaintext '''66 28 19 4e 12 07 3d b0 3b a9 4c da 9e f9 53 23 97 d5 0d ba 79 b9 87 00 4a fe fe 34''', # Ciphertext '''35 4f e6 7b 4a 12 6d 5d 35 fe 36 c7 77 79 1a 3f 7b a1 3d ef 48 4e 2d 39 08 af f7 22 fa d4 68 fb 21 69 6d e9 5d 0b e9 11 c2 d3 17 4f 8a fc c2 01 03 5f 7b 6d 8e 69 40 2d e5 45 16 18 c2 1a 53 5f a9 d7 bf c5 b8 dd 9f c2 43 f8 cf 92 7d b3 13 22 d6 e8 81 ea a9 1a 99 61 70 e6 57 a0 5a 26 64 26 d9 8c 88 00 3f 84 77 c1 22 70 94 a0 d9 fa 1e 8c 40 24 30 9c e1 ec cc b5 21 00 35 d4 7a c7 2e 8a''', # Random '''18 b7 76 ea 21 06 9d 69 77 6a 33 e9 6b ad 48 e1 dd a0 a5 ef''', SHA1 ), # # From in oaep-vect.txt to be found in Example 2.1 # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip # ( # Private key { 'n':'''01 94 7c 7f ce 90 42 5f 47 27 9e 70 85 1f 25 d5 e6 23 16 fe 8a 1d f1 93 71 e3 e6 28 e2 60 54 3e 49 01 ef 60 81 f6 8c 0b 81 41 19 0d 2a e8 da ba 7d 12 50 ec 6d b6 36 e9 44 ec 37 22 87 7c 7c 1d 0a 67 f1 4b 16 94 c5 f0 37 94 51 a4 3e 49 a3 2d de 83 67 0b 73 da 91 a1 c9 9b c2 3b 43 6a 60 05 5c 61 0f 0b af 99 c1 a0 79 56 5b 95 a3 f1 52 66 32 d1 d4 da 60 f2 0e da 25 e6 53 c4 f0 02 76 6f 45''', 'e':'''01 00 01''', 'd':'''08 23 f2 0f ad b5 da 89 08 8a 9d 00 89 3e 21 fa 4a 1b 11 fb c9 3c 64 a3 be 0b aa ea 97 fb 3b 93 c3 ff 71 37 04 c1 9c 96 3c 1d 10 7a ae 99 05 47 39 f7 9e 02 e1 86 de 86 f8 7a 6d de fe a6 d8 cc d1 d3 c8 1a 47 bf a7 25 5b e2 06 01 a4 a4 b2 f0 8a 16 7b 5e 27 9d 71 5b 1b 45 5b dd 7e ab 24 59 41 d9 76 8b 9a ce fb 3c cd a5 
95 2d a3 ce e7 25 25 b4 50 16 63 a8 ee 15 c9 e9 92 d9 24 62 fe 39''' }, # Plaintext '''8f f0 0c aa 60 5c 70 28 30 63 4d 9a 6c 3d 42 c6 52 b5 8c f1 d9 2f ec 57 0b ee e7''', # Ciphertext '''01 81 af 89 22 b9 fc b4 d7 9d 92 eb e1 98 15 99 2f c0 c1 43 9d 8b cd 49 13 98 a0 f4 ad 3a 32 9a 5b d9 38 55 60 db 53 26 83 c8 b7 da 04 e4 b1 2a ed 6a ac df 47 1c 34 c9 cd a8 91 ad dc c2 df 34 56 65 3a a6 38 2e 9a e5 9b 54 45 52 57 eb 09 9d 56 2b be 10 45 3f 2b 6d 13 c5 9c 02 e1 0f 1f 8a bb 5d a0 d0 57 09 32 da cf 2d 09 01 db 72 9d 0f ef cc 05 4e 70 96 8e a5 40 c8 1b 04 bc ae fe 72 0e''', # Random '''8c 40 7b 5e c2 89 9e 50 99 c5 3e 8c e7 93 bf 94 e7 1b 17 82''', SHA1 ), # # From in oaep-vect.txt to be found in Example 10.1 # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip # ( # Private key { 'n':'''ae 45 ed 56 01 ce c6 b8 cc 05 f8 03 93 5c 67 4d db e0 d7 5c 4c 09 fd 79 51 fc 6b 0c ae c3 13 a8 df 39 97 0c 51 8b ff ba 5e d6 8f 3f 0d 7f 22 a4 02 9d 41 3f 1a e0 7e 4e be 9e 41 77 ce 23 e7 f5 40 4b 56 9e 4e e1 bd cf 3c 1f b0 3e f1 13 80 2d 4f 85 5e b9 b5 13 4b 5a 7c 80 85 ad ca e6 fa 2f a1 41 7e c3 76 3b e1 71 b0 c6 2b 76 0e de 23 c1 2a d9 2b 98 08 84 c6 41 f5 a8 fa c2 6b da d4 a0 33 81 a2 2f e1 b7 54 88 50 94 c8 25 06 d4 01 9a 53 5a 28 6a fe b2 71 bb 9b a5 92 de 18 dc f6 00 c2 ae ea e5 6e 02 f7 cf 79 fc 14 cf 3b dc 7c d8 4f eb bb f9 50 ca 90 30 4b 22 19 a7 aa 06 3a ef a2 c3 c1 98 0e 56 0c d6 4a fe 77 95 85 b6 10 76 57 b9 57 85 7e fd e6 01 09 88 ab 7d e4 17 fc 88 d8 f3 84 c4 e6 e7 2c 3f 94 3e 0c 31 c0 c4 a5 cc 36 f8 79 d8 a3 ac 9d 7d 59 86 0e aa da 6b 83 bb''', 'e':'''01 00 01''', 'd':'''05 6b 04 21 6f e5 f3 54 ac 77 25 0a 4b 6b 0c 85 25 a8 5c 59 b0 bd 80 c5 64 50 a2 2d 5f 43 8e 59 6a 33 3a a8 75 e2 91 dd 43 f4 8c b8 8b 9d 5f c0 d4 99 f9 fc d1 c3 97 f9 af c0 70 cd 9e 39 8c 8d 19 e6 1d b7 c7 41 0a 6b 26 75 df bf 5d 34 5b 80 4d 20 1a dd 50 2d 5c e2 df cb 09 1c e9 99 7b be be 57 30 6f 38 3e 4d 58 81 03 f0 36 f7 e8 5d 19 34 d1 52 a3 23 e4 a8 db 45 1d 6f 4a 5b 1b 0f 10 2c c1 50 
e0 2f ee e2 b8 8d ea 4a d4 c1 ba cc b2 4d 84 07 2d 14 e1 d2 4a 67 71 f7 40 8e e3 05 64 fb 86 d4 39 3a 34 bc f0 b7 88 50 1d 19 33 03 f1 3a 22 84 b0 01 f0 f6 49 ea f7 93 28 d4 ac 5c 43 0a b4 41 49 20 a9 46 0e d1 b7 bc 40 ec 65 3e 87 6d 09 ab c5 09 ae 45 b5 25 19 01 16 a0 c2 61 01 84 82 98 50 9c 1c 3b f3 a4 83 e7 27 40 54 e1 5e 97 07 50 36 e9 89 f6 09 32 80 7b 52 57 75 1e 79''' }, # Plaintext '''8b ba 6b f8 2a 6c 0f 86 d5 f1 75 6e 97 95 68 70 b0 89 53 b0 6b 4e b2 05 bc 16 94 ee''', # Ciphertext '''53 ea 5d c0 8c d2 60 fb 3b 85 85 67 28 7f a9 15 52 c3 0b 2f eb fb a2 13 f0 ae 87 70 2d 06 8d 19 ba b0 7f e5 74 52 3d fb 42 13 9d 68 c3 c5 af ee e0 bf e4 cb 79 69 cb f3 82 b8 04 d6 e6 13 96 14 4e 2d 0e 60 74 1f 89 93 c3 01 4b 58 b9 b1 95 7a 8b ab cd 23 af 85 4f 4c 35 6f b1 66 2a a7 2b fc c7 e5 86 55 9d c4 28 0d 16 0c 12 67 85 a7 23 eb ee be ff 71 f1 15 94 44 0a ae f8 7d 10 79 3a 87 74 a2 39 d4 a0 4c 87 fe 14 67 b9 da f8 52 08 ec 6c 72 55 79 4a 96 cc 29 14 2f 9a 8b d4 18 e3 c1 fd 67 34 4b 0c d0 82 9d f3 b2 be c6 02 53 19 62 93 c6 b3 4d 3f 75 d3 2f 21 3d d4 5c 62 73 d5 05 ad f4 cc ed 10 57 cb 75 8f c2 6a ee fa 44 12 55 ed 4e 64 c1 99 ee 07 5e 7f 16 64 61 82 fd b4 64 73 9b 68 ab 5d af f0 e6 3e 95 52 01 68 24 f0 54 bf 4d 3c 8c 90 a9 7b b6 b6 55 32 84 eb 42 9f cc''', # Random '''47 e1 ab 71 19 fe e5 6c 95 ee 5e aa d8 6f 40 d0 aa 63 bd 33''', SHA1 ), ) def testEncrypt1(self): # Verify encryption using all test vectors for test in self._testData: # Build the key comps = [ long(rws(test[0][x]),16) for x in ('n','e') ] key = RSA.construct(comps) # RNG that takes its random numbers from a pool given # at initialization class randGen: def __init__(self, data): self.data = data self.idx = 0 def __call__(self, N): r = self.data[self.idx:N] self.idx += N return r # The real test key._randfunc = randGen(t2b(test[3])) cipher = PKCS.new(key, test[4]) ct = cipher.encrypt(t2b(test[1])) self.assertEqual(ct, t2b(test[2])) def testEncrypt2(self): # Verify that encryption fails if plaintext is too 
long pt = '\x00'*(128-2*20-2+1) cipher = PKCS.new(self.key1024) self.assertRaises(ValueError, cipher.encrypt, pt) def testDecrypt1(self): # Verify decryption using all test vectors for test in self._testData: # Build the key comps = [ long(rws(test[0][x]),16) for x in ('n','e','d') ] key = RSA.construct(comps) # The real test cipher = PKCS.new(key, test[4]) pt = cipher.decrypt(t2b(test[2])) self.assertEqual(pt, t2b(test[1])) def testDecrypt2(self): # Simplest possible negative tests for ct_size in (127,128,129): cipher = PKCS.new(self.key1024) self.assertRaises(ValueError, cipher.decrypt, bchr(0x00)*ct_size) def testEncryptDecrypt1(self): # Encrypt/Decrypt messages of length [0..128-2*20-2] for pt_len in xrange(0,128-2*20-2): pt = self.rng(pt_len) cipher = PKCS.new(self.key1024) ct = cipher.encrypt(pt) pt2 = cipher.decrypt(ct) self.assertEqual(pt,pt2) def testEncryptDecrypt2(self): # Helper function to monitor what's requested from RNG global asked def localRng(N): global asked asked += N return self.rng(N) # Verify that OAEP is friendly to all hashes for hashmod in (MD2,MD5,SHA1,SHA256,RIPEMD): # Verify that encrypt() asks for as many random bytes # as the hash output size asked = 0 pt = self.rng(40) self.key1024._randfunc = localRng cipher = PKCS.new(self.key1024, hashmod) ct = cipher.encrypt(pt) self.assertEqual(cipher.decrypt(ct), pt) self.assertTrue(asked > hashmod.digest_size) def testEncryptDecrypt3(self): # Verify that OAEP supports labels pt = self.rng(35) xlabel = self.rng(22) cipher = PKCS.new(self.key1024, label=xlabel) ct = cipher.encrypt(pt) self.assertEqual(cipher.decrypt(ct), pt) def testEncryptDecrypt4(self): # Verify that encrypt() uses the custom MGF global mgfcalls # Helper function to monitor what's requested from MGF def newMGF(seed,maskLen): global mgfcalls mgfcalls += 1 return bchr(0x00)*maskLen mgfcalls = 0 pt = self.rng(32) cipher = PKCS.new(self.key1024, mgfunc=newMGF) ct = cipher.encrypt(pt) self.assertEqual(mgfcalls, 2) 
self.assertEqual(cipher.decrypt(ct), pt) def get_tests(config={}): tests = [] tests += list_test_cases(PKCS1_OAEP_Tests) return tests if __name__ == '__main__': suite = lambda: unittest.TestSuite(get_tests()) unittest.main(defaultTest='suite') # vim:set ts=4 sw=4 sts=4 expandtab:
koyuawsmbrtn/eclock
refs/heads/master
windows/Python27/Lib/site-packages/docutils/readers/pep.py
136
# $Id: pep.py 7320 2012-01-19 22:33:02Z milde $ # Author: David Goodger <goodger@python.org> # Copyright: This module has been placed in the public domain. """ Python Enhancement Proposal (PEP) Reader. """ __docformat__ = 'reStructuredText' from docutils.readers import standalone from docutils.transforms import peps, references, misc, frontmatter from docutils.parsers import rst class Reader(standalone.Reader): supported = ('pep',) """Contexts this reader supports.""" settings_spec = ( 'PEP Reader Option Defaults', 'The --pep-references and --rfc-references options (for the ' 'reStructuredText parser) are on by default.', ()) config_section = 'pep reader' config_section_dependencies = ('readers', 'standalone reader') def get_transforms(self): transforms = standalone.Reader.get_transforms(self) # We have PEP-specific frontmatter handling. transforms.remove(frontmatter.DocTitle) transforms.remove(frontmatter.SectionSubTitle) transforms.remove(frontmatter.DocInfo) transforms.extend([peps.Headers, peps.Contents, peps.TargetNotes]) return transforms settings_default_overrides = {'pep_references': 1, 'rfc_references': 1} inliner_class = rst.states.Inliner def __init__(self, parser=None, parser_name=None): """`parser` should be ``None``.""" if parser is None: parser = rst.Parser(rfc2822=True, inliner=self.inliner_class()) standalone.Reader.__init__(self, parser, '')
tensorflow/tensor2tensor
refs/heads/master
tensor2tensor/models/neural_assistant.py
1
# coding=utf-8 # Copyright 2021 The Tensor2Tensor Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Neural Assistant.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import six from tensor2tensor.layers import common_attention from tensor2tensor.layers import common_layers from tensor2tensor.models import transformer from tensor2tensor.utils import registry import tensorflow.compat.v1 as tf @registry.register_model class NeuralAssistant(transformer.Transformer): """Attention net. See file docstring.""" def __init__(self, *args, **kwargs): super(NeuralAssistant, self).__init__(*args, **kwargs) self.attention_weights = dict() # For visualizing attention heads. # Loss scheduling. 
hparams = self._hparams self.triple_num = hparams.train_triple_num def model_fn(self, features): with tf.variable_scope(tf.get_variable_scope(), use_resource=True) as vs: self._add_variable_scope("model_fn", vs) transformed_features = self.bottom(features) if self.hparams.activation_dtype == "bfloat16": for k, v in sorted(six.iteritems(transformed_features)): if v.dtype == tf.float32: transformed_features[k] = tf.cast(v, tf.bfloat16) with tf.variable_scope("body") as body_vs: self._add_variable_scope("body", body_vs) body_out = self.body(transformed_features) output, losses = self._normalize_body_output(body_out) if "training" in losses: tf.logging.info( "Skipping T2TModel top and loss because training loss returned from body" ) logits = output else: tf.logging.warn("The loss will be computed in model_fn now.") logits = self.top(output, features) losses["training"] = 0.0 cur_kb_loss = losses["kb_loss"] cur_knowledge_training_loss = losses["transe_loss"] cur_kb_loss_weight = self._hparams.kb_loss_weight kb_train_weight = self._hparams.kb_train_weight cur_lm_loss_weight = 1.0 - cur_kb_loss_weight # Finalize loss if (self._hparams.mode != tf.estimator.ModeKeys.PREDICT and self._hparams.mode != "attack"): lm_loss_num, lm_loss_denom = self.loss(logits, features) total_loss = (kb_train_weight) * cur_knowledge_training_loss + ( 1 - kb_train_weight) * ( cur_kb_loss * cur_kb_loss_weight + (lm_loss_num / lm_loss_denom) * cur_lm_loss_weight) tf.summary.scalar("kb_loss", cur_kb_loss) tf.summary.scalar("transe_loss", cur_knowledge_training_loss) tf.summary.scalar("lm_loss", (lm_loss_num / lm_loss_denom)) tf.summary.scalar("cur_kb_loss_weight", tf.reshape(cur_kb_loss_weight, [])) tf.logging.info("Loss computed " + str(total_loss)) losses = {"training": total_loss} return logits, losses def encode_knowledge_bottom(self, features): tf.logging.info("Encoding knowledge " + str(self.triple_num)) # Make sure this is embeddings for triples # <tf.float32>[batch_size, 
triple_num*max_triple_length, 1, emb_dim] fact_embedding = features["encoded_triples"] # [batch_size, triple_num*max_triple_length, emb_dim] fact_embedding = tf.squeeze(fact_embedding, 2) kb_shape = common_layers.shape_list(fact_embedding) batch_size = kb_shape[0] embed_dim = kb_shape[2] # <tf.float32>[batch_size*triple_num, max_triple_length, emb_dim] re_fact_embedding = tf.reshape( fact_embedding, [batch_size * self.triple_num, -1, embed_dim], name="reshape_fact_embedding") # <tf.int64>[batch_size, triple_num] input_fact_lengths = features["triple_lens"] # Stack the fact lengths. # <tf.int64>[batch_size*max_triple_num] re_fact_lengths = tf.reshape( input_fact_lengths, [batch_size * self.triple_num, 1], name="reshape_fact_lengths") return re_fact_embedding, re_fact_lengths def compute_knowledge_selection_and_loss(self, features, encoder_output, fact_embedding, fact_lengths, margin, num_negative_samples): """Compute knowledge selection and loss. Args: features: features. encoder_output: <tf.float32>[batch_size, input_length, hidden_dim] fact_embedding: <tf.float32>[batch_size*triple_num, max_triple_length, emb_dim] fact_lengths: # <tf.int32>[batch_size*triple_num] margin: integer value for max margin in TransE loss, num_negative_samples: shuffle and sample multiple negative examples for the TransE loss Returns: knowledge_weights: knowledge_loss: """ hparams = self._hparams encoder_output_shape = common_layers.shape_list(encoder_output) encoder_hidden_dim = encoder_output_shape[-1] inputs = features["inputs"] # <tf.float32>[batch_size, input_length, emb_dim] inputs = tf.squeeze(inputs, 2) # <tf.float32>[batch_size, input_length] context_padding = common_attention.embedding_to_padding(inputs) # <tf.float32>[batch_size] context_lens = tf.to_float( common_attention.padding_to_length(context_padding)) # <tf.float32>[batch_size, 1] context_lens = tf.expand_dims(context_lens, -1) # Compute context vector summary. 
# <tf.float32>[batch_size, hidden_dim] context_vector_summary = compute_summary_embedding(encoder_output, context_lens, hparams) knowledge_encoder_output = compute_average_embedding( fact_embedding, fact_lengths) # <tf.float32>[batch_size, triple_num, emb_dim] knowledge_encoder_output = tf.reshape( knowledge_encoder_output, [-1, self.triple_num, encoder_hidden_dim]) original_knowledge_encoder_output = knowledge_encoder_output if hparams.similarity_fuction == "dot_product": triple_logits = tf.squeeze( tf.matmul(knowledge_encoder_output, tf.expand_dims(context_vector_summary, 2)), -1) elif hparams.similarity_fuction == "bilinear": # Tile the context vector summary. # <tf.float32>[batch_size, triple_num*hidden_dim] tiled_context_vector = tf.tile(context_vector_summary, [1, self.triple_num]) # <tf.float32>[batch_size, triple_num, hidden_dim] context_vector = tf.reshape(tiled_context_vector, [-1, self.triple_num, encoder_hidden_dim]) # compute outer product context_vector = tf.expand_dims(context_vector, -1) knowledge_encoder_output = tf.expand_dims(knowledge_encoder_output, 2) # <tf.float32>[batch_size, triple_num, hidden_dim, hidden_dim] outer_product = tf.matmul(context_vector, knowledge_encoder_output) outer_product = tf.reshape( outer_product, [-1, self.triple_num, encoder_hidden_dim * encoder_hidden_dim]) triple_logits = tf.squeeze( tf.layers.dense(outer_product, 1, name="knolwedge_final_mlp"), -1) avg_triple_loss = 0.0 triple_labels = features["triple_labels"] subject_mask = tf.reshape(features["subject_mask"], [-1, self.triple_num, hparams.max_triple_length]) subject_mask = tf.reshape(subject_mask, [-1, hparams.max_triple_length]) predicate_mask = tf.reshape( features["predicate_mask"], [-1, self.triple_num, hparams.max_triple_length]) predicate_mask = tf.reshape(predicate_mask, [-1, hparams.max_triple_length]) object_mask = tf.reshape(features["object_mask"], [-1, self.triple_num, hparams.max_triple_length]) object_mask = tf.reshape(object_mask, [-1, 
hparams.max_triple_length]) # mask : [bs, max_seq_len, triple_num] # the below operation will result in [bs*triple_num,emb_dim] subject_length = tf.cast( tf.expand_dims(tf.reduce_sum(subject_mask, -1), 1), tf.float32) # [bs*tn] object_length = tf.cast( tf.expand_dims(tf.reduce_sum(object_mask, -1), 1), tf.float32) predicate_length = tf.cast( tf.expand_dims(tf.reduce_sum(predicate_mask, -1), 1), tf.float32) # expand dimension 2 to be able to broadcast subject_mask = tf.cast(tf.expand_dims(subject_mask, 2), tf.float32) predicate_mask = tf.cast(tf.expand_dims(predicate_mask, 2), tf.float32) object_mask = tf.cast(tf.expand_dims(object_mask, 2), tf.float32) subject_vect = tf.reduce_sum(tf.multiply( fact_embedding, subject_mask), 1) / ( subject_length + tf.broadcast_to(tf.constant([1e-5]), tf.shape(subject_length))) object_vect = tf.reduce_sum(tf.multiply(fact_embedding, object_mask), 1) / ( object_length + tf.broadcast_to(tf.constant([1e-5]), tf.shape(object_length))) predicate_vect = tf.reduce_sum( tf.multiply(fact_embedding, predicate_mask), 1) / ( predicate_length + tf.broadcast_to(tf.constant([1e-5]), tf.shape(predicate_length))) # Shuffled rows to generate adversarial samples shuffled_subject_vect = [] shuffled_object_vect = [] for _ in range(num_negative_samples): shuffled_subject_vect += [ tf.gather(subject_vect, tf.random.shuffle(tf.range(tf.shape(subject_vect)[0]))) ] # [bs*tn,d] shuffled_object_vect += [ tf.gather(object_vect, tf.random.shuffle(tf.range(tf.shape(object_vect)[0]))) ] # [bs*tn,d] # KB pretraining loss positive_loss = tf.reduce_mean( tf.squared_difference(subject_vect + predicate_vect, object_vect)) negative_loss = 0 for n_adv in range(num_negative_samples): negative_loss += tf.reduce_mean( tf.squared_difference(shuffled_subject_vect[n_adv] + predicate_vect, object_vect)) negative_loss += tf.reduce_mean( tf.squared_difference(subject_vect + predicate_vect, shuffled_object_vect[n_adv])) # TransE Loss negative_loss = negative_loss / (2 * 
num_negative_samples) transe_loss = tf.clip_by_value( margin + positive_loss - negative_loss, clip_value_min=0, clip_value_max=100) if hparams.mode != tf.estimator.ModeKeys.PREDICT: triple_losses = tf.nn.weighted_cross_entropy_with_logits( labels=triple_labels, logits=triple_logits, pos_weight=hparams.pos_weight) avg_triple_loss = tf.reduce_mean(triple_losses) tf.summary.scalar("triple_loss", avg_triple_loss) return triple_logits, avg_triple_loss, original_knowledge_encoder_output, transe_loss def body(self, features): """Transformer main model_fn. Args: features: Map of features to the model. Should contain the following: "inputs": Transformer inputs [batch_size, input_length, hidden_dim] "targets": Target decoder outputs. [batch_size, decoder_length, hidden_dim] "target_space_id": A scalar int from data_generators.problem.SpaceID. Returns: Final decoder representation. [batch_size, decoder_length, hidden_dim] """ tf.logging.info("Using PgScratch BODY function.") hparams = self._hparams losses = {} inputs = features["inputs"] target_space = features["target_space_id"] # encoder_output: <tf.float32>[batch_size, input_length, hidden_dim] # encoder_decoder_attention_bias: <tf.float32>[batch_size, input_length] encoder_output, encoder_decoder_attention_bias = self.encode( inputs, target_space, hparams, features=features, losses=losses) with tf.variable_scope("knowledge"): with tf.name_scope("knowledge_encoding"): # Encode knowledge. # <tf.float32>[batch_size, triple_num, emb_dim] fact_embedding, fact_lengths = self.encode_knowledge_bottom(features) tf.logging.info("Encoded knowledge") with tf.name_scope("knowledge_selection_and_loss"): # Compute knowledge selection and loss. 
triple_logits, avg_triple_selection_loss, knowledge_encoder_output, transe_loss = self.compute_knowledge_selection_and_loss( features, encoder_output, fact_embedding, fact_lengths, hparams.margin, hparams.num_negative_samples) losses["kb_loss"] = avg_triple_selection_loss losses["transe_loss"] = transe_loss if hparams.attend_kb: tf.logging.info("ATTEND_KB is ACTIVE") with tf.name_scope("knowledge_attention"): knowledge_padding = tf.zeros_like(triple_logits, dtype=tf.float32) knowledge_attention_bias = common_attention.attention_bias_ignore_padding( knowledge_padding) encoder_output = tf.concat([knowledge_encoder_output, encoder_output], 1) encoder_decoder_attention_bias = tf.concat( [knowledge_attention_bias, encoder_decoder_attention_bias], -1) else: tf.logging.info("ATTEND_KB is INACTIVE") targets = features["targets"] targets_shape = common_layers.shape_list(targets) targets = common_layers.flatten4d3d(targets) (decoder_input, decoder_self_attention_bias) = transformer.transformer_prepare_decoder( targets, hparams, features=features) decode_kwargs = {} decoder_output = self.decode( decoder_input, encoder_output, encoder_decoder_attention_bias, decoder_self_attention_bias, hparams, nonpadding=transformer.features_to_nonpadding(features, "targets"), losses=losses, **decode_kwargs) expected_attentions = features.get("expected_attentions") if expected_attentions is not None: attention_loss = common_attention.encoder_decoder_attention_loss( expected_attentions, self.attention_weights, hparams.expected_attention_loss_type, hparams.expected_attention_loss_multiplier) return decoder_output, {"attention_loss": attention_loss} ret = tf.reshape(decoder_output, targets_shape) if losses: return ret, losses else: return ret def _normalize_body_output(self, body_out): if len(body_out) == 2: output, losses = body_out if not isinstance(losses, dict): losses = {"extra": tf.reduce_mean(losses)} else: output = body_out losses = {"extra": 0.0} return output, losses def 
_beam_decode(self, features, decode_length, beam_size, top_beams, alpha, use_tpu=False): """Beam search decoding. Args: features: an map of string to `Tensor` decode_length: an integer. How many additional timesteps to decode. beam_size: number of beams. top_beams: an integer. How many of the beams to return. alpha: Float that controls the length penalty. larger the alpha, stronger the preference for longer translations. use_tpu: A bool, whether to do beam decode on TPU. Returns: A dict of decoding results { "outputs": integer `Tensor` of decoded ids of shape [batch_size, <= decode_length] if beam_size == 1 or [batch_size, top_beams, <= decode_length] "scores": decoding log probs from the beam search, None if using greedy decoding (beam_size=1) } """ return super(transformer.Transformer, self)._beam_decode_slow(features, decode_length, beam_size, top_beams, alpha, use_tpu) def _greedy_infer(self, features, decode_length, use_tpu=False): """Fast version of greedy decoding. Args: features: an map of string to `Tensor` decode_length: an integer. How many additional timesteps to decode. use_tpu: A bool. Whether to build the inference graph for TPU. Returns: A dict of decoding results { "outputs": integer `Tensor` of decoded ids of shape [batch_size, <= decode_length] if beam_size == 1 or [batch_size, top_beams, <= decode_length] "scores": decoding log probs from the beam search, None if using greedy decoding (beam_size=1) } Raises: NotImplementedError: If there are multiple data shards. """ return super(transformer.Transformer, self)._greedy_infer(features, decode_length) def compute_last_embedding(input_embeddings, input_lengths, hparams): """Computes average of last K embedding. 
Args: input_embeddings: <tf.float32>[bs, max_seq_len, emb_dim] input_lengths: <tf.int64>[bs, 1] hparams: model hparams Returns: last_k_embedding: <tf.float32>[bs, emb_dim] """ max_seq_len = tf.shape(input_embeddings)[1] # <tf.float32>[bs, 1, max_seq_len] mask = tf.sequence_mask(input_lengths, max_seq_len, dtype=tf.float32) del_mask = tf.sequence_mask( input_lengths - hparams.last_k, max_seq_len, dtype=tf.float32) final_mask = mask - del_mask # <tf.float32>[bs, 1, emb_dim] sum_embedding = tf.matmul(final_mask, input_embeddings) # <tf.float32>[bs, 1, emb_dim] last_k_embedding = sum_embedding / tf.to_float( tf.expand_dims( tf.ones([tf.shape(input_embeddings)[0], 1]) * hparams.last_k, 2)) # <tf.float32>[bs, dim] return tf.squeeze(last_k_embedding, 1) def compute_max_pool_embedding(input_embeddings, input_lengths): """Computes max pool embedding. Args: input_embeddings: <tf.float32>[bs, max_seq_len, emb_dim] input_lengths: <tf.int64>[bs, 1] Returns: max_pool_embedding: <tf.float32>[bs, emb_dim] """ max_seq_len = tf.shape(input_embeddings)[1] # <tf.float32>[bs, max_seq_len] mask = 1.0 - tf.sequence_mask(input_lengths, max_seq_len, dtype=tf.float32) mask = tf.squeeze(mask * (-1e-6), 1) mask = tf.expand_dims(mask, 2) # <tf.float32>[bs, emb_dim] max_pool_embedding = tf.reduce_max(input_embeddings + mask, 1) # <tf.float32>[bs, dim] return max_pool_embedding def compute_average_embedding(input_embeddings, input_lengths): """Computes bag-of-words embedding. 
Args: input_embeddings: <tf.float32>[bs, max_seq_len, emb_dim] input_lengths: <tf.int64>[bs, 1] Returns: bow_embedding: <tf.float32>[bs, emb_dim] """ max_seq_len = tf.shape(input_embeddings)[1] # <tf.float32>[bs, 1, max_seq_len] mask = tf.sequence_mask(input_lengths, max_seq_len, dtype=tf.float32) # <tf.float32>[bs, 1, emb_dim] sum_embedding = tf.matmul(mask, input_embeddings) # <tf.float32>[bs, 1, emb_dim] avg_embedding = sum_embedding / tf.to_float(tf.expand_dims(input_lengths, 2)) # <tf.float32>[bs, dim] return tf.squeeze(avg_embedding, 1) def compute_summary_embedding(input_embeddings, input_lengths, hparams): """Convert list of embedding to single embedding. Args: input_embeddings: <tf.float32>[bs, max_seq_len, emb_dim] input_lengths: <tf.int64>[bs, 1] hparams: model hparams Returns: embedding: <tf.float32>[bs, emb_dim] """ if hparams.pool_technique == "average": return compute_average_embedding(input_embeddings, input_lengths) elif hparams.pool_technique == "max_pool": return compute_max_pool_embedding(input_embeddings, input_lengths) elif hparams.pool_technique == "last": return compute_last_embedding(input_embeddings, input_lengths, hparams) @registry.register_hparams def neural_assistant_base(): """HParams for a base neural_assistant model.""" hparams = transformer.transformer_tpu() hparams.add_hparam("pos_weight", 1.0) # weight for positive triples hparams.add_hparam("similarity_fuction", "bilinear") # dot_product or bilinear hparams.add_hparam("pool_technique", "average") # avg or max pool or last hparams.add_hparam("last_k", 1) # number of last indices for averaging hparams.add_hparam("max_triple_length", 30) # max length of every triple hparams.add_hparam("train_triple_num", 5000) # max number of triples during training hparams.add_hparam("attend_kb", True) # if False, it's a transformer model hparams.add_hparam("kb_loss_weight", 0.0) # weight for distant supervision hparams.add_hparam("test_triple_num", 28483) # max triples of KB 
hparams.add_hparam("margin", 0.0) # KB training max-margin loss hparams.add_hparam( "num_negative_samples", 1) # Sampling number of different adversarial training examples hparams.add_hparam("kb_train_weight", 0.0) # KB_training loss weight which combines Language model and KB selection loss return hparams @registry.register_hparams def neural_assistant_tiny(): """HParams for tiny neural_assistant model.""" hparams = transformer.transformer_tiny_tpu() hparams.add_hparam("pos_weight", 1.0) # weight for positive triples hparams.add_hparam("similarity_fuction", "bilinear") # dot_product or bilinear hparams.add_hparam("pool_technique", "average") # avg or max pool or last hparams.add_hparam("last_k", 1) # number of last indices for averaging hparams.add_hparam("max_triple_length", 30) # max length of every triple hparams.add_hparam("train_triple_num", 5000) # max number of triples during training hparams.add_hparam("attend_kb", True) # if False, it's a transformer model hparams.add_hparam("kb_loss_weight", 0.0) # weight for distant supervision hparams.add_hparam("test_triple_num", 28483) # max triples of KB hparams.add_hparam("margin", 1.0) # KB training max-margin loss hparams.add_hparam( "num_negative_samples", 1) # Sampling number of different adversarial training examples hparams.add_hparam("kb_train_weight", 0.0) # KB_training loss weight which combines Language model and KB selection loss return hparams @registry.register_hparams def neural_assistant_tiny_ds(): """HParams for tiny neural_assistant model with distant supervision loss.""" hparams = neural_assistant_tiny() hparams.kb_loss_weight = 0.2 return hparams
savanu/servo
refs/heads/master
tests/wpt/web-platform-tests/tools/wptrunner/wptrunner/executors/executorservo.py
4
import base64 import hashlib import httplib import json import os import subprocess import tempfile import threading import traceback import urlparse import uuid from collections import defaultdict from mozprocess import ProcessHandler from .base import (ExecutorException, Protocol, RefTestImplementation, testharness_result_converter, reftest_result_converter, WdspecExecutor) from .process import ProcessTestExecutor from ..browsers.base import browser_command from ..wpttest import WdspecResult, WdspecSubtestResult from ..webdriver_server import ServoDriverServer from .executormarionette import WdspecRun pytestrunner = None webdriver = None extra_timeout = 5 # seconds hosts_text = """127.0.0.1 web-platform.test 127.0.0.1 www.web-platform.test 127.0.0.1 www1.web-platform.test 127.0.0.1 www2.web-platform.test 127.0.0.1 xn--n8j6ds53lwwkrqhv28a.web-platform.test 127.0.0.1 xn--lve-6lad.web-platform.test """ def make_hosts_file(): hosts_fd, hosts_path = tempfile.mkstemp() with os.fdopen(hosts_fd, "w") as f: f.write(hosts_text) return hosts_path class ServoTestharnessExecutor(ProcessTestExecutor): convert_result = testharness_result_converter def __init__(self, browser, server_config, timeout_multiplier=1, debug_info=None, pause_after_test=False, **kwargs): ProcessTestExecutor.__init__(self, browser, server_config, timeout_multiplier=timeout_multiplier, debug_info=debug_info) self.pause_after_test = pause_after_test self.result_data = None self.result_flag = None self.protocol = Protocol(self, browser) self.hosts_path = make_hosts_file() def teardown(self): try: os.unlink(self.hosts_path) except OSError: pass ProcessTestExecutor.teardown(self) def do_test(self, test): self.result_data = None self.result_flag = threading.Event() args = [ "--hard-fail", "-u", "Servo/wptrunner", "-Z", "replace-surrogates", "-z", self.test_url(test), ] for stylesheet in self.browser.user_stylesheets: args += ["--user-stylesheet", stylesheet] for pref, value in test.environment.get('prefs', 
{}).iteritems(): args += ["--pref", "%s=%s" % (pref, value)] if self.browser.ca_certificate_path: args += ["--certificate-path", self.browser.ca_certificate_path] args += self.browser.binary_args debug_args, command = browser_command(self.binary, args, self.debug_info) self.command = command if self.pause_after_test: self.command.remove("-z") self.command = debug_args + self.command env = os.environ.copy() env["HOST_FILE"] = self.hosts_path env["RUST_BACKTRACE"] = "1" if not self.interactive: self.proc = ProcessHandler(self.command, processOutputLine=[self.on_output], onFinish=self.on_finish, env=env, storeOutput=False) self.proc.run() else: self.proc = subprocess.Popen(self.command, env=env) try: timeout = test.timeout * self.timeout_multiplier # Now wait to get the output we expect, or until we reach the timeout if not self.interactive and not self.pause_after_test: wait_timeout = timeout + 5 self.result_flag.wait(wait_timeout) else: wait_timeout = None self.proc.wait() proc_is_running = True if self.result_flag.is_set(): if self.result_data is not None: result = self.convert_result(test, self.result_data) else: self.proc.wait() result = (test.result_cls("CRASH", None), []) proc_is_running = False else: result = (test.result_cls("TIMEOUT", None), []) if proc_is_running: if self.pause_after_test: self.logger.info("Pausing until the browser exits") self.proc.wait() else: self.proc.kill() except KeyboardInterrupt: self.proc.kill() raise return result def on_output(self, line): prefix = "ALERT: RESULT: " line = line.decode("utf8", "replace") if line.startswith(prefix): self.result_data = json.loads(line[len(prefix):]) self.result_flag.set() else: if self.interactive: print line else: self.logger.process_output(self.proc.pid, line, " ".join(self.command)) def on_finish(self): self.result_flag.set() class TempFilename(object): def __init__(self, directory): self.directory = directory self.path = None def __enter__(self): self.path = os.path.join(self.directory, 
str(uuid.uuid4())) return self.path def __exit__(self, *args, **kwargs): try: os.unlink(self.path) except OSError: pass class ServoRefTestExecutor(ProcessTestExecutor): convert_result = reftest_result_converter def __init__(self, browser, server_config, binary=None, timeout_multiplier=1, screenshot_cache=None, debug_info=None, pause_after_test=False, **kwargs): ProcessTestExecutor.__init__(self, browser, server_config, timeout_multiplier=timeout_multiplier, debug_info=debug_info) self.protocol = Protocol(self, browser) self.screenshot_cache = screenshot_cache self.implementation = RefTestImplementation(self) self.tempdir = tempfile.mkdtemp() self.hosts_path = make_hosts_file() def teardown(self): try: os.unlink(self.hosts_path) except OSError: pass os.rmdir(self.tempdir) ProcessTestExecutor.teardown(self) def screenshot(self, test, viewport_size, dpi): full_url = self.test_url(test) with TempFilename(self.tempdir) as output_path: debug_args, command = browser_command( self.binary, [ "--hard-fail", "--exit", "-u", "Servo/wptrunner", "-Z", "disable-text-aa,load-webfonts-synchronously,replace-surrogates", "--output=%s" % output_path, full_url ] + self.browser.binary_args, self.debug_info) for stylesheet in self.browser.user_stylesheets: command += ["--user-stylesheet", stylesheet] for pref, value in test.environment.get('prefs', {}).iteritems(): command += ["--pref", "%s=%s" % (pref, value)] command += ["--resolution", viewport_size or "800x600"] if self.browser.ca_certificate_path: command += ["--certificate-path", self.browser.ca_certificate_path] if dpi: command += ["--device-pixel-ratio", dpi] # Run ref tests in headless mode command += ["-z"] self.command = debug_args + command env = os.environ.copy() env["HOST_FILE"] = self.hosts_path env["RUST_BACKTRACE"] = "1" if not self.interactive: self.proc = ProcessHandler(self.command, processOutputLine=[self.on_output], env=env) try: self.proc.run() timeout = test.timeout * self.timeout_multiplier + 5 rv = 
self.proc.wait(timeout=timeout) except KeyboardInterrupt: self.proc.kill() raise else: self.proc = subprocess.Popen(self.command, env=env) try: rv = self.proc.wait() except KeyboardInterrupt: self.proc.kill() raise if rv is None: self.proc.kill() return False, ("EXTERNAL-TIMEOUT", None) if rv != 0 or not os.path.exists(output_path): return False, ("CRASH", None) with open(output_path) as f: # Might need to strip variable headers or something here data = f.read() return True, base64.b64encode(data) def do_test(self, test): result = self.implementation.run_test(test) return self.convert_result(test, result) def on_output(self, line): line = line.decode("utf8", "replace") if self.interactive: print line else: self.logger.process_output(self.proc.pid, line, " ".join(self.command)) class ServoWdspecProtocol(Protocol): def __init__(self, executor, browser): self.do_delayed_imports() Protocol.__init__(self, executor, browser) self.session = None self.server = None def setup(self, runner): try: self.server = ServoDriverServer(self.logger, binary=self.browser.binary, binary_args=self.browser.binary_args) self.server.start(block=False) self.logger.info( "WebDriver HTTP server listening at %s" % self.server.url) self.logger.info( "Establishing new WebDriver session with %s" % self.server.url) self.session = webdriver.Session( self.server.host, self.server.port, self.server.base_path) except Exception: self.logger.error(traceback.format_exc()) self.executor.runner.send_message("init_failed") else: self.executor.runner.send_message("init_succeeded") def teardown(self): if self.server is not None: try: if self.session.session_id is not None: self.session.end() except Exception: pass if self.server.is_alive: self.server.stop() @property def is_alive(self): conn = httplib.HTTPConnection(self.server.host, self.server.port) conn.request("HEAD", self.server.base_path + "invalid") res = conn.getresponse() return res.status == 404 def do_delayed_imports(self): global pytestrunner, 
webdriver from . import pytestrunner import webdriver class ServoWdspecExecutor(WdspecExecutor): def __init__(self, browser, server_config, timeout_multiplier=1, close_after_done=True, debug_info=None, **kwargs): WdspecExecutor.__init__(self, browser, server_config, timeout_multiplier=timeout_multiplier, debug_info=debug_info) self.protocol = ServoWdspecProtocol(self, browser) def is_alive(self): return self.protocol.is_alive def on_environment_change(self, new_environment): pass def do_test(self, test): timeout = test.timeout * self.timeout_multiplier + extra_timeout success, data = WdspecRun(self.do_wdspec, self.protocol.session, test.path, timeout).run() if success: return self.convert_result(test, data) return (test.result_cls(*data), []) def do_wdspec(self, session, path, timeout): harness_result = ("OK", None) subtest_results = pytestrunner.run(path, session, timeout=timeout) return (harness_result, subtest_results)
someorz/spark
refs/heads/master
examples/src/main/python/ml/train_validation_split.py
71
# # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # $example on$ from pyspark.ml.evaluation import RegressionEvaluator from pyspark.ml.regression import LinearRegression from pyspark.ml.tuning import ParamGridBuilder, TrainValidationSplit # $example off$ from pyspark.sql import SparkSession """ This example demonstrates applying TrainValidationSplit to split data and preform model selection. Run with: bin/spark-submit examples/src/main/python/ml/train_validation_split.py """ if __name__ == "__main__": spark = SparkSession\ .builder\ .appName("TrainValidationSplit")\ .getOrCreate() # $example on$ # Prepare training and test data. data = spark.read.format("libsvm")\ .load("data/mllib/sample_linear_regression_data.txt") train, test = data.randomSplit([0.9, 0.1], seed=12345) lr = LinearRegression(maxIter=10) # We use a ParamGridBuilder to construct a grid of parameters to search over. # TrainValidationSplit will try all combinations of values and determine best model using # the evaluator. paramGrid = ParamGridBuilder()\ .addGrid(lr.regParam, [0.1, 0.01]) \ .addGrid(lr.fitIntercept, [False, True])\ .addGrid(lr.elasticNetParam, [0.0, 0.5, 1.0])\ .build() # In this case the estimator is simply the linear regression. 
# A TrainValidationSplit requires an Estimator, a set of Estimator ParamMaps, and an Evaluator. tvs = TrainValidationSplit(estimator=lr, estimatorParamMaps=paramGrid, evaluator=RegressionEvaluator(), # 80% of the data will be used for training, 20% for validation. trainRatio=0.8) # Run TrainValidationSplit, and choose the best set of parameters. model = tvs.fit(train) # Make predictions on test data. model is the model with combination of parameters # that performed best. model.transform(test)\ .select("features", "label", "prediction")\ .show() # $example off$ spark.stop()
doozr/euler.py
refs/heads/master
p0021_amicable_numbers_test.py
1
""" Let d(n) be defined as the sum of proper divisors of n (numbers less than n which divide evenly into n). If d(a) = b and d(b) = a, where a != b, then a and b are an amicable pair and each of a and b are called amicable numbers. For example, the proper divisors of 220 are 1, 2, 4, 5, 10, 11, 20, 22, 44, 55 and 110; therefore d(220) = 284. The proper divisors of 284 are 1, 2, 4, 71 and 142; so d(284) = 220. Evaluate the sum of all the amicable numbers under 10000. Answer: 31626 """ from euler.math import divisors def d(n): return sum(divisors(n)) # Could be improved by using a "seen" list and yielding both halves # of the pair at the same time but it's only marginally faster and this # is way cooler. One liners for the win! # # Check d(d(n)) == n first so it's faster. Checking d(n) != n first for # every number slows it down by 25%. For limit = 10000 there are only # 4 numbers (6, 28, 496 and 8128) where n == d(n) == d(d(n)) so we can # minimise how often the check runs by having it second. There are 10 # amicable numbers (5 pairs) < 10000 so we end up doing the check 10 # times instead of 10000 times. Much better. def amicable_numbers(limit): return (n for n in range(1, limit + 1) if d(d(n)) == n and d(n) != n) def sum_amicable_numbers(limit): return sum(amicable_numbers(limit)) def test_0021_amicable_numbers(): assert sum_amicable_numbers(10000) == 31626
okfn/brand-manager
refs/heads/master
manager/apps/brand/migrations/0023_auto__add_field_brandproposalreview_valid.py
2
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):
    # South schema migration 0023: adds the boolean column VALID to the
    # brand_proposal_review table (model field BrandProposalReview.valid).

    def forwards(self, orm):
        # Adding field 'BrandProposalReview.valid'
        # keep_default=False: the default is only used to fill existing rows
        # during the migration, not kept on the database column afterwards.
        db.add_column(u'brand_proposal_review', 'valid',
                      self.gf('django.db.models.fields.BooleanField')(default=False, db_column=u'VALID'),
                      keep_default=False)

    def backwards(self, orm):
        # Deleting field 'BrandProposalReview.valid'
        db.delete_column(u'brand_proposal_review', u'VALID')

    # Frozen ORM snapshot used by South at migration time; auto-generated,
    # do not edit by hand.
    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'brand.brand': {
            'Meta': {'ordering': "[u'brand_nm']", 'object_name': 'Brand', 'db_table': "u'brand'"},
            'brand_link': ('django.db.models.fields.URLField', [], {'max_length': '255', 'null': 'True', 'db_column': "u'BRAND_LINK'", 'blank': 'True'}),
            'brand_logo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'db_column': "u'BRAND_LOGO'", 'blank': 'True'}),
            'brand_nm': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_column': "u'BRAND_NM'"}),
            'brand_type_cd': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['brand.BrandType']", 'db_column': "u'BRAND_TYPE_CD'"}),
            'bsin': ('django.db.models.fields.CharField', [], {'max_length': '6', 'primary_key': 'True', 'db_column': "u'BSIN'"}),
            'comments': ('django.db.models.fields.TextField', [], {'null': 'True', 'db_column': "u'COMMENTS'", 'blank': 'True'}),
            'flag_delete': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "u'FLAG_DELETE'"}),
            'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_column': "u'LAST_MODIFIED'", 'blank': 'True'})
        },
        u'brand.brandproposal': {
            'Meta': {'ordering': "[u'insert_date']", 'object_name': 'BrandProposal', 'db_table': "u'brand_proposal'"},
            'brand_link': ('django.db.models.fields.URLField', [], {'max_length': '255', 'null': 'True', 'db_column': "u'BRAND_LINK'", 'blank': 'True'}),
            'brand_logo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'db_column': "u'BRAND_LOGO'", 'blank': 'True'}),
            'brand_nm': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_column': "u'BRAND_NM'"}),
            'brand_type_cd': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['brand.BrandType']", 'db_column': "u'BRAND_TYPE_CD'"}),
            'comments': ('django.db.models.fields.TextField', [], {'null': 'True', 'db_column': "u'COMMENTS'", 'blank': 'True'}),
            'insert_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_column': "u'INSERT_DATE'", 'blank': 'True'}),
            'proposal_cd': ('django.db.models.fields.AutoField', [], {'primary_key': 'True', 'db_column': "u'PROPOSAL_CD'"}),
            'status': ('django.db.models.fields.IntegerField', [], {'default': '1', 'db_column': "u'STATUS'"}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'db_column': "u'USER_ID'"})
        },
        u'brand.brandproposalreview': {
            'Meta': {'ordering': "[u'proposal_cd']", 'unique_together': "((u'proposal_cd', u'user'),)", 'object_name': 'BrandProposalReview', 'db_table': "u'brand_proposal_review'"},
            'comments': ('django.db.models.fields.TextField', [], {'null': 'True', 'db_column': "u'COMMENTS'", 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'proposal_cd': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['brand.BrandProposal']", 'db_column': "u'PROPOSAL_CD'"}),
            'review_dt': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_column': "u'REVIEW_DT'", 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'db_column': "u'USER_ID'"}),
            'valid': ('django.db.models.fields.BooleanField', [], {'db_column': "u'VALID'"})
        },
        u'brand.brandtype': {
            'Meta': {'object_name': 'BrandType', 'db_table': "u'brand_type'"},
            'brand_type_cd': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True', 'db_column': "u'BRAND_TYPE_CD'"}),
            'brand_type_nm': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_column': "u'BRAND_TYPE_NM'"})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        }
    }

    complete_apps = ['brand']
mtils/ems
refs/heads/master
ems/qt/layout/toolbararea.py
1
from ems.qt import QtCore, QtGui, QtWidgets

# Re-exported Qt aliases; several are unused in this module but may be
# imported from here by other modules, so they are all kept.
Qt = QtCore.Qt
QWidget = QtWidgets.QWidget
QHBoxLayout = QtWidgets.QHBoxLayout
QVBoxLayout = QtWidgets.QVBoxLayout
QSpacerItem = QtWidgets.QSpacerItem
QSizePolicy = QtWidgets.QSizePolicy
QIcon = QtGui.QIcon
QApplication = QtWidgets.QApplication
QToolBar = QtWidgets.QToolBar
QAction = QtWidgets.QAction
QActionGroup = QtWidgets.QActionGroup
QKeySequence = QtGui.QKeySequence
QFont = QtGui.QFont
QPixmap = QtGui.QPixmap
QMessageBox = QtWidgets.QMessageBox
QComboBox = QtWidgets.QComboBox
QFontComboBox = QtWidgets.QFontComboBox
QFontDatabase = QtGui.QFontDatabase
QTextListFormat = QtGui.QTextListFormat
QTextBlockFormat = QtGui.QTextBlockFormat
QColorDialog = QtWidgets.QColorDialog
QTextCursor = QtGui.QTextCursor


class ToolBarArea(QWidget):
    """A widget that stacks toolbars into horizontal rows.

    Toolbars added via :meth:`addToolBar` are appended to the last row;
    :meth:`addToolBarBreak` starts a new row (mimicking the row/break
    behaviour of QMainWindow's toolbar area).
    """

    def __init__(self, parent=None):
        super(ToolBarArea, self).__init__(parent)
        self._toolBars = []            # all toolbars added, in insertion order
        self._currentToolBarRow = 0    # NOTE(review): never advanced anywhere visible
        self._toolBarContainers = []   # NOTE(review): unused bookkeeping; kept for compat
        self._setupLayout()

    def addToolBar(self, toolBar):
        """Reparent *toolBar* into this area and append it to the last row."""
        toolBar.setParent(self)
        # Bug fix: previously the toolbar was never recorded in self._toolBars,
        # leaving the bookkeeping list permanently empty.
        self._toolBars.append(toolBar)
        self._lastRowWidget().layout().addWidget(toolBar)

    def addToolBarBreak(self):
        """Start a new (empty) row; subsequent toolbars go into it."""
        self._addToolBarRow()

    def _lastRowWidget(self):
        # Lazily create the first row on demand.
        if not self.layout().count():
            self._addToolBarRow()
        last = self.layout().count() - 1
        return self.layout().itemAt(last).widget()

    def _setupLayout(self):
        # Vertical stack of row widgets, flush to the edges.
        self.setLayout(QVBoxLayout(self))
        self.layout().setSpacing(0)
        self.layout().setContentsMargins(0, 0, 0, 0)

    def _addToolBarRow(self):
        self.layout().addWidget(self._createToolBarRow())

    def _createToolBarRow(self):
        # One row: a plain widget with a tight horizontal layout.
        rowWidget = QWidget(self)
        rowWidget.setLayout(QHBoxLayout(rowWidget))
        rowWidget.layout().setSpacing(0)
        rowWidget.layout().setContentsMargins(0, 0, 0, 0)
        return rowWidget
jnewland/home-assistant
refs/heads/ci
homeassistant/components/nest/const.py
16
"""Constants used by the Nest component.""" DOMAIN = 'nest'
erikr/weerapi
refs/heads/master
weerapi/tests/test_knmidata.py
1
import os
import unittest

from weerapi.knmidata import KNMIData


class KNMIDataTest(unittest.TestCase):
    """Tests KNMIData.actueel() against the bundled actueel.html fixture."""

    def setUp(self):
        self.knmidata = KNMIData()

    def test_actueel(self):
        my_path = os.path.dirname(os.path.abspath(__file__))
        # Fix: use os.path.join and a context manager so the fixture file
        # handle is closed deterministically (previously leaked).
        with open(os.path.join(my_path, 'actueel.html')) as fixture:
            input_data = fixture.read()
        result = self.knmidata.actueel(input_data)
        self.assertEqual(len(result['actueel']), 36)
        self.assertEqual(result['timestamp'], '5 september 2013 10:30')
        self.assertEqual(result['source'], 'KNMI')
        self.assertEqual(result['actueel']['Schiphol']['latitude'], '52.30769')
        self.assertEqual(result['actueel']['Schiphol']['weather_type'], 'onbewolkt')
        self.assertEqual(result['actueel']['Schiphol']['temperature'], '21.8')
        self.assertEqual(result['actueel']['Schiphol']['humidity'], '75')
        self.assertEqual(result['actueel']['Schiphol']['wind_direction'], 'ZO')
        self.assertEqual(result['actueel']['Schiphol']['wind_direction_deg'], '135')
        self.assertEqual(result['actueel']['Schiphol']['wind_speed_ms'], '4')
        self.assertEqual(result['actueel']['Schiphol']['wind_speed_bft'], 3)
        self.assertEqual(result['actueel']['Schiphol']['visibility'], '12900')
        self.assertEqual(result['actueel']['Schiphol']['pressure'], '1014.0')
        # Stations with missing readings must yield None, not raise.
        self.assertEqual(result['actueel']['Lauwersoog']['visibility'], None)
        self.assertEqual(result['actueel']['Wijk aan Zee']['wind_direction'], None)
        self.assertEqual(result['actueel']['Wijk aan Zee']['wind_direction_deg'], None)


if __name__ == '__main__':
    unittest.main()
xshotD/pyglet
refs/heads/master
tests/graphics/IMMEDIATE.py
15
#!/usr/bin/env python
"""Tests immediate drawing.
"""

import unittest

import pyglet

from graphics_common import GraphicsGenericTestCase, get_feedback, GL_TRIANGLES

__noninteractive = True


class GraphicsImmediateTestCase(GraphicsGenericTestCase, unittest.TestCase):
    """Generic graphics tests driven through pyglet.graphics.draw()."""

    def get_feedback(self, data):
        # Capture GL feedback while drawing the vertex data immediately.
        draw_call = lambda: pyglet.graphics.draw(self.n_vertices, GL_TRIANGLES, *data)
        return get_feedback(draw_call)


if __name__ == '__main__':
    unittest.main()
srossross/stable.world
refs/heads/develop
stable_world/commands/ci.py
1
""" This is a command """ from __future__ import print_function import click from stable_world.interact.setup_bucket import setup_bucket from stable_world import utils, application @click.group() def main(): pass @main.command() @click.argument('key') @click.argument('value') @application.pass_app def set(app, key, value): """Set a config value e.g: """ app.write_config(key, value) utils.echo_success() click.echo('key "{}" was set'.format(key)) return @main.command('ci') def ci(app, dir): "Set up stable.world in your continuous delivery pipline" @main.command('ci:bash') @utils.dir_option @utils.login_required def ci_bash(app, dir): "Set up stable.world in your continuous delivery pipline using bash" setup_bucket(app, dir, 'custom') @main.command('ci:circle') @utils.dir_option @utils.login_required def ci_circle(app, dir): "Set up stable.world in your continuous delivery pipline using circleci" setup_bucket(app, dir, 'circleci') if __name__ == '__main__': main(obj={})
doug-fish/horizon
refs/heads/master
openstack_dashboard/dashboards/project/images/snapshots/urls.py
54
# Copyright 2012 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. # # Copyright 2012 Nebula, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from django.conf.urls import patterns from django.conf.urls import url from openstack_dashboard.dashboards.project.images.snapshots import views urlpatterns = patterns( '', url(r'^(?P<instance_id>[^/]+)/create', views.CreateView.as_view(), name='create') )
macarthur-lab/xbrowse
refs/heads/master
xbrowse/analysis_modules/diagnostic_search.py
1
from xbrowse.core.variant_filters import VariantFilter


class DiagnosticSearchSpec(object):
    """Serializable specification of a diagnostic-search analysis.

    Holds the gene list plus variant/quality filters for one project/family,
    and round-trips through JSON-compatible dicts via toJSON / fromJSON.
    """

    def __init__(self):
        self.analysis_module = 'diagnostic_search'
        self.project_id = None
        self.family_id = None
        self.gene_ids = None
        self.variant_filter = None
        # Bug fix: quality_filter was never initialized, so toJSON() raised
        # AttributeError on any spec not built through fromJSON().
        self.quality_filter = None

    def toJSON(self):
        """Return a JSON-serializable dict representation of this spec."""
        return {
            'gene_ids': self.gene_ids,
            # Tolerate an unset variant_filter instead of crashing.
            'variant_filter': self.variant_filter.toJSON() if self.variant_filter else None,
            'quality_filter': self.quality_filter,
        }

    @staticmethod
    def fromJSON(spec_dict):
        """Build a spec from a dict produced by toJSON()."""
        spec = DiagnosticSearchSpec()
        spec.gene_ids = spec_dict.get('gene_ids')
        spec.variant_filter = VariantFilter(**spec_dict.get('variant_filter'))
        spec.quality_filter = spec_dict.get('quality_filter')
        return spec
pquentin/django
refs/heads/stable/1.8.x
tests/template_tests/test_nodelist.py
22
from unittest import TestCase

from django.template import Context, Template
from django.template.base import VariableNode
from django.test import override_settings


class NodelistTest(TestCase):
    # Each test checks that get_nodes_by_type finds the single {{ a }}
    # VariableNode nested inside the respective block tag.

    def test_for(self):
        template = Template('{% for i in 1 %}{{ a }}{% endfor %}')
        vars = template.nodelist.get_nodes_by_type(VariableNode)
        self.assertEqual(len(vars), 1)

    def test_if(self):
        template = Template('{% if x %}{{ a }}{% endif %}')
        vars = template.nodelist.get_nodes_by_type(VariableNode)
        self.assertEqual(len(vars), 1)

    def test_ifequal(self):
        template = Template('{% ifequal x y %}{{ a }}{% endifequal %}')
        vars = template.nodelist.get_nodes_by_type(VariableNode)
        self.assertEqual(len(vars), 1)

    def test_ifchanged(self):
        template = Template('{% ifchanged x %}{{ a }}{% endifchanged %}')
        vars = template.nodelist.get_nodes_by_type(VariableNode)
        self.assertEqual(len(vars), 1)


class ErrorIndexTest(TestCase):
    """
    Checks whether index of error is calculated correctly in
    template debugger in for loops. Refs ticket #5831
    """
    @override_settings(DEBUG=True)
    def test_correct_exception_index(self):
        # (source, expected (start, end) character offsets of the failing tag)
        tests = [
            ('{% load bad_tag %}{% for i in range %}{% badsimpletag %}{% endfor %}', (38, 56)),
            ('{% load bad_tag %}{% for i in range %}{% for j in range %}{% badsimpletag %}{% endfor %}{% endfor %}', (58, 76)),
            ('{% load bad_tag %}{% for i in range %}{% badsimpletag %}{% for j in range %}Hello{% endfor %}{% endfor %}', (38, 56)),
            ('{% load bad_tag %}{% for i in range %}{% for j in five %}{% badsimpletag %}{% endfor %}{% endfor %}', (38, 57)),
            ('{% load bad_tag %}{% for j in five %}{% badsimpletag %}{% endfor %}', (18, 37)),
        ]
        context = Context({
            'range': range(5),
            'five': 5,
        })
        for source, expected_error_source_index in tests:
            template = Template(source)
            try:
                template.render(context)
            except (RuntimeError, TypeError) as e:
                # The debug machinery attaches (origin, (start, end)).
                error_source_index = e.django_template_source[1]
                self.assertEqual(error_source_index,
                                 expected_error_source_index)
v-i-s-h/Enocean-Py
refs/heads/master
EnoceanPy/eopy.py
1
# eopy.py -- Demo application for Enocean Py API
#
# Copyright 2014 Vishnu Raj <rajvishnu90@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
# NOTE: this is a Python 2 script (print statements throughout).

import time
from time import gmtime, strftime

import EO
import ESP

'''
    Function    : main
    Description : main function
    Arguments   : none
    Returns     : none
'''
def main():
    print "\t\t*************************************************"
    print "\t\t**     Enocean Py - Monitor Enocean devices    **"
    print "\t\t*************************************************"

    ttyPort = "/dev/ttyAMA0"
    print "\tUsing serial port : " + ttyPort + '\n'

    # CO_RD_VERSION command (ESP3 packet bytes, incl. sync/CRC)
    cmd0 = [ 0x55, 0x00, 0x01, 0x00, 0x05, 0x70, 0x03, 0x09 ]
    # CO_RD_IDBASE command
    cmd1 = [ 0x55, 0x00, 0x01, 0x00, 0x05, 0x70, 0x08, 0x38 ]

    hEOGateway = EO.connect( ttyPort )
    # better to wait a little for connection to establish
    time.sleep( 0.100 )

    ## display any packets already in buffer
    print "Buffered Packets : ",
    rawResp = EO.receiveData( hEOGateway )
    if rawResp:
        print ''
        print 'RECEIVED :',
        for i in range( len( rawResp ) ):
            print "%02X" %(rawResp[i]),
        print ''
        pkts = ESP.decodeRawResponse( rawResp )
        for pkt in pkts:
            print "[PACKET]................................................................."
            ESP.displayPacketInfo( pkt )
            print "........................................................................."
    else:
        print"\t[NONE]"

    ## Send CO_RD_VERSION
    print "RQST       : ",
    for i in range( len( cmd0 ) ):
        print "%02X" %( cmd0[i] ),
    print ''
    rawResp = EO.sendData( hEOGateway, cmd0 )
    print 'RESP(%3dB) : ' %len( rawResp ),
    for i in range( len( rawResp ) ):
        print "%02X" %(rawResp[i]),
    print ''
    pkt = ESP.decodePacket( rawResp )
    ESP.displayPacketInfo( pkt, 'CO_RD_VERSION' )

    ## Send CO_RD_IDBASE
    print "RQST       : ",
    for i in range( len( cmd1 ) ):
        print "%02X" %( cmd1[i] ),
    print ''
    #~ EO_receiveData( hEOGateway ) # Read any buffered data
    rawResp = EO.sendData( hEOGateway, cmd1 )
    print 'RESP(%3dB) : ' %len( rawResp ),
    for i in range( len( rawResp ) ):
        print "%02X" %(rawResp[i]),
    print ''
    pkt = ESP.decodePacket( rawResp )
    ESP.displayPacketInfo( pkt, 'CO_RD_IDBASE' )

    # Monitor loop: dump every received telegram until Ctrl-C.
    try:
        while( True ):
            rawResp = EO.receiveData( hEOGateway )
            if rawResp:
                print strftime("%Y-%m-%d %H:%M:%S",gmtime()),": ",
                print '[RXD] ',
                for i in range( len( rawResp ) ):
                    print "%02X" %(rawResp[i]),
                print ''
                pkts = ESP.decodeRawResponse( rawResp )
                for pkt in pkts:
                    print "    :> ",
                    for i in range(len(pkt['data_recv'])):
                        print "%02X" %(pkt['data_recv'][i]),
                    print ''
                    # print "[PACKET]................................................................."
                    ESP.displayPacketInfo( pkt )
                    # print "........................................................................."
    except KeyboardInterrupt:
        print "\nExiting Enocean Py Demo"
        EO.disconnect( hEOGateway )
        print "Bye..bye.. :) "

if __name__ == "__main__":
    main()
e-democracy/edem.profile.email.settings
refs/heads/master
edem/__init__.py
916
# See http://peak.telecommunity.com/DevCenter/setuptools#namespace-packages try: __import__('pkg_resources').declare_namespace(__name__) except ImportError: from pkgutil import extend_path __path__ = extend_path(__path__, __name__)
trabacus-softapps/openerp-8.0-cc
refs/heads/master
openerp/addons/website/tests/__init__.py
3
# -*- coding: utf-8 -*- import test_views, test_converter, test_requests, test_ui checks = [ test_views, test_converter, ]
dragon788/wordfreq
refs/heads/master
wordfreq/chinese.py
1
from pkg_resources import resource_filename
import jieba
import msgpack
import gzip

# Bundled data files: jieba dictionary and the traditional->simplified
# character mapping (msgpack, gzip-compressed).
DICT_FILENAME = resource_filename('wordfreq', 'data/jieba_zh.txt')
SIMP_MAP_FILENAME = resource_filename('wordfreq', 'data/_chinese_mapping.msgpack.gz')

# Fix: close the gzip handle deterministically instead of leaking it
# (gzip.open(...) was previously passed to msgpack.load and never closed).
with gzip.open(SIMP_MAP_FILENAME) as _map_file:
    SIMPLIFIED_MAP = msgpack.load(_map_file, encoding='utf-8')

# Lazily-constructed jieba tokenizer (built on first call to jieba_tokenize).
jieba_tokenizer = None


def simplify_chinese(text):
    """Map *text* to simplified characters and case-fold it."""
    return text.translate(SIMPLIFIED_MAP).casefold()


def jieba_tokenize(text):
    """Tokenize simplified Chinese text with jieba (HMM disabled).

    The tokenizer is created on first use with the bundled dictionary.
    """
    global jieba_tokenizer
    if jieba_tokenizer is None:
        jieba_tokenizer = jieba.Tokenizer(dictionary=DICT_FILENAME)
    return jieba_tokenizer.lcut(simplify_chinese(text), HMM=False)