| repo_name (string, 5–100 chars) | ref (string, 12–67 chars) | path (string, 4–244 chars) | copies (string, 1–8 chars) | content (string, 0–1.05M chars, ⌀ = null) |
|---|---|---|---|---|
lukebakken/riak-python-pbcpp
|
refs/heads/master
|
riak/riak_pb2.py
|
1
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: riak.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='riak.proto',
package='',
serialized_pb=_b('\n\nriak.proto\"/\n\x0cRpbErrorResp\x12\x0e\n\x06\x65rrmsg\x18\x01 \x02(\x0c\x12\x0f\n\x07\x65rrcode\x18\x02 \x02(\r\"<\n\x14RpbGetServerInfoResp\x12\x0c\n\x04node\x18\x01 \x01(\x0c\x12\x16\n\x0eserver_version\x18\x02 \x01(\x0c\"%\n\x07RpbPair\x12\x0b\n\x03key\x18\x01 \x02(\x0c\x12\r\n\x05value\x18\x02 \x01(\x0c\"/\n\x0fRpbGetBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0c\n\x04type\x18\x02 \x01(\x0c\"2\n\x10RpbGetBucketResp\x12\x1e\n\x05props\x18\x01 \x02(\x0b\x32\x0f.RpbBucketProps\"O\n\x0fRpbSetBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x1e\n\x05props\x18\x02 \x02(\x0b\x32\x0f.RpbBucketProps\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"1\n\x11RpbResetBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0c\n\x04type\x18\x02 \x01(\x0c\"#\n\x13RpbGetBucketTypeReq\x12\x0c\n\x04type\x18\x01 \x02(\x0c\"C\n\x13RpbSetBucketTypeReq\x12\x0c\n\x04type\x18\x01 \x02(\x0c\x12\x1e\n\x05props\x18\x02 \x02(\x0b\x32\x0f.RpbBucketProps\"-\n\tRpbModFun\x12\x0e\n\x06module\x18\x01 \x02(\x0c\x12\x10\n\x08\x66unction\x18\x02 \x02(\x0c\"9\n\rRpbCommitHook\x12\x1a\n\x06modfun\x18\x01 \x01(\x0b\x32\n.RpbModFun\x12\x0c\n\x04name\x18\x02 \x01(\x0c\"\xb0\x05\n\x0eRpbBucketProps\x12\r\n\x05n_val\x18\x01 \x01(\r\x12\x12\n\nallow_mult\x18\x02 \x01(\x08\x12\x17\n\x0flast_write_wins\x18\x03 \x01(\x08\x12!\n\tprecommit\x18\x04 \x03(\x0b\x32\x0e.RpbCommitHook\x12\x1c\n\rhas_precommit\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\"\n\npostcommit\x18\x06 \x03(\x0b\x32\x0e.RpbCommitHook\x12\x1d\n\x0ehas_postcommit\x18\x07 \x01(\x08:\x05\x66\x61lse\x12 \n\x0c\x63hash_keyfun\x18\x08 \x01(\x0b\x32\n.RpbModFun\x12\x1b\n\x07linkfun\x18\t \x01(\x0b\x32\n.RpbModFun\x12\x12\n\nold_vclock\x18\n \x01(\r\x12\x14\n\x0cyoung_vclock\x18\x0b \x01(\r\x12\x12\n\nbig_vclock\x18\x0c \x01(\r\x12\x14\n\x0csmall_vclock\x18\r \x01(\r\x12\n\n\x02pr\x18\x0e \x01(\r\x12\t\n\x01r\x18\x0f \x01(\r\x12\t\n\x01w\x18\x10 \x01(\r\x12\n\n\x02pw\x18\x11 \x01(\r\x12\n\n\x02\x64w\x18\x12 \x01(\r\x12\n\n\x02rw\x18\x13 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x14 \x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x15 \x01(\x08\x12\x0f\n\x07\x62\x61\x63kend\x18\x16 \x01(\x0c\x12\x0e\n\x06search\x18\x17 \x01(\x08\x12)\n\x04repl\x18\x18 \x01(\x0e\x32\x1b.RpbBucketProps.RpbReplMode\x12\x14\n\x0csearch_index\x18\x19 \x01(\x0c\x12\x10\n\x08\x64\x61tatype\x18\x1a \x01(\x0c\x12\x12\n\nconsistent\x18\x1b \x01(\x08\x12\x12\n\nwrite_once\x18\x1c \x01(\x08\">\n\x0bRpbReplMode\x12\t\n\x05\x46\x41LSE\x10\x00\x12\x0c\n\x08REALTIME\x10\x01\x12\x0c\n\x08\x46ULLSYNC\x10\x02\x12\x08\n\x04TRUE\x10\x03\",\n\nRpbAuthReq\x12\x0c\n\x04user\x18\x01 \x02(\x0c\x12\x10\n\x08password\x18\x02 \x02(\x0c\"*\n\x14RpbToggleEncodingReq\x12\x12\n\nuse_native\x18\x01 \x02(\x08\"+\n\x15RpbToggleEncodingResp\x12\x12\n\nuse_native\x18\x01 \x02(\x08\x42!\n\x17\x63om.basho.riak.protobufB\x06RiakPB')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_RPBBUCKETPROPS_RPBREPLMODE = _descriptor.EnumDescriptor(
name='RpbReplMode',
full_name='RpbBucketProps.RpbReplMode',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='FALSE', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='REALTIME', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='FULLSYNC', index=2, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='TRUE', index=3, number=3,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=1236,
serialized_end=1298,
)
_sym_db.RegisterEnumDescriptor(_RPBBUCKETPROPS_RPBREPLMODE)
_RPBERRORRESP = _descriptor.Descriptor(
name='RpbErrorResp',
full_name='RpbErrorResp',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='errmsg', full_name='RpbErrorResp.errmsg', index=0,
number=1, type=12, cpp_type=9, label=2,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='errcode', full_name='RpbErrorResp.errcode', index=1,
number=2, type=13, cpp_type=3, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=14,
serialized_end=61,
)
_RPBGETSERVERINFORESP = _descriptor.Descriptor(
name='RpbGetServerInfoResp',
full_name='RpbGetServerInfoResp',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='node', full_name='RpbGetServerInfoResp.node', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='server_version', full_name='RpbGetServerInfoResp.server_version', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=63,
serialized_end=123,
)
_RPBPAIR = _descriptor.Descriptor(
name='RpbPair',
full_name='RpbPair',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='RpbPair.key', index=0,
number=1, type=12, cpp_type=9, label=2,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='value', full_name='RpbPair.value', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=125,
serialized_end=162,
)
_RPBGETBUCKETREQ = _descriptor.Descriptor(
name='RpbGetBucketReq',
full_name='RpbGetBucketReq',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='bucket', full_name='RpbGetBucketReq.bucket', index=0,
number=1, type=12, cpp_type=9, label=2,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='type', full_name='RpbGetBucketReq.type', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=164,
serialized_end=211,
)
_RPBGETBUCKETRESP = _descriptor.Descriptor(
name='RpbGetBucketResp',
full_name='RpbGetBucketResp',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='props', full_name='RpbGetBucketResp.props', index=0,
number=1, type=11, cpp_type=10, label=2,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=213,
serialized_end=263,
)
_RPBSETBUCKETREQ = _descriptor.Descriptor(
name='RpbSetBucketReq',
full_name='RpbSetBucketReq',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='bucket', full_name='RpbSetBucketReq.bucket', index=0,
number=1, type=12, cpp_type=9, label=2,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='props', full_name='RpbSetBucketReq.props', index=1,
number=2, type=11, cpp_type=10, label=2,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='type', full_name='RpbSetBucketReq.type', index=2,
number=3, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=265,
serialized_end=344,
)
_RPBRESETBUCKETREQ = _descriptor.Descriptor(
name='RpbResetBucketReq',
full_name='RpbResetBucketReq',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='bucket', full_name='RpbResetBucketReq.bucket', index=0,
number=1, type=12, cpp_type=9, label=2,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='type', full_name='RpbResetBucketReq.type', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=346,
serialized_end=395,
)
_RPBGETBUCKETTYPEREQ = _descriptor.Descriptor(
name='RpbGetBucketTypeReq',
full_name='RpbGetBucketTypeReq',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='type', full_name='RpbGetBucketTypeReq.type', index=0,
number=1, type=12, cpp_type=9, label=2,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=397,
serialized_end=432,
)
_RPBSETBUCKETTYPEREQ = _descriptor.Descriptor(
name='RpbSetBucketTypeReq',
full_name='RpbSetBucketTypeReq',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='type', full_name='RpbSetBucketTypeReq.type', index=0,
number=1, type=12, cpp_type=9, label=2,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='props', full_name='RpbSetBucketTypeReq.props', index=1,
number=2, type=11, cpp_type=10, label=2,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=434,
serialized_end=501,
)
_RPBMODFUN = _descriptor.Descriptor(
name='RpbModFun',
full_name='RpbModFun',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='module', full_name='RpbModFun.module', index=0,
number=1, type=12, cpp_type=9, label=2,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='function', full_name='RpbModFun.function', index=1,
number=2, type=12, cpp_type=9, label=2,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=503,
serialized_end=548,
)
_RPBCOMMITHOOK = _descriptor.Descriptor(
name='RpbCommitHook',
full_name='RpbCommitHook',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='modfun', full_name='RpbCommitHook.modfun', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='name', full_name='RpbCommitHook.name', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=550,
serialized_end=607,
)
_RPBBUCKETPROPS = _descriptor.Descriptor(
name='RpbBucketProps',
full_name='RpbBucketProps',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='n_val', full_name='RpbBucketProps.n_val', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='allow_mult', full_name='RpbBucketProps.allow_mult', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='last_write_wins', full_name='RpbBucketProps.last_write_wins', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='precommit', full_name='RpbBucketProps.precommit', index=3,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='has_precommit', full_name='RpbBucketProps.has_precommit', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='postcommit', full_name='RpbBucketProps.postcommit', index=5,
number=6, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='has_postcommit', full_name='RpbBucketProps.has_postcommit', index=6,
number=7, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='chash_keyfun', full_name='RpbBucketProps.chash_keyfun', index=7,
number=8, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='linkfun', full_name='RpbBucketProps.linkfun', index=8,
number=9, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='old_vclock', full_name='RpbBucketProps.old_vclock', index=9,
number=10, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='young_vclock', full_name='RpbBucketProps.young_vclock', index=10,
number=11, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='big_vclock', full_name='RpbBucketProps.big_vclock', index=11,
number=12, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='small_vclock', full_name='RpbBucketProps.small_vclock', index=12,
number=13, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='pr', full_name='RpbBucketProps.pr', index=13,
number=14, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='r', full_name='RpbBucketProps.r', index=14,
number=15, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='w', full_name='RpbBucketProps.w', index=15,
number=16, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='pw', full_name='RpbBucketProps.pw', index=16,
number=17, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='dw', full_name='RpbBucketProps.dw', index=17,
number=18, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='rw', full_name='RpbBucketProps.rw', index=18,
number=19, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='basic_quorum', full_name='RpbBucketProps.basic_quorum', index=19,
number=20, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='notfound_ok', full_name='RpbBucketProps.notfound_ok', index=20,
number=21, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='backend', full_name='RpbBucketProps.backend', index=21,
number=22, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='search', full_name='RpbBucketProps.search', index=22,
number=23, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='repl', full_name='RpbBucketProps.repl', index=23,
number=24, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='search_index', full_name='RpbBucketProps.search_index', index=24,
number=25, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='datatype', full_name='RpbBucketProps.datatype', index=25,
number=26, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='consistent', full_name='RpbBucketProps.consistent', index=26,
number=27, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='write_once', full_name='RpbBucketProps.write_once', index=27,
number=28, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_RPBBUCKETPROPS_RPBREPLMODE,
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=610,
serialized_end=1298,
)
_RPBAUTHREQ = _descriptor.Descriptor(
name='RpbAuthReq',
full_name='RpbAuthReq',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user', full_name='RpbAuthReq.user', index=0,
number=1, type=12, cpp_type=9, label=2,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='password', full_name='RpbAuthReq.password', index=1,
number=2, type=12, cpp_type=9, label=2,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=1300,
serialized_end=1344,
)
_RPBTOGGLEENCODINGREQ = _descriptor.Descriptor(
name='RpbToggleEncodingReq',
full_name='RpbToggleEncodingReq',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='use_native', full_name='RpbToggleEncodingReq.use_native', index=0,
number=1, type=8, cpp_type=7, label=2,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=1346,
serialized_end=1388,
)
_RPBTOGGLEENCODINGRESP = _descriptor.Descriptor(
name='RpbToggleEncodingResp',
full_name='RpbToggleEncodingResp',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='use_native', full_name='RpbToggleEncodingResp.use_native', index=0,
number=1, type=8, cpp_type=7, label=2,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=1390,
serialized_end=1433,
)
_RPBGETBUCKETRESP.fields_by_name['props'].message_type = _RPBBUCKETPROPS
_RPBSETBUCKETREQ.fields_by_name['props'].message_type = _RPBBUCKETPROPS
_RPBSETBUCKETTYPEREQ.fields_by_name['props'].message_type = _RPBBUCKETPROPS
_RPBCOMMITHOOK.fields_by_name['modfun'].message_type = _RPBMODFUN
_RPBBUCKETPROPS.fields_by_name['precommit'].message_type = _RPBCOMMITHOOK
_RPBBUCKETPROPS.fields_by_name['postcommit'].message_type = _RPBCOMMITHOOK
_RPBBUCKETPROPS.fields_by_name['chash_keyfun'].message_type = _RPBMODFUN
_RPBBUCKETPROPS.fields_by_name['linkfun'].message_type = _RPBMODFUN
_RPBBUCKETPROPS.fields_by_name['repl'].enum_type = _RPBBUCKETPROPS_RPBREPLMODE
_RPBBUCKETPROPS_RPBREPLMODE.containing_type = _RPBBUCKETPROPS
DESCRIPTOR.message_types_by_name['RpbErrorResp'] = _RPBERRORRESP
DESCRIPTOR.message_types_by_name['RpbGetServerInfoResp'] = _RPBGETSERVERINFORESP
DESCRIPTOR.message_types_by_name['RpbPair'] = _RPBPAIR
DESCRIPTOR.message_types_by_name['RpbGetBucketReq'] = _RPBGETBUCKETREQ
DESCRIPTOR.message_types_by_name['RpbGetBucketResp'] = _RPBGETBUCKETRESP
DESCRIPTOR.message_types_by_name['RpbSetBucketReq'] = _RPBSETBUCKETREQ
DESCRIPTOR.message_types_by_name['RpbResetBucketReq'] = _RPBRESETBUCKETREQ
DESCRIPTOR.message_types_by_name['RpbGetBucketTypeReq'] = _RPBGETBUCKETTYPEREQ
DESCRIPTOR.message_types_by_name['RpbSetBucketTypeReq'] = _RPBSETBUCKETTYPEREQ
DESCRIPTOR.message_types_by_name['RpbModFun'] = _RPBMODFUN
DESCRIPTOR.message_types_by_name['RpbCommitHook'] = _RPBCOMMITHOOK
DESCRIPTOR.message_types_by_name['RpbBucketProps'] = _RPBBUCKETPROPS
DESCRIPTOR.message_types_by_name['RpbAuthReq'] = _RPBAUTHREQ
DESCRIPTOR.message_types_by_name['RpbToggleEncodingReq'] = _RPBTOGGLEENCODINGREQ
DESCRIPTOR.message_types_by_name['RpbToggleEncodingResp'] = _RPBTOGGLEENCODINGRESP
RpbErrorResp = _reflection.GeneratedProtocolMessageType('RpbErrorResp', (_message.Message,), dict(
DESCRIPTOR = _RPBERRORRESP,
__module__ = 'riak_pb2'
# @@protoc_insertion_point(class_scope:RpbErrorResp)
))
_sym_db.RegisterMessage(RpbErrorResp)
RpbGetServerInfoResp = _reflection.GeneratedProtocolMessageType('RpbGetServerInfoResp', (_message.Message,), dict(
DESCRIPTOR = _RPBGETSERVERINFORESP,
__module__ = 'riak_pb2'
# @@protoc_insertion_point(class_scope:RpbGetServerInfoResp)
))
_sym_db.RegisterMessage(RpbGetServerInfoResp)
RpbPair = _reflection.GeneratedProtocolMessageType('RpbPair', (_message.Message,), dict(
DESCRIPTOR = _RPBPAIR,
__module__ = 'riak_pb2'
# @@protoc_insertion_point(class_scope:RpbPair)
))
_sym_db.RegisterMessage(RpbPair)
RpbGetBucketReq = _reflection.GeneratedProtocolMessageType('RpbGetBucketReq', (_message.Message,), dict(
DESCRIPTOR = _RPBGETBUCKETREQ,
__module__ = 'riak_pb2'
# @@protoc_insertion_point(class_scope:RpbGetBucketReq)
))
_sym_db.RegisterMessage(RpbGetBucketReq)
RpbGetBucketResp = _reflection.GeneratedProtocolMessageType('RpbGetBucketResp', (_message.Message,), dict(
DESCRIPTOR = _RPBGETBUCKETRESP,
__module__ = 'riak_pb2'
# @@protoc_insertion_point(class_scope:RpbGetBucketResp)
))
_sym_db.RegisterMessage(RpbGetBucketResp)
RpbSetBucketReq = _reflection.GeneratedProtocolMessageType('RpbSetBucketReq', (_message.Message,), dict(
DESCRIPTOR = _RPBSETBUCKETREQ,
__module__ = 'riak_pb2'
# @@protoc_insertion_point(class_scope:RpbSetBucketReq)
))
_sym_db.RegisterMessage(RpbSetBucketReq)
RpbResetBucketReq = _reflection.GeneratedProtocolMessageType('RpbResetBucketReq', (_message.Message,), dict(
DESCRIPTOR = _RPBRESETBUCKETREQ,
__module__ = 'riak_pb2'
# @@protoc_insertion_point(class_scope:RpbResetBucketReq)
))
_sym_db.RegisterMessage(RpbResetBucketReq)
RpbGetBucketTypeReq = _reflection.GeneratedProtocolMessageType('RpbGetBucketTypeReq', (_message.Message,), dict(
DESCRIPTOR = _RPBGETBUCKETTYPEREQ,
__module__ = 'riak_pb2'
# @@protoc_insertion_point(class_scope:RpbGetBucketTypeReq)
))
_sym_db.RegisterMessage(RpbGetBucketTypeReq)
RpbSetBucketTypeReq = _reflection.GeneratedProtocolMessageType('RpbSetBucketTypeReq', (_message.Message,), dict(
DESCRIPTOR = _RPBSETBUCKETTYPEREQ,
__module__ = 'riak_pb2'
# @@protoc_insertion_point(class_scope:RpbSetBucketTypeReq)
))
_sym_db.RegisterMessage(RpbSetBucketTypeReq)
RpbModFun = _reflection.GeneratedProtocolMessageType('RpbModFun', (_message.Message,), dict(
DESCRIPTOR = _RPBMODFUN,
__module__ = 'riak_pb2'
# @@protoc_insertion_point(class_scope:RpbModFun)
))
_sym_db.RegisterMessage(RpbModFun)
RpbCommitHook = _reflection.GeneratedProtocolMessageType('RpbCommitHook', (_message.Message,), dict(
DESCRIPTOR = _RPBCOMMITHOOK,
__module__ = 'riak_pb2'
# @@protoc_insertion_point(class_scope:RpbCommitHook)
))
_sym_db.RegisterMessage(RpbCommitHook)
RpbBucketProps = _reflection.GeneratedProtocolMessageType('RpbBucketProps', (_message.Message,), dict(
DESCRIPTOR = _RPBBUCKETPROPS,
__module__ = 'riak_pb2'
# @@protoc_insertion_point(class_scope:RpbBucketProps)
))
_sym_db.RegisterMessage(RpbBucketProps)
RpbAuthReq = _reflection.GeneratedProtocolMessageType('RpbAuthReq', (_message.Message,), dict(
DESCRIPTOR = _RPBAUTHREQ,
__module__ = 'riak_pb2'
# @@protoc_insertion_point(class_scope:RpbAuthReq)
))
_sym_db.RegisterMessage(RpbAuthReq)
RpbToggleEncodingReq = _reflection.GeneratedProtocolMessageType('RpbToggleEncodingReq', (_message.Message,), dict(
DESCRIPTOR = _RPBTOGGLEENCODINGREQ,
__module__ = 'riak_pb2'
# @@protoc_insertion_point(class_scope:RpbToggleEncodingReq)
))
_sym_db.RegisterMessage(RpbToggleEncodingReq)
RpbToggleEncodingResp = _reflection.GeneratedProtocolMessageType('RpbToggleEncodingResp', (_message.Message,), dict(
DESCRIPTOR = _RPBTOGGLEENCODINGRESP,
__module__ = 'riak_pb2'
# @@protoc_insertion_point(class_scope:RpbToggleEncodingResp)
))
_sym_db.RegisterMessage(RpbToggleEncodingResp)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.basho.riak.protobufB\006RiakPB'))
# @@protoc_insertion_point(module_scope)
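# Hypothetical usage sketch (comments only; this file is generated and should
# not otherwise be edited). The classes above behave like ordinary protobuf
# messages, so, assuming the module is importable as riak_pb2:
#
#     import riak_pb2
#     err = riak_pb2.RpbErrorResp(errmsg=b'bucket not found', errcode=404)
#     wire = err.SerializeToString()
#     parsed = riak_pb2.RpbErrorResp.FromString(wire)
#     assert parsed.errcode == 404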
|
zqfan/leetcode
|
refs/heads/master
|
algorithms/516. Longest Palindromic Subsequence/solution.py
|
1
|
class Solution(object):
def longestPalindromeSubseq(self, s):
"""
:type s: str
:rtype: int
"""
        # based on @jedihy: rolling 1-D version of the classic interval DP.
        # After updating, dp[start] holds the LPS length of s[start:end+1]
        # for the current end; `pre` carries dp[start+1] from the previous
        # row, i.e. the LPS length of s[start+1:end].
        dp = [1] * len(s)
        for end in xrange(len(s)):
            for start in reversed(xrange(end)):
                if s[start] == s[end]:
                    # matching endpoints extend the inner palindrome by 2
                    m = 2 + pre if start < end - 1 else 2
                else:
                    # otherwise drop one endpoint and keep the better result
                    m = max(dp[start+1], dp[start])
                pre, dp[start] = dp[start], m
        return dp[0] if dp else 0
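# Quick sanity check (a sketch; assumes Python 2, matching xrange above):
#
#     print Solution().longestPalindromeSubseq("bbbab")  # -> 4 ("bbbb")
#     print Solution().longestPalindromeSubseq("")       # -> 0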
|
SachaMPS/django-cms
|
refs/heads/develop
|
cms/utils/decorators.py
|
34
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.contrib.auth.views import redirect_to_login
from django.utils.http import urlquote
from cms.page_rendering import _handle_no_page
def cms_perms(func):
    """Decorator enforcing CMS page permissions on a view: redirects
    anonymous users to login for login-required pages and delegates to the
    CMS "no page" handler when the user lacks view permission."""
    def inner(request, *args, **kwargs):
page = request.current_page
if page:
if page.login_required and not request.user.is_authenticated():
return redirect_to_login(urlquote(request.get_full_path()), settings.LOGIN_URL)
if not page.has_view_permission(request):
return _handle_no_page(request, "$")
return func(request, *args, **kwargs)
inner.__module__ = func.__module__
inner.__doc__ = func.__doc__
if hasattr(func, '__name__'):
inner.__name__ = func.__name__
elif hasattr(func, '__class__'):
inner.__name__ = func.__class__.__name__
return inner
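# Hypothetical usage (view name illustrative); assumes middleware has set
# request.current_page:
#
#     @cms_perms
#     def page_detail(request, slug):
#         ...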
|
dchaplinsky/pep.org.ua
|
refs/heads/master
|
pepdb/cms_pages/migrations/0009_auto_20151025_0116.py
|
1
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import wagtail.wagtailcore.fields
class Migration(migrations.Migration):
dependencies = [
('cms_pages', '0008_auto_20151025_0051'),
]
operations = [
migrations.AddField(
model_name='homepage',
name='footer',
field=wagtail.wagtailcore.fields.RichTextField(default='', verbose_name='[UA] \u0422\u0435\u043a\u0441\u0442 \u0432\u043d\u0438\u0437\u0443 \u0443\u0441\u0456\u0445 \u0441\u0442\u043e\u0440\u0456\u043d\u043e\u043a'),
),
migrations.AddField(
model_name='homepage',
name='footer_en',
field=wagtail.wagtailcore.fields.RichTextField(default='', verbose_name='[EN] \u0422\u0435\u043a\u0441\u0442 \u0432\u043d\u0438\u0437\u0443 \u0443\u0441\u0456\u0445 \u0441\u0442\u043e\u0440\u0456\u043d\u043e\u043a'),
),
]
|
airbnb/knowledge-repo
|
refs/heads/master
|
knowledge_repo/app/utils/posts.py
|
1
|
"""Functions that interact with posts.
Functions include:
- get_posts
- get_all_post_stats
"""
import math
from flask import current_app
from sqlalchemy import func, distinct, or_
from ..proxies import db_session
from ..models import (Comment, PageView, Post,
Tag, Vote, User)
def get_query_param_set(params):
"""
Strip, lowercase, and remove empty params to be used in a query
"""
param_set = params.strip().lower().split(" ")
param_set = [p for p in param_set if len(p) > 0]
return param_set
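# For example (illustrative only): get_query_param_set("  Foo  bar ")
# returns ['foo', 'bar'].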
def get_posts(feed_params):
"""
Return a list of post objects (either WebEditorPosts or GitPosts)
by building a query based on the feed_params
:param feed_params: Parameters in the url request
:type feed_params: object
:return: Posts matching feed param specification
:rtype: Tuple
"""
# make sure post is published
query = (db_session.query(Post).filter(Post.is_published))
# posts returned should not include any posts in the excluded tags
excluded_tags = current_app.config.get('EXCLUDED_TAGS', [])
if excluded_tags:
query = query.filter(~Post.tags.any(Tag.name.in_(excluded_tags)))
# filter out based on feed param filters
filters = feed_params['filters']
if filters and str(filters):
filter_set = get_query_param_set(filters)
for elem in filter_set:
query = query.filter(or_(func.lower(Post.keywords).like('%' + elem + '%'),
func.lower(Post.keywords).like('%' + elem),
func.lower(Post.keywords).like(elem + '%')))
author_names = feed_params['authors']
if author_names:
author_names = [author_name.strip() for author_name in author_names.split(",")]
query = query.filter(Post.authors.any(User.identifier.in_(author_names)))
# sort - TODO clean up
sort_by = feed_params['sort_by']
# sort by post property
post_properties = {
"updated_at": Post.updated_at,
"created_at": Post.created_at,
"title": Post.title,
}
join_order_col = {
"uniqueviews": func.count(distinct(PageView.user_id)),
"allviews": func.count(PageView.object_id),
"views": func.count(PageView.object_id),
"upvotes": func.count(Vote.object_id),
"comments": func.count(Comment.post_id)
}
order_col = None
if sort_by in post_properties:
order_col = post_properties[sort_by]
elif sort_by in join_order_col: # sort by joined property
order_col = join_order_col[sort_by]
joins = {
"uniqueviews": (PageView, PageView.object_id),
"allviews": (PageView, PageView.object_id),
"views": (PageView, PageView.object_id),
"upvotes": (Vote, Vote.object_id),
"comments": (Comment, Comment.post_id)
}
(join_table, join_on) = joins[sort_by]
query = (db_session.query(Post, order_col)
.outerjoin(join_table, Post.id == join_on))
query = query.group_by(Post.id)
# sort order
if order_col is not None:
if feed_params['sort_desc']:
query = query.order_by(order_col.desc())
else:
query = query.order_by(order_col.asc())
query = (query.order_by(Post.id.desc()))
posts = query.all()
    # If the query returned (Post, order_col) tuples, unnest the Post objects
if posts and not isinstance(posts[0], Post):
posts = [post[0] for post in posts]
# get the right indexes
feed_params['posts_count'] = len(posts)
feed_params['page_count'] = int(math.ceil(float(len(posts)) / feed_params['results']))
posts = posts[feed_params['start']:feed_params[
'start'] + feed_params['results']]
    # Post.authors is lazy-loaded, so we need to make sure it has been loaded
    # before being passed beyond the scope of this db_session.
for post in posts:
post.authors
post_stats = {post.path: {'all_views': post.view_count,
'distinct_views': post.view_user_count,
'total_likes': post.vote_count,
'total_comments': post.comment_count} for post in posts}
db_session.expunge_all()
return posts, post_stats
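# Hypothetical call (keys inferred from the code above, not a documented API):
#
#     feed_params = {'filters': '', 'authors': '', 'sort_by': 'views',
#                    'sort_desc': True, 'start': 0, 'results': 10}
#     posts, post_stats = get_posts(feed_params)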
|
mikewiebe-ansible/ansible
|
refs/heads/devel
|
lib/ansible/module_utils/parsing/convert_bool.py
|
118
|
# Copyright: 2017, Ansible Project
# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause )
from ansible.module_utils.six import binary_type, text_type
from ansible.module_utils._text import to_text
BOOLEANS_TRUE = frozenset(('y', 'yes', 'on', '1', 'true', 't', 1, 1.0, True))
BOOLEANS_FALSE = frozenset(('n', 'no', 'off', '0', 'false', 'f', 0, 0.0, False))
BOOLEANS = BOOLEANS_TRUE.union(BOOLEANS_FALSE)
def boolean(value, strict=True):
if isinstance(value, bool):
return value
normalized_value = value
if isinstance(value, (text_type, binary_type)):
normalized_value = to_text(value, errors='surrogate_or_strict').lower().strip()
if normalized_value in BOOLEANS_TRUE:
return True
elif normalized_value in BOOLEANS_FALSE or not strict:
return False
raise TypeError("The value '%s' is not a valid boolean. Valid booleans include: %s" % (to_text(value), ', '.join(repr(i) for i in BOOLEANS)))
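# Illustrative behaviour (a sketch, not part of the module):
#
#     boolean('yes')                   # -> True
#     boolean('off')                   # -> False
#     boolean('maybe', strict=False)   # -> False
#     boolean('maybe')                 # raises TypeError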
|
alrifqi/django
|
refs/heads/master
|
tests/template_tests/filter_tests/test_timesince.py
|
207
|
from __future__ import unicode_literals
from datetime import datetime, timedelta
from django.template.defaultfilters import timesince_filter
from django.test import SimpleTestCase
from django.test.utils import requires_tz_support
from ..utils import setup
from .timezone_utils import TimezoneTestCase
class TimesinceTests(TimezoneTestCase):
"""
#20246 - \xa0 in output avoids line-breaks between value and unit
"""
# Default compare with datetime.now()
@setup({'timesince01': '{{ a|timesince }}'})
def test_timesince01(self):
output = self.engine.render_to_string('timesince01', {'a': datetime.now() + timedelta(minutes=-1, seconds=-10)})
self.assertEqual(output, '1\xa0minute')
@setup({'timesince02': '{{ a|timesince }}'})
def test_timesince02(self):
output = self.engine.render_to_string('timesince02', {'a': datetime.now() - timedelta(days=1, minutes=1)})
self.assertEqual(output, '1\xa0day')
@setup({'timesince03': '{{ a|timesince }}'})
def test_timesince03(self):
output = self.engine.render_to_string('timesince03', {'a': datetime.now() - timedelta(hours=1, minutes=25, seconds=10)})
self.assertEqual(output, '1\xa0hour, 25\xa0minutes')
# Compare to a given parameter
@setup({'timesince04': '{{ a|timesince:b }}'})
def test_timesince04(self):
output = self.engine.render_to_string(
'timesince04',
{'a': self.now - timedelta(days=2), 'b': self.now - timedelta(days=1)},
)
self.assertEqual(output, '1\xa0day')
@setup({'timesince05': '{{ a|timesince:b }}'})
def test_timesince05(self):
output = self.engine.render_to_string(
'timesince05',
{'a': self.now - timedelta(days=2, minutes=1), 'b': self.now - timedelta(days=2)},
)
self.assertEqual(output, '1\xa0minute')
# Check that timezone is respected
@setup({'timesince06': '{{ a|timesince:b }}'})
def test_timesince06(self):
output = self.engine.render_to_string('timesince06', {'a': self.now_tz - timedelta(hours=8), 'b': self.now_tz})
self.assertEqual(output, '8\xa0hours')
# Tests for #7443
@setup({'timesince07': '{{ earlier|timesince }}'})
def test_timesince07(self):
output = self.engine.render_to_string('timesince07', {'earlier': self.now - timedelta(days=7)})
self.assertEqual(output, '1\xa0week')
@setup({'timesince08': '{{ earlier|timesince:now }}'})
def test_timesince08(self):
output = self.engine.render_to_string('timesince08', {'now': self.now, 'earlier': self.now - timedelta(days=7)})
self.assertEqual(output, '1\xa0week')
@setup({'timesince09': '{{ later|timesince }}'})
def test_timesince09(self):
output = self.engine.render_to_string('timesince09', {'later': self.now + timedelta(days=7)})
self.assertEqual(output, '0\xa0minutes')
@setup({'timesince10': '{{ later|timesince:now }}'})
def test_timesince10(self):
output = self.engine.render_to_string('timesince10', {'now': self.now, 'later': self.now + timedelta(days=7)})
self.assertEqual(output, '0\xa0minutes')
# Ensures that differing timezones are calculated correctly.
@setup({'timesince11': '{{ a|timesince }}'})
def test_timesince11(self):
output = self.engine.render_to_string('timesince11', {'a': self.now})
self.assertEqual(output, '0\xa0minutes')
@requires_tz_support
@setup({'timesince12': '{{ a|timesince }}'})
def test_timesince12(self):
output = self.engine.render_to_string('timesince12', {'a': self.now_tz})
self.assertEqual(output, '0\xa0minutes')
@requires_tz_support
@setup({'timesince13': '{{ a|timesince }}'})
def test_timesince13(self):
output = self.engine.render_to_string('timesince13', {'a': self.now_tz_i})
self.assertEqual(output, '0\xa0minutes')
@setup({'timesince14': '{{ a|timesince:b }}'})
def test_timesince14(self):
output = self.engine.render_to_string('timesince14', {'a': self.now_tz, 'b': self.now_tz_i})
self.assertEqual(output, '0\xa0minutes')
@setup({'timesince15': '{{ a|timesince:b }}'})
def test_timesince15(self):
output = self.engine.render_to_string('timesince15', {'a': self.now, 'b': self.now_tz_i})
self.assertEqual(output, '')
@setup({'timesince16': '{{ a|timesince:b }}'})
def test_timesince16(self):
output = self.engine.render_to_string('timesince16', {'a': self.now_tz_i, 'b': self.now})
self.assertEqual(output, '')
# Tests for #9065 (two date objects).
@setup({'timesince17': '{{ a|timesince:b }}'})
def test_timesince17(self):
output = self.engine.render_to_string('timesince17', {'a': self.today, 'b': self.today})
self.assertEqual(output, '0\xa0minutes')
@setup({'timesince18': '{{ a|timesince:b }}'})
def test_timesince18(self):
output = self.engine.render_to_string('timesince18', {'a': self.today, 'b': self.today + timedelta(hours=24)})
self.assertEqual(output, '1\xa0day')
class FunctionTests(SimpleTestCase):
def test_since_now(self):
self.assertEqual(timesince_filter(datetime.now() - timedelta(1)), '1\xa0day')
def test_explicit_date(self):
self.assertEqual(timesince_filter(datetime(2005, 12, 29), datetime(2005, 12, 30)), '1\xa0day')
|
ayosef/pynet_test
|
refs/heads/master
|
class9/ex7/mytest/whatever.py
|
27
|
'''
Python class on writing reusable code
'''
def func3():
'''Simple test function'''
print "Whatever"
if __name__ == "__main__":
print "Main program - whatever"
|
furious-luke/django-qunit-tests
|
refs/heads/master
|
examples/example_site/example_site/wsgi.py
|
2
|
"""
WSGI config for example_site project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "example_site.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
nkuhlen/replication-study-financial-macro
|
refs/heads/nk
|
.mywaflib/waflib/extras/pgicc.py
|
10
|
#!/usr/bin/env python
# encoding: utf-8
# Antoine Dechaume 2011
"""
Detect the PGI C compiler
"""
import sys, re
from waflib.Configure import conf
from waflib.Tools.compiler_c import c_compiler
c_compiler['linux'].append('pgicc')
@conf
def find_pgi_compiler(conf, var, name):
"""
Find the program name, and execute it to ensure it really is itself.
"""
if sys.platform == 'cygwin':
conf.fatal('The PGI compiler does not work on Cygwin')
v = conf.env
cc = None
if v[var]: cc = v[var]
elif var in conf.environ: cc = conf.environ[var]
if not cc: cc = conf.find_program(name, var=var)
if not cc: conf.fatal('PGI Compiler (%s) was not found' % name)
v[var + '_VERSION'] = conf.get_pgi_version(cc)
v[var] = cc
v[var + '_NAME'] = 'pgi'
@conf
def get_pgi_version(conf, cc):
"""Find the version of a pgi compiler."""
version_re = re.compile(r"The Portland Group", re.I).search
cmd = cc + ['-V', '-E'] # Issue 1078, prevent wrappers from linking
try:
out, err = conf.cmd_and_log(cmd, output=0)
except Exception:
conf.fatal('Could not find pgi compiler %r' % cmd)
if out: match = version_re(out)
else: match = version_re(err)
if not match:
conf.fatal('Could not verify PGI signature')
cmd = cc + ['-help=variable']
try:
out, err = conf.cmd_and_log(cmd, output=0)
except Exception:
conf.fatal('Could not find pgi compiler %r' % cmd)
	version = re.findall(r'^COMPVER\s*=(.*)', out, re.M)
if len(version) != 1:
conf.fatal('Could not determine the compiler version')
return version[0]
def configure(conf):
conf.find_pgi_compiler('CC', 'pgcc')
conf.find_ar()
conf.gcc_common_flags()
conf.cc_load_tools()
conf.cc_add_flags()
conf.link_add_flags()
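# Hypothetical wscript snippet using this tool (tooldir path illustrative):
#
#     def configure(conf):
#         conf.load('pgicc', tooldir='.mywaflib/waflib/extras')
#
# Loading the tool invokes configure() above, which locates 'pgcc', checks
# its version, and sets up the common C flags.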
|
jhjguxin/blogserver
|
refs/heads/master
|
apps/about/tests.py
|
1940
|
"""
This file demonstrates two different styles of tests (one doctest and one
unittest). These will both pass when you run "manage.py test".
Replace these with more appropriate tests for your application.
"""
from django.test import TestCase
class SimpleTest(TestCase):
def test_basic_addition(self):
"""
Tests that 1 + 1 always equals 2.
"""
self.failUnlessEqual(1 + 1, 2)
__test__ = {"doctest": """
Another way to test that 1 + 1 is equal to 2.
>>> 1 + 1 == 2
True
"""}
|
tboyce021/home-assistant
|
refs/heads/dev
|
homeassistant/components/rflink/light.py
|
7
|
"""Support for Rflink lights."""
import logging
import voluptuous as vol
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
PLATFORM_SCHEMA,
SUPPORT_BRIGHTNESS,
LightEntity,
)
from homeassistant.const import CONF_NAME, CONF_TYPE
import homeassistant.helpers.config_validation as cv
from . import (
CONF_ALIASES,
CONF_AUTOMATIC_ADD,
CONF_DEVICE_DEFAULTS,
CONF_DEVICES,
CONF_FIRE_EVENT,
CONF_GROUP,
CONF_GROUP_ALIASES,
CONF_NOGROUP_ALIASES,
CONF_SIGNAL_REPETITIONS,
DATA_DEVICE_REGISTER,
DEVICE_DEFAULTS_SCHEMA,
EVENT_KEY_COMMAND,
EVENT_KEY_ID,
SwitchableRflinkDevice,
)
_LOGGER = logging.getLogger(__name__)
PARALLEL_UPDATES = 0
TYPE_DIMMABLE = "dimmable"
TYPE_SWITCHABLE = "switchable"
TYPE_HYBRID = "hybrid"
TYPE_TOGGLE = "toggle"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(
CONF_DEVICE_DEFAULTS, default=DEVICE_DEFAULTS_SCHEMA({})
): DEVICE_DEFAULTS_SCHEMA,
vol.Optional(CONF_AUTOMATIC_ADD, default=True): cv.boolean,
vol.Optional(CONF_DEVICES, default={}): {
cv.string: vol.Schema(
{
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_TYPE): vol.Any(
TYPE_DIMMABLE, TYPE_SWITCHABLE, TYPE_HYBRID, TYPE_TOGGLE
),
vol.Optional(CONF_ALIASES, default=[]): vol.All(
cv.ensure_list, [cv.string]
),
vol.Optional(CONF_GROUP_ALIASES, default=[]): vol.All(
cv.ensure_list, [cv.string]
),
vol.Optional(CONF_NOGROUP_ALIASES, default=[]): vol.All(
cv.ensure_list, [cv.string]
),
vol.Optional(CONF_FIRE_EVENT): cv.boolean,
vol.Optional(CONF_SIGNAL_REPETITIONS): vol.Coerce(int),
vol.Optional(CONF_GROUP, default=True): cv.boolean,
}
)
},
},
extra=vol.ALLOW_EXTRA,
)
def entity_type_for_device_id(device_id):
"""Return entity class for protocol of a given device_id.
Async friendly.
"""
    entity_type_mapping = {
        # KlikAanKlikUit supports both dimmers and on/off switches on the
        # same protocol
        "newkaku": TYPE_HYBRID
    }
protocol = device_id.split("_")[0]
return entity_type_mapping.get(protocol)
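# For example (device id illustrative): "newkaku_0cac142_1" has protocol
# "newkaku" and maps to TYPE_HYBRID; unknown protocols return None.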
def entity_class_for_type(entity_type):
"""Translate entity type to entity class.
Async friendly.
"""
    entity_device_mapping = {
        # sends only 'dim' commands; not compatible with on/off switches
        TYPE_DIMMABLE: DimmableRflinkLight,
        # sends only 'on/off' commands; not advised with dimmers and
        # signal repetition
        TYPE_SWITCHABLE: RflinkLight,
        # sends 'dim' and 'on' commands to support both dimmers and on/off
        # switches; not compatible with signal repetition
        TYPE_HYBRID: HybridRflinkLight,
        # sends only 'on' commands for switches which turn on and off
        # using the same 'on' command for both
        TYPE_TOGGLE: ToggleRflinkLight,
    }
return entity_device_mapping.get(entity_type, RflinkLight)
def devices_from_config(domain_config):
"""Parse configuration and add Rflink light devices."""
devices = []
for device_id, config in domain_config[CONF_DEVICES].items():
# Determine which kind of entity to create
if CONF_TYPE in config:
            # Remove type from config so it is not passed as an argument
            # to entity instantiation
entity_type = config.pop(CONF_TYPE)
else:
entity_type = entity_type_for_device_id(device_id)
entity_class = entity_class_for_type(entity_type)
device_config = dict(domain_config[CONF_DEVICE_DEFAULTS], **config)
is_hybrid = entity_class is HybridRflinkLight
# Make user aware this can cause problems
repetitions_enabled = device_config[CONF_SIGNAL_REPETITIONS] != 1
if is_hybrid and repetitions_enabled:
_LOGGER.warning(
"Hybrid type for %s not compatible with signal "
"repetitions. Please set 'dimmable' or 'switchable' "
"type explicitly in configuration",
device_id,
)
device = entity_class(device_id, **device_config)
devices.append(device)
return devices
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Rflink light platform."""
async_add_entities(devices_from_config(config))
async def add_new_device(event):
"""Check if device is known, otherwise add to list of known devices."""
device_id = event[EVENT_KEY_ID]
entity_type = entity_type_for_device_id(event[EVENT_KEY_ID])
entity_class = entity_class_for_type(entity_type)
device_config = config[CONF_DEVICE_DEFAULTS]
device = entity_class(device_id, initial_event=event, **device_config)
async_add_entities([device])
if config[CONF_AUTOMATIC_ADD]:
hass.data[DATA_DEVICE_REGISTER][EVENT_KEY_COMMAND] = add_new_device
class RflinkLight(SwitchableRflinkDevice, LightEntity):
"""Representation of a Rflink light."""
class DimmableRflinkLight(SwitchableRflinkDevice, LightEntity):
"""Rflink light device that support dimming."""
_brightness = 255
async def async_added_to_hass(self):
"""Restore RFLink light brightness attribute."""
await super().async_added_to_hass()
old_state = await self.async_get_last_state()
if (
old_state is not None
and old_state.attributes.get(ATTR_BRIGHTNESS) is not None
):
            # also restore brightness for dimmable devices
self._brightness = int(old_state.attributes[ATTR_BRIGHTNESS])
async def async_turn_on(self, **kwargs):
"""Turn the device on."""
if ATTR_BRIGHTNESS in kwargs:
            # rflink only supports 16 brightness levels
self._brightness = int(kwargs[ATTR_BRIGHTNESS] / 17) * 17
# Turn on light at the requested dim level
await self._async_handle_command("dim", self._brightness)
@property
def brightness(self):
"""Return the brightness of this light between 0..255."""
return self._brightness
@property
def device_state_attributes(self):
"""Return the device state attributes."""
if self._brightness is None:
return {}
return {ATTR_BRIGHTNESS: self._brightness}
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORT_BRIGHTNESS
class HybridRflinkLight(SwitchableRflinkDevice, LightEntity):
"""Rflink light device that sends out both dim and on/off commands.
Used for protocols which support lights that are not exclusively on/off
style. For example KlikAanKlikUit supports both on/off and dimmable light
switches using the same protocol. This type allows unconfigured
KlikAanKlikUit devices to support dimming without breaking support for
on/off switches.
    This type is not compatible with signal repetitions, as the 'dim' and
    'on' commands are sent sequentially and multiple 'on' commands to a
    dimmable device can cause the dimmer to switch into a pulsating
    brightness mode, which results in a nice house disco :)
"""
_brightness = 255
async def async_added_to_hass(self):
"""Restore RFLink light brightness attribute."""
await super().async_added_to_hass()
old_state = await self.async_get_last_state()
if (
old_state is not None
and old_state.attributes.get(ATTR_BRIGHTNESS) is not None
):
            # also restore brightness for dimmable devices
self._brightness = int(old_state.attributes[ATTR_BRIGHTNESS])
async def async_turn_on(self, **kwargs):
"""Turn the device on and set dim level."""
if ATTR_BRIGHTNESS in kwargs:
            # rflink only supports 16 brightness levels
self._brightness = int(kwargs[ATTR_BRIGHTNESS] / 17) * 17
# if receiver supports dimming this will turn on the light
# at the requested dim level
await self._async_handle_command("dim", self._brightness)
            # if the receiving device does not support dim level, this
            # will ensure it is turned on when full brightness is set
if self._brightness == 255:
await self._async_handle_command("turn_on")
@property
def brightness(self):
"""Return the brightness of this light between 0..255."""
return self._brightness
@property
def device_state_attributes(self):
"""Return the device state attributes."""
if self._brightness is None:
return {}
return {ATTR_BRIGHTNESS: self._brightness}
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORT_BRIGHTNESS
class ToggleRflinkLight(SwitchableRflinkDevice, LightEntity):
"""Rflink light device which sends out only 'on' commands.
Some switches like for example Livolo light switches use the
same 'on' command to switch on and switch off the lights.
If the light is on and 'on' gets sent, the light will turn off
and if the light is off and 'on' gets sent, the light will turn on.
"""
def _handle_event(self, event):
"""Adjust state if Rflink picks up a remote command for this device."""
self.cancel_queued_send_commands()
command = event["command"]
if command == "on":
# if the state is unknown or false, it gets set as true
# if the state is true, it gets set as false
self._state = self._state in [None, False]
async def async_turn_on(self, **kwargs):
"""Turn the device on."""
await self._async_handle_command("toggle")
async def async_turn_off(self, **kwargs):
"""Turn the device off."""
await self._async_handle_command("toggle")
|
camptocamp/odoo
|
refs/heads/master
|
addons/account_payment/account_invoice.py
|
382
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.tools.translate import _
from openerp.osv import osv
class Invoice(osv.osv):
_inherit = 'account.invoice'
    # Forbid cancelling an invoice if the related move lines have already
    # been used in a payment order. The risk is that importing the payment
    # line in the bank statement would crash because no move would be found
    # for the payment line.
def action_cancel(self, cr, uid, ids, context=None):
payment_line_obj = self.pool.get('payment.line')
for inv in self.browse(cr, uid, ids, context=context):
pl_line_ids = []
if inv.move_id and inv.move_id.line_id:
inv_mv_lines = [x.id for x in inv.move_id.line_id]
pl_line_ids = payment_line_obj.search(cr, uid, [('move_line_id','in',inv_mv_lines)], context=context)
if pl_line_ids:
pay_line = payment_line_obj.browse(cr, uid, pl_line_ids, context=context)
payment_order_name = ','.join(map(lambda x: x.order_id.reference, pay_line))
                raise osv.except_osv(_('Error!'), _("You cannot cancel an invoice which has already been imported in a payment order. Remove it from the following payment order : %s.") % payment_order_name)
return super(Invoice, self).action_cancel(cr, uid, ids, context=context)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Tivix/wagtail
|
refs/heads/master
|
wagtail/tests/testapp/migrations/0011_auto_20151006_2141.py
|
4
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import taggit.managers
import wagtail.wagtailadmin.taggable
from django.conf import settings
import wagtail.wagtailimages.models
class Migration(migrations.Migration):
dependencies = [
('taggit', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('tests', '0010_mtibasepage_mtichildpage'),
]
operations = [
migrations.CreateModel(
name='CustomImage',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('title', models.CharField(verbose_name='Title', max_length=255)),
('file', models.ImageField(verbose_name='File', upload_to=wagtail.wagtailimages.models.get_upload_to, height_field='height', width_field='width')),
('width', models.IntegerField(verbose_name='Width', editable=False)),
('height', models.IntegerField(verbose_name='Height', editable=False)),
('created_at', models.DateTimeField(db_index=True, verbose_name='Created at', auto_now_add=True)),
('focal_point_x', models.PositiveIntegerField(null=True, blank=True)),
('focal_point_y', models.PositiveIntegerField(null=True, blank=True)),
('focal_point_width', models.PositiveIntegerField(null=True, blank=True)),
('focal_point_height', models.PositiveIntegerField(null=True, blank=True)),
('file_size', models.PositiveIntegerField(editable=False, null=True)),
('caption', models.CharField(max_length=255)),
('not_editable_field', models.CharField(max_length=255)),
('tags', taggit.managers.TaggableManager(blank=True, help_text=None, through='taggit.TaggedItem', verbose_name='Tags', to='taggit.Tag')),
('uploaded_by_user', models.ForeignKey(blank=True, editable=False, verbose_name='Uploaded by user', null=True, to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
bases=(models.Model, wagtail.wagtailadmin.taggable.TagSearchable),
),
migrations.RemoveField(
model_name='customimagewithadminformfields',
name='tags',
),
migrations.RemoveField(
model_name='customimagewithadminformfields',
name='uploaded_by_user',
),
migrations.RemoveField(
model_name='customimagewithoutadminformfields',
name='tags',
),
migrations.RemoveField(
model_name='customimagewithoutadminformfields',
name='uploaded_by_user',
),
migrations.DeleteModel(
name='CustomImageWithAdminFormFields',
),
migrations.DeleteModel(
name='CustomImageWithoutAdminFormFields',
),
]
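# Usage sketch (an assumption: this migration is applied like any other
# Django migration, e.g. from the project root):
#
#     python manage.py migrate tests 0011_auto_20151006_2141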
|
cntnboys/410Lab6
|
refs/heads/master
|
build/django/tests/test_runner/valid_app/tests/__init__.py
|
5
|
import unittest
class SampleTest(unittest.TestCase):
def test_one(self):
pass
|
krismz/Delv
|
refs/heads/master
|
examples/code_snippets/qplot_example.py
|
1
|
# Run from jython started from run.sh
# ie ./jython.sh qplot_example.py
import org.rosuda.JRI.Rengine as Rengine
if __name__ == "__main__":
reng = Rengine(["--vanilla"], False, None)
# set up R library environment
reng.eval('library(ggplot2)') # for qplot
reng.eval('library(gridSVG)') # for SVG annotation and export to SVG
reng.eval('library(XML)') # for converting SVG DOM to a String via saveXML
# plot
reng.eval('print(qplot(mpg,cyl,data=mtcars))')
# now export to SVG
reng.eval('sv<-grid.export("")')
# and extract the SVG as a string
svg=reng.eval('saveXML(sv$svg)').asString()
# write the SVG to file
fid=open('qplot_example.svg','w')
fid.writelines(svg)
fid.close()
|
jpmfribeiro/PyCharts
|
refs/heads/master
|
build/lib.linux-x86_64-2.7/pycharts/charts/highchart.py
|
2
|
class HighChart(object):
def to_javascript(self):
pass
def script_header(self):
# jsc = '''<script src="/home/jpedro/workspace/tools/highcharts/js/highcharts.js"></script>
# <script src="/home/jpedro/workspace/tools/highcharts/js/modules/exporting.js"></script>
# <script src="/home/jpedro/workspace/tools/highcharts/js/highcharts-more.js"></script> '''
jsc = '''<script src="http://code.highcharts.com/highcharts.js"></script>
<script src="http://code.highcharts.com/modules/exporting.js"></script>
<script src="http://code.highcharts.com/highcharts-more.js"></script>'''
return jsc
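# Usage sketch (illustrative only, not part of the library): render the CDN
# script tags into a minimal HTML shell.
if __name__ == '__main__':
    chart = HighChart()
    page = '<html><head>%s</head><body></body></html>' % chart.script_header()
    print(page)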
|
cgstudiomap/cgstudiomap
|
refs/heads/develop
|
main/parts/odoo/addons/procurement/__init__.py
|
374
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import procurement
import wizard
|
impl/rbppc-linux
|
refs/heads/master
|
tools/perf/util/setup.py
|
989
|
#!/usr/bin/python2
from distutils.core import setup, Extension
from os import getenv
from distutils.command.build_ext import build_ext as _build_ext
from distutils.command.install_lib import install_lib as _install_lib
class build_ext(_build_ext):
def finalize_options(self):
_build_ext.finalize_options(self)
self.build_lib = build_lib
self.build_temp = build_tmp
class install_lib(_install_lib):
def finalize_options(self):
_install_lib.finalize_options(self)
self.build_dir = build_lib
cflags = getenv('CFLAGS', '').split()
# switch off several checks (these flags need to be at the end of the cflags list)
cflags += ['-fno-strict-aliasing', '-Wno-write-strings', '-Wno-unused-parameter' ]
build_lib = getenv('PYTHON_EXTBUILD_LIB')
build_tmp = getenv('PYTHON_EXTBUILD_TMP')
libtraceevent = getenv('LIBTRACEEVENT')
libapikfs = getenv('LIBAPIKFS')
ext_sources = [f.strip() for f in file('util/python-ext-sources')
if len(f.strip()) > 0 and f[0] != '#']
perf = Extension('perf',
sources = ext_sources,
include_dirs = ['util/include'],
extra_compile_args = cflags,
extra_objects = [libtraceevent, libapikfs],
)
setup(name='perf',
version='0.1',
description='Interface with the Linux profiling infrastructure',
author='Arnaldo Carvalho de Melo',
author_email='acme@redhat.com',
license='GPLv2',
url='http://perf.wiki.kernel.org',
ext_modules=[perf],
cmdclass={'build_ext': build_ext, 'install_lib': install_lib})
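# Invocation sketch (an assumption inferred from the environment variables
# read above, not a documented interface): perf's build system drives this
# script roughly as
#
#   PYTHON_EXTBUILD_LIB=... PYTHON_EXTBUILD_TMP=... \
#   LIBTRACEEVENT=... LIBAPIKFS=... python2 util/setup.py --quiet build_ext
#
# Run standalone, build_lib and build_tmp are None and the build fails.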
|
brianwoo/django-tutorial
|
refs/heads/master
|
build/Django/tests/lookup/tests.py
|
33
|
from __future__ import unicode_literals
from datetime import datetime
from operator import attrgetter
from unittest import skipUnless
from django.core.exceptions import FieldError
from django.db import connection
from django.test import TestCase, TransactionTestCase, skipUnlessDBFeature
from .models import Article, Author, Game, MyISAMArticle, Player, Season, Tag
class LookupTests(TestCase):
def setUp(self):
# Create a few Authors.
self.au1 = Author(name='Author 1')
self.au1.save()
self.au2 = Author(name='Author 2')
self.au2.save()
# Create a couple of Articles.
self.a1 = Article(headline='Article 1', pub_date=datetime(2005, 7, 26), author=self.au1)
self.a1.save()
self.a2 = Article(headline='Article 2', pub_date=datetime(2005, 7, 27), author=self.au1)
self.a2.save()
self.a3 = Article(headline='Article 3', pub_date=datetime(2005, 7, 27), author=self.au1)
self.a3.save()
self.a4 = Article(headline='Article 4', pub_date=datetime(2005, 7, 28), author=self.au1)
self.a4.save()
self.a5 = Article(headline='Article 5', pub_date=datetime(2005, 8, 1, 9, 0), author=self.au2)
self.a5.save()
self.a6 = Article(headline='Article 6', pub_date=datetime(2005, 8, 1, 8, 0), author=self.au2)
self.a6.save()
self.a7 = Article(headline='Article 7', pub_date=datetime(2005, 7, 27), author=self.au2)
self.a7.save()
# Create a few Tags.
self.t1 = Tag(name='Tag 1')
self.t1.save()
self.t1.articles.add(self.a1, self.a2, self.a3)
self.t2 = Tag(name='Tag 2')
self.t2.save()
self.t2.articles.add(self.a3, self.a4, self.a5)
self.t3 = Tag(name='Tag 3')
self.t3.save()
self.t3.articles.add(self.a5, self.a6, self.a7)
def test_exists(self):
# We can use .exists() to check that there are some records.
self.assertTrue(Article.objects.exists())
for a in Article.objects.all():
a.delete()
# There should be none now!
self.assertFalse(Article.objects.exists())
def test_lookup_int_as_str(self):
# Integer value can be queried using string
self.assertQuerysetEqual(Article.objects.filter(id__iexact=str(self.a1.id)),
['<Article: Article 1>'])
@skipUnlessDBFeature('supports_date_lookup_using_string')
def test_lookup_date_as_str(self):
# A date lookup can be performed using a string search
self.assertQuerysetEqual(Article.objects.filter(pub_date__startswith='2005'),
[
'<Article: Article 5>',
'<Article: Article 6>',
'<Article: Article 4>',
'<Article: Article 2>',
'<Article: Article 3>',
'<Article: Article 7>',
'<Article: Article 1>',
])
def test_iterator(self):
# Each QuerySet gets iterator(), which is a generator that "lazily"
# returns results using database-level iteration.
self.assertQuerysetEqual(Article.objects.iterator(),
[
'Article 5',
'Article 6',
'Article 4',
'Article 2',
'Article 3',
'Article 7',
'Article 1',
],
transform=attrgetter('headline'))
# iterator() can be used on any QuerySet.
self.assertQuerysetEqual(
Article.objects.filter(headline__endswith='4').iterator(),
['Article 4'],
transform=attrgetter('headline'))
def test_count(self):
# count() returns the number of objects matching search criteria.
self.assertEqual(Article.objects.count(), 7)
self.assertEqual(Article.objects.filter(pub_date__exact=datetime(2005, 7, 27)).count(), 3)
self.assertEqual(Article.objects.filter(headline__startswith='Blah blah').count(), 0)
# count() should respect sliced query sets.
articles = Article.objects.all()
self.assertEqual(articles.count(), 7)
self.assertEqual(articles[:4].count(), 4)
self.assertEqual(articles[1:100].count(), 6)
self.assertEqual(articles[10:100].count(), 0)
# Date and date/time lookups can also be done with strings.
self.assertEqual(Article.objects.filter(pub_date__exact='2005-07-27 00:00:00').count(), 3)
def test_in_bulk(self):
# in_bulk() takes a list of IDs and returns a dictionary mapping IDs to objects.
arts = Article.objects.in_bulk([self.a1.id, self.a2.id])
self.assertEqual(arts[self.a1.id], self.a1)
self.assertEqual(arts[self.a2.id], self.a2)
self.assertEqual(Article.objects.in_bulk([self.a3.id]), {self.a3.id: self.a3})
self.assertEqual(Article.objects.in_bulk({self.a3.id}), {self.a3.id: self.a3})
self.assertEqual(Article.objects.in_bulk(frozenset([self.a3.id])), {self.a3.id: self.a3})
self.assertEqual(Article.objects.in_bulk((self.a3.id,)), {self.a3.id: self.a3})
self.assertEqual(Article.objects.in_bulk([1000]), {})
self.assertEqual(Article.objects.in_bulk([]), {})
self.assertEqual(Article.objects.in_bulk(iter([self.a1.id])), {self.a1.id: self.a1})
self.assertEqual(Article.objects.in_bulk(iter([])), {})
self.assertRaises(TypeError, Article.objects.in_bulk)
self.assertRaises(TypeError, Article.objects.in_bulk, headline__startswith='Blah')
def test_values(self):
# values() returns a list of dictionaries instead of object instances --
# and you can specify which fields you want to retrieve.
identity = lambda x: x
self.assertQuerysetEqual(Article.objects.values('headline'),
[
{'headline': 'Article 5'},
{'headline': 'Article 6'},
{'headline': 'Article 4'},
{'headline': 'Article 2'},
{'headline': 'Article 3'},
{'headline': 'Article 7'},
{'headline': 'Article 1'},
],
transform=identity)
self.assertQuerysetEqual(
Article.objects.filter(pub_date__exact=datetime(2005, 7, 27)).values('id'),
[{'id': self.a2.id}, {'id': self.a3.id}, {'id': self.a7.id}],
transform=identity)
self.assertQuerysetEqual(Article.objects.values('id', 'headline'),
[
{'id': self.a5.id, 'headline': 'Article 5'},
{'id': self.a6.id, 'headline': 'Article 6'},
{'id': self.a4.id, 'headline': 'Article 4'},
{'id': self.a2.id, 'headline': 'Article 2'},
{'id': self.a3.id, 'headline': 'Article 3'},
{'id': self.a7.id, 'headline': 'Article 7'},
{'id': self.a1.id, 'headline': 'Article 1'},
],
transform=identity)
# You can use values() with iterator() for memory savings,
# because iterator() uses database-level iteration.
self.assertQuerysetEqual(Article.objects.values('id', 'headline').iterator(),
[
{'headline': 'Article 5', 'id': self.a5.id},
{'headline': 'Article 6', 'id': self.a6.id},
{'headline': 'Article 4', 'id': self.a4.id},
{'headline': 'Article 2', 'id': self.a2.id},
{'headline': 'Article 3', 'id': self.a3.id},
{'headline': 'Article 7', 'id': self.a7.id},
{'headline': 'Article 1', 'id': self.a1.id},
],
transform=identity)
# The values() method works with "extra" fields specified in extra(select).
self.assertQuerysetEqual(
Article.objects.extra(select={'id_plus_one': 'id + 1'}).values('id', 'id_plus_one'),
[
{'id': self.a5.id, 'id_plus_one': self.a5.id + 1},
{'id': self.a6.id, 'id_plus_one': self.a6.id + 1},
{'id': self.a4.id, 'id_plus_one': self.a4.id + 1},
{'id': self.a2.id, 'id_plus_one': self.a2.id + 1},
{'id': self.a3.id, 'id_plus_one': self.a3.id + 1},
{'id': self.a7.id, 'id_plus_one': self.a7.id + 1},
{'id': self.a1.id, 'id_plus_one': self.a1.id + 1},
],
transform=identity)
data = {
'id_plus_one': 'id+1',
'id_plus_two': 'id+2',
'id_plus_three': 'id+3',
'id_plus_four': 'id+4',
'id_plus_five': 'id+5',
'id_plus_six': 'id+6',
'id_plus_seven': 'id+7',
'id_plus_eight': 'id+8',
}
self.assertQuerysetEqual(
Article.objects.filter(id=self.a1.id).extra(select=data).values(*data.keys()),
[{
'id_plus_one': self.a1.id + 1,
'id_plus_two': self.a1.id + 2,
'id_plus_three': self.a1.id + 3,
'id_plus_four': self.a1.id + 4,
'id_plus_five': self.a1.id + 5,
'id_plus_six': self.a1.id + 6,
'id_plus_seven': self.a1.id + 7,
'id_plus_eight': self.a1.id + 8,
}], transform=identity)
# You can specify fields from forward and reverse relations, just like filter().
self.assertQuerysetEqual(
Article.objects.values('headline', 'author__name'),
[
{'headline': self.a5.headline, 'author__name': self.au2.name},
{'headline': self.a6.headline, 'author__name': self.au2.name},
{'headline': self.a4.headline, 'author__name': self.au1.name},
{'headline': self.a2.headline, 'author__name': self.au1.name},
{'headline': self.a3.headline, 'author__name': self.au1.name},
{'headline': self.a7.headline, 'author__name': self.au2.name},
{'headline': self.a1.headline, 'author__name': self.au1.name},
], transform=identity)
self.assertQuerysetEqual(
Author.objects.values('name', 'article__headline').order_by('name', 'article__headline'),
[
{'name': self.au1.name, 'article__headline': self.a1.headline},
{'name': self.au1.name, 'article__headline': self.a2.headline},
{'name': self.au1.name, 'article__headline': self.a3.headline},
{'name': self.au1.name, 'article__headline': self.a4.headline},
{'name': self.au2.name, 'article__headline': self.a5.headline},
{'name': self.au2.name, 'article__headline': self.a6.headline},
{'name': self.au2.name, 'article__headline': self.a7.headline},
], transform=identity)
self.assertQuerysetEqual(
Author.objects.values('name', 'article__headline', 'article__tag__name').order_by('name', 'article__headline', 'article__tag__name'),
[
{'name': self.au1.name, 'article__headline': self.a1.headline, 'article__tag__name': self.t1.name},
{'name': self.au1.name, 'article__headline': self.a2.headline, 'article__tag__name': self.t1.name},
{'name': self.au1.name, 'article__headline': self.a3.headline, 'article__tag__name': self.t1.name},
{'name': self.au1.name, 'article__headline': self.a3.headline, 'article__tag__name': self.t2.name},
{'name': self.au1.name, 'article__headline': self.a4.headline, 'article__tag__name': self.t2.name},
{'name': self.au2.name, 'article__headline': self.a5.headline, 'article__tag__name': self.t2.name},
{'name': self.au2.name, 'article__headline': self.a5.headline, 'article__tag__name': self.t3.name},
{'name': self.au2.name, 'article__headline': self.a6.headline, 'article__tag__name': self.t3.name},
{'name': self.au2.name, 'article__headline': self.a7.headline, 'article__tag__name': self.t3.name},
], transform=identity)
# However, a FieldError will be raised if you specify a non-existent field
# name in values() (a field that is neither in the model nor in
# extra(select)).
self.assertRaises(FieldError,
Article.objects.extra(select={'id_plus_one': 'id + 1'}).values,
'id', 'id_plus_two')
# If you don't specify field names to values(), all are returned.
self.assertQuerysetEqual(Article.objects.filter(id=self.a5.id).values(),
[{
'id': self.a5.id,
'author_id': self.au2.id,
'headline': 'Article 5',
'pub_date': datetime(2005, 8, 1, 9, 0)
}], transform=identity)
def test_values_list(self):
# values_list() is similar to values(), except that the results are
# returned as a list of tuples, rather than a list of dictionaries.
# Within each tuple, the order of the elements is the same as the order
# of fields in the values_list() call.
identity = lambda x: x
self.assertQuerysetEqual(Article.objects.values_list('headline'),
[
('Article 5',),
('Article 6',),
('Article 4',),
('Article 2',),
('Article 3',),
('Article 7',),
('Article 1',),
], transform=identity)
self.assertQuerysetEqual(Article.objects.values_list('id').order_by('id'),
[(self.a1.id,), (self.a2.id,), (self.a3.id,), (self.a4.id,), (self.a5.id,), (self.a6.id,), (self.a7.id,)],
transform=identity)
self.assertQuerysetEqual(
Article.objects.values_list('id', flat=True).order_by('id'),
[self.a1.id, self.a2.id, self.a3.id, self.a4.id, self.a5.id, self.a6.id, self.a7.id],
transform=identity)
self.assertQuerysetEqual(
Article.objects.extra(select={'id_plus_one': 'id+1'})
.order_by('id').values_list('id'),
[(self.a1.id,), (self.a2.id,), (self.a3.id,), (self.a4.id,), (self.a5.id,), (self.a6.id,), (self.a7.id,)],
transform=identity)
self.assertQuerysetEqual(
Article.objects.extra(select={'id_plus_one': 'id+1'})
.order_by('id').values_list('id_plus_one', 'id'),
[
(self.a1.id + 1, self.a1.id),
(self.a2.id + 1, self.a2.id),
(self.a3.id + 1, self.a3.id),
(self.a4.id + 1, self.a4.id),
(self.a5.id + 1, self.a5.id),
(self.a6.id + 1, self.a6.id),
(self.a7.id + 1, self.a7.id)
],
transform=identity)
self.assertQuerysetEqual(
Article.objects.extra(select={'id_plus_one': 'id+1'})
.order_by('id').values_list('id', 'id_plus_one'),
[
(self.a1.id, self.a1.id + 1),
(self.a2.id, self.a2.id + 1),
(self.a3.id, self.a3.id + 1),
(self.a4.id, self.a4.id + 1),
(self.a5.id, self.a5.id + 1),
(self.a6.id, self.a6.id + 1),
(self.a7.id, self.a7.id + 1)
],
transform=identity)
self.assertQuerysetEqual(
Author.objects.values_list('name', 'article__headline', 'article__tag__name').order_by('name', 'article__headline', 'article__tag__name'),
[
(self.au1.name, self.a1.headline, self.t1.name),
(self.au1.name, self.a2.headline, self.t1.name),
(self.au1.name, self.a3.headline, self.t1.name),
(self.au1.name, self.a3.headline, self.t2.name),
(self.au1.name, self.a4.headline, self.t2.name),
(self.au2.name, self.a5.headline, self.t2.name),
(self.au2.name, self.a5.headline, self.t3.name),
(self.au2.name, self.a6.headline, self.t3.name),
(self.au2.name, self.a7.headline, self.t3.name),
], transform=identity)
self.assertRaises(TypeError, Article.objects.values_list, 'id', 'headline', flat=True)
def test_get_next_previous_by(self):
# Every DateField and DateTimeField creates get_next_by_FOO() and
# get_previous_by_FOO() methods. In the case of identical date values,
# these methods will use the ID as a fallback check. This guarantees
# that no records are skipped or duplicated.
self.assertEqual(repr(self.a1.get_next_by_pub_date()),
'<Article: Article 2>')
self.assertEqual(repr(self.a2.get_next_by_pub_date()),
'<Article: Article 3>')
self.assertEqual(repr(self.a2.get_next_by_pub_date(headline__endswith='6')),
'<Article: Article 6>')
self.assertEqual(repr(self.a3.get_next_by_pub_date()),
'<Article: Article 7>')
self.assertEqual(repr(self.a4.get_next_by_pub_date()),
'<Article: Article 6>')
self.assertRaises(Article.DoesNotExist, self.a5.get_next_by_pub_date)
self.assertEqual(repr(self.a6.get_next_by_pub_date()),
'<Article: Article 5>')
self.assertEqual(repr(self.a7.get_next_by_pub_date()),
'<Article: Article 4>')
self.assertEqual(repr(self.a7.get_previous_by_pub_date()),
'<Article: Article 3>')
self.assertEqual(repr(self.a6.get_previous_by_pub_date()),
'<Article: Article 4>')
self.assertEqual(repr(self.a5.get_previous_by_pub_date()),
'<Article: Article 6>')
self.assertEqual(repr(self.a4.get_previous_by_pub_date()),
'<Article: Article 7>')
self.assertEqual(repr(self.a3.get_previous_by_pub_date()),
'<Article: Article 2>')
self.assertEqual(repr(self.a2.get_previous_by_pub_date()),
'<Article: Article 1>')
def test_escaping(self):
# Underscores, percent signs and backslashes have special meaning in the
# underlying SQL code, but Django handles the quoting of them automatically.
a8 = Article(headline='Article_ with underscore', pub_date=datetime(2005, 11, 20))
a8.save()
self.assertQuerysetEqual(Article.objects.filter(headline__startswith='Article'),
[
'<Article: Article_ with underscore>',
'<Article: Article 5>',
'<Article: Article 6>',
'<Article: Article 4>',
'<Article: Article 2>',
'<Article: Article 3>',
'<Article: Article 7>',
'<Article: Article 1>',
])
self.assertQuerysetEqual(Article.objects.filter(headline__startswith='Article_'),
['<Article: Article_ with underscore>'])
a9 = Article(headline='Article% with percent sign', pub_date=datetime(2005, 11, 21))
a9.save()
self.assertQuerysetEqual(Article.objects.filter(headline__startswith='Article'),
[
'<Article: Article% with percent sign>',
'<Article: Article_ with underscore>',
'<Article: Article 5>',
'<Article: Article 6>',
'<Article: Article 4>',
'<Article: Article 2>',
'<Article: Article 3>',
'<Article: Article 7>',
'<Article: Article 1>',
])
self.assertQuerysetEqual(Article.objects.filter(headline__startswith='Article%'),
['<Article: Article% with percent sign>'])
a10 = Article(headline='Article with \\ backslash', pub_date=datetime(2005, 11, 22))
a10.save()
self.assertQuerysetEqual(Article.objects.filter(headline__contains='\\'),
['<Article: Article with \\ backslash>'])
def test_exclude(self):
Article.objects.create(headline='Article_ with underscore', pub_date=datetime(2005, 11, 20))
Article.objects.create(headline='Article% with percent sign', pub_date=datetime(2005, 11, 21))
Article.objects.create(headline='Article with \\ backslash', pub_date=datetime(2005, 11, 22))
# exclude() is the opposite of filter() when doing lookups:
self.assertQuerysetEqual(
Article.objects.filter(headline__contains='Article').exclude(headline__contains='with'),
[
'<Article: Article 5>',
'<Article: Article 6>',
'<Article: Article 4>',
'<Article: Article 2>',
'<Article: Article 3>',
'<Article: Article 7>',
'<Article: Article 1>',
])
self.assertQuerysetEqual(Article.objects.exclude(headline__startswith="Article_"),
[
'<Article: Article with \\ backslash>',
'<Article: Article% with percent sign>',
'<Article: Article 5>',
'<Article: Article 6>',
'<Article: Article 4>',
'<Article: Article 2>',
'<Article: Article 3>',
'<Article: Article 7>',
'<Article: Article 1>',
])
self.assertQuerysetEqual(Article.objects.exclude(headline="Article 7"),
[
'<Article: Article with \\ backslash>',
'<Article: Article% with percent sign>',
'<Article: Article_ with underscore>',
'<Article: Article 5>',
'<Article: Article 6>',
'<Article: Article 4>',
'<Article: Article 2>',
'<Article: Article 3>',
'<Article: Article 1>',
])
def test_none(self):
# none() returns a QuerySet that behaves like any other QuerySet object
self.assertQuerysetEqual(Article.objects.none(), [])
self.assertQuerysetEqual(
Article.objects.none().filter(headline__startswith='Article'), [])
self.assertQuerysetEqual(
Article.objects.filter(headline__startswith='Article').none(), [])
self.assertEqual(Article.objects.none().count(), 0)
self.assertEqual(
Article.objects.none().update(headline="This should not take effect"), 0)
self.assertQuerysetEqual(
[article for article in Article.objects.none().iterator()],
[])
def test_in(self):
# using __in with an empty list should return an empty query set
self.assertQuerysetEqual(Article.objects.filter(id__in=[]), [])
self.assertQuerysetEqual(Article.objects.exclude(id__in=[]),
[
'<Article: Article 5>',
'<Article: Article 6>',
'<Article: Article 4>',
'<Article: Article 2>',
'<Article: Article 3>',
'<Article: Article 7>',
'<Article: Article 1>',
])
def test_error_messages(self):
# Programming errors are pointed out with nice error messages
try:
Article.objects.filter(pub_date_year='2005').count()
self.fail('FieldError not raised')
except FieldError as ex:
self.assertEqual(str(ex), "Cannot resolve keyword 'pub_date_year' "
"into field. Choices are: author, author_id, headline, "
"id, pub_date, tag")
try:
Article.objects.filter(headline__starts='Article')
self.fail('FieldError not raised')
except FieldError as ex:
self.assertEqual(
str(ex), "Unsupported lookup 'starts' for CharField "
"or join on the field not permitted.")
def test_regex(self):
# Create some articles with a bit more interesting headlines for testing field lookups:
for a in Article.objects.all():
a.delete()
now = datetime.now()
a1 = Article(pub_date=now, headline='f')
a1.save()
a2 = Article(pub_date=now, headline='fo')
a2.save()
a3 = Article(pub_date=now, headline='foo')
a3.save()
a4 = Article(pub_date=now, headline='fooo')
a4.save()
a5 = Article(pub_date=now, headline='hey-Foo')
a5.save()
a6 = Article(pub_date=now, headline='bar')
a6.save()
a7 = Article(pub_date=now, headline='AbBa')
a7.save()
a8 = Article(pub_date=now, headline='baz')
a8.save()
a9 = Article(pub_date=now, headline='baxZ')
a9.save()
# zero-or-more
self.assertQuerysetEqual(Article.objects.filter(headline__regex=r'fo*'),
['<Article: f>', '<Article: fo>', '<Article: foo>', '<Article: fooo>'])
self.assertQuerysetEqual(Article.objects.filter(headline__iregex=r'fo*'),
[
'<Article: f>',
'<Article: fo>',
'<Article: foo>',
'<Article: fooo>',
'<Article: hey-Foo>',
])
# one-or-more
self.assertQuerysetEqual(Article.objects.filter(headline__regex=r'fo+'),
['<Article: fo>', '<Article: foo>', '<Article: fooo>'])
# wildcard
self.assertQuerysetEqual(Article.objects.filter(headline__regex=r'fooo?'),
['<Article: foo>', '<Article: fooo>'])
# leading anchor
self.assertQuerysetEqual(Article.objects.filter(headline__regex=r'^b'),
['<Article: bar>', '<Article: baxZ>', '<Article: baz>'])
self.assertQuerysetEqual(Article.objects.filter(headline__iregex=r'^a'),
['<Article: AbBa>'])
# trailing anchor
self.assertQuerysetEqual(Article.objects.filter(headline__regex=r'z$'),
['<Article: baz>'])
self.assertQuerysetEqual(Article.objects.filter(headline__iregex=r'z$'),
['<Article: baxZ>', '<Article: baz>'])
# character sets
self.assertQuerysetEqual(Article.objects.filter(headline__regex=r'ba[rz]'),
['<Article: bar>', '<Article: baz>'])
self.assertQuerysetEqual(Article.objects.filter(headline__regex=r'ba.[RxZ]'),
['<Article: baxZ>'])
self.assertQuerysetEqual(Article.objects.filter(headline__iregex=r'ba[RxZ]'),
['<Article: bar>', '<Article: baxZ>', '<Article: baz>'])
# and more articles:
a10 = Article(pub_date=now, headline='foobar')
a10.save()
a11 = Article(pub_date=now, headline='foobaz')
a11.save()
a12 = Article(pub_date=now, headline='ooF')
a12.save()
a13 = Article(pub_date=now, headline='foobarbaz')
a13.save()
a14 = Article(pub_date=now, headline='zoocarfaz')
a14.save()
a15 = Article(pub_date=now, headline='barfoobaz')
a15.save()
a16 = Article(pub_date=now, headline='bazbaRFOO')
a16.save()
# alternation
self.assertQuerysetEqual(Article.objects.filter(headline__regex=r'oo(f|b)'),
[
'<Article: barfoobaz>',
'<Article: foobar>',
'<Article: foobarbaz>',
'<Article: foobaz>',
])
self.assertQuerysetEqual(Article.objects.filter(headline__iregex=r'oo(f|b)'),
[
'<Article: barfoobaz>',
'<Article: foobar>',
'<Article: foobarbaz>',
'<Article: foobaz>',
'<Article: ooF>',
])
self.assertQuerysetEqual(Article.objects.filter(headline__regex=r'^foo(f|b)'),
['<Article: foobar>', '<Article: foobarbaz>', '<Article: foobaz>'])
# greedy matching
self.assertQuerysetEqual(Article.objects.filter(headline__regex=r'b.*az'),
[
'<Article: barfoobaz>',
'<Article: baz>',
'<Article: bazbaRFOO>',
'<Article: foobarbaz>',
'<Article: foobaz>',
])
self.assertQuerysetEqual(Article.objects.filter(headline__iregex=r'b.*ar'),
[
'<Article: bar>',
'<Article: barfoobaz>',
'<Article: bazbaRFOO>',
'<Article: foobar>',
'<Article: foobarbaz>',
])
@skipUnlessDBFeature('supports_regex_backreferencing')
def test_regex_backreferencing(self):
# grouping and backreferences
now = datetime.now()
a10 = Article(pub_date=now, headline='foobar')
a10.save()
a11 = Article(pub_date=now, headline='foobaz')
a11.save()
a12 = Article(pub_date=now, headline='ooF')
a12.save()
a13 = Article(pub_date=now, headline='foobarbaz')
a13.save()
a14 = Article(pub_date=now, headline='zoocarfaz')
a14.save()
a15 = Article(pub_date=now, headline='barfoobaz')
a15.save()
a16 = Article(pub_date=now, headline='bazbaRFOO')
a16.save()
self.assertQuerysetEqual(Article.objects.filter(headline__regex=r'b(.).*b\1'),
['<Article: barfoobaz>', '<Article: bazbaRFOO>', '<Article: foobarbaz>'])
def test_regex_null(self):
"""
Ensure that a regex lookup does not fail on null/None values
"""
Season.objects.create(year=2012, gt=None)
self.assertQuerysetEqual(Season.objects.filter(gt__regex=r'^$'), [])
def test_regex_non_string(self):
"""
Ensure that a regex lookup does not fail on non-string fields
"""
Season.objects.create(year=2013, gt=444)
self.assertQuerysetEqual(Season.objects.filter(gt__regex=r'^444$'),
['<Season: 2013>'])
def test_regex_non_ascii(self):
"""
Ensure that a regex lookup does not trip on non-ASCII characters.
"""
Player.objects.create(name='\u2660')
Player.objects.get(name__regex='\u2660')
def test_nonfield_lookups(self):
"""
Ensure that a lookup query containing non-fields raises the proper
exception.
"""
with self.assertRaises(FieldError):
Article.objects.filter(headline__blahblah=99)
with self.assertRaises(FieldError):
Article.objects.filter(headline__blahblah__exact=99)
with self.assertRaises(FieldError):
Article.objects.filter(blahblah=99)
def test_lookup_collision(self):
"""
Ensure that genuine field names don't collide with built-in lookup
types ('year', 'gt', 'range', 'in' etc.).
Refs #11670.
"""
# Here we're using 'gt' as a code number for the year, e.g. 111=>2009.
season_2009 = Season.objects.create(year=2009, gt=111)
season_2009.games.create(home="Houston Astros", away="St. Louis Cardinals")
season_2010 = Season.objects.create(year=2010, gt=222)
season_2010.games.create(home="Houston Astros", away="Chicago Cubs")
season_2010.games.create(home="Houston Astros", away="Milwaukee Brewers")
season_2010.games.create(home="Houston Astros", away="St. Louis Cardinals")
season_2011 = Season.objects.create(year=2011, gt=333)
season_2011.games.create(home="Houston Astros", away="St. Louis Cardinals")
season_2011.games.create(home="Houston Astros", away="Milwaukee Brewers")
hunter_pence = Player.objects.create(name="Hunter Pence")
hunter_pence.games = Game.objects.filter(season__year__in=[2009, 2010])
pudge = Player.objects.create(name="Ivan Rodriquez")
pudge.games = Game.objects.filter(season__year=2009)
pedro_feliz = Player.objects.create(name="Pedro Feliz")
pedro_feliz.games = Game.objects.filter(season__year__in=[2011])
johnson = Player.objects.create(name="Johnson")
johnson.games = Game.objects.filter(season__year__in=[2011])
# Games in 2010
self.assertEqual(Game.objects.filter(season__year=2010).count(), 3)
self.assertEqual(Game.objects.filter(season__year__exact=2010).count(), 3)
self.assertEqual(Game.objects.filter(season__gt=222).count(), 3)
self.assertEqual(Game.objects.filter(season__gt__exact=222).count(), 3)
# Games in 2011
self.assertEqual(Game.objects.filter(season__year=2011).count(), 2)
self.assertEqual(Game.objects.filter(season__year__exact=2011).count(), 2)
self.assertEqual(Game.objects.filter(season__gt=333).count(), 2)
self.assertEqual(Game.objects.filter(season__gt__exact=333).count(), 2)
self.assertEqual(Game.objects.filter(season__year__gt=2010).count(), 2)
self.assertEqual(Game.objects.filter(season__gt__gt=222).count(), 2)
# Games played in 2010 and 2011
self.assertEqual(Game.objects.filter(season__year__in=[2010, 2011]).count(), 5)
self.assertEqual(Game.objects.filter(season__year__gt=2009).count(), 5)
self.assertEqual(Game.objects.filter(season__gt__in=[222, 333]).count(), 5)
self.assertEqual(Game.objects.filter(season__gt__gt=111).count(), 5)
# Players who played in 2009
self.assertEqual(Player.objects.filter(games__season__year=2009).distinct().count(), 2)
self.assertEqual(Player.objects.filter(games__season__year__exact=2009).distinct().count(), 2)
self.assertEqual(Player.objects.filter(games__season__gt=111).distinct().count(), 2)
self.assertEqual(Player.objects.filter(games__season__gt__exact=111).distinct().count(), 2)
# Players who played in 2010
self.assertEqual(Player.objects.filter(games__season__year=2010).distinct().count(), 1)
self.assertEqual(Player.objects.filter(games__season__year__exact=2010).distinct().count(), 1)
self.assertEqual(Player.objects.filter(games__season__gt=222).distinct().count(), 1)
self.assertEqual(Player.objects.filter(games__season__gt__exact=222).distinct().count(), 1)
# Players who played in 2011
self.assertEqual(Player.objects.filter(games__season__year=2011).distinct().count(), 2)
self.assertEqual(Player.objects.filter(games__season__year__exact=2011).distinct().count(), 2)
self.assertEqual(Player.objects.filter(games__season__gt=333).distinct().count(), 2)
self.assertEqual(Player.objects.filter(games__season__year__gt=2010).distinct().count(), 2)
self.assertEqual(Player.objects.filter(games__season__gt__gt=222).distinct().count(), 2)
class LookupTransactionTests(TransactionTestCase):
available_apps = ['lookup']
@skipUnless(connection.vendor == 'mysql', 'requires MySQL')
def test_mysql_lookup_search(self):
# Fulltext indexes on MySQL require either version 5.6+ or MyISAM tables.
# Neither combination is currently available on CI, so let's manually
# create a MyISAM table for the Article model.
with connection.cursor() as cursor:
cursor.execute(
"CREATE TEMPORARY TABLE myisam_article ("
" id INTEGER PRIMARY KEY AUTO_INCREMENT, "
" headline VARCHAR(100) NOT NULL "
") ENGINE MYISAM")
dr = MyISAMArticle.objects.create(headline='Django Reinhardt')
MyISAMArticle.objects.create(headline='Ringo Star')
# NOTE: Needs to be created after the article has been saved.
cursor.execute(
'CREATE FULLTEXT INDEX myisam_article_ft ON myisam_article (headline)')
self.assertQuerysetEqual(
MyISAMArticle.objects.filter(headline__search='Reinhardt'),
[dr], lambda x: x)
|
orbitfp7/horizon
|
refs/heads/master
|
horizon/tables/__init__.py
|
48
|
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Convenience imports for public API components.
# The imported names are not modules and are not used in this file, hence the noqa markers.
from horizon.tables.actions import Action # noqa
from horizon.tables.actions import BatchAction # noqa
from horizon.tables.actions import DeleteAction # noqa
from horizon.tables.actions import FilterAction # noqa
from horizon.tables.actions import FixedFilterAction # noqa
from horizon.tables.actions import LinkAction # noqa
from horizon.tables.actions import UpdateAction # noqa
from horizon.tables.base import Column # noqa
from horizon.tables.base import DataTable # noqa
from horizon.tables.base import Row # noqa
from horizon.tables.views import DataTableView # noqa
from horizon.tables.views import MixedDataTableView # noqa
from horizon.tables.views import MultiTableMixin # noqa
from horizon.tables.views import MultiTableView # noqa
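# Usage sketch (an illustrative assumption, not taken from this package):
# downstream code consumes these re-exports via `horizon.tables`, e.g.
#
#     from horizon import tables
#
#     class InstancesTable(tables.DataTable):
#         name = tables.Column('name', verbose_name='Name')
#
#         class Meta(object):
#             name = 'instances'
#             verbose_name = 'Instances'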
|
ivanbaldo/yowsup
|
refs/heads/master
|
yowsup/layers/protocol_receipts/protocolentities/test_receipt_outgoing.py
|
68
|
from yowsup.layers.protocol_receipts.protocolentities import OutgoingReceiptProtocolEntity
from yowsup.structs.protocolentity import ProtocolEntityTest
import unittest
class OutgoingReceiptProtocolEntityTest(ProtocolEntityTest, unittest.TestCase):
def setUp(self):
self.ProtocolEntity = OutgoingReceiptProtocolEntity
self.node = OutgoingReceiptProtocolEntity("123", "target", "read").toProtocolTreeNode()
|
Allow2CEO/browser-ios
|
refs/heads/master
|
brave/node_modules/bloom-filter-cpp/vendor/depot_tools/third_party/boto/file/key.py
|
82
|
# Copyright 2010 Google Inc.
# Copyright (c) 2011, Nexenta Systems Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
# File representation of key, for use with "file://" URIs.
import os, shutil, StringIO
import sys
from boto.exception import BotoClientError
class Key(object):
KEY_STREAM_READABLE = 0x01
KEY_STREAM_WRITABLE = 0x02
KEY_STREAM = (KEY_STREAM_READABLE | KEY_STREAM_WRITABLE)
KEY_REGULAR_FILE = 0x00
def __init__(self, bucket, name, fp=None, key_type=KEY_REGULAR_FILE):
self.bucket = bucket
self.full_path = name
if name == '-':
self.name = None
self.size = None
else:
self.name = name
self.size = os.stat(name).st_size
self.key_type = key_type
if key_type == self.KEY_STREAM_READABLE:
self.fp = sys.stdin
self.full_path = '<STDIN>'
elif key_type == self.KEY_STREAM_WRITABLE:
self.fp = sys.stdout
self.full_path = '<STDOUT>'
else:
self.fp = fp
def __str__(self):
return 'file://' + self.full_path
def get_file(self, fp, headers=None, cb=None, num_cb=10, torrent=False):
"""
Retrieves a file from a Key
:type fp: file
:param fp: File pointer to put the data into
:type headers: dict
:param headers: ignored in this subclass.
:type cb: function
:param cb: ignored in this subclass.
:type num_cb: int
:param num_cb: ignored in this subclass.
"""
if self.key_type & self.KEY_STREAM_WRITABLE:
raise BotoClientError('Stream is not readable')
elif self.key_type & self.KEY_STREAM_READABLE:
key_file = self.fp
else:
key_file = open(self.full_path, 'rb')
try:
shutil.copyfileobj(key_file, fp)
finally:
key_file.close()
def set_contents_from_file(self, fp, headers=None, replace=True, cb=None,
num_cb=10, policy=None, md5=None):
"""
Store an object in a file using the name of the Key object as the
key in file URI and the contents of the file pointed to by 'fp' as the
contents.
:type fp: file
:param fp: the file whose contents to upload
:type headers: dict
:param headers: ignored in this subclass.
:type replace: bool
:param replace: If this parameter is False, the method
will first check to see if an object exists in the
bucket with the same key. If it does, it won't
overwrite it. The default value is True which will
overwrite the object.
:type cb: function
:param cb: ignored in this subclass.
:type cb: int
:param num_cb: ignored in this subclass.
:type policy: :class:`boto.s3.acl.CannedACLStrings`
:param policy: ignored in this subclass.
:type md5: A tuple containing the hexdigest version of the MD5 checksum
of the file as the first element and the Base64-encoded
version of the plain checksum as the second element.
This is the same format returned by the compute_md5 method.
:param md5: ignored in this subclass.
"""
if self.key_type & self.KEY_STREAM_READABLE:
raise BotoClientError('Stream is not writable')
elif self.key_type & self.KEY_STREAM_WRITABLE:
key_file = self.fp
else:
if not replace and os.path.exists(self.full_path):
return
key_file = open(self.full_path, 'wb')
try:
shutil.copyfileobj(fp, key_file)
finally:
key_file.close()
def get_contents_to_file(self, fp, headers=None, cb=None, num_cb=None,
torrent=False, version_id=None,
res_download_handler=None, response_headers=None):
"""
Copy contents from the current file to the file pointed to by 'fp'.
:type fp: File-like object
:param fp: the file to copy the contents into.
:type headers: dict
:param headers: Unused in this subclass.
:type cb: function
:param cb: Unused in this subclass.
:type num_cb: int
:param num_cb: Unused in this subclass.
:type torrent: bool
:param torrent: Unused in this subclass.
:type res_download_handler: ResumableDownloadHandler
:param res_download_handler: Unused in this subclass.
:type response_headers: dict
:param response_headers: Unused in this subclass.
"""
shutil.copyfileobj(self.fp, fp)
def get_contents_as_string(self, headers=None, cb=None, num_cb=10,
torrent=False):
"""
Retrieve file data from the Key, and return contents as a string.
:type headers: dict
:param headers: ignored in this subclass.
:type cb: function
:param cb: ignored in this subclass.
:type num_cb: int
:param num_cb: ignored in this subclass.
:type torrent: bool
:param torrent: ignored in this subclass.
:rtype: string
:returns: The contents of the file as a string
"""
fp = StringIO.StringIO()
self.get_contents_to_file(fp)
return fp.getvalue()
def is_stream(self):
return (self.key_type & self.KEY_STREAM)
def close(self):
"""
Closes fp associated with underlying file.
Caller should call this method when done with this class, to avoid
using up OS resources (e.g., when iterating over a large number
of files).
"""
self.fp.close()
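# Usage sketch (illustrative only; assumes a local file named 'example.txt'
# exists -- the Key is constructed directly here, outside any real bucket):
if __name__ == '__main__':
    key = Key(None, 'example.txt')
    buf = StringIO.StringIO()
    key.get_file(buf)
    print buf.getvalue()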
|
Zhongqilong/mykbengineer
|
refs/heads/master
|
kbe/src/lib/python/Lib/distutils/tests/test_install_headers.py
|
147
|
"""Tests for distutils.command.install_headers."""
import sys
import os
import unittest
import getpass
from distutils.command.install_headers import install_headers
from distutils.tests import support
from test.support import run_unittest
class InstallHeadersTestCase(support.TempdirManager,
support.LoggingSilencer,
support.EnvironGuard,
unittest.TestCase):
def test_simple_run(self):
# we have two headers
header_list = self.mkdtemp()
header1 = os.path.join(header_list, 'header1')
header2 = os.path.join(header_list, 'header2')
self.write_file(header1)
self.write_file(header2)
headers = [header1, header2]
pkg_dir, dist = self.create_dist(headers=headers)
cmd = install_headers(dist)
self.assertEqual(cmd.get_inputs(), headers)
# let's run the command
cmd.install_dir = os.path.join(pkg_dir, 'inst')
cmd.ensure_finalized()
cmd.run()
# let's check the results
self.assertEqual(len(cmd.get_outputs()), 2)
def test_suite():
return unittest.makeSuite(InstallHeadersTestCase)
if __name__ == "__main__":
run_unittest(test_suite())
|
rlanyi/mitro
|
refs/heads/master
|
browser-ext/third_party/firefox-addon-sdk/python-lib/cuddlefish/prefs.py
|
28
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
DEFAULT_COMMON_PREFS = {
# allow debug output via dump to be printed to the system console
# (setting it here just in case, even though PlainTextConsole also
# sets this preference)
'browser.dom.window.dump.enabled': True,
# warn about possibly incorrect code
'javascript.options.strict': True,
'javascript.options.showInConsole': True,
# Allow remote connections to the debugger
'devtools.debugger.remote-enabled' : True,
'extensions.sdk.console.logLevel': 'info',
'extensions.checkCompatibility.nightly' : False,
# Disable extension updates and notifications.
'extensions.update.enabled' : False,
'extensions.update.notifyUser' : False,
# From:
# http://hg.mozilla.org/mozilla-central/file/1dd81c324ac7/build/automation.py.in#l372
# Only load extensions from the application and user profile.
# AddonManager.SCOPE_PROFILE + AddonManager.SCOPE_APPLICATION
'extensions.enabledScopes' : 5,
# Disable metadata caching for installed add-ons by default
'extensions.getAddons.cache.enabled' : False,
# Disable installing any distribution add-ons
'extensions.installDistroAddons' : False,
# Allow installing extensions dropped into the profile folder
'extensions.autoDisableScopes' : 10,
# Disable app update
'app.update.enabled' : False,
# Point update checks to a nonexistent local URL for fast failures.
'extensions.update.url' : 'http://localhost/extensions-dummy/updateURL',
'extensions.blocklist.url' : 'http://localhost/extensions-dummy/blocklistURL',
# Make sure opening about:addons won't hit the network.
'extensions.webservice.discoverURL' : 'http://localhost/extensions-dummy/discoveryURL'
}
DEFAULT_FENNEC_PREFS = {
'browser.console.showInPanel': True,
'browser.firstrun.show.uidiscovery': False
}
# When launching a temporary new Firefox profile, use these preferences.
DEFAULT_FIREFOX_PREFS = {
'browser.startup.homepage' : 'about:blank',
'startup.homepage_welcome_url' : 'about:blank',
'devtools.errorconsole.enabled' : True,
'devtools.chrome.enabled' : True,
# From:
# http://hg.mozilla.org/mozilla-central/file/1dd81c324ac7/build/automation.py.in#l388
# Make url-classifier updates so rare that they won't affect tests.
'urlclassifier.updateinterval' : 172800,
# Point the url-classifier to a nonexistent local URL for fast failures.
'browser.safebrowsing.provider.0.gethashURL' : 'http://localhost/safebrowsing-dummy/gethash',
'browser.safebrowsing.provider.0.updateURL' : 'http://localhost/safebrowsing-dummy/update',
}
# When launching a temporary new Thunderbird profile, use these preferences.
# Note that these were taken from:
# http://mxr.mozilla.org/comm-central/source/mail/test/mozmill/runtest.py
DEFAULT_THUNDERBIRD_PREFS = {
# say no to slow script warnings
'dom.max_chrome_script_run_time': 200,
'dom.max_script_run_time': 0,
# do not ask about being the default mail client
'mail.shell.checkDefaultClient': False,
# disable non-gloda indexing daemons
'mail.winsearch.enable': False,
'mail.winsearch.firstRunDone': True,
'mail.spotlight.enable': False,
'mail.spotlight.firstRunDone': True,
# disable address books for undisclosed reasons
'ldap_2.servers.osx.position': 0,
'ldap_2.servers.oe.position': 0,
# disable the first use junk dialog
'mailnews.ui.junk.firstuse': False,
# other unknown voodoo
# -- dummied up local accounts to stop the account wizard
'mail.account.account1.server' : "server1",
'mail.account.account2.identities' : "id1",
'mail.account.account2.server' : "server2",
'mail.accountmanager.accounts' : "account1,account2",
'mail.accountmanager.defaultaccount' : "account2",
'mail.accountmanager.localfoldersserver' : "server1",
'mail.identity.id1.fullName' : "Tinderbox",
'mail.identity.id1.smtpServer' : "smtp1",
'mail.identity.id1.useremail' : "tinderbox@invalid.com",
'mail.identity.id1.valid' : True,
'mail.root.none-rel' : "[ProfD]Mail",
'mail.root.pop3-rel' : "[ProfD]Mail",
'mail.server.server1.directory-rel' : "[ProfD]Mail/Local Folders",
'mail.server.server1.hostname' : "Local Folders",
'mail.server.server1.name' : "Local Folders",
'mail.server.server1.type' : "none",
'mail.server.server1.userName' : "nobody",
'mail.server.server2.check_new_mail' : False,
'mail.server.server2.directory-rel' : "[ProfD]Mail/tinderbox",
'mail.server.server2.download_on_biff' : True,
'mail.server.server2.hostname' : "tinderbox",
'mail.server.server2.login_at_startup' : False,
'mail.server.server2.name' : "tinderbox@invalid.com",
'mail.server.server2.type' : "pop3",
'mail.server.server2.userName' : "tinderbox",
'mail.smtp.defaultserver' : "smtp1",
'mail.smtpserver.smtp1.hostname' : "tinderbox",
'mail.smtpserver.smtp1.username' : "tinderbox",
'mail.smtpservers' : "smtp1",
'mail.startup.enabledMailCheckOnce' : True,
'mailnews.start_page_override.mstone' : "ignore",
}
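# Sketch (an assumption about how callers combine these tables; the real
# consumer lives elsewhere in cuddlefish): merge the common prefs with an
# app-specific set before writing them into a temporary profile.
def merged_prefs(app_prefs):
    prefs = dict(DEFAULT_COMMON_PREFS)
    prefs.update(app_prefs)
    return prefs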
|
Venturi/oldcms
|
refs/heads/master
|
env/lib/python2.7/site-packages/unidecode/x1d7.py
|
248
|
data = (
'', # 0x00
'', # 0x01
'', # 0x02
'', # 0x03
'', # 0x04
'', # 0x05
'', # 0x06
'', # 0x07
'', # 0x08
'', # 0x09
'', # 0x0a
'', # 0x0b
'', # 0x0c
'', # 0x0d
'', # 0x0e
'', # 0x0f
'', # 0x10
'', # 0x11
'', # 0x12
'', # 0x13
'', # 0x14
'', # 0x15
'', # 0x16
'', # 0x17
'', # 0x18
'', # 0x19
'', # 0x1a
'', # 0x1b
'', # 0x1c
'', # 0x1d
'', # 0x1e
'', # 0x1f
'', # 0x20
'', # 0x21
'', # 0x22
'', # 0x23
'', # 0x24
'', # 0x25
'', # 0x26
'', # 0x27
'', # 0x28
'', # 0x29
'', # 0x2a
'', # 0x2b
'', # 0x2c
'', # 0x2d
'', # 0x2e
'', # 0x2f
'', # 0x30
'', # 0x31
'', # 0x32
'', # 0x33
'', # 0x34
'', # 0x35
'', # 0x36
'', # 0x37
'', # 0x38
'', # 0x39
'', # 0x3a
'', # 0x3b
'', # 0x3c
'', # 0x3d
'', # 0x3e
'', # 0x3f
'', # 0x40
'', # 0x41
'', # 0x42
'', # 0x43
'', # 0x44
'', # 0x45
'', # 0x46
'', # 0x47
'', # 0x48
'', # 0x49
'', # 0x4a
'', # 0x4b
'', # 0x4c
'', # 0x4d
'', # 0x4e
'', # 0x4f
'', # 0x50
'', # 0x51
'', # 0x52
'', # 0x53
'', # 0x54
'', # 0x55
'', # 0x56
'', # 0x57
'', # 0x58
'', # 0x59
'', # 0x5a
'', # 0x5b
'', # 0x5c
'', # 0x5d
'', # 0x5e
'', # 0x5f
'', # 0x60
'', # 0x61
'', # 0x62
'', # 0x63
'', # 0x64
'', # 0x65
'', # 0x66
'', # 0x67
'', # 0x68
'', # 0x69
'', # 0x6a
'', # 0x6b
'', # 0x6c
'', # 0x6d
'', # 0x6e
'', # 0x6f
'', # 0x70
'', # 0x71
'', # 0x72
'', # 0x73
'', # 0x74
'', # 0x75
'', # 0x76
'', # 0x77
'', # 0x78
'', # 0x79
'', # 0x7a
'', # 0x7b
'', # 0x7c
'', # 0x7d
'', # 0x7e
'', # 0x7f
'', # 0x80
'', # 0x81
'', # 0x82
'', # 0x83
'', # 0x84
'', # 0x85
'', # 0x86
'', # 0x87
'', # 0x88
'', # 0x89
'', # 0x8a
'', # 0x8b
'', # 0x8c
'', # 0x8d
'', # 0x8e
'', # 0x8f
'', # 0x90
'', # 0x91
'', # 0x92
'', # 0x93
'', # 0x94
'', # 0x95
'', # 0x96
'', # 0x97
'', # 0x98
'', # 0x99
'', # 0x9a
'', # 0x9b
'', # 0x9c
'', # 0x9d
'', # 0x9e
'', # 0x9f
'', # 0xa0
'', # 0xa1
'', # 0xa2
'', # 0xa3
'', # 0xa4
'', # 0xa5
'', # 0xa6
'', # 0xa7
'', # 0xa8
'', # 0xa9
'', # 0xaa
'', # 0xab
'', # 0xac
'', # 0xad
'', # 0xae
'', # 0xaf
'', # 0xb0
'', # 0xb1
'', # 0xb2
'', # 0xb3
'', # 0xb4
'', # 0xb5
'', # 0xb6
'', # 0xb7
'', # 0xb8
'', # 0xb9
'', # 0xba
'', # 0xbb
'', # 0xbc
'', # 0xbd
'', # 0xbe
'', # 0xbf
'', # 0xc0
'', # 0xc1
'', # 0xc2
'', # 0xc3
'', # 0xc4
'', # 0xc5
'', # 0xc6
'', # 0xc7
'', # 0xc8
'', # 0xc9
'', # 0xca
'', # 0xcb
'', # 0xcc
'', # 0xcd
'0', # 0xce
'1', # 0xcf
'2', # 0xd0
'3', # 0xd1
'4', # 0xd2
'5', # 0xd3
'6', # 0xd4
'7', # 0xd5
'8', # 0xd6
'9', # 0xd7
'0', # 0xd8
'1', # 0xd9
'2', # 0xda
'3', # 0xdb
'4', # 0xdc
'5', # 0xdd
'6', # 0xde
'7', # 0xdf
'8', # 0xe0
'9', # 0xe1
'0', # 0xe2
'1', # 0xe3
'2', # 0xe4
'3', # 0xe5
'4', # 0xe6
'5', # 0xe7
'6', # 0xe8
'7', # 0xe9
'8', # 0xea
'9', # 0xeb
'0', # 0xec
'1', # 0xed
'2', # 0xee
'3', # 0xef
'4', # 0xf0
'5', # 0xf1
'6', # 0xf2
'7', # 0xf3
'8', # 0xf4
'9', # 0xf5
'0', # 0xf6
'1', # 0xf7
'2', # 0xf8
'3', # 0xf9
'4', # 0xfa
'5', # 0xfb
'6', # 0xfc
'7', # 0xfd
'8', # 0xfe
'9', # 0xff
)
|
silveregg/moto
|
refs/heads/master
|
moto/rds2/__init__.py
|
21
|
from __future__ import unicode_literals
from .models import rds2_backends
from ..core.models import MockAWS
rds2_backend = rds2_backends['us-west-1']
def mock_rds2(func=None):
if func:
return MockAWS(rds2_backends)(func)
else:
return MockAWS(rds2_backends)
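# Usage sketch (an assumption: mock_rds2 behaves like other moto mocks and
# wraps test code as a decorator; the names below are illustrative):
#
#     @mock_rds2
#     def test_describe_db_instances():
#         conn = boto.rds2.connect_to_region('us-west-1')
#         ...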
|
bailey1234/hyeri7846
|
refs/heads/master
|
lib/werkzeug/serving.py
|
309
|
# -*- coding: utf-8 -*-
"""
werkzeug.serving
~~~~~~~~~~~~~~~~
There are many ways to serve a WSGI application. While you're developing
it you usually don't want a full blown webserver like Apache but a simple
standalone one. From Python 2.5 onwards there is the `wsgiref`_ server in
the standard library. If you're using older versions of Python you can
download the package from the cheeseshop.
However there are some caveats. Source code won't reload itself when
changed, and each time you kill the server using ``^C`` you get a
`KeyboardInterrupt` error. While the latter is easy to solve, the former
can be a pain in the ass in some situations.
The easiest way is creating a small ``start-myproject.py`` that runs the
application::
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from myproject import make_app
from werkzeug.serving import run_simple
app = make_app(...)
run_simple('localhost', 8080, app, use_reloader=True)
You can also pass it an `extra_files` keyword argument with a list of
additional files (like configuration files) you want to observe.
For bigger applications you should consider using `werkzeug.script`
instead of a simple start file.
:copyright: (c) 2013 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import with_statement
import os
import socket
import sys
import time
import signal
import subprocess
try:
import thread
except ImportError:
import _thread as thread
try:
from SocketServer import ThreadingMixIn, ForkingMixIn
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
except ImportError:
from socketserver import ThreadingMixIn, ForkingMixIn
from http.server import HTTPServer, BaseHTTPRequestHandler
import werkzeug
from werkzeug._internal import _log
from werkzeug._compat import iteritems, PY2, reraise, text_type, \
wsgi_encoding_dance
from werkzeug.urls import url_parse, url_unquote
from werkzeug.exceptions import InternalServerError, BadRequest
class WSGIRequestHandler(BaseHTTPRequestHandler, object):
"""A request handler that implements WSGI dispatching."""
@property
def server_version(self):
return 'Werkzeug/' + werkzeug.__version__
def make_environ(self):
request_url = url_parse(self.path)
def shutdown_server():
self.server.shutdown_signal = True
url_scheme = self.server.ssl_context is None and 'http' or 'https'
path_info = url_unquote(request_url.path)
environ = {
'wsgi.version': (1, 0),
'wsgi.url_scheme': url_scheme,
'wsgi.input': self.rfile,
'wsgi.errors': sys.stderr,
'wsgi.multithread': self.server.multithread,
'wsgi.multiprocess': self.server.multiprocess,
'wsgi.run_once': False,
'werkzeug.server.shutdown':
shutdown_server,
'SERVER_SOFTWARE': self.server_version,
'REQUEST_METHOD': self.command,
'SCRIPT_NAME': '',
'PATH_INFO': wsgi_encoding_dance(path_info),
'QUERY_STRING': wsgi_encoding_dance(request_url.query),
'CONTENT_TYPE': self.headers.get('Content-Type', ''),
'CONTENT_LENGTH': self.headers.get('Content-Length', ''),
'REMOTE_ADDR': self.client_address[0],
'REMOTE_PORT': self.client_address[1],
'SERVER_NAME': self.server.server_address[0],
'SERVER_PORT': str(self.server.server_address[1]),
'SERVER_PROTOCOL': self.request_version
}
for key, value in self.headers.items():
key = 'HTTP_' + key.upper().replace('-', '_')
if key not in ('HTTP_CONTENT_TYPE', 'HTTP_CONTENT_LENGTH'):
environ[key] = value
if request_url.netloc:
environ['HTTP_HOST'] = request_url.netloc
return environ
def run_wsgi(self):
if self.headers.get('Expect', '').lower().strip() == '100-continue':
self.wfile.write(b'HTTP/1.1 100 Continue\r\n\r\n')
environ = self.make_environ()
headers_set = []
headers_sent = []
def write(data):
assert headers_set, 'write() before start_response'
if not headers_sent:
status, response_headers = headers_sent[:] = headers_set
try:
code, msg = status.split(None, 1)
except ValueError:
code, msg = status, ""
self.send_response(int(code), msg)
header_keys = set()
for key, value in response_headers:
self.send_header(key, value)
key = key.lower()
header_keys.add(key)
if 'content-length' not in header_keys:
self.close_connection = True
self.send_header('Connection', 'close')
if 'server' not in header_keys:
self.send_header('Server', self.version_string())
if 'date' not in header_keys:
self.send_header('Date', self.date_time_string())
self.end_headers()
assert type(data) is bytes, 'applications must write bytes'
self.wfile.write(data)
self.wfile.flush()
def start_response(status, response_headers, exc_info=None):
if exc_info:
try:
if headers_sent:
reraise(*exc_info)
finally:
exc_info = None
elif headers_set:
raise AssertionError('Headers already set')
headers_set[:] = [status, response_headers]
return write
def execute(app):
application_iter = app(environ, start_response)
try:
for data in application_iter:
write(data)
if not headers_sent:
write(b'')
finally:
if hasattr(application_iter, 'close'):
application_iter.close()
application_iter = None
try:
execute(self.server.app)
except (socket.error, socket.timeout) as e:
self.connection_dropped(e, environ)
except Exception:
if self.server.passthrough_errors:
raise
from werkzeug.debug.tbtools import get_current_traceback
traceback = get_current_traceback(ignore_system_exceptions=True)
try:
# if we haven't yet sent the headers but they are set
# we roll back to be able to set them again.
if not headers_sent:
del headers_set[:]
execute(InternalServerError())
except Exception:
pass
self.server.log('error', 'Error on request:\n%s',
traceback.plaintext)
def handle(self):
"""Handles a request ignoring dropped connections."""
rv = None
try:
rv = BaseHTTPRequestHandler.handle(self)
except (socket.error, socket.timeout) as e:
self.connection_dropped(e)
except Exception:
if self.server.ssl_context is None or not is_ssl_error():
raise
if self.server.shutdown_signal:
self.initiate_shutdown()
return rv
def initiate_shutdown(self):
"""A horrible, horrible way to kill the server for Python 2.6 and
later. It's the best we can do.
"""
        # Windows does not provide SIGKILL, so fall back to SIGTERM.
sig = getattr(signal, 'SIGKILL', signal.SIGTERM)
# reloader active
if os.environ.get('WERKZEUG_RUN_MAIN') == 'true':
os.kill(os.getpid(), sig)
# python 2.7
self.server._BaseServer__shutdown_request = True
# python 2.6
self.server._BaseServer__serving = False
def connection_dropped(self, error, environ=None):
"""Called if the connection was closed by the client. By default
nothing happens.
"""
def handle_one_request(self):
"""Handle a single HTTP request."""
self.raw_requestline = self.rfile.readline()
if not self.raw_requestline:
self.close_connection = 1
elif self.parse_request():
return self.run_wsgi()
def send_response(self, code, message=None):
"""Send the response header and log the response code."""
self.log_request(code)
if message is None:
            message = self.responses[code][0] if code in self.responses else ''
if self.request_version != 'HTTP/0.9':
hdr = "%s %d %s\r\n" % (self.protocol_version, code, message)
self.wfile.write(hdr.encode('ascii'))
def version_string(self):
return BaseHTTPRequestHandler.version_string(self).strip()
def address_string(self):
return self.client_address[0]
def log_request(self, code='-', size='-'):
self.log('info', '"%s" %s %s', self.requestline, code, size)
def log_error(self, *args):
self.log('error', *args)
def log_message(self, format, *args):
self.log('info', format, *args)
def log(self, type, message, *args):
_log(type, '%s - - [%s] %s\n' % (self.address_string(),
self.log_date_time_string(),
message % args))
#: backwards compatible name if someone is subclassing it
BaseRequestHandler = WSGIRequestHandler
def generate_adhoc_ssl_pair(cn=None):
from random import random
from OpenSSL import crypto
# pretty damn sure that this is not actually accepted by anyone
if cn is None:
cn = '*'
cert = crypto.X509()
    cert.set_serial_number(int(random() * sys.maxsize))
cert.gmtime_adj_notBefore(0)
cert.gmtime_adj_notAfter(60 * 60 * 24 * 365)
subject = cert.get_subject()
subject.CN = cn
subject.O = 'Dummy Certificate'
issuer = cert.get_issuer()
issuer.CN = 'Untrusted Authority'
issuer.O = 'Self-Signed'
pkey = crypto.PKey()
pkey.generate_key(crypto.TYPE_RSA, 768)
cert.set_pubkey(pkey)
cert.sign(pkey, 'md5')
return cert, pkey
def make_ssl_devcert(base_path, host=None, cn=None):
"""Creates an SSL key for development. This should be used instead of
the ``'adhoc'`` key which generates a new cert on each server start.
It accepts a path for where it should store the key and cert and
either a host or CN. If a host is given it will use the CN
``*.host/CN=host``.
For more information see :func:`run_simple`.
.. versionadded:: 0.9
:param base_path: the path to the certificate and key. The extension
``.crt`` is added for the certificate, ``.key`` is
added for the key.
:param host: the name of the host. This can be used as an alternative
for the `cn`.
:param cn: the `CN` to use.
"""
from OpenSSL import crypto
if host is not None:
cn = '*.%s/CN=%s' % (host, host)
cert, pkey = generate_adhoc_ssl_pair(cn=cn)
cert_file = base_path + '.crt'
pkey_file = base_path + '.key'
    with open(cert_file, 'wb') as f:
        f.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))
    with open(pkey_file, 'wb') as f:
        f.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey))
return cert_file, pkey_file
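# Illustrative usage (hypothetical paths, not part of the original module):
# generate the pair once, then reuse it across restarts by passing the
# returned files as the ssl_context tuple understood by run_simple():
#
#     cert_file, pkey_file = make_ssl_devcert('/tmp/devcert', host='localhost')
#     run_simple('localhost', 5000, app, ssl_context=(cert_file, pkey_file))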
def generate_adhoc_ssl_context():
"""Generates an adhoc SSL context for the development server."""
from OpenSSL import SSL
cert, pkey = generate_adhoc_ssl_pair()
ctx = SSL.Context(SSL.SSLv23_METHOD)
ctx.use_privatekey(pkey)
ctx.use_certificate(cert)
return ctx
def load_ssl_context(cert_file, pkey_file):
"""Loads an SSL context from a certificate and private key file."""
from OpenSSL import SSL
ctx = SSL.Context(SSL.SSLv23_METHOD)
ctx.use_certificate_file(cert_file)
ctx.use_privatekey_file(pkey_file)
return ctx
def is_ssl_error(error=None):
"""Checks if the given error (or the current one) is an SSL error."""
if error is None:
error = sys.exc_info()[1]
from OpenSSL import SSL
return isinstance(error, SSL.Error)
class _SSLConnectionFix(object):
"""Wrapper around SSL connection to provide a working makefile()."""
def __init__(self, con):
self._con = con
def makefile(self, mode, bufsize):
return socket._fileobject(self._con, mode, bufsize)
def __getattr__(self, attrib):
return getattr(self._con, attrib)
def shutdown(self, arg=None):
try:
self._con.shutdown()
except Exception:
pass
def select_ip_version(host, port):
"""Returns AF_INET4 or AF_INET6 depending on where to connect to."""
# disabled due to problems with current ipv6 implementations
# and various operating systems. Probably this code also is
# not supposed to work, but I can't come up with any other
# ways to implement this.
##try:
## info = socket.getaddrinfo(host, port, socket.AF_UNSPEC,
## socket.SOCK_STREAM, 0,
## socket.AI_PASSIVE)
## if info:
## return info[0][0]
##except socket.gaierror:
## pass
if ':' in host and hasattr(socket, 'AF_INET6'):
return socket.AF_INET6
return socket.AF_INET
class BaseWSGIServer(HTTPServer, object):
"""Simple single-threaded, single-process WSGI server."""
multithread = False
multiprocess = False
request_queue_size = 128
def __init__(self, host, port, app, handler=None,
passthrough_errors=False, ssl_context=None):
if handler is None:
handler = WSGIRequestHandler
self.address_family = select_ip_version(host, port)
HTTPServer.__init__(self, (host, int(port)), handler)
self.app = app
self.passthrough_errors = passthrough_errors
self.shutdown_signal = False
if ssl_context is not None:
try:
from OpenSSL import tsafe
except ImportError:
raise TypeError('SSL is not available if the OpenSSL '
'library is not installed.')
if isinstance(ssl_context, tuple):
ssl_context = load_ssl_context(*ssl_context)
if ssl_context == 'adhoc':
ssl_context = generate_adhoc_ssl_context()
self.socket = tsafe.Connection(ssl_context, self.socket)
self.ssl_context = ssl_context
else:
self.ssl_context = None
def log(self, type, message, *args):
_log(type, message, *args)
def serve_forever(self):
self.shutdown_signal = False
try:
HTTPServer.serve_forever(self)
except KeyboardInterrupt:
pass
def handle_error(self, request, client_address):
if self.passthrough_errors:
raise
else:
return HTTPServer.handle_error(self, request, client_address)
def get_request(self):
con, info = self.socket.accept()
if self.ssl_context is not None:
con = _SSLConnectionFix(con)
return con, info
class ThreadedWSGIServer(ThreadingMixIn, BaseWSGIServer):
"""A WSGI server that does threading."""
multithread = True
class ForkingWSGIServer(ForkingMixIn, BaseWSGIServer):
"""A WSGI server that does forking."""
multiprocess = True
def __init__(self, host, port, app, processes=40, handler=None,
passthrough_errors=False, ssl_context=None):
BaseWSGIServer.__init__(self, host, port, app, handler,
passthrough_errors, ssl_context)
self.max_children = processes
def make_server(host, port, app=None, threaded=False, processes=1,
request_handler=None, passthrough_errors=False,
ssl_context=None):
"""Create a new server instance that is either threaded, or forks
or just processes one request after another.
"""
if threaded and processes > 1:
raise ValueError("cannot have a multithreaded and "
"multi process server.")
elif threaded:
return ThreadedWSGIServer(host, port, app, request_handler,
passthrough_errors, ssl_context)
elif processes > 1:
return ForkingWSGIServer(host, port, app, processes, request_handler,
passthrough_errors, ssl_context)
else:
return BaseWSGIServer(host, port, app, request_handler,
passthrough_errors, ssl_context)
def _iter_module_files():
    # The list call is necessary on Python 3 in case the module
    # dictionary is modified during iteration.
for module in list(sys.modules.values()):
filename = getattr(module, '__file__', None)
if filename:
old = None
while not os.path.isfile(filename):
old = filename
filename = os.path.dirname(filename)
if filename == old:
break
else:
if filename[-4:] in ('.pyc', '.pyo'):
filename = filename[:-1]
yield filename
def _reloader_stat_loop(extra_files=None, interval=1):
"""When this function is run from the main thread, it will force other
threads to exit when any modules currently loaded change.
    Copyright notice: this function is based on the autoreload.py from
    the CherryPy trac, which originated from the now-defunct WSGIKit.
:param extra_files: a list of additional files it should watch.
"""
from itertools import chain
mtimes = {}
while 1:
for filename in chain(_iter_module_files(), extra_files or ()):
try:
mtime = os.stat(filename).st_mtime
except OSError:
continue
old_time = mtimes.get(filename)
if old_time is None:
mtimes[filename] = mtime
continue
elif mtime > old_time:
_log('info', ' * Detected change in %r, reloading' % filename)
sys.exit(3)
time.sleep(interval)
def _reloader_inotify(extra_files=None, interval=None):
# Mutated by inotify loop when changes occur.
changed = [False]
# Setup inotify watches
from pyinotify import WatchManager, Notifier
# this API changed at one point, support both
try:
from pyinotify import EventsCodes as ec
ec.IN_ATTRIB
except (ImportError, AttributeError):
import pyinotify as ec
wm = WatchManager()
mask = ec.IN_DELETE_SELF | ec.IN_MOVE_SELF | ec.IN_MODIFY | ec.IN_ATTRIB
def signal_changed(event):
if changed[0]:
return
_log('info', ' * Detected change in %r, reloading' % event.path)
changed[:] = [True]
for fname in extra_files or ():
wm.add_watch(fname, mask, signal_changed)
# ... And now we wait...
notif = Notifier(wm)
try:
while not changed[0]:
# always reiterate through sys.modules, adding them
for fname in _iter_module_files():
wm.add_watch(fname, mask, signal_changed)
notif.process_events()
if notif.check_events(timeout=interval):
notif.read_events()
# TODO Set timeout to something small and check parent liveliness
finally:
notif.stop()
sys.exit(3)
# currently we always use the stat loop reloader for the simple reason
# that the inotify one does not respond to added files properly. Also
# it's quite buggy and the API is a mess.
reloader_loop = _reloader_stat_loop
def restart_with_reloader():
"""Spawn a new Python interpreter with the same arguments as this one,
but running the reloader thread.
"""
while 1:
_log('info', ' * Restarting with reloader')
args = [sys.executable] + sys.argv
new_environ = os.environ.copy()
new_environ['WERKZEUG_RUN_MAIN'] = 'true'
        # Work around a Windows quirk: unicode strings sometimes end up in
        # the environment and subprocess.call does not like this; encode
        # them to latin1 and continue.
if os.name == 'nt' and PY2:
for key, value in iteritems(new_environ):
if isinstance(value, text_type):
new_environ[key] = value.encode('iso-8859-1')
exit_code = subprocess.call(args, env=new_environ)
if exit_code != 3:
return exit_code
def run_with_reloader(main_func, extra_files=None, interval=1):
"""Run the given function in an independent python interpreter."""
import signal
signal.signal(signal.SIGTERM, lambda *args: sys.exit(0))
if os.environ.get('WERKZEUG_RUN_MAIN') == 'true':
thread.start_new_thread(main_func, ())
try:
reloader_loop(extra_files, interval)
except KeyboardInterrupt:
return
try:
sys.exit(restart_with_reloader())
except KeyboardInterrupt:
pass
def run_simple(hostname, port, application, use_reloader=False,
use_debugger=False, use_evalex=True,
extra_files=None, reloader_interval=1, threaded=False,
processes=1, request_handler=None, static_files=None,
passthrough_errors=False, ssl_context=None):
"""Start an application using wsgiref and with an optional reloader. This
wraps `wsgiref` to fix the wrong default reporting of the multithreaded
WSGI variable and adds optional multithreading and fork support.
This function has a command-line interface too::
python -m werkzeug.serving --help
.. versionadded:: 0.5
`static_files` was added to simplify serving of static files as well
as `passthrough_errors`.
.. versionadded:: 0.6
support for SSL was added.
.. versionadded:: 0.8
Added support for automatically loading a SSL context from certificate
file and private key.
.. versionadded:: 0.9
Added command-line interface.
    :param hostname: The host for the application, e.g. ``'localhost'``
    :param port: The port for the server, e.g. ``8080``
:param application: the WSGI application to execute
:param use_reloader: should the server automatically restart the python
process if modules were changed?
:param use_debugger: should the werkzeug debugging system be used?
:param use_evalex: should the exception evaluation feature be enabled?
:param extra_files: a list of files the reloader should watch
additionally to the modules. For example configuration
files.
:param reloader_interval: the interval for the reloader in seconds.
:param threaded: should the process handle each request in a separate
thread?
:param processes: if greater than 1 then handle each request in a new process
up to this maximum number of concurrent processes.
:param request_handler: optional parameter that can be used to replace
the default one. You can use this to replace it
with a different
:class:`~BaseHTTPServer.BaseHTTPRequestHandler`
subclass.
:param static_files: a dict of paths for static files. This works exactly
like :class:`SharedDataMiddleware`, it's actually
just wrapping the application in that middleware before
serving.
:param passthrough_errors: set this to `True` to disable the error catching.
This means that the server will die on errors but
it can be useful to hook debuggers in (pdb etc.)
:param ssl_context: an SSL context for the connection. Either an OpenSSL
context, a tuple in the form ``(cert_file, pkey_file)``,
the string ``'adhoc'`` if the server should
automatically create one, or `None` to disable SSL
(which is the default).
"""
if use_debugger:
from werkzeug.debug import DebuggedApplication
application = DebuggedApplication(application, use_evalex)
if static_files:
from werkzeug.wsgi import SharedDataMiddleware
application = SharedDataMiddleware(application, static_files)
def inner():
make_server(hostname, port, application, threaded,
processes, request_handler,
passthrough_errors, ssl_context).serve_forever()
if os.environ.get('WERKZEUG_RUN_MAIN') != 'true':
        display_hostname = hostname if hostname and hostname != '*' else 'localhost'
if ':' in display_hostname:
display_hostname = '[%s]' % display_hostname
        _log('info', ' * Running on %s://%s:%d/',
             'http' if ssl_context is None else 'https',
             display_hostname, port)
if use_reloader:
# Create and destroy a socket so that any exceptions are raised before
# we spawn a separate Python interpreter and lose this ability.
address_family = select_ip_version(hostname, port)
test_socket = socket.socket(address_family, socket.SOCK_STREAM)
test_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
test_socket.bind((hostname, port))
test_socket.close()
run_with_reloader(inner, extra_files, reloader_interval)
else:
inner()
def main():
'''A simple command-line interface for :py:func:`run_simple`.'''
    # optparse is used instead of argparse because it is available on
    # Python < 2.7 as well
import optparse
from werkzeug.utils import import_string
parser = optparse.OptionParser(usage='Usage: %prog [options] app_module:app_object')
parser.add_option('-b', '--bind', dest='address',
help='The hostname:port the app should listen on.')
parser.add_option('-d', '--debug', dest='use_debugger',
action='store_true', default=False,
help='Use Werkzeug\'s debugger.')
parser.add_option('-r', '--reload', dest='use_reloader',
action='store_true', default=False,
help='Reload Python process if modules change.')
options, args = parser.parse_args()
hostname, port = None, None
if options.address:
address = options.address.split(':')
hostname = address[0]
if len(address) > 1:
port = address[1]
if len(args) != 1:
        sys.stdout.write('No application supplied, or too many. See --help\n')
sys.exit(1)
app = import_string(args[0])
run_simple(
hostname=(hostname or '127.0.0.1'), port=int(port or 5000),
application=app, use_reloader=options.use_reloader,
use_debugger=options.use_debugger
)
if __name__ == '__main__':
main()
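# --- Hedged usage sketch (illustrative only, not part of the module) ---
# A minimal WSGI callable served with run_simple; the names and the address
# below are arbitrary examples.
#
#     def hello_app(environ, start_response):
#         start_response('200 OK', [('Content-Type', 'text/plain')])
#         return [b'Hello from the Werkzeug dev server\n']
#
#     run_simple('127.0.0.1', 5000, hello_app,
#                use_reloader=True, use_debugger=True)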
|
m11s/MissionPlanner
|
refs/heads/master
|
Lib/site-packages/numpy/ma/tests/test_regression.py
|
60
|
from numpy.testing import *
import numpy as np
rlevel = 1
class TestRegression(TestCase):
def test_masked_array_create(self,level=rlevel):
"""Ticket #17"""
x = np.ma.masked_array([0,1,2,3,0,4,5,6],mask=[0,0,0,1,1,1,0,0])
assert_array_equal(np.ma.nonzero(x),[[1,2,6,7]])
def test_masked_array(self,level=rlevel):
"""Ticket #61"""
        x = np.ma.array(1, mask=[1])  # must not crash
def test_mem_masked_where(self,level=rlevel):
"""Ticket #62"""
from numpy.ma import masked_where, MaskType
a = np.zeros((1,1))
b = np.zeros(a.shape, MaskType)
c = masked_where(b,a)
        a - c  # must not crash (the regression under test)
def test_masked_array_multiply(self,level=rlevel):
"""Ticket #254"""
a = np.ma.zeros((4,1))
a[2,0] = np.ma.masked
b = np.zeros((4,2))
        a * b  # must not crash when multiplying masked and plain arrays
        b * a
def test_masked_array_repeat(self, level=rlevel):
"""Ticket #271"""
np.ma.array([1],mask=False).repeat(10)
def test_masked_array_repr_unicode(self):
"""Ticket #1256"""
repr(np.ma.array(u"Unicode"))
|
chriskiehl/Gooey
|
refs/heads/master
|
gooey/gui/components/menubar.py
|
2
|
import webbrowser
from functools import partial
import wx
from gooey.gui import three_to_four
class MenuBar(wx.MenuBar):
"""
    Wx.MenuBar handles converting the user's list of Menu Groups into
    concrete wx.Menu instances.
"""
def __init__(self, buildSpec, *args, **kwargs):
        super(MenuBar, self).__init__(*args, **kwargs)
self.buildSpec = buildSpec
self.makeMenuItems(buildSpec.get('menu', []))
def makeMenuItems(self, menuGroups):
"""
Assign the menu groups list to wx.Menu instances
and bind the appropriate handlers.
"""
for menuGroup in menuGroups:
menu = wx.Menu()
for item in menuGroup.get('items'):
option = menu.Append(wx.NewId(), item.get('menuTitle', ''))
self.Bind(wx.EVT_MENU, self.handleMenuAction(item), option)
self.Append(menu, '&' + menuGroup.get('name'))
def handleMenuAction(self, item):
"""
Dispatch based on the value of the type field.
"""
handlers = {
'Link': self.openBrowser,
'AboutDialog': self.spawnAboutDialog,
'MessageDialog': self.spawnMessageDialog
}
f = handlers[item['type']]
return partial(f, item)
def openBrowser(self, item, *args, **kwargs):
"""
Open the supplied URL in the user's default browser.
"""
webbrowser.open(item.get('url'))
def spawnMessageDialog(self, item, *args, **kwargs):
"""
Show a simple message dialog with the user's message and caption.
"""
wx.MessageDialog(self, item.get('message', ''),
caption=item.get('caption', '')).ShowModal()
def spawnAboutDialog(self, item, *args, **kwargs):
"""
Fill the wx.AboutBox with any relevant info the user provided
and launch the dialog
"""
aboutOptions = {
'name': 'SetName',
'version': 'SetVersion',
'description': 'SetDescription',
'copyright': 'SetCopyright',
'website': 'SetWebSite',
'developer': 'AddDeveloper',
'license': 'SetLicense'
}
about = three_to_four.AboutDialog()
for field, method in aboutOptions.items():
if field in item:
getattr(about, method)(item[field])
three_to_four.AboutBox(about)
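# Illustrative shape of the 'menu' entry of a buildSpec consumed above
# (keys inferred from the handlers in this class; all values are
# hypothetical):
#
#     'menu': [{
#         'name': 'Help',
#         'items': [
#             {'type': 'Link', 'menuTitle': 'Documentation',
#              'url': 'https://github.com/chriskiehl/Gooey'},
#             {'type': 'AboutDialog', 'menuTitle': 'About',
#              'name': 'My App', 'version': '1.0'},
#         ],
#     }]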
|
isyippee/nova
|
refs/heads/master
|
nova/db/sqlalchemy/api_models.py
|
43
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_db.sqlalchemy import models
from sqlalchemy import Column
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import ForeignKey
from sqlalchemy import Index
from sqlalchemy import Integer
from sqlalchemy import schema
from sqlalchemy import String
from sqlalchemy import Text
class _NovaAPIBase(models.ModelBase, models.TimestampMixin):
pass
API_BASE = declarative_base(cls=_NovaAPIBase)
class CellMapping(API_BASE):
"""Contains information on communicating with a cell"""
__tablename__ = 'cell_mappings'
__table_args__ = (Index('uuid_idx', 'uuid'),
schema.UniqueConstraint('uuid',
name='uniq_cell_mappings0uuid'))
id = Column(Integer, primary_key=True)
uuid = Column(String(36), nullable=False)
name = Column(String(255))
transport_url = Column(Text())
database_connection = Column(Text())
class InstanceMapping(API_BASE):
"""Contains the mapping of an instance to which cell it is in"""
__tablename__ = 'instance_mappings'
__table_args__ = (Index('project_id_idx', 'project_id'),
Index('instance_uuid_idx', 'instance_uuid'),
schema.UniqueConstraint('instance_uuid',
name='uniq_instance_mappings0instance_uuid'))
id = Column(Integer, primary_key=True)
instance_uuid = Column(String(36), nullable=False)
cell_id = Column(Integer, ForeignKey('cell_mappings.id'),
nullable=False)
project_id = Column(String(255), nullable=False)
class HostMapping(API_BASE):
"""Contains mapping of a compute host to which cell it is in"""
__tablename__ = "host_mappings"
__table_args__ = (Index('host_idx', 'host'),
schema.UniqueConstraint('host',
name='uniq_host_mappings0host'))
id = Column(Integer, primary_key=True)
cell_id = Column(Integer, ForeignKey('cell_mappings.id'),
nullable=False)
host = Column(String(255), nullable=False)
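# Minimal sketch (not part of the module): materialize these API-database
# tables against a throwaway engine. The in-memory SQLite URL is only an
# example; real deployments point this at the nova_api database.
#
#     from sqlalchemy import create_engine
#     engine = create_engine('sqlite://')
#     API_BASE.metadata.create_all(engine)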
|
SeungGiJeong/SK_FastIR
|
refs/heads/master
|
filecatcher/windows2008ServerFiles.py
|
1
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from fileCatcher import _FileCatcher
from utils.vss import _VSS
class Windows2008ServerFiles(_FileCatcher):
def __init__(self, params):
super(Windows2008ServerFiles, self).__init__(params)
drive, p = os.path.splitdrive(self.systemroot)
self.vss = _VSS._get_instance(params, drive)
def _changeroot(self, dir):
drive, p = os.path.splitdrive(dir)
path_return = self.vss._return_root() + p
return path_return
def csv_print_infos_files(self):
super(Windows2008ServerFiles, self)._csv_infos_fs(self._list_files())
|
apache/incubator-superset
|
refs/heads/master
|
superset/annotation_layers/commands/delete.py
|
3
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
from typing import Optional
from flask_appbuilder.models.sqla import Model
from flask_appbuilder.security.sqla.models import User
from superset.annotation_layers.commands.exceptions import (
AnnotationLayerDeleteFailedError,
AnnotationLayerDeleteIntegrityError,
AnnotationLayerNotFoundError,
)
from superset.annotation_layers.dao import AnnotationLayerDAO
from superset.commands.base import BaseCommand
from superset.dao.exceptions import DAODeleteFailedError
from superset.models.annotations import AnnotationLayer
logger = logging.getLogger(__name__)
class DeleteAnnotationLayerCommand(BaseCommand):
def __init__(self, user: User, model_id: int):
self._actor = user
self._model_id = model_id
self._model: Optional[AnnotationLayer] = None
def run(self) -> Model:
self.validate()
try:
annotation_layer = AnnotationLayerDAO.delete(self._model)
except DAODeleteFailedError as ex:
logger.exception(ex.exception)
            raise AnnotationLayerDeleteFailedError() from ex
return annotation_layer
def validate(self) -> None:
# Validate/populate model exists
self._model = AnnotationLayerDAO.find_by_id(self._model_id)
if not self._model:
raise AnnotationLayerNotFoundError()
if AnnotationLayerDAO.has_annotations(self._model.id):
raise AnnotationLayerDeleteIntegrityError()
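# Hedged usage sketch -- how a caller might drive the command (the user
# object and model id are placeholders; the exceptions are the ones the
# command raises above):
#
#     try:
#         DeleteAnnotationLayerCommand(current_user, 42).run()
#     except AnnotationLayerNotFoundError:
#         ...  # respond 404
#     except AnnotationLayerDeleteIntegrityError:
#         ...  # layer still has annotations attached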
|
Evil-Green/Lonas_KL-GT-I9300
|
refs/heads/master
|
tools/perf/scripts/python/netdev-times.py
|
11271
|
# Display the flow of packets and the time spent processing them.
# It helps us investigate networking and network-device behavior.
#
# options
# tx: show only tx chart
# rx: show only rx chart
# dev=: show only thing related to specified device
# debug: work with debug mode. It shows buffer status.
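#
# Illustrative invocation (hedged: the exact commands depend on how perf
# was built/installed, but in-tree python scripts are typically driven as):
#   perf script record netdev-times
#   perf script report netdev-times tx rx dev=eth0 debug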
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
all_event_list = [] # all tracepoint events related to this script
irq_dic = {} # key is cpu and value is a list which stacks irqs
             # which raise NET_RX softirq
net_rx_dic = {} # key is cpu and value includes time of NET_RX softirq-entry
                # and a list which stacks receive
receive_hunk_list = [] # a list which includes a sequence of receive events
rx_skb_list = [] # received packet list for matching
                 # skb_copy_datagram_iovec
buffer_budget = 65536 # the budget of rx_skb_list, tx_queue_list and
                      # tx_xmit_list
of_count_rx_skb_list = 0 # overflow count
tx_queue_list = [] # list of packets which pass through dev_queue_xmit
of_count_tx_queue_list = 0 # overflow count
tx_xmit_list = [] # list of packets which pass through dev_hard_start_xmit
of_count_tx_xmit_list = 0 # overflow count
tx_free_list = [] # list of packets which are freed
# options
show_tx = 0
show_rx = 0
dev = 0 # store the name of the device specified by option "dev="
debug = 0
# indices of event_info tuple
EINFO_IDX_NAME    = 0
EINFO_IDX_CONTEXT = 1
EINFO_IDX_CPU     = 2
EINFO_IDX_TIME    = 3
EINFO_IDX_PID     = 4
EINFO_IDX_COMM    = 5
# Calculate a time interval(msec) from src(nsec) to dst(nsec)
def diff_msec(src, dst):
return (dst - src) / 1000000.0
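# e.g. diff_msec(1000000, 2500000) -> 1.5  (nanosecond timestamps in,
# a millisecond interval out)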
# Display a process of transmitting a packet
def print_transmit(hunk):
if dev != 0 and hunk['dev'].find(dev) < 0:
return
print "%7s %5d %6d.%06dsec %12.3fmsec %12.3fmsec" % \
(hunk['dev'], hunk['len'],
nsecs_secs(hunk['queue_t']),
nsecs_nsecs(hunk['queue_t'])/1000,
diff_msec(hunk['queue_t'], hunk['xmit_t']),
diff_msec(hunk['xmit_t'], hunk['free_t']))
# Format for displaying rx packet processing
PF_IRQ_ENTRY= " irq_entry(+%.3fmsec irq=%d:%s)"
PF_SOFT_ENTRY=" softirq_entry(+%.3fmsec)"
PF_NAPI_POLL= " napi_poll_exit(+%.3fmsec %s)"
PF_JOINT= " |"
PF_WJOINT= " | |"
PF_NET_RECV= " |---netif_receive_skb(+%.3fmsec skb=%x len=%d)"
PF_NET_RX= " |---netif_rx(+%.3fmsec skb=%x)"
PF_CPY_DGRAM= " | skb_copy_datagram_iovec(+%.3fmsec %d:%s)"
PF_KFREE_SKB= " | kfree_skb(+%.3fmsec location=%x)"
PF_CONS_SKB= " | consume_skb(+%.3fmsec)"
# Display a process of received packets and interrupts associated with
# a NET_RX softirq
def print_receive(hunk):
show_hunk = 0
irq_list = hunk['irq_list']
cpu = irq_list[0]['cpu']
base_t = irq_list[0]['irq_ent_t']
# check if this hunk should be showed
if dev != 0:
for i in range(len(irq_list)):
if irq_list[i]['name'].find(dev) >= 0:
show_hunk = 1
break
else:
show_hunk = 1
if show_hunk == 0:
return
print "%d.%06dsec cpu=%d" % \
(nsecs_secs(base_t), nsecs_nsecs(base_t)/1000, cpu)
for i in range(len(irq_list)):
print PF_IRQ_ENTRY % \
(diff_msec(base_t, irq_list[i]['irq_ent_t']),
irq_list[i]['irq'], irq_list[i]['name'])
print PF_JOINT
irq_event_list = irq_list[i]['event_list']
for j in range(len(irq_event_list)):
irq_event = irq_event_list[j]
if irq_event['event'] == 'netif_rx':
print PF_NET_RX % \
(diff_msec(base_t, irq_event['time']),
irq_event['skbaddr'])
print PF_JOINT
print PF_SOFT_ENTRY % \
diff_msec(base_t, hunk['sirq_ent_t'])
print PF_JOINT
event_list = hunk['event_list']
for i in range(len(event_list)):
event = event_list[i]
if event['event_name'] == 'napi_poll':
print PF_NAPI_POLL % \
(diff_msec(base_t, event['event_t']), event['dev'])
if i == len(event_list) - 1:
print ""
else:
print PF_JOINT
else:
print PF_NET_RECV % \
(diff_msec(base_t, event['event_t']), event['skbaddr'],
event['len'])
if 'comm' in event.keys():
print PF_WJOINT
print PF_CPY_DGRAM % \
(diff_msec(base_t, event['comm_t']),
event['pid'], event['comm'])
elif 'handle' in event.keys():
print PF_WJOINT
if event['handle'] == "kfree_skb":
print PF_KFREE_SKB % \
(diff_msec(base_t,
event['comm_t']),
event['location'])
elif event['handle'] == "consume_skb":
print PF_CONS_SKB % \
diff_msec(base_t,
event['comm_t'])
print PF_JOINT
def trace_begin():
global show_tx
global show_rx
global dev
global debug
for i in range(len(sys.argv)):
if i == 0:
continue
arg = sys.argv[i]
if arg == 'tx':
show_tx = 1
elif arg =='rx':
show_rx = 1
elif arg.find('dev=',0, 4) >= 0:
dev = arg[4:]
elif arg == 'debug':
debug = 1
if show_tx == 0 and show_rx == 0:
show_tx = 1
show_rx = 1
def trace_end():
# order all events in time
all_event_list.sort(lambda a,b :cmp(a[EINFO_IDX_TIME],
b[EINFO_IDX_TIME]))
# process all events
for i in range(len(all_event_list)):
event_info = all_event_list[i]
name = event_info[EINFO_IDX_NAME]
if name == 'irq__softirq_exit':
handle_irq_softirq_exit(event_info)
elif name == 'irq__softirq_entry':
handle_irq_softirq_entry(event_info)
elif name == 'irq__softirq_raise':
handle_irq_softirq_raise(event_info)
elif name == 'irq__irq_handler_entry':
handle_irq_handler_entry(event_info)
elif name == 'irq__irq_handler_exit':
handle_irq_handler_exit(event_info)
elif name == 'napi__napi_poll':
handle_napi_poll(event_info)
elif name == 'net__netif_receive_skb':
handle_netif_receive_skb(event_info)
elif name == 'net__netif_rx':
handle_netif_rx(event_info)
elif name == 'skb__skb_copy_datagram_iovec':
handle_skb_copy_datagram_iovec(event_info)
elif name == 'net__net_dev_queue':
handle_net_dev_queue(event_info)
elif name == 'net__net_dev_xmit':
handle_net_dev_xmit(event_info)
elif name == 'skb__kfree_skb':
handle_kfree_skb(event_info)
elif name == 'skb__consume_skb':
handle_consume_skb(event_info)
# display receive hunks
if show_rx:
for i in range(len(receive_hunk_list)):
print_receive(receive_hunk_list[i])
# display transmit hunks
if show_tx:
print " dev len Qdisc " \
" netdevice free"
for i in range(len(tx_free_list)):
print_transmit(tx_free_list[i])
if debug:
print "debug buffer status"
print "----------------------------"
print "xmit Qdisc:remain:%d overflow:%d" % \
(len(tx_queue_list), of_count_tx_queue_list)
print "xmit netdevice:remain:%d overflow:%d" % \
(len(tx_xmit_list), of_count_tx_xmit_list)
print "receive:remain:%d overflow:%d" % \
(len(rx_skb_list), of_count_rx_skb_list)
# called from perf, when it finds a corresponding event
def irq__softirq_entry(name, context, cpu, sec, nsec, pid, comm, vec):
if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
return
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
all_event_list.append(event_info)
def irq__softirq_exit(name, context, cpu, sec, nsec, pid, comm, vec):
if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
return
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
all_event_list.append(event_info)
def irq__softirq_raise(name, context, cpu, sec, nsec, pid, comm, vec):
if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
return
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
all_event_list.append(event_info)
def irq__irq_handler_entry(name, context, cpu, sec, nsec, pid, comm,
irq, irq_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
irq, irq_name)
all_event_list.append(event_info)
def irq__irq_handler_exit(name, context, cpu, sec, nsec, pid, comm, irq, ret):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, irq, ret)
all_event_list.append(event_info)
def napi__napi_poll(name, context, cpu, sec, nsec, pid, comm, napi, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
napi, dev_name)
all_event_list.append(event_info)
def net__netif_receive_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr,
skblen, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen, dev_name)
all_event_list.append(event_info)
def net__netif_rx(name, context, cpu, sec, nsec, pid, comm, skbaddr,
skblen, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen, dev_name)
all_event_list.append(event_info)
def net__net_dev_queue(name, context, cpu, sec, nsec, pid, comm,
skbaddr, skblen, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen, dev_name)
all_event_list.append(event_info)
def net__net_dev_xmit(name, context, cpu, sec, nsec, pid, comm,
skbaddr, skblen, rc, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen, rc ,dev_name)
all_event_list.append(event_info)
def skb__kfree_skb(name, context, cpu, sec, nsec, pid, comm,
skbaddr, protocol, location):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, protocol, location)
all_event_list.append(event_info)
def skb__consume_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr)
all_event_list.append(event_info)
def skb__skb_copy_datagram_iovec(name, context, cpu, sec, nsec, pid, comm,
skbaddr, skblen):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen)
all_event_list.append(event_info)
def handle_irq_handler_entry(event_info):
(name, context, cpu, time, pid, comm, irq, irq_name) = event_info
if cpu not in irq_dic.keys():
irq_dic[cpu] = []
irq_record = {'irq':irq, 'name':irq_name, 'cpu':cpu, 'irq_ent_t':time}
irq_dic[cpu].append(irq_record)
def handle_irq_handler_exit(event_info):
(name, context, cpu, time, pid, comm, irq, ret) = event_info
if cpu not in irq_dic.keys():
return
irq_record = irq_dic[cpu].pop()
if irq != irq_record['irq']:
return
irq_record.update({'irq_ext_t':time})
# if an irq doesn't include NET_RX softirq, drop.
if 'event_list' in irq_record.keys():
irq_dic[cpu].append(irq_record)
def handle_irq_softirq_raise(event_info):
(name, context, cpu, time, pid, comm, vec) = event_info
if cpu not in irq_dic.keys() \
or len(irq_dic[cpu]) == 0:
return
irq_record = irq_dic[cpu].pop()
if 'event_list' in irq_record.keys():
irq_event_list = irq_record['event_list']
else:
irq_event_list = []
irq_event_list.append({'time':time, 'event':'sirq_raise'})
irq_record.update({'event_list':irq_event_list})
irq_dic[cpu].append(irq_record)
def handle_irq_softirq_entry(event_info):
(name, context, cpu, time, pid, comm, vec) = event_info
net_rx_dic[cpu] = {'sirq_ent_t':time, 'event_list':[]}
def handle_irq_softirq_exit(event_info):
(name, context, cpu, time, pid, comm, vec) = event_info
irq_list = []
event_list = 0
if cpu in irq_dic.keys():
irq_list = irq_dic[cpu]
del irq_dic[cpu]
if cpu in net_rx_dic.keys():
sirq_ent_t = net_rx_dic[cpu]['sirq_ent_t']
event_list = net_rx_dic[cpu]['event_list']
del net_rx_dic[cpu]
if irq_list == [] or event_list == 0:
return
rec_data = {'sirq_ent_t':sirq_ent_t, 'sirq_ext_t':time,
'irq_list':irq_list, 'event_list':event_list}
	# merge information related to a NET_RX softirq
receive_hunk_list.append(rec_data)
def handle_napi_poll(event_info):
(name, context, cpu, time, pid, comm, napi, dev_name) = event_info
if cpu in net_rx_dic.keys():
event_list = net_rx_dic[cpu]['event_list']
rec_data = {'event_name':'napi_poll',
'dev':dev_name, 'event_t':time}
event_list.append(rec_data)
def handle_netif_rx(event_info):
(name, context, cpu, time, pid, comm,
skbaddr, skblen, dev_name) = event_info
if cpu not in irq_dic.keys() \
or len(irq_dic[cpu]) == 0:
return
irq_record = irq_dic[cpu].pop()
if 'event_list' in irq_record.keys():
irq_event_list = irq_record['event_list']
else:
irq_event_list = []
irq_event_list.append({'time':time, 'event':'netif_rx',
'skbaddr':skbaddr, 'skblen':skblen, 'dev_name':dev_name})
irq_record.update({'event_list':irq_event_list})
irq_dic[cpu].append(irq_record)
def handle_netif_receive_skb(event_info):
global of_count_rx_skb_list
(name, context, cpu, time, pid, comm,
skbaddr, skblen, dev_name) = event_info
if cpu in net_rx_dic.keys():
rec_data = {'event_name':'netif_receive_skb',
'event_t':time, 'skbaddr':skbaddr, 'len':skblen}
event_list = net_rx_dic[cpu]['event_list']
event_list.append(rec_data)
rx_skb_list.insert(0, rec_data)
if len(rx_skb_list) > buffer_budget:
rx_skb_list.pop()
of_count_rx_skb_list += 1
def handle_net_dev_queue(event_info):
global of_count_tx_queue_list
(name, context, cpu, time, pid, comm,
skbaddr, skblen, dev_name) = event_info
skb = {'dev':dev_name, 'skbaddr':skbaddr, 'len':skblen, 'queue_t':time}
tx_queue_list.insert(0, skb)
if len(tx_queue_list) > buffer_budget:
tx_queue_list.pop()
of_count_tx_queue_list += 1
def handle_net_dev_xmit(event_info):
global of_count_tx_xmit_list
(name, context, cpu, time, pid, comm,
skbaddr, skblen, rc, dev_name) = event_info
if rc == 0: # NETDEV_TX_OK
for i in range(len(tx_queue_list)):
skb = tx_queue_list[i]
if skb['skbaddr'] == skbaddr:
skb['xmit_t'] = time
tx_xmit_list.insert(0, skb)
del tx_queue_list[i]
if len(tx_xmit_list) > buffer_budget:
tx_xmit_list.pop()
of_count_tx_xmit_list += 1
return
def handle_kfree_skb(event_info):
(name, context, cpu, time, pid, comm,
skbaddr, protocol, location) = event_info
for i in range(len(tx_queue_list)):
skb = tx_queue_list[i]
if skb['skbaddr'] == skbaddr:
del tx_queue_list[i]
return
for i in range(len(tx_xmit_list)):
skb = tx_xmit_list[i]
if skb['skbaddr'] == skbaddr:
skb['free_t'] = time
tx_free_list.append(skb)
del tx_xmit_list[i]
return
for i in range(len(rx_skb_list)):
rec_data = rx_skb_list[i]
if rec_data['skbaddr'] == skbaddr:
rec_data.update({'handle':"kfree_skb",
'comm':comm, 'pid':pid, 'comm_t':time})
del rx_skb_list[i]
return
def handle_consume_skb(event_info):
(name, context, cpu, time, pid, comm, skbaddr) = event_info
for i in range(len(tx_xmit_list)):
skb = tx_xmit_list[i]
if skb['skbaddr'] == skbaddr:
skb['free_t'] = time
tx_free_list.append(skb)
del tx_xmit_list[i]
return
def handle_skb_copy_datagram_iovec(event_info):
(name, context, cpu, time, pid, comm, skbaddr, skblen) = event_info
for i in range(len(rx_skb_list)):
rec_data = rx_skb_list[i]
if skbaddr == rec_data['skbaddr']:
rec_data.update({'handle':"skb_copy_datagram_iovec",
'comm':comm, 'pid':pid, 'comm_t':time})
del rx_skb_list[i]
return
|
karger/nbproject
|
refs/heads/master
|
apps/base/forms.py
|
5
|
from django.forms import Form, ModelForm, ValidationError
from base import models as M
from django.forms.fields import CharField
from django.forms.widgets import PasswordInput
class EnterYourNameUserForm(ModelForm):
def clean_firstname(self):
data = self.cleaned_data["firstname"].strip()
if data == "":
raise ValidationError("First name can't be empty")
return data
def clean_lastname(self):
data = self.cleaned_data["lastname"].strip()
if data == "":
raise ValidationError("Last name can't be empty")
return data
class Meta:
model = M.User
fields = ("firstname", "lastname")
class UserForm(ModelForm):
confirm_password = CharField(widget=PasswordInput())
def clean_email(self):
return self.cleaned_data["email"].strip().lower()
def clean(self):
super(UserForm, self).clean()
cleaned_data = self.cleaned_data
if cleaned_data.get("password") != cleaned_data.get("confirm_password"):
raise ValidationError("passwords don't match")
return cleaned_data
def save(self, force_insert=False, force_update=False, commit=True):
m = super(UserForm, self).save(commit=False)
m.password = None
m.set_password(self.cleaned_data.get("password"))
if commit:
m.save()
return m
class Meta:
model = M.User
fields = ( "firstname", "lastname", "email", "password", "confirm_password")
widgets = {
'password': PasswordInput(),
}
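# Illustrative check of the validation rules above (all field values are
# placeholders): the email is stripped/lowercased and mismatched passwords
# fail clean().
#
#     form = UserForm(data={'firstname': 'Ada', 'lastname': 'Lovelace',
#                           'email': ' Ada@Example.COM ',
#                           'password': 's3cret',
#                           'confirm_password': 'other'})
#     form.is_valid()   # -> False ("passwords don't match")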
class EnsembleForm(ModelForm):
class Meta:
model = M.Ensemble
exclude = ("invitekey",)
class Html5Form(Form):
title = CharField(max_length=256)
url = CharField(max_length=1024)
class YoutubeForm(Form):
#title = CharField(max_length=256)
url = CharField(max_length=1024)
class SourceForm(ModelForm):
class Meta:
model = M.Source
exclude = ("numpages", "w", "h", "rotation", "version", "type", "submittedby")
class HTML5InfoForm(ModelForm):
class Meta:
model = M.HTML5Info
exclude = ("source",)
#class EditEnsembleForm(ModelForm):
# class Meta:
# model = M.Ensemble
# exclude = ("invitekey")
|
hybrideagle/django
|
refs/heads/master
|
django/db/backends/base/schema.py
|
339
|
import hashlib
import logging
from django.db.backends.utils import truncate_name
from django.db.transaction import atomic
from django.utils import six
from django.utils.encoding import force_bytes
logger = logging.getLogger('django.db.backends.schema')
def _related_non_m2m_objects(old_field, new_field):
# Filters out m2m objects from reverse relations.
# Returns (old_relation, new_relation) tuples.
return zip(
(obj for obj in old_field.model._meta.related_objects if not obj.field.many_to_many),
(obj for obj in new_field.model._meta.related_objects if not obj.field.many_to_many)
)
class BaseDatabaseSchemaEditor(object):
"""
    This class (and its subclasses) is responsible for emitting schema-changing
    statements to the databases - model creation/removal/alteration, field
    renaming, index fiddling, and so on.
    It is intended to eventually completely replace DatabaseCreation.
    This class should be used by creating an instance for each set of schema
    changes (e.g. a migration file) and using it as a context manager: the
    relevant actions run inside the ``with`` block, and deferred statements
    (such as FK constraints) execute on exit. This is necessary to allow
    things like circular foreign key references - FKs are only created once
    the block exits successfully.
"""
# Overrideable SQL templates
sql_create_table = "CREATE TABLE %(table)s (%(definition)s)"
sql_rename_table = "ALTER TABLE %(old_table)s RENAME TO %(new_table)s"
sql_retablespace_table = "ALTER TABLE %(table)s SET TABLESPACE %(new_tablespace)s"
sql_delete_table = "DROP TABLE %(table)s CASCADE"
sql_create_column = "ALTER TABLE %(table)s ADD COLUMN %(column)s %(definition)s"
sql_alter_column = "ALTER TABLE %(table)s %(changes)s"
sql_alter_column_type = "ALTER COLUMN %(column)s TYPE %(type)s"
sql_alter_column_null = "ALTER COLUMN %(column)s DROP NOT NULL"
sql_alter_column_not_null = "ALTER COLUMN %(column)s SET NOT NULL"
sql_alter_column_default = "ALTER COLUMN %(column)s SET DEFAULT %(default)s"
sql_alter_column_no_default = "ALTER COLUMN %(column)s DROP DEFAULT"
sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s CASCADE"
sql_rename_column = "ALTER TABLE %(table)s RENAME COLUMN %(old_column)s TO %(new_column)s"
sql_update_with_default = "UPDATE %(table)s SET %(column)s = %(default)s WHERE %(column)s IS NULL"
sql_create_check = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s CHECK (%(check)s)"
sql_delete_check = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s"
sql_create_unique = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s UNIQUE (%(columns)s)"
sql_delete_unique = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s"
sql_create_fk = (
"ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) "
"REFERENCES %(to_table)s (%(to_column)s) DEFERRABLE INITIALLY DEFERRED"
)
sql_create_inline_fk = None
sql_delete_fk = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s"
sql_create_index = "CREATE INDEX %(name)s ON %(table)s (%(columns)s)%(extra)s"
sql_delete_index = "DROP INDEX %(name)s"
sql_create_pk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s PRIMARY KEY (%(columns)s)"
sql_delete_pk = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s"
def __init__(self, connection, collect_sql=False):
self.connection = connection
self.collect_sql = collect_sql
if self.collect_sql:
self.collected_sql = []
# State-managing methods
def __enter__(self):
self.deferred_sql = []
if self.connection.features.can_rollback_ddl:
self.atomic = atomic(self.connection.alias)
self.atomic.__enter__()
return self
def __exit__(self, exc_type, exc_value, traceback):
if exc_type is None:
for sql in self.deferred_sql:
self.execute(sql)
if self.connection.features.can_rollback_ddl:
self.atomic.__exit__(exc_type, exc_value, traceback)
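    # Hedged usage sketch: a concrete backend's editor is normally obtained
    # from the connection and used as a context manager, so deferred SQL
    # (e.g. FK constraints) runs on exit. ``MyModel`` is a placeholder.
    #
    #     with connection.schema_editor() as editor:
    #         editor.create_model(MyModel)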
# Core utility functions
def execute(self, sql, params=[]):
"""
Executes the given SQL statement, with optional parameters.
"""
# Log the command we're running, then run it
logger.debug("%s; (params %r)" % (sql, params))
if self.collect_sql:
ending = "" if sql.endswith(";") else ";"
if params is not None:
self.collected_sql.append((sql % tuple(map(self.quote_value, params))) + ending)
else:
self.collected_sql.append(sql + ending)
else:
with self.connection.cursor() as cursor:
cursor.execute(sql, params)
def quote_name(self, name):
return self.connection.ops.quote_name(name)
@classmethod
def _digest(cls, *args):
"""
Generates a 32-bit digest of a set of arguments that can be used to
shorten identifying names.
"""
h = hashlib.md5()
for arg in args:
h.update(force_bytes(arg))
return h.hexdigest()[:8]
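    # e.g. cls._digest('myapp_book', 'author_id') yields the first 8 hex
    # characters of an MD5 over both arguments -- enough entropy to keep
    # generated index/constraint names unique once truncated.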
# Field <-> database mapping functions
def column_sql(self, model, field, include_default=False):
"""
Takes a field and returns its column definition.
The field must already have had set_attributes_from_name called.
"""
# Get the column's type and use that as the basis of the SQL
db_params = field.db_parameters(connection=self.connection)
sql = db_params['type']
params = []
# Check for fields that aren't actually columns (e.g. M2M)
if sql is None:
return None, None
# Work out nullability
null = field.null
# If we were told to include a default value, do so
include_default = include_default and not self.skip_default(field)
if include_default:
default_value = self.effective_default(field)
if default_value is not None:
if self.connection.features.requires_literal_defaults:
# Some databases can't take defaults as a parameter (oracle)
# If this is the case, the individual schema backend should
# implement prepare_default
sql += " DEFAULT %s" % self.prepare_default(default_value)
else:
sql += " DEFAULT %s"
params += [default_value]
# Oracle treats the empty string ('') as null, so coerce the null
# option whenever '' is a possible value.
if (field.empty_strings_allowed and not field.primary_key and
self.connection.features.interprets_empty_strings_as_nulls):
null = True
if null and not self.connection.features.implied_column_null:
sql += " NULL"
elif not null:
sql += " NOT NULL"
# Primary key/unique outputs
if field.primary_key:
sql += " PRIMARY KEY"
elif field.unique:
sql += " UNIQUE"
# Optionally add the tablespace if it's an implicitly indexed column
tablespace = field.db_tablespace or model._meta.db_tablespace
if tablespace and self.connection.features.supports_tablespaces and field.unique:
sql += " %s" % self.connection.ops.tablespace_sql(tablespace, inline=True)
# Return the sql
return sql, params
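    # Illustrative (backend-dependent) return value for a unique, non-null
    # CharField(max_length=100) with no default:
    #     ("varchar(100) NOT NULL UNIQUE", [])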
def skip_default(self, field):
"""
        Some backends don't accept default values for certain column types
        (e.g. MySQL longtext and longblob).
"""
return False
def prepare_default(self, value):
"""
        Only used for backends which have the requires_literal_defaults feature.
"""
raise NotImplementedError(
'subclasses of BaseDatabaseSchemaEditor for backends which have '
'requires_literal_defaults must provide a prepare_default() method'
)
def effective_default(self, field):
"""
Returns a field's effective database default value
"""
if field.has_default():
default = field.get_default()
elif not field.null and field.blank and field.empty_strings_allowed:
if field.get_internal_type() == "BinaryField":
default = six.binary_type()
else:
default = six.text_type()
else:
default = None
# If it's a callable, call it
if six.callable(default):
default = default()
# Run it through the field's get_db_prep_save method so we can send it
# to the database.
default = field.get_db_prep_save(default, self.connection)
return default
def quote_value(self, value):
"""
Returns a quoted version of the value so it's safe to use in an SQL
string. This is not safe against injection from user code; it is
intended only for use in making SQL scripts or preparing default values
for particularly tricky backends (defaults are not user-defined, though,
so this is safe).
"""
raise NotImplementedError()
# Actions
def create_model(self, model):
"""
Takes a model and creates a table for it in the database.
Will also create any accompanying indexes or unique constraints.
"""
# Create column SQL, add FK deferreds if needed
column_sqls = []
params = []
for field in model._meta.local_fields:
# SQL
definition, extra_params = self.column_sql(model, field)
if definition is None:
continue
# Check constraints can go on the column SQL here
db_params = field.db_parameters(connection=self.connection)
if db_params['check']:
definition += " CHECK (%s)" % db_params['check']
# Autoincrement SQL (for backends with inline variant)
col_type_suffix = field.db_type_suffix(connection=self.connection)
if col_type_suffix:
definition += " %s" % col_type_suffix
params.extend(extra_params)
# FK
if field.remote_field and field.db_constraint:
to_table = field.remote_field.model._meta.db_table
to_column = field.remote_field.model._meta.get_field(field.remote_field.field_name).column
if self.connection.features.supports_foreign_keys:
self.deferred_sql.append(self._create_fk_sql(model, field, "_fk_%(to_table)s_%(to_column)s"))
elif self.sql_create_inline_fk:
definition += " " + self.sql_create_inline_fk % {
"to_table": self.quote_name(to_table),
"to_column": self.quote_name(to_column),
}
# Add the SQL to our big list
column_sqls.append("%s %s" % (
self.quote_name(field.column),
definition,
))
# Autoincrement SQL (for backends with post table definition variant)
if field.get_internal_type() == "AutoField":
autoinc_sql = self.connection.ops.autoinc_sql(model._meta.db_table, field.column)
if autoinc_sql:
self.deferred_sql.extend(autoinc_sql)
# Add any unique_togethers (always deferred, as some fields might be
# created afterwards, like geometry fields with some backends)
for fields in model._meta.unique_together:
columns = [model._meta.get_field(field).column for field in fields]
self.deferred_sql.append(self._create_unique_sql(model, columns))
# Make the table
sql = self.sql_create_table % {
"table": self.quote_name(model._meta.db_table),
"definition": ", ".join(column_sqls)
}
if model._meta.db_tablespace:
tablespace_sql = self.connection.ops.tablespace_sql(model._meta.db_tablespace)
if tablespace_sql:
sql += ' ' + tablespace_sql
        # Prevent using [] as params, in case a literal '%' is used in the definition
self.execute(sql, params or None)
# Add any field index and index_together's (deferred as SQLite3 _remake_table needs it)
self.deferred_sql.extend(self._model_indexes_sql(model))
# Make M2M tables
for field in model._meta.local_many_to_many:
if field.remote_field.through._meta.auto_created:
self.create_model(field.remote_field.through)
def delete_model(self, model):
"""
Deletes a model from the database.
"""
# Handle auto-created intermediary models
for field in model._meta.local_many_to_many:
if field.remote_field.through._meta.auto_created:
self.delete_model(field.remote_field.through)
# Delete the table
self.execute(self.sql_delete_table % {
"table": self.quote_name(model._meta.db_table),
})
def alter_unique_together(self, model, old_unique_together, new_unique_together):
"""
Deals with a model changing its unique_together.
Note: The input unique_togethers must be doubly-nested, not the single-
nested ["foo", "bar"] format.
"""
olds = set(tuple(fields) for fields in old_unique_together)
news = set(tuple(fields) for fields in new_unique_together)
# Deleted uniques
for fields in olds.difference(news):
self._delete_composed_index(model, fields, {'unique': True}, self.sql_delete_unique)
# Created uniques
for fields in news.difference(olds):
columns = [model._meta.get_field(field).column for field in fields]
self.execute(self._create_unique_sql(model, columns))
def alter_index_together(self, model, old_index_together, new_index_together):
"""
Deals with a model changing its index_together.
Note: The input index_togethers must be doubly-nested, not the single-
nested ["foo", "bar"] format.
"""
olds = set(tuple(fields) for fields in old_index_together)
news = set(tuple(fields) for fields in new_index_together)
# Deleted indexes
for fields in olds.difference(news):
self._delete_composed_index(model, fields, {'index': True}, self.sql_delete_index)
# Created indexes
for field_names in news.difference(olds):
fields = [model._meta.get_field(field) for field in field_names]
self.execute(self._create_index_sql(model, fields, suffix="_idx"))
def _delete_composed_index(self, model, fields, constraint_kwargs, sql):
columns = [model._meta.get_field(field).column for field in fields]
constraint_names = self._constraint_names(model, columns, **constraint_kwargs)
if len(constraint_names) != 1:
raise ValueError("Found wrong number (%s) of constraints for %s(%s)" % (
len(constraint_names),
model._meta.db_table,
", ".join(columns),
))
self.execute(self._delete_constraint_sql(sql, model, constraint_names[0]))
def alter_db_table(self, model, old_db_table, new_db_table):
"""
Renames the table a model points to.
"""
if old_db_table == new_db_table:
return
self.execute(self.sql_rename_table % {
"old_table": self.quote_name(old_db_table),
"new_table": self.quote_name(new_db_table),
})
def alter_db_tablespace(self, model, old_db_tablespace, new_db_tablespace):
"""
Moves a model's table between tablespaces
"""
self.execute(self.sql_retablespace_table % {
"table": self.quote_name(model._meta.db_table),
"old_tablespace": self.quote_name(old_db_tablespace),
"new_tablespace": self.quote_name(new_db_tablespace),
})
def add_field(self, model, field):
"""
Creates a field on a model.
Usually involves adding a column, but may involve adding a
table instead (for M2M fields)
"""
# Special-case implicit M2M tables
if field.many_to_many and field.remote_field.through._meta.auto_created:
return self.create_model(field.remote_field.through)
# Get the column's definition
definition, params = self.column_sql(model, field, include_default=True)
# It might not actually have a column behind it
if definition is None:
return
# Check constraints can go on the column SQL here
db_params = field.db_parameters(connection=self.connection)
if db_params['check']:
definition += " CHECK (%s)" % db_params['check']
# Build the SQL and run it
sql = self.sql_create_column % {
"table": self.quote_name(model._meta.db_table),
"column": self.quote_name(field.column),
"definition": definition,
}
self.execute(sql, params)
# Drop the default if we need to
# (Django usually does not use in-database defaults)
if not self.skip_default(field) and field.default is not None:
sql = self.sql_alter_column % {
"table": self.quote_name(model._meta.db_table),
"changes": self.sql_alter_column_no_default % {
"column": self.quote_name(field.column),
}
}
self.execute(sql)
# Add an index, if required
if field.db_index and not field.unique:
self.deferred_sql.append(self._create_index_sql(model, [field]))
# Add any FK constraints later
if field.remote_field and self.connection.features.supports_foreign_keys and field.db_constraint:
self.deferred_sql.append(self._create_fk_sql(model, field, "_fk_%(to_table)s_%(to_column)s"))
# Reset connection if required
if self.connection.features.connection_persists_old_columns:
self.connection.close()
def remove_field(self, model, field):
"""
Removes a field from a model. Usually involves deleting a column,
but for M2Ms may involve deleting a table.
"""
# Special-case implicit M2M tables
if field.many_to_many and field.remote_field.through._meta.auto_created:
return self.delete_model(field.remote_field.through)
# It might not actually have a column behind it
if field.db_parameters(connection=self.connection)['type'] is None:
return
# Drop any FK constraints, MySQL requires explicit deletion
if field.remote_field:
fk_names = self._constraint_names(model, [field.column], foreign_key=True)
for fk_name in fk_names:
self.execute(self._delete_constraint_sql(self.sql_delete_fk, model, fk_name))
# Delete the column
sql = self.sql_delete_column % {
"table": self.quote_name(model._meta.db_table),
"column": self.quote_name(field.column),
}
self.execute(sql)
# Reset connection if required
if self.connection.features.connection_persists_old_columns:
self.connection.close()
def alter_field(self, model, old_field, new_field, strict=False):
"""
        Allows a field's type, uniqueness, nullability, default, column,
        constraints, etc. to be modified.
        Requires a copy of the old field so that only the changes which are
        actually needed are performed.
If strict is true, raises errors if the old column does not match old_field precisely.
"""
# Ensure this field is even column-based
old_db_params = old_field.db_parameters(connection=self.connection)
old_type = old_db_params['type']
new_db_params = new_field.db_parameters(connection=self.connection)
new_type = new_db_params['type']
if ((old_type is None and old_field.remote_field is None) or
(new_type is None and new_field.remote_field is None)):
raise ValueError(
"Cannot alter field %s into %s - they do not properly define "
"db_type (are you using a badly-written custom field?)" %
(old_field, new_field),
)
elif old_type is None and new_type is None and (
old_field.remote_field.through and new_field.remote_field.through and
old_field.remote_field.through._meta.auto_created and
new_field.remote_field.through._meta.auto_created):
return self._alter_many_to_many(model, old_field, new_field, strict)
elif old_type is None and new_type is None and (
old_field.remote_field.through and new_field.remote_field.through and
not old_field.remote_field.through._meta.auto_created and
not new_field.remote_field.through._meta.auto_created):
# Both sides have through models; this is a no-op.
return
elif old_type is None or new_type is None:
raise ValueError(
"Cannot alter field %s into %s - they are not compatible types "
"(you cannot alter to or from M2M fields, or add or remove "
"through= on M2M fields)" % (old_field, new_field)
)
self._alter_field(model, old_field, new_field, old_type, new_type,
old_db_params, new_db_params, strict)
def _alter_field(self, model, old_field, new_field, old_type, new_type,
old_db_params, new_db_params, strict=False):
"""Actually perform a "physical" (non-ManyToMany) field update."""
# Drop any FK constraints, we'll remake them later
fks_dropped = set()
if old_field.remote_field and old_field.db_constraint:
fk_names = self._constraint_names(model, [old_field.column], foreign_key=True)
if strict and len(fk_names) != 1:
raise ValueError("Found wrong number (%s) of foreign key constraints for %s.%s" % (
len(fk_names),
model._meta.db_table,
old_field.column,
))
for fk_name in fk_names:
fks_dropped.add((old_field.column,))
self.execute(self._delete_constraint_sql(self.sql_delete_fk, model, fk_name))
# Has unique been removed?
if old_field.unique and (not new_field.unique or (not old_field.primary_key and new_field.primary_key)):
# Find the unique constraint for this field
constraint_names = self._constraint_names(model, [old_field.column], unique=True)
if strict and len(constraint_names) != 1:
raise ValueError("Found wrong number (%s) of unique constraints for %s.%s" % (
len(constraint_names),
model._meta.db_table,
old_field.column,
))
for constraint_name in constraint_names:
self.execute(self._delete_constraint_sql(self.sql_delete_unique, model, constraint_name))
# Drop incoming FK constraints if we're a primary key and things are going
# to change.
if old_field.primary_key and new_field.primary_key and old_type != new_type:
            # '_meta.related_objects' also contains M2M reverse fields; these
            # will be filtered out
for _old_rel, new_rel in _related_non_m2m_objects(old_field, new_field):
rel_fk_names = self._constraint_names(
new_rel.related_model, [new_rel.field.column], foreign_key=True
)
for fk_name in rel_fk_names:
self.execute(self._delete_constraint_sql(self.sql_delete_fk, new_rel.related_model, fk_name))
# Removed an index? (no strict check, as multiple indexes are possible)
if (old_field.db_index and not new_field.db_index and
not old_field.unique and not
(not new_field.unique and old_field.unique)):
# Find the index for this field
index_names = self._constraint_names(model, [old_field.column], index=True)
for index_name in index_names:
self.execute(self._delete_constraint_sql(self.sql_delete_index, model, index_name))
# Change check constraints?
if old_db_params['check'] != new_db_params['check'] and old_db_params['check']:
constraint_names = self._constraint_names(model, [old_field.column], check=True)
if strict and len(constraint_names) != 1:
raise ValueError("Found wrong number (%s) of check constraints for %s.%s" % (
len(constraint_names),
model._meta.db_table,
old_field.column,
))
for constraint_name in constraint_names:
self.execute(self._delete_constraint_sql(self.sql_delete_check, model, constraint_name))
# Have they renamed the column?
if old_field.column != new_field.column:
self.execute(self._rename_field_sql(model._meta.db_table, old_field, new_field, new_type))
# Next, start accumulating actions to do
actions = []
null_actions = []
post_actions = []
# Type change?
if old_type != new_type:
fragment, other_actions = self._alter_column_type_sql(
model._meta.db_table, old_field, new_field, new_type
)
actions.append(fragment)
post_actions.extend(other_actions)
# When changing a column NULL constraint to NOT NULL with a given
# default value, we need to perform 4 steps:
# 1. Add a default for new incoming writes
# 2. Update existing NULL rows with new default
# 3. Replace NULL constraint with NOT NULL
# 4. Drop the default again.
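        # As an illustration only (exact SQL is backend-specific), for a
        # column "col" on table "tbl" gaining NOT NULL with default 0 the
        # four steps roughly correspond to:
        #   ALTER TABLE "tbl" ALTER COLUMN "col" SET DEFAULT 0;
        #   UPDATE "tbl" SET "col" = 0 WHERE "col" IS NULL;
        #   ALTER TABLE "tbl" ALTER COLUMN "col" SET NOT NULL;
        #   ALTER TABLE "tbl" ALTER COLUMN "col" DROP DEFAULT;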
# Default change?
old_default = self.effective_default(old_field)
new_default = self.effective_default(new_field)
needs_database_default = (
old_default != new_default and
new_default is not None and
not self.skip_default(new_field)
)
if needs_database_default:
if self.connection.features.requires_literal_defaults:
                # Some databases (e.g. Oracle) can't take defaults as a
                # parameter. In that case, the individual schema backend
                # should implement prepare_default().
actions.append((
self.sql_alter_column_default % {
"column": self.quote_name(new_field.column),
"default": self.prepare_default(new_default),
},
[],
))
else:
actions.append((
self.sql_alter_column_default % {
"column": self.quote_name(new_field.column),
"default": "%s",
},
[new_default],
))
# Nullability change?
if old_field.null != new_field.null:
if (self.connection.features.interprets_empty_strings_as_nulls and
new_field.get_internal_type() in ("CharField", "TextField")):
# The field is nullable in the database anyway, leave it alone
pass
elif new_field.null:
null_actions.append((
self.sql_alter_column_null % {
"column": self.quote_name(new_field.column),
"type": new_type,
},
[],
))
else:
null_actions.append((
self.sql_alter_column_not_null % {
"column": self.quote_name(new_field.column),
"type": new_type,
},
[],
))
# Only if we have a default and there is a change from NULL to NOT NULL
four_way_default_alteration = (
new_field.has_default() and
(old_field.null and not new_field.null)
)
if actions or null_actions:
if not four_way_default_alteration:
# If we don't have to do a 4-way default alteration we can
# directly run a (NOT) NULL alteration
actions = actions + null_actions
# Combine actions together if we can (e.g. postgres)
if self.connection.features.supports_combined_alters and actions:
sql, params = tuple(zip(*actions))
actions = [(", ".join(sql), sum(params, []))]
# Apply those actions
for sql, params in actions:
self.execute(
self.sql_alter_column % {
"table": self.quote_name(model._meta.db_table),
"changes": sql,
},
params,
)
if four_way_default_alteration:
# Update existing rows with default value
self.execute(
self.sql_update_with_default % {
"table": self.quote_name(model._meta.db_table),
"column": self.quote_name(new_field.column),
"default": "%s",
},
[new_default],
)
# Since we didn't run a NOT NULL change before we need to do it
# now
for sql, params in null_actions:
self.execute(
self.sql_alter_column % {
"table": self.quote_name(model._meta.db_table),
"changes": sql,
},
params,
)
if post_actions:
for sql, params in post_actions:
self.execute(sql, params)
# Added a unique?
if (not old_field.unique and new_field.unique) or (
old_field.primary_key and not new_field.primary_key and new_field.unique
):
self.execute(self._create_unique_sql(model, [new_field.column]))
# Added an index?
if (not old_field.db_index and new_field.db_index and
not new_field.unique and not
(not old_field.unique and new_field.unique)):
self.execute(self._create_index_sql(model, [new_field], suffix="_uniq"))
# Type alteration on primary key? Then we need to alter the column
# referring to us.
rels_to_update = []
if old_field.primary_key and new_field.primary_key and old_type != new_type:
rels_to_update.extend(_related_non_m2m_objects(old_field, new_field))
# Changed to become primary key?
# Note that we don't detect unsetting of a PK, as we assume another field
# will always come along and replace it.
if not old_field.primary_key and new_field.primary_key:
# First, drop the old PK
constraint_names = self._constraint_names(model, primary_key=True)
if strict and len(constraint_names) != 1:
raise ValueError("Found wrong number (%s) of PK constraints for %s" % (
len(constraint_names),
model._meta.db_table,
))
for constraint_name in constraint_names:
self.execute(self._delete_constraint_sql(self.sql_delete_pk, model, constraint_name))
# Make the new one
self.execute(
self.sql_create_pk % {
"table": self.quote_name(model._meta.db_table),
"name": self.quote_name(self._create_index_name(model, [new_field.column], suffix="_pk")),
"columns": self.quote_name(new_field.column),
}
)
# Update all referencing columns
rels_to_update.extend(_related_non_m2m_objects(old_field, new_field))
# Handle our type alters on the other end of rels from the PK stuff above
for old_rel, new_rel in rels_to_update:
rel_db_params = new_rel.field.db_parameters(connection=self.connection)
rel_type = rel_db_params['type']
fragment, other_actions = self._alter_column_type_sql(
new_rel.related_model._meta.db_table, old_rel.field, new_rel.field, rel_type
)
self.execute(
self.sql_alter_column % {
"table": self.quote_name(new_rel.related_model._meta.db_table),
"changes": fragment[0],
},
fragment[1],
)
for sql, params in other_actions:
self.execute(sql, params)
# Does it have a foreign key?
if (new_field.remote_field and
(fks_dropped or not old_field.remote_field or not old_field.db_constraint) and
new_field.db_constraint):
self.execute(self._create_fk_sql(model, new_field, "_fk_%(to_table)s_%(to_column)s"))
# Rebuild FKs that pointed to us if we previously had to drop them
if old_field.primary_key and new_field.primary_key and old_type != new_type:
for rel in new_field.model._meta.related_objects:
if not rel.many_to_many:
self.execute(self._create_fk_sql(rel.related_model, rel.field, "_fk"))
# Does it have check constraints we need to add?
if old_db_params['check'] != new_db_params['check'] and new_db_params['check']:
self.execute(
self.sql_create_check % {
"table": self.quote_name(model._meta.db_table),
"name": self.quote_name(self._create_index_name(model, [new_field.column], suffix="_check")),
"column": self.quote_name(new_field.column),
"check": new_db_params['check'],
}
)
# Drop the default if we need to
# (Django usually does not use in-database defaults)
if needs_database_default:
sql = self.sql_alter_column % {
"table": self.quote_name(model._meta.db_table),
"changes": self.sql_alter_column_no_default % {
"column": self.quote_name(new_field.column),
}
}
self.execute(sql)
# Reset connection if required
if self.connection.features.connection_persists_old_columns:
self.connection.close()
def _alter_column_type_sql(self, table, old_field, new_field, new_type):
"""
Hook to specialize column type alteration for different backends,
        for cases when a creation type differs from an alteration type
        (e.g. SERIAL in PostgreSQL, PostGIS fields).
        Should return two things: an SQL fragment of (sql, params) to insert
into an ALTER TABLE statement, and a list of extra (sql, params) tuples
to run once the field is altered.
"""
return (
(
self.sql_alter_column_type % {
"column": self.quote_name(new_field.column),
"type": new_type,
},
[],
),
[],
)
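    # For example (a sketch; the template comes from the backend's
    # sql_alter_column_type, 'ALTER COLUMN %(column)s TYPE %(type)s' in the
    # base editor), altering a column "age" to type integer returns roughly
    # (('ALTER COLUMN "age" TYPE integer', []), []).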
def _alter_many_to_many(self, model, old_field, new_field, strict):
"""
Alters M2Ms to repoint their to= endpoints.
"""
# Rename the through table
if old_field.remote_field.through._meta.db_table != new_field.remote_field.through._meta.db_table:
self.alter_db_table(old_field.remote_field.through, old_field.remote_field.through._meta.db_table,
new_field.remote_field.through._meta.db_table)
# Repoint the FK to the other side
self.alter_field(
new_field.remote_field.through,
# We need the field that points to the target model, so we can tell alter_field to change it -
# this is m2m_reverse_field_name() (as opposed to m2m_field_name, which points to our model)
old_field.remote_field.through._meta.get_field(old_field.m2m_reverse_field_name()),
new_field.remote_field.through._meta.get_field(new_field.m2m_reverse_field_name()),
)
self.alter_field(
new_field.remote_field.through,
# for self-referential models we need to alter field from the other end too
old_field.remote_field.through._meta.get_field(old_field.m2m_field_name()),
new_field.remote_field.through._meta.get_field(new_field.m2m_field_name()),
)
def _create_index_name(self, model, column_names, suffix=""):
"""
Generates a unique name for an index/unique constraint.
"""
# If there is just one column in the index, use a default algorithm from Django
if len(column_names) == 1 and not suffix:
return truncate_name(
'%s_%s' % (model._meta.db_table, self._digest(column_names[0])),
self.connection.ops.max_name_length()
)
# Else generate the name for the index using a different algorithm
table_name = model._meta.db_table.replace('"', '').replace('.', '_')
index_unique_name = '_%s' % self._digest(table_name, *column_names)
max_length = self.connection.ops.max_name_length() or 200
# If the index name is too long, truncate it
index_name = ('%s_%s%s%s' % (
table_name, column_names[0], index_unique_name, suffix,
)).replace('"', '').replace('.', '_')
if len(index_name) > max_length:
part = ('_%s%s%s' % (column_names[0], index_unique_name, suffix))
index_name = '%s%s' % (table_name[:(max_length - len(part))], part)
# It shouldn't start with an underscore (Oracle hates this)
if index_name[0] == "_":
index_name = index_name[1:]
# If it's STILL too long, just hash it down
if len(index_name) > max_length:
index_name = hashlib.md5(force_bytes(index_name)).hexdigest()[:max_length]
# It can't start with a number on Oracle, so prepend D if we need to
if index_name[0].isdigit():
index_name = "D%s" % index_name[:-1]
return index_name
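    # Illustrative example (names are hypothetical): for table "shop_book",
    # column "author_id" and suffix "_idx" this yields something like
    # "shop_book_author_id_<digest>_idx", truncated and finally md5-hashed
    # only when it exceeds the backend's max_name_length().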
def _create_index_sql(self, model, fields, suffix="", sql=None):
"""
Return the SQL statement to create the index for one or several fields.
`sql` can be specified if the syntax differs from the standard (GIS
indexes, ...).
"""
if len(fields) == 1 and fields[0].db_tablespace:
tablespace_sql = self.connection.ops.tablespace_sql(fields[0].db_tablespace)
elif model._meta.db_tablespace:
tablespace_sql = self.connection.ops.tablespace_sql(model._meta.db_tablespace)
else:
tablespace_sql = ""
if tablespace_sql:
tablespace_sql = " " + tablespace_sql
columns = [field.column for field in fields]
sql_create_index = sql or self.sql_create_index
return sql_create_index % {
"table": self.quote_name(model._meta.db_table),
"name": self.quote_name(self._create_index_name(model, columns, suffix=suffix)),
"columns": ", ".join(self.quote_name(column) for column in columns),
"extra": tablespace_sql,
}
def _model_indexes_sql(self, model):
"""
Return all index SQL statements (field indexes, index_together) for the
specified model, as a list.
"""
if not model._meta.managed or model._meta.proxy or model._meta.swapped:
return []
output = []
for field in model._meta.local_fields:
if field.db_index and not field.unique:
output.append(self._create_index_sql(model, [field], suffix=""))
for field_names in model._meta.index_together:
fields = [model._meta.get_field(field) for field in field_names]
output.append(self._create_index_sql(model, fields, suffix="_idx"))
return output
def _rename_field_sql(self, table, old_field, new_field, new_type):
return self.sql_rename_column % {
"table": self.quote_name(table),
"old_column": self.quote_name(old_field.column),
"new_column": self.quote_name(new_field.column),
"type": new_type,
}
def _create_fk_sql(self, model, field, suffix):
from_table = model._meta.db_table
from_column = field.column
to_table = field.target_field.model._meta.db_table
to_column = field.target_field.column
suffix = suffix % {
"to_table": to_table,
"to_column": to_column,
}
return self.sql_create_fk % {
"table": self.quote_name(from_table),
"name": self.quote_name(self._create_index_name(model, [from_column], suffix=suffix)),
"column": self.quote_name(from_column),
"to_table": self.quote_name(to_table),
"to_column": self.quote_name(to_column),
}
def _create_unique_sql(self, model, columns):
return self.sql_create_unique % {
"table": self.quote_name(model._meta.db_table),
"name": self.quote_name(self._create_index_name(model, columns, suffix="_uniq")),
"columns": ", ".join(self.quote_name(column) for column in columns),
}
def _delete_constraint_sql(self, template, model, name):
return template % {
"table": self.quote_name(model._meta.db_table),
"name": self.quote_name(name),
}
def _constraint_names(self, model, column_names=None, unique=None,
primary_key=None, index=None, foreign_key=None,
check=None):
"""
        Returns all constraint names matching the columns and conditions.
"""
column_names = list(column_names) if column_names else None
with self.connection.cursor() as cursor:
constraints = self.connection.introspection.get_constraints(cursor, model._meta.db_table)
result = []
for name, infodict in constraints.items():
if column_names is None or column_names == infodict['columns']:
if unique is not None and infodict['unique'] != unique:
continue
if primary_key is not None and infodict['primary_key'] != primary_key:
continue
if index is not None and infodict['index'] != index:
continue
if check is not None and infodict['check'] != check:
continue
if foreign_key is not None and not infodict['foreign_key']:
continue
result.append(name)
return result
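    # Example call (model and column are hypothetical): find the unique
    # constraint guarding a single column.
    #
    #   names = editor._constraint_names(Book, ["isbn"], unique=True)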
|
julian-seward1/servo
|
refs/heads/master
|
tests/wpt/web-platform-tests/tools/py/py/_process/killproc.py
|
278
|
import py
import os, sys
if sys.platform == "win32" or getattr(os, '_name', '') == 'nt':
try:
import ctypes
except ImportError:
def dokill(pid):
py.process.cmdexec("taskkill /F /PID %d" %(pid,))
else:
def dokill(pid):
PROCESS_TERMINATE = 1
handle = ctypes.windll.kernel32.OpenProcess(
PROCESS_TERMINATE, False, pid)
ctypes.windll.kernel32.TerminateProcess(handle, -1)
ctypes.windll.kernel32.CloseHandle(handle)
else:
def dokill(pid):
os.kill(pid, 15)
def kill(pid):
""" kill process by id. """
dokill(pid)
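if __name__ == "__main__":
    # Minimal demonstration (a sketch, not part of the original module):
    # spawn a short-lived child process and terminate it via kill() using
    # whichever dokill() implementation was selected above.
    import subprocess
    import time
    child = subprocess.Popen([sys.executable, "-c", "import time; time.sleep(60)"])
    kill(child.pid)
    time.sleep(1)
    assert child.poll() is not None, "child should have been terminated"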
|
wimnat/ansible
|
refs/heads/devel
|
test/units/module_utils/common/validation/test_check_type_bits.py
|
83
|
# -*- coding: utf-8 -*-
# Copyright (c) 2019 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
from ansible.module_utils._text import to_native
from ansible.module_utils.common.validation import check_type_bits
def test_check_type_bits():
test_cases = (
('1', 1),
(99, 99),
(1.5, 2),
('1.5', 2),
('2b', 2),
('2k', 2048),
('2K', 2048),
('1m', 1048576),
('1M', 1048576),
('1g', 1073741824),
('1G', 1073741824),
(1073741824, 1073741824),
)
for case in test_cases:
assert case[1] == check_type_bits(case[0])
def test_check_type_bits_fail():
test_cases = (
'foo',
'2KB',
'1MB',
'1GB',
)
for case in test_cases:
with pytest.raises(TypeError) as e:
check_type_bits(case)
assert 'cannot be converted to a Bit value' in to_native(e.value)
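# Note inferred from the cases above: suffixes are binary bit multipliers
# ('2k' -> 2 * 1024 bits), while byte-style suffixes such as 'KB' or 'MB'
# are rejected with a TypeError.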
|
rdkit/rdkit-orig
|
refs/heads/master
|
rdkit/ML/DecTree/QuantTree.py
|
2
|
# $Id$
#
# Copyright (C) 2001, 2003 greg Landrum and Rational Discovery LLC
# All Rights Reserved
#
""" Defines the class _QuantTreeNode_, used to represent decision trees with automatic
quantization bounds.
_QuantTreeNode_ is derived from _DecTree.DecTreeNode_.
"""
from rdkit.ML.DecTree import DecTree,Tree
class QuantTreeNode(DecTree.DecTreeNode):
"""
"""
def __init__(self,*args,**kwargs):
    DecTree.DecTreeNode.__init__(self, *args, **kwargs)
self.qBounds = []
self.nBounds = 0
def ClassifyExample(self,example,appendExamples=0):
""" Recursively classify an example by running it through the tree
**Arguments**
- example: the example to be classified
- appendExamples: if this is nonzero then this node (and all children)
will store the example
**Returns**
the classification of _example_
**NOTE:**
In the interest of speed, I don't use accessor functions
here. So if you subclass DecTreeNode for your own trees, you'll
have to either include ClassifyExample or avoid changing the names
of the instance variables this needs.
"""
if appendExamples:
self.examples.append(example)
if self.terminalNode:
return self.label
else:
val = example[self.label]
if not hasattr(self,'nBounds'): self.nBounds = len(self.qBounds)
if self.nBounds:
for i,bound in enumerate(self.qBounds):
if val < bound:
val = i
break
else:
val = i+1
else:
val = int(val)
return self.children[val].ClassifyExample(example,appendExamples=appendExamples)
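  # Worked example (illustrative): with qBounds = [1.5, 3.0], a raw value
  # of 0.7 maps to branch 0, 2.2 to branch 1, and 4.0 to branch 2, so a
  # node with n bounds needs n + 1 children.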
def SetQuantBounds(self,qBounds):
self.qBounds = qBounds[:]
self.nBounds = len(self.qBounds)
def GetQuantBounds(self):
return self.qBounds
def __cmp__(self,other):
return cmp(type(self),type(other)) or \
cmp(self.qBounds,other.qBounds) or \
Tree.TreeNode.__cmp__(self,other)
def __str__(self):
""" returns a string representation of the tree
**Note**
this works recursively
"""
here = '%s%s %s\n'%(' '*self.level,self.name,str(self.qBounds))
for child in self.children:
here = here + str(child)
return here
|
apache/incubator-airflow
|
refs/heads/master
|
tests/api_connexion/endpoints/test_event_log_endpoint.py
|
5
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from parameterized import parameterized
from airflow import DAG
from airflow.api_connexion.exceptions import EXCEPTIONS_LINK_MAP
from airflow.models import Log, TaskInstance
from airflow.operators.dummy import DummyOperator
from airflow.security import permissions
from airflow.utils import timezone
from airflow.utils.session import provide_session
from airflow.www import app
from tests.test_utils.api_connexion_utils import assert_401, create_user, delete_user
from tests.test_utils.config import conf_vars
from tests.test_utils.db import clear_db_logs
class TestEventLogEndpoint(unittest.TestCase):
@classmethod
def setUpClass(cls) -> None:
super().setUpClass()
with conf_vars({("api", "auth_backend"): "tests.test_utils.remote_user_api_auth_backend"}):
cls.app = app.create_app(testing=True) # type:ignore
create_user(
cls.app, # type:ignore
username="test",
role_name="Test",
permissions=[(permissions.ACTION_CAN_READ, permissions.RESOURCE_AUDIT_LOG)], # type: ignore
)
create_user(cls.app, username="test_no_permissions", role_name="TestNoPermissions") # type: ignore
@classmethod
def tearDownClass(cls) -> None:
delete_user(cls.app, username="test") # type: ignore
delete_user(cls.app, username="test_no_permissions") # type: ignore
def setUp(self) -> None:
self.client = self.app.test_client() # type:ignore
clear_db_logs()
self.default_time = "2020-06-10T20:00:00+00:00"
self.default_time_2 = '2020-06-11T07:00:00+00:00'
def tearDown(self) -> None:
clear_db_logs()
def _create_task_instance(self):
dag = DAG(
'TEST_DAG_ID',
start_date=timezone.parse(self.default_time),
end_date=timezone.parse(self.default_time),
)
op1 = DummyOperator(
task_id="TEST_TASK_ID",
owner="airflow",
)
dag.add_task(op1)
ti = TaskInstance(task=op1, execution_date=timezone.parse(self.default_time))
return ti
class TestGetEventLog(TestEventLogEndpoint):
@provide_session
def test_should_respond_200(self, session):
log_model = Log(
event='TEST_EVENT',
task_instance=self._create_task_instance(),
)
log_model.dttm = timezone.parse(self.default_time)
session.add(log_model)
session.commit()
event_log_id = log_model.id
response = self.client.get(
f"/api/v1/eventLogs/{event_log_id}", environ_overrides={'REMOTE_USER': "test"}
)
assert response.status_code == 200
self.assertEqual(
response.json,
{
"event_log_id": event_log_id,
"event": "TEST_EVENT",
"dag_id": "TEST_DAG_ID",
"task_id": "TEST_TASK_ID",
"execution_date": self.default_time,
"owner": 'airflow',
"when": self.default_time,
"extra": None,
},
)
def test_should_respond_404(self):
response = self.client.get("/api/v1/eventLogs/1", environ_overrides={'REMOTE_USER': "test"})
assert response.status_code == 404
self.assertEqual(
{'detail': None, 'status': 404, 'title': 'Event Log not found', 'type': EXCEPTIONS_LINK_MAP[404]},
response.json,
)
@provide_session
def test_should_raises_401_unauthenticated(self, session):
log_model = Log(
event='TEST_EVENT',
task_instance=self._create_task_instance(),
)
log_model.dttm = timezone.parse(self.default_time)
session.add(log_model)
session.commit()
event_log_id = log_model.id
response = self.client.get(f"/api/v1/eventLogs/{event_log_id}")
assert_401(response)
def test_should_raise_403_forbidden(self):
response = self.client.get(
"/api/v1/eventLogs", environ_overrides={'REMOTE_USER': "test_no_permissions"}
)
assert response.status_code == 403
class TestGetEventLogs(TestEventLogEndpoint):
@provide_session
def test_should_respond_200(self, session):
log_model_1 = Log(
event='TEST_EVENT_1',
task_instance=self._create_task_instance(),
)
log_model_2 = Log(
event='TEST_EVENT_2',
task_instance=self._create_task_instance(),
)
log_model_3 = Log(event="cli_scheduler", owner='root', extra='{"host_name": "e24b454f002a"}')
log_model_1.dttm = timezone.parse(self.default_time)
log_model_2.dttm = timezone.parse(self.default_time_2)
log_model_3.dttm = timezone.parse(self.default_time_2)
session.add_all([log_model_1, log_model_2, log_model_3])
session.commit()
response = self.client.get("/api/v1/eventLogs", environ_overrides={'REMOTE_USER': "test"})
assert response.status_code == 200
self.assertEqual(
response.json,
{
"event_logs": [
{
"event_log_id": log_model_1.id,
"event": "TEST_EVENT_1",
"dag_id": "TEST_DAG_ID",
"task_id": "TEST_TASK_ID",
"execution_date": self.default_time,
"owner": 'airflow',
"when": self.default_time,
"extra": None,
},
{
"event_log_id": log_model_2.id,
"event": "TEST_EVENT_2",
"dag_id": "TEST_DAG_ID",
"task_id": "TEST_TASK_ID",
"execution_date": self.default_time,
"owner": 'airflow',
"when": self.default_time_2,
"extra": None,
},
{
"event_log_id": log_model_3.id,
"event": "cli_scheduler",
"dag_id": None,
"task_id": None,
"execution_date": None,
"owner": 'root',
"when": self.default_time_2,
"extra": '{"host_name": "e24b454f002a"}',
},
],
"total_entries": 3,
},
)
@provide_session
def test_should_raises_401_unauthenticated(self, session):
log_model_1 = Log(
event='TEST_EVENT_1',
task_instance=self._create_task_instance(),
)
log_model_2 = Log(
event='TEST_EVENT_2',
task_instance=self._create_task_instance(),
)
log_model_1.dttm = timezone.parse(self.default_time)
log_model_2.dttm = timezone.parse(self.default_time_2)
session.add_all([log_model_1, log_model_2])
session.commit()
response = self.client.get("/api/v1/eventLogs")
assert_401(response)
class TestGetEventLogPagination(TestEventLogEndpoint):
@parameterized.expand(
[
("api/v1/eventLogs?limit=1", ["TEST_EVENT_1"]),
("api/v1/eventLogs?limit=2", ["TEST_EVENT_1", "TEST_EVENT_2"]),
(
"api/v1/eventLogs?offset=5",
[
"TEST_EVENT_6",
"TEST_EVENT_7",
"TEST_EVENT_8",
"TEST_EVENT_9",
"TEST_EVENT_10",
],
),
(
"api/v1/eventLogs?offset=0",
[
"TEST_EVENT_1",
"TEST_EVENT_2",
"TEST_EVENT_3",
"TEST_EVENT_4",
"TEST_EVENT_5",
"TEST_EVENT_6",
"TEST_EVENT_7",
"TEST_EVENT_8",
"TEST_EVENT_9",
"TEST_EVENT_10",
],
),
("api/v1/eventLogs?limit=1&offset=5", ["TEST_EVENT_6"]),
("api/v1/eventLogs?limit=1&offset=1", ["TEST_EVENT_2"]),
(
"api/v1/eventLogs?limit=2&offset=2",
["TEST_EVENT_3", "TEST_EVENT_4"],
),
]
)
@provide_session
def test_handle_limit_and_offset(self, url, expected_events, session):
log_models = self._create_event_logs(10)
session.add_all(log_models)
session.commit()
response = self.client.get(url, environ_overrides={'REMOTE_USER': "test"})
assert response.status_code == 200
self.assertEqual(response.json["total_entries"], 10)
events = [event_log["event"] for event_log in response.json["event_logs"]]
self.assertEqual(events, expected_events)
@provide_session
def test_should_respect_page_size_limit_default(self, session):
log_models = self._create_event_logs(200)
session.add_all(log_models)
session.commit()
response = self.client.get("/api/v1/eventLogs", environ_overrides={'REMOTE_USER': "test"})
assert response.status_code == 200
self.assertEqual(response.json["total_entries"], 200)
self.assertEqual(len(response.json["event_logs"]), 100) # default 100
@provide_session
@conf_vars({("api", "maximum_page_limit"): "150"})
def test_should_return_conf_max_if_req_max_above_conf(self, session):
log_models = self._create_event_logs(200)
session.add_all(log_models)
session.commit()
response = self.client.get("/api/v1/eventLogs?limit=180", environ_overrides={'REMOTE_USER': "test"})
assert response.status_code == 200
self.assertEqual(len(response.json['event_logs']), 150)
def _create_event_logs(self, count):
return [
Log(event="TEST_EVENT_" + str(i), task_instance=self._create_task_instance())
for i in range(1, count + 1)
]
|
MattCCS/PyVault
|
refs/heads/master
|
site-packages/pip/_vendor/cachecontrol/caches/file_cache.py
|
762
|
import hashlib
import os
from pip._vendor.lockfile import LockFile
from pip._vendor.lockfile.mkdirlockfile import MkdirLockFile
from ..cache import BaseCache
from ..controller import CacheController
def _secure_open_write(filename, fmode):
# We only want to write to this file, so open it in write only mode
flags = os.O_WRONLY
    # os.O_CREAT | os.O_EXCL will fail if the file already exists, so we
    # will only open *new* files.
# We specify this because we want to ensure that the mode we pass is the
# mode of the file.
flags |= os.O_CREAT | os.O_EXCL
# Do not follow symlinks to prevent someone from making a symlink that
# we follow and insecurely open a cache file.
if hasattr(os, "O_NOFOLLOW"):
flags |= os.O_NOFOLLOW
# On Windows we'll mark this file as binary
if hasattr(os, "O_BINARY"):
flags |= os.O_BINARY
# Before we open our file, we want to delete any existing file that is
# there
try:
os.remove(filename)
except (IOError, OSError):
# The file must not exist already, so we can just skip ahead to opening
pass
# Open our file, the use of os.O_CREAT | os.O_EXCL will ensure that if a
# race condition happens between the os.remove and this line, that an
# error will be raised. Because we utilize a lockfile this should only
# happen if someone is attempting to attack us.
fd = os.open(filename, flags, fmode)
try:
return os.fdopen(fd, "wb")
except:
# An error occurred wrapping our FD in a file object
os.close(fd)
raise
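# Usage sketch (path and mode are illustrative): write cache bytes with a
# guaranteed file mode while refusing to follow a pre-planted symlink.
#
#   with _secure_open_write("/tmp/cache-entry", 0o600) as fh:
#       fh.write(b"cached response body")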
class FileCache(BaseCache):
def __init__(self, directory, forever=False, filemode=0o0600,
dirmode=0o0700, use_dir_lock=None, lock_class=None):
if use_dir_lock is not None and lock_class is not None:
raise ValueError("Cannot use use_dir_lock and lock_class together")
if use_dir_lock:
lock_class = MkdirLockFile
if lock_class is None:
lock_class = LockFile
self.directory = directory
self.forever = forever
self.filemode = filemode
self.dirmode = dirmode
self.lock_class = lock_class
@staticmethod
def encode(x):
return hashlib.sha224(x.encode()).hexdigest()
def _fn(self, name):
# NOTE: This method should not change as some may depend on it.
# See: https://github.com/ionrock/cachecontrol/issues/63
hashed = self.encode(name)
parts = list(hashed[:5]) + [hashed]
return os.path.join(self.directory, *parts)
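    # Sharding example (digest value is hypothetical): a key whose hash is
    # "abcde123..." is stored at <directory>/a/b/c/d/e/abcde123..., which
    # keeps any single directory from holding every cache entry.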
def get(self, key):
name = self._fn(key)
if not os.path.exists(name):
return None
with open(name, 'rb') as fh:
return fh.read()
def set(self, key, value):
name = self._fn(key)
# Make sure the directory exists
try:
os.makedirs(os.path.dirname(name), self.dirmode)
except (IOError, OSError):
pass
with self.lock_class(name) as lock:
# Write our actual file
with _secure_open_write(lock.path, self.filemode) as fh:
fh.write(value)
def delete(self, key):
name = self._fn(key)
if not self.forever:
os.remove(name)
def url_to_file_path(url, filecache):
"""Return the file cache path based on the URL.
This does not ensure the file exists!
"""
key = CacheController.cache_url(url)
return filecache._fn(key)
|
khushboo9293/mailman3
|
refs/heads/develop
|
src/mailman/database/transaction.py
|
7
|
# Copyright (C) 2006-2015 by the Free Software Foundation, Inc.
#
# This file is part of GNU Mailman.
#
# GNU Mailman is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# GNU Mailman is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# GNU Mailman. If not, see <http://www.gnu.org/licenses/>.
"""Transactional support."""
__all__ = [
'dbconnection',
'flush',
'transaction',
'transactional',
]
from contextlib import contextmanager
from mailman.config import config
@contextmanager
def transaction():
"""Context manager for ensuring the transaction is complete."""
try:
yield
except:
config.db.abort()
raise
else:
config.db.commit()
def transactional(function):
"""Decorator for transactional support.
When the function this decorator wraps exits cleanly, the current
    transaction is committed. When it exits uncleanly (i.e. because of an
    exception), the transaction is aborted.
Either way, the current transaction is completed.
"""
def wrapper(*args, **kws):
try:
rtn = function(*args, **kws)
config.db.commit()
return rtn
except:
config.db.abort()
raise
return wrapper
@contextmanager
def flush():
"""Context manager for flushing SQLAlchemy.
We need this for SA whereas we didn't need it for Storm because the latter
did auto-reloads. However, in SA this is needed when we add or delete
objects from the database. Use it when you need the id after adding, or
when you want to be sure the object won't be found after a delete.
This is lighter weight than committing the transaction.
"""
yield
config.db.store.flush()
def dbconnection(function):
"""Decorator for getting at the database connection.
Use this to avoid having to access the global `config.db.store`
attribute. This calls the function with `store` as the first argument.
"""
def wrapper(*args, **kws):
# args[0] is self, if there is one.
if len(args) > 0:
return function(args[0], config.db.store, *args[1:], **kws)
else:
return function(config.db.store, **kws)
return wrapper
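# Usage sketch (illustrative; `config.db` is only initialized inside a
# running Mailman process, so this is not directly executable standalone):
#
#   with transaction():
#       do_database_work()        # committed on clean exit, else aborted
#
#   @transactional
#   def rename_list(mlist, new_name):
#       mlist.list_name = new_name    # committed unless this raises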
|
CybOXProject/python-cybox
|
refs/heads/master
|
cybox/test/objects/network_route_test.py
|
1
|
# Copyright (c) 2017, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
import unittest
from mixbox.vendor.six import u
from cybox.objects.network_route_object import NetRoute
from cybox.test.objects import ObjectTestCase
from cybox.test.objects.network_route_entry_test import TestNetworkRouteEntry
class TestNetworkRoute(ObjectTestCase, unittest.TestCase):
object_type = "NetRouteObjectType"
klass = NetRoute
_full_dict = {
'is_ipv6': False,
'is_autoconfigure_address': True,
'is_immortal': False,
'is_loopback': False,
'is_publish': True,
'description': "A description",
'preferred_lifetime': u("P10D"),
'valid_lifetime': u("P5D"),
'route_age': u("P1D"),
'network_route_entries': [
TestNetworkRouteEntry._full_dict,
TestNetworkRouteEntry._full_dict,
],
'xsi:type': object_type,
}
if __name__ == "__main__":
unittest.main()
|
ramadhane/odoo
|
refs/heads/8.0
|
addons/mail/report/__init__.py
|
438
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2009-Today OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
krig/beets
|
refs/heads/master
|
test/test_library.py
|
7
|
# This file is part of beets.
# Copyright 2013, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""Tests for non-query database functions of Item.
"""
import os
import shutil
import re
import unicodedata
import sys
import _common
from _common import unittest
from _common import item
import beets.library
from beets import util
from beets import plugins
from beets import config
TEMP_LIB = os.path.join(_common.RSRC, 'test_copy.blb')
# Shortcut to path normalization.
np = util.normpath
class LoadTest(_common.LibTestCase):
def test_load_restores_data_from_db(self):
original_title = self.i.title
self.i.title = 'something'
self.i.load()
self.assertEqual(original_title, self.i.title)
def test_load_clears_dirty_flags(self):
self.i.artist = 'something'
self.assertTrue('artist' in self.i._dirty)
self.i.load()
self.assertTrue('artist' not in self.i._dirty)
class StoreTest(_common.LibTestCase):
def test_store_changes_database_value(self):
self.i.year = 1987
self.i.store()
new_year = self.lib._connection().execute(
'select year from items where '
'title="the title"').fetchone()['year']
self.assertEqual(new_year, 1987)
def test_store_only_writes_dirty_fields(self):
original_genre = self.i.genre
self.i._values_fixed['genre'] = 'beatboxing' # change w/o dirtying
self.i.store()
new_genre = self.lib._connection().execute(
'select genre from items where '
'title="the title"').fetchone()['genre']
self.assertEqual(new_genre, original_genre)
def test_store_clears_dirty_flags(self):
self.i.composer = 'tvp'
self.i.store()
self.assertTrue('composer' not in self.i._dirty)
class AddTest(_common.TestCase):
def setUp(self):
super(AddTest, self).setUp()
self.lib = beets.library.Library(':memory:')
self.i = item()
def test_item_add_inserts_row(self):
self.lib.add(self.i)
new_grouping = self.lib._connection().execute(
'select grouping from items '
'where composer="the composer"').fetchone()['grouping']
self.assertEqual(new_grouping, self.i.grouping)
def test_library_add_path_inserts_row(self):
i = beets.library.Item.from_path(
os.path.join(_common.RSRC, 'full.mp3')
)
self.lib.add(i)
new_grouping = self.lib._connection().execute(
'select grouping from items '
'where composer="the composer"').fetchone()['grouping']
self.assertEqual(new_grouping, self.i.grouping)
class RemoveTest(_common.LibTestCase):
def test_remove_deletes_from_db(self):
self.i.remove()
c = self.lib._connection().execute('select * from items')
self.assertEqual(c.fetchone(), None)
class GetSetTest(_common.TestCase):
def setUp(self):
super(GetSetTest, self).setUp()
self.i = item()
def test_set_changes_value(self):
self.i.bpm = 4915
self.assertEqual(self.i.bpm, 4915)
def test_set_sets_dirty_flag(self):
self.i.comp = not self.i.comp
self.assertTrue('comp' in self.i._dirty)
def test_set_does_not_dirty_if_value_unchanged(self):
self.i.title = self.i.title
self.assertTrue('title' not in self.i._dirty)
def test_invalid_field_raises_attributeerror(self):
self.assertRaises(AttributeError, getattr, self.i, 'xyzzy')
class DestinationTest(_common.TestCase):
def setUp(self):
super(DestinationTest, self).setUp()
self.lib = beets.library.Library(':memory:')
self.i = item(self.lib)
def tearDown(self):
super(DestinationTest, self).tearDown()
self.lib._connection().close()
def test_directory_works_with_trailing_slash(self):
self.lib.directory = 'one/'
self.lib.path_formats = [('default', 'two')]
self.assertEqual(self.i.destination(), np('one/two'))
def test_directory_works_without_trailing_slash(self):
self.lib.directory = 'one'
self.lib.path_formats = [('default', 'two')]
self.assertEqual(self.i.destination(), np('one/two'))
    def test_destination_substitutes_metadata_values(self):
self.lib.directory = 'base'
self.lib.path_formats = [('default', '$album/$artist $title')]
self.i.title = 'three'
self.i.artist = 'two'
self.i.album = 'one'
self.assertEqual(self.i.destination(),
np('base/one/two three'))
def test_destination_preserves_extension(self):
self.lib.directory = 'base'
self.lib.path_formats = [('default', '$title')]
self.i.path = 'hey.audioformat'
self.assertEqual(self.i.destination(),
np('base/the title.audioformat'))
def test_lower_case_extension(self):
self.lib.directory = 'base'
self.lib.path_formats = [('default', '$title')]
self.i.path = 'hey.MP3'
self.assertEqual(self.i.destination(),
np('base/the title.mp3'))
def test_destination_pads_some_indices(self):
self.lib.directory = 'base'
self.lib.path_formats = [('default',
'$track $tracktotal $disc $disctotal $bpm')]
self.i.track = 1
self.i.tracktotal = 2
self.i.disc = 3
self.i.disctotal = 4
self.i.bpm = 5
self.assertEqual(self.i.destination(),
np('base/01 02 03 04 5'))
def test_destination_pads_date_values(self):
self.lib.directory = 'base'
self.lib.path_formats = [('default', '$year-$month-$day')]
self.i.year = 1
self.i.month = 2
self.i.day = 3
self.assertEqual(self.i.destination(),
np('base/0001-02-03'))
def test_destination_escapes_slashes(self):
self.i.album = 'one/two'
dest = self.i.destination()
self.assertTrue('one' in dest)
self.assertTrue('two' in dest)
self.assertFalse('one/two' in dest)
def test_destination_escapes_leading_dot(self):
self.i.album = '.something'
dest = self.i.destination()
self.assertTrue('something' in dest)
self.assertFalse('/.' in dest)
def test_destination_preserves_legitimate_slashes(self):
self.i.artist = 'one'
self.i.album = 'two'
dest = self.i.destination()
self.assertTrue(os.path.join('one', 'two') in dest)
def test_destination_long_names_truncated(self):
self.i.title = 'X' * 300
self.i.artist = 'Y' * 300
for c in self.i.destination().split(os.path.sep):
self.assertTrue(len(c) <= 255)
def test_destination_long_names_keep_extension(self):
self.i.title = 'X' * 300
self.i.path = 'something.extn'
dest = self.i.destination()
self.assertEqual(dest[-5:], '.extn')
    def test_destination_windows_removes_both_separators(self):
self.i.title = 'one \\ two / three.mp3'
with _common.platform_windows():
p = self.i.destination()
self.assertFalse('one \\ two' in p)
self.assertFalse('one / two' in p)
self.assertFalse('two \\ three' in p)
self.assertFalse('two / three' in p)
def test_sanitize_unix_replaces_leading_dot(self):
with _common.platform_posix():
p = util.sanitize_path(u'one/.two/three')
self.assertFalse('.' in p)
def test_sanitize_windows_replaces_trailing_dot(self):
with _common.platform_windows():
p = util.sanitize_path(u'one/two./three')
self.assertFalse('.' in p)
def test_sanitize_windows_replaces_illegal_chars(self):
with _common.platform_windows():
p = util.sanitize_path(u':*?"<>|')
self.assertFalse(':' in p)
self.assertFalse('*' in p)
self.assertFalse('?' in p)
self.assertFalse('"' in p)
self.assertFalse('<' in p)
self.assertFalse('>' in p)
self.assertFalse('|' in p)
def test_path_with_format(self):
self.lib.path_formats = [('default', '$artist/$album ($format)')]
p = self.i.destination()
self.assert_('(FLAC)' in p)
def test_heterogeneous_album_gets_single_directory(self):
i1, i2 = item(), item()
self.lib.add_album([i1, i2])
i1.year, i2.year = 2009, 2010
self.lib.path_formats = [('default', '$album ($year)/$track $title')]
dest1, dest2 = i1.destination(), i2.destination()
self.assertEqual(os.path.dirname(dest1), os.path.dirname(dest2))
def test_default_path_for_non_compilations(self):
self.i.comp = False
self.lib.add_album([self.i])
self.lib.directory = 'one'
self.lib.path_formats = [('default', 'two'),
('comp:true', 'three')]
self.assertEqual(self.i.destination(), np('one/two'))
def test_singleton_path(self):
i = item(self.lib)
self.lib.directory = 'one'
self.lib.path_formats = [
('default', 'two'),
('singleton:true', 'four'),
('comp:true', 'three'),
]
self.assertEqual(i.destination(), np('one/four'))
def test_comp_before_singleton_path(self):
i = item(self.lib)
i.comp = True
self.lib.directory = 'one'
self.lib.path_formats = [
('default', 'two'),
('comp:true', 'three'),
('singleton:true', 'four'),
]
self.assertEqual(i.destination(), np('one/three'))
def test_comp_path(self):
self.i.comp = True
self.lib.add_album([self.i])
self.lib.directory = 'one'
self.lib.path_formats = [
('default', 'two'),
('comp:true', 'three'),
]
self.assertEqual(self.i.destination(), np('one/three'))
def test_albumtype_query_path(self):
self.i.comp = True
self.lib.add_album([self.i])
self.i.albumtype = 'sometype'
self.lib.directory = 'one'
self.lib.path_formats = [
('default', 'two'),
('albumtype:sometype', 'four'),
('comp:true', 'three'),
]
self.assertEqual(self.i.destination(), np('one/four'))
def test_albumtype_path_fallback_to_comp(self):
self.i.comp = True
self.lib.add_album([self.i])
self.i.albumtype = 'sometype'
self.lib.directory = 'one'
self.lib.path_formats = [
('default', 'two'),
('albumtype:anothertype', 'four'),
('comp:true', 'three'),
]
self.assertEqual(self.i.destination(), np('one/three'))
def test_sanitize_windows_replaces_trailing_space(self):
with _common.platform_windows():
p = util.sanitize_path(u'one/two /three')
self.assertFalse(' ' in p)
def test_get_formatted_does_not_replace_separators(self):
with _common.platform_posix():
name = os.path.join('a', 'b')
self.i.title = name
newname = self.i._get_formatted('title')
self.assertEqual(name, newname)
def test_get_formatted_pads_with_zero(self):
with _common.platform_posix():
self.i.track = 1
name = self.i._get_formatted('track')
self.assertTrue(name.startswith('0'))
def test_get_formatted_uses_kbps_bitrate(self):
with _common.platform_posix():
self.i.bitrate = 12345
val = self.i._get_formatted('bitrate')
self.assertEqual(val, u'12kbps')
def test_get_formatted_uses_khz_samplerate(self):
with _common.platform_posix():
self.i.samplerate = 12345
val = self.i._get_formatted('samplerate')
self.assertEqual(val, u'12kHz')
def test_get_formatted_datetime(self):
with _common.platform_posix():
self.i.added = 1368302461.210265
val = self.i._get_formatted('added')
self.assertTrue(val.startswith('2013'))
def test_get_formatted_none(self):
with _common.platform_posix():
self.i.some_other_field = None
val = self.i._get_formatted('some_other_field')
self.assertEqual(val, u'')
def test_artist_falls_back_to_albumartist(self):
self.i.artist = ''
self.i.albumartist = 'something'
self.lib.path_formats = [('default', '$artist')]
p = self.i.destination()
self.assertEqual(p.rsplit(os.path.sep, 1)[1], 'something')
def test_albumartist_falls_back_to_artist(self):
self.i.artist = 'trackartist'
self.i.albumartist = ''
self.lib.path_formats = [('default', '$albumartist')]
p = self.i.destination()
self.assertEqual(p.rsplit(os.path.sep, 1)[1], 'trackartist')
def test_artist_overrides_albumartist(self):
self.i.artist = 'theartist'
self.i.albumartist = 'something'
self.lib.path_formats = [('default', '$artist')]
p = self.i.destination()
self.assertEqual(p.rsplit(os.path.sep, 1)[1], 'theartist')
def test_albumartist_overrides_artist(self):
self.i.artist = 'theartist'
self.i.albumartist = 'something'
self.lib.path_formats = [('default', '$albumartist')]
p = self.i.destination()
self.assertEqual(p.rsplit(os.path.sep, 1)[1], 'something')
def test_sanitize_path_works_on_empty_string(self):
with _common.platform_posix():
p = util.sanitize_path(u'')
self.assertEqual(p, u'')
def test_sanitize_with_custom_replace_overrides_built_in_sub(self):
with _common.platform_posix():
p = util.sanitize_path(u'a/.?/b', [
(re.compile(ur'foo'), u'bar'),
])
self.assertEqual(p, u'a/.?/b')
def test_sanitize_with_custom_replace_adds_replacements(self):
with _common.platform_posix():
p = util.sanitize_path(u'foo/bar', [
(re.compile(ur'foo'), u'bar'),
])
self.assertEqual(p, u'bar/bar')
def test_unicode_normalized_nfd_on_mac(self):
instr = unicodedata.normalize('NFC', u'caf\xe9')
self.lib.path_formats = [('default', instr)]
dest = self.i.destination(platform='darwin', fragment=True)
self.assertEqual(dest, unicodedata.normalize('NFD', instr))
def test_unicode_normalized_nfc_on_linux(self):
instr = unicodedata.normalize('NFD', u'caf\xe9')
self.lib.path_formats = [('default', instr)]
dest = self.i.destination(platform='linux2', fragment=True)
self.assertEqual(dest, unicodedata.normalize('NFC', instr))
def test_non_mbcs_characters_on_windows(self):
oldfunc = sys.getfilesystemencoding
sys.getfilesystemencoding = lambda: 'mbcs'
try:
self.i.title = u'h\u0259d'
self.lib.path_formats = [('default', '$title')]
p = self.i.destination()
self.assertFalse('?' in p)
# We use UTF-8 to encode Windows paths now.
self.assertTrue(u'h\u0259d'.encode('utf8') in p)
finally:
sys.getfilesystemencoding = oldfunc
def test_unicode_extension_in_fragment(self):
self.lib.path_formats = [('default', u'foo')]
self.i.path = util.bytestring_path(u'bar.caf\xe9')
dest = self.i.destination(platform='linux2', fragment=True)
self.assertEqual(dest, u'foo.caf\xe9')
class PathFormattingMixin(object):
"""Utilities for testing path formatting."""
def _setf(self, fmt):
self.lib.path_formats.insert(0, ('default', fmt))
def _assert_dest(self, dest, i=None):
if i is None:
i = self.i
with _common.platform_posix():
actual = i.destination()
self.assertEqual(actual, dest)
class DestinationFunctionTest(_common.TestCase, PathFormattingMixin):
def setUp(self):
super(DestinationFunctionTest, self).setUp()
self.lib = beets.library.Library(':memory:')
self.lib.directory = '/base'
self.lib.path_formats = [('default', u'path')]
self.i = item(self.lib)
def tearDown(self):
super(DestinationFunctionTest, self).tearDown()
self.lib._connection().close()
def test_upper_case_literal(self):
self._setf(u'%upper{foo}')
self._assert_dest('/base/FOO')
def test_upper_case_variable(self):
self._setf(u'%upper{$title}')
self._assert_dest('/base/THE TITLE')
def test_title_case_variable(self):
self._setf(u'%title{$title}')
self._assert_dest('/base/The Title')
def test_left_variable(self):
self._setf(u'%left{$title, 3}')
self._assert_dest('/base/the')
def test_right_variable(self):
self._setf(u'%right{$title,3}')
self._assert_dest('/base/tle')
def test_if_false(self):
self._setf(u'x%if{,foo}')
self._assert_dest('/base/x')
def test_if_true(self):
self._setf(u'%if{bar,foo}')
self._assert_dest('/base/foo')
def test_if_else_false(self):
self._setf(u'%if{,foo,baz}')
self._assert_dest('/base/baz')
def test_if_int_value(self):
self._setf(u'%if{0,foo,baz}')
self._assert_dest('/base/baz')
def test_nonexistent_function(self):
self._setf(u'%foo{bar}')
self._assert_dest('/base/%foo{bar}')
class DisambiguationTest(_common.TestCase, PathFormattingMixin):
def setUp(self):
super(DisambiguationTest, self).setUp()
self.lib = beets.library.Library(':memory:')
self.lib.directory = '/base'
self.lib.path_formats = [('default', u'path')]
self.i1 = item()
self.i1.year = 2001
self.lib.add_album([self.i1])
self.i2 = item()
self.i2.year = 2002
self.lib.add_album([self.i2])
self.lib._connection().commit()
self._setf(u'foo%aunique{albumartist album,year}/$title')
def tearDown(self):
super(DisambiguationTest, self).tearDown()
self.lib._connection().close()
def test_unique_expands_to_disambiguating_year(self):
self._assert_dest('/base/foo [2001]/the title', self.i1)
def test_unique_with_default_arguments_uses_albumtype(self):
album2 = self.lib.get_album(self.i1)
album2.albumtype = 'bar'
album2.store()
self._setf(u'foo%aunique{}/$title')
self._assert_dest('/base/foo [bar]/the title', self.i1)
def test_unique_expands_to_nothing_for_distinct_albums(self):
album2 = self.lib.get_album(self.i2)
album2.album = 'different album'
album2.store()
self._assert_dest('/base/foo/the title', self.i1)
def test_use_fallback_numbers_when_identical(self):
album2 = self.lib.get_album(self.i2)
album2.year = 2001
album2.store()
self._assert_dest('/base/foo 1/the title', self.i1)
self._assert_dest('/base/foo 2/the title', self.i2)
def test_unique_falls_back_to_second_distinguishing_field(self):
self._setf(u'foo%aunique{albumartist album,month year}/$title')
self._assert_dest('/base/foo [2001]/the title', self.i1)
def test_unique_sanitized(self):
album2 = self.lib.get_album(self.i2)
album2.year = 2001
album1 = self.lib.get_album(self.i1)
album1.albumtype = 'foo/bar'
album2.store()
album1.store()
self._setf(u'foo%aunique{albumartist album,albumtype}/$title')
self._assert_dest('/base/foo [foo_bar]/the title', self.i1)
class PathConversionTest(_common.TestCase):
def test_syspath_windows_format(self):
with _common.platform_windows():
path = os.path.join('a', 'b', 'c')
outpath = util.syspath(path)
self.assertTrue(isinstance(outpath, unicode))
self.assertTrue(outpath.startswith(u'\\\\?\\'))
def test_syspath_posix_unchanged(self):
with _common.platform_posix():
path = os.path.join('a', 'b', 'c')
outpath = util.syspath(path)
self.assertEqual(path, outpath)
def _windows_bytestring_path(self, path):
old_gfse = sys.getfilesystemencoding
sys.getfilesystemencoding = lambda: 'mbcs'
try:
with _common.platform_windows():
return util.bytestring_path(path)
finally:
sys.getfilesystemencoding = old_gfse
def test_bytestring_path_windows_encodes_utf8(self):
path = u'caf\xe9'
outpath = self._windows_bytestring_path(path)
self.assertEqual(path, outpath.decode('utf8'))
    def test_bytestring_path_windows_removes_magic_prefix(self):
path = u'\\\\?\\C:\\caf\xe9'
outpath = self._windows_bytestring_path(path)
self.assertEqual(outpath, u'C:\\caf\xe9'.encode('utf8'))
class PluginDestinationTest(_common.TestCase):
def setUp(self):
super(PluginDestinationTest, self).setUp()
# Mock beets.plugins.item_field_getters.
self._tv_map = {}
def field_getters():
getters = {}
for key, value in self._tv_map.items():
getters[key] = lambda _: value
return getters
self.old_field_getters = plugins.item_field_getters
plugins.item_field_getters = field_getters
self.lib = beets.library.Library(':memory:')
self.lib.directory = '/base'
self.lib.path_formats = [('default', u'$artist $foo')]
self.i = item(self.lib)
def tearDown(self):
super(PluginDestinationTest, self).tearDown()
plugins.item_field_getters = self.old_field_getters
def _assert_dest(self, dest):
with _common.platform_posix():
the_dest = self.i.destination()
self.assertEqual(the_dest, '/base/' + dest)
def test_undefined_value_not_substituted(self):
self._assert_dest('the artist $foo')
def test_plugin_value_not_substituted(self):
self._tv_map = {
'foo': 'bar',
}
self._assert_dest('the artist bar')
def test_plugin_value_overrides_attribute(self):
self._tv_map = {
'artist': 'bar',
}
self._assert_dest('bar $foo')
def test_plugin_value_sanitized(self):
self._tv_map = {
'foo': 'bar/baz',
}
self._assert_dest('the artist bar_baz')
class AlbumInfoTest(_common.TestCase):
def setUp(self):
super(AlbumInfoTest, self).setUp()
self.lib = beets.library.Library(':memory:')
self.i = item()
self.lib.add_album((self.i,))
def test_albuminfo_reflects_metadata(self):
ai = self.lib.get_album(self.i)
self.assertEqual(ai.mb_albumartistid, self.i.mb_albumartistid)
self.assertEqual(ai.albumartist, self.i.albumartist)
self.assertEqual(ai.album, self.i.album)
self.assertEqual(ai.year, self.i.year)
def test_albuminfo_stores_art(self):
ai = self.lib.get_album(self.i)
ai.artpath = '/my/great/art'
ai.store()
new_ai = self.lib.get_album(self.i)
self.assertEqual(new_ai.artpath, '/my/great/art')
def test_albuminfo_for_two_items_doesnt_duplicate_row(self):
i2 = item(self.lib)
self.lib.get_album(self.i)
self.lib.get_album(i2)
c = self.lib._connection().cursor()
c.execute('select * from albums where album=?', (self.i.album,))
# Cursor should only return one row.
self.assertNotEqual(c.fetchone(), None)
self.assertEqual(c.fetchone(), None)
def test_individual_tracks_have_no_albuminfo(self):
i2 = item()
i2.album = 'aTotallyDifferentAlbum'
self.lib.add(i2)
ai = self.lib.get_album(i2)
self.assertEqual(ai, None)
def test_get_album_by_id(self):
ai = self.lib.get_album(self.i)
ai = self.lib.get_album(self.i.id)
self.assertNotEqual(ai, None)
def test_album_items_consistent(self):
ai = self.lib.get_album(self.i)
for item in ai.items():
if item.id == self.i.id:
break
else:
self.fail("item not found")
def test_albuminfo_changes_affect_items(self):
ai = self.lib.get_album(self.i)
ai.album = 'myNewAlbum'
ai.store()
i = self.lib.items()[0]
self.assertEqual(i.album, 'myNewAlbum')
def test_albuminfo_change_albumartist_changes_items(self):
ai = self.lib.get_album(self.i)
ai.albumartist = 'myNewArtist'
ai.store()
i = self.lib.items()[0]
self.assertEqual(i.albumartist, 'myNewArtist')
self.assertNotEqual(i.artist, 'myNewArtist')
def test_albuminfo_change_artist_does_not_change_items(self):
ai = self.lib.get_album(self.i)
ai.artist = 'myNewArtist'
ai.store()
i = self.lib.items()[0]
self.assertNotEqual(i.artist, 'myNewArtist')
def test_albuminfo_remove_removes_items(self):
item_id = self.i.id
self.lib.get_album(self.i).remove()
c = self.lib._connection().execute(
'SELECT id FROM items WHERE id=?', (item_id,)
)
self.assertEqual(c.fetchone(), None)
def test_removing_last_item_removes_album(self):
self.assertEqual(len(self.lib.albums()), 1)
self.i.remove()
self.assertEqual(len(self.lib.albums()), 0)
class ArtDestinationTest(_common.TestCase):
def setUp(self):
super(ArtDestinationTest, self).setUp()
config['art_filename'] = u'artimage'
config['replace'] = {u'X': u'Y'}
self.lib = beets.library.Library(
':memory:', replacements=[(re.compile(u'X'), u'Y')]
)
self.i = item(self.lib)
self.i.path = self.i.destination()
self.ai = self.lib.add_album((self.i,))
def test_art_filename_respects_setting(self):
art = self.ai.art_destination('something.jpg')
self.assert_('%sartimage.jpg' % os.path.sep in art)
def test_art_path_in_item_dir(self):
art = self.ai.art_destination('something.jpg')
track = self.i.destination()
self.assertEqual(os.path.dirname(art), os.path.dirname(track))
def test_art_path_sanitized(self):
config['art_filename'] = u'artXimage'
art = self.ai.art_destination('something.jpg')
self.assert_('artYimage' in art)
class PathStringTest(_common.TestCase):
def setUp(self):
super(PathStringTest, self).setUp()
self.lib = beets.library.Library(':memory:')
self.i = item(self.lib)
def test_item_path_is_bytestring(self):
self.assert_(isinstance(self.i.path, str))
def test_fetched_item_path_is_bytestring(self):
i = list(self.lib.items())[0]
self.assert_(isinstance(i.path, str))
def test_unicode_path_becomes_bytestring(self):
self.i.path = u'unicodepath'
self.assert_(isinstance(self.i.path, str))
def test_unicode_in_database_becomes_bytestring(self):
self.lib._connection().execute("""
update items set path=? where id=?
""", (self.i.id, u'somepath'))
i = list(self.lib.items())[0]
self.assert_(isinstance(i.path, str))
def test_special_chars_preserved_in_database(self):
path = 'b\xe1r'
self.i.path = path
self.i.store()
i = list(self.lib.items())[0]
self.assertEqual(i.path, path)
def test_special_char_path_added_to_database(self):
self.i.remove()
path = 'b\xe1r'
i = item()
i.path = path
self.lib.add(i)
i = list(self.lib.items())[0]
self.assertEqual(i.path, path)
def test_destination_returns_bytestring(self):
self.i.artist = u'b\xe1r'
dest = self.i.destination()
self.assert_(isinstance(dest, str))
def test_art_destination_returns_bytestring(self):
self.i.artist = u'b\xe1r'
alb = self.lib.add_album([self.i])
dest = alb.art_destination(u'image.jpg')
self.assert_(isinstance(dest, str))
def test_artpath_stores_special_chars(self):
path = b'b\xe1r'
alb = self.lib.add_album([self.i])
alb.artpath = path
alb.store()
alb = self.lib.get_album(self.i)
self.assertEqual(path, alb.artpath)
def test_sanitize_path_with_special_chars(self):
path = u'b\xe1r?'
new_path = util.sanitize_path(path)
self.assert_(new_path.startswith(u'b\xe1r'))
def test_sanitize_path_returns_unicode(self):
path = u'b\xe1r?'
new_path = util.sanitize_path(path)
self.assert_(isinstance(new_path, unicode))
def test_unicode_artpath_becomes_bytestring(self):
alb = self.lib.add_album([self.i])
alb.artpath = u'somep\xe1th'
self.assert_(isinstance(alb.artpath, str))
def test_unicode_artpath_in_database_decoded(self):
alb = self.lib.add_album([self.i])
self.lib._connection().execute(
"update albums set artpath=? where id=?",
(u'somep\xe1th', alb.id)
)
alb = self.lib.get_album(alb.id)
self.assert_(isinstance(alb.artpath, str))
class PathTruncationTest(_common.TestCase):
def test_truncate_bytestring(self):
with _common.platform_posix():
p = util.truncate_path('abcde/fgh', 4)
self.assertEqual(p, 'abcd/fgh')
def test_truncate_unicode(self):
with _common.platform_posix():
p = util.truncate_path(u'abcde/fgh', 4)
self.assertEqual(p, u'abcd/fgh')
def test_truncate_preserves_extension(self):
with _common.platform_posix():
p = util.truncate_path(u'abcde/fgh.ext', 5)
self.assertEqual(p, u'abcde/f.ext')
class MtimeTest(_common.TestCase):
def setUp(self):
super(MtimeTest, self).setUp()
self.ipath = os.path.join(_common.RSRC, 'testfile.mp3')
shutil.copy(os.path.join(_common.RSRC, 'full.mp3'), self.ipath)
self.i = beets.library.Item.from_path(self.ipath)
self.lib = beets.library.Library(':memory:')
self.lib.add(self.i)
def tearDown(self):
super(MtimeTest, self).tearDown()
if os.path.exists(self.ipath):
os.remove(self.ipath)
def _mtime(self):
return int(os.path.getmtime(self.ipath))
def test_mtime_initially_up_to_date(self):
self.assertGreaterEqual(self.i.mtime, self._mtime())
def test_mtime_reset_on_db_modify(self):
self.i.title = 'something else'
self.assertLess(self.i.mtime, self._mtime())
def test_mtime_up_to_date_after_write(self):
self.i.title = 'something else'
self.i.write()
self.assertGreaterEqual(self.i.mtime, self._mtime())
def test_mtime_up_to_date_after_read(self):
self.i.title = 'something else'
self.i.read()
self.assertGreaterEqual(self.i.mtime, self._mtime())
class ImportTimeTest(_common.TestCase):
def setUp(self):
super(ImportTimeTest, self).setUp()
self.lib = beets.library.Library(':memory:')
def test_added(self):
self.track = item()
self.album = self.lib.add_album((self.track,))
self.assertGreater(self.album.added, 0)
self.assertGreater(self.track.added, 0)
def test_atime_for_singleton(self):
self.singleton = item(self.lib)
self.assertGreater(self.singleton.added, 0)
class TemplateTest(_common.LibTestCase):
def test_album_fields_override_item_values(self):
self.album = self.lib.add_album([self.i])
self.album.albumartist = 'album-level'
self.album.store()
self.i.albumartist = 'track-level'
self.i.store()
self.assertEqual(self.i.evaluate_template('$albumartist'),
'album-level')
def test_year_formatted_in_template(self):
self.i.year = 123
self.i.store()
self.assertEqual(self.i.evaluate_template('$year'), '0123')
def test_album_flexattr_appears_in_item_template(self):
self.album = self.lib.add_album([self.i])
self.album.foo = 'baz'
self.album.store()
self.assertEqual(self.i.evaluate_template('$foo'), 'baz')
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
Pennebaker/wagtail
|
refs/heads/master
|
wagtail/wagtailsearch/urls/__init__.py
|
40
|
from wagtail.wagtailsearch.urls.frontend import urlpatterns # noqa
|
GNS3/gns3-server
|
refs/heads/master
|
gns3server/controller/topology.py
|
1
|
#!/usr/bin/env python
#
# Copyright (C) 2016 GNS3 Technologies Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import html
import json
import copy
import uuid
import glob
import shutil
import zipfile
import aiohttp
import jsonschema
from ..version import __version__
from ..schemas.topology import TOPOLOGY_SCHEMA
from ..schemas import dynamips_vm
from ..utils.qt import qt_font_to_style
from ..compute.dynamips import PLATFORMS_DEFAULT_RAM
import logging
log = logging.getLogger(__name__)
GNS3_FILE_FORMAT_REVISION = 9
def _check_topology_schema(topo):
try:
jsonschema.validate(topo, TOPOLOGY_SCHEMA)
# Check the nodes property against compute schemas
for node in topo["topology"].get("nodes", []):
schema = None
if node["node_type"] == "dynamips":
schema = copy.deepcopy(dynamips_vm.VM_CREATE_SCHEMA)
if schema:
# Properties sent to the compute, but stored elsewhere in the topology
delete_properties = ["name", "node_id"]
for prop in delete_properties:
del schema["properties"][prop]
schema["required"] = [p for p in schema["required"] if p not in delete_properties]
jsonschema.validate(node.get("properties", {}), schema)
except jsonschema.ValidationError as e:
error = "Invalid data in topology file: {} in schema: {}".format(
e.message,
json.dumps(e.schema))
log.debug(error)
raise aiohttp.web.HTTPConflict(text=error)
def project_to_topology(project):
"""
:return: A dictionary with the topology, ready to be dumped to a .gns3 file
"""
data = {
"project_id": project.id,
"name": project.name,
"auto_start": project.auto_start,
"auto_open": project.auto_open,
"auto_close": project.auto_close,
"scene_width": project.scene_width,
"scene_height": project.scene_height,
"zoom": project.zoom,
"show_layers": project.show_layers,
"snap_to_grid": project.snap_to_grid,
"show_grid": project.show_grid,
"grid_size": project.grid_size,
"drawing_grid_size": project.drawing_grid_size,
"show_interface_labels": project.show_interface_labels,
"variables": project.variables,
"supplier": project.supplier,
"topology": {
"nodes": [],
"links": [],
"computes": [],
"drawings": []
},
"type": "topology",
"revision": GNS3_FILE_FORMAT_REVISION,
"version": __version__
}
for node in project.nodes.values():
if hasattr(node, "__json__"):
data["topology"]["nodes"].append(node.__json__(topology_dump=True))
else:
data["topology"]["nodes"].append(node)
for link in project.links.values():
if hasattr(link, "__json__"):
data["topology"]["links"].append(link.__json__(topology_dump=True))
else:
data["topology"]["links"].append(link)
for drawing in project.drawings.values():
if hasattr(drawing, "__json__"):
data["topology"]["drawings"].append(drawing.__json__(topology_dump=True))
else:
data["topology"]["drawings"].append(drawing)
for compute in project.computes:
if hasattr(compute, "__json__"):
compute = compute.__json__(topology_dump=True)
if compute["compute_id"] not in ("vm", "local", ):
data["topology"]["computes"].append(compute)
elif isinstance(compute, dict):
data["topology"]["computes"].append(compute)
_check_topology_schema(data)
return data
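# Illustrative sketch (assumption, not part of the original module): the
# dictionary built above is the full content of a .gns3 project file, so
# persisting it is a plain JSON dump. The file name below is hypothetical.
def _example_save_topology(project, path="project.gns3"):
    data = project_to_topology(project)
    with open(path, "w", encoding="utf-8") as f:
        json.dump(data, f, indent=4, sort_keys=True)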
def load_topology(path):
"""
Open a topology file, patch it for the latest GNS3 release and return it
"""
log.debug("Read topology %s", path)
try:
with open(path, encoding="utf-8") as f:
topo = json.load(f)
except (OSError, UnicodeDecodeError, ValueError) as e:
raise aiohttp.web.HTTPConflict(text="Could not load topology {}: {}".format(path, str(e)))
if topo.get("revision", 0) > GNS3_FILE_FORMAT_REVISION:
raise aiohttp.web.HTTPConflict(text="This project was created with more recent version of GNS3 (file revision: {}). Please upgrade GNS3 to version {} or later".format(topo["revision"], topo["version"]))
changed = False
if "revision" not in topo or topo["revision"] < GNS3_FILE_FORMAT_REVISION:
# Convert the topology if this is an old one, but back up the file first
try:
shutil.copy(path, path + ".backup{}".format(topo.get("revision", 0)))
except OSError as e:
raise aiohttp.web.HTTPConflict(text="Can't write backup of the topology {}: {}".format(path, str(e)))
changed = True
# update the version because we converted the topology
topo["version"] = __version__
if "revision" not in topo or topo["revision"] < 5:
topo = _convert_1_3_later(topo, path)
# Version before GNS3 2.0 alpha 4
if topo["revision"] < 6:
topo = _convert_2_0_0_alpha(topo, path)
# Version before GNS3 2.0 beta 3
if topo["revision"] < 7:
topo = _convert_2_0_0_beta_2(topo, path)
# Version before GNS3 2.1
if topo["revision"] < 8:
topo = _convert_2_0_0(topo, path)
# Version before GNS3 2.2
if topo["revision"] < 9:
topo = _convert_2_1_0(topo, path)
# Version GNS3 2.2 dev (for projects created with 2.2dev).
# Appliance ID has been replaced by Template ID
if topo["revision"] == 9:
for node in topo.get("topology", {}).get("nodes", []):
if "appliance_id" in node:
node["template_id"] = node["appliance_id"]
del node["appliance_id"]
# make sure console_type is the string "none" rather than None
if "console_type" in node and node["console_type"] is None:
node["console_type"] = "none"
# make sure we can open a project with empty variable name
variables = topo.get("variables")
if variables:
topo["variables"] = [var for var in variables if var.get("name")]
try:
_check_topology_schema(topo)
except aiohttp.web.HTTPConflict as e:
log.error("Can't load the topology %s", path)
raise e
if changed:
try:
with open(path, "w+", encoding="utf-8") as f:
json.dump(topo, f, indent=4, sort_keys=True)
except OSError as e:
raise aiohttp.web.HTTPConflict(text="Can't write the topology {}: {}".format(path, str(e)))
return topo
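# Illustrative usage sketch (assumption, not part of the original module):
# load_topology() validates and, when needed, migrates the file in place, so
# a caller only has to handle the HTTPConflict raised on failure.
def _example_open_project(path):
    try:
        topo = load_topology(path)
    except aiohttp.web.HTTPConflict as e:
        log.error("Cannot open topology %s: %s", path, e)
        raise
    return topo["topology"]["nodes"]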
def _convert_2_1_0(topo, topo_path):
"""
Convert topologies from GNS3 2.1.x to 2.2
Changes:
* Removed acpi_shutdown option from Qemu, VMware and VirtualBox
"""
topo["revision"] = 9
if "grid_size" in topo:
# drawing_grid_size should be the same size as grid_size
# to avoid overlapping grids
topo["drawing_grid_size"] = topo["grid_size"]
for node in topo.get("topology", {}).get("nodes", []):
# make sure console_type is the string "none" rather than None
if "console_type" in node and node["console_type"] is None:
node["console_type"] = "none"
if "properties" in node:
if node["node_type"] in ("qemu", "vmware", "virtualbox"):
if "acpi_shutdown" in node["properties"]:
if node["properties"]["acpi_shutdown"] is True:
node["properties"]["on_close"] = "save_vm_sate"
else:
node["properties"]["on_close"] = "power_off"
del node["properties"]["acpi_shutdown"]
if "save_vm_state" in node["properties"]:
del node["properties"]["save_vm_state"]
return topo
def _convert_2_0_0(topo, topo_path):
"""
Convert topologies from GNS3 2.0.0 to 2.1
Changes:
* Remove startup_script_path from VPCS and base config file for IOU and Dynamips
"""
topo["revision"] = 8
for node in topo.get("topology", {}).get("nodes", []):
if "properties" in node:
if node["node_type"] == "vpcs":
if "startup_script_path" in node["properties"]:
del node["properties"]["startup_script_path"]
if "startup_script" in node["properties"]:
del node["properties"]["startup_script"]
elif node["node_type"] == "dynamips" or node["node_type"] == "iou":
if "startup_config" in node["properties"]:
del node["properties"]["startup_config"]
if "private_config" in node["properties"]:
del node["properties"]["private_config"]
if "startup_config_content" in node["properties"]:
del node["properties"]["startup_config_content"]
if "private_config_content" in node["properties"]:
del node["properties"]["private_config_content"]
return topo
def _convert_2_0_0_beta_2(topo, topo_path):
"""
Convert topologies from GNS3 2.0.0 beta 2 to beta 3.
Changes:
* Node id folders for dynamips
"""
topo_dir = os.path.dirname(topo_path)
topo["revision"] = 7
for node in topo.get("topology", {}).get("nodes", []):
if node["node_type"] == "dynamips":
node_id = node["node_id"]
dynamips_id = node["properties"]["dynamips_id"]
dynamips_dir = os.path.join(topo_dir, "project-files", "dynamips")
node_dir = os.path.join(dynamips_dir, node_id)
try:
os.makedirs(os.path.join(node_dir, "configs"), exist_ok=True)
for path in glob.glob(os.path.join(glob.escape(dynamips_dir), "*_i{}_*".format(dynamips_id))):
shutil.move(path, os.path.join(node_dir, os.path.basename(path)))
for path in glob.glob(os.path.join(glob.escape(dynamips_dir), "configs", "i{}_*".format(dynamips_id))):
shutil.move(path, os.path.join(node_dir, "configs", os.path.basename(path)))
except OSError as e:
raise aiohttp.web.HTTPConflict(text="Can't convert project {}: {}".format(topo_path, str(e)))
return topo
def _convert_2_0_0_alpha(topo, topo_path):
"""
Convert topologies from GNS3 2.0.0 alpha to 2.0.0 final.
Changes:
* No more serial console
* No more option for VMware / VirtualBox remote console (always use telnet)
"""
topo["revision"] = 6
for node in topo.get("topology", {}).get("nodes", []):
if node.get("console_type") == "serial":
node["console_type"] = "telnet"
if node["node_type"] in ("vmware", "virtualbox"):
prop = node.get("properties")
if "enable_remote_console" in prop:
del prop["enable_remote_console"]
return topo
def _convert_1_3_later(topo, topo_path):
"""
Convert topologies from 1_3 to the new file format
Look in tests/topologies/README.rst for instructions to test changes here
"""
topo_dir = os.path.dirname(topo_path)
_convert_snapshots(topo_dir)
new_topo = {
"type": "topology",
"revision": 5,
"version": __version__,
"auto_start": topo.get("auto_start", False),
"name": topo["name"],
"project_id": topo.get("project_id"),
"topology": {
"links": [],
"drawings": [],
"computes": [],
"nodes": []
}
}
if new_topo["project_id"] is None:
new_topo["project_id"] = str(uuid.uuid4()) # Could arrive for topologues with drawing only
if "topology" not in topo:
return new_topo
topo = topo["topology"]
# Create computes
server_id_to_compute_id = {}
for server in topo.get("servers", []):
compute = {
"host": server.get("host", "localhost"),
"port": server.get("port", 3080),
"protocol": server.get("protocol", "http")
}
if server["local"]:
compute["compute_id"] = "local"
compute["name"] = "Local"
elif server.get("vm", False):
compute["compute_id"] = "vm"
compute["name"] = "GNS3 VM"
else:
compute["name"] = "Remote {}".format(server["id"])
compute["compute_id"] = str(uuid.uuid4())
server_id_to_compute_id[server["id"]] = compute["compute_id"]
new_topo["topology"]["computes"].append(compute)
# Create nodes
ports = {}
node_id_to_node_uuid = {}
for old_node in topo.get("nodes", []):
node = {}
node["console"] = old_node["properties"].get("console", None)
try:
node["compute_id"] = server_id_to_compute_id[old_node["server_id"]]
except KeyError:
node["compute_id"] = "local"
node["console_type"] = old_node["properties"].get("console_type", "telnet")
if "label" in old_node:
node["name"] = old_node["label"]["text"]
node["label"] = _convert_label(old_node["label"])
else:
node["name"] = old_node["properties"]["name"]
node["node_id"] = old_node.get("vm_id", str(uuid.uuid4()))
node["symbol"] = old_node.get("symbol", None)
# Compatibility with <= 1.3
if node["symbol"] is None and "default_symbol" in old_node:
if old_node["default_symbol"].endswith("normal.svg"):
node["symbol"] = old_node["default_symbol"][:-11] + ".svg"
else:
node["symbol"] = old_node["default_symbol"]
node["x"] = int(old_node["x"])
node["y"] = int(old_node["y"])
node["z"] = int(old_node.get("z", 1))
node["port_name_format"] = old_node.get("port_name_format", "Ethernet{0}")
node["port_segment_size"] = int(old_node.get("port_segment_size", "0"))
node["first_port_name"] = old_node.get("first_port_name")
node["properties"] = {}
# Some old dynamips nodes don't have a type
if "type" not in old_node:
old_node["type"] = old_node["properties"]["platform"].upper()
if old_node["type"] == "VPCSDevice":
node["node_type"] = "vpcs"
elif old_node["type"] == "QemuVM":
node = _convert_qemu_node(node, old_node)
elif old_node["type"] == "DockerVM":
node["node_type"] = "docker"
if node["symbol"] is None:
node["symbol"] = ":/symbols/docker_guest.svg"
elif old_node["type"] == "ATMSwitch":
node["node_type"] = "atm_switch"
node["symbol"] = ":/symbols/atm_switch.svg"
node["console_type"] = None
elif old_node["type"] == "EthernetHub":
node["node_type"] = "ethernet_hub"
node["console_type"] = None
node["symbol"] = ":/symbols/hub.svg"
node["properties"]["ports_mapping"] = []
for port in old_node.get("ports", []):
node["properties"]["ports_mapping"].append({
"name": "Ethernet{}".format(port["port_number"] - 1),
"port_number": port["port_number"] - 1
})
elif old_node["type"] == "EthernetSwitch":
node["node_type"] = "ethernet_switch"
node["symbol"] = ":/symbols/ethernet_switch.svg"
node["console_type"] = None
node["properties"]["ports_mapping"] = []
for port in old_node.get("ports", []):
node["properties"]["ports_mapping"].append({
"name": "Ethernet{}".format(port["port_number"] - 1),
"port_number": port["port_number"] - 1,
"type": port["type"],
"vlan": port["vlan"]
})
elif old_node["type"] == "FrameRelaySwitch":
node["node_type"] = "frame_relay_switch"
node["symbol"] = ":/symbols/frame_relay_switch.svg"
node["console_type"] = None
elif old_node["type"].upper() in ["C1700", "C2600", "C2691", "C3600", "C3620", "C3640", "C3660", "C3725", "C3745", "C7200", "EtherSwitchRouter"]:
if node["symbol"] is None:
node["symbol"] = ":/symbols/router.svg"
node["node_type"] = "dynamips"
node["properties"]["dynamips_id"] = old_node.get("dynamips_id")
if "platform" not in node["properties"] and old_node["type"].upper().startswith("C"):
node["properties"]["platform"] = old_node["type"].lower()
if node["properties"]["platform"].startswith("c36"):
node["properties"]["platform"] = "c3600"
if "ram" not in node["properties"] and old_node["type"].startswith("C"):
node["properties"]["ram"] = PLATFORMS_DEFAULT_RAM[old_node["type"].lower()]
elif old_node["type"] == "VMwareVM":
node["node_type"] = "vmware"
node["properties"]["linked_clone"] = old_node.get("linked_clone", False)
if node["symbol"] is None:
node["symbol"] = ":/symbols/vmware_guest.svg"
elif old_node["type"] == "VirtualBoxVM":
node["node_type"] = "virtualbox"
node["properties"]["linked_clone"] = old_node.get("linked_clone", False)
if node["symbol"] is None:
node["symbol"] = ":/symbols/vbox_guest.svg"
elif old_node["type"] == "IOUDevice":
node["node_type"] = "iou"
node["port_name_format"] = old_node.get("port_name_format", "Ethernet{segment0}/{port0}")
node["port_segment_size"] = int(old_node.get("port_segment_size", "4"))
if node["symbol"] is None:
if "l2" in node["properties"].get("path", ""):
node["symbol"] = ":/symbols/multilayer_switch.svg"
else:
node["symbol"] = ":/symbols/router.svg"
elif old_node["type"] == "Cloud":
symbol = old_node.get("symbol", ":/symbols/cloud.svg")
old_node["ports"] = _create_cloud(node, old_node, symbol)
elif old_node["type"] == "Host":
symbol = old_node.get("symbol", ":/symbols/computer.svg")
old_node["ports"] = _create_cloud(node, old_node, symbol)
else:
raise aiohttp.web.HTTPConflict(text="Conversion of {} is not supported".format(old_node["type"]))
for prop in old_node.get("properties", {}):
if prop not in ["console", "name", "console_type", "console_host", "use_ubridge"]:
node["properties"][prop] = old_node["properties"][prop]
node_id_to_node_uuid[old_node["id"]] = node["node_id"]
for port in old_node.get("ports", []):
if node["node_type"] in ("ethernet_hub", "ethernet_switch"):
port["port_number"] -= 1
ports[port["id"]] = port
new_topo["topology"]["nodes"].append(node)
# Create links
for old_link in topo.get("links", []):
try:
nodes = []
source_node = {
"adapter_number": ports[old_link["source_port_id"]].get("adapter_number", 0),
"port_number": ports[old_link["source_port_id"]].get("port_number", 0),
"node_id": node_id_to_node_uuid[old_link["source_node_id"]]
}
nodes.append(source_node)
destination_node = {
"adapter_number": ports[old_link["destination_port_id"]].get("adapter_number", 0),
"port_number": ports[old_link["destination_port_id"]].get("port_number", 0),
"node_id": node_id_to_node_uuid[old_link["destination_node_id"]]
}
nodes.append(destination_node)
except KeyError:
continue
link = {
"link_id": str(uuid.uuid4()),
"nodes": nodes
}
new_topo["topology"]["links"].append(link)
# Ellipse
for ellipse in topo.get("ellipses", []):
svg = '<svg height="{height}" width="{width}"><ellipse cx="{cx}" cy="{cy}" fill="{fill}" fill-opacity="1.0" rx="{rx}" ry="{ry}" {border_style} /></svg>'.format(
height=int(ellipse["height"]),
width=int(ellipse["width"]),
cx=int(ellipse["width"] / 2),
cy=int(ellipse["height"] / 2),
rx=int(ellipse["width"] / 2),
ry=int(ellipse["height"] / 2),
fill=ellipse.get("color", "#ffffff"),
border_style=_convert_border_style(ellipse)
)
new_ellipse = {
"drawing_id": str(uuid.uuid4()),
"x": int(ellipse["x"]),
"y": int(ellipse["y"]),
"z": int(ellipse.get("z", 0)),
"rotation": int(ellipse.get("rotation", 0)),
"svg": svg
}
new_topo["topology"]["drawings"].append(new_ellipse)
# Notes
for note in topo.get("notes", []):
font_info = note.get("font", "TypeWriter,10,-1,5,75,0,0,0,0,0").split(",")
if font_info[4] == "75":
weight = "bold"
else:
weight = "normal"
if font_info[5] == "1":
style = "italic"
else:
style = "normal"
svg = '<svg height="{height}" width="{width}"><text fill="{fill}" fill-opacity="{opacity}" font-family="{family}" font-size="{size}" font-weight="{weight}" font-style="{style}">{text}</text></svg>'.format(
height=int(font_info[1]) * 2,
width=int(font_info[1]) * len(note["text"]),
fill="#" + note.get("color", "#00000000")[-6:],
opacity=round(1.0 / 255 * int(note.get("color", "#ffffffff")[:3][-2:], base=16), 2),  # Extract the alpha channel from the hex color value
family=font_info[0],
size=int(font_info[1]),
weight=weight,
style=style,
text=html.escape(note["text"])
)
new_note = {
"drawing_id": str(uuid.uuid4()),
"x": int(note["x"]),
"y": int(note["y"]),
"z": int(note.get("z", 0)),
"rotation": int(note.get("rotation", 0)),
"svg": svg
}
new_topo["topology"]["drawings"].append(new_note)
# Images
for image in topo.get("images", []):
img_path = image["path"]
# Absolute image paths are rewritten to project-specific images
if os.path.isabs(img_path):
try:
os.makedirs(os.path.join(topo_dir, "images"), exist_ok=True)
shutil.copy(img_path, os.path.join(topo_dir, "images", os.path.basename(img_path)))
except OSError:
pass
new_image = {
"drawing_id": str(uuid.uuid4()),
"x": int(image["x"]),
"y": int(image["y"]),
"z": int(image.get("z", 0)),
"rotation": int(image.get("rotation", 0)),
"svg": os.path.basename(img_path)
}
new_topo["topology"]["drawings"].append(new_image)
# Rectangles
for rectangle in topo.get("rectangles", []):
svg = '<svg height="{height}" width="{width}"><rect fill="{fill}" fill-opacity="1.0" height="{height}" width="{width}" {border_style} /></svg>'.format(
height=int(rectangle["height"]),
width=int(rectangle["width"]),
fill=rectangle.get("color", "#ffffff"),
border_style=_convert_border_style(rectangle)
)
new_rectangle = {
"drawing_id": str(uuid.uuid4()),
"x": int(rectangle["x"]),
"y": int(rectangle["y"]),
"z": int(rectangle.get("z", 0)),
"rotation": int(rectangle.get("rotation", 0)),
"svg": svg
}
new_topo["topology"]["drawings"].append(new_rectangle)
# Convert instructions.txt to README.txt
instructions_path = os.path.join(topo_dir, "instructions.txt")
readme_path = os.path.join(topo_dir, "README.txt")
if os.path.exists(instructions_path) and not os.path.exists(readme_path):
shutil.move(instructions_path, readme_path)
return new_topo
def _convert_border_style(element):
QT_DASH_TO_SVG = {
2: "25, 25",
3: "5, 25",
4: "5, 25, 25",
5: "25, 25, 5, 25, 5"
}
border_style = int(element.get("border_style", 0))
style = ""
if border_style == 1: # No border
return ""
elif border_style == 0:
pass # Solid line
else:
style += 'stroke-dasharray="{}" '.format(QT_DASH_TO_SVG[border_style])
style += 'stroke="{stroke}" stroke-width="{stroke_width}"'.format(
stroke=element.get("border_color", "#000000"),
stroke_width=element.get("border_width", 2)
)
return style
def _convert_label(label):
"""
Convert a label from 1.X to the new format
"""
style = qt_font_to_style(label.get("font"), label.get("color"))
return {
"text": html.escape(label["text"]),
"rotation": 0,
"style": style,
"x": int(label["x"]),
"y": int(label["y"])
}
def _create_cloud(node, old_node, icon):
node["node_type"] = "cloud"
node["symbol"] = icon
node["console_type"] = None
node["console"] = None
del old_node["properties"]["nios"]
ports = []
keep_ports = []
for old_port in old_node.get("ports", []):
if old_port["name"].startswith("nio_gen_eth"):
port_type = "ethernet"
elif old_port["name"].startswith("nio_gen_linux"):
port_type = "ethernet"
elif old_port["name"].startswith("nio_tap"):
port_type = "tap"
elif old_port["name"].startswith("nio_udp"):
port_type = "udp"
elif old_port["name"].startswith("nio_nat"):
continue
else:
raise aiohttp.web.HTTPConflict(text="The conversion of cloud with {} is not supported".format(old_port["name"]))
if port_type == "udp":
try:
_, lport, rhost, rport = old_port["name"].split(":")
except ValueError:
raise aiohttp.web.HTTPConflict(text="UDP tunnel using IPV6 is not supported in cloud")
port = {
"name": "UDP tunnel {}".format(len(ports) + 1),
"port_number": len(ports) + 1,
"type": port_type,
"lport": int(lport),
"rhost": rhost,
"rport": int(rport)
}
else:
port = {
"interface": old_port["name"].split(":")[1],
"name": old_port["name"].split(":")[1],
"port_number": len(ports) + 1,
"type": port_type
}
keep_ports.append(old_port)
ports.append(port)
node["properties"]["ports_mapping"] = ports
node["properties"]["interfaces"] = []
return keep_ports
def _convert_snapshots(topo_dir):
"""
Convert 1.x snapshot to the new format
"""
old_snapshots_dir = os.path.join(topo_dir, "project-files", "snapshots")
if os.path.exists(old_snapshots_dir):
new_snapshots_dir = os.path.join(topo_dir, "snapshots")
os.makedirs(new_snapshots_dir, exist_ok=True)
for snapshot in os.listdir(old_snapshots_dir):
snapshot_dir = os.path.join(old_snapshots_dir, snapshot)
if os.path.isdir(snapshot_dir):
is_gns3_topo = False
# In the .gns3project file format the .gns3 file should be named project.gns3
for file in os.listdir(snapshot_dir):
if file.endswith(".gns3"):
shutil.move(os.path.join(snapshot_dir, file), os.path.join(snapshot_dir, "project.gns3"))
is_gns3_topo = True
if is_gns3_topo:
snapshot_arc = os.path.join(new_snapshots_dir, snapshot + ".gns3project")
with zipfile.ZipFile(snapshot_arc, 'w', allowZip64=True) as myzip:
for root, dirs, files in os.walk(snapshot_dir):
for file in files:
myzip.write(os.path.join(root, file), os.path.relpath(os.path.join(root, file), snapshot_dir), compress_type=zipfile.ZIP_DEFLATED)
shutil.rmtree(old_snapshots_dir)
def _convert_qemu_node(node, old_node):
"""
Convert qemu node from 1.X to 2.0
"""
# In 2.0 the internet VM is replaced by the NAT node
if old_node.get("properties", {}).get("hda_disk_image_md5sum") == "8ebc5a6ec53a1c05b7aa101b5ceefe31":
node["console"] = None
node["console_type"] = None
node["node_type"] = "nat"
del old_node["properties"]
node["properties"] = {
"ports": [
{
"interface": "eth1",
"name": "nat0",
"port_number": 0,
"type": "ethernet"
}
]
}
if node["symbol"] is None:
node["symbol"] = ":/symbols/cloud.svg"
return node
node["node_type"] = "qemu"
if node["symbol"] is None:
node["symbol"] = ":/symbols/qemu_guest.svg"
return node
|
cts2/rf2service
|
refs/heads/master
|
server/converters/tojson.py
|
1
|
# -*- coding: utf-8 -*-
# Copyright (c) 2013, Mayo Clinic
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# Neither the name of the <ORGANIZATION> nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
# OF THE POSSIBILITY OF SUCH DAMAGE.
from server.converters.toxml import as_xml
from server.config import ServiceSettings
from XSLTGateway.XSLTGateway import XSLTGateway
gw = XSLTGateway(ServiceSettings.settings.gatewayport)
def as_json(rval, ns=None, **kwargs):
""" Convert an XML rendering to JSON using an external py4j xml to json conversion package """
rval, mimetype = as_xml(rval, ns, **kwargs)
if mimetype.startswith('application/xml;'):
json = gw.toJSON(rval)
if json:
rval = json
mimetype = 'application/json;charset=UTF-8'
return rval, mimetype
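# Minimal usage sketch (assumption, not part of the original module): any
# object that as_xml() can serialize can be passed straight through.
def _example_as_json(record):
    payload, mimetype = as_json(record)
    assert mimetype.startswith('application/')
    return payload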
|
dustinblake/polactions
|
refs/heads/master
|
Support/googlecode_upload.py
|
2
|
#!/usr/bin/env python
#
# Copyright 2006, 2007 Google Inc. All Rights Reserved.
# Author: danderson@google.com (David Anderson)
#
# Script for uploading files to a Google Code project.
#
# This is intended to be both a useful script for people who want to
# streamline project uploads and a reference implementation for
# uploading files to Google Code projects.
#
# To upload a file to Google Code, you need to provide a path to the
# file on your local machine, a small summary of what the file is, a
# project name, and a valid account that is a member or owner of that
# project. You can optionally provide a list of labels that apply to
# the file. The file will be uploaded under the same name that it has
# in your local filesystem (that is, the "basename" or last path
# component). Run the script with '--help' to get the exact syntax
# and available options.
#
# Note that the upload script requests that you enter your
# googlecode.com password. This is NOT your Gmail account password!
# This is the password you use on googlecode.com for committing to
# Subversion and uploading files. You can find your password by going
# to http://code.google.com/hosting/settings when logged in with your
# Gmail account. If you have already committed to your project's
# Subversion repository, the script will automatically retrieve your
# credentials from there (unless disabled, see the output of '--help'
# for details).
#
# If you are looking at this script as a reference for implementing
# your own Google Code file uploader, then you should take a look at
# the upload() function, which is the meat of the uploader. You
# basically need to build a multipart/form-data POST request with the
# right fields and send it to https://PROJECT.googlecode.com/files .
# Authenticate the request using HTTP Basic authentication, as is
# shown below.
#
# Licensed under the terms of the Apache Software License 2.0:
# http://www.apache.org/licenses/LICENSE-2.0
#
# Questions, comments, feature requests and patches are most welcome.
# Please direct all of these to the Google Code users group:
# http://groups.google.com/group/google-code-hosting
"""Google Code file uploader script.
"""
__author__ = 'danderson@google.com (David Anderson)'
import httplib
import os.path
import optparse
import getpass
import base64
import sys
def get_svn_config_dir():
"""Return user's Subversion configuration directory."""
try:
from win32com.shell.shell import SHGetFolderPath
import win32com.shell.shellcon
except ImportError:
# If we can't import the win32api, just use ~; this is right on unix, and
# returns not entirely unreasonable results on Windows.
return os.path.expanduser('~/.subversion')
# We're on Windows with win32api; use APPDATA.
return os.path.join(SHGetFolderPath(0, win32com.shell.shellcon.CSIDL_APPDATA,
0, 0).encode('utf-8'),
'Subversion')
def get_svn_auth(project_name, config_dir):
"""Return (username, password) for project_name in config_dir.
This function is not working right now: svn_config_read_auth_data's 1st argument
should be an apr_hash_t **, which requires the ctypes module. It is possible to
implement it. However, there is already a Summer of Code project working on
ctypes_python_binding, so we don't have to waste our energy. The workaround
for now is to add a password option to this script.
"""
# Default to returning nothing.
result = (None, None)
try:
from svn.core import SVN_AUTH_CRED_SIMPLE, svn_config_read_auth_data
from svn.core import SubversionException
import csvn
# We will fail here, so this function returns (None, None) regardless
except ImportError:
return result
realm = ('<https://%s.googlecode.com:443> Google Code Subversion Repository'
% project_name)
pool = Pool()
creds_hash = apr_hash_make(pool)
# auth may be none even if no exception is raised, e.g. if config_dir does
# not exist, or exists but has no entry for realm.
try:
auth = svn_config_read_auth_data(creds_hash, SVN_AUTH_CRED_SIMPLE, realm,
config_dir, pool)
except SubversionException:
auth = None
if auth is not None:
try:
result = (auth['username'], auth['password'])
except KeyError:
# Missing the keys, so return nothing.
pass
return result
def upload(file, project_name, user_name, password, summary, labels=None):
"""Upload a file to a Google Code project's file server.
Args:
file: The local path to the file.
project_name: The name of your project on Google Code.
user_name: Your Google account name.
password: The googlecode.com password for your account.
Note that this is NOT your global Google Account password!
summary: A small description for the file.
labels: an optional list of label strings with which to tag the file.
Returns: a tuple:
http_status: 201 if the upload succeeded, something else if an
error occurred.
http_reason: The human-readable string associated with http_status
file_url: If the upload succeeded, the URL of the file on Google
Code, None otherwise.
"""
# The login is the user part of user@gmail.com. If the login provided
# is in the full user@domain form, strip it down.
if user_name.endswith('@gmail.com'):
user_name = user_name[:user_name.index('@gmail.com')]
form_fields = [('summary', summary)]
if labels is not None:
form_fields.extend([('label', l.strip()) for l in labels])
content_type, body = encode_upload_request(form_fields, file)
upload_host = '%s.googlecode.com' % project_name
upload_uri = '/files'
auth_token = base64.b64encode('%s:%s'% (user_name, password))
headers = {
'Authorization': 'Basic %s' % auth_token,
'User-Agent': 'Googlecode.com uploader v0.9.4',
'Content-Type': content_type,
}
server = httplib.HTTPSConnection(upload_host)
server.request('POST', upload_uri, body, headers)
resp = server.getresponse()
server.close()
if resp.status == 201:
location = resp.getheader('Location', None)
else:
location = None
return resp.status, resp.reason, location
def encode_upload_request(fields, file_path):
"""Encode the given fields and file into a multipart form body.
fields is a sequence of (name, value) pairs. file_path is the path of
the file to upload. The file will be uploaded to Google Code with
the same file name.
Returns: (content_type, body) ready for httplib.HTTP instance
"""
BOUNDARY = '----------Googlecode_boundary_reindeer_flotilla'
CRLF = '\r\n'
body = []
# Add the metadata about the upload first
for key, value in fields:
body.extend(
['--' + BOUNDARY,
'Content-Disposition: form-data; name="%s"' % key,
'',
value,
])
# Now add the file itself
file_name = os.path.basename(file_path)
f = open(file_path, 'rb')
file_content = f.read()
f.close()
body.extend(
['--' + BOUNDARY,
'Content-Disposition: form-data; name="filename"; filename="%s"'
% file_name,
# The upload server determines the mime-type, no need to set it.
'Content-Type: application/octet-stream',
'',
file_content,
])
# Finalize the form body
body.extend(['--' + BOUNDARY + '--', ''])
return 'multipart/form-data; boundary=%s' % BOUNDARY, CRLF.join(body)
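# Illustrative sketch (not part of the original script): peek at the
# multipart body produced by encode_upload_request(). The field values and
# the temporary file are made up for the example.
def _example_show_request_body():
    import tempfile
    tmp = tempfile.NamedTemporaryFile(suffix='.txt', delete=False)
    tmp.write('dummy payload')
    tmp.close()
    content_type, body = encode_upload_request(
        [('summary', 'test upload')], tmp.name)
    print content_type  # multipart/form-data; boundary=...
    print body[:60]     # starts with the '--' boundary marker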
def upload_find_auth(file_path, project_name, summary, labels=None,
config_dir=None, user_name=None, user_password=None, tries=3):
"""Find credentials and upload a file to a Google Code project's file server.
file_path, project_name, summary, and labels are passed as-is to upload.
If config_dir is None, try get_svn_config_dir(); if it is 'none', skip
trying the Subversion configuration entirely. If user_name is not None, use
it for the first attempt; prompt for subsequent attempts.
Args:
file_path: The local path to the file.
project_name: The name of your project on Google Code.
summary: A small description for the file.
labels: an optional list of label strings with which to tag the file.
config_dir: Path to Subversion configuration directory, 'none', or None.
user_name: Your Google account name.
user_password: Your Google account password.
tries: How many attempts to make.
"""
if config_dir != 'none':
# Try to load username/password from svn config for first try.
if config_dir is None:
config_dir = get_svn_config_dir()
(svn_username, svn_password) = get_svn_auth(project_name, config_dir)
if svn_username is not None and svn_password is not None:
(user_name, user_password) = (svn_username, svn_password)
while tries > 0:
if user_name is None:
# Read username if not specified or loaded from svn config, or on
# subsequent tries.
sys.stdout.write('Please enter your googlecode.com username: ')
sys.stdout.flush()
user_name = sys.stdin.readline().rstrip()
if user_password is None:
# Read password if not loaded from svn config, or on subsequent tries.
print 'Please enter your googlecode.com password.'
print '** Note that this is NOT your Gmail account password! **'
print 'It is the password you use to access Subversion repositories,'
print 'and can be found here: http://code.google.com/hosting/settings'
user_password = getpass.getpass()
status, reason, url = upload(file_path, project_name, user_name, user_password,
summary, labels)
# Returns 403 Forbidden instead of 401 Unauthorized for bad
# credentials as of 2007-07-17.
if status in [httplib.FORBIDDEN, httplib.UNAUTHORIZED]:
# Reset for another try.
user_name = user_password = None
tries = tries - 1
else:
# We're done.
break
return status, reason, url
def main():
parser = optparse.OptionParser(usage='googlecode-upload.py -s SUMMARY '
'-P PROJECT [options] FILE')
parser.add_option('--config-dir', dest='config_dir', metavar='DIR',
help='read svn auth data from DIR'
' ("none" means not to use svn auth data)')
parser.add_option('-s', '--summary', dest='summary',
help='Short description of the file')
parser.add_option('-p', '--project', dest='project',
help='Google Code project name')
parser.add_option('-u', '--user', dest='user',
help='Your Google Code username')
parser.add_option('-P', '--password', dest='password',
help='Your Google Code password')
parser.add_option('-l', '--labels', dest='labels',
help='An optional list of labels to attach to the file')
options, args = parser.parse_args()
if not options.summary:
parser.error('File summary is missing.')
elif not options.project:
parser.error('Project name is missing.')
elif len(args) < 1:
parser.error('File to upload not provided.')
elif len(args) > 1:
parser.error('Only one file may be specified.')
file_path = args[0]
if options.labels:
labels = options.labels.split(',')
else:
labels = None
status, reason, url = upload_find_auth(file_path, options.project,
options.summary, labels,
options.config_dir, options.user,
options.password)
if url:
print 'The file was uploaded successfully.'
print 'URL: %s' % url
return 0
else:
print 'An error occurred. Your file was not uploaded.'
print 'Google Code upload server said: %s (%s)' % (reason, status)
return 1
if __name__ == '__main__':
sys.exit(main())
|
doheekim/chuizonetest
|
refs/heads/master
|
lib/sqlalchemy/event/legacy.py
|
33
|
# event/legacy.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Routines to handle adaption of legacy call signatures,
generation of deprecation notes and docstrings.
"""
from .. import util
def _legacy_signature(since, argnames, converter=None):
def leg(fn):
if not hasattr(fn, '_legacy_signatures'):
fn._legacy_signatures = []
fn._legacy_signatures.append((since, argnames, converter))
return fn
return leg
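# Hedged illustration (not part of the original module): how an event
# definition might record a pre-0.9 calling convention. The event name,
# argument names, and version are invented for this sketch.
@_legacy_signature("0.9", ["conn", "result"])
def _example_event(conn, clauseelement, multiparams, params, result):
    """Hypothetical canonical signature; the decorator records that listeners
    written against the old (conn, result) form must be adapted."""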
def _wrap_fn_for_legacy(dispatch_descriptor, fn, argspec):
for since, argnames, conv in dispatch_descriptor.legacy_signatures:
if argnames[-1] == "**kw":
has_kw = True
argnames = argnames[0:-1]
else:
has_kw = False
if len(argnames) == len(argspec.args) \
and has_kw is bool(argspec.keywords):
if conv:
assert not has_kw
def wrap_leg(*args):
return fn(*conv(*args))
else:
def wrap_leg(*args, **kw):
argdict = dict(zip(dispatch_descriptor.arg_names, args))
args = [argdict[name] for name in argnames]
if has_kw:
return fn(*args, **kw)
else:
return fn(*args)
return wrap_leg
else:
return fn
def _indent(text, indent):
return "\n".join(
indent + line
for line in text.split("\n")
)
def _standard_listen_example(dispatch_descriptor, sample_target, fn):
example_kw_arg = _indent(
"\n".join(
"%(arg)s = kw['%(arg)s']" % {"arg": arg}
for arg in dispatch_descriptor.arg_names[0:2]
),
" ")
if dispatch_descriptor.legacy_signatures:
current_since = max(since for since, args, conv
in dispatch_descriptor.legacy_signatures)
else:
current_since = None
text = (
"from sqlalchemy import event\n\n"
"# standard decorator style%(current_since)s\n"
"@event.listens_for(%(sample_target)s, '%(event_name)s')\n"
"def receive_%(event_name)s("
"%(named_event_arguments)s%(has_kw_arguments)s):\n"
" \"listen for the '%(event_name)s' event\"\n"
"\n # ... (event handling logic) ...\n"
)
if len(dispatch_descriptor.arg_names) > 3:
text += (
"\n# named argument style (new in 0.9)\n"
"@event.listens_for("
"%(sample_target)s, '%(event_name)s', named=True)\n"
"def receive_%(event_name)s(**kw):\n"
" \"listen for the '%(event_name)s' event\"\n"
"%(example_kw_arg)s\n"
"\n # ... (event handling logic) ...\n"
)
text %= {
"current_since": " (arguments as of %s)" %
current_since if current_since else "",
"event_name": fn.__name__,
"has_kw_arguments": ", **kw" if dispatch_descriptor.has_kw else "",
"named_event_arguments": ", ".join(dispatch_descriptor.arg_names),
"example_kw_arg": example_kw_arg,
"sample_target": sample_target
}
return text
def _legacy_listen_examples(dispatch_descriptor, sample_target, fn):
text = ""
for since, args, conv in dispatch_descriptor.legacy_signatures:
text += (
"\n# legacy calling style (pre-%(since)s)\n"
"@event.listens_for(%(sample_target)s, '%(event_name)s')\n"
"def receive_%(event_name)s("
"%(named_event_arguments)s%(has_kw_arguments)s):\n"
" \"listen for the '%(event_name)s' event\"\n"
"\n # ... (event handling logic) ...\n" % {
"since": since,
"event_name": fn.__name__,
"has_kw_arguments": " **kw"
if dispatch_descriptor.has_kw else "",
"named_event_arguments": ", ".join(args),
"sample_target": sample_target
}
)
return text
def _version_signature_changes(dispatch_descriptor):
since, args, conv = dispatch_descriptor.legacy_signatures[0]
return (
"\n.. versionchanged:: %(since)s\n"
" The ``%(event_name)s`` event now accepts the \n"
" arguments ``%(named_event_arguments)s%(has_kw_arguments)s``.\n"
" Listener functions which accept the previous argument \n"
" signature(s) listed above will be automatically \n"
" adapted to the new signature." % {
"since": since,
"event_name": dispatch_descriptor.__name__,
"named_event_arguments": ", ".join(dispatch_descriptor.arg_names),
"has_kw_arguments": ", **kw" if dispatch_descriptor.has_kw else ""
}
)
def _augment_fn_docs(dispatch_descriptor, parent_dispatch_cls, fn):
header = ".. container:: event_signatures\n\n"\
" Example argument forms::\n"\
"\n"
sample_target = getattr(parent_dispatch_cls, "_target_class_doc", "obj")
text = (
header +
_indent(
_standard_listen_example(
dispatch_descriptor, sample_target, fn),
" " * 8)
)
if dispatch_descriptor.legacy_signatures:
text += _indent(
_legacy_listen_examples(
dispatch_descriptor, sample_target, fn),
" " * 8)
text += _version_signature_changes(dispatch_descriptor)
return util.inject_docstring_text(fn.__doc__,
text,
1
)
|
fayf/pyload
|
refs/heads/stable
|
module/plugins/hoster/CrockoCom.py
|
3
|
# -*- coding: utf-8 -*-
import re
import urlparse
from module.plugins.captcha.ReCaptcha import ReCaptcha
from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
class CrockoCom(SimpleHoster):
__name__ = "CrockoCom"
__type__ = "hoster"
__version__ = "0.21"
__status__ = "testing"
__pattern__ = r'http://(?:www\.)?(crocko|easy-share)\.com/\w+'
__config__ = [("use_premium", "bool", "Use premium account if available", True)]
__description__ = """Crocko hoster plugin"""
__license__ = "GPLv3"
__authors__ = [("zoidberg", "zoidberg@mujmail.cz")]
NAME_PATTERN = r'<span class="fz24">Download:\s*<strong>(?P<N>.*)'
SIZE_PATTERN = r'<span class="tip1"><span class="inner">(?P<S>[^<]+)</span></span>'
OFFLINE_PATTERN = r'<h1>Sorry,<br />the page you\'re looking for <br />isn\'t here.</h1>|File not found'
CAPTCHA_PATTERN = r"u='(/file_contents/captcha/\w+)';\s*w='(\d+)';"
FORM_PATTERN = r'<form method="post" action="(.+?)">(.*?)</form>'
FORM_INPUT_PATTERN = r'<input[^>]* name="?([^" ]+)"? value="?([^" ]+)"?.*?>'
NAME_REPLACEMENTS = [(r'<.*?>', '')]
def handle_free(self, pyfile):
if "You need Premium membership to download this file." in self.html:
self.fail(_("You need Premium membership to download this file"))
for _i in xrange(5):
m = re.search(self.CAPTCHA_PATTERN, self.html)
if m:
url = urlparse.urljoin("http://crocko.com", m.group(1))
self.wait(m.group(2))
self.html = self.load(url)
else:
break
m = re.search(self.FORM_PATTERN, self.html, re.S)
if m is None:
self.error(_("FORM_PATTERN not found"))
action, form = m.groups()
inputs = dict(re.findall(self.FORM_INPUT_PATTERN, form))
recaptcha = ReCaptcha(self)
for _i in xrange(5):
inputs['recaptcha_response_field'], inputs['recaptcha_challenge_field'] = recaptcha.challenge()
self.download(action, post=inputs)
if self.check_download({'captcha': recaptcha.KEY_AJAX_PATTERN}):
self.captcha.invalid()
else:
break
else:
self.fail(_("No valid captcha solution received"))
getInfo = create_getInfo(CrockoCom)
|
jchevin/MissionPlanner-master
|
refs/heads/master
|
packages/IronPython.StdLib.2.7.4/content/Lib/timeit.py
|
76
|
#! /usr/bin/env python
"""Tool for measuring execution time of small code snippets.
This module avoids a number of common traps for measuring execution
times. See also Tim Peters' introduction to the Algorithms chapter in
the Python Cookbook, published by O'Reilly.
Library usage: see the Timer class.
Command line usage:
python timeit.py [-n N] [-r N] [-s S] [-t] [-c] [-h] [--] [statement]
Options:
-n/--number N: how many times to execute 'statement' (default: see below)
-r/--repeat N: how many times to repeat the timer (default 3)
-s/--setup S: statement to be executed once initially (default 'pass')
-t/--time: use time.time() (default on Unix)
-c/--clock: use time.clock() (default on Windows)
-v/--verbose: print raw timing results; repeat for more digits precision
-h/--help: print this usage message and exit
--: separate options from statement, use when statement starts with -
statement: statement to be timed (default 'pass')
A multi-line statement may be given by specifying each line as a
separate argument; indented lines are possible by enclosing an
argument in quotes and using leading spaces. Multiple -s options are
treated similarly.
If -n is not given, a suitable number of loops is calculated by trying
successive powers of 10 until the total time is at least 0.2 seconds.
The difference in default timer function is because on Windows,
clock() has microsecond granularity but time()'s granularity is 1/60th
of a second; on Unix, clock() has 1/100th of a second granularity and
time() is much more precise. On either platform, the default timer
functions measure wall clock time, not the CPU time. This means that
other processes running on the same computer may interfere with the
timing. The best thing to do when accurate timing is necessary is to
repeat the timing a few times and use the best time. The -r option is
good for this; the default of 3 repetitions is probably enough in most
cases. On Unix, you can use clock() to measure CPU time.
Note: there is a certain baseline overhead associated with executing a
pass statement. The code here doesn't try to hide it, but you should
be aware of it. The baseline overhead can be measured by invoking the
program without arguments.
The baseline overhead differs between Python versions! Also, to
fairly compare older Python versions to Python 2.3, you may want to
use python -O for the older versions to avoid timing SET_LINENO
instructions.
"""
import gc
import sys
import time
try:
import itertools
except ImportError:
# Must be an older Python version (see timeit() below)
itertools = None
__all__ = ["Timer"]
dummy_src_name = "<timeit-src>"
default_number = 1000000
default_repeat = 3
if sys.platform == "win32":
# On Windows, the best timer is time.clock()
default_timer = time.clock
else:
# On most other platforms the best timer is time.time()
default_timer = time.time
# Don't change the indentation of the template; the reindent() calls
# in Timer.__init__() depend on setup being indented 4 spaces and stmt
# being indented 8 spaces.
template = """
def inner(_it, _timer):
%(setup)s
_t0 = _timer()
for _i in _it:
%(stmt)s
_t1 = _timer()
return _t1 - _t0
"""
def reindent(src, indent):
"""Helper to reindent a multi-line statement."""
return src.replace("\n", "\n" + " "*indent)
def _template_func(setup, func):
"""Create a timer function. Used if the "statement" is a callable."""
def inner(_it, _timer, _func=func):
setup()
_t0 = _timer()
for _i in _it:
_func()
_t1 = _timer()
return _t1 - _t0
return inner
class Timer:
"""Class for timing execution speed of small code snippets.
The constructor takes a statement to be timed, an additional
statement used for setup, and a timer function. Both statements
default to 'pass'; the timer function is platform-dependent (see
module doc string).
To measure the execution time of the first statement, use the
timeit() method. The repeat() method is a convenience to call
timeit() multiple times and return a list of results.
The statements may contain newlines, as long as they don't contain
multi-line string literals.
"""
def __init__(self, stmt="pass", setup="pass", timer=default_timer):
"""Constructor. See class doc string."""
self.timer = timer
ns = {}
if isinstance(stmt, basestring):
stmt = reindent(stmt, 8)
if isinstance(setup, basestring):
setup = reindent(setup, 4)
src = template % {'stmt': stmt, 'setup': setup}
elif hasattr(setup, '__call__'):
src = template % {'stmt': stmt, 'setup': '_setup()'}
ns['_setup'] = setup
else:
raise ValueError("setup is neither a string nor callable")
self.src = src # Save for traceback display
code = compile(src, dummy_src_name, "exec")
exec code in globals(), ns
self.inner = ns["inner"]
elif hasattr(stmt, '__call__'):
self.src = None
if isinstance(setup, basestring):
_setup = setup
def setup():
exec _setup in globals(), ns
elif not hasattr(setup, '__call__'):
raise ValueError("setup is neither a string nor callable")
self.inner = _template_func(setup, stmt)
else:
raise ValueError("stmt is neither a string nor callable")
def print_exc(self, file=None):
"""Helper to print a traceback from the timed code.
Typical use:
t = Timer(...) # outside the try/except
try:
t.timeit(...) # or t.repeat(...)
except:
t.print_exc()
The advantage over the standard traceback is that source lines
in the compiled template will be displayed.
The optional file argument directs where the traceback is
sent; it defaults to sys.stderr.
"""
import linecache, traceback
if self.src is not None:
linecache.cache[dummy_src_name] = (len(self.src),
None,
self.src.split("\n"),
dummy_src_name)
# else the source is already stored somewhere else
traceback.print_exc(file=file)
def timeit(self, number=default_number):
"""Time 'number' executions of the main statement.
To be precise, this executes the setup statement once, and
then returns the time it takes to execute the main statement
a number of times, as a float measured in seconds. The
argument is the number of times through the loop, defaulting
to one million. The main statement, the setup statement and
the timer function to be used are passed to the constructor.
"""
if itertools:
it = itertools.repeat(None, number)
else:
it = [None] * number
gcold = gc.isenabled()
gc.disable()
timing = self.inner(it, self.timer)
if gcold:
gc.enable()
return timing
def repeat(self, repeat=default_repeat, number=default_number):
"""Call timeit() a few times.
        This is a convenience function that calls timeit()
        repeatedly, returning a list of results.  The first argument
        specifies how many times to call timeit(), defaulting to 3;
        the second argument specifies the 'number' argument of
        timeit(), defaulting to one million.
Note: it's tempting to calculate mean and standard deviation
from the result vector and report these. However, this is not
very useful. In a typical case, the lowest value gives a
lower bound for how fast your machine can run the given code
snippet; higher values in the result vector are typically not
caused by variability in Python's speed, but by other
processes interfering with your timing accuracy. So the min()
of the result is probably the only number you should be
interested in. After that, you should look at the entire
vector and apply common sense rather than statistics.
"""
r = []
for i in range(repeat):
t = self.timeit(number)
r.append(t)
return r
def timeit(stmt="pass", setup="pass", timer=default_timer,
number=default_number):
"""Convenience function to create Timer object and call timeit method."""
return Timer(stmt, setup, timer).timeit(number)
def repeat(stmt="pass", setup="pass", timer=default_timer,
repeat=default_repeat, number=default_number):
"""Convenience function to create Timer object and call repeat method."""
return Timer(stmt, setup, timer).repeat(repeat, number)
def main(args=None):
"""Main program, used when run as a script.
The optional argument specifies the command line to be parsed,
defaulting to sys.argv[1:].
The return value is an exit code to be passed to sys.exit(); it
may be None to indicate success.
When an exception happens during timing, a traceback is printed to
stderr and the return value is 1. Exceptions at other times
(including the template compilation) are not caught.
"""
if args is None:
args = sys.argv[1:]
import getopt
try:
opts, args = getopt.getopt(args, "n:s:r:tcvh",
["number=", "setup=", "repeat=",
"time", "clock", "verbose", "help"])
except getopt.error, err:
print err
print "use -h/--help for command line help"
return 2
timer = default_timer
stmt = "\n".join(args) or "pass"
number = 0 # auto-determine
setup = []
repeat = default_repeat
verbose = 0
precision = 3
for o, a in opts:
if o in ("-n", "--number"):
number = int(a)
if o in ("-s", "--setup"):
setup.append(a)
if o in ("-r", "--repeat"):
repeat = int(a)
if repeat <= 0:
repeat = 1
if o in ("-t", "--time"):
timer = time.time
if o in ("-c", "--clock"):
timer = time.clock
if o in ("-v", "--verbose"):
if verbose:
precision += 1
verbose += 1
if o in ("-h", "--help"):
print __doc__,
return 0
setup = "\n".join(setup) or "pass"
# Include the current directory, so that local imports work (sys.path
# contains the directory of this script, rather than the current
# directory)
import os
sys.path.insert(0, os.curdir)
t = Timer(stmt, setup, timer)
if number == 0:
# determine number so that 0.2 <= total time < 2.0
for i in range(1, 10):
number = 10**i
try:
x = t.timeit(number)
except:
t.print_exc()
return 1
if verbose:
print "%d loops -> %.*g secs" % (number, precision, x)
if x >= 0.2:
break
try:
r = t.repeat(repeat, number)
except:
t.print_exc()
return 1
best = min(r)
if verbose:
print "raw times:", " ".join(["%.*g" % (precision, x) for x in r])
print "%d loops," % number,
usec = best * 1e6 / number
if usec < 1000:
print "best of %d: %.*g usec per loop" % (repeat, precision, usec)
else:
msec = usec / 1000
if msec < 1000:
print "best of %d: %.*g msec per loop" % (repeat, precision, msec)
else:
sec = msec / 1000
print "best of %d: %.*g sec per loop" % (repeat, precision, sec)
return None
if __name__ == "__main__":
sys.exit(main())
|
parkera/swift
|
refs/heads/master
|
benchmark/scripts/perf_test_driver/perf_test_driver.py
|
22
|
#!/usr/bin/env python
# ===--- perf_test_driver.py ---------------------------------------------===//
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ===---------------------------------------------------------------------===//
from __future__ import print_function
import functools
import multiprocessing
import os
import re
import subprocess
BENCHMARK_OUTPUT_RE = re.compile('([^,]+),')
class Result(object):
def __init__(self, name, status, output, xfail_list):
self.name = name
self.status = status
self.output = output
self.is_xfailed = any(
(re.match(x, self.name) is not None for x in xfail_list))
def is_failure(self):
return self.get_result() in ['FAIL', 'XPASS']
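    # get_result() below maps (is_xfailed, status) onto four outcomes, where
    # a non-zero status means the run failed: xfailed & failing -> 'XFAIL',
    # xfailed & passing -> 'XPASS', failing -> 'FAIL', passing -> 'PASS'.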
def get_result(self):
if self.is_xfailed:
if self.status:
return 'XFAIL'
return 'XPASS'
if self.status:
return 'FAIL'
return 'PASS'
def get_name(self):
return self.name
def merge_in_extra_data(self, d):
"""Rather than modifying the extra data dict, return it as a no-op"""
return d
def print_data(self, max_test_len):
fmt = '{:<%d}{:}' % (max_test_len + 5)
print(fmt.format(self.get_name(), self.get_result()))
def run_with_timeout(func, args):
# We timeout after 10 minutes.
timeout_seconds = 10 * 60
# We just use this to create a timeout since we use an older python. Once
# we update to use python >= 3.3, use the timeout API on communicate
# instead.
import multiprocessing.dummy
fakeThreadPool = multiprocessing.dummy.Pool(1)
try:
result = fakeThreadPool.apply_async(func, args=args)
return result.get(timeout_seconds)
except multiprocessing.TimeoutError:
fakeThreadPool.terminate()
raise RuntimeError("Child process aborted due to timeout. "
"Timeout: %s seconds" % timeout_seconds)
def _unwrap_self(args):
return type(args[0]).process_input(*args)
BenchmarkDriver_OptLevels = ['Onone', 'O', 'Osize']
class BenchmarkDriver(object):
def __init__(self, binary_dir, xfail_list, enable_parallel=False,
opt_levels=BenchmarkDriver_OptLevels):
self.targets = [(os.path.join(binary_dir, 'Benchmark_%s' % o), o)
for o in opt_levels]
self.xfail_list = xfail_list
self.enable_parallel = enable_parallel
self.data = None
def print_data_header(self, max_test_len):
fmt = '{:<%d}{:}' % (max_test_len + 5)
print(fmt.format('Name', 'Result'))
    def prepare_input(self, name):
raise RuntimeError("Abstract method")
def process_input(self, data):
raise RuntimeError("Abstract method")
def run_for_opt_level(self, binary, opt_level, test_filter):
print("testing driver at path: %s" % binary)
names = []
for l in subprocess.check_output([binary, "--list"]).split("\n")[1:]:
m = BENCHMARK_OUTPUT_RE.match(l)
if m is None:
continue
names.append(m.group(1))
if test_filter:
regex = re.compile(test_filter)
names = [n for n in names if regex.match(n)]
def prepare_input_wrapper(name):
x = {'opt': opt_level, 'path': binary, 'test_name': name}
x.update(self.prepare_input(name))
return x
prepared_input = [prepare_input_wrapper(n) for n in names]
results = None
if self.enable_parallel:
p = multiprocessing.Pool()
z = zip([self] * len(prepared_input), prepared_input)
results = p.map_async(_unwrap_self, z).get(999999)
else:
results = map(self.process_input, prepared_input)
def reduce_results(acc, r):
acc['result'].append(r)
acc['has_failure'] = acc['has_failure'] or r.is_failure()
acc['max_test_len'] = max(acc['max_test_len'], len(r.get_name()))
acc['extra_data'] = r.merge_in_extra_data(acc['extra_data'])
return acc
return functools.reduce(reduce_results, results, {
'result': [],
'has_failure': False,
'max_test_len': 0,
'extra_data': {}
})
def print_data(self, data, max_test_len):
print("Results:")
self.print_data_header(max_test_len)
for d in data:
for r in d['result']:
r.print_data(max_test_len)
def run(self, test_filter=None):
self.data = [
self.run_for_opt_level(binary, opt_level, test_filter)
for binary, opt_level in self.targets]
max_test_len = functools.reduce(max,
[d['max_test_len'] for d in self.data])
has_failure = functools.reduce(max,
[d['has_failure'] for d in self.data])
self.print_data(self.data, max_test_len)
return not has_failure
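# Minimal subclass sketch (illustrative only; this concrete driver is not
# part of the file): implement the two abstract hooks used by
# run_for_opt_level() above.
#
#     class NullDriver(BenchmarkDriver):
#         def prepare_input(self, name):
#             return {}
#         def process_input(self, data):
#             return Result(data['test_name'], 0, '', self.xfail_list)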
|
grrr2/Flexget
|
refs/heads/master
|
flexget/plugins/output/pushover.py
|
2
|
from __future__ import unicode_literals, division, absolute_import
import logging
from requests.exceptions import RequestException
from flexget import plugin
from flexget.event import event
from flexget.utils import json
from flexget.utils.template import RenderError
from flexget.config_schema import one_or_more
log = logging.getLogger("pushover")
pushover_url = "https://api.pushover.net/1/messages.json"
class OutputPushover(object):
"""
Example::
pushover:
userkey: <USER_KEY> (can also be a list of userkeys)
apikey: <API_KEY>
[device: <DEVICE_STRING>] (default: (none))
[title: <MESSAGE_TITLE>] (default: "Download started" -- accepts Jinja2)
[message: <MESSAGE_BODY>] (default: "{{series_name}} {{series_id}}" -- accepts Jinja2)
[priority: <PRIORITY>] (default = 0 -- normal = 0, high = 1, silent = -1)
[url: <URL>] (default: "{{imdb_url}}" -- accepts Jinja2)
[urltitle: <URL_TITLE>] (default: (none) -- accepts Jinja2)
[sound: <SOUND>] (default: pushover default)
Configuration parameters are also supported from entries (eg. through set).
"""
default_message = "{% if series_name is defined %}{{tvdb_series_name|d(series_name)}} " \
"{{series_id}} {{tvdb_ep_name|d('')}}{% elif imdb_name is defined %}{{imdb_name}} "\
"{{imdb_year}}{% else %}{{title}}{% endif %}"
schema = {
'type': 'object',
'properties': {
'userkey': one_or_more({'type': 'string'}),
'apikey': {'type': 'string'},
'device': {'type': 'string', 'default': ''},
'title': {'type': 'string', 'default': "{{task}}"},
'message': {'type': 'string', 'default': default_message},
'priority': {'type': 'integer', 'default': 0},
'url': {'type': 'string', 'default': '{% if imdb_url is defined %}{{imdb_url}}{% endif %}'},
'urltitle': {'type': 'string', 'default': ''},
'sound': {'type': 'string', 'default': ''}
},
'required': ['userkey', 'apikey'],
'additionalProperties': False
}
# Run last to make sure other outputs are successful before sending notification
@plugin.priority(0)
def on_task_output(self, task, config):
# Support for multiple userkeys
userkeys = config["userkey"]
if not isinstance(userkeys, list):
userkeys = [userkeys]
# Set a bunch of local variables from the config
apikey = config["apikey"]
device = config["device"]
priority = config["priority"]
sound = config["sound"]
# Loop through the provided entries
for entry in task.accepted:
title = config["title"]
message = config["message"]
url = config["url"]
urltitle = config["urltitle"]
# Attempt to render the title field
try:
title = entry.render(title)
except RenderError as e:
log.warning("Problem rendering 'title': %s" % e)
title = "Download started"
# Attempt to render the message field
try:
message = entry.render(message)
except RenderError as e:
log.warning("Problem rendering 'message': %s" % e)
message = entry["title"]
# Attempt to render the url field
try:
url = entry.render(url)
except RenderError as e:
log.warning("Problem rendering 'url': %s" % e)
url = entry.get("imdb_url", "")
# Attempt to render the urltitle field
try:
urltitle = entry.render(urltitle)
except RenderError as e:
log.warning("Problem rendering 'urltitle': %s" % e)
urltitle = ""
for userkey in userkeys:
# Build the request
data = {"user": userkey, "token": apikey, "title": title,
"message": message, "url": url, "url_title": urltitle}
if device:
data["device"] = device
if priority:
data["priority"] = priority
if sound:
data["sound"] = sound
# Check for test mode
if task.options.test:
log.info("Test mode. Pushover notification would be:")
if device:
log.info(" Device: %s" % device)
else:
log.info(" Device: [broadcast]")
log.info(" Title: %s" % title)
log.info(" Message: %s" % message)
log.info(" URL: %s" % url)
log.info(" URL Title: %s" % urltitle)
log.info(" Priority: %d" % priority)
log.info(" userkey: %s" % userkey)
log.info(" apikey: %s" % apikey)
log.info(" sound: %s" % sound)
# Test mode. Skip remainder.
continue
# Make the request
try:
response = task.requests.post(pushover_url, data=data, raise_status=False)
except RequestException as e:
log.warning('Could not get response from Pushover: {}'.format(e))
return
# Check if it succeeded
request_status = response.status_code
# error codes and messages from Pushover API
if request_status == 200:
log.debug("Pushover notification sent")
elif request_status == 500:
log.debug("Pushover notification failed, Pushover API having issues")
# TODO: Implement retrying. API requests 5 seconds between retries.
elif request_status >= 400:
errors = json.loads(response.content)['errors']
log.error("Pushover API error: %s" % errors[0])
else:
log.error("Unknown error when sending Pushover notification")
@event('plugin.register')
def register_plugin():
plugin.register(OutputPushover, "pushover", api_ver=2)
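# Illustrative task configuration consumed by this plugin (keys follow the
# schema above; the task name and key values are placeholders):
#
#     tasks:
#       download-tv:
#         pushover:
#           userkey: <USER_KEY>
#           apikey: <API_KEY>
#           priority: 1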
|
karek314/bitcoin
|
refs/heads/master
|
qa/rpc-tests/getchaintips.py
|
140
|
#!/usr/bin/env python2
# Copyright (c) 2014 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Exercise the getchaintips API. We introduce a network split, work
# on chains of different lengths, and join the network together again.
# This gives us two chain tips; verify that getchaintips reports both.
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
class GetChainTipsTest (BitcoinTestFramework):
def run_test (self):
BitcoinTestFramework.run_test (self)
tips = self.nodes[0].getchaintips ()
assert_equal (len (tips), 1)
assert_equal (tips[0]['branchlen'], 0)
assert_equal (tips[0]['height'], 200)
assert_equal (tips[0]['status'], 'active')
# Split the network and build two chains of different lengths.
self.split_network ()
        self.nodes[0].generate(10)
        self.nodes[2].generate(20)
self.sync_all ()
tips = self.nodes[1].getchaintips ()
assert_equal (len (tips), 1)
shortTip = tips[0]
assert_equal (shortTip['branchlen'], 0)
assert_equal (shortTip['height'], 210)
assert_equal (tips[0]['status'], 'active')
tips = self.nodes[3].getchaintips ()
assert_equal (len (tips), 1)
longTip = tips[0]
assert_equal (longTip['branchlen'], 0)
assert_equal (longTip['height'], 220)
assert_equal (tips[0]['status'], 'active')
# Join the network halves and check that we now have two tips
# (at least at the nodes that previously had the short chain).
self.join_network ()
tips = self.nodes[0].getchaintips ()
assert_equal (len (tips), 2)
assert_equal (tips[0], longTip)
assert_equal (tips[1]['branchlen'], 10)
assert_equal (tips[1]['status'], 'valid-fork')
tips[1]['branchlen'] = 0
tips[1]['status'] = 'active'
assert_equal (tips[1], shortTip)
if __name__ == '__main__':
GetChainTipsTest ().main ()
|
rgreinho/molecule
|
refs/heads/master
|
molecule/driver/dockerdriver.py
|
1
|
# Copyright (c) 2015-2016 Cisco Systems, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
import collections
import io
import json
import sys
try:
import docker
except ImportError: # pragma: no cover
sys.exit('ERROR: Driver missing, install docker-py.')
from molecule import util
from molecule.driver import basedriver
class DockerDriver(basedriver.BaseDriver):
def __init__(self, molecule):
super(DockerDriver, self).__init__(molecule)
self._docker = docker.Client(
version='auto', **docker.utils.kwargs_from_env())
self._containers = self.molecule.config.config['docker']['containers']
self._provider = self._get_provider()
self._platform = self._get_platform()
self.image_tag = 'molecule_local/{}:{}'
if 'build_image' not in self.molecule.config.config['docker']:
self.molecule.config.config['docker']['build_image'] = True
@property
def name(self):
return 'docker'
@property
def instances(self):
created_containers = self._docker.containers(all=True)
created_container_names = [
container.get('Names')[0][1:].encode('utf-8')
for container in created_containers
]
for container in self._containers:
if container.get('name') in created_container_names:
container['created'] = True
else:
container['created'] = False
return self._containers
@property
def default_provider(self):
return self._provider
@property
def default_platform(self):
return self._platform
@property
def provider(self):
return self._provider
@property
def platform(self):
return self._platform
@platform.setter
def platform(self, val):
self._platform = val
@property
def valid_providers(self):
return [{'name': self.provider}]
@property
def valid_platforms(self):
return [{'name': self.platform}]
@property
def ssh_config_file(self):
return
@property
def ansible_connection_params(self):
return {'user': 'root', 'connection': 'docker'}
@property
def testinfra_args(self):
return {'connection': 'docker'}
@property
def serverspec_args(self):
return {}
def up(self, no_provision=True):
self.molecule.state.change_state('driver', self.name)
if self.molecule.config.config['docker']['build_image']:
self._build_ansible_compatible_image()
else:
self.image_tag = '{}:{}'
for container in self.instances:
privileged = container.get('privileged', False)
port_bindings = container.get('port_bindings', {})
volume_mounts = container.get('volume_mounts', [])
cap_add = container.get('cap_add', [])
cap_drop = container.get('cap_drop', [])
command = container.get('command', '')
environment = container.get('environment')
docker_host_config = self._docker.create_host_config(
privileged=privileged,
port_bindings=port_bindings,
binds=volume_mounts,
cap_add=cap_add,
cap_drop=cap_drop)
            if container['created'] is not True:
msg = ('Creating container {} '
'with base image {}:{}...').format(
container['name'], container['image'],
container['image_version'])
util.print_warn(msg)
container = self._docker.create_container(
image=self.image_tag.format(container['image'],
container['image_version']),
tty=True,
detach=False,
name=container['name'],
ports=port_bindings.keys(),
host_config=docker_host_config,
environment=environment,
command=command)
self._docker.start(container=container.get('Id'))
container['created'] = True
util.print_success('Container created.')
else:
self._docker.start(container['name'])
msg = 'Starting container {}...'.format(container['name'])
util.print_info(msg)
def destroy(self):
for container in self.instances:
            if container['created']:
msg = 'Stopping container {}...'.format(container['name'])
util.print_warn(msg)
self._docker.stop(container['name'], timeout=0)
self._docker.remove_container(container['name'])
msg = 'Removed container {}.'.format(container['name'])
util.print_success(msg)
container['created'] = False
def status(self):
Status = collections.namedtuple(
'Status', ['name', 'state', 'provider', 'ports'])
status_list = []
for container in self.instances:
name = container.get('name')
try:
d = self._docker.containers(filters={'name': name})[0]
state = d.get('Status')
ports = d.get('Ports')
except IndexError:
state = 'not_created'
ports = []
status_list.append(
Status(
name=name,
state=state,
provider=self.provider,
ports=ports))
return status_list
def conf(self, vm_name=None, ssh_config=False):
pass
def inventory_entry(self, instance):
template = '{} ansible_connection=docker\n'
return template.format(instance['name'])
def login_cmd(self, instance):
return 'docker exec -ti {} bash'
def login_args(self, instance):
return [instance]
def _get_platform(self):
return 'docker'
def _get_provider(self):
return 'docker'
def _build_ansible_compatible_image(self):
available_images = [
tag.encode('utf-8')
for image in self._docker.images()
for tag in image.get('RepoTags', [])
]
for container in self.instances:
if container.get('build_image'):
msg = ('Creating Ansible compatible '
'image of {}:{} ...').format(container['image'],
container['image_version'])
util.print_info(msg)
if 'registry' in container:
container['registry'] += '/'
else:
container['registry'] = ''
dockerfile = '''
FROM {container_image}:{container_version}
{container_environment}
RUN bash -c 'if [ -x "$(command -v apt-get)" ]; then apt-get update && apt-get install -y python sudo; fi'
RUN bash -c 'if [ -x "$(command -v yum)" ]; then yum makecache fast && yum update -y && yum install -y python sudo; fi'
RUN bash -c 'if [ -x "$(command -v zypper)" ]; then zypper refresh && zypper update -y && zypper install -y python sudo; fi'
''' # noqa
if 'dockerfile' in container:
dockerfile = container['dockerfile']
f = io.open(dockerfile)
else:
environment = container.get('environment')
if environment:
environment = '\n'.join(
'ENV {} {}'.format(k, v)
for k, v in environment.iteritems())
else:
environment = ''
dockerfile = dockerfile.format(
container_image=container['registry'] + container['image'],
container_version=container['image_version'],
container_environment=environment)
f = io.BytesIO(dockerfile.encode('utf-8'))
container['image'] = container['registry'].replace(
'/', '_').replace(':', '_') + container['image']
tag_string = self.image_tag.format(container['image'],
container['image_version'])
errors = False
if tag_string not in available_images or 'dockerfile' in container:
util.print_info('Building ansible compatible image...')
previous_line = ''
for line in self._docker.build(fileobj=f, tag=tag_string):
for line_split in line.split('\n'):
if len(line_split) > 0:
line = json.loads(line_split)
if 'stream' in line:
msg = '\t{}'.format(line['stream'])
util.print_warn(msg)
if 'errorDetail' in line:
ed = line['errorDetail']['message']
msg = '\t{}'.format(ed)
util.print_warn(msg)
errors = True
if 'status' in line:
if previous_line not in line['status']:
msg = '\t{} ...'.format(line['status'])
util.print_warn(msg)
previous_line = line['status']
if errors:
msg = 'Build failed for {}.'.format(tag_string)
util.print_error(msg)
return
else:
util.print_success('Finished building {}.'.format(
tag_string))
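# Illustrative molecule.yml fragment consumed by this driver (the keys
# mirror the container.get(...) lookups above; the values are placeholders):
#
#     docker:
#       containers:
#         - name: foo-01
#           image: ubuntu
#           image_version: latest
#           privileged: True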
|
quanvm009/codev7
|
refs/heads/master
|
openerp/addons/survey/survey.py
|
23
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-TODAY OpenERP S.A. <http://www.openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import copy
import pytz
from datetime import datetime
from dateutil.relativedelta import relativedelta
from time import strftime
import os
from openerp import netsvc, tools
from openerp.osv import fields, osv
from openerp.tools.translate import _
class survey_type(osv.osv):
_name = 'survey.type'
_description = 'Survey Type'
_columns = {
'name': fields.char("Name", size=128, required=1, translate=True),
'code': fields.char("Code", size=64),
}
survey_type()
class survey(osv.osv):
_name = 'survey'
_description = 'Survey'
_rec_name = 'title'
def default_get(self, cr, uid, fields, context=None):
data = super(survey, self).default_get(cr, uid, fields, context)
return data
_columns = {
'id': fields.integer('ID'),
'title': fields.char('Survey Title', size=128, required=1),
'page_ids': fields.one2many('survey.page', 'survey_id', 'Page'),
'date_open': fields.datetime('Survey Open Date', readonly=1),
'date_close': fields.datetime('Survey Close Date', readonly=1),
'max_response_limit': fields.integer('Maximum Answer Limit',
help="Set to one if survey is answerable only once"),
'response_user': fields.integer('Maximum Answer per User',
help="Set to one if you require only one Answer per user"),
'state': fields.selection([('open', 'Open'), ('cancel', 'Cancelled'),('close', 'Closed') ], 'Status', readonly=True),
'responsible_id': fields.many2one('res.users', 'Responsible', help="User responsible for survey"),
'tot_start_survey': fields.integer("Total Started Survey", readonly=1),
'tot_comp_survey': fields.integer("Total Completed Survey", readonly=1),
'note': fields.text('Description', size=128),
'history': fields.one2many('survey.history', 'survey_id', 'History Lines', readonly=True),
'users': fields.many2many('res.users', 'survey_users_rel', 'sid', 'uid', 'Users'),
'send_response': fields.boolean('Email Notification on Answer'),
'type': fields.many2one('survey.type', 'Type'),
'color': fields.integer('Color Index'),
'invited_user_ids': fields.many2many('res.users', 'survey_invited_user_rel', 'sid', 'uid', 'Invited User'),
}
_defaults = {
'state': lambda * a: "open",
'tot_start_survey': lambda * a: 0,
'tot_comp_survey': lambda * a: 0,
'send_response': lambda * a: 1,
'response_user': lambda * a:1,
'date_open': fields.datetime.now,
}
def survey_open(self, cr, uid, ids, arg):
self.write(cr, uid, ids, {'state': 'open', 'date_open': strftime("%Y-%m-%d %H:%M:%S")})
return True
def survey_close(self, cr, uid, ids, arg):
self.write(cr, uid, ids, {'state': 'close', 'date_close': strftime("%Y-%m-%d %H:%M:%S") })
return True
def survey_cancel(self, cr, uid, ids, arg):
self.write(cr, uid, ids, {'state': 'cancel' })
return True
def copy(self, cr, uid, ids, default=None, context=None):
vals = {}
current_rec = self.read(cr, uid, ids, context=context)
title = _("%s (copy)") % (current_rec.get('title'))
vals.update({'title':title})
vals.update({'history':[],'tot_start_survey':0,'tot_comp_survey':0})
return super(survey, self).copy(cr, uid, ids, vals, context=context)
def action_print_survey(self, cr, uid, ids, context=None):
"""
If response is available then print this response otherwise print survey form(print template of the survey).
@param self: The object pointer
@param cr: the current row, from the database cursor,
@param uid: the current user’s ID for security checks,
@param ids: List of Survey IDs
@param context: A standard dictionary for contextual values
@return : Dictionary value for print survey form.
"""
if context is None:
context = {}
datas = {}
if 'response_id' in context:
response_id = context.get('response_id', 0)
datas['ids'] = [context.get('survey_id', 0)]
else:
response_id = self.pool.get('survey.response').search(cr, uid, [('survey_id','=', ids)], context=context)
datas['ids'] = ids
page_setting = {'orientation': 'vertical', 'without_pagebreak': 0, 'paper_size': 'letter', 'page_number': 1, 'survey_title': 1}
report = {}
if response_id and response_id[0]:
context.update({'survey_id': datas['ids']})
datas['form'] = page_setting
datas['model'] = 'survey.print.answer'
report = {
'type': 'ir.actions.report.xml',
'report_name': 'survey.browse.response',
'datas': datas,
'context' : context,
'nodestroy':True,
}
else:
datas['form'] = page_setting
datas['model'] = 'survey.print'
report = {
'type': 'ir.actions.report.xml',
'report_name': 'survey.form',
'datas': datas,
'context' : context,
'nodestroy':True,
}
return report
def fill_survey(self, cr, uid, ids, context=None):
sur_obj = self.read(cr, uid, ids,['title', 'page_ids'], context=context)
for sur in sur_obj:
name = sur['title']
pages = sur['page_ids']
if not pages:
                raise osv.except_osv(_('Warning!'), _('This survey has no questions defined. Please define the questions and answers first.'))
context.update({'active':False,'survey_id': ids[0]})
return {
'view_type': 'form',
'view_mode': 'form',
'res_model': 'survey.question.wiz',
'type': 'ir.actions.act_window',
'target': 'new',
'name': name,
'context': context
}
def test_survey(self, cr, uid, ids, context=None):
sur_obj = self.read(cr, uid, ids,['title','page_ids'], context=context)
for sur in sur_obj:
name = sur['title']
pages = sur['page_ids']
if not pages:
raise osv.except_osv(_('Warning!'), _('This survey has no pages defined. Please define pages first.'))
context.update({'active':False,'survey_id': ids[0]})
return {
'view_type': 'form',
'view_mode': 'form',
'res_model': 'survey.question.wiz',
'type': 'ir.actions.act_window',
'target': 'new',
'name': name,
'context': context
}
def edit_survey(self, cr, uid, ids, context=None):
sur_obj = self.read(cr, uid, ids,['title','page_ids'], context=context)
for sur in sur_obj:
name = sur['title']
pages = sur['page_ids']
if not pages:
                raise osv.except_osv(_('Warning!'), _('This survey has no questions defined. Please define the questions and answers first.'))
context.update({'survey_id': ids[0]})
return {
'view_type': 'form',
'view_mode': 'form',
'res_model': 'survey.question.wiz',
'type': 'ir.actions.act_window',
'target': 'new',
'name': name,
'context': context
}
survey()
class survey_history(osv.osv):
_name = 'survey.history'
_description = 'Survey History'
_rec_name = 'date'
_columns = {
'survey_id': fields.many2one('survey', 'Survey'),
'user_id': fields.many2one('res.users', 'User', readonly=True),
'date': fields.datetime('Date started', readonly=1),
}
_defaults = {
        'date': lambda * a: datetime.now()
}
survey_history()
class survey_page(osv.osv):
_name = 'survey.page'
_description = 'Survey Pages'
_rec_name = 'title'
_order = 'sequence'
_columns = {
'title': fields.char('Page Title', size=128, required=1),
'survey_id': fields.many2one('survey', 'Survey', ondelete='cascade'),
'question_ids': fields.one2many('survey.question', 'page_id', 'Questions'),
'sequence': fields.integer('Page Nr'),
'note': fields.text('Description'),
}
_defaults = {
'sequence': lambda * a: 1
}
def default_get(self, cr, uid, fields, context=None):
if context is None:
context = {}
data = super(survey_page, self).default_get(cr, uid, fields, context)
if context.has_key('survey_id'):
data['survey_id'] = context.get('survey_id', False)
return data
def survey_save(self, cr, uid, ids, context=None):
if context is None:
context = {}
search_obj = self.pool.get('ir.ui.view')
search_id = search_obj.search(cr,uid,[('model','=','survey.question.wiz'),('name','=','Survey Search')])
surv_name_wiz = self.pool.get('survey.name.wiz')
surv_name_wiz.write(cr, uid, [context.get('sur_name_id',False)], {'transfer':True, 'page_no' : context.get('page_number',0) })
return {
'view_type': 'form',
'view_mode': 'form',
'res_model': 'survey.question.wiz',
'type': 'ir.actions.act_window',
'target': 'new',
'search_view_id': search_id[0],
'context': context
}
def copy(self, cr, uid, ids, default=None, context=None):
vals = {}
current_rec = self.read(cr, uid, ids, context=context)
title = _("%s (copy)") % (current_rec.get('title'))
vals.update({'title':title})
return super(survey_page, self).copy(cr, uid, ids, vals, context=context)
survey_page()
class survey_question(osv.osv):
_name = 'survey.question'
_description = 'Survey Question'
_rec_name = 'question'
_order = 'sequence'
def _calc_response(self, cr, uid, ids, field_name, arg, context=None):
if len(ids) == 0:
return {}
val = {}
cr.execute("select question_id, count(id) as Total_response from \
survey_response_line where state='done' and question_id IN %s\
group by question_id" ,(tuple(ids),))
ids1 = copy.deepcopy(ids)
for rec in cr.fetchall():
ids1.remove(rec[0])
val[rec[0]] = int(rec[1])
for id in ids1:
val[id] = 0
return val
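    # Shape sketch of the value computed above: {question_id: count} of
    # 'done' response lines, with a zero for every id that has no such
    # rows, e.g. {1: 5, 2: 0}.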
_columns = {
'page_id': fields.many2one('survey.page', 'Survey Page', ondelete='cascade', required=1),
'question': fields.char('Question', size=128, required=1),
'answer_choice_ids': fields.one2many('survey.answer', 'question_id', 'Answer'),
'is_require_answer': fields.boolean('Require Answer to Question'),
'required_type': fields.selection([('all','All'), ('at least','At Least'), ('at most','At Most'), ('exactly','Exactly'), ('a range','A Range')], 'Respondent must answer'),
'req_ans': fields.integer('#Required Answer'),
'maximum_req_ans': fields.integer('Maximum Required Answer'),
'minimum_req_ans': fields.integer('Minimum Required Answer'),
'req_error_msg': fields.text('Error Message'),
'allow_comment': fields.boolean('Allow Comment Field'),
'sequence': fields.integer('Sequence'),
'tot_resp': fields.function(_calc_response, string="Total Answer"),
'survey': fields.related('page_id', 'survey_id', type='many2one', relation='survey', string='Survey'),
'descriptive_text': fields.text('Descriptive Text', size=255),
        'column_heading_ids': fields.one2many('survey.question.column.heading', 'question_id', 'Column Heading'),
'type': fields.selection([('multiple_choice_only_one_ans','Multiple Choice (Only One Answer)'),
('multiple_choice_multiple_ans','Multiple Choice (Multiple Answer)'),
('matrix_of_choices_only_one_ans','Matrix of Choices (Only One Answers Per Row)'),
('matrix_of_choices_only_multi_ans','Matrix of Choices (Multiple Answers Per Row)'),
('matrix_of_drop_down_menus','Matrix of Drop-down Menus'),
('rating_scale','Rating Scale'),('single_textbox','Single Textbox'),
('multiple_textboxes','Multiple Textboxes'),
('multiple_textboxes_diff_type','Multiple Textboxes With Different Type'),
('comment','Comment/Essay Box'),
('numerical_textboxes','Numerical Textboxes'),('date','Date'),
('date_and_time','Date and Time'),('descriptive_text','Descriptive Text'),
('table','Table'),
], 'Question Type', required=1,),
'is_comment_require': fields.boolean('Add Comment Field'),
'comment_label': fields.char('Field Label', size = 255),
'comment_field_type': fields.selection([('char', 'Single Line Of Text'), ('text', 'Paragraph of Text')], 'Comment Field Type'),
'comment_valid_type': fields.selection([('do_not_validate', '''Don't Validate Comment Text.'''),
('must_be_specific_length', 'Must Be Specific Length'),
('must_be_whole_number', 'Must Be A Whole Number'),
('must_be_decimal_number', 'Must Be A Decimal Number'),
('must_be_date', 'Must Be A Date'),
('must_be_email_address', 'Must Be An Email Address'),
], 'Text Validation'),
'comment_minimum_no': fields.integer('Minimum number'),
'comment_maximum_no': fields.integer('Maximum number'),
'comment_minimum_float': fields.float('Minimum decimal number'),
'comment_maximum_float': fields.float('Maximum decimal number'),
'comment_minimum_date': fields.date('Minimum date'),
'comment_maximum_date': fields.date('Maximum date'),
'comment_valid_err_msg': fields.text('Error message'),
'make_comment_field': fields.boolean('Make Comment Field an Answer Choice'),
'make_comment_field_err_msg': fields.text('Error message'),
'is_validation_require': fields.boolean('Validate Text'),
'validation_type': fields.selection([('do_not_validate', '''Don't Validate Comment Text.'''),\
('must_be_specific_length', 'Must Be Specific Length'),\
('must_be_whole_number', 'Must Be A Whole Number'),\
('must_be_decimal_number', 'Must Be A Decimal Number'),\
('must_be_date', 'Must Be A Date'),\
('must_be_email_address', 'Must Be An Email Address')\
], 'Text Validation'),
'validation_minimum_no': fields.integer('Minimum number'),
'validation_maximum_no': fields.integer('Maximum number'),
'validation_minimum_float': fields.float('Minimum decimal number'),
'validation_maximum_float': fields.float('Maximum decimal number'),
'validation_minimum_date': fields.date('Minimum date'),
'validation_maximum_date': fields.date('Maximum date'),
'validation_valid_err_msg': fields.text('Error message'),
'numeric_required_sum': fields.integer('Sum of all choices'),
'numeric_required_sum_err_msg': fields.text('Error message'),
'rating_allow_one_column_require': fields.boolean('Allow Only One Answer per Column (Forced Ranking)'),
'in_visible_rating_weight': fields.boolean('Is Rating Scale Invisible?'),
'in_visible_menu_choice': fields.boolean('Is Menu Choice Invisible?'),
'in_visible_answer_type': fields.boolean('Is Answer Type Invisible?'),
'comment_column': fields.boolean('Add comment column in matrix'),
'column_name': fields.char('Column Name',size=256),
'no_of_rows': fields.integer('No of Rows'),
}
_defaults = {
'sequence': lambda * a: 1,
'type': lambda * a: 'multiple_choice_multiple_ans',
'req_error_msg': lambda * a: 'This question requires an answer.',
'required_type': lambda * a: 'at least',
'req_ans': lambda * a: 1,
'comment_field_type': lambda * a: 'char',
'comment_label': lambda * a: 'Other (please specify)',
'comment_valid_type': lambda * a: 'do_not_validate',
'comment_valid_err_msg': lambda * a : 'The comment you entered is in an invalid format.',
'validation_type': lambda * a: 'do_not_validate',
'validation_valid_err_msg': lambda * a : 'The comment you entered is in an invalid format.',
'numeric_required_sum_err_msg': lambda * a :'The choices need to add up to [enter sum here].',
'make_comment_field_err_msg': lambda * a : 'Please enter a comment.',
'in_visible_answer_type': lambda * a: 1
}
def on_change_type(self, cr, uid, ids, type, context=None):
val = {}
val['is_require_answer'] = False
val['is_comment_require'] = False
val['is_validation_require'] = False
val['comment_column'] = False
if type in ['multiple_textboxes_diff_type']:
val['in_visible_answer_type'] = False
return {'value': val}
if type in ['rating_scale']:
val.update({'in_visible_rating_weight':False, 'in_visible_menu_choice':True})
return {'value': val}
elif type in ['matrix_of_drop_down_menus']:
val.update({'in_visible_rating_weight':True, 'in_visible_menu_choice':False})
return {'value': val}
elif type in ['single_textbox']:
val.update({'in_visible_rating_weight':True, 'in_visible_menu_choice':True})
return {'value': val}
else:
val.update({'in_visible_rating_weight':True, 'in_visible_menu_choice':True,\
'in_visible_answer_type':True})
return {'value': val}
def write(self, cr, uid, ids, vals, context=None):
questions = self.read(cr,uid, ids, ['answer_choice_ids', 'type', 'required_type',\
'req_ans', 'minimum_req_ans', 'maximum_req_ans', 'column_heading_ids', 'page_id', 'question'])
for question in questions:
col_len = len(question['column_heading_ids'])
if vals.has_key('column_heading_ids'):
for col in vals['column_heading_ids']:
if type(col[2]) == type({}):
col_len += 1
else:
col_len -= 1
if vals.has_key('type'):
que_type = vals['type']
else:
que_type = question['type']
if que_type in ['matrix_of_choices_only_one_ans', 'matrix_of_choices_only_multi_ans',\
'matrix_of_drop_down_menus', 'rating_scale']:
if not col_len:
raise osv.except_osv(_('Warning!'),_('You must enter one or more column headings for question "%s" of page %s.') % (question['question'], question['page_id'][1]))
ans_len = len(question['answer_choice_ids'])
if vals.has_key('answer_choice_ids'):
for ans in vals['answer_choice_ids']:
if type(ans[2]) == type({}):
ans_len += 1
else:
ans_len -= 1
if que_type not in ['descriptive_text', 'single_textbox', 'comment','table']:
if not ans_len:
raise osv.except_osv(_('Warning!'),_('You must enter one or more Answers for question "%s" of page %s.') % (question['question'], question['page_id'][1]))
req_type = ""
if vals.has_key('required_type'):
req_type = vals['required_type']
else:
req_type = question['required_type']
if que_type in ['multiple_choice_multiple_ans','matrix_of_choices_only_one_ans', \
'matrix_of_choices_only_multi_ans', 'matrix_of_drop_down_menus',\
'rating_scale','multiple_textboxes','numerical_textboxes','date','date_and_time']:
                if req_type in ['at least', 'at most', 'exactly']:
                    if vals.has_key('req_ans'):
                        if not vals['req_ans'] or vals['req_ans'] > ans_len:
                            raise osv.except_osv(_('Warning!'), _("#Required Answer you entered is greater than the number of answers. Please use a number that is smaller than %d.") % (ans_len + 1))
                    else:
                        if not question['req_ans'] or question['req_ans'] > ans_len:
                            raise osv.except_osv(_('Warning!'), _("#Required Answer you entered is greater than the number of answers. Please use a number that is smaller than %d.") % (ans_len + 1))
                if req_type == 'a range':
                    minimum_ans = 0
                    maximum_ans = 0
                    if vals.has_key('minimum_req_ans'):
                        minimum_ans = vals['minimum_req_ans']
                        if not vals['minimum_req_ans'] or vals['minimum_req_ans'] > ans_len:
                            raise osv.except_osv(_('Warning!'), _("Minimum Required Answer you entered is greater than the number of answers. Please use a number that is smaller than %d.") % (ans_len + 1))
                    else:
                        minimum_ans = question['minimum_req_ans']
                        if not question['minimum_req_ans'] or question['minimum_req_ans'] > ans_len:
                            raise osv.except_osv(_('Warning!'), _("Minimum Required Answer you entered is greater than the number of answers. Please use a number that is smaller than %d.") % (ans_len + 1))
                    if vals.has_key('maximum_req_ans'):
                        maximum_ans = vals['maximum_req_ans']
                        if not vals['maximum_req_ans'] or vals['maximum_req_ans'] > ans_len:
                            raise osv.except_osv(_('Warning!'), _("Maximum Required Answer you entered is greater than the number of answers. Please use a number that is smaller than %d.") % (ans_len + 1))
                    else:
                        maximum_ans = question['maximum_req_ans']
                        if not question['maximum_req_ans'] or question['maximum_req_ans'] > ans_len:
                            raise osv.except_osv(_('Warning!'), _("Maximum Required Answer you entered is greater than the number of answers. Please use a number that is smaller than %d.") % (ans_len + 1))
                    if maximum_ans <= minimum_ans:
                        raise osv.except_osv(_('Warning!'), _("Maximum Required Answer must be greater than Minimum Required Answer."))
            if question['type'] == 'matrix_of_drop_down_menus' and vals.has_key('column_heading_ids'):
                for col in vals['column_heading_ids']:
                    if not col[2] or not col[2].has_key('menu_choice') or not col[2]['menu_choice']:
                        raise osv.except_osv(_('Warning!'), _("You must enter one or more menu choices in column heading."))
                    elif col[2]['menu_choice'].strip() == '':
                        raise osv.except_osv(_('Warning!'), _("You must enter one or more menu choices in column heading (white spaces not allowed)."))
return super(survey_question, self).write(cr, uid, ids, vals, context=context)
def create(self, cr, uid, vals, context=None):
minimum_ans = 0
maximum_ans = 0
page = self.pool.get('survey.page').browse(cr, uid, int(vals.get('page_id', 0)), context=context).title
if vals.has_key('answer_choice_ids') and not len(vals['answer_choice_ids']):
if vals.has_key('type') and vals['type'] not in ['descriptive_text', 'single_textbox', 'comment','table']:
                raise osv.except_osv(_('Warning!'),_('You must enter one or more answers for question "%s" of page %s.') % (vals['question'], page))
if vals.has_key('column_heading_ids') and not len(vals['column_heading_ids']):
if vals.has_key('type') and vals['type'] in ['matrix_of_choices_only_one_ans', 'matrix_of_choices_only_multi_ans', 'matrix_of_drop_down_menus', 'rating_scale']:
raise osv.except_osv(_('Warning!'),_('You must enter one or more column headings for question "%s" of page %s.')% (vals['question'], page))
        if vals['type'] in ['multiple_choice_multiple_ans','matrix_of_choices_only_one_ans', 'matrix_of_choices_only_multi_ans', 'matrix_of_drop_down_menus', 'rating_scale','multiple_textboxes','numerical_textboxes','date','date_and_time']:
            if vals.has_key('is_require_answer') and vals.has_key('required_type') and vals['required_type'] in ['at least', 'at most', 'exactly']:
                if (vals.has_key('answer_choice_ids') and vals['req_ans'] > len(vals['answer_choice_ids'])) or not vals['req_ans']:
                    raise osv.except_osv(_('Warning!'),_("#Required Answer you entered is greater than the number of answers. Please use a number that is smaller than %d.") % (len(vals['answer_choice_ids'])+1))
            if vals.has_key('is_require_answer') and vals.has_key('required_type') and vals['required_type'] == 'a range':
                minimum_ans = vals['minimum_req_ans']
                maximum_ans = vals['maximum_req_ans']
                if (vals.has_key('answer_choice_ids') and vals['minimum_req_ans'] > len(vals['answer_choice_ids'])) or not vals['minimum_req_ans']:
                    raise osv.except_osv(_('Warning!'),_("Minimum Required Answer you entered is greater than the number of answers. Please use a number that is smaller than %d.") % (len(vals['answer_choice_ids'])+1))
                if (vals.has_key('answer_choice_ids') and vals['maximum_req_ans'] > len(vals['answer_choice_ids'])) or not vals['maximum_req_ans']:
                    raise osv.except_osv(_('Warning!'),_("Maximum Required Answer you entered is greater than the number of answers. Please use a number that is smaller than %d.") % (len(vals['answer_choice_ids'])+1))
                if maximum_ans <= minimum_ans:
                    raise osv.except_osv(_('Warning!'),_("Maximum Required Answer must be greater than Minimum Required Answer."))
        if vals['type'] == 'matrix_of_drop_down_menus':
            for col in vals['column_heading_ids']:
                if not col[2] or not col[2].has_key('menu_choice') or not col[2]['menu_choice']:
                    raise osv.except_osv(_('Warning!'),_("You must enter one or more menu choices in column heading."))
                elif col[2]['menu_choice'].strip() == '':
                    raise osv.except_osv(_('Warning!'),_("You must enter one or more menu choices in column heading (white spaces not allowed)."))
res = super(survey_question, self).create(cr, uid, vals, context)
return res
def survey_save(self, cr, uid, ids, context=None):
if context is None:
context = {}
search_obj = self.pool.get('ir.ui.view')
search_id = search_obj.search(cr,uid,[('model','=','survey.question.wiz'),('name','=','Survey Search')])
surv_name_wiz = self.pool.get('survey.name.wiz')
surv_name_wiz.write(cr, uid, [context.get('sur_name_id',False)], {'transfer':True, 'page_no' : context.get('page_number',False) })
return {
'view_type': 'form',
'view_mode': 'form',
'res_model': 'survey.question.wiz',
'type': 'ir.actions.act_window',
'target': 'new',
'search_view_id': search_id[0],
'context': context
}
def default_get(self, cr, uid, fields, context=None):
if context is None:
context = {}
data = super(survey_question, self).default_get(cr, uid, fields, context)
if context.has_key('page_id'):
data['page_id']= context.get('page_id', False)
return data
survey_question()
class survey_question_column_heading(osv.osv):
_name = 'survey.question.column.heading'
_description = 'Survey Question Column Heading'
_rec_name = 'title'
def _get_in_visible_rating_weight(self, cr, uid, context=None):
if context is None:
context = {}
if context.get('in_visible_rating_weight', False):
return context['in_visible_rating_weight']
return False
def _get_in_visible_menu_choice(self,cr, uid, context=None):
if context is None:
context = {}
if context.get('in_visible_menu_choice', False):
return context['in_visible_menu_choice']
return False
_columns = {
'title': fields.char('Column Heading', size=128, required=1),
'menu_choice': fields.text('Menu Choice'),
'rating_weight': fields.integer('Weight'),
'question_id': fields.many2one('survey.question', 'Question', ondelete='cascade'),
        'in_visible_rating_weight': fields.boolean('Is Rating Scale Invisible?'),
        'in_visible_menu_choice': fields.boolean('Is Menu Choice Invisible?')
}
_defaults={
'in_visible_rating_weight': _get_in_visible_rating_weight,
'in_visible_menu_choice': _get_in_visible_menu_choice,
}
survey_question_column_heading()
class survey_answer(osv.osv):
_name = 'survey.answer'
_description = 'Survey Answer'
_rec_name = 'answer'
_order = 'sequence'
def _calc_response_avg(self, cr, uid, ids, field_name, arg, context=None):
val = {}
for rec in self.browse(cr, uid, ids, context=context):
cr.execute("select count(question_id) ,(select count(answer_id) \
from survey_response_answer sra, survey_response_line sa \
where sra.response_id = sa.id and sra.answer_id = %d \
and sa.state='done') as tot_ans from survey_response_line \
where question_id = %d and state = 'done'"\
% (rec.id, rec.question_id.id))
res = cr.fetchone()
if res[0]:
avg = float(res[1]) * 100 / res[0]
else:
avg = 0.0
val[rec.id] = {
'response': res[1],
'average': round(avg, 2),
}
return val
def _get_in_visible_answer_type(self, cr, uid, context=None):
if context is None:
context = {}
return context.get('in_visible_answer_type', False)
_columns = {
'question_id': fields.many2one('survey.question', 'Question', ondelete='cascade'),
'answer': fields.char('Answer', size=128, required=1),
'sequence': fields.integer('Sequence'),
'response': fields.function(_calc_response_avg, string="#Answer", multi='sums'),
'average': fields.function(_calc_response_avg, string="#Avg", multi='sums'),
'type': fields.selection([('char','Character'),('date','Date'),('datetime','Date & Time'),\
('integer','Integer'),('float','Float'),('selection','Selection'),\
('email','Email')], "Type of Answer",required=1),
'menu_choice': fields.text('Menu Choices'),
        'in_visible_answer_type': fields.boolean('Is Answer Type Invisible?')
}
_defaults = {
# 'sequence' : lambda * a: 1,
'type' : lambda * a: 'char',
'in_visible_answer_type':_get_in_visible_answer_type,
}
def default_get(self, cr, uid, fields, context=None):
if context is None:
context = {}
data = super(survey_answer, self).default_get(cr, uid, fields, context)
return data
survey_answer()
class survey_response(osv.osv):
_name = "survey.response"
_rec_name = 'date_create'
_columns = {
'survey_id' : fields.many2one('survey', 'Survey', required=1, ondelete='cascade'),
'date_create' : fields.datetime('Create Date', required=1),
'user_id' : fields.many2one('res.users', 'User'),
'response_type' : fields.selection([('manually', 'Manually'), ('link', 'Link')], \
'Answer Type', required=1, readonly=1),
'question_ids' : fields.one2many('survey.response.line', 'response_id', 'Answer'),
'state' : fields.selection([('done', 'Finished '),('skip', 'Not Finished')], \
'Status', readonly=True),
}
_defaults = {
'state' : lambda * a: "skip",
'response_type' : lambda * a: "manually",
}
def name_get(self, cr, uid, ids, context=None):
if not len(ids):
return []
if context is None:
context = {}
reads = self.read(cr, uid, ids, ['user_id','date_create'], context=context)
res = []
for record in reads:
timezone = pytz.timezone(context.get('tz') or 'UTC')
create_date = pytz.UTC.localize(datetime.strptime(record['date_create'].split('.')[0], tools.DEFAULT_SERVER_DATETIME_FORMAT))
localized_create_date = create_date.astimezone(timezone)
name = (record['user_id'] and record['user_id'][1] or '' )+ ' (' + localized_create_date.strftime("%Y-%m-%d %H:%M:%S") + ')'
res.append((record['id'], name))
return res
def copy(self, cr, uid, id, default=None, context=None):
raise osv.except_osv(_('Warning!'),_('You cannot duplicate the resource!'))
survey_response()
class survey_response_line(osv.osv):
_name = 'survey.response.line'
_description = 'Survey Response Line'
_rec_name = 'date_create'
_columns = {
'response_id': fields.many2one('survey.response', 'Answer', ondelete='cascade'),
'date_create': fields.datetime('Create Date', required=1),
        'state': fields.selection([('draft', 'Draft'), ('done', 'Answered'), ('skip', 'Skipped')],\
'Status', readonly=True),
'question_id': fields.many2one('survey.question', 'Question'),
'page_id': fields.related('question_id', 'page_id', type='many2one', \
relation='survey.page', string='Page'),
'response_answer_ids': fields.one2many('survey.response.answer', 'response_id', 'Answer'),
'response_table_ids': fields.one2many('survey.tbl.column.heading', \
'response_table_id', 'Answer'),
'comment': fields.text('Notes'),
'single_text': fields.char('Text', size=255),
}
_defaults = {
'state' : lambda * a: "draft",
}
survey_response_line()
class survey_tbl_column_heading(osv.osv):
_name = 'survey.tbl.column.heading'
_order = 'name'
_columns = {
'name': fields.integer('Row Number'),
'column_id': fields.many2one('survey.question.column.heading', 'Column'),
'value': fields.char('Value', size = 255),
'response_table_id': fields.many2one('survey.response.line', 'Answer', ondelete='cascade'),
}
survey_tbl_column_heading()
class survey_response_answer(osv.osv):
_name = 'survey.response.answer'
_description = 'Survey Answer'
_rec_name = 'response_id'
_columns = {
'response_id': fields.many2one('survey.response.line', 'Answer', ondelete='cascade'),
'answer_id': fields.many2one('survey.answer', 'Answer', required=1, ondelete='cascade'),
'column_id': fields.many2one('survey.question.column.heading','Column'),
'answer': fields.char('Value', size =255),
'value_choice': fields.char('Value Choice', size =255),
'comment': fields.text('Notes'),
'comment_field': fields.char('Comment', size = 255)
}
survey_response_answer()
class res_users(osv.osv):
_inherit = "res.users"
_name = "res.users"
_columns = {
'survey_id': fields.many2many('survey', 'survey_users_rel', 'uid', 'sid', 'Groups'),
}
res_users()
class survey_request(osv.osv):
_name = "survey.request"
_order = 'date_deadline'
_rec_name = 'date_deadline'
_columns = {
'date_deadline': fields.date("Deadline date"),
'user_id': fields.many2one("res.users", "User"),
'email': fields.char("Email", size=64),
'survey_id': fields.many2one("survey", "Survey", required=1, ondelete='cascade'),
'response': fields.many2one('survey.response', 'Answer'),
'state': fields.selection([('draft','Draft'),('cancel', 'Cancelled'),('waiting_answer', 'Waiting Answer'),('done', 'Done')], 'Status', readonly=1)
}
_defaults = {
'state': lambda * a: 'draft',
# 'date_deadline': lambda * a : (datetime.now() + relativedelta(months=+1)).strftime("%Y-%m-%d %H:%M:%S")
}
def survey_req_waiting_answer(self, cr, uid, ids, arg):
self.write(cr, uid, ids, { 'state' : 'waiting_answer'})
return True
def survey_req_draft(self, cr, uid, ids, arg):
self.write(cr, uid, ids, { 'state' : 'draft'})
return True
def survey_req_done(self, cr, uid, ids, arg):
self.write(cr, uid, ids, { 'state' : 'done'})
return True
def survey_req_cancel(self, cr, uid, ids, arg):
self.write(cr, uid, ids, { 'state' : 'cancel'})
return True
def on_change_user(self, cr, uid, ids, user_id, context=None):
if user_id:
user_obj = self.pool.get('res.users')
user = user_obj.browse(cr, uid, user_id, context=context)
return {'value': {'email': user.email}}
return {}
survey_request()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
niavlys/kivy
|
refs/heads/master
|
kivy/uix/rst.py
|
1
|
'''
reStructuredText renderer
=========================
.. versionadded:: 1.1.0
`reStructuredText <http://docutils.sourceforge.net/rst.html>`_ is an
easy-to-read, what-you-see-is-what-you-get plaintext markup syntax and parser
system.
.. warning::
This widget is highly experimental. The whole styling and
implementation are not stable until this warning has been removed.
Usage with Text
---------------
::
text = """
.. _top:
Hello world
===========
This is an **emphased text**, some ``interpreted text``.
And this is a reference to top_::
$ print("Hello world")
"""
document = RstDocument(text=text)
The rendering will output:
.. image:: images/rstdocument.png
Usage with Source
-----------------
You can also render a rst file using the :attr:`RstDocument.source` property::
document = RstDocument(source='index.rst')
You can reference other documents with the role ``:doc:``. For example, in the
document ``index.rst`` you can write::
Go to my next document: :doc:`moreinfo.rst`
It will generate a link that, when clicked, opens the ``moreinfo.rst``
document.
'''
__all__ = ('RstDocument', )
import os
from os.path import dirname, join, exists, abspath
from kivy.clock import Clock
from kivy.compat import PY2
from kivy.properties import ObjectProperty, NumericProperty, \
DictProperty, ListProperty, StringProperty, \
BooleanProperty, OptionProperty, AliasProperty
from kivy.lang import Builder
from kivy.utils import get_hex_from_color, get_color_from_hex
from kivy.uix.widget import Widget
from kivy.uix.scrollview import ScrollView
from kivy.uix.gridlayout import GridLayout
from kivy.uix.label import Label
from kivy.uix.image import AsyncImage, Image
from kivy.uix.videoplayer import VideoPlayer
from kivy.uix.anchorlayout import AnchorLayout
from kivy.animation import Animation
from kivy.logger import Logger
from docutils.parsers import rst
from docutils.parsers.rst import roles
from docutils import nodes, frontend, utils
from docutils.parsers.rst import Directive, directives
from docutils.parsers.rst.roles import set_classes
from kivy.parser import parse_color
#
# Handle some additional roles
#
if 'KIVY_DOC' not in os.environ:
class role_doc(nodes.Inline, nodes.TextElement):
pass
class role_video(nodes.General, nodes.TextElement):
pass
class VideoDirective(Directive):
has_content = False
required_arguments = 1
optional_arguments = 0
final_argument_whitespace = True
option_spec = {'width': directives.nonnegative_int,
'height': directives.nonnegative_int}
def run(self):
set_classes(self.options)
node = role_video(source=self.arguments[0], **self.options)
return [node]
generic_docroles = {
'doc': role_doc}
for rolename, nodeclass in generic_docroles.items():
generic = roles.GenericRole(rolename, nodeclass)
role = roles.CustomRole(rolename, generic, {'classes': [rolename]})
roles.register_local_role(rolename, role)
directives.register_directive('video', VideoDirective)
Builder.load_string('''
#:import parse_color kivy.parser.parse_color
<RstDocument>:
content: content
scatter: scatter
do_scroll_x: False
canvas.before:
Color:
rgba: parse_color(root.colors['background'])
Rectangle:
pos: self.pos
size: self.size
Scatter:
id: scatter
size_hint_y: None
height: content.minimum_height
width: root.width
scale: 1
do_translation: False, False
do_scale: False
do_rotation: False
GridLayout:
id: content
cols: 1
height: self.minimum_height
width: root.width
padding: 10
<RstTitle>:
markup: True
valign: 'top'
font_size:
sp(self.document.base_font_size - self.section * (
self.document.base_font_size / 31.0 * 2))
size_hint_y: None
height: self.texture_size[1] + dp(20)
text_size: self.width, None
bold: True
canvas:
Color:
rgba: parse_color(self.document.underline_color)
Rectangle:
pos: self.x, self.y + 5
size: self.width, 1
<RstParagraph>:
markup: True
valign: 'top'
size_hint_y: None
height: self.texture_size[1] + self.my
text_size: self.width - self.mx, None
font_size: sp(self.document.base_font_size / 2.0)
<RstTerm>:
size_hint: None, None
height: label.height
anchor_x: 'left'
Label:
id: label
text: root.text
markup: True
valign: 'top'
size_hint: None, None
size: self.texture_size[0] + dp(10), self.texture_size[1] + dp(10)
font_size: sp(root.document.base_font_size / 2.0)
<RstBlockQuote>:
cols: 2
content: content
size_hint_y: None
height: content.height
Widget:
size_hint_x: None
width: 20
GridLayout:
id: content
cols: 1
size_hint_y: None
height: self.minimum_height
<RstLiteralBlock>:
cols: 1
content: content
size_hint_y: None
height: content.texture_size[1] + dp(20)
canvas:
Color:
rgb: parse_color('#cccccc')
Rectangle:
pos: self.x - 1, self.y - 1
size: self.width + 2, self.height + 2
Color:
rgb: parse_color('#eeeeee')
Rectangle:
pos: self.pos
size: self.size
Label:
id: content
markup: True
valign: 'top'
text_size: self.width - 20, None
font_name: 'data/fonts/DroidSansMono.ttf'
color: (0, 0, 0, 1)
<RstList>:
cols: 2
size_hint_y: None
height: self.minimum_height
<RstListItem>:
cols: 1
size_hint_y: None
height: self.minimum_height
<RstSystemMessage>:
cols: 1
size_hint_y: None
height: self.minimum_height
canvas:
Color:
rgba: 1, 0, 0, .3
Rectangle:
pos: self.pos
size: self.size
<RstWarning>:
content: content
cols: 1
padding: 20
size_hint_y: None
height: self.minimum_height
canvas:
Color:
rgba: 1, 0, 0, .5
Rectangle:
pos: self.x + 10, self.y + 10
size: self.width - 20, self.height - 20
GridLayout:
cols: 1
id: content
size_hint_y: None
height: self.minimum_height
<RstNote>:
content: content
cols: 1
padding: 20
size_hint_y: None
height: self.minimum_height
canvas:
Color:
rgba: 0, 1, 0, .5
Rectangle:
pos: self.x + 10, self.y + 10
size: self.width - 20, self.height - 20
GridLayout:
cols: 1
id: content
size_hint_y: None
height: self.minimum_height
<RstImage>:
size_hint: None, None
size: self.texture_size[0], self.texture_size[1] + dp(10)
<RstAsyncImage>:
size_hint: None, None
size: self.texture_size[0], self.texture_size[1] + dp(10)
<RstDefinitionList>:
cols: 1
size_hint_y: None
height: self.minimum_height
font_size: sp(self.document.base_font_size / 2.0)
<RstDefinition>:
cols: 2
size_hint_y: None
height: self.minimum_height
font_size: sp(self.document.base_font_size / 2.0)
<RstFieldList>:
cols: 2
size_hint_y: None
height: self.minimum_height
<RstFieldName>:
markup: True
valign: 'top'
size_hint: 0.2, 1
color: (0, 0, 0, 1)
bold: True
text_size: self.width - 10, self.height - 10
font_size: sp(self.document.base_font_size / 2.0)
<RstFieldBody>:
cols: 1
size_hint_y: None
height: self.minimum_height
<RstTable>:
size_hint_y: None
height: self.minimum_height
<RstEntry>:
cols: 1
size_hint_y: None
height: self.minimum_height
canvas:
Color:
rgb: .2, .2, .2
Line:
points: [\
self.x,\
self.y,\
self.right,\
self.y,\
self.right,\
self.top,\
self.x,\
self.top,\
self.x,\
self.y]
<RstTransition>:
size_hint_y: None
height: 20
canvas:
Color:
rgb: .2, .2, .2
Line:
points: [self.x, self.center_y, self.right, self.center_y]
<RstListBullet>:
markup: True
valign: 'top'
size_hint_x: None
width: self.texture_size[0] + dp(10)
text_size: None, self.height - dp(10)
font_size: sp(self.document.base_font_size / 2.0)
<RstEmptySpace>:
size_hint: 0.01, 0.01
<RstDefinitionSpace>:
size_hint: None, 0.1
width: 50
font_size: sp(self.document.base_font_size / 2.0)
<RstVideoPlayer>:
options: {'allow_stretch': True}
canvas.before:
Color:
rgba: (1, 1, 1, 1)
BorderImage:
source: 'atlas://data/images/defaulttheme/player-background'
pos: self.x - 25, self.y - 25
size: self.width + 50, self.height + 50
border: (25, 25, 25, 25)
''')
class RstVideoPlayer(VideoPlayer):
pass
class RstDocument(ScrollView):
'''Base widget used to store an Rst document. See module documentation for
more information.
'''
source = StringProperty(None)
'''Filename of the RST document.
:attr:`source` is a :class:`~kivy.properties.StringProperty` and
defaults to None.
'''
source_encoding = StringProperty('utf-8')
'''Encoding to be used for the :attr:`source` file.
:attr:`source_encoding` is a :class:`~kivy.properties.StringProperty` and
defaults to `utf-8`.
.. Note::
It is your responsibility to ensure that the value provided is a
valid codec supported by python.
'''
source_error = OptionProperty('strict',
options=('strict', 'ignore', 'replace',
'xmlcharrefreplace',
'backslashreplace'))
'''Error handling to be used while decoding the :attr:`source` file.
:attr:`source_error` is an :class:`~kivy.properties.OptionProperty` and
defaults to `strict`. Can be one of 'strict', 'ignore', 'replace',
'xmlcharrefreplace' or 'backslashreplace'.
'''
text = StringProperty(None)
'''RST markup text of the document.
:attr:`text` is a :class:`~kivy.properties.StringProperty` and defaults to
None.
'''
document_root = StringProperty(None)
'''Root path where :doc: will search for rst documents. If no path is
given, it will use the directory of the first loaded source file.
:attr:`document_root` is a :class:`~kivy.properties.StringProperty` and
defaults to None.
'''
base_font_size = NumericProperty(31)
'''Font size for the biggest title, 31 by default. All other font sizes are
derived from this.
.. versionadded:: 1.8.0
'''
show_errors = BooleanProperty(False)
'''Indicates whether RST parser errors should be shown on the screen
or not.
:attr:`show_errors` is a :class:`~kivy.properties.BooleanProperty` and
defaults to False.
'''
def _get_bgc(self):
return get_color_from_hex(self.colors.background)
def _set_bgc(self, value):
self.colors.background = get_hex_from_color(value)[1:]
background_color = AliasProperty(_get_bgc, _set_bgc, bind=('colors',))
'''Specifies the background_color to be used for the RstDocument.
.. versionadded:: 1.8.0
:attr:`background_color` is an :class:`~kivy.properties.AliasProperty`
for colors['background'].
'''
colors = DictProperty({
'background': 'e5e6e9ff',
'link': 'ce5c00ff',
'paragraph': '202020ff',
'title': '204a87ff',
'bullet': '000000ff'})
'''Dictionary of all the colors used in the RST rendering.
.. warning::
This dictionary needs special handling. You also need to call
:meth:`RstDocument.render` if you change it after loading.
:attr:`colors` is a :class:`~kivy.properties.DictProperty`.
'''
title = StringProperty('')
'''Title of the current document.
:attr:`title` is a :class:`~kivy.properties.StringProperty` and defaults to
''. It is read-only.
'''
toctrees = DictProperty({})
'''Toctree of all loaded or preloaded documents. This dictionary is filled
when a rst document is explicitly loaded or when :meth:`preload` has been
called.
If the document has no filename, e.g. when the document is loaded from
text, the key will be ''.
:attr:`toctrees` is a :class:`~kivy.properties.DictProperty` and defaults
to {}.
'''
underline_color = StringProperty('204a9699')
'''Underline color of the titles, expressed in HTML color notation.
:attr:`underline_color` is a
:class:`~kivy.properties.StringProperty` and defaults to '204a9699'.
.. versionadded:: 1.8.1
'''
# internals.
content = ObjectProperty(None)
scatter = ObjectProperty(None)
anchors_widgets = ListProperty([])
refs_assoc = DictProperty({})
def __init__(self, **kwargs):
self._trigger_load = Clock.create_trigger(self._load_from_text, -1)
self._parser = rst.Parser()
self._settings = frontend.OptionParser(
components=(rst.Parser, )).get_default_values()
super(RstDocument, self).__init__(**kwargs)
def on_source(self, instance, value):
if not value:
return
if self.document_root is None:
# set the document root to the directory name of the
# first loaded file
self.document_root = abspath(dirname(value))
self._load_from_source()
def on_text(self, instance, value):
self._trigger_load()
def render(self):
'''Force document rendering.
'''
self._load_from_text()
def resolve_path(self, filename):
'''Get the path for this filename. If the filename doesn't exist,
it returns the document_root + filename.
'''
if exists(filename):
return filename
return join(self.document_root, filename)
def preload(self, filename, encoding='utf-8', errors='strict'):
'''Preload a rst file to get its toctree and its title.
The result will be stored in :attr:`toctrees` with the ``filename`` as
key.
'''
with open(filename, 'rb') as fd:
text = fd.read().decode(encoding, errors)
# parse the source
document = utils.new_document('Document', self._settings)
self._parser.parse(text, document)
# fill the current document node
visitor = _ToctreeVisitor(document)
document.walkabout(visitor)
self.toctrees[filename] = visitor.toctree
return text
def _load_from_source(self):
filename = self.resolve_path(self.source)
self.text = self.preload(filename,
self.source_encoding,
self.source_error)
def _load_from_text(self, *largs):
try:
# clear the current widgets
self.content.clear_widgets()
self.anchors_widgets = []
self.refs_assoc = {}
# parse the source
document = utils.new_document('Document', self._settings)
text = self.text
if PY2 and type(text) is str:
text = text.decode('utf-8')
self._parser.parse(text, document)
# fill the current document node
visitor = _Visitor(self, document)
document.walkabout(visitor)
self.title = visitor.title or 'No title'
except:
Logger.exception('Rst: error while loading text')
def on_ref_press(self, node, ref):
self.goto(ref)
def goto(self, ref, *largs):
'''Scroll to the reference. If it's not found, nothing will be done.
For this text::
.. _myref:
This is something I always wanted.
You can do::
from kivy.clock import Clock
from functools import partial
doc = RstDocument(...)
Clock.schedule_once(partial(doc.goto, 'myref'), 0.1)
.. note::
It is preferable to delay the call of the goto if you just loaded
the document because the layout might not be finished or the
size of the RstDocument has not yet been determined. In
either case, the calculation of the scrolling would be
wrong.
You can, however, do a direct call if the document is already
loaded.
.. versionadded:: 1.3.0
'''
# check if it's a file ?
if ref.endswith('.rst'):
# whether it's a valid or invalid file, let source deal with it
self.source = ref
return
# get the association
ref = self.refs_assoc.get(ref, ref)
# search into all the nodes containing anchors
ax = ay = None
for node in self.anchors_widgets:
if ref in node.anchors:
ax, ay = node.anchors[ref]
break
# not found, stop here
if ax is None:
return
# found, calculate the real coordinate
# get the anchor coordinate inside widget space
ax += node.x
ay = node.top - ay
#ay += node.y
# what's the current coordinate for us?
sx, sy = self.scatter.x, self.scatter.top
#ax, ay = self.scatter.to_parent(ax, ay)
ay -= self.height
dx, dy = self.convert_distance_to_scroll(0, ay)
dy = max(0, min(1, dy))
Animation(scroll_y=dy, d=.25, t='in_out_expo').start(self)
def add_anchors(self, node):
self.anchors_widgets.append(node)
class RstTitle(Label):
section = NumericProperty(0)
document = ObjectProperty(None)
class RstParagraph(Label):
mx = NumericProperty(10)
my = NumericProperty(10)
document = ObjectProperty(None)
class RstTerm(AnchorLayout):
text = StringProperty('')
document = ObjectProperty(None)
class RstBlockQuote(GridLayout):
content = ObjectProperty(None)
class RstLiteralBlock(GridLayout):
content = ObjectProperty(None)
class RstList(GridLayout):
pass
class RstListItem(GridLayout):
content = ObjectProperty(None)
class RstListBullet(Label):
document = ObjectProperty(None)
class RstSystemMessage(GridLayout):
pass
class RstWarning(GridLayout):
content = ObjectProperty(None)
class RstNote(GridLayout):
content = ObjectProperty(None)
class RstImage(Image):
pass
class RstAsyncImage(AsyncImage):
pass
class RstDefinitionList(GridLayout):
document = ObjectProperty(None)
class RstDefinition(GridLayout):
document = ObjectProperty(None)
class RstFieldList(GridLayout):
pass
class RstFieldName(Label):
document = ObjectProperty(None)
class RstFieldBody(GridLayout):
pass
class RstGridLayout(GridLayout):
pass
class RstTable(GridLayout):
pass
class RstEntry(GridLayout):
pass
class RstTransition(Widget):
pass
class RstEmptySpace(Widget):
pass
class RstDefinitionSpace(Widget):
document = ObjectProperty(None)
class _ToctreeVisitor(nodes.NodeVisitor):
def __init__(self, *largs):
self.toctree = self.current = []
self.queue = []
self.text = ''
nodes.NodeVisitor.__init__(self, *largs)
def push(self, tree):
self.queue.append(tree)
self.current = tree
def pop(self):
self.current = self.queue.pop()
def dispatch_visit(self, node):
cls = node.__class__
if cls is nodes.section:
section = {
'ids': node['ids'],
'names': node['names'],
'title': '',
'children': []}
if isinstance(self.current, dict):
self.current['children'].append(section)
else:
self.current.append(section)
self.push(section)
elif cls is nodes.title:
self.text = ''
elif cls is nodes.Text:
self.text += node
def dispatch_departure(self, node):
cls = node.__class__
if cls is nodes.section:
self.pop()
elif cls is nodes.title:
self.current['title'] = self.text
class _Visitor(nodes.NodeVisitor):
def __init__(self, root, *largs):
self.root = root
self.title = None
self.current_list = []
self.current = None
self.idx_list = None
self.text = ''
self.text_have_anchor = False
self.section = 0
self.do_strip_text = False
nodes.NodeVisitor.__init__(self, *largs)
def push(self, widget):
self.current_list.append(self.current)
self.current = widget
def pop(self):
self.current = self.current_list.pop()
def dispatch_visit(self, node):
cls = node.__class__
if cls is nodes.document:
self.push(self.root.content)
elif cls is nodes.section:
self.section += 1
elif cls is nodes.title:
label = RstTitle(section=self.section, document=self.root)
self.current.add_widget(label)
self.push(label)
#assert(self.text == '')
elif cls is nodes.Text:
if self.do_strip_text:
node = node.replace('\n', ' ')
node = node.replace('  ', ' ')
node = node.replace('\t', ' ')
node = node.replace('  ', ' ')
if node.startswith(' '):
node = ' ' + node.lstrip(' ')
if node.endswith(' '):
node = node.rstrip(' ') + ' '
if self.text.endswith(' ') and node.startswith(' '):
node = node[1:]
self.text += node
elif cls is nodes.paragraph:
self.do_strip_text = True
label = RstParagraph(document=self.root)
if isinstance(self.current, RstEntry):
label.mx = 10
self.current.add_widget(label)
self.push(label)
elif cls is nodes.literal_block:
box = RstLiteralBlock()
self.current.add_widget(box)
self.push(box)
elif cls is nodes.emphasis:
self.text += '[i]'
elif cls is nodes.strong:
self.text += '[b]'
elif cls is nodes.literal:
self.text += '[font=fonts/DroidSansMono.ttf]'
elif cls is nodes.block_quote:
box = RstBlockQuote()
self.current.add_widget(box)
self.push(box.content)
assert(self.text == '')
elif cls is nodes.enumerated_list:
box = RstList()
self.current.add_widget(box)
self.push(box)
self.idx_list = 0
elif cls is nodes.bullet_list:
box = RstList()
self.current.add_widget(box)
self.push(box)
self.idx_list = None
elif cls is nodes.list_item:
bullet = '-'
if self.idx_list is not None:
self.idx_list += 1
bullet = '%d.' % self.idx_list
bullet = self.colorize(bullet, 'bullet')
item = RstListItem()
self.current.add_widget(RstListBullet(
text=bullet, document=self.root))
self.current.add_widget(item)
self.push(item)
elif cls is nodes.system_message:
label = RstSystemMessage()
if self.root.show_errors:
self.current.add_widget(label)
self.push(label)
elif cls is nodes.warning:
label = RstWarning()
self.current.add_widget(label)
self.push(label.content)
assert(self.text == '')
elif cls is nodes.note:
label = RstNote()
self.current.add_widget(label)
self.push(label.content)
assert(self.text == '')
elif cls is nodes.image:
uri = node['uri']
if uri.startswith('/') and self.root.document_root:
uri = join(self.root.document_root, uri[1:])
if uri.startswith('http://') or uri.startswith('https://'):
image = RstAsyncImage(source=uri)
else:
image = RstImage(source=uri)
align = node.get('align', 'center')
root = AnchorLayout(size_hint_y=None, anchor_x=align, height=1)
image.bind(height=root.setter('height'))
root.add_widget(image)
self.current.add_widget(root)
elif cls is nodes.definition_list:
lst = RstDefinitionList(document=self.root)
self.current.add_widget(lst)
self.push(lst)
elif cls is nodes.term:
assert(isinstance(self.current, RstDefinitionList))
term = RstTerm(document=self.root)
self.current.add_widget(term)
self.push(term)
elif cls is nodes.definition:
assert(isinstance(self.current, RstDefinitionList))
definition = RstDefinition(document=self.root)
definition.add_widget(RstDefinitionSpace(document=self.root))
self.current.add_widget(definition)
self.push(definition)
elif cls is nodes.field_list:
fieldlist = RstFieldList()
self.current.add_widget(fieldlist)
self.push(fieldlist)
elif cls is nodes.field_name:
name = RstFieldName(document=self.root)
self.current.add_widget(name)
self.push(name)
elif cls is nodes.field_body:
body = RstFieldBody()
self.current.add_widget(body)
self.push(body)
elif cls is nodes.table:
table = RstTable(cols=0)
self.current.add_widget(table)
self.push(table)
elif cls is nodes.colspec:
self.current.cols += 1
elif cls is nodes.entry:
entry = RstEntry()
self.current.add_widget(entry)
self.push(entry)
elif cls is nodes.transition:
self.current.add_widget(RstTransition())
elif cls is nodes.reference:
name = node.get('name', node.get('refuri'))
self.text += '[ref=%s][color=%s]' % (
name, self.root.colors.get(
'link', self.root.colors.get('paragraph')))
if 'refname' in node and 'name' in node:
self.root.refs_assoc[node['name']] = node['refname']
elif cls is nodes.target:
name = None
if 'ids' in node:
name = node['ids'][0]
elif 'names' in node:
name = node['names'][0]
self.text += '[anchor=%s]' % name
self.text_have_anchor = True
elif cls is role_doc:
self.doc_index = len(self.text)
elif cls is role_video:
pass
def dispatch_departure(self, node):
cls = node.__class__
if cls is nodes.document:
self.pop()
elif cls is nodes.section:
self.section -= 1
elif cls is nodes.title:
assert(isinstance(self.current, RstTitle))
if not self.title:
self.title = self.text
self.set_text(self.current, 'title')
self.pop()
elif cls is nodes.Text:
pass
elif cls is nodes.paragraph:
self.do_strip_text = False
assert(isinstance(self.current, RstParagraph))
self.set_text(self.current, 'paragraph')
self.pop()
elif cls is nodes.literal_block:
assert(isinstance(self.current, RstLiteralBlock))
self.set_text(self.current.content, 'literal_block')
self.pop()
elif cls is nodes.emphasis:
self.text += '[/i]'
elif cls is nodes.strong:
self.text += '[/b]'
elif cls is nodes.literal:
self.text += '[/font]'
elif cls is nodes.block_quote:
self.pop()
elif cls is nodes.enumerated_list:
self.idx_list = None
self.pop()
elif cls is nodes.bullet_list:
self.pop()
elif cls is nodes.list_item:
self.pop()
elif cls is nodes.system_message:
self.pop()
elif cls is nodes.warning:
self.pop()
elif cls is nodes.note:
self.pop()
elif cls is nodes.definition_list:
self.pop()
elif cls is nodes.term:
assert(isinstance(self.current, RstTerm))
self.set_text(self.current, 'term')
self.pop()
elif cls is nodes.definition:
self.pop()
elif cls is nodes.field_list:
self.pop()
elif cls is nodes.field_name:
assert(isinstance(self.current, RstFieldName))
self.set_text(self.current, 'field_name')
self.pop()
elif cls is nodes.field_body:
self.pop()
elif cls is nodes.table:
self.pop()
elif cls is nodes.colspec:
pass
elif cls is nodes.entry:
self.pop()
elif cls is nodes.reference:
self.text += '[/color][/ref]'
elif cls is role_doc:
docname = self.text[self.doc_index:]
rst_docname = docname
if rst_docname.endswith('.rst'):
docname = docname[:-4]
else:
rst_docname += '.rst'
# try to preload it
filename = self.root.resolve_path(rst_docname)
self.root.preload(filename)
# if exist, use the title of the first section found in the
# document
title = docname
if filename in self.root.toctrees:
toctree = self.root.toctrees[filename]
if len(toctree):
title = toctree[0]['title']
# replace the text with a good reference
text = '[ref=%s]%s[/ref]' % (
rst_docname,
self.colorize(title, 'link'))
self.text = self.text[:self.doc_index] + text
elif cls is role_video:
width = node.get('width', 400)
height = node.get('height', 300)
uri = node['source']
if uri.startswith('/') and self.root.document_root:
uri = join(self.root.document_root, uri[1:])
video = RstVideoPlayer(
source=uri,
size_hint=(None, None),
size=(width, height))
anchor = AnchorLayout(size_hint_y=None, height=height + 20)
anchor.add_widget(video)
self.current.add_widget(anchor)
def set_text(self, node, parent):
text = self.text
if parent == 'term' or parent == 'field_name':
text = '[b]%s[/b]' % text
# search anchors
node.text = self.colorize(text, parent)
node.bind(on_ref_press=self.root.on_ref_press)
if self.text_have_anchor:
self.root.add_anchors(node)
self.text = ''
self.text_have_anchor = False
def colorize(self, text, name):
return '[color=%s]%s[/color]' % (
self.root.colors.get(name, self.root.colors['paragraph']),
text)
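# A minimal helper sketch (not part of the original module; the function name
# and its argument are assumptions for illustration). It shows how the same
# docutils machinery used by RstDocument.preload() can extract top-level
# section titles from rst source without instantiating any widget.
def _section_titles(rst_text):
    # parse the source with the same parser and settings as RstDocument
    settings = frontend.OptionParser(
        components=(rst.Parser, )).get_default_values()
    document = utils.new_document('Document', settings)
    rst.Parser().parse(rst_text, document)
    # walk the node tree with the toctree visitor defined above
    visitor = _ToctreeVisitor(document)
    document.walkabout(visitor)
    return [section['title'] for section in visitor.toctree]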
if __name__ == '__main__':
from kivy.base import runTouchApp
import sys
runTouchApp(RstDocument(source=sys.argv[1]))
|
madgik/exareme
|
refs/heads/master
|
Exareme-Docker/src/exareme/exareme-tools/madis/src/lib/pyreadline/modes/basemode.py
|
1
|
# -*- coding: utf-8 -*-
# *****************************************************************************
# Copyright (C) 2003-2006 Gary Bishop.
# Copyright (C) 2006 Jorgen Stenarson. <jorgen.stenarson@bostream.nu>
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
# *****************************************************************************
import os, re, math, glob, sys
from pyreadline.logger import log
from pyreadline.keysyms.common import make_KeyPress_from_keydescr
import pyreadline.lineeditor.history as history
import pyreadline.clipboard as clipboard
from pyreadline.unicode_helper import ensure_str, ensure_unicode
in_ironpython = "IronPython" in sys.version
class BaseMode(object):
mode = "base"
def __init__(self, rlobj):
self.rlobj = rlobj
self.exit_dispatch = {}
self.key_dispatch = {}
self.argument = 1
self.prevargument = None
def __repr__(self):
return "<BaseMode>"
def _gs(x):
def g(self):
return getattr(self.rlobj, x)
def s(self, q):
setattr(self.rlobj, x, q)
return g, s
def _g(x):
def g(self):
return getattr(self.rlobj, x)
return g
def _argreset(self):
val = self.argument
self.argument = 1
return val
argument_reset = property(_argreset)
ctrl_c_tap_time_interval = property(*_gs("ctrl_c_tap_time_interval"))
allow_ctrl_c = property(*_gs("allow_ctrl_c"))
l_buffer = property(*_gs("l_buffer"))
next_meta = property(*_gs("next_meta"))
first_prompt = property(*_gs("first_prompt"))
prompt = property(*_gs("prompt"))
paste_line_buffer = property(*_gs("paste_line_buffer"))
completer_delims = property(*_gs("completer_delims"))
show_all_if_ambiguous = property(*_gs("show_all_if_ambiguous"))
mark_directories = property(*_gs("mark_directories"))
completer = property(*_gs("completer"))
begidx = property(*_gs("begidx"))
startup_hook = property(*_gs("startup_hook"))
pre_input_hook = property(*_gs("pre_input_hook"))
endidx = property(*_gs("endidx"))
console = property(_g("console"))
insert_text = property(_g("insert_text"))
_print_prompt = property(_g("_print_prompt"))
_update_line = property(_g("_update_line"))
add_history = property(_g("add_history"))
_bell = property(_g("_bell"))
_clear_after = property(_g("_clear_after"))
_set_cursor = property(_g("_set_cursor"))
_update_prompt_pos = property(_g("_update_prompt_pos"))
_update_line = property(_g("_update_line"))
enable_win32_clipboard = property(_g("enable_win32_clipboard"))
enable_ipython_paste_list_of_lists = property(_g("enable_ipython_paste_list_of_lists"))
enable_ipython_paste_for_paths = property(_g("enable_ipython_paste_for_paths"))
_bell = property(_g("_bell"))
_history = property(_g("_history"))
prompt_end_pos = property(_g("prompt_end_pos"))
prompt_begin_pos = property(_g("prompt_begin_pos"))
rl_settings_to_string = property(_g("rl_settings_to_string"))
def _readline_from_keyboard(self):
raise NotImplementedError
def readline(self, prompt=''):
raise NotImplementedError
# Create key bindings:
def _bind_key(self, key, func):
'''setup the mapping from key to call the function.'''
if type(func) != type(self._bind_key):
print "Trying to bind non method to keystroke:%s,%s" % (key, func)
raise PyreadlineError(
"Trying to bind non method to keystroke:%s,%s,%s,%s" % (key, func, type(func), type(self._bind_key)))
keyinfo = make_KeyPress_from_keydescr(key.lower()).tuple()
log(">>>%s -> %s<<<" % (keyinfo, func.__name__))
self.key_dispatch[keyinfo] = func
def _bind_exit_key(self, key):
'''setup the mapping from key to call the function.'''
keyinfo = make_KeyPress_from_keydescr(key.lower()).tuple()
self.exit_dispatch[keyinfo] = None
def init_editing_mode(self, e): # (C-e)
'''When in vi command mode, this causes a switch to emacs editing
mode.'''
raise NotImplementedError
# completion commands
def _get_completions(self):
'''Return a list of possible completions for the string ending at the point.
Also set begidx and endidx in the process.'''
completions = []
self.begidx = self.l_buffer.point
self.endidx = self.l_buffer.point
buf = self.l_buffer.line_buffer
if self.completer:
# get the string to complete
while self.begidx > 0:
self.begidx -= 1
if buf[self.begidx] in self.completer_delims:
self.begidx += 1
break
text = ensure_str(''.join(buf[self.begidx:self.endidx]))
log('complete text="%s"' % text)
i = 0
while 1:
try:
r = ensure_unicode(self.completer(text, i))
except:
break
i += 1
if r and r not in completions:
completions.append(r)
else:
break
log('text completions=%s' % completions)
if not completions:
# get the filename to complete
while self.begidx > 0:
self.begidx -= 1
if buf[self.begidx] in ' \t\n':
self.begidx += 1
break
text = ensure_str(''.join(buf[self.begidx:self.endidx]))
log('file complete text="%s"' % text)
completions = map(ensure_unicode, glob.glob(os.path.expanduser(text) + '*'))
if self.mark_directories == 'on':
mc = []
for f in completions:
if os.path.isdir(f):
mc.append(f + os.sep)
else:
mc.append(f)
completions = mc
log('fnames=%s' % completions)
return completions
def _display_completions(self, completions):
if not completions:
return
self.console.write('\n')
wmax = max(map(len, completions))
w, h = self.console.size()
cols = max(1, int((w - 1) / (wmax + 1)))
rows = int(math.ceil(float(len(completions)) / cols))
for row in range(rows):
s = ''
for col in range(cols):
i = col * rows + row
if i < len(completions):
self.console.write(completions[i].ljust(wmax + 1))
self.console.write('\n')
if in_ironpython:
self.prompt = sys.ps1
self._print_prompt()
def complete(self, e): # (TAB)
'''Attempt to perform completion on the text before point. The
actual completion performed is application-specific. The default is
filename completion.'''
completions = self._get_completions()
if completions:
cprefix = commonprefix(completions)
rep = [c for c in cprefix]
point = self.l_buffer.point
self.l_buffer[self.begidx:self.endidx] = rep
self.l_buffer.point = point + len(rep) - (self.endidx - self.begidx)
if len(completions) > 1:
if self.show_all_if_ambiguous == 'on':
self._display_completions(completions)
else:
self._bell()
else:
self._bell()
def possible_completions(self, e): # (M-?)
'''List the possible completions of the text before point. '''
completions = self._get_completions()
self._display_completions(completions)
def insert_completions(self, e): # (M-*)
'''Insert all completions of the text before point that would have
been generated by possible-completions.'''
completions = self._get_completions()
b = self.begidx
e = self.endidx
for comp in completions:
rep = [c for c in comp]
rep.append(' ')
self.l_buffer[b:e] = rep
b += len(rep)
e = b
self.line_cursor = b
def menu_complete(self, e): # ()
'''Similar to complete, but replaces the word to be completed with a
single match from the list of possible completions. Repeated
execution of menu-complete steps through the list of possible
completions, inserting each match in turn. At the end of the list of
completions, the bell is rung (subject to the setting of bell-style)
and the original text is restored. An argument of n moves n
positions forward in the list of matches; a negative argument may be
used to move backward through the list. This command is intended to
be bound to TAB, but is unbound by default.'''
pass
### Methods below here are bindable emacs functions
def beginning_of_line(self, e): # (C-a)
'''Move to the start of the current line. '''
self.l_buffer.beginning_of_line()
def end_of_line(self, e): # (C-e)
'''Move to the end of the line. '''
self.l_buffer.end_of_line()
def forward_char(self, e): # (C-f)
'''Move forward a character. '''
self.l_buffer.forward_char(self.argument_reset)
def backward_char(self, e): # (C-b)
'''Move back a character. '''
self.l_buffer.backward_char(self.argument_reset)
def forward_word(self, e): # (M-f)
'''Move forward to the end of the next word. Words are composed of
letters and digits.'''
self.l_buffer.forward_word(self.argument_reset)
def backward_word(self, e): # (M-b)
'''Move back to the start of the current or previous word. Words are
composed of letters and digits.'''
self.l_buffer.backward_word(self.argument_reset)
def forward_word_end(self, e): # ()
'''Move forward to the end of the next word. Words are composed of
letters and digits.'''
self.l_buffer.forward_word_end(self.argument_reset)
def backward_word_end(self, e): # ()
'''Move forward to the end of the next word. Words are composed of
letters and digits.'''
self.l_buffer.backward_word_end(self.argument_reset)
### Movement with extend selection
def beginning_of_line_extend_selection(self, e): #
'''Move to the start of the current line. '''
self.l_buffer.beginning_of_line_extend_selection()
def end_of_line_extend_selection(self, e): #
'''Move to the end of the line. '''
self.l_buffer.end_of_line_extend_selection()
def forward_char_extend_selection(self, e): #
'''Move forward a character. '''
self.l_buffer.forward_char_extend_selection(self.argument_reset)
def backward_char_extend_selection(self, e): #
'''Move back a character. '''
self.l_buffer.backward_char_extend_selection(self.argument_reset)
def forward_word_extend_selection(self, e): #
'''Move forward to the end of the next word. Words are composed of
letters and digits.'''
self.l_buffer.forward_word_extend_selection(self.argument_reset)
def backward_word_extend_selection(self, e): #
'''Move back to the start of the current or previous word. Words are
composed of letters and digits.'''
self.l_buffer.backward_word_extend_selection(self.argument_reset)
def forward_word_end_extend_selection(self, e): #
'''Move forward to the end of the next word. Words are composed of
letters and digits.'''
self.l_buffer.forward_word_end_extend_selection(self.argument_reset)
def backward_word_end_extend_selection(self, e): #
'''Move forward to the end of the next word. Words are composed of
letters and digits.'''
self.l_buffer.forward_word_end_extend_selection(self.argument_reset)
######## Change case
def upcase_word(self, e): # (M-u)
'''Uppercase the current (or following) word. With a negative
argument, uppercase the previous word, but do not move the cursor.'''
self.l_buffer.upcase_word()
def downcase_word(self, e): # (M-l)
'''Lowercase the current (or following) word. With a negative
argument, lowercase the previous word, but do not move the cursor.'''
self.l_buffer.downcase_word()
def capitalize_word(self, e): # (M-c)
'''Capitalize the current (or following) word. With a negative
argument, capitalize the previous word, but do not move the cursor.'''
self.l_buffer.capitalize_word()
########
def clear_screen(self, e): # (C-l)
'''Clear the screen and redraw the current line, leaving the current
line at the top of the screen.'''
self.console.page()
def redraw_current_line(self, e): # ()
'''Refresh the current line. By default, this is unbound.'''
pass
def accept_line(self, e): # (Newline or Return)
'''Accept the line regardless of where the cursor is. If this line
is non-empty, it may be added to the history list for future recall
with add_history(). If this line is a modified history line, the
history line is restored to its original state.'''
return True
def delete_char(self, e): # (C-d)
'''Delete the character at point. If point is at the beginning of
the line, there are no characters in the line, and the last
character typed was not bound to delete-char, then return EOF.'''
self.l_buffer.delete_char(self.argument_reset)
def backward_delete_char(self, e): # (Rubout)
'''Delete the character behind the cursor. A numeric argument means
to kill the characters instead of deleting them.'''
self.l_buffer.backward_delete_char(self.argument_reset)
def backward_delete_word(self, e): # (Control-Rubout)
'''Delete the character behind the cursor. A numeric argument means
to kill the characters instead of deleting them.'''
self.l_buffer.backward_delete_word(self.argument_reset)
def forward_delete_word(self, e): # (Control-Delete)
'''Delete the character behind the cursor. A numeric argument means
to kill the characters instead of deleting them.'''
self.l_buffer.forward_delete_word(self.argument_reset)
def delete_horizontal_space(self, e): # ()
'''Delete all spaces and tabs around point. By default, this is unbound. '''
self.l_buffer.delete_horizontal_space()
def self_insert(self, e): # (a, b, A, 1, !, ...)
'''Insert yourself. '''
if e.char and ord(e.char) != 0: # don't insert null character in buffer, can happen with dead keys.
self.insert_text(e.char)
# Paste from clipboard
def paste(self, e):
'''Paste windows clipboard. Assumes a single line; strips other lines,
end-of-line markers and trailing spaces.''' # (Control-v)
if self.enable_win32_clipboard:
txt = clipboard.get_clipboard_text_and_convert(False)
txt = txt.split("\n")[0].strip("\r").strip("\n")
log("paste: >%s<" % map(ord, txt))
self.insert_text(txt)
def paste_mulitline_code(self, e):
'''Paste windows clipboard as multiline code.
Removes any empty lines in the code'''
reg = re.compile("\r?\n")
if self.enable_win32_clipboard:
txt = clipboard.get_clipboard_text_and_convert(False)
t = reg.split(txt)
t = [row for row in t if row.strip() != ""] # remove empty lines
if t != [""]:
self.insert_text(t[0])
self.add_history(self.l_buffer.copy())
self.paste_line_buffer = t[1:]
log("multi: %s" % self.paste_line_buffer)
return True
else:
return False
def ipython_paste(self, e):
'''Paste windows clipboard. If enable_ipython_paste_list_of_lists is
True then try to convert tabseparated data to repr of list of lists or
repr of array.
If enable_ipython_paste_for_paths==True then change \\ to / and spaces to \space'''
if self.enable_win32_clipboard:
txt = clipboard.get_clipboard_text_and_convert(
self.enable_ipython_paste_list_of_lists)
if self.enable_ipython_paste_for_paths:
if len(txt) < 300 and ("\t" not in txt) and ("\n" not in txt):
txt = txt.replace("\\", "/").replace(" ", r"\ ")
self.insert_text(txt)
def copy_region_to_clipboard(self, e): # ()
'''Copy the text in the region to the windows clipboard.'''
self.l_buffer.copy_region_to_clipboard()
def copy_selection_to_clipboard(self, e): # ()
'''Copy the text in the region to the windows clipboard.'''
self.l_buffer.copy_selection_to_clipboard()
def cut_selection_to_clipboard(self, e): # ()
'''Copy the text in the region to the windows clipboard.'''
self.l_buffer.cut_selection_to_clipboard()
def dump_functions(self, e): # ()
'''Print all of the functions and their key bindings to the Readline
output stream. If a numeric argument is supplied, the output is
formatted in such a way that it can be made part of an inputrc
file. This command is unbound by default.'''
print
txt = "\n".join(self.rl_settings_to_string())
print txt
self._print_prompt()
def commonprefix(m):
"Given a list of pathnames, returns the longest common leading component"
if not m: return ''
prefix = m[0]
for item in m:
for i in range(len(prefix)):
if prefix[:i + 1].lower() != item[:i + 1].lower():
prefix = prefix[:i]
if i == 0: return ''
break
return prefix
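if __name__ == '__main__':
    # Hedged self-test sketch (not part of the original module): exercises the
    # case-insensitive commonprefix() helper used by complete() above. The
    # sample strings are assumptions for illustration.
    assert commonprefix([]) == ''
    assert commonprefix(['readline']) == 'readline'
    assert commonprefix(['FooBar', 'foobaz']) == 'FooBa'
    print "commonprefix: ok"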
|
azunite/chrome_build
|
refs/heads/master
|
third_party/boto/file/key.py
|
82
|
# Copyright 2010 Google Inc.
# Copyright (c) 2011, Nexenta Systems Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
# File representation of key, for use with "file://" URIs.
import os, shutil, StringIO
import sys
from boto.exception import BotoClientError
class Key(object):
KEY_STREAM_READABLE = 0x01
KEY_STREAM_WRITABLE = 0x02
KEY_STREAM = (KEY_STREAM_READABLE | KEY_STREAM_WRITABLE)
KEY_REGULAR_FILE = 0x00
def __init__(self, bucket, name, fp=None, key_type=KEY_REGULAR_FILE):
self.bucket = bucket
self.full_path = name
if name == '-':
self.name = None
self.size = None
else:
self.name = name
self.size = os.stat(name).st_size
self.key_type = key_type
if key_type == self.KEY_STREAM_READABLE:
self.fp = sys.stdin
self.full_path = '<STDIN>'
elif key_type == self.KEY_STREAM_WRITABLE:
self.fp = sys.stdout
self.full_path = '<STDOUT>'
else:
self.fp = fp
def __str__(self):
return 'file://' + self.full_path
def get_file(self, fp, headers=None, cb=None, num_cb=10, torrent=False):
"""
Retrieves a file from a Key
:type fp: file
:param fp: File pointer to put the data into
:type headers: dict
:param headers: ignored in this subclass.
:type cb: function
:param cb: ignored in this subclass.
:type num_cb: int
:param num_cb: ignored in this subclass.
"""
if self.key_type & self.KEY_STREAM_WRITABLE:
raise BotoClientError('Stream is not readable')
elif self.key_type & self.KEY_STREAM_READABLE:
key_file = self.fp
else:
key_file = open(self.full_path, 'rb')
try:
shutil.copyfileobj(key_file, fp)
finally:
key_file.close()
def set_contents_from_file(self, fp, headers=None, replace=True, cb=None,
num_cb=10, policy=None, md5=None):
"""
Store an object in a file using the name of the Key object as the
key in file URI and the contents of the file pointed to by 'fp' as the
contents.
:type fp: file
:param fp: the file whose contents to upload
:type headers: dict
:param headers: ignored in this subclass.
:type replace: bool
:param replace: If this parameter is False, the method
will first check to see if an object exists in the
bucket with the same key. If it does, it won't
overwrite it. The default value is True which will
overwrite the object.
:type cb: function
:param cb: ignored in this subclass.
:type num_cb: int
:param num_cb: ignored in this subclass.
:type policy: :class:`boto.s3.acl.CannedACLStrings`
:param policy: ignored in this subclass.
:type md5: A tuple containing the hexdigest version of the MD5 checksum
of the file as the first element and the Base64-encoded
version of the plain checksum as the second element.
This is the same format returned by the compute_md5 method.
:param md5: ignored in this subclass.
"""
if self.key_type & self.KEY_STREAM_READABLE:
raise BotoClientError('Stream is not writable')
elif self.key_type & self.KEY_STREAM_WRITABLE:
key_file = self.fp
else:
if not replace and os.path.exists(self.full_path):
return
key_file = open(self.full_path, 'wb')
try:
shutil.copyfileobj(fp, key_file)
finally:
key_file.close()
def get_contents_to_file(self, fp, headers=None, cb=None, num_cb=None,
torrent=False, version_id=None,
res_download_handler=None, response_headers=None):
"""
Copy contents from the current file to the file pointed to by 'fp'.
:type fp: File-like object
:param fp: the file object to copy the contents to
:type headers: dict
:param headers: Unused in this subclass.
:type cb: function
:param cb: Unused in this subclass.
:type num_cb: int
:param num_cb: Unused in this subclass.
:type torrent: bool
:param torrent: Unused in this subclass.
:type res_download_handler: ResumableDownloadHandler
:param res_download_handler: Unused in this subclass.
:type response_headers: dict
:param response_headers: Unused in this subclass.
"""
shutil.copyfileobj(self.fp, fp)
def get_contents_as_string(self, headers=None, cb=None, num_cb=10,
torrent=False):
"""
Retrieve file data from the Key, and return contents as a string.
:type headers: dict
:param headers: ignored in this subclass.
:type cb: function
:param cb: ignored in this subclass.
:type num_cb: int
:param num_cb: ignored in this subclass.
:type torrent: bool
:param torrent: ignored in this subclass.
:rtype: string
:returns: The contents of the file as a string
"""
fp = StringIO.StringIO()
self.get_contents_to_file(fp)
return fp.getvalue()
def is_stream(self):
return (self.key_type & self.KEY_STREAM)
def close(self):
"""
Closes fp associated with underlying file.
Caller should call this method when done with this class, to avoid
using up OS resources (e.g., when iterating over a large number
of files).
"""
self.fp.close()
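if __name__ == '__main__':
    # Hedged usage sketch (not part of the original module): round-trips a
    # small payload through a regular-file Key. The temporary file and its
    # contents are assumptions; file:// keys need no real bucket object.
    import tempfile
    tmp = tempfile.NamedTemporaryFile(delete=False)
    tmp.write('hello, key')
    tmp.close()
    key = Key(bucket=None, name=tmp.name, fp=open(tmp.name, 'rb'))
    assert key.get_contents_as_string() == 'hello, key'
    key.close()
    os.remove(tmp.name)
    print "file Key round-trip: ok"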
|
gonboy/sl4a
|
refs/heads/master
|
python/src/Lib/lib2to3/fixes/fix_raise.py
|
53
|
"""Fixer for 'raise E, V, T'
raise -> raise
raise E -> raise E
raise E, V -> raise E(V)
raise E, V, T -> raise E(V).with_traceback(T)
raise (((E, E'), E''), E'''), V -> raise E(V)
raise "foo", V, T -> warns about string exceptions
CAVEATS:
1) "raise E, V" will be incorrectly translated if V is an exception
instance. The correct Python 3 idiom is
raise E from V
but since we can't detect instance-hood by syntax alone and since
any client code would have to be changed as well, we don't automate
this.
"""
# Author: Collin Winter
# Local imports
from .. import pytree
from ..pgen2 import token
from .. import fixer_base
from ..fixer_util import Name, Call, Attr, ArgList, is_tuple
class FixRaise(fixer_base.BaseFix):
PATTERN = """
raise_stmt< 'raise' exc=any [',' val=any [',' tb=any]] >
"""
def transform(self, node, results):
syms = self.syms
exc = results["exc"].clone()
if exc.type is token.STRING:
self.cannot_convert(node, "Python 3 does not support string exceptions")
return
# Python 2 supports
# raise ((((E1, E2), E3), E4), E5), V
# as a synonym for
# raise E1, V
# Since Python 3 will not support this, we recurse down any tuple
# literals, always taking the first element.
if is_tuple(exc):
while is_tuple(exc):
# exc.children[1:-1] is the unparenthesized tuple
# exc.children[1].children[0] is the first element of the tuple
exc = exc.children[1].children[0].clone()
exc.set_prefix(" ")
if "val" not in results:
# One-argument raise
new = pytree.Node(syms.raise_stmt, [Name("raise"), exc])
new.set_prefix(node.get_prefix())
return new
val = results["val"].clone()
if is_tuple(val):
args = [c.clone() for c in val.children[1:-1]]
else:
val.set_prefix("")
args = [val]
if "tb" in results:
tb = results["tb"].clone()
tb.set_prefix("")
e = Call(exc, args)
with_tb = Attr(e, Name('with_traceback')) + [ArgList([tb])]
new = pytree.Node(syms.simple_stmt, [Name("raise")] + with_tb)
new.set_prefix(node.get_prefix())
return new
else:
return pytree.Node(syms.raise_stmt,
[Name("raise"), Call(exc, args)],
prefix=node.get_prefix())
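if __name__ == '__main__':
    # Hedged usage sketch (not part of the original fixer): drives this fixer
    # through lib2to3's RefactoringTool under Python 2; the sample raise
    # statement is an assumption for illustration.
    from lib2to3.refactor import RefactoringTool
    tool = RefactoringTool(['lib2to3.fixes.fix_raise'])
    print tool.refactor_string("raise ValueError, 'boom'\n", '<example>')
    # expected output: raise ValueError('boom')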
|
ArcherSys/ArcherSys
|
refs/heads/master
|
Lib/test/test_class.py
|
1
|
<<<<<<< HEAD
<<<<<<< HEAD
"Test the functionality of Python classes implementing operators."
import unittest
from test import support
testmeths = [
# Binary operations
"add",
"radd",
"sub",
"rsub",
"mul",
"rmul",
"truediv",
"rtruediv",
"mod",
"rmod",
"divmod",
"rdivmod",
"pow",
"rpow",
"rshift",
"rrshift",
"lshift",
"rlshift",
"and",
"rand",
"or",
"ror",
"xor",
"rxor",
# List/dict operations
"contains",
"getitem",
"setitem",
"delitem",
# Unary operations
"neg",
"pos",
"abs",
# generic operations
"init",
]
# These need to return something other than None
# "hash",
# "str",
# "repr",
# "int",
# "float",
# These are separate because they can influence the test of other methods.
# "getattr",
# "setattr",
# "delattr",
callLst = []
def trackCall(f):
def track(*args, **kwargs):
callLst.append((f.__name__, args))
return f(*args, **kwargs)
return track
statictests = """
@trackCall
def __hash__(self, *args):
return hash(id(self))
@trackCall
def __str__(self, *args):
return "AllTests"
@trackCall
def __repr__(self, *args):
return "AllTests"
@trackCall
def __int__(self, *args):
return 1
@trackCall
def __index__(self, *args):
return 1
@trackCall
def __float__(self, *args):
return 1.0
@trackCall
def __eq__(self, *args):
return True
@trackCall
def __ne__(self, *args):
return False
@trackCall
def __lt__(self, *args):
return False
@trackCall
def __le__(self, *args):
return True
@trackCall
def __gt__(self, *args):
return False
@trackCall
def __ge__(self, *args):
return True
"""
# Synthesize all the other AllTests methods from the names in testmeths.
method_template = """\
@trackCall
def __%s__(self, *args):
pass
"""
d = {}
exec(statictests, globals(), d)
for method in testmeths:
exec(method_template % method, globals(), d)
AllTests = type("AllTests", (object,), d)
del d, statictests, method, method_template
class ClassTests(unittest.TestCase):
def setUp(self):
callLst[:] = []
def assertCallStack(self, expected_calls):
actualCallList = callLst[:] # need to copy because the comparison below will add
# additional calls to callLst
if expected_calls != actualCallList:
self.fail("Expected call list:\n %s\ndoes not match actual call list\n %s" %
(expected_calls, actualCallList))
def testInit(self):
foo = AllTests()
self.assertCallStack([("__init__", (foo,))])
def testBinaryOps(self):
testme = AllTests()
# Binary operations
callLst[:] = []
testme + 1
self.assertCallStack([("__add__", (testme, 1))])
callLst[:] = []
1 + testme
self.assertCallStack([("__radd__", (testme, 1))])
callLst[:] = []
testme - 1
self.assertCallStack([("__sub__", (testme, 1))])
callLst[:] = []
1 - testme
self.assertCallStack([("__rsub__", (testme, 1))])
callLst[:] = []
testme * 1
self.assertCallStack([("__mul__", (testme, 1))])
callLst[:] = []
1 * testme
self.assertCallStack([("__rmul__", (testme, 1))])
if 1/2 == 0:
callLst[:] = []
testme / 1
self.assertCallStack([("__div__", (testme, 1))])
callLst[:] = []
1 / testme
self.assertCallStack([("__rdiv__", (testme, 1))])
callLst[:] = []
testme % 1
self.assertCallStack([("__mod__", (testme, 1))])
callLst[:] = []
1 % testme
self.assertCallStack([("__rmod__", (testme, 1))])
callLst[:] = []
divmod(testme,1)
self.assertCallStack([("__divmod__", (testme, 1))])
callLst[:] = []
divmod(1, testme)
self.assertCallStack([("__rdivmod__", (testme, 1))])
callLst[:] = []
testme ** 1
self.assertCallStack([("__pow__", (testme, 1))])
callLst[:] = []
1 ** testme
self.assertCallStack([("__rpow__", (testme, 1))])
callLst[:] = []
testme >> 1
self.assertCallStack([("__rshift__", (testme, 1))])
callLst[:] = []
1 >> testme
self.assertCallStack([("__rrshift__", (testme, 1))])
callLst[:] = []
testme << 1
self.assertCallStack([("__lshift__", (testme, 1))])
callLst[:] = []
1 << testme
self.assertCallStack([("__rlshift__", (testme, 1))])
callLst[:] = []
testme & 1
self.assertCallStack([("__and__", (testme, 1))])
callLst[:] = []
1 & testme
self.assertCallStack([("__rand__", (testme, 1))])
callLst[:] = []
testme | 1
self.assertCallStack([("__or__", (testme, 1))])
callLst[:] = []
1 | testme
self.assertCallStack([("__ror__", (testme, 1))])
callLst[:] = []
testme ^ 1
self.assertCallStack([("__xor__", (testme, 1))])
callLst[:] = []
1 ^ testme
self.assertCallStack([("__rxor__", (testme, 1))])
def testListAndDictOps(self):
testme = AllTests()
# List/dict operations
class Empty: pass
try:
1 in Empty()
self.fail('failed, should have raised TypeError')
except TypeError:
pass
callLst[:] = []
1 in testme
self.assertCallStack([('__contains__', (testme, 1))])
callLst[:] = []
testme[1]
self.assertCallStack([('__getitem__', (testme, 1))])
callLst[:] = []
testme[1] = 1
self.assertCallStack([('__setitem__', (testme, 1, 1))])
callLst[:] = []
del testme[1]
self.assertCallStack([('__delitem__', (testme, 1))])
callLst[:] = []
testme[:42]
self.assertCallStack([('__getitem__', (testme, slice(None, 42)))])
callLst[:] = []
testme[:42] = "The Answer"
self.assertCallStack([('__setitem__', (testme, slice(None, 42),
"The Answer"))])
callLst[:] = []
del testme[:42]
self.assertCallStack([('__delitem__', (testme, slice(None, 42)))])
callLst[:] = []
testme[2:1024:10]
self.assertCallStack([('__getitem__', (testme, slice(2, 1024, 10)))])
callLst[:] = []
testme[2:1024:10] = "A lot"
self.assertCallStack([('__setitem__', (testme, slice(2, 1024, 10),
"A lot"))])
callLst[:] = []
del testme[2:1024:10]
self.assertCallStack([('__delitem__', (testme, slice(2, 1024, 10)))])
callLst[:] = []
testme[:42, ..., :24:, 24, 100]
self.assertCallStack([('__getitem__', (testme, (slice(None, 42, None),
Ellipsis,
slice(None, 24, None),
24, 100)))])
callLst[:] = []
testme[:42, ..., :24:, 24, 100] = "Strange"
self.assertCallStack([('__setitem__', (testme, (slice(None, 42, None),
Ellipsis,
slice(None, 24, None),
24, 100), "Strange"))])
callLst[:] = []
del testme[:42, ..., :24:, 24, 100]
self.assertCallStack([('__delitem__', (testme, (slice(None, 42, None),
Ellipsis,
slice(None, 24, None),
24, 100)))])
def testUnaryOps(self):
testme = AllTests()
callLst[:] = []
-testme
self.assertCallStack([('__neg__', (testme,))])
callLst[:] = []
+testme
self.assertCallStack([('__pos__', (testme,))])
callLst[:] = []
abs(testme)
self.assertCallStack([('__abs__', (testme,))])
callLst[:] = []
int(testme)
self.assertCallStack([('__int__', (testme,))])
callLst[:] = []
float(testme)
self.assertCallStack([('__float__', (testme,))])
callLst[:] = []
oct(testme)
self.assertCallStack([('__index__', (testme,))])
callLst[:] = []
hex(testme)
self.assertCallStack([('__index__', (testme,))])
def testMisc(self):
testme = AllTests()
callLst[:] = []
hash(testme)
self.assertCallStack([('__hash__', (testme,))])
callLst[:] = []
repr(testme)
self.assertCallStack([('__repr__', (testme,))])
callLst[:] = []
str(testme)
self.assertCallStack([('__str__', (testme,))])
callLst[:] = []
testme == 1
self.assertCallStack([('__eq__', (testme, 1))])
callLst[:] = []
testme < 1
self.assertCallStack([('__lt__', (testme, 1))])
callLst[:] = []
testme > 1
self.assertCallStack([('__gt__', (testme, 1))])
callLst[:] = []
testme != 1
self.assertCallStack([('__ne__', (testme, 1))])
callLst[:] = []
1 == testme
self.assertCallStack([('__eq__', (1, testme))])
callLst[:] = []
1 < testme
self.assertCallStack([('__gt__', (1, testme))])
callLst[:] = []
1 > testme
self.assertCallStack([('__lt__', (1, testme))])
callLst[:] = []
1 != testme
self.assertCallStack([('__ne__', (1, testme))])
def testGetSetAndDel(self):
# Interfering tests
class ExtraTests(AllTests):
@trackCall
def __getattr__(self, *args):
return "SomeVal"
@trackCall
def __setattr__(self, *args):
pass
@trackCall
def __delattr__(self, *args):
pass
testme = ExtraTests()
callLst[:] = []
testme.spam
self.assertCallStack([('__getattr__', (testme, "spam"))])
callLst[:] = []
testme.eggs = "spam, spam, spam and ham"
self.assertCallStack([('__setattr__', (testme, "eggs",
"spam, spam, spam and ham"))])
callLst[:] = []
del testme.cardinal
self.assertCallStack([('__delattr__', (testme, "cardinal"))])
def testDel(self):
x = []
class DelTest:
def __del__(self):
x.append("crab people, crab people")
testme = DelTest()
del testme
import gc
gc.collect()
self.assertEqual(["crab people, crab people"], x)
def testBadTypeReturned(self):
# return values of some method are type-checked
class BadTypeClass:
def __int__(self):
return None
__float__ = __int__
__str__ = __int__
__repr__ = __int__
__oct__ = __int__
__hex__ = __int__
for f in [int, float, str, repr, oct, hex]:
self.assertRaises(TypeError, f, BadTypeClass())
def testHashStuff(self):
# Test correct errors from hash() on objects with comparisons but
# no __hash__
class C0:
pass
hash(C0()) # This should work; the next two should raise TypeError
class C2:
def __eq__(self, other): return 1
self.assertRaises(TypeError, hash, C2())
def testSFBug532646(self):
# Test for SF bug 532646
class A:
pass
A.__call__ = A()
a = A()
try:
a() # This should not segfault
except RuntimeError:
pass
else:
self.fail("Failed to raise RuntimeError")
def testForExceptionsRaisedInInstanceGetattr2(self):
# Tests for exceptions raised in instance_getattr2().
def booh(self):
raise AttributeError("booh")
class A:
a = property(booh)
try:
A().a # Raised AttributeError: A instance has no attribute 'a'
except AttributeError as x:
if str(x) != "booh":
self.fail("attribute error for A().a got masked: %s" % x)
class E:
__eq__ = property(booh)
E() == E() # In debug mode, caused a C-level assert() to fail
class I:
__init__ = property(booh)
try:
# In debug mode, printed XXX undetected error and
# raises AttributeError
I()
except AttributeError as x:
pass
else:
self.fail("attribute error for I.__init__ got masked")
def testHashComparisonOfMethods(self):
# Test comparison and hash of methods
class A:
def __init__(self, x):
self.x = x
def f(self):
pass
def g(self):
pass
def __eq__(self, other):
return self.x == other.x
def __hash__(self):
return self.x
class B(A):
pass
a1 = A(1)
a2 = A(2)
self.assertEqual(a1.f, a1.f)
self.assertNotEqual(a1.f, a2.f)
self.assertNotEqual(a1.f, a1.g)
self.assertEqual(a1.f, A(1).f)
self.assertEqual(hash(a1.f), hash(a1.f))
self.assertEqual(hash(a1.f), hash(A(1).f))
self.assertNotEqual(A.f, a1.f)
self.assertNotEqual(A.f, A.g)
self.assertEqual(B.f, A.f)
self.assertEqual(hash(B.f), hash(A.f))
# the following triggers a SystemError in 2.4
a = A(hash(A.f)^(-1))
hash(a.f)
def test_main():
support.run_unittest(ClassTests)
if __name__=='__main__':
test_main()
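# Illustration (comment added for clarity, not part of the original test):
# wrapping a method with trackCall means every invocation appends
# (method_name, args) to callLst, so after
#     testme = AllTests(); testme + 1
# callLst holds [('__init__', (testme,)), ('__add__', (testme, 1))], which is
# exactly the shape assertCallStack compares against below.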
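# Note (comment added for clarity): AllTests is synthesized at import time --
# the tracked special methods above are compiled into the dict d via exec(),
# and the three-argument type() call turns that dict into a new class object.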
|
indro/t2c
|
refs/heads/master
|
libs/external_libs/Pygments-0.11.1/scripts/check_sources.py
|
5
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Checker for file headers
~~~~~~~~~~~~~~~~~~~~~~~~
Make sure each Python file has a correct file header
including copyright and license information.
:copyright: 2006-2007 by Georg Brandl.
:license: GNU GPL, see LICENSE for more details.
"""
import sys, os, re
import getopt
import cStringIO
from os.path import join, splitext, abspath
checkers = {}
def checker(*suffixes, **kwds):
only_pkg = kwds.pop('only_pkg', False)
def deco(func):
for suffix in suffixes:
checkers.setdefault(suffix, []).append(func)
func.only_pkg = only_pkg
return func
return deco
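# Registry sketch (comment added for clarity): after the decorated functions
# below are defined, checkers maps file suffixes to checker lists, e.g.
#   {'.py': [check_syntax, check_style_and_encoding, check_fileheader],
#    '.html': [...], '.js': [...]}
# and main() runs every checker registered for a file's extension.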
name_mail_re = r'[\w ]+(<.*?>)?'
copyright_re = re.compile(r'^ :copyright: 200\d(-200\d)? by %s(, %s)*[,.]$' %
(name_mail_re, name_mail_re))
copyright_2_re = re.compile(r'^ %s(, %s)*[,.]$' %
(name_mail_re, name_mail_re))
coding_re = re.compile(r'coding[:=]\s*([-\w.]+)')
not_ix_re = re.compile(r'\bnot\s+\S+?\s+i[sn]\s\S+')
is_const_re = re.compile(r'if.*?==\s+(None|False|True)\b')
misspellings = ["developement", "adress", "verificate", # ALLOW-MISSPELLING
"informations"] # ALLOW-MISSPELLING
@checker('.py')
def check_syntax(fn, lines):
try:
compile(''.join(lines), fn, "exec")
except SyntaxError, err:
yield 0, "not compilable: %s" % err
@checker('.py')
def check_style_and_encoding(fn, lines):
encoding = 'ascii'
for lno, line in enumerate(lines):
if len(line) > 90:
yield lno+1, "line too long"
m = not_ix_re.search(line)
if m:
yield lno+1, '"' + m.group() + '"'
if is_const_re.search(line):
yield lno+1, 'using == None/True/False'
if lno < 2:
co = coding_re.search(line)
if co:
encoding = co.group(1)
try:
line.decode(encoding)
except UnicodeDecodeError, err:
yield lno+1, "not decodable: %s\n Line: %r" % (err, line)
except LookupError, err:
yield 0, "unknown encoding: %s" % encoding
encoding = 'latin1'
@checker('.py', only_pkg=True)
def check_fileheader(fn, lines):
# line number correction
c = 1
if lines[0:1] == ['#!/usr/bin/env python\n']:
lines = lines[1:]
c = 2
llist = []
docopen = False
for lno, l in enumerate(lines):
llist.append(l)
if lno == 0:
if l == '# -*- coding: rot13 -*-\n':
# special-case pony package
return
elif l != '# -*- coding: utf-8 -*-\n':
yield 1, "missing coding declaration"
elif lno == 1:
if l != '"""\n' and l != 'r"""\n':
yield 2, 'missing docstring begin (""")'
else:
docopen = True
elif docopen:
if l == '"""\n':
# end of docstring
if lno <= 4:
yield lno+c, "missing module name in docstring"
break
if l != "\n" and l[:4] != ' ' and docopen:
yield lno+c, "missing correct docstring indentation"
if lno == 2:
# if not in package, don't check the module name
modname = fn[:-3].replace('/', '.').replace('.__init__', '')
while modname:
if l.lower()[4:-1] == modname:
break
modname = '.'.join(modname.split('.')[1:])
else:
yield 3, "wrong module name in docstring heading"
modnamelen = len(l.strip())
elif lno == 3:
if l.strip() != modnamelen * "~":
yield 4, "wrong module name underline, should be ~~~...~"
else:
yield 0, "missing end and/or start of docstring..."
# check for copyright and license fields
license = llist[-2:-1]
if license != [" :license: BSD, see LICENSE for more details.\n"]:
yield 0, "no correct license info"
ci = -3
copyright = llist[ci:ci+1]
while copyright and copyright_2_re.match(copyright[0]):
ci -= 1
copyright = llist[ci:ci+1]
if not copyright or not copyright_re.match(copyright[0]):
yield 0, "no correct copyright info"
@checker('.py', '.html', '.js')
def check_whitespace_and_spelling(fn, lines):
for lno, line in enumerate(lines):
if "\t" in line:
yield lno+1, "OMG TABS!!!1 "
if line[:-1].rstrip(' \t') != line[:-1]:
yield lno+1, "trailing whitespace"
for word in misspellings:
if word in line and 'ALLOW-MISSPELLING' not in line:
yield lno+1, '"%s" used' % word
bad_tags = ('<b>', '<i>', '<u>', '<s>', '<strike>',
            '<center>', '<big>', '<small>', '<font')
@checker('.html')
def check_xhtml(fn, lines):
for lno, line in enumerate(lines):
for bad_tag in bad_tags:
if bad_tag in line:
yield lno+1, "used " + bad_tag
def main(argv):
try:
gopts, args = getopt.getopt(argv[1:], "vi:")
except getopt.GetoptError:
print "Usage: %s [-v] [-i ignorepath]* [path]" % argv[0]
return 2
opts = {}
for opt, val in gopts:
if opt == '-i':
val = abspath(val)
opts.setdefault(opt, []).append(val)
if len(args) == 0:
path = '.'
elif len(args) == 1:
path = args[0]
else:
print "Usage: %s [-v] [-i ignorepath]* [path]" % argv[0]
return 2
verbose = '-v' in opts
num = 0
out = cStringIO.StringIO()
# TODO: replace os.walk run with iteration over output of
# `svn list -R`.
for root, dirs, files in os.walk(path):
if '.svn' in dirs:
dirs.remove('.svn')
if '-i' in opts and abspath(root) in opts['-i']:
del dirs[:]
continue
# XXX: awkward: for the Makefile call: don't check non-package
# files for file headers
in_pocoo_pkg = root.startswith('./pygments')
for fn in files:
fn = join(root, fn)
if fn[:2] == './': fn = fn[2:]
if '-i' in opts and abspath(fn) in opts['-i']:
continue
ext = splitext(fn)[1]
checkerlist = checkers.get(ext, None)
if not checkerlist:
continue
if verbose:
print "Checking %s..." % fn
try:
f = open(fn, 'r')
lines = list(f)
except (IOError, OSError), err:
print "%s: cannot open: %s" % (fn, err)
num += 1
continue
for checker in checkerlist:
if not in_pocoo_pkg and checker.only_pkg:
continue
for lno, msg in checker(fn, lines):
print >>out, "%s:%d: %s" % (fn, lno, msg)
num += 1
if verbose:
print
if num == 0:
print "No errors found."
else:
print out.getvalue().rstrip('\n')
print "%d error%s found." % (num, num > 1 and "s" or "")
return int(num > 0)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
dotpmrcunha/gnuradio
|
refs/heads/master
|
gr-atsc/python/atsc/atsc_rx.py
|
45
|
#!/usr/bin/env python
#
# Copyright 2004, 2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
#
# This module starts the atsc processing chain taking the captured
# off-air signal created with:
#
# uhd_rx_cfile.py --samp-rate=6.4e6
# -f <center of tv signal channel freq>
# -g <appropriate gain for best signal / noise>
# -s output shorts
#
# This python script converts from interleaved shorts to the complex data type,
# then multiplies the sample rate by 3, from 6.4e6 to 19.2e6
# complex samples / sec, then lowpass filters with a cutoff of 3.2MHz
# and a transition bandwidth of .5MHz. The center of the tv channel is
# now at 0 with edges at -3.2MHz and 3.2MHz, which puts the pilot at
# -3MHz + 309KHz. Next a root raised cosine filter is applied to match the
# one in the transmitter and thus reduce ISI. The phase-locked loop then
# locks to the pilot and outputs just the real part of the signal (as
# information is not stored in the phase with atsc). This is then fed to
# the bit timing loop, which looks for the sync marker placed at the
# beginning of every segment and adjusts the timing so the amplitude is
# sampled at the correct (sub-sample) instant.
#
# Output is float.
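#
# Signal-chain summary (comment added for clarity; it mirrors the connect()
# calls in graph() below):
#   file (shorts) -> complex -> RRC matched filter -> 3x interp lowpass
#   -> translate to 5.75MHz IF -> FPLL (real out) -> pilot/DC removal
#   -> bit timing loop -> field sync check -> equalizer -> demux
#   -> Viterbi -> deinterleave -> RS decode -> derandomize -> depad -> file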
from gnuradio import gr, analog, atsc
from gnuradio import blocks
from gnuradio import filter
import sys, math, os
def graph (args):
print os.getpid()
nargs = len(args)
if nargs == 2:
infile = args[0]
outfile = args[1]
else:
raise ValueError('usage: interp.py input_file output_file.ts\n')
input_rate = 19.2e6
IF_freq = 5.75e6
tb = gr.top_block()
# Read from input file
srcf = blocks.file_source(gr.sizeof_short, infile)
# Convert interleaved shorts (I,Q,I,Q) to complex
is2c = blocks.interleaved_short_to_complex()
# 1/2 as wide because we're designing lp filter
symbol_rate = atsc.ATSC_SYMBOL_RATE/2.
NTAPS = 279
tt = filter.firdes.root_raised_cosine (1.0, input_rate / 3, symbol_rate, .1152, NTAPS)
rrc = filter.fir_filter_ccf(1, tt)
# Interpolate Filter our 6MHz wide signal centered at 0
ilp_coeffs = filter.firdes.low_pass(1, input_rate, 3.2e6, .5e6, filter.firdes.WIN_HAMMING)
ilp = filter.interp_fir_filter_ccf(3, ilp_coeffs)
# Move the center frequency to 5.75MHz ( this wont be needed soon )
duc_coeffs = filter.firdes.low_pass ( 1, 19.2e6, 9e6, 1e6, filter.firdes.WIN_HAMMING )
duc = filter.freq_xlating_fir_filter_ccf ( 1, duc_coeffs, -5.75e6, 19.2e6 )
# fpll input is float
c2f = blocks.complex_to_float()
# Phase locked loop
fpll = atsc.fpll()
# Clean fpll output
lp_coeffs2 = filter.firdes.low_pass (1.0,
input_rate,
5.75e6,
120e3,
filter.firdes.WIN_HAMMING);
lp_filter = filter.fir_filter_fff (1, lp_coeffs2)
# Remove pilot ( at DC now )
iir = filter.single_pole_iir_filter_ff(1e-5)
remove_dc = blocks.sub_ff()
# Bit Timing Loop, Field Sync Checker and Equalizer
btl = atsc.bit_timing_loop()
fsc = atsc.fs_checker()
eq = atsc.equalizer()
fsd = atsc.field_sync_demux()
# Viterbi
viterbi = atsc.viterbi_decoder()
deinter = atsc.deinterleaver()
rs_dec = atsc.rs_decoder()
derand = atsc.derandomizer()
depad = atsc.depad()
# Write to output file
outf = blocks.file_sink(gr.sizeof_char,outfile)
# Connect it all together
tb.connect( srcf, is2c, rrc, ilp, duc, c2f, fpll, lp_filter)
tb.connect( lp_filter, iir )
tb.connect( lp_filter, (remove_dc, 0) )
tb.connect( iir, (remove_dc, 1) )
tb.connect( remove_dc, btl )
tb.connect( (btl, 0), (fsc, 0), (eq, 0), (fsd,0) )
tb.connect( (btl, 1), (fsc, 1), (eq, 1), (fsd,1) )
tb.connect( fsd, viterbi, deinter, rs_dec, derand, depad, outf )
tb.run()
if __name__ == '__main__':
graph (sys.argv[1:])
|
ayepezv/GAD_ERP
|
refs/heads/master
|
addons/hr_gamification/__openerp__.py
|
2
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'HR Gamification',
'version': '1.0',
'category': 'Human Resources',
'website': 'https://www.odoo.com/page/employees',
'depends': ['gamification', 'hr'],
'description': """Use the HR resources for the gamification process.
The HR officer can now manage challenges and badges.
This allows the user to send badges to employees instead of simple users.
Badges received are displayed on the user profile.
""",
'data': [
'security/ir.model.access.csv',
'security/gamification_security.xml',
'wizard/gamification_badge_user_wizard_views.xml',
'views/gamification_views.xml',
'views/hr_employee_views.xml',
'views/gamification_templates.xml',
],
'auto_install': True,
}
|
havard024/prego
|
refs/heads/master
|
crm/lib/python2.7/site-packages/unidecode/x06e.py
|
252
|
data = (
'Ben ', # 0x00
'Yuan ', # 0x01
'Wen ', # 0x02
'Re ', # 0x03
'Fei ', # 0x04
'Qing ', # 0x05
'Yuan ', # 0x06
'Ke ', # 0x07
'Ji ', # 0x08
'She ', # 0x09
'Yuan ', # 0x0a
'Shibui ', # 0x0b
'Lu ', # 0x0c
'Zi ', # 0x0d
'Du ', # 0x0e
'[?] ', # 0x0f
'Jian ', # 0x10
'Min ', # 0x11
'Pi ', # 0x12
'Tani ', # 0x13
'Yu ', # 0x14
'Yuan ', # 0x15
'Shen ', # 0x16
'Shen ', # 0x17
'Rou ', # 0x18
'Huan ', # 0x19
'Zhu ', # 0x1a
'Jian ', # 0x1b
'Nuan ', # 0x1c
'Yu ', # 0x1d
'Qiu ', # 0x1e
'Ting ', # 0x1f
'Qu ', # 0x20
'Du ', # 0x21
'Feng ', # 0x22
'Zha ', # 0x23
'Bo ', # 0x24
'Wo ', # 0x25
'Wo ', # 0x26
'Di ', # 0x27
'Wei ', # 0x28
'Wen ', # 0x29
'Ru ', # 0x2a
'Xie ', # 0x2b
'Ce ', # 0x2c
'Wei ', # 0x2d
'Ge ', # 0x2e
'Gang ', # 0x2f
'Yan ', # 0x30
'Hong ', # 0x31
'Xuan ', # 0x32
'Mi ', # 0x33
'Ke ', # 0x34
'Mao ', # 0x35
'Ying ', # 0x36
'Yan ', # 0x37
'You ', # 0x38
'Hong ', # 0x39
'Miao ', # 0x3a
'Xing ', # 0x3b
'Mei ', # 0x3c
'Zai ', # 0x3d
'Hun ', # 0x3e
'Nai ', # 0x3f
'Kui ', # 0x40
'Shi ', # 0x41
'E ', # 0x42
'Pai ', # 0x43
'Mei ', # 0x44
'Lian ', # 0x45
'Qi ', # 0x46
'Qi ', # 0x47
'Mei ', # 0x48
'Tian ', # 0x49
'Cou ', # 0x4a
'Wei ', # 0x4b
'Can ', # 0x4c
'Tuan ', # 0x4d
'Mian ', # 0x4e
'Hui ', # 0x4f
'Mo ', # 0x50
'Xu ', # 0x51
'Ji ', # 0x52
'Pen ', # 0x53
'Jian ', # 0x54
'Jian ', # 0x55
'Hu ', # 0x56
'Feng ', # 0x57
'Xiang ', # 0x58
'Yi ', # 0x59
'Yin ', # 0x5a
'Zhan ', # 0x5b
'Shi ', # 0x5c
'Jie ', # 0x5d
'Cheng ', # 0x5e
'Huang ', # 0x5f
'Tan ', # 0x60
'Yu ', # 0x61
'Bi ', # 0x62
'Min ', # 0x63
'Shi ', # 0x64
'Tu ', # 0x65
'Sheng ', # 0x66
'Yong ', # 0x67
'Qu ', # 0x68
'Zhong ', # 0x69
'Suei ', # 0x6a
'Jiu ', # 0x6b
'Jiao ', # 0x6c
'Qiou ', # 0x6d
'Yin ', # 0x6e
'Tang ', # 0x6f
'Long ', # 0x70
'Huo ', # 0x71
'Yuan ', # 0x72
'Nan ', # 0x73
'Ban ', # 0x74
'You ', # 0x75
'Quan ', # 0x76
'Chui ', # 0x77
'Liang ', # 0x78
'Chan ', # 0x79
'Yan ', # 0x7a
'Chun ', # 0x7b
'Nie ', # 0x7c
'Zi ', # 0x7d
'Wan ', # 0x7e
'Shi ', # 0x7f
'Man ', # 0x80
'Ying ', # 0x81
'Ratsu ', # 0x82
'Kui ', # 0x83
'[?] ', # 0x84
'Jian ', # 0x85
'Xu ', # 0x86
'Lu ', # 0x87
'Gui ', # 0x88
'Gai ', # 0x89
'[?] ', # 0x8a
'[?] ', # 0x8b
'Po ', # 0x8c
'Jin ', # 0x8d
'Gui ', # 0x8e
'Tang ', # 0x8f
'Yuan ', # 0x90
'Suo ', # 0x91
'Yuan ', # 0x92
'Lian ', # 0x93
'Yao ', # 0x94
'Meng ', # 0x95
'Zhun ', # 0x96
'Sheng ', # 0x97
'Ke ', # 0x98
'Tai ', # 0x99
'Da ', # 0x9a
'Wa ', # 0x9b
'Liu ', # 0x9c
'Gou ', # 0x9d
'Sao ', # 0x9e
'Ming ', # 0x9f
'Zha ', # 0xa0
'Shi ', # 0xa1
'Yi ', # 0xa2
'Lun ', # 0xa3
'Ma ', # 0xa4
'Pu ', # 0xa5
'Wei ', # 0xa6
'Li ', # 0xa7
'Cai ', # 0xa8
'Wu ', # 0xa9
'Xi ', # 0xaa
'Wen ', # 0xab
'Qiang ', # 0xac
'Ze ', # 0xad
'Shi ', # 0xae
'Su ', # 0xaf
'Yi ', # 0xb0
'Zhen ', # 0xb1
'Sou ', # 0xb2
'Yun ', # 0xb3
'Xiu ', # 0xb4
'Yin ', # 0xb5
'Rong ', # 0xb6
'Hun ', # 0xb7
'Su ', # 0xb8
'Su ', # 0xb9
'Ni ', # 0xba
'Ta ', # 0xbb
'Shi ', # 0xbc
'Ru ', # 0xbd
'Wei ', # 0xbe
'Pan ', # 0xbf
'Chu ', # 0xc0
'Chu ', # 0xc1
'Pang ', # 0xc2
'Weng ', # 0xc3
'Cang ', # 0xc4
'Mie ', # 0xc5
'He ', # 0xc6
'Dian ', # 0xc7
'Hao ', # 0xc8
'Huang ', # 0xc9
'Xi ', # 0xca
'Zi ', # 0xcb
'Di ', # 0xcc
'Zhi ', # 0xcd
'Ying ', # 0xce
'Fu ', # 0xcf
'Jie ', # 0xd0
'Hua ', # 0xd1
'Ge ', # 0xd2
'Zi ', # 0xd3
'Tao ', # 0xd4
'Teng ', # 0xd5
'Sui ', # 0xd6
'Bi ', # 0xd7
'Jiao ', # 0xd8
'Hui ', # 0xd9
'Gun ', # 0xda
'Yin ', # 0xdb
'Gao ', # 0xdc
'Long ', # 0xdd
'Zhi ', # 0xde
'Yan ', # 0xdf
'She ', # 0xe0
'Man ', # 0xe1
'Ying ', # 0xe2
'Chun ', # 0xe3
'Lu ', # 0xe4
'Lan ', # 0xe5
'Luan ', # 0xe6
'[?] ', # 0xe7
'Bin ', # 0xe8
'Tan ', # 0xe9
'Yu ', # 0xea
'Sou ', # 0xeb
'Hu ', # 0xec
'Bi ', # 0xed
'Biao ', # 0xee
'Zhi ', # 0xef
'Jiang ', # 0xf0
'Kou ', # 0xf1
'Shen ', # 0xf2
'Shang ', # 0xf3
'Di ', # 0xf4
'Mi ', # 0xf5
'Ao ', # 0xf6
'Lu ', # 0xf7
'Hu ', # 0xf8
'Hu ', # 0xf9
'You ', # 0xfa
'Chan ', # 0xfb
'Fan ', # 0xfc
'Yong ', # 0xfd
'Gun ', # 0xfe
'Man ', # 0xff
)
|
veltri/DLV2
|
refs/heads/master
|
tests/parser/bug.03.test.py
|
1
|
input = """
a.
b.
c.
q :- p, not a.
q :- not p.
p :- c.
"""
output = """
a.
b.
c.
q :- p, not a.
q :- not p.
p :- c.
"""
|
hackerbot/DjangoDev
|
refs/heads/master
|
django/contrib/auth/context_processors.py
|
514
|
# PermWrapper and PermLookupDict proxy the permissions system into objects that
# the template system can understand.
class PermLookupDict(object):
def __init__(self, user, app_label):
self.user, self.app_label = user, app_label
def __repr__(self):
return str(self.user.get_all_permissions())
def __getitem__(self, perm_name):
return self.user.has_perm("%s.%s" % (self.app_label, perm_name))
def __iter__(self):
# To fix 'item in perms.someapp' and __getitem__ interaction we need to
# define __iter__. See #18979 for details.
raise TypeError("PermLookupDict is not iterable.")
def __bool__(self):
return self.user.has_module_perms(self.app_label)
def __nonzero__(self): # Python 2 compatibility
return type(self).__bool__(self)
class PermWrapper(object):
def __init__(self, user):
self.user = user
def __getitem__(self, app_label):
return PermLookupDict(self.user, app_label)
def __iter__(self):
# I am large, I contain multitudes.
raise TypeError("PermWrapper is not iterable.")
def __contains__(self, perm_name):
"""
Lookup by "someapp" or "someapp.someperm" in perms.
"""
if '.' not in perm_name:
# The name refers to module.
return bool(self[perm_name])
app_label, perm_name = perm_name.split('.', 1)
return self[app_label][perm_name]
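# Usage sketch (comment added for clarity; 'polls' is a hypothetical app
# label, not one defined here):
#   {% if perms.polls %}          -> PermLookupDict.__bool__ -> has_module_perms
#   {% if perms.polls.add_poll %} -> PermLookupDict.__getitem__ -> has_perm
#   'polls.add_poll' in perms     -> PermWrapper.__contains__ above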
def auth(request):
"""
Returns context variables required by apps that use Django's authentication
system.
If there is no 'user' attribute in the request, uses AnonymousUser (from
django.contrib.auth).
"""
if hasattr(request, 'user'):
user = request.user
else:
from django.contrib.auth.models import AnonymousUser
user = AnonymousUser()
return {
'user': user,
'perms': PermWrapper(user),
}
|
ProfessorX/Emacs-Laptop
|
refs/heads/master
|
elpa/anaconda-mode-20140901.1158/jedi/api/keywords.py
|
20
|
import pydoc
import keyword
from jedi._compatibility import is_py3
from jedi import common
from jedi.evaluate import compiled
try:
from pydoc_data import topics as pydoc_topics
except ImportError:
# Python 2.6
import pydoc_topics
if is_py3:
keys = keyword.kwlist
else:
keys = keyword.kwlist + ['None', 'False', 'True']
def keywords(string='', pos=(0, 0), all=False):
if all:
return set([Keyword(k, pos) for k in keys])
if string in keys:
return set([Keyword(string, pos)])
return set()
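# Example (comment added for clarity): keywords('if') returns the one-element
# set {Keyword('if', (0, 0))}, while keywords(all=True) yields a Keyword for
# every name in keys.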
def keyword_names(*args, **kwargs):
kwds = []
for k in keywords(*args, **kwargs):
start = k.start_pos
kwds.append(KeywordName(k, k.name, start))
return kwds
def get_operator(string, pos):
return Keyword(string, pos)
class KeywordName(object):
def __init__(self, parent, name, start_pos):
self.parent = parent
self.names = [name]
self.start_pos = start_pos
@property
def end_pos(self):
return self.start_pos[0], self.start_pos[1] + len(self.name)
class Keyword(object):
def __init__(self, name, pos):
self.name = name
self.start_pos = pos
self.parent = compiled.builtin
def get_parent_until(self):
return self.parent
@property
def names(self):
""" For a `parsing.Name` like comparision """
return [self.name]
@property
def docstr(self):
return imitate_pydoc(self.name)
def __repr__(self):
return '<%s: %s>' % (type(self).__name__, self.name)
def imitate_pydoc(string):
"""
It's not possible to get pydoc's help text without starting the annoying
pager stuff.
"""
# str needed because of possible unicode stuff in py2k (pydoc doesn't work
# with unicode strings)
string = str(string)
h = pydoc.help
with common.ignored(KeyError):
# try to access symbols
string = h.symbols[string]
string, _, related = string.partition(' ')
get_target = lambda s: h.topics.get(s, h.keywords.get(s))
while isinstance(string, str):
string = get_target(string)
try:
# is a tuple now
label, related = string
except TypeError:
return ''
try:
return pydoc_topics.topics[label] if pydoc_topics else ''
except KeyError:
return ''
|
JiYouMCC/python
|
refs/heads/master
|
burness/0008/html_extract_text.py
|
40
|
# #-*- coding: utf-8-*-
# # Source:https://github.com/Show-Me-the-Code/show-me-the-code
# # Author:Burness Duan
# # Date:2014-12-28
# # Python 2.7
from HTMLParser import HTMLParser
from re import sub
from sys import stderr
from traceback import print_exc
import urllib2
class _DeHTMLParser(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
self.__text = []
def handle_data(self, data):
text = data.strip()
if len(text) > 0:
text = sub('[ \t\r\n]+', ' ', text)
self.__text.append(text + ' ')
def handle_starttag(self, tag, attrs):
if tag == 'p':
self.__text.append('\n\n')
elif tag == 'br':
self.__text.append('\n')
def handle_startendtag(self, tag, attrs):
if tag == 'br':
self.__text.append('\n\n')
def text(self):
return ''.join(self.__text).strip()
def dehtml(text):
try:
parser = _DeHTMLParser()
parser.feed(text)
parser.close()
return parser.text()
except:
print_exc(file=stderr)
return text
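# Example (comment added for clarity):
#   dehtml('<p>Hello<br>world</p>') -> 'Hello \nworld'
# since <p> maps to a blank line, <br> to a newline, and every text chunk
# gains a trailing space in handle_data().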
if __name__ == '__main__':
url = 'http://tech.163.com/14/1219/01/ADPT7MTE000915BF.html'
html = urllib2.urlopen(url)
html_code = html.read()
html_code = sub('<script>(.*?)</script>','',html_code)
# print dehtml(html_code).decode('gbk').encode('utf-8')
with open('result.txt','w') as f:
f.write(dehtml(html_code).decode('gbk').encode('utf-8'))
|
kartikp1995/gnuradio
|
refs/heads/master
|
grc/core/Config.py
|
17
|
"""
Copyright 2016 Free Software Foundation, Inc.
This file is part of GNU Radio
GNU Radio Companion is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
GNU Radio Companion is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
"""
import os
from os.path import expanduser, normpath, expandvars, exists
from . import Constants
class Config(object):
key = 'grc'
name = 'GNU Radio Companion (no gui)'
license = __doc__.strip()
website = 'http://gnuradio.org'
hier_block_lib_dir = os.environ.get('GRC_HIER_PATH', Constants.DEFAULT_HIER_BLOCK_LIB_DIR)
def __init__(self, prefs_file, version, version_parts=None):
self.prefs = prefs_file
self.version = version
self.version_parts = version_parts or version[1:].split('-', 1)[0].split('.')[:3]
@property
def block_paths(self):
path_list_sep = {'/': ':', '\\': ';'}[os.path.sep]
paths_sources = (
self.hier_block_lib_dir,
os.environ.get('GRC_BLOCKS_PATH', ''),
self.prefs.get_string('grc', 'local_blocks_path', ''),
self.prefs.get_string('grc', 'global_blocks_path', ''),
)
collected_paths = sum((paths.split(path_list_sep)
for paths in paths_sources), [])
valid_paths = [normpath(expanduser(expandvars(path)))
for path in collected_paths if exists(path)]
return valid_paths
@property
def default_flow_graph(self):
user_default = (
os.environ.get('GRC_DEFAULT_FLOW_GRAPH') or
self.prefs.get_string('grc', 'default_flow_graph', '') or
os.path.join(self.hier_block_lib_dir, 'default_flow_graph.grc')
)
return user_default if exists(user_default) else Constants.DEFAULT_FLOW_GRAPH
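# Note (comment added for clarity): default_flow_graph picks the first truthy
# candidate -- the GRC_DEFAULT_FLOW_GRAPH environment variable, the 'grc'
# preference, then a default_flow_graph.grc in hier_block_lib_dir -- and falls
# back to Constants.DEFAULT_FLOW_GRAPH if that file does not exist.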
|
j00bar/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/nxos/nxos_static_route.py
|
21
|
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: nxos_static_route
extends_documentation_fragment: nxos
version_added: "2.2"
short_description: Manages static route configuration
description:
- Manages static route configuration
author: Gabriele Gerbino (@GGabriele)
notes:
- If no vrf is supplied, vrf is set to default.
- If C(state=absent), the route will be removed, regardless of the
non-required parameters.
options:
prefix:
description:
- Destination prefix of static route.
required: true
next_hop:
description:
- Next hop address or interface of static route.
If interface, it must be the fully-qualified interface name.
required: true
vrf:
description:
- VRF for static route.
required: false
default: default
tag:
description:
- Route tag value (numeric).
required: false
default: null
route_name:
description:
- Name of the route. Used with the name parameter on the CLI.
required: false
default: null
pref:
description:
- Preference or administrative distance of route (range 1-255).
required: false
default: null
state:
description:
- Manage the state of the resource.
required: true
choices: ['present','absent']
'''
EXAMPLES = '''
- nxos_static_route:
prefix: "192.168.20.64/24"
next_hop: "3.3.3.3"
route_name: testing
pref: 100
username: "{{ un }}"
password: "{{ pwd }}"
host: "{{ inventory_hostname }}"
'''
RETURN = '''
proposed:
description: k/v pairs of parameters passed into module
returned: verbose mode
type: dict
sample: {"next_hop": "3.3.3.3", "pref": "100",
"prefix": "192.168.20.64/24", "route_name": "testing",
"vrf": "default"}
existing:
description: k/v pairs of existing configuration
returned: verbose mode
type: dict
sample: {}
end_state:
description: k/v pairs of configuration after module execution
returned: verbose mode
type: dict
sample: {"next_hop": "3.3.3.3", "pref": "100",
"prefix": "192.168.20.0/24", "route_name": "testing",
"tag": null}
updates:
description: commands sent to the device
returned: always
type: list
sample: ["ip route 192.168.20.0/24 3.3.3.3 name testing 100"]
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
'''
import re
from ansible.module_utils.nxos import get_config, load_config, run_commands
from ansible.module_utils.nxos import nxos_argument_spec, check_args
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.netcfg import CustomNetworkConfig
def invoke(name, *args, **kwargs):
func = globals().get(name)
if func:
return func(*args, **kwargs)
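# Note (comment added for clarity): invoke() is plain dynamic dispatch -- it
# looks the name up in globals() and calls it, so invoke('state_%s' % state,
# ...) in main() selects state_present or state_absent.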
def state_present(module, candidate, prefix):
commands = list()
invoke('set_route', module, commands, prefix)
if commands:
if module.params['vrf'] == 'default':
candidate.add(commands, parents=[])
else:
candidate.add(commands, parents=['vrf context {0}'.format(module.params['vrf'])])
def state_absent(module, candidate, prefix):
netcfg = get_config(module)
commands = list()
parents = 'vrf context {0}'.format(module.params['vrf'])
invoke('set_route', module, commands, prefix)
if module.params['vrf'] == 'default':
config = netcfg.get_section(commands[0])
if config:
invoke('remove_route', module, commands, config, prefix)
candidate.add(commands, parents=[])
else:
config = netcfg.get_section(parents)
splitted_config = config.split('\n')
splitted_config = map(str.strip, splitted_config)
if commands[0] in splitted_config:
invoke('remove_route', module, commands, config, prefix)
candidate.add(commands, parents=[parents])
def fix_prefix_to_regex(prefix):
prefix = prefix.replace('.', '\.').replace('/', '\/')
return prefix
def get_existing(module, prefix, warnings):
key_map = ['tag', 'pref', 'route_name', 'next_hop']
netcfg = get_config(module)
parents = 'vrf context {0}'.format(module.params['vrf'])
prefix_to_regex = fix_prefix_to_regex(prefix)
route_regex = ('.*ip\sroute\s{0}\s(?P<next_hop>\S+)(\sname\s(?P<route_name>\S+))?'
'(\stag\s(?P<tag>\d+))?(\s(?P<pref>\d+)).*'.format(prefix_to_regex))
if module.params['vrf'] == 'default':
config = str(netcfg)
else:
config = netcfg.get_section(parents)
if config:
try:
match_route = re.match(route_regex, config, re.DOTALL)
group_route = match_route.groupdict()
for key in key_map:
if key not in group_route:
group_route[key] = ''
group_route['prefix'] = prefix
group_route['vrf'] = module.params['vrf']
except (AttributeError, TypeError):
group_route = {}
else:
group_route = {}
msg = ("VRF {0} didn't exist.".format(module.params['vrf']))
if msg not in warnings:
warnings.append(msg)
return group_route
def remove_route(module, commands, config, prefix):
commands.append('no ip route {0} {1}'.format(prefix, module.params['next_hop']))
def set_route(module, commands, prefix):
route_cmd = 'ip route {0} {1}'.format(prefix, module.params['next_hop'])
if module.params['route_name']:
route_cmd += ' name {0}'.format(module.params['route_name'])
if module.params['tag']:
route_cmd += ' tag {0}'.format(module.params['tag'])
if module.params['pref']:
route_cmd += ' {0}'.format(module.params['pref'])
commands.append(route_cmd)
def get_dotted_mask(mask):
bits = 0
for i in xrange(32-mask,32):
bits |= (1 << i)
mask = ("%d.%d.%d.%d" % ((bits & 0xff000000) >> 24,
(bits & 0xff0000) >> 16, (bits & 0xff00) >> 8 , (bits & 0xff)))
return mask
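# Worked example (comment added for clarity): get_dotted_mask(24) sets bits
# 8..31, i.e. 0xffffff00, which formats to '255.255.255.0'.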
def get_network_start(address, netmask):
address = address.split('.')
netmask = netmask.split('.')
return [str(int(address[x]) & int(netmask[x])) for x in range(0, 4)]
def network_from_string(address, mask, module):
octects = address.split('.')
if len(octects) > 4:
module.fail_json(msg='Incorrect address format.', address=address)
for octect in octects:
try:
if int(octect) < 0 or int(octect) > 255:
module.fail_json(msg='Address may contain invalid values.',
address=address)
except ValueError:
module.fail_json(msg='Address may contain non-integer values.',
address=address)
try:
if int(mask) < 0 or int(mask) > 32:
module.fail_json(msg='Incorrect mask value.', mask=mask)
except ValueError:
module.fail_json(msg='Mask may contain non-integer values.', mask=mask)
netmask = get_dotted_mask(int(mask))
return '.'.join(get_network_start(address, netmask))
def normalize_prefix(module, prefix):
splitted_prefix = prefix.split('/')
address = splitted_prefix[0]
if len(splitted_prefix) > 2:
module.fail_json(msg='Incorrect address format.', address=address)
elif len(splitted_prefix) == 2:
mask = splitted_prefix[1]
network = network_from_string(address, mask, module)
normalized_prefix = str(network) + '/' + str(mask)
else:
normalized_prefix = prefix + '/' + str(32)
return normalized_prefix
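# Example (comment added for clarity): normalize_prefix(module,
# '192.168.20.64/24') masks the host bits and returns '192.168.20.0/24';
# a bare address with no mask comes back with '/32' appended.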
def main():
argument_spec = dict(
prefix=dict(required=True, type='str'),
next_hop=dict(required=True, type='str'),
vrf=dict(type='str', default='default'),
tag=dict(type='str'),
route_name=dict(type='str'),
pref=dict(type='str'),
state=dict(choices=['absent', 'present'],
default='present'),
include_defaults=dict(default=True),
config=dict(),
save=dict(type='bool', default=False)
)
argument_spec.update(nxos_argument_spec)
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True)
warnings = list()
check_args(module, warnings)
state = module.params['state']
result = dict(changed=False)
warnings = list()
prefix = invoke('normalize_prefix', module, module.params['prefix'])
existing = invoke('get_existing', module, prefix, warnings)
end_state = existing
args = ['route_name', 'vrf', 'pref', 'tag', 'next_hop', 'prefix']
proposed = dict((k, v) for k, v in module.params.items() if v is not None and k in args)
if state == 'present' or (state == 'absent' and existing):
candidate = CustomNetworkConfig(indent=3)
invoke('state_%s' % state, module, candidate, prefix)
load_config(module, candidate)
else:
result['updates'] = []
result['warnings'] = warnings
if module._verbosity > 0:
end_state = invoke('get_existing', module, prefix, warnings)
result['end_state'] = end_state
result['existing'] = existing
result['proposed'] = proposed
module.exit_json(**result)
if __name__ == '__main__':
main()
|
MadCat34/Sick-Beard
|
refs/heads/development
|
sickbeard/SentFTPChecker.py
|
35
|
# Author: Arnaud Dartois
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
import time
import os
import sickbeard
import ftplib as ftp
import glob
from sickbeard import logger
class SentFTPChecker():
def run(self):
if sickbeard.USE_TORRENT_FTP:
# upload all torrent file to remote FTP
logger.log("Sending torrent file to FTP", logger.DEBUG)
self._sendToFTP("*.torrent", sickbeard.TORRENT_DIR)
def _sendToFTP(self, filter, dir):
"""
Send all of the specified filtered files (eg "*.torrent") to the appropriate FTP.
"""
iCount = self.count_files(dir, filter)
logger.log(u"Files Found (" + filter + "): " + str(iCount), logger.DEBUG)
if (iCount>0):
MyFTP = ftp.FTP()
logger.log(u"Initializing FTP Session", logger.DEBUG)
MyFTP.connect(sickbeard.FTP_HOST, sickbeard.FTP_PORT, sickbeard.FTP_TIMEOUT)
# Connect to the FTP server
MyFTP.login(sickbeard.FTP_LOGIN, sickbeard.FTP_PASSWORD, '')
# Assign passive mode
logger.log(u"Assign Session Passive Mode", logger.DEBUG)
MyFTP.set_pasv(sickbeard.FTP_PASSIVE)
# change remote directory
try:
logger.log(u"Set Remote Directory : %s" % sickbeard.FTP_DIR, logger.DEBUG)
MyFTP.cwd(sickbeard.FTP_DIR)
except Exception, e:
logger.log(u"Change directory failed :" + e.message, logger.ERROR)
for fileName in glob.glob(os.path.join(dir,filter)):
file_handler = open(fileName, 'rb')
# Send the file
logger.log(u"Send local file : " + fileName, logger.DEBUG)
MyFTP.set_debuglevel(1)
MyFTP.storbinary('STOR %s' % os.path.basename(fileName), file_handler)
MyFTP.set_debuglevel(0)
file_handler.close()
# delete local file after uploading
logger.log(u"Deleting local file : " + fileName, logger.DEBUG)
os.remove(fileName)
# Close FTP session
logger.log(u"Close FTP Session", logger.DEBUG)
MyFTP.quit()
logger.log(u"It's working ... hop a beer !", logger.DEBUG)
else:
logger.log(u"No local files found.", logger.DEBUG)
    def count_files(self, path, filter):
        # Count the files in `path` matching the glob pattern `filter`.
        return len(glob.glob(os.path.join(path, filter)))
|
nolanelena/MyProjectSite-
|
refs/heads/master
|
node_modules/node-sass/node_modules/node-gyp/gyp/tools/pretty_sln.py
|
1831
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Prints the information in a sln file in a diffable way.
It first outputs each project in alphabetical order with its
dependencies.
Then it outputs a possible build order.
"""
__author__ = 'nsylvain (Nicolas Sylvain)'
import os
import re
import sys
import pretty_vcproj
def BuildProject(project, built, projects, deps):
# if all dependencies are done, we can build it, otherwise we try to build the
# dependency.
# This is not infinite-recursion proof.
for dep in deps[project]:
if dep not in built:
BuildProject(dep, built, projects, deps)
print project
built.append(project)
def ParseSolution(solution_file):
# All projects, their clsid and paths.
projects = dict()
# A list of dependencies associated with a project.
dependencies = dict()
# Regular expressions that matches the SLN format.
# The first line of a project definition.
begin_project = re.compile(r'^Project\("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942'
r'}"\) = "(.*)", "(.*)", "(.*)"$')
# The last line of a project definition.
end_project = re.compile('^EndProject$')
# The first line of a dependency list.
begin_dep = re.compile(
r'ProjectSection\(ProjectDependencies\) = postProject$')
# The last line of a dependency list.
end_dep = re.compile('EndProjectSection$')
# A line describing a dependency.
dep_line = re.compile(' *({.*}) = ({.*})$')
in_deps = False
solution = open(solution_file)
for line in solution:
results = begin_project.search(line)
if results:
# Hack to remove icu because the diff is too different.
if results.group(1).find('icu') != -1:
continue
# We remove "_gyp" from the names because it helps to diff them.
current_project = results.group(1).replace('_gyp', '')
projects[current_project] = [results.group(2).replace('_gyp', ''),
results.group(3),
results.group(2)]
dependencies[current_project] = []
continue
results = end_project.search(line)
if results:
current_project = None
continue
results = begin_dep.search(line)
if results:
in_deps = True
continue
results = end_dep.search(line)
if results:
in_deps = False
continue
results = dep_line.search(line)
if results and in_deps and current_project:
dependencies[current_project].append(results.group(1))
continue
# Change all dependencies clsid to name instead.
for project in dependencies:
# For each dependencies in this project
new_dep_array = []
for dep in dependencies[project]:
# Look for the project name matching this clsid
for project_info in projects:
if projects[project_info][1] == dep:
new_dep_array.append(project_info)
dependencies[project] = sorted(new_dep_array)
return (projects, dependencies)
def PrintDependencies(projects, deps):
print "---------------------------------------"
print "Dependencies for all projects"
print "---------------------------------------"
print "-- --"
for (project, dep_list) in sorted(deps.items()):
print "Project : %s" % project
print "Path : %s" % projects[project][0]
if dep_list:
for dep in dep_list:
print " - %s" % dep
print ""
print "-- --"
def PrintBuildOrder(projects, deps):
print "---------------------------------------"
print "Build order "
print "---------------------------------------"
print "-- --"
built = []
for (project, _) in sorted(deps.items()):
if project not in built:
BuildProject(project, built, projects, deps)
print "-- --"
def PrintVCProj(projects):
for project in projects:
print "-------------------------------------"
print "-------------------------------------"
print project
print project
print project
print "-------------------------------------"
print "-------------------------------------"
project_path = os.path.abspath(os.path.join(os.path.dirname(sys.argv[1]),
projects[project][2]))
pretty = pretty_vcproj
argv = [ '',
project_path,
'$(SolutionDir)=%s\\' % os.path.dirname(sys.argv[1]),
]
argv.extend(sys.argv[3:])
pretty.main(argv)
def main():
# Check that we were given at least one parameter.
if len(sys.argv) < 2:
print 'Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0]
return 1
(projects, deps) = ParseSolution(sys.argv[1])
PrintDependencies(projects, deps)
PrintBuildOrder(projects, deps)
if '--recursive' in sys.argv:
PrintVCProj(projects)
return 0
if __name__ == '__main__':
sys.exit(main())
|
tellesnobrega/horizon
|
refs/heads/master
|
horizon/conf/panel_template/views.py
|
93
|
from horizon import views
class IndexView(views.APIView):
# A very simple class-based view...
template_name = '{{ dash_name }}/{{ panel_name }}/index.html'
def get_data(self, request, context, *args, **kwargs):
# Add data to the context here...
return context
|
Beramos/QuestionHistCrit
|
refs/heads/master
|
questenv/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/langbulgarianmodel.py
|
2964
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# 255: Control characters that usually do not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
# 252: 0 - 9
# Character Mapping Table:
# This table is modified based on win1251BulgarianCharToOrderMap, so
# only numbers below 64 are guaranteed valid.
Latin5_BulgarianCharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40
110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50
253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60
116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70
194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209, # 80
210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225, # 90
81,226,227,228,229,230,105,231,232,233,234,235,236, 45,237,238, # a0
31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # b0
39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,239, 67,240, 60, 56, # c0
1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # d0
7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,241, 42, 16, # e0
62,242,243,244, 58,245, 98,246,247,248,249,250,251, 91,252,253, # f0
)
win1251BulgarianCharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40
110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50
253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60
116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70
206,207,208,209,210,211,212,213,120,214,215,216,217,218,219,220, # 80
221, 78, 64, 83,121, 98,117,105,222,223,224,225,226,227,228,229, # 90
88,230,231,232,233,122, 89,106,234,235,236,237,238, 45,239,240, # a0
73, 80,118,114,241,242,243,244,245, 62, 58,246,247,248,249,250, # b0
31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # c0
39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,251, 67,252, 60, 56, # d0
1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # e0
7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,253, 42, 16, # f0
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 96.9392%
# first 1024 sequences: 3.0618%
# rest sequences: 0.2992%
# negative sequences: 0.0020%
BulgarianLangModel = (
0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,3,3,3,3,3,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,2,2,1,2,2,
3,1,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,0,1,
0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,3,3,0,3,1,0,
0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,2,3,2,2,1,3,3,3,3,2,2,2,1,1,2,0,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,2,3,2,2,3,3,1,1,2,3,3,2,3,3,3,3,2,1,2,0,2,0,3,0,0,
0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,1,3,3,3,3,3,2,3,2,3,3,3,3,3,2,3,3,1,3,0,3,0,2,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,1,3,3,2,3,3,3,1,3,3,2,3,2,2,2,0,0,2,0,2,0,2,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,3,3,1,2,2,3,2,1,1,2,0,2,0,0,0,0,
1,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,2,3,3,1,2,3,2,2,2,3,3,3,3,3,2,2,3,1,2,0,2,1,2,0,0,
0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,1,3,3,3,3,3,2,3,3,3,2,3,3,2,3,2,2,2,3,1,2,0,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,3,3,3,1,1,1,2,2,1,3,1,3,2,2,3,0,0,1,0,1,0,1,0,0,
0,0,0,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,2,2,3,2,2,3,1,2,1,1,1,2,3,1,3,1,2,2,0,1,1,1,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,1,3,2,2,3,3,1,2,3,1,1,3,3,3,3,1,2,2,1,1,1,0,2,0,2,0,1,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,2,2,3,3,3,2,2,1,1,2,0,2,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,0,1,2,1,3,3,2,3,3,3,3,3,2,3,2,1,0,3,1,2,1,2,1,2,3,2,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,1,1,2,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,1,3,3,2,3,3,2,2,2,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,0,3,3,3,3,3,2,1,1,2,1,3,3,0,3,1,1,1,1,3,2,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,1,1,3,1,3,3,2,3,2,2,2,3,0,2,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,2,3,3,2,2,3,2,1,1,1,1,1,3,1,3,1,1,0,0,0,1,0,0,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,2,3,2,0,3,2,0,3,0,2,0,0,2,1,3,1,0,0,1,0,0,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,2,1,1,1,1,2,1,1,2,1,1,1,2,2,1,2,1,1,1,0,1,1,0,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,2,1,3,1,1,2,1,3,2,1,1,0,1,2,3,2,1,1,1,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,2,2,1,0,1,0,0,1,0,0,0,2,1,0,3,0,0,1,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,2,3,2,3,3,1,3,2,1,1,1,2,1,1,2,1,3,0,1,0,0,0,1,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,2,2,3,3,2,3,2,2,2,3,1,2,2,1,1,2,1,1,2,2,0,1,1,0,1,0,2,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,1,3,1,0,2,2,1,3,2,1,0,0,2,0,2,0,1,0,0,0,0,0,0,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,1,2,0,2,3,1,2,3,2,0,1,3,1,2,1,1,1,0,0,1,0,0,2,2,2,3,
2,2,2,2,1,2,1,1,2,2,1,1,2,0,1,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,1,
3,3,3,3,3,2,1,2,2,1,2,0,2,0,1,0,1,2,1,2,1,1,0,0,0,1,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,2,3,3,1,1,3,1,0,3,2,1,0,0,0,1,2,0,2,0,1,0,0,0,1,0,1,2,1,2,2,
1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,0,1,2,1,1,1,0,0,0,0,0,1,1,0,0,
3,1,0,1,0,2,3,2,2,2,3,2,2,2,2,2,1,0,2,1,2,1,1,1,0,1,2,1,2,2,2,1,
1,1,2,2,2,2,1,2,1,1,0,1,2,1,2,2,2,1,1,1,0,1,1,1,1,2,0,1,0,0,0,0,
2,3,2,3,3,0,0,2,1,0,2,1,0,0,0,0,2,3,0,2,0,0,0,0,0,1,0,0,2,0,1,2,
2,1,2,1,2,2,1,1,1,2,1,1,1,0,1,2,2,1,1,1,1,1,0,1,1,1,0,0,1,2,0,0,
3,3,2,2,3,0,2,3,1,1,2,0,0,0,1,0,0,2,0,2,0,0,0,1,0,1,0,1,2,0,2,2,
1,1,1,1,2,1,0,1,2,2,2,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0,
2,3,2,3,3,0,0,3,0,1,1,0,1,0,0,0,2,2,1,2,0,0,0,0,0,0,0,0,2,0,1,2,
2,2,1,1,1,1,1,2,2,2,1,0,2,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0,
3,3,3,3,2,2,2,2,2,0,2,1,1,1,1,2,1,2,1,1,0,2,0,1,0,1,0,0,2,0,1,2,
1,1,1,1,1,1,1,2,2,1,1,0,2,0,1,0,2,0,0,1,1,1,0,0,2,0,0,0,1,1,0,0,
2,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,1,2,0,1,2,
2,2,2,1,1,2,1,1,2,2,2,1,2,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,1,1,0,0,
2,3,3,3,3,0,2,2,0,2,1,0,0,0,1,1,1,2,0,2,0,0,0,3,0,0,0,0,2,0,2,2,
1,1,1,2,1,2,1,1,2,2,2,1,2,0,1,1,1,0,1,1,1,1,0,2,1,0,0,0,1,1,0,0,
2,3,3,3,3,0,2,1,0,0,2,0,0,0,0,0,1,2,0,2,0,0,0,0,0,0,0,0,2,0,1,2,
1,1,1,2,1,1,1,1,2,2,2,0,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0,
3,3,2,2,3,0,1,0,1,0,0,0,0,0,0,0,1,1,0,3,0,0,0,0,0,0,0,0,1,0,2,2,
1,1,1,1,1,2,1,1,2,2,1,2,2,1,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,0,0,
3,1,0,1,0,2,2,2,2,3,2,1,1,1,2,3,0,0,1,0,2,1,1,0,1,1,1,1,2,1,1,1,
1,2,2,1,2,1,2,2,1,1,0,1,2,1,2,2,1,1,1,0,0,1,1,1,2,1,0,1,0,0,0,0,
2,1,0,1,0,3,1,2,2,2,2,1,2,2,1,1,1,0,2,1,2,2,1,1,2,1,1,0,2,1,1,1,
1,2,2,2,2,2,2,2,1,2,0,1,1,0,2,1,1,1,1,1,0,0,1,1,1,1,0,1,0,0,0,0,
2,1,1,1,1,2,2,2,2,1,2,2,2,1,2,2,1,1,2,1,2,3,2,2,1,1,1,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,3,2,0,1,2,0,1,2,1,1,0,1,0,1,2,1,2,0,0,0,1,1,0,0,0,1,0,0,2,
1,1,0,0,1,1,0,1,1,1,1,0,2,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0,
2,0,0,0,0,1,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,2,1,1,1,
1,2,2,2,2,1,1,2,1,2,1,1,1,0,2,1,2,1,1,1,0,2,1,1,1,1,0,1,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,
1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,3,2,0,0,0,0,1,0,0,0,0,0,0,1,1,0,2,0,0,0,0,0,0,0,0,1,0,1,2,
1,1,1,1,1,1,0,0,2,2,2,2,2,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,1,1,0,1,
2,3,1,2,1,0,1,1,0,2,2,2,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,2,
1,1,1,1,2,1,1,1,1,1,1,1,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,
2,2,2,2,2,0,0,2,0,0,2,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,0,2,2,
1,1,1,1,1,0,0,1,2,1,1,0,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,2,0,0,2,0,1,1,0,0,0,1,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,1,1,
0,0,0,1,1,1,1,1,1,1,1,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,3,2,0,0,1,0,0,1,0,0,0,0,0,0,1,0,2,0,0,0,1,0,0,0,0,0,0,0,2,
1,1,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
2,1,2,2,2,1,2,1,2,2,1,1,2,1,1,1,0,1,1,1,1,2,0,1,0,1,1,1,1,0,1,1,
1,1,2,1,1,1,1,1,1,0,0,1,2,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,
1,0,0,1,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,2,1,0,0,1,0,2,0,0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,1,
0,2,0,1,0,0,1,1,2,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,2,0,1,1,0,2,1,0,1,1,1,0,0,1,0,2,0,1,0,0,0,0,0,0,0,0,0,1,
0,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,2,2,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,
0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
2,0,1,0,0,1,2,1,1,1,1,1,1,2,2,1,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0,
1,1,2,1,1,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,1,2,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,
0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,
0,1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,
1,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,0,0,2,0,1,0,0,1,0,0,1,
1,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,
1,1,1,1,1,1,1,2,0,0,0,0,0,0,2,1,0,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,1,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
)
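# A minimal sketch (not part of this module) of how these tables are
# consumed: each raw byte maps to a frequency order, and consecutive
# order pairs index the flattened 64x64 BulgarianLangModel to score how
# typical the byte sequence is for Bulgarian text. The helper name is
# hypothetical and exists purely for illustration.
def _example_pair_likelihood(prev_byte, cur_byte):
    prev_order = Latin5_BulgarianCharToOrderMap[prev_byte]
    cur_order = Latin5_BulgarianCharToOrderMap[cur_byte]
    # Only the 64 most frequent characters participate in the model;
    # symbols (253), digits (252) and control bytes (255/254) are skipped.
    if prev_order < 64 and cur_order < 64:
        return BulgarianLangModel[prev_order * 64 + cur_order]
    return None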
Latin5BulgarianModel = {
'charToOrderMap': Latin5_BulgarianCharToOrderMap,
'precedenceMatrix': BulgarianLangModel,
'mTypicalPositiveRatio': 0.969392,
'keepEnglishLetter': False,
'charsetName': "ISO-8859-5"
}
Win1251BulgarianModel = {
'charToOrderMap': win1251BulgarianCharToOrderMap,
'precedenceMatrix': BulgarianLangModel,
'mTypicalPositiveRatio': 0.969392,
'keepEnglishLetter': False,
'charsetName': "windows-1251"
}
# flake8: noqa
|
kumarshubham/xhtml2pdf
|
refs/heads/master
|
setup_version.py
|
91
|
# -*- coding: utf-8 -*-
# Copyright 2010 Dirk Holtwick, holtwick.it
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Updates the version info
"""
import time
import re
import cgi
VERSION = open("VERSION.txt", "r").read().strip()
BUILD = time.strftime("%Y-%m-%d")
FILES = [
"setup.py",
"setup_exe.py",
# "setup_egg.py",
"sx/pisa3/pisa_version.py",
"doc/pisa-en.html",
]
try:
HELP = cgi.escape(open("HELP.txt", "r").read(), 1)
except IOError:
# HELP.txt is optional; fall back to an empty help block.
HELP = ""
HELP = "<!--HELP--><pre>" + HELP + "</pre><!--HELP-->"
rxversion = re.compile("VERSION{.*?}VERSION", re.MULTILINE | re.IGNORECASE | re.DOTALL)
rxbuild = re.compile("BUILD{.*?}BUILD", re.MULTILINE | re.IGNORECASE | re.DOTALL)
rxversionhtml = re.compile("\<\!--VERSION--\>.*?\<\!--VERSION--\>", re.MULTILINE | re.IGNORECASE | re.DOTALL)
rxhelphtml = re.compile("\<\!--HELP--\>.*?\<\!--HELP--\>", re.MULTILINE | re.IGNORECASE | re.DOTALL)
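# For illustration, the listed files are expected to carry markers like
# the following (values hypothetical); the substitutions below rewrite
# the text between the markers while keeping the markers themselves:
#   pisa_version.py: VERSION = "VERSION{3.0.33}VERSION"
#   pisa_version.py: BUILD = "BUILD{2010-09-01}BUILD"
#   doc/pisa-en.html: <!--VERSION-->3.0.33<!--VERSION-->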
for fname in FILES:
print "Update", fname, "..."
data = open(fname, "rb").read()
data = rxversion.sub("VERSION{" + VERSION + "}VERSION", data)
data = rxversionhtml.sub("<!--VERSION-->" + VERSION + "<!--VERSION-->", data)
data = rxbuild.sub("BUILD{" + BUILD + "}BUILD", data)
data = rxhelphtml.sub(HELP, data)
open(fname, "wb").write(data)
|
tictakk/servo
|
refs/heads/ticbranch
|
tests/wpt/web-platform-tests/XMLHttpRequest/resources/auth5/auth.py
|
556
|
def main(request, response):
if request.auth.username == 'usr' and request.auth.password == 'secret':
response.headers.set('Content-type', 'text/plain')
content = ""
else:
response.status = 401
response.headers.set('Status', '401 Authorization required')
response.headers.set('WWW-Authenticate', 'Basic realm="test"')
content = 'User name/password wrong or not given: '
content += "%s\n%s" % (request.auth.username,
request.auth.password)
return content
|
chipaca/snapcraft
|
refs/heads/master
|
snapcraft/plugins/v1/catkin.py
|
2
|
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2015-2020 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""The catkin plugin is useful for building ROS parts.
The rosdistro used depends upon the base of the snap:
- core: Uses Kinetic
- core16: Uses Kinetic
- core18: Uses Melodic
This plugin uses the common plugin keywords as well as those for "sources".
For more information check the 'plugins' topic for the former and the
'sources' topic for the latter.
Additionally, this plugin uses the following plugin-specific keywords:
- catkin-packages:
(list of strings)
List of catkin packages to build. If not specified, all packages in the
workspace will be built. If set to an empty list ([]), no packages will
be built.
- source-space:
(string)
The source space containing Catkin packages. By default this is 'src'.
- include-roscore:
(boolean)
Whether or not to include roscore with the part. Defaults to true.
- rosinstall-files:
(list of strings)
List of rosinstall files to merge while pulling. Paths are relative to
the source.
- recursive-rosinstall:
(boolean)
Whether or not to recursively merge/update rosinstall files from fetched
sources. Will continue until all rosinstall files have been merged.
Defaults to false.
- catkin-cmake-args:
(list of strings)
Configure flags to pass onto the cmake invocation from catkin.
- underlay:
(object)
Used to inform Snapcraft that this snap isn't standalone, and is actually
overlaying a workspace from another snap via content sharing. Made up of
two properties:
- build-path:
(string)
Build-time path to existing workspace to underlay the one being built,
for example '$SNAPCRAFT_STAGE/opt/ros/kinetic'.
- run-path:
(string)
Run-time path of the underlay workspace (e.g. a subdirectory of the
content interface's 'target' attribute.)
- catkin-ros-master-uri:
(string)
The URI to ros master setting the env variable ROS_MASTER_URI. Defaults
to http://localhost:11311.
"""
import contextlib
import glob
import logging
import os
import pathlib
import re
import shlex
import shutil
import subprocess
import tempfile
import textwrap
from typing import TYPE_CHECKING, List, Set
from snapcraft import file_utils, formatting_utils
from snapcraft.internal import common, errors, mangling, os_release, repo
from snapcraft.internal.meta.package_repository import (
PackageRepository,
PackageRepositoryApt,
)
from snapcraft.plugins.v1 import PluginV1, _python, _ros
if TYPE_CHECKING:
from snapcraft.project import Project
logger = logging.getLogger(__name__)
# Map bases to ROS releases
_BASE_TO_ROS_RELEASE_MAP = {"core": "kinetic", "core16": "kinetic", "core18": "melodic"}
# Map bases to Ubuntu releases
_BASE_TO_UBUNTU_RELEASE_MAP = {"core": "xenial", "core16": "xenial", "core18": "bionic"}
_SUPPORTED_DEPENDENCY_TYPES = {"apt", "pip"}
def _parse_cmake_arg(arg: str) -> str:
# Parse a cmake arg string into a form that makes catkin happy.
# The user can specify a list like:
# catkin-cmake-args:
# - -DSOMETHING=FOO
# - -DCMAKE_C_FLAGS=-Wall -Werror
# - -DCMAKE_CXX_FLAGS="-Wall -Werror"
# Catkin can handle strings (1) and (2), but will fail on parsing (3)
# because of the quotes. It will end up passing "-Wall -Werror" as a
# single quoted string to c++. To work around this, we need to
# evaluate the string like bash would. We can do this by using
# shlex.split() and rejoining the string with spaces.
# Examples:
# No quotes.
# >>> test = '-DCMAKE_C_FLAGS=-Wall -Werror'
# >>> " ".join(shlex.split(test))
# '-DCMAKE_C_FLAGS=-Wall -Werror'
# Double quotes.
# >>> test2 = '-DCMAKE_CXX_FLAGS="-Wall -Werror"'
# >>> " ".join(shlex.split(test2))
# '-DCMAKE_CXX_FLAGS=-Wall -Werror'
# Single quotes.
# >>> test3 = "-DCMAKE_CXX_FLAGS='-Wall -Werror'"
# >>> " ".join(shlex.split(test3))
# '-DCMAKE_CXX_FLAGS=-Wall -Werror'
# Nested quotes.
# >>> test4 = '-DCMAKE_CXX_FLAGS=\"-I\'/some/path with spaces\'\" -Wall -Werror'
# >>> " ".join(shlex.split(test4))
# "-DCMAKE_CXX_FLAGS=-I'/some/path with spaces' -Wall -Werror"
return " ".join(shlex.split(arg))
class CatkinInvalidSystemDependencyError(errors.SnapcraftError):
fmt = (
"Package {dependency!r} isn't a valid system dependency. Did you "
"forget to add it to catkin-packages? If not, add the Ubuntu package "
"containing it to stage-packages until you can get it into the rosdep "
"database."
)
def __init__(self, dependency):
super().__init__(dependency=dependency)
class CatkinUnsupportedDependencyTypeError(errors.SnapcraftError):
fmt = (
"Package {dependency!r} resolved to an unsupported type of "
"dependency: {dependency_type!r}."
)
def __init__(self, dependency_type, dependency):
super().__init__(dependency_type=dependency_type, dependency=dependency)
class CatkinWorkspaceIsRootError(errors.SnapcraftError):
fmt = "source-space cannot be the root of the Catkin workspace; use a subdirectory."
class CatkinCannotResolveRoscoreError(errors.SnapcraftError):
fmt = "Failed to determine system dependency for roscore."
class CatkinAptDependencyFetchError(errors.SnapcraftError):
fmt = "Failed to fetch apt dependencies: {message}"
def __init__(self, message):
super().__init__(message=message)
class CatkinNoHighestVersionPathError(errors.SnapcraftError):
fmt = "Failed to determine highest path in {path!r}: nothing found."
def __init__(self, path):
super().__init__(path=path)
class CatkinGccVersionError(errors.SnapcraftError):
fmt = "Failed to determine gcc version: {message}"
def __init__(self, message):
super().__init__(message=message)
class CatkinPackagePathNotFoundError(errors.SnapcraftError):
fmt = "Failed to find package path: {path!r}"
def __init__(self, path):
super().__init__(path=path)
class CatkinPlugin(PluginV1):
@classmethod
def schema(cls):
schema = super().schema()
schema["properties"]["catkin-packages"] = {
"type": "array",
"minitems": 1,
"uniqueItems": True,
"items": {"type": "string"},
}
schema["properties"]["source-space"] = {"type": "string", "default": "src"}
# The default is true since we expect most Catkin packages to be ROS
# packages. The only reason one wouldn't want to include ROS in the
# snap is if library snaps exist, which will still likely be the
# minority.
schema["properties"]["include-roscore"] = {"type": "boolean", "default": True}
schema["properties"]["underlay"] = {
"type": "object",
"properties": {
"build-path": {"type": "string"},
"run-path": {"type": "string"},
},
"required": ["build-path", "run-path"],
}
schema["properties"]["rosinstall-files"] = {
"type": "array",
"minitems": 1,
"uniqueItems": True,
"items": {"type": "string"},
"default": [],
}
schema["properties"]["recursive-rosinstall"] = {
"type": "boolean",
"default": False,
}
schema["properties"]["catkin-cmake-args"] = {
"type": "array",
"minitems": 1,
"items": {"type": "string"},
"default": [],
}
schema["properties"]["catkin-ros-master-uri"] = {
"type": "string",
"default": "http://localhost:11311",
}
schema["required"] = ["source"]
return schema
@classmethod
def get_pull_properties(cls):
# Inform Snapcraft of the properties associated with pulling. If these
# change in the YAML Snapcraft will consider the pull step dirty.
return [
"catkin-packages",
"source-space",
"include-roscore",
"underlay",
"rosinstall-files",
"recursive-rosinstall",
]
@classmethod
def get_build_properties(cls):
# Inform Snapcraft of the properties associated with building. If these
# change in the YAML Snapcraft will consider the build step dirty.
return ["catkin-cmake-args"]
@classmethod
def get_required_package_repositories(self) -> List[PackageRepository]:
codename = os_release.OsRelease().version_codename()
return [
PackageRepositoryApt(
formats=["deb"],
components=["main"],
key_id="C1CF6E31E6BADE8868B172B4F42ED6FBAB17C654",
url="http://packages.ros.org/ros/ubuntu/",
suites=[codename],
)
]
@property
def _pip(self):
if not self.__pip:
self.__pip = _python.Pip(
python_major_version="2", # ROS1 only supports python2
part_dir=self.partdir,
install_dir=self.installdir,
stage_dir=self.project.stage_dir,
)
return self.__pip
def __init__(self, name, options, project):
super().__init__(name, options, project)
base = self.project._get_build_base()
self._rosdistro = _BASE_TO_ROS_RELEASE_MAP[base]
self.build_packages.extend(["gcc", "g++", "libc6-dev", "make", "python-pip"])
self.__pip = None
# roslib is the base requirement to actually create a workspace with
# setup.sh and the necessary hooks.
self.stage_packages.append("ros-{}-roslib".format(self._rosdistro))
# Get a unique set of packages
self.catkin_packages = None
if options.catkin_packages is not None:
self.catkin_packages = set(options.catkin_packages)
self.stage_packages_path = pathlib.Path(self.partdir) / "catkin_stage_packages"
self._rosdep_path = os.path.join(self.partdir, "rosdep")
self._catkin_path = os.path.join(self.partdir, "catkin")
self._wstool_path = os.path.join(self.partdir, "wstool")
# The path created via the `source` key (or a combination of `source`
# and `source-subdir` keys) needs to point to a valid Catkin workspace
# containing another subdirectory called the "source space." By
# default, this is a directory named "src," but it can be remapped via
# the `source-space` key. It's important that the source space is not
# the root of the Catkin workspace, since Catkin won't work that way
# and it'll create a circular link that causes rosdep to hang.
if self.options.source_subdir:
self._ros_package_path = os.path.join(
self.sourcedir, self.options.source_subdir, self.options.source_space
)
else:
self._ros_package_path = os.path.join(
self.sourcedir, self.options.source_space
)
if os.path.abspath(self.sourcedir) == os.path.abspath(self._ros_package_path):
raise CatkinWorkspaceIsRootError()
def env(self, root):
"""Runtime environment for ROS binaries and services."""
paths = common.get_library_paths(root, self.project.arch_triplet)
ld_library_path = formatting_utils.combine_paths(
paths, prepend="", separator=":"
)
env = [
# This environment variable tells ROS nodes where to find ROS
# master. It does not affect ROS master, however-- this is just the
# URI.
"ROS_MASTER_URI={}".format(self.options.catkin_ros_master_uri),
# Various ROS tools (e.g. roscore) keep a cache or a log,
# and use $ROS_HOME to determine where to put them.
"ROS_HOME=${SNAP_USER_DATA:-/tmp}/ros",
# FIXME: LP: #1576411 breaks ROS snaps on the desktop, so we'll
# temporarily work around that bug by forcing the locale to
# C.UTF-8.
"LC_ALL=C.UTF-8",
# The Snapcraft Core will ensure that we get a good LD_LIBRARY_PATH
# overall, but it defines it after this function runs. Some ROS
# tools will cause binaries to be run when we source the setup.sh,
# below, so we need to have a sensible LD_LIBRARY_PATH before then.
"LD_LIBRARY_PATH=$LD_LIBRARY_PATH:{}".format(ld_library_path),
]
# There's a chicken-and-egg problem here: everything that runs gets an
# env built, even package installation, so the first runs of these
# will likely fail.
try:
# The ROS packaging system tools (e.g. rospkg, etc.) don't go
# into the ROS install path (/opt/ros/$distro), so we need the
# PYTHONPATH to include the dist-packages in /usr/lib as well.
#
# Note: Empty segments in PYTHONPATH are interpreted as `.`, thus
# adding the current working directory to the PYTHONPATH. That is
# not desired in this situation, so take proper precautions when
# expanding PYTHONPATH: only add it if it's not empty.
env.append(
"PYTHONPATH={}${{PYTHONPATH:+:$PYTHONPATH}}".format(
common.get_python2_path(root)
)
)
except errors.SnapcraftEnvironmentError as e:
logger.debug(e)
# The setup.sh we source below requires the in-snap python. Here we
# make sure it's in the PATH before it's run.
env.append("PATH=$PATH:{}/usr/bin".format(root))
if self.options.underlay:
script = textwrap.dedent(
"""
if [ -f {snapcraft_setup} ]; then
. {snapcraft_setup}
fi
"""
).format(snapcraft_setup=os.path.join(self.rosdir, "snapcraft-setup.sh"))
else:
script = self._source_setup_sh(root, None)
# Each of these lines is prepended with an `export` when the
# environment is actually generated. In order to inject real shell code
# we have to hack it in by appending it on the end of an item already
# in the environment. FIXME: There should be a better way to do this.
# LP: #1792034
env[-1] = env[-1] + "\n\n" + script
return env
def pull(self):
"""Copy source into build directory and fetch dependencies.
Catkin packages can specify their system dependencies in their
package.xml. In order to support that, the Catkin packages are
interrogated for their dependencies here. Since `stage-packages` are
already installed by the time this function is run, the dependencies
from the package.xml are pulled down explicitly.
"""
super().pull()
# There may be nothing contained within the source but a rosinstall
# file. We need to use it to flesh out the workspace before continuing
# with the pull.
if self.options.rosinstall_files or self.options.recursive_rosinstall:
wstool = _ros.wstool.Wstool(
self._ros_package_path,
self._wstool_path,
self.project,
self.project._get_build_base(),
)
wstool.setup()
source_path = self.sourcedir
if self.options.source_subdir:
source_path = os.path.join(self.sourcedir, self.options.source_subdir)
# Recursively handling rosinstall files is a superset of handling
# individual rosinstall files. If both are specified, the recursive
# option will cover it.
if self.options.recursive_rosinstall:
_recursively_handle_rosinstall_files(wstool, source_path)
else:
# The rosinstall files in the YAML are relative to the part's
# source. However, _handle_rosinstall_files requires absolute
# paths.
rosinstall_files = set()
for rosinstall_file in self.options.rosinstall_files:
rosinstall_files.add(os.path.join(source_path, rosinstall_file))
_handle_rosinstall_files(wstool, rosinstall_files)
# Make sure the package path exists before continuing. We only care
# about doing this if there are actually packages to build, which is
# indicated both by self.catkin_packages being None as well as a
# non-empty list.
packages_to_build = (
self.catkin_packages is None or len(self.catkin_packages) > 0
)
if packages_to_build and not os.path.exists(self._ros_package_path):
raise CatkinPackagePathNotFoundError(self._ros_package_path)
# Validate the underlay. Note that this validation can't happen in
# __init__ as the underlay will probably only be valid once a
# dependency has been staged.
catkin = None
underlay_build_path = None
dependency_workspaces = [self.rosdir]
if self.options.underlay:
underlay_build_path = self.options.underlay["build-path"]
if underlay_build_path:
if not os.path.isdir(underlay_build_path):
raise errors.SnapcraftEnvironmentError(
"Requested underlay ({!r}) does not point to a valid "
"directory".format(underlay_build_path)
)
if not os.path.isfile(os.path.join(underlay_build_path, "setup.sh")):
raise errors.SnapcraftEnvironmentError(
"Requested underlay ({!r}) does not contain a "
"setup.sh".format(underlay_build_path)
)
dependency_workspaces.append(underlay_build_path)
self._generate_snapcraft_setup_sh(self.installdir, underlay_build_path)
# Use catkin_find to discover dependencies already in the underlay
catkin = _Catkin(
self._rosdistro, dependency_workspaces, self._catkin_path, self.project
)
catkin.setup()
# Use rosdep for dependency detection and resolution
rosdep = _ros.rosdep.Rosdep(
ros_distro=self._rosdistro,
ros_version="1",
ros_package_path=self._ros_package_path,
rosdep_path=self._rosdep_path,
ubuntu_distro=_BASE_TO_UBUNTU_RELEASE_MAP[self.project._get_build_base()],
base=self.project._get_build_base(),
target_arch=self.project._get_stage_packages_target_arch(),
)
rosdep.setup()
self._setup_dependencies(rosdep, catkin)
def _setup_dependencies(self, rosdep, catkin):
# Parse the Catkin packages to pull out their system dependencies
system_dependencies = _find_system_dependencies(
self.catkin_packages, rosdep, catkin
)
# If the package requires roscore, resolve it into a system dependency
# as well.
if self.options.include_roscore:
roscore = rosdep.resolve_dependency("ros_core")
if roscore:
for dependency_type, dependencies in roscore.items():
if dependency_type not in system_dependencies:
system_dependencies[dependency_type] = set()
system_dependencies[dependency_type] |= dependencies
else:
raise CatkinCannotResolveRoscoreError()
# Pull down and install any apt dependencies that were discovered
self._setup_apt_dependencies(system_dependencies.get("apt"))
# Pull down and install any pip dependencies that were discovered
self._setup_pip_dependencies(system_dependencies.get("pip"))
def _setup_apt_dependencies(self, apt_dependencies):
if not apt_dependencies:
return
logger.info("Installing apt dependencies...")
try:
repo.Ubuntu.fetch_stage_packages(
package_names=apt_dependencies,
stage_packages_path=self.stage_packages_path,
base=self.project._get_build_base(),
target_arch=self.project._get_stage_packages_target_arch(),
)
except repo.errors.PackageNotFoundError as e:
raise CatkinAptDependencyFetchError(e.message)
repo.Ubuntu.unpack_stage_packages(
stage_packages_path=self.stage_packages_path,
install_path=pathlib.Path(self.installdir),
)
def _setup_pip_dependencies(self, pip_dependencies):
if pip_dependencies:
self._pip.setup()
logger.info("Fetching pip dependencies...")
self._pip.download(pip_dependencies)
logger.info("Installing pip dependencies...")
self._pip.install(pip_dependencies)
def clean_pull(self):
super().clean_pull()
# Remove the rosdep path, if any
with contextlib.suppress(FileNotFoundError):
shutil.rmtree(self._rosdep_path)
# Remove the catkin path, if any
with contextlib.suppress(FileNotFoundError):
shutil.rmtree(self._catkin_path)
# Remove the catkin path, if any
with contextlib.suppress(FileNotFoundError):
shutil.rmtree(self.stage_packages_path)
# Clean pip packages, if any
self._pip.clean_packages()
def _source_setup_sh(self, root, underlay_path):
rosdir = os.path.join(root, "opt", "ros", self._rosdistro)
if underlay_path:
source_script = textwrap.dedent(
"""
if [ -f {underlay_setup} ]; then
set -- --local
_CATKIN_SETUP_DIR={underlay} . {underlay_setup}
if [ -f {rosdir_setup} ]; then
set -- --local --extend
_CATKIN_SETUP_DIR={rosdir} . {rosdir_setup}
fi
fi
"""
).format(
underlay=underlay_path,
underlay_setup=os.path.join(underlay_path, "setup.sh"),
rosdir=rosdir,
rosdir_setup=os.path.join(rosdir, "setup.sh"),
)
else:
source_script = textwrap.dedent(
"""
if [ -f {rosdir_setup} ]; then
set -- --local
_CATKIN_SETUP_DIR={rosdir} . {rosdir_setup}
fi
"""
).format(rosdir=rosdir, rosdir_setup=os.path.join(rosdir, "setup.sh"))
# We need to source ROS's setup.sh at this point. However, it accepts
# arguments (thus will parse $@), and we really don't want it to, since
# $@ in this context will be meant for the app being launched
# (LP: #1660852). So we'll backup all args, source the setup.sh, then
# restore all args for the wrapper's `exec` line.
return textwrap.dedent(
"""
# Shell quote arbitrary string by replacing every occurrence of '
# with '\\'', then put ' at the beginning and end of the string.
# Prepare yourself, fun regex ahead.
quote()
{{
for i; do
printf %s\\\\n "$i" | sed "s/\'/\'\\\\\\\\\'\'/g;1s/^/\'/;\$s/\$/\' \\\\\\\\/"
done
echo " "
}}
BACKUP_ARGS=$(quote "$@")
set --
{}
eval "set -- $BACKUP_ARGS"
""" # noqa: W605
).format(
source_script
) # noqa
def _generate_snapcraft_setup_sh(self, root, underlay_path):
script = self._source_setup_sh(root, underlay_path)
os.makedirs(self.rosdir, exist_ok=True)
with open(os.path.join(self.rosdir, "snapcraft-setup.sh"), "w") as f:
f.write(script)
@property
def rosdir(self):
return os.path.join(self.installdir, "opt", "ros", self._rosdistro)
def build(self):
"""Build Catkin packages.
This function runs some pre-build steps to prepare the sources for
building in the Snapcraft environment, builds the packages via
catkin_make_isolated, and finally runs some post-build clean steps
to prepare the newly-minted install to be packaged as a .snap.
"""
super().build()
logger.info("Preparing to build Catkin packages...")
self._prepare_build()
logger.info("Building Catkin packages...")
self._build_catkin_packages()
logger.info("Cleaning up newly installed Catkin packages...")
self._finish_build()
def _prepare_build(self):
self._use_in_snap_python()
# Each Catkin package distributes .cmake files so they can be found via
# find_package(). However, the Ubuntu packages pulled down as
# dependencies contain .cmake files pointing to system paths (e.g.
# /usr/lib, /usr/include, etc.). They need to be rewritten to point to
# the install directory.
def _new_path(path):
if not path.startswith(self.installdir):
# Not using os.path.join here as `path` is absolute.
return self.installdir + path
return path
self._rewrite_cmake_paths(_new_path)
# Also rewrite any occurrence of $SNAPCRAFT_STAGE to be our install
# directory (this may be the case if stage-snaps were used).
file_utils.replace_in_file(
self.rosdir,
re.compile(r".*Config.cmake$"),
re.compile(r"\$ENV{SNAPCRAFT_STAGE}"),
self.installdir,
)
def _rewrite_cmake_paths(self, new_path_callable):
def _rewrite_paths(match):
paths = match.group(1).strip().split(";")
for i, path in enumerate(paths):
# Offer the opportunity to rewrite this path if it's absolute.
if os.path.isabs(path):
paths[i] = new_path_callable(path)
return '"' + ";".join(paths) + '"'
# Looking for any path-like string
file_utils.replace_in_file(
self.rosdir,
re.compile(r".*Config.cmake$"),
re.compile(r'"(.*?/.*?)"'),
_rewrite_paths,
)
def _finish_build(self):
self._use_in_snap_python()
# We've finished the build, but we need to make sure we turn the cmake
# files back into something that doesn't include our installdir. This
# way it's usable from the staging area, and won't clash with the same
# file coming from other parts.
pattern = re.compile(r"^{}".format(self.installdir))
def _new_path(path):
return pattern.sub("$ENV{SNAPCRAFT_STAGE}", path)
self._rewrite_cmake_paths(_new_path)
# Replace the CMAKE_PREFIX_PATH in _setup_util.sh
setup_util_file = os.path.join(self.rosdir, "_setup_util.py")
if os.path.isfile(setup_util_file):
with open(setup_util_file, "r+") as f:
pattern = re.compile(r"CMAKE_PREFIX_PATH = '.*/opt/ros.*")
replaced = pattern.sub("CMAKE_PREFIX_PATH = []", f.read())
f.seek(0)
f.truncate()
f.write(replaced)
# Set the _CATKIN_SETUP_DIR in setup.sh to a sensible default, removing
# our installdir (this way it doesn't clash with a setup.sh coming
# from another part).
setup_sh_file = os.path.join(self.rosdir, "setup.sh")
if os.path.isfile(setup_sh_file):
with open(setup_sh_file, "r+") as f:
pattern = re.compile(r"\${_CATKIN_SETUP_DIR:=.*}")
replaced = pattern.sub(
"${{_CATKIN_SETUP_DIR:=$SNAP/opt/ros/{}}}".format(self._rosdistro),
f.read(),
)
f.seek(0)
f.truncate()
f.write(replaced)
if self.options.underlay:
underlay_run_path = self.options.underlay["run-path"]
self._generate_snapcraft_setup_sh("$SNAP", underlay_run_path)
# If pip dependencies were installed, generate a sitecustomize that
# allows access to them.
if self._pip.is_setup() and self._pip.list(user=True):
_python.generate_sitecustomize(
"2", stage_dir=self.project.stage_dir, install_dir=self.installdir
)
def _use_in_snap_python(self):
# Fix all shebangs to use the in-snap python.
mangling.rewrite_python_shebangs(self.installdir)
# Also replace all the /usr/bin/python calls in etc/catkin/profile.d/
# files with the in-snap python
profile_d_path = os.path.join(self.rosdir, "etc", "catkin", "profile.d")
file_utils.replace_in_file(
profile_d_path, re.compile(r""), re.compile(r"/usr/bin/python"), r"python"
)
def _parse_cmake_args(self):
args: List[str] = list()
for arg in self.options.catkin_cmake_args:
cmake_arg = " ".join(shlex.split(arg))
args.append(cmake_arg)
return args
def _build_catkin_packages(self):
# Nothing to do if no packages were specified
if self.catkin_packages is not None and len(self.catkin_packages) == 0:
return
catkincmd = ["catkin_make_isolated"]
# Install the package
catkincmd.append("--install")
if self.catkin_packages:
# Specify the packages to be built
catkincmd.append("--pkg")
catkincmd.extend(self.catkin_packages)
# Don't clutter the real ROS workspace-- use the Snapcraft build
# directory
catkincmd.extend(["--directory", self.builddir])
# Account for a non-default source space by always specifying it
catkincmd.extend(
["--source-space", os.path.join(self.builddir, self.options.source_space)]
)
# Specify that the package should be installed along with the rest of
# the ROS distro.
catkincmd.extend(["--install-space", self.rosdir])
# Specify the number of workers
catkincmd.append("-j{}".format(self.parallel_build_count))
# All the arguments that follow are meant for CMake
catkincmd.append("--cmake-args")
build_type = "Release"
if "debug" in self.options.build_attributes:
build_type = "Debug"
catkincmd.append("-DCMAKE_BUILD_TYPE={}".format(build_type))
# Finally, add any cmake-args requested from the plugin options
catkincmd.extend(self._parse_cmake_args())
self.run(catkincmd)
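# For illustration (hypothetical paths and values), the assembled
# command resembles:
#   catkin_make_isolated --install --pkg my_pkg \
#       --directory <builddir> \
#       --source-space <builddir>/src \
#       --install-space <installdir>/opt/ros/kinetic \
#       -j4 --cmake-args -DCMAKE_BUILD_TYPE=Release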
def snap_fileset(self):
"""Filter useless files out of the snap.
- opt/ros/<rosdistro>/.rosinstall points to the part installdir, and
isn't useful from the snap anyway.
"""
fileset = super().snap_fileset()
fileset.append(
"-{}".format(os.path.join("opt", "ros", self._rosdistro, ".rosinstall"))
)
return fileset
def _find_system_dependencies(catkin_packages, rosdep, catkin):
"""Find system dependencies for a given set of Catkin packages."""
resolved_dependencies = {}
dependencies = set()
logger.info("Determining system dependencies for Catkin packages...")
if catkin_packages is not None:
for package in catkin_packages:
# Query rosdep for the list of dependencies for this package
dependencies |= rosdep.get_dependencies(package)
else:
# Rather than getting dependencies for an explicit list of packages,
# let's get the dependencies for the entire workspace.
dependencies |= rosdep.get_dependencies()
for dependency in dependencies:
_resolve_package_dependencies(
catkin_packages, dependency, catkin, rosdep, resolved_dependencies
)
# We currently have nested dict structure of:
# dependency name -> package type -> package names
#
# We want to return a flattened dict of package type -> package names.
flattened_dependencies = {}
for dependency_types in resolved_dependencies.values():
for key, value in dependency_types.items():
if key not in flattened_dependencies:
flattened_dependencies[key] = set()
flattened_dependencies[key] |= value
# Finally, return that dict of dependencies
return flattened_dependencies
def _resolve_package_dependencies(
catkin_packages, dependency, catkin, rosdep, resolved_dependencies
):
# No need to resolve this dependency if we know it's local, or if
# we've already resolved it into a system dependency
if dependency in resolved_dependencies or (
catkin_packages and dependency in catkin_packages
):
return
if _dependency_is_in_underlay(catkin, dependency):
# Package was found-- don't pull anything extra to satisfy
# this dependency.
logger.debug("Satisfied dependency {!r} in underlay".format(dependency))
return
# In this situation, the package depends on something that we
# weren't instructed to build. It's probably a system dependency,
# but the developer could have also forgotten to tell us to build
# it.
try:
these_dependencies = rosdep.resolve_dependency(dependency)
except _ros.rosdep.RosdepDependencyNotResolvedError:
raise CatkinInvalidSystemDependencyError(dependency)
for key, value in these_dependencies.items():
if key not in _SUPPORTED_DEPENDENCY_TYPES:
raise CatkinUnsupportedDependencyTypeError(key, dependency)
resolved_dependencies[dependency] = {key: value}
def _dependency_is_in_underlay(catkin, dependency):
if catkin:
# Before trying to resolve this dependency into a system
# dependency, see if it's already in the underlay.
try:
catkin.find(dependency)
except CatkinPackageNotFoundError:
# No package by that name is available
pass
else:
return True
return False
def _handle_rosinstall_files(wstool, rosinstall_files):
"""Merge given rosinstall files into our workspace."""
for rosinstall_file in rosinstall_files:
logger.info("Merging {}".format(rosinstall_file))
wstool.merge(rosinstall_file)
logger.info("Updating workspace...")
wstool.update()
def _recursively_handle_rosinstall_files(wstool, source_path, *, cache=None):
"Recursively find and merge rosinstall files and update workspace"
rosinstall_files: Set[str] = set()
if not cache:
cache: Set[str] = set()
# Walk the entire source directory looking for rosinstall files. Keep track
# of any we haven't seen previously.
for root, directories, files in os.walk(source_path):
for file_name in files:
path = os.path.join(root, file_name)
if path.endswith(".rosinstall") and path not in cache:
rosinstall_files.add(path)
# If we came across previously-unseen rosinstall files, add them to the
# cache. Then handle them (merge/update). Finally, walk again. Do this
# until no new rosinstall files are discovered.
if rosinstall_files:
cache.update(rosinstall_files)
_handle_rosinstall_files(wstool, rosinstall_files)
_recursively_handle_rosinstall_files(wstool, source_path, cache=cache)
class CatkinPackageNotFoundError(errors.SnapcraftError):
fmt = "Unable to find Catkin package {package_name!r}"
def __init__(self, package_name):
super().__init__(package_name=package_name)
class _Catkin:
def __init__(
self,
ros_distro: str,
workspaces: List[str],
catkin_path: str,
project: "Project",
) -> None:
self._ros_distro = ros_distro
self._workspaces = workspaces
self._catkin_path = catkin_path
self._project = project
self._catkin_install_path = os.path.join(self._catkin_path, "install")
self._catkin_stage_packages_path = (
pathlib.Path(self._catkin_path) / "stage_packages"
)
def setup(self):
os.makedirs(self._catkin_install_path, exist_ok=True)
# With the introduction of an underlay, we no longer know where Catkin
# is. Let's just fetch/unpack our own, and use it.
logger.info("Installing catkin...")
repo.Ubuntu.fetch_stage_packages(
package_names=["ros-{}-catkin".format(self._ros_distro)],
stage_packages_path=self._catkin_stage_packages_path,
base=self._project._get_build_base(),
target_arch=self._project._get_stage_packages_target_arch(),
)
repo.Ubuntu.unpack_stage_packages(
stage_packages_path=self._catkin_stage_packages_path,
install_path=pathlib.Path(self._catkin_install_path),
)
def find(self, package_name):
with contextlib.suppress(subprocess.CalledProcessError):
path = self._run(["--first-only", package_name]).strip()
# Not a valid find if the package resolves into our own catkin
# workspace. That won't be transitioned into the snap.
if not path.startswith(self._catkin_install_path):
return path
raise CatkinPackageNotFoundError(package_name)
def _run(self, arguments):
with tempfile.NamedTemporaryFile(mode="w+") as f:
lines = [
"export PYTHONPATH={}".format(
os.path.join(
self._catkin_install_path,
"usr",
"lib",
"python2.7",
"dist-packages",
)
)
]
ros_path = os.path.join(
self._catkin_install_path, "opt", "ros", self._ros_distro
)
bin_paths = (
os.path.join(ros_path, "bin"),
os.path.join(self._catkin_install_path, "usr", "bin"),
)
lines.append(
"export {}".format(
formatting_utils.format_path_variable(
"PATH", bin_paths, prepend="", separator=":"
)
)
)
# Source our own workspace so we have all of Catkin's dependencies,
# then source the workspace we're actually supposed to be crawling.
lines.append(
"_CATKIN_SETUP_DIR={} source {} --local".format(
ros_path, os.path.join(ros_path, "setup.sh")
)
)
for workspace in self._workspaces:
lines.append(
"_CATKIN_SETUP_DIR={} source {} --local --extend".format(
workspace, os.path.join(workspace, "setup.sh")
)
)
lines.append('exec "$@"')
f.write("\n".join(lines))
f.flush()
return (
subprocess.check_output(
["/bin/bash", f.name, "catkin_find"] + arguments,
stderr=subprocess.STDOUT,
)
.decode("utf8")
.strip()
)
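# For illustration (hypothetical paths), the temporary wrapper script
# written above roughly looks like:
#   export PYTHONPATH=<catkin_install>/usr/lib/python2.7/dist-packages
#   export PATH=<ros_path>/bin:<catkin_install>/usr/bin
#   _CATKIN_SETUP_DIR=<ros_path> source <ros_path>/setup.sh --local
#   _CATKIN_SETUP_DIR=<workspace> source <workspace>/setup.sh --local --extend
#   exec "$@"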
def _get_highest_version_path(path):
paths = sorted(glob.glob(os.path.join(path, "*")))
if not paths:
raise CatkinNoHighestVersionPathError(path)
return paths[-1]
|
mdaniel/intellij-community
|
refs/heads/master
|
python/testData/codeInsight/controlflow/ForReturn.py
|
83
|
for start, end, name in self.talks:
if hour >= start and hour < end: return name
|
lberruti/ansible-modules-core
|
refs/heads/maint1.99
|
windows/win_feature.py
|
62
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Paul Durivage <paul.durivage@rackspace.com>, Trond Hindenes <trond@hindenes.com> and others
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
DOCUMENTATION = '''
---
module: win_feature
version_added: "1.7"
short_description: Installs and uninstalls Windows Features
description:
- Installs or uninstalls Windows Roles or Features
options:
name:
description:
- Names of roles or features to install as a single feature or a comma-separated list of features
required: true
default: null
aliases: []
state:
description:
- State of the features or roles on the system
required: false
choices:
- present
- absent
default: present
aliases: []
restart:
description:
- Restarts the computer automatically when installation is complete, if restarting is required by the roles or features installed.
choices:
- yes
- no
default: null
aliases: []
include_sub_features:
description:
- Adds all subfeatures of the specified feature
choices:
- yes
- no
default: null
aliases: []
include_management_tools:
description:
- Adds the corresponding management tools to the specified feature
choices:
- yes
- no
default: null
aliases: []
author: Paul Durivage / Trond Hindenes
'''
EXAMPLES = '''
# This installs IIS.
# The names of features available for install can be listed by running the following PowerShell command:
# PS C:\Users\Administrator> Import-Module ServerManager; Get-WindowsFeature
$ ansible -i hosts -m win_feature -a "name=Web-Server" all
$ ansible -i hosts -m win_feature -a "name=Web-Server,Web-Common-Http" all
# Playbook example
---
- name: Install IIS
hosts: all
gather_facts: false
tasks:
- name: Install IIS
win_feature:
name: "Web-Server"
state: present
restart: yes
include_sub_features: yes
include_management_tools: yes
'''
|
Prashant-Surya/addons-server
|
refs/heads/master
|
src/olympia/api/tests/test_permissions.py
|
1
|
from django.contrib.auth.models import AnonymousUser
from django.test import RequestFactory
from mock import Mock
from rest_framework.authentication import SessionAuthentication
from rest_framework.permissions import AllowAny, BasePermission
from rest_framework.response import Response
from rest_framework.views import APIView
from olympia.access.models import Group, GroupUser
from olympia.addons.models import Addon
from olympia.api.permissions import (
AllowAddonAuthor, AllowNone, AllowOwner, AllowReadOnlyIfReviewedAndListed,
AllowReviewer, AllowReviewerUnlisted, AnyOf, ByHttpMethod, GroupPermission)
from olympia.amo.tests import TestCase, WithDynamicEndpoints
from olympia.users.models import UserProfile
class ProtectedView(APIView):
# Use session auth for this test view because it's easy, and the goal is
# to test the permission, not the authentication.
authentication_classes = [SessionAuthentication]
permission_classes = [GroupPermission('SomeRealm', 'SomePermission')]
def get(self, request):
return Response('ok')
def myview(*args, **kwargs):
pass
class TestGroupPermissionOnView(WithDynamicEndpoints):
# Note: be careful when testing, under the hood we're using a method that
# relies on UserProfile.groups_list, which is cached on the UserProfile
# instance.
fixtures = ['base/users']
def setUp(self):
super(TestGroupPermissionOnView, self).setUp()
self.endpoint(ProtectedView)
self.url = '/en-US/firefox/dynamic-endpoint'
email = 'regular@mozilla.com'
self.user = UserProfile.objects.get(email=email)
group = Group.objects.create(rules='SomeRealm:SomePermission')
GroupUser.objects.create(group=group, user=self.user)
assert self.client.login(username=email,
password='password')
def test_user_must_be_in_required_group(self):
self.user.groups.all().delete()
res = self.client.get(self.url)
assert res.status_code == 403, res.content
assert res.data['detail'] == (
'You do not have permission to perform this action.')
def test_view_is_executed(self):
res = self.client.get(self.url)
assert res.status_code == 200, res.content
assert res.content == '"ok"'
class TestGroupPermission(TestCase):
def test_user_cannot_be_anonymous(self):
request = RequestFactory().get('/')
request.user = AnonymousUser()
view = Mock()
perm = GroupPermission('SomeRealm', 'SomePermission')
assert not perm.has_permission(request, view)
class TestAllowNone(TestCase):
def test_has_permission(self):
request = RequestFactory().get('/')
assert not AllowNone().has_permission(request, myview)
def test_has_object_permission(self):
request = RequestFactory().get('/')
assert not AllowNone().has_object_permission(request, myview, None)
class TestAnyOf(TestCase):
def test_has_permission(self):
request = RequestFactory().get('/')
assert AnyOf(AllowNone, AllowAny)().has_permission(request, myview)
assert AnyOf(AllowAny, AllowNone)().has_permission(request, myview)
def test_has_permission_fail(self):
request = RequestFactory().get('/')
assert not AnyOf(AllowNone, AllowNone)().has_permission(
request, myview)
def test_has_object_permission(self):
request = RequestFactory().get('/')
assert AnyOf(AllowNone, AllowAny)().has_object_permission(
request, myview, None)
assert AnyOf(AllowAny, AllowNone)().has_object_permission(
request, myview, None)
def test_has_object_permission_fail(self):
request = RequestFactory().get('/')
assert not AnyOf(AllowNone, AllowNone)().has_object_permission(
request, myview, None)
def test_has_object_permission_partial_fail(self):
"""Test that AnyOf.has_object_permission() does not allow access when
a permission class returns False for has_permission() without having
a has_object_permission() implementation."""
class NoObjectPerm(BasePermission):
# This class will not grant access because we do check
# has_permission() on top of just has_object_permission().
def has_permission(self, request, view):
return False
class NoPerm(BasePermission):
# This class will not grant access either when checking
# has_object_permission() since it directly returns False.
def has_object_permission(self, request, view, obj):
return False
request = RequestFactory().get('/')
assert not AnyOf(NoObjectPerm, NoPerm)().has_object_permission(
request, myview, None)
class TestAllowAddonAuthor(TestCase):
fixtures = ['base/users', 'base/addon_3615']
def setUp(self):
self.addon = Addon.objects.get(pk=3615)
self.permission = AllowAddonAuthor()
self.owner = self.addon.authors.all()[0]
self.request = RequestFactory().get('/')
self.request.user = AnonymousUser()
def test_has_permission_anonymous(self):
assert not self.permission.has_permission(self.request, myview)
def test_has_permission_any_authenticated_user(self):
self.request.user = UserProfile.objects.get(pk=999)
assert self.request.user not in self.addon.authors.all()
assert self.permission.has_permission(self.request, myview)
def test_has_object_permission_user(self):
self.request.user = self.owner
assert self.permission.has_object_permission(
self.request, myview, self.addon)
def test_has_object_permission_different_user(self):
self.request.user = UserProfile.objects.get(pk=999)
assert self.request.user not in self.addon.authors.all()
assert not self.permission.has_object_permission(
self.request, myview, self.addon)
def test_has_object_permission_anonymous(self):
assert not self.permission.has_object_permission(
self.request, myview, self.addon)
class TestAllowOwner(TestCase):
fixtures = ['base/users']
def setUp(self):
self.permission = AllowOwner()
self.anonymous = AnonymousUser()
self.user = UserProfile.objects.get(pk=999)
self.request = RequestFactory().get('/')
self.request.user = self.anonymous
def test_has_permission_anonymous(self):
assert not self.permission.has_permission(self.request, 'myview')
def test_has_permission_user(self):
self.request.user = self.user
assert self.permission.has_permission(self.request, 'myview')
def test_has_object_permission_user(self):
self.request.user = self.user
obj = Mock()
obj.user = self.user
assert self.permission.has_object_permission(
self.request, 'myview', obj)
def test_has_object_permission_no_user_on_obj(self):
self.request.user = self.user
obj = Mock()
assert not self.permission.has_object_permission(
self.request, 'myview', obj)
def test_has_object_permission_different_user(self):
self.request.user = self.user
obj = Mock()
obj.user = UserProfile.objects.get(pk=20)
assert not self.permission.has_object_permission(
self.request, 'myview', obj)
class TestAllowReviewer(TestCase):
fixtures = ['base/users']
# Note: be careful when testing, under the hood we're using a method that
# relies on UserProfile.groups_list, which is cached on the UserProfile
# instance.
def setUp(self):
self.permission = AllowReviewer()
self.request_factory = RequestFactory()
self.unsafe_methods = ('patch', 'post', 'put', 'delete')
self.safe_methods = ('get', 'options', 'head')
def test_user_cannot_be_anonymous(self):
request = self.request_factory.get('/')
request.user = AnonymousUser()
assert not self.permission.has_permission(request, myview)
assert not self.permission.has_object_permission(
request, myview, Mock())
def test_authenticated_but_not_reviewer(self):
request = self.request_factory.get('/')
request.user = UserProfile.objects.get(pk=999)
assert not self.permission.has_permission(request, myview)
assert not self.permission.has_object_permission(
request, myview, Mock())
def test_admin(self):
user = UserProfile.objects.get(email='admin@mozilla.com')
for method in self.safe_methods + self.unsafe_methods:
request = getattr(self.request_factory, method)('/')
request.user = user
assert self.permission.has_permission(request, myview)
assert self.permission.has_object_permission(
request, myview, Mock())
def test_reviewer_tools_access_read_only(self):
user = UserProfile.objects.get(pk=999)
group = Group.objects.create(
name='ReviewerTools Viewer', rules='ReviewerTools:View')
GroupUser.objects.create(user=user, group=group)
for method in self.safe_methods:
request = getattr(self.request_factory, method)('/')
request.user = user
assert self.permission.has_permission(request, myview)
assert self.permission.has_object_permission(
request, myview, Mock())
for method in self.unsafe_methods:
request = getattr(self.request_factory, method)('/')
request.user = user
assert not self.permission.has_permission(request, myview)
assert not self.permission.has_object_permission(
request, myview, Mock())
def test_actual_reviewer(self):
user = UserProfile.objects.get(email='editor@mozilla.com')
for method in self.safe_methods + self.unsafe_methods:
request = getattr(self.request_factory, method)('/')
request.user = user
assert self.permission.has_permission(request, myview)
assert self.permission.has_object_permission(
request, myview, Mock())
class TestAllowUnlistedReviewer(TestCase):
fixtures = ['base/users']
# Note: be careful when testing, under the hood we're using a method that
# relies on UserProfile.groups_list, which is cached on the UserProfile
# instance.
def setUp(self):
self.permission = AllowReviewerUnlisted()
self.request = RequestFactory().get('/')
def test_user_cannot_be_anonymous(self):
self.request.user = AnonymousUser()
obj = Mock()
obj.is_listed = False
assert not self.permission.has_permission(self.request, myview)
assert not self.permission.has_object_permission(
self.request, myview, obj)
def test_authenticated_but_not_reviewer(self):
self.request.user = UserProfile.objects.get(pk=999)
obj = Mock()
obj.is_listed = False
assert not self.permission.has_permission(self.request, myview)
assert not self.permission.has_object_permission(
self.request, myview, obj)
def test_admin(self):
self.request.user = UserProfile.objects.get(email='admin@mozilla.com')
obj = Mock()
obj.is_listed = False
assert self.permission.has_permission(self.request, myview)
assert self.permission.has_object_permission(self.request, myview, obj)
def test_unlisted_reviewer(self):
self.request.user = UserProfile.objects.get(
email='senioreditor@mozilla.com')
obj = Mock()
obj.is_listed = False
assert self.permission.has_permission(self.request, myview)
assert self.permission.has_object_permission(self.request, myview, obj)
class TestAllowReadOnlyIfReviewedAndListed(TestCase):
def setUp(self):
self.permission = AllowReadOnlyIfReviewedAndListed()
self.request_factory = RequestFactory()
self.unsafe_methods = ('patch', 'post', 'put', 'delete')
self.safe_methods = ('get', 'options', 'head')
def request(self, verb):
request = getattr(self.request_factory, verb)('/')
request.user = AnonymousUser()
return request
def test_has_permission(self):
for verb in self.safe_methods:
assert self.permission.has_permission(self.request(verb), myview)
for verb in self.unsafe_methods:
assert not self.permission.has_permission(
self.request(verb), myview)
def test_has_object_permission_reviewed(self):
obj = Mock()
obj.is_reviewed.return_value = True
obj.is_listed = True
obj.disabled_by_user = False
for verb in self.safe_methods:
assert self.permission.has_object_permission(
self.request(verb), myview, obj)
for verb in self.unsafe_methods:
assert not self.permission.has_object_permission(
self.request(verb), myview, obj)
def test_has_object_permission_reviewed_but_disabled_by_user(self):
obj = Mock()
obj.is_reviewed.return_value = True
obj.is_listed = False
obj.disabled_by_user = True
for verb in self.unsafe_methods + self.safe_methods:
assert not self.permission.has_object_permission(
self.request(verb), myview, obj)
def test_has_object_permission_not_reviewed(self):
obj = Mock()
obj.is_reviewed.return_value = False
obj.is_listed = True
obj.disabled_by_user = False
for verb in self.unsafe_methods + self.safe_methods:
assert not self.permission.has_object_permission(
self.request(verb), myview, obj)
def test_has_object_permission_not_listed(self):
obj = Mock()
obj.is_reviewed.return_value = True
obj.is_listed = False
obj.disabled_by_user = False
for verb in self.unsafe_methods + self.safe_methods:
assert not self.permission.has_object_permission(
self.request(verb), myview, obj)
def test_has_object_permission_not_listed_nor_reviewed(self):
obj = Mock()
obj.is_reviewed.return_value = False
obj.is_listed = False
obj.disabled_by_user = False
for verb in self.unsafe_methods + self.safe_methods:
assert not self.permission.has_object_permission(
self.request(verb), myview, obj)
class TestByHttpMethod(TestCase):
def setUp(self):
self.get_permission = Mock
self.patch_permission = Mock
self.post_permission = Mock
self.put_permission = Mock
self.permission = ByHttpMethod({
'get': self.get_permission,
})
self.set_permission_mock('get', True)
def set_permission_mock(self, method, value):
mock = self.permission.method_permissions[method]
mock.has_permission.return_value = value
def set_object_permission_mock(self, method, value):
mock = self.permission.method_permissions[method]
mock.has_object_permission.return_value = value
def test_get(self):
self.request = RequestFactory().get('/')
assert self.permission.has_permission(self.request, 'myview') is True
self.set_permission_mock('get', False)
assert self.permission.has_permission(self.request, 'myview') is False
def test_get_obj(self):
obj = Mock()
self.request = RequestFactory().get('/')
self.set_object_permission_mock('get', True)
assert self.permission.has_object_permission(
self.request, 'myview', obj) is True
self.set_object_permission_mock('get', False)
assert self.permission.has_object_permission(
self.request, 'myview', obj) is False
def test_missing_method(self):
self.request = RequestFactory().post('/')
assert self.permission.has_permission(self.request, 'myview') is False
obj = Mock()
self.request = RequestFactory().post('/')
assert self.permission.has_object_permission(
self.request, 'myview', obj) is False
self.request = RequestFactory().options('/')
assert self.permission.has_permission(self.request, 'myview') is False
|
tuxfux-hlp-notes/python-batches
|
refs/heads/master
|
archieves/batch-59/databases/create_table.py
|
2
|
#!/usr/bin/python
import MySQLdb as mq
# Positional arguments are host, user, password, database.
con = mq.connect('localhost', 'user58', 'user58', 'batch58')
cur = con.cursor()
# CREATE TABLE is DDL, which MySQL auto-commits.
cur.execute('create table student(name varchar(25), gender varchar(6))')
con.close()
'''
mysql> help create table;
mysql> use batch58;
Reading table information for completion of table and column names
You can turn off this feature to get a quicker startup with -A
Database changed
mysql> select database();
+------------+
| database() |
+------------+
| batch58 |
+------------+
1 row in set (0.00 sec)
mysql> show tables;
+-------------------+
| Tables_in_batch58 |
+-------------------+
| student |
+-------------------+
1 row in set (0.00 sec)
mysql> desc student;
+--------+-------------+------+-----+---------+-------+
| Field | Type | Null | Key | Default | Extra |
+--------+-------------+------+-----+---------+-------+
| name | varchar(25) | YES | | NULL | |
| gender | varchar(6) | YES | | NULL | |
+--------+-------------+------+-----+---------+-------+
2 rows in set (0.00 sec)
mysql> select * from student;
Empty set (0.00 sec)
mysql>
'''
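# --- Hedged follow-up sketch --------------------------------------------------
# With the table in place, rows are normally added with parameterized queries
# so MySQLdb escapes the values itself; this reuses the connection settings
# assumed above (localhost / user58 / batch58). Unlike the CREATE TABLE above,
# plain DML needs an explicit commit().
con = mq.connect('localhost', 'user58', 'user58', 'batch58')
cur = con.cursor()
cur.execute('insert into student(name, gender) values (%s, %s)',
            ('alice', 'female'))
con.commit()
con.close()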
|
noinil/euler_pysolution
|
refs/heads/master
|
python_solutions/p062_Cubic_permutations.py
|
1
|
#!/usr/bin/env python3
def cube_digi_score(n):
    # Permutation signature of n**3: its sorted digit string. The original
    # sum of 10**digit could collide once any digit repeats ten times, so the
    # sorted string is the safe, collision-free signature.
    return ''.join(sorted(str(n * n * n)))
def main():
score_list = [cube_digi_score(i) for i in range(1, 10000)]
printed_numbers = set()
for j in score_list:
if j in printed_numbers:
continue
        if score_list.count(j) == 5:
            # index() gives 0-based positions; +1 recovers the base number.
            a1 = score_list.index(j) + 1
            a2 = score_list.index(j, a1) + 1
            a3 = score_list.index(j, a2) + 1
            printed_numbers.add(j)
            print(a1, a2, a3)
            # Bases are scanned in ascending order, so the first cube printed
            # is the smallest with exactly five permutations (the answer).
            print(a1 * a1 * a1)
if __name__ == '__main__':
main()
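# --- Hedged alternative sketch -------------------------------------------------
# The count()/index() scan above is quadratic. Grouping cubes by their sorted
# digit string finds the same answer in one pass; group_cubes is a hypothetical
# helper, not part of the original solution.
from collections import defaultdict
def group_cubes(limit=10000, wanted=5):
    groups = defaultdict(list)
    for i in range(1, limit):
        groups[''.join(sorted(str(i ** 3)))].append(i ** 3)
    # Cubes are appended in ascending order, so g[0] is each group's smallest.
    return min(g[0] for g in groups.values() if len(g) == wanted)
# Example: print(group_cubes())  # smallest cube with exactly five permutations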
|
jsg7440/jQuery-ToDo
|
refs/heads/master
|
node_modules/node-gyp/gyp/tools/pretty_sln.py
|
1831
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Prints the information in a sln file in a diffable way.
It first outputs each project in alphabetical order with its
dependencies.
Then it outputs a possible build order.
"""
__author__ = 'nsylvain (Nicolas Sylvain)'
import os
import re
import sys
import pretty_vcproj
def BuildProject(project, built, projects, deps):
  # If all dependencies are built we can build this project; otherwise we
  # build each missing dependency first.
  # This is not infinite-recursion proof.
for dep in deps[project]:
if dep not in built:
BuildProject(dep, built, projects, deps)
print project
built.append(project)
def ParseSolution(solution_file):
# All projects, their clsid and paths.
projects = dict()
# A list of dependencies associated with a project.
dependencies = dict()
# Regular expressions that matches the SLN format.
# The first line of a project definition.
begin_project = re.compile(r'^Project\("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942'
r'}"\) = "(.*)", "(.*)", "(.*)"$')
# The last line of a project definition.
end_project = re.compile('^EndProject$')
# The first line of a dependency list.
begin_dep = re.compile(
r'ProjectSection\(ProjectDependencies\) = postProject$')
# The last line of a dependency list.
end_dep = re.compile('EndProjectSection$')
# A line describing a dependency.
dep_line = re.compile(' *({.*}) = ({.*})$')
in_deps = False
solution = open(solution_file)
for line in solution:
results = begin_project.search(line)
if results:
# Hack to remove icu because the diff is too different.
if results.group(1).find('icu') != -1:
continue
# We remove "_gyp" from the names because it helps to diff them.
current_project = results.group(1).replace('_gyp', '')
projects[current_project] = [results.group(2).replace('_gyp', ''),
results.group(3),
results.group(2)]
dependencies[current_project] = []
continue
results = end_project.search(line)
if results:
current_project = None
continue
results = begin_dep.search(line)
if results:
in_deps = True
continue
results = end_dep.search(line)
if results:
in_deps = False
continue
results = dep_line.search(line)
if results and in_deps and current_project:
dependencies[current_project].append(results.group(1))
continue
# Change all dependencies clsid to name instead.
for project in dependencies:
# For each dependencies in this project
new_dep_array = []
for dep in dependencies[project]:
      # Look for the project name matching this clsid
for project_info in projects:
if projects[project_info][1] == dep:
new_dep_array.append(project_info)
dependencies[project] = sorted(new_dep_array)
return (projects, dependencies)
def PrintDependencies(projects, deps):
print "---------------------------------------"
print "Dependencies for all projects"
print "---------------------------------------"
print "-- --"
for (project, dep_list) in sorted(deps.items()):
print "Project : %s" % project
print "Path : %s" % projects[project][0]
if dep_list:
for dep in dep_list:
print " - %s" % dep
print ""
print "-- --"
def PrintBuildOrder(projects, deps):
print "---------------------------------------"
print "Build order "
print "---------------------------------------"
print "-- --"
built = []
for (project, _) in sorted(deps.items()):
if project not in built:
BuildProject(project, built, projects, deps)
print "-- --"
def PrintVCProj(projects):
for project in projects:
print "-------------------------------------"
print "-------------------------------------"
print project
print project
print project
print "-------------------------------------"
print "-------------------------------------"
project_path = os.path.abspath(os.path.join(os.path.dirname(sys.argv[1]),
projects[project][2]))
pretty = pretty_vcproj
argv = [ '',
project_path,
'$(SolutionDir)=%s\\' % os.path.dirname(sys.argv[1]),
]
argv.extend(sys.argv[3:])
pretty.main(argv)
def main():
  # Check that we were given at least one parameter: the .sln path.
if len(sys.argv) < 2:
print 'Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0]
return 1
(projects, deps) = ParseSolution(sys.argv[1])
PrintDependencies(projects, deps)
PrintBuildOrder(projects, deps)
if '--recursive' in sys.argv:
PrintVCProj(projects)
return 0
if __name__ == '__main__':
sys.exit(main())
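# --- Hedged sketch: cycle-safe build ordering ----------------------------------
# BuildProject above recurses with no cycle guard (its own comment says it is
# "not infinite-recursion proof"). A defensive variant tracks an in-progress
# set and raises on a cycle; build_order_safe is a hypothetical helper, not
# part of the original tool.
def build_order_safe(deps):
  built, in_progress, order = set(), set(), []
  def visit(project):
    if project in built:
      return
    if project in in_progress:
      raise ValueError('dependency cycle through %r' % project)
    in_progress.add(project)
    for dep in deps.get(project, ()):
      visit(dep)
    in_progress.discard(project)
    built.add(project)
    order.append(project)
  for project in sorted(deps):
    visit(project)
  return order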
|
alexforencich/python-ivi
|
refs/heads/master
|
ivi/agilent/agilentMSO6034A.py
|
2
|
"""
Python Interchangeable Virtual Instrument Library
Copyright (c) 2012-2017 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from .agilent6000 import *
class agilentMSO6034A(agilent6000):
"Agilent InfiniiVision MSO6034A IVI oscilloscope driver"
def __init__(self, *args, **kwargs):
self.__dict__.setdefault('_instrument_id', 'MSO6034A')
super(agilentMSO6034A, self).__init__(*args, **kwargs)
self._analog_channel_count = 4
self._digital_channel_count = 16
self._channel_count = self._analog_channel_count + self._digital_channel_count
self._bandwidth = 350e6
self._init_channels()
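# --- Hedged usage sketch --------------------------------------------------------
# A driver class like this is normally instantiated through python-ivi with a
# VISA resource string. The address below is a placeholder and needs real
# hardware, so the block only runs when this file is executed directly.
if __name__ == '__main__':
    import ivi
    mso = ivi.agilent.agilentMSO6034A("TCPIP0::192.0.2.10::INSTR")
    print(mso.identity.instrument_model)
    mso.close()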
|
Aravinthu/odoo
|
refs/heads/master
|
addons/l10n_us/models/res_partner_bank.py
|
3
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models
class ResPartnerBank(models.Model):
_inherit = 'res.partner.bank'
    # Char rather than Integer: ABA routing numbers are nine digits and may
    # begin with zeros, which an Integer field would silently drop.
    aba_routing = fields.Char(string="ABA/Routing", help="American Bankers Association Routing Number")
|
chineyting/project4-Info3180
|
refs/heads/master
|
server/lib/werkzeug/local.py
|
310
|
# -*- coding: utf-8 -*-
"""
werkzeug.local
~~~~~~~~~~~~~~
This module implements context-local objects.
:copyright: (c) 2013 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from functools import update_wrapper
from werkzeug.wsgi import ClosingIterator
from werkzeug._compat import PY2, implements_bool
# since each thread has its own greenlet we can just use those as identifiers
# for the context. If greenlets are not available we fall back to the
# current thread ident, imported from wherever it lives on this Python version.
try:
from greenlet import getcurrent as get_ident
except ImportError:
try:
from thread import get_ident
except ImportError:
from _thread import get_ident
def release_local(local):
"""Releases the contents of the local for the current context.
This makes it possible to use locals without a manager.
Example::
>>> loc = Local()
>>> loc.foo = 42
>>> release_local(loc)
>>> hasattr(loc, 'foo')
False
With this function one can release :class:`Local` objects as well
    as :class:`LocalStack` objects. However, it is not possible to
    release data held by proxies that way; one always has to retain
a reference to the underlying local object in order to be able
to release it.
.. versionadded:: 0.6.1
"""
local.__release_local__()
class Local(object):
__slots__ = ('__storage__', '__ident_func__')
def __init__(self):
object.__setattr__(self, '__storage__', {})
object.__setattr__(self, '__ident_func__', get_ident)
def __iter__(self):
return iter(self.__storage__.items())
def __call__(self, proxy):
"""Create a proxy for a name."""
return LocalProxy(self, proxy)
def __release_local__(self):
self.__storage__.pop(self.__ident_func__(), None)
def __getattr__(self, name):
try:
return self.__storage__[self.__ident_func__()][name]
except KeyError:
raise AttributeError(name)
def __setattr__(self, name, value):
ident = self.__ident_func__()
storage = self.__storage__
try:
storage[ident][name] = value
except KeyError:
storage[ident] = {name: value}
def __delattr__(self, name):
try:
del self.__storage__[self.__ident_func__()][name]
except KeyError:
raise AttributeError(name)
class LocalStack(object):
"""This class works similar to a :class:`Local` but keeps a stack
of objects instead. This is best explained with an example::
>>> ls = LocalStack()
>>> ls.push(42)
>>> ls.top
42
>>> ls.push(23)
>>> ls.top
23
>>> ls.pop()
23
>>> ls.top
42
They can be force released by using a :class:`LocalManager` or with
the :func:`release_local` function but the correct way is to pop the
item from the stack after using. When the stack is empty it will
no longer be bound to the current context (and as such released).
By calling the stack without arguments it returns a proxy that resolves to
the topmost item on the stack.
.. versionadded:: 0.6.1
"""
def __init__(self):
self._local = Local()
def __release_local__(self):
self._local.__release_local__()
def _get__ident_func__(self):
return self._local.__ident_func__
def _set__ident_func__(self, value):
object.__setattr__(self._local, '__ident_func__', value)
__ident_func__ = property(_get__ident_func__, _set__ident_func__)
del _get__ident_func__, _set__ident_func__
def __call__(self):
def _lookup():
rv = self.top
if rv is None:
raise RuntimeError('object unbound')
return rv
return LocalProxy(_lookup)
def push(self, obj):
"""Pushes a new item to the stack"""
rv = getattr(self._local, 'stack', None)
if rv is None:
self._local.stack = rv = []
rv.append(obj)
return rv
def pop(self):
"""Removes the topmost item from the stack, will return the
old value or `None` if the stack was already empty.
"""
stack = getattr(self._local, 'stack', None)
if stack is None:
return None
elif len(stack) == 1:
release_local(self._local)
return stack[-1]
else:
return stack.pop()
@property
def top(self):
"""The topmost item on the stack. If the stack is empty,
`None` is returned.
"""
try:
return self._local.stack[-1]
except (AttributeError, IndexError):
return None
class LocalManager(object):
"""Local objects cannot manage themselves. For that you need a local
manager. You can pass a local manager multiple locals or add them later
    by appending them to `manager.locals`. Every time the manager cleans up,
    it will clean up all the data left in the locals for this context.
The `ident_func` parameter can be added to override the default ident
function for the wrapped locals.
.. versionchanged:: 0.6.1
Instead of a manager the :func:`release_local` function can be used
as well.
.. versionchanged:: 0.7
`ident_func` was added.
"""
def __init__(self, locals=None, ident_func=None):
if locals is None:
self.locals = []
elif isinstance(locals, Local):
self.locals = [locals]
else:
self.locals = list(locals)
if ident_func is not None:
self.ident_func = ident_func
for local in self.locals:
object.__setattr__(local, '__ident_func__', ident_func)
else:
self.ident_func = get_ident
def get_ident(self):
"""Return the context identifier the local objects use internally for
this context. You cannot override this method to change the behavior
but use it to link other context local objects (such as SQLAlchemy's
scoped sessions) to the Werkzeug locals.
.. versionchanged:: 0.7
           You can pass a different ident function to the local manager that
will then be propagated to all the locals passed to the
constructor.
"""
return self.ident_func()
def cleanup(self):
"""Manually clean up the data in the locals for this context. Call
this at the end of the request or use `make_middleware()`.
"""
for local in self.locals:
release_local(local)
def make_middleware(self, app):
"""Wrap a WSGI application so that cleaning up happens after
request end.
"""
def application(environ, start_response):
return ClosingIterator(app(environ, start_response), self.cleanup)
return application
def middleware(self, func):
"""Like `make_middleware` but for decorating functions.
Example usage::
@manager.middleware
def application(environ, start_response):
...
The difference to `make_middleware` is that the function passed
will have all the arguments copied from the inner application
(name, docstring, module).
"""
return update_wrapper(self.make_middleware(func), func)
def __repr__(self):
return '<%s storages: %d>' % (
self.__class__.__name__,
len(self.locals)
)
@implements_bool
class LocalProxy(object):
"""Acts as a proxy for a werkzeug local. Forwards all operations to
a proxied object. The only operations not supported for forwarding
are right handed operands and any kind of assignment.
Example usage::
from werkzeug.local import Local
l = Local()
# these are proxies
request = l('request')
user = l('user')
from werkzeug.local import LocalStack
_response_local = LocalStack()
# this is a proxy
response = _response_local()
Whenever something is bound to l.user / l.request the proxy objects
will forward all operations. If no object is bound a :exc:`RuntimeError`
will be raised.
To create proxies to :class:`Local` or :class:`LocalStack` objects,
call the object as shown above. If you want to have a proxy to an
object looked up by a function, you can (as of Werkzeug 0.6.1) pass
a function to the :class:`LocalProxy` constructor::
session = LocalProxy(lambda: get_current_request().session)
.. versionchanged:: 0.6.1
       The class can be instantiated with a callable as well now.
"""
__slots__ = ('__local', '__dict__', '__name__')
def __init__(self, local, name=None):
object.__setattr__(self, '_LocalProxy__local', local)
object.__setattr__(self, '__name__', name)
def _get_current_object(self):
"""Return the current object. This is useful if you want the real
object behind the proxy at a time for performance reasons or because
you want to pass the object into a different context.
"""
if not hasattr(self.__local, '__release_local__'):
return self.__local()
try:
return getattr(self.__local, self.__name__)
except AttributeError:
raise RuntimeError('no object bound to %s' % self.__name__)
@property
def __dict__(self):
try:
return self._get_current_object().__dict__
except RuntimeError:
raise AttributeError('__dict__')
def __repr__(self):
try:
obj = self._get_current_object()
except RuntimeError:
return '<%s unbound>' % self.__class__.__name__
return repr(obj)
def __bool__(self):
try:
return bool(self._get_current_object())
except RuntimeError:
return False
def __unicode__(self):
try:
return unicode(self._get_current_object())
except RuntimeError:
return repr(self)
def __dir__(self):
try:
return dir(self._get_current_object())
except RuntimeError:
return []
def __getattr__(self, name):
if name == '__members__':
return dir(self._get_current_object())
return getattr(self._get_current_object(), name)
def __setitem__(self, key, value):
self._get_current_object()[key] = value
def __delitem__(self, key):
del self._get_current_object()[key]
if PY2:
__getslice__ = lambda x, i, j: x._get_current_object()[i:j]
def __setslice__(self, i, j, seq):
self._get_current_object()[i:j] = seq
def __delslice__(self, i, j):
del self._get_current_object()[i:j]
__setattr__ = lambda x, n, v: setattr(x._get_current_object(), n, v)
__delattr__ = lambda x, n: delattr(x._get_current_object(), n)
__str__ = lambda x: str(x._get_current_object())
__lt__ = lambda x, o: x._get_current_object() < o
__le__ = lambda x, o: x._get_current_object() <= o
__eq__ = lambda x, o: x._get_current_object() == o
__ne__ = lambda x, o: x._get_current_object() != o
__gt__ = lambda x, o: x._get_current_object() > o
__ge__ = lambda x, o: x._get_current_object() >= o
__cmp__ = lambda x, o: cmp(x._get_current_object(), o)
__hash__ = lambda x: hash(x._get_current_object())
__call__ = lambda x, *a, **kw: x._get_current_object()(*a, **kw)
__len__ = lambda x: len(x._get_current_object())
__getitem__ = lambda x, i: x._get_current_object()[i]
__iter__ = lambda x: iter(x._get_current_object())
__contains__ = lambda x, i: i in x._get_current_object()
__add__ = lambda x, o: x._get_current_object() + o
__sub__ = lambda x, o: x._get_current_object() - o
__mul__ = lambda x, o: x._get_current_object() * o
__floordiv__ = lambda x, o: x._get_current_object() // o
__mod__ = lambda x, o: x._get_current_object() % o
__divmod__ = lambda x, o: x._get_current_object().__divmod__(o)
__pow__ = lambda x, o: x._get_current_object() ** o
__lshift__ = lambda x, o: x._get_current_object() << o
__rshift__ = lambda x, o: x._get_current_object() >> o
__and__ = lambda x, o: x._get_current_object() & o
__xor__ = lambda x, o: x._get_current_object() ^ o
__or__ = lambda x, o: x._get_current_object() | o
__div__ = lambda x, o: x._get_current_object().__div__(o)
__truediv__ = lambda x, o: x._get_current_object().__truediv__(o)
__neg__ = lambda x: -(x._get_current_object())
__pos__ = lambda x: +(x._get_current_object())
__abs__ = lambda x: abs(x._get_current_object())
__invert__ = lambda x: ~(x._get_current_object())
__complex__ = lambda x: complex(x._get_current_object())
__int__ = lambda x: int(x._get_current_object())
__long__ = lambda x: long(x._get_current_object())
__float__ = lambda x: float(x._get_current_object())
__oct__ = lambda x: oct(x._get_current_object())
__hex__ = lambda x: hex(x._get_current_object())
__index__ = lambda x: x._get_current_object().__index__()
__coerce__ = lambda x, o: x._get_current_object().__coerce__(x, o)
__enter__ = lambda x: x._get_current_object().__enter__()
__exit__ = lambda x, *a, **kw: x._get_current_object().__exit__(*a, **kw)
__radd__ = lambda x, o: o + x._get_current_object()
__rsub__ = lambda x, o: o - x._get_current_object()
__rmul__ = lambda x, o: o * x._get_current_object()
__rdiv__ = lambda x, o: o / x._get_current_object()
if PY2:
__rtruediv__ = lambda x, o: x._get_current_object().__rtruediv__(o)
else:
__rtruediv__ = __rdiv__
__rfloordiv__ = lambda x, o: o // x._get_current_object()
__rmod__ = lambda x, o: o % x._get_current_object()
__rdivmod__ = lambda x, o: x._get_current_object().__rdivmod__(o)
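# --- Hedged demo -----------------------------------------------------------------
# A quick self-check of the per-context isolation described above: two threads
# set the same attribute on one Local and each reads back only its own value.
if __name__ == '__main__':
    import threading
    loc = Local()
    results = []
    def _worker(value):
        loc.name = value
        results.append((value, loc.name))
    threads = [threading.Thread(target=_worker, args=(v,)) for v in ('a', 'b')]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    print(sorted(results))  # expected: [('a', 'a'), ('b', 'b')]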
|
leiferikb/bitpop
|
refs/heads/master
|
build/third_party/buildbot_8_4p1/buildbot/test/util/connector_component.py
|
8
|
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from buildbot.db import model
from buildbot.test.util import db
from buildbot.test.fake import fakemaster
class FakeDBConnector(object):
pass
class ConnectorComponentMixin(db.RealDatabaseMixin):
"""
Implements a mock DBConnector object, replete with a thread pool and a DB
model. This includes a RealDatabaseMixin, so subclasses should not
instantiate that class directly. The connector appears at C{self.db}, and
the component should be attached to it as an attribute.
@ivar db: fake database connector
@ivar db.pool: DB thread pool
@ivar db.model: DB model
"""
    def setUpConnectorComponent(self, table_names=None, basedir='basedir'):
        """Set up C{self.db}, using the given db_url and basedir."""
        # Avoid a shared mutable default argument for table_names.
        if table_names is None:
            table_names = []
        d = self.setUpRealDatabase(table_names=table_names, basedir=basedir)
def finish_setup(_):
self.db = FakeDBConnector()
self.db.pool = self.db_pool
self.db.model = model.Model(self.db)
self.db.master = fakemaster.make_master()
d.addCallback(finish_setup)
return d
def tearDownConnectorComponent(self):
d = self.tearDownRealDatabase()
def finish_cleanup(_):
self.db_pool.shutdown()
# break some reference loops, just for fun
del self.db.pool
del self.db.model
del self.db
d.addCallback(finish_cleanup)
return d
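# --- Hedged usage sketch ----------------------------------------------------------
# A connector-component test normally mixes this class into a trial TestCase,
# creates the tables it needs in setUp, and hangs the component under test off
# self.db. 'changes' and MyComponent below are illustrative placeholders only.
#
#   from twisted.trial import unittest
#
#   class MyComponentTests(ConnectorComponentMixin, unittest.TestCase):
#       def setUp(self):
#           d = self.setUpConnectorComponent(table_names=['changes'])
#           def attach(_):
#               self.db.my_component = MyComponent(self.db)
#           d.addCallback(attach)
#           return d
#       def tearDown(self):
#           return self.tearDownConnectorComponent()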
|
nzavagli/UnrealPy
|
refs/heads/master
|
UnrealPyEmbed/Source/Python/Lib/python27/encodings/undefined.py
|
860
|
""" Python 'undefined' Codec
This codec will always raise a ValueError exception when being
used. It is intended for use by the site.py file to switch off
automatic string to Unicode coercion.
Written by Marc-Andre Lemburg (mal@lemburg.com).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
"""
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
raise UnicodeError("undefined encoding")
def decode(self,input,errors='strict'):
raise UnicodeError("undefined encoding")
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
raise UnicodeError("undefined encoding")
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
raise UnicodeError("undefined encoding")
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='undefined',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamwriter=StreamWriter,
streamreader=StreamReader,
)
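# --- Hedged usage sketch ----------------------------------------------------------
# The stdlib registers this module under the codec name 'undefined', so any
# attempt to encode or decode through it raises. Guarded so that importing the
# codec normally stays side-effect free.
if __name__ == '__main__':
    try:
        codecs.encode(u'abc', 'undefined')
    except UnicodeError as exc:
        print('encoding via "undefined" failed as designed: %s' % exc)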
|
choldrim/jumpserver
|
refs/heads/zhenai
|
config_docker.py
|
1
|
"""
jumpserver.config
~~~~~~~~~~~~~~~~~
Jumpserver project setting file
:copyright: (c) 2014-2016 by Jumpserver Team.
:license: GPL v2, see LICENSE for more details.
"""
import os
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
LOG_DIR = os.path.join(BASE_DIR, 'logs')
class Config:
# Use it to encrypt or decrypt data
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = os.environ.get('SECRET_KEY') or '2vym+ky!997d5kkcc64mnz06y1mmui3lut#(^wd=%s_qj$1%x'
    # How many lines to display per page, default 25
DISPLAY_PER_PAGE = 25
    # Used to identify your site. When we send an account-creation mail to a user,
    # we only know the relative login url (/login/), but the mail needs the absolute
    # url like http://jms.jumpserver.org/login/, so set SITE_URL to
    # HTTP_PROTOCOL://HOST[:PORT]
SITE_URL = 'http://localhost'
    # Domain name. If set, application emails will use it as the sender domain.
DOMAIN_NAME = 'jumpserver.org'
    # Django security setting; if you disable debug mode, you must set this.
ALLOWED_HOSTS = ['*']
    # Enable in development so errors display the full traceback; disable in production.
DEBUG = True
# DEBUG, INFO, WARNING, ERROR, CRITICAL can set. See https://docs.djangoproject.com/en/1.10/topics/logging/
LOG_LEVEL = 'DEBUG'
    # Database settings; sqlite3, mysql, postgres, etc. are supported.
# See https://docs.djangoproject.com/en/1.10/ref/settings/#databases
# Sqlite setting:
DB_ENGINE = 'sqlite3'
DB_NAME = os.path.join(BASE_DIR, 'data', 'db.sqlite3')
# Mysql or postgres setting like:
# DB_ENGINE = 'mysql'
# DB_HOST = '127.0.0.1'
# DB_PORT = 3306
# DB_USER = 'root'
# DB_PASSWORD = ''
# DB_NAME = 'jumpserver'
    # When Django starts it will bind this host and port
    # ./manage.py runserver 127.0.0.1:8080
    # Todo: Gunicorn or uwsgi may use it as well
HTTP_BIND_HOST = '0.0.0.0'
HTTP_LISTEN_PORT = 8080
# Use Redis as broker for celery and web socket
REDIS_HOST = '127.0.0.1'
REDIS_PORT = 6379
REDIS_PASSWORD = ''
BROKER_URL = 'redis://%(password)s%(host)s:%(port)s/3' % {
'password': REDIS_PASSWORD,
'host': REDIS_HOST,
'port': REDIS_PORT,
}
    # API token expiration (in seconds) when created
TOKEN_EXPIRATION = 3600
    # Session and CSRF cookie domain settings. If you deploy jumpserver, coco and
    # luna separately and want them to share cookies, use the same top-level domain.
# SESSION_COOKIE_DOMAIN = '.jms.com'
# CSRF_COOKIE_DOMAIN = '.jms.com'
SESSION_COOKIE_AGE = 3600*24
# Email SMTP setting, we only support smtp send mail
# EMAIL_HOST = 'smtp.qq.com'
# EMAIL_PORT = 25
# EMAIL_HOST_USER = ''
# EMAIL_HOST_PASSWORD = ''
# EMAIL_USE_SSL = False # If port is 465, set True
# EMAIL_USE_TLS = False # If port is 587, set True
# EMAIL_SUBJECT_PREFIX = '[Jumpserver] '
CAPTCHA_TEST_MODE = False
    # Set the jumpserver usage guide url here; users are redirected to it after submitting the wizard
USER_GUIDE_URL = ''
# LDAP Auth settings
AUTH_LDAP = False
AUTH_LDAP_SERVER_URI = 'ldap://localhost:389'
AUTH_LDAP_BIND_DN = 'cn=admin,dc=jumpserver,dc=org'
AUTH_LDAP_BIND_PASSWORD = ''
AUTH_LDAP_USER_DN_TEMPLATE = "uid=%(user)s,ou=people,dc=jumpserver,dc=org"
AUTH_LDAP_USER_ATTR_MAP = {
"username": "cn",
"name": "sn",
"email": "mail"
}
AUTH_LDAP_START_TLS = False
def __init__(self):
pass
def __getattr__(self, item):
return None
class DevelopmentConfig(Config):
DEBUG = True
DISPLAY_PER_PAGE = 20
DB_ENGINE = 'sqlite'
DB_NAME = os.path.join(BASE_DIR, 'data', 'db.sqlite3')
EMAIL_HOST = 'smtp.exmail.qq.com'
EMAIL_PORT = 465
EMAIL_HOST_USER = 'a@jumpserver.org'
EMAIL_HOST_PASSWORD = 'somepasswrd'
EMAIL_USE_SSL = True
EMAIL_USE_TLS = False
EMAIL_SUBJECT_PREFIX = '[Jumpserver] '
SITE_URL = 'http://localhost:8080'
class ProductionConfig(Config):
DEBUG = False
DB_ENGINE = 'mysql'
DB_HOST = '127.0.0.1'
DB_PORT = 3306
DB_USER = 'root'
DB_PASSWORD = ''
DB_NAME = 'jumpserver'
class DockerConfig(Config):
DB_ENGINE = 'sqlite'
DB_NAME = os.path.join(BASE_DIR, 'data', 'db.sqlite3')
REDIS_HOST = os.environ.get('REDIS_HOST') or 'redis'
EMAIL_HOST = os.environ.get('EMAIL_HOST') or 'smtp.qq.com'
EMAIL_PORT = int(os.environ.get('EMAIL_PORT', 465))
EMAIL_HOST_USER = os.environ.get('EMAIL_HOST_USER') or 'admin'
EMAIL_HOST_PASSWORD = os.environ.get('EMAIL_HOST_PASSWORD') or 'somepasswrd'
EMAIL_USE_SSL = True if EMAIL_PORT == 465 else False
EMAIL_USE_TLS = True if EMAIL_PORT == 587 else False
EMAIL_SUBJECT_PREFIX = os.environ.get('EMAIL_SUBJECT_PREFIX') or '[Jumpserver] '
SITE_URL = os.environ.get('SITE_URL') or 'http://localhost:8080'
config = {
'development': DevelopmentConfig,
'production': ProductionConfig,
'default': DevelopmentConfig,
'docker': DockerConfig,
}
env = 'docker'
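# --- Hedged usage sketch ------------------------------------------------------
# Settings code typically resolves the mapping above to one config object;
# falling back to 'default' keeps an unknown env value from crashing startup.
# The CONFIG name here is illustrative, not necessarily what settings.py uses.
CONFIG = config.get(env, config['default'])()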
|
rledisez/shinken
|
refs/heads/master
|
shinken/objects/satellitelink.py
|
8
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2009-2014:
# Gabes Jean, naparuba@gmail.com
# Gerhard Lausser, Gerhard.Lausser@consol.de
# Gregory Starck, g.starck@gmail.com
# Hartmut Goebel, h.goebel@goebel-consult.de
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
import time
import cPickle
from shinken.util import get_obj_name_two_args_and_void
from shinken.objects.item import Item, Items
from shinken.property import BoolProp, IntegerProp, StringProp, ListProp, DictProp, AddrProp
from shinken.log import logger
from shinken.http_client import HTTPClient, HTTPExceptions
class SatelliteLink(Item):
"""SatelliteLink is a common Class for link to satellite for
Arbiter with Conf Dispatcher.
"""
    # id = 0 each Class will have its own id
properties = Item.properties.copy()
properties.update({
'address': StringProp(default='localhost', fill_brok=['full_status']),
'timeout': IntegerProp(default=3, fill_brok=['full_status']),
'data_timeout': IntegerProp(default=120, fill_brok=['full_status']),
'check_interval': IntegerProp(default=60, fill_brok=['full_status']),
'max_check_attempts': IntegerProp(default=3, fill_brok=['full_status']),
'spare': BoolProp(default=False, fill_brok=['full_status']),
'manage_sub_realms': BoolProp(default=True, fill_brok=['full_status']),
'manage_arbiters': BoolProp(default=False, fill_brok=['full_status'], to_send=True),
'modules': ListProp(default=[''], to_send=True, split_on_coma=True),
'polling_interval': IntegerProp(default=1, fill_brok=['full_status'], to_send=True),
'use_timezone': StringProp(default='NOTSET', to_send=True),
'realm': StringProp(default='', fill_brok=['full_status'],
brok_transformation=get_obj_name_two_args_and_void),
'satellitemap': DictProp(default={}, elts_prop=AddrProp, to_send=True, override=True),
'use_ssl': BoolProp(default=False, fill_brok=['full_status']),
'hard_ssl_name_check': BoolProp(default=True, fill_brok=['full_status']),
'passive': BoolProp(default=False, fill_brok=['full_status'], to_send=True),
})
running_properties = Item.running_properties.copy()
running_properties.update({
'con': StringProp(default=None),
'alive': BoolProp(default=True, fill_brok=['full_status']),
'broks': StringProp(default=[]),
        # the number of failed attempts
        'attempt': StringProp(default=0, fill_brok=['full_status']),
        # whether the satellite can be reached over the network
        # (False when dead, or after a check timeout or error)
'reachable': BoolProp(default=False, fill_brok=['full_status']),
'last_check': IntegerProp(default=0, fill_brok=['full_status']),
'managed_confs': StringProp(default={}),
})
def __init__(self, *args, **kwargs):
super(SatelliteLink, self).__init__(*args, **kwargs)
self.arb_satmap = {'address': '0.0.0.0', 'port': 0}
if hasattr(self, 'address'):
self.arb_satmap['address'] = self.address
if hasattr(self, 'port'):
try:
self.arb_satmap['port'] = int(self.port)
except Exception:
pass
def set_arbiter_satellitemap(self, satellitemap):
"""
arb_satmap is the satellitemap in current context:
- A SatelliteLink is owned by an Arbiter
- satellitemap attribute of SatelliteLink is the map
defined IN THE satellite configuration
        but for creating connections we need to have the satellitemap of the Arbiter
"""
self.arb_satmap = {'address': self.address, 'port': self.port, 'use_ssl': self.use_ssl,
'hard_ssl_name_check': self.hard_ssl_name_check}
self.arb_satmap.update(satellitemap)
def create_connection(self):
self.con = HTTPClient(address=self.arb_satmap['address'], port=self.arb_satmap['port'],
timeout=self.timeout, data_timeout=self.data_timeout,
use_ssl=self.use_ssl,
strong_ssl=self.hard_ssl_name_check
)
self.uri = self.con.uri
def put_conf(self, conf):
if self.con is None:
self.create_connection()
# Maybe the connection was not ok, bail out
if not self.con:
return False
try:
self.con.get('ping')
self.con.post('put_conf', {'conf': conf}, wait='long')
print "PUT CONF SUCESS", self.get_name()
return True
except HTTPExceptions, exp:
self.con = None
logger.error("Failed sending configuration for %s: %s", self.get_name(), str(exp))
return False
# Get and clean all of our broks
def get_all_broks(self):
res = self.broks
self.broks = []
return res
# Set alive, reachable, and reset attempts.
# If we change state, raise a status brok update
def set_alive(self):
was_alive = self.alive
self.alive = True
self.attempt = 0
self.reachable = True
# We came from dead to alive
# so we must add a brok update
if not was_alive:
b = self.get_update_status_brok()
self.broks.append(b)
def set_dead(self):
was_alive = self.alive
self.alive = False
self.con = None
# We are dead now. Must raise
# a brok to say it
if was_alive:
logger.warning("Setting the satellite %s to a dead state.", self.get_name())
b = self.get_update_status_brok()
self.broks.append(b)
# Go in reachable=False and add a failed attempt
# if we reach the max, go dead
def add_failed_check_attempt(self, reason=''):
self.reachable = False
self.attempt += 1
self.attempt = min(self.attempt, self.max_check_attempts)
# Don't need to warn again and again if the satellite is already dead
if self.alive:
logger.warning("Add failed attempt to %s (%d/%d) %s",
self.get_name(), self.attempt, self.max_check_attempts, reason)
# check when we just go HARD (dead)
if self.attempt == self.max_check_attempts:
self.set_dead()
    # Update satellite info every self.check_interval seconds, so the
    # arbiter only acts when it is useful and does not overreact
    # to a single short timeout
def update_infos(self):
# First look if it's not too early to ping
now = time.time()
since_last_check = now - self.last_check
if since_last_check < self.check_interval:
return
self.last_check = now
# We ping and update the managed list
self.ping()
self.update_managed_list()
# Update the state of this element
b = self.get_update_status_brok()
self.broks.append(b)
# The elements just got a new conf_id, we put it in our list
# because maybe the satellite is too busy to answer now
def known_conf_managed_push(self, cfg_id, push_flavor):
self.managed_confs[cfg_id] = push_flavor
def ping(self):
logger.debug("Pinging %s", self.get_name())
try:
if self.con is None:
self.create_connection()
logger.debug(" (%s)", self.uri)
# If the connection failed to initialize, bail out
if self.con is None:
self.add_failed_check_attempt()
return
r = self.con.get('ping')
# Should return us pong string
if r == 'pong':
self.set_alive()
else:
self.add_failed_check_attempt()
except HTTPExceptions, exp:
self.add_failed_check_attempt(reason=str(exp))
def wait_new_conf(self):
if self.con is None:
self.create_connection()
try:
r = self.con.get('wait_new_conf')
return True
except HTTPExceptions, exp:
self.con = None
return False
    # To know if the satellite has a conf (magic_hash = None)
    # OR if the satellite has THIS conf (magic_hash != None).
    # Magic_hash is for arbiter checks only
def have_conf(self, magic_hash=None):
if self.con is None:
self.create_connection()
# If the connection failed to initialize, bail out
if self.con is None:
return False
try:
if magic_hash is None:
r = self.con.get('have_conf')
else:
r = self.con.get('have_conf', {'magic_hash': magic_hash})
print "have_conf RAW CALL", r, type(r)
if not isinstance(r, bool):
return False
return r
except HTTPExceptions, exp:
self.con = None
return False
# To know if a receiver got a conf or not
def got_conf(self):
if self.con is None:
self.create_connection()
# If the connection failed to initialize, bail out
if self.con is None:
return False
try:
r = self.con.get('got_conf')
# Protect against bad return
if not isinstance(r, bool):
return False
return r
except HTTPExceptions, exp:
self.con = None
return False
def remove_from_conf(self, sched_id):
if self.con is None:
self.create_connection()
# If the connection failed to initialize, bail out
if self.con is None:
return
try:
self.con.get('remove_from_conf', {'sched_id': sched_id})
return True
except HTTPExceptions, exp:
self.con = None
return False
def update_managed_list(self):
if self.con is None:
self.create_connection()
# If the connection failed to initialize, bail out
if self.con is None:
self.managed_confs = {}
return
try:
tab = self.con.get('what_i_managed')
print "[%s]What i managed raw value is %s" % (self.get_name(), tab)
# Protect against bad return
if not isinstance(tab, dict):
print "[%s]What i managed: Got exception: bad what_i_managed returns" % \
self.get_name(), tab
self.con = None
self.managed_confs = {}
return
            # Protect against JSON having changed int keys into strings
tab_cleaned = {}
for (k, v) in tab.iteritems():
try:
tab_cleaned[int(k)] = v
except ValueError:
print "[%s]What i managed: Got exception: bad what_i_managed returns" % \
self.get_name(), tab
# We can update our list now
self.managed_confs = tab_cleaned
except HTTPExceptions, exp:
print "EXCEPTION INwhat_i_managed", str(exp)
# A timeout is not a crime, put this case aside
# TODO : fix the timeout part?
self.con = None
print "[%s]What i managed: Got exception: %s %s %s" % \
(self.get_name(), exp, type(exp), exp.__dict__)
self.managed_confs = {}
    # Return True if the satellite said it manages this configuration
def do_i_manage(self, cfg_id, push_flavor):
# If not even the cfg_id in the managed_conf, bail out
if cfg_id not in self.managed_confs:
return False
# maybe it's in but with a false push_flavor. check it :)
return self.managed_confs[cfg_id] == push_flavor
def push_broks(self, broks):
if self.con is None:
self.create_connection()
# If the connection failed to initialize, bail out
if self.con is None:
return False
try:
# Always do a simple ping to avoid a LOOOONG lock
self.con.get('ping')
self.con.post('push_broks', {'broks': broks}, wait='long')
return True
except HTTPExceptions, exp:
self.con = None
return False
def get_external_commands(self):
if self.con is None:
self.create_connection()
# If the connection failed to initialize, bail out
if self.con is None:
return []
try:
self.con.get('ping')
tab = self.con.get('get_external_commands', wait='long')
tab = cPickle.loads(str(tab))
# Protect against bad return
if not isinstance(tab, list):
self.con = None
return []
return tab
except HTTPExceptions, exp:
self.con = None
return []
except AttributeError:
self.con = None
return []
def prepare_for_conf(self):
self.cfg = {'global': {}, 'schedulers': {}, 'arbiters': {}}
properties = self.__class__.properties
for prop, entry in properties.items():
if entry.to_send:
self.cfg['global'][prop] = getattr(self, prop)
cls = self.__class__
# Also add global values
self.cfg['global']['api_key'] = cls.api_key
self.cfg['global']['secret'] = cls.secret
self.cfg['global']['http_proxy'] = cls.http_proxy
self.cfg['global']['statsd_host'] = cls.statsd_host
self.cfg['global']['statsd_port'] = cls.statsd_port
self.cfg['global']['statsd_prefix'] = cls.statsd_prefix
self.cfg['global']['statsd_enabled'] = cls.statsd_enabled
# Some parameters for satellites are not defined in the satellites conf
# but in the global configuration. We can pass them in the global
# property
def add_global_conf_parameters(self, params):
for prop in params:
self.cfg['global'][prop] = params[prop]
def get_my_type(self):
return self.__class__.my_type
# Here for poller and reactionner. Scheduler have its own function
def give_satellite_cfg(self):
return {'port': self.port,
'address': self.address,
'use_ssl': self.use_ssl,
'hard_ssl_name_check': self.hard_ssl_name_check,
'name': self.get_name(),
'instance_id': self.id,
'active': True,
'passive': self.passive,
'poller_tags': getattr(self, 'poller_tags', []),
'reactionner_tags': getattr(self, 'reactionner_tags', []),
'api_key': self.__class__.api_key,
'secret': self.__class__.secret,
}
    # Called by pickle to serialize this link,
    # because we DO NOT WANT any object REFs in the pickled state!
def __getstate__(self):
cls = self.__class__
# id is not in *_properties
res = {'id': self.id}
for prop in cls.properties:
if prop != 'realm':
if hasattr(self, prop):
res[prop] = getattr(self, prop)
for prop in cls.running_properties:
if prop != 'con':
if hasattr(self, prop):
res[prop] = getattr(self, prop)
return res
    # Inverse of __getstate__
def __setstate__(self, state):
cls = self.__class__
self.id = state['id']
for prop in cls.properties:
if prop in state:
setattr(self, prop, state[prop])
for prop in cls.running_properties:
if prop in state:
setattr(self, prop, state[prop])
# con needs to be explicitly set:
self.con = None
class SatelliteLinks(Items):
"""Please Add a Docstring to describe the class here"""
# name_property = "name"
# inner_class = SchedulerLink
# We must have a realm property, so we find our realm
def linkify(self, realms, modules):
self.linkify_s_by_p(realms)
self.linkify_s_by_plug(modules)
def linkify_s_by_p(self, realms):
for s in self:
p_name = s.realm.strip()
# If no realm name, take the default one
if p_name == '':
p = realms.get_default()
s.realm = p
else: # find the realm one
p = realms.find_by_name(p_name)
s.realm = p
# Check if what we get is OK or not
if p is not None:
s.register_to_my_realm()
else:
err = "The %s %s got a unknown realm '%s'" % \
(s.__class__.my_type, s.get_name(), p_name)
s.configuration_errors.append(err)
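# --- Hedged standalone sketch ---------------------------------------------------
# The alive/dead bookkeeping in SatelliteLink boils down to a small state
# machine: each failed ping bumps an attempt counter until max_check_attempts
# flips the link dead, and a single success resets everything. LinkHealth is
# an illustrative distillation, not a class Shinken itself uses.
class LinkHealth(object):
    def __init__(self, max_check_attempts=3):
        self.max_check_attempts = max_check_attempts
        self.alive = True
        self.reachable = True
        self.attempt = 0
    def on_ping_ok(self):
        # Mirrors SatelliteLink.set_alive(): success clears all failure state.
        self.alive = True
        self.reachable = True
        self.attempt = 0
    def on_ping_failed(self):
        # Mirrors add_failed_check_attempt(): go unreachable, and dead once
        # the attempt counter saturates.
        self.reachable = False
        self.attempt = min(self.attempt + 1, self.max_check_attempts)
        if self.attempt == self.max_check_attempts:
            self.alive = False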
|
jmcarbo/openerp7
|
refs/heads/master
|
openerp/addons/base_report_designer/__init__.py
|
421
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import wizard
import base_report_designer
import installer
import openerp_sxw2rml
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
sysadminmatmoz/ingadhoc
|
refs/heads/8.0
|
project_task_contract/project.py
|
8
|
# -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import models, fields, api
class account_analytic_account(models.Model):
_inherit = 'account.analytic.account'
task_project_type = fields.Selection(
[('contract_project', 'Contract Own Project'),
('other_project', 'Other Project')],
'Project Type',
default='contract_project')
other_project_id = fields.Many2one(
'project.project',
'Other Project')
other_project_task_ids = fields.One2many(
'project.task', 'analytic_account_id', 'Tasks')
task_count = fields.Integer(
compute='_task_count', string="Tasks",)
@api.one
@api.depends('other_project_task_ids')
def _task_count(self):
self.task_count = len(self.other_project_task_ids)
@api.onchange('use_tasks')
def change_use_tasks(self):
if not self.use_tasks:
self.other_project_id = False
self.task_project_type = False
def _trigger_project_creation(self, cr, uid, vals, context=None):
'''
This function is used to decide if a project needs to be automatically created or not when an analytic account is created. It returns True if it needs to be so, False otherwise.
'''
if vals.get('task_project_type') != 'contract_project':
return False
return super(account_analytic_account, self)._trigger_project_creation(
cr, uid, vals, context)
def on_change_template(
self, cr, uid, ids, template_id, date_start=False, context=None):
res = super(account_analytic_account, self).on_change_template(
cr, uid, ids, template_id, date_start=date_start, context=context)
if template_id and 'value' in res:
template = self.browse(cr, uid, template_id, context=context)
res['value']['task_project_type'] = template.task_project_type
            # onchange values must be ids, not browse records
            res['value']['other_project_id'] = template.other_project_id.id
return res
class project(models.Model):
_inherit = 'project.task'
analytic_account_id = fields.Many2one(
'account.analytic.account', 'Contract/Analytic',
help="Link this task to an analytic account if you need financial management on tasks. "
"It enables you to connect tasks with budgets, planning, cost and revenue analysis, timesheets on task, etc.",
ondelete="cascade",
domain=[('type', '=', 'contract'), ('state', 'in', ['open'])],
auto_join=True)
@api.onchange('analytic_account_id')
def change_analytic_account_id(self):
if self.analytic_account_id:
self.project_id = self.analytic_account_id.other_project_id
self.partner_id = self.analytic_account_id.partner_id
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
with-git/tensorflow
|
refs/heads/master
|
tensorflow/contrib/opt/python/training/moving_average_optimizer.py
|
84
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Moving average optimizer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
from tensorflow.python.framework import ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import variables
from tensorflow.python.training import moving_averages
from tensorflow.python.training import optimizer
from tensorflow.python.training import saver
class MovingAverageOptimizer(optimizer.Optimizer):
"""Optimizer that computes a moving average of the variables.
Empirically it has been found that using the moving average of the trained
parameters of a deep network is better than using its trained parameters
  directly. This optimizer allows you to compute that moving average and to
  swap the variables at save time, so that any code outside the training loop
  uses the averaged values by default instead of the raw trained ones.
Example of usage:
```python
  # Encapsulate your favorite optimizer (here the momentum one)
  # inside the MovingAverageOptimizer.
  opt = tf.train.MomentumOptimizer(learning_rate, FLAGS.momentum)
  opt = tf.contrib.opt.MovingAverageOptimizer(opt)
  # Then create your model and all its variables.
  model = build_model()
  # Add the training op that optimizes using opt.
  # This needs to be called before swapping_saver().
  opt.minimize(cost, var_list)
  # Then create your saver like this:
  saver = opt.swapping_saver()
  # Pass it to your training loop.
slim.learning.train(
model,
...
saver=saver)
```
Note that for evaluation, the normal saver should be used instead of
swapping_saver().
"""
def __init__(self, opt, average_decay=0.9999, num_updates=None,
sequential_update=True):
"""Construct a new MovingAverageOptimizer.
Args:
opt: A tf.Optimizer that will be used to compute and apply gradients.
average_decay: Float. Decay to use to maintain the moving averages
of trained variables.
See tf.train.ExponentialMovingAverage for details.
num_updates: Optional count of number of updates applied to variables.
See tf.train.ExponentialMovingAverage for details.
sequential_update: Bool. If False, will compute the moving average at the
same time as the model is updated, potentially doing
benign data races.
If True, will update the moving average after gradient
updates.
"""
self._optimizer = opt
self._ema = moving_averages.ExponentialMovingAverage(
average_decay, num_updates=num_updates)
self._variable_map = None
self._sequential_update = sequential_update
def apply_gradients(self, grads_and_vars, global_step=None, name=None):
train_op = self._optimizer.apply_gradients(
grads_and_vars, global_step=global_step, name=name)
var_list = [x[1] for x in grads_and_vars if x[0] is not None]
self._variable_map = {}
if self._sequential_update:
with ops.control_dependencies([train_op]):
ma_op = self._ema.apply(var_list)
else:
ma_op = self._ema.apply(var_list)
for v in var_list:
v_avg = self._ema.average(v)
self._variable_map[v.op.name] = v_avg
self._variable_map[v_avg.op.name] = v
return control_flow_ops.group(train_op, ma_op, name="train_with_avg")
def swapping_saver(self, var_list=None, name='swapping_saver', **kwargs):
"""Create a saver swapping moving averages and variables.
You should use this saver during training. It will save the moving averages
of the trained parameters under the original parameter names. For
    evaluations or inference you should use a regular saver and it will
    automatically use the moving averages for the trained variables.
You must call this function after all variables have been created and after
you have called Optimizer.minimize().
Args:
var_list: List of variables to save, as per `Saver()`.
If set to None, will save all the variables that have been
created before this call.
name: The name of the saver.
**kwargs: Keyword arguments of `Saver()`.
Returns:
A `tf.train.Saver` object.
Raises:
RuntimeError: If apply_gradients or minimize has not been called before.
"""
if self._variable_map is None:
raise RuntimeError('Must call apply_gradients or minimize before '
'creating the swapping_saver')
if var_list is None:
var_list = variables.global_variables()
if not isinstance(var_list, dict):
var_list = saver.BaseSaverBuilder.OpListToDict(var_list)
# Now swap variables and moving averages
swapped_var_list = {}
for k, v in six.iteritems(var_list):
v_swap = self._variable_map.get(v.op.name, None)
if v_swap:
swapped_var_list[k] = v_swap
else:
swapped_var_list[k] = v
# Build the swapping saver.
return saver.Saver(swapped_var_list, name=name, **kwargs)
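# Editor's note: a minimal runnable sketch of the save-time swap described in
# swapping_saver()'s docstring, assuming the TF 1.x API this contrib module
# targets; the checkpoint path and variable values are illustrative only.
if __name__ == '__main__':
  import tensorflow as tf

  x = tf.Variable(1.0, name='x')
  loss = tf.square(x)
  opt = MovingAverageOptimizer(tf.train.GradientDescentOptimizer(0.1))
  train_op = opt.minimize(loss)
  # Must be created *after* minimize()/apply_gradients(), per the check above.
  swap_saver = opt.swapping_saver()
  with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for _ in range(10):
      sess.run(train_op)
    # Writes the moving average of 'x' under the name 'x', so a plain
    # tf.train.Saver() used at eval time restores the averaged value.
    swap_saver.save(sess, '/tmp/avg_model')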
|
uprare/p2pool
|
refs/heads/master
|
p2pool/bitcoin/script.py
|
282
|
from p2pool.util import math, pack
def reads_nothing(f):
return None, f
def protoPUSH(length):
return lambda f: pack.read(f, length)
def protoPUSHDATA(size_len):
def _(f):
length_str, f = pack.read(f, size_len)
length = math.string_to_natural(length_str[::-1].lstrip(chr(0)))
data, f = pack.read(f, length)
return data, f
return _
opcodes = {}
for i in xrange(256):
opcodes[i] = 'UNK_' + str(i), reads_nothing
opcodes[0] = 'PUSH', lambda f: ('', f)
for i in xrange(1, 76):
opcodes[i] = 'PUSH', protoPUSH(i)
opcodes[76] = 'PUSH', protoPUSHDATA(1)
opcodes[77] = 'PUSH', protoPUSHDATA(2)
opcodes[78] = 'PUSH', protoPUSHDATA(4)
opcodes[79] = 'PUSH', lambda f: ('\x81', f)
for i in xrange(81, 97):
opcodes[i] = 'PUSH', lambda f, _i=i: (chr(_i - 80), f)
opcodes[172] = 'CHECKSIG', reads_nothing
opcodes[173] = 'CHECKSIGVERIFY', reads_nothing
opcodes[174] = 'CHECKMULTISIG', reads_nothing
opcodes[175] = 'CHECKMULTISIGVERIFY', reads_nothing
def parse(script):
f = script, 0
while pack.size(f):
opcode_str, f = pack.read(f, 1)
opcode = ord(opcode_str)
opcode_name, read_func = opcodes[opcode]
opcode_arg, f = read_func(f)
yield opcode_name, opcode_arg
def get_sigop_count(script):
weights = {
'CHECKSIG': 1,
'CHECKSIGVERIFY': 1,
'CHECKMULTISIG': 20,
'CHECKMULTISIGVERIFY': 20,
}
return sum(weights.get(opcode_name, 0) for opcode_name, opcode_arg in parse(script))
def create_push_script(datums): # datums can be ints or strs
res = []
for datum in datums:
if isinstance(datum, (int, long)):
if datum == -1 or 1 <= datum <= 16:
res.append(chr(datum + 80))
continue
negative = datum < 0
datum = math.natural_to_string(abs(datum))
if datum and ord(datum[0]) & 128:
datum = '\x00' + datum
if negative:
datum = chr(ord(datum[0]) + 128) + datum[1:]
datum = datum[::-1]
if len(datum) < 76:
res.append(chr(len(datum)))
        elif len(datum) <= 0xff:
            res.append(chr(76))  # OP_PUSHDATA1 (a bare int would break ''.join below)
            res.append(chr(len(datum)))
        elif len(datum) <= 0xffff:
            res.append(chr(77))  # OP_PUSHDATA2
            res.append(pack.IntType(16).pack(len(datum)))
        elif len(datum) <= 0xffffffff:
            res.append(chr(78))  # OP_PUSHDATA4
            res.append(pack.IntType(32).pack(len(datum)))
else:
raise ValueError('string too long')
res.append(datum)
return ''.join(res)
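# Editor's note: a small runnable demonstration (Python 2) of the parser and
# sigop counter above on a standard pay-to-pubkey-hash script; the 20 zero
# bytes stand in for a real HASH160 digest and are illustrative only.
if __name__ == '__main__':
    p2pkh = '\x76\xa9\x14' + 20*'\x00' + '\x88\xac'  # DUP HASH160 <20B> EQUALVERIFY CHECKSIG
    for opcode_name, opcode_arg in parse(p2pkh):
        print opcode_name, repr(opcode_arg)  # non-push opcodes decode as UNK_<n>
    assert get_sigop_count(p2pkh) == 1  # one bare CHECKSIG
    # create_push_script uses a single length byte for pushes under 76 bytes.
    assert create_push_script(['\x00' * 20]) == '\x14' + '\x00' * 20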
|
Sweetgrassbuffalo/ReactionSweeGrass-v2
|
refs/heads/master
|
.meteor/local/dev_bundle/python/Lib/test/test_ftplib.py
|
18
|
"""Test script for ftplib module."""
# Modified by Giampaolo Rodola' to test FTP class, IPv6 and TLS
# environment
import ftplib
import asyncore
import asynchat
import socket
import StringIO
import errno
import os
try:
import ssl
except ImportError:
ssl = None
from unittest import TestCase, SkipTest, skipUnless
from test import test_support
from test.test_support import HOST, HOSTv6
threading = test_support.import_module('threading')
TIMEOUT = 3
# the dummy data returned by server over the data channel when
# RETR, LIST and NLST commands are issued
RETR_DATA = 'abcde12345\r\n' * 1000
LIST_DATA = 'foo\r\nbar\r\n'
NLST_DATA = 'foo\r\nbar\r\n'
class DummyDTPHandler(asynchat.async_chat):
dtp_conn_closed = False
def __init__(self, conn, baseclass):
asynchat.async_chat.__init__(self, conn)
self.baseclass = baseclass
self.baseclass.last_received_data = ''
def handle_read(self):
self.baseclass.last_received_data += self.recv(1024)
def handle_close(self):
# XXX: this method can be called many times in a row for a single
# connection, including in clear-text (non-TLS) mode.
# (behaviour witnessed with test_data_connection)
if not self.dtp_conn_closed:
self.baseclass.push('226 transfer complete')
self.close()
self.dtp_conn_closed = True
def handle_error(self):
raise
class DummyFTPHandler(asynchat.async_chat):
dtp_handler = DummyDTPHandler
def __init__(self, conn):
asynchat.async_chat.__init__(self, conn)
self.set_terminator("\r\n")
self.in_buffer = []
self.dtp = None
self.last_received_cmd = None
self.last_received_data = ''
self.next_response = ''
self.rest = None
self.next_retr_data = RETR_DATA
self.push('220 welcome')
def collect_incoming_data(self, data):
self.in_buffer.append(data)
def found_terminator(self):
line = ''.join(self.in_buffer)
self.in_buffer = []
if self.next_response:
self.push(self.next_response)
self.next_response = ''
cmd = line.split(' ')[0].lower()
self.last_received_cmd = cmd
space = line.find(' ')
if space != -1:
arg = line[space + 1:]
else:
arg = ""
if hasattr(self, 'cmd_' + cmd):
method = getattr(self, 'cmd_' + cmd)
method(arg)
else:
self.push('550 command "%s" not understood.' %cmd)
def handle_error(self):
raise
def push(self, data):
asynchat.async_chat.push(self, data + '\r\n')
def cmd_port(self, arg):
addr = map(int, arg.split(','))
ip = '%d.%d.%d.%d' %tuple(addr[:4])
port = (addr[4] * 256) + addr[5]
s = socket.create_connection((ip, port), timeout=10)
self.dtp = self.dtp_handler(s, baseclass=self)
self.push('200 active data connection established')
def cmd_pasv(self, arg):
sock = socket.socket()
sock.bind((self.socket.getsockname()[0], 0))
sock.listen(5)
sock.settimeout(10)
ip, port = sock.getsockname()[:2]
ip = ip.replace('.', ',')
p1, p2 = divmod(port, 256)
self.push('227 entering passive mode (%s,%d,%d)' %(ip, p1, p2))
conn, addr = sock.accept()
self.dtp = self.dtp_handler(conn, baseclass=self)
def cmd_eprt(self, arg):
af, ip, port = arg.split(arg[0])[1:-1]
port = int(port)
s = socket.create_connection((ip, port), timeout=10)
self.dtp = self.dtp_handler(s, baseclass=self)
self.push('200 active data connection established')
def cmd_epsv(self, arg):
sock = socket.socket(socket.AF_INET6)
sock.bind((self.socket.getsockname()[0], 0))
sock.listen(5)
sock.settimeout(10)
port = sock.getsockname()[1]
self.push('229 entering extended passive mode (|||%d|)' %port)
conn, addr = sock.accept()
self.dtp = self.dtp_handler(conn, baseclass=self)
def cmd_echo(self, arg):
# sends back the received string (used by the test suite)
self.push(arg)
def cmd_user(self, arg):
self.push('331 username ok')
def cmd_pass(self, arg):
self.push('230 password ok')
def cmd_acct(self, arg):
self.push('230 acct ok')
def cmd_rnfr(self, arg):
self.push('350 rnfr ok')
def cmd_rnto(self, arg):
self.push('250 rnto ok')
def cmd_dele(self, arg):
self.push('250 dele ok')
def cmd_cwd(self, arg):
self.push('250 cwd ok')
def cmd_size(self, arg):
self.push('250 1000')
def cmd_mkd(self, arg):
self.push('257 "%s"' %arg)
def cmd_rmd(self, arg):
self.push('250 rmd ok')
def cmd_pwd(self, arg):
self.push('257 "pwd ok"')
def cmd_type(self, arg):
self.push('200 type ok')
def cmd_quit(self, arg):
self.push('221 quit ok')
self.close()
def cmd_stor(self, arg):
self.push('125 stor ok')
def cmd_rest(self, arg):
self.rest = arg
self.push('350 rest ok')
def cmd_retr(self, arg):
self.push('125 retr ok')
if self.rest is not None:
offset = int(self.rest)
else:
offset = 0
self.dtp.push(self.next_retr_data[offset:])
self.dtp.close_when_done()
self.rest = None
def cmd_list(self, arg):
self.push('125 list ok')
self.dtp.push(LIST_DATA)
self.dtp.close_when_done()
def cmd_nlst(self, arg):
self.push('125 nlst ok')
self.dtp.push(NLST_DATA)
self.dtp.close_when_done()
def cmd_setlongretr(self, arg):
# For testing. Next RETR will return long line.
self.next_retr_data = 'x' * int(arg)
self.push('125 setlongretr ok')
class DummyFTPServer(asyncore.dispatcher, threading.Thread):
handler = DummyFTPHandler
def __init__(self, address, af=socket.AF_INET):
threading.Thread.__init__(self)
asyncore.dispatcher.__init__(self)
self.create_socket(af, socket.SOCK_STREAM)
self.bind(address)
self.listen(5)
self.active = False
self.active_lock = threading.Lock()
self.host, self.port = self.socket.getsockname()[:2]
self.handler_instance = None
def start(self):
assert not self.active
self.__flag = threading.Event()
threading.Thread.start(self)
self.__flag.wait()
def run(self):
self.active = True
self.__flag.set()
while self.active and asyncore.socket_map:
self.active_lock.acquire()
asyncore.loop(timeout=0.1, count=1)
self.active_lock.release()
asyncore.close_all(ignore_all=True)
def stop(self):
assert self.active
self.active = False
self.join()
def handle_accept(self):
conn, addr = self.accept()
self.handler_instance = self.handler(conn)
def handle_connect(self):
self.close()
handle_read = handle_connect
def writable(self):
return 0
def handle_error(self):
raise
if ssl is not None:
CERTFILE = os.path.join(os.path.dirname(__file__), "keycert3.pem")
CAFILE = os.path.join(os.path.dirname(__file__), "pycacert.pem")
class SSLConnection(object, asyncore.dispatcher):
"""An asyncore.dispatcher subclass supporting TLS/SSL."""
_ssl_accepting = False
_ssl_closing = False
def secure_connection(self):
socket = ssl.wrap_socket(self.socket, suppress_ragged_eofs=False,
certfile=CERTFILE, server_side=True,
do_handshake_on_connect=False,
ssl_version=ssl.PROTOCOL_SSLv23)
self.del_channel()
self.set_socket(socket)
self._ssl_accepting = True
def _do_ssl_handshake(self):
try:
self.socket.do_handshake()
except ssl.SSLError as err:
if err.args[0] in (ssl.SSL_ERROR_WANT_READ,
ssl.SSL_ERROR_WANT_WRITE):
return
elif err.args[0] == ssl.SSL_ERROR_EOF:
return self.handle_close()
raise
            except socket.error as err:
                if err.args[0] == errno.ECONNABORTED:
                    return self.handle_close()
            else:
                # Handshake completed cleanly; leave the accepting state.
                self._ssl_accepting = False
def _do_ssl_shutdown(self):
self._ssl_closing = True
try:
self.socket = self.socket.unwrap()
except ssl.SSLError as err:
if err.args[0] in (ssl.SSL_ERROR_WANT_READ,
ssl.SSL_ERROR_WANT_WRITE):
return
except socket.error as err:
# Any "socket error" corresponds to a SSL_ERROR_SYSCALL return
# from OpenSSL's SSL_shutdown(), corresponding to a
# closed socket condition. See also:
# http://www.mail-archive.com/openssl-users@openssl.org/msg60710.html
pass
self._ssl_closing = False
            if getattr(self, '_ccc', False) is False:
                super(SSLConnection, self).close()
def handle_read_event(self):
if self._ssl_accepting:
self._do_ssl_handshake()
elif self._ssl_closing:
self._do_ssl_shutdown()
else:
super(SSLConnection, self).handle_read_event()
def handle_write_event(self):
if self._ssl_accepting:
self._do_ssl_handshake()
elif self._ssl_closing:
self._do_ssl_shutdown()
else:
super(SSLConnection, self).handle_write_event()
def send(self, data):
try:
return super(SSLConnection, self).send(data)
except ssl.SSLError as err:
if err.args[0] in (ssl.SSL_ERROR_EOF, ssl.SSL_ERROR_ZERO_RETURN,
ssl.SSL_ERROR_WANT_READ,
ssl.SSL_ERROR_WANT_WRITE):
return 0
raise
def recv(self, buffer_size):
try:
return super(SSLConnection, self).recv(buffer_size)
except ssl.SSLError as err:
if err.args[0] in (ssl.SSL_ERROR_WANT_READ,
ssl.SSL_ERROR_WANT_WRITE):
return b''
if err.args[0] in (ssl.SSL_ERROR_EOF, ssl.SSL_ERROR_ZERO_RETURN):
self.handle_close()
return b''
raise
def handle_error(self):
raise
def close(self):
if (isinstance(self.socket, ssl.SSLSocket) and
self.socket._sslobj is not None):
self._do_ssl_shutdown()
else:
super(SSLConnection, self).close()
class DummyTLS_DTPHandler(SSLConnection, DummyDTPHandler):
"""A DummyDTPHandler subclass supporting TLS/SSL."""
def __init__(self, conn, baseclass):
DummyDTPHandler.__init__(self, conn, baseclass)
if self.baseclass.secure_data_channel:
self.secure_connection()
class DummyTLS_FTPHandler(SSLConnection, DummyFTPHandler):
"""A DummyFTPHandler subclass supporting TLS/SSL."""
dtp_handler = DummyTLS_DTPHandler
def __init__(self, conn):
DummyFTPHandler.__init__(self, conn)
self.secure_data_channel = False
def cmd_auth(self, line):
"""Set up secure control channel."""
self.push('234 AUTH TLS successful')
self.secure_connection()
def cmd_pbsz(self, line):
"""Negotiate size of buffer for secure data transfer.
For TLS/SSL the only valid value for the parameter is '0'.
Any other value is accepted but ignored.
"""
self.push('200 PBSZ=0 successful.')
def cmd_prot(self, line):
"""Setup un/secure data channel."""
arg = line.upper()
if arg == 'C':
self.push('200 Protection set to Clear')
self.secure_data_channel = False
elif arg == 'P':
self.push('200 Protection set to Private')
self.secure_data_channel = True
else:
self.push("502 Unrecognized PROT type (use C or P).")
class DummyTLS_FTPServer(DummyFTPServer):
handler = DummyTLS_FTPHandler
class TestFTPClass(TestCase):
def setUp(self):
self.server = DummyFTPServer((HOST, 0))
self.server.start()
self.client = ftplib.FTP(timeout=10)
self.client.connect(self.server.host, self.server.port)
def tearDown(self):
self.client.close()
self.server.stop()
def test_getwelcome(self):
self.assertEqual(self.client.getwelcome(), '220 welcome')
def test_sanitize(self):
self.assertEqual(self.client.sanitize('foo'), repr('foo'))
self.assertEqual(self.client.sanitize('pass 12345'), repr('pass *****'))
self.assertEqual(self.client.sanitize('PASS 12345'), repr('PASS *****'))
def test_exceptions(self):
self.assertRaises(ftplib.error_temp, self.client.sendcmd, 'echo 400')
self.assertRaises(ftplib.error_temp, self.client.sendcmd, 'echo 499')
self.assertRaises(ftplib.error_perm, self.client.sendcmd, 'echo 500')
self.assertRaises(ftplib.error_perm, self.client.sendcmd, 'echo 599')
self.assertRaises(ftplib.error_proto, self.client.sendcmd, 'echo 999')
def test_all_errors(self):
exceptions = (ftplib.error_reply, ftplib.error_temp, ftplib.error_perm,
ftplib.error_proto, ftplib.Error, IOError, EOFError)
for x in exceptions:
try:
raise x('exception not included in all_errors set')
except ftplib.all_errors:
pass
def test_set_pasv(self):
# passive mode is supposed to be enabled by default
self.assertTrue(self.client.passiveserver)
self.client.set_pasv(True)
self.assertTrue(self.client.passiveserver)
self.client.set_pasv(False)
self.assertFalse(self.client.passiveserver)
def test_voidcmd(self):
self.client.voidcmd('echo 200')
self.client.voidcmd('echo 299')
self.assertRaises(ftplib.error_reply, self.client.voidcmd, 'echo 199')
self.assertRaises(ftplib.error_reply, self.client.voidcmd, 'echo 300')
def test_login(self):
self.client.login()
def test_acct(self):
self.client.acct('passwd')
def test_rename(self):
self.client.rename('a', 'b')
self.server.handler_instance.next_response = '200'
self.assertRaises(ftplib.error_reply, self.client.rename, 'a', 'b')
def test_delete(self):
self.client.delete('foo')
self.server.handler_instance.next_response = '199'
self.assertRaises(ftplib.error_reply, self.client.delete, 'foo')
def test_size(self):
self.client.size('foo')
def test_mkd(self):
dir = self.client.mkd('/foo')
self.assertEqual(dir, '/foo')
def test_rmd(self):
self.client.rmd('foo')
def test_cwd(self):
dir = self.client.cwd('/foo')
self.assertEqual(dir, '250 cwd ok')
def test_pwd(self):
dir = self.client.pwd()
self.assertEqual(dir, 'pwd ok')
def test_quit(self):
self.assertEqual(self.client.quit(), '221 quit ok')
# Ensure the connection gets closed; sock attribute should be None
self.assertEqual(self.client.sock, None)
def test_retrbinary(self):
received = []
self.client.retrbinary('retr', received.append)
self.assertEqual(''.join(received), RETR_DATA)
def test_retrbinary_rest(self):
for rest in (0, 10, 20):
received = []
self.client.retrbinary('retr', received.append, rest=rest)
self.assertEqual(''.join(received), RETR_DATA[rest:],
msg='rest test case %d %d %d' % (rest,
len(''.join(received)),
len(RETR_DATA[rest:])))
def test_retrlines(self):
received = []
self.client.retrlines('retr', received.append)
self.assertEqual(''.join(received), RETR_DATA.replace('\r\n', ''))
def test_storbinary(self):
f = StringIO.StringIO(RETR_DATA)
self.client.storbinary('stor', f)
self.assertEqual(self.server.handler_instance.last_received_data, RETR_DATA)
# test new callback arg
flag = []
f.seek(0)
self.client.storbinary('stor', f, callback=lambda x: flag.append(None))
self.assertTrue(flag)
def test_storbinary_rest(self):
f = StringIO.StringIO(RETR_DATA)
for r in (30, '30'):
f.seek(0)
self.client.storbinary('stor', f, rest=r)
self.assertEqual(self.server.handler_instance.rest, str(r))
def test_storlines(self):
f = StringIO.StringIO(RETR_DATA.replace('\r\n', '\n'))
self.client.storlines('stor', f)
self.assertEqual(self.server.handler_instance.last_received_data, RETR_DATA)
# test new callback arg
flag = []
f.seek(0)
self.client.storlines('stor foo', f, callback=lambda x: flag.append(None))
self.assertTrue(flag)
def test_nlst(self):
self.client.nlst()
self.assertEqual(self.client.nlst(), NLST_DATA.split('\r\n')[:-1])
def test_dir(self):
l = []
self.client.dir(lambda x: l.append(x))
self.assertEqual(''.join(l), LIST_DATA.replace('\r\n', ''))
def test_makeport(self):
self.client.makeport()
# IPv4 is in use, just make sure send_eprt has not been used
self.assertEqual(self.server.handler_instance.last_received_cmd, 'port')
def test_makepasv(self):
host, port = self.client.makepasv()
conn = socket.create_connection((host, port), 10)
conn.close()
# IPv4 is in use, just make sure send_epsv has not been used
self.assertEqual(self.server.handler_instance.last_received_cmd, 'pasv')
def test_line_too_long(self):
self.assertRaises(ftplib.Error, self.client.sendcmd,
'x' * self.client.maxline * 2)
def test_retrlines_too_long(self):
self.client.sendcmd('SETLONGRETR %d' % (self.client.maxline * 2))
received = []
self.assertRaises(ftplib.Error,
self.client.retrlines, 'retr', received.append)
def test_storlines_too_long(self):
f = StringIO.StringIO('x' * self.client.maxline * 2)
self.assertRaises(ftplib.Error, self.client.storlines, 'stor', f)
@skipUnless(socket.has_ipv6, "IPv6 not enabled")
class TestIPv6Environment(TestCase):
@classmethod
def setUpClass(cls):
try:
DummyFTPServer((HOST, 0), af=socket.AF_INET6)
except socket.error:
raise SkipTest("IPv6 not enabled")
def setUp(self):
self.server = DummyFTPServer((HOSTv6, 0), af=socket.AF_INET6)
self.server.start()
self.client = ftplib.FTP()
self.client.connect(self.server.host, self.server.port)
def tearDown(self):
self.client.close()
self.server.stop()
def test_af(self):
self.assertEqual(self.client.af, socket.AF_INET6)
def test_makeport(self):
self.client.makeport()
self.assertEqual(self.server.handler_instance.last_received_cmd, 'eprt')
def test_makepasv(self):
host, port = self.client.makepasv()
conn = socket.create_connection((host, port), 10)
conn.close()
self.assertEqual(self.server.handler_instance.last_received_cmd, 'epsv')
def test_transfer(self):
def retr():
received = []
self.client.retrbinary('retr', received.append)
self.assertEqual(''.join(received), RETR_DATA)
self.client.set_pasv(True)
retr()
self.client.set_pasv(False)
retr()
@skipUnless(ssl, "SSL not available")
class TestTLS_FTPClassMixin(TestFTPClass):
"""Repeat TestFTPClass tests starting the TLS layer for both control
and data connections first.
"""
def setUp(self):
self.server = DummyTLS_FTPServer((HOST, 0))
self.server.start()
self.client = ftplib.FTP_TLS(timeout=10)
self.client.connect(self.server.host, self.server.port)
# enable TLS
self.client.auth()
self.client.prot_p()
@skipUnless(ssl, "SSL not available")
class TestTLS_FTPClass(TestCase):
"""Specific TLS_FTP class tests."""
def setUp(self):
self.server = DummyTLS_FTPServer((HOST, 0))
self.server.start()
self.client = ftplib.FTP_TLS(timeout=TIMEOUT)
self.client.connect(self.server.host, self.server.port)
def tearDown(self):
self.client.close()
self.server.stop()
def test_control_connection(self):
self.assertNotIsInstance(self.client.sock, ssl.SSLSocket)
self.client.auth()
self.assertIsInstance(self.client.sock, ssl.SSLSocket)
def test_data_connection(self):
# clear text
sock = self.client.transfercmd('list')
self.assertNotIsInstance(sock, ssl.SSLSocket)
sock.close()
self.assertEqual(self.client.voidresp(), "226 transfer complete")
# secured, after PROT P
self.client.prot_p()
sock = self.client.transfercmd('list')
self.assertIsInstance(sock, ssl.SSLSocket)
sock.close()
self.assertEqual(self.client.voidresp(), "226 transfer complete")
# PROT C is issued, the connection must be in cleartext again
self.client.prot_c()
sock = self.client.transfercmd('list')
self.assertNotIsInstance(sock, ssl.SSLSocket)
sock.close()
self.assertEqual(self.client.voidresp(), "226 transfer complete")
def test_login(self):
# login() is supposed to implicitly secure the control connection
self.assertNotIsInstance(self.client.sock, ssl.SSLSocket)
self.client.login()
self.assertIsInstance(self.client.sock, ssl.SSLSocket)
# make sure that AUTH TLS doesn't get issued again
self.client.login()
def test_auth_issued_twice(self):
self.client.auth()
self.assertRaises(ValueError, self.client.auth)
def test_auth_ssl(self):
try:
self.client.ssl_version = ssl.PROTOCOL_SSLv23
self.client.auth()
self.assertRaises(ValueError, self.client.auth)
finally:
self.client.ssl_version = ssl.PROTOCOL_TLSv1
def test_context(self):
self.client.quit()
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
self.assertRaises(ValueError, ftplib.FTP_TLS, keyfile=CERTFILE,
context=ctx)
self.assertRaises(ValueError, ftplib.FTP_TLS, certfile=CERTFILE,
context=ctx)
self.assertRaises(ValueError, ftplib.FTP_TLS, certfile=CERTFILE,
keyfile=CERTFILE, context=ctx)
self.client = ftplib.FTP_TLS(context=ctx, timeout=TIMEOUT)
self.client.connect(self.server.host, self.server.port)
self.assertNotIsInstance(self.client.sock, ssl.SSLSocket)
self.client.auth()
self.assertIs(self.client.sock.context, ctx)
self.assertIsInstance(self.client.sock, ssl.SSLSocket)
self.client.prot_p()
sock = self.client.transfercmd('list')
try:
self.assertIs(sock.context, ctx)
self.assertIsInstance(sock, ssl.SSLSocket)
finally:
sock.close()
def test_check_hostname(self):
self.client.quit()
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
ctx.verify_mode = ssl.CERT_REQUIRED
ctx.check_hostname = True
ctx.load_verify_locations(CAFILE)
self.client = ftplib.FTP_TLS(context=ctx, timeout=TIMEOUT)
# 127.0.0.1 doesn't match SAN
self.client.connect(self.server.host, self.server.port)
with self.assertRaises(ssl.CertificateError):
self.client.auth()
# exception quits connection
self.client.connect(self.server.host, self.server.port)
self.client.prot_p()
with self.assertRaises(ssl.CertificateError):
self.client.transfercmd("list").close()
self.client.quit()
self.client.connect("localhost", self.server.port)
self.client.auth()
self.client.quit()
self.client.connect("localhost", self.server.port)
self.client.prot_p()
self.client.transfercmd("list").close()
class TestTimeouts(TestCase):
def setUp(self):
self.evt = threading.Event()
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.settimeout(10)
self.port = test_support.bind_port(self.sock)
threading.Thread(target=self.server, args=(self.evt,self.sock)).start()
# Wait for the server to be ready.
self.evt.wait()
self.evt.clear()
ftplib.FTP.port = self.port
def tearDown(self):
self.evt.wait()
def server(self, evt, serv):
# This method sets the evt 3 times:
# 1) when the connection is ready to be accepted.
# 2) when it is safe for the caller to close the connection
# 3) when we have closed the socket
serv.listen(5)
# (1) Signal the caller that we are ready to accept the connection.
evt.set()
try:
conn, addr = serv.accept()
except socket.timeout:
pass
else:
conn.send("1 Hola mundo\n")
# (2) Signal the caller that it is safe to close the socket.
evt.set()
conn.close()
finally:
serv.close()
# (3) Signal the caller that we are done.
evt.set()
def testTimeoutDefault(self):
# default -- use global socket timeout
self.assertIsNone(socket.getdefaulttimeout())
socket.setdefaulttimeout(30)
try:
ftp = ftplib.FTP(HOST)
finally:
socket.setdefaulttimeout(None)
self.assertEqual(ftp.sock.gettimeout(), 30)
self.evt.wait()
ftp.close()
def testTimeoutNone(self):
# no timeout -- do not use global socket timeout
self.assertIsNone(socket.getdefaulttimeout())
socket.setdefaulttimeout(30)
try:
ftp = ftplib.FTP(HOST, timeout=None)
finally:
socket.setdefaulttimeout(None)
self.assertIsNone(ftp.sock.gettimeout())
self.evt.wait()
ftp.close()
def testTimeoutValue(self):
# a value
ftp = ftplib.FTP(HOST, timeout=30)
self.assertEqual(ftp.sock.gettimeout(), 30)
self.evt.wait()
ftp.close()
def testTimeoutConnect(self):
ftp = ftplib.FTP()
ftp.connect(HOST, timeout=30)
self.assertEqual(ftp.sock.gettimeout(), 30)
self.evt.wait()
ftp.close()
def testTimeoutDifferentOrder(self):
ftp = ftplib.FTP(timeout=30)
ftp.connect(HOST)
self.assertEqual(ftp.sock.gettimeout(), 30)
self.evt.wait()
ftp.close()
def testTimeoutDirectAccess(self):
ftp = ftplib.FTP()
ftp.timeout = 30
ftp.connect(HOST)
self.assertEqual(ftp.sock.gettimeout(), 30)
self.evt.wait()
ftp.close()
def test_main():
tests = [TestFTPClass, TestTimeouts,
TestIPv6Environment,
TestTLS_FTPClassMixin, TestTLS_FTPClass]
thread_info = test_support.threading_setup()
try:
test_support.run_unittest(*tests)
finally:
test_support.threading_cleanup(*thread_info)
if __name__ == '__main__':
test_main()
|
Beauhurst/django
|
refs/heads/master
|
tests/utils_tests/test_termcolors.py
|
134
|
import unittest
from django.utils.termcolors import (
DARK_PALETTE, DEFAULT_PALETTE, LIGHT_PALETTE, NOCOLOR_PALETTE, PALETTES,
colorize, parse_color_setting,
)
class TermColorTests(unittest.TestCase):
def test_empty_string(self):
self.assertEqual(parse_color_setting(''), PALETTES[DEFAULT_PALETTE])
def test_simple_palette(self):
self.assertEqual(parse_color_setting('light'), PALETTES[LIGHT_PALETTE])
self.assertEqual(parse_color_setting('dark'), PALETTES[DARK_PALETTE])
self.assertIsNone(parse_color_setting('nocolor'))
def test_fg(self):
self.assertEqual(
parse_color_setting('error=green'),
dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green'})
)
def test_fg_bg(self):
self.assertEqual(
parse_color_setting('error=green/blue'),
dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'bg': 'blue'})
)
def test_fg_opts(self):
self.assertEqual(
parse_color_setting('error=green,blink'),
dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'opts': ('blink',)})
)
self.assertEqual(
parse_color_setting('error=green,bold,blink'),
dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'opts': ('blink', 'bold')})
)
def test_fg_bg_opts(self):
self.assertEqual(
parse_color_setting('error=green/blue,blink'),
dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'bg': 'blue', 'opts': ('blink',)})
)
self.assertEqual(
parse_color_setting('error=green/blue,bold,blink'),
dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'bg': 'blue', 'opts': ('blink', 'bold')})
)
def test_override_palette(self):
self.assertEqual(
parse_color_setting('light;error=green'),
dict(PALETTES[LIGHT_PALETTE], ERROR={'fg': 'green'})
)
def test_override_nocolor(self):
self.assertEqual(
parse_color_setting('nocolor;error=green'),
dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green'})
)
def test_reverse_override(self):
self.assertEqual(parse_color_setting('error=green;light'), PALETTES[LIGHT_PALETTE])
def test_multiple_roles(self):
self.assertEqual(
parse_color_setting('error=green;sql_field=blue'),
dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green'}, SQL_FIELD={'fg': 'blue'})
)
def test_override_with_multiple_roles(self):
self.assertEqual(
parse_color_setting('light;error=green;sql_field=blue'),
dict(PALETTES[LIGHT_PALETTE], ERROR={'fg': 'green'}, SQL_FIELD={'fg': 'blue'})
)
def test_empty_definition(self):
self.assertIsNone(parse_color_setting(';'))
self.assertEqual(parse_color_setting('light;'), PALETTES[LIGHT_PALETTE])
self.assertIsNone(parse_color_setting(';;;'))
def test_empty_options(self):
self.assertEqual(
parse_color_setting('error=green,'),
dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green'})
)
self.assertEqual(
parse_color_setting('error=green,,,'),
dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green'})
)
self.assertEqual(
parse_color_setting('error=green,,blink,,'),
dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'opts': ('blink',)})
)
def test_bad_palette(self):
self.assertIsNone(parse_color_setting('unknown'))
def test_bad_role(self):
self.assertIsNone(parse_color_setting('unknown='))
self.assertIsNone(parse_color_setting('unknown=green'))
self.assertEqual(
parse_color_setting('unknown=green;sql_field=blue'),
dict(PALETTES[NOCOLOR_PALETTE], SQL_FIELD={'fg': 'blue'})
)
def test_bad_color(self):
self.assertIsNone(parse_color_setting('error='))
self.assertEqual(
parse_color_setting('error=;sql_field=blue'),
dict(PALETTES[NOCOLOR_PALETTE], SQL_FIELD={'fg': 'blue'})
)
self.assertIsNone(parse_color_setting('error=unknown'))
self.assertEqual(
parse_color_setting('error=unknown;sql_field=blue'),
dict(PALETTES[NOCOLOR_PALETTE], SQL_FIELD={'fg': 'blue'})
)
self.assertEqual(
parse_color_setting('error=green/unknown'),
dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green'})
)
self.assertEqual(
parse_color_setting('error=green/blue/something'),
dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'bg': 'blue'})
)
self.assertEqual(
parse_color_setting('error=green/blue/something,blink'),
dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'bg': 'blue', 'opts': ('blink',)})
)
def test_bad_option(self):
self.assertEqual(
parse_color_setting('error=green,unknown'),
dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green'})
)
self.assertEqual(
parse_color_setting('error=green,unknown,blink'),
dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'opts': ('blink',)})
)
def test_role_case(self):
self.assertEqual(
parse_color_setting('ERROR=green'),
dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green'})
)
self.assertEqual(
parse_color_setting('eRrOr=green'),
dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green'})
)
def test_color_case(self):
self.assertEqual(
parse_color_setting('error=GREEN'),
dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green'})
)
self.assertEqual(
parse_color_setting('error=GREEN/BLUE'),
dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'bg': 'blue'})
)
self.assertEqual(
parse_color_setting('error=gReEn'),
dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green'})
)
self.assertEqual(
parse_color_setting('error=gReEn/bLuE'),
dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'bg': 'blue'})
)
def test_opts_case(self):
self.assertEqual(
parse_color_setting('error=green,BLINK'),
dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'opts': ('blink',)})
)
self.assertEqual(
parse_color_setting('error=green,bLiNk'),
dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'opts': ('blink',)})
)
def test_colorize_empty_text(self):
self.assertEqual(colorize(text=None), '\x1b[m\x1b[0m')
self.assertEqual(colorize(text=''), '\x1b[m\x1b[0m')
        self.assertEqual(colorize(text=None, opts=('noreset',)), '\x1b[m')
        self.assertEqual(colorize(text='', opts=('noreset',)), '\x1b[m')
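# Editor's note: a small runnable illustration of the setting grammar the
# tests above exercise ('palette;role=fg/bg,opt,...'); the palette and color
# choices here are illustrative only.
if __name__ == '__main__':
    scheme = parse_color_setting('light;error=yellow/blue,blink')
    assert scheme['ERROR'] == {'fg': 'yellow', 'bg': 'blue', 'opts': ('blink',)}
    # colorize() accepts the same fg/bg/opts keys that the parser emits.
    print(colorize('boom', **scheme['ERROR']))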
|
gauribhoite/personfinder
|
refs/heads/master
|
env/google_appengine/lib/django-1.4/django/contrib/sitemaps/tests/urls/http.py
|
109
|
from datetime import datetime
from django.conf.urls import patterns, url
from django.contrib.sitemaps import Sitemap, GenericSitemap, FlatPageSitemap, views
from django.contrib.auth.models import User
from django.views.decorators.cache import cache_page
class SimpleSitemap(Sitemap):
changefreq = "never"
priority = 0.5
location = '/location/'
lastmod = datetime.now()
def items(self):
return [object()]
simple_sitemaps = {
'simple': SimpleSitemap,
}
generic_sitemaps = {
'generic': GenericSitemap({'queryset': User.objects.all()}),
}
flatpage_sitemaps = {
'flatpages': FlatPageSitemap,
}
urlpatterns = patterns('django.contrib.sitemaps.views',
(r'^simple/index\.xml$', 'index', {'sitemaps': simple_sitemaps}),
(r'^simple/custom-index\.xml$', 'index',
{'sitemaps': simple_sitemaps, 'template_name': 'custom_sitemap_index.xml'}),
(r'^simple/sitemap-(?P<section>.+)\.xml$', 'sitemap',
{'sitemaps': simple_sitemaps}),
(r'^simple/sitemap\.xml$', 'sitemap', {'sitemaps': simple_sitemaps}),
(r'^simple/custom-sitemap\.xml$', 'sitemap',
{'sitemaps': simple_sitemaps, 'template_name': 'custom_sitemap.xml'}),
(r'^generic/sitemap\.xml$', 'sitemap', {'sitemaps': generic_sitemaps}),
(r'^flatpages/sitemap\.xml$', 'sitemap', {'sitemaps': flatpage_sitemaps}),
url(r'^cached/index\.xml$', cache_page(1)(views.index),
{'sitemaps': simple_sitemaps, 'sitemap_url_name': 'cached_sitemap'}),
url(r'^cached/sitemap-(?P<section>.+)\.xml', cache_page(1)(views.sitemap),
{'sitemaps': simple_sitemaps}, name='cached_sitemap')
)
|
BackupGGCode/python-for-android
|
refs/heads/master
|
python3-alpha/python3-src/Lib/distutils/tests/test_dir_util.py
|
49
|
"""Tests for distutils.dir_util."""
import unittest
import os
import stat
import shutil
import sys
from distutils.dir_util import (mkpath, remove_tree, create_tree, copy_tree,
ensure_relative)
from distutils import log
from distutils.tests import support
from test.support import run_unittest
class DirUtilTestCase(support.TempdirManager, unittest.TestCase):
def _log(self, msg, *args):
if len(args) > 0:
self._logs.append(msg % args)
else:
self._logs.append(msg)
def setUp(self):
super(DirUtilTestCase, self).setUp()
self._logs = []
tmp_dir = self.mkdtemp()
self.root_target = os.path.join(tmp_dir, 'deep')
self.target = os.path.join(self.root_target, 'here')
self.target2 = os.path.join(tmp_dir, 'deep2')
self.old_log = log.info
log.info = self._log
def tearDown(self):
log.info = self.old_log
super(DirUtilTestCase, self).tearDown()
def test_mkpath_remove_tree_verbosity(self):
mkpath(self.target, verbose=0)
wanted = []
self.assertEqual(self._logs, wanted)
remove_tree(self.root_target, verbose=0)
mkpath(self.target, verbose=1)
wanted = ['creating %s' % self.root_target,
'creating %s' % self.target]
self.assertEqual(self._logs, wanted)
self._logs = []
remove_tree(self.root_target, verbose=1)
wanted = ["removing '%s' (and everything under it)" % self.root_target]
self.assertEqual(self._logs, wanted)
@unittest.skipIf(sys.platform.startswith('win'),
"This test is only appropriate for POSIX-like systems.")
def test_mkpath_with_custom_mode(self):
# Get and set the current umask value for testing mode bits.
umask = os.umask(0o002)
os.umask(umask)
mkpath(self.target, 0o700)
self.assertEqual(
stat.S_IMODE(os.stat(self.target).st_mode), 0o700 & ~umask)
mkpath(self.target2, 0o555)
self.assertEqual(
stat.S_IMODE(os.stat(self.target2).st_mode), 0o555 & ~umask)
def test_create_tree_verbosity(self):
create_tree(self.root_target, ['one', 'two', 'three'], verbose=0)
self.assertEqual(self._logs, [])
remove_tree(self.root_target, verbose=0)
wanted = ['creating %s' % self.root_target]
create_tree(self.root_target, ['one', 'two', 'three'], verbose=1)
self.assertEqual(self._logs, wanted)
remove_tree(self.root_target, verbose=0)
def test_copy_tree_verbosity(self):
mkpath(self.target, verbose=0)
copy_tree(self.target, self.target2, verbose=0)
self.assertEqual(self._logs, [])
remove_tree(self.root_target, verbose=0)
mkpath(self.target, verbose=0)
a_file = os.path.join(self.target, 'ok.txt')
f = open(a_file, 'w')
try:
f.write('some content')
finally:
f.close()
wanted = ['copying %s -> %s' % (a_file, self.target2)]
copy_tree(self.target, self.target2, verbose=1)
self.assertEqual(self._logs, wanted)
remove_tree(self.root_target, verbose=0)
remove_tree(self.target2, verbose=0)
def test_ensure_relative(self):
if os.sep == '/':
self.assertEqual(ensure_relative('/home/foo'), 'home/foo')
self.assertEqual(ensure_relative('some/path'), 'some/path')
else: # \\
self.assertEqual(ensure_relative('c:\\home\\foo'), 'c:home\\foo')
self.assertEqual(ensure_relative('home\\foo'), 'home\\foo')
def test_suite():
return unittest.makeSuite(DirUtilTestCase)
if __name__ == "__main__":
run_unittest(test_suite())
|