repo_name stringlengths 5 100 | ref stringlengths 12 67 | path stringlengths 4 244 | copies stringlengths 1 8 | content stringlengths 0 1.05M ⌀ |
|---|---|---|---|---|
katiewsimon/JP-Morgan-Hackathon-Project | refs/heads/master | jp_server/lib/python2.7/site-packages/pip/_vendor/html5lib/treeadapters/sax.py | 1835 | from __future__ import absolute_import, division, unicode_literals
from xml.sax.xmlreader import AttributesNSImpl
from ..constants import adjustForeignAttributes, unadjustForeignAttributes
# Map attribute prefix -> namespace, derived from the foreign-attribute
# adjustment table (whose values are (prefix, localName, namespace) tuples).
prefix_mapping = {
    prefix: namespace
    for prefix, local_name, namespace in adjustForeignAttributes.values()
    if prefix is not None
}
def to_sax(walker, handler):
    """Call SAX-like content handler based on treewalker walker"""
    handler.startDocument()
    for prefix, namespace in prefix_mapping.items():
        handler.startPrefixMapping(prefix, namespace)

    for token in walker:
        token_type = token["type"]
        if token_type == "Doctype":
            # SAX content handlers have no doctype hook here; skip it.
            continue
        elif token_type in ("StartTag", "EmptyTag"):
            qname = (token["namespace"], token["name"])
            attrs = AttributesNSImpl(token["data"], unadjustForeignAttributes)
            handler.startElementNS(qname, token["name"], attrs)
            # An empty tag is reported as an immediate start/end pair.
            if token_type == "EmptyTag":
                handler.endElementNS(qname, token["name"])
        elif token_type == "EndTag":
            handler.endElementNS((token["namespace"], token["name"]),
                                 token["name"])
        elif token_type in ("Characters", "SpaceCharacters"):
            handler.characters(token["data"])
        elif token_type == "Comment":
            # Comments are not part of the SAX ContentHandler interface.
            pass
        else:
            assert False, "Unknown token type"

    for prefix, namespace in prefix_mapping.items():
        handler.endPrefixMapping(prefix)
    handler.endDocument()
|
iModels/mbuild | refs/heads/master | mbuild/utils/io.py | 2 | """Module for working with external libraries.
Portions of this code are adapted from MDTraj and are released under the
following license.
##############################################################################
# MDTraj is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with MDTraj. If not, see <http://www.gnu.org/licenses/>.
##############################################################################
"""
import importlib
import inspect
import os
import sys
import textwrap
import warnings
from unittest import SkipTest
from pkg_resources import resource_filename
class DelayImportError(ImportError, SkipTest):
    """Error to allow better import handling.

    Inheriting from SkipTest means unittest-style runners report a raise
    of this error as a skipped test rather than a failure, while plain
    callers can still catch it as an ordinary ImportError.
    """
# Help text shown (by import_ below) when an optional dependency is missing.
# Each entry is a str.format template taking `filename` and `line_number`,
# which identify the caller that needed the package.
# BUG FIX: the templates contained a literal "(unknown)" where the
# `{filename}` placeholder belongs -- import_() formats these messages with
# filename=os.path.basename(...), so the placeholder is restored here.
MESSAGES = dict()

MESSAGES["gsd"] = """
The code at {filename}:{line_number} requires the "gsd" package
gsd can be installed with conda using:
# conda install -c conda-forge gsd
"""

MESSAGES["nglview"] = """
The code at {filename}:{line_number} requires the "nglview" package
nglview can be installed using:
# conda install -c conda-forge nglview
or
# pip install nglview
"""

MESSAGES["py3Dmol"] = """
The code at {filename}:{line_number} requires the "py3Dmol" package
py3Dmol can be installed using:
# conda install -c conda-forge py3Dmol
or
# pip install py3Dmol
"""

MESSAGES["rdkit"] = """
The code at {filename}:{line_number} requires the "rdkit" package
rdkit can be installed with conda using:
# conda install -c conda-forge rdkit
or from source following instructions at:
https://www.rdkit.org/docs/Install.html#installation-from-source
"""

MESSAGES["openbabel"] = """
The code at {filename}:{line_number} requires the "openbabel" package
openbabel can be installed with conda using:
# conda install -c conda-forge openbabel
or from source following instructions at:
# http://openbabel.org/docs/current/UseTheLibrary/PythonInstall.html
"""

# pybel ships as part of openbabel; reuse its installation message.
MESSAGES["pybel"] = MESSAGES["openbabel"]

MESSAGES["mdtraj"] = """
The code at {filename}:{line_number} requires the "mdtraj" package
mdtraj can be installed using:
# conda install -c conda-forge mdtraj
or
# pip install mdtraj
"""

MESSAGES["foyer"] = """
The code at {filename}:{line_number} requires the "foyer" package
foyer can be installed using:
# conda install -c conda-forge foyer
or
# pip install foyer
"""

MESSAGES["garnett"] = """
The code at {filename}:{line_number} requires the "garnett" package
garnett can be installed with conda using:
# conda install -c conda-forge garnett
"""

MESSAGES["pycifrw"] = """
The code at {filename}:{line_number} requires the "pycifrw" package
pycifrw can be installed with conda using:
# conda install -c conda-forge pycifrw
"""

MESSAGES["protobuf"] = """
The code at {filename}:{line_number} requires the "protobuf" package
protobuf can be installed using:
# conda install -c conda-forge protobuf
or
# pip install protobuf
"""
def import_(module):
    """Import a module and issue a nice message to stderr if it isn't installed.

    Parameters
    ----------
    module : str
        The module you'd like to import, as a string

    Returns
    -------
    module : {module, object}
        The module object

    Raises
    ------
    DelayImportError
        If the module (and any compatibility fallback) is not installed.

    Examples
    --------
    >>> # the following two lines are equivalent. the difference is that the
    >>> # second will check for an ImportError and print you a very nice
    >>> # user-facing message about what's wrong (where you can install the
    >>> # module from, etc) if the import fails
    >>> import tables
    >>> tables = import_('tables')

    Notes
    -----
    The pybel/openbabel block is meant to resolve compatibility between
    openbabel 2.x and 3.0. There may be other breaking changes but the change
    in importing them is the major one we are aware of. For details, see
    https://open-babel.readthedocs.io/en/latest/UseTheLibrary/migration.
    html#python-module
    """
    if module == "pybel":
        # openbabel >= 3.0 moved pybel under the openbabel package.
        try:
            return importlib.import_module("openbabel.pybel")
        except ModuleNotFoundError:
            pass
        # Fall back to the openbabel 2.x top-level module name.
        try:
            pybel = importlib.import_module("pybel")
            msg = (
                "openbabel 2.0 detected and will be dropped in a future "
                "release. Consider upgrading to 3.x."
            )
            warnings.warn(msg, DeprecationWarning)
            return pybel
        except ModuleNotFoundError:
            pass
    if module == "openbabel":
        try:
            return importlib.import_module("openbabel.openbabel")
        except ModuleNotFoundError:
            pass
        try:
            openbabel = importlib.import_module("openbabel")
            msg = (
                "openbabel 2.0 detected and will be dropped in a future "
                "release. Consider upgrading to 3.x."
            )
            warnings.warn(msg, DeprecationWarning)
            # BUG FIX: this branch previously returned the undefined name
            # `pybel`, raising NameError whenever openbabel 2.x was found.
            return openbabel
        except ModuleNotFoundError:
            pass
    try:
        return importlib.import_module(module)
    except ImportError:
        try:
            message = MESSAGES[module]
        except KeyError:
            # Generic fallback for packages without a tailored message.
            # (A dead `e = ImportError(...)` assignment was removed here.)
            message = (
                "The code at {filename}:{line_number} requires the "
                f"{module} package"
            )
        # Report the *caller's* location so the user can see which line
        # of their code triggered the missing import.
        (
            frame,
            filename,
            line_number,
            function_name,
            lines,
            index,
        ) = inspect.getouterframes(inspect.currentframe())[1]
        m = message.format(
            filename=os.path.basename(filename), line_number=line_number
        )
        m = textwrap.dedent(m)
        # Messages use "\n" line endings, so split with splitlines() rather
        # than os.linesep (which is "\r\n" on Windows and would not split).
        bar = (
            "\033[91m"
            + "#" * max(len(line) for line in m.splitlines())
            + "\033[0m"
        )
        print("", file=sys.stderr)
        print(bar, file=sys.stderr)
        print(m, file=sys.stderr)
        print(bar, file=sys.stderr)
        raise DelayImportError(m)
# Feature-detection flags: each optional dependency is probed once at import
# time so callers can cheaply test `has_<pkg>` instead of try/importing
# themselves.  The module object is deleted right away; only the boolean
# survives in this namespace.
try:
    import intermol
    has_intermol = True
    del intermol
except ImportError:
    has_intermol = False

try:
    import gsd
    has_gsd = True
    del gsd
except ImportError:
    has_gsd = False

try:
    # openbabel 3.x layout only; 2.x (top-level `import openbabel`) is not
    # detected here -- import_() above handles the 2.x fallback.
    from openbabel import openbabel
    has_openbabel = True
    del openbabel
except ImportError:
    has_openbabel = False

try:
    import mdtraj
    has_mdtraj = True
    del mdtraj
except ImportError:
    has_mdtraj = False

try:
    import foyer
    has_foyer = True
    del foyer
except ImportError:
    has_foyer = False

try:
    import networkx
    has_networkx = True
    del networkx
except ImportError:
    has_networkx = False

try:
    import hoomd
    has_hoomd = True
    del hoomd
except ImportError:
    has_hoomd = False

try:
    import py3Dmol
    has_py3Dmol = True
    del py3Dmol
except ImportError:
    has_py3Dmol = False

try:
    # protobuf installs under the `google` namespace package.
    from google import protobuf
    has_protobuf = True
    del protobuf
except ImportError:
    has_protobuf = False

try:
    import garnett
    has_garnett = True
    del garnett
except ImportError:
    has_garnett = False

try:
    # PyCifRW's importable module is named CifFile.
    import CifFile
    has_pycifrw = True
    del CifFile
except ImportError:
    has_pycifrw = False

try:
    import rdkit
    has_rdkit = True
    del rdkit
except ImportError:
    has_rdkit = False
def get_fn(name):
    """Get the full path to one of the reference files shipped for utils.

    In the source distribution, these files are in ``mbuild/utils/reference``,
    but on installation, they're moved to somewhere in the user's python
    site-packages directory.

    Parameters
    ----------
    name : str
        Name of the file to load (with respect to the reference/ folder).

    Returns
    -------
    str
        Absolute path to the reference file.

    Raises
    ------
    IOError
        If the resolved path does not exist on disk.
    """
    fn = resource_filename("mbuild", os.path.join("utils", "reference", name))
    if not os.path.exists(fn):
        # Grammar fix in the user-facing message: "does not exists" ->
        # "does not exist".
        raise IOError("Sorry! {} does not exist.".format(fn))
    return fn
def run_from_ipython():
    """Get whether python is being run interactively."""
    # IPython injects the __IPYTHON__ name into builtins; outside of an
    # IPython session, looking it up raises NameError.
    try:
        __IPYTHON__  # noqa: F821
    except NameError:
        return False
    return True
|
gmccance/pylsf | refs/heads/master | pylsf/__init__.py | 1 | from pylsf import *
|
DandyDev/slack-machine | refs/heads/master | machine/__init__.py | 1 | from .core import Machine
from .__about__ import (__title__, __description__, __uri__, __version__, __author__,
__email__, __license__, __copyright__)
__all__ = [
'__title__', '__description__', '__uri__', '__version__', '__author__', '__email__',
'__license__', '__copyright__', 'Machine'
]
|
andars/rust | refs/heads/master | src/etc/regex-match-tests.py | 58 | #!/usr/bin/env python2
# Copyright 2014 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
from __future__ import absolute_import, division, print_function
import argparse
import datetime
import os.path as path
def print_tests(tests):
    """Print every test rendered as a Rust `mat!` macro, one per line."""
    print('\n'.join(map(test_tostr, tests)))
def read_tests(f):
    """Parse an AT&T POSIX regex .dat test file.

    Returns a list of (name, pattern, text, groups) tuples, where groups is
    a list of (start, end) spans, None entries for non-participating groups,
    or [None] for an expected non-match.

    NOTE: Python 2 only -- relies on filter()/map() returning lists (for
    len() below) and on str.decode('string_escape').
    """
    basename, _ = path.splitext(path.basename(f))
    tests = []
    for lineno, line in enumerate(open(f), 1):
        fields = filter(None, map(str.strip, line.split('\t')))
        # Keep only 4-5 field lines that apply to the 'E'xtended syntax
        # and are not comments.
        if not (4 <= len(fields) <= 5) \
                or 'E' not in fields[0] or fields[0][0] == '#':
            continue
        opts, pat, text, sgroups = fields[0:4]
        groups = []  # groups as integer ranges
        if sgroups == 'NOMATCH':
            groups = [None]
        elif ',' in sgroups:
            # Spans look like "(0,3)(1,2)"; split on ")(" and strip parens.
            noparen = map(lambda s: s.strip('()'), sgroups.split(')('))
            for g in noparen:
                s, e = map(str.strip, g.split(','))
                if s == '?' and e == '?':
                    # "(?,?)" marks a group that did not participate.
                    groups.append(None)
                else:
                    groups.append((int(s), int(e)))
        else:
            # This skips tests that should result in an error.
            # There aren't many, so I think we can just capture those
            # manually. Possibly fix this in future.
            continue
        if pat == 'SAME':
            # 'SAME' reuses the pattern from the previous test line.
            pat = tests[-1][1]
        if '$' in opts:
            # '$' option: fields contain C-style escapes; expand them.
            pat = pat.decode('string_escape')
            text = text.decode('string_escape')
        if 'i' in opts:
            # 'i' option: case-insensitive; express it as an inline flag.
            pat = '(?i)%s' % pat
        name = '%s_%d' % (basename, lineno)
        tests.append((name, pat, text, groups))
    return tests
def test_tostr(t):
    """Render one parsed test tuple as a Rust `mat!` macro invocation."""
    name, pat, text, groups = t
    rendered = ', '.join(group_tostr(g) for g in groups)
    if text == "NULL":
        text = ''
    return 'mat!{match_%s, r"%s", r"%s", %s}' % (name, pat, text, rendered)
def group_tostr(g):
    """Render a capture-group span as a Rust Option literal."""
    return 'None' if g is None else 'Some((%d, %d))' % g
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Generate match tests from an AT&T POSIX test file.')
    aa = parser.add_argument
    aa('files', nargs='+',
       help='A list of dat AT&T POSIX test files. See src/libregexp/testdata')
    args = parser.parse_args()
    # NOTE(review): `tests` is accumulated here but never used afterwards;
    # each file is re-parsed below by print_tests(read_tests(f)).
    tests = []
    for f in args.files:
        tests += read_tests(f)
    # License/header boilerplate emitted at the top of the generated file.
    tpl = '''// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-tidy-linelength
// DO NOT EDIT. Automatically generated by 'src/etc/regexp-match-tests'
// on {date}.
'''
    print(tpl.format(date=str(datetime.datetime.now())))
    # Emit the generated mat! macros, grouped per input file.
    for f in args.files:
        print('// Tests from %s' % path.basename(f))
        print_tests(read_tests(f))
        print('')
|
otherness-space/myProject002 | refs/heads/master | my_project_002/lib/python2.7/site-packages/django/contrib/localflavor/es/forms.py | 108 | # -*- coding: utf-8 -*-
"""
Spanish-specific Form helpers
"""
from __future__ import absolute_import, unicode_literals
import re
from django.contrib.localflavor.es.es_provinces import PROVINCE_CHOICES
from django.contrib.localflavor.es.es_regions import REGION_CHOICES
from django.core.validators import EMPTY_VALUES
from django.forms import ValidationError
from django.forms.fields import RegexField, Select
from django.utils.translation import ugettext_lazy as _
class ESPostalCodeField(RegexField):
    """
    A form field that validates its input as a spanish postal code.

    Spanish postal code is a five digits string, with two first digits
    between 01 and 52, assigned to provinces code.
    """
    default_error_messages = {
        'invalid': _('Enter a valid postal code in the range and format 01XXX - 52XXX.'),
    }

    def __init__(self, max_length=None, min_length=None, *args, **kwargs):
        # Province prefix 01-52 followed by any three digits.
        super(ESPostalCodeField, self).__init__(
            r'^(0[1-9]|[1-4][0-9]|5[0-2])\d{3}$',
            max_length, min_length, *args, **kwargs)
class ESPhoneNumberField(RegexField):
    """
    A form field that validates its input as a Spanish phone number.
    Information numbers are omitted.

    Spanish phone numbers are nine digit numbers, where first digit is 6
    or 7 (for cell phones), 8 (for special phones), or 9 (for landlines
    and special phones).

    NOTE(review): the regex below accepts a leading 7, but the 'invalid'
    error message only lists 6/8/9 -- it should probably mention
    7XXXXXXXX as well; confirm before changing the user-facing string.

    TODO: accept and strip characters like dot, hyphen... in phone number
    """
    default_error_messages = {
        'invalid': _('Enter a valid phone number in one of the formats 6XXXXXXXX, 8XXXXXXXX or 9XXXXXXXX.'),
    }

    def __init__(self, max_length=None, min_length=None, *args, **kwargs):
        super(ESPhoneNumberField, self).__init__(r'^(6|7|8|9)\d{8}$',
            max_length, min_length, *args, **kwargs)
class ESIdentityCardNumberField(RegexField):
    """
    Spanish NIF/NIE/CIF (Fiscal Identification Number) code.

    Validates three different formats:
        NIF (individuals): 12345678A
        CIF (companies): A12345678
        NIE (foreigners): X12345678A
    according to a couple of simple checksum algorithms.

    Value can include a space or hyphen separator between number and letters.
    Number length is not checked for NIF (or NIE), old values start with a 1,
    and future values can contain digits greater than 8. The CIF control digit
    can be a number or a letter depending on company type. Algorithm is not
    public, and different authors have different opinions on which ones allows
    letters, so both validations are assumed true for all types.
    """
    default_error_messages = {
        'invalid': _('Please enter a valid NIF, NIE, or CIF.'),
        'invalid_only_nif': _('Please enter a valid NIF or NIE.'),
        'invalid_nif': _('Invalid checksum for NIF.'),
        'invalid_nie': _('Invalid checksum for NIE.'),
        'invalid_cif': _('Invalid checksum for CIF.'),
    }

    def __init__(self, only_nif=False, max_length=None, min_length=None, *args, **kwargs):
        self.only_nif = only_nif
        # NIF/NIE control letters, indexed by (number mod 23).
        self.nif_control = 'TRWAGMYFPDXBNJZSQVHLCKE'
        # CIF control letters, indexed by the numeric checksum digit.
        self.cif_control = 'JABCDEFGHI'
        # Leading letters identifying companies (CIF) and foreigners (NIE).
        self.cif_types = 'ABCDEFGHKLMNPQS'
        self.nie_types = 'XT'
        # Optional type letter, digits, optional control letter; a single
        # space or hyphen separator is tolerated on either side.
        id_card_re = re.compile(r'^([%s]?)[ -]?(\d+)[ -]?([%s]?)$' % (self.cif_types + self.nie_types, self.nif_control + self.cif_control), re.IGNORECASE)
        super(ESIdentityCardNumberField, self).__init__(id_card_re, max_length, min_length,
            error_message=self.default_error_messages['invalid%s' % (self.only_nif and '_only_nif' or '')],
            *args, **kwargs)

    def clean(self, value):
        # Validate the checksum of a NIF, NIE, or CIF; return '' for empties.
        super(ESIdentityCardNumberField, self).clean(value)
        if value in EMPTY_VALUES:
            return ''
        # NIF/NIE control letter: the digits mod 23, indexed into nif_control.
        nif_get_checksum = lambda d: self.nif_control[int(d)%23]

        value = value.upper().replace(' ', '').replace('-', '')
        # Re-match on the normalized value; super().clean() already
        # validated the (case-insensitive, separator-tolerant) regex, so
        # the stripped uppercase value still matches.
        m = re.match(r'^([%s]?)[ -]?(\d+)[ -]?([%s]?)$' % (self.cif_types + self.nie_types, self.nif_control + self.cif_control), value)
        letter1, number, letter2 = m.groups()

        if not letter1 and letter2:
            # NIF: digits followed by a control letter.
            if letter2 == nif_get_checksum(number):
                return value
            else:
                raise ValidationError(self.error_messages['invalid_nif'])
        elif letter1 in self.nie_types and letter2:
            # NIE: same checksum as NIF, computed over the digits only.
            if letter2 == nif_get_checksum(number):
                return value
            else:
                raise ValidationError(self.error_messages['invalid_nie'])
        elif not self.only_nif and letter1 in self.cif_types and len(number) in [7, 8]:
            # CIF: leading company-type letter, 7-8 digits.
            if not letter2:
                # 8-digit form: the control digit is the last digit itself.
                number, letter2 = number[:-1], int(number[-1])
            # cif_get_checksum is a module-level helper defined below.
            checksum = cif_get_checksum(number)
            # The control may be numeric or its letter equivalent.
            if letter2 in (checksum, self.cif_control[checksum]):
                return value
            else:
                raise ValidationError(self.error_messages['invalid_cif'])
        else:
            raise ValidationError(self.error_messages['invalid'])
class ESCCCField(RegexField):
    """
    A form field that validates its input as a Spanish bank account or CCC
    (Codigo Cuenta Cliente).

    Spanish CCC is in format EEEE-OOOO-CC-AAAAAAAAAA where:
        E = entity
        O = office
        C = checksum
        A = account
    It's also valid to use a space as delimiter, or to use no delimiter.

    First checksum digit validates entity and office, and last one
    validates account. Validation is done multiplying every digit of 10
    digit value (with leading 0 if necessary) by number in its position in
    string 1, 2, 4, 8, 5, 10, 9, 7, 3, 6. Sum resulting numbers and extract
    it from 11. Result is checksum except when 10 then is 1, or when 11
    then is 0.

    TODO: allow IBAN validation too
    """
    default_error_messages = {
        'invalid': _('Please enter a valid bank account number in format XXXX-XXXX-XX-XXXXXXXXXX.'),
        'checksum': _('Invalid checksum for bank account number.'),
    }

    def __init__(self, max_length=None, min_length=None, *args, **kwargs):
        super(ESCCCField, self).__init__(r'^\d{4}[ -]?\d{4}[ -]?\d{2}[ -]?\d{10}$',
            max_length, min_length, *args, **kwargs)

    def clean(self, value):
        # Return the value if both checksum digits verify, else raise.
        super(ESCCCField, self).clean(value)
        if value in EMPTY_VALUES:
            return ''
        # Per-position weights for the modulo-11 checksum described above.
        control_str = [1, 2, 4, 8, 5, 10, 9, 7, 3, 6]
        # Format already validated by super(); re-match to split the fields.
        m = re.match(r'^(\d{4})[ -]?(\d{4})[ -]?(\d{2})[ -]?(\d{10})$', value)
        entity, office, checksum, account = m.groups()
        # 11 - (weighted sum mod 11) yields 1..11; the replace() calls map
        # the two special cases 10 -> '1' and 11 -> '0'.
        get_checksum = lambda d: str(11 - sum([int(digit) * int(control) for digit, control in zip(d, control_str)]) % 11).replace('10', '1').replace('11', '0')
        # First checksum digit covers '00' + entity + office; second covers
        # the 10-digit account.
        if get_checksum('00' + entity + office) + get_checksum(account) == checksum:
            return value
        else:
            raise ValidationError(self.error_messages['checksum'])
class ESRegionSelect(Select):
    """
    A Select widget that uses a list of spanish regions as its choices.
    """
    def __init__(self, attrs=None):
        # Choices come from the static REGION_CHOICES list.
        super(ESRegionSelect, self).__init__(attrs, choices=REGION_CHOICES)
class ESProvinceSelect(Select):
    """
    A Select widget that uses a list of spanish provinces as its choices.
    """
    def __init__(self, attrs=None):
        # Choices come from the static PROVINCE_CHOICES list.
        super(ESProvinceSelect, self).__init__(attrs, choices=PROVINCE_CHOICES)
def cif_get_checksum(number):
s1 = sum([int(digit) for pos, digit in enumerate(number) if int(pos) % 2])
s2 = sum([sum([int(unit) for unit in str(int(digit) * 2)]) for pos, digit in enumerate(number) if not int(pos) % 2])
return (10 - ((s1 + s2) % 10)) % 10
|
BT-csanchez/account-financial-reporting | refs/heads/8.0 | __unported__/account_financial_report/__openerp__.py | 24 | # -*- encoding: utf-8 -*-
###########################################################################
# Copyright (C) OpenERP Venezuela (<http://openerp.com.ve>).
# All Rights Reserved
# Credits######################################################
# Coded by: Humberto Arocha humberto@openerp.com.ve
# Angelica Barrios angelicaisabelb@gmail.com
# Jordi Esteve <jesteve@zikzakmedia.com>
# Planified by: Humberto Arocha
# Finance by: LUBCAN COL S.A.S http://www.lubcancol.com
# Audited by: Humberto Arocha humberto@openerp.com.ve
#############################################################################
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
##############################################################################
# OpenERP/Odoo module manifest (module is marked not installable / unported).
# Fixed user-visible typos in the description: "Multiporpuse" ->
# "Multipurpose", "incluiding" -> "including".
{
    "name": "Common financial reports",
    "version": "2.0",
    "author": "Vauxoo,Odoo Community Association (OCA)",
    "website": "http://www.vauxoo.com",
    "license": "GPL-3 or any later version",
    "depends": ["base",
                "account"
                ],
    "category": "Accounting",
    "description": """
Multipurpose Accounting report generator.
=========================================
From the wizard you will be asked to provide information needed to create your
report.
Not only you can set the option within the wizard you can create your own
Customized Account Financial Reports, in here, you will be able to create
Templates for generating Two types of Reports: Balance Sheets and Income
Statements, including Analytic Ledgers. Besides, you can select within a set
of choices to get better detailed report, be it that you ask it by one or
several periods, by months (12 Months + YTD), or by quarters (4QRT's + YTD).
Even you can get your reports in currencies other than the one set on your
company.
In the [ Account's Sign on Reports ] Section in the Company will be able to
set the sign conventions for the Accounts, so that you will be able to see in
positives Values in your reports for those accounts with Accreditable nature
where appropriate""",
    "data": [
        "security/security.xml",
        "security/ir.model.access.csv",
        "view/report.xml",
        "view/wizard.xml",
        "view/company_view.xml",
        "view/account_financial_report_view.xml",
    ],
    "active": False,
    'installable': False
}
|
brendandburns/tensorflow | refs/heads/master | tensorflow/models/embedding/word2vec_optimized_test.py | 5 | """Tests for word2vec_optimized module."""
import os
import tensorflow.python.platform
import tensorflow as tf
from tensorflow.models.embedding import word2vec_optimized as word2vec_optimized
flags = tf.app.flags
FLAGS = flags.FLAGS
class Word2VecTest(tf.test.TestCase):
    """Smoke test: train word2vec_optimized for one epoch on a tiny corpus."""

    def setUp(self):
        # BUG FIX: os.path.join was called with a single pre-concatenated
        # argument (`self.get_temp_dir() + "test-text.txt"`), producing a
        # path like "/tmp/xyztest-text.txt" with no separator.  Pass the
        # directory and file name as separate components instead.
        FLAGS.train_data = os.path.join(self.get_temp_dir(), "test-text.txt")
        FLAGS.eval_data = os.path.join(self.get_temp_dir(), "eval-text.txt")
        FLAGS.save_path = self.get_temp_dir()
        with open(FLAGS.train_data, "w") as f:
            f.write(
                """alice was beginning to get very tired of sitting by her sister on
the bank, and of having nothing to do: once or twice she had peeped
into the book her sister was reading, but it had no pictures or
conversations in it, 'and what is the use of a book,' thought alice
'without pictures or conversations?' So she was considering in her own
mind (as well as she could, for the hot day made her feel very sleepy
and stupid), whether the pleasure of making a daisy-chain would be
worth the trouble of getting up and picking the daisies, when suddenly
a White rabbit with pink eyes ran close by her.\n""")
        with open(FLAGS.eval_data, "w") as f:
            f.write("alice she rabbit once\n")

    def testWord2VecOptimized(self):
        FLAGS.batch_size = 5
        FLAGS.num_neg_samples = 10
        FLAGS.epochs_to_train = 1
        FLAGS.min_count = 0
        word2vec_optimized.main([])
if __name__ == "__main__":
tf.test.main()
|
yceruto/django | refs/heads/master | tests/admin_inlines/admin.py | 30 | from django.contrib import admin
from django import forms
from .models import (
Author, BinaryTree, CapoFamiglia, Chapter, ChildModel1, ChildModel2,
Consigliere, EditablePKBook, ExtraTerrestrial, Fashionista, Holder,
Holder2, Holder3, Holder4, Inner, Inner2, Inner3, Inner4Stacked,
Inner4Tabular, NonAutoPKBook, Novel, ParentModelWithCustomPk, Poll,
Profile, ProfileCollection, Question, ReadOnlyInline, ShoppingWeakness,
Sighting, SomeChildModel, SomeParentModel, SottoCapo, Title,
TitleCollection,
)
# Stand-alone AdminSite that all the models below are registered on.
site = admin.AdminSite(name="admin")


class BookInline(admin.TabularInline):
    # Inline over the m2m "through" model between Author and Book.
    model = Author.books.through


class NonAutoPKBookTabularInline(admin.TabularInline):
    model = NonAutoPKBook


class NonAutoPKBookStackedInline(admin.StackedInline):
    model = NonAutoPKBook


class EditablePKBookTabularInline(admin.TabularInline):
    model = EditablePKBook


class EditablePKBookStackedInline(admin.StackedInline):
    model = EditablePKBook


class AuthorAdmin(admin.ModelAdmin):
    inlines = [BookInline,
        NonAutoPKBookTabularInline, NonAutoPKBookStackedInline,
        EditablePKBookTabularInline, EditablePKBookStackedInline]


class InnerInline(admin.StackedInline):
    model = Inner
    can_delete = False
    readonly_fields = ('readonly',)  # For bug #13174 tests.


class HolderAdmin(admin.ModelAdmin):
    # ModelAdmin-level media only (no inline media); see registrations below.
    class Media:
        js = ('my_awesome_admin_scripts.js',)


class ReadOnlyInlineInline(admin.TabularInline):
    model = ReadOnlyInline
    readonly_fields = ['name']


class InnerInline2(admin.StackedInline):
    model = Inner2

    class Media:
        js = ('my_awesome_inline_scripts.js',)


class InnerInline3(admin.StackedInline):
    model = Inner3

    class Media:
        js = ('my_awesome_inline_scripts.js',)
class TitleForm(forms.ModelForm):
    """Inline form that rejects submissions whose two titles differ."""

    def clean(self):
        data = self.cleaned_data
        if data.get("title1") != data.get("title2"):
            raise forms.ValidationError("The two titles must be the same")
        return data
class TitleInline(admin.TabularInline):
    model = Title
    form = TitleForm  # validates that title1 == title2
    extra = 1


class Inner4StackedInline(admin.StackedInline):
    model = Inner4Stacked


class Inner4TabularInline(admin.TabularInline):
    model = Inner4Tabular


class Holder4Admin(admin.ModelAdmin):
    # Same model exposed through both stacked and tabular inlines.
    inlines = [Inner4StackedInline, Inner4TabularInline]


class InlineWeakness(admin.TabularInline):
    model = ShoppingWeakness
    extra = 1
# Inlines exercising readonly fields backed by callables defined on the
# inline itself versus on the parent ModelAdmin.
class QuestionInline(admin.TabularInline):
    model = Question
    readonly_fields = ['call_me']

    def call_me(self, obj):
        return 'Callable in QuestionInline'


class PollAdmin(admin.ModelAdmin):
    inlines = [QuestionInline]

    def call_me(self, obj):
        return 'Callable in PollAdmin'


class ChapterInline(admin.TabularInline):
    model = Chapter
    readonly_fields = ['call_me']

    def call_me(self, obj):
        return 'Callable in ChapterInline'


class NovelAdmin(admin.ModelAdmin):
    inlines = [ChapterInline]
class ConsigliereInline(admin.TabularInline):
    model = Consigliere


class SottoCapoInline(admin.TabularInline):
    model = SottoCapo


class ProfileInline(admin.TabularInline):
    model = Profile
    extra = 1


# admin for #18433
class ChildModel1Inline(admin.TabularInline):
    model = ChildModel1


class ChildModel2Inline(admin.StackedInline):
    model = ChildModel2
# admin for #19425 and #18388
class BinaryTreeAdmin(admin.TabularInline):
    """Inline whose extra/max form counts shrink as children already exist."""

    model = BinaryTree

    def get_extra(self, request, obj=None, **kwargs):
        wanted = 2
        return wanted - obj.binarytree_set.count() if obj else wanted

    def get_max_num(self, request, obj=None, **kwargs):
        ceiling = 3
        return ceiling - obj.binarytree_set.count() if obj else ceiling
# admin for #19524
class SightingInline(admin.TabularInline):
    model = Sighting


# admin and form for #18263
class SomeChildModelForm(forms.ModelForm):

    class Meta:
        fields = '__all__'
        model = SomeChildModel
        widgets = {
            # Hidden input for 'position' -- presumably ordering is managed
            # elsewhere (see ticket #18263); confirm before exposing it.
            'position': forms.HiddenInput,
        }


class SomeChildModelInline(admin.TabularInline):
    model = SomeChildModel
    form = SomeChildModelForm
# Register everything on the custom `site` declared at the top of the module.
site.register(TitleCollection, inlines=[TitleInline])
# Test bug #12561 and #12778
# only ModelAdmin media
site.register(Holder, HolderAdmin, inlines=[InnerInline])
# ModelAdmin and Inline media
site.register(Holder2, HolderAdmin, inlines=[InnerInline2])
# only Inline media
site.register(Holder3, inlines=[InnerInline3])
site.register(Poll, PollAdmin)
site.register(Novel, NovelAdmin)
site.register(Fashionista, inlines=[InlineWeakness])
site.register(Holder4, Holder4Admin)
site.register(Author, AuthorAdmin)
site.register(CapoFamiglia, inlines=[ConsigliereInline, SottoCapoInline, ReadOnlyInlineInline])
site.register(ProfileCollection, inlines=[ProfileInline])
site.register(ParentModelWithCustomPk, inlines=[ChildModel1Inline, ChildModel2Inline])
site.register(BinaryTree, inlines=[BinaryTreeAdmin])
site.register(ExtraTerrestrial, inlines=[SightingInline])
site.register(SomeParentModel, inlines=[SomeChildModelInline])
|
Pquips/Portfolio | refs/heads/master | tk.py | 1 | from Tkinter import *
import RPi.GPIO as GPIO
# Address pins by their Broadcom (BCM) numbers rather than board position.
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
# Configure the six output pins driven by the key handlers below.
GPIO.setup(4,GPIO.OUT)
GPIO.setup(17,GPIO.OUT)
GPIO.setup(27,GPIO.OUT)
GPIO.setup(22,GPIO.OUT)
GPIO.setup(9,GPIO.OUT)
GPIO.setup(20,GPIO.OUT)
def _make_key_handlers(pin):
    """Return a (press, release) pair of Tk event callbacks that drive the
    given BCM *pin* high on key press and low on key release.

    Replaces twelve copy-pasted one-line handlers with a single factory;
    the public handler names below are unchanged so the frame.bind() calls
    further down keep working.
    """
    def keydown(event):
        GPIO.output(pin, GPIO.HIGH)

    def keyup(event):
        GPIO.output(pin, GPIO.LOW)

    return keydown, keyup


# One (down, up) handler pair per key, same key -> pin mapping as before.
keydownq, keyupq = _make_key_handlers(20)
keydownw, keyupw = _make_key_handlers(27)
keydowne, keyupe = _make_key_handlers(9)
keydowni, keyupi = _make_key_handlers(4)
keydowno, keyupo = _make_key_handlers(17)
keydownp, keyupp = _make_key_handlers(22)
root = Tk()
frame = Frame(root, width=100, height=100)

# Map key press/release events to the GPIO handlers defined above.
frame.bind("<KeyPress-q>", keydownq)
frame.bind("<KeyRelease-q>", keyupq)
frame.bind("<KeyPress-w>", keydownw)
frame.bind("<KeyRelease-w>", keyupw)
frame.bind("<KeyPress-e>", keydowne)
frame.bind("<KeyRelease-e>", keyupe)
frame.bind("<KeyPress-i>", keydowni)
frame.bind("<KeyRelease-i>", keyupi)
frame.bind("<KeyPress-o>", keydowno)
frame.bind("<KeyRelease-o>", keyupo)
frame.bind("<KeyPress-p>", keydownp)
frame.bind("<KeyRelease-p>", keyupp)
frame.pack()
frame.focus_set()

root.mainloop()
GPIO.cleanup()

# BUG FIX: `os` was never imported, so the os.system() call below raised
# NameError once the window was closed.
# NOTE(review): 'xset r off' *disables* X key auto-repeat; running it after
# the UI exits looks suspicious -- confirm whether it was meant to run
# before mainloop() (with 'xset r on' restoring repeat on exit).
import os
os.system('xset r off')
prestodb/presto-admin | refs/heads/master | tests/unit/standalone/test_help.py | 1 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mock import patch
import os
import prestoadmin
from prestoadmin import main
from tests.unit.test_main import BaseMainCase
# Consult the comment on yarn_slider.test_help.TestSliderHelp for more info.
class TestStandaloneHelp(BaseMainCase):
    """Compare `presto-admin` help output in standalone mode against the
    checked-in resource files.

    Python 2 only: relies on the builtin reload().
    """

    @patch('prestoadmin.mode.get_mode', return_value='standalone')
    def setUp(self, mode_mock):
        # Force standalone mode, then reload the package and main module so
        # any mode-dependent setup they perform at import time is redone.
        super(TestStandaloneHelp, self).setUp()
        reload(prestoadmin)
        reload(main)

    def get_short_help_path(self):
        # Expected output for -h / --help / no arguments.
        return os.path.join('resources', 'standalone-help.txt')

    def get_extended_help_path(self):
        # Expected output for --extended-help.
        return os.path.join('resources', 'standalone-extended-help.txt')

    def test_standalone_help_text_short(self):
        self._run_command_compare_to_file(
            ["-h"], 0, self.get_short_help_path())

    def test_standalone_help_text_long(self):
        self._run_command_compare_to_file(
            ["--help"], 0, self.get_short_help_path())

    def test_standalone_help_displayed_with_no_args(self):
        self._run_command_compare_to_file(
            [], 0, self.get_short_help_path())

    def test_standalone_extended_help(self):
        self._run_command_compare_to_file(
            ['--extended-help'], 0, self.get_extended_help_path())
|
waldocollective/swiftwind | refs/heads/master | swiftwind/settings/migrations/0002_settings_tellerio_enable.py | 2 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2017-12-05 15:08
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds the Settings.tellerio_enable boolean flag (default off) that
    # gates the daily teller.io import feature. Follows the initial
    # settings-app migration.

    dependencies = [
        ('settings', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='settings',
            name='tellerio_enable',
            field=models.BooleanField(default=False, help_text='Enable daily imports from teller.io'),
        ),
    ]
|
thethythy/Mnemopwd | refs/heads/master | mnemopwd/server/clients/protocol/StateS2.py | 1 | # -*- coding: utf-8 -*-
# Copyright (c) 2015-2016, Thierry Lemeunier <thierry at lemeunier dot net>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
State S2 : Login or CountCreation
"""
from ...util.funcutils import singleton
@singleton
class StateS2:
    """State S2: route the request to Login (S21) or CountCreation (S22).

    Inspects the protocol keyword embedded at a fixed offset of the
    message, switches the client to the matching sub-state and schedules
    that state's handler; any other keyword is a protocol error.
    """

    def do(self, client, data):
        """Select substate S21 or S22 from the keyword at byte offset 170."""
        wants_login = data[170:175] == b"LOGIN"        # marker for S21
        wants_creation = data[170:178] == b"CREATION"  # marker for S22
        if wants_login:
            client.state = client.states['21']
        elif wants_creation:
            client.state = client.states['22']
        if wants_login or wants_creation:
            # Run the freshly selected state's handler off the loop thread.
            client.loop.run_in_executor(None, client.state.do, client, data)
        else:
            # Unknown keyword: hand a protocol error to the client's
            # exception handler on the event-loop thread.
            client.loop.call_soon_threadsafe(
                client.exception_handler, Exception('S2 protocol error'))
|
nzavagli/UnrealPy | refs/heads/master | UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/Python-2.7.10/Tools/scripts/eptags.py | 102 | #! /usr/bin/env python
"""Create a TAGS file for Python programs, usable with GNU Emacs.
usage: eptags pyfiles...
The output TAGS file is usable with Emacs version 18, 19, 20.
Tagged are:
- functions (even inside other defs or classes)
- classes
eptags warns about files it cannot open.
eptags will not give warnings about duplicate tags.
BUGS:
Because of tag duplication (methods with the same name in different
classes), TAGS files are not very useful for most object-oriented
python projects.
"""
import sys,re
expr = r'^[ \t]*(def|class)[ \t]+([a-zA-Z_][a-zA-Z0-9_]*)[ \t]*[:\(]'
matcher = re.compile(expr)
def treat_file(filename, outfp):
"""Append tags found in file named 'filename' to the open file 'outfp'"""
try:
fp = open(filename, 'r')
except:
sys.stderr.write('Cannot open %s\n'%filename)
return
charno = 0
lineno = 0
tags = []
size = 0
while 1:
line = fp.readline()
if not line:
break
lineno = lineno + 1
m = matcher.search(line)
if m:
tag = m.group(0) + '\177%d,%d\n' % (lineno, charno)
tags.append(tag)
size = size + len(tag)
charno = charno + len(line)
outfp.write('\f\n%s,%d\n' % (filename,size))
for tag in tags:
outfp.write(tag)
def main():
    """Create a TAGS file in the current directory covering sys.argv[1:]."""
    outfp = open('TAGS', 'w')
    try:
        for filename in sys.argv[1:]:
            treat_file(filename, outfp)
    finally:
        # The original leaked the handle; close so TAGS is fully flushed
        # even if treat_file raises.
        outfp.close()
if __name__=="__main__":
main()
|
modulexcite/blink | refs/heads/nw12 | Source/devtools/scripts/generate_devtools_grd.py | 15 | #!/usr/bin/env python
#
# Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Creates a grd file for packaging the inspector files."""
from __future__ import with_statement
from os import path
import errno
import os
import shutil
import sys
from xml.dom import minidom
kDevToolsResourcePrefix = 'IDR_DEVTOOLS_'
kGrdTemplate = '''<?xml version="1.0" encoding="UTF-8"?>
<grit latest_public_release="0" current_release="1">
<outputs>
<output filename="grit/devtools_resources.h" type="rc_header">
<emit emit_type='prepend'></emit>
</output>
<output filename="grit/devtools_resources_map.cc" type="resource_file_map_source" />
<output filename="grit/devtools_resources_map.h" type="resource_map_header" />
<output filename="devtools_resources.pak" type="data_package" />
</outputs>
<release seq="1">
<includes></includes>
</release>
</grit>
'''
class ParsedArgs:
    """Plain record holding the parsed command-line arguments."""

    def __init__(self, source_files, relative_path_dirs, image_dirs, output_filename):
        # Files to copy into the output tree and register in the grd.
        self.source_files = source_files
        # Directory prefixes stripped to form grd-relative names.
        self.relative_path_dirs = relative_path_dirs
        # Directories scanned for .png/.gif images.
        self.image_dirs = image_dirs
        # Path of the .grd file to write.
        self.output_filename = output_filename
def parse_args(argv):
    """Split a flat argv list into a ParsedArgs record.

    Expected layout:
        <sources...> --static_files_list FILE --relative_path_dirs <dirs...>
        --images <dirs...> --output FILENAME
    """
    static_pos = argv.index('--static_files_list')
    relative_pos = argv.index('--relative_path_dirs')
    images_pos = argv.index('--images')
    output_pos = argv.index('--output')
    source_files = argv[:static_pos]
    # The flag's value names a file listing extra sources, one per line.
    with open(argv[static_pos + 1], 'r') as static_list_file:
        source_files.extend(line.rstrip('\n')
                            for line in static_list_file.readlines())
    return ParsedArgs(source_files,
                      argv[relative_pos + 1:images_pos],
                      argv[images_pos + 1:output_pos],
                      argv[output_pos + 1])
def make_name_from_filename(filename):
    """Derive a grit resource constant name from a file path.

    Path separators, dashes and dots all become underscores, and the
    result is upper-cased (e.g. 'Images/a-b.png' -> 'IMAGES_A_B_PNG').
    """
    name = filename
    for separator in '/\\-.':
        name = name.replace(separator, '_')
    return name.upper()
def add_file_to_grd(grd_doc, relative_filename):
    """Append an <include> entry for *relative_filename* to the grd DOM."""
    includes_node = grd_doc.getElementsByTagName('includes')[0]
    # The text node keeps the generated XML roughly one entry per line.
    includes_node.appendChild(grd_doc.createTextNode('\n '))
    entry = grd_doc.createElement('include')
    for attribute, value in (('name', make_name_from_filename(relative_filename)),
                             ('file', relative_filename),
                             ('type', 'BINDATA')):
        entry.setAttribute(attribute, value)
    includes_node.appendChild(entry)
def build_relative_filename(relative_path_dirs, filename):
    """Strip the first matching prefix directory from *filename*.

    Falls back to the bare basename when no prefix matches.
    """
    for base_dir in relative_path_dirs:
        if filename.startswith(base_dir):
            return filename[len(base_dir) + 1:]
    return path.basename(filename)
def main(argv):
    """Copy the input files next to the output .grd and generate the grd.

    Steps: parse argv, create the output tree (plus an Images/ dir), copy
    every unique source file preserving its relative path, copy .png/.gif
    images from the image dirs, and register each copied file as an
    <include> in the grd XML written to the --output path.
    """
    parsed_args = parse_args(argv[1:])
    doc = minidom.parseString(kGrdTemplate)
    output_directory = path.dirname(parsed_args.output_filename)
    try:
        os.makedirs(path.join(output_directory, 'Images'))
    except OSError, e:
        # Already-existing output dirs are fine; anything else is fatal.
        if e.errno != errno.EEXIST:
            raise e
    written_filenames = set()
    for filename in parsed_args.source_files:
        relative_filename = build_relative_filename(parsed_args.relative_path_dirs, filename)
        # Avoid writing duplicate relative filenames.
        if relative_filename in written_filenames:
            continue
        written_filenames.add(relative_filename)
        target_dir = path.join(output_directory, path.dirname(relative_filename))
        if not path.exists(target_dir):
            os.makedirs(target_dir)
        shutil.copy(filename, target_dir)
        add_file_to_grd(doc, relative_filename)
    for dirname in parsed_args.image_dirs:
        for filename in os.listdir(dirname):
            # Only bitmap assets are packaged from the image directories.
            if not filename.endswith('.png') and not filename.endswith('.gif'):
                continue
            shutil.copy(path.join(dirname, filename),
                        path.join(output_directory, 'Images'))
            add_file_to_grd(doc, path.join('Images', filename))
    with open(parsed_args.output_filename, 'w') as output_file:
        output_file.write(doc.toxml(encoding='UTF-8'))
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
brockwhittaker/zulip | refs/heads/master | tools/lib/sanity_check.py | 2 | import os
import pwd
import sys
def check_venv(filename):
    # type: (str) -> None
    """Exit with a helpful hint when not inside the Zulip dev environment.

    The check simply tries to import packages that only exist in the dev
    virtualenv; on failure it prints environment-specific guidance and
    exits with status 1.
    """
    try:
        import django
        import ujson
        import zulip
    except ImportError:
        print("You need to run %s inside a Zulip dev environment." % (filename,))
        user_name = pwd.getpwuid(os.getuid()).pw_name
        if user_name in ('vagrant', 'zulipdev'):
            # Already on the VM/container: just activate the venv.
            print("You can `source /srv/zulip-py3-venv/bin/activate` to enter the Zulip development environment.")
        else:
            print("If you are using Vagrant, you can `vagrant ssh` to enter the Vagrant guest.")
        sys.exit(1)
|
camagenta/youtube-dl | refs/heads/master | youtube_dl/extractor/jeuxvideo.py | 85 | # coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
class JeuxVideoIE(InfoExtractor):
_VALID_URL = r'http://.*?\.jeuxvideo\.com/.*/(.*?)\.htm'
_TESTS = [{
'url': 'http://www.jeuxvideo.com/reportages-videos-jeux/0004/00046170/tearaway-playstation-vita-gc-2013-tearaway-nous-presente-ses-papiers-d-identite-00115182.htm',
'md5': '046e491afb32a8aaac1f44dd4ddd54ee',
'info_dict': {
'id': '114765',
'ext': 'mp4',
'title': 'Tearaway : GC 2013 : Tearaway nous présente ses papiers d\'identité',
'description': 'Lorsque les développeurs de LittleBigPlanet proposent un nouveau titre, on ne peut que s\'attendre à un résultat original et fort attrayant.',
},
}, {
'url': 'http://www.jeuxvideo.com/videos/chroniques/434220/l-histoire-du-jeu-video-la-saturn.htm',
'only_matching': True,
}]
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
title = mobj.group(1)
webpage = self._download_webpage(url, title)
title = self._html_search_meta('name', webpage)
config_url = self._html_search_regex(
r'data-src="(/contenu/medias/video.php.*?)"',
webpage, 'config URL')
config_url = 'http://www.jeuxvideo.com' + config_url
video_id = self._search_regex(
r'id=(\d+)',
config_url, 'video ID')
config = self._download_json(
config_url, title, 'Downloading JSON config')
formats = [{
'url': source['file'],
'format_id': source['label'],
'resolution': source['label'],
} for source in reversed(config['sources'])]
return {
'id': video_id,
'title': title,
'formats': formats,
'description': self._og_search_description(webpage),
'thumbnail': config.get('image'),
}
|
kg-bot/SupyBot | refs/heads/master | plugins/Steps/steps/trad.py | 1 | name = 'Traditions'
url = ''
abbrevs = ['traditions','aatraditions','aatrad','trad']
steps = [
'Our common welfare should come first; personal recovery depends upon A.A. unity.',
'For our group purpose there is but one ultimate authority - a loving God as He may express Himself in our group conscience. Our leaders are but trusted servants; they do not govern.',
'The only requirement for A.A. membership is a desire to stop drinking.',
'Each group should be autonomous except in matters affecting other groups or A.A. as a whole.',
'Each group has but one primary purpose - to carry its message to the alcoholic who still suffers.',
'An A.A. group ought never endorse, finance, or lend the A.A. name to any related facility or outside enterprise, lest problems of money, property, and prestige divert us from our primary purpose.',
'Every A.A. group ought to be fully self-supporting, declining outside contributions.',
'Alcoholics Anonymous should remain forever nonprofessional, but our service centers may employ special workers.',
'A.A., as such, ought never be organized; but we may create service boards or committees directly responsible to those they serve.',
'Alcoholics Anonymous has no opinion on outside issues; hence the A.A. name ought never be drawn into public controversy.',
'Our public relations policy is based on attraction rather than promotion; we need always maintain personal anonymity at the level of press, radio, and films.',
'Anonymity is the spiritual foundation of all our Traditions, ever reminding us to place principles before personalities.',
]
|
DaiYue/shadowsocks | refs/heads/master | shadowsocks/asyncdns.py | 655 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2014-2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import os
import socket
import struct
import re
import logging
from shadowsocks import common, lru_cache, eventloop, shell
CACHE_SWEEP_INTERVAL = 30
VALID_HOSTNAME = re.compile(br"(?!-)[A-Z\d-]{1,63}(?<!-)$", re.IGNORECASE)
common.patch_socket()
# rfc1035
# format
# +---------------------+
# | Header |
# +---------------------+
# | Question | the question for the name server
# +---------------------+
# | Answer | RRs answering the question
# +---------------------+
# | Authority | RRs pointing toward an authority
# +---------------------+
# | Additional | RRs holding additional information
# +---------------------+
#
# header
# 1 1 1 1 1 1
# 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | ID |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# |QR| Opcode |AA|TC|RD|RA| Z | RCODE |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | QDCOUNT |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | ANCOUNT |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | NSCOUNT |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | ARCOUNT |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
QTYPE_ANY = 255
QTYPE_A = 1
QTYPE_AAAA = 28
QTYPE_CNAME = 5
QTYPE_NS = 2
QCLASS_IN = 1
def build_address(address):
    """Encode a dotted hostname (bytes) into DNS wire-format labels.

    Returns the length-prefixed label sequence terminated by a zero byte,
    or None when any single label exceeds the 63-byte limit (RFC 1035).
    """
    address = address.strip(b'.')
    encoded = []
    for label in address.split(b'.'):
        length = len(label)
        if length > 63:
            return None
        encoded.append(common.chr(length))
        encoded.append(label)
    encoded.append(b'\0')
    return b''.join(encoded)
def build_request(address, qtype):
    """Build a single-question DNS query packet with a random request id."""
    request_id = os.urandom(2)
    # Flags byte 0x01 sets RD (recursion desired); QDCOUNT=1, all other
    # section counts are zero.
    header = struct.pack('!BBHHHH', 1, 0, 1, 0, 0, 0)
    qtype_qclass = struct.pack('!HH', qtype, QCLASS_IN)
    return request_id + header + build_address(address) + qtype_qclass
def parse_ip(addrtype, data, length, offset):
    """Decode a record's RDATA into an IP string, a name, or raw bytes."""
    rdata = data[offset:offset + length]
    if addrtype == QTYPE_A:
        return socket.inet_ntop(socket.AF_INET, rdata)
    if addrtype == QTYPE_AAAA:
        return socket.inet_ntop(socket.AF_INET6, rdata)
    if addrtype in (QTYPE_CNAME, QTYPE_NS):
        # Names may use compression pointers, so parse against the whole
        # packet rather than just the RDATA slice.
        return parse_name(data, offset)[1]
    return rdata
def parse_name(data, offset):
    """Decode a (possibly compressed) DNS name starting at *offset*.

    Returns (bytes_consumed, name) where *name* is the dotted label bytes.
    *data* must be the whole packet so compression pointers can be chased.
    """
    p = offset
    labels = []
    l = common.ord(data[p])
    while l > 0:
        if (l & (128 + 64)) == (128 + 64):
            # Top two bits set: 14-bit compression pointer (RFC 1035 4.1.4);
            # the pointed-to suffix terminates the name.
            pointer = struct.unpack('!H', data[p:p + 2])[0]
            pointer &= 0x3FFF
            r = parse_name(data, pointer)
            labels.append(r[1])
            p += 2
            # pointer is the end
            return p - offset, b'.'.join(labels)
        else:
            # Plain label: one length byte followed by that many bytes.
            labels.append(data[p + 1:p + 1 + l])
            p += 1 + l
            l = common.ord(data[p])
    # +1 accounts for the terminating zero-length byte.
    return p - offset + 1, b'.'.join(labels)
# rfc1035
# record
# 1 1 1 1 1 1
# 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | |
# / /
# / NAME /
# | |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | TYPE |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | CLASS |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | TTL |
# | |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
# | RDLENGTH |
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--|
# / RDATA /
# / /
# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
def parse_record(data, offset, question=False):
    """Parse one resource record (or a question entry when question=True).

    Returns (bytes_consumed, record). For answers the record is a 5-tuple
    (name, ip, type, class, ttl); for questions it is the 6-tuple
    (name, None, type, class, None, None) — the asymmetric extra None is
    kept as-is since existing callers only read indices 0-3.
    """
    nlen, name = parse_name(data, offset)
    if not question:
        # TYPE(2) CLASS(2) TTL(4, signed per RFC) RDLENGTH(2) follow the name.
        record_type, record_class, record_ttl, record_rdlength = struct.unpack(
            '!HHiH', data[offset + nlen:offset + nlen + 10]
        )
        ip = parse_ip(record_type, data, record_rdlength, offset + nlen + 10)
        return nlen + 10 + record_rdlength, \
            (name, ip, record_type, record_class, record_ttl)
    else:
        # Question entries carry only TYPE(2) and CLASS(2) after the name.
        record_type, record_class = struct.unpack(
            '!HH', data[offset + nlen:offset + nlen + 4]
        )
        return nlen + 4, (name, None, record_type, record_class, None, None)
def parse_header(data):
    """Parse the fixed 12-byte DNS header.

    Returns (id, qr, tc, ra, rcode, qdcount, ancount, nscount, arcount)
    with the flag fields still masked (non-zero when set), or None when
    *data* is shorter than a full header.
    """
    if len(data) < 12:
        return None
    (res_id, flags1, flags2, res_qdcount, res_ancount,
     res_nscount, res_arcount) = struct.unpack('!HBBHHHH', data[:12])
    res_qr = flags1 & 128      # response flag
    res_tc = flags1 & 2        # truncated
    res_ra = flags2 & 128      # recursion available
    res_rcode = flags2 & 15    # response code
    return (res_id, res_qr, res_tc, res_ra, res_rcode, res_qdcount,
            res_ancount, res_nscount, res_arcount)
def parse_response(data):
    """Parse a raw DNS response packet into a DNSResponse, or None on error.

    Malformed or too-short packets yield None; unexpected exceptions are
    logged via shell.print_exception and also yield None.
    """
    try:
        if len(data) >= 12:
            header = parse_header(data)
            if not header:
                return None
            res_id, res_qr, res_tc, res_ra, res_rcode, res_qdcount, \
                res_ancount, res_nscount, res_arcount = header
            qds = []
            ans = []
            offset = 12
            for i in range(0, res_qdcount):
                l, r = parse_record(data, offset, True)
                offset += l
                if r:
                    qds.append(r)
            for i in range(0, res_ancount):
                l, r = parse_record(data, offset)
                offset += l
                if r:
                    ans.append(r)
            # Authority and additional sections are parsed only to advance
            # the offset; their records are discarded.
            for i in range(0, res_nscount):
                l, r = parse_record(data, offset)
                offset += l
            for i in range(0, res_arcount):
                l, r = parse_record(data, offset)
                offset += l
            response = DNSResponse()
            if qds:
                response.hostname = qds[0][0]
            # NOTE(review): question records carry None at index 1 (see
            # parse_record), so these tuples are (None, qtype, qclass).
            for an in qds:
                response.questions.append((an[1], an[2], an[3]))
            for an in ans:
                response.answers.append((an[1], an[2], an[3]))
            return response
        # Packets shorter than a header fall through and return None.
    except Exception as e:
        shell.print_exception(e)
        return None
def is_valid_hostname(hostname):
    """Return True if *hostname* (bytes) is a syntactically valid DNS name.

    Checks the 255-byte total limit and that every label matches
    VALID_HOSTNAME; a single trailing dot is allowed and stripped.
    """
    if len(hostname) > 255:
        return False
    # Bug fix: under Python 3 indexing bytes yields an int, so the old
    # ``hostname[-1] == b'.'`` comparison was always False and a trailing
    # dot was never stripped. Slicing behaves the same on Python 2 and 3
    # and is additionally safe for the empty string (which the old code
    # crashed on with IndexError).
    if hostname[-1:] == b'.':
        hostname = hostname[:-1]
    return all(VALID_HOSTNAME.match(x) for x in hostname.split(b'.'))
class DNSResponse(object):
    """Parsed DNS response: the queried hostname plus its record tuples."""

    def __init__(self):
        self.hostname = None
        # Both lists hold (addr, type, class) tuples.
        self.questions = []
        self.answers = []

    def __str__(self):
        return '%s: %s' % (self.hostname, str(self.answers))
STATUS_IPV4 = 0
STATUS_IPV6 = 1
class DNSResolver(object):
    """Asynchronous DNS client driven by the shadowsocks event loop.

    Lookup order in resolve(): literal IP, hosts-file entry, LRU cache,
    then a UDP query (type A first, AAAA as a fallback) sent to every
    configured server. Results are delivered via callbacks.
    """

    def __init__(self, server_list=None):
        # Servers come from *server_list* when given, else /etc/resolv.conf.
        self._loop = None
        self._hosts = {}  # hostname -> ip loaded from the hosts file
        self._hostname_status = {}  # hostname -> STATUS_IPV4 / STATUS_IPV6
        self._hostname_to_cb = {}  # hostname -> list of pending callbacks
        self._cb_to_hostname = {}  # reverse map used by remove_callback()
        self._cache = lru_cache.LRUCache(timeout=300)  # resolved answers
        self._sock = None
        if server_list is None:
            self._servers = None
            self._parse_resolv()
        else:
            self._servers = server_list
        self._parse_hosts()
        # TODO monitor hosts change and reload hosts
        # TODO parse /etc/gai.conf and follow its rules

    def _parse_resolv(self):
        """Collect IPv4 nameservers from /etc/resolv.conf, else Google DNS."""
        self._servers = []
        try:
            with open('/etc/resolv.conf', 'rb') as f:
                content = f.readlines()
                for line in content:
                    line = line.strip()
                    if line:
                        if line.startswith(b'nameserver'):
                            parts = line.split()
                            if len(parts) >= 2:
                                server = parts[1]
                                # Only IPv4 servers are supported (see the
                                # TODOs about IPv6 DNS servers below).
                                if common.is_ip(server) == socket.AF_INET:
                                    if type(server) != str:
                                        server = server.decode('utf8')
                                    self._servers.append(server)
        except IOError:
            pass
        if not self._servers:
            self._servers = ['8.8.4.4', '8.8.8.8']

    def _parse_hosts(self):
        """Load static hostname->ip entries from the system hosts file."""
        etc_path = '/etc/hosts'
        if 'WINDIR' in os.environ:
            etc_path = os.environ['WINDIR'] + '/system32/drivers/etc/hosts'
        try:
            with open(etc_path, 'rb') as f:
                for line in f.readlines():
                    line = line.strip()
                    parts = line.split()
                    if len(parts) >= 2:
                        ip = parts[0]
                        if common.is_ip(ip):
                            # One line can map several names to the same ip.
                            for i in range(1, len(parts)):
                                hostname = parts[i]
                                if hostname:
                                    self._hosts[hostname] = ip
        except IOError:
            self._hosts['localhost'] = '127.0.0.1'

    def add_to_loop(self, loop):
        """Create the UDP socket and register it (plus the periodic sweep)."""
        if self._loop:
            raise Exception('already add to loop')
        self._loop = loop
        # TODO when dns server is IPv6
        self._sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM,
                                   socket.SOL_UDP)
        self._sock.setblocking(False)
        loop.add(self._sock, eventloop.POLL_IN, self)
        loop.add_periodic(self.handle_periodic)

    def _call_callback(self, hostname, ip, error=None):
        """Fire and clear every callback pending for *hostname*."""
        callbacks = self._hostname_to_cb.get(hostname, [])
        for callback in callbacks:
            if callback in self._cb_to_hostname:
                del self._cb_to_hostname[callback]
            if ip or error:
                callback((hostname, ip), error)
            else:
                # No answer and no explicit error: report resolution failure.
                callback((hostname, None),
                         Exception('unknown hostname %s' % hostname))
        if hostname in self._hostname_to_cb:
            del self._hostname_to_cb[hostname]
        if hostname in self._hostname_status:
            del self._hostname_status[hostname]

    def _handle_data(self, data):
        """Process one DNS reply; fall back to an AAAA query if A failed."""
        response = parse_response(data)
        if response and response.hostname:
            hostname = response.hostname
            ip = None
            # Take the first A/AAAA answer in the IN class.
            for answer in response.answers:
                if answer[1] in (QTYPE_A, QTYPE_AAAA) and \
                        answer[2] == QCLASS_IN:
                    ip = answer[0]
                    break
            if not ip and self._hostname_status.get(hostname, STATUS_IPV6) \
                    == STATUS_IPV4:
                # The A query yielded nothing: retry once asking for AAAA.
                self._hostname_status[hostname] = STATUS_IPV6
                self._send_req(hostname, QTYPE_AAAA)
            else:
                if ip:
                    self._cache[hostname] = ip
                    self._call_callback(hostname, ip)
                elif self._hostname_status.get(hostname, None) == STATUS_IPV6:
                    # AAAA also failed: report failure to the callbacks.
                    for question in response.questions:
                        if question[1] == QTYPE_AAAA:
                            self._call_callback(hostname, None)
                            break

    def handle_event(self, sock, fd, event):
        """Event-loop hook: read replies, or rebuild the socket on error."""
        if sock != self._sock:
            return
        if event & eventloop.POLL_ERR:
            logging.error('dns socket err')
            self._loop.remove(self._sock)
            self._sock.close()
            # TODO when dns server is IPv6
            self._sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM,
                                       socket.SOL_UDP)
            self._sock.setblocking(False)
            self._loop.add(self._sock, eventloop.POLL_IN, self)
        else:
            data, addr = sock.recvfrom(1024)
            if addr[0] not in self._servers:
                # Ignore replies from unexpected sources (spoofing guard).
                logging.warn('received a packet other than our dns')
                return
            self._handle_data(data)

    def handle_periodic(self):
        """Periodic hook: evict expired cache entries."""
        self._cache.sweep()

    def remove_callback(self, callback):
        """Detach a pending callback (e.g. when its client went away)."""
        hostname = self._cb_to_hostname.get(callback)
        if hostname:
            del self._cb_to_hostname[callback]
            arr = self._hostname_to_cb.get(hostname, None)
            if arr:
                arr.remove(callback)
                if not arr:
                    # Last waiter gone: drop all state for this hostname.
                    del self._hostname_to_cb[hostname]
                    if hostname in self._hostname_status:
                        del self._hostname_status[hostname]

    def _send_req(self, hostname, qtype):
        """Send one query for *hostname*/*qtype* to every configured server."""
        req = build_request(hostname, qtype)
        for server in self._servers:
            logging.debug('resolving %s with type %d using server %s',
                          hostname, qtype, server)
            self._sock.sendto(req, (server, 53))

    def resolve(self, hostname, callback):
        """Resolve *hostname*, invoking callback((hostname, ip), error).

        Literal IPs, hosts-file entries and cached answers complete
        synchronously; otherwise an A query is sent and the callback is
        queued until a reply (or failure) arrives.
        """
        if type(hostname) != bytes:
            hostname = hostname.encode('utf8')
        if not hostname:
            callback(None, Exception('empty hostname'))
        elif common.is_ip(hostname):
            callback((hostname, hostname), None)
        elif hostname in self._hosts:
            logging.debug('hit hosts: %s', hostname)
            ip = self._hosts[hostname]
            callback((hostname, ip), None)
        elif hostname in self._cache:
            logging.debug('hit cache: %s', hostname)
            ip = self._cache[hostname]
            callback((hostname, ip), None)
        else:
            if not is_valid_hostname(hostname):
                callback(None, Exception('invalid hostname: %s' % hostname))
                return
            arr = self._hostname_to_cb.get(hostname, None)
            if not arr:
                # First waiter for this hostname: start with an A query.
                self._hostname_status[hostname] = STATUS_IPV4
                self._send_req(hostname, QTYPE_A)
                self._hostname_to_cb[hostname] = [callback]
                self._cb_to_hostname[callback] = hostname
            else:
                arr.append(callback)
                # TODO send again only if waited too long
                self._send_req(hostname, QTYPE_A)

    def close(self):
        """Deregister from the loop and close the UDP socket."""
        if self._sock:
            if self._loop:
                self._loop.remove_periodic(self.handle_periodic)
                self._loop.remove(self._sock)
            self._sock.close()
            self._sock = None
def test():
    """Ad-hoc manual smoke test: resolve a mix of valid and invalid names.

    Requires network access and a working DNS server; stops the loop after
    nine callbacks have fired.
    """
    dns_resolver = DNSResolver()
    loop = eventloop.EventLoop()
    dns_resolver.add_to_loop(loop)
    global counter
    counter = 0
    def make_callback():
        # Returns a fresh callback object each time so duplicate-callback
        # bookkeeping in the resolver is exercised.
        global counter
        def callback(result, error):
            global counter
            # TODO: what can we assert?
            print(result, error)
            counter += 1
            if counter == 9:
                dns_resolver.close()
                loop.stop()
        a_callback = callback
        return a_callback
    assert(make_callback() != make_callback())
    dns_resolver.resolve(b'google.com', make_callback())
    dns_resolver.resolve('google.com', make_callback())
    dns_resolver.resolve('example.com', make_callback())
    dns_resolver.resolve('ipv6.google.com', make_callback())
    dns_resolver.resolve('www.facebook.com', make_callback())
    dns_resolver.resolve('ns2.google.com', make_callback())
    dns_resolver.resolve('invalid.@!#$%^&$@.hostname', make_callback())
    # The next two exercise the over-long-label / over-long-name checks.
    dns_resolver.resolve('toooooooooooooooooooooooooooooooooooooooooooooooooo'
                         'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
                         'long.hostname', make_callback())
    dns_resolver.resolve('toooooooooooooooooooooooooooooooooooooooooooooooooo'
                         'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
                         'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
                         'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
                         'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
                         'ooooooooooooooooooooooooooooooooooooooooooooooooooo'
                         'long.hostname', make_callback())
    loop.run()
if __name__ == '__main__':
test()
|
citrix-openstack-build/cinder | refs/heads/master | cinder/volume/drivers/windows.py | 1 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Pedro Navarro Perez
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Volume driver for Windows Server 2012
This driver requires ISCSI target role installed
"""
import os
import sys
from cinder import exception
from cinder import flags
from cinder.openstack.common import cfg
from cinder.openstack.common import log as logging
from cinder.volume import driver
# Check needed for unit testing on Unix
if os.name == 'nt':
import wmi
LOG = logging.getLogger(__name__)
FLAGS = flags.FLAGS
# Driver-tunable configuration. NOTE(review): '\i' in the default is not a
# recognized escape, so the string contains a literal backslash; a raw
# string would make that explicit.
windows_opts = [
    cfg.StrOpt('windows_iscsi_lun_path',
               default='C:\iSCSIVirtualDisks',
               help='Path to store VHD backed volumes'),
]
FLAGS.register_opts(windows_opts)
class WindowsDriver(driver.ISCSIDriver):
"""Executes volume driver commands on Windows Storage server."""
def __init__(self):
super(WindowsDriver, self).__init__()
def do_setup(self, context):
"""Setup the Windows Volume driver.
Called one time by the manager after the driver is loaded.
Validate the flags we care about
"""
#Set the flags
self._conn_wmi = wmi.WMI(moniker='//./root/wmi')
self._conn_cimv2 = wmi.WMI(moniker='//./root/cimv2')
def check_for_setup_error(self):
"""Check that the driver is working and can communicate.
"""
#Invoking the portal an checking that is listening
wt_portal = self._conn_wmi.WT_Portal()[0]
listen = wt_portal.Listen
if not listen:
raise exception.VolumeBackendAPIException()
def initialize_connection(self, volume, connector):
"""Driver entry point to attach a volume to an instance.
"""
initiator_name = connector['initiator']
target_name = volume['provider_location']
cl = self._conn_wmi.__getattr__("WT_IDMethod")
wt_idmethod = cl.new()
wt_idmethod.HostName = target_name
wt_idmethod.Method = 4
wt_idmethod.Value = initiator_name
wt_idmethod.put()
#Getting the portal and port information
wt_portal = self._conn_wmi.WT_Portal()[0]
(address, port) = (wt_portal.Address, wt_portal.Port)
#Getting the host information
hosts = self._conn_wmi.WT_Host(Hostname=target_name)
host = hosts[0]
properties = {}
properties['target_discovered'] = False
properties['target_portal'] = '%s:%s' % (address, port)
properties['target_iqn'] = host.TargetIQN
properties['target_lun'] = 0
properties['volume_id'] = volume['id']
auth = volume['provider_auth']
if auth:
(auth_method, auth_username, auth_secret) = auth.split()
properties['auth_method'] = auth_method
properties['auth_username'] = auth_username
properties['auth_password'] = auth_secret
return {
'driver_volume_type': 'iscsi',
'data': properties,
}
def terminate_connection(self, volume, connector, **kwargs):
"""Driver entry point to unattach a volume from an instance.
Unmask the LUN on the storage system so the given intiator can no
longer access it.
"""
initiator_name = connector['initiator']
#DesAssigning target to initiators
wt_idmethod = self._conn_wmi.WT_IDMethod(HostName=volume['name'],
Method=4,
Value=initiator_name)
wt_idmethod.Delete_()
def create_volume(self, volume):
"""Driver entry point for creating a new volume."""
vhd_path = self._get_vhd_path(volume)
vol_name = volume['name']
#The WMI procedure returns a Generic failure
cl = self._conn_wmi.__getattr__("WT_Disk")
cl.NewWTDisk(DevicePath=vhd_path,
Description=vol_name,
SizeInMB=volume['size'] * 1024)
def _get_vhd_path(self, volume):
base_vhd_folder = FLAGS.windows_iscsi_lun_path
if not os.path.exists(base_vhd_folder):
LOG.debug(_('Creating folder %s '), base_vhd_folder)
os.makedirs(base_vhd_folder)
return os.path.join(base_vhd_folder, str(volume['name']) + ".vhd")
def delete_volume(self, volume):
"""Driver entry point for destroying existing volumes."""
vol_name = volume['name']
wt_disk = self._conn_wmi.WT_Disk(Description=vol_name)[0]
wt_disk.Delete_()
vhdfiles = self._conn_cimv2.query(
"Select * from CIM_DataFile where Name = '" +
self._get_vhd_path(volume) + "'")
if len(vhdfiles) > 0:
vhdfiles[0].Delete()
def create_snapshot(self, snapshot):
"""Driver entry point for creating a snapshot.
"""
#Getting WT_Snapshot class
vol_name = snapshot['volume_name']
snapshot_name = snapshot['name']
wt_disk = self._conn_wmi.WT_Disk(Description=vol_name)[0]
#API Calls gets Generic Failure
cl = self._conn_wmi.__getattr__("WT_Snapshot")
disk_id = wt_disk.WTD
out = cl.Create(WTD=disk_id)
#Setting description since it used as a KEY
wt_snapshot_created = self._conn_wmi.WT_Snapshot(Id=out[0])[0]
wt_snapshot_created.Description = snapshot_name
wt_snapshot_created.put()
def create_volume_from_snapshot(self, volume, snapshot):
"""Driver entry point for exporting snapshots as volumes."""
snapshot_name = snapshot['name']
wt_snapshot = self._conn_wmi.WT_Snapshot(Description=snapshot_name)[0]
disk_id = wt_snapshot.Export()[0]
wt_disk = self._conn_wmi.WT_Disk(WTD=disk_id)[0]
wt_disk.Description = volume['name']
wt_disk.put()
def delete_snapshot(self, snapshot):
"""Driver entry point for deleting a snapshot."""
snapshot_name = snapshot['name']
wt_snapshot = self._conn_wmi.WT_Snapshot(Description=snapshot_name)[0]
wt_snapshot.Delete_()
    def _do_export(self, _ctx, volume, ensure=False):
        """Do all steps to get disk exported as LUN 0 at separate target.

        :param volume: reference of volume to be exported
        :param ensure: if True, ignore errors caused by already existing
                       resources
        :return: the iSCSI target name, or None when the backing disk is
                 not found
        """
        target_name = "%s%s" % (FLAGS.iscsi_target_prefix, volume['name'])
        # iSCSI target creation; the WT_Host class object is fetched
        # explicitly via __getattr__ before invoking NewHost.
        try:
            cl = self._conn_wmi.__getattr__("WT_Host")
            cl.NewHost(HostName=target_name)
        except Exception as exc:
            # COM errors carry their detail text in excepinfo[2]; a
            # "The file exists" message means the target already exists,
            # which is acceptable only in ensure mode.
            excep_info = exc.com_error.excepinfo[2]
            if not ensure or excep_info.find(u'The file exists') == -1:
                raise
            else:
                LOG.info(_('Ignored target creation error "%s"'
                           ' while ensuring export'), exc)
        # Get the disk to add to the target.
        vol_name = volume['name']
        q = self._conn_wmi.WT_Disk(Description=vol_name)
        if not len(q):
            LOG.debug(_('Disk not found: %s'), vol_name)
            return None
        wt_disk = q[0]
        wt_host = self._conn_wmi.WT_Host(HostName=target_name)[0]
        wt_host.AddWTDisk(wt_disk.WTD)
        return target_name
def ensure_export(self, context, volume):
"""Driver entry point to get the export info for an existing volume."""
self._do_export(context, volume, ensure=True)
def create_export(self, context, volume):
"""Driver entry point to get the export info for a new volume."""
loc = self._do_export(context, volume, ensure=False)
return {'provider_location': loc}
    def remove_export(self, context, volume):
        """Driver entry point to remove an export for a volume.

        Detaches all disks from the volume's iSCSI target, then deletes
        the target itself.  Assumes the target exists (raises IndexError
        otherwise) -- TODO confirm callers guarantee this.
        """
        target_name = "%s%s" % (FLAGS.iscsi_target_prefix, volume['name'])
        # Get the iSCSI target by its well-known name.
        wt_host = self._conn_wmi.WT_Host(HostName=target_name)[0]
        wt_host.RemoveAllWTDisks()
        wt_host.Delete_()
def copy_image_to_volume(self, context, volume, image_service, image_id):
"""Fetch the image from image_service and write it to the volume."""
raise NotImplementedError()
def copy_volume_to_image(self, context, volume, image_service, image_meta):
"""Copy the volume to the specified image."""
raise NotImplementedError()
|
Timus1712/boto | refs/heads/master | boto/elastictranscoder/__init__.py | 18 | # Copyright (c) 2013 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from boto.regioninfo import RegionInfo
def regions():
    """
    Get all available regions for the AWS Elastic Transcoder service.

    :rtype: list
    :return: A list of :class:`boto.regioninfo.RegionInfo`
    """
    from boto.elastictranscoder.layer1 import ElasticTranscoderConnection
    # Every Elastic Transcoder endpoint follows the same naming pattern,
    # so the region list can be generated rather than spelled out.
    endpoint_fmt = 'elastictranscoder.%s.amazonaws.com'
    names = ['us-east-1', 'us-west-1', 'us-west-2',
             'ap-northeast-1', 'ap-southeast-1', 'eu-west-1']
    return [RegionInfo(name=n,
                       endpoint=endpoint_fmt % n,
                       connection_cls=ElasticTranscoderConnection)
            for n in names]
def connect_to_region(region_name, **kw_params):
    """Return a connection to the named region, or None if it is unknown."""
    match = next((r for r in regions() if r.name == region_name), None)
    return match.connect(**kw_params) if match is not None else None
|
cryptorinium/num2 | refs/heads/master | src/vm-builder-0.12.4+bzr489/VMBuilder/tests/plugin_tests.py | 2 | import unittest
import VMBuilder.plugins
from VMBuilder.exception import VMBuilderException
class TestPluginsSettings(unittest.TestCase):
    class VM(VMBuilder.plugins.Plugin):
        # Minimal fake VM: a Plugin with an empty config that serves as its
        # own context, so settings registered on it resolve against itself.
        def __init__(self, *args, **kwargs):
            self._config = {}
            self.context = self
    class TestPlugin(VMBuilder.plugins.Plugin):
        # Bare plugin subclass; inherits all behaviour under test unchanged.
        pass
    def setUp(self):
        """Create a fresh fake VM, a plugin bound to it, and a name counter."""
        self.vm = self.VM()
        self.plugin = self.TestPlugin(self.vm)
        # Counter used to generate unique setting names across sub-checks.
        self.i = 0
    def test_add_setting_group_and_setting(self):
        """Registering a group/setting and the default-vs-value precedence."""
        setting_group = self.plugin.setting_group('Test Setting Group')
        self.assertTrue(setting_group in self.plugin._setting_groups, "Setting not added correctly to plugin's registry of setting groups.")
        setting_group.add_setting('testsetting')
        self.assertEqual(self.vm.get_setting('testsetting'), None, "Setting's default value is not None.")
        self.vm.set_setting_default('testsetting', 'newdefault')
        self.assertEqual(self.vm.get_setting('testsetting'), 'newdefault', "Setting does not return custom default value when no value is set.")
        self.assertEqual(self.vm.get_setting_default('testsetting'), 'newdefault', "Setting does not return custom default value through get_setting_default().")
        # An explicitly set value must win over any default, even one
        # assigned after the value was set.
        self.vm.set_setting('testsetting', 'foo')
        self.assertEqual(self.vm.get_setting('testsetting'), 'foo', "Setting does not return set value.")
        self.vm.set_setting_default('testsetting', 'newerdefault')
        self.assertEqual(self.vm.get_setting('testsetting'), 'foo', "Setting does not return set value after setting new default value.")
def test_invalid_type_raises_exception(self):
setting_group = self.plugin.setting_group('Test Setting Group')
self.assertRaises(VMBuilderException, setting_group.add_setting, 'oddsetting', type='odd')
    def test_valid_options(self):
        """Valid-options lists constrain set_setting; None lifts the limit."""
        setting_group = self.plugin.setting_group('Test Setting Group')
        setting_group.add_setting('strsetting')
        # The valid-options argument must be a list, not a bare string.
        self.assertRaises(VMBuilderException, self.vm.set_setting_valid_options, 'strsetting', '')
        self.vm.set_setting_valid_options('strsetting', ['foo', 'bar'])
        self.assertEqual(self.vm.get_setting_valid_options('strsetting'), ['foo', 'bar'])
        self.vm.set_setting('strsetting', 'foo')
        # Values outside the allowed list are rejected...
        self.assertRaises(VMBuilderException, self.vm.set_setting, 'strsetting', 'baz')
        # ...until the restriction is cleared again with None.
        self.vm.set_setting_valid_options('strsetting', None)
        self.vm.set_setting('strsetting', 'baz')
    def test_invalid_type_setting_raises_exception(self):
        """Table-driven check of type validation for every setting type.

        For each declared type: 'good' values must be accepted verbatim,
        'fuzzy' entries (either bare values or (input, expected) pairs)
        must be coerced by set_setting_fuzzy, and 'bad' values must raise
        VMBuilderException from both set_setting and set_setting_default.
        """
        setting_group = self.plugin.setting_group('Test Setting Group')
        test_table = [{'type': 'str',
                       'good': [''],
                       'fuzzy': [''],
                       'bad': [0, True, ['foo']]
                       },
                      {'type': 'int',
                       'good': [0],
                       'fuzzy': [('0', 0), ('34', 34), (0, 0), (34, 34)],
                       'bad': ['', '0', True, ['foo']]
                       },
                      {'type': 'bool',
                       'good': [True],
                       'fuzzy': [(True, True), ('tRuE', True), ('oN', True), ('yEs', True), ('1', True),
                                 (False, False), ('fAlSe', False), ('oFf', False), ('nO', False), ('0', False)],
                       'bad': ['', 0, '0', ['foo'], '1']
                       },
                      {'type': 'list',
                       'good': [['foo']],
                       'fuzzy': [('main , universe,multiverse', ['main', 'universe', 'multiverse']),
                                 ('main:universe:multiverse', ['main', 'universe', 'multiverse']),
                                 ('''main:
universe:multiverse''', ['main', 'universe', 'multiverse']),
                                 ('', [])],
                       'bad': [True, '', 0, '0']
                       }]
        def get_new_setting(setting_type):
            # Unique name per sub-check so state never leaks between them.
            setting_name = '%ssetting%d' % (setting_type, self.i)
            self.i += 1
            setting_group.add_setting(setting_name, type=setting_type)
            return setting_name
        def try_bad_setting(setting_type, bad, setter):
            # A rejected value must raise from the given setter.
            setting_name = get_new_setting(setting_type)
            self.assertRaises(VMBuilderException, setter, setting_name, bad)
        def try_good_setting(setting_type, good, getter, setter):
            # An accepted value must round-trip through setter/getter;
            # tuples carry separate input and expected-output values.
            setting_name = get_new_setting(setting_type)
            if type(good) == tuple:
                in_value, out_value = good
            else:
                in_value, out_value = good, good
            # print setting_name, in_value
            setter(setting_name, in_value)
            self.assertEqual(getter(setting_name), out_value)
        for setting_type in test_table:
            for good in setting_type['good']:
                try_good_setting(setting_type['type'], good, self.vm.get_setting, self.vm.set_setting)
                try_good_setting(setting_type['type'], good, self.vm.get_setting, self.vm.set_setting_default)
                try_good_setting(setting_type['type'], good, self.vm.get_setting_default, self.vm.set_setting_default)
                try_good_setting(setting_type['type'], good, self.vm.get_setting, self.vm.set_setting_fuzzy)
            for fuzzy in setting_type['fuzzy']:
                try_good_setting(setting_type['type'], fuzzy, self.vm.get_setting, self.vm.set_setting_fuzzy)
            for bad in setting_type['bad']:
                try_bad_setting(setting_type['type'], bad, self.vm.set_setting)
                try_bad_setting(setting_type['type'], bad, self.vm.set_setting_default)
def test_set_setting_raises_exception_on_invalid_setting(self):
self.assertRaises(VMBuilderException, self.vm.set_setting_default, 'testsetting', 'newdefault')
def test_add_setting(self):
setting_group = self.plugin.setting_group('Test Setting Group')
|
shastikk/youtube-dl | refs/heads/master | youtube_dl/extractor/gdcvault.py | 77 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import (
compat_urllib_parse,
compat_urllib_request,
)
from ..utils import (
remove_end,
HEADRequest,
)
class GDCVaultIE(InfoExtractor):
    """Extractor for GDC Vault talks (gdcvault.com/play/<id>/<slug>).

    A talk may be served as a direct HTTP mp4 link, as mp4 renditions
    listed in an XML manifest, or as RTMP flv streams (separate slide-deck
    and speaker videos).  Some talks additionally require a site login.
    """
    _VALID_URL = r'https?://(?:www\.)?gdcvault\.com/play/(?P<id>\d+)/(?P<name>(\w|-)+)?'
    _NETRC_MACHINE = 'gdcvault'
    _TESTS = [
        {
            'url': 'http://www.gdcvault.com/play/1019721/Doki-Doki-Universe-Sweet-Simple',
            'md5': '7ce8388f544c88b7ac11c7ab1b593704',
            'info_dict': {
                'id': '1019721',
                'display_id': 'Doki-Doki-Universe-Sweet-Simple',
                'ext': 'mp4',
                'title': 'Doki-Doki Universe: Sweet, Simple and Genuine (GDC Next 10)'
            }
        },
        {
            'url': 'http://www.gdcvault.com/play/1015683/Embracing-the-Dark-Art-of',
            'info_dict': {
                'id': '1015683',
                'display_id': 'Embracing-the-Dark-Art-of',
                'ext': 'flv',
                'title': 'Embracing the Dark Art of Mathematical Modeling in AI'
            },
            'params': {
                'skip_download': True,  # Requires rtmpdump
            }
        },
        {
            'url': 'http://www.gdcvault.com/play/1015301/Thexder-Meets-Windows-95-or',
            'md5': 'a5eb77996ef82118afbbe8e48731b98e',
            'info_dict': {
                'id': '1015301',
                'display_id': 'Thexder-Meets-Windows-95-or',
                'ext': 'flv',
                'title': 'Thexder Meets Windows 95, or Writing Great Games in the Windows 95 Environment',
            },
            'skip': 'Requires login',
        },
        {
            'url': 'http://gdcvault.com/play/1020791/',
            'only_matching': True,
        }
    ]
    def _parse_mp4(self, xml_description):
        """Build the list of mp4 formats from the XML manifest.

        Returns None when the manifest has no <mp4video> entry, signalling
        the caller to fall back to the RTMP/flv layout.
        """
        video_formats = []
        mp4_video = xml_description.find('./metadata/mp4video')
        if mp4_video is None:
            return None
        # The mp4video URL supplies the host/root; each MBRVideo entry is
        # an "mp4:<path>" stream name resolved against that root.
        mobj = re.match(r'(?P<root>https?://.*?/).*', mp4_video.text)
        video_root = mobj.group('root')
        formats = xml_description.findall('./metadata/MBRVideos/MBRVideo')
        for format in formats:
            mobj = re.match(r'mp4\:(?P<path>.*)', format.find('streamName').text)
            url = video_root + mobj.group('path')
            vbr = format.find('bitrate').text
            video_formats.append({
                'url': url,
                'vbr': int(vbr),
            })
        return video_formats
    def _parse_flv(self, xml_description):
        """Build RTMP formats: optional audio tracks, slide video, speaker video."""
        formats = []
        akamai_url = xml_description.find('./metadata/akamaiHost').text
        audios = xml_description.find('./metadata/audios')
        if audios is not None:
            # Audio-only renditions (vcodec 'none').
            for audio in audios:
                formats.append({
                    'url': 'rtmp://%s/ondemand?ovpfv=1.1' % akamai_url,
                    'play_path': remove_end(audio.get('url'), '.flv'),
                    'ext': 'flv',
                    'vcodec': 'none',
                    'format_id': audio.get('code'),
                })
        slide_video_path = xml_description.find('./metadata/slideVideo').text
        formats.append({
            'url': 'rtmp://%s/ondemand?ovpfv=1.1' % akamai_url,
            'play_path': remove_end(slide_video_path, '.flv'),
            'ext': 'flv',
            'format_note': 'slide deck video',
            'quality': -2,
            'preference': -2,
            'format_id': 'slides',
        })
        speaker_video_path = xml_description.find('./metadata/speakerVideo').text
        formats.append({
            'url': 'rtmp://%s/ondemand?ovpfv=1.1' % akamai_url,
            'play_path': remove_end(speaker_video_path, '.flv'),
            'ext': 'flv',
            'format_note': 'speaker video',
            'quality': -1,
            'preference': -1,
            'format_id': 'speaker',
        })
        return formats
    def _login(self, webpage_url, display_id):
        """Log in with configured credentials and re-fetch the talk page.

        Returns the authenticated page HTML, or None when no credentials
        are available (a warning is emitted instead of failing).
        """
        (username, password) = self._get_login_info()
        if username is None or password is None:
            self.report_warning('It looks like ' + webpage_url + ' requires a login. Try specifying a username and password and try again.')
            return None
        mobj = re.match(r'(?P<root_url>https?://.*?/).*', webpage_url)
        login_url = mobj.group('root_url') + 'api/login.php'
        logout_url = mobj.group('root_url') + 'logout'
        login_form = {
            'email': username,
            'password': password,
        }
        request = compat_urllib_request.Request(login_url, compat_urllib_parse.urlencode(login_form))
        request.add_header('Content-Type', 'application/x-www-form-urlencoded')
        self._download_webpage(request, display_id, 'Logging in')
        start_page = self._download_webpage(webpage_url, display_id, 'Getting authenticated video page')
        self._download_webpage(logout_url, display_id, 'Logging out')
        return start_page
    def _real_extract(self, url):
        """Dispatch between direct-URL, mp4-manifest and flv layouts."""
        mobj = re.match(self._VALID_URL, url)
        video_id = mobj.group('id')
        display_id = mobj.group('name') or video_id
        webpage_url = 'http://www.gdcvault.com/play/' + video_id
        start_page = self._download_webpage(webpage_url, display_id)
        # Case 1: the page embeds a direct HTTP video URL in the flash
        # player variables.
        direct_url = self._search_regex(
            r's1\.addVariable\("file",\s*encodeURIComponent\("(/[^"]+)"\)\);',
            start_page, 'url', default=None)
        if direct_url:
            title = self._html_search_regex(
                r'<td><strong>Session Name</strong></td>\s*<td>(.*?)</td>',
                start_page, 'title')
            video_url = 'http://www.gdcvault.com' + direct_url
            # resolve the url so that we can detect the correct extension
            head = self._request_webpage(HEADRequest(video_url), video_id)
            video_url = head.geturl()
            return {
                'id': video_id,
                'display_id': display_id,
                'url': video_url,
                'title': title,
            }
        # Case 2: XML-manifest-driven player; its iframe points at the
        # manifest root.  A missing iframe usually means a login is needed.
        xml_root = self._html_search_regex(
            r'<iframe src="(?P<xml_root>.*?)player.html.*?".*?</iframe>',
            start_page, 'xml root', default=None)
        if xml_root is None:
            # Probably need to authenticate
            login_res = self._login(webpage_url, display_id)
            if login_res is None:
                self.report_warning('Could not login.')
            else:
                start_page = login_res
                # Grab the url from the authenticated page
                xml_root = self._html_search_regex(
                    r'<iframe src="(.*?)player.html.*?".*?</iframe>',
                    start_page, 'xml root')
        xml_name = self._html_search_regex(
            r'<iframe src=".*?\?xml=(.+?\.xml).*?".*?</iframe>',
            start_page, 'xml filename', default=None)
        if xml_name is None:
            # Fallback to the older format
            xml_name = self._html_search_regex(r'<iframe src=".*?\?xmlURL=xml/(?P<xml_file>.+?\.xml).*?".*?</iframe>', start_page, 'xml filename')
        xml_description_url = xml_root + 'xml/' + xml_name
        xml_description = self._download_xml(xml_description_url, display_id)
        video_title = xml_description.find('./metadata/title').text
        video_formats = self._parse_mp4(xml_description)
        if video_formats is None:
            # RTMP-only talk: build the flv formats instead.
            video_formats = self._parse_flv(xml_description)
        return {
            'id': video_id,
            'display_id': display_id,
            'title': video_title,
            'formats': video_formats,
        }
|
youfoh/webkit-efl | refs/heads/tizen | Tools/Scripts/webkitpy/tool/steps/update.py | 4 | # Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from webkitpy.tool.steps.abstractstep import AbstractStep
from webkitpy.tool.steps.options import Options
from webkitpy.common.system.deprecated_logging import log
class Update(AbstractStep):
    """Step that brings the working directory up to date before later steps."""

    @classmethod
    def options(cls):
        extra = [
            Options.non_interactive,
            Options.update,
            Options.quiet,
        ]
        return AbstractStep.options() + extra

    def run(self, state):
        # Honour --no-update style configuration: do nothing unless asked.
        if not self._options.update:
            return
        log("Updating working directory")
        self._tool.executive.run_and_throw_if_fail(
            self._update_command(),
            quiet=self._options.quiet,
            cwd=self._tool.scm().checkout_root)

    def _update_command(self):
        # The port decides what "update" means for this checkout.
        return self._tool.port().update_webkit_command(
            self._options.non_interactive)
|
sriki18/scipy | refs/heads/master | scipy/linalg/setup.py | 52 | #!/usr/bin/env python
from __future__ import division, print_function, absolute_import
import os
from os.path import join
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the scipy.linalg subpackage.

    Declares the f2py/Fortran/C extensions (BLAS/LAPACK wrappers, flinalg,
    interpolative decomposition, Cython BLAS/LAPACK, ...) against the
    LAPACK/BLAS installation detected by numpy.distutils.

    Raises NotFoundError when no lapack/blas resources are found.
    """
    from distutils.sysconfig import get_python_inc
    from numpy.distutils.system_info import get_info, NotFoundError, numpy_info
    from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
    from scipy._build_utils import (get_sgemv_fix, get_g77_abi_wrappers,
                                    split_fortran_files)
    config = Configuration('linalg', parent_package, top_path)
    lapack_opt = get_info('lapack_opt')
    if not lapack_opt:
        raise NotFoundError('no lapack/blas resources found')
    # Extract the ATLAS version from the ATLAS_INFO define, if present;
    # v[3:-3] strips the macro's quoting -- TODO confirm exact format.
    atlas_version = ([v[3:-3] for k, v in lapack_opt.get('define_macros', [])
                      if k == 'ATLAS_INFO']+[None])[0]
    if atlas_version:
        print(('ATLAS version: %s' % atlas_version))
    # fblas:
    sources = ['fblas.pyf.src']
    sources += get_g77_abi_wrappers(lapack_opt)
    sources += get_sgemv_fix(lapack_opt)
    config.add_extension('_fblas',
                         sources=sources,
                         depends=['fblas_l?.pyf.src'],
                         extra_info=lapack_opt
                         )
    # flapack:
    sources = ['flapack.pyf.src']
    sources += get_g77_abi_wrappers(lapack_opt)
    # The deprecated *gegv routines are shipped locally.
    dep_pfx = join('src', 'lapack_deprecations')
    deprecated_lapack_routines = [join(dep_pfx, c + 'gegv.f') for c in 'cdsz']
    sources += deprecated_lapack_routines
    config.add_extension('_flapack',
                         sources=sources,
                         depends=['flapack_user.pyf.src'],
                         extra_info=lapack_opt
                         )
    # cblas/clapack are only built against ATLAS.
    if atlas_version is not None:
        # cblas:
        config.add_extension('_cblas',
                             sources=['cblas.pyf.src'],
                             depends=['cblas.pyf.src', 'cblas_l1.pyf.src'],
                             extra_info=lapack_opt
                             )
        # clapack:
        config.add_extension('_clapack',
                             sources=['clapack.pyf.src'],
                             depends=['clapack.pyf.src'],
                             extra_info=lapack_opt
                             )
    # _flinalg:
    config.add_extension('_flinalg',
                         sources=[join('src', 'det.f'), join('src', 'lu.f')],
                         extra_info=lapack_opt
                         )
    # _interpolative:
    # These id_dist routines are split into one file each before building.
    routines_to_split = [
        'dfftb1',
        'dfftf1',
        'dffti1',
        'dsint1',
        'dzfft1',
        'id_srand',
        'idd_copyints',
        'idd_id2svd0',
        'idd_pairsamps',
        'idd_permute',
        'idd_permuter',
        'idd_random_transf0',
        'idd_random_transf0_inv',
        'idd_random_transf_init0',
        'idd_subselect',
        'iddp_asvd0',
        'iddp_rsvd0',
        'iddr_asvd0',
        'iddr_rsvd0',
        'idz_estrank0',
        'idz_id2svd0',
        'idz_permute',
        'idz_permuter',
        'idz_random_transf0_inv',
        'idz_random_transf_init0',
        'idz_random_transf_init00',
        'idz_realcomp',
        'idz_realcomplex',
        'idz_reco',
        'idz_subselect',
        'idzp_aid0',
        'idzp_aid1',
        'idzp_asvd0',
        'idzp_rsvd0',
        'idzr_asvd0',
        'idzr_reco',
        'idzr_rsvd0',
        'zfftb1',
        'zfftf1',
        'zffti1',
    ]
    print('Splitting linalg.interpolative Fortran source files')
    dirname = os.path.split(os.path.abspath(__file__))[0]
    fnames = split_fortran_files(join(dirname, 'src', 'id_dist', 'src'),
                                 routines_to_split)
    fnames = [join('src', 'id_dist', 'src', f) for f in fnames]
    config.add_extension('_interpolative', fnames + ["interpolative.pyf"],
                         extra_info=lapack_opt
                         )
    # _calc_lwork:
    config.add_extension('_calc_lwork',
                         [join('src', 'calc_lwork.f')],
                         extra_info=lapack_opt)
    # _solve_toeplitz:
    config.add_extension('_solve_toeplitz',
                         sources=[('_solve_toeplitz.c')],
                         include_dirs=[get_numpy_include_dirs()])
    config.add_data_dir('tests')
    # Cython BLAS/LAPACK
    config.add_data_files('cython_blas.pxd')
    config.add_data_files('cython_lapack.pxd')
    sources = ['_blas_subroutine_wrappers.f', '_lapack_subroutine_wrappers.f']
    sources += get_g77_abi_wrappers(lapack_opt)
    sources += get_sgemv_fix(lapack_opt)
    includes = numpy_info().get_include_dirs() + [get_python_inc()]
    config.add_library('fwrappers', sources=sources, include_dirs=includes)
    config.add_extension('cython_blas',
                         sources=['cython_blas.c'],
                         depends=['cython_blas.pyx', 'cython_blas.pxd',
                                  'fortran_defs.h', '_blas_subroutines.h'],
                         include_dirs=['.'],
                         libraries=['fwrappers'],
                         extra_info=lapack_opt)
    config.add_extension('cython_lapack',
                         sources=['cython_lapack.c'],
                         depends=['cython_lapack.pyx', 'cython_lapack.pxd',
                                  'fortran_defs.h', '_lapack_subroutines.h'],
                         include_dirs=['.'],
                         libraries=['fwrappers'],
                         extra_info=lapack_opt)
    config.add_extension('_decomp_update',
                         sources=['_decomp_update.c'])
    return config
if __name__ == '__main__':
    # Allow building/installing the linalg subpackage standalone.
    from numpy.distutils.core import setup
    from linalg_version import linalg_version
    setup(version=linalg_version,
          **configuration(top_path='').todict())
|
bsmrstu-warriors/Moytri--The-Drone-Aider | refs/heads/master | Lib/site-packages/numpy/testing/tests/test_utils.py | 53 | import warnings
import sys
import numpy as np
from numpy.testing import *
import unittest
class _GenericTest(object):
    """Shared scenarios for the assert_* array comparison helpers.

    Subclasses assign ``self._assert_func`` (e.g. assert_equal or
    assert_array_equal) in their setUp; the two ``_test_*`` helpers below
    drive that function.
    """

    def _test_equal(self, a, b):
        # The assertion function itself raises AssertionError on mismatch.
        self._assert_func(a, b)

    def _test_not_equal(self, a, b):
        # Expect the assertion function to raise; if it returns normally,
        # the comparison wrongly considered a and b equal.  (A dead local
        # flag that shadowed this logic has been removed.)
        try:
            self._assert_func(a, b)
        except AssertionError:
            pass
        else:
            raise AssertionError("a and b are found equal but are not")

    def test_array_rank1_eq(self):
        """Test two equal array of rank 1 are found equal."""
        a = np.array([1, 2])
        b = np.array([1, 2])
        self._test_equal(a, b)

    def test_array_rank1_noteq(self):
        """Test two different array of rank 1 are found not equal."""
        a = np.array([1, 2])
        b = np.array([2, 2])
        self._test_not_equal(a, b)

    def test_array_rank2_eq(self):
        """Test two equal array of rank 2 are found equal."""
        a = np.array([[1, 2], [3, 4]])
        b = np.array([[1, 2], [3, 4]])
        self._test_equal(a, b)

    def test_array_diffshape(self):
        """Test two arrays with different shapes are found not equal."""
        a = np.array([1, 2])
        b = np.array([[1, 2], [1, 2]])
        self._test_not_equal(a, b)

    def test_objarray(self):
        """Test object arrays."""
        # Builtin ``object`` dtype: ``np.object`` was merely an alias for
        # it and has been removed from modern numpy.
        a = np.array([1, 1], dtype=object)
        self._test_equal(a, 1)
class TestArrayEqual(_GenericTest, unittest.TestCase):
    """Scenarios for assert_array_equal across dtypes, ranks and layouts."""

    def setUp(self):
        self._assert_func = assert_array_equal

    def test_generic_rank1(self):
        """Test rank 1 array for all dtypes."""
        def foo(t):
            a = np.empty(2, t)
            a.fill(1)
            b = a.copy()
            c = a.copy()
            c.fill(0)
            self._test_equal(a, b)
            self._test_not_equal(c, b)
        # Test numeric types and object
        for t in '?bhilqpBHILQPfdgFDG':
            foo(t)
        # Test strings
        for t in ['S1', 'U1']:
            foo(t)

    def test_generic_rank3(self):
        """Test rank 3 array for all dtypes."""
        def foo(t):
            a = np.empty((4, 2, 3), t)
            a.fill(1)
            b = a.copy()
            c = a.copy()
            c.fill(0)
            self._test_equal(a, b)
            self._test_not_equal(c, b)
        # Test numeric types and object
        for t in '?bhilqpBHILQPfdgFDG':
            foo(t)
        # Test strings
        for t in ['S1', 'U1']:
            foo(t)

    def test_nan_array(self):
        """Test arrays with nan values in them."""
        a = np.array([1, 2, np.nan])
        b = np.array([1, 2, np.nan])
        self._test_equal(a, b)
        c = np.array([1, 2, 3])
        self._test_not_equal(c, b)

    def test_string_arrays(self):
        """Test two arrays with different shapes are found not equal."""
        a = np.array(['floupi', 'floupa'])
        b = np.array(['floupi', 'floupa'])
        self._test_equal(a, b)
        c = np.array(['floupipi', 'floupa'])
        self._test_not_equal(c, b)

    def test_recarrays(self):
        """Test record arrays."""
        # Builtin ``float``: ``np.float`` was merely an alias for it and
        # has been removed from modern numpy.
        a = np.empty(2, [('floupi', float), ('floupa', float)])
        a['floupi'] = [1, 2]
        a['floupa'] = [1, 2]
        b = a.copy()
        self._test_equal(a, b)
        c = np.empty(2, [('floupipi', float), ('floupa', float)])
        c['floupipi'] = a['floupi'].copy()
        c['floupa'] = a['floupa'].copy()
        self._test_not_equal(c, b)
class TestEqual(TestArrayEqual):
    """assert_equal additionally handles scalars, strings and complex items."""

    def setUp(self):
        self._assert_func = assert_equal

    def test_nan_items(self):
        self._assert_func(np.nan, np.nan)
        self._assert_func([np.nan], [np.nan])
        self._test_not_equal(np.nan, [np.nan])
        self._test_not_equal(np.nan, 1)

    def test_inf_items(self):
        self._assert_func(np.inf, np.inf)
        self._assert_func([np.inf], [np.inf])
        self._test_not_equal(np.inf, [np.inf])

    def test_non_numeric(self):
        self._assert_func('ab', 'ab')
        self._test_not_equal('ab', 'abb')

    def test_complex_item(self):
        self._assert_func(complex(1, 2), complex(1, 2))
        self._assert_func(complex(1, np.nan), complex(1, np.nan))
        self._test_not_equal(complex(1, np.nan), complex(1, 2))
        self._test_not_equal(complex(np.nan, 1), complex(1, np.nan))
        self._test_not_equal(complex(np.nan, np.inf), complex(np.nan, 2))

    def test_negative_zero(self):
        # Literal signed zeros: np.PZERO and np.NZERO were plain aliases
        # for 0.0 / -0.0 and have been removed from modern numpy.
        self._test_not_equal(0.0, -0.0)

    def test_complex(self):
        x = np.array([complex(1, 2), complex(1, np.nan)])
        y = np.array([complex(1, 2), complex(1, 2)])
        self._assert_func(x, x)
        self._test_not_equal(x, y)
class TestArrayAlmostEqual(_GenericTest, unittest.TestCase):
    """Checks for assert_array_almost_equal's decimal handling."""

    def setUp(self):
        self._assert_func = assert_array_almost_equal

    def test_simple(self):
        left = np.array([1234.2222])
        right = np.array([1234.2223])
        # Agreement holds at 3 and 4 decimals but not at 5.
        for decimal in (3, 4):
            self._assert_func(left, right, decimal=decimal)
        self.assertRaises(AssertionError,
                          lambda: self._assert_func(left, right, decimal=5))

    def test_nan(self):
        anan = np.array([np.nan])
        aone = np.array([1])
        ainf = np.array([np.inf])
        # nan only matches nan, in either argument position.
        self._assert_func(anan, anan)
        for lhs, rhs in ((anan, aone), (anan, ainf), (ainf, anan)):
            self.assertRaises(AssertionError,
                              lambda a=lhs, b=rhs: self._assert_func(a, b))
class TestAlmostEqual(_GenericTest, unittest.TestCase):
    """Checks for assert_almost_equal on scalar and complex items."""

    def setUp(self):
        self._assert_func = assert_almost_equal

    def test_nan_item(self):
        self._assert_func(np.nan, np.nan)
        # nan never matches anything but nan.
        for lhs, rhs in ((np.nan, 1), (np.nan, np.inf), (np.inf, np.nan)):
            self.assertRaises(AssertionError,
                              lambda a=lhs, b=rhs: self._assert_func(a, b))

    def test_inf_item(self):
        for value in (np.inf, -np.inf):
            self._assert_func(value, value)

    def test_simple_item(self):
        self._test_not_equal(1, 2)

    def test_complex_item(self):
        # Matching pairs, including nan-for-nan in matching components.
        for pair in ((complex(1, 2), complex(1, 2)),
                     (complex(1, np.nan), complex(1, np.nan)),
                     (complex(np.inf, np.nan), complex(np.inf, np.nan))):
            self._assert_func(*pair)
        # Mismatching pairs, including nan in differing components.
        for pair in ((complex(1, np.nan), complex(1, 2)),
                     (complex(np.nan, 1), complex(1, np.nan)),
                     (complex(np.nan, np.inf), complex(np.nan, 2))):
            self._test_not_equal(*pair)

    def test_complex(self):
        x = np.array([complex(1, 2), complex(1, np.nan)])
        z = np.array([complex(1, 2), complex(np.nan, 1)])
        y = np.array([complex(1, 2), complex(1, 2)])
        self._assert_func(x, x)
        self._test_not_equal(x, y)
        self._test_not_equal(x, z)
class TestApproxEqual(unittest.TestCase):
    """Checks for assert_approx_equal's significant-digit handling."""

    def setUp(self):
        self._assert_func = assert_approx_equal

    def test_simple_arrays(self):
        x = np.array([1234.22])
        y = np.array([1234.23])
        # Agreement holds at 5 and 6 significant digits but not at 7.
        for digits in (5, 6):
            self._assert_func(x, y, significant=digits)
        self.assertRaises(AssertionError,
                          lambda: self._assert_func(x, y, significant=7))

    def test_simple_items(self):
        x, y = 1234.22, 1234.23
        for digits in (4, 5, 6):
            self._assert_func(x, y, significant=digits)
        self.assertRaises(AssertionError,
                          lambda: self._assert_func(x, y, significant=7))

    def _check_nan_mismatches(self):
        # Shared body of the two nan scenarios below: nan matches only nan.
        anan = np.array(np.nan)
        aone = np.array(1)
        ainf = np.array(np.inf)
        self._assert_func(anan, anan)
        for lhs, rhs in ((anan, aone), (anan, ainf), (ainf, anan)):
            self.assertRaises(AssertionError,
                              lambda a=lhs, b=rhs: self._assert_func(a, b))

    def test_nan_array(self):
        self._check_nan_mismatches()

    def test_nan_items(self):
        self._check_nan_mismatches()
class TestRaises(unittest.TestCase):
    """Checks for the ``raises`` decorator helper."""

    def setUp(self):
        class MyException(Exception):
            pass
        self.e = MyException

    def raises_exception(self, e):
        """Helper that raises the given exception type."""
        raise e

    def does_not_raise_exception(self):
        """Helper that returns normally."""
        pass

    def test_correct_catch(self):
        # The decorator must swallow the declared exception.  (The unused
        # ``f = `` binding of the original was dropped.)
        raises(self.e)(self.raises_exception)(self.e)

    def test_wrong_exception(self):
        # A different exception must propagate through the decorator.
        try:
            raises(self.e)(self.raises_exception)(RuntimeError)
        except RuntimeError:
            return
        else:
            raise AssertionError("should have caught RuntimeError")

    def test_catch_no_raise(self):
        # If nothing is raised, the decorator itself must fail.
        try:
            raises(self.e)(self.does_not_raise_exception)()
        except AssertionError:
            return
        else:
            raise AssertionError("should have raised an AssertionError")
class TestWarns(unittest.TestCase):
    """Checks for assert_warns, including warnings-filter hygiene."""

    def test_warn(self):
        def f():
            warnings.warn("yo")
        before_filters = sys.modules['warnings'].filters[:]
        assert_warns(UserWarning, f)
        after_filters = sys.modules['warnings'].filters
        # Check that the warnings state is unchanged.  (Fixed the typo
        # "preserver" in the failure message.)
        assert_equal(before_filters, after_filters,
                     "assert_warns does not preserve warnings state")

    def test_warn_wrong_warning(self):
        def f():
            warnings.warn("yo", DeprecationWarning)
        failed = False
        filters = sys.modules['warnings'].filters[:]
        try:
            try:
                # Should raise an AssertionError
                assert_warns(UserWarning, f)
                failed = True
            except AssertionError:
                pass
        finally:
            # Restore the filter list that assert_warns may have modified.
            sys.modules['warnings'].filters = filters
        if failed:
            raise AssertionError("wrong warning caught by assert_warn")
class TestAssertAllclose(unittest.TestCase):
    """Checks for assert_allclose's rtol/atol behaviour."""

    def test_simple(self):
        small, tiny = 1e-3, 1e-9
        # Scalars: within absolute tolerance 1, but far apart relatively.
        assert_allclose(small, tiny, atol=1)
        self.assertRaises(AssertionError, assert_allclose, small, tiny)
        # Arrays: same story element-wise.
        a = np.array([small, tiny, small, tiny])
        b = np.array([small, tiny, small, small])
        assert_allclose(a, b, atol=1)
        self.assertRaises(AssertionError, assert_allclose, a, b)
        # A relative perturbation of 1e-8 passes the default rtol but
        # fails a stricter one.
        b[-1] = tiny * (1 + 1e-8)
        assert_allclose(a, b)
        self.assertRaises(AssertionError, assert_allclose, a, b,
                          rtol=1e-9)
        # rtol is measured against the second ("desired") argument.
        assert_allclose(6, 10, rtol=0.5)
        self.assertRaises(AssertionError, assert_allclose, 10, 6, rtol=0.5)
class TestArrayAlmostEqualNulp(unittest.TestCase):
    """Checks for assert_array_almost_equal_nulp (units in the last place)."""

    def test_simple(self):
        dev = np.random.randn(10)
        x = np.ones(10)
        y = x + dev * np.finfo(np.float64).eps
        assert_array_almost_equal_nulp(x, y, nulp=2 * np.max(dev))

    def test_simple2(self):
        # Doubling a value is far more than 1000 ulp away.
        x = np.random.randn(10)
        y = 2 * x
        def failure():
            return assert_array_almost_equal_nulp(x, y,
                                                  nulp=1000)
        self.assertRaises(AssertionError, failure)

    def test_big_float32(self):
        # Around 1e10 a float32 ulp is ~1024, so an offset of 1 is tiny.
        x = (1e10 * np.random.randn(10)).astype(np.float32)
        y = x + 1
        assert_array_almost_equal_nulp(x, y, nulp=1000)

    def test_big_float64(self):
        # float64 resolves far finer, so the same offset exceeds 1000 ulp.
        x = 1e10 * np.random.randn(10)
        y = x + 1
        def failure():
            assert_array_almost_equal_nulp(x, y, nulp=1000)
        self.assertRaises(AssertionError, failure)

    def test_complex(self):
        x = np.random.randn(10) + 1j * np.random.randn(10)
        y = x + 1
        def failure():
            assert_array_almost_equal_nulp(x, y, nulp=1000)
        self.assertRaises(AssertionError, failure)

    def test_complex2(self):
        x = np.random.randn(10)
        # Builtin ``complex``: ``np.complex`` was merely an alias for it
        # and has been removed from modern numpy.
        y = np.array(x, complex) + 1e-16 * np.random.randn(10)
        assert_array_almost_equal_nulp(x, y, nulp=1000)
class TestULP(unittest.TestCase):
    """Checks for assert_array_max_ulp."""

    def test_equal(self):
        x = np.random.randn(10)
        assert_array_max_ulp(x, x, maxulp=0)

    def test_single(self):
        # Generate 1 + small deviation, check that adding eps gives a few UNL
        x = np.ones(10).astype(np.float32)
        x += 0.01 * np.random.randn(10).astype(np.float32)
        eps = np.finfo(np.float32).eps
        assert_array_max_ulp(x, x+eps, maxulp=20)

    def test_double(self):
        # Generate 1 + small deviation, check that adding eps gives a few UNL.
        # Build the array in float64: the original used float32 here
        # (copy-paste from test_single), making the double-precision eps
        # comparison meaningless.
        x = np.ones(10).astype(np.float64)
        x += 0.01 * np.random.randn(10).astype(np.float64)
        eps = np.finfo(np.float64).eps
        assert_array_max_ulp(x, x+eps, maxulp=200)

    def test_inf(self):
        # inf is adjacent (in ulp terms) to the largest finite value.
        for dt in [np.float32, np.float64]:
            inf = np.array([np.inf]).astype(dt)
            big = np.array([np.finfo(dt).max])
            assert_array_max_ulp(inf, big, maxulp=200)

    def test_nan(self):
        # Test that nan is 'far' from small, tiny, inf, max and min
        for dt in [np.float32, np.float64]:
            if dt == np.float32:
                maxulp = 1e6
            else:
                maxulp = 1e12
            inf = np.array([np.inf]).astype(dt)
            nan = np.array([np.nan]).astype(dt)
            big = np.array([np.finfo(dt).max])
            tiny = np.array([np.finfo(dt).tiny])
            # Literal signed zeros: np.PZERO / np.NZERO were plain aliases
            # and have been removed from modern numpy.
            zero = np.array([0.0]).astype(dt)
            nzero = np.array([-0.0]).astype(dt)
            self.assertRaises(AssertionError,
                              lambda: assert_array_max_ulp(nan, inf,
                                                           maxulp=maxulp))
            self.assertRaises(AssertionError,
                              lambda: assert_array_max_ulp(nan, big,
                                                           maxulp=maxulp))
            self.assertRaises(AssertionError,
                              lambda: assert_array_max_ulp(nan, tiny,
                                                           maxulp=maxulp))
            self.assertRaises(AssertionError,
                              lambda: assert_array_max_ulp(nan, zero,
                                                           maxulp=maxulp))
            self.assertRaises(AssertionError,
                              lambda: assert_array_max_ulp(nan, nzero,
                                                           maxulp=maxulp))
# Allow running this test module directly; run_module_suite is supplied by
# the file's numpy.testing imports (outside this excerpt).
if __name__ == '__main__':
    run_module_suite()
|
sparkslabs/kamaelia_ | refs/heads/master | Code/Python/Apps/Europython09/App/MiniAxon-2.py | 3 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class microprocess(object):
    """Base class for cooperatively scheduled generator tasks.

    Subclasses override main() with a generator; each yield hands control
    back to the scheduler for one timeslice.
    """

    def __init__(self):
        super(microprocess, self).__init__()

    def main(self):
        """Default behaviour: a generator that yields once, then finishes."""
        yield 1
class printer(microprocess):
    # Microprocess that prints its tag forever, yielding control to the
    # scheduler before every print.  NOTE: uses the Python 2 print statement.
    def __init__(self, tag):
        super(printer, self).__init__()
        self.tag = tag  # text emitted on every scheduled timeslice
    def main(self):
        # Generator body: yield first so the scheduler stays in charge.
        while 1:
            yield 1
            print self.tag
class scheduler(microprocess):
    """Round-robin scheduler for microprocess generators.

    Runs up to 100 passes over the active generators.  A generator is
    rescheduled for the next pass unless it yields -1 or terminates
    (raises StopIteration).  Python 2 code: uses xrange and gen.next().
    """
    def __init__(self):
        super(scheduler, self).__init__()
        self.active = []    # generators being run during the current pass
        self.newqueue = []  # generators queued for the next pass
    def main(self):
        for i in xrange(100):
            for current in self.active:
                yield 1
                try:
                    result = current.next()
                    # Fixed: compare by value, not identity.  'result is not -1'
                    # only worked via CPython's small-int caching and is not
                    # guaranteed by the language.
                    if result != -1:
                        self.newqueue.append(current)
                except StopIteration:
                    # Generator finished; drop it from the rotation.
                    pass
            self.active = self.newqueue
            self.newqueue = []
    def activateMicroprocess(self, someprocess):
        """Convert someprocess to a generator and queue it for the next pass."""
        microthread = someprocess.main()
        self.newqueue.append(microthread)
# Demo: two printers sharing the scheduler cooperatively, so their output
# lines interleave.
X = printer("Hello World")
Y = printer("Game Over") # :-)
myscheduler = scheduler()
myscheduler.activateMicroprocess(X)
myscheduler.activateMicroprocess(Y)
# Drive the scheduler to completion (100 scheduling passes).
for _ in myscheduler.main():
    pass
|
arnaudsj/pybrain | refs/heads/master | pybrain/structure/connections/fullnotself.py | 31 | __author__ = 'Thomas Rueckstiess, ruecksti@in.tum.de'
from scipy import reshape, dot, outer, eye
from pybrain.structure.connections import FullConnection
class FullNotSelfConnection(FullConnection):
    """Connection which connects every element from the first module's
    output buffer to the second module's input buffer in a matrix multiplicative
    manner, EXCEPT the corresponding elements with the same index of each buffer
    (the diagonal of the parameter matrix is 0). Asserts that in and out dimensions
    are equal. """
    #:TODO: the values on the diagonal are counted as parameters but not used! FIX!

    def __init__(self, *args, **kwargs):
        FullConnection.__init__(self, *args, **kwargs)
        assert self.indim == self.outdim, \
            "Indim (%i) does not equal outdim (%i)" % (
            self.indim, self.outdim)

    def _maskedParams(self):
        """Return the weight matrix with its diagonal zeroed out.

        Shared by the forward and backward passes so the masking logic is
        defined in exactly one place."""
        return reshape(self.params, (self.outdim, self.indim)) \
            * (1 - eye(self.outdim))

    def _forwardImplementation(self, inbuf, outbuf):
        # outbuf += W_masked . inbuf
        outbuf += dot(self._maskedParams(), inbuf)

    def _backwardImplementation(self, outerr, inerr, inbuf):
        # Back-propagate through the transposed masked weights.
        inerr += dot(self._maskedParams().T, outerr)
        ds = self.derivs
        # NOTE: derivatives accumulate over the full (unmasked) matrix,
        # including the unused diagonal entries -- see the TODO above.
        ds += outer(inbuf, outerr).T.flatten()
|
greenlin/tushare | refs/heads/master | tushare/futures/ctp/futures/ApiStruct.py | 35 | # -*- coding: utf-8 -*-
from __future__ import absolute_import as _init
T = {}
T['TE_RESUME'] = 'int' #流重传方式
TERT_RESTART = 0 #从本交易日开始重传
TERT_RESUME = 1 #从上次收到的续传
TERT_QUICK = 2 #只传送登录后的流内容
T['TraderID'] = 'char[21]' #交易所交易员代码
T['InvestorID'] = 'char[13]' #投资者代码
T['BrokerID'] = 'char[11]' #经纪公司代码
T['BrokerAbbr'] = 'char[9]' #经纪公司简称
T['BrokerName'] = 'char[81]' #经纪公司名称
T['ExchangeInstID'] = 'char[31]' #合约在交易所的代码
T['OrderRef'] = 'char[13]' #报单引用
T['ParticipantID'] = 'char[11]' #会员代码
T['UserID'] = 'char[16]' #用户代码
T['Password'] = 'char[41]' #密码
T['ClientID'] = 'char[11]' #交易编码
T['InstrumentID'] = 'char[31]' #合约代码
T['MarketID'] = 'char[31]' #市场代码
T['ProductName'] = 'char[21]' #产品名称
T['ExchangeID'] = 'char[9]' #交易所代码
T['ExchangeName'] = 'char[31]' #交易所名称
T['ExchangeAbbr'] = 'char[9]' #交易所简称
T['ExchangeFlag'] = 'char[2]' #交易所标志
T['MacAddress'] = 'char[21]' #Mac地址
T['SystemID'] = 'char[21]' #系统编号
T['ExchangeProperty'] = 'char' #交易所属性
EXP_Normal = '0' #正常
EXP_GenOrderByTrade = '1' #根据成交生成报单
T['Date'] = 'char[9]' #日期
T['Time'] = 'char[9]' #时间
T['LongTime'] = 'char[13]' #长时间
T['InstrumentName'] = 'char[21]' #合约名称
T['SettlementGroupID'] = 'char[9]' #结算组代码
T['OrderSysID'] = 'char[21]' #报单编号
T['TradeID'] = 'char[21]' #成交编号
T['CommandType'] = 'char[65]' #DB命令类型
T['IPAddress'] = 'char[16]' #IP地址
T['IPPort'] = 'int' #IP端口
T['ProductInfo'] = 'char[11]' #产品信息
T['ProtocolInfo'] = 'char[11]' #协议信息
T['BusinessUnit'] = 'char[21]' #业务单元
T['DepositSeqNo'] = 'char[15]' #出入金流水号
T['IdentifiedCardNo'] = 'char[51]' #证件号码
T['IdCardType'] = 'char' #证件类型
ICT_EID = '0' #组织机构代码
ICT_IDCard = '1' #身份证
ICT_OfficerIDCard = '2' #军官证
ICT_PoliceIDCard = '3' #警官证
ICT_SoldierIDCard = '4' #士兵证
ICT_HouseholdRegister = '5' #户口簿
ICT_Passport = '6' #护照
ICT_TaiwanCompatriotIDCard = '7' #台胞证
ICT_HomeComingCard = '8' #回乡证
ICT_LicenseNo = '9' #营业执照号
ICT_TaxNo = 'A' #税务登记号
ICT_OtherCard = 'x' #其他证件
T['OrderLocalID'] = 'char[13]' #本地报单编号
T['UserName'] = 'char[81]' #用户名称
T['PartyName'] = 'char[81]' #参与人名称
T['ErrorMsg'] = 'char[81]' #错误信息
T['FieldName'] = 'char[2049]' #字段名
T['FieldContent'] = 'char[2049]' #字段内容
T['SystemName'] = 'char[41]' #系统名称
T['Content'] = 'char[501]' #消息正文
T['InvestorRange'] = 'char' #投资者范围
IR_All = '1' #所有
IR_Group = '2' #投资者组
IR_Single = '3' #单一投资者
T['DepartmentRange'] = 'char' #投资者范围
DR_All = '1' #所有
DR_Group = '2' #组织架构
DR_Single = '3' #单一投资者
T['DataSyncStatus'] = 'char' #数据同步状态
DS_Asynchronous = '1' #未同步
DS_Synchronizing = '2' #同步中
DS_Synchronized = '3' #已同步
T['BrokerDataSyncStatus'] = 'char' #经纪公司数据同步状态
BDS_Synchronized = '1' #已同步
BDS_Synchronizing = '2' #同步中
T['ExchangeConnectStatus'] = 'char' #交易所连接状态
ECS_NoConnection = '1' #没有任何连接
ECS_QryInstrumentSent = '2' #已经发出合约查询请求
ECS_GotInformation = '9' #已经获取信息
T['TraderConnectStatus'] = 'char' #交易所交易员连接状态
TCS_NotConnected = '1' #没有任何连接
TCS_Connected = '2' #已经连接
TCS_QryInstrumentSent = '3' #已经发出合约查询请求
TCS_SubPrivateFlow = '4' #订阅私有流
T['FunctionCode'] = 'char' #功能代码
FC_DataAsync = '1' #数据异步化
FC_ForceUserLogout = '2' #强制用户登出
FC_UserPasswordUpdate = '3' #变更管理用户口令
FC_BrokerPasswordUpdate = '4' #变更经纪公司口令
FC_InvestorPasswordUpdate = '5' #变更投资者口令
FC_OrderInsert = '6' #报单插入
FC_OrderAction = '7' #报单操作
FC_SyncSystemData = '8' #同步系统数据
FC_SyncBrokerData = '9' #同步经纪公司数据
FC_BachSyncBrokerData = 'A' #批量同步经纪公司数据
FC_SuperQuery = 'B' #超级查询
FC_ParkedOrderInsert = 'C' #报单插入
FC_ParkedOrderAction = 'D' #报单操作
FC_SyncOTP = 'E' #同步动态令牌
T['BrokerFunctionCode'] = 'char' #经纪公司功能代码
BFC_ForceUserLogout = '1' #强制用户登出
BFC_UserPasswordUpdate = '2' #变更用户口令
BFC_SyncBrokerData = '3' #同步经纪公司数据
BFC_BachSyncBrokerData = '4' #批量同步经纪公司数据
BFC_OrderInsert = '5' #报单插入
BFC_OrderAction = '6' #报单操作
BFC_AllQuery = '7' #全部查询
BFC_log = 'a' #系统功能:登入/登出/修改密码等
BFC_BaseQry = 'b' #基本查询:查询基础数据,如合约,交易所等常量
BFC_TradeQry = 'c' #交易查询:如查成交,委托
BFC_Trade = 'd' #交易功能:报单,撤单
BFC_Virement = 'e' #银期转账
BFC_Risk = 'f' #风险监控
BFC_Session = 'g' #查询/管理:查询会话,踢人等
BFC_RiskNoticeCtl = 'h' #风控通知控制
BFC_RiskNotice = 'i' #风控通知发送
BFC_BrokerDeposit = 'j' #察看经纪公司资金权限
BFC_QueryFund = 'k' #资金查询
BFC_QueryOrder = 'l' #报单查询
BFC_QueryTrade = 'm' #成交查询
BFC_QueryPosition = 'n' #持仓查询
BFC_QueryMarketData = 'o' #行情查询
BFC_QueryUserEvent = 'p' #用户事件查询
BFC_QueryRiskNotify = 'q' #风险通知查询
BFC_QueryFundChange = 'r' #出入金查询
BFC_QueryInvestor = 's' #投资者信息查询
BFC_QueryTradingCode = 't' #交易编码查询
BFC_ForceClose = 'u' #强平
BFC_PressTest = 'v' #压力测试
BFC_RemainCalc = 'w' #权益反算
BFC_NetPositionInd = 'x' #净持仓保证金指标
BFC_RiskPredict = 'y' #风险预算
BFC_DataExport = 'z' #数据导出
BFC_RiskTargetSetup = 'A' #风控指标设置
BFC_MarketDataWarn = 'B' #行情预警
BFC_QryBizNotice = 'C' #业务通知查询
BFC_CfgBizNotice = 'D' #业务通知模板设置
BFC_SyncOTP = 'E' #同步动态令牌
BFC_SendBizNotice = 'F' #发送业务通知
BFC_CfgRiskLevelStd = 'G' #风险级别标准设置
BFC_TbCommand = 'H' #交易终端应急功能
T['OrderActionStatus'] = 'char' #报单操作状态
OAS_Submitted = 'a' #已经提交
OAS_Accepted = 'b' #已经接受
OAS_Rejected = 'c' #已经被拒绝
T['OrderStatus'] = 'char' #报单状态
OST_AllTraded = '0' #全部成交
OST_PartTradedQueueing = '1' #部分成交还在队列中
OST_PartTradedNotQueueing = '2' #部分成交不在队列中
OST_NoTradeQueueing = '3' #未成交还在队列中
OST_NoTradeNotQueueing = '4' #未成交不在队列中
OST_Canceled = '5' #撤单
OST_Unknown = 'a' #未知
OST_NotTouched = 'b' #尚未触发
OST_Touched = 'c' #已触发
T['OrderSubmitStatus'] = 'char' #报单提交状态
OSS_InsertSubmitted = '0' #已经提交
OSS_CancelSubmitted = '1' #撤单已经提交
OSS_ModifySubmitted = '2' #修改已经提交
OSS_Accepted = '3' #已经接受
OSS_InsertRejected = '4' #报单已经被拒绝
OSS_CancelRejected = '5' #撤单已经被拒绝
OSS_ModifyRejected = '6' #改单已经被拒绝
T['PositionDate'] = 'char' #持仓日期
PSD_Today = '1' #今日持仓
PSD_History = '2' #历史持仓
T['PositionDateType'] = 'char' #持仓日期类型
PDT_UseHistory = '1' #使用历史持仓
PDT_NoUseHistory = '2' #不使用历史持仓
T['TradingRole'] = 'char' #交易角色
ER_Broker = '1' #代理
ER_Host = '2' #自营
ER_Maker = '3' #做市商
T['ProductClass'] = 'char' #产品类型
PC_Futures = '1' #期货
PC_Options = '2' #期权
PC_Combination = '3' #组合
PC_Spot = '4' #即期
PC_EFP = '5' #期转现
T['InstLifePhase'] = 'char' #合约生命周期状态
IP_NotStart = '0' #未上市
IP_Started = '1' #上市
IP_Pause = '2' #停牌
IP_Expired = '3' #到期
T['Direction'] = 'char' #买卖方向
D_Buy = '0' #买
D_Sell = '1' #卖
T['PositionType'] = 'char' #持仓类型
PT_Net = '1' #净持仓
PT_Gross = '2' #综合持仓
T['PosiDirection'] = 'char' #持仓多空方向
PD_Net = '1' #净
PD_Long = '2' #多头
PD_Short = '3' #空头
T['SysSettlementStatus'] = 'char' #系统结算状态
SS_NonActive = '1' #不活跃
SS_Startup = '2' #启动
SS_Operating = '3' #操作
SS_Settlement = '4' #结算
SS_SettlementFinished = '5' #结算完成
T['RatioAttr'] = 'char' #费率属性
RA_Trade = '0' #交易费率
RA_Settlement = '1' #结算费率
T['HedgeFlag'] = 'char' #投机套保标志
HF_Speculation = '1' #投机
HF_Arbitrage = '2' #套利
HF_Hedge = '3' #套保
T['BillHedgeFlag'] = 'char' #投机套保标志
BHF_Speculation = '1' #投机
BHF_Arbitrage = '2' #套利
BHF_Hedge = '3' #套保
T['ClientIDType'] = 'char' #交易编码类型
CIDT_Speculation = '1' #投机
CIDT_Arbitrage = '2' #套利
CIDT_Hedge = '3' #套保
T['OrderPriceType'] = 'char' #报单价格条件
OPT_AnyPrice = '1' #任意价
OPT_LimitPrice = '2' #限价
OPT_BestPrice = '3' #最优价
OPT_LastPrice = '4' #最新价
OPT_LastPricePlusOneTicks = '5' #最新价浮动上浮1个ticks
OPT_LastPricePlusTwoTicks = '6' #最新价浮动上浮2个ticks
OPT_LastPricePlusThreeTicks = '7' #最新价浮动上浮3个ticks
OPT_AskPrice1 = '8' #卖一价
OPT_AskPrice1PlusOneTicks = '9' #卖一价浮动上浮1个ticks
OPT_AskPrice1PlusTwoTicks = 'A' #卖一价浮动上浮2个ticks
OPT_AskPrice1PlusThreeTicks = 'B' #卖一价浮动上浮3个ticks
OPT_BidPrice1 = 'C' #买一价
OPT_BidPrice1PlusOneTicks = 'D' #买一价浮动上浮1个ticks
OPT_BidPrice1PlusTwoTicks = 'E' #买一价浮动上浮2个ticks
OPT_BidPrice1PlusThreeTicks = 'F' #买一价浮动上浮3个ticks
T['OffsetFlag'] = 'char' #开平标志
OF_Open = '0' #开仓
OF_Close = '1' #平仓
OF_ForceClose = '2' #强平
OF_CloseToday = '3' #平今
OF_CloseYesterday = '4' #平昨
OF_ForceOff = '5' #强减
OF_LocalForceClose = '6' #本地强平
T['ForceCloseReason'] = 'char' #强平原因
FCC_NotForceClose = '0' #非强平
FCC_LackDeposit = '1' #资金不足
FCC_ClientOverPositionLimit = '2' #客户超仓
FCC_MemberOverPositionLimit = '3' #会员超仓
FCC_NotMultiple = '4' #持仓非整数倍
FCC_Violation = '5' #违规
FCC_Other = '6' #其它
FCC_PersonDeliv = '7' #自然人临近交割
T['OrderType'] = 'char' #报单类型
ORDT_Normal = '0' #正常
ORDT_DeriveFromQuote = '1' #报价衍生
ORDT_DeriveFromCombination = '2' #组合衍生
ORDT_Combination = '3' #组合报单
ORDT_ConditionalOrder = '4' #条件单
ORDT_Swap = '5' #互换单
T['TimeCondition'] = 'char' #有效期类型
TC_IOC = '1' #立即完成,否则撤销
TC_GFS = '2' #本节有效
TC_GFD = '3' #当日有效
TC_GTD = '4' #指定日期前有效
TC_GTC = '5' #撤销前有效
TC_GFA = '6' #集合竞价有效
T['VolumeCondition'] = 'char' #成交量类型
VC_AV = '1' #任何数量
VC_MV = '2' #最小数量
VC_CV = '3' #全部数量
T['ContingentCondition'] = 'char' #触发条件
CC_Immediately = '1' #立即
CC_Touch = '2' #止损
CC_TouchProfit = '3' #止赢
CC_ParkedOrder = '4' #预埋单
CC_LastPriceGreaterThanStopPrice = '5' #最新价大于条件价
CC_LastPriceGreaterEqualStopPrice = '6' #最新价大于等于条件价
CC_LastPriceLesserThanStopPrice = '7' #最新价小于条件价
CC_LastPriceLesserEqualStopPrice = '8' #最新价小于等于条件价
CC_AskPriceGreaterThanStopPrice = '9' #卖一价大于条件价
CC_AskPriceGreaterEqualStopPrice = 'A' #卖一价大于等于条件价
CC_AskPriceLesserThanStopPrice = 'B' #卖一价小于条件价
CC_AskPriceLesserEqualStopPrice = 'C' #卖一价小于等于条件价
CC_BidPriceGreaterThanStopPrice = 'D' #买一价大于条件价
CC_BidPriceGreaterEqualStopPrice = 'E' #买一价大于等于条件价
CC_BidPriceLesserThanStopPrice = 'F' #买一价小于条件价
CC_BidPriceLesserEqualStopPrice = 'H' #买一价小于等于条件价
T['ActionFlag'] = 'char' #操作标志
AF_Delete = '0' #删除
AF_Modify = '3' #修改
T['TradingRight'] = 'char' #交易权限
TR_Allow = '0' #可以交易
TR_CloseOnly = '1' #只能平仓
TR_Forbidden = '2' #不能交易
T['OrderSource'] = 'char' #报单来源
OSRC_Participant = '0' #来自参与者
OSRC_Administrator = '1' #来自管理员
T['TradeType'] = 'char' #成交类型
TRDT_Common = '0' #普通成交
TRDT_OptionsExecution = '1' #期权执行
TRDT_OTC = '2' #OTC成交
TRDT_EFPDerived = '3' #期转现衍生成交
TRDT_CombinationDerived = '4' #组合衍生成交
T['PriceSource'] = 'char' #成交价来源
PSRC_LastPrice = '0' #前成交价
PSRC_Buy = '1' #买委托价
PSRC_Sell = '2' #卖委托价
T['InstrumentStatus'] = 'char' #合约交易状态
IS_BeforeTrading = '0' #开盘前
IS_NoTrading = '1' #非交易
IS_Continous = '2' #连续交易
IS_AuctionOrdering = '3' #集合竞价报单
IS_AuctionBalance = '4' #集合竞价价格平衡
IS_AuctionMatch = '5' #集合竞价撮合
IS_Closed = '6' #收盘
T['InstStatusEnterReason'] = 'char' #品种进入交易状态原因
IER_Automatic = '1' #自动切换
IER_Manual = '2' #手动切换
IER_Fuse = '3' #熔断
T['OrderActionRef'] = 'int' #报单操作引用
T['InstallCount'] = 'int' #安装数量
T['InstallID'] = 'int' #安装编号
T['ErrorID'] = 'int' #错误代码
T['SettlementID'] = 'int' #结算编号
T['Volume'] = 'int' #数量
T['FrontID'] = 'int' #前置编号
T['SessionID'] = 'int' #会话编号
T['SequenceNo'] = 'int' #序号
T['CommandNo'] = 'int' #DB命令序号
T['Millisec'] = 'int' #时间(毫秒)
T['VolumeMultiple'] = 'int' #合约数量乘数
T['TradingSegmentSN'] = 'int' #交易阶段编号
T['RequestID'] = 'int' #请求编号
T['Year'] = 'int' #年份
T['Month'] = 'int' #月份
T['Bool'] = 'int' #布尔型
T['Price'] = 'double' #价格
T['CombOffsetFlag'] = 'char[5]' #组合开平标志
T['CombHedgeFlag'] = 'char[5]' #组合投机套保标志
T['Ratio'] = 'double' #比率
T['Money'] = 'double' #资金
T['LargeVolume'] = 'double' #大额数量
T['SequenceSeries'] = 'short' #序列系列号
T['CommPhaseNo'] = 'short' #通讯时段编号
T['SequenceLabel'] = 'char[2]' #序列编号
T['Priority'] = 'int' #优先级
T['ContractCode'] = 'char[41]' #合同编号
T['City'] = 'char[41]' #市
T['IsStock'] = 'char[11]' #是否股民
T['Channel'] = 'char[51]' #渠道
T['Address'] = 'char[101]' #通讯地址
T['ZipCode'] = 'char[7]' #邮政编码
T['Telephone'] = 'char[41]' #联系电话
T['Fax'] = 'char[41]' #传真
T['Mobile'] = 'char[41]' #手机
T['EMail'] = 'char[41]' #电子邮件
T['Memo'] = 'char[161]' #备注
T['CompanyCode'] = 'char[51]' #企业代码
T['Website'] = 'char[51]' #网站地址
T['TaxNo'] = 'char[31]' #税务登记号
T['BatchStatus'] = 'char' #处理状态
BS_NoUpload = '1' #未上传
BS_Uploaded = '2' #已上传
BS_Failed = '3' #审核失败
T['PropertyID'] = 'char[33]' #属性代码
T['PropertyName'] = 'char[65]' #属性名称
T['LicenseNo'] = 'char[51]' #营业执照号
T['AgentID'] = 'char[13]' #经纪人代码
T['AgentName'] = 'char[41]' #经纪人名称
T['AgentGroupID'] = 'char[13]' #经纪人组代码
T['AgentGroupName'] = 'char[41]' #经纪人组名称
T['ReturnStyle'] = 'char' #按品种返还方式
RS_All = '1' #按所有品种
RS_ByProduct = '2' #按品种
T['ReturnPattern'] = 'char' #返还模式
RP_ByVolume = '1' #按成交手数
RP_ByFeeOnHand = '2' #按留存手续费
T['ReturnLevel'] = 'char' #返还级别
RL_Level1 = '1' #级别1
RL_Level2 = '2' #级别2
RL_Level3 = '3' #级别3
RL_Level4 = '4' #级别4
RL_Level5 = '5' #级别5
RL_Level6 = '6' #级别6
RL_Level7 = '7' #级别7
RL_Level8 = '8' #级别8
RL_Level9 = '9' #级别9
T['ReturnStandard'] = 'char' #返还标准
RSD_ByPeriod = '1' #分阶段返还
RSD_ByStandard = '2' #按某一标准
T['MortgageType'] = 'char' #质押类型
MT_Out = '0' #质出
MT_In = '1' #质入
T['InvestorSettlementParamID'] = 'char' #投资者结算参数代码
ISPI_BaseMargin = '1' #基础保证金
ISPI_LowestInterest = '2' #最低权益标准
ISPI_MortgageRatio = '4' #质押比例
ISPI_MarginWay = '5' #保证金算法
ISPI_BillDeposit = '9' #结算单结存是否包含质押
T['ExchangeSettlementParamID'] = 'char' #交易所结算参数代码
ESPI_MortgageRatio = '1' #质押比例
ESPI_OtherFundItem = '2' #分项资金导入项
ESPI_OtherFundImport = '3' #分项资金入交易所出入金
ESPI_SHFEDelivFee = '4' #上期所交割手续费收取方式
ESPI_DCEDelivFee = '5' #大商所交割手续费收取方式
ESPI_CFFEXMinPrepa = '6' #中金所开户最低可用金额
ESPI_CZCESettlementType = '7' #郑商所结算方式
ESPI_CFFEXDelivFee = '8' #中金所实物交割手续费收取方式
T['SystemParamID'] = 'char' #系统参数代码
SPI_InvestorIDMinLength = '1' #投资者代码最小长度
SPI_AccountIDMinLength = '2' #投资者帐号代码最小长度
SPI_UserRightLogon = '3' #投资者开户默认登录权限
SPI_SettlementBillTrade = '4' #投资者交易结算单成交汇总方式
SPI_TradingCode = '5' #统一开户更新交易编码方式
SPI_CheckFund = '6' #结算是否判断存在未复核的出入金和分项资金
SPI_CommModelRight = '7' #是否启用手续费模板数据权限
SPI_MarginModelRight = '9' #是否启用保证金率模板数据权限
SPI_IsStandardActive = '8' #是否规范用户才能激活
SPI_UploadSettlementFile = 'U' #上传的交易所结算文件路径
SPI_DownloadCSRCFile = 'D' #上报保证金监控中心文件路径
SPI_SettlementBillFile = 'S' #生成的结算单文件路径
SPI_CSRCOthersFile = 'C' #证监会文件标识
SPI_InvestorPhoto = 'P' #投资者照片路径
SPI_CSRCData = 'R' #全结经纪公司上传文件路径
SPI_InvestorPwdModel = 'I' #开户密码录入方式
SPI_CFFEXInvestorSettleFile = 'F' #投资者中金所结算文件下载路径
SPI_InvestorIDType = 'a' #投资者代码编码方式
SPI_FreezeMaxReMain = 'r' #休眠户最高权益
SPI_IsSync = 'A' #手续费相关操作实时上场开关
SPI_RelieveOpenLimit = 'O' #解除开仓权限限制
SPI_IsStandardFreeze = 'X' #是否规范用户才能休眠
T['TradeParamID'] = 'char' #交易系统参数代码
TPID_EncryptionStandard = 'E' #系统加密算法
TPID_RiskMode = 'R' #系统风险算法
TPID_RiskModeGlobal = 'G' #系统风险算法是否全局 0-否 1-是
TPID_modeEncode = 'P' #密码加密算法
TPID_tickMode = 'T' #价格小数位数参数
TPID_SingleUserSessionMaxNum = 'S' #用户最大会话数
TPID_LoginFailMaxNum = 'L' #最大连续登录失败数
TPID_IsAuthForce = 'A' #是否强制认证
T['SettlementParamValue'] = 'char[256]' #参数代码值
T['CounterID'] = 'char[33]' #计数器代码
T['InvestorGroupName'] = 'char[41]' #投资者分组名称
T['BrandCode'] = 'char[257]' #牌号
T['Warehouse'] = 'char[257]' #仓库
T['ProductDate'] = 'char[41]' #产期
T['Grade'] = 'char[41]' #等级
T['Classify'] = 'char[41]' #类别
T['Position'] = 'char[41]' #货位
T['Yieldly'] = 'char[41]' #产地
T['Weight'] = 'char[41]' #公定重量
T['SubEntryFundNo'] = 'int' #分项资金流水号
T['FileID'] = 'char' #文件标识
FI_SettlementFund = 'F' #资金数据
FI_Trade = 'T' #成交数据
FI_InvestorPosition = 'P' #投资者持仓数据
FI_SubEntryFund = 'O' #投资者分项资金数据
FI_CZCECombinationPos = 'C' #郑商所组合持仓数据
FI_CSRCData = 'R' #上报保证金监控中心数据
FI_CZCEClose = 'L' #郑商所平仓了结数据
FI_CZCENoClose = 'N' #郑商所非平仓了结数据
T['FileName'] = 'char[257]' #文件名称
T['FileType'] = 'char' #文件上传类型
FUT_Settlement = '0' #结算
FUT_Check = '1' #核对
T['FileFormat'] = 'char' #文件格式
FFT_Txt = '0' #文本文件(.txt)
FFT_Zip = '1' #压缩文件(.zip)
FFT_DBF = '2' #DBF文件(.dbf)
T['FileUploadStatus'] = 'char' #文件状态
FUS_SucceedUpload = '1' #上传成功
FUS_FailedUpload = '2' #上传失败
FUS_SucceedLoad = '3' #导入成功
FUS_PartSucceedLoad = '4' #导入部分成功
FUS_FailedLoad = '5' #导入失败
T['TransferDirection'] = 'char' #移仓方向
TD_Out = '0' #移出
TD_In = '1' #移入
T['UploadMode'] = 'char[21]' #上传文件类型
T['AccountID'] = 'char[13]' #投资者帐号
T['BankFlag'] = 'char' #银行统一标识类型
BF_ICBC = '1' #工商银行
BF_ABC = '2' #农业银行
BF_BC = '3' #中国银行
BF_CBC = '4' #建设银行
BF_BOC = '5' #交通银行
BF_Other = 'Z' #其他银行
T['BankAccount'] = 'char[41]' #银行账户
T['OpenName'] = 'char[61]' #银行账户的开户人名称
T['OpenBank'] = 'char[101]' #银行账户的开户行
T['BankName'] = 'char[101]' #银行名称
T['PublishPath'] = 'char[257]' #发布路径
T['OperatorID'] = 'char[65]' #操作员代码
T['MonthCount'] = 'int' #月份数量
T['AdvanceMonthArray'] = 'char[13]' #月份提前数组
T['DateExpr'] = 'char[1025]' #日期表达式
T['InstrumentIDExpr'] = 'char[41]' #合约代码表达式
T['InstrumentNameExpr'] = 'char[41]' #合约名称表达式
T['SpecialCreateRule'] = 'char' #特殊的创建规则
SC_NoSpecialRule = '0' #没有特殊创建规则
SC_NoSpringFestival = '1' #不包含春节
T['BasisPriceType'] = 'char' #挂牌基准价类型
IPT_LastSettlement = '1' #上一合约结算价
IPT_LaseClose = '2' #上一合约收盘价
T['ProductLifePhase'] = 'char' #产品生命周期状态
PLP_Active = '1' #活跃
PLP_NonActive = '2' #不活跃
PLP_Canceled = '3' #注销
T['DeliveryMode'] = 'char' #交割方式
DM_CashDeliv = '1' #现金交割
DM_CommodityDeliv = '2' #实物交割
T['LogLevel'] = 'char[33]' #日志级别
T['ProcessName'] = 'char[257]' #存储过程名称
T['OperationMemo'] = 'char[1025]' #操作摘要
T['FundIOType'] = 'char' #出入金类型
FIOT_FundIO = '1' #出入金
FIOT_Transfer = '2' #银期转帐
T['FundType'] = 'char' #资金类型
FT_Deposite = '1' #银行存款
FT_ItemFund = '2' #分项资金
FT_Company = '3' #公司调整
T['FundDirection'] = 'char' #出入金方向
FD_In = '1' #入金
FD_Out = '2' #出金
T['FundStatus'] = 'char' #资金状态
FS_Record = '1' #已录入
FS_Check = '2' #已复核
FS_Charge = '3' #已冲销
T['BillNo'] = 'char[15]' #票据号
T['BillName'] = 'char[33]' #票据名称
T['PublishStatus'] = 'char' #发布状态
PS_None = '1' #未发布
PS_Publishing = '2' #正在发布
PS_Published = '3' #已发布
T['EnumValueID'] = 'char[65]' #枚举值代码
T['EnumValueType'] = 'char[33]' #枚举值类型
T['EnumValueLabel'] = 'char[65]' #枚举值名称
T['EnumValueResult'] = 'char[33]' #枚举值结果
T['SystemStatus'] = 'char' #系统状态
ES_NonActive = '1' #不活跃
ES_Startup = '2' #启动
ES_Initialize = '3' #交易开始初始化
ES_Initialized = '4' #交易完成初始化
ES_Close = '5' #收市开始
ES_Closed = '6' #收市完成
ES_Settlement = '7' #结算
T['SettlementStatus'] = 'char' #结算状态
STS_Initialize = '0' #初始
STS_Settlementing = '1' #结算中
STS_Settlemented = '2' #已结算
STS_Finished = '3' #结算完成
T['RangeIntType'] = 'char[33]' #限定值类型
T['RangeIntFrom'] = 'char[33]' #限定值下限
T['RangeIntTo'] = 'char[33]' #限定值上限
T['FunctionID'] = 'char[25]' #功能代码
T['FunctionValueCode'] = 'char[257]' #功能编码
T['FunctionName'] = 'char[65]' #功能名称
T['RoleID'] = 'char[11]' #角色编号
T['RoleName'] = 'char[41]' #角色名称
T['Description'] = 'char[401]' #描述
T['CombineID'] = 'char[25]' #组合编号
T['CombineType'] = 'char[25]' #组合类型
T['InvestorType'] = 'char' #投资者类型
CT_Person = '0' #自然人
CT_Company = '1' #法人
CT_Fund = '2' #投资基金
T['BrokerType'] = 'char' #经纪公司类型
BT_Trade = '0' #交易会员
BT_TradeSettle = '1' #交易结算会员
T['RiskLevel'] = 'char' #风险等级
FAS_Low = '1' #低风险客户
FAS_Normal = '2' #普通客户
FAS_Focus = '3' #关注客户
FAS_Risk = '4' #风险客户
T['FeeAcceptStyle'] = 'char' #手续费收取方式
FAS_ByTrade = '1' #按交易收取
FAS_ByDeliv = '2' #按交割收取
FAS_None = '3' #不收
FAS_FixFee = '4' #按指定手续费收取
T['PasswordType'] = 'char' #密码类型
PWDT_Trade = '1' #交易密码
PWDT_Account = '2' #资金密码
T['Algorithm'] = 'char' #盈亏算法
AG_All = '1' #浮盈浮亏都计算
AG_OnlyLost = '2' #浮盈不计,浮亏计
AG_OnlyGain = '3' #浮盈计,浮亏不计
AG_None = '4' #浮盈浮亏都不计算
T['IncludeCloseProfit'] = 'char' #是否包含平仓盈利
ICP_Include = '0' #包含平仓盈利
ICP_NotInclude = '2' #不包含平仓盈利
T['AllWithoutTrade'] = 'char' #是否受可提比例限制
AWT_Enable = '0' #不受可提比例限制
AWT_Disable = '2' #受可提比例限制
AWT_NoHoldEnable = '3' #无仓不受可提比例限制
T['Comment'] = 'char[31]' #盈亏算法说明
T['Version'] = 'char[4]' #版本号
T['TradeCode'] = 'char[7]' #交易代码
T['TradeDate'] = 'char[9]' #交易日期
T['TradeTime'] = 'char[9]' #交易时间
T['TradeSerial'] = 'char[9]' #发起方流水号
T['TradeSerialNo'] = 'int' #发起方流水号
T['FutureID'] = 'char[11]' #期货公司代码
T['BankID'] = 'char[4]' #银行代码
T['BankBrchID'] = 'char[5]' #银行分中心代码
T['BankBranchID'] = 'char[11]' #分中心代码
T['OperNo'] = 'char[17]' #交易柜员
T['DeviceID'] = 'char[3]' #渠道标志
T['RecordNum'] = 'char[7]' #记录数
T['FutureAccount'] = 'char[22]' #期货资金账号
T['FuturePwdFlag'] = 'char' #资金密码核对标志
FPWD_UnCheck = '0' #不核对
FPWD_Check = '1' #核对
T['TransferType'] = 'char' #银期转账类型
TT_BankToFuture = '0' #银行转期货
TT_FutureToBank = '1' #期货转银行
T['FutureAccPwd'] = 'char[17]' #期货资金密码
T['CurrencyCode'] = 'char[4]' #币种
T['RetCode'] = 'char[5]' #响应代码
T['RetInfo'] = 'char[129]' #响应信息
T['TradeAmt'] = 'char[20]' #银行总余额
T['UseAmt'] = 'char[20]' #银行可用余额
T['FetchAmt'] = 'char[20]' #银行可取余额
T['TransferValidFlag'] = 'char' #转账有效标志
TVF_Invalid = '0' #无效或失败
TVF_Valid = '1' #有效
TVF_Reverse = '2' #冲正
T['CertCode'] = 'char[21]' #证件号码
T['Reason'] = 'char' #事由
RN_CD = '0' #错单
RN_ZT = '1' #资金在途
RN_QT = '2' #其它
T['FundProjectID'] = 'char[5]' #资金项目编号
T['Sex'] = 'char' #性别
SEX_None = '0' #未知
SEX_Man = '1' #男
SEX_Woman = '2' #女
T['Profession'] = 'char[41]' #职业
T['National'] = 'char[31]' #国籍
T['Province'] = 'char[16]' #省
T['Region'] = 'char[16]' #区
T['Country'] = 'char[16]' #国家
T['LicenseNO'] = 'char[33]' #营业执照
T['CompanyType'] = 'char[16]' #企业性质
T['BusinessScope'] = 'char[1001]' #经营范围
T['CapitalCurrency'] = 'char[4]' #注册资本币种
T['UserType'] = 'char' #用户类型
UT_Investor = '0' #投资者
UT_Operator = '1' #操作员
UT_SuperUser = '2' #管理员
T['RateType'] = 'char' #费率类型
RATETYPE_MarginRate = '2' #保证金率
T['NoteType'] = 'char' #通知类型
NOTETYPE_TradeSettleBill = '1' #交易结算单
NOTETYPE_TradeSettleMonth = '2' #交易结算月报
NOTETYPE_CallMarginNotes = '3' #追加保证金通知书
NOTETYPE_ForceCloseNotes = '4' #强行平仓通知书
NOTETYPE_TradeNotes = '5' #成交通知书
NOTETYPE_DelivNotes = '6' #交割通知书
T['SettlementStyle'] = 'char' #结算单方式
SBS_Day = '1' #逐日盯市
SBS_Volume = '2' #逐笔对冲
T['BrokerDNS'] = 'char[256]' #域名
T['Sentence'] = 'char[501]' #语句
T['SettlementBillType'] = 'char' #结算单类型
ST_Day = '0' #日报
ST_Month = '1' #月报
T['UserRightType'] = 'char' #客户权限类型
URT_Logon = '1' #登录
URT_Transfer = '2' #银期转帐
URT_EMail = '3' #邮寄结算单
URT_Fax = '4' #传真结算单
URT_ConditionOrder = '5' #条件单
T['MarginPriceType'] = 'char' #保证金价格类型
MPT_PreSettlementPrice = '1' #昨结算价
MPT_SettlementPrice = '2' #最新价
MPT_AveragePrice = '3' #成交均价
MPT_OpenPrice = '4' #开仓价
T['BillGenStatus'] = 'char' #结算单生成状态
BGS_None = '0' #未生成
BGS_NoGenerated = '1' #生成中
BGS_Generated = '2' #已生成
T['AlgoType'] = 'char' #算法类型
AT_HandlePositionAlgo = '1' #持仓处理算法
AT_FindMarginRateAlgo = '2' #寻找保证金率算法
T['HandlePositionAlgoID'] = 'char' #持仓处理算法编号
HPA_Base = '1' #基本
HPA_DCE = '2' #大连商品交易所
HPA_CZCE = '3' #郑州商品交易所
T['FindMarginRateAlgoID'] = 'char' #寻找保证金率算法编号
FMRA_Base = '1' #基本
FMRA_DCE = '2' #大连商品交易所
FMRA_CZCE = '3' #郑州商品交易所
T['HandleTradingAccountAlgoID'] = 'char' #资金处理算法编号
HTAA_Base = '1' #基本
HTAA_DCE = '2' #大连商品交易所
HTAA_CZCE = '3' #郑州商品交易所
T['PersonType'] = 'char' #联系人类型
PST_Order = '1' #指定下单人
PST_Open = '2' #开户授权人
PST_Fund = '3' #资金调拨人
PST_Settlement = '4' #结算单确认人
PST_Company = '5' #法人
PST_Corporation = '6' #法人代表
PST_LinkMan = '7' #投资者联系人
T['QueryInvestorRange'] = 'char' #查询范围
QIR_All = '1' #所有
QIR_Group = '2' #查询分类
QIR_Single = '3' #单一投资者
T['InvestorRiskStatus'] = 'char' #投资者风险状态
IRS_Normal = '1' #正常
IRS_Warn = '2' #警告
IRS_Call = '3' #追保
IRS_Force = '4' #强平
IRS_Exception = '5' #异常
T['LegID'] = 'int' #单腿编号
T['LegMultiple'] = 'int' #单腿乘数
T['ImplyLevel'] = 'int' #派生层数
T['ClearAccount'] = 'char[33]' #结算账户
T['OrganNO'] = 'char[6]' #结算账户
T['ClearbarchID'] = 'char[6]' #结算账户联行号
T['UserEventType'] = 'char' #用户事件类型
UET_Login = '1' #登录
UET_Logout = '2' #登出
UET_Trading = '3' #交易成功
UET_TradingError = '4' #交易失败
UET_UpdatePassword = '5' #修改密码
UET_Authenticate = '6' #客户端认证
UET_Other = '9' #其他
T['UserEventInfo'] = 'char[1025]' #用户事件信息
T['CloseStyle'] = 'char' #平仓方式
ICS_Close = '0' #先开先平
ICS_CloseToday = '1' #先平今再平昨
T['StatMode'] = 'char' #统计方式
SM_Non = '0' #----
SM_Instrument = '1' #按合约统计
SM_Product = '2' #按产品统计
SM_Investor = '3' #按投资者统计
T['ParkedOrderStatus'] = 'char' #预埋单状态
PAOS_NotSend = '1' #未发送
PAOS_Send = '2' #已发送
PAOS_Deleted = '3' #已删除
T['ParkedOrderID'] = 'char[13]' #预埋报单编号
T['ParkedOrderActionID'] = 'char[13]' #预埋撤单编号
T['VirDealStatus'] = 'char' #处理状态
VDS_Dealing = '1' #正在处理
VDS_DeaclSucceed = '2' #处理成功
T['OrgSystemID'] = 'char' #原有系统代码
ORGS_Standard = '0' #综合交易平台
ORGS_ESunny = '1' #易盛系统
ORGS_KingStarV6 = '2' #金仕达V6系统
T['VirTradeStatus'] = 'char' #交易状态
VTS_NaturalDeal = '0' #正常处理中
VTS_SucceedEnd = '1' #成功结束
VTS_FailedEND = '2' #失败结束
VTS_Exception = '3' #异常中
VTS_ManualDeal = '4' #已人工异常处理
VTS_MesException = '5' #通讯异常 ,请人工处理
VTS_SysException = '6' #系统出错,请人工处理
T['VirBankAccType'] = 'char' #银行帐户类型
VBAT_BankBook = '1' #存折
VBAT_BankCard = '2' #储蓄卡
VBAT_CreditCard = '3' #信用卡
T['VirementStatus'] = 'char' #银行帐户类型
VMS_Natural = '0' #正常
VMS_Canceled = '9' #销户
T['VirementAvailAbility'] = 'char' #有效标志
VAA_NoAvailAbility = '0' #未确认
VAA_AvailAbility = '1' #有效
VAA_Repeal = '2' #冲正
T['VirementTradeCode'] = 'char[7]' #交易代码
VTC_BankBankToFuture = '102001' #银行发起银行资金转期货
VTC_BankFutureToBank = '102002' #银行发起期货资金转银行
VTC_FutureBankToFuture = '202001' #期货发起银行资金转期货
VTC_FutureFutureToBank = '202002' #期货发起期货资金转银行
T['PhotoTypeName'] = 'char[41]' #影像类型名称
T['PhotoTypeID'] = 'char[5]' #影像类型代码
T['PhotoName'] = 'char[161]' #影像名称
T['TopicID'] = 'int' #主题代码
T['ReportTypeID'] = 'char[3]' #交易报告类型标识
T['CharacterID'] = 'char[5]' #交易特征代码
T['AMLParamID'] = 'char[21]' #参数代码
T['AMLInvestorType'] = 'char[3]' #投资者类型
T['AMLIdCardType'] = 'char[3]' #证件类型
T['AMLTradeDirect'] = 'char[3]' #资金进出方向
T['AMLTradeModel'] = 'char[3]' #资金进出方式
T['AMLParamID'] = 'char[21]' #参数代码
T['AMLOpParamValue'] = 'double' #业务参数代码值
T['AMLCustomerCardType'] = 'char[81]' #客户身份证件/证明文件类型
T['AMLInstitutionName'] = 'char[65]' #金融机构网点名称
T['AMLDistrictID'] = 'char[7]' #金融机构网点所在地区行政区划代码
T['AMLRelationShip'] = 'char[3]' #金融机构网点与大额交易的关系
T['AMLInstitutionType'] = 'char[3]' #金融机构网点代码类型
T['AMLInstitutionID'] = 'char[13]' #金融机构网点代码
T['AMLAccountType'] = 'char[5]' #账户类型
T['AMLTradingType'] = 'char[7]' #交易方式
T['AMLTransactClass'] = 'char[7]' #涉外收支交易分类与代码
T['AMLCapitalIO'] = 'char[3]' #资金收付标识
T['AMLSite'] = 'char[10]' #交易地点
T['AMLCapitalPurpose'] = 'char[129]' #资金用途
T['AMLReportType'] = 'char[2]' #报文类型
T['AMLSerialNo'] = 'char[5]' #编号
T['AMLStatus'] = 'char[2]' #状态
T['AMLGenStatus'] = 'char' #Aml生成方式
GEN_Program = '0' #程序生成
GEN_HandWork = '1' #人工生成
T['AMLSeqCode'] = 'char[65]' #业务标识号
T['AMLFileName'] = 'char[257]' #AML文件名
T['AMLMoney'] = 'double' #反洗钱资金
T['AMLFileAmount'] = 'int' #反洗钱资金
T['CFMMCKey'] = 'char[21]' #密钥类型(保证金监管)
T['CFMMCKeyKind'] = 'char' #动态密钥类别(保证金监管)
CFMMCKK_REQUEST = 'R' #主动请求更新
CFMMCKK_AUTO = 'A' #CFMMC自动更新
CFMMCKK_MANUAL = 'M' #CFMMC手动更新
T['AMLReportName'] = 'char[81]' #报文名称
T['IndividualName'] = 'char[51]' #个人姓名
T['CurrencyID'] = 'char[4]' #币种代码
T['CustNumber'] = 'char[36]' #客户编号
T['OrganCode'] = 'char[36]' #机构编码
T['OrganName'] = 'char[71]' #机构名称
T['SuperOrganCode'] = 'char[12]' #上级机构编码,即期货公司总部、银行总行
T['SubBranchID'] = 'char[31]' #分支机构
T['SubBranchName'] = 'char[71]' #分支机构名称
T['BranchNetCode'] = 'char[31]' #机构网点号
T['BranchNetName'] = 'char[71]' #机构网点名称
T['OrganFlag'] = 'char[2]' #机构标识
T['BankCodingForFuture'] = 'char[33]' #银行对期货公司的编码
T['BankReturnCode'] = 'char[7]' #银行对返回码的定义
T['PlateReturnCode'] = 'char[5]' #银期转帐平台对返回码的定义
T['BankSubBranchID'] = 'char[31]' #银行分支机构编码
T['FutureBranchID'] = 'char[31]' #期货分支机构编码
T['ReturnCode'] = 'char[7]' # return code
T['OperatorCode'] = 'char[17]' # operator
T['ClearDepID'] = 'char[6]' # institution ID of the institutional settlement account
T['ClearBrchID'] = 'char[6]' # correspondent-bank number of the institutional settlement account
T['ClearName'] = 'char[71]' # institutional settlement account name
T['BankAccountName'] = 'char[71]' # bank account name
T['InvDepID'] = 'char[6]' # institution ID of the institutional investor account
T['InvBrchID'] = 'char[6]' # correspondent-bank number of the institutional investor
T['MessageFormatVersion'] = 'char[36]' # message format version
T['Digest'] = 'char[36]' # digest
T['AuthenticData'] = 'char[129]' # authentication data
T['PasswordKey'] = 'char[129]' # secret key
T['FutureAccountName'] = 'char[129]' # futures account name
T['MobilePhone'] = 'char[21]' # mobile phone
T['FutureMainKey'] = 'char[129]' # futures company master key
T['FutureWorkKey'] = 'char[129]' # futures company working key
T['FutureTransKey'] = 'char[129]' # futures company transmission key
T['BankMainKey'] = 'char[129]' # bank master key
T['BankWorkKey'] = 'char[129]' # bank working key
T['BankTransKey'] = 'char[129]' # bank transmission key
T['BankServerDescription'] = 'char[129]' # bank server description
T['AddInfo'] = 'char[129]' # additional information
T['DescrInfoForReturnCode'] = 'char[129]' # description for the return code
T['CountryCode'] = 'char[21]' # country code
T['Serial'] = 'int' # serial number
T['PlateSerial'] = 'int' # platform serial number
T['BankSerial'] = 'char[13]' # bank serial number
T['CorrectSerial'] = 'int' # serial number of the reversed transaction
T['FutureSerial'] = 'int' # futures company serial number
T['ApplicationID'] = 'int' # application ID
T['BankProxyID'] = 'int' # bank proxy ID
T['FBTCoreID'] = 'int' # bank-futures transfer core system ID
T['ServerPort'] = 'int' # server port
T['RepealedTimes'] = 'int' # number of reversals already performed
T['RepealTimeInterval'] = 'int' # reversal time interval
T['TotalTimes'] = 'int' # cumulative daily transfer count
T['FBTRequestID'] = 'int' # request ID
T['TID'] = 'int' # transaction ID
T['TradeAmount'] = 'double' # trade amount (CNY)
T['CustFee'] = 'double' # fee receivable from customer (CNY)
T['FutureFee'] = 'double' # fee receivable from futures company (CNY)
T['SingleMaxAmt'] = 'double' # maximum amount per transaction
T['SingleMinAmt'] = 'double' # minimum amount per transaction
T['TotalAmt'] = 'double' # cumulative daily transfer limit
T['CertificationType'] = 'char' # ID document type
CFT_IDCard = '0' # national ID card
CFT_Passport = '1' # passport
CFT_OfficerIDCard = '2' # military officer ID
CFT_SoldierIDCard = '3' # soldier ID
CFT_HomeComingCard = '4' # home-return permit
CFT_HouseholdRegister = '5' # household register
CFT_LicenseNo = '6' # business license number
CFT_InstitutionCodeCard = '7' # organization code certificate
CFT_TempLicenseNo = '8' # temporary business license number
CFT_NoEnterpriseLicenseNo = '9' # private non-enterprise registration certificate
CFT_OtherCard = 'x' # other document
CFT_SuperDepAgree = 'a' # approval document from the competent authority
T['FileBusinessCode'] = 'char' # file business function
FBC_Others = '0' # others
FBC_TransferDetails = '1' # transfer transaction detail reconciliation
FBC_CustAccStatus = '2' # customer account status reconciliation
FBC_AccountTradeDetails = '3' # account transaction detail reconciliation
FBC_FutureAccountChangeInfoDetails = '4' # futures account info change detail reconciliation
FBC_CustMoneyDetail = '5' # customer fund ledger balance detail reconciliation
FBC_CustCancelAccountInfo = '6' # customer account-closing interest settlement detail reconciliation
FBC_CustMoneyResult = '7' # customer fund balance reconciliation result
FBC_OthersExceptionResult = '8' # other reconciliation exception result file
FBC_CustInterestNetMoneyDetails = '9' # customer net interest settlement details
FBC_CustMoneySendAndReceiveDetails = 'a' # customer fund delivery details
FBC_CorporationMoneyTotal = 'b' # corporate custodian bank fund delivery summary
FBC_MainbodyMoneyTotal = 'c' # inter-entity fund delivery summary
FBC_MainPartMonitorData = 'd' # head-office/branch balance supervision data
FBC_PreparationMoney = 'e' # custodian bank reserve fund balance
FBC_BankMoneyMonitorData = 'f' # co-custodian bank fund supervision data
T['CashExchangeCode'] = 'char' # remittance/cash flag
CEC_Exchange = '1' # remittance
CEC_Cash = '2' # cash
T['YesNoIndicator'] = 'char' # yes/no indicator
YNI_Yes = '0' # yes
YNI_No = '1' # no
T['BanlanceType'] = 'char' # balance type (key name sic)
BLT_CurrentMoney = '0' # current balance
BLT_UsableMoney = '1' # available balance
BLT_FetchableMoney = '2' # withdrawable balance
BLT_FreezeMoney = '3' # frozen balance
T['Gender'] = 'char' # gender
GD_Unknown = '0' # unknown
GD_Male = '1' # male
GD_Female = '2' # female
T['FeePayFlag'] = 'char' # fee payment flag
FPF_BEN = '0' # fee paid by the beneficiary
FPF_OUR = '1' # fee paid by the sender
FPF_SHA = '2' # sender pays sending fees, beneficiary pays receiving fees
T['PassWordKeyType'] = 'char' # key type
PWKT_ExchangeKey = '0' # exchange key
PWKT_PassWordKey = '1' # password key
PWKT_MACKey = '2' # MAC key
PWKT_MessageKey = '3' # message key
T['FBTPassWordType'] = 'char' # password type
PWT_Query = '0' # query
PWT_Fetch = '1' # withdrawal
PWT_Transfer = '2' # transfer
PWT_Trade = '3' # trade
T['FBTEncryMode'] = 'char' # encryption mode
EM_NoEncry = '0' # no encryption
EM_DES = '1' # DES
EM_3DES = '2' # 3DES
T['BankRepealFlag'] = 'char' # bank reversal flag
BRF_BankNotNeedRepeal = '0' # bank requires no automatic reversal
BRF_BankWaitingRepeal = '1' # bank pending automatic reversal
BRF_BankBeenRepealed = '2' # bank already auto-reversed
T['BrokerRepealFlag'] = 'char' # broker reversal flag
BRORF_BrokerNotNeedRepeal = '0' # broker requires no automatic reversal
BRORF_BrokerWaitingRepeal = '1' # broker pending automatic reversal
BRORF_BrokerBeenRepealed = '2' # broker already auto-reversed
T['InstitutionType'] = 'char' # institution type
TS_Bank = '0' # bank
TS_Future = '1' # futures broker
TS_Store = '2' # securities broker
T['LastFragment'] = 'char' # last-fragment flag
LF_Yes = '0' # is the last fragment
LF_No = '1' # not the last fragment
T['BankAccStatus'] = 'char' # bank account status
BAS_Normal = '0' # normal
BAS_Freeze = '1' # frozen
BAS_ReportLoss = '2' # reported lost
T['MoneyAccountStatus'] = 'char' # funds account status
MAS_Normal = '0' # normal
MAS_Cancel = '1' # closed
T['ManageStatus'] = 'char' # custody status
MSS_Point = '0' # designated custody
MSS_PrePoint = '1' # pre-designated
MSS_CancelPoint = '2' # designation revoked
T['SystemType'] = 'char' # application system type
SYT_FutureBankTransfer = '0' # bank-futures transfer
SYT_StockBankTransfer = '1' # bank-securities transfer
SYT_TheThirdPartStore = '2' # third-party custody
T['TxnEndFlag'] = 'char' # bank-futures transfer result flag
TEF_NormalProcessing = '0' # processing normally
TEF_Success = '1' # finished successfully
TEF_Failed = '2' # finished with failure
TEF_Abnormal = '3' # in exception
TEF_ManualProcessedForException = '4' # exception handled manually
TEF_CommuFailedNeedManualProcess = '5' # communication failure, manual handling required
TEF_SysErrorNeedManualProcess = '6' # system error, manual handling required
T['ProcessStatus'] = 'char' # bank-futures transfer service processing status
PSS_NotProcess = '0' # not processed
PSS_StartProcess = '1' # processing started
PSS_Finished = '2' # processing finished
T['CustType'] = 'char' # customer type
CUSTT_Person = '0' # natural person
CUSTT_Institution = '1' # institution
T['FBTTransferDirection'] = 'char' # bank-futures transfer direction
FBTTD_FromBankToFuture = '1' # deposit: bank to futures
FBTTD_FromFutureToBank = '2' # withdrawal: futures to bank
T['OpenOrDestroy'] = 'char' # account open/close category
OOD_Open = '1' # open account
OOD_Destroy = '0' # close account
T['AvailabilityFlag'] = 'char' # validity flag
AVAF_Invalid = '0' # unconfirmed
AVAF_Valid = '1' # valid
AVAF_Repeal = '2' # reversed
T['OrganType'] = 'char' # organization type
OT_Bank = '1' # bank agent
OT_Future = '2' # trading front-end
OT_PlateForm = '9' # bank-futures transfer platform administration
T['OrganLevel'] = 'char' # organization level
OL_HeadQuarters = '1' # bank head office or futures broker headquarters
OL_Branch = '2' # bank branch center or futures company sales office
T['ProtocalID'] = 'char' # protocol type (key name sic)
PID_FutureProtocal = '0' # futures broker protocol
PID_ICBCProtocal = '1' # ICBC protocol
PID_ABCProtocal = '2' # ABC (Agricultural Bank) protocol
PID_CBCProtocal = '3' # Bank of China protocol
PID_CCBProtocal = '4' # CCB (Construction Bank) protocol
PID_BOCOMProtocal = '5' # BOCOM (Bank of Communications) protocol
PID_FBTPlateFormProtocal = 'X' # bank-futures transfer platform protocol
T['ConnectMode'] = 'char' # socket connection mode
CM_ShortConnect = '0' # short-lived connection
CM_LongConnect = '1' # persistent connection
T['SyncMode'] = 'char' # socket communication mode
SRM_ASync = '0' # asynchronous
SRM_Sync = '1' # synchronous
T['BankAccType'] = 'char' # bank account type
BAT_BankBook = '1' # bank passbook
BAT_SavingCard = '2' # debit card
BAT_CreditCard = '3' # credit card
T['FutureAccType'] = 'char' # futures company account type
FAT_BankBook = '1' # bank passbook
FAT_SavingCard = '2' # debit card
FAT_CreditCard = '3' # credit card
T['OrganStatus'] = 'char' # connected-institution status
OS_Ready = '0' # enabled
OS_CheckIn = '1' # signed in
OS_CheckOut = '2' # signed out
OS_CheckFileArrived = '3' # reconciliation file arrived
OS_CheckDetail = '4' # reconciling
OS_DayEndClean = '5' # day-end cleanup
OS_Invalid = '9' # deregistered
T['CCBFeeMode'] = 'char' # CCB fee collection mode
CCBFM_ByAmount = '1' # charged by amount
CCBFM_ByMonth = '2' # charged monthly
T['CommApiType'] = 'char' # communication API type
CAPIT_Client = '1' # client side
CAPIT_Server = '2' # server side
CAPIT_UserApi = '3' # trading system UserApi
T['ServiceID'] = 'int' # service number
T['ServiceLineNo'] = 'int' # service line number
T['ServiceName'] = 'char[61]' # service name
T['LinkStatus'] = 'char' # link status
LS_Connected = '1' # connected
LS_Disconnected = '2' # not connected
T['CommApiPointer'] = 'int' # communication API pointer
T['PwdFlag'] = 'char' # password verification flag
BPWDF_NoCheck = '0' # no verification
BPWDF_BlankCheck = '1' # plaintext verification
BPWDF_EncryptCheck = '2' # ciphertext verification
T['SecuAccType'] = 'char' # futures account number type
SAT_AccountID = '1' # funds account number
SAT_CardID = '2' # funds card number
SAT_SHStockholderID = '3' # Shanghai shareholder account
SAT_SZStockholderID = '4' # Shenzhen shareholder account
T['TransferStatus'] = 'char' # transfer transaction status
TRFS_Normal = '0' # normal
TRFS_Repealed = '1' # reversed
T['SponsorType'] = 'char' # initiating party
SPTYPE_Broker = '0' # futures broker
SPTYPE_Bank = '1' # bank
T['ReqRspType'] = 'char' # request/response category
REQRSP_Request = '0' # request
REQRSP_Response = '1' # response
T['FBTUserEventType'] = 'char' # bank-futures transfer user event type
FBTUET_SignIn = '0' # sign in
FBTUET_FromBankToFuture = '1' # bank to futures
FBTUET_FromFutureToBank = '2' # futures to bank
FBTUET_OpenAccount = '3' # open account
FBTUET_CancelAccount = '4' # close account
FBTUET_ChangeAccount = '5' # change bank account
FBTUET_RepealFromBankToFuture = '6' # reverse bank-to-futures
FBTUET_RepealFromFutureToBank = '7' # reverse futures-to-bank
FBTUET_QueryBankAccount = '8' # query bank account
FBTUET_QueryFutureAccount = '9' # query futures account
FBTUET_SignOut = 'A' # sign out
FBTUET_SyncKey = 'B' # key synchronization
FBTUET_Other = 'Z' # other
T['BankIDByBank'] = 'char[21]' # bank's own code
T['DBOPSeqNo'] = 'int' # incrementing sequence number
T['TableName'] = 'char[61]' # FBT table name
T['PKName'] = 'char[201]' # FBT table primary key name
T['PKValue'] = 'char[501]' # FBT table primary key value
T['DBOperation'] = 'char' # record operation type
DBOP_Insert = '0' # insert
DBOP_Update = '1' # update
DBOP_Delete = '2' # delete
T['SyncFlag'] = 'char' # synchronization flag
SYNF_Yes = '0' # synchronized
SYNF_No = '1' # not synchronized
T['TargetID'] = 'char[4]' # synchronization target ID
T['SyncType'] = 'char' # synchronization type
SYNT_OneOffSync = '0' # one-off sync
SYNT_TimerSync = '1' # timed sync
SYNT_TimerFullSync = '2' # timed full sync
T['NotifyClass'] = 'char' # risk notification class
NC_NOERROR = '0' # normal
NC_Warn = '1' # warning
NC_Call = '2' # margin call
NC_Force = '3' # forced liquidation
NC_CHUANCANG = '4' # negative equity (account blown through)
NC_Exception = '5' # exception
T['RiskNofityInfo'] = 'char[257]' # customer risk notification message (key name sic)
T['ForceCloseSceneId'] = 'char[24]' # forced-liquidation scenario ID
T['ForceCloseType'] = 'char' # forced-liquidation order type
FCT_Manual = '0' # manual forced liquidation
FCT_Single = '1' # assisted forced liquidation, single investor
FCT_Group = '2' # assisted forced liquidation, batch of investors
T['InstrumentIDs'] = 'char[101]' # multiple product codes separated by '+', e.g. cu+zn
T['RiskNotifyMethod'] = 'char' # risk notification channel
RNM_System = '0' # system notification
RNM_SMS = '1' # SMS notification
RNM_EMail = '2' # e-mail notification
RNM_Manual = '3' # manual notification
T['RiskNotifyStatus'] = 'char' # risk notification status
RNS_NotGen = '0' # not generated
RNS_Generated = '1' # generated, not sent
RNS_SendError = '2' # send failed
RNS_SendOk = '3' # sent, not received
RNS_Received = '4' # received, not confirmed
RNS_Confirmed = '5' # confirmed
T['RiskUserEvent'] = 'char' # risk-control user operation event
RUE_ExportData = '0' # export data
T['ParamID'] = 'int' # parameter ID
T['ParamName'] = 'char[41]' # parameter name
T['ParamValue'] = 'char[41]' # parameter value
T['ConditionalOrderSortType'] = 'char' # conditional order index/sort key
COST_LastPriceAsc = '0' # by last price, ascending
COST_LastPriceDesc = '1' # by last price, descending
COST_AskPriceAsc = '2' # by ask price, ascending
COST_AskPriceDesc = '3' # by ask price, descending
COST_BidPriceAsc = '4' # by bid price, ascending
COST_BidPriceDesc = '5' # by bid price, descending
T['SendType'] = 'char' # submission status
UOAST_NoSend = '0' # not sent
UOAST_Sended = '1' # sent
UOAST_Generated = '2' # generated
UOAST_SendFail = '3' # submission failed
UOAST_Success = '4' # received successfully
UOAST_Fail = '5' # receive failed
UOAST_Cancel = '6' # submission cancelled
T['ClientIDStatus'] = 'char' # trading code status
UOACS_NoApply = '1' # not applied for
UOACS_Submited = '2' # application submitted
UOACS_Sended = '3' # application sent
UOACS_Success = '4' # completed
UOACS_Refuse = '5' # rejected
UOACS_Cancel = '6' # code revoked
T['IndustryID'] = 'char[17]' # industry code
T['QuestionID'] = 'char[5]' # special-information ID
T['QuestionContent'] = 'char[41]' # special-information description
T['OptionID'] = 'char[13]' # option ID
T['OptionContent'] = 'char[61]' # option description
T['QuestionType'] = 'char' # special-information type
QT_Radio = '1' # single choice
QT_Option = '2' # multiple choice
QT_Blank = '3' # fill-in
T['ProcessID'] = 'char[33]' # business flow serial number
T['SeqNo'] = 'int' # sequence number
T['UOAProcessStatus'] = 'char[3]' # process status
T['ProcessType'] = 'char[3]' # process function type
T['BusinessType'] = 'char' # business type
BT_Request = '1' # request
BT_Response = '2' # response
BT_Notice = '3' # notice
T['CfmmcReturnCode'] = 'char' # monitoring center return code
CRC_Success = '0' # success
CRC_Working = '1' # a process is already in progress for this customer
CRC_InfoFail = '2' # customer data check failed at the monitoring center
CRC_IDCardFail = '3' # real-name check failed at the monitoring center
CRC_OtherFail = '4' # other error
T['ExReturnCode'] = 'int' # exchange return code
T['ClientType'] = 'char' # client type
CfMMCCT_All = '0' # all
CfMMCCT_Person = '1' # individual
CfMMCCT_Company = '2' # organization
T['ExchangeIDType'] = 'char' # exchange ID
EIDT_SHFE = 'S' # Shanghai Futures Exchange
EIDT_CZCE = 'Z' # Zhengzhou Commodity Exchange
EIDT_DCE = 'D' # Dalian Commodity Exchange
EIDT_CFFEX = 'J' # China Financial Futures Exchange
T['ExClientIDType'] = 'char' # trading code type
ECIDT_Hedge = '1' # hedging
ECIDT_Arbitrage = '2' # arbitrage
ECIDT_Speculation = '3' # speculation
T['ClientClassify'] = 'char[11]' # client classification code
T['UOAOrganType'] = 'char[9]' # organization nature
T['UOACountryCode'] = 'char[9]' # country code
T['AreaCode'] = 'char[9]' # area code
T['FuturesID'] = 'char[21]' # code assigned to the client by the monitoring center
T['CffmcDate'] = 'char[11]' # date
T['CffmcTime'] = 'char[11]' # time
T['NocID'] = 'char[21]' # organization code
T['UpdateFlag'] = 'char' # update status
UF_NoUpdate = '0' # not updated
UF_Success = '1' # all information updated successfully
UF_Fail = '2' # full information update failed
UF_TCSuccess = '3' # trading code updated successfully
UF_TCFail = '4' # trading code update failed
UF_Cancel = '5' # discarded
T['ApplyOperateID'] = 'char' # application action
AOID_OpenInvestor = '1' # open investor account
AOID_ModifyIDCard = '2' # modify identity information
AOID_ModifyNoIDCard = '3' # modify general information
AOID_ApplyTradingCode = '4' # apply for trading code
AOID_CancelTradingCode = '5' # cancel trading code
AOID_CancelInvestor = '6' # close account
AOID_FreezeAccount = '8' # make account dormant
AOID_ActiveFreezeAccount = '9' # activate dormant account
T['ApplyStatusID'] = 'char' # application status
ASID_NoComplete = '1' # incomplete
ASID_Submited = '2' # submitted
ASID_Checked = '3' # reviewed
ASID_Refused = '4' # rejected
ASID_Deleted = '5' # deleted
T['SendMethod'] = 'char' # send method
UOASM_ByAPI = '1' # file transmission (NOTE(review): name vs. original comment look swapped — verify)
UOASM_ByFile = '2' # electronic transmission (NOTE(review): name vs. original comment look swapped — verify)
T['EventType'] = 'char[33]' # business operation type
T['EventMode'] = 'char' # operation method
EvM_ADD = '1' # add
EvM_UPDATE = '2' # modify
EvM_DELETE = '3' # delete
EvM_CHECK = '4' # review
EvM_COPY = '5' # copy
EvM_CANCEL = '6' # cancel
EvM_Reverse = '7' # reverse (write off)
T['UOAAutoSend'] = 'char' # unified account-opening application auto-send mode
UOAA_ASR = '1' # auto send and auto receive
UOAA_ASNR = '2' # auto send, no auto receive
UOAA_NSAR = '3' # no auto send, auto receive
UOAA_NSR = '4' # neither auto send nor auto receive
T['QueryDepth'] = 'int' # query depth
T['DataCenterID'] = 'int' # data center code
T['FlowID'] = 'char' # flow ID
EvM_InvestorGroupFlow = '1' # investor-to-investor-group mapping setup
EvM_InvestorRate = '2' # investor commission rate setup
EvM_InvestorCommRateModel = '3' # investor commission rate template relation setup
T['CheckLevel'] = 'char' # review level
CL_Zero = '0' # level-zero review
CL_One = '1' # level-one review
CL_Two = '2' # level-two review
T['CheckNo'] = 'int' # operation count
T['CheckStatus'] = 'char' # review status
CHS_Init = '0' # not reviewed
CHS_Checking = '1' # under review
CHS_Checked = '2' # reviewed
CHS_Refuse = '3' # rejected
CHS_Cancel = '4' # voided
T['UsedStatus'] = 'char' # effective status
CHU_Unused = '0' # not effective
CHU_Used = '1' # effective
CHU_Fail = '2' # activation failed
T['RateTemplateName'] = 'char[61]' # template name
T['PropertyString'] = 'char[2049]' # investor property field used for queries
T['BankAcountOrigin'] = 'char' # account origin (key name sic)
BAO_ByAccProperty = '0' # manual entry
BAO_ByFBTransfer = '1' # bank-futures transfer
T['MonthBillTradeSum'] = 'char' # monthly statement trade aggregation mode
MBTS_ByInstrument = '0' # same day, same instrument
MBTS_ByDayInsPrc = '1' # same day, same instrument, same price
MBTS_ByDayIns = '2' # same instrument
T['FBTTradeCodeEnum'] = 'char[7]' # bank-futures trade code enumeration
FTC_BankLaunchBankToBroker = '102001' # bank-initiated bank-to-futures
FTC_BrokerLaunchBankToBroker = '202001' # futures-initiated bank-to-futures
FTC_BankLaunchBrokerToBank = '102002' # bank-initiated futures-to-bank
FTC_BrokerLaunchBrokerToBank = '202002' # futures-initiated futures-to-bank
T['RateTemplateID'] = 'char[9]' # template code
T['RiskRate'] = 'char[21]' # risk ratio
T['Timestamp'] = 'int' # timestamp
T['InvestorIDRuleName'] = 'char[61]' # ID-range rule name
T['InvestorIDRuleExpr'] = 'char[513]' # ID-range rule expression
T['LastDrift'] = 'int' # last OTP drift value
T['LastSuccess'] = 'int' # last OTP success value
T['AuthKey'] = 'char[41]' # token key
T['SerialNumber'] = 'char[17]' # serial number
T['OTPType'] = 'char' # one-time-password token type
OTP_NONE = '0' # no OTP token
OTP_TOTP = '1' # time-based token
T['OTPVendorsID'] = 'char[2]' # OTP token vendor
T['OTPVendorsName'] = 'char[61]' # OTP token vendor name
T['OTPStatus'] = 'char' # OTP token status
OTPS_Unused = '0' # unused
OTPS_Used = '1' # used
OTPS_Disuse = '2' # deregistered
T['BrokerUserType'] = 'char' # brokerage firm user type
BUT_Investor = '1' # investor
BUT_BrokerUser = '2' # operator
T['FutureType'] = 'char' # futures type
FUTT_Commodity = '1' # commodity futures
FUTT_Financial = '2' # financial futures
T['FundEventType'] = 'char' # fund management operation type
FET_Restriction = '0' # transfer limit
FET_TodayRestriction = '1' # same-day transfer limit
FET_Transfer = '2' # broker transfer journal
FET_Credit = '3' # fund freeze
FET_InvestorWithdrawAlm = '4' # investor withdrawable-fund ratio
FET_BankRestriction = '5' # per-bank-account transfer limit
FET_Accountregister = '6' # bank-futures registered account
FET_ExchangeFundIO = '7' # exchange fund deposit/withdrawal
FET_InvestorFundIO = '8' # investor fund deposit/withdrawal
T['AccountSourceType'] = 'char' # funds account origin
AST_FBTransfer = '0' # bank-futures synchronization
AST_ManualEntry = '1' # manual entry
T['CodeSourceType'] = 'char' # trading code origin
CST_UnifyAccount = '0' # unified account opening (standardized)
CST_ManualEntry = '1' # manual entry (non-standardized)
T['UserRange'] = 'char' # operator range
UR_All = '0' # all
UR_Single = '1' # single operator
T['TimeSpan'] = 'char[9]' # time span
T['ImportSequenceID'] = 'char[17]' # OTP token import batch number
T['ByGroup'] = 'char' # trade statistics grouping by customer
BG_Investor = '2' # by investor
BG_Group = '1' # by group
T['TradeSumStatMode'] = 'char' # trade statistics scope mode
TSSM_Instrument = '1' # by instrument
TSSM_Product = '2' # by product
TSSM_Exchange = '3' # by exchange
T['ComType'] = 'int' # combination trade type
T['UserProductID'] = 'char[33]' # product identifier
T['UserProductName'] = 'char[65]' # product name
T['UserProductMemo'] = 'char[129]' # product memo
T['CSRCCancelFlag'] = 'char[2]' # new-or-change flag
T['CSRCDate'] = 'char[11]' # date
T['CSRCInvestorName'] = 'char[81]' # customer name
T['CSRCInvestorID'] = 'char[13]' # customer code
T['CSRCIdentifiedCardNo'] = 'char[41]' # ID document number
T['CSRCClientID'] = 'char[11]' # trading code
T['CSRCBankFlag'] = 'char[3]' # bank flag
T['CSRCBankAccount'] = 'char[23]' # bank account
T['CSRCOpenName'] = 'char[41]' # account opener
T['CSRCMemo'] = 'char[101]' # memo
T['CSRCTime'] = 'char[11]' # time
T['CSRCTradeID'] = 'char[21]' # trade serial number
T['CSRCExchangeInstID'] = 'char[7]' # instrument code
T['CSRCMortgageName'] = 'char[7]' # collateral name
T['CSRCReason'] = 'char[3]' # reason
T['IsSettlement'] = 'char[2]' # whether a non-clearing member
T['CSRCMoney'] = 'double' # funds
T['CSRCPrice'] = 'double' # price
T['CommModelName'] = 'char[161]' # commission rate template name
T['CommModelMemo'] = 'char[1025]' # commission rate template memo
T['ExprSetMode'] = 'char' # date-expression setting type
ESM_Relative = '1' # set relative to an existing rule
ESM_Typical = '2' # typical setting
T['RateInvestorRange'] = 'char' # investor range
RIR_All = '1' # company standard
RIR_Model = '2' # template
RIR_Single = '3' # single investor
T['AgentBrokerID'] = 'char[13]' # agent broker code
T['DRIdentityID'] = 'int' # trading center code
T['DRIdentityName'] = 'char[65]' # trading center name
T['DBLinkID'] = 'char[31]' # DBLink identifier
T['SyncDataStatus'] = 'char' # primary/secondary system data synchronization status
SDS_Initialize = '0' # not synchronized
SDS_Settlementing = '1' # synchronizing
SDS_Settlemented = '2' # synchronized
T['TradeSource'] = 'char' # trade source
TSRC_NORMAL = '0' # from normal exchange trade return
TSRC_QUERY = '1' # from query
T['FlexStatMode'] = 'char' # product/instrument statistics mode
FSM_Product = '1' # by product
FSM_Exchange = '2' # by exchange
FSM_All = '3' # all
T['ByInvestorRange'] = 'char' # investor-range statistics mode
BIR_Property = '1' # by property
BIR_All = '2' # all
T['SRiskRate'] = 'char[21]' # risk ratio
T['FBTBankID'] = 'char[2]' # bank identifier
T['SequenceNo12'] = 'int' # sequence number
T['PropertyInvestorRange'] = 'char' # investor range
PIR_All = '1' # all
PIR_Property = '2' # investor property
PIR_Single = '3' # single investor
T['FileStatus'] = 'char' # file status
FIS_NoCreate = '0' # not generated
FIS_Created = '1' # generated
FIS_Failed = '2' # generation failed
T['FileGenStyle'] = 'char' # file generation style
FGS_FileTransmit = '0' # distributed (pushed down)
FGS_FileGen = '1' # generated locally
T['SysOperMode'] = 'char' # system log operation method
SoM_Add = '1' # add
SoM_Update = '2' # modify
SoM_Delete = '3' # delete
SoM_Copy = '4' # copy
SoM_AcTive = '5' # activate
SoM_CanCel = '6' # cancel
SoM_ReSet = '7' # reset
T['SysOperType'] = 'char' # system log operation type
SoT_UpdatePassword = '0' # change operator password
SoT_UserDepartment = '1' # operator org-structure relation
SoT_RoleManager = '2' # role management
SoT_RoleFunction = '3' # role function setup
SoT_BaseParam = '4' # base parameter setup
SoT_SetUserID = '5' # set operator
SoT_SetUserRole = '6' # user role setup
SoT_UserIpRestriction = '7' # user IP restriction
SoT_DepartmentManager = '8' # org-structure management
SoT_DepartmentCopy = '9' # copy org structure to query classification
SoT_Tradingcode = 'A' # trading code management
SoT_InvestorStatus = 'B' # investor status maintenance
SoT_InvestorAuthority = 'C' # investor authority management
SoT_PropertySet = 'D' # property setup
SoT_ReSetInvestorPasswd = 'E' # reset investor password
SoT_InvestorPersonalityInfo = 'F' # investor personalization info maintenance
T['CSRCDataQueyType'] = 'char' # reported-data query type (key name sic)
CSRCQ_Current = '0' # query data reported on the current trading day
CSRCQ_History = '1' # query historically reported agent-broker data
T['FreezeStatus'] = 'char' # dormancy status
FRS_Normal = '1' # active
FRS_Freeze = '0' # dormant
T['StandardStatus'] = 'char' # standardization status
STST_Standard = '0' # standardized
STST_NonStandard = '1' # not standardized
T['CSRCFreezeStatus'] = 'char[2]' # dormancy status
T['RightParamType'] = 'char' # rights configuration type
RPT_Freeze = '1' # dormant account
RPT_FreezeActive = '2' # activate dormant account
RPT_OpenLimit = '3' # open-position permission restriction
RPT_RelieveOpenLimit = '4' # lift open-position permission restriction
T['RightTemplateID'] = 'char[9]' # template code
T['RightTemplateName'] = 'char[61]' # template name
T['DataStatus'] = 'char' # anti-money-laundering review data status
AMLDS_Normal = '0' # normal
AMLDS_Deleted = '1' # deleted
T['AMLCheckStatus'] = 'char' # review status
AMLCHS_Init = '0' # not reviewed
AMLCHS_Checking = '1' # under review
AMLCHS_Checked = '2' # reviewed
AMLCHS_RefuseReport = '3' # reporting refused
T['AmlDateType'] = 'char' # date type
AMLDT_DrawDay = '0' # check date
AMLDT_TouchDay = '1' # occurrence date
T['AmlCheckLevel'] = 'char' # review level
AMLCL_CheckLevel0 = '0' # level-zero review
AMLCL_CheckLevel1 = '1' # level-one review
AMLCL_CheckLevel2 = '2' # level-two review
AMLCL_CheckLevel3 = '3' # level-three review
T['AmlCheckFlow'] = 'char[2]' # AML data extraction review flow
T['DataType'] = 'char[129]' # data type
T['ExportFileType'] = 'char' # export file type
EFT_CSV = '0' # CSV
EFT_EXCEL = '1' # Excel
EFT_DBF = '2' # DBF
T['SettleManagerType'] = 'char' # settlement configuration type
SMT_Before = '1' # pre-settlement preparation
SMT_Settlement = '2' # settlement
SMT_After = '3' # post-settlement verification
SMT_Settlemented = '4' # post-settlement processing
T['SettleManagerID'] = 'char[33]' # settlement configuration code
T['SettleManagerName'] = 'char[129]' # settlement configuration name
T['SettleManagerLevel'] = 'char' # settlement configuration level
SML_Must = '1' # required
SML_Alarm = '2' # warning
SML_Prompt = '3' # prompt
SML_Ignore = '4' # no check
T['SettleManagerGroup'] = 'char' # module group
SMG_Exhcange = '1' # exchange verification (name sic)
SMG_ASP = '2' # internal verification
SMG_CSRC = '3' # reported-data verification
T['CheckResultMemo'] = 'char[1025]' # verification result memo
T['FunctionUrl'] = 'char[1025]' # function URL
T['AuthInfo'] = 'char[129]' # client authentication info
T['AuthCode'] = 'char[17]' # client authentication code
T['LimitUseType'] = 'char' # hedge quota usage type
LUT_Repeatable = '1' # reusable
LUT_Unrepeatable = '2' # not reusable
T['DataResource'] = 'char' # data source
DAR_Settle = '1' # this system
DAR_Exchange = '2' # exchange
DAR_CSRC = '3' # reported data
T['MarginType'] = 'char' # margin type
MGT_ExchMarginRate = '0' # exchange margin rate
MGT_InstrMarginRate = '1' # investor margin rate
MGT_InstrMarginRateTrade = '2' # investor trading margin rate
T['ActiveType'] = 'char' # effective type
ACT_Intraday = '1' # effective for the current day only
ACT_Long = '2' # effective long-term
T['MarginRateType'] = 'char' # conflicting margin-rate type
MRT_Exchange = '1' # exchange margin rate
MRT_Investor = '2' # investor margin rate
MRT_InvestorTrade = '3' # investor trading margin rate
T['BackUpStatus'] = 'char' # backup data status
BUS_UnBak = '0' # backup data not generated
BUS_BakUp = '1' # backup data being generated
BUS_BakUped = '2' # backup data generated
BUS_BakFail = '3' # backup data generation failed
T['InitSettlement'] = 'char' # settlement initialization status
SIS_UnInitialize = '0' # settlement initialization not started
SIS_Initialize = '1' # settlement initialization in progress
SIS_Initialized = '2' # settlement initialization finished
T['ReportStatus'] = 'char' # report data generation status
SRS_NoCreate = '0' # report data not generated
SRS_Create = '1' # report data being generated
SRS_Created = '2' # report data generated
SRS_CreateFail = '3' # report data generation failed
T['SaveStatus'] = 'char' # data archiving status
SSS_UnSaveData = '0' # archiving not finished
SSS_SaveDatad = '1' # archiving finished (name sic)
T['CombineType'] = 'char[25]' # combination type
T['SettArchiveStatus'] = 'char' # settlement-confirmation data archive status
SAS_UnArchived = '0' # data not archived
SAS_Archiving = '1' # data being archived
SAS_Archived = '2' # data archived
SAS_ArchiveFail = '3' # data archiving failed
T['CTPType'] = 'char' # CTP trading system type
CTPT_Unkown = '0' # unknown type (name sic)
CTPT_MainCenter = '1' # primary center
CTPT_BackUp = '2' # backup center
T['ToolID'] = 'char[9]' # tool code
T['ToolName'] = 'char[81]' # tool name
T['CloseDealType'] = 'char' # close-position processing type
CDT_Normal = '0' # normal
CDT_SpecFirst = '1' # speculative positions closed first
T['StartMode'] = 'char' # start mode
SM_Normal = '1' # normal
SM_Emerge = '2' # emergency
SM_Restore = '3' # recovery
T['LoginMode'] = 'char' # login mode
LM_Trade = '0' # trading
LM_Transfer = '1' # transfer
class BaseStruct(object):
	# Common base class for the API struct definitions below; provides a
	# generic "ClassName(field=value, ...)" repr driven by self._fields_.
	# NOTE(review): _fields_ is not defined anywhere in this chunk — it is
	# presumably attached elsewhere (e.g. by a generator or metaclass);
	# without it, repr() raises AttributeError. Verify before relying on it.
	def __repr__(self):
		return '%s(%s)' % (self.__class__.__name__, ', '.join('%s=%r'%(k,getattr(self,k)) for k,t in self._fields_))
class Dissemination(BaseStruct): # information dissemination
	def __init__(self, SequenceSeries=0, SequenceNo=0):
		# NOTE(review): ctor args are unused; attributes hold type-alias strings ('' = use field name) — likely code-generator metadata, verify.
		self.SequenceSeries = '' # sequence series number, short
		self.SequenceNo = '' # sequence number, int
class ReqUserLogin(BaseStruct): # user login request
	def __init__(self, TradingDay='', BrokerID='', UserID='', Password='', UserProductInfo='', InterfaceProductInfo='', ProtocolInfo='', MacAddress='', OneTimePassword='', ClientIPAddress=''):
		# NOTE(review): ctor args are unused; attributes hold type-alias strings ('' = use field name) — likely code-generator metadata, verify.
		self.TradingDay = 'Date' # trading day, char[9]
		self.BrokerID = '' # broker ID, char[11]
		self.UserID = '' # user ID, char[16]
		self.Password = '' # password, char[41]
		self.UserProductInfo = 'ProductInfo' # user-side product info, char[11]
		self.InterfaceProductInfo = 'ProductInfo' # interface-side product info, char[11]
		self.ProtocolInfo = '' # protocol info, char[11]
		self.MacAddress = '' # MAC address, char[21]
		self.OneTimePassword = 'Password' # one-time password, char[41]
		self.ClientIPAddress = 'IPAddress' # client terminal IP address, char[16]
class RspUserLogin(BaseStruct): # user login response
	def __init__(self, TradingDay='', LoginTime='', BrokerID='', UserID='', SystemName='', FrontID=0, SessionID=0, MaxOrderRef='', SHFETime='', DCETime='', CZCETime='', FFEXTime=''):
		# NOTE(review): ctor args are unused; attributes hold type-alias strings ('' = use field name) — likely code-generator metadata, verify.
		self.TradingDay = 'Date' # trading day, char[9]
		self.LoginTime = 'Time' # login success time, char[9]
		self.BrokerID = '' # broker ID, char[11]
		self.UserID = '' # user ID, char[16]
		self.SystemName = '' # trading system name, char[41]
		self.FrontID = '' # front ID, int
		self.SessionID = '' # session ID, int
		self.MaxOrderRef = 'OrderRef' # maximum order reference, char[13]
		self.SHFETime = 'Time' # SHFE time, char[9]
		self.DCETime = 'Time' # DCE time, char[9]
		self.CZCETime = 'Time' # CZCE time, char[9]
		self.FFEXTime = 'Time' # CFFEX time, char[9]
class UserLogout(BaseStruct): # user logout request
	def __init__(self, BrokerID='', UserID=''):
		# NOTE(review): ctor args are unused; attributes hold type-alias strings ('' = use field name) — likely code-generator metadata, verify.
		self.BrokerID = '' # broker ID, char[11]
		self.UserID = '' # user ID, char[16]
class ForceUserLogout(BaseStruct): # force trader logout
	def __init__(self, BrokerID='', UserID=''):
		# NOTE(review): ctor args are unused; attributes hold type-alias strings ('' = use field name) — likely code-generator metadata, verify.
		self.BrokerID = '' # broker ID, char[11]
		self.UserID = '' # user ID, char[16]
class ReqAuthenticate(BaseStruct): # client authentication request
	def __init__(self, BrokerID='', UserID='', UserProductInfo='', AuthCode=''):
		# NOTE(review): ctor args are unused; attributes hold type-alias strings ('' = use field name) — likely code-generator metadata, verify.
		self.BrokerID = '' # broker ID, char[11]
		self.UserID = '' # user ID, char[16]
		self.UserProductInfo = 'ProductInfo' # user-side product info, char[11]
		self.AuthCode = '' # authentication code, char[17]
class RspAuthenticate(BaseStruct): # client authentication response
	def __init__(self, BrokerID='', UserID='', UserProductInfo=''):
		# NOTE(review): ctor args are unused; attributes hold type-alias strings ('' = use field name) — likely code-generator metadata, verify.
		self.BrokerID = '' # broker ID, char[11]
		self.UserID = '' # user ID, char[16]
		self.UserProductInfo = 'ProductInfo' # user-side product info, char[11]
class AuthenticationInfo(BaseStruct): # client authentication info
	def __init__(self, BrokerID='', UserID='', UserProductInfo='', AuthInfo='', IsResult=0):
		# NOTE(review): ctor args are unused; attributes hold type-alias strings ('' = use field name) — likely code-generator metadata, verify.
		self.BrokerID = '' # broker ID, char[11]
		self.UserID = '' # user ID, char[16]
		self.UserProductInfo = 'ProductInfo' # user-side product info, char[11]
		self.AuthInfo = '' # authentication info, char[129]
		self.IsResult = 'Bool' # whether this is an authentication result, int
class TransferHeader(BaseStruct): # bank-futures transfer message header
	def __init__(self, Version='', TradeCode='', TradeDate='', TradeTime='', TradeSerial='', FutureID='', BankID='', BankBrchID='', OperNo='', DeviceID='', RecordNum='', SessionID=0, RequestID=0):
		# NOTE(review): ctor args are unused; attributes hold type-alias strings ('' = use field name) — likely code-generator metadata, verify.
		self.Version = '' # version number, constant "1.0", char[4]
		self.TradeCode = '' # trade code, required, char[7]
		self.TradeDate = '' # trade date, required, format yyyymmdd, char[9]
		self.TradeTime = '' # trade time, required, format hhmmss, char[9]
		self.TradeSerial = '' # initiator serial number, N/A, char[9]
		self.FutureID = '' # futures company code, required, char[11]
		self.BankID = '' # bank code, obtained from bank query, required, char[4]
		self.BankBrchID = '' # bank branch center code, obtained from bank query, required, char[5]
		self.OperNo = '' # operator, N/A, char[17]
		self.DeviceID = '' # trading device type, N/A, char[3]
		self.RecordNum = '' # record count, N/A, char[7]
		self.SessionID = '' # session ID, N/A, int
		self.RequestID = '' # request ID, N/A, int
class TransferBankToFutureReq(BaseStruct): # bank-to-futures transfer request, TradeCode=202001
	def __init__(self, FutureAccount='', FuturePwdFlag=FPWD_UnCheck, FutureAccPwd='', TradeAmt=0.0, CustFee=0.0, CurrencyCode=''):
		# NOTE(review): ctor args are unused; attributes hold type-alias strings ('' = use field name) — likely code-generator metadata, verify.
		self.FutureAccount = 'AccountID' # futures funds account, char[13]
		self.FuturePwdFlag = '' # password flag, char
		self.FutureAccPwd = '' # password, char[17]
		self.TradeAmt = 'Money' # transfer amount, double
		self.CustFee = 'Money' # customer fee, double
		self.CurrencyCode = '' # currency: RMB / USD / HKD, char[4]
class TransferBankToFutureRsp(BaseStruct): # bank-to-futures transfer request response
	def __init__(self, RetCode='', RetInfo='', FutureAccount='', TradeAmt=0.0, CustFee=0.0, CurrencyCode=''):
		# NOTE(review): ctor args are unused; attributes hold type-alias strings ('' = use field name) — likely code-generator metadata, verify.
		self.RetCode = '' # response code, char[5]
		self.RetInfo = '' # response info, char[129]
		self.FutureAccount = 'AccountID' # funds account, char[13]
		self.TradeAmt = 'Money' # transfer amount, double
		self.CustFee = 'Money' # customer fee receivable, double
		self.CurrencyCode = '' # currency, char[4]
class TransferFutureToBankReq(BaseStruct): # futures-to-bank transfer request, TradeCode=202002
	def __init__(self, FutureAccount='', FuturePwdFlag=FPWD_UnCheck, FutureAccPwd='', TradeAmt=0.0, CustFee=0.0, CurrencyCode=''):
		# NOTE(review): ctor args are unused; attributes hold type-alias strings ('' = use field name) — likely code-generator metadata, verify.
		self.FutureAccount = 'AccountID' # futures funds account, char[13]
		self.FuturePwdFlag = '' # password flag, char
		self.FutureAccPwd = '' # password, char[17]
		self.TradeAmt = 'Money' # transfer amount, double
		self.CustFee = 'Money' # customer fee, double
		self.CurrencyCode = '' # currency: RMB / USD / HKD, char[4]
class TransferFutureToBankRsp(BaseStruct): # futures-to-bank transfer request response
	def __init__(self, RetCode='', RetInfo='', FutureAccount='', TradeAmt=0.0, CustFee=0.0, CurrencyCode=''):
		# NOTE(review): ctor args are unused; attributes hold type-alias strings ('' = use field name) — likely code-generator metadata, verify.
		self.RetCode = '' # response code, char[5]
		self.RetInfo = '' # response info, char[129]
		self.FutureAccount = 'AccountID' # funds account, char[13]
		self.TradeAmt = 'Money' # transfer amount, double
		self.CustFee = 'Money' # customer fee receivable, double
		self.CurrencyCode = '' # currency, char[4]
class TransferQryBankReq(BaseStruct): # query bank balance request, TradeCode=204002
	def __init__(self, FutureAccount='', FuturePwdFlag=FPWD_UnCheck, FutureAccPwd='', CurrencyCode=''):
		# NOTE(review): ctor args are unused; attributes hold type-alias strings ('' = use field name) — likely code-generator metadata, verify.
		self.FutureAccount = 'AccountID' # futures funds account, char[13]
		self.FuturePwdFlag = '' # password flag, char
		self.FutureAccPwd = '' # password, char[17]
		self.CurrencyCode = '' # currency: RMB / USD / HKD, char[4]
class TransferQryBankRsp(BaseStruct): # query bank balance request response
	def __init__(self, RetCode='', RetInfo='', FutureAccount='', TradeAmt=0.0, UseAmt=0.0, FetchAmt=0.0, CurrencyCode=''):
		# NOTE(review): ctor args are unused; attributes hold type-alias strings ('' = use field name) — likely code-generator metadata, verify.
		self.RetCode = '' # response code, char[5]
		self.RetInfo = '' # response info, char[129]
		self.FutureAccount = 'AccountID' # funds account, char[13]
		self.TradeAmt = 'Money' # bank balance, double
		self.UseAmt = 'Money' # available bank balance, double
		self.FetchAmt = 'Money' # withdrawable bank balance, double
		self.CurrencyCode = '' # currency, char[4]
class TransferQryDetailReq(BaseStruct): # query bank transaction detail request, TradeCode=204999
	def __init__(self, FutureAccount=''):
		# NOTE(review): ctor arg is unused; attribute holds a type-alias string — likely code-generator metadata, verify.
		self.FutureAccount = 'AccountID' # futures funds account, char[13]
class TransferQryDetailRsp(BaseStruct): # query bank transaction detail request response
	def __init__(self, TradeDate='', TradeTime='', TradeCode='', FutureSerial=0, FutureID='', FutureAccount='', BankSerial=0, BankID='', BankBrchID='', BankAccount='', CertCode='', CurrencyCode='', TxAmount=0.0, Flag=TVF_Invalid):
		# NOTE(review): ctor args are unused; attributes hold type-alias strings ('' = use field name) — likely code-generator metadata, verify.
		self.TradeDate = 'Date' # trade date, char[9]
		self.TradeTime = '' # trade time, char[9]
		self.TradeCode = '' # trade code, char[7]
		self.FutureSerial = 'TradeSerialNo' # futures serial number, int
		self.FutureID = '' # futures company code, char[11]
		self.FutureAccount = '' # funds account, char[22]
		self.BankSerial = 'TradeSerialNo' # bank serial number, int
		self.BankID = '' # bank code, char[4]
		self.BankBrchID = '' # bank branch center code, char[5]
		self.BankAccount = '' # bank account, char[41]
		self.CertCode = '' # ID document number, char[21]
		self.CurrencyCode = '' # currency code, char[4]
		self.TxAmount = 'Money' # transaction amount, double
		self.Flag = 'TransferValidFlag' # valid flag, char
class RspInfo(BaseStruct): # response info
	def __init__(self, ErrorID=0, ErrorMsg=''):
		# NOTE(review): ctor args are unused; attributes hold type-alias strings ('' = use field name) — likely code-generator metadata, verify.
		self.ErrorID = '' # error ID, int
		self.ErrorMsg = '' # error message, char[81]
class Exchange(BaseStruct): # exchange
	def __init__(self, ExchangeID='', ExchangeName='', ExchangeProperty=EXP_Normal):
		# NOTE(review): ctor args are unused; attributes hold type-alias strings ('' = use field name) — likely code-generator metadata, verify.
		self.ExchangeID = '' # exchange ID, char[9]
		self.ExchangeName = '' # exchange name, char[31]
		self.ExchangeProperty = '' # exchange property, char
class Product(BaseStruct): # product
	def __init__(self, ProductID='', ProductName='', ExchangeID='', ProductClass=PC_Futures, VolumeMultiple=0, PriceTick=0.0, MaxMarketOrderVolume=0, MinMarketOrderVolume=0, MaxLimitOrderVolume=0, MinLimitOrderVolume=0, PositionType=PT_Net, PositionDateType=PDT_UseHistory, CloseDealType=CDT_Normal):
		# NOTE(review): ctor args are unused; attributes hold type-alias strings ('' = use field name) — likely code-generator metadata, verify.
		self.ProductID = 'InstrumentID' # product code, char[31]
		self.ProductName = '' # product name, char[21]
		self.ExchangeID = '' # exchange ID, char[9]
		self.ProductClass = '' # product class, char
		self.VolumeMultiple = '' # contract volume multiple, int
		self.PriceTick = 'Price' # minimum price tick, double
		self.MaxMarketOrderVolume = 'Volume' # maximum market-order volume, int
		self.MinMarketOrderVolume = 'Volume' # minimum market-order volume, int
		self.MaxLimitOrderVolume = 'Volume' # maximum limit-order volume, int
		self.MinLimitOrderVolume = 'Volume' # minimum limit-order volume, int
		self.PositionType = '' # position type, char
		self.PositionDateType = '' # position date type, char
		self.CloseDealType = '' # close-position processing type, char
class Instrument(BaseStruct): # instrument
	def __init__(self, InstrumentID='', ExchangeID='', InstrumentName='', ExchangeInstID='', ProductID='', ProductClass=PC_Futures, DeliveryYear=0, DeliveryMonth=0, MaxMarketOrderVolume=0, MinMarketOrderVolume=0, MaxLimitOrderVolume=0, MinLimitOrderVolume=0, VolumeMultiple=0, PriceTick=0.0, CreateDate='', OpenDate='', ExpireDate='', StartDelivDate='', EndDelivDate='', InstLifePhase=IP_NotStart, IsTrading=0, PositionType=PT_Net, PositionDateType=PDT_UseHistory, LongMarginRatio=0.0, ShortMarginRatio=0.0):
		# NOTE(review): ctor args are unused; attributes hold type-alias strings ('' = use field name) — likely code-generator metadata, verify.
		self.InstrumentID = '' # instrument ID, char[31]
		self.ExchangeID = '' # exchange ID, char[9]
		self.InstrumentName = '' # instrument name, char[21]
		self.ExchangeInstID = '' # instrument ID on the exchange, char[31]
		self.ProductID = 'InstrumentID' # product code, char[31]
		self.ProductClass = '' # product class, char
		self.DeliveryYear = 'Year' # delivery year, int
		self.DeliveryMonth = 'Month' # delivery month, int
		self.MaxMarketOrderVolume = 'Volume' # maximum market-order volume, int
		self.MinMarketOrderVolume = 'Volume' # minimum market-order volume, int
		self.MaxLimitOrderVolume = 'Volume' # maximum limit-order volume, int
		self.MinLimitOrderVolume = 'Volume' # minimum limit-order volume, int
		self.VolumeMultiple = '' # contract volume multiple, int
		self.PriceTick = 'Price' # minimum price tick, double
		self.CreateDate = 'Date' # creation date, char[9]
		self.OpenDate = 'Date' # listing date, char[9]
		self.ExpireDate = 'Date' # expiry date, char[9]
		self.StartDelivDate = 'Date' # delivery start date, char[9]
		self.EndDelivDate = 'Date' # delivery end date, char[9]
		self.InstLifePhase = '' # instrument life-cycle phase, char
		self.IsTrading = 'Bool' # currently trading, int
		self.PositionType = '' # position type, char
		self.PositionDateType = '' # position date type, char
		self.LongMarginRatio = 'Ratio' # long margin ratio, double
		self.ShortMarginRatio = 'Ratio' # short margin ratio, double
class Broker(BaseStruct): # brokerage firm
	def __init__(self, BrokerID='', BrokerAbbr='', BrokerName='', IsActive=0):
		# NOTE(review): ctor args are unused; attributes hold type-alias strings ('' = use field name) — likely code-generator metadata, verify.
		self.BrokerID = '' # broker ID, char[11]
		self.BrokerAbbr = '' # broker abbreviation, char[9]
		self.BrokerName = '' # broker name, char[81]
		self.IsActive = 'Bool' # active flag, int
class Trader(BaseStruct): # exchange trader
	def __init__(self, ExchangeID='', TraderID='', ParticipantID='', Password='', InstallCount=0, BrokerID=''):
		# NOTE(review): ctor args are unused; attributes hold type-alias strings ('' = use field name) — likely code-generator metadata, verify.
		self.ExchangeID = '' # exchange ID, char[9]
		self.TraderID = '' # exchange trader ID, char[21]
		self.ParticipantID = '' # participant (member) ID, char[11]
		self.Password = '' # password, char[41]
		self.InstallCount = '' # install count, int
		self.BrokerID = '' # broker ID, char[11]
class Investor(BaseStruct): # investor
	def __init__(self, InvestorID='', BrokerID='', InvestorGroupID='', InvestorName='', IdentifiedCardType=ICT_EID, IdentifiedCardNo='', IsActive=0, Telephone='', Address='', OpenDate='', Mobile='', CommModelID='', MarginModelID=''):
		# NOTE(review): ctor args are unused; attributes hold type-alias strings ('' = use field name) — likely code-generator metadata, verify.
		self.InvestorID = '' # investor ID, char[13]
		self.BrokerID = '' # broker ID, char[11]
		self.InvestorGroupID = 'InvestorID' # investor group ID, char[13]
		self.InvestorName = 'PartyName' # investor name, char[81]
		self.IdentifiedCardType = 'IdCardType' # ID document type, char
		self.IdentifiedCardNo = '' # ID document number, char[51]
		self.IsActive = 'Bool' # active flag, int
		self.Telephone = '' # contact telephone, char[41]
		self.Address = '' # mailing address, char[101]
		self.OpenDate = 'Date' # account opening date, char[9]
		self.Mobile = '' # mobile phone, char[41]
		self.CommModelID = 'InvestorID' # commission rate template code, char[13]
		self.MarginModelID = 'InvestorID' # margin rate template code, char[13]
class TradingCode(BaseStruct): # trading code
	def __init__(self, InvestorID='', BrokerID='', ExchangeID='', ClientID='', IsActive=0, ClientIDType=CIDT_Speculation):
		# NOTE(review): ctor args are unused; attributes hold type-alias strings ('' = use field name) — likely code-generator metadata, verify.
		self.InvestorID = '' # investor ID, char[13]
		self.BrokerID = '' # broker ID, char[11]
		self.ExchangeID = '' # exchange ID, char[9]
		self.ClientID = '' # client ID, char[11]
		self.IsActive = 'Bool' # active flag, int
		self.ClientIDType = '' # trading code type, char
class PartBroker(BaseStruct): # participant ID <-> broker ID mapping table
	def __init__(self, BrokerID='', ExchangeID='', ParticipantID='', IsActive=0):
		# NOTE(review): ctor args are unused; attributes hold type-alias strings ('' = use field name) — likely code-generator metadata, verify.
		self.BrokerID = '' # broker ID, char[11]
		self.ExchangeID = '' # exchange ID, char[9]
		self.ParticipantID = '' # participant (member) ID, char[11]
		self.IsActive = 'Bool' # active flag, int
class SuperUser(BaseStruct): # administrative user
	def __init__(self, UserID='', UserName='', Password='', IsActive=0):
		# NOTE(review): ctor args are unused; attributes hold type-alias strings ('' = use field name) — likely code-generator metadata, verify.
		self.UserID = '' # user ID, char[16]
		self.UserName = '' # user name, char[81]
		self.Password = '' # password, char[41]
		self.IsActive = 'Bool' # active flag, int
class SuperUserFunction(BaseStruct): # administrative user function permission
	def __init__(self, UserID='', FunctionCode=FC_DataAsync):
		# NOTE(review): ctor args are unused; attributes hold type-alias strings ('' = use field name) — likely code-generator metadata, verify.
		self.UserID = '' # user ID, char[16]
		self.FunctionCode = '' # function code, char
class InvestorGroup(BaseStruct): # investor group
	def __init__(self, BrokerID='', InvestorGroupID='', InvestorGroupName=''):
		# NOTE(review): ctor args are unused; attributes hold type-alias strings ('' = use field name) — likely code-generator metadata, verify.
		self.BrokerID = '' # broker ID, char[11]
		self.InvestorGroupID = 'InvestorID' # investor group ID, char[13]
		self.InvestorGroupName = '' # investor group name, char[41]
class TradingAccount(BaseStruct): # funds (trading) account
	def __init__(self, BrokerID='', AccountID='', PreMortgage=0.0, PreCredit=0.0, PreDeposit=0.0, PreBalance=0.0, PreMargin=0.0, InterestBase=0.0, Interest=0.0, Deposit=0.0, Withdraw=0.0, FrozenMargin=0.0, FrozenCash=0.0, FrozenCommission=0.0, CurrMargin=0.0, CashIn=0.0, Commission=0.0, CloseProfit=0.0, PositionProfit=0.0, Balance=0.0, Available=0.0, WithdrawQuota=0.0, Reserve=0.0, TradingDay='', SettlementID=0, Credit=0.0, Mortgage=0.0, ExchangeMargin=0.0, DeliveryMargin=0.0, ExchangeDeliveryMargin=0.0):
		# NOTE(review): ctor args are unused; attributes hold type-alias strings ('' = use field name) — likely code-generator metadata, verify.
		self.BrokerID = '' # broker ID, char[11]
		self.AccountID = '' # investor account ID, char[13]
		self.PreMortgage = 'Money' # previous mortgage (pledge) amount, double
		self.PreCredit = 'Money' # previous credit limit, double
		self.PreDeposit = 'Money' # previous deposit, double
		self.PreBalance = 'Money' # previous settlement reserve, double
		self.PreMargin = 'Money' # previously occupied margin, double
		self.InterestBase = 'Money' # interest base, double
		self.Interest = 'Money' # interest income, double
		self.Deposit = 'Money' # deposit amount, double
		self.Withdraw = 'Money' # withdrawal amount, double
		self.FrozenMargin = 'Money' # frozen margin, double
		self.FrozenCash = 'Money' # frozen cash, double
		self.FrozenCommission = 'Money' # frozen commission, double
		self.CurrMargin = 'Money' # current total margin, double
		self.CashIn = 'Money' # cash difference, double
		self.Commission = 'Money' # commission, double
		self.CloseProfit = 'Money' # realized (close) profit/loss, double
		self.PositionProfit = 'Money' # position profit/loss, double
		self.Balance = 'Money' # futures settlement reserve, double
		self.Available = 'Money' # available funds, double
		self.WithdrawQuota = 'Money' # withdrawable funds, double
		self.Reserve = 'Money' # basic reserve, double
		self.TradingDay = 'Date' # trading day, char[9]
		self.SettlementID = '' # settlement ID, int
		self.Credit = 'Money' # credit limit, double
		self.Mortgage = 'Money' # mortgage (pledge) amount, double
		self.ExchangeMargin = 'Money' # exchange margin, double
		self.DeliveryMargin = 'Money' # investor delivery margin, double
		self.ExchangeDeliveryMargin = 'Money' # exchange delivery margin, double
class InvestorPosition(BaseStruct): #investor position
    def __init__(self, InstrumentID='', BrokerID='', InvestorID='', PosiDirection=PD_Net, HedgeFlag=HF_Speculation, PositionDate=PSD_Today, YdPosition=0, Position=0, LongFrozen=0, ShortFrozen=0, LongFrozenAmount=0.0, ShortFrozenAmount=0.0, OpenVolume=0, CloseVolume=0, OpenAmount=0.0, CloseAmount=0.0, PositionCost=0.0, PreMargin=0.0, UseMargin=0.0, FrozenMargin=0.0, FrozenCash=0.0, FrozenCommission=0.0, CashIn=0.0, Commission=0.0, CloseProfit=0.0, PositionProfit=0.0, PreSettlementPrice=0.0, SettlementPrice=0.0, TradingDay='', SettlementID=0, OpenCost=0.0, ExchangeMargin=0.0, CombPosition=0, CombLongFrozen=0, CombShortFrozen=0, CloseProfitByDate=0.0, CloseProfitByTrade=0.0, TodayPosition=0, MarginRateByMoney=0.0, MarginRateByVolume=0.0):
        self.InstrumentID = '' #instrument ID, char[31]
        self.BrokerID = '' #broker ID, char[11]
        self.InvestorID = '' #investor ID, char[13]
        self.PosiDirection = '' #position direction (long/short/net), char
        self.HedgeFlag = '' #hedge flag (speculation/hedge), char
        self.PositionDate = '' #position date, char
        self.YdPosition = 'Volume' #yesterday's position, int
        self.Position = 'Volume' #today's position, int
        self.LongFrozen = 'Volume' #long frozen, int
        self.ShortFrozen = 'Volume' #short frozen, int
        self.LongFrozenAmount = 'Money' #frozen amount of opening (long), double
        self.ShortFrozenAmount = 'Money' #frozen amount of opening (short), double
        self.OpenVolume = 'Volume' #open volume, int
        self.CloseVolume = 'Volume' #close volume, int
        self.OpenAmount = 'Money' #open amount, double
        self.CloseAmount = 'Money' #close amount, double
        self.PositionCost = 'Money' #position cost, double
        self.PreMargin = 'Money' #previous margin in use, double
        self.UseMargin = 'Money' #margin in use, double
        self.FrozenMargin = 'Money' #frozen margin, double
        self.FrozenCash = 'Money' #frozen cash, double
        self.FrozenCommission = 'Money' #frozen commission, double
        self.CashIn = 'Money' #cash difference, double
        self.Commission = 'Money' #commission, double
        self.CloseProfit = 'Money' #close profit, double
        self.PositionProfit = 'Money' #position profit, double
        self.PreSettlementPrice = 'Price' #previous settlement price, double
        self.SettlementPrice = 'Price' #settlement price, double
        self.TradingDay = 'Date' #trading day, char[9]
        self.SettlementID = '' #settlement ID, int
        self.OpenCost = 'Money' #open cost, double
        self.ExchangeMargin = 'Money' #exchange margin, double
        self.CombPosition = 'Volume' #position formed by combination trades, int
        self.CombLongFrozen = 'Volume' #combination long frozen, int
        self.CombShortFrozen = 'Volume' #combination short frozen, int
        self.CloseProfitByDate = 'Money' #mark-to-market (daily) close profit, double
        self.CloseProfitByTrade = 'Money' #trade-by-trade close profit, double
        self.TodayPosition = 'Volume' #today's position, int
        self.MarginRateByMoney = 'Ratio' #margin rate (by money), double
        self.MarginRateByVolume = 'Ratio' #margin rate (by volume), double
class InstrumentMarginRate(BaseStruct): #instrument margin rate
    def __init__(self, InstrumentID='', InvestorRange=IR_All, BrokerID='', InvestorID='', HedgeFlag=HF_Speculation, LongMarginRatioByMoney=0.0, LongMarginRatioByVolume=0.0, ShortMarginRatioByMoney=0.0, ShortMarginRatioByVolume=0.0, IsRelative=0):
        self.InstrumentID = '' #instrument ID, char[31]
        self.InvestorRange = '' #investor range, char
        self.BrokerID = '' #broker ID, char[11]
        self.InvestorID = '' #investor ID, char[13]
        self.HedgeFlag = '' #hedge flag (speculation/hedge), char
        self.LongMarginRatioByMoney = 'Ratio' #long margin rate (by money), double
        self.LongMarginRatioByVolume = 'Money' #long margin fee (by volume), double
        self.ShortMarginRatioByMoney = 'Ratio' #short margin rate (by money), double
        self.ShortMarginRatioByVolume = 'Money' #short margin fee (by volume), double
        self.IsRelative = 'Bool' #whether charged relative to the exchange, int
class InstrumentCommissionRate(BaseStruct): #instrument commission rate
    def __init__(self, InstrumentID='', InvestorRange=IR_All, BrokerID='', InvestorID='', OpenRatioByMoney=0.0, OpenRatioByVolume=0.0, CloseRatioByMoney=0.0, CloseRatioByVolume=0.0, CloseTodayRatioByMoney=0.0, CloseTodayRatioByVolume=0.0):
        self.InstrumentID = '' #instrument ID, char[31]
        self.InvestorRange = '' #investor range, char
        self.BrokerID = '' #broker ID, char[11]
        self.InvestorID = '' #investor ID, char[13]
        self.OpenRatioByMoney = 'Ratio' #open commission rate (by money), double
        self.OpenRatioByVolume = 'Ratio' #open commission (by volume), double
        self.CloseRatioByMoney = 'Ratio' #close commission rate (by money), double
        self.CloseRatioByVolume = 'Ratio' #close commission (by volume), double
        self.CloseTodayRatioByMoney = 'Ratio' #close-today commission rate (by money), double
        self.CloseTodayRatioByVolume = 'Ratio' #close-today commission (by volume), double
class DepthMarketData(BaseStruct): #depth market data
    def __init__(self, TradingDay='', InstrumentID='', ExchangeID='', ExchangeInstID='', LastPrice=0.0, PreSettlementPrice=0.0, PreClosePrice=0.0, PreOpenInterest=0.0, OpenPrice=0.0, HighestPrice=0.0, LowestPrice=0.0, Volume=0, Turnover=0.0, OpenInterest=0.0, ClosePrice=0.0, SettlementPrice=0.0, UpperLimitPrice=0.0, LowerLimitPrice=0.0, PreDelta=0.0, CurrDelta=0.0, UpdateTime='', UpdateMillisec=0, BidPrice1=0.0, BidVolume1=0, AskPrice1=0.0, AskVolume1=0, BidPrice2=0.0, BidVolume2=0, AskPrice2=0.0, AskVolume2=0, BidPrice3=0.0, BidVolume3=0, AskPrice3=0.0, AskVolume3=0, BidPrice4=0.0, BidVolume4=0, AskPrice4=0.0, AskVolume4=0, BidPrice5=0.0, BidVolume5=0, AskPrice5=0.0, AskVolume5=0, AveragePrice=0.0, ActionDay=''):
        self.TradingDay = 'Date' #trading day, char[9]
        self.InstrumentID = '' #instrument ID, char[31]
        self.ExchangeID = '' #exchange ID, char[9]
        self.ExchangeInstID = '' #instrument ID on the exchange, char[31]
        self.LastPrice = 'Price' #last price, double
        self.PreSettlementPrice = 'Price' #previous settlement price, double
        self.PreClosePrice = 'Price' #previous close price, double
        self.PreOpenInterest = 'LargeVolume' #previous open interest, double
        self.OpenPrice = 'Price' #open price, double
        self.HighestPrice = 'Price' #highest price, double
        self.LowestPrice = 'Price' #lowest price, double
        self.Volume = '' #volume, int
        self.Turnover = 'Money' #turnover, double
        self.OpenInterest = 'LargeVolume' #open interest, double
        self.ClosePrice = 'Price' #close price, double
        self.SettlementPrice = 'Price' #settlement price, double
        self.UpperLimitPrice = 'Price' #upper limit price, double
        self.LowerLimitPrice = 'Price' #lower limit price, double
        self.PreDelta = 'Ratio' #previous delta, double
        self.CurrDelta = 'Ratio' #current delta, double
        self.UpdateTime = 'Time' #last update time, char[9]
        self.UpdateMillisec = 'Millisec' #last update millisecond, int
        self.BidPrice1 = 'Price' #bid price 1, double
        self.BidVolume1 = 'Volume' #bid volume 1, int
        self.AskPrice1 = 'Price' #ask price 1, double
        self.AskVolume1 = 'Volume' #ask volume 1, int
        self.BidPrice2 = 'Price' #bid price 2, double
        self.BidVolume2 = 'Volume' #bid volume 2, int
        self.AskPrice2 = 'Price' #ask price 2, double
        self.AskVolume2 = 'Volume' #ask volume 2, int
        self.BidPrice3 = 'Price' #bid price 3, double
        self.BidVolume3 = 'Volume' #bid volume 3, int
        self.AskPrice3 = 'Price' #ask price 3, double
        self.AskVolume3 = 'Volume' #ask volume 3, int
        self.BidPrice4 = 'Price' #bid price 4, double
        self.BidVolume4 = 'Volume' #bid volume 4, int
        self.AskPrice4 = 'Price' #ask price 4, double
        self.AskVolume4 = 'Volume' #ask volume 4, int
        self.BidPrice5 = 'Price' #bid price 5, double
        self.BidVolume5 = 'Volume' #bid volume 5, int
        self.AskPrice5 = 'Price' #ask price 5, double
        self.AskVolume5 = 'Volume' #ask volume 5, int
        self.AveragePrice = 'Price' #average price of the day, double
        self.ActionDay = 'Date' #business date, char[9]
class InstrumentTradingRight(BaseStruct): #investor's trading right on an instrument
    def __init__(self, InstrumentID='', InvestorRange=IR_All, BrokerID='', InvestorID='', TradingRight=TR_Allow):
        self.InstrumentID = '' #instrument ID, char[31]
        self.InvestorRange = '' #investor range, char
        self.BrokerID = '' #broker ID, char[11]
        self.InvestorID = '' #investor ID, char[13]
        self.TradingRight = '' #trading right, char
class BrokerUser(BaseStruct): #broker user
    def __init__(self, BrokerID='', UserID='', UserName='', UserType=UT_Investor, IsActive=0, IsUsingOTP=0):
        self.BrokerID = '' #broker ID, char[11]
        self.UserID = '' #user ID, char[16]
        self.UserName = '' #user name, char[81]
        self.UserType = '' #user type, char
        self.IsActive = 'Bool' #is active, int
        self.IsUsingOTP = 'Bool' #whether a one-time-password token is used, int
class BrokerUserPassword(BaseStruct): #broker user password
    def __init__(self, BrokerID='', UserID='', Password=''):
        self.BrokerID = '' #broker ID, char[11]
        self.UserID = '' #user ID, char[16]
        self.Password = '' #password, char[41]
class BrokerUserFunction(BaseStruct): #broker user function rights
    def __init__(self, BrokerID='', UserID='', BrokerFunctionCode=BFC_ForceUserLogout):
        self.BrokerID = '' #broker ID, char[11]
        self.UserID = '' #user ID, char[16]
        self.BrokerFunctionCode = '' #broker function code, char
class TraderOffer(BaseStruct): #exchange trader order-routing session (offer machine)
    def __init__(self, ExchangeID='', TraderID='', ParticipantID='', Password='', InstallID=0, OrderLocalID='', TraderConnectStatus=TCS_NotConnected, ConnectRequestDate='', ConnectRequestTime='', LastReportDate='', LastReportTime='', ConnectDate='', ConnectTime='', StartDate='', StartTime='', TradingDay='', BrokerID='', MaxTradeID='', MaxOrderMessageReference=''):
        self.ExchangeID = '' #exchange ID, char[9]
        self.TraderID = '' #exchange trader ID, char[21]
        self.ParticipantID = '' #participant ID, char[11]
        self.Password = '' #password, char[41]
        self.InstallID = '' #install ID, int
        self.OrderLocalID = '' #local order ID, char[13]
        self.TraderConnectStatus = '' #exchange trader connect status, char
        self.ConnectRequestDate = 'Date' #date the connect request was issued, char[9]
        self.ConnectRequestTime = 'Time' #time the connect request was issued, char[9]
        self.LastReportDate = 'Date' #last report date, char[9]
        self.LastReportTime = 'Time' #last report time, char[9]
        self.ConnectDate = 'Date' #date the connection completed, char[9]
        self.ConnectTime = 'Time' #time the connection completed, char[9]
        self.StartDate = 'Date' #start date, char[9]
        self.StartTime = 'Time' #start time, char[9]
        self.TradingDay = 'Date' #trading day, char[9]
        self.BrokerID = '' #broker ID, char[11]
        self.MaxTradeID = 'TradeID' #max trade ID of this seat, char[21]
        self.MaxOrderMessageReference = 'ReturnCode' #max order message reference of this seat, char[7]
class SettlementInfo(BaseStruct): #investor settlement result
    def __init__(self, TradingDay='', SettlementID=0, BrokerID='', InvestorID='', SequenceNo=0, Content=''):
        self.TradingDay = 'Date' #trading day, char[9]
        self.SettlementID = '' #settlement ID, int
        self.BrokerID = '' #broker ID, char[11]
        self.InvestorID = '' #investor ID, char[13]
        self.SequenceNo = '' #sequence number, int
        self.Content = '' #message body, char[501]
class InstrumentMarginRateAdjust(BaseStruct): #instrument margin rate adjustment
    def __init__(self, InstrumentID='', InvestorRange=IR_All, BrokerID='', InvestorID='', HedgeFlag=HF_Speculation, LongMarginRatioByMoney=0.0, LongMarginRatioByVolume=0.0, ShortMarginRatioByMoney=0.0, ShortMarginRatioByVolume=0.0, IsRelative=0):
        self.InstrumentID = '' #instrument ID, char[31]
        self.InvestorRange = '' #investor range, char
        self.BrokerID = '' #broker ID, char[11]
        self.InvestorID = '' #investor ID, char[13]
        self.HedgeFlag = '' #hedge flag (speculation/hedge), char
        self.LongMarginRatioByMoney = 'Ratio' #long margin rate (by money), double
        self.LongMarginRatioByVolume = 'Money' #long margin fee (by volume), double
        self.ShortMarginRatioByMoney = 'Ratio' #short margin rate (by money), double
        self.ShortMarginRatioByVolume = 'Money' #short margin fee (by volume), double
        self.IsRelative = 'Bool' #whether charged relative to the exchange, int
class ExchangeMarginRate(BaseStruct): #exchange margin rate
    def __init__(self, BrokerID='', InstrumentID='', HedgeFlag=HF_Speculation, LongMarginRatioByMoney=0.0, LongMarginRatioByVolume=0.0, ShortMarginRatioByMoney=0.0, ShortMarginRatioByVolume=0.0):
        self.BrokerID = '' #broker ID, char[11]
        self.InstrumentID = '' #instrument ID, char[31]
        self.HedgeFlag = '' #hedge flag (speculation/hedge), char
        self.LongMarginRatioByMoney = 'Ratio' #long margin rate (by money), double
        self.LongMarginRatioByVolume = 'Money' #long margin fee (by volume), double
        self.ShortMarginRatioByMoney = 'Ratio' #short margin rate (by money), double
        self.ShortMarginRatioByVolume = 'Money' #short margin fee (by volume), double
class ExchangeMarginRateAdjust(BaseStruct): #exchange margin rate adjustment
    def __init__(self, BrokerID='', InstrumentID='', HedgeFlag=HF_Speculation, LongMarginRatioByMoney=0.0, LongMarginRatioByVolume=0.0, ShortMarginRatioByMoney=0.0, ShortMarginRatioByVolume=0.0, ExchLongMarginRatioByMoney=0.0, ExchLongMarginRatioByVolume=0.0, ExchShortMarginRatioByMoney=0.0, ExchShortMarginRatioByVolume=0.0, NoLongMarginRatioByMoney=0.0, NoLongMarginRatioByVolume=0.0, NoShortMarginRatioByMoney=0.0, NoShortMarginRatioByVolume=0.0):
        self.BrokerID = '' #broker ID, char[11]
        self.InstrumentID = '' #instrument ID, char[31]
        self.HedgeFlag = '' #hedge flag (speculation/hedge), char
        self.LongMarginRatioByMoney = 'Ratio' #exchange-following investor long margin rate (by money), double
        self.LongMarginRatioByVolume = 'Money' #exchange-following investor long margin fee (by volume), double
        self.ShortMarginRatioByMoney = 'Ratio' #exchange-following investor short margin rate (by money), double
        self.ShortMarginRatioByVolume = 'Money' #exchange-following investor short margin fee (by volume), double
        self.ExchLongMarginRatioByMoney = 'Ratio' #exchange long margin rate (by money), double
        self.ExchLongMarginRatioByVolume = 'Money' #exchange long margin fee (by volume), double
        self.ExchShortMarginRatioByMoney = 'Ratio' #exchange short margin rate (by money), double
        self.ExchShortMarginRatioByVolume = 'Money' #exchange short margin fee (by volume), double
        self.NoLongMarginRatioByMoney = 'Ratio' #non-exchange-following investor long margin rate (by money), double
        self.NoLongMarginRatioByVolume = 'Money' #non-exchange-following investor long margin fee (by volume), double
        self.NoShortMarginRatioByMoney = 'Ratio' #non-exchange-following investor short margin rate (by money), double
        self.NoShortMarginRatioByVolume = 'Money' #non-exchange-following investor short margin fee (by volume), double
class SettlementRef(BaseStruct): #settlement reference
    def __init__(self, TradingDay='', SettlementID=0):
        self.TradingDay = 'Date' #trading day, char[9]
        self.SettlementID = '' #settlement ID, int
class CurrentTime(BaseStruct): #current time
    def __init__(self, CurrDate='', CurrTime='', CurrMillisec=0, ActionDay=''):
        self.CurrDate = 'Date' #current date, char[9]
        self.CurrTime = 'Time' #current time, char[9]
        self.CurrMillisec = 'Millisec' #current time (milliseconds), int
        self.ActionDay = 'Date' #business date, char[9]
class CommPhase(BaseStruct): #communication phase
    def __init__(self, TradingDay='', CommPhaseNo=0, SystemID=''):
        self.TradingDay = 'Date' #trading day, char[9]
        self.CommPhaseNo = '' #communication phase number, short
        self.SystemID = '' #system ID, char[21]
class LoginInfo(BaseStruct): #login information
    def __init__(self, FrontID=0, SessionID=0, BrokerID='', UserID='', LoginDate='', LoginTime='', IPAddress='', UserProductInfo='', InterfaceProductInfo='', ProtocolInfo='', SystemName='', Password='', MaxOrderRef='', SHFETime='', DCETime='', CZCETime='', FFEXTime='', MacAddress='', OneTimePassword=''):
        self.FrontID = '' #front ID, int
        self.SessionID = '' #session ID, int
        self.BrokerID = '' #broker ID, char[11]
        self.UserID = '' #user ID, char[16]
        self.LoginDate = 'Date' #login date, char[9]
        self.LoginTime = 'Time' #login time, char[9]
        self.IPAddress = '' #IP address, char[16]
        self.UserProductInfo = 'ProductInfo' #user-side product info, char[11]
        self.InterfaceProductInfo = 'ProductInfo' #interface-side product info, char[11]
        self.ProtocolInfo = '' #protocol info, char[11]
        self.SystemName = '' #system name, char[41]
        self.Password = '' #password, char[41]
        self.MaxOrderRef = 'OrderRef' #max order reference, char[13]
        self.SHFETime = 'Time' #SHFE time, char[9]
        self.DCETime = 'Time' #DCE time, char[9]
        self.CZCETime = 'Time' #CZCE time, char[9]
        self.FFEXTime = 'Time' #CFFEX time, char[9]
        self.MacAddress = '' #MAC address, char[21]
        self.OneTimePassword = 'Password' #one-time password, char[41]
class LogoutAll(BaseStruct): #logout-all information
    def __init__(self, FrontID=0, SessionID=0, SystemName=''):
        self.FrontID = '' #front ID, int
        self.SessionID = '' #session ID, int
        self.SystemName = '' #system name, char[41]
class FrontStatus(BaseStruct): #front server status
    def __init__(self, FrontID=0, LastReportDate='', LastReportTime='', IsActive=0):
        self.FrontID = '' #front ID, int
        self.LastReportDate = 'Date' #last report date, char[9]
        self.LastReportTime = 'Time' #last report time, char[9]
        self.IsActive = 'Bool' #is active, int
class UserPasswordUpdate(BaseStruct): #user password change
    def __init__(self, BrokerID='', UserID='', OldPassword='', NewPassword=''):
        self.BrokerID = '' #broker ID, char[11]
        self.UserID = '' #user ID, char[16]
        self.OldPassword = 'Password' #old password, char[41]
        self.NewPassword = 'Password' #new password, char[41]
class InputOrder(BaseStruct): #input order (order insert request)
    def __init__(self, BrokerID='', InvestorID='', InstrumentID='', OrderRef='', UserID='', OrderPriceType=OPT_AnyPrice, Direction=D_Buy, CombOffsetFlag='', CombHedgeFlag='', LimitPrice=0.0, VolumeTotalOriginal=0, TimeCondition=TC_IOC, GTDDate='', VolumeCondition=VC_AV, MinVolume=0, ContingentCondition=CC_Immediately, StopPrice=0.0, ForceCloseReason=FCC_NotForceClose, IsAutoSuspend=0, BusinessUnit='', RequestID=0, UserForceClose=0, IsSwapOrder=0):
        self.BrokerID = '' #broker ID, char[11]
        self.InvestorID = '' #investor ID, char[13]
        self.InstrumentID = '' #instrument ID, char[31]
        self.OrderRef = '' #order reference, char[13]
        self.UserID = '' #user ID, char[16]
        self.OrderPriceType = '' #order price type, char
        self.Direction = '' #buy/sell direction, char
        self.CombOffsetFlag = '' #combined open/close (offset) flag, char[5]
        self.CombHedgeFlag = '' #combined hedge flag, char[5]
        self.LimitPrice = 'Price' #price, double
        self.VolumeTotalOriginal = 'Volume' #volume, int
        self.TimeCondition = '' #time-in-force condition, char
        self.GTDDate = 'Date' #GTD date, char[9]
        self.VolumeCondition = '' #volume condition, char
        self.MinVolume = 'Volume' #minimum volume, int
        self.ContingentCondition = '' #contingent (trigger) condition, char
        self.StopPrice = 'Price' #stop price, double
        self.ForceCloseReason = '' #force-close reason, char
        self.IsAutoSuspend = 'Bool' #auto-suspend flag, int
        self.BusinessUnit = '' #business unit, char[21]
        self.RequestID = '' #request ID, int
        self.UserForceClose = 'Bool' #user force-close flag, int
        self.IsSwapOrder = 'Bool' #swap order flag, int
class Order(BaseStruct): #order
    def __init__(self, BrokerID='', InvestorID='', InstrumentID='', OrderRef='', UserID='', OrderPriceType=OPT_AnyPrice, Direction=D_Buy, CombOffsetFlag='', CombHedgeFlag='', LimitPrice=0.0, VolumeTotalOriginal=0, TimeCondition=TC_IOC, GTDDate='', VolumeCondition=VC_AV, MinVolume=0, ContingentCondition=CC_Immediately, StopPrice=0.0, ForceCloseReason=FCC_NotForceClose, IsAutoSuspend=0, BusinessUnit='', RequestID=0, OrderLocalID='', ExchangeID='', ParticipantID='', ClientID='', ExchangeInstID='', TraderID='', InstallID=0, OrderSubmitStatus=OSS_InsertSubmitted, NotifySequence=0, TradingDay='', SettlementID=0, OrderSysID='', OrderSource=OSRC_Participant, OrderStatus=OST_AllTraded, OrderType=ORDT_Normal, VolumeTraded=0, VolumeTotal=0, InsertDate='', InsertTime='', ActiveTime='', SuspendTime='', UpdateTime='', CancelTime='', ActiveTraderID='', ClearingPartID='', SequenceNo=0, FrontID=0, SessionID=0, UserProductInfo='', StatusMsg='', UserForceClose=0, ActiveUserID='', BrokerOrderSeq=0, RelativeOrderSysID='', ZCETotalTradedVolume=0, IsSwapOrder=0):
        self.BrokerID = '' #broker ID, char[11]
        self.InvestorID = '' #investor ID, char[13]
        self.InstrumentID = '' #instrument ID, char[31]
        self.OrderRef = '' #order reference, char[13]
        self.UserID = '' #user ID, char[16]
        self.OrderPriceType = '' #order price type, char
        self.Direction = '' #buy/sell direction, char
        self.CombOffsetFlag = '' #combined open/close (offset) flag, char[5]
        self.CombHedgeFlag = '' #combined hedge flag, char[5]
        self.LimitPrice = 'Price' #price, double
        self.VolumeTotalOriginal = 'Volume' #volume, int
        self.TimeCondition = '' #time-in-force condition, char
        self.GTDDate = 'Date' #GTD date, char[9]
        self.VolumeCondition = '' #volume condition, char
        self.MinVolume = 'Volume' #minimum volume, int
        self.ContingentCondition = '' #contingent (trigger) condition, char
        self.StopPrice = 'Price' #stop price, double
        self.ForceCloseReason = '' #force-close reason, char
        self.IsAutoSuspend = 'Bool' #auto-suspend flag, int
        self.BusinessUnit = '' #business unit, char[21]
        self.RequestID = '' #request ID, int
        self.OrderLocalID = '' #local order ID, char[13]
        self.ExchangeID = '' #exchange ID, char[9]
        self.ParticipantID = '' #participant ID, char[11]
        self.ClientID = '' #client ID, char[11]
        self.ExchangeInstID = '' #instrument ID on the exchange, char[31]
        self.TraderID = '' #exchange trader ID, char[21]
        self.InstallID = '' #install ID, int
        self.OrderSubmitStatus = '' #order submit status, char
        self.NotifySequence = 'SequenceNo' #order notify sequence, int
        self.TradingDay = 'Date' #trading day, char[9]
        self.SettlementID = '' #settlement ID, int
        self.OrderSysID = '' #order system ID, char[21]
        self.OrderSource = '' #order source, char
        self.OrderStatus = '' #order status, char
        self.OrderType = '' #order type, char
        self.VolumeTraded = 'Volume' #volume traded today, int
        self.VolumeTotal = 'Volume' #remaining volume, int
        self.InsertDate = 'Date' #order insert date, char[9]
        self.InsertTime = 'Time' #order insert time, char[9]
        self.ActiveTime = 'Time' #activation time, char[9]
        self.SuspendTime = 'Time' #suspend time, char[9]
        self.UpdateTime = 'Time' #last update time, char[9]
        self.CancelTime = 'Time' #cancel time, char[9]
        self.ActiveTraderID = 'TraderID' #exchange trader ID of last modification, char[21]
        self.ClearingPartID = 'ParticipantID' #clearing participant ID, char[11]
        self.SequenceNo = '' #sequence number, int
        self.FrontID = '' #front ID, int
        self.SessionID = '' #session ID, int
        self.UserProductInfo = 'ProductInfo' #user-side product info, char[11]
        self.StatusMsg = 'ErrorMsg' #status message, char[81]
        self.UserForceClose = 'Bool' #user force-close flag, int
        self.ActiveUserID = 'UserID' #acting user ID, char[16]
        self.BrokerOrderSeq = 'SequenceNo' #broker order sequence, int
        self.RelativeOrderSysID = 'OrderSysID' #related order, char[21]
        self.ZCETotalTradedVolume = 'Volume' #CZCE traded volume, int
        self.IsSwapOrder = 'Bool' #swap order flag, int
class ExchangeOrder(BaseStruct): #exchange order
    def __init__(self, OrderPriceType=OPT_AnyPrice, Direction=D_Buy, CombOffsetFlag='', CombHedgeFlag='', LimitPrice=0.0, VolumeTotalOriginal=0, TimeCondition=TC_IOC, GTDDate='', VolumeCondition=VC_AV, MinVolume=0, ContingentCondition=CC_Immediately, StopPrice=0.0, ForceCloseReason=FCC_NotForceClose, IsAutoSuspend=0, BusinessUnit='', RequestID=0, OrderLocalID='', ExchangeID='', ParticipantID='', ClientID='', ExchangeInstID='', TraderID='', InstallID=0, OrderSubmitStatus=OSS_InsertSubmitted, NotifySequence=0, TradingDay='', SettlementID=0, OrderSysID='', OrderSource=OSRC_Participant, OrderStatus=OST_AllTraded, OrderType=ORDT_Normal, VolumeTraded=0, VolumeTotal=0, InsertDate='', InsertTime='', ActiveTime='', SuspendTime='', UpdateTime='', CancelTime='', ActiveTraderID='', ClearingPartID='', SequenceNo=0):
        self.OrderPriceType = '' #order price type, char
        self.Direction = '' #buy/sell direction, char
        self.CombOffsetFlag = '' #combined open/close (offset) flag, char[5]
        self.CombHedgeFlag = '' #combined hedge flag, char[5]
        self.LimitPrice = 'Price' #price, double
        self.VolumeTotalOriginal = 'Volume' #volume, int
        self.TimeCondition = '' #time-in-force condition, char
        self.GTDDate = 'Date' #GTD date, char[9]
        self.VolumeCondition = '' #volume condition, char
        self.MinVolume = 'Volume' #minimum volume, int
        self.ContingentCondition = '' #contingent (trigger) condition, char
        self.StopPrice = 'Price' #stop price, double
        self.ForceCloseReason = '' #force-close reason, char
        self.IsAutoSuspend = 'Bool' #auto-suspend flag, int
        self.BusinessUnit = '' #business unit, char[21]
        self.RequestID = '' #request ID, int
        self.OrderLocalID = '' #local order ID, char[13]
        self.ExchangeID = '' #exchange ID, char[9]
        self.ParticipantID = '' #participant ID, char[11]
        self.ClientID = '' #client ID, char[11]
        self.ExchangeInstID = '' #instrument ID on the exchange, char[31]
        self.TraderID = '' #exchange trader ID, char[21]
        self.InstallID = '' #install ID, int
        self.OrderSubmitStatus = '' #order submit status, char
        self.NotifySequence = 'SequenceNo' #order notify sequence, int
        self.TradingDay = 'Date' #trading day, char[9]
        self.SettlementID = '' #settlement ID, int
        self.OrderSysID = '' #order system ID, char[21]
        self.OrderSource = '' #order source, char
        self.OrderStatus = '' #order status, char
        self.OrderType = '' #order type, char
        self.VolumeTraded = 'Volume' #volume traded today, int
        self.VolumeTotal = 'Volume' #remaining volume, int
        self.InsertDate = 'Date' #order insert date, char[9]
        self.InsertTime = 'Time' #order insert time, char[9]
        self.ActiveTime = 'Time' #activation time, char[9]
        self.SuspendTime = 'Time' #suspend time, char[9]
        self.UpdateTime = 'Time' #last update time, char[9]
        self.CancelTime = 'Time' #cancel time, char[9]
        self.ActiveTraderID = 'TraderID' #exchange trader ID of last modification, char[21]
        self.ClearingPartID = 'ParticipantID' #clearing participant ID, char[11]
        self.SequenceNo = '' #sequence number, int
class ExchangeOrderInsertError(BaseStruct): #exchange order insert failure
    def __init__(self, ExchangeID='', ParticipantID='', TraderID='', InstallID=0, OrderLocalID='', ErrorID=0, ErrorMsg=''):
        self.ExchangeID = '' #exchange ID, char[9]
        self.ParticipantID = '' #participant ID, char[11]
        self.TraderID = '' #exchange trader ID, char[21]
        self.InstallID = '' #install ID, int
        self.OrderLocalID = '' #local order ID, char[13]
        self.ErrorID = '' #error code, int
        self.ErrorMsg = '' #error message, char[81]
class InputOrderAction(BaseStruct): #input order action (cancel/modify request)
    def __init__(self, BrokerID='', InvestorID='', OrderActionRef=0, OrderRef='', RequestID=0, FrontID=0, SessionID=0, ExchangeID='', OrderSysID='', ActionFlag=AF_Delete, LimitPrice=0.0, VolumeChange=0, UserID='', InstrumentID=''):
        self.BrokerID = '' #broker ID, char[11]
        self.InvestorID = '' #investor ID, char[13]
        self.OrderActionRef = '' #order action reference, int
        self.OrderRef = '' #order reference, char[13]
        self.RequestID = '' #request ID, int
        self.FrontID = '' #front ID, int
        self.SessionID = '' #session ID, int
        self.ExchangeID = '' #exchange ID, char[9]
        self.OrderSysID = '' #order system ID, char[21]
        self.ActionFlag = '' #action flag, char
        self.LimitPrice = 'Price' #price, double
        self.VolumeChange = 'Volume' #volume change, int
        self.UserID = '' #user ID, char[16]
        self.InstrumentID = '' #instrument ID, char[31]
class OrderAction(BaseStruct): #order action
    def __init__(self, BrokerID='', InvestorID='', OrderActionRef=0, OrderRef='', RequestID=0, FrontID=0, SessionID=0, ExchangeID='', OrderSysID='', ActionFlag=AF_Delete, LimitPrice=0.0, VolumeChange=0, ActionDate='', ActionTime='', TraderID='', InstallID=0, OrderLocalID='', ActionLocalID='', ParticipantID='', ClientID='', BusinessUnit='', OrderActionStatus=OAS_Submitted, UserID='', StatusMsg='', InstrumentID=''):
        self.BrokerID = '' #broker ID, char[11]
        self.InvestorID = '' #investor ID, char[13]
        self.OrderActionRef = '' #order action reference, int
        self.OrderRef = '' #order reference, char[13]
        self.RequestID = '' #request ID, int
        self.FrontID = '' #front ID, int
        self.SessionID = '' #session ID, int
        self.ExchangeID = '' #exchange ID, char[9]
        self.OrderSysID = '' #order system ID, char[21]
        self.ActionFlag = '' #action flag, char
        self.LimitPrice = 'Price' #price, double
        self.VolumeChange = 'Volume' #volume change, int
        self.ActionDate = 'Date' #action date, char[9]
        self.ActionTime = 'Time' #action time, char[9]
        self.TraderID = '' #exchange trader ID, char[21]
        self.InstallID = '' #install ID, int
        self.OrderLocalID = '' #local order ID, char[13]
        self.ActionLocalID = 'OrderLocalID' #local action ID, char[13]
        self.ParticipantID = '' #participant ID, char[11]
        self.ClientID = '' #client ID, char[11]
        self.BusinessUnit = '' #business unit, char[21]
        self.OrderActionStatus = '' #order action status, char
        self.UserID = '' #user ID, char[16]
        self.StatusMsg = 'ErrorMsg' #status message, char[81]
        self.InstrumentID = '' #instrument ID, char[31]
class ExchangeOrderAction(BaseStruct): #exchange order action
    def __init__(self, ExchangeID='', OrderSysID='', ActionFlag=AF_Delete, LimitPrice=0.0, VolumeChange=0, ActionDate='', ActionTime='', TraderID='', InstallID=0, OrderLocalID='', ActionLocalID='', ParticipantID='', ClientID='', BusinessUnit='', OrderActionStatus=OAS_Submitted, UserID=''):
        self.ExchangeID = '' #exchange ID, char[9]
        self.OrderSysID = '' #order system ID, char[21]
        self.ActionFlag = '' #action flag, char
        self.LimitPrice = 'Price' #price, double
        self.VolumeChange = 'Volume' #volume change, int
        self.ActionDate = 'Date' #action date, char[9]
        self.ActionTime = 'Time' #action time, char[9]
        self.TraderID = '' #exchange trader ID, char[21]
        self.InstallID = '' #install ID, int
        self.OrderLocalID = '' #local order ID, char[13]
        self.ActionLocalID = 'OrderLocalID' #local action ID, char[13]
        self.ParticipantID = '' #participant ID, char[11]
        self.ClientID = '' #client ID, char[11]
        self.BusinessUnit = '' #business unit, char[21]
        self.OrderActionStatus = '' #order action status, char
        self.UserID = '' #user ID, char[16]
class ExchangeOrderActionError(BaseStruct): #exchange order action failure
    def __init__(self, ExchangeID='', OrderSysID='', TraderID='', InstallID=0, OrderLocalID='', ActionLocalID='', ErrorID=0, ErrorMsg=''):
        self.ExchangeID = '' #exchange ID, char[9]
        self.OrderSysID = '' #order system ID, char[21]
        self.TraderID = '' #exchange trader ID, char[21]
        self.InstallID = '' #install ID, int
        self.OrderLocalID = '' #local order ID, char[13]
        self.ActionLocalID = 'OrderLocalID' #local action ID, char[13]
        self.ErrorID = '' #error code, int
        self.ErrorMsg = '' #error message, char[81]
class ExchangeTrade(BaseStruct): #exchange trade
    def __init__(self, ExchangeID='', TradeID='', Direction=D_Buy, OrderSysID='', ParticipantID='', ClientID='', TradingRole=ER_Broker, ExchangeInstID='', OffsetFlag=OF_Open, HedgeFlag=HF_Speculation, Price=0.0, Volume=0, TradeDate='', TradeTime='', TradeType=TRDT_Common, PriceSource=PSRC_LastPrice, TraderID='', OrderLocalID='', ClearingPartID='', BusinessUnit='', SequenceNo=0, TradeSource=TSRC_NORMAL):
        self.ExchangeID = '' #exchange ID, char[9]
        self.TradeID = '' #trade ID, char[21]
        self.Direction = '' #buy/sell direction, char
        self.OrderSysID = '' #order system ID, char[21]
        self.ParticipantID = '' #participant ID, char[11]
        self.ClientID = '' #client ID, char[11]
        self.TradingRole = '' #trading role, char
        self.ExchangeInstID = '' #instrument ID on the exchange, char[31]
        self.OffsetFlag = '' #open/close (offset) flag, char
        self.HedgeFlag = '' #hedge flag (speculation/hedge), char
        self.Price = '' #price, double
        self.Volume = '' #volume, int
        self.TradeDate = 'Date' #trade date, char[9]
        self.TradeTime = 'Time' #trade time, char[9]
        self.TradeType = '' #trade type, char
        self.PriceSource = '' #trade price source, char
        self.TraderID = '' #exchange trader ID, char[21]
        self.OrderLocalID = '' #local order ID, char[13]
        self.ClearingPartID = 'ParticipantID' #clearing participant ID, char[11]
        self.BusinessUnit = '' #business unit, char[21]
        self.SequenceNo = '' #sequence number, int
        self.TradeSource = '' #trade source, char
class Trade(BaseStruct): #trade
    def __init__(self, BrokerID='', InvestorID='', InstrumentID='', OrderRef='', UserID='', ExchangeID='', TradeID='', Direction=D_Buy, OrderSysID='', ParticipantID='', ClientID='', TradingRole=ER_Broker, ExchangeInstID='', OffsetFlag=OF_Open, HedgeFlag=HF_Speculation, Price=0.0, Volume=0, TradeDate='', TradeTime='', TradeType=TRDT_Common, PriceSource=PSRC_LastPrice, TraderID='', OrderLocalID='', ClearingPartID='', BusinessUnit='', SequenceNo=0, TradingDay='', SettlementID=0, BrokerOrderSeq=0, TradeSource=TSRC_NORMAL):
        self.BrokerID = '' #broker ID, char[11]
        self.InvestorID = '' #investor ID, char[13]
        self.InstrumentID = '' #instrument ID, char[31]
        self.OrderRef = '' #order reference, char[13]
        self.UserID = '' #user ID, char[16]
        self.ExchangeID = '' #exchange ID, char[9]
        self.TradeID = '' #trade ID, char[21]
        self.Direction = '' #buy/sell direction, char
        self.OrderSysID = '' #order system ID, char[21]
        self.ParticipantID = '' #participant ID, char[11]
        self.ClientID = '' #client ID, char[11]
        self.TradingRole = '' #trading role, char
        self.ExchangeInstID = '' #instrument ID on the exchange, char[31]
        self.OffsetFlag = '' #open/close (offset) flag, char
        self.HedgeFlag = '' #hedge flag (speculation/hedge), char
        self.Price = '' #price, double
        self.Volume = '' #volume, int
        self.TradeDate = 'Date' #trade date, char[9]
        self.TradeTime = 'Time' #trade time, char[9]
        self.TradeType = '' #trade type, char
        self.PriceSource = '' #trade price source, char
        self.TraderID = '' #exchange trader ID, char[21]
        self.OrderLocalID = '' #local order ID, char[13]
        self.ClearingPartID = 'ParticipantID' #clearing participant ID, char[11]
        self.BusinessUnit = '' #business unit, char[21]
        self.SequenceNo = '' #sequence number, int
        self.TradingDay = 'Date' #trading day, char[9]
        self.SettlementID = '' #settlement ID, int
        self.BrokerOrderSeq = 'SequenceNo' #broker order sequence, int
        self.TradeSource = '' #trade source, char
class UserSession(BaseStruct): #user session
    def __init__(self, FrontID=0, SessionID=0, BrokerID='', UserID='', LoginDate='', LoginTime='', IPAddress='', UserProductInfo='', InterfaceProductInfo='', ProtocolInfo='', MacAddress=''):
        self.FrontID = '' #front ID, int
        self.SessionID = '' #session ID, int
        self.BrokerID = '' #broker ID, char[11]
        self.UserID = '' #user ID, char[16]
        self.LoginDate = 'Date' #login date, char[9]
        self.LoginTime = 'Time' #login time, char[9]
        self.IPAddress = '' #IP address, char[16]
        self.UserProductInfo = 'ProductInfo' #user-side product info, char[11]
        self.InterfaceProductInfo = 'ProductInfo' #interface-side product info, char[11]
        self.ProtocolInfo = '' #protocol info, char[11]
        self.MacAddress = '' #MAC address, char[21]
class QueryMaxOrderVolume(BaseStruct): #query maximum order volume
    def __init__(self, BrokerID='', InvestorID='', InstrumentID='', Direction=D_Buy, OffsetFlag=OF_Open, HedgeFlag=HF_Speculation, MaxVolume=0):
        self.BrokerID = '' #broker ID, char[11]
        self.InvestorID = '' #investor ID, char[13]
        self.InstrumentID = '' #instrument ID, char[31]
        self.Direction = '' #buy/sell direction, char
        self.OffsetFlag = '' #open/close (offset) flag, char
        self.HedgeFlag = '' #hedge flag (speculation/hedge), char
        self.MaxVolume = 'Volume' #maximum allowed order volume, int
class SettlementInfoConfirm(BaseStruct): #investor settlement result confirmation
    def __init__(self, BrokerID='', InvestorID='', ConfirmDate='', ConfirmTime=''):
        self.BrokerID = '' #broker ID, char[11]
        self.InvestorID = '' #investor ID, char[13]
        self.ConfirmDate = 'Date' #confirmation date, char[9]
        self.ConfirmTime = 'Time' #confirmation time, char[9]
class SyncDeposit(BaseStruct): #deposit/withdrawal synchronization
    def __init__(self, DepositSeqNo='', BrokerID='', InvestorID='', Deposit=0.0, IsForce=0):
        self.DepositSeqNo = '' #deposit/withdrawal sequence number, char[15]
        self.BrokerID = '' #broker ID, char[11]
        self.InvestorID = '' #investor ID, char[13]
        self.Deposit = 'Money' #deposit amount, double
        self.IsForce = 'Bool' #whether to force the operation, int
class BrokerSync(BaseStruct): #Broker synchronization
	def __init__(self, BrokerID=''):
		self.BrokerID = '' #Broker ID, char[11]
class SyncingInvestor(BaseStruct): #Investor being synchronized
	def __init__(self, InvestorID='', BrokerID='', InvestorGroupID='', InvestorName='', IdentifiedCardType=ICT_EID, IdentifiedCardNo='', IsActive=0, Telephone='', Address='', OpenDate='', Mobile='', CommModelID='', MarginModelID=''):
		self.InvestorID = '' #Investor ID, char[13]
		self.BrokerID = '' #Broker ID, char[11]
		self.InvestorGroupID = 'InvestorID' #Investor group ID, char[13]
		self.InvestorName = 'PartyName' #Investor name, char[81]
		self.IdentifiedCardType = 'IdCardType' #ID document type, char
		self.IdentifiedCardNo = '' #ID document number, char[51]
		self.IsActive = 'Bool' #Whether active, int
		self.Telephone = '' #Telephone, char[41]
		self.Address = '' #Mailing address, char[101]
		self.OpenDate = 'Date' #Account-opening date, char[9]
		self.Mobile = '' #Mobile phone, char[41]
		self.CommModelID = 'InvestorID' #Commission-rate template ID, char[13]
		self.MarginModelID = 'InvestorID' #Margin-rate template ID, char[13]
class SyncingTradingCode(BaseStruct): #Trading code being synchronized
	def __init__(self, InvestorID='', BrokerID='', ExchangeID='', ClientID='', IsActive=0, ClientIDType=CIDT_Speculation):
		self.InvestorID = '' #Investor ID, char[13]
		self.BrokerID = '' #Broker ID, char[11]
		self.ExchangeID = '' #Exchange ID, char[9]
		self.ClientID = '' #Client ID, char[11]
		self.IsActive = 'Bool' #Whether active, int
		self.ClientIDType = '' #Trading code type, char
class SyncingInvestorGroup(BaseStruct): #Investor group being synchronized
	def __init__(self, BrokerID='', InvestorGroupID='', InvestorGroupName=''):
		self.BrokerID = '' #Broker ID, char[11]
		self.InvestorGroupID = 'InvestorID' #Investor group ID, char[13]
		self.InvestorGroupName = '' #Investor group name, char[41]
class SyncingTradingAccount(BaseStruct): #Trading account being synchronized
	def __init__(self, BrokerID='', AccountID='', PreMortgage=0.0, PreCredit=0.0, PreDeposit=0.0, PreBalance=0.0, PreMargin=0.0, InterestBase=0.0, Interest=0.0, Deposit=0.0, Withdraw=0.0, FrozenMargin=0.0, FrozenCash=0.0, FrozenCommission=0.0, CurrMargin=0.0, CashIn=0.0, Commission=0.0, CloseProfit=0.0, PositionProfit=0.0, Balance=0.0, Available=0.0, WithdrawQuota=0.0, Reserve=0.0, TradingDay='', SettlementID=0, Credit=0.0, Mortgage=0.0, ExchangeMargin=0.0, DeliveryMargin=0.0, ExchangeDeliveryMargin=0.0):
		self.BrokerID = '' #Broker ID, char[11]
		self.AccountID = '' #Investor account ID, char[13]
		self.PreMortgage = 'Money' #Previous mortgage amount, double
		self.PreCredit = 'Money' #Previous credit limit, double
		self.PreDeposit = 'Money' #Previous deposit, double
		self.PreBalance = 'Money' #Previous settlement reserve, double
		self.PreMargin = 'Money' #Previously occupied margin, double
		self.InterestBase = 'Money' #Interest base, double
		self.Interest = 'Money' #Interest income, double
		self.Deposit = 'Money' #Deposit amount, double
		self.Withdraw = 'Money' #Withdrawal amount, double
		self.FrozenMargin = 'Money' #Frozen margin, double
		self.FrozenCash = 'Money' #Frozen cash, double
		self.FrozenCommission = 'Money' #Frozen commission, double
		self.CurrMargin = 'Money' #Current total margin, double
		self.CashIn = 'Money' #Cash difference, double
		self.Commission = 'Money' #Commission, double
		self.CloseProfit = 'Money' #Close profit, double
		self.PositionProfit = 'Money' #Position profit, double
		self.Balance = 'Money' #Futures settlement reserve, double
		self.Available = 'Money' #Available funds, double
		self.WithdrawQuota = 'Money' #Withdrawable funds, double
		self.Reserve = 'Money' #Base reserve, double
		self.TradingDay = 'Date' #Trading day, char[9]
		self.SettlementID = '' #Settlement ID, int
		self.Credit = 'Money' #Credit limit, double
		self.Mortgage = 'Money' #Mortgage amount, double
		self.ExchangeMargin = 'Money' #Exchange margin, double
		self.DeliveryMargin = 'Money' #Investor delivery margin, double
		self.ExchangeDeliveryMargin = 'Money' #Exchange delivery margin, double
class SyncingInvestorPosition(BaseStruct): #Investor position being synchronized
	def __init__(self, InstrumentID='', BrokerID='', InvestorID='', PosiDirection=PD_Net, HedgeFlag=HF_Speculation, PositionDate=PSD_Today, YdPosition=0, Position=0, LongFrozen=0, ShortFrozen=0, LongFrozenAmount=0.0, ShortFrozenAmount=0.0, OpenVolume=0, CloseVolume=0, OpenAmount=0.0, CloseAmount=0.0, PositionCost=0.0, PreMargin=0.0, UseMargin=0.0, FrozenMargin=0.0, FrozenCash=0.0, FrozenCommission=0.0, CashIn=0.0, Commission=0.0, CloseProfit=0.0, PositionProfit=0.0, PreSettlementPrice=0.0, SettlementPrice=0.0, TradingDay='', SettlementID=0, OpenCost=0.0, ExchangeMargin=0.0, CombPosition=0, CombLongFrozen=0, CombShortFrozen=0, CloseProfitByDate=0.0, CloseProfitByTrade=0.0, TodayPosition=0, MarginRateByMoney=0.0, MarginRateByVolume=0.0):
		self.InstrumentID = '' #Instrument ID, char[31]
		self.BrokerID = '' #Broker ID, char[11]
		self.InvestorID = '' #Investor ID, char[13]
		self.PosiDirection = '' #Position direction (long/short/net), char
		self.HedgeFlag = '' #Speculation/hedge flag, char
		self.PositionDate = '' #Position date, char
		self.YdPosition = 'Volume' #Yesterday's position, int
		self.Position = 'Volume' #Today's position, int
		self.LongFrozen = 'Volume' #Long frozen, int
		self.ShortFrozen = 'Volume' #Short frozen, int
		self.LongFrozenAmount = 'Money' #Open-frozen amount (long side), double
		self.ShortFrozenAmount = 'Money' #Open-frozen amount (short side), double
		self.OpenVolume = 'Volume' #Open volume, int
		self.CloseVolume = 'Volume' #Close volume, int
		self.OpenAmount = 'Money' #Open amount, double
		self.CloseAmount = 'Money' #Close amount, double
		self.PositionCost = 'Money' #Position cost, double
		self.PreMargin = 'Money' #Previously occupied margin, double
		self.UseMargin = 'Money' #Occupied margin, double
		self.FrozenMargin = 'Money' #Frozen margin, double
		self.FrozenCash = 'Money' #Frozen cash, double
		self.FrozenCommission = 'Money' #Frozen commission, double
		self.CashIn = 'Money' #Cash difference, double
		self.Commission = 'Money' #Commission, double
		self.CloseProfit = 'Money' #Close profit, double
		self.PositionProfit = 'Money' #Position profit, double
		self.PreSettlementPrice = 'Price' #Previous settlement price, double
		self.SettlementPrice = 'Price' #Current settlement price, double
		self.TradingDay = 'Date' #Trading day, char[9]
		self.SettlementID = '' #Settlement ID, int
		self.OpenCost = 'Money' #Open cost, double
		self.ExchangeMargin = 'Money' #Exchange margin, double
		self.CombPosition = 'Volume' #Position formed by combination trades, int
		self.CombLongFrozen = 'Volume' #Combined long frozen, int
		self.CombShortFrozen = 'Volume' #Combined short frozen, int
		self.CloseProfitByDate = 'Money' #Close profit, daily mark-to-market, double
		self.CloseProfitByTrade = 'Money' #Close profit, trade-by-trade offset, double
		self.TodayPosition = 'Volume' #Today's position, int
		self.MarginRateByMoney = 'Ratio' #Margin rate (by money), double
		self.MarginRateByVolume = 'Ratio' #Margin rate (by volume/lots), double
class SyncingInstrumentMarginRate(BaseStruct): #Instrument margin rate being synchronized
	def __init__(self, InstrumentID='', InvestorRange=IR_All, BrokerID='', InvestorID='', HedgeFlag=HF_Speculation, LongMarginRatioByMoney=0.0, LongMarginRatioByVolume=0.0, ShortMarginRatioByMoney=0.0, ShortMarginRatioByVolume=0.0, IsRelative=0):
		self.InstrumentID = '' #Instrument ID, char[31]
		self.InvestorRange = '' #Investor range, char
		self.BrokerID = '' #Broker ID, char[11]
		self.InvestorID = '' #Investor ID, char[13]
		self.HedgeFlag = '' #Speculation/hedge flag, char
		self.LongMarginRatioByMoney = 'Ratio' #Long margin rate (by money), double
		self.LongMarginRatioByVolume = 'Money' #Long margin fee (by volume), double
		self.ShortMarginRatioByMoney = 'Ratio' #Short margin rate (by money), double
		self.ShortMarginRatioByVolume = 'Money' #Short margin fee (by volume), double
		self.IsRelative = 'Bool' #Whether charged relative to the exchange rate, int
class SyncingInstrumentCommissionRate(BaseStruct): #Instrument commission rate being synchronized
	def __init__(self, InstrumentID='', InvestorRange=IR_All, BrokerID='', InvestorID='', OpenRatioByMoney=0.0, OpenRatioByVolume=0.0, CloseRatioByMoney=0.0, CloseRatioByVolume=0.0, CloseTodayRatioByMoney=0.0, CloseTodayRatioByVolume=0.0):
		self.InstrumentID = '' #Instrument ID, char[31]
		self.InvestorRange = '' #Investor range, char
		self.BrokerID = '' #Broker ID, char[11]
		self.InvestorID = '' #Investor ID, char[13]
		self.OpenRatioByMoney = 'Ratio' #Open commission rate (by money), double
		self.OpenRatioByVolume = 'Ratio' #Open commission (by volume), double
		self.CloseRatioByMoney = 'Ratio' #Close commission rate (by money), double
		self.CloseRatioByVolume = 'Ratio' #Close commission (by volume), double
		self.CloseTodayRatioByMoney = 'Ratio' #Close-today commission rate (by money), double
		self.CloseTodayRatioByVolume = 'Ratio' #Close-today commission (by volume), double
class SyncingInstrumentTradingRight(BaseStruct): #Instrument trading right being synchronized
	def __init__(self, InstrumentID='', InvestorRange=IR_All, BrokerID='', InvestorID='', TradingRight=TR_Allow):
		self.InstrumentID = '' #Instrument ID, char[31]
		self.InvestorRange = '' #Investor range, char
		self.BrokerID = '' #Broker ID, char[11]
		self.InvestorID = '' #Investor ID, char[13]
		self.TradingRight = '' #Trading right, char
class QryOrder(BaseStruct): #Query order
	def __init__(self, BrokerID='', InvestorID='', InstrumentID='', ExchangeID='', OrderSysID='', InsertTimeStart='', InsertTimeEnd=''):
		self.BrokerID = '' #Broker ID, char[11]
		self.InvestorID = '' #Investor ID, char[13]
		self.InstrumentID = '' #Instrument ID, char[31]
		self.ExchangeID = '' #Exchange ID, char[9]
		self.OrderSysID = '' #Order system ID, char[21]
		self.InsertTimeStart = 'Time' #Start time, char[9]
		self.InsertTimeEnd = 'Time' #End time, char[9]
class QryTrade(BaseStruct): #Query trade
	def __init__(self, BrokerID='', InvestorID='', InstrumentID='', ExchangeID='', TradeID='', TradeTimeStart='', TradeTimeEnd=''):
		self.BrokerID = '' #Broker ID, char[11]
		self.InvestorID = '' #Investor ID, char[13]
		self.InstrumentID = '' #Instrument ID, char[31]
		self.ExchangeID = '' #Exchange ID, char[9]
		self.TradeID = '' #Trade ID, char[21]
		self.TradeTimeStart = 'Time' #Start time, char[9]
		self.TradeTimeEnd = 'Time' #End time, char[9]
class QryInvestorPosition(BaseStruct): #Query investor position
	def __init__(self, BrokerID='', InvestorID='', InstrumentID=''):
		self.BrokerID = '' #Broker ID, char[11]
		self.InvestorID = '' #Investor ID, char[13]
		self.InstrumentID = '' #Instrument ID, char[31]
class QryTradingAccount(BaseStruct): #Query trading account
	def __init__(self, BrokerID='', InvestorID=''):
		self.BrokerID = '' #Broker ID, char[11]
		self.InvestorID = '' #Investor ID, char[13]
class QryInvestor(BaseStruct): #Query investor
	def __init__(self, BrokerID='', InvestorID=''):
		self.BrokerID = '' #Broker ID, char[11]
		self.InvestorID = '' #Investor ID, char[13]
class QryTradingCode(BaseStruct): #Query trading code
	def __init__(self, BrokerID='', InvestorID='', ExchangeID='', ClientID='', ClientIDType=CIDT_Speculation):
		self.BrokerID = '' #Broker ID, char[11]
		self.InvestorID = '' #Investor ID, char[13]
		self.ExchangeID = '' #Exchange ID, char[9]
		self.ClientID = '' #Client ID, char[11]
		self.ClientIDType = '' #Trading code type, char
class QryInvestorGroup(BaseStruct): #Query investor group (original comment was a copy-paste of "query trading code")
	def __init__(self, BrokerID=''):
		self.BrokerID = '' #Broker ID, char[11]
class QryInstrumentMarginRate(BaseStruct): #Query instrument margin rate (original comment was a copy-paste of "query trading code")
	def __init__(self, BrokerID='', InvestorID='', InstrumentID='', HedgeFlag=HF_Speculation):
		self.BrokerID = '' #Broker ID, char[11]
		self.InvestorID = '' #Investor ID, char[13]
		self.InstrumentID = '' #Instrument ID, char[31]
		self.HedgeFlag = '' #Speculation/hedge flag, char
class QryInstrumentCommissionRate(BaseStruct): #Query instrument commission rate (original comment was a copy-paste of "query trading code")
	def __init__(self, BrokerID='', InvestorID='', InstrumentID=''):
		self.BrokerID = '' #Broker ID, char[11]
		self.InvestorID = '' #Investor ID, char[13]
		self.InstrumentID = '' #Instrument ID, char[31]
class QryInstrumentTradingRight(BaseStruct): #Query instrument trading right (original comment was a copy-paste of "query trading code")
	def __init__(self, BrokerID='', InvestorID='', InstrumentID=''):
		self.BrokerID = '' #Broker ID, char[11]
		self.InvestorID = '' #Investor ID, char[13]
		self.InstrumentID = '' #Instrument ID, char[31]
class QryBroker(BaseStruct): #Query broker
	def __init__(self, BrokerID=''):
		self.BrokerID = '' #Broker ID, char[11]
class QryTrader(BaseStruct): #Query trader
	def __init__(self, ExchangeID='', ParticipantID='', TraderID=''):
		self.ExchangeID = '' #Exchange ID, char[9]
		self.ParticipantID = '' #Participant ID, char[11]
		self.TraderID = '' #Exchange trader ID, char[21]
class QryPartBroker(BaseStruct): #Query broker's participant ID
	def __init__(self, ExchangeID='', BrokerID='', ParticipantID=''):
		self.ExchangeID = '' #Exchange ID, char[9]
		self.BrokerID = '' #Broker ID, char[11]
		self.ParticipantID = '' #Participant ID, char[11]
class QrySuperUserFunction(BaseStruct): #Query super-user function rights
	def __init__(self, UserID=''):
		self.UserID = '' #User ID, char[16]
class QryUserSession(BaseStruct): #Query user session
	def __init__(self, FrontID=0, SessionID=0, BrokerID='', UserID=''):
		self.FrontID = '' #Front ID, int
		self.SessionID = '' #Session ID, int
		self.BrokerID = '' #Broker ID, char[11]
		self.UserID = '' #User ID, char[16]
class QryFrontStatus(BaseStruct): #Query front status
	def __init__(self, FrontID=0):
		self.FrontID = '' #Front ID, int
class QryExchangeOrder(BaseStruct): #Query exchange order
	def __init__(self, ParticipantID='', ClientID='', ExchangeInstID='', ExchangeID='', TraderID=''):
		self.ParticipantID = '' #Participant ID, char[11]
		self.ClientID = '' #Client ID, char[11]
		self.ExchangeInstID = '' #Instrument ID on the exchange, char[31]
		self.ExchangeID = '' #Exchange ID, char[9]
		self.TraderID = '' #Exchange trader ID, char[21]
class QryOrderAction(BaseStruct): #Query order action
	def __init__(self, BrokerID='', InvestorID='', ExchangeID=''):
		self.BrokerID = '' #Broker ID, char[11]
		self.InvestorID = '' #Investor ID, char[13]
		self.ExchangeID = '' #Exchange ID, char[9]
class QryExchangeOrderAction(BaseStruct): #Query exchange order action
	def __init__(self, ParticipantID='', ClientID='', ExchangeID='', TraderID=''):
		self.ParticipantID = '' #Participant ID, char[11]
		self.ClientID = '' #Client ID, char[11]
		self.ExchangeID = '' #Exchange ID, char[9]
		self.TraderID = '' #Exchange trader ID, char[21]
class QrySuperUser(BaseStruct): #Query super user
	def __init__(self, UserID=''):
		self.UserID = '' #User ID, char[16]
class QryExchange(BaseStruct): #Query exchange
	def __init__(self, ExchangeID=''):
		self.ExchangeID = '' #Exchange ID, char[9]
class QryProduct(BaseStruct): #Query product
	def __init__(self, ProductID=''):
		self.ProductID = 'InstrumentID' #Product ID, char[31]
class QryInstrument(BaseStruct): #Query instrument
	def __init__(self, InstrumentID='', ExchangeID='', ExchangeInstID='', ProductID=''):
		self.InstrumentID = '' #Instrument ID, char[31]
		self.ExchangeID = '' #Exchange ID, char[9]
		self.ExchangeInstID = '' #Instrument ID on the exchange, char[31]
		self.ProductID = 'InstrumentID' #Product ID, char[31]
class QryDepthMarketData(BaseStruct): #Query market data
	def __init__(self, InstrumentID=''):
		self.InstrumentID = '' #Instrument ID, char[31]
class QryBrokerUser(BaseStruct): #Query broker user
	def __init__(self, BrokerID='', UserID=''):
		self.BrokerID = '' #Broker ID, char[11]
		self.UserID = '' #User ID, char[16]
class QryBrokerUserFunction(BaseStruct): #Query broker-user function rights
	def __init__(self, BrokerID='', UserID=''):
		self.BrokerID = '' #Broker ID, char[11]
		self.UserID = '' #User ID, char[16]
class QryTraderOffer(BaseStruct): #Query trader offer (order-entry seat)
	def __init__(self, ExchangeID='', ParticipantID='', TraderID=''):
		self.ExchangeID = '' #Exchange ID, char[9]
		self.ParticipantID = '' #Participant ID, char[11]
		self.TraderID = '' #Exchange trader ID, char[21]
class QrySyncDeposit(BaseStruct): #Query deposit/withdrawal flow
	def __init__(self, BrokerID='', DepositSeqNo=''):
		self.BrokerID = '' #Broker ID, char[11]
		self.DepositSeqNo = '' #Deposit/withdrawal sequence number, char[15]
class QrySettlementInfo(BaseStruct): #Query investor settlement result
	def __init__(self, BrokerID='', InvestorID='', TradingDay=''):
		self.BrokerID = '' #Broker ID, char[11]
		self.InvestorID = '' #Investor ID, char[13]
		self.TradingDay = 'Date' #Trading day, char[9]
class QryHisOrder(BaseStruct): #Query historical order
	def __init__(self, BrokerID='', InvestorID='', InstrumentID='', ExchangeID='', OrderSysID='', InsertTimeStart='', InsertTimeEnd='', TradingDay='', SettlementID=0):
		self.BrokerID = '' #Broker ID, char[11]
		self.InvestorID = '' #Investor ID, char[13]
		self.InstrumentID = '' #Instrument ID, char[31]
		self.ExchangeID = '' #Exchange ID, char[9]
		self.OrderSysID = '' #Order system ID, char[21]
		self.InsertTimeStart = 'Time' #Start time, char[9]
		self.InsertTimeEnd = 'Time' #End time, char[9]
		self.TradingDay = 'Date' #Trading day, char[9]
		self.SettlementID = '' #Settlement ID, int
class MarketData(BaseStruct): #Market data
	def __init__(self, TradingDay='', InstrumentID='', ExchangeID='', ExchangeInstID='', LastPrice=0.0, PreSettlementPrice=0.0, PreClosePrice=0.0, PreOpenInterest=0.0, OpenPrice=0.0, HighestPrice=0.0, LowestPrice=0.0, Volume=0, Turnover=0.0, OpenInterest=0.0, ClosePrice=0.0, SettlementPrice=0.0, UpperLimitPrice=0.0, LowerLimitPrice=0.0, PreDelta=0.0, CurrDelta=0.0, UpdateTime='', UpdateMillisec=0, ActionDay=''):
		self.TradingDay = 'Date' #Trading day, char[9]
		self.InstrumentID = '' #Instrument ID, char[31]
		self.ExchangeID = '' #Exchange ID, char[9]
		self.ExchangeInstID = '' #Instrument ID on the exchange, char[31]
		self.LastPrice = 'Price' #Last price, double
		self.PreSettlementPrice = 'Price' #Previous settlement price, double
		self.PreClosePrice = 'Price' #Previous close price, double
		self.PreOpenInterest = 'LargeVolume' #Previous open interest, double
		self.OpenPrice = 'Price' #Open price, double
		self.HighestPrice = 'Price' #Highest price, double
		self.LowestPrice = 'Price' #Lowest price, double
		self.Volume = '' #Volume, int
		self.Turnover = 'Money' #Turnover, double
		self.OpenInterest = 'LargeVolume' #Open interest, double
		self.ClosePrice = 'Price' #Close price, double
		self.SettlementPrice = 'Price' #Current settlement price, double
		self.UpperLimitPrice = 'Price' #Upper limit price, double
		self.LowerLimitPrice = 'Price' #Lower limit price, double
		self.PreDelta = 'Ratio' #Previous delta, double
		self.CurrDelta = 'Ratio' #Current delta, double
		self.UpdateTime = 'Time' #Last update time, char[9]
		self.UpdateMillisec = 'Millisec' #Last update millisecond, int
		self.ActionDay = 'Date' #Business date, char[9]
class MarketDataBase(BaseStruct): #Market data: base fields
	def __init__(self, TradingDay='', PreSettlementPrice=0.0, PreClosePrice=0.0, PreOpenInterest=0.0, PreDelta=0.0):
		self.TradingDay = 'Date' #Trading day, char[9]
		self.PreSettlementPrice = 'Price' #Previous settlement price, double
		self.PreClosePrice = 'Price' #Previous close price, double
		self.PreOpenInterest = 'LargeVolume' #Previous open interest, double
		self.PreDelta = 'Ratio' #Previous delta, double
class MarketDataStatic(BaseStruct): #Market data: static fields
	def __init__(self, OpenPrice=0.0, HighestPrice=0.0, LowestPrice=0.0, ClosePrice=0.0, UpperLimitPrice=0.0, LowerLimitPrice=0.0, SettlementPrice=0.0, CurrDelta=0.0):
		self.OpenPrice = 'Price' #Open price, double
		self.HighestPrice = 'Price' #Highest price, double
		self.LowestPrice = 'Price' #Lowest price, double
		self.ClosePrice = 'Price' #Close price, double
		self.UpperLimitPrice = 'Price' #Upper limit price, double
		self.LowerLimitPrice = 'Price' #Lower limit price, double
		self.SettlementPrice = 'Price' #Current settlement price, double
		self.CurrDelta = 'Ratio' #Current delta, double
class MarketDataLastMatch(BaseStruct): #Market data: last-trade fields
	def __init__(self, LastPrice=0.0, Volume=0, Turnover=0.0, OpenInterest=0.0):
		self.LastPrice = 'Price' #Last price, double
		self.Volume = '' #Volume, int
		self.Turnover = 'Money' #Turnover, double
		self.OpenInterest = 'LargeVolume' #Open interest, double
class MarketDataBestPrice(BaseStruct): #Market data: best-price fields
	def __init__(self, BidPrice1=0.0, BidVolume1=0, AskPrice1=0.0, AskVolume1=0):
		self.BidPrice1 = 'Price' #Bid price 1, double
		self.BidVolume1 = 'Volume' #Bid volume 1, int
		self.AskPrice1 = 'Price' #Ask price 1, double
		self.AskVolume1 = 'Volume' #Ask volume 1, int
class MarketDataBid23(BaseStruct): #Market data: bid levels 2 and 3
	def __init__(self, BidPrice2=0.0, BidVolume2=0, BidPrice3=0.0, BidVolume3=0):
		self.BidPrice2 = 'Price' #Bid price 2, double
		self.BidVolume2 = 'Volume' #Bid volume 2, int
		self.BidPrice3 = 'Price' #Bid price 3, double
		self.BidVolume3 = 'Volume' #Bid volume 3, int
class MarketDataAsk23(BaseStruct): #Market data: ask levels 2 and 3
	def __init__(self, AskPrice2=0.0, AskVolume2=0, AskPrice3=0.0, AskVolume3=0):
		self.AskPrice2 = 'Price' #Ask price 2, double
		self.AskVolume2 = 'Volume' #Ask volume 2, int
		self.AskPrice3 = 'Price' #Ask price 3, double
		self.AskVolume3 = 'Volume' #Ask volume 3, int
class MarketDataBid45(BaseStruct): #Market data: bid levels 4 and 5
	def __init__(self, BidPrice4=0.0, BidVolume4=0, BidPrice5=0.0, BidVolume5=0):
		self.BidPrice4 = 'Price' #Bid price 4, double
		self.BidVolume4 = 'Volume' #Bid volume 4, int
		self.BidPrice5 = 'Price' #Bid price 5, double
		self.BidVolume5 = 'Volume' #Bid volume 5, int
class MarketDataAsk45(BaseStruct): #Market data: ask levels 4 and 5
	def __init__(self, AskPrice4=0.0, AskVolume4=0, AskPrice5=0.0, AskVolume5=0):
		self.AskPrice4 = 'Price' #Ask price 4, double
		self.AskVolume4 = 'Volume' #Ask volume 4, int
		self.AskPrice5 = 'Price' #Ask price 5, double
		self.AskVolume5 = 'Volume' #Ask volume 5, int
class MarketDataUpdateTime(BaseStruct): #Market data: update-time fields
	def __init__(self, InstrumentID='', UpdateTime='', UpdateMillisec=0, ActionDay=''):
		self.InstrumentID = '' #Instrument ID, char[31]
		self.UpdateTime = 'Time' #Last update time, char[9]
		self.UpdateMillisec = 'Millisec' #Last update millisecond, int
		self.ActionDay = 'Date' #Business date, char[9]
class MarketDataExchange(BaseStruct): #Market data: exchange-ID field
	def __init__(self, ExchangeID=''):
		self.ExchangeID = '' #Exchange ID, char[9]
class SpecificInstrument(BaseStruct): #Specified instrument
	def __init__(self, InstrumentID=''):
		self.InstrumentID = '' #Instrument ID, char[31]
class InstrumentStatus(BaseStruct): #Instrument status
	def __init__(self, ExchangeID='', ExchangeInstID='', SettlementGroupID='', InstrumentID='', InstrumentStatus=IS_BeforeTrading, TradingSegmentSN=0, EnterTime='', EnterReason=IER_Automatic):
		self.ExchangeID = '' #Exchange ID, char[9]
		self.ExchangeInstID = '' #Instrument ID on the exchange, char[31]
		self.SettlementGroupID = '' #Settlement group ID, char[9]
		self.InstrumentID = '' #Instrument ID, char[31]
		self.InstrumentStatus = '' #Instrument trading status, char
		self.TradingSegmentSN = '' #Trading segment sequence number, int
		self.EnterTime = 'Time' #Time this status was entered, char[9]
		self.EnterReason = 'InstStatusEnterReason' #Reason this status was entered, char
class QryInstrumentStatus(BaseStruct): #Query instrument status
	def __init__(self, ExchangeID='', ExchangeInstID=''):
		self.ExchangeID = '' #Exchange ID, char[9]
		self.ExchangeInstID = '' #Instrument ID on the exchange, char[31]
class InvestorAccount(BaseStruct): #Investor account
	def __init__(self, BrokerID='', InvestorID='', AccountID=''):
		self.BrokerID = '' #Broker ID, char[11]
		self.InvestorID = '' #Investor ID, char[13]
		self.AccountID = '' #Investor account ID, char[13]
class PositionProfitAlgorithm(BaseStruct): #Floating profit/loss algorithm
	def __init__(self, BrokerID='', AccountID='', Algorithm=AG_All, Memo=''):
		self.BrokerID = '' #Broker ID, char[11]
		self.AccountID = '' #Investor account ID, char[13]
		self.Algorithm = '' #Profit/loss algorithm, char
		self.Memo = '' #Memo, char[161]
class Discount(BaseStruct): #Participant fund discount
	def __init__(self, BrokerID='', InvestorRange=IR_All, InvestorID='', Discount=0.0):
		self.BrokerID = '' #Broker ID, char[11]
		self.InvestorRange = '' #Investor range, char
		self.InvestorID = '' #Investor ID, char[13]
		self.Discount = 'Ratio' #Fund discount ratio, double
class QryTransferBank(BaseStruct): #Query transfer bank
	def __init__(self, BankID='', BankBrchID=''):
		self.BankID = '' #Bank ID, char[4]
		self.BankBrchID = '' #Bank branch ID, char[5]
class TransferBank(BaseStruct): #Transfer bank
	def __init__(self, BankID='', BankBrchID='', BankName='', IsActive=0):
		self.BankID = '' #Bank ID, char[4]
		self.BankBrchID = '' #Bank branch ID, char[5]
		self.BankName = '' #Bank name, char[101]
		self.IsActive = 'Bool' #Whether active, int
class QryInvestorPositionDetail(BaseStruct): #Query investor position detail
	def __init__(self, BrokerID='', InvestorID='', InstrumentID=''):
		self.BrokerID = '' #Broker ID, char[11]
		self.InvestorID = '' #Investor ID, char[13]
		self.InstrumentID = '' #Instrument ID, char[31]
class InvestorPositionDetail(BaseStruct): #Investor position detail
	def __init__(self, InstrumentID='', BrokerID='', InvestorID='', HedgeFlag=HF_Speculation, Direction=D_Buy, OpenDate='', TradeID='', Volume=0, OpenPrice=0.0, TradingDay='', SettlementID=0, TradeType=TRDT_Common, CombInstrumentID='', ExchangeID='', CloseProfitByDate=0.0, CloseProfitByTrade=0.0, PositionProfitByDate=0.0, PositionProfitByTrade=0.0, Margin=0.0, ExchMargin=0.0, MarginRateByMoney=0.0, MarginRateByVolume=0.0, LastSettlementPrice=0.0, SettlementPrice=0.0, CloseVolume=0, CloseAmount=0.0):
		self.InstrumentID = '' #Instrument ID, char[31]
		self.BrokerID = '' #Broker ID, char[11]
		self.InvestorID = '' #Investor ID, char[13]
		self.HedgeFlag = '' #Speculation/hedge flag, char
		self.Direction = '' #Buy/sell direction, char
		self.OpenDate = 'Date' #Open date, char[9]
		self.TradeID = '' #Trade ID, char[21]
		self.Volume = '' #Volume, int
		self.OpenPrice = 'Price' #Open price, double
		self.TradingDay = 'Date' #Trading day, char[9]
		self.SettlementID = '' #Settlement ID, int
		self.TradeType = '' #Trade type, char
		self.CombInstrumentID = 'InstrumentID' #Combination instrument ID, char[31]
		self.ExchangeID = '' #Exchange ID, char[9]
		self.CloseProfitByDate = 'Money' #Close profit, daily mark-to-market, double
		self.CloseProfitByTrade = 'Money' #Close profit, trade-by-trade offset, double
		self.PositionProfitByDate = 'Money' #Position profit, daily mark-to-market, double
		self.PositionProfitByTrade = 'Money' #Position profit, trade-by-trade offset, double
		self.Margin = 'Money' #Investor margin, double
		self.ExchMargin = 'Money' #Exchange margin, double
		self.MarginRateByMoney = 'Ratio' #Margin rate (by money), double
		self.MarginRateByVolume = 'Ratio' #Margin rate (by volume/lots), double
		self.LastSettlementPrice = 'Price' #Previous settlement price, double
		self.SettlementPrice = 'Price' #Settlement price, double
		self.CloseVolume = 'Volume' #Close volume, int
		self.CloseAmount = 'Money' #Close amount, double
class TradingAccountPassword(BaseStruct): #Trading-account password field
	def __init__(self, BrokerID='', AccountID='', Password=''):
		self.BrokerID = '' #Broker ID, char[11]
		self.AccountID = '' #Investor account ID, char[13]
		self.Password = '' #Password, char[41]
class MDTraderOffer(BaseStruct): #Exchange market-data trader offer
	def __init__(self, ExchangeID='', TraderID='', ParticipantID='', Password='', InstallID=0, OrderLocalID='', TraderConnectStatus=TCS_NotConnected, ConnectRequestDate='', ConnectRequestTime='', LastReportDate='', LastReportTime='', ConnectDate='', ConnectTime='', StartDate='', StartTime='', TradingDay='', BrokerID='', MaxTradeID='', MaxOrderMessageReference=''):
		self.ExchangeID = '' #Exchange ID, char[9]
		self.TraderID = '' #Exchange trader ID, char[21]
		self.ParticipantID = '' #Participant ID, char[11]
		self.Password = '' #Password, char[41]
		self.InstallID = '' #Install ID, int
		self.OrderLocalID = '' #Local order ID, char[13]
		self.TraderConnectStatus = '' #Exchange trader connection status, char
		self.ConnectRequestDate = 'Date' #Date the connect request was sent, char[9]
		self.ConnectRequestTime = 'Time' #Time the connect request was sent, char[9]
		self.LastReportDate = 'Date' #Last report date, char[9]
		self.LastReportTime = 'Time' #Last report time, char[9]
		self.ConnectDate = 'Date' #Date the connection completed, char[9]
		self.ConnectTime = 'Time' #Time the connection completed, char[9]
		self.StartDate = 'Date' #Start date, char[9]
		self.StartTime = 'Time' #Start time, char[9]
		self.TradingDay = 'Date' #Trading day, char[9]
		self.BrokerID = '' #Broker ID, char[11]
		self.MaxTradeID = 'TradeID' #Maximum trade ID on this seat, char[21]
		self.MaxOrderMessageReference = 'ReturnCode' #Maximum order message reference on this seat, char[7]
class QryMDTraderOffer(BaseStruct): #Query market-data trader offer
	def __init__(self, ExchangeID='', ParticipantID='', TraderID=''):
		self.ExchangeID = '' #Exchange ID, char[9]
		self.ParticipantID = '' #Participant ID, char[11]
		self.TraderID = '' #Exchange trader ID, char[21]
class QryNotice(BaseStruct): #Query client notice
	def __init__(self, BrokerID=''):
		self.BrokerID = '' #Broker ID, char[11]
class Notice(BaseStruct): #Client notice
	def __init__(self, BrokerID='', Content='', SequenceLabel=''):
		self.BrokerID = '' #Broker ID, char[11]
		self.Content = '' #Message body, char[501]
		self.SequenceLabel = '' #Broker notice content sequence label, char[2]
class UserRight(BaseStruct): #User right
	def __init__(self, BrokerID='', UserID='', UserRightType=URT_Logon, IsForbidden=0):
		self.BrokerID = '' #Broker ID, char[11]
		self.UserID = '' #User ID, char[16]
		self.UserRightType = '' #User right type, char
		self.IsForbidden = 'Bool' #Whether forbidden, int
class QrySettlementInfoConfirm(BaseStruct): #Query settlement-info confirmation
	def __init__(self, BrokerID='', InvestorID=''):
		self.BrokerID = '' #Broker ID, char[11]
		self.InvestorID = '' #Investor ID, char[13]
class LoadSettlementInfo(BaseStruct): #Load settlement info
	def __init__(self, BrokerID=''):
		self.BrokerID = '' #Broker ID, char[11]
class BrokerWithdrawAlgorithm(BaseStruct): #Broker withdrawable-funds algorithm table
	def __init__(self, BrokerID='', WithdrawAlgorithm=AG_All, UsingRatio=0.0, IncludeCloseProfit=ICP_Include, AllWithoutTrade=AWT_Enable, AvailIncludeCloseProfit=ICP_Include, IsBrokerUserEvent=0):
		self.BrokerID = '' #Broker ID, char[11]
		self.WithdrawAlgorithm = 'Algorithm' #Withdrawable-funds algorithm, char
		self.UsingRatio = 'Ratio' #Fund usage ratio, double
		self.IncludeCloseProfit = '' #Whether withdrawable funds include close profit, char
		self.AllWithoutTrade = '' #Whether clients with no position and no trades today are limited by the withdraw ratio, char
		self.AvailIncludeCloseProfit = 'IncludeCloseProfit' #Whether available funds include close profit, char
		self.IsBrokerUserEvent = 'Bool' #Whether broker-user events are enabled, int
class TradingAccountPasswordUpdateV1(BaseStruct): #Trading-account password update (v1, keyed by investor)
	def __init__(self, BrokerID='', InvestorID='', OldPassword='', NewPassword=''):
		self.BrokerID = '' #Broker ID, char[11]
		self.InvestorID = '' #Investor ID, char[13]
		self.OldPassword = 'Password' #Old password, char[41]
		self.NewPassword = 'Password' #New password, char[41]
class TradingAccountPasswordUpdate(BaseStruct): #Trading-account password update (keyed by account)
	def __init__(self, BrokerID='', AccountID='', OldPassword='', NewPassword=''):
		self.BrokerID = '' #Broker ID, char[11]
		self.AccountID = '' #Investor account ID, char[13]
		self.OldPassword = 'Password' #Old password, char[41]
		self.NewPassword = 'Password' #New password, char[41]
class QryCombinationLeg(BaseStruct): #Query combination-contract leg
	def __init__(self, CombInstrumentID='', LegID=0, LegInstrumentID=''):
		self.CombInstrumentID = 'InstrumentID' #Combination instrument ID, char[31]
		self.LegID = '' #Leg ID, int
		self.LegInstrumentID = 'InstrumentID' #Leg instrument ID, char[31]
class QrySyncStatus(BaseStruct): #Query data sync status (original comment was a copy-paste of "query combination leg")
	def __init__(self, TradingDay=''):
		self.TradingDay = 'Date' #Trading day, char[9]
class CombinationLeg(BaseStruct): #Single leg of a combination contract
	def __init__(self, CombInstrumentID='', LegID=0, LegInstrumentID='', Direction=D_Buy, LegMultiple=0, ImplyLevel=0):
		self.CombInstrumentID = 'InstrumentID' #Combination instrument ID, char[31]
		self.LegID = '' #Leg ID, int
		self.LegInstrumentID = 'InstrumentID' #Leg instrument ID, char[31]
		self.Direction = '' #Buy/sell direction, char
		self.LegMultiple = '' #Leg multiple, int
		self.ImplyLevel = '' #Implied derivation level, int
class SyncStatus(BaseStruct): #Data synchronization status
	def __init__(self, TradingDay='', DataSyncStatus=DS_Asynchronous):
		self.TradingDay = 'Date' #Trading day, char[9]
		self.DataSyncStatus = '' #Data synchronization status, char
class QryLinkMan(BaseStruct): #Query contact person
	def __init__(self, BrokerID='', InvestorID=''):
		self.BrokerID = '' #Broker ID, char[11]
		self.InvestorID = '' #Investor ID, char[13]
class LinkMan(BaseStruct): #Contact person
	def __init__(self, BrokerID='', InvestorID='', PersonType=PST_Order, IdentifiedCardType=ICT_EID, IdentifiedCardNo='', PersonName='', Telephone='', Address='', ZipCode='', Priority=0):
		self.BrokerID = '' #Broker ID, char[11]
		self.InvestorID = '' #Investor ID, char[13]
		self.PersonType = '' #Contact person type, char
		self.IdentifiedCardType = 'IdCardType' #ID document type, char
		self.IdentifiedCardNo = '' #ID document number, char[51]
		self.PersonName = 'PartyName' #Name, char[81]
		self.Telephone = '' #Telephone, char[41]
		self.Address = '' #Mailing address, char[101]
		self.ZipCode = '' #Postal code, char[7]
		self.Priority = '' #Priority, int
class QryBrokerUserEvent(BaseStruct): #Query broker-user event
	def __init__(self, BrokerID='', UserID='', UserEventType=UET_Login):
		self.BrokerID = '' #Broker ID, char[11]
		self.UserID = '' #User ID, char[16]
		self.UserEventType = '' #User event type, char
class BrokerUserEvent(BaseStruct): #Broker-user event
	def __init__(self, BrokerID='', UserID='', UserEventType=UET_Login, EventSequenceNo=0, EventDate='', EventTime='', UserEventInfo='', InvestorID='', InstrumentID=''):
		self.BrokerID = '' #Broker ID, char[11]
		self.UserID = '' #User ID, char[16]
		self.UserEventType = '' #User event type, char
		self.EventSequenceNo = 'SequenceNo' #User event sequence number, int
		self.EventDate = 'Date' #Event date, char[9]
		self.EventTime = 'Time' #Event time, char[9]
		self.UserEventInfo = '' #User event info, char[1025]
		self.InvestorID = '' #Investor ID, char[13]
		self.InstrumentID = '' #Instrument ID, char[31]
class QryContractBank(BaseStruct): #Query contracted bank (request)
	def __init__(self, BrokerID='', BankID='', BankBrchID=''):
		self.BrokerID = '' #Broker ID, char[11]
		self.BankID = '' #Bank ID, char[4]
		self.BankBrchID = '' #Bank branch ID, char[5]
class ContractBank(BaseStruct): #Query contracted bank (response)
	def __init__(self, BrokerID='', BankID='', BankBrchID='', BankName=''):
		self.BrokerID = '' #Broker ID, char[11]
		self.BankID = '' #Bank ID, char[4]
		self.BankBrchID = '' #Bank branch ID, char[5]
		self.BankName = '' #Bank name, char[101]
class InvestorPositionCombineDetail(BaseStruct): #Investor combined position detail
	def __init__(self, TradingDay='', OpenDate='', ExchangeID='', SettlementID=0, BrokerID='', InvestorID='', ComTradeID='', TradeID='', InstrumentID='', HedgeFlag=HF_Speculation, Direction=D_Buy, TotalAmt=0, Margin=0.0, ExchMargin=0.0, MarginRateByMoney=0.0, MarginRateByVolume=0.0, LegID=0, LegMultiple=0, CombInstrumentID=''):
		self.TradingDay = 'Date' #Trading day, char[9]
		self.OpenDate = 'Date' #Open date, char[9]
		self.ExchangeID = '' #Exchange ID, char[9]
		self.SettlementID = '' #Settlement ID, int
		self.BrokerID = '' #Broker ID, char[11]
		self.InvestorID = '' #Investor ID, char[13]
		self.ComTradeID = 'TradeID' #Combination trade ID, char[21]
		self.TradeID = '' #Match (trade) ID, char[21]
		self.InstrumentID = '' #Instrument ID, char[31]
		self.HedgeFlag = '' #Speculation/hedge flag, char
		self.Direction = '' #Buy/sell direction, char
		self.TotalAmt = 'Volume' #Position volume, int
		self.Margin = 'Money' #Investor margin, double
		self.ExchMargin = 'Money' #Exchange margin, double
		self.MarginRateByMoney = 'Ratio' #Margin rate (by money), double
		self.MarginRateByVolume = 'Ratio' #Margin rate (by volume/lots), double
		self.LegID = '' #Leg ID, int
		self.LegMultiple = '' #Leg multiple, int
		self.CombInstrumentID = 'InstrumentID' #Combination position instrument ID, char[31]
class ParkedOrder(BaseStruct): #Parked (pre-lodged) order
	def __init__(self, BrokerID='', InvestorID='', InstrumentID='', OrderRef='', UserID='', OrderPriceType=OPT_AnyPrice, Direction=D_Buy, CombOffsetFlag='', CombHedgeFlag='', LimitPrice=0.0, VolumeTotalOriginal=0, TimeCondition=TC_IOC, GTDDate='', VolumeCondition=VC_AV, MinVolume=0, ContingentCondition=CC_Immediately, StopPrice=0.0, ForceCloseReason=FCC_NotForceClose, IsAutoSuspend=0, BusinessUnit='', RequestID=0, UserForceClose=0, ExchangeID='', ParkedOrderID='', UserType=UT_Investor, Status=PAOS_NotSend, ErrorID=0, ErrorMsg='', IsSwapOrder=0):
		self.BrokerID = '' #Broker ID, char[11]
		self.InvestorID = '' #Investor ID, char[13]
		self.InstrumentID = '' #Instrument ID, char[31]
		self.OrderRef = '' #Order reference, char[13]
		self.UserID = '' #User ID, char[16]
		self.OrderPriceType = '' #Order price type, char
		self.Direction = '' #Buy/sell direction, char
		self.CombOffsetFlag = '' #Combined offset flag, char[5]
		self.CombHedgeFlag = '' #Combined speculation/hedge flag, char[5]
		self.LimitPrice = 'Price' #Price, double
		self.VolumeTotalOriginal = 'Volume' #Volume, int
		self.TimeCondition = '' #Time-in-force condition, char
		self.GTDDate = 'Date' #GTD date, char[9]
		self.VolumeCondition = '' #Volume condition, char
		self.MinVolume = 'Volume' #Minimum volume, int
		self.ContingentCondition = '' #Contingent (trigger) condition, char
		self.StopPrice = 'Price' #Stop price, double
		self.ForceCloseReason = '' #Force-close reason, char
		self.IsAutoSuspend = 'Bool' #Auto-suspend flag, int
		self.BusinessUnit = '' #Business unit, char[21]
		self.RequestID = '' #Request ID, int
		self.UserForceClose = 'Bool' #User force-close flag, int
		self.ExchangeID = '' #Exchange ID, char[9]
		self.ParkedOrderID = '' #Parked order ID, char[13]
		self.UserType = '' #User type, char
		self.Status = 'ParkedOrderStatus' #Parked order status, char
		self.ErrorID = '' #Error ID, int
		self.ErrorMsg = '' #Error message, char[81]
		self.IsSwapOrder = 'Bool' #Swap order flag, int
class ParkedOrderAction(BaseStruct): #Parked order action input (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, BrokerID='', InvestorID='', OrderActionRef=0, OrderRef='', RequestID=0, FrontID=0, SessionID=0, ExchangeID='', OrderSysID='', ActionFlag=AF_Delete, LimitPrice=0.0, VolumeChange=0, UserID='', InstrumentID='', ParkedOrderActionID='', UserType=UT_Investor, Status=PAOS_NotSend, ErrorID=0, ErrorMsg=''):
		self.BrokerID = '' #broker ID, char[11]
		self.InvestorID = '' #investor ID, char[13]
		self.OrderActionRef = '' #order action reference, int
		self.OrderRef = '' #order reference, char[13]
		self.RequestID = '' #request ID, int
		self.FrontID = '' #front ID, int
		self.SessionID = '' #session ID, int
		self.ExchangeID = '' #exchange ID, char[9]
		self.OrderSysID = '' #order system ID, char[21]
		self.ActionFlag = '' #action flag, char
		self.LimitPrice = 'Price' #price, double
		self.VolumeChange = 'Volume' #volume change, int
		self.UserID = '' #user ID, char[16]
		self.InstrumentID = '' #instrument ID, char[31]
		self.ParkedOrderActionID = '' #parked order-action ID, char[13]
		self.UserType = '' #user type, char
		self.Status = 'ParkedOrderStatus' #parked order-action status, char
		self.ErrorID = '' #error code, int
		self.ErrorMsg = '' #error message, char[81]
class QryParkedOrder(BaseStruct): #Query parked orders (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, BrokerID='', InvestorID='', InstrumentID='', ExchangeID=''):
		self.BrokerID = '' #broker ID, char[11]
		self.InvestorID = '' #investor ID, char[13]
		self.InstrumentID = '' #instrument ID, char[31]
		self.ExchangeID = '' #exchange ID, char[9]
class QryParkedOrderAction(BaseStruct): #Query parked order cancellations (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, BrokerID='', InvestorID='', InstrumentID='', ExchangeID=''):
		self.BrokerID = '' #broker ID, char[11]
		self.InvestorID = '' #investor ID, char[13]
		self.InstrumentID = '' #instrument ID, char[31]
		self.ExchangeID = '' #exchange ID, char[9]
class RemoveParkedOrder(BaseStruct): #Remove a parked order (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, BrokerID='', InvestorID='', ParkedOrderID=''):
		self.BrokerID = '' #broker ID, char[11]
		self.InvestorID = '' #investor ID, char[13]
		self.ParkedOrderID = '' #parked order ID, char[13]
class RemoveParkedOrderAction(BaseStruct): #Remove a parked order cancellation (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, BrokerID='', InvestorID='', ParkedOrderActionID=''):
		self.BrokerID = '' #broker ID, char[11]
		self.InvestorID = '' #investor ID, char[13]
		self.ParkedOrderActionID = '' #parked order-action ID, char[13]
class InvestorWithdrawAlgorithm(BaseStruct): #Broker withdrawable-funds algorithm table (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, BrokerID='', InvestorRange=IR_All, InvestorID='', UsingRatio=0.0):
		self.BrokerID = '' #broker ID, char[11]
		self.InvestorRange = '' #investor range, char
		self.InvestorID = '' #investor ID, char[13]
		self.UsingRatio = 'Ratio' #withdrawable-funds ratio, double
class QryInvestorPositionCombineDetail(BaseStruct): #Query combined position detail (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, BrokerID='', InvestorID='', CombInstrumentID=''):
		self.BrokerID = '' #broker ID, char[11]
		self.InvestorID = '' #investor ID, char[13]
		self.CombInstrumentID = 'InstrumentID' #combined-position instrument ID, char[31]
class MarketDataAveragePrice(BaseStruct): #Average trade price (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, AveragePrice=0.0):
		self.AveragePrice = 'Price' #average price of the day, double
class VerifyInvestorPassword(BaseStruct): #Verify investor password (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, BrokerID='', InvestorID='', Password=''):
		self.BrokerID = '' #broker ID, char[11]
		self.InvestorID = '' #investor ID, char[13]
		self.Password = '' #password, char[41]
class UserIP(BaseStruct): #User IP (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, BrokerID='', UserID='', IPAddress='', IPMask='', MacAddress=''):
		self.BrokerID = '' #broker ID, char[11]
		self.UserID = '' #user ID, char[16]
		self.IPAddress = '' #IP address, char[16]
		self.IPMask = 'IPAddress' #IP address mask, char[16]
		self.MacAddress = '' #MAC address, char[21]
class TradingNoticeInfo(BaseStruct): #User event notice info (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, BrokerID='', InvestorID='', SendTime='', FieldContent='', SequenceSeries=0, SequenceNo=0):
		self.BrokerID = '' #broker ID, char[11]
		self.InvestorID = '' #investor ID, char[13]
		self.SendTime = 'Time' #send time, char[9]
		self.FieldContent = 'Content' #message body, char[501]
		self.SequenceSeries = '' #sequence series number, short
		self.SequenceNo = '' #sequence number, int
class TradingNotice(BaseStruct): #User event notice (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, BrokerID='', InvestorRange=IR_All, InvestorID='', SequenceSeries=0, UserID='', SendTime='', SequenceNo=0, FieldContent=''):
		self.BrokerID = '' #broker ID, char[11]
		self.InvestorRange = '' #investor range, char
		self.InvestorID = '' #investor ID, char[13]
		self.SequenceSeries = '' #sequence series number, short
		self.UserID = '' #user ID, char[16]
		self.SendTime = 'Time' #send time, char[9]
		self.SequenceNo = '' #sequence number, int
		self.FieldContent = 'Content' #message body, char[501]
class QryTradingNotice(BaseStruct): #Query trading event notices (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, BrokerID='', InvestorID=''):
		self.BrokerID = '' #broker ID, char[11]
		self.InvestorID = '' #investor ID, char[13]
class QryErrOrder(BaseStruct): #Query erroneous orders (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, BrokerID='', InvestorID=''):
		self.BrokerID = '' #broker ID, char[11]
		self.InvestorID = '' #investor ID, char[13]
class ErrOrder(BaseStruct): #Erroneous order (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, BrokerID='', InvestorID='', InstrumentID='', OrderRef='', UserID='', OrderPriceType=OPT_AnyPrice, Direction=D_Buy, CombOffsetFlag='', CombHedgeFlag='', LimitPrice=0.0, VolumeTotalOriginal=0, TimeCondition=TC_IOC, GTDDate='', VolumeCondition=VC_AV, MinVolume=0, ContingentCondition=CC_Immediately, StopPrice=0.0, ForceCloseReason=FCC_NotForceClose, IsAutoSuspend=0, BusinessUnit='', RequestID=0, UserForceClose=0, ErrorID=0, ErrorMsg='', IsSwapOrder=0):
		self.BrokerID = '' #broker ID, char[11]
		self.InvestorID = '' #investor ID, char[13]
		self.InstrumentID = '' #instrument ID, char[31]
		self.OrderRef = '' #order reference, char[13]
		self.UserID = '' #user ID, char[16]
		self.OrderPriceType = '' #order price type, char
		self.Direction = '' #buy/sell direction, char
		self.CombOffsetFlag = '' #combined open/close flag, char[5]
		self.CombHedgeFlag = '' #combined speculation/hedge flag, char[5]
		self.LimitPrice = 'Price' #limit price, double
		self.VolumeTotalOriginal = 'Volume' #original total volume, int
		self.TimeCondition = '' #time-in-force condition, char
		self.GTDDate = 'Date' #good-till-date, char[9]
		self.VolumeCondition = '' #volume condition, char
		self.MinVolume = 'Volume' #minimum fill volume, int
		self.ContingentCondition = '' #contingent (trigger) condition, char
		self.StopPrice = 'Price' #stop price, double
		self.ForceCloseReason = '' #force-close reason, char
		self.IsAutoSuspend = 'Bool' #auto-suspend flag, int
		self.BusinessUnit = '' #business unit, char[21]
		self.RequestID = '' #request ID, int
		self.UserForceClose = 'Bool' #user force-close flag, int
		self.ErrorID = '' #error code, int
		self.ErrorMsg = '' #error message, char[81]
		self.IsSwapOrder = 'Bool' #swap-order flag, int
class ErrorConditionalOrder(BaseStruct): #Erroneous conditional order (query result) (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, BrokerID='', InvestorID='', InstrumentID='', OrderRef='', UserID='', OrderPriceType=OPT_AnyPrice, Direction=D_Buy, CombOffsetFlag='', CombHedgeFlag='', LimitPrice=0.0, VolumeTotalOriginal=0, TimeCondition=TC_IOC, GTDDate='', VolumeCondition=VC_AV, MinVolume=0, ContingentCondition=CC_Immediately, StopPrice=0.0, ForceCloseReason=FCC_NotForceClose, IsAutoSuspend=0, BusinessUnit='', RequestID=0, OrderLocalID='', ExchangeID='', ParticipantID='', ClientID='', ExchangeInstID='', TraderID='', InstallID=0, OrderSubmitStatus=OSS_InsertSubmitted, NotifySequence=0, TradingDay='', SettlementID=0, OrderSysID='', OrderSource=OSRC_Participant, OrderStatus=OST_AllTraded, OrderType=ORDT_Normal, VolumeTraded=0, VolumeTotal=0, InsertDate='', InsertTime='', ActiveTime='', SuspendTime='', UpdateTime='', CancelTime='', ActiveTraderID='', ClearingPartID='', SequenceNo=0, FrontID=0, SessionID=0, UserProductInfo='', StatusMsg='', UserForceClose=0, ActiveUserID='', BrokerOrderSeq=0, RelativeOrderSysID='', ZCETotalTradedVolume=0, ErrorID=0, ErrorMsg='', IsSwapOrder=0):
		self.BrokerID = '' #broker ID, char[11]
		self.InvestorID = '' #investor ID, char[13]
		self.InstrumentID = '' #instrument ID, char[31]
		self.OrderRef = '' #order reference, char[13]
		self.UserID = '' #user ID, char[16]
		self.OrderPriceType = '' #order price type, char
		self.Direction = '' #buy/sell direction, char
		self.CombOffsetFlag = '' #combined open/close flag, char[5]
		self.CombHedgeFlag = '' #combined speculation/hedge flag, char[5]
		self.LimitPrice = 'Price' #limit price, double
		self.VolumeTotalOriginal = 'Volume' #original total volume, int
		self.TimeCondition = '' #time-in-force condition, char
		self.GTDDate = 'Date' #good-till-date, char[9]
		self.VolumeCondition = '' #volume condition, char
		self.MinVolume = 'Volume' #minimum fill volume, int
		self.ContingentCondition = '' #contingent (trigger) condition, char
		self.StopPrice = 'Price' #stop price, double
		self.ForceCloseReason = '' #force-close reason, char
		self.IsAutoSuspend = 'Bool' #auto-suspend flag, int
		self.BusinessUnit = '' #business unit, char[21]
		self.RequestID = '' #request ID, int
		self.OrderLocalID = '' #local order ID, char[13]
		self.ExchangeID = '' #exchange ID, char[9]
		self.ParticipantID = '' #participant (member) ID, char[11]
		self.ClientID = '' #client ID, char[11]
		self.ExchangeInstID = '' #instrument ID at the exchange, char[31]
		self.TraderID = '' #exchange trader ID, char[21]
		self.InstallID = '' #install ID, int
		self.OrderSubmitStatus = '' #order submit status, char
		self.NotifySequence = 'SequenceNo' #order notify sequence, int
		self.TradingDay = 'Date' #trading day, char[9]
		self.SettlementID = '' #settlement ID, int
		self.OrderSysID = '' #order system ID, char[21]
		self.OrderSource = '' #order source, char
		self.OrderStatus = '' #order status, char
		self.OrderType = '' #order type, char
		self.VolumeTraded = 'Volume' #volume traded today, int
		self.VolumeTotal = 'Volume' #remaining volume, int
		self.InsertDate = 'Date' #order insert date, char[9]
		self.InsertTime = 'Time' #order insert time, char[9]
		self.ActiveTime = 'Time' #activation time, char[9]
		self.SuspendTime = 'Time' #suspend time, char[9]
		self.UpdateTime = 'Time' #last update time, char[9]
		self.CancelTime = 'Time' #cancel time, char[9]
		self.ActiveTraderID = 'TraderID' #exchange trader ID of last modification, char[21]
		self.ClearingPartID = 'ParticipantID' #clearing participant ID, char[11]
		self.SequenceNo = '' #sequence number, int
		self.FrontID = '' #front ID, int
		self.SessionID = '' #session ID, int
		self.UserProductInfo = 'ProductInfo' #user-side product info, char[11]
		self.StatusMsg = 'ErrorMsg' #status message, char[81]
		self.UserForceClose = 'Bool' #user force-close flag, int
		self.ActiveUserID = 'UserID' #acting user ID, char[16]
		self.BrokerOrderSeq = 'SequenceNo' #broker order sequence, int
		self.RelativeOrderSysID = 'OrderSysID' #related order system ID, char[21]
		self.ZCETotalTradedVolume = 'Volume' #ZCE total traded volume, int
		self.ErrorID = '' #error code, int
		self.ErrorMsg = '' #error message, char[81]
		self.IsSwapOrder = 'Bool' #swap-order flag, int
class QryErrOrderAction(BaseStruct): #Query erroneous order actions (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, BrokerID='', InvestorID=''):
		self.BrokerID = '' #broker ID, char[11]
		self.InvestorID = '' #investor ID, char[13]
class ErrOrderAction(BaseStruct): #Erroneous order action (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, BrokerID='', InvestorID='', OrderActionRef=0, OrderRef='', RequestID=0, FrontID=0, SessionID=0, ExchangeID='', OrderSysID='', ActionFlag=AF_Delete, LimitPrice=0.0, VolumeChange=0, ActionDate='', ActionTime='', TraderID='', InstallID=0, OrderLocalID='', ActionLocalID='', ParticipantID='', ClientID='', BusinessUnit='', OrderActionStatus=OAS_Submitted, UserID='', StatusMsg='', InstrumentID='', ErrorID=0, ErrorMsg=''):
		self.BrokerID = '' #broker ID, char[11]
		self.InvestorID = '' #investor ID, char[13]
		self.OrderActionRef = '' #order action reference, int
		self.OrderRef = '' #order reference, char[13]
		self.RequestID = '' #request ID, int
		self.FrontID = '' #front ID, int
		self.SessionID = '' #session ID, int
		self.ExchangeID = '' #exchange ID, char[9]
		self.OrderSysID = '' #order system ID, char[21]
		self.ActionFlag = '' #action flag, char
		self.LimitPrice = 'Price' #price, double
		self.VolumeChange = 'Volume' #volume change, int
		self.ActionDate = 'Date' #action date, char[9]
		self.ActionTime = 'Time' #action time, char[9]
		self.TraderID = '' #exchange trader ID, char[21]
		self.InstallID = '' #install ID, int
		self.OrderLocalID = '' #local order ID, char[13]
		self.ActionLocalID = 'OrderLocalID' #local action ID, char[13]
		self.ParticipantID = '' #participant (member) ID, char[11]
		self.ClientID = '' #client ID, char[11]
		self.BusinessUnit = '' #business unit, char[21]
		self.OrderActionStatus = '' #order action status, char
		self.UserID = '' #user ID, char[16]
		self.StatusMsg = 'ErrorMsg' #status message, char[81]
		self.InstrumentID = '' #instrument ID, char[31]
		self.ErrorID = '' #error code, int
		self.ErrorMsg = '' #error message, char[81]
class QryExchangeSequence(BaseStruct): #Query exchange status/sequence (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, ExchangeID=''):
		self.ExchangeID = '' #exchange ID, char[9]
class ExchangeSequence(BaseStruct): #Exchange status/sequence (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, ExchangeID='', SequenceNo=0, MarketStatus=IS_BeforeTrading):
		self.ExchangeID = '' #exchange ID, char[9]
		self.SequenceNo = '' #sequence number, int
		self.MarketStatus = 'InstrumentStatus' #instrument trading status, char
class QueryMaxOrderVolumeWithPrice(BaseStruct): #Query max order volume at a given price (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, BrokerID='', InvestorID='', InstrumentID='', Direction=D_Buy, OffsetFlag=OF_Open, HedgeFlag=HF_Speculation, MaxVolume=0, Price=0.0):
		self.BrokerID = '' #broker ID, char[11]
		self.InvestorID = '' #investor ID, char[13]
		self.InstrumentID = '' #instrument ID, char[31]
		self.Direction = '' #buy/sell direction, char
		self.OffsetFlag = '' #open/close flag, char
		self.HedgeFlag = '' #speculation/hedge flag, char
		self.MaxVolume = 'Volume' #max allowed order volume, int
		self.Price = '' #order price, double
class QryBrokerTradingParams(BaseStruct): #Query broker trading parameters (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, BrokerID='', InvestorID=''):
		self.BrokerID = '' #broker ID, char[11]
		self.InvestorID = '' #investor ID, char[13]
class BrokerTradingParams(BaseStruct): #Broker trading parameters (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, BrokerID='', InvestorID='', MarginPriceType=MPT_PreSettlementPrice, Algorithm=AG_All, AvailIncludeCloseProfit=ICP_Include):
		self.BrokerID = '' #broker ID, char[11]
		self.InvestorID = '' #investor ID, char[13]
		self.MarginPriceType = '' #margin price type, char
		self.Algorithm = '' #P&L algorithm, char
		self.AvailIncludeCloseProfit = 'IncludeCloseProfit' #whether available funds include close profit, char
class QryBrokerTradingAlgos(BaseStruct): #Query broker trading algorithms (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, BrokerID='', ExchangeID='', InstrumentID=''):
		self.BrokerID = '' #broker ID, char[11]
		self.ExchangeID = '' #exchange ID, char[9]
		self.InstrumentID = '' #instrument ID, char[31]
class BrokerTradingAlgos(BaseStruct): #Broker trading algorithms (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, BrokerID='', ExchangeID='', InstrumentID='', HandlePositionAlgoID=HPA_Base, FindMarginRateAlgoID=FMRA_Base, HandleTradingAccountAlgoID=HTAA_Base):
		self.BrokerID = '' #broker ID, char[11]
		self.ExchangeID = '' #exchange ID, char[9]
		self.InstrumentID = '' #instrument ID, char[31]
		self.HandlePositionAlgoID = '' #position-handling algorithm ID, char
		self.FindMarginRateAlgoID = '' #margin-rate lookup algorithm ID, char
		self.HandleTradingAccountAlgoID = '' #trading-account handling algorithm ID, char
class QueryBrokerDeposit(BaseStruct): #Query broker deposit (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, BrokerID='', ExchangeID=''):
		self.BrokerID = '' #broker ID, char[11]
		self.ExchangeID = '' #exchange ID, char[9]
class BrokerDeposit(BaseStruct): #Broker deposit (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, TradingDay='', BrokerID='', ParticipantID='', ExchangeID='', PreBalance=0.0, CurrMargin=0.0, CloseProfit=0.0, Balance=0.0, Deposit=0.0, Withdraw=0.0, Available=0.0, Reserve=0.0, FrozenMargin=0.0):
		self.TradingDay = 'TradeDate' #trading day, char[9]
		self.BrokerID = '' #broker ID, char[11]
		self.ParticipantID = '' #participant (member) ID, char[11]
		self.ExchangeID = '' #exchange ID, char[9]
		self.PreBalance = 'Money' #previous settlement reserve, double
		self.CurrMargin = 'Money' #current total margin, double
		self.CloseProfit = 'Money' #close profit, double
		self.Balance = 'Money' #futures settlement reserve, double
		self.Deposit = 'Money' #deposit amount, double
		self.Withdraw = 'Money' #withdrawal amount, double
		self.Available = 'Money' #withdrawable funds, double
		self.Reserve = 'Money' #basic reserve, double
		self.FrozenMargin = 'Money' #frozen margin, double
class QryCFMMCBrokerKey(BaseStruct): #Query CFMMC (margin-monitoring system) broker key (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, BrokerID=''):
		self.BrokerID = '' #broker ID, char[11]
class CFMMCBrokerKey(BaseStruct): #CFMMC (margin-monitoring system) broker key (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, BrokerID='', ParticipantID='', CreateDate='', CreateTime='', KeyID=0, CurrentKey='', KeyKind=CFMMCKK_REQUEST):
		self.BrokerID = '' #broker ID, char[11]
		self.ParticipantID = '' #broker unified code, char[11]
		self.CreateDate = 'Date' #key creation date, char[9]
		self.CreateTime = 'Time' #key creation time, char[9]
		self.KeyID = 'SequenceNo' #key ID, int
		self.CurrentKey = 'CFMMCKey' #current dynamic key, char[21]
		self.KeyKind = 'CFMMCKeyKind' #dynamic key kind, char
class CFMMCTradingAccountKey(BaseStruct): #CFMMC broker trading-account key (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, BrokerID='', ParticipantID='', AccountID='', KeyID=0, CurrentKey=''):
		self.BrokerID = '' #broker ID, char[11]
		self.ParticipantID = '' #broker unified code, char[11]
		self.AccountID = '' #investor account ID, char[13]
		self.KeyID = 'SequenceNo' #key ID, int
		self.CurrentKey = 'CFMMCKey' #current dynamic key, char[21]
class QryCFMMCTradingAccountKey(BaseStruct): #Query CFMMC broker trading-account key (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, BrokerID='', InvestorID=''):
		self.BrokerID = '' #broker ID, char[11]
		self.InvestorID = '' #investor ID, char[13]
class BrokerUserOTPParam(BaseStruct): #User one-time-password (OTP) parameters (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, BrokerID='', UserID='', OTPVendorsID='', SerialNumber='', AuthKey='', LastDrift=0, LastSuccess=0, OTPType=OTP_NONE):
		self.BrokerID = '' #broker ID, char[11]
		self.UserID = '' #user ID, char[16]
		self.OTPVendorsID = '' #OTP vendor ID, char[2]
		self.SerialNumber = '' #OTP serial number, char[17]
		self.AuthKey = '' #token auth key, char[41]
		self.LastDrift = '' #last drift value, int
		self.LastSuccess = '' #last success value, int
		self.OTPType = '' #OTP type, char
class ManualSyncBrokerUserOTP(BaseStruct): #Manually sync user OTP token (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, BrokerID='', UserID='', OTPType=OTP_NONE, FirstOTP='', SecondOTP=''):
		self.BrokerID = '' #broker ID, char[11]
		self.UserID = '' #user ID, char[16]
		self.OTPType = '' #OTP type, char
		self.FirstOTP = 'Password' #first one-time password, char[41]
		self.SecondOTP = 'Password' #second one-time password, char[41]
class CommRateModel(BaseStruct): #Investor commission-rate model (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, BrokerID='', CommModelID='', CommModelName=''):
		self.BrokerID = '' #broker ID, char[11]
		self.CommModelID = 'InvestorID' #commission-rate model ID, char[13]
		self.CommModelName = '' #model name, char[161]
class QryCommRateModel(BaseStruct): #Query investor commission-rate model (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, BrokerID='', CommModelID=''):
		self.BrokerID = '' #broker ID, char[11]
		self.CommModelID = 'InvestorID' #commission-rate model ID, char[13]
class MarginModel(BaseStruct): #Investor margin-rate model (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, BrokerID='', MarginModelID='', MarginModelName=''):
		self.BrokerID = '' #broker ID, char[11]
		self.MarginModelID = 'InvestorID' #margin-rate model ID, char[13]
		self.MarginModelName = 'CommModelName' #model name, char[161]
class QryMarginModel(BaseStruct): #Query investor margin-rate model (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, BrokerID='', MarginModelID=''):
		self.BrokerID = '' #broker ID, char[11]
		self.MarginModelID = 'InvestorID' #margin-rate model ID, char[13]
class EWarrantOffset(BaseStruct): #Warehouse-warrant offset info (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, TradingDay='', BrokerID='', InvestorID='', ExchangeID='', InstrumentID='', Direction=D_Buy, HedgeFlag=HF_Speculation, Volume=0):
		self.TradingDay = 'TradeDate' #trading day, char[9]
		self.BrokerID = '' #broker ID, char[11]
		self.InvestorID = '' #investor ID, char[13]
		self.ExchangeID = '' #exchange ID, char[9]
		self.InstrumentID = '' #instrument ID, char[31]
		self.Direction = '' #buy/sell direction, char
		self.HedgeFlag = '' #speculation/hedge flag, char
		self.Volume = '' #volume, int
class QryEWarrantOffset(BaseStruct): #Query warehouse-warrant offset info (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, BrokerID='', InvestorID='', ExchangeID='', InstrumentID=''):
		self.BrokerID = '' #broker ID, char[11]
		self.InvestorID = '' #investor ID, char[13]
		self.ExchangeID = '' #exchange ID, char[9]
		self.InstrumentID = '' #instrument ID, char[31]
class ReqOpenAccount(BaseStruct): #Bank-futures account-open request (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, TradeCode='', BankID='', BankBranchID='', BrokerID='', BrokerBranchID='', TradeDate='', TradeTime='', BankSerial='', TradingDay='', PlateSerial=0, LastFragment=LF_Yes, SessionID=0, CustomerName='', IdCardType=ICT_EID, IdentifiedCardNo='', Gender=GD_Unknown, CountryCode='', CustType=CUSTT_Person, Address='', ZipCode='', Telephone='', MobilePhone='', Fax='', EMail='', MoneyAccountStatus=MAS_Normal, BankAccount='', BankPassWord='', AccountID='', Password='', InstallID=0, VerifyCertNoFlag=YNI_Yes, CurrencyID='', CashExchangeCode=CEC_Exchange, Digest='', BankAccType=BAT_BankBook, DeviceID='', BankSecuAccType=BAT_BankBook, BrokerIDByBank='', BankSecuAcc='', BankPwdFlag=BPWDF_NoCheck, SecuPwdFlag=BPWDF_NoCheck, OperNo='', TID=0, UserID=''):
		self.TradeCode = '' #business function code, char[7]
		self.BankID = '' #bank ID, char[4]
		self.BankBranchID = 'BankBrchID' #bank branch ID, char[5]
		self.BrokerID = '' #futures broker ID, char[11]
		self.BrokerBranchID = 'FutureBranchID' #futures broker branch ID, char[31]
		self.TradeDate = '' #trade date, char[9]
		self.TradeTime = '' #trade time, char[9]
		self.BankSerial = '' #bank serial number, char[13]
		self.TradingDay = 'TradeDate' #trading-system date, char[9]
		self.PlateSerial = 'Serial' #bank-futures platform message serial, int
		self.LastFragment = '' #last-fragment flag, char
		self.SessionID = '' #session ID, int
		self.CustomerName = 'IndividualName' #customer name, char[51]
		self.IdCardType = '' #ID document type, char
		self.IdentifiedCardNo = '' #ID document number, char[51]
		self.Gender = '' #gender, char
		self.CountryCode = '' #country code, char[21]
		self.CustType = '' #customer type, char
		self.Address = '' #address, char[101]
		self.ZipCode = '' #zip code, char[7]
		self.Telephone = '' #telephone number, char[41]
		self.MobilePhone = '' #mobile phone, char[21]
		self.Fax = '' #fax, char[41]
		self.EMail = '' #e-mail, char[41]
		self.MoneyAccountStatus = '' #money account status, char
		self.BankAccount = '' #bank account, char[41]
		self.BankPassWord = 'Password' #bank password, char[41]
		self.AccountID = '' #investor account ID, char[13]
		self.Password = '' #futures password, char[41]
		self.InstallID = '' #install ID, int
		self.VerifyCertNoFlag = 'YesNoIndicator' #verify-customer-ID-number flag, char
		self.CurrencyID = '' #currency ID, char[4]
		self.CashExchangeCode = '' #cash/remit exchange flag, char
		self.Digest = '' #digest, char[36]
		self.BankAccType = '' #bank account type, char
		self.DeviceID = '' #channel (device) ID, char[3]
		self.BankSecuAccType = 'BankAccType' #futures unit account type, char
		self.BrokerIDByBank = 'BankCodingForFuture' #futures company code at the bank, char[33]
		self.BankSecuAcc = 'BankAccount' #futures unit account, char[41]
		self.BankPwdFlag = 'PwdFlag' #bank password flag, char
		self.SecuPwdFlag = 'PwdFlag' #futures fund password check flag, char
		self.OperNo = '' #teller (operator) number, char[17]
		self.TID = '' #transaction ID, int
		self.UserID = '' #user ID, char[16]
class ReqCancelAccount(BaseStruct): #Bank-futures account-cancel request (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, TradeCode='', BankID='', BankBranchID='', BrokerID='', BrokerBranchID='', TradeDate='', TradeTime='', BankSerial='', TradingDay='', PlateSerial=0, LastFragment=LF_Yes, SessionID=0, CustomerName='', IdCardType=ICT_EID, IdentifiedCardNo='', Gender=GD_Unknown, CountryCode='', CustType=CUSTT_Person, Address='', ZipCode='', Telephone='', MobilePhone='', Fax='', EMail='', MoneyAccountStatus=MAS_Normal, BankAccount='', BankPassWord='', AccountID='', Password='', InstallID=0, VerifyCertNoFlag=YNI_Yes, CurrencyID='', CashExchangeCode=CEC_Exchange, Digest='', BankAccType=BAT_BankBook, DeviceID='', BankSecuAccType=BAT_BankBook, BrokerIDByBank='', BankSecuAcc='', BankPwdFlag=BPWDF_NoCheck, SecuPwdFlag=BPWDF_NoCheck, OperNo='', TID=0, UserID=''):
		self.TradeCode = '' #business function code, char[7]
		self.BankID = '' #bank ID, char[4]
		self.BankBranchID = 'BankBrchID' #bank branch ID, char[5]
		self.BrokerID = '' #futures broker ID, char[11]
		self.BrokerBranchID = 'FutureBranchID' #futures broker branch ID, char[31]
		self.TradeDate = '' #trade date, char[9]
		self.TradeTime = '' #trade time, char[9]
		self.BankSerial = '' #bank serial number, char[13]
		self.TradingDay = 'TradeDate' #trading-system date, char[9]
		self.PlateSerial = 'Serial' #bank-futures platform message serial, int
		self.LastFragment = '' #last-fragment flag, char
		self.SessionID = '' #session ID, int
		self.CustomerName = 'IndividualName' #customer name, char[51]
		self.IdCardType = '' #ID document type, char
		self.IdentifiedCardNo = '' #ID document number, char[51]
		self.Gender = '' #gender, char
		self.CountryCode = '' #country code, char[21]
		self.CustType = '' #customer type, char
		self.Address = '' #address, char[101]
		self.ZipCode = '' #zip code, char[7]
		self.Telephone = '' #telephone number, char[41]
		self.MobilePhone = '' #mobile phone, char[21]
		self.Fax = '' #fax, char[41]
		self.EMail = '' #e-mail, char[41]
		self.MoneyAccountStatus = '' #money account status, char
		self.BankAccount = '' #bank account, char[41]
		self.BankPassWord = 'Password' #bank password, char[41]
		self.AccountID = '' #investor account ID, char[13]
		self.Password = '' #futures password, char[41]
		self.InstallID = '' #install ID, int
		self.VerifyCertNoFlag = 'YesNoIndicator' #verify-customer-ID-number flag, char
		self.CurrencyID = '' #currency ID, char[4]
		self.CashExchangeCode = '' #cash/remit exchange flag, char
		self.Digest = '' #digest, char[36]
		self.BankAccType = '' #bank account type, char
		self.DeviceID = '' #channel (device) ID, char[3]
		self.BankSecuAccType = 'BankAccType' #futures unit account type, char
		self.BrokerIDByBank = 'BankCodingForFuture' #futures company code at the bank, char[33]
		self.BankSecuAcc = 'BankAccount' #futures unit account, char[41]
		self.BankPwdFlag = 'PwdFlag' #bank password flag, char
		self.SecuPwdFlag = 'PwdFlag' #futures fund password check flag, char
		self.OperNo = '' #teller (operator) number, char[17]
		self.TID = '' #transaction ID, int
		self.UserID = '' #user ID, char[16]
class ReqChangeAccount(BaseStruct): #Change-bank-account request (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, TradeCode='', BankID='', BankBranchID='', BrokerID='', BrokerBranchID='', TradeDate='', TradeTime='', BankSerial='', TradingDay='', PlateSerial=0, LastFragment=LF_Yes, SessionID=0, CustomerName='', IdCardType=ICT_EID, IdentifiedCardNo='', Gender=GD_Unknown, CountryCode='', CustType=CUSTT_Person, Address='', ZipCode='', Telephone='', MobilePhone='', Fax='', EMail='', MoneyAccountStatus=MAS_Normal, BankAccount='', BankPassWord='', NewBankAccount='', NewBankPassWord='', AccountID='', Password='', BankAccType=BAT_BankBook, InstallID=0, VerifyCertNoFlag=YNI_Yes, CurrencyID='', BrokerIDByBank='', BankPwdFlag=BPWDF_NoCheck, SecuPwdFlag=BPWDF_NoCheck, TID=0, Digest=''):
		self.TradeCode = '' #business function code, char[7]
		self.BankID = '' #bank ID, char[4]
		self.BankBranchID = 'BankBrchID' #bank branch ID, char[5]
		self.BrokerID = '' #futures broker ID, char[11]
		self.BrokerBranchID = 'FutureBranchID' #futures broker branch ID, char[31]
		self.TradeDate = '' #trade date, char[9]
		self.TradeTime = '' #trade time, char[9]
		self.BankSerial = '' #bank serial number, char[13]
		self.TradingDay = 'TradeDate' #trading-system date, char[9]
		self.PlateSerial = 'Serial' #bank-futures platform message serial, int
		self.LastFragment = '' #last-fragment flag, char
		self.SessionID = '' #session ID, int
		self.CustomerName = 'IndividualName' #customer name, char[51]
		self.IdCardType = '' #ID document type, char
		self.IdentifiedCardNo = '' #ID document number, char[51]
		self.Gender = '' #gender, char
		self.CountryCode = '' #country code, char[21]
		self.CustType = '' #customer type, char
		self.Address = '' #address, char[101]
		self.ZipCode = '' #zip code, char[7]
		self.Telephone = '' #telephone number, char[41]
		self.MobilePhone = '' #mobile phone, char[21]
		self.Fax = '' #fax, char[41]
		self.EMail = '' #e-mail, char[41]
		self.MoneyAccountStatus = '' #money account status, char
		self.BankAccount = '' #bank account, char[41]
		self.BankPassWord = 'Password' #bank password, char[41]
		self.NewBankAccount = 'BankAccount' #new bank account, char[41]
		self.NewBankPassWord = 'Password' #new bank password, char[41]
		self.AccountID = '' #investor account ID, char[13]
		self.Password = '' #futures password, char[41]
		self.BankAccType = '' #bank account type, char
		self.InstallID = '' #install ID, int
		self.VerifyCertNoFlag = 'YesNoIndicator' #verify-customer-ID-number flag, char
		self.CurrencyID = '' #currency ID, char[4]
		self.BrokerIDByBank = 'BankCodingForFuture' #futures company code at the bank, char[33]
		self.BankPwdFlag = 'PwdFlag' #bank password flag, char
		self.SecuPwdFlag = 'PwdFlag' #futures fund password check flag, char
		self.TID = '' #transaction ID, int
		self.Digest = '' #digest, char[36]
class ReqTransfer(BaseStruct): #Funds-transfer request (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, TradeCode='', BankID='', BankBranchID='', BrokerID='', BrokerBranchID='', TradeDate='', TradeTime='', BankSerial='', TradingDay='', PlateSerial=0, LastFragment=LF_Yes, SessionID=0, CustomerName='', IdCardType=ICT_EID, IdentifiedCardNo='', CustType=CUSTT_Person, BankAccount='', BankPassWord='', AccountID='', Password='', InstallID=0, FutureSerial=0, UserID='', VerifyCertNoFlag=YNI_Yes, CurrencyID='', TradeAmount=0.0, FutureFetchAmount=0.0, FeePayFlag=FPF_BEN, CustFee=0.0, BrokerFee=0.0, Message='', Digest='', BankAccType=BAT_BankBook, DeviceID='', BankSecuAccType=BAT_BankBook, BrokerIDByBank='', BankSecuAcc='', BankPwdFlag=BPWDF_NoCheck, SecuPwdFlag=BPWDF_NoCheck, OperNo='', RequestID=0, TID=0, TransferStatus=TRFS_Normal):
		self.TradeCode = '' #business function code, char[7]
		self.BankID = '' #bank ID, char[4]
		self.BankBranchID = 'BankBrchID' #bank branch ID, char[5]
		self.BrokerID = '' #futures broker ID, char[11]
		self.BrokerBranchID = 'FutureBranchID' #futures broker branch ID, char[31]
		self.TradeDate = '' #trade date, char[9]
		self.TradeTime = '' #trade time, char[9]
		self.BankSerial = '' #bank serial number, char[13]
		self.TradingDay = 'TradeDate' #trading-system date, char[9]
		self.PlateSerial = 'Serial' #bank-futures platform message serial, int
		self.LastFragment = '' #last-fragment flag, char
		self.SessionID = '' #session ID, int
		self.CustomerName = 'IndividualName' #customer name, char[51]
		self.IdCardType = '' #ID document type, char
		self.IdentifiedCardNo = '' #ID document number, char[51]
		self.CustType = '' #customer type, char
		self.BankAccount = '' #bank account, char[41]
		self.BankPassWord = 'Password' #bank password, char[41]
		self.AccountID = '' #investor account ID, char[13]
		self.Password = '' #futures password, char[41]
		self.InstallID = '' #install ID, int
		self.FutureSerial = '' #futures company serial number, int
		self.UserID = '' #user ID, char[16]
		self.VerifyCertNoFlag = 'YesNoIndicator' #verify-customer-ID-number flag, char
		self.CurrencyID = '' #currency ID, char[4]
		self.TradeAmount = '' #transfer amount, double
		self.FutureFetchAmount = 'TradeAmount' #futures withdrawable amount, double
		self.FeePayFlag = '' #fee payment flag, char
		self.CustFee = '' #fee charged to customer, double
		self.BrokerFee = 'FutureFee' #fee charged to futures company, double
		self.Message = 'AddInfo' #message from sender to receiver, char[129]
		self.Digest = '' #digest, char[36]
		self.BankAccType = '' #bank account type, char
		self.DeviceID = '' #channel (device) ID, char[3]
		self.BankSecuAccType = 'BankAccType' #futures unit account type, char
		self.BrokerIDByBank = 'BankCodingForFuture' #futures company code at the bank, char[33]
		self.BankSecuAcc = 'BankAccount' #futures unit account, char[41]
		self.BankPwdFlag = 'PwdFlag' #bank password flag, char
		self.SecuPwdFlag = 'PwdFlag' #futures fund password check flag, char
		self.OperNo = '' #teller (operator) number, char[17]
		self.RequestID = '' #request ID, int
		self.TID = '' #transaction ID, int
		self.TransferStatus = '' #transfer transaction status, char
class RspTransfer(BaseStruct): #Response to bank-initiated bank-to-futures transfer (NB: generated code — ctor args are ignored; attrs hold type-tag strings)
	def __init__(self, TradeCode='', BankID='', BankBranchID='', BrokerID='', BrokerBranchID='', TradeDate='', TradeTime='', BankSerial='', TradingDay='', PlateSerial=0, LastFragment=LF_Yes, SessionID=0, CustomerName='', IdCardType=ICT_EID, IdentifiedCardNo='', CustType=CUSTT_Person, BankAccount='', BankPassWord='', AccountID='', Password='', InstallID=0, FutureSerial=0, UserID='', VerifyCertNoFlag=YNI_Yes, CurrencyID='', TradeAmount=0.0, FutureFetchAmount=0.0, FeePayFlag=FPF_BEN, CustFee=0.0, BrokerFee=0.0, Message='', Digest='', BankAccType=BAT_BankBook, DeviceID='', BankSecuAccType=BAT_BankBook, BrokerIDByBank='', BankSecuAcc='', BankPwdFlag=BPWDF_NoCheck, SecuPwdFlag=BPWDF_NoCheck, OperNo='', RequestID=0, TID=0, TransferStatus=TRFS_Normal, ErrorID=0, ErrorMsg=''):
		self.TradeCode = '' #business function code, char[7]
		self.BankID = '' #bank ID, char[4]
		self.BankBranchID = 'BankBrchID' #bank branch ID, char[5]
		self.BrokerID = '' #futures broker ID, char[11]
		self.BrokerBranchID = 'FutureBranchID' #futures broker branch ID, char[31]
		self.TradeDate = '' #trade date, char[9]
		self.TradeTime = '' #trade time, char[9]
		self.BankSerial = '' #bank serial number, char[13]
		self.TradingDay = 'TradeDate' #trading-system date, char[9]
		self.PlateSerial = 'Serial' #bank-futures platform message serial, int
		self.LastFragment = '' #last-fragment flag, char
		self.SessionID = '' #session ID, int
		self.CustomerName = 'IndividualName' #customer name, char[51]
		self.IdCardType = '' #ID document type, char
		self.IdentifiedCardNo = '' #ID document number, char[51]
		self.CustType = '' #customer type, char
		self.BankAccount = '' #bank account, char[41]
		self.BankPassWord = 'Password' #bank password, char[41]
		self.AccountID = '' #investor account ID, char[13]
		self.Password = '' #futures password, char[41]
		self.InstallID = '' #install ID, int
		self.FutureSerial = '' #futures company serial number, int
		self.UserID = '' #user ID, char[16]
		self.VerifyCertNoFlag = 'YesNoIndicator' #verify-customer-ID-number flag, char
		self.CurrencyID = '' #currency ID, char[4]
		self.TradeAmount = '' #transfer amount, double
		self.FutureFetchAmount = 'TradeAmount' #futures withdrawable amount, double
		self.FeePayFlag = '' #fee payment flag, char
		self.CustFee = '' #fee charged to customer, double
		self.BrokerFee = 'FutureFee' #fee charged to futures company, double
		self.Message = 'AddInfo' #message from sender to receiver, char[129]
		self.Digest = '' #digest, char[36]
		self.BankAccType = '' #bank account type, char
		self.DeviceID = '' #channel (device) ID, char[3]
		self.BankSecuAccType = 'BankAccType' #futures unit account type, char
		self.BrokerIDByBank = 'BankCodingForFuture' #futures company code at the bank, char[33]
		self.BankSecuAcc = 'BankAccount' #futures unit account, char[41]
		self.BankPwdFlag = 'PwdFlag' #bank password flag, char
		self.SecuPwdFlag = 'PwdFlag' #futures fund password check flag, char
		self.OperNo = '' #teller (operator) number, char[17]
		self.RequestID = '' #request ID, int
		self.TID = '' #transaction ID, int
		self.TransferStatus = '' #transfer transaction status, char
		self.ErrorID = '' #error code, int
		self.ErrorMsg = '' #error message, char[81]
class ReqRepeal(BaseStruct): #repeal (reversal) request
    def __init__(self, RepealTimeInterval=0, RepealedTimes=0, BankRepealFlag=BRF_BankNotNeedRepeal, BrokerRepealFlag=BRORF_BrokerNotNeedRepeal, PlateRepealSerial=0, BankRepealSerial='', FutureRepealSerial=0, TradeCode='', BankID='', BankBranchID='', BrokerID='', BrokerBranchID='', TradeDate='', TradeTime='', BankSerial='', TradingDay='', PlateSerial=0, LastFragment=LF_Yes, SessionID=0, CustomerName='', IdCardType=ICT_EID, IdentifiedCardNo='', CustType=CUSTT_Person, BankAccount='', BankPassWord='', AccountID='', Password='', InstallID=0, FutureSerial=0, UserID='', VerifyCertNoFlag=YNI_Yes, CurrencyID='', TradeAmount=0.0, FutureFetchAmount=0.0, FeePayFlag=FPF_BEN, CustFee=0.0, BrokerFee=0.0, Message='', Digest='', BankAccType=BAT_BankBook, DeviceID='', BankSecuAccType=BAT_BankBook, BrokerIDByBank='', BankSecuAcc='', BankPwdFlag=BPWDF_NoCheck, SecuPwdFlag=BPWDF_NoCheck, OperNo='', RequestID=0, TID=0, TransferStatus=TRFS_Normal):
        """Generated field map (args unused by design; attrs hold base-type names)."""
        self.RepealTimeInterval = '' #repeal time interval, int
        self.RepealedTimes = '' #number of times already repealed, int
        self.BankRepealFlag = '' #bank repeal flag, char
        self.BrokerRepealFlag = '' #futures broker repeal flag, char
        self.PlateRepealSerial = 'PlateSerial' #repealed platform serial number, int
        self.BankRepealSerial = 'BankSerial' #repealed bank serial number, char[13]
        self.FutureRepealSerial = 'FutureSerial' #repealed futures serial number, int
        self.TradeCode = '' #business function code, char[7]
        self.BankID = '' #bank code, char[4]
        self.BankBranchID = 'BankBrchID' #bank branch code, char[5]
        self.BrokerID = '' #futures broker code, char[11]
        self.BrokerBranchID = 'FutureBranchID' #futures broker branch code, char[31]
        self.TradeDate = '' #trade date, char[9]
        self.TradeTime = '' #trade time, char[9]
        self.BankSerial = '' #bank serial number, char[13]
        self.TradingDay = 'TradeDate' #trading system date, char[9]
        self.PlateSerial = 'Serial' #bank-futures platform message serial number, int
        self.LastFragment = '' #last-fragment flag, char
        self.SessionID = '' #session ID, int
        self.CustomerName = 'IndividualName' #customer name, char[51]
        self.IdCardType = '' #ID document type, char
        self.IdentifiedCardNo = '' #ID document number, char[51]
        self.CustType = '' #customer type, char
        self.BankAccount = '' #bank account number, char[41]
        self.BankPassWord = 'Password' #bank password, char[41]
        self.AccountID = '' #investor account ID, char[13]
        self.Password = '' #futures password, char[41]
        self.InstallID = '' #install ID, int
        self.FutureSerial = '' #futures company serial number, int
        self.UserID = '' #user identifier, char[16]
        self.VerifyCertNoFlag = 'YesNoIndicator' #verify-customer-ID-number flag, char
        self.CurrencyID = '' #currency code, char[4]
        self.TradeAmount = '' #transfer amount, double
        self.FutureFetchAmount = 'TradeAmount' #futures withdrawable amount, double
        self.FeePayFlag = '' #fee payment flag, char
        self.CustFee = '' #fee charged to the customer, double
        self.BrokerFee = 'FutureFee' #fee charged to the futures company, double
        self.Message = 'AddInfo' #message from sender to receiver, char[129]
        self.Digest = '' #digest, char[36]
        self.BankAccType = '' #bank account type, char
        self.DeviceID = '' #channel flag, char[3]
        self.BankSecuAccType = 'BankAccType' #futures unit account type, char
        self.BrokerIDByBank = 'BankCodingForFuture' #futures company's code at the bank, char[33]
        self.BankSecuAcc = 'BankAccount' #futures unit account, char[41]
        self.BankPwdFlag = 'PwdFlag' #bank password flag, char
        self.SecuPwdFlag = 'PwdFlag' #futures fund password check flag, char
        self.OperNo = '' #bank teller, char[17]
        self.RequestID = '' #request ID, int
        self.TID = '' #transaction ID, int
        self.TransferStatus = '' #transfer transaction status, char
class RspRepeal(BaseStruct): #repeal (reversal) response
    def __init__(self, RepealTimeInterval=0, RepealedTimes=0, BankRepealFlag=BRF_BankNotNeedRepeal, BrokerRepealFlag=BRORF_BrokerNotNeedRepeal, PlateRepealSerial=0, BankRepealSerial='', FutureRepealSerial=0, TradeCode='', BankID='', BankBranchID='', BrokerID='', BrokerBranchID='', TradeDate='', TradeTime='', BankSerial='', TradingDay='', PlateSerial=0, LastFragment=LF_Yes, SessionID=0, CustomerName='', IdCardType=ICT_EID, IdentifiedCardNo='', CustType=CUSTT_Person, BankAccount='', BankPassWord='', AccountID='', Password='', InstallID=0, FutureSerial=0, UserID='', VerifyCertNoFlag=YNI_Yes, CurrencyID='', TradeAmount=0.0, FutureFetchAmount=0.0, FeePayFlag=FPF_BEN, CustFee=0.0, BrokerFee=0.0, Message='', Digest='', BankAccType=BAT_BankBook, DeviceID='', BankSecuAccType=BAT_BankBook, BrokerIDByBank='', BankSecuAcc='', BankPwdFlag=BPWDF_NoCheck, SecuPwdFlag=BPWDF_NoCheck, OperNo='', RequestID=0, TID=0, TransferStatus=TRFS_Normal, ErrorID=0, ErrorMsg=''):
        """Generated field map (args unused by design; attrs hold base-type names)."""
        self.RepealTimeInterval = '' #repeal time interval, int
        self.RepealedTimes = '' #number of times already repealed, int
        self.BankRepealFlag = '' #bank repeal flag, char
        self.BrokerRepealFlag = '' #futures broker repeal flag, char
        self.PlateRepealSerial = 'PlateSerial' #repealed platform serial number, int
        self.BankRepealSerial = 'BankSerial' #repealed bank serial number, char[13]
        self.FutureRepealSerial = 'FutureSerial' #repealed futures serial number, int
        self.TradeCode = '' #business function code, char[7]
        self.BankID = '' #bank code, char[4]
        self.BankBranchID = 'BankBrchID' #bank branch code, char[5]
        self.BrokerID = '' #futures broker code, char[11]
        self.BrokerBranchID = 'FutureBranchID' #futures broker branch code, char[31]
        self.TradeDate = '' #trade date, char[9]
        self.TradeTime = '' #trade time, char[9]
        self.BankSerial = '' #bank serial number, char[13]
        self.TradingDay = 'TradeDate' #trading system date, char[9]
        self.PlateSerial = 'Serial' #bank-futures platform message serial number, int
        self.LastFragment = '' #last-fragment flag, char
        self.SessionID = '' #session ID, int
        self.CustomerName = 'IndividualName' #customer name, char[51]
        self.IdCardType = '' #ID document type, char
        self.IdentifiedCardNo = '' #ID document number, char[51]
        self.CustType = '' #customer type, char
        self.BankAccount = '' #bank account number, char[41]
        self.BankPassWord = 'Password' #bank password, char[41]
        self.AccountID = '' #investor account ID, char[13]
        self.Password = '' #futures password, char[41]
        self.InstallID = '' #install ID, int
        self.FutureSerial = '' #futures company serial number, int
        self.UserID = '' #user identifier, char[16]
        self.VerifyCertNoFlag = 'YesNoIndicator' #verify-customer-ID-number flag, char
        self.CurrencyID = '' #currency code, char[4]
        self.TradeAmount = '' #transfer amount, double
        self.FutureFetchAmount = 'TradeAmount' #futures withdrawable amount, double
        self.FeePayFlag = '' #fee payment flag, char
        self.CustFee = '' #fee charged to the customer, double
        self.BrokerFee = 'FutureFee' #fee charged to the futures company, double
        self.Message = 'AddInfo' #message from sender to receiver, char[129]
        self.Digest = '' #digest, char[36]
        self.BankAccType = '' #bank account type, char
        self.DeviceID = '' #channel flag, char[3]
        self.BankSecuAccType = 'BankAccType' #futures unit account type, char
        self.BrokerIDByBank = 'BankCodingForFuture' #futures company's code at the bank, char[33]
        self.BankSecuAcc = 'BankAccount' #futures unit account, char[41]
        self.BankPwdFlag = 'PwdFlag' #bank password flag, char
        self.SecuPwdFlag = 'PwdFlag' #futures fund password check flag, char
        self.OperNo = '' #bank teller, char[17]
        self.RequestID = '' #request ID, int
        self.TID = '' #transaction ID, int
        self.TransferStatus = '' #transfer transaction status, char
        self.ErrorID = '' #error code, int
        self.ErrorMsg = '' #error message, char[81]
class ReqQueryAccount(BaseStruct): #query account info request
    def __init__(self, TradeCode='', BankID='', BankBranchID='', BrokerID='', BrokerBranchID='', TradeDate='', TradeTime='', BankSerial='', TradingDay='', PlateSerial=0, LastFragment=LF_Yes, SessionID=0, CustomerName='', IdCardType=ICT_EID, IdentifiedCardNo='', CustType=CUSTT_Person, BankAccount='', BankPassWord='', AccountID='', Password='', FutureSerial=0, InstallID=0, UserID='', VerifyCertNoFlag=YNI_Yes, CurrencyID='', Digest='', BankAccType=BAT_BankBook, DeviceID='', BankSecuAccType=BAT_BankBook, BrokerIDByBank='', BankSecuAcc='', BankPwdFlag=BPWDF_NoCheck, SecuPwdFlag=BPWDF_NoCheck, OperNo='', RequestID=0, TID=0):
        """Generated field map (args unused by design; attrs hold base-type names)."""
        self.TradeCode = '' #business function code, char[7]
        self.BankID = '' #bank code, char[4]
        self.BankBranchID = 'BankBrchID' #bank branch code, char[5]
        self.BrokerID = '' #futures broker code, char[11]
        self.BrokerBranchID = 'FutureBranchID' #futures broker branch code, char[31]
        self.TradeDate = '' #trade date, char[9]
        self.TradeTime = '' #trade time, char[9]
        self.BankSerial = '' #bank serial number, char[13]
        self.TradingDay = 'TradeDate' #trading system date, char[9]
        self.PlateSerial = 'Serial' #bank-futures platform message serial number, int
        self.LastFragment = '' #last-fragment flag, char
        self.SessionID = '' #session ID, int
        self.CustomerName = 'IndividualName' #customer name, char[51]
        self.IdCardType = '' #ID document type, char
        self.IdentifiedCardNo = '' #ID document number, char[51]
        self.CustType = '' #customer type, char
        self.BankAccount = '' #bank account number, char[41]
        self.BankPassWord = 'Password' #bank password, char[41]
        self.AccountID = '' #investor account ID, char[13]
        self.Password = '' #futures password, char[41]
        self.FutureSerial = '' #futures company serial number, int
        self.InstallID = '' #install ID, int
        self.UserID = '' #user identifier, char[16]
        self.VerifyCertNoFlag = 'YesNoIndicator' #verify-customer-ID-number flag, char
        self.CurrencyID = '' #currency code, char[4]
        self.Digest = '' #digest, char[36]
        self.BankAccType = '' #bank account type, char
        self.DeviceID = '' #channel flag, char[3]
        self.BankSecuAccType = 'BankAccType' #futures unit account type, char
        self.BrokerIDByBank = 'BankCodingForFuture' #futures company's code at the bank, char[33]
        self.BankSecuAcc = 'BankAccount' #futures unit account, char[41]
        self.BankPwdFlag = 'PwdFlag' #bank password flag, char
        self.SecuPwdFlag = 'PwdFlag' #futures fund password check flag, char
        self.OperNo = '' #bank teller, char[17]
        self.RequestID = '' #request ID, int
        self.TID = '' #transaction ID, int
class RspQueryAccount(BaseStruct): #query account info response
    def __init__(self, TradeCode='', BankID='', BankBranchID='', BrokerID='', BrokerBranchID='', TradeDate='', TradeTime='', BankSerial='', TradingDay='', PlateSerial=0, LastFragment=LF_Yes, SessionID=0, CustomerName='', IdCardType=ICT_EID, IdentifiedCardNo='', CustType=CUSTT_Person, BankAccount='', BankPassWord='', AccountID='', Password='', FutureSerial=0, InstallID=0, UserID='', VerifyCertNoFlag=YNI_Yes, CurrencyID='', Digest='', BankAccType=BAT_BankBook, DeviceID='', BankSecuAccType=BAT_BankBook, BrokerIDByBank='', BankSecuAcc='', BankPwdFlag=BPWDF_NoCheck, SecuPwdFlag=BPWDF_NoCheck, OperNo='', RequestID=0, TID=0, BankUseAmount=0.0, BankFetchAmount=0.0):
        """Generated field map (args unused by design; attrs hold base-type names)."""
        self.TradeCode = '' #business function code, char[7]
        self.BankID = '' #bank code, char[4]
        self.BankBranchID = 'BankBrchID' #bank branch code, char[5]
        self.BrokerID = '' #futures broker code, char[11]
        self.BrokerBranchID = 'FutureBranchID' #futures broker branch code, char[31]
        self.TradeDate = '' #trade date, char[9]
        self.TradeTime = '' #trade time, char[9]
        self.BankSerial = '' #bank serial number, char[13]
        self.TradingDay = 'TradeDate' #trading system date, char[9]
        self.PlateSerial = 'Serial' #bank-futures platform message serial number, int
        self.LastFragment = '' #last-fragment flag, char
        self.SessionID = '' #session ID, int
        self.CustomerName = 'IndividualName' #customer name, char[51]
        self.IdCardType = '' #ID document type, char
        self.IdentifiedCardNo = '' #ID document number, char[51]
        self.CustType = '' #customer type, char
        self.BankAccount = '' #bank account number, char[41]
        self.BankPassWord = 'Password' #bank password, char[41]
        self.AccountID = '' #investor account ID, char[13]
        self.Password = '' #futures password, char[41]
        self.FutureSerial = '' #futures company serial number, int
        self.InstallID = '' #install ID, int
        self.UserID = '' #user identifier, char[16]
        self.VerifyCertNoFlag = 'YesNoIndicator' #verify-customer-ID-number flag, char
        self.CurrencyID = '' #currency code, char[4]
        self.Digest = '' #digest, char[36]
        self.BankAccType = '' #bank account type, char
        self.DeviceID = '' #channel flag, char[3]
        self.BankSecuAccType = 'BankAccType' #futures unit account type, char
        self.BrokerIDByBank = 'BankCodingForFuture' #futures company's code at the bank, char[33]
        self.BankSecuAcc = 'BankAccount' #futures unit account, char[41]
        self.BankPwdFlag = 'PwdFlag' #bank password flag, char
        self.SecuPwdFlag = 'PwdFlag' #futures fund password check flag, char
        self.OperNo = '' #bank teller, char[17]
        self.RequestID = '' #request ID, int
        self.TID = '' #transaction ID, int
        self.BankUseAmount = 'TradeAmount' #bank available amount, double
        self.BankFetchAmount = 'TradeAmount' #bank withdrawable amount, double
class FutureSignIO(BaseStruct): #futures broker sign-in/sign-out
    def __init__(self, TradeCode='', BankID='', BankBranchID='', BrokerID='', BrokerBranchID='', TradeDate='', TradeTime='', BankSerial='', TradingDay='', PlateSerial=0, LastFragment=LF_Yes, SessionID=0, InstallID=0, UserID='', Digest='', CurrencyID='', DeviceID='', BrokerIDByBank='', OperNo='', RequestID=0, TID=0):
        """Generated field map (args unused by design; attrs hold base-type names)."""
        self.TradeCode = '' #business function code, char[7]
        self.BankID = '' #bank code, char[4]
        self.BankBranchID = 'BankBrchID' #bank branch code, char[5]
        self.BrokerID = '' #futures broker code, char[11]
        self.BrokerBranchID = 'FutureBranchID' #futures broker branch code, char[31]
        self.TradeDate = '' #trade date, char[9]
        self.TradeTime = '' #trade time, char[9]
        self.BankSerial = '' #bank serial number, char[13]
        self.TradingDay = 'TradeDate' #trading system date, char[9]
        self.PlateSerial = 'Serial' #bank-futures platform message serial number, int
        self.LastFragment = '' #last-fragment flag, char
        self.SessionID = '' #session ID, int
        self.InstallID = '' #install ID, int
        self.UserID = '' #user identifier, char[16]
        self.Digest = '' #digest, char[36]
        self.CurrencyID = '' #currency code, char[4]
        self.DeviceID = '' #channel flag, char[3]
        self.BrokerIDByBank = 'BankCodingForFuture' #futures company's code at the bank, char[33]
        self.OperNo = '' #bank teller, char[17]
        self.RequestID = '' #request ID, int
        self.TID = '' #transaction ID, int
class RspFutureSignIn(BaseStruct): #futures broker sign-in response
    def __init__(self, TradeCode='', BankID='', BankBranchID='', BrokerID='', BrokerBranchID='', TradeDate='', TradeTime='', BankSerial='', TradingDay='', PlateSerial=0, LastFragment=LF_Yes, SessionID=0, InstallID=0, UserID='', Digest='', CurrencyID='', DeviceID='', BrokerIDByBank='', OperNo='', RequestID=0, TID=0, ErrorID=0, ErrorMsg='', PinKey='', MacKey=''):
        """Generated field map (args unused by design; attrs hold base-type names)."""
        self.TradeCode = '' #business function code, char[7]
        self.BankID = '' #bank code, char[4]
        self.BankBranchID = 'BankBrchID' #bank branch code, char[5]
        self.BrokerID = '' #futures broker code, char[11]
        self.BrokerBranchID = 'FutureBranchID' #futures broker branch code, char[31]
        self.TradeDate = '' #trade date, char[9]
        self.TradeTime = '' #trade time, char[9]
        self.BankSerial = '' #bank serial number, char[13]
        self.TradingDay = 'TradeDate' #trading system date, char[9]
        self.PlateSerial = 'Serial' #bank-futures platform message serial number, int
        self.LastFragment = '' #last-fragment flag, char
        self.SessionID = '' #session ID, int
        self.InstallID = '' #install ID, int
        self.UserID = '' #user identifier, char[16]
        self.Digest = '' #digest, char[36]
        self.CurrencyID = '' #currency code, char[4]
        self.DeviceID = '' #channel flag, char[3]
        self.BrokerIDByBank = 'BankCodingForFuture' #futures company's code at the bank, char[33]
        self.OperNo = '' #bank teller, char[17]
        self.RequestID = '' #request ID, int
        self.TID = '' #transaction ID, int
        self.ErrorID = '' #error code, int
        self.ErrorMsg = '' #error message, char[81]
        self.PinKey = 'PasswordKey' #PIN key, char[129]
        self.MacKey = 'PasswordKey' #MAC key, char[129]
class ReqFutureSignOut(BaseStruct): #futures broker sign-out request
    def __init__(self, TradeCode='', BankID='', BankBranchID='', BrokerID='', BrokerBranchID='', TradeDate='', TradeTime='', BankSerial='', TradingDay='', PlateSerial=0, LastFragment=LF_Yes, SessionID=0, InstallID=0, UserID='', Digest='', CurrencyID='', DeviceID='', BrokerIDByBank='', OperNo='', RequestID=0, TID=0):
        """Generated field map (args unused by design; attrs hold base-type names)."""
        self.TradeCode = '' #business function code, char[7]
        self.BankID = '' #bank code, char[4]
        self.BankBranchID = 'BankBrchID' #bank branch code, char[5]
        self.BrokerID = '' #futures broker code, char[11]
        self.BrokerBranchID = 'FutureBranchID' #futures broker branch code, char[31]
        self.TradeDate = '' #trade date, char[9]
        self.TradeTime = '' #trade time, char[9]
        self.BankSerial = '' #bank serial number, char[13]
        self.TradingDay = 'TradeDate' #trading system date, char[9]
        self.PlateSerial = 'Serial' #bank-futures platform message serial number, int
        self.LastFragment = '' #last-fragment flag, char
        self.SessionID = '' #session ID, int
        self.InstallID = '' #install ID, int
        self.UserID = '' #user identifier, char[16]
        self.Digest = '' #digest, char[36]
        self.CurrencyID = '' #currency code, char[4]
        self.DeviceID = '' #channel flag, char[3]
        self.BrokerIDByBank = 'BankCodingForFuture' #futures company's code at the bank, char[33]
        self.OperNo = '' #bank teller, char[17]
        self.RequestID = '' #request ID, int
        self.TID = '' #transaction ID, int
class RspFutureSignOut(BaseStruct): #futures broker sign-out response
    def __init__(self, TradeCode='', BankID='', BankBranchID='', BrokerID='', BrokerBranchID='', TradeDate='', TradeTime='', BankSerial='', TradingDay='', PlateSerial=0, LastFragment=LF_Yes, SessionID=0, InstallID=0, UserID='', Digest='', CurrencyID='', DeviceID='', BrokerIDByBank='', OperNo='', RequestID=0, TID=0, ErrorID=0, ErrorMsg=''):
        """Generated field map (args unused by design; attrs hold base-type names)."""
        self.TradeCode = '' #business function code, char[7]
        self.BankID = '' #bank code, char[4]
        self.BankBranchID = 'BankBrchID' #bank branch code, char[5]
        self.BrokerID = '' #futures broker code, char[11]
        self.BrokerBranchID = 'FutureBranchID' #futures broker branch code, char[31]
        self.TradeDate = '' #trade date, char[9]
        self.TradeTime = '' #trade time, char[9]
        self.BankSerial = '' #bank serial number, char[13]
        self.TradingDay = 'TradeDate' #trading system date, char[9]
        self.PlateSerial = 'Serial' #bank-futures platform message serial number, int
        self.LastFragment = '' #last-fragment flag, char
        self.SessionID = '' #session ID, int
        self.InstallID = '' #install ID, int
        self.UserID = '' #user identifier, char[16]
        self.Digest = '' #digest, char[36]
        self.CurrencyID = '' #currency code, char[4]
        self.DeviceID = '' #channel flag, char[3]
        self.BrokerIDByBank = 'BankCodingForFuture' #futures company's code at the bank, char[33]
        self.OperNo = '' #bank teller, char[17]
        self.RequestID = '' #request ID, int
        self.TID = '' #transaction ID, int
        self.ErrorID = '' #error code, int
        self.ErrorMsg = '' #error message, char[81]
class ReqQueryTradeResultBySerial(BaseStruct): #request to query trade result by serial number
    def __init__(self, TradeCode='', BankID='', BankBranchID='', BrokerID='', BrokerBranchID='', TradeDate='', TradeTime='', BankSerial='', TradingDay='', PlateSerial=0, LastFragment=LF_Yes, SessionID=0, Reference=0, RefrenceIssureType=TS_Bank, RefrenceIssure='', CustomerName='', IdCardType=ICT_EID, IdentifiedCardNo='', CustType=CUSTT_Person, BankAccount='', BankPassWord='', AccountID='', Password='', CurrencyID='', TradeAmount=0.0, Digest=''):
        """Generated field map (args unused by design; attrs hold base-type names)."""
        self.TradeCode = '' #business function code, char[7]
        self.BankID = '' #bank code, char[4]
        self.BankBranchID = 'BankBrchID' #bank branch code, char[5]
        self.BrokerID = '' #futures broker code, char[11]
        self.BrokerBranchID = 'FutureBranchID' #futures broker branch code, char[31]
        self.TradeDate = '' #trade date, char[9]
        self.TradeTime = '' #trade time, char[9]
        self.BankSerial = '' #bank serial number, char[13]
        self.TradingDay = 'TradeDate' #trading system date, char[9]
        self.PlateSerial = 'Serial' #bank-futures platform message serial number, int
        self.LastFragment = '' #last-fragment flag, char
        self.SessionID = '' #session ID, int
        self.Reference = 'Serial' #serial number, int
        self.RefrenceIssureType = 'InstitutionType' #institution type of this serial's issuer, char
        self.RefrenceIssure = 'OrganCode' #institution code of this serial's issuer, char[36]
        self.CustomerName = 'IndividualName' #customer name, char[51]
        self.IdCardType = '' #ID document type, char
        self.IdentifiedCardNo = '' #ID document number, char[51]
        self.CustType = '' #customer type, char
        self.BankAccount = '' #bank account number, char[41]
        self.BankPassWord = 'Password' #bank password, char[41]
        self.AccountID = '' #investor account ID, char[13]
        self.Password = '' #futures password, char[41]
        self.CurrencyID = '' #currency code, char[4]
        self.TradeAmount = '' #transfer amount, double
        self.Digest = '' #digest, char[36]
class RspQueryTradeResultBySerial(BaseStruct): #response to query trade result by serial number
    def __init__(self, TradeCode='', BankID='', BankBranchID='', BrokerID='', BrokerBranchID='', TradeDate='', TradeTime='', BankSerial='', TradingDay='', PlateSerial=0, LastFragment=LF_Yes, SessionID=0, ErrorID=0, ErrorMsg='', Reference=0, RefrenceIssureType=TS_Bank, RefrenceIssure='', OriginReturnCode='', OriginDescrInfoForReturnCode='', BankAccount='', BankPassWord='', AccountID='', Password='', CurrencyID='', TradeAmount=0.0, Digest=''):
        """Generated field map (args unused by design; attrs hold base-type names)."""
        self.TradeCode = '' #business function code, char[7]
        self.BankID = '' #bank code, char[4]
        self.BankBranchID = 'BankBrchID' #bank branch code, char[5]
        self.BrokerID = '' #futures broker code, char[11]
        self.BrokerBranchID = 'FutureBranchID' #futures broker branch code, char[31]
        self.TradeDate = '' #trade date, char[9]
        self.TradeTime = '' #trade time, char[9]
        self.BankSerial = '' #bank serial number, char[13]
        self.TradingDay = 'TradeDate' #trading system date, char[9]
        self.PlateSerial = 'Serial' #bank-futures platform message serial number, int
        self.LastFragment = '' #last-fragment flag, char
        self.SessionID = '' #session ID, int
        self.ErrorID = '' #error code, int
        self.ErrorMsg = '' #error message, char[81]
        self.Reference = 'Serial' #serial number, int
        self.RefrenceIssureType = 'InstitutionType' #institution type of this serial's issuer, char
        self.RefrenceIssure = 'OrganCode' #institution code of this serial's issuer, char[36]
        self.OriginReturnCode = 'ReturnCode' #original return code, char[7]
        self.OriginDescrInfoForReturnCode = 'DescrInfoForReturnCode' #original return code description, char[129]
        self.BankAccount = '' #bank account number, char[41]
        self.BankPassWord = 'Password' #bank password, char[41]
        self.AccountID = '' #investor account ID, char[13]
        self.Password = '' #futures password, char[41]
        self.CurrencyID = '' #currency code, char[4]
        self.TradeAmount = '' #transfer amount, double
        self.Digest = '' #digest, char[36]
class ReqDayEndFileReady(BaseStruct): #day-end file ready request
    def __init__(self, TradeCode='', BankID='', BankBranchID='', BrokerID='', BrokerBranchID='', TradeDate='', TradeTime='', BankSerial='', TradingDay='', PlateSerial=0, LastFragment=LF_Yes, SessionID=0, FileBusinessCode=FBC_Others, Digest=''):
        """Generated field map (args unused by design; attrs hold base-type names)."""
        self.TradeCode = '' #business function code, char[7]
        self.BankID = '' #bank code, char[4]
        self.BankBranchID = 'BankBrchID' #bank branch code, char[5]
        self.BrokerID = '' #futures broker code, char[11]
        self.BrokerBranchID = 'FutureBranchID' #futures broker branch code, char[31]
        self.TradeDate = '' #trade date, char[9]
        self.TradeTime = '' #trade time, char[9]
        self.BankSerial = '' #bank serial number, char[13]
        self.TradingDay = 'TradeDate' #trading system date, char[9]
        self.PlateSerial = 'Serial' #bank-futures platform message serial number, int
        self.LastFragment = '' #last-fragment flag, char
        self.SessionID = '' #session ID, int
        self.FileBusinessCode = '' #file business function, char
        self.Digest = '' #digest, char[36]
class ReturnResult(BaseStruct): #return result
    def __init__(self, ReturnCode='', DescrInfoForReturnCode=''):
        """Generated field map (args unused by design; attrs hold base-type names)."""
        self.ReturnCode = '' #return code, char[7]
        self.DescrInfoForReturnCode = '' #return code description, char[129]
class VerifyFuturePassword(BaseStruct): #verify futures fund password
    def __init__(self, TradeCode='', BankID='', BankBranchID='', BrokerID='', BrokerBranchID='', TradeDate='', TradeTime='', BankSerial='', TradingDay='', PlateSerial=0, LastFragment=LF_Yes, SessionID=0, AccountID='', Password='', BankAccount='', BankPassWord='', InstallID=0, TID=0):
        """Generated field map (args unused by design; attrs hold base-type names)."""
        self.TradeCode = '' #business function code, char[7]
        self.BankID = '' #bank code, char[4]
        self.BankBranchID = 'BankBrchID' #bank branch code, char[5]
        self.BrokerID = '' #futures broker code, char[11]
        self.BrokerBranchID = 'FutureBranchID' #futures broker branch code, char[31]
        self.TradeDate = '' #trade date, char[9]
        self.TradeTime = '' #trade time, char[9]
        self.BankSerial = '' #bank serial number, char[13]
        self.TradingDay = 'TradeDate' #trading system date, char[9]
        self.PlateSerial = 'Serial' #bank-futures platform message serial number, int
        self.LastFragment = '' #last-fragment flag, char
        self.SessionID = '' #session ID, int
        self.AccountID = '' #investor account ID, char[13]
        self.Password = '' #futures password, char[41]
        self.BankAccount = '' #bank account number, char[41]
        self.BankPassWord = 'Password' #bank password, char[41]
        self.InstallID = '' #install ID, int
        self.TID = '' #transaction ID, int
class VerifyCustInfo(BaseStruct): #verify customer info
    def __init__(self, CustomerName='', IdCardType=ICT_EID, IdentifiedCardNo='', CustType=CUSTT_Person):
        """Generated field map (args unused by design; attrs hold base-type names)."""
        self.CustomerName = 'IndividualName' #customer name, char[51]
        self.IdCardType = '' #ID document type, char
        self.IdentifiedCardNo = '' #ID document number, char[51]
        self.CustType = '' #customer type, char
class VerifyFuturePasswordAndCustInfo(BaseStruct): #verify futures fund password and customer info
    def __init__(self, CustomerName='', IdCardType=ICT_EID, IdentifiedCardNo='', CustType=CUSTT_Person, AccountID='', Password=''):
        """Generated field map (args unused by design; attrs hold base-type names)."""
        self.CustomerName = 'IndividualName' #customer name, char[51]
        self.IdCardType = '' #ID document type, char
        self.IdentifiedCardNo = '' #ID document number, char[51]
        self.CustType = '' #customer type, char
        self.AccountID = '' #investor account ID, char[13]
        self.Password = '' #futures password, char[41]
class DepositResultInform(BaseStruct): #deposit result notification (original generated comment was a copy-paste of the previous class's)
    def __init__(self, DepositSeqNo='', BrokerID='', InvestorID='', Deposit=0.0, RequestID=0, ReturnCode='', DescrInfoForReturnCode=''):
        """Generated field map (args unused by design; attrs hold base-type names)."""
        self.DepositSeqNo = '' #deposit/withdrawal serial number, as returned by the bank-futures gateway, char[15]
        self.BrokerID = '' #broker company code, char[11]
        self.InvestorID = '' #investor ID, char[13]
        self.Deposit = 'Money' #deposit amount, double
        self.RequestID = '' #request ID, int
        self.ReturnCode = '' #return code, char[7]
        self.DescrInfoForReturnCode = '' #return code description, char[129]
class ReqSyncKey(BaseStruct): #key-sync request from trading core to the bank-futures gateway
    def __init__(self, TradeCode='', BankID='', BankBranchID='', BrokerID='', BrokerBranchID='', TradeDate='', TradeTime='', BankSerial='', TradingDay='', PlateSerial=0, LastFragment=LF_Yes, SessionID=0, InstallID=0, UserID='', Message='', DeviceID='', BrokerIDByBank='', OperNo='', RequestID=0, TID=0):
        """Generated field map (args unused by design; attrs hold base-type names)."""
        self.TradeCode = '' #business function code, char[7]
        self.BankID = '' #bank code, char[4]
        self.BankBranchID = 'BankBrchID' #bank branch code, char[5]
        self.BrokerID = '' #futures broker code, char[11]
        self.BrokerBranchID = 'FutureBranchID' #futures broker branch code, char[31]
        self.TradeDate = '' #trade date, char[9]
        self.TradeTime = '' #trade time, char[9]
        self.BankSerial = '' #bank serial number, char[13]
        self.TradingDay = 'TradeDate' #trading system date, char[9]
        self.PlateSerial = 'Serial' #bank-futures platform message serial number, int
        self.LastFragment = '' #last-fragment flag, char
        self.SessionID = '' #session ID, int
        self.InstallID = '' #install ID, int
        self.UserID = '' #user identifier, char[16]
        self.Message = 'AddInfo' #message from the trading core to the gateway, char[129]
        self.DeviceID = '' #channel flag, char[3]
        self.BrokerIDByBank = 'BankCodingForFuture' #futures company's code at the bank, char[33]
        self.OperNo = '' #bank teller, char[17]
        self.RequestID = '' #request ID, int
        self.TID = '' #transaction ID, int
class RspSyncKey(BaseStruct): #key-sync response from trading core to the bank-futures gateway
    def __init__(self, TradeCode='', BankID='', BankBranchID='', BrokerID='', BrokerBranchID='', TradeDate='', TradeTime='', BankSerial='', TradingDay='', PlateSerial=0, LastFragment=LF_Yes, SessionID=0, InstallID=0, UserID='', Message='', DeviceID='', BrokerIDByBank='', OperNo='', RequestID=0, TID=0, ErrorID=0, ErrorMsg=''):
        """Generated field map (args unused by design; attrs hold base-type names)."""
        self.TradeCode = '' #business function code, char[7]
        self.BankID = '' #bank code, char[4]
        self.BankBranchID = 'BankBrchID' #bank branch code, char[5]
        self.BrokerID = '' #futures broker code, char[11]
        self.BrokerBranchID = 'FutureBranchID' #futures broker branch code, char[31]
        self.TradeDate = '' #trade date, char[9]
        self.TradeTime = '' #trade time, char[9]
        self.BankSerial = '' #bank serial number, char[13]
        self.TradingDay = 'TradeDate' #trading system date, char[9]
        self.PlateSerial = 'Serial' #bank-futures platform message serial number, int
        self.LastFragment = '' #last-fragment flag, char
        self.SessionID = '' #session ID, int
        self.InstallID = '' #install ID, int
        self.UserID = '' #user identifier, char[16]
        self.Message = 'AddInfo' #message from the trading core to the gateway, char[129]
        self.DeviceID = '' #channel flag, char[3]
        self.BrokerIDByBank = 'BankCodingForFuture' #futures company's code at the bank, char[33]
        self.OperNo = '' #bank teller, char[17]
        self.RequestID = '' #request ID, int
        self.TID = '' #transaction ID, int
        self.ErrorID = '' #error code, int
        self.ErrorMsg = '' #error message, char[81]
class NotifyQueryAccount(BaseStruct): #query account info notification
    def __init__(self, TradeCode='', BankID='', BankBranchID='', BrokerID='', BrokerBranchID='', TradeDate='', TradeTime='', BankSerial='', TradingDay='', PlateSerial=0, LastFragment=LF_Yes, SessionID=0, CustomerName='', IdCardType=ICT_EID, IdentifiedCardNo='', CustType=CUSTT_Person, BankAccount='', BankPassWord='', AccountID='', Password='', FutureSerial=0, InstallID=0, UserID='', VerifyCertNoFlag=YNI_Yes, CurrencyID='', Digest='', BankAccType=BAT_BankBook, DeviceID='', BankSecuAccType=BAT_BankBook, BrokerIDByBank='', BankSecuAcc='', BankPwdFlag=BPWDF_NoCheck, SecuPwdFlag=BPWDF_NoCheck, OperNo='', RequestID=0, TID=0, BankUseAmount=0.0, BankFetchAmount=0.0, ErrorID=0, ErrorMsg=''):
        """Generated field map (args unused by design; attrs hold base-type names)."""
        self.TradeCode = '' #business function code, char[7]
        self.BankID = '' #bank code, char[4]
        self.BankBranchID = 'BankBrchID' #bank branch code, char[5]
        self.BrokerID = '' #futures broker code, char[11]
        self.BrokerBranchID = 'FutureBranchID' #futures broker branch code, char[31]
        self.TradeDate = '' #trade date, char[9]
        self.TradeTime = '' #trade time, char[9]
        self.BankSerial = '' #bank serial number, char[13]
        self.TradingDay = 'TradeDate' #trading system date, char[9]
        self.PlateSerial = 'Serial' #bank-futures platform message serial number, int
        self.LastFragment = '' #last-fragment flag, char
        self.SessionID = '' #session ID, int
        self.CustomerName = 'IndividualName' #customer name, char[51]
        self.IdCardType = '' #ID document type, char
        self.IdentifiedCardNo = '' #ID document number, char[51]
        self.CustType = '' #customer type, char
        self.BankAccount = '' #bank account number, char[41]
        self.BankPassWord = 'Password' #bank password, char[41]
        self.AccountID = '' #investor account ID, char[13]
        self.Password = '' #futures password, char[41]
        self.FutureSerial = '' #futures company serial number, int
        self.InstallID = '' #install ID, int
        self.UserID = '' #user identifier, char[16]
        self.VerifyCertNoFlag = 'YesNoIndicator' #verify-customer-ID-number flag, char
        self.CurrencyID = '' #currency code, char[4]
        self.Digest = '' #digest, char[36]
        self.BankAccType = '' #bank account type, char
        self.DeviceID = '' #channel flag, char[3]
        self.BankSecuAccType = 'BankAccType' #futures unit account type, char
        self.BrokerIDByBank = 'BankCodingForFuture' #futures company's code at the bank, char[33]
        self.BankSecuAcc = 'BankAccount' #futures unit account, char[41]
        self.BankPwdFlag = 'PwdFlag' #bank password flag, char
        self.SecuPwdFlag = 'PwdFlag' #futures fund password check flag, char
        self.OperNo = '' #bank teller, char[17]
        self.RequestID = '' #request ID, int
        self.TID = '' #transaction ID, int
        self.BankUseAmount = 'TradeAmount' #bank available amount, double
        self.BankFetchAmount = 'TradeAmount' #bank withdrawable amount, double
        self.ErrorID = '' #error code, int
        self.ErrorMsg = '' #error message, char[81]
class TransferSerial(BaseStruct): #bank-futures transfer transaction record
    def __init__(self, PlateSerial=0, TradeDate='', TradingDay='', TradeTime='', TradeCode='', SessionID=0, BankID='', BankBranchID='', BankAccType=BAT_BankBook, BankAccount='', BankSerial='', BrokerID='', BrokerBranchID='', FutureAccType=FAT_BankBook, AccountID='', InvestorID='', FutureSerial=0, IdCardType=ICT_EID, IdentifiedCardNo='', CurrencyID='', TradeAmount=0.0, CustFee=0.0, BrokerFee=0.0, AvailabilityFlag=AVAF_Invalid, OperatorCode='', BankNewAccount='', ErrorID=0, ErrorMsg=''):
        """Generated field map (args unused by design; attrs hold base-type names)."""
        self.PlateSerial = '' #platform serial number, int
        self.TradeDate = '' #trade initiator's date, char[9]
        self.TradingDay = 'Date' #trading day, char[9]
        self.TradeTime = '' #trade time, char[9]
        self.TradeCode = '' #trade code, char[7]
        self.SessionID = '' #session ID, int
        self.BankID = '' #bank code, char[4]
        self.BankBranchID = 'BankBrchID' #bank branch code, char[5]
        self.BankAccType = '' #bank account type, char
        self.BankAccount = '' #bank account number, char[41]
        self.BankSerial = '' #bank serial number, char[13]
        self.BrokerID = '' #futures company code, char[11]
        self.BrokerBranchID = 'FutureBranchID' #futures broker branch code, char[31]
        self.FutureAccType = '' #futures company account type, char
        self.AccountID = '' #investor account ID, char[13]
        self.InvestorID = '' #investor ID, char[13]
        self.FutureSerial = '' #futures company serial number, int
        self.IdCardType = '' #ID document type, char
        self.IdentifiedCardNo = '' #ID document number, char[51]
        self.CurrencyID = '' #currency code, char[4]
        self.TradeAmount = '' #trade amount, double
        self.CustFee = '' #fee charged to the customer, double
        self.BrokerFee = 'FutureFee' #fee charged to the futures company, double
        self.AvailabilityFlag = '' #availability flag, char
        self.OperatorCode = '' #operator, char[17]
        self.BankNewAccount = 'BankAccount' #new bank account number, char[41]
        self.ErrorID = '' #error code, int
        self.ErrorMsg = '' #error message, char[81]
class QryTransferSerial(BaseStruct): #request to query transfer records
    def __init__(self, BrokerID='', AccountID='', BankID=''):
        """Generated field map (args unused by design; attrs hold base-type names)."""
        self.BrokerID = '' #broker company code, char[11]
        self.AccountID = '' #investor account ID, char[13]
        self.BankID = '' #bank code, char[4]
class NotifyFutureSignIn(BaseStruct): #futures broker sign-in notification
    def __init__(self, TradeCode='', BankID='', BankBranchID='', BrokerID='', BrokerBranchID='', TradeDate='', TradeTime='', BankSerial='', TradingDay='', PlateSerial=0, LastFragment=LF_Yes, SessionID=0, InstallID=0, UserID='', Digest='', CurrencyID='', DeviceID='', BrokerIDByBank='', OperNo='', RequestID=0, TID=0, ErrorID=0, ErrorMsg='', PinKey='', MacKey=''):
        """Generated field map (args unused by design; attrs hold base-type names)."""
        self.TradeCode = '' #business function code, char[7]
        self.BankID = '' #bank code, char[4]
        self.BankBranchID = 'BankBrchID' #bank branch code, char[5]
        self.BrokerID = '' #futures broker code, char[11]
        self.BrokerBranchID = 'FutureBranchID' #futures broker branch code, char[31]
        self.TradeDate = '' #trade date, char[9]
        self.TradeTime = '' #trade time, char[9]
        self.BankSerial = '' #bank serial number, char[13]
        self.TradingDay = 'TradeDate' #trading system date, char[9]
        self.PlateSerial = 'Serial' #bank-futures platform message serial number, int
        self.LastFragment = '' #last-fragment flag, char
        self.SessionID = '' #session ID, int
        self.InstallID = '' #install ID, int
        self.UserID = '' #user identifier, char[16]
        self.Digest = '' #digest, char[36]
        self.CurrencyID = '' #currency code, char[4]
        self.DeviceID = '' #channel flag, char[3]
        self.BrokerIDByBank = 'BankCodingForFuture' #futures company's code at the bank, char[33]
        self.OperNo = '' #bank teller, char[17]
        self.RequestID = '' #request ID, int
        self.TID = '' #transaction ID, int
        self.ErrorID = '' #error code, int
        self.ErrorMsg = '' #error message, char[81]
        self.PinKey = 'PasswordKey' #PIN key, char[129]
        self.MacKey = 'PasswordKey' #MAC key, char[129]
class NotifyFutureSignOut(BaseStruct): #futures broker sign-out notification
    def __init__(self, TradeCode='', BankID='', BankBranchID='', BrokerID='', BrokerBranchID='', TradeDate='', TradeTime='', BankSerial='', TradingDay='', PlateSerial=0, LastFragment=LF_Yes, SessionID=0, InstallID=0, UserID='', Digest='', CurrencyID='', DeviceID='', BrokerIDByBank='', OperNo='', RequestID=0, TID=0, ErrorID=0, ErrorMsg=''):
        """Generated field map (args unused by design; attrs hold base-type names)."""
        self.TradeCode = '' #business function code, char[7]
        self.BankID = '' #bank code, char[4]
        self.BankBranchID = 'BankBrchID' #bank branch code, char[5]
        self.BrokerID = '' #futures broker code, char[11]
        self.BrokerBranchID = 'FutureBranchID' #futures broker branch code, char[31]
        self.TradeDate = '' #trade date, char[9]
        self.TradeTime = '' #trade time, char[9]
        self.BankSerial = '' #bank serial number, char[13]
        self.TradingDay = 'TradeDate' #trading system date, char[9]
        self.PlateSerial = 'Serial' #bank-futures platform message serial number, int
        self.LastFragment = '' #last-fragment flag, char
        self.SessionID = '' #session ID, int
        self.InstallID = '' #install ID, int
        self.UserID = '' #user identifier, char[16]
        self.Digest = '' #digest, char[36]
        self.CurrencyID = '' #currency code, char[4]
        self.DeviceID = '' #channel flag, char[3]
        self.BrokerIDByBank = 'BankCodingForFuture' #futures company's code at the bank, char[33]
        self.OperNo = '' #bank teller, char[17]
        self.RequestID = '' #request ID, int
        self.TID = '' #transaction ID, int
        self.ErrorID = '' #error code, int
        self.ErrorMsg = '' #error message, char[81]
class NotifySyncKey(BaseStruct): #notification of key-sync result from trading core to the bank-futures gateway
    def __init__(self, TradeCode='', BankID='', BankBranchID='', BrokerID='', BrokerBranchID='', TradeDate='', TradeTime='', BankSerial='', TradingDay='', PlateSerial=0, LastFragment=LF_Yes, SessionID=0, InstallID=0, UserID='', Message='', DeviceID='', BrokerIDByBank='', OperNo='', RequestID=0, TID=0, ErrorID=0, ErrorMsg=''):
        """Generated field map (args unused by design; attrs hold base-type names)."""
        self.TradeCode = '' #business function code, char[7]
        self.BankID = '' #bank code, char[4]
        self.BankBranchID = 'BankBrchID' #bank branch code, char[5]
        self.BrokerID = '' #futures broker code, char[11]
        self.BrokerBranchID = 'FutureBranchID' #futures broker branch code, char[31]
        self.TradeDate = '' #trade date, char[9]
        self.TradeTime = '' #trade time, char[9]
        self.BankSerial = '' #bank serial number, char[13]
        self.TradingDay = 'TradeDate' #trading system date, char[9]
        self.PlateSerial = 'Serial' #bank-futures platform message serial number, int
        self.LastFragment = '' #last-fragment flag, char
        self.SessionID = '' #session ID, int
        self.InstallID = '' #install ID, int
        self.UserID = '' #user identifier, char[16]
        self.Message = 'AddInfo' #message from the trading core to the gateway, char[129]
        self.DeviceID = '' #channel flag, char[3]
        self.BrokerIDByBank = 'BankCodingForFuture' #futures company's code at the bank, char[33]
        self.OperNo = '' #bank teller, char[17]
        self.RequestID = '' #request ID, int
        self.TID = '' #transaction ID, int
        self.ErrorID = '' #error code, int
        self.ErrorMsg = '' #error message, char[81]
class QryAccountregister(BaseStruct):
    """Query the bank-futures account registration relationship.

    NOTE: assignment order defines the generated ctypes field layout (see
    ``_init()``); the assigned string selects the field type.
    """
    def __init__(self, BrokerID='', AccountID='', BankID=''):
        self.BrokerID = ''  # broker company code, char[11]
        self.AccountID = ''  # investor account ID, char[13]
        self.BankID = ''  # bank code, char[4]
class Accountregister(BaseStruct):
    """Customer account open/close registration record.

    NOTE: assignment order defines the generated ctypes field layout (see
    ``_init()``); the assigned string selects the field type.
    """
    def __init__(self, TradeDay='', BankID='', BankBranchID='', BankAccount='', BrokerID='', BrokerBranchID='', AccountID='', IdCardType=ICT_EID, IdentifiedCardNo='', CustomerName='', CurrencyID='', OpenOrDestroy=OOD_Open, RegDate='', OutDate='', TID=0, CustType=CUSTT_Person, BankAccType=BAT_BankBook):
        self.TradeDay = 'TradeDate'  # trade date, char[9]
        self.BankID = ''  # bank code, char[4]
        self.BankBranchID = 'BankBrchID'  # bank branch code, char[5]
        self.BankAccount = ''  # bank account number, char[41]
        self.BrokerID = ''  # futures company code, char[11]
        self.BrokerBranchID = 'FutureBranchID'  # futures company branch code, char[31]
        self.AccountID = ''  # investor account ID, char[13]
        self.IdCardType = ''  # ID document type, char
        self.IdentifiedCardNo = ''  # ID document number, char[51]
        self.CustomerName = 'IndividualName'  # customer name, char[51]
        self.CurrencyID = ''  # currency code, char[4]
        self.OpenOrDestroy = ''  # open/close account category, char
        self.RegDate = 'TradeDate'  # registration (signing) date, char[9]
        self.OutDate = 'TradeDate'  # deregistration date, char[9]
        self.TID = ''  # transaction ID, int
        self.CustType = ''  # customer type, char
        self.BankAccType = ''  # bank account type, char
class OpenAccount(BaseStruct):
    """Bank-futures account-opening message.

    NOTE: assignment order defines the generated ctypes field layout (see
    ``_init()``); the assigned string selects the field type.
    """
    def __init__(self, TradeCode='', BankID='', BankBranchID='', BrokerID='', BrokerBranchID='', TradeDate='', TradeTime='', BankSerial='', TradingDay='', PlateSerial=0, LastFragment=LF_Yes, SessionID=0, CustomerName='', IdCardType=ICT_EID, IdentifiedCardNo='', Gender=GD_Unknown, CountryCode='', CustType=CUSTT_Person, Address='', ZipCode='', Telephone='', MobilePhone='', Fax='', EMail='', MoneyAccountStatus=MAS_Normal, BankAccount='', BankPassWord='', AccountID='', Password='', InstallID=0, VerifyCertNoFlag=YNI_Yes, CurrencyID='', CashExchangeCode=CEC_Exchange, Digest='', BankAccType=BAT_BankBook, DeviceID='', BankSecuAccType=BAT_BankBook, BrokerIDByBank='', BankSecuAcc='', BankPwdFlag=BPWDF_NoCheck, SecuPwdFlag=BPWDF_NoCheck, OperNo='', TID=0, UserID='', ErrorID=0, ErrorMsg=''):
        self.TradeCode = ''  # business function code, char[7]
        self.BankID = ''  # bank code, char[4]
        self.BankBranchID = 'BankBrchID'  # bank branch code, char[5]
        self.BrokerID = ''  # futures broker code, char[11]
        self.BrokerBranchID = 'FutureBranchID'  # futures broker branch code, char[31]
        self.TradeDate = ''  # trade date, char[9]
        self.TradeTime = ''  # trade time, char[9]
        self.BankSerial = ''  # bank serial number, char[13]
        self.TradingDay = 'TradeDate'  # trading-system date, char[9]
        self.PlateSerial = 'Serial'  # bank-futures platform message serial, int
        self.LastFragment = ''  # last-fragment flag, char
        self.SessionID = ''  # session ID, int
        self.CustomerName = 'IndividualName'  # customer name, char[51]
        self.IdCardType = ''  # ID document type, char
        self.IdentifiedCardNo = ''  # ID document number, char[51]
        self.Gender = ''  # gender, char
        self.CountryCode = ''  # country code, char[21]
        self.CustType = ''  # customer type, char
        self.Address = ''  # address, char[101]
        self.ZipCode = ''  # zip code, char[7]
        self.Telephone = ''  # telephone number, char[41]
        self.MobilePhone = ''  # mobile phone, char[21]
        self.Fax = ''  # fax, char[41]
        self.EMail = ''  # e-mail, char[41]
        self.MoneyAccountStatus = ''  # money account status, char
        self.BankAccount = ''  # bank account number, char[41]
        self.BankPassWord = 'Password'  # bank password, char[41]
        self.AccountID = ''  # investor account ID, char[13]
        self.Password = ''  # futures password, char[41]
        self.InstallID = ''  # install ID, int
        self.VerifyCertNoFlag = 'YesNoIndicator'  # verify-customer-ID-number flag, char
        self.CurrencyID = ''  # currency code, char[4]
        self.CashExchangeCode = ''  # cash-exchange flag, char
        self.Digest = ''  # digest, char[36]
        self.BankAccType = ''  # bank account type, char
        self.DeviceID = ''  # channel flag, char[3]
        self.BankSecuAccType = 'BankAccType'  # futures-unit account type, char
        self.BrokerIDByBank = 'BankCodingForFuture'  # bank's code for the futures company, char[33]
        self.BankSecuAcc = 'BankAccount'  # futures-unit account number, char[41]
        self.BankPwdFlag = 'PwdFlag'  # bank password check flag, char
        self.SecuPwdFlag = 'PwdFlag'  # futures money-password check flag, char
        self.OperNo = ''  # teller (operator) number, char[17]
        self.TID = ''  # transaction ID, int
        self.UserID = ''  # user ID, char[16]
        self.ErrorID = ''  # error code, int
        self.ErrorMsg = ''  # error message, char[81]
class CancelAccount(BaseStruct):
    """Bank-futures account-cancellation message (same layout as OpenAccount).

    NOTE: assignment order defines the generated ctypes field layout (see
    ``_init()``); the assigned string selects the field type.
    """
    def __init__(self, TradeCode='', BankID='', BankBranchID='', BrokerID='', BrokerBranchID='', TradeDate='', TradeTime='', BankSerial='', TradingDay='', PlateSerial=0, LastFragment=LF_Yes, SessionID=0, CustomerName='', IdCardType=ICT_EID, IdentifiedCardNo='', Gender=GD_Unknown, CountryCode='', CustType=CUSTT_Person, Address='', ZipCode='', Telephone='', MobilePhone='', Fax='', EMail='', MoneyAccountStatus=MAS_Normal, BankAccount='', BankPassWord='', AccountID='', Password='', InstallID=0, VerifyCertNoFlag=YNI_Yes, CurrencyID='', CashExchangeCode=CEC_Exchange, Digest='', BankAccType=BAT_BankBook, DeviceID='', BankSecuAccType=BAT_BankBook, BrokerIDByBank='', BankSecuAcc='', BankPwdFlag=BPWDF_NoCheck, SecuPwdFlag=BPWDF_NoCheck, OperNo='', TID=0, UserID='', ErrorID=0, ErrorMsg=''):
        self.TradeCode = ''  # business function code, char[7]
        self.BankID = ''  # bank code, char[4]
        self.BankBranchID = 'BankBrchID'  # bank branch code, char[5]
        self.BrokerID = ''  # futures broker code, char[11]
        self.BrokerBranchID = 'FutureBranchID'  # futures broker branch code, char[31]
        self.TradeDate = ''  # trade date, char[9]
        self.TradeTime = ''  # trade time, char[9]
        self.BankSerial = ''  # bank serial number, char[13]
        self.TradingDay = 'TradeDate'  # trading-system date, char[9]
        self.PlateSerial = 'Serial'  # bank-futures platform message serial, int
        self.LastFragment = ''  # last-fragment flag, char
        self.SessionID = ''  # session ID, int
        self.CustomerName = 'IndividualName'  # customer name, char[51]
        self.IdCardType = ''  # ID document type, char
        self.IdentifiedCardNo = ''  # ID document number, char[51]
        self.Gender = ''  # gender, char
        self.CountryCode = ''  # country code, char[21]
        self.CustType = ''  # customer type, char
        self.Address = ''  # address, char[101]
        self.ZipCode = ''  # zip code, char[7]
        self.Telephone = ''  # telephone number, char[41]
        self.MobilePhone = ''  # mobile phone, char[21]
        self.Fax = ''  # fax, char[41]
        self.EMail = ''  # e-mail, char[41]
        self.MoneyAccountStatus = ''  # money account status, char
        self.BankAccount = ''  # bank account number, char[41]
        self.BankPassWord = 'Password'  # bank password, char[41]
        self.AccountID = ''  # investor account ID, char[13]
        self.Password = ''  # futures password, char[41]
        self.InstallID = ''  # install ID, int
        self.VerifyCertNoFlag = 'YesNoIndicator'  # verify-customer-ID-number flag, char
        self.CurrencyID = ''  # currency code, char[4]
        self.CashExchangeCode = ''  # cash-exchange flag, char
        self.Digest = ''  # digest, char[36]
        self.BankAccType = ''  # bank account type, char
        self.DeviceID = ''  # channel flag, char[3]
        self.BankSecuAccType = 'BankAccType'  # futures-unit account type, char
        self.BrokerIDByBank = 'BankCodingForFuture'  # bank's code for the futures company, char[33]
        self.BankSecuAcc = 'BankAccount'  # futures-unit account number, char[41]
        self.BankPwdFlag = 'PwdFlag'  # bank password check flag, char
        self.SecuPwdFlag = 'PwdFlag'  # futures money-password check flag, char
        self.OperNo = ''  # teller (operator) number, char[17]
        self.TID = ''  # transaction ID, int
        self.UserID = ''  # user ID, char[16]
        self.ErrorID = ''  # error code, int
        self.ErrorMsg = ''  # error message, char[81]
class ChangeAccount(BaseStruct):
    """Bank-futures change-of-bank-account message.

    NOTE: assignment order defines the generated ctypes field layout (see
    ``_init()``); the assigned string selects the field type.
    """
    def __init__(self, TradeCode='', BankID='', BankBranchID='', BrokerID='', BrokerBranchID='', TradeDate='', TradeTime='', BankSerial='', TradingDay='', PlateSerial=0, LastFragment=LF_Yes, SessionID=0, CustomerName='', IdCardType=ICT_EID, IdentifiedCardNo='', Gender=GD_Unknown, CountryCode='', CustType=CUSTT_Person, Address='', ZipCode='', Telephone='', MobilePhone='', Fax='', EMail='', MoneyAccountStatus=MAS_Normal, BankAccount='', BankPassWord='', NewBankAccount='', NewBankPassWord='', AccountID='', Password='', BankAccType=BAT_BankBook, InstallID=0, VerifyCertNoFlag=YNI_Yes, CurrencyID='', BrokerIDByBank='', BankPwdFlag=BPWDF_NoCheck, SecuPwdFlag=BPWDF_NoCheck, TID=0, Digest='', ErrorID=0, ErrorMsg=''):
        self.TradeCode = ''  # business function code, char[7]
        self.BankID = ''  # bank code, char[4]
        self.BankBranchID = 'BankBrchID'  # bank branch code, char[5]
        self.BrokerID = ''  # futures broker code, char[11]
        self.BrokerBranchID = 'FutureBranchID'  # futures broker branch code, char[31]
        self.TradeDate = ''  # trade date, char[9]
        self.TradeTime = ''  # trade time, char[9]
        self.BankSerial = ''  # bank serial number, char[13]
        self.TradingDay = 'TradeDate'  # trading-system date, char[9]
        self.PlateSerial = 'Serial'  # bank-futures platform message serial, int
        self.LastFragment = ''  # last-fragment flag, char
        self.SessionID = ''  # session ID, int
        self.CustomerName = 'IndividualName'  # customer name, char[51]
        self.IdCardType = ''  # ID document type, char
        self.IdentifiedCardNo = ''  # ID document number, char[51]
        self.Gender = ''  # gender, char
        self.CountryCode = ''  # country code, char[21]
        self.CustType = ''  # customer type, char
        self.Address = ''  # address, char[101]
        self.ZipCode = ''  # zip code, char[7]
        self.Telephone = ''  # telephone number, char[41]
        self.MobilePhone = ''  # mobile phone, char[21]
        self.Fax = ''  # fax, char[41]
        self.EMail = ''  # e-mail, char[41]
        self.MoneyAccountStatus = ''  # money account status, char
        self.BankAccount = ''  # bank account number, char[41]
        self.BankPassWord = 'Password'  # bank password, char[41]
        self.NewBankAccount = 'BankAccount'  # new bank account number, char[41]
        self.NewBankPassWord = 'Password'  # new bank password, char[41]
        self.AccountID = ''  # investor account ID, char[13]
        self.Password = ''  # futures password, char[41]
        self.BankAccType = ''  # bank account type, char
        self.InstallID = ''  # install ID, int
        self.VerifyCertNoFlag = 'YesNoIndicator'  # verify-customer-ID-number flag, char
        self.CurrencyID = ''  # currency code, char[4]
        self.BrokerIDByBank = 'BankCodingForFuture'  # bank's code for the futures company, char[33]
        self.BankPwdFlag = 'PwdFlag'  # bank password check flag, char
        self.SecuPwdFlag = 'PwdFlag'  # futures money-password check flag, char
        self.TID = ''  # transaction ID, int
        self.Digest = ''  # digest, char[36]
        self.ErrorID = ''  # error code, int
        self.ErrorMsg = ''  # error message, char[81]
class UserRightsAssign(BaseStruct):
    """Trading rights at a disaster-recovery (DR) center.

    NOTE: assignment order defines the generated ctypes field layout (see
    ``_init()``); the assigned string selects the field type.
    """
    def __init__(self, BrokerID='', UserID='', DRIdentityID=0):
        self.BrokerID = ''  # application unit (broker) code, char[11]
        self.UserID = ''  # user code, char[16]
        self.DRIdentityID = ''  # trading center code, int
class BrokerUserRightAssign(BaseStruct):
    """Whether a broker may trade at the given DR trading center.

    NOTE: assignment order defines the generated ctypes field layout (see
    ``_init()``); the assigned string selects the field type.
    """
    def __init__(self, BrokerID='', DRIdentityID=0, Tradeable=0):
        self.BrokerID = ''  # application unit (broker) code, char[11]
        self.DRIdentityID = ''  # trading center code, int
        self.Tradeable = 'Bool'  # whether trading is allowed, int
class DRTransfer(BaseStruct):
    """Disaster-recovery trading-transfer message.

    NOTE: assignment order defines the generated ctypes field layout (see
    ``_init()``); the assigned string selects the field type.
    """
    def __init__(self, OrigDRIdentityID=0, DestDRIdentityID=0, OrigBrokerID='', DestBrokerID=''):
        self.OrigDRIdentityID = 'DRIdentityID'  # original trading center code, int
        self.DestDRIdentityID = 'DRIdentityID'  # destination trading center code, int
        self.OrigBrokerID = 'BrokerID'  # original application unit code, char[11]
        self.DestBrokerID = 'BrokerID'  # destination application unit code, char[11]
class FensUserInfo(BaseStruct):
    """Fens (front-end name service) user information.

    NOTE: assignment order defines the generated ctypes field layout (see
    ``_init()``); the assigned string selects the field type.
    """
    def __init__(self, BrokerID='', UserID='', LoginMode=LM_Trade):
        self.BrokerID = ''  # broker company code, char[11]
        self.UserID = ''  # user code, char[16]
        self.LoginMode = ''  # login mode, char
class CurrTransferIdentity(BaseStruct):
    """Trading center currently handling bank-futures transfers.

    NOTE: the assigned string selects the ctypes field type (see ``_init()``).
    """
    def __init__(self, IdentityID=0):
        self.IdentityID = 'DRIdentityID'  # trading center code, int
class LoginForbiddenUser(BaseStruct):
    """User forbidden from logging in.

    NOTE: assignment order defines the generated ctypes field layout (see
    ``_init()``); the assigned string selects the field type.
    """
    def __init__(self, BrokerID='', UserID=''):
        self.BrokerID = ''  # broker company code, char[11]
        self.UserID = ''  # user code, char[16]
class QryLoginForbiddenUser(BaseStruct):
    """Query for users forbidden from logging in.

    NOTE: assignment order defines the generated ctypes field layout (see
    ``_init()``); the assigned string selects the field type.
    """
    def __init__(self, BrokerID='', UserID=''):
        self.BrokerID = ''  # broker company code, char[11]
        self.UserID = ''  # user code, char[16]
class MulticastGroupInfo(BaseStruct):
    """UDP multicast group information.

    NOTE: assignment order defines the generated ctypes field layout (see
    ``_init()``); the assigned string selects the field type.
    """
    def __init__(self, GroupIP='', GroupPort=0, SourceIP=''):
        self.GroupIP = 'IPAddress'  # multicast group IP address, char[16]
        self.GroupPort = 'IPPort'  # multicast group IP port, int
        self.SourceIP = 'IPAddress'  # source IP address, char[16]
error = {'NONE':0, 0:'综合交易平台:正确', 'INVALID_DATA_SYNC_STATUS':1, 1:'综合交易平台:不在已同步状态', 'INCONSISTENT_INFORMATION':2, 2:'综合交易平台:会话信息不一致', 'INVALID_LOGIN':3, 3:'综合交易平台:不合法的登录', 'USER_NOT_ACTIVE':4, 4:'综合交易平台:用户不活跃', 'DUPLICATE_LOGIN':5, 5:'综合交易平台:重复的登录', 'NOT_LOGIN_YET':6, 6:'综合交易平台:还没有登录', 'NOT_INITED':7, 7:'综合交易平台:还没有初始化', 'FRONT_NOT_ACTIVE':8, 8:'综合交易平台:前置不活跃', 'NO_PRIVILEGE':9, 9:'综合交易平台:无此权限', 'CHANGE_OTHER_PASSWORD':10, 10:'综合交易平台:修改别人的口令', 'USER_NOT_FOUND':11, 11:'综合交易平台:找不到该用户', 'BROKER_NOT_FOUND':12, 12:'综合交易平台:找不到该经纪公司', 'INVESTOR_NOT_FOUND':13, 13:'综合交易平台:找不到投资者', 'OLD_PASSWORD_MISMATCH':14, 14:'综合交易平台:原口令不匹配', 'BAD_FIELD':15, 15:'综合交易平台:报单字段有误', 'INSTRUMENT_NOT_FOUND':16, 16:'综合交易平台:找不到合约', 'INSTRUMENT_NOT_TRADING':17, 17:'综合交易平台:合约不能交易', 'NOT_EXCHANGE_PARTICIPANT':18, 18:'综合交易平台:经纪公司不是交易所的会员', 'INVESTOR_NOT_ACTIVE':19, 19:'综合交易平台:投资者不活跃', 'NOT_EXCHANGE_CLIENT':20, 20:'综合交易平台:投资者未在交易所开户', 'NO_VALID_TRADER_AVAILABLE':21, 21:'综合交易平台:该交易席位未连接到交易所', 'DUPLICATE_ORDER_REF':22, 22:'综合交易平台:报单错误:不允许重复报单', 'BAD_ORDER_ACTION_FIELD':23, 23:'综合交易平台:错误的报单操作字段', 'DUPLICATE_ORDER_ACTION_REF':24, 24:'综合交易平台:撤单已报送,不允许重复撤单', 'ORDER_NOT_FOUND':25, 25:'综合交易平台:撤单找不到相应报单', 'INSUITABLE_ORDER_STATUS':26, 26:'综合交易平台:报单已全成交或已撤销,不能再撤', 'UNSUPPORTED_FUNCTION':27, 27:'综合交易平台:不支持的功能', 'NO_TRADING_RIGHT':28, 28:'综合交易平台:没有报单交易权限', 'CLOSE_ONLY':29, 29:'综合交易平台:只能平仓', 'OVER_CLOSE_POSITION':30, 30:'综合交易平台:平仓量超过持仓量', 'INSUFFICIENT_MONEY':31, 31:'综合交易平台:资金不足', 'DUPLICATE_PK':32, 32:'综合交易平台:主键重复', 'CANNOT_FIND_PK':33, 33:'综合交易平台:找不到主键', 'CAN_NOT_INACTIVE_BROKER':34, 34:'综合交易平台:设置经纪公司不活跃状态失败', 'BROKER_SYNCHRONIZING':35, 35:'综合交易平台:经纪公司正在同步', 'BROKER_SYNCHRONIZED':36, 36:'综合交易平台:经纪公司已同步', 'SHORT_SELL':37, 37:'综合交易平台:现货交易不能卖空', 'INVALID_SETTLEMENT_REF':38, 38:'综合交易平台:不合法的结算引用', 'CFFEX_NETWORK_ERROR':39, 39:'综合交易平台:交易所网络连接失败', 'CFFEX_OVER_REQUEST':40, 40:'综合交易平台:交易所未处理请求超过许可数', 'CFFEX_OVER_REQUEST_PER_SECOND':41, 41:'综合交易平台:交易所每秒发送请求数超过许可数', 'SETTLEMENT_INFO_NOT_CONFIRMED':42, 42:'综合交易平台:结算结果未确认', 
'DEPOSIT_NOT_FOUND':43, 43:'综合交易平台:没有对应的入金记录', 'EXCHANG_TRADING':44, 44:'综合交易平台:交易所已经进入连续交易状态', 'PARKEDORDER_NOT_FOUND':45, 45:'综合交易平台:找不到预埋(撤单)单', 'PARKEDORDER_HASSENDED':46, 46:'综合交易平台:预埋(撤单)单已经发送', 'PARKEDORDER_HASDELETE':47, 47:'综合交易平台:预埋(撤单)单已经删除', 'INVALID_INVESTORIDORPASSWORD':48, 48:'综合交易平台:无效的投资者或者密码', 'INVALID_LOGIN_IPADDRESS':49, 49:'综合交易平台:不合法的登录IP地址', 'OVER_CLOSETODAY_POSITION':50, 50:'综合交易平台:平今仓位不足', 'OVER_CLOSEYESTERDAY_POSITION':51, 51:'综合交易平台:平昨仓位不足', 'BROKER_NOT_ENOUGH_CONDORDER':52, 52:'综合交易平台:经纪公司没有足够可用的条件单数量', 'INVESTOR_NOT_ENOUGH_CONDORDER':53, 53:'综合交易平台:投资者没有足够可用的条件单数量', 'BROKER_NOT_SUPPORT_CONDORDER':54, 54:'综合交易平台:经纪公司不支持条件单', 'RESEND_ORDER_BROKERINVESTOR_NOTMATCH':55, 55:'综合交易平台:重发未知单经济公司/投资者不匹配', 'SYC_OTP_FAILED':56, 56:'综合交易平台:同步动态令牌失败', 'OTP_MISMATCH':57, 57:'综合交易平台:动态令牌校验错误', 'OTPPARAM_NOT_FOUND':58, 58:'综合交易平台:找不到动态令牌配置信息', 'UNSUPPORTED_OTPTYPE':59, 59:'综合交易平台:不支持的动态令牌类型', 'SINGLEUSERSESSION_EXCEED_LIMIT':60, 60:'综合交易平台:用户在线会话超出上限', 'EXCHANGE_UNSUPPORTED_ARBITRAGE':61, 61:'综合交易平台:该交易所不支持套利类型报单', 'NO_CONDITIONAL_ORDER_RIGHT':62, 62:'综合交易平台:没有条件单交易权限', 'AUTH_FAILED':63, 63:'综合交易平台:客户端认证失败', 'NOT_AUTHENT':64, 64:'综合交易平台:客户端未认证', 'SWAPORDER_UNSUPPORTED':65, 65:'综合交易平台:该合约不支持互换类型报单', 'LOGIN_FORBIDDEN':66, 66:'综合交易平台:连续登录失败次数超限,登录被禁止', 'NO_TRADING_RIGHT_IN_SEPC_DR':101, 101:'综合交易平台:用户在本系统没有报单权限', 'NO_DR_NO':102, 102:'综合交易平台:系统缺少灾备标示号', 'SEND_INSTITUTION_CODE_ERROR':1000, 1000:'银期转账:发送机构代码错误', 'NO_GET_PLATFORM_SN':1001, 1001:'银期转账:取平台流水号错误', 'ILLEGAL_TRANSFER_BANK':1002, 1002:'银期转账:不合法的转账银行', 'ALREADY_OPEN_ACCOUNT':1003, 1003:'银期转账:已经开户', 'NOT_OPEN_ACCOUNT':1004, 1004:'银期转账:未开户', 'PROCESSING':1005, 1005:'银期转账:处理中', 'OVERTIME':1006, 1006:'银期转账:交易超时', 'RECORD_NOT_FOUND':1007, 1007:'银期转账:找不到记录', 'NO_FOUND_REVERSAL_ORIGINAL_TRANSACTION':1008, 1008:'银期转账:找不到被冲正的原始交易', 'CONNECT_HOST_FAILED':1009, 1009:'银期转账:连接主机失败', 'SEND_FAILED':1010, 1010:'银期转账:发送失败', 'LATE_RESPONSE':1011, 1011:'银期转账:迟到应答', 'REVERSAL_BANKID_NOT_MATCH':1012, 
1012:'银期转账:冲正交易银行代码错误', 'REVERSAL_BANKACCOUNT_NOT_MATCH':1013, 1013:'银期转账:冲正交易银行账户错误', 'REVERSAL_BROKERID_NOT_MATCH':1014, 1014:'银期转账:冲正交易经纪公司代码错误', 'REVERSAL_ACCOUNTID_NOT_MATCH':1015, 1015:'银期转账:冲正交易资金账户错误', 'REVERSAL_AMOUNT_NOT_MATCH':1016, 1016:'银期转账:冲正交易交易金额错误', 'DB_OPERATION_FAILED':1017, 1017:'银期转账:数据库操作错误', 'SEND_ASP_FAILURE':1018, 1018:'银期转账:发送到交易系统失败', 'NOT_SIGNIN':1019, 1019:'银期转账:没有签到', 'ALREADY_SIGNIN':1020, 1020:'银期转账:已经签到', 'AMOUNT_OR_TIMES_OVER':1021, 1021:'银期转账:金额或次数超限', 'NOT_IN_TRANSFER_TIME':1022, 1022:'银期转账:这一时间段不能转账', 'BANK_SERVER_ERROR':1023, 1023:'银行主机错', 'BANK_SERIAL_IS_REPEALED':1024, 1024:'银期转账:银行已经冲正', 'BANK_SERIAL_NOT_EXIST':1025, 1025:'银期转账:银行流水不存在', 'NOT_ORGAN_MAP':1026, 1026:'银期转账:机构没有签约', 'EXIST_TRANSFER':1027, 1027:'银期转账:存在转账,不能销户', 'BANK_FORBID_REVERSAL':1028, 1028:'银期转账:银行不支持冲正', 'DUP_BANK_SERIAL':1029, 1029:'银期转账:重复的银行流水', 'FBT_SYSTEM_BUSY':1030, 1030:'银期转账:转账系统忙,稍后再试', 'MACKEY_SYNCING':1031, 1031:'银期转账:MAC密钥正在同步', 'ACCOUNTID_ALREADY_REGISTER':1032, 1032:'银期转账:资金账户已经登记', 'BANKACCOUNT_ALREADY_REGISTER':1033, 1033:'银期转账:银行账户已经登记', 'DUP_BANK_SERIAL_REDO_OK':1034, 1034:'银期转账:重复的银行流水,重发成功', 'CURRENCYID_NOT_SUPPORTED':1035, 1035:'银期转账:该币种代码不支持', 'INVALID_MAC':1036, 1036:'银期转账:MAC值验证失败', 'NO_VALID_BANKOFFER_AVAILABLE':2000, 2000:'综合交易平台:该报盘未连接到银行', 'PASSWORD_MISMATCH':2001, 2001:'综合交易平台:资金密码错误', 'DUPLATION_BANK_SERIAL':2004, 2004:'综合交易平台:银行流水号重复', 'DUPLATION_OFFER_SERIAL':2005, 2005:'综合交易平台:报盘流水号重复', 'SERIAL_NOT_EXSIT':2006, 2006:'综合交易平台:被冲正流水不存在(冲正交易)', 'SERIAL_IS_REPEALED':2007, 2007:'综合交易平台:原流水已冲正(冲正交易)', 'SERIAL_MISMATCH':2008, 2008:'综合交易平台:与原流水信息不符(冲正交易)', 'IdentifiedCardNo_MISMATCH':2009, 2009:'综合交易平台:证件号码或类型错误', 'ACCOUNT_NOT_FUND':2011, 2011:'综合交易平台:资金账户不存在', 'ACCOUNT_NOT_ACTIVE':2012, 2012:'综合交易平台:资金账户已经销户', 'NOT_ALLOW_REPEAL_BYMANUAL':2013, 2013:'综合交易平台:该交易不能执行手工冲正', 'AMOUNT_OUTOFTHEWAY':2014, 2014:'综合交易平台:转帐金额错误', 'WAITING_OFFER_RSP':999999, 999999:'综合交易平台:等待银期报盘处理结果'}
def _init():
    # One-shot module bootstrap: rewrites every declarative ``class X(BaseStruct)``
    # above into a real ctypes Structure with a packed field layout, then removes
    # itself from the module namespace.
    import re, sys
    from ctypes import c_char, c_short, c_int, c_double, Structure
    # G is the live module namespace. T maps field-type names -> textual ctype
    # specs (e.g. 'char[9]'); BaseStruct is the placeholder base class.
    G = globals(); del G['_init']; T = G.pop('T'); Base = G.pop('BaseStruct')
    match = re.compile(r'(\w+)\[(\d+)\]').match
    D = {'char':c_char, 'short':c_short, 'int':c_int, 'double':c_double}
    for k,v in T.items():
        if v not in D:
            # e.g. 'char[9]' -> c_char * 9 (fixed-size array type), memoized in D
            m = match(v).groups(); D[v] = D[m[0]] * int(m[1])
        # T now maps type names directly to ctypes types.
        T[k] = D[v]
    if sys.version_info[0] >= 3:
        # Python 3: 1-char module-level str constants (enum values like LF_Yes)
        # become bytes so they can be stored into c_char fields. Only values are
        # replaced (no keys added/removed), so mutating during items() is safe.
        for k,v in G.items():
            if isinstance(v, str) and '_' in k[1:-1]: G[k] = v.encode('latin-1')
    else:
        # Python 2: decode the Chinese error messages to unicode.
        for k in error:
            if not isinstance(k, str): error[k] = error[k].decode('utf-8')
    # Default value per enum-typed field-type name; applied by the generated
    # __init__ for fields that were not supplied positionally.
    edvs = {'ContingentCondition':CC_Immediately, 'TradeType':TRDT_Common, 'AllWithoutTrade':AWT_Enable, 'PositionDateType':PDT_UseHistory, 'TradingRight':TR_Allow, 'UserRightType':URT_Logon, 'InstitutionType':TS_Bank, 'FindMarginRateAlgoID':FMRA_Base, 'HedgeFlag':HF_Speculation, 'TraderConnectStatus':TCS_NotConnected, 'CustType':CUSTT_Person, 'PositionType':PT_Net, 'ProductClass':PC_Futures, 'UserType':UT_Investor, 'ClientIDType':CIDT_Speculation, 'ParkedOrderStatus':PAOS_NotSend, 'YesNoIndicator':YNI_Yes, 'HandlePositionAlgoID':HPA_Base, 'Direction':D_Buy, 'OffsetFlag':OF_Open, 'PosiDirection':PD_Net, 'PwdFlag':BPWDF_NoCheck, 'CloseDealType':CDT_Normal, 'PersonType':PST_Order, 'ExchangeProperty':EXP_Normal, 'OrderPriceType':OPT_AnyPrice, 'TimeCondition':TC_IOC, 'OrderStatus':OST_AllTraded, 'ActionFlag':AF_Delete, 'OrderSubmitStatus':OSS_InsertSubmitted, 'DataSyncStatus':DS_Asynchronous, 'TransferValidFlag':TVF_Invalid, 'AvailabilityFlag':AVAF_Invalid, 'InstStatusEnterReason':IER_Automatic, 'PositionDate':PSD_Today, 'Algorithm':AG_All, 'ForceCloseReason':FCC_NotForceClose, 'OrderType':ORDT_Normal, 'FeePayFlag':FPF_BEN, 'FuturePwdFlag':FPWD_UnCheck, 'Gender':GD_Unknown, 'FunctionCode':FC_DataAsync, 'OrderSource':OSRC_Participant, 'CashExchangeCode':CEC_Exchange, 'BrokerRepealFlag':BRORF_BrokerNotNeedRepeal, 'InstrumentStatus':IS_BeforeTrading, 'OpenOrDestroy':OOD_Open, 'BankRepealFlag':BRF_BankNotNeedRepeal, 'HandleTradingAccountAlgoID':HTAA_Base, 'IdCardType':ICT_EID, 'MarginPriceType':MPT_PreSettlementPrice, 'FileBusinessCode':FBC_Others, 'IncludeCloseProfit':ICP_Include, 'CFMMCKeyKind':CFMMCKK_REQUEST, 'BankAccType':BAT_BankBook, 'LastFragment':LF_Yes, 'InstLifePhase':IP_NotStart, 'FutureAccType':FAT_BankBook, 'LoginMode':LM_Trade, 'VolumeCondition':VC_AV, 'MoneyAccountStatus':MAS_Normal, 'OTPType':OTP_NONE, 'UserEventType':UET_Login, 'InvestorRange':IR_All, 'TransferStatus':TRFS_Normal, 'TradeSource':TSRC_NORMAL, 'PriceSource':PSRC_LastPrice,
        'TradingRole':ER_Broker, 'BrokerFunctionCode':BFC_ForceUserLogout, 'OrderActionStatus':OAS_Submitted}
    # Collect the declarative classes *before* BaseStruct is replaced below.
    Structs = [v for v in G.values() if isinstance(v,type) and issubclass(v,Base)]
    # Re-create BaseStruct as a real ctypes.Structure, keeping only its helper
    # attributes (non-dunders plus __doc__/__repr__/__str__).
    Base = G['BaseStruct'] = type('BaseStruct', (Structure,), dict((k,v)
        for k,v in Base.__dict__.items() if
        k in ('__doc__', '__repr__', '__str__') or
        not (k.startswith('__') and k.endswith('__'))))
    class builder(object):
        # Stands in for ``self`` while each declarative __init__ is replayed:
        # every ``self.Field = 'TypeName'`` assignment lands in __setattr__ and
        # is recorded, in order, as a ctypes field.
        def __setattr__(self, fn, ft):
            ft = ft or fn  # '' means the field name doubles as the type name
            if ft in edvs: self.enums.append((len(self.fields), fn, edvs[ft]))
            self.fields.append((fn, T[ft]))
        def build(self, cls):
            # Write through __dict__ to bypass the recording __setattr__ above.
            self.__dict__['enums'] = []
            self.__dict__['fields'] = []
            cls.__dict__['__init__'](self)  # replay field declarations
            d = {'_fields_': tuple(self.fields)}
            if self.enums:
                enums = tuple(self.enums)
                def __init__(self, *args, **kwargs):
                    # Fill enum-typed fields not covered by positional args
                    # with their default values.
                    c = len(args)
                    for i,n,d in enums:
                        if i >= c: kwargs.setdefault(n, d)
                    Base.__init__(self, *args, **kwargs)
                d['__init__'] = __init__
            # Replace the declarative class with the generated Structure type.
            G[cls.__name__] = type(cls.__name__, (Base,), d)
    builder = builder()
    for cls in Structs: builder.build(cls)
_init()
|
dreamhost/akanda-rug | refs/heads/master | akanda/rug/test/unit/openvswitch/__init__.py | 49 | # Copyright 2014 DreamHost, LLC
#
# Author: DreamHost, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
|
terryyin/linkchecker | refs/heads/master | third_party/dnspython/dns/tsigkeyring.py | 9 | # Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""A place to store TSIG keys."""
import base64
import dns.name
def from_text(textring):
    """Convert a dictionary containing (textual DNS name, base64 secret) pairs
    into a binary keyring which has (dns.name.Name, binary secret) pairs.
    @rtype: dict"""
    keyring = {}
    for keytext in textring:
        keyname = dns.name.from_text(keytext)
        # base64.b64decode works on both Python 2 and 3; the previously used
        # base64.decodestring() was deprecated and removed in Python 3.9.
        secret = base64.b64decode(textring[keytext])
        keyring[keyname] = secret
    return keyring
def to_text(keyring):
    """Convert a dictionary containing (dns.name.Name, binary secret) pairs
    into a text keyring which has (textual DNS name, base64 secret) pairs.
    @rtype: dict"""
    textring = {}
    for keyname in keyring:
        # Bug fix: the dns.name module has no to_text() function; textual
        # conversion is a method on Name objects (keyname.to_text()).
        keytext = keyname.to_text()
        # base64.b64encode works on Python 2 and 3 (base64.encodestring was
        # deprecated and removed in 3.9) and emits a single line with no
        # trailing newline, which is the natural form for a key string.
        secret = base64.b64encode(keyring[keyname])
        textring[keytext] = secret
    return textring
|
simpeg/simpeg | refs/heads/em1d | tests/em/fdem/muinverse/test_muinverse.py | 1 | import discretize
from SimPEG import maps, utils, tests
from SimPEG.electromagnetics import frequency_domain as fdem
import numpy as np
from scipy.constants import mu_0
import unittest
MuMax = 50.0  # scale of the random relative-permeability model perturbation
TOL = 1e-8  # relative tolerance used in the adjoint (dot-product) test
EPS = 1e-10  # absolute floor when both inner products are close to zero
np.random.seed(105)  # fix the seed so the randomized tests are reproducible
def setupMeshModel():
    """Build a padded cylindrically symmetric mesh and random mu/sigma models.

    Returns
    -------
    mesh : discretize.CylMesh
        Cyl mesh with 20 core cells and 15 geometrically expanding padding
        cells (factor 1.3) radially and on both vertical ends.
    mu_model : numpy.ndarray
        Random relative-permeability model, 1 + MuMax * N(0, 1), one value
        per cell.
    sigma_model : numpy.ndarray
        Random conductivity model, N(0, 1), one value per cell.
    """
    cell = 10.0
    n_core = 20.0
    n_pad = 15.0
    pad_rate = 1.3
    hx = [(cell, n_core), (cell, n_pad, pad_rate)]
    hz = [(cell, n_pad, -pad_rate), (cell, n_core), (cell, n_pad, pad_rate)]
    mesh = discretize.CylMesh([hx, 1.0, hz], "0CC")
    mu_model = 1 + MuMax * np.random.randn(mesh.nC)
    sigma_model = np.random.randn(mesh.nC)
    return mesh, mu_model, sigma_model
def setupProblem(
    mesh,
    muMod,
    sigmaMod,
    prbtype="ElectricField",
    invertMui=False,
    sigmaInInversion=False,
    freq=1.0,
):
    """Assemble an FDEM simulation + survey for a mu (or 1/mu) inversion test.

    Parameters
    ----------
    mesh : discretize.CylMesh
        Simulation mesh.
    muMod : numpy.ndarray
        Relative-permeability model (one value per cell).
    sigmaMod : numpy.ndarray
        Conductivity model (log-conductivity when sigmaInInversion is True).
    prbtype : str
        One of "ElectricField", "MagneticFluxDensity", "MagneticField",
        "CurrentDensity" -- selects the FDEM formulation.
    invertMui : bool
        If True, parameterize the simulation through muiMap (invert for 1/mu).
    sigmaInInversion : bool
        If True, invert jointly for mu and sigma (model is stacked [mu, sigma]).
    freq : float
        Source frequency in Hz.

    Returns
    -------
    m0, prob, survey : starting model, simulation, and survey.
    """
    rxcomp = ["real", "imag"]

    # Receivers at all cell centers (y = 0 plane of the cyl mesh).
    loc = utils.ndgrid([mesh.vectorCCx, np.r_[0.0], mesh.vectorCCz])

    # E/B formulations have electric-type fields on edges (y) and magnetic-type
    # fields on faces (x, z); H/J formulations are the reverse.
    if prbtype in ["ElectricField", "MagneticFluxDensity"]:
        rxfields_y = ["ElectricField", "CurrentDensity"]
        rxfields_xz = ["MagneticFluxDensity", "MagneticField"]

    elif prbtype in ["MagneticField", "CurrentDensity"]:
        rxfields_y = ["MagneticFluxDensity", "MagneticField"]
        rxfields_xz = ["ElectricField", "CurrentDensity"]

    rxList_edge = [
        getattr(fdem.receivers, "Point{f}".format(f=f))(
            loc, component=comp, orientation=orient
        )
        for f in rxfields_y
        for comp in rxcomp
        for orient in ["y"]
    ]

    rxList_face = [
        getattr(fdem.receivers, "Point{f}".format(f=f))(
            loc, component=comp, orientation=orient
        )
        for f in rxfields_xz
        for comp in rxcomp
        for orient in ["x", "z"]
    ]

    rxList = rxList_edge + rxList_face

    src_loc = np.r_[0.0, 0.0, 0.0]

    if prbtype in ["ElectricField", "MagneticFluxDensity"]:
        src = fdem.sources.MagDipole(
            receiver_list=rxList, location=src_loc, frequency=freq
        )

    elif prbtype in ["MagneticField", "CurrentDensity"]:
        # Unit vertical-face source at the face closest to the origin.
        ind = utils.closestPoints(mesh, src_loc, "Fz") + mesh.vnF[0]
        vec = np.zeros(mesh.nF)
        vec[ind] = 1.0

        src = fdem.sources.RawVec_e(receiver_list=rxList, frequency=freq, s_e=vec)

    survey = fdem.Survey([src])

    if sigmaInInversion:
        # Joint inversion: model vector is [mu-relative, log-sigma].
        wires = maps.Wires(("mu", mesh.nC), ("sigma", mesh.nC))

        muMap = maps.MuRelative(mesh) * wires.mu
        sigmaMap = maps.ExpMap(mesh) * wires.sigma

        if invertMui:
            muiMap = maps.ReciprocalMap(mesh) * muMap
            prob = getattr(fdem, "Simulation3D{}".format(prbtype))(
                mesh, muiMap=muiMap, sigmaMap=sigmaMap
            )
            # m0 = np.hstack([1./muMod, sigmaMod])
        else:
            prob = getattr(fdem, "Simulation3D{}".format(prbtype))(
                mesh, muMap=muMap, sigmaMap=sigmaMap
            )
        # Starting model is in mu (the ReciprocalMap handles 1/mu internally).
        m0 = np.hstack([muMod, sigmaMod])

    else:
        muMap = maps.MuRelative(mesh)

        if invertMui:
            muiMap = maps.ReciprocalMap(mesh) * muMap
            prob = getattr(fdem, "Simulation3D{}".format(prbtype))(
                mesh, sigma=sigmaMod, muiMap=muiMap
            )
            # m0 = 1./muMod
        else:
            prob = getattr(fdem, "Simulation3D{}".format(prbtype))(
                mesh, sigma=sigmaMod, muMap=muMap
            )
        m0 = muMod

    prob.survey = survey

    return m0, prob, survey
class MuTests(unittest.TestCase):
    """Derivative (Jvec) and adjoint (Jtvec) tests for permeability inversion.

    Covers the four FDEM formulations (ElectricField, MagneticFluxDensity,
    CurrentDensity, MagneticField), optionally inverting for 1/mu
    (invertMui=True) and/or jointly for sigma (sigmaInInversion=True).
    """

    def setUpProb(
        self, prbtype="ElectricField", sigmaInInversion=False, invertMui=False
    ):
        """Build mesh, starting model, simulation, and survey for one test."""
        self.mesh, muMod, sigmaMod = setupMeshModel()
        self.m0, self.simulation, self.survey = setupProblem(
            self.mesh,
            muMod,
            sigmaMod,
            prbtype=prbtype,
            sigmaInInversion=sigmaInInversion,
            invertMui=invertMui,
        )

    def test_mats_cleared(self):
        """Cached mass matrices must be invalidated when the model changes."""
        self.setUpProb()
        u = self.simulation.fields(self.m0)

        # Touch each property so its cached value (stored as a private
        # attribute on the simulation) is actually built before we check
        # that a model update clears it.
        MeMu = self.simulation.MeMu  # noqa: F841
        MeMuI = self.simulation.MeMuI  # noqa: F841
        MfMui = self.simulation.MfMui  # noqa: F841
        MfMuiI = self.simulation.MfMuiI  # noqa: F841
        MeMuDeriv = self.simulation.MeMuDeriv(u[:, "e"])  # noqa: F841
        MfMuiDeriv = self.simulation.MfMuiDeriv(u[:, "b"])  # noqa: F841

        m1 = np.random.rand(self.mesh.nC)
        self.simulation.model = m1

        # BUG FIX: these assertions previously probed ``self`` (the TestCase),
        # which never has these attributes, so they passed vacuously. The
        # matrix caches live on the simulation object.
        self.assertTrue(getattr(self.simulation, "_MeMu", None) is None)
        self.assertTrue(getattr(self.simulation, "_MeMuI", None) is None)
        self.assertTrue(getattr(self.simulation, "_MfMui", None) is None)
        self.assertTrue(getattr(self.simulation, "_MfMuiI", None) is None)
        self.assertTrue(getattr(self.simulation, "_MfMuiDeriv", None) is None)
        self.assertTrue(getattr(self.simulation, "_MeMuDeriv", None) is None)

    def JvecTest(
        self, prbtype="ElectricField", sigmaInInversion=False, invertMui=False
    ):
        """Finite-difference check of the directional derivative Jvec."""
        self.setUpProb(prbtype, sigmaInInversion, invertMui)
        print("Testing Jvec {}".format(prbtype))

        def fun(x):
            return (
                self.simulation.dpred(x),
                lambda x: self.simulation.Jvec(self.m0, x),
            )

        return tests.checkDerivative(fun, self.m0, num=3, plotIt=False)

    def JtvecTest(
        self, prbtype="ElectricField", sigmaInInversion=False, invertMui=False
    ):
        """Adjoint (dot-product) test: v.(J m) == m.(J^T v) within tolerance."""
        self.setUpProb(prbtype, sigmaInInversion, invertMui)
        # BUG FIX: message previously said "Testing Jvec".
        print("Testing Jtvec {}".format(prbtype))

        m = np.random.rand(self.simulation.muMap.nP)
        v = np.random.rand(self.survey.nD)
        self.simulation.model = self.m0

        V1 = v.dot(self.simulation.Jvec(self.m0, m))
        V2 = m.dot(self.simulation.Jtvec(self.m0, v))
        diff = np.abs(V1 - V2)
        # Relative tolerance, with an absolute floor for near-zero products.
        tol = TOL * (np.abs(V1) + np.abs(V2)) / 2.0
        passed = (diff < tol) | (diff < EPS)
        print(
            "AdjointTest {prbtype} {v1} {v2} {diff} {tol} {passed}".format(
                prbtype=prbtype, v1=V1, v2=V2, diff=diff, tol=tol, passed=passed
            )
        )
        return passed

    # ---------- invert for mu only ----------
    def test_Jvec_e(self):
        self.assertTrue(self.JvecTest("ElectricField", sigmaInInversion=False))

    def test_Jvec_b(self):
        self.assertTrue(self.JvecTest("MagneticFluxDensity", sigmaInInversion=False))

    def test_Jvec_j(self):
        self.assertTrue(self.JvecTest("CurrentDensity", sigmaInInversion=False))

    def test_Jvec_h(self):
        self.assertTrue(self.JvecTest("MagneticField", sigmaInInversion=False))

    def test_Jtvec_e(self):
        self.assertTrue(self.JtvecTest("ElectricField", sigmaInInversion=False))

    def test_Jtvec_b(self):
        self.assertTrue(self.JtvecTest("MagneticFluxDensity", sigmaInInversion=False))

    def test_Jtvec_j(self):
        self.assertTrue(self.JtvecTest("CurrentDensity", sigmaInInversion=False))

    def test_Jtvec_h(self):
        self.assertTrue(self.JtvecTest("MagneticField", sigmaInInversion=False))

    # ---------- invert jointly for mu and sigma ----------
    def test_Jvec_musig_e(self):
        self.assertTrue(self.JvecTest("ElectricField", sigmaInInversion=True))

    def test_Jvec_musig_b(self):
        self.assertTrue(self.JvecTest("MagneticFluxDensity", sigmaInInversion=True))

    def test_Jvec_musig_j(self):
        self.assertTrue(self.JvecTest("CurrentDensity", sigmaInInversion=True))

    def test_Jvec_musig_h(self):
        self.assertTrue(self.JvecTest("MagneticField", sigmaInInversion=True))

    def test_Jtvec_musig_e(self):
        self.assertTrue(self.JtvecTest("ElectricField", sigmaInInversion=True))

    def test_Jtvec_musig_b(self):
        self.assertTrue(self.JtvecTest("MagneticFluxDensity", sigmaInInversion=True))

    def test_Jtvec_musig_j(self):
        self.assertTrue(self.JtvecTest("CurrentDensity", sigmaInInversion=True))

    def test_Jtvec_musig_h(self):
        self.assertTrue(self.JtvecTest("MagneticField", sigmaInInversion=True))

    # ---------- invert for 1/mu ----------
    def test_Jvec_e_mui(self):
        self.assertTrue(
            self.JvecTest("ElectricField", sigmaInInversion=False, invertMui=True)
        )

    def test_Jvec_b_mui(self):
        self.assertTrue(
            self.JvecTest("MagneticFluxDensity", sigmaInInversion=False, invertMui=True)
        )

    def test_Jvec_j_mui(self):
        self.assertTrue(
            self.JvecTest("CurrentDensity", sigmaInInversion=False, invertMui=True)
        )

    def test_Jvec_h_mui(self):
        self.assertTrue(
            self.JvecTest("MagneticField", sigmaInInversion=False, invertMui=True)
        )

    def test_Jtvec_e_mui(self):
        self.assertTrue(
            self.JtvecTest("ElectricField", sigmaInInversion=False, invertMui=True)
        )

    def test_Jtvec_b_mui(self):
        self.assertTrue(
            self.JtvecTest(
                "MagneticFluxDensity", sigmaInInversion=False, invertMui=True
            )
        )

    def test_Jtvec_j_mui(self):
        self.assertTrue(
            self.JtvecTest("CurrentDensity", sigmaInInversion=False, invertMui=True)
        )

    def test_Jtvec_h_mui(self):
        self.assertTrue(
            self.JtvecTest("MagneticField", sigmaInInversion=False, invertMui=True)
        )

    # ---------- invert jointly for 1/mu and sigma ----------
    def test_Jvec_musig_e_mui(self):
        self.assertTrue(
            self.JvecTest("ElectricField", sigmaInInversion=True, invertMui=True)
        )

    def test_Jvec_musig_b_mui(self):
        self.assertTrue(
            self.JvecTest("MagneticFluxDensity", sigmaInInversion=True, invertMui=True)
        )

    def test_Jvec_musig_j_mui(self):
        self.assertTrue(
            self.JvecTest("CurrentDensity", sigmaInInversion=True, invertMui=True)
        )

    def test_Jvec_musig_h_mui(self):
        self.assertTrue(
            self.JvecTest("MagneticField", sigmaInInversion=True, invertMui=True)
        )

    def test_Jtvec_musig_e_mui(self):
        self.assertTrue(
            self.JtvecTest("ElectricField", sigmaInInversion=True, invertMui=True)
        )

    def test_Jtvec_musig_b_mui(self):
        self.assertTrue(
            self.JtvecTest("MagneticFluxDensity", sigmaInInversion=True, invertMui=True)
        )

    def test_Jtvec_musig_j_mui(self):
        self.assertTrue(
            self.JtvecTest("CurrentDensity", sigmaInInversion=True, invertMui=True)
        )

    def test_Jtvec_musig_h_mui(self):
        self.assertTrue(
            self.JtvecTest("MagneticField", sigmaInInversion=True, invertMui=True)
        )
if __name__ == "__main__":
unittest.main()
|
wehkamp/ansible | refs/heads/devel | test/units/plugins/vars/__init__.py | 7690 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
|
rishikksh20/scikit-learn | refs/heads/master | sklearn/manifold/t_sne.py | 14 | # Author: Alexander Fabisch -- <afabisch@informatik.uni-bremen.de>
# Author: Christopher Moody <chrisemoody@gmail.com>
# Author: Nick Travers <nickt@squareup.com>
# License: BSD 3 clause (C) 2014
# This is the exact and Barnes-Hut t-SNE implementation. There are other
# modifications of the algorithm:
# * Fast Optimization for t-SNE:
# http://cseweb.ucsd.edu/~lvdmaaten/workshops/nips2010/papers/vandermaaten.pdf
import numpy as np
from scipy import linalg
import scipy.sparse as sp
from scipy.spatial.distance import pdist
from scipy.spatial.distance import squareform
from ..neighbors import BallTree
from ..base import BaseEstimator
from ..utils import check_array
from ..utils import check_random_state
from ..utils.extmath import _ravel
from ..decomposition import PCA
from ..metrics.pairwise import pairwise_distances
from . import _utils
from . import _barnes_hut_tsne
from ..utils.fixes import astype
from ..externals.six import string_types
from ..utils import deprecated
# Smallest representable double-precision increment; used throughout this
# module as a floor to keep probabilities strictly positive before log().
MACHINE_EPSILON = np.finfo(np.double).eps
def _joint_probabilities(distances, desired_perplexity, verbose):
    """Compute joint probabilities p_ij from distances.

    Parameters
    ----------
    distances : array, shape (n_samples * (n_samples-1) / 2,)
        Pairwise distances stored as a condensed matrix: the diagonal and
        duplicate entries are omitted and everything lives in one 1-D array.

    desired_perplexity : float
        Desired perplexity of the conditional probability distributions.

    verbose : int
        Verbosity level.

    Returns
    -------
    P : array, shape (n_samples * (n_samples-1) / 2,)
        Condensed joint probability matrix.
    """
    # Binary-search the per-point bandwidths so every conditional
    # distribution approximately matches the desired perplexity.
    distances = astype(distances, np.float32, copy=False)
    cond_P = _utils._binary_search_perplexity(
        distances, None, desired_perplexity, verbose)
    # Symmetrize and normalize into a joint distribution; clamp at
    # MACHINE_EPSILON so later log() calls never see a zero.
    sym_P = cond_P + cond_P.T
    normalizer = np.maximum(np.sum(sym_P), MACHINE_EPSILON)
    return np.maximum(squareform(sym_P) / normalizer, MACHINE_EPSILON)
def _joint_probabilities_nn(distances, neighbors, desired_perplexity, verbose):
    """Compute joint probabilities p_ij from distances using just nearest
    neighbors.

    Approximately equal to _joint_probabilities, but restricting the
    computation to the supplied nearest neighbors improves the cost from
    O(N) per point to O(uN).

    Parameters
    ----------
    distances : array, shape (n_samples * (n_samples-1) / 2,)
        Pairwise distances stored as a condensed matrix: the diagonal and
        duplicate entries are omitted and everything lives in one 1-D array.

    neighbors : array, shape (n_samples, K)
        Indices of the K nearest neighbors of every sample.

    desired_perplexity : float
        Desired perplexity of the conditional probability distributions.

    verbose : int
        Verbosity level.

    Returns
    -------
    P : array, shape (n_samples * (n_samples-1) / 2,)
        Condensed joint probability matrix.
    """
    # Binary-search the per-point bandwidths so every conditional
    # distribution approximately matches the desired perplexity.
    distances = astype(distances, np.float32, copy=False)
    neighbors = astype(neighbors, np.int64, copy=False)
    cond_P = _utils._binary_search_perplexity(
        distances, neighbors, desired_perplexity, verbose)
    assert np.all(np.isfinite(cond_P)), "All probabilities should be finite"
    # Symmetrize and normalize; clamp at MACHINE_EPSILON to avoid zeros.
    sym_P = cond_P + cond_P.T
    normalizer = np.maximum(np.sum(sym_P), MACHINE_EPSILON)
    P = np.maximum(squareform(sym_P) / normalizer, MACHINE_EPSILON)
    assert np.all(np.abs(P) <= 1.0)
    return P
def _kl_divergence(params, P, degrees_of_freedom, n_samples, n_components,
                   skip_num_points=0):
    """t-SNE objective function: gradient of the KL divergence
    of p_ijs and q_ijs and the absolute error.

    Parameters
    ----------
    params : array, shape (n_params,)
        Unraveled embedding.

    P : array, shape (n_samples * (n_samples-1) / 2,)
        Condensed joint probability matrix.

    degrees_of_freedom : float
        Degrees of freedom of the Student's-t distribution.

    n_samples : int
        Number of samples.

    n_components : int
        Dimension of the embedded space.

    skip_num_points : int (optional, default:0)
        Gradients are not computed for points with indices below
        `skip_num_points`; useful when transforming new data while keeping
        the old data fixed.

    Returns
    -------
    kl_divergence : float
        Kullback-Leibler divergence of p_ij and q_ij.

    grad : array, shape (n_params,)
        Unraveled gradient of the Kullback-Leibler divergence with respect
        to the embedding.
    """
    X_embedded = params.reshape(n_samples, n_components)

    # Q is a heavy-tailed distribution: Student's t-distribution.
    dist = pdist(X_embedded, "sqeuclidean")
    dist += 1.
    dist /= degrees_of_freedom
    dist **= (degrees_of_freedom + 1.0) / -2.0
    Q = np.maximum(dist / (2.0 * np.sum(dist)), MACHINE_EPSILON)

    # np.dot(x, y) is preferred over np.sum(x * y): it dispatches to BLAS.
    # Objective: C, the Kullback-Leibler divergence between P and Q.
    kl_divergence = 2.0 * np.dot(P, np.log(P / Q))

    # Gradient: dC/dY.
    grad = np.ndarray((n_samples, n_components))
    PQd = squareform((P - Q) * dist)
    for i in range(skip_num_points, n_samples):
        np.dot(_ravel(PQd[i]), X_embedded[i] - X_embedded, out=grad[i])
    grad = grad.ravel()
    grad *= 2.0 * (degrees_of_freedom + 1.0) / degrees_of_freedom
    return kl_divergence, grad
def _kl_divergence_error(params, P, neighbors, degrees_of_freedom, n_samples,
                         n_components):
    """t-SNE objective function: the absolute error of the
    KL divergence of p_ijs and q_ijs.

    Parameters
    ----------
    params : array, shape (n_params,)
        Unraveled embedding.

    P : array, shape (n_samples * (n_samples-1) / 2,)
        Condensed joint probability matrix (a square matrix is also
        accepted and condensed internally).

    neighbors : array (n_samples, K)
        Not used in the computation; kept only so this function matches
        the signature of the gradient function.

    degrees_of_freedom : float
        Degrees of freedom of the Student's-t distribution.

    n_samples : int
        Number of samples.

    n_components : int
        Dimension of the embedded space.

    Returns
    -------
    kl_divergence : float
        Kullback-Leibler divergence of p_ij and q_ij.
    """
    X_embedded = params.reshape(n_samples, n_components)

    # Q is a heavy-tailed distribution: Student's t-distribution.
    dist = pdist(X_embedded, "sqeuclidean")
    dist += 1.
    dist /= degrees_of_freedom
    dist **= (degrees_of_freedom + 1.0) / -2.0
    Q = np.maximum(dist / (2.0 * np.sum(dist)), MACHINE_EPSILON)

    # np.dot(x, y) is preferred over np.sum(x * y): it dispatches to BLAS.
    if P.ndim == 2:
        P = squareform(P)
    return 2.0 * np.dot(P, np.log(P / Q))
def _kl_divergence_bh(params, P, neighbors, degrees_of_freedom, n_samples,
                      n_components, angle=0.5, skip_num_points=0,
                      verbose=False):
    """t-SNE objective function: KL divergence of p_ijs and q_ijs.

    Uses Barnes-Hut tree methods to calculate the gradient, which runs in
    O(NlogN) instead of O(N^2).

    Parameters
    ----------
    params : array, shape (n_params,)
        Unraveled embedding.

    P : array, shape (n_samples * (n_samples-1) / 2,)
        Condensed joint probability matrix (a square matrix is also
        accepted).

    neighbors : int64 array, shape (n_samples, K)
        Element [i, j] is the index of the jth closest neighbor of point i.

    degrees_of_freedom : float
        Degrees of freedom of the Student's-t distribution.

    n_samples : int
        Number of samples.

    n_components : int
        Dimension of the embedded space.

    angle : float (default: 0.5)
        Speed/accuracy trade-off for Barnes-Hut: the angular size (theta)
        below which a distant node is summarized by a single point. Values
        in 0.2 - 0.8 behave similarly; smaller is slower, larger is less
        accurate.

    skip_num_points : int (optional, default:0)
        Gradients are not computed for points with indices below
        `skip_num_points`; useful when transforming new data while keeping
        the old data fixed.

    verbose : int
        Verbosity level.

    Returns
    -------
    kl_divergence : float
        Kullback-Leibler divergence of p_ij and q_ij.

    grad : array, shape (n_params,)
        Unraveled gradient of the Kullback-Leibler divergence with respect
        to the embedding.
    """
    params = astype(params, np.float32, copy=False)
    X_embedded = params.reshape(n_samples, n_components)
    neighbors = astype(neighbors, np.int64, copy=False)

    # The C routine expects a square float32 joint-probability matrix.
    if P.ndim == 1:
        sP = squareform(P).astype(np.float32)
    else:
        sP = P.astype(np.float32)

    grad = np.zeros(X_embedded.shape, dtype=np.float32)
    error = _barnes_hut_tsne.gradient(sP, X_embedded, neighbors,
                                      grad, angle, n_components, verbose,
                                      dof=degrees_of_freedom)
    grad = grad.ravel()
    grad *= 2.0 * (degrees_of_freedom + 1.0) / degrees_of_freedom
    return error, grad
def _gradient_descent(objective, p0, it, n_iter, objective_error=None,
n_iter_check=1, n_iter_without_progress=50,
momentum=0.5, learning_rate=1000.0, min_gain=0.01,
min_grad_norm=1e-7, min_error_diff=1e-7, verbose=0,
args=None, kwargs=None):
"""Batch gradient descent with momentum and individual gains.
Parameters
----------
objective : function or callable
Should return a tuple of cost and gradient for a given parameter
vector. When expensive to compute, the cost can optionally
be None and can be computed every n_iter_check steps using
the objective_error function.
p0 : array-like, shape (n_params,)
Initial parameter vector.
it : int
Current number of iterations (this function will be called more than
once during the optimization).
n_iter : int
Maximum number of gradient descent iterations.
n_iter_check : int
Number of iterations before evaluating the global error. If the error
is sufficiently low, we abort the optimization.
objective_error : function or callable
Should return a tuple of cost and gradient for a given parameter
vector.
n_iter_without_progress : int, optional (default: 30)
Maximum number of iterations without progress before we abort the
optimization.
momentum : float, within (0.0, 1.0), optional (default: 0.5)
The momentum generates a weight for previous gradients that decays
exponentially.
learning_rate : float, optional (default: 1000.0)
The learning rate should be extremely high for t-SNE! Values in the
range [100.0, 1000.0] are common.
min_gain : float, optional (default: 0.01)
Minimum individual gain for each parameter.
min_grad_norm : float, optional (default: 1e-7)
If the gradient norm is below this threshold, the optimization will
be aborted.
min_error_diff : float, optional (default: 1e-7)
If the absolute difference of two successive cost function values
is below this threshold, the optimization will be aborted.
verbose : int, optional (default: 0)
Verbosity level.
args : sequence
Arguments to pass to objective function.
kwargs : dict
Keyword arguments to pass to objective function.
Returns
-------
p : array, shape (n_params,)
Optimum parameters.
error : float
Optimum.
i : int
Last iteration.
"""
if args is None:
args = []
if kwargs is None:
kwargs = {}
p = p0.copy().ravel()
update = np.zeros_like(p)
gains = np.ones_like(p)
error = np.finfo(np.float).max
best_error = np.finfo(np.float).max
best_iter = 0
for i in range(it, n_iter):
new_error, grad = objective(p, *args, **kwargs)
grad_norm = linalg.norm(grad)
inc = update * grad >= 0.0
dec = np.invert(inc)
gains[inc] += 0.05
gains[dec] *= 0.95
np.clip(gains, min_gain, np.inf)
grad *= gains
update = momentum * update - learning_rate * grad
p += update
if (i + 1) % n_iter_check == 0:
if new_error is None:
new_error = objective_error(p, *args)
error_diff = np.abs(new_error - error)
error = new_error
if verbose >= 2:
m = "[t-SNE] Iteration %d: error = %.7f, gradient norm = %.7f"
print(m % (i + 1, error, grad_norm))
if error < best_error:
best_error = error
best_iter = i
elif i - best_iter > n_iter_without_progress:
if verbose >= 2:
print("[t-SNE] Iteration %d: did not make any progress "
"during the last %d episodes. Finished."
% (i + 1, n_iter_without_progress))
break
if grad_norm <= min_grad_norm:
if verbose >= 2:
print("[t-SNE] Iteration %d: gradient norm %f. Finished."
% (i + 1, grad_norm))
break
if error_diff <= min_error_diff:
if verbose >= 2:
m = "[t-SNE] Iteration %d: error difference %f. Finished."
print(m % (i + 1, error_diff))
break
if new_error is not None:
error = new_error
return p, error, i
def trustworthiness(X, X_embedded, n_neighbors=5, precomputed=False):
    r"""Expresses to what extent the local structure is retained.

    The trustworthiness is within [0, 1]. It is defined as

    .. math::

        T(k) = 1 - \frac{2}{nk (2n - 3k - 1)} \sum^n_{i=1}
            \sum_{j \in U^{(k)}_i (r(i, j) - k)}

    where :math:`r(i, j)` is the rank of the embedded datapoint j
    according to the pairwise distances between the embedded datapoints,
    :math:`U^{(k)}_i` is the set of points that are in the k nearest
    neighbors in the embedded space but not in the original space.

    * "Neighborhood Preservation in Nonlinear Projection Methods: An
      Experimental Study"
      J. Venna, S. Kaski
    * "Learning a Parametric Embedding by Preserving Local Structure"
      L.J.P. van der Maaten

    Parameters
    ----------
    X : array, shape (n_samples, n_features) or (n_samples, n_samples)
        If the metric is 'precomputed' X must be a square distance
        matrix. Otherwise it contains a sample per row.

    X_embedded : array, shape (n_samples, n_components)
        Embedding of the training data in low-dimensional space.

    n_neighbors : int, optional (default: 5)
        Number of neighbors k that will be considered.

    precomputed : bool, optional (default: False)
        Set this flag if X is a precomputed square distance matrix.

    Returns
    -------
    trustworthiness : float
        Trustworthiness of the low-dimensional embedding.
    """
    dist_X = X if precomputed else pairwise_distances(X, squared=True)
    dist_X_embedded = pairwise_distances(X_embedded, squared=True)

    # Full neighbor ranking in the original space; k nearest neighbors in
    # the embedded space (column 0 is skipped: with zero self-distance the
    # nearest "neighbor" of each point is the point itself).
    ind_X = np.argsort(dist_X, axis=1)
    ind_X_embedded = np.argsort(dist_X_embedded, axis=1)[:, 1:n_neighbors + 1]

    n_samples = X.shape[0]
    penalty = 0.0
    ranks = np.zeros(n_neighbors)
    for i in range(n_samples):
        for j in range(n_neighbors):
            # Rank (in the original space) of the jth embedded neighbor.
            ranks[j] = np.where(ind_X[i] == ind_X_embedded[i, j])[0][0]
        ranks -= n_neighbors
        # Only neighbors that fell outside the original top-k contribute.
        penalty += np.sum(ranks[ranks > 0])
    return 1.0 - penalty * (2.0 / (n_samples * n_neighbors *
                                   (2.0 * n_samples - 3.0 * n_neighbors - 1.0)))
class TSNE(BaseEstimator):
    """t-distributed Stochastic Neighbor Embedding.
    t-SNE [1] is a tool to visualize high-dimensional data. It converts
    similarities between data points to joint probabilities and tries
    to minimize the Kullback-Leibler divergence between the joint
    probabilities of the low-dimensional embedding and the
    high-dimensional data. t-SNE has a cost function that is not convex,
    i.e. with different initializations we can get different results.
    It is highly recommended to use another dimensionality reduction
    method (e.g. PCA for dense data or TruncatedSVD for sparse data)
    to reduce the number of dimensions to a reasonable amount (e.g. 50)
    if the number of features is very high. This will suppress some
    noise and speed up the computation of pairwise distances between
    samples. For more tips see Laurens van der Maaten's FAQ [2].
    Read more in the :ref:`User Guide <t_sne>`.
    Parameters
    ----------
    n_components : int, optional (default: 2)
        Dimension of the embedded space.
    perplexity : float, optional (default: 30)
        The perplexity is related to the number of nearest neighbors that
        is used in other manifold learning algorithms. Larger datasets
        usually require a larger perplexity. Consider selecting a value
        between 5 and 50. The choice is not extremely critical since t-SNE
        is quite insensitive to this parameter.
    early_exaggeration : float, optional (default: 4.0)
        Controls how tight natural clusters in the original space are in
        the embedded space and how much space will be between them. For
        larger values, the space between natural clusters will be larger
        in the embedded space. Again, the choice of this parameter is not
        very critical. If the cost function increases during initial
        optimization, the early exaggeration factor or the learning rate
        might be too high.
    learning_rate : float, optional (default: 1000)
        The learning rate can be a critical parameter. It should be
        between 100 and 1000. If the cost function increases during initial
        optimization, the early exaggeration factor or the learning rate
        might be too high. If the cost function gets stuck in a bad local
        minimum increasing the learning rate helps sometimes.
    n_iter : int, optional (default: 1000)
        Maximum number of iterations for the optimization. Should be at
        least 200.
    n_iter_without_progress : int, optional (default: 30)
        Only used if method='exact'
        Maximum number of iterations without progress before we abort the
        optimization. If method='barnes_hut' this parameter is fixed to
        a value of 30 and cannot be changed.
        .. versionadded:: 0.17
           parameter *n_iter_without_progress* to control stopping criteria.
    min_grad_norm : float, optional (default: 1e-7)
        Only used if method='exact'
        If the gradient norm is below this threshold, the optimization will
        be aborted. If method='barnes_hut' this parameter is fixed to a value
        of 1e-3 and cannot be changed.
    metric : string or callable, optional
        The metric to use when calculating distance between instances in a
        feature array. If metric is a string, it must be one of the options
        allowed by scipy.spatial.distance.pdist for its metric parameter, or
        a metric listed in pairwise.PAIRWISE_DISTANCE_FUNCTIONS.
        If metric is "precomputed", X is assumed to be a distance matrix.
        Alternatively, if metric is a callable function, it is called on each
        pair of instances (rows) and the resulting value recorded. The callable
        should take two arrays from X as input and return a value indicating
        the distance between them. The default is "euclidean" which is
        interpreted as squared euclidean distance.
    init : string or numpy array, optional (default: "random")
        Initialization of embedding. Possible options are 'random', 'pca',
        and a numpy array of shape (n_samples, n_components).
        PCA initialization cannot be used with precomputed distances and is
        usually more globally stable than random initialization.
    verbose : int, optional (default: 0)
        Verbosity level.
    random_state : int or RandomState instance or None (default)
        Pseudo Random Number generator seed control. If None, use the
        numpy.random singleton. Note that different initializations
        might result in different local minima of the cost function.
    method : string (default: 'barnes_hut')
        By default the gradient calculation algorithm uses Barnes-Hut
        approximation running in O(NlogN) time. method='exact'
        will run on the slower, but exact, algorithm in O(N^2) time. The
        exact algorithm should be used when nearest-neighbor errors need
        to be better than 3%. However, the exact method cannot scale to
        millions of examples.
        .. versionadded:: 0.17
           Approximate optimization *method* via the Barnes-Hut.
    angle : float (default: 0.5)
        Only used if method='barnes_hut'
        This is the trade-off between speed and accuracy for Barnes-Hut T-SNE.
        'angle' is the angular size (referred to as theta in [3]) of a distant
        node as measured from a point. If this size is below 'angle' then it is
        used as a summary node of all points contained within it.
        This method is not very sensitive to changes in this parameter
        in the range of 0.2 - 0.8. Angle less than 0.2 has quickly increasing
        computation time and angle greater 0.8 has quickly increasing error.
    Attributes
    ----------
    embedding_ : array-like, shape (n_samples, n_components)
        Stores the embedding vectors.
    kl_divergence_ : float
        Kullback-Leibler divergence after optimization.
    n_iter_ : int
        Number of iterations run.
    Examples
    --------
    >>> import numpy as np
    >>> from sklearn.manifold import TSNE
    >>> X = np.array([[0, 0, 0], [0, 1, 1], [1, 0, 1], [1, 1, 1]])
    >>> model = TSNE(n_components=2, random_state=0)
    >>> np.set_printoptions(suppress=True)
    >>> model.fit_transform(X) # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE
    array([[ 0.00017599,  0.00003993],
           [ 0.00009891,  0.00021913],
           [ 0.00018554, -0.00009357],
           [ 0.00009528, -0.00001407]])
    References
    ----------
    [1] van der Maaten, L.J.P.; Hinton, G.E. Visualizing High-Dimensional Data
        Using t-SNE. Journal of Machine Learning Research 9:2579-2605, 2008.
    [2] van der Maaten, L.J.P. t-Distributed Stochastic Neighbor Embedding
        http://homepage.tudelft.nl/19j49/t-SNE.html
    [3] L.J.P. van der Maaten. Accelerating t-SNE using Tree-Based Algorithms.
        Journal of Machine Learning Research 15(Oct):3221-3245, 2014.
        http://lvdmaaten.github.io/publications/papers/JMLR_2014.pdf
    """

    def __init__(self, n_components=2, perplexity=30.0,
                 early_exaggeration=4.0, learning_rate=1000.0, n_iter=1000,
                 n_iter_without_progress=30, min_grad_norm=1e-7,
                 metric="euclidean", init="random", verbose=0,
                 random_state=None, method='barnes_hut', angle=0.5):
        # Validate 'init' eagerly: it is the only constructor argument whose
        # misuse would otherwise surface much later, inside _fit.
        if not ((isinstance(init, string_types) and
                init in ["pca", "random"]) or
                isinstance(init, np.ndarray)):
            msg = "'init' must be 'pca', 'random', or a numpy array"
            raise ValueError(msg)
        self.n_components = n_components
        self.perplexity = perplexity
        self.early_exaggeration = early_exaggeration
        self.learning_rate = learning_rate
        self.n_iter = n_iter
        self.n_iter_without_progress = n_iter_without_progress
        self.min_grad_norm = min_grad_norm
        self.metric = metric
        self.init = init
        self.verbose = verbose
        self.random_state = random_state
        self.method = method
        self.angle = angle

    def _fit(self, X, skip_num_points=0):
        """Fit the model using X as training data.
        Note that sparse arrays can only be handled by method='exact'.
        It is recommended that you convert your sparse array to dense
        (e.g. `X.toarray()`) if it fits in memory, or otherwise using a
        dimensionality reduction technique (e.g. TruncatedSVD).
        Parameters
        ----------
        X : array, shape (n_samples, n_features) or (n_samples, n_samples)
            If the metric is 'precomputed' X must be a square distance
            matrix. Otherwise it contains a sample per row. Note that this
            when method='barnes_hut', X cannot be a sparse array and if need be
            will be converted to a 32 bit float array. Method='exact' allows
            sparse arrays and 64bit floating point inputs.
        skip_num_points : int (optional, default:0)
            This does not compute the gradient for points with indices below
            `skip_num_points`. This is useful when computing transforms of new
            data where you'd like to keep the old data fixed.
        """
        if self.method not in ['barnes_hut', 'exact']:
            raise ValueError("'method' must be 'barnes_hut' or 'exact'")
        if self.angle < 0.0 or self.angle > 1.0:
            raise ValueError("'angle' must be between 0.0 - 1.0")
        if self.method == 'barnes_hut' and sp.issparse(X):
            raise TypeError('A sparse matrix was passed, but dense '
                            'data is required for method="barnes_hut". Use '
                            'X.toarray() to convert to a dense numpy array if '
                            'the array is small enough for it to fit in '
                            'memory. Otherwise consider dimensionality '
                            'reduction techniques (e.g. TruncatedSVD)')
        else:
            X = check_array(X, accept_sparse=['csr', 'csc', 'coo'],
                            dtype=np.float64)
        random_state = check_random_state(self.random_state)

        if self.early_exaggeration < 1.0:
            raise ValueError("early_exaggeration must be at least 1, but is "
                             "%f" % self.early_exaggeration)
        if self.n_iter < 200:
            raise ValueError("n_iter should be at least 200")

        if self.metric == "precomputed":
            if isinstance(self.init, string_types) and self.init == 'pca':
                raise ValueError("The parameter init=\"pca\" cannot be used "
                                 "with metric=\"precomputed\".")
            if X.shape[0] != X.shape[1]:
                raise ValueError("X should be a square distance matrix")
            distances = X
        else:
            if self.verbose:
                print("[t-SNE] Computing pairwise distances...")
            if self.metric == "euclidean":
                distances = pairwise_distances(X, metric=self.metric,
                                               squared=True)
            else:
                distances = pairwise_distances(X, metric=self.metric)
            if not np.all(distances >= 0):
                raise ValueError("All distances should be positive, either "
                                 "the metric or precomputed distances given "
                                 "as X are not correct")

        # Degrees of freedom of the Student's t-distribution. The suggestion
        # degrees_of_freedom = n_components - 1 comes from
        # "Learning a Parametric Embedding by Preserving Local Structure"
        # Laurens van der Maaten, 2009.
        degrees_of_freedom = max(self.n_components - 1.0, 1)
        n_samples = X.shape[0]
        # the number of nearest neighbors to find
        k = min(n_samples - 1, int(3. * self.perplexity + 1))

        neighbors_nn = None
        if self.method == 'barnes_hut':
            if self.verbose:
                print("[t-SNE] Computing %i nearest neighbors..." % k)
            if self.metric == 'precomputed':
                # Use the precomputed distances to find
                # the k nearest neighbors and their distances
                neighbors_nn = np.argsort(distances, axis=1)[:, :k]
            else:
                # Find the nearest neighbors for every point
                bt = BallTree(X)
                # LvdM uses 3 * perplexity as the number of neighbors
                # And we add one to not count the data point itself
                # In the event that we have very small # of points
                # set the neighbors to n - 1
                distances_nn, neighbors_nn = bt.query(X, k=k + 1)
                neighbors_nn = neighbors_nn[:, 1:]
            P = _joint_probabilities_nn(distances, neighbors_nn,
                                        self.perplexity, self.verbose)
        else:
            P = _joint_probabilities(distances, self.perplexity, self.verbose)
        assert np.all(np.isfinite(P)), "All probabilities should be finite"
        assert np.all(P >= 0), "All probabilities should be zero or positive"
        # BUG FIX: the assertion message read "less or then equal to one".
        assert np.all(P <= 1), ("All probabilities should be less "
                                "than or equal to one")

        if isinstance(self.init, np.ndarray):
            X_embedded = self.init
        elif self.init == 'pca':
            pca = PCA(n_components=self.n_components, svd_solver='randomized',
                      random_state=random_state)
            X_embedded = pca.fit_transform(X)
        elif self.init == 'random':
            X_embedded = None
        else:
            raise ValueError("Unsupported initialization scheme: %s"
                             % self.init)

        return self._tsne(P, degrees_of_freedom, n_samples, random_state,
                          X_embedded=X_embedded,
                          neighbors=neighbors_nn,
                          skip_num_points=skip_num_points)

    @property
    @deprecated("Attribute n_iter_final was deprecated in version 0.19 and "
                "will be removed in 0.21. Use 'n_iter_' instead")
    def n_iter_final(self):
        # Backward-compatible alias for n_iter_.
        return self.n_iter_

    def _tsne(self, P, degrees_of_freedom, n_samples, random_state,
              X_embedded=None, neighbors=None, skip_num_points=0):
        """Runs t-SNE."""
        # t-SNE minimizes the Kullback-Leiber divergence of the Gaussians P
        # and the Student's t-distributions Q. The optimization algorithm that
        # we use is batch gradient descent with three stages:
        # * early exaggeration with momentum 0.5
        # * early exaggeration with momentum 0.8
        # * final optimization with momentum 0.8
        # The embedding is initialized with iid samples from Gaussians with
        # standard deviation 1e-4.

        if X_embedded is None:
            # Initialize embedding randomly
            X_embedded = 1e-4 * random_state.randn(n_samples,
                                                   self.n_components)
        params = X_embedded.ravel()

        opt_args = {"n_iter": 50, "momentum": 0.5, "it": 0,
                    "learning_rate": self.learning_rate,
                    "n_iter_without_progress": self.n_iter_without_progress,
                    "verbose": self.verbose, "n_iter_check": 25,
                    "kwargs": dict(skip_num_points=skip_num_points)}
        if self.method == 'barnes_hut':
            m = "Must provide an array of neighbors to use Barnes-Hut"
            assert neighbors is not None, m
            obj_func = _kl_divergence_bh
            objective_error = _kl_divergence_error
            sP = squareform(P).astype(np.float32)
            neighbors = neighbors.astype(np.int64)
            args = [sP, neighbors, degrees_of_freedom, n_samples,
                    self.n_components]
            opt_args['args'] = args
            opt_args['min_grad_norm'] = 1e-3
            opt_args['n_iter_without_progress'] = 30
            # Don't always calculate the cost since that calculation
            # can be nearly as expensive as the gradient
            opt_args['objective_error'] = objective_error
            opt_args['kwargs']['angle'] = self.angle
            opt_args['kwargs']['verbose'] = self.verbose
        else:
            obj_func = _kl_divergence
            opt_args['args'] = [P, degrees_of_freedom, n_samples,
                                self.n_components]
            opt_args['min_error_diff'] = 0.0
            opt_args['min_grad_norm'] = self.min_grad_norm

        # Early exaggeration
        P *= self.early_exaggeration

        params, kl_divergence, it = _gradient_descent(obj_func, params,
                                                      **opt_args)
        opt_args['n_iter'] = 100
        opt_args['momentum'] = 0.8
        opt_args['it'] = it + 1
        params, kl_divergence, it = _gradient_descent(obj_func, params,
                                                      **opt_args)
        if self.verbose:
            print("[t-SNE] KL divergence after %d iterations with early "
                  "exaggeration: %f" % (it + 1, kl_divergence))

        # Final optimization
        P /= self.early_exaggeration
        opt_args['n_iter'] = self.n_iter
        opt_args['it'] = it + 1
        params, error, it = _gradient_descent(obj_func, params, **opt_args)

        # BUG FIX: the verbose message and kl_divergence_ previously used the
        # stale early-exaggeration value instead of the final-phase error.
        if self.verbose:
            print("[t-SNE] Error after %d iterations: %f"
                  % (it + 1, error))
        # BUG FIX: save the iteration count of the *final* optimization phase;
        # it was previously recorded after early exaggeration and never
        # updated.
        self.n_iter_ = it

        X_embedded = params.reshape(n_samples, self.n_components)
        self.kl_divergence_ = error

        return X_embedded

    def fit_transform(self, X, y=None):
        """Fit X into an embedded space and return that transformed
        output.
        Parameters
        ----------
        X : array, shape (n_samples, n_features) or (n_samples, n_samples)
            If the metric is 'precomputed' X must be a square distance
            matrix. Otherwise it contains a sample per row.
        y : Ignored
            Present for scikit-learn API consistency.
        Returns
        -------
        X_new : array, shape (n_samples, n_components)
            Embedding of the training data in low-dimensional space.
        """
        embedding = self._fit(X)
        self.embedding_ = embedding
        return self.embedding_

    def fit(self, X, y=None):
        """Fit X into an embedded space.
        Parameters
        ----------
        X : array, shape (n_samples, n_features) or (n_samples, n_samples)
            If the metric is 'precomputed' X must be a square distance
            matrix. Otherwise it contains a sample per row. If the method
            is 'exact', X may be a sparse matrix of type 'csr', 'csc'
            or 'coo'.
        y : Ignored
            Present for scikit-learn API consistency.
        """
        self.fit_transform(X)
        return self
|
skerit/shotfactory | refs/heads/master | shotfactory04/image/hashmatch.py | 4 | # browsershots.org - Test your web design in different browsers
# Copyright (C) 2007 Johann C. Rocholl <johann@browsershots.org>
#
# Browsershots is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# Browsershots is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Efficient overlap matching for tall screenshots.
"""
# Subversion keyword metadata (expanded by the version control system).
__revision__ = "$Rev$"
__date__ = "$Date$"
__author__ = "$Author$"
import re
# Bytes in one horizontal marker sample: 64 pixels at 3 bytes (RGB) each.
STEP = 3*64

# Matches a whitespace-joined PPM header: "P<n> <width> <height> <maxval>".
header_match = re.compile(r'(P\d) (\d+) (\d+) (\d+)').match
def read_ppm_header(infile):
    """
    Read a PPM file header and return magic, width, height, maxval.
    """
    collected = []
    while True:
        line = infile.readline()
        # Strip PPM comments: everything from '#' to the end of the line.
        comment_start = line.find('#')
        if comment_start > -1:
            line = line[:comment_start]
        line = line.strip()
        if not line:
            continue
        collected.append(line)
        # The four header fields may be spread over several lines; try to
        # parse after every non-empty line.
        match = header_match(' '.join(collected))
        if match:
            return (match.group(1), int(match.group(2)),
                    int(match.group(3)), int(match.group(4)))
        elif len(collected) >= 4:
            raise SyntaxError("could not parse PPM header")
def debug_values(hashtable, minimum = 1):
    """
    Print a hash table sorted by value.
    >>> debug_values({'a': 1, 'b': 3, 'c': 2}, 2)
    2 c
    3 b
    """
    # Pair each value with its key so that sorting orders primarily by value
    # (Python 2: zip returns a list, which supports in-place sort).
    keys = hashtable.keys()
    values = hashtable.values()
    pairs = zip(values, keys)
    pairs.sort()
    # Only report entries whose value reaches the threshold.
    for value, key in pairs:
        if value >= minimum:
            print value, key
def build_hash(pixels, start, height, row_skip):
    """
    Build a dict from a vertical column of detail markers.
    Non-unique markers will be removed.
    """
    # A marker is the concatenation of two vertically adjacent STEP-byte
    # row samples taken from the same column; positions maps marker -> row.
    positions = {}
    frequencies = {}
    frequencies_get = frequencies.get
    previous = pixels[start:start+STEP]
    for y in range(1, height):
        start += row_skip
        this = pixels[start:start+STEP]
        marker = previous + this
        previous = this
        frequencies[marker] = frequencies_get(marker, 0) + 1
        positions[marker] = y
    # Drop markers seen more than once: only unique markers identify an
    # unambiguous vertical position.
    positions_pop = positions.pop
    for marker, counter in frequencies.iteritems():
        if counter > 1:
            positions_pop(marker)
    return positions
def match_markers(pixels, start, height, row_skip, positions, votes):
    """
    Match markers and collect votes for different offset positions.
    """
    # Bind dict lookups to locals: these run once per image row.
    lookup = positions.get
    tally = votes.get
    above = pixels[start:start+STEP]
    for y in range(1, height):
        start += row_skip
        current = pixels[start:start+STEP]
        marker = above + current
        above = current
        matched_row = lookup(marker, -1)
        if matched_row > -1:
            # The same unique marker appears in both images; vote for the
            # vertical offset implied by its two positions.
            offset = matched_row - y
            votes[offset] = tally(offset, 0) + 1
def winner(votes, minimum):
    """
    Get the offset with the most votes, but 0 only if no other option exists.
    All entries with less than minimum votes will be ignored.
    >>> winner({0:0, 1:1, 2:2, 3:3}, 1)
    3
    >>> winner({0:100, 1:1, 2:2, 3:3}, 1)
    3
    >>> winner({0:100, 1:1, 2:2, 3:3}, 0)
    3
    >>> winner({0:100, 1:1, 2:2, 3:3}, 4)
    0
    >>> winner({}, 1)
    0
    """
    # Offset 0 (and negatives) never compete; 0 is only the fallback.
    best_count = minimum - 1
    best_offset = 0
    for offset, count in votes.items():
        if offset > 0 and count > best_count:
            best_count = count
            best_offset = offset
    return best_offset
def find_offset(filename1, filename2):
    """
    Find the best vertical match between two PPM files.
    Return the offset in pixels (0 when no reliable match exists).
    """
    # 'with' guarantees both handles are closed; the original leaked them.
    with open(filename1, 'rb') as infile1, open(filename2, 'rb') as infile2:
        header1 = read_ppm_header(infile1)
        header2 = read_ppm_header(infile2)
        # Both images must be raw 8-bit PPMs (P6, maxval 255) of equal width.
        assert header1[0] == header2[0] == 'P6'
        assert header1[3] == header2[3] == 255
        assert header1[1] == header2[1]
        width = header1[1]
        height1 = header1[2]
        height2 = header2[2]
        pixels1 = infile1.read()
        pixels2 = infile2.read()
    # print width*height1*3, len(pixels1), width*height2*3, len(pixels2)
    row_skip = 3*width
    votes = {}
    # Sample one vertical strip every STEP bytes across the row.
    for start in range(0, row_skip, STEP):
        positions = build_hash(pixels1, start, height1, row_skip)
        match_markers(pixels2, start, height2, row_skip, positions, votes)
    # debug_values(votes, minimum = 1)
    # Require roughly one vote per sampled column before trusting an offset.
    # (// is identical to / for ints in Python 2 and stays an int on Python 3.)
    return winner(votes, 3*width//STEP)
if __name__ == '__main__':
    # Running this module directly executes the embedded doctests.
    import doctest
    doctest.testmod()
|
davidrobles/mlnd-capstone-code | refs/heads/master | experiments/play_tic_tac_toe.py | 1 | from capstone.game.games import TicTacToe
from capstone.game.players import RandPlayer
from capstone.game.utils import play_match
game = TicTacToe()
players = [RandPlayer(), RandPlayer()]
play_match(game, players)
|
xiroV/Algorithms_Example | refs/heads/master | Kadane's/Python/Kadane.py | 11 | def kadane(A):
max_so_far = max_ending = 0
for x in A:
max_ending = max(0, max_ending + x)
max_so_far = max(max_so_far, max_ending)
return max_so_far
# Demo (Python 2 print statement): expected answer is 4-1-2+1+5 = 7.
A = [-2, -3, 4, -1, -2, 1, 5, -3]
print "Maximum contiguous sum is", kadane(A)
|
raptorjr/xbmc | refs/heads/master | lib/gtest/test/gtest_test_utils.py | 1100 | #!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test utilities for Google C++ Testing Framework."""
__author__ = 'wan@google.com (Zhanyong Wan)'
import atexit
import os
import shutil
import sys
import tempfile
import unittest
_test_module = unittest
# Suppresses the 'Import not at the top of the file' lint complaint.
# pylint: disable-msg=C6204
try:
  import subprocess
  _SUBPROCESS_MODULE_AVAILABLE = True
except:
  # subprocess is only available from Python 2.4 on; fall back to popen2.
  import popen2
  _SUBPROCESS_MODULE_AVAILABLE = False
# pylint: enable-msg=C6204
# Name of the env var through which Google Test configures XML output.
GTEST_OUTPUT_VAR_NAME = 'GTEST_OUTPUT'
# Host platform flags, detected once at import time.
IS_WINDOWS = os.name == 'nt'
IS_CYGWIN = os.name == 'posix' and 'CYGWIN' in os.uname()[0]
# The environment variable for specifying the path to the premature-exit file.
PREMATURE_EXIT_FILE_ENV_VAR = 'TEST_PREMATURE_EXIT_FILE'
# Module-level snapshot of the environment, mutated via SetEnvVar().
environ = os.environ.copy()
def SetEnvVar(env_var, value):
  """Sets env_var to value in the module's environ; unsets it when value is None."""
  if value is None:
    # Removing an absent variable is a no-op.
    environ.pop(env_var, None)
  else:
    environ[env_var] = value
# Here we expose a class from a particular module, depending on the
# environment. The comment suppresses the 'Invalid variable name' lint
# complaint.
TestCase = _test_module.TestCase # pylint: disable-msg=C6409
# Initially maps a flag to its default value. After
# _ParseAndStripGTestFlags() is called, maps a flag to its actual value.
_flag_map = {'source_dir': os.path.dirname(sys.argv[0]),
             'build_dir': os.path.dirname(sys.argv[0])}
# Guards against parsing sys.argv more than once.
_gtest_flags_are_parsed = False
def _ParseAndStripGTestFlags(argv):
  """Parses and strips Google Test flags from argv.  This is idempotent."""
  # Suppresses the lint complaint about a global variable since we need it
  # here to maintain module-wide state.
  global _gtest_flags_are_parsed  # pylint: disable-msg=W0603
  if _gtest_flags_are_parsed:
    return
  _gtest_flags_are_parsed = True
  for flag in _flag_map:
    # The environment variable overrides the default value.
    if flag.upper() in os.environ:
      _flag_map[flag] = os.environ[flag.upper()]
    # The command line flag overrides the environment variable.
    # argv is mutated in place so the stripped flags never reach unittest.
    i = 1  # Skips the program name.
    while i < len(argv):
      prefix = '--' + flag + '='
      if argv[i].startswith(prefix):
        _flag_map[flag] = argv[i][len(prefix):]
        del argv[i]
        break
      else:
        # We don't increment i in case we just found a --gtest_* flag
        # and removed it from argv.
        i += 1
def GetFlag(flag):
  """Returns the value of the given flag ('source_dir' or 'build_dir')."""
  # In case GetFlag() is called before Main(), we always call
  # _ParseAndStripGTestFlags() here to make sure the --gtest_* flags
  # are parsed.
  _ParseAndStripGTestFlags(sys.argv)
  return _flag_map[flag]
def GetSourceDir():
  """Returns the absolute path of the directory where the .py files are."""
  return os.path.abspath(GetFlag('source_dir'))
def GetBuildDir():
  """Returns the absolute path of the directory where the test binaries are."""
  return os.path.abspath(GetFlag('build_dir'))
# Lazily-created scratch directory, removed automatically at interpreter exit.
_temp_dir = None
def _RemoveTempDir():
  # Best-effort cleanup; ignore_errors keeps atexit from raising.
  if _temp_dir:
    shutil.rmtree(_temp_dir, ignore_errors=True)
atexit.register(_RemoveTempDir)
def GetTempDir():
  """Returns a directory for temporary files, creating it on first use."""
  global _temp_dir
  if not _temp_dir:
    _temp_dir = tempfile.mkdtemp()
  return _temp_dir
def GetTestExecutablePath(executable_name, build_dir=None):
  """Returns the absolute path of the test binary given its name.
  The function will print a message and abort the program if the resulting file
  doesn't exist.
  Args:
    executable_name: name of the test binary that the test script runs.
    build_dir:       directory where to look for executables, by default
                     the result of GetBuildDir().
  Returns:
    The absolute path of the test binary.
  """
  path = os.path.abspath(os.path.join(build_dir or GetBuildDir(),
                                      executable_name))
  # Windows/Cygwin binaries carry an .exe suffix.
  if (IS_WINDOWS or IS_CYGWIN) and not path.endswith('.exe'):
    path += '.exe'
  if not os.path.exists(path):
    message = (
        'Unable to find the test binary. Please make sure to provide path\n'
        'to the binary via the --build_dir flag or the BUILD_DIR\n'
        'environment variable.')
    print >> sys.stderr, message
    sys.exit(1)
  return path
def GetExitStatus(exit_code):
  """Returns the argument to exit(), or -1 if exit() wasn't called.

  Args:
    exit_code: the result value of os.system(command).
  """
  if os.name == 'nt':
    # On Windows, os.system() already returns the exit() argument directly.
    return exit_code
  # On Unix, exit_code is a raw wait status word that must be decoded.
  if not os.WIFEXITED(exit_code):
    # Terminated by a signal rather than exit().
    return -1
  return os.WEXITSTATUS(exit_code)
class Subprocess:
  """Runs a child process and records its combined output and exit status.

  Prefers the subprocess module (Python 2.4+); falls back to popen2 on
  older interpreters (e.g. Mac OS X 10.4's Python 2.3).
  """
  def __init__(self, command, working_dir=None, capture_stderr=True, env=None):
    """Changes into a specified directory, if provided, and executes a command.
    Restores the old directory afterwards.
    Args:
      command:        The command to run, in the form of sys.argv.
      working_dir:    The directory to change into.
      capture_stderr: Determines whether to capture stderr in the output member
                      or to discard it.
      env:            Dictionary with environment to pass to the subprocess.
    Returns:
      An object that represents outcome of the executed process. It has the
      following attributes:
        terminated_by_signal   True iff the child process has been terminated
                               by a signal.
        signal                 Sygnal that terminated the child process.
        exited                 True iff the child process exited normally.
        exit_code              The code with which the child process exited.
        output                 Child process's stdout and stderr output
                               combined in a string.
    """
    # The subprocess module is the preferrable way of running programs
    # since it is available and behaves consistently on all platforms,
    # including Windows. But it is only available starting in python 2.4.
    # In earlier python versions, we revert to the popen2 module, which is
    # available in python 2.0 and later but doesn't provide required
    # functionality (Popen4) under Windows. This allows us to support Mac
    # OS X 10.4 Tiger, which has python 2.3 installed.
    if _SUBPROCESS_MODULE_AVAILABLE:
      if capture_stderr:
        stderr = subprocess.STDOUT
      else:
        stderr = subprocess.PIPE
      p = subprocess.Popen(command,
                           stdout=subprocess.PIPE, stderr=stderr,
                           cwd=working_dir, universal_newlines=True, env=env)
      # communicate returns a tuple with the file obect for the child's
      # output.
      self.output = p.communicate()[0]
      self._return_code = p.returncode
    else:
      old_dir = os.getcwd()
      def _ReplaceEnvDict(dest, src):
        # Changes made by os.environ.clear are not inheritable by child
        # processes until Python 2.6. To produce inheritable changes we have
        # to delete environment items with the del statement.
        for key in dest.keys():
          del dest[key]
        dest.update(src)
      # When 'env' is not None, backup the environment variables and replace
      # them with the passed 'env'. When 'env' is None, we simply use the
      # current 'os.environ' for compatibility with the subprocess.Popen
      # semantics used above.
      if env is not None:
        old_environ = os.environ.copy()
        _ReplaceEnvDict(os.environ, env)
      try:
        if working_dir is not None:
          os.chdir(working_dir)
        if capture_stderr:
          p = popen2.Popen4(command)
        else:
          p = popen2.Popen3(command)
        p.tochild.close()
        self.output = p.fromchild.read()
        ret_code = p.wait()
      finally:
        # Always restore cwd and environment, even if the child failed.
        os.chdir(old_dir)
        # Restore the old environment variables
        # if they were replaced.
        if env is not None:
          _ReplaceEnvDict(os.environ, old_environ)
      # Converts ret_code to match the semantics of
      # subprocess.Popen.returncode.
      if os.WIFSIGNALED(ret_code):
        self._return_code = -os.WTERMSIG(ret_code)
      else:  # os.WIFEXITED(ret_code) should return True here.
        self._return_code = os.WEXITSTATUS(ret_code)
    # Negative return codes encode "killed by signal N" (subprocess semantics).
    if self._return_code < 0:
      self.terminated_by_signal = True
      self.exited = False
      self.signal = -self._return_code
    else:
      self.terminated_by_signal = False
      self.exited = True
      self.exit_code = self._return_code
def Main():
  """Runs the unit test."""
  # We must call _ParseAndStripGTestFlags() before calling
  # unittest.main().  Otherwise the latter will be confused by the
  # --gtest_* flags.
  _ParseAndStripGTestFlags(sys.argv)
  # The tested binaries should not be writing XML output files unless the
  # script explicitly instructs them to.
  # TODO(vladl@google.com): Move this into Subprocess when we implement
  # passing environment into it as a parameter.
  if GTEST_OUTPUT_VAR_NAME in os.environ:
    del os.environ[GTEST_OUTPUT_VAR_NAME]
  _test_module.main()
|
bd339/servo | refs/heads/master | tests/wpt/web-platform-tests/mixed-content/generic/expect.py | 95 | import json, os, urllib, urlparse
def redirect(url, response):
    """Write a raw 301 redirect to `url`, bypassing default header handling."""
    response.add_required_headers = False
    writer = response.writer
    writer.write_status(301)
    writer.write_header("access-control-allow-origin", "*")
    writer.write_header("location", url)
    writer.end_headers()
    writer.write("")
def create_redirect_url(request, swap_scheme = False):
    """Rebuild the request URL as a redirect target.

    With swap_scheme, flips http<->https and retargets the port to the
    server's configured port for the new scheme.  The "redirection" query
    parameter is always dropped to avoid redirect loops.
    """
    parsed = urlparse.urlsplit(request.url)
    target_scheme = parsed.scheme
    target_netloc = parsed.netloc
    if swap_scheme:
        target_scheme = "http" if parsed.scheme == "https" else "https"
        host = parsed.netloc.split(':')[0]
        port = request.server.config["ports"][target_scheme][0]
        target_netloc = ":".join([host, str(port)])
    # Remove "redirection" from query to avoid redirect loops.
    query = dict(urlparse.parse_qsl(parsed.query))
    assert "redirection" in query
    query.pop("redirection")
    return urlparse.urlunsplit(urlparse.SplitResult(
        scheme = target_scheme,
        netloc = target_netloc,
        path = parsed.path,
        query = urllib.urlencode(query),
        fragment = None))
def main(request, response):
    """Serve the mixed-content test helper: optional redirect, then either
    store/check a stash marker or serve a small resource of the requested
    content type."""
    # Optionally bounce the request before serving anything.
    if "redirection" in request.GET:
        redirection = request.GET["redirection"]
        if redirection == "no-redirect":
            pass
        elif redirection == "keep-scheme-redirect":
            redirect(create_redirect_url(request, swap_scheme=False), response)
            return
        elif redirection == "swap-scheme-redirect":
            redirect(create_redirect_url(request, swap_scheme=True), response)
            return
        else:
            raise ValueError ("Invalid redirect type: %s" % redirection)
    content_type = "text/plain"
    response_data = ""
    if "action" in request.GET:
        action = request.GET["action"]
        if "content_type" in request.GET:
            content_type = request.GET["content_type"]
        key = request.GET["key"]
        stash = request.server.stash
        # NOTE(review): when "path" IS supplied, the trailing [0] takes the
        # first *character* of its value; request.GET.get("path",
        # request.url.split('?')[0]) looks like the intent -- confirm upstream.
        path = request.GET.get("path", request.url.split('?'))[0]
        if action == "put":
            value = request.GET["value"]
            # take() first so a stale entry cannot collide with the put().
            stash.take(key=key, path=path)
            stash.put(key=key, value=value, path=path)
            response_data = json.dumps({"status": "success", "result": key})
        elif action == "purge":
            value = stash.take(key=key, path=path)
            # Serve a minimal valid resource of the requested type.
            if content_type == "image/png":
                response_data = open(os.path.join(request.doc_root,
                                                  "images",
                                                  "smiley.png"), "rb").read()
            elif content_type == "audio/mpeg":
                response_data = open(os.path.join(request.doc_root,
                                                  "media",
                                                  "sound_5.oga"), "rb").read()
            elif content_type == "video/mp4":
                response_data = open(os.path.join(request.doc_root,
                                                  "media",
                                                  "movie_5.mp4"), "rb").read()
            elif content_type == "application/javascript":
                response_data = open(os.path.join(request.doc_root,
                                                  "mixed-content",
                                                  "generic",
                                                  "worker.js"), "rb").read()
            else:
                response_data = "/* purged */"
        elif action == "take":
            # A marker still present means the earlier purge never ran.
            value = stash.take(key=key, path=path)
            if value is None:
                status = "allowed"
            else:
                status = "blocked"
            response_data = json.dumps({"status": status, "result": value})
    # Write the response manually so no default headers sneak in.
    response.add_required_headers = False
    response.writer.write_status(200)
    response.writer.write_header("content-type", content_type)
    response.writer.write_header("cache-control", "no-cache; must-revalidate")
    response.writer.end_headers()
    response.writer.write(response_data)
|
saydulk/django | refs/heads/master | django/conf/locale/el/formats.py | 446 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'd/m/Y'
TIME_FORMAT = 'P'
DATETIME_FORMAT = 'd/m/Y P'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j F'
SHORT_DATE_FORMAT = 'd/m/Y'
SHORT_DATETIME_FORMAT = 'd/m/Y P'
FIRST_DAY_OF_WEEK = 0 # Sunday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
'%d/%m/%Y', '%d/%m/%y', '%Y-%m-%d', # '25/10/2006', '25/10/06', '2006-10-25',
]
DATETIME_INPUT_FORMATS = [
'%d/%m/%Y %H:%M:%S', # '25/10/2006 14:30:59'
'%d/%m/%Y %H:%M:%S.%f', # '25/10/2006 14:30:59.000200'
'%d/%m/%Y %H:%M', # '25/10/2006 14:30'
'%d/%m/%Y', # '25/10/2006'
'%d/%m/%y %H:%M:%S', # '25/10/06 14:30:59'
'%d/%m/%y %H:%M:%S.%f', # '25/10/06 14:30:59.000200'
'%d/%m/%y %H:%M', # '25/10/06 14:30'
'%d/%m/%y', # '25/10/06'
'%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
'%Y-%m-%d %H:%M:%S.%f', # '2006-10-25 14:30:59.000200'
'%Y-%m-%d %H:%M', # '2006-10-25 14:30'
'%Y-%m-%d', # '2006-10-25'
]
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '\xa0' # non-breaking space
NUMBER_GROUPING = 3
|
sdispater/eloquent | refs/heads/master | tests/connections/__init__.py | 803 | # -*- coding: utf-8 -*-
|
joshuahoman/vivisect | refs/heads/master | envi/archs/z80/regs.py | 27 | '''
Register definition for the z80 architecture
'''
import envi.registers as e_reg
# (name, width-in-bits) for each full z80 register.
z80regs = [
    ('AF', 16),
    ('BC', 16),
    ('DE', 16),
    ('HL', 16),
    ('IX', 16),
    ('IY', 16),
    ('PC', 16),
    ('SP', 16),
    ('I', 8),
    ('R', 8),
]
l = locals()
# Injects REG_AF, REG_BC, ... index constants into this module's namespace;
# the z80meta table below relies on those generated names.
e_reg.addLocalEnums(l, z80regs)
# (meta name, parent register index, bit offset, bit width): the 8-bit
# halves of the 16-bit register pairs.
z80meta = [
    ('A', REG_AF, 8, 8),
    ('B', REG_BC, 8, 8),
    ('C', REG_BC, 0, 8),
    ('D', REG_DE, 8, 8),
    ('E', REG_DE, 0, 8),
    ('F', REG_AF, 0, 8),
    ('H', REG_HL, 8, 8),
    ('L', REG_HL, 0, 8),
]
e_reg.addLocalMetas(l, z80meta)
class z80RegisterContext(e_reg.RegisterContext):
    """Register context for the z80, wired to the tables defined above."""
    def __init__(self):
        e_reg.RegisterContext.__init__(self)
        self.loadRegDef(z80regs)
        self.loadRegMetas(z80meta)
        # PC is the program counter, SP the stack pointer.
        self.setRegisterIndexes(REG_PC, REG_SP)
# Shared module-level context instance.
regctx = z80RegisterContext()
|
Team-Huawei/android_kernel_huawei_msm8909 | refs/heads/cm-14.1 | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/EventClass.py | 4653 | # EventClass.py
#
# This is a library defining some events types classes, which could
# be used by other scripts to analyzing the perf samples.
#
# Currently there are just a few classes defined for examples,
# PerfEvent is the base class for all perf event sample, PebsEvent
# is a HW base Intel x86 PEBS event, and user could add more SW/HW
# event classes based on requirements.
import struct
# Event types, user could add more here
EVTYPE_GENERIC = 0
EVTYPE_PEBS = 1 # Basic PEBS event
EVTYPE_PEBS_LL = 2 # PEBS event with load latency info
EVTYPE_IBS = 3
# (EVTYPE_IBS is defined for future use; nothing below constructs it.)
#
# Currently we don't have good way to tell the event type, but by
# the size of raw buffer, raw PEBS event with load latency data's
# size is 176 bytes, while the pure PEBS event's size is 144 bytes.
#
def create_event(name, comm, dso, symbol, raw_buf):
        """Factory: choose the event class from the raw buffer's size."""
        size = len(raw_buf)
        if size == 144:
                return PebsEvent(name, comm, dso, symbol, raw_buf)
        if size == 176:
                return PebsNHM(name, comm, dso, symbol, raw_buf)
        return PerfEvent(name, comm, dso, symbol, raw_buf)
class PerfEvent(object):
        # Class-wide count of all perf event samples created so far.
        event_num = 0
        def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_GENERIC):
                self.name = name
                self.comm = comm
                self.dso = dso
                self.symbol = symbol
                self.raw_buf = raw_buf
                self.ev_type = ev_type
                PerfEvent.event_num += 1
        def show(self):
                # Python 2 print statement.
                print "PMU event: name=%12s, symbol=%24s, comm=%8s, dso=%12s" % (self.name, self.symbol, self.comm, self.dso)
#
# Basic Intel PEBS (Precise Event-based Sampling) event, whose raw buffer
# contains the context info when that event happened: the EFLAGS and
# linear IP info, as well as all the registers.
#
class PebsEvent(PerfEvent):
        # Class-wide count of basic PEBS samples created so far.
        pebs_num = 0
        def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_PEBS):
                # First 80 bytes of the raw buffer: ten native-order u64
                # values holding the sampled register state.
                tmp_buf=raw_buf[0:80]
                flags, ip, ax, bx, cx, dx, si, di, bp, sp = struct.unpack('QQQQQQQQQQ', tmp_buf)
                self.flags = flags
                self.ip = ip
                self.ax = ax
                self.bx = bx
                self.cx = cx
                self.dx = dx
                self.si = si
                self.di = di
                self.bp = bp
                self.sp = sp
                PerfEvent.__init__(self, name, comm, dso, symbol, raw_buf, ev_type)
                PebsEvent.pebs_num += 1
                del tmp_buf
#
# Intel Nehalem and Westmere support PEBS plus Load Latency info which lie
# in the four 64 bit words write after the PEBS data:
# Status: records the IA32_PERF_GLOBAL_STATUS register value
# DLA: Data Linear Address (EIP)
# DSE: Data Source Encoding, where the latency happens, hit or miss
# in L1/L2/L3 or IO operations
# LAT: the actual latency in cycles
#
class PebsNHM(PebsEvent):
        # Class-wide count of PEBS-LL (Nehalem/Westmere) samples created.
        pebs_nhm_num = 0
        def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_PEBS_LL):
                # Bytes 144..176: four native-order u64 values with the
                # load-latency info (status, DLA, DSE, latency in cycles).
                tmp_buf=raw_buf[144:176]
                status, dla, dse, lat = struct.unpack('QQQQ', tmp_buf)
                self.status = status
                self.dla = dla
                self.dse = dse
                self.lat = lat
                PebsEvent.__init__(self, name, comm, dso, symbol, raw_buf, ev_type)
                PebsNHM.pebs_nhm_num += 1
                del tmp_buf
|
infobip/infobip-api-python-client | refs/heads/master | infobip/api/model/sms/mt/send/Message.py | 1 | # -*- coding: utf-8 -*-
"""This is a generated class and is not intended for modification!
"""
from datetime import datetime
from infobip.util.models import DefaultObject, serializable
from infobip.api.model.Destination import Destination
from infobip.api.model.sms.mt.send.Language import Language
from infobip.api.model.sms.mt.send.binary.BinaryContent import BinaryContent
from infobip.api.model.sms.mt.send.DeliveryTimeWindow import DeliveryTimeWindow
class Message(DefaultObject):
    """Single outgoing SMS message for the send API.

    Generated model: each property maps a snake_case attribute onto the
    API's camelCase JSON field via @serializable, and every set_* helper
    returns self so calls can be chained.  Do not edit by hand.
    """
    @property
    @serializable(name="from", type=unicode)
    def from_(self):
        """
        Property is of type: unicode
        """
        return self.get_field_value("from_")
    @from_.setter
    def from_(self, from_):
        """
        Property is of type: unicode
        """
        self.set_field_value("from_", from_)
    def set_from_(self, from_):
        self.from_ = from_
        return self
    @property
    @serializable(name="to", type=unicode)
    def to(self):
        """
        Property is a list of: unicode
        """
        return self.get_field_value("to")
    @to.setter
    def to(self, to):
        """
        Property is a list of: unicode
        """
        self.set_field_value("to", to)
    def set_to(self, to):
        self.to = to
        return self
    @property
    @serializable(name="destinations", type=Destination)
    def destinations(self):
        """
        Property is a list of: Destination
        """
        return self.get_field_value("destinations")
    @destinations.setter
    def destinations(self, destinations):
        """
        Property is a list of: Destination
        """
        self.set_field_value("destinations", destinations)
    def set_destinations(self, destinations):
        self.destinations = destinations
        return self
    @property
    @serializable(name="text", type=unicode)
    def text(self):
        """
        Property is of type: unicode
        """
        return self.get_field_value("text")
    @text.setter
    def text(self, text):
        """
        Property is of type: unicode
        """
        self.set_field_value("text", text)
    def set_text(self, text):
        self.text = text
        return self
    @property
    @serializable(name="binary", type=BinaryContent)
    def binary(self):
        """
        Property is of type: BinaryContent
        """
        return self.get_field_value("binary")
    @binary.setter
    def binary(self, binary):
        """
        Property is of type: BinaryContent
        """
        self.set_field_value("binary", binary)
    def set_binary(self, binary):
        self.binary = binary
        return self
    @property
    @serializable(name="flash", type=bool)
    def flash(self):
        """
        Property is of type: bool
        """
        return self.get_field_value("flash")
    @flash.setter
    def flash(self, flash):
        """
        Property is of type: bool
        """
        self.set_field_value("flash", flash)
    def set_flash(self, flash):
        self.flash = flash
        return self
    @property
    @serializable(name="language", type=Language)
    def language(self):
        """
        Property is of type: Language
        """
        return self.get_field_value("language")
    @language.setter
    def language(self, language):
        """
        Property is of type: Language
        """
        self.set_field_value("language", language)
    def set_language(self, language):
        self.language = language
        return self
    @property
    @serializable(name="transliteration", type=unicode)
    def transliteration(self):
        """
        Property is of type: unicode
        """
        return self.get_field_value("transliteration")
    @transliteration.setter
    def transliteration(self, transliteration):
        """
        Property is of type: unicode
        """
        self.set_field_value("transliteration", transliteration)
    def set_transliteration(self, transliteration):
        self.transliteration = transliteration
        return self
    @property
    @serializable(name="notify", type=bool)
    def notify(self):
        """
        Property is of type: bool
        """
        return self.get_field_value("notify")
    @notify.setter
    def notify(self, notify):
        """
        Property is of type: bool
        """
        self.set_field_value("notify", notify)
    def set_notify(self, notify):
        self.notify = notify
        return self
    @property
    @serializable(name="intermediateReport", type=bool)
    def intermediate_report(self):
        """
        Property is of type: bool
        """
        return self.get_field_value("intermediate_report")
    @intermediate_report.setter
    def intermediate_report(self, intermediate_report):
        """
        Property is of type: bool
        """
        self.set_field_value("intermediate_report", intermediate_report)
    def set_intermediate_report(self, intermediate_report):
        self.intermediate_report = intermediate_report
        return self
    @property
    @serializable(name="notifyUrl", type=unicode)
    def notify_url(self):
        """
        Property is of type: unicode
        """
        return self.get_field_value("notify_url")
    @notify_url.setter
    def notify_url(self, notify_url):
        """
        Property is of type: unicode
        """
        self.set_field_value("notify_url", notify_url)
    def set_notify_url(self, notify_url):
        self.notify_url = notify_url
        return self
    @property
    @serializable(name="notifyContentType", type=unicode)
    def notify_content_type(self):
        """
        Property is of type: unicode
        """
        return self.get_field_value("notify_content_type")
    @notify_content_type.setter
    def notify_content_type(self, notify_content_type):
        """
        Property is of type: unicode
        """
        self.set_field_value("notify_content_type", notify_content_type)
    def set_notify_content_type(self, notify_content_type):
        self.notify_content_type = notify_content_type
        return self
    @property
    @serializable(name="callbackData", type=unicode)
    def callback_data(self):
        """
        Property is of type: unicode
        """
        return self.get_field_value("callback_data")
    @callback_data.setter
    def callback_data(self, callback_data):
        """
        Property is of type: unicode
        """
        self.set_field_value("callback_data", callback_data)
    def set_callback_data(self, callback_data):
        self.callback_data = callback_data
        return self
    @property
    @serializable(name="validityPeriod", type=long)
    def validity_period(self):
        """
        Property is of type: long
        """
        return self.get_field_value("validity_period")
    @validity_period.setter
    def validity_period(self, validity_period):
        """
        Property is of type: long
        """
        self.set_field_value("validity_period", validity_period)
    def set_validity_period(self, validity_period):
        self.validity_period = validity_period
        return self
    @property
    @serializable(name="sendAt", type=datetime)
    def send_at(self):
        """
        Property is of type: datetime
        """
        return self.get_field_value("send_at")
    @send_at.setter
    def send_at(self, send_at):
        """
        Property is of type: datetime
        """
        self.set_field_value("send_at", send_at)
    def set_send_at(self, send_at):
        self.send_at = send_at
        return self
    @property
    @serializable(name="deliveryTimeWindow", type=DeliveryTimeWindow)
    def delivery_time_window(self):
        """
        Property is of type: DeliveryTimeWindow
        """
        return self.get_field_value("delivery_time_window")
    @delivery_time_window.setter
    def delivery_time_window(self, delivery_time_window):
        """
        Property is of type: DeliveryTimeWindow
        """
        self.set_field_value("delivery_time_window", delivery_time_window)
    def set_delivery_time_window(self, delivery_time_window):
        self.delivery_time_window = delivery_time_window
        return self
    @property
    @serializable(name="campaignId", type=unicode)
    def campaign_id(self):
        """
        Property is of type: unicode
        """
        return self.get_field_value("campaign_id")
    @campaign_id.setter
    def campaign_id(self, campaign_id):
        """
        Property is of type: unicode
        """
        self.set_field_value("campaign_id", campaign_id)
    def set_campaign_id(self, campaign_id):
        self.campaign_id = campaign_id
        return self
    @property
    @serializable(name="operatorClientId", type=unicode)
    def operator_client_id(self):
        """
        Property is of type: unicode
        """
        return self.get_field_value("operator_client_id")
    @operator_client_id.setter
    def operator_client_id(self, operator_client_id):
        """
        Property is of type: unicode
        """
        self.set_field_value("operator_client_id", operator_client_id)
    def set_operator_client_id(self, operator_client_id):
        self.operator_client_id = operator_client_id
        return self
IPMITMO/statan | refs/heads/master | coala-bears/tests/scss/SCSSLintBearTest.py | 4 | from bears.scss.SCSSLintBear import SCSSLintBear
from coalib.testing.LocalBearTestHelper import verify_local_bear
good_file = """
.btn-primary {
&:hover {
background-color: darken($btn-primary-bg, 3%);
}
}
"""
bad_file = """
.btn-primary {
&:hover {
background-color: darken($btn-primary-bg, 3%)
}
"""
bad_file2 = '''
$value: 5px;
.foo {
padding: $value;
}
.bar {
margin: $value;
}
.foo.bar {
display: block;
}
'''
good_file2 = '''
$value: 5px;
.foo {
padding: $value;
}
.bar {
margin: $value;
}
.new-class {
display: block;
}
'''
SCSSLintBearTest = verify_local_bear(SCSSLintBear,
valid_files=(good_file, good_file2),
invalid_files=(bad_file, bad_file2))
SCSSLintBearChainedClassesTest = verify_local_bear(
SCSSLintBear,
valid_files=(good_file, good_file2),
invalid_files=(bad_file, bad_file2),
settings={'allow_chained_classes': True})
|
msabramo/pip | refs/heads/develop | pip/_vendor/requests/packages/chardet/hebrewprober.py | 2928 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Shy Shalom
# Portions created by the Initial Developer are Copyright (C) 2005
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .charsetprober import CharSetProber
from .constants import eNotMe, eDetecting
from .compat import wrap_ord
# This prober doesn't actually recognize a language or a charset.
# It is a helper prober for the use of the Hebrew model probers
### General ideas of the Hebrew charset recognition ###
#
# Four main charsets exist in Hebrew:
# "ISO-8859-8" - Visual Hebrew
# "windows-1255" - Logical Hebrew
# "ISO-8859-8-I" - Logical Hebrew
# "x-mac-hebrew" - ?? Logical Hebrew ??
#
# Both "ISO" charsets use a completely identical set of code points, whereas
# "windows-1255" and "x-mac-hebrew" are two different proper supersets of
# these code points. windows-1255 defines additional characters in the range
# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific
# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6.
# x-mac-hebrew defines similar additional code points but with a different
# mapping.
#
# As far as an average Hebrew text with no diacritics is concerned, all four
# charsets are identical with respect to code points. Meaning that for the
# main Hebrew alphabet, all four map the same values to all 27 Hebrew letters
# (including final letters).
#
# The dominant difference between these charsets is their directionality.
# "Visual" directionality means that the text is ordered as if the renderer is
# not aware of a BIDI rendering algorithm. The renderer sees the text and
# draws it from left to right. The text itself when ordered naturally is read
# backwards. A buffer of Visual Hebrew generally looks like so:
# "[last word of first line spelled backwards] [whole line ordered backwards
# and spelled backwards] [first word of first line spelled backwards]
# [end of line] [last word of second line] ... etc' "
# adding punctuation marks, numbers and English text to visual text is
# naturally also "visual" and from left to right.
#
# "Logical" directionality means the text is ordered "naturally" according to
# the order it is read. It is the responsibility of the renderer to display
# the text from right to left. A BIDI algorithm is used to place general
# punctuation marks, numbers and English text in the text.
#
# Texts in x-mac-hebrew are almost impossible to find on the Internet. From
# what little evidence I could find, it seems that its general directionality
# is Logical.
#
# To sum up all of the above, the Hebrew probing mechanism knows about two
# charsets:
# Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are
# backwards while line order is natural. For charset recognition purposes
# the line order is unimportant (In fact, for this implementation, even
# word order is unimportant).
# Logical Hebrew - "windows-1255" - normal, naturally ordered text.
#
# "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be
# specifically identified.
# "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew
# that contain special punctuation marks or diacritics is displayed with
# some unconverted characters showing as question marks. This problem might
# be corrected using another model prober for x-mac-hebrew. Due to the fact
# that x-mac-hebrew texts are so rare, writing another model prober isn't
# worth the effort and performance hit.
#
#### The Prober ####
#
# The prober is divided between two SBCharSetProbers and a HebrewProber,
# all of which are managed, created, fed data, inquired and deleted by the
# SBCSGroupProber. The two SBCharSetProbers identify that the text is in
# fact some kind of Hebrew, Logical or Visual. The final decision about which
# one is it is made by the HebrewProber by combining final-letter scores
# with the scores of the two SBCharSetProbers to produce a final answer.
#
# The SBCSGroupProber is responsible for stripping the original text of HTML
# tags, English characters, numbers, low-ASCII punctuation characters, spaces
# and new lines. It reduces any sequence of such characters to a single space.
# The buffer fed to each prober in the SBCS group prober is pure text in
# high-ASCII.
# The two SBCharSetProbers (model probers) share the same language model:
# Win1255Model.
# The first SBCharSetProber uses the model normally as any other
# SBCharSetProber does, to recognize windows-1255, upon which this model was
# built. The second SBCharSetProber is told to make the pair-of-letter
# lookup in the language model backwards. This in practice exactly simulates
# a visual Hebrew model using the windows-1255 logical Hebrew model.
#
# The HebrewProber is not using any language model. All it does is look for
# final-letter evidence suggesting the text is either logical Hebrew or visual
# Hebrew. Disjointed from the model probers, the results of the HebrewProber
# alone are meaningless. HebrewProber always returns 0.00 as confidence
# since it never identifies a charset by itself. Instead, the pointer to the
# HebrewProber is passed to the model probers as a helper "Name Prober".
# When the Group prober receives a positive identification from any prober,
# it asks for the name of the charset identified. If the prober queried is a
# Hebrew model prober, the model prober forwards the call to the
# HebrewProber to make the final decision. In the HebrewProber, the
# decision is made according to the final-letters scores maintained and Both
# model probers scores. The answer is returned in the form of the name of the
# charset identified, either "windows-1255" or "ISO-8859-8".
# windows-1255 / ISO-8859-8 code points of interest
FINAL_KAF = 0xea
NORMAL_KAF = 0xeb
FINAL_MEM = 0xed
NORMAL_MEM = 0xee
FINAL_NUN = 0xef
NORMAL_NUN = 0xf0
FINAL_PE = 0xf3
NORMAL_PE = 0xf4
FINAL_TSADI = 0xf5
NORMAL_TSADI = 0xf6
# Minimum Visual vs Logical final letter score difference.
# If the difference is below this, don't rely solely on the final letter score
# distance.
MIN_FINAL_CHAR_DISTANCE = 5
# Minimum Visual vs Logical model score difference.
# If the difference is below this, don't rely at all on the model score
# distance.
MIN_MODEL_DISTANCE = 0.01
VISUAL_HEBREW_NAME = "ISO-8859-8"
LOGICAL_HEBREW_NAME = "windows-1255"
class HebrewProber(CharSetProber):
    """Decide between Logical ("windows-1255") and Visual ("ISO-8859-8") Hebrew.

    This prober never identifies a charset on its own; it accumulates
    final-letter evidence while being fed data and, in get_charset_name(),
    combines that evidence with the confidences of the two windows-1255
    model probers registered via set_model_probers().
    """

    # frozensets give O(1) membership tests in the per-character hot loop of
    # feed(), instead of scanning a list that was rebuilt on every call.
    _FINAL_CHARS = frozenset([FINAL_KAF, FINAL_MEM, FINAL_NUN, FINAL_PE,
                              FINAL_TSADI])
    # The normal Tsadi is deliberately not treated as a Non-Final letter:
    # words like 'lechotet' (to chat) contain an apostrophe after the tsadi
    # which FilterWithoutEnglishLetters converts to a space, so a Non-Final
    # tsadi can legitimately appear at the end of a word.  Pe and Kaf rarely
    # show a similar behavior ('Pop', 'Winamp' and 'Mubarak' legally end
    # with a Non-Final Pe or Kaf), but keeping them as Non-Final evidence is
    # a net win since such words are quite rare.
    _NON_FINAL_CHARS = frozenset([NORMAL_KAF, NORMAL_MEM, NORMAL_NUN,
                                  NORMAL_PE])

    def __init__(self):
        CharSetProber.__init__(self)
        self._mLogicalProber = None
        self._mVisualProber = None
        self.reset()

    def reset(self):
        """Clear the accumulated scores and the word-boundary state."""
        self._mFinalCharLogicalScore = 0
        self._mFinalCharVisualScore = 0
        # The two last characters seen; initialized to space in order to
        # simulate a word delimiter at the beginning of the data.
        self._mPrev = ' '
        self._mBeforePrev = ' '

    def set_model_probers(self, logicalProber, visualProber):
        """Register the two model probers (owned by the group prober)."""
        self._mLogicalProber = logicalProber
        self._mVisualProber = visualProber

    def is_final(self, c):
        """Return True if *c* is one of the five Hebrew final letters."""
        return wrap_ord(c) in self._FINAL_CHARS

    def is_non_final(self, c):
        """Return True if *c* is a normal form that should not end a word."""
        return wrap_ord(c) in self._NON_FINAL_CHARS

    def feed(self, aBuf):
        """Accumulate final-letter evidence for the logical-visual decision.

        Cases scored (only at word boundaries; a final letter in the middle
        of a word is not an indication either way):
          1) word of 2+ letters ending with a final letter    -> +1 logical
          2) word of 2+ letters ending with a Non-Final letter -> +1 visual
          3) word of 2+ letters starting with a final letter  -> +1 visual
        """
        if self.get_state() == eNotMe:
            # Both model probers say it's not them. No reason to continue.
            return eNotMe

        # Filter out all 7-bit characters (replaced with spaces) so that
        # word boundary detection works properly.
        aBuf = self.filter_high_bit_only(aBuf)

        for cur in aBuf:
            if cur == ' ':
                # We stand on a space - a word just ended
                if self._mBeforePrev != ' ':
                    # next-to-last char was not a space so self._mPrev is
                    # not a 1 letter word
                    if self.is_final(self._mPrev):
                        # case (1) [-2:not space][-1:final letter][cur:space]
                        self._mFinalCharLogicalScore += 1
                    elif self.is_non_final(self._mPrev):
                        # case (2) [-2:not space][-1:Non-Final][cur:space]
                        self._mFinalCharVisualScore += 1
            else:
                # Not standing on a space
                if ((self._mBeforePrev == ' ') and
                        (self.is_final(self._mPrev)) and (cur != ' ')):
                    # case (3) [-2:space][-1:final letter][cur:not space]
                    self._mFinalCharVisualScore += 1
            self._mBeforePrev = self._mPrev
            self._mPrev = cur

        # Forever detecting, till the end or until both model probers
        # return eNotMe (handled above).
        return eDetecting

    def get_charset_name(self):
        """Make the decision: Logical or Visual Hebrew charset name."""
        # If the final letter score distance is dominant enough, rely on it.
        finalsub = self._mFinalCharLogicalScore - self._mFinalCharVisualScore
        if finalsub >= MIN_FINAL_CHAR_DISTANCE:
            return LOGICAL_HEBREW_NAME
        if finalsub <= -MIN_FINAL_CHAR_DISTANCE:
            return VISUAL_HEBREW_NAME

        # It's not dominant enough, try to rely on the model scores instead.
        modelsub = (self._mLogicalProber.get_confidence()
                    - self._mVisualProber.get_confidence())
        if modelsub > MIN_MODEL_DISTANCE:
            return LOGICAL_HEBREW_NAME
        if modelsub < -MIN_MODEL_DISTANCE:
            return VISUAL_HEBREW_NAME

        # Still no good, back to final letter distance, maybe it'll save
        # the day.
        if finalsub < 0.0:
            return VISUAL_HEBREW_NAME

        # (finalsub > 0 - Logical) or (don't know what to do) default to
        # Logical.
        return LOGICAL_HEBREW_NAME

    def get_state(self):
        """Remain active as long as either model prober is still active."""
        if (self._mLogicalProber.get_state() == eNotMe) and \
           (self._mVisualProber.get_state() == eNotMe):
            return eNotMe
        return eDetecting
|
seanfisk/powerline | refs/heads/develop | tests/vim.py | 15 | # vim:fileencoding=utf-8:noet
_log = []
vars = {}
vvars = {'version': 703}
_tabpage = 0
_mode = 'n'
_buf_purge_events = set()
options = {
'paste': 0,
'ambiwidth': 'single',
'columns': 80,
'encoding': 'utf-8',
}
_last_bufnr = 0
_highlights = {}
from collections import defaultdict as _defaultdict
_environ = _defaultdict(lambda: '')
del _defaultdict
_thread_id = None
def _set_thread_id():
global _thread_id
from threading import current_thread
_thread_id = current_thread().ident
# Assuming import is done from the main thread
_set_thread_id()
def _print_log():
for item in _log:
print (item)
_log[:] = ()
def _vim(func):
from functools import wraps
from threading import current_thread
@wraps(func)
def f(*args, **kwargs):
global _thread_id
if _thread_id != current_thread().ident:
raise RuntimeError('Accessing vim from separate threads is not allowed')
_log.append((func.__name__, args))
return func(*args, **kwargs)
return f
def _unicode(func):
from functools import wraps
import sys
if sys.version_info < (3,):
return func
@wraps(func)
def f(*args, **kwargs):
from powerline.lib.unicode import u
ret = func(*args, **kwargs)
if isinstance(ret, bytes):
ret = u(ret)
return ret
return f
class _Buffers(object):
    """Mock of vim's global buffer collection, keyed by buffer number.

    Every access goes through the ``@_vim`` decorator so it is logged and
    restricted to the thread the module was imported on, mimicking how the
    real vim module forbids access from other threads.
    """
    @_vim
    def __init__(self):
        # bufnr -> _Buffer mapping.
        self.d = {}

    @_vim
    def __len__(self):
        return len(self.d)

    @_vim
    def __getitem__(self, item):
        return self.d[item]

    @_vim
    def __setitem__(self, item, value):
        self.d[item] = value

    @_vim
    def __iter__(self):
        # Iteration yields the buffer objects (values), not the numbers.
        return iter(self.d.values())

    @_vim
    def __contains__(self, item):
        return item in self.d

    @_vim
    def _keys(self):
        # All known buffer numbers.
        return self.d.keys()

    @_vim
    def _pop(self, *args, **kwargs):
        return self.d.pop(*args, **kwargs)


# Module-level singleton mirroring the real ``vim.buffers`` object.
buffers = _Buffers()
class _ObjList(object):
@_vim
def __init__(self, objtype):
self.l = []
self.objtype = objtype
@_vim
def __getitem__(self, item):
return self.l[item - int(item > 0)]
@_vim
def __len__(self):
return len(self.l)
@_vim
def __iter__(self):
return iter(self.l)
@_vim
def _pop(self, idx):
obj = self.l.pop(idx - 1)
for moved_obj in self.l[idx - 1:]:
moved_obj.number -= 1
return obj
@_vim
def _append(self, *args, **kwargs):
return self.l.append(*args, **kwargs)
@_vim
def _new(self, *args, **kwargs):
number = len(self) + 1
new_obj = self.objtype(number, *args, **kwargs)
self._append(new_obj)
return new_obj
def _construct_result(r):
import sys
if sys.version_info < (3,):
return r
else:
if isinstance(r, str):
return r.encode('utf-8')
elif isinstance(r, list):
return [_construct_result(i) for i in r]
elif isinstance(r, dict):
return dict((
(_construct_result(k), _construct_result(v))
for k, v in r.items()
))
return r
def _str_func(func):
from functools import wraps
@wraps(func)
def f(*args, **kwargs):
return _construct_result(func(*args, **kwargs))
return f
def _log_print():
import sys
for entry in _log:
sys.stdout.write(repr(entry) + '\n')
_current_group = None
_on_wipeout = []
@_vim
def command(cmd):
global _current_group
cmd = cmd.lstrip()
if cmd.startswith('let g:'):
import re
varname, value = re.compile(r'^let g:(\w+)\s*=\s*(.*)').match(cmd).groups()
vars[varname] = value
elif cmd.startswith('hi '):
sp = cmd.split()
_highlights[sp[1]] = sp[2:]
elif cmd.startswith('augroup'):
augroup = cmd.partition(' ')[2]
if augroup.upper() == 'END':
_current_group = None
else:
_current_group = augroup
elif cmd.startswith('autocmd'):
rest = cmd.partition(' ')[2]
auevent, rest = rest.partition(' ')[::2]
pattern, aucmd = rest.partition(' ')[::2]
if auevent != 'BufWipeout' or pattern != '*':
raise NotImplementedError
import sys
if sys.version_info < (3,):
if not aucmd.startswith(':python '):
raise NotImplementedError
else:
if not aucmd.startswith(':python3 '):
raise NotImplementedError
_on_wipeout.append(aucmd.partition(' ')[2])
elif cmd.startswith('set '):
if cmd.startswith('set statusline='):
options['statusline'] = cmd[len('set statusline='):]
elif cmd.startswith('set tabline='):
options['tabline'] = cmd[len('set tabline='):]
else:
raise NotImplementedError(cmd)
else:
raise NotImplementedError(cmd)
@_vim
@_unicode
def eval(expr):
if expr.startswith('g:'):
return vars[expr[2:]]
elif expr.startswith('v:'):
return vvars[expr[2:]]
elif expr.startswith('&'):
return options[expr[1:]]
elif expr.startswith('$'):
return _environ[expr[1:]]
elif expr.startswith('PowerlineRegisterCachePurgerEvent'):
_buf_purge_events.add(expr[expr.find('"') + 1:expr.rfind('"') - 1])
return '0'
elif expr.startswith('exists('):
return '0'
elif expr.startswith('getwinvar('):
import re
match = re.match(r'^getwinvar\((\d+), "(\w+)"\)$', expr)
if not match:
raise NotImplementedError(expr)
winnr = int(match.group(1))
varname = match.group(2)
return _emul_getwinvar(winnr, varname)
elif expr.startswith('has_key('):
import re
match = re.match(r'^has_key\(getwinvar\((\d+), ""\), "(\w+)"\)$', expr)
if match:
winnr = int(match.group(1))
varname = match.group(2)
return 0 + (varname in current.tabpage.windows[winnr].vars)
else:
match = re.match(r'^has_key\(gettabwinvar\((\d+), (\d+), ""\), "(\w+)"\)$', expr)
if not match:
raise NotImplementedError(expr)
tabnr = int(match.group(1))
winnr = int(match.group(2))
varname = match.group(3)
return 0 + (varname in tabpages[tabnr].windows[winnr].vars)
elif expr == 'getbufvar("%", "NERDTreeRoot").path.str()':
import os
assert os.path.basename(current.buffer.name).startswith('NERD_tree_')
return '/usr/include'
elif expr == 'tabpagenr()':
return current.tabpage.number
elif expr == 'tabpagenr("$")':
return len(tabpages)
elif expr.startswith('tabpagewinnr('):
tabnr = int(expr[len('tabpagewinnr('):-1])
return tabpages[tabnr].window.number
elif expr.startswith('tabpagebuflist('):
import re
match = re.match(r'tabpagebuflist\((\d+)\)\[(\d+)\]', expr)
tabnr = int(match.group(1))
winnr = int(match.group(2)) + 1
return tabpages[tabnr].windows[winnr].buffer.number
elif expr.startswith('gettabwinvar('):
import re
match = re.match(r'gettabwinvar\((\d+), (\d+), "(\w+)"\)', expr)
tabnr = int(match.group(1))
winnr = int(match.group(2))
varname = match.group(3)
return tabpages[tabnr].windows[winnr].vars[varname]
elif expr.startswith('type(function('):
import re
match = re.match(r'^type\(function\("([^"]+)"\)\) == 2$', expr)
if not match:
raise NotImplementedError(expr)
return 0
raise NotImplementedError(expr)
@_vim
def bindeval(expr):
if expr == 'g:':
return vars
elif expr == '{}':
return {}
elif expr == '[]':
return []
import re
match = re.compile(r'^function\("([^"\\]+)"\)$').match(expr)
if match:
return globals()['_emul_' + match.group(1)]
else:
raise NotImplementedError
@_vim
@_str_func
def _emul_mode(*args):
if args and args[0]:
return _mode
else:
return _mode[0]
@_vim
@_str_func
def _emul_getbufvar(bufnr, varname):
import re
if varname[0] == '&':
if bufnr == '%':
bufnr = current.buffer.number
if bufnr not in buffers:
return ''
try:
return buffers[bufnr].options[varname[1:]]
except KeyError:
try:
return options[varname[1:]]
except KeyError:
return ''
elif re.match('^[a-zA-Z_]+$', varname):
if bufnr == '%':
bufnr = current.buffer.number
if bufnr not in buffers:
return ''
return buffers[bufnr].vars[varname]
raise NotImplementedError
@_vim
@_str_func
def _emul_getwinvar(winnr, varname):
return current.tabpage.windows[winnr].vars.get(varname, '')
@_vim
def _emul_setwinvar(winnr, varname, value):
current.tabpage.windows[winnr].vars[varname] = value
@_vim
def _emul_virtcol(expr):
if expr == '.':
return current.window.cursor[1] + 1
if isinstance(expr, list) and len(expr) == 3:
return expr[-2] + expr[-1]
raise NotImplementedError
_v_pos = None
@_vim
def _emul_getpos(expr):
if expr == '.':
return [0, current.window.cursor[0] + 1, current.window.cursor[1] + 1, 0]
if expr == 'v':
return _v_pos or [0, current.window.cursor[0] + 1, current.window.cursor[1] + 1, 0]
raise NotImplementedError
@_vim
@_str_func
def _emul_fnamemodify(path, modstring):
import os
_modifiers = {
'~': lambda path: path.replace(os.environ['HOME'].encode('utf-8'), b'~') if path.startswith(os.environ['HOME'].encode('utf-8')) else path,
'.': lambda path: (lambda tpath: path if tpath[:3] == b'..' + os.sep.encode() else tpath)(os.path.relpath(path)),
't': lambda path: os.path.basename(path),
'h': lambda path: os.path.dirname(path),
}
for mods in modstring.split(':')[1:]:
path = _modifiers[mods](path)
return path
@_vim
@_str_func
def _emul_expand(expr):
global _abuf
if expr == '<abuf>':
return _abuf or current.buffer.number
raise NotImplementedError
@_vim
def _emul_bufnr(expr):
if expr == '$':
return _last_bufnr
raise NotImplementedError
@_vim
def _emul_exists(ident):
if ident.startswith('g:'):
return ident[2:] in vars
elif ident.startswith(':'):
return 0
raise NotImplementedError
@_vim
def _emul_line2byte(line):
buflines = current.buffer._buf_lines
if line == len(buflines) + 1:
return sum((len(s) for s in buflines)) + 1
raise NotImplementedError
@_vim
def _emul_line(expr):
cursorline = current.window.cursor[0] + 1
numlines = len(current.buffer._buf_lines)
if expr == 'w0':
return max(cursorline - 5, 1)
if expr == 'w$':
return min(cursorline + 5, numlines)
raise NotImplementedError
@_vim
@_str_func
def _emul_strtrans(s):
# FIXME Do more replaces
return s.replace(b'\xFF', b'<ff>')
@_vim
@_str_func
def _emul_bufname(bufnr):
try:
return buffers[bufnr]._name or b''
except KeyError:
return b''
_window_id = 0
class _Window(object):
def __init__(self, number, buffer=None, cursor=(1, 0), width=80):
global _window_id
self.cursor = cursor
self.width = width
self.number = number
if buffer:
if type(buffer) is _Buffer:
self.buffer = buffer
else:
self.buffer = _Buffer(**buffer)
else:
self.buffer = _Buffer()
_window_id += 1
self._window_id = _window_id
self.options = {}
self.vars = {
'powerline_window_id': self._window_id,
}
def __repr__(self):
return '<window ' + str(self.number - 1) + '>'
class _Tabpage(object):
    """Mock vim tab page: an _ObjList of windows plus the active window."""
    def __init__(self, number):
        self.windows = _ObjList(_Window)
        # 1-based tab page number, matching vim conventions.
        self.number = number

    def _new_window(self, **kwargs):
        # Open a new window in this tab page and make it the current one.
        self.window = self.windows._new(**kwargs)
        return self.window

    def _close_window(self, winnr, open_window=True):
        # Close window *winnr* (1-based).  If the closed window was current
        # and others remain, the last window becomes current; if none remain
        # and open_window is set, a fresh window is opened in the globally
        # current tab page.
        curwinnr = self.window.number
        win = self.windows._pop(winnr)
        if self.windows and winnr == curwinnr:
            self.window = self.windows[-1]
        elif open_window:
            current.tabpage._new_window()
        return win

    def _close(self):
        # Close every window without reopening, remove this tab page from
        # the global list, and point the active tab index at the last tab.
        global _tabpage
        while self.windows:
            self._close_window(1, False)
        tabpages._pop(self.number)
        _tabpage = len(tabpages)
tabpages = _ObjList(_Tabpage)
_abuf = None
class _Buffer(object):
def __init__(self, name=None):
global _last_bufnr
_last_bufnr += 1
bufnr = _last_bufnr
self.number = bufnr
# FIXME Use unicode() for python-3
self.name = name
self.vars = {'changedtick': 1}
self.options = {
'modified': 0,
'readonly': 0,
'fileformat': 'unix',
'filetype': '',
'buftype': '',
'fileencoding': 'utf-8',
'textwidth': 80,
}
self._buf_lines = ['']
self._undostate = [self._buf_lines[:]]
self._undo_written = len(self._undostate)
buffers[bufnr] = self
@property
def name(self):
import sys
if sys.version_info < (3,):
return self._name
else:
return str(self._name, 'utf-8') if self._name else None
@name.setter
def name(self, name):
if name is None:
self._name = None
else:
import os
if type(name) is not bytes:
name = name.encode('utf-8')
if b':/' in name:
self._name = name
else:
self._name = os.path.abspath(name)
def __getitem__(self, line):
return self._buf_lines[line]
def __setitem__(self, line, value):
self.options['modified'] = 1
self.vars['changedtick'] += 1
self._buf_lines[line] = value
from copy import copy
self._undostate.append(copy(self._buf_lines))
def __setslice__(self, *args):
self.options['modified'] = 1
self.vars['changedtick'] += 1
self._buf_lines.__setslice__(*args)
from copy import copy
self._undostate.append(copy(self._buf_lines))
def __getslice__(self, *args):
return self._buf_lines.__getslice__(*args)
def __len__(self):
return len(self._buf_lines)
def __repr__(self):
return '<buffer ' + str(self.name) + '>'
def __del__(self):
global _abuf
bufnr = self.number
try:
import __main__
except ImportError:
pass
except RuntimeError:
# Module may have already been garbage-collected
pass
else:
if _on_wipeout:
_abuf = bufnr
try:
for event in _on_wipeout:
exec(event, __main__.__dict__)
finally:
_abuf = None
class _Current(object):
@property
def buffer(self):
return self.window.buffer
@property
def window(self):
return self.tabpage.window
@property
def tabpage(self):
return tabpages[_tabpage - 1]
current = _Current()
_dict = None
@_vim
def _init():
global _dict
if _dict:
return _dict
_dict = {}
for varname, value in globals().items():
if varname[0] != '_':
_dict[varname] = value
_tabnew()
return _dict
@_vim
def _get_segment_info():
mode_translations = {
chr(ord('V') - 0x40): '^V',
chr(ord('S') - 0x40): '^S',
}
mode = _mode
mode = mode_translations.get(mode, mode)
window = current.window
buffer = current.buffer
tabpage = current.tabpage
return {
'window': window,
'winnr': window.number,
'buffer': buffer,
'bufnr': buffer.number,
'tabpage': tabpage,
'tabnr': tabpage.number,
'window_id': window._window_id,
'mode': mode,
'encoding': options['encoding'],
}
@_vim
def _launch_event(event):
pass
@_vim
def _start_mode(mode):
global _mode
if mode == 'i':
_launch_event('InsertEnter')
elif _mode == 'i':
_launch_event('InsertLeave')
_mode = mode
@_vim
def _undo():
if len(current.buffer._undostate) == 1:
return
buffer = current.buffer
buffer._undostate.pop(-1)
buffer._buf_lines = buffer._undostate[-1]
if buffer._undo_written == len(buffer._undostate):
buffer.options['modified'] = 0
@_vim
def _edit(name=None):
if current.buffer.name is None:
buffer = current.buffer
buffer.name = name
else:
buffer = _Buffer(name)
current.window.buffer = buffer
@_vim
def _tabnew(name=None):
global windows
global _tabpage
tabpage = tabpages._new()
windows = tabpage.windows
_tabpage = len(tabpages)
_new(name)
return tabpage
@_vim
def _new(name=None):
current.tabpage._new_window(buffer={'name': name})
@_vim
def _split():
current.tabpage._new_window(buffer=current.buffer)
@_vim
def _close(winnr, wipe=True):
win = current.tabpage._close_window(winnr)
if wipe:
for w in current.tabpage.windows:
if w.buffer.number == win.buffer.number:
break
else:
_bw(win.buffer.number)
@_vim
def _bw(bufnr=None):
bufnr = bufnr or current.buffer.number
winnr = 1
for win in current.tabpage.windows:
if win.buffer.number == bufnr:
_close(winnr, wipe=False)
winnr += 1
buffers._pop(bufnr)
if not buffers:
_Buffer()
_b(max(buffers._keys()))
@_vim
def _b(bufnr):
current.window.buffer = buffers[bufnr]
@_vim
def _set_cursor(line, col):
current.window.cursor = (line, col)
if _mode == 'n':
_launch_event('CursorMoved')
elif _mode == 'i':
_launch_event('CursorMovedI')
@_vim
def _get_buffer():
return current.buffer
@_vim
def _set_bufoption(option, value, bufnr=None):
buffers[bufnr or current.buffer.number].options[option] = value
if option == 'filetype':
_launch_event('FileType')
class _WithNewBuffer(object):
def __init__(self, func, *args, **kwargs):
self.call = lambda: func(*args, **kwargs)
def __enter__(self):
self.call()
self.bufnr = current.buffer.number
return _get_segment_info()
def __exit__(self, *args):
_bw(self.bufnr)
@_vim
def _set_dict(d, new, setfunc=None):
if not setfunc:
def setfunc(k, v):
d[k] = v
old = {}
na = []
for k, v in new.items():
try:
old[k] = d[k]
except KeyError:
na.append(k)
setfunc(k, v)
return old, na
class _WithBufOption(object):
def __init__(self, **new):
self.new = new
def __enter__(self):
self.buffer = current.buffer
self.old = _set_dict(self.buffer.options, self.new, _set_bufoption)[0]
def __exit__(self, *args):
self.buffer.options.update(self.old)
class _WithMode(object):
def __init__(self, new):
self.new = new
def __enter__(self):
self.old = _mode
_start_mode(self.new)
return _get_segment_info()
def __exit__(self, *args):
_start_mode(self.old)
class _WithDict(object):
def __init__(self, d, **new):
self.new = new
self.d = d
def __enter__(self):
self.old, self.na = _set_dict(self.d, self.new)
def __exit__(self, *args):
self.d.update(self.old)
for k in self.na:
self.d.pop(k)
class _WithSplit(object):
def __enter__(self):
_split()
def __exit__(self, *args):
_close(2, wipe=False)
class _WithBufName(object):
def __init__(self, new):
self.new = new
def __enter__(self):
import os
buffer = current.buffer
self.buffer = buffer
self.old = buffer.name
buffer.name = self.new
def __exit__(self, *args):
self.buffer.name = self.old
class _WithNewTabPage(object):
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
def __enter__(self):
self.tab = _tabnew(*self.args, **self.kwargs)
def __exit__(self, *args):
self.tab._close()
class _WithGlobal(object):
def __init__(self, **kwargs):
self.kwargs = kwargs
def __enter__(self):
self.empty = object()
self.old = dict(((key, globals().get(key, self.empty)) for key in self.kwargs))
globals().update(self.kwargs)
def __exit__(self, *args):
for k, v in self.old.items():
if v is self.empty:
globals().pop(k, None)
else:
globals()[k] = v
@_vim
def _with(key, *args, **kwargs):
if key == 'buffer':
return _WithNewBuffer(_edit, *args, **kwargs)
elif key == 'bufname':
return _WithBufName(*args, **kwargs)
elif key == 'mode':
return _WithMode(*args, **kwargs)
elif key == 'bufoptions':
return _WithBufOption(**kwargs)
elif key == 'options':
return _WithDict(options, **kwargs)
elif key == 'globals':
return _WithDict(vars, **kwargs)
elif key == 'wvars':
return _WithDict(current.window.vars, **kwargs)
elif key == 'environ':
return _WithDict(_environ, **kwargs)
elif key == 'split':
return _WithSplit()
elif key == 'tabpage':
return _WithNewTabPage(*args, **kwargs)
elif key == 'vpos':
return _WithGlobal(_v_pos=[0, kwargs['line'], kwargs['col'], kwargs['off']])
class error(Exception):
pass
|
AlexMaskovyak/elasticsearch-py | refs/heads/master | test_elasticsearch/test_connection_pool.py | 13 | import time
from elasticsearch.connection_pool import ConnectionPool, RoundRobinSelector, DummyConnectionPool
from elasticsearch.exceptions import ImproperlyConfigured
from .test_cases import TestCase
class TestConnectionPool(TestCase):
    """Tests for ConnectionPool's dead/live connection bookkeeping.

    The deprecated ``assertEquals`` alias (removed in Python 3.12) has been
    replaced with ``assertEqual`` throughout; assertions are otherwise
    unchanged.  Two method names keep their historical typos ("availible",
    "limitted") so external test selections continue to match.
    """

    def test_dummy_cp_raises_exception_on_more_connections(self):
        # DummyConnectionPool accepts exactly one connection - not zero,
        # not two.
        self.assertRaises(ImproperlyConfigured, DummyConnectionPool, [])
        self.assertRaises(ImproperlyConfigured, DummyConnectionPool,
                          [object(), object()])

    def test_raises_exception_when_no_connections_defined(self):
        self.assertRaises(ImproperlyConfigured, ConnectionPool, [])

    def test_default_round_robin(self):
        pool = ConnectionPool([(x, {}) for x in range(100)])

        # 100 selections over 100 hosts must visit each host exactly once.
        connections = set()
        for _ in range(100):
            connections.add(pool.get_connection())
        self.assertEqual(connections, set(range(100)))

    def test_disable_shuffling(self):
        pool = ConnectionPool([(x, {}) for x in range(100)],
                              randomize_hosts=False)

        # With shuffling off, hosts are returned in their original order.
        connections = []
        for _ in range(100):
            connections.append(pool.get_connection())
        self.assertEqual(connections, list(range(100)))

    def test_selectors_have_access_to_connection_opts(self):
        class MySelector(RoundRobinSelector):
            def select(self, connections):
                return self.connection_opts[
                    super(MySelector, self).select(connections)]["actual"]
        pool = ConnectionPool([(x, {"actual": x * x}) for x in range(100)],
                              selector_class=MySelector,
                              randomize_hosts=False)

        connections = []
        for _ in range(100):
            connections.append(pool.get_connection())
        self.assertEqual(connections, [x * x for x in range(100)])

    def test_dead_nodes_are_removed_from_active_connections(self):
        pool = ConnectionPool([(x, {}) for x in range(100)])
        now = time.time()
        pool.mark_dead(42, now=now)

        self.assertEqual(99, len(pool.connections))
        self.assertEqual(1, pool.dead.qsize())
        # First failure: 60 second timeout before resurrection.
        self.assertEqual((now + 60, 42), pool.dead.get())

    def test_connection_is_skipped_when_dead(self):
        pool = ConnectionPool([(x, {}) for x in range(2)])
        pool.mark_dead(0)

        self.assertEqual(
            [1, 1, 1],
            [pool.get_connection(), pool.get_connection(),
             pool.get_connection()])

    def test_connection_is_forcibly_resurrected_when_no_live_ones_are_availible(self):
        pool = ConnectionPool([(x, {}) for x in range(2)])
        pool.dead_count[0] = 1
        pool.mark_dead(0)  # failed twice, longer timeout
        pool.mark_dead(1)  # failed the first time, first to be resurrected

        self.assertEqual([], pool.connections)
        self.assertEqual(1, pool.get_connection())
        self.assertEqual([1, ], pool.connections)

    def test_connection_is_resurrected_after_its_timeout(self):
        pool = ConnectionPool([(x, {}) for x in range(100)])
        now = time.time()
        # Marked dead 61 seconds ago: the 60 second timeout has elapsed.
        pool.mark_dead(42, now=now - 61)
        pool.get_connection()

        self.assertEqual(42, pool.connections[-1])
        self.assertEqual(100, len(pool.connections))

    def test_force_resurrect_always_returns_a_connection(self):
        pool = ConnectionPool([(0, {})])
        pool.connections = []

        self.assertEqual(0, pool.get_connection())
        self.assertEqual([], pool.connections)
        self.assertTrue(pool.dead.empty())

    def test_already_failed_connection_has_longer_timeout(self):
        pool = ConnectionPool([(x, {}) for x in range(100)])
        now = time.time()
        pool.dead_count[42] = 2
        pool.mark_dead(42, now=now)

        self.assertEqual(3, pool.dead_count[42])
        # Back-off grows with the failure count: here 4 minutes.
        self.assertEqual((now + 4 * 60, 42), pool.dead.get())

    def test_timeout_for_failed_connections_is_limitted(self):
        pool = ConnectionPool([(x, {}) for x in range(100)])
        now = time.time()
        pool.dead_count[42] = 245
        pool.mark_dead(42, now=now)

        self.assertEqual(246, pool.dead_count[42])
        # Back-off is capped at 32 minutes regardless of failure count.
        self.assertEqual((now + 32 * 60, 42), pool.dead.get())

    def test_dead_count_is_wiped_clean_for_connection_if_marked_live(self):
        pool = ConnectionPool([(x, {}) for x in range(100)])
        now = time.time()
        pool.dead_count[42] = 2
        pool.mark_dead(42, now=now)

        self.assertEqual(3, pool.dead_count[42])
        pool.mark_live(42)
        self.assertNotIn(42, pool.dead_count)
|
joshshadowfax/slask | refs/heads/master | plugins/hash.py | 2 | """!md5 <phrase> return an md5 hash for <phrase>"""
import md5
import re
def on_message(msg, server):
    """Reply with the hex MD5 digest of the phrase following "!md5".

    Returns None (no reply) when the message does not contain "!md5 ...".
    """
    text = msg.get("text", "")
    match = re.findall(r"!md5 (.*)", text)
    if not match:
        return
    # hashlib replaces the md5 module, which was deprecated in Python 2.5
    # and removed in Python 3. hashlib requires bytes, hence the encode.
    import hashlib
    return hashlib.md5(match[0].encode("utf-8")).hexdigest()
|
mclaughlin6464/pylearn2 | refs/heads/master | pylearn2/space/__init__.py | 34 | """
Classes that define how vector spaces are formatted
Most of our models can be viewed as linearly transforming
one vector space to another. These classes define how the
vector spaces should be represented as theano/numpy
variables.
For example, the VectorSpace class just represents a
vector space with a vector, and the model can transform
between spaces with a matrix multiply. The Conv2DSpace
represents a vector space as an image, and the model
can transform between spaces with a 2D convolution.
To make models as general as possible, models should be
written in terms of Spaces, rather than in terms of
numbers of hidden units, etc. The model should also be
written to transform between spaces using a generic
linear transformer from the pylearn2.linear module.
The Space class is needed so that the model can specify
what kinds of inputs it needs and what kinds of outputs
it will produce when communicating with other parts of
the library. The model also uses Space objects internally
to allocate parameters like hidden unit bias terms in
the right space.
"""
__authors__ = "Ian Goodfellow"
__copyright__ = "Copyright 2010-2012, Universite de Montreal"
__credits__ = ["Ian Goodfellow"]
__license__ = "3-clause BSD"
__maintainer__ = "LISA Lab"
__email__ = "pylearn-dev@googlegroups"
import functools
import warnings
import numpy as np
from theano.compat.six.moves import xrange
import theano
import theano.sparse
from theano import tensor
from theano.tensor import TensorType
from theano.gof.op import get_debug_values
from theano.sandbox.cuda.type import CudaNdarrayType
from pylearn2.utils import py_integer_types, safe_zip, sharedX, wraps
from pylearn2.format.target_format import OneHotFormatter
if theano.sparse.enable_sparse:
# We know scipy.sparse is available
import scipy.sparse
def _is_batch_all(batch, predicate):
"""
Implementation of is_symbolic_batch() and is_numeric_batch().
Returns True iff predicate() returns True for all components of
(possibly composite) batch.
Parameters
----------
batch : any numeric or symbolic batch.
This includes numpy.ndarray, theano.gof.Variable, None, or a (nested)
tuple thereof.
predicate : function.
A unary function of any non-composite batch that returns True or False.
"""
# Catches any CompositeSpace batches that were mistakenly hand-constructed
# using nested lists rather than nested tuples.
assert not isinstance(batch, list)
# Data-less batches such as None or () are valid numeric and symbolic
# batches.
#
# Justification: we'd like
# is_symbolic_batch(space.make_theano_batch()) to always be True, even if
# space is an empty CompositeSpace.
if batch is None or (isinstance(batch, tuple) and len(batch) == 0):
return True
if isinstance(batch, tuple):
subbatch_results = tuple(_is_batch_all(b, predicate)
for b in batch)
result = all(subbatch_results)
# The subbatch_results must be all true, or all false, not a mix.
assert result == any(subbatch_results), ("composite batch had a "
"mixture of numeric and "
"symbolic subbatches. This "
"should never happen.")
return result
else:
return predicate(batch)
def is_symbolic_batch(batch):
    """
    Returns True if batch is a symbolic variable.

    Note that a batch may be both a symbolic and numeric variable
    (e.g. () for empty CompositeSpaces, None for NullSpaces).
    """
    def _leaf_is_symbolic(leaf):
        return isinstance(leaf, theano.gof.Variable)

    return _is_batch_all(batch, _leaf_is_symbolic)
def is_numeric_batch(batch):
    """
    Returns True if batch is a numeric variable.

    Note that a batch may be both a symbolic and numeric variable
    (e.g. () for empty CompositeSpaces, None for NullSpaces).
    """
    def _leaf_is_numeric(leaf):
        # Compare against the type's string name so that
        # theano.sandbox.cuda is never imported when CUDA is unavailable.
        return (isinstance(leaf, np.ndarray) or
                scipy.sparse.issparse(leaf) or
                str(type(leaf)) == "<type 'CudaNdarray'>")

    return _is_batch_all(batch, _leaf_is_numeric)
def _dense_to_sparse(batch):
    """
    Converts a non-composite dense batch into a CSR-sparse batch.

    Works for both symbolic (theano) and numeric (numpy) batches; the
    result is symbolic iff the input was.
    """
    if isinstance(batch, tuple):
        raise TypeError("Composite batches not supported.")

    assert not isinstance(batch, list)

    if not is_symbolic_batch(batch):
        # Numeric path: wrap the ndarray in a scipy CSR matrix.
        assert isinstance(batch, np.ndarray), "type of batch: %s" % type(batch)
        return scipy.sparse.csr_matrix(batch)

    # Symbolic path: insert a theano dense->CSR conversion op.
    assert isinstance(batch, theano.tensor.TensorVariable)
    return theano.sparse.csr_from_dense(batch)
def _reshape(arg, shape):
    """
    Reshapes a tensor. Supports both symbolic and numeric variables.

    This is a hack that first converts from sparse to dense, reshapes
    the dense tensor, then re-converts from dense to sparse. It is
    therefore memory-inefficient and unsuitable for large tensors. It
    will be replaced by a proper sparse reshaping Op once Theano
    implements that.

    Parameters
    ----------
    arg : numpy.ndarray, theano TensorVariable, or theano SparseVariable
        The (non-composite) tensor to reshape.
    shape : tuple
        The target shape, as accepted by ndarray.reshape.

    Returns
    -------
    A tensor of the same kind as `arg`, reshaped to `shape`. Sparse
    inputs keep their format ('csr' or 'csc').
    """
    if isinstance(arg, tuple):
        raise TypeError("Composite batches not supported.")

    assert not isinstance(arg, list)

    # Dense tensors (numeric or symbolic) reshape directly.
    if isinstance(arg, (np.ndarray, theano.tensor.TensorVariable)):
        return arg.reshape(shape)
    elif isinstance(arg, theano.sparse.SparseVariable):
        warnings.warn("Using pylearn2.space._reshape(), which is a "
                      "memory-inefficient hack for reshaping sparse tensors. "
                      "Do not use this on large tensors. This will eventually "
                      "be replaced by a proper Theano Op for sparse "
                      "reshaping, once that is written.")
        # Round-trip through a dense tensor, then restore the original
        # sparse format.
        dense = theano.sparse.dense_from_sparse(arg)
        dense = dense.reshape(shape)
        if arg.format == 'csr':
            return theano.sparse.csr_from_dense(dense)
        elif arg.format == 'csc':
            return theano.sparse.csc_from_dense(dense)
        else:
            raise ValueError('Unexpected sparse format "%s".' % arg.format)
    else:
        raise TypeError('Unexpected batch type "%s"' % str(type(arg)))
def _cast(arg, dtype):
    """
    Does element-wise casting to dtype.

    Supports symbolic, numeric, simple, and composite batches.

    Returns <arg> untouched if <dtype> is None, or dtype is unchanged
    (i.e. casting a float32 batch to float32).

    (One exception: composite batches are never returned as-is.
    A new tuple will always be returned. However, any components
    with unchanged dtypes will be returned untouched.)

    Parameters
    ----------
    arg : a numeric or symbolic batch, or a nested tuple thereof.
    dtype : str or None
        A theano/numpy dtype string, or None for a no-op.
    """
    if dtype is None:
        return arg

    assert dtype in tuple(t.dtype for t in theano.scalar.all_types)

    if isinstance(arg, tuple):
        # Composite batch: cast each component; always builds a new tuple.
        return tuple(_cast(a, dtype) for a in arg)
    elif isinstance(arg, np.ndarray):
        # theano._asarray is a safer drop-in replacement to numpy.asarray.
        return theano._asarray(arg, dtype=dtype)
    elif str(type(arg)) == "<type 'CudaNdarray'>":  # numeric CUDA array
        # String comparison avoids importing theano.sandbox.cuda when CUDA is
        # unavailable. CudaNdarrays only support float32.
        if str(dtype) != 'float32':
            raise TypeError("Can only cast a numeric CudaNdarray to "
                            "float32, not %s" % dtype)
        else:
            return arg
    elif (isinstance(arg, theano.gof.Variable) and
          isinstance(arg.type, CudaNdarrayType)):  # symbolic CUDA array
        if str(dtype) != 'float32':
            raise TypeError("Can only cast a theano CudaNdArrayType to "
                            "float32, not %s" % dtype)
        else:
            return arg
    elif scipy.sparse.issparse(arg):
        return arg.astype(dtype)
    elif isinstance(arg, theano.tensor.TensorVariable):
        return theano.tensor.cast(arg, dtype)
    elif isinstance(arg, theano.sparse.SparseVariable):
        return theano.sparse.cast(arg, dtype)
    elif isinstance(arg, theano.sandbox.cuda.var.CudaNdarrayVariable):
        # NOTE(review): this branch appears unreachable — CudaNdarrayVariable
        # instances should already match the CudaNdarrayType branch above;
        # confirm before removing.
        return arg
    else:
        raise TypeError("Unsupported arg type '%s'" % str(type(arg)))
def _undo_op(arg, string, strict=False):
"""
Undo symbolic op if string is in str(op).
Returns <arg> untouched if there was no symbolic op.
Parameters
----------
arg : any symbolic variable.
string : str
String that specifies op.
strict : bool
Whether to force op undo or not (default False).
"""
if hasattr(arg.owner, 'op'):
owner = arg.owner
if string in str(owner.op):
return owner.inputs[0]
elif strict:
raise ValueError(string + ' not found in op ' +
str(owner.op) + '.')
elif strict:
raise ValueError(string + ' op not found in variable ' +
str(arg) + '.')
return arg
class Space(object):
    """
    A vector space that can be transformed by a linear operator.

    Space and its subclasses are used to transform a data batch's geometry
    (e.g. vectors <--> matrices) and optionally, its dtype (e.g. float <-->
    int).

    Batches may be one of the following types:

    - numpy.ndarray
    - scipy.sparse.csr_matrix
    - theano.gof.Variable
    - None (for NullSpace)
    - A (nested) tuple of the above, possibly empty
      (for CompositeSpace).

    Parameters
    ----------
    validate_callbacks : list
        Callbacks that are run at the start of a call to validate.
        Each should be a callable with the same signature as validate.
        An example use case is installing an instance-specific error
        handler that provides extra instructions for how to correct an
        input that is in a bad space.
    np_validate_callbacks : list
        similar to validate_callbacks, but run on calls to np_validate
    """

    def __init__(self, validate_callbacks=None,
                 np_validate_callbacks=None):
        if validate_callbacks is None:
            validate_callbacks = []

        if np_validate_callbacks is None:
            np_validate_callbacks = []

        self.validate_callbacks = validate_callbacks
        self.np_validate_callbacks = np_validate_callbacks

    # Forces subclasses to implement __eq__.
    # This is necessary for _format_as to work correctly.
    def __eq__(self, other):
        """
        Returns true iff
        space.format_as(batch, self) and
        space.format_as(batch, other) return the same formatted batch.
        """
        raise NotImplementedError("__eq__ not implemented in class %s." %
                                  type(self))

    def get_batch_axis(self):
        """
        Returns the batch axis of the output space.

        Returns
        -------
        batch_axis : int
            the axis of the batch in the output space.
        """
        return 0

    def __ne__(self, other):
        """
        Returns the negation of `self == other`.
        """
        return not (self == other)

    def __repr__(self):
        """
        Delegates to __str__, so repr(space) == str(space).
        """
        return str(self)

    @property
    def dtype(self):
        """
        An object representing the data type used by this space.

        For simple spaces, this will be a dtype string, as used by numpy,
        scipy, and theano (e.g. 'float32').

        For data-less spaces like NoneType, this will be some other string.

        For composite spaces, this will be a nested tuple of such strings.
        """
        raise NotImplementedError()

    @dtype.setter
    def dtype(self, new_value):
        """
        Sets this space's dtype. The base class stores no dtype, so
        subclasses that carry one must override this setter.
        """
        raise NotImplementedError()

    @dtype.deleter
    def dtype(self):
        """
        Deleting the dtype is forbidden; set it to None instead.
        """
        raise RuntimeError("You may not delete the dtype of a space, "
                           "though you can set it to None.")

    def get_origin(self):
        """
        Returns the origin in this space.

        Returns
        -------
        origin : ndarray
            An NumPy array, the shape of a single points in this
            space, representing the origin.
        """
        raise NotImplementedError()

    def get_origin_batch(self, batch_size, dtype=None):
        """
        Returns a batch containing `batch_size` copies of the origin.

        Parameters
        ----------
        batch_size : int
            The number of examples in the batch to be returned.
        dtype : str, optional
            The dtype of the batch to be returned. Default = None.
            If None, use self.dtype.

        Returns
        -------
        batch : ndarray
            A NumPy array in the shape of a batch of `batch_size` points in
            this space (with points being indexed along the first axis),
            each `batch[i]` being a copy of the origin.
        """
        raise NotImplementedError()

    def make_shared_batch(self, batch_size, name=None, dtype=None):
        """
        Returns a theano shared variable initialized to a batch of
        `batch_size` copies of this space's origin.

        Parameters
        ----------
        batch_size : int
            Number of examples in the shared batch.
        name : str, optional
            Name for the shared variable.
        dtype : str, optional
            Dtype of the shared batch; defaults to self.dtype.
        """
        dtype = self._clean_dtype_arg(dtype)
        origin_batch = self.get_origin_batch(batch_size, dtype)
        return theano.shared(origin_batch, name=name)

    def make_theano_batch(self, name=None, dtype=None, batch_size=None):
        """
        Returns a symbolic variable representing a batch of points
        in this space.

        Parameters
        ----------
        name : str
            Variable name for the returned batch.
        dtype : str
            Data type for the returned batch.
            If omitted (None), self.dtype is used.
        batch_size : int
            Number of examples in the returned batch.

        Returns
        -------
        batch : TensorVariable, SparseVariable, or tuple thereof
            A batch with the appropriate number of dimensions and
            appropriate broadcast flags to represent a batch of
            points in this space.
        """
        raise NotImplementedError()

    def make_batch_theano(self, name=None, dtype=None, batch_size=None):
        """ An alias to make_theano_batch """
        return self.make_theano_batch(name=name,
                                      dtype=dtype,
                                      batch_size=batch_size)

    @wraps(make_theano_batch)
    def get_theano_batch(self, *args, **kwargs):
        return self.make_theano_batch(*args, **kwargs)

    def get_total_dimension(self):
        """
        Returns a Python int (not a theano iscalar) representing
        the dimensionality of a point in this space.

        If you format a batch of examples in this space as a
        design matrix (i.e., VectorSpace batch) then the
        number of columns will be equal to the total dimension.
        """
        raise NotImplementedError(str(type(self)) +
                                  " does not implement get_total_dimension.")

    def np_format_as(self, batch, space):
        """
        Returns a numeric batch (e.g. a numpy.ndarray or scipy.sparse sparse
        array), formatted to lie in this space.

        This is just a wrapper around self._format_as, with an extra check
        to throw an exception if <batch> is symbolic.

        Should be invertible, i.e. batch should equal
        `space.format_as(self.format_as(batch, space), self)`

        Parameters
        ----------
        batch : numpy.ndarray, or one of the scipy.sparse matrices.
            Array which lies in this space.
        space : Space
            Target space to format batch to.

        Returns
        -------
        numpy.ndarray or one of the scipy.sparse matrices
            The batch, formatted to lie in `space`.
        """
        self._check_is_numeric(batch)
        return self._format_as(is_numeric=True,
                               batch=batch,
                               space=space)

    def _check_sizes(self, space):
        """
        Called by self._format_as(space), to check whether self and space
        have compatible sizes. Throws a ValueError if they don't.
        """
        my_dimension = self.get_total_dimension()
        other_dimension = space.get_total_dimension()
        if my_dimension != other_dimension:
            raise ValueError(str(self) + " with total dimension " +
                             str(my_dimension) +
                             " can't format a batch into " +
                             str(space) + "because its total dimension is " +
                             str(other_dimension))

    def format_as(self, batch, space):
        """
        Returns a symbolic batch formatted to lie in `space`.

        Symbolic counterpart of np_format_as; raises an exception if
        <batch> is not symbolic.

        Parameters
        ----------
        batch : theano variable, or nested tuple thereof
            Symbolic batch which lies in this space.
        space : Space
            Target space to format batch to.
        """
        self._check_is_symbolic(batch)
        return self._format_as(is_numeric=False,
                               batch=batch,
                               space=space)

    def _format_as(self, is_numeric, batch, space):
        """
        The shared implementation of format_as() and np_format_as().
        Agnostic to whether batch is symbolic or numeric, which avoids
        duplicating a lot of code between format_as() and np_format_as().

        Calls the appropriate callbacks, then calls self._format_as_impl().

        Should be invertible, i.e. batch should equal
        `space._format_as(self._format_as(batch, space), self)`

        Parameters
        ----------
        is_numeric : bool
            Set to True to call np_validate_callbacks().
            Set to False to call validate_callbacks().
        batch : a numeric or symbolic batch (or nested tuple thereof)
            Must lie in this space.
        space : Space
            Target space to format batch to.

        Returns
        -------
        The batch, formatted to lie in `space`.
        """
        assert isinstance(is_numeric, bool)

        # Checks if batch belongs to this space
        self._validate(is_numeric, batch)

        # checks if self and space have compatible sizes for formatting.
        self._check_sizes(space)

        return self._format_as_impl(is_numeric, batch, space)

    def _format_as_impl(self, is_numeric, batch, target_space):
        """
        Actual implementation of format_as/np_format_as. Formats batch to
        target_space.

        Should be invertible, i.e. batch should equal
        `space._format_as_impl(self._format_as_impl(batch, space), self)`

        Parameters
        ----------
        is_numeric : bool
            Set to True to treat batch as a numeric batch, False to
            treat it as a symbolic batch. This is necessary because
            sometimes a batch's numeric/symbolicness can be ambiguous,
            i.e. when it's the empty tuple ().
        batch : a numpy.ndarray, scipy.sparse matrix, theano symbol, or a \
            nested tuple thereof
            Implementations of this method may assume that batch lies in this
            space (i.e. that it passed self._validate(batch) without throwing
            an exception).
        target_space : A Space subclass
            The space to transform batch into.

        Returns
        -------
        The batch, converted to the target_space.
        """
        raise NotImplementedError("%s does not implement _format_as_impl()." %
                                  type(self))

    def undo_np_format_as(self, batch, space):
        """
        Returns a numeric batch (e.g. a numpy.ndarray or scipy.sparse sparse
        array), with formatting from space undone.

        This is just a wrapper around self._undo_format_as, with an extra check
        to throw an exception if <batch> is symbolic.

        Parameters
        ----------
        batch : numpy.ndarray, or one of the scipy.sparse matrices.
            Array which lies in this space.
        space : Space
            Space to undo formatting from.

        Returns
        -------
        numpy.ndarray or one of the scipy.sparse matrices
            The formatted batch.
        """
        self._check_is_numeric(batch)
        return space.np_format_as(batch=batch,
                                  space=self)

    def undo_format_as(self, batch, space):
        """
        Returns a symbolic batch (e.g. a theano.tensor or theano.sparse
        array), with formatting from space undone.

        This is just a wrapper around space's formatting, with an extra
        check to throw an exception if <batch> is not symbolic.

        Parameters
        ----------
        batch : a symbolic theano variable (or nested tuple thereof).
            Batch which lies in `space`.
        space : Space
            Space to undo formatting from.

        Returns
        -------
        A symbolic Theano variable
            The batch formatted as self.
        """
        self._check_is_symbolic(batch)
        space.validate(batch)
        self._check_sizes(space)
        batch = self._undo_format_as_impl(batch=batch,
                                          space=space)

        # Checks if batch belongs to this space
        self.validate(batch)
        return batch

    def _undo_format_as_impl(self, batch, target_space):
        """
        Actual implementation of undo_format_as.
        Undoes target_space_formatting.

        Note that undo_np_format_as calls np_format_as.

        Parameters
        ----------
        batch : a theano symbol, or a nested tuple thereof
            Implementations of this method may assume that batch lies in
            space (i.e. that it passed self._validate(batch) without throwing
            an exception).
        target_space : A Space subclass
            The space to undo batch formatting from.

        Returns
        -------
        A symbolic Theano variable
            The batch, converted from target_space, back to self.
        """
        raise NotImplementedError("%s does not implement "
                                  "_undo_format_as_impl()." %
                                  type(self))

    def validate(self, batch):
        """
        Runs all validate_callbacks, then checks that batch lies in this space.
        Raises an exception if the batch isn't symbolic, or if any of these
        checks fails.

        Parameters
        ----------
        batch : a symbolic (Theano) variable that lies in this space.
        """
        self._check_is_symbolic(batch)
        self._validate(is_numeric=False, batch=batch)

    def np_validate(self, batch):
        """
        Runs all np_validate_callbacks, then checks that batch lies in this
        space. Raises an exception if the batch isn't numeric, or if any of
        these checks fails.

        Parameters
        ----------
        batch : a numeric (numpy/scipy.sparse) variable that lies in this \
            space
        """
        self._check_is_numeric(batch)
        self._validate(is_numeric=True, batch=batch)

    def _validate(self, is_numeric, batch):
        """
        Shared implementation of validate() and np_validate().

        Calls validate_callbacks or np_validate_callbacks as appropriate,
        then calls self._validate_impl(batch) to verify that batch belongs
        to this space.

        Parameters
        ----------
        is_numeric : bool.
            Set to True to call np_validate_callbacks,
            False to call validate_callbacks.
            Necessary because it can be impossible to tell from the
            batch whether it should be treated as a numeric of symbolic
            batch, for example when the batch is the empty tuple (),
            or NullSpace batch None.
        batch : a theano variable, numpy ndarray, scipy.sparse matrix \
            or a nested tuple thereof
            Represents a batch belonging to this space.
        """
        if is_numeric:
            self._check_is_numeric(batch)
            callbacks_name = "np_validate_callbacks"
        else:
            self._check_is_symbolic(batch)
            callbacks_name = "validate_callbacks"

        if not hasattr(self, callbacks_name):
            raise TypeError("The " + str(type(self)) + " Space subclass "
                            "is required to call the Space superclass "
                            "constructor but does not.")
        else:
            callbacks = getattr(self, callbacks_name)
            for callback in callbacks:
                callback(batch)

        self._validate_impl(is_numeric, batch)

    def _validate_impl(self, is_numeric, batch):
        """
        Subclasses must override this method so that it throws an
        exception if the batch is the wrong shape or dtype for this Space.

        Parameters
        ----------
        is_numeric : bool
            Set to True to treat batch as a numeric type
            (numpy.ndarray or scipy.sparse matrix).
            Set to False to treat batch as a symbolic (Theano) variable.
            Necessary because batch could be (), which could be numeric
            or symbolic.
        batch : A numpy ndarray, scipy.sparse matrix, theano variable \
            or a nested tuple thereof.
            Must be a valid batch belonging to this space.
        """
        raise NotImplementedError('Class "%s" does not implement '
                                  '_validate_impl()' % type(self))

    def batch_size(self, batch):
        """
        Returns the batch size of a symbolic batch.

        Parameters
        ----------
        batch : a symbolic (Theano) batch that lies in this space.
        """
        return self._batch_size(is_numeric=False, batch=batch)

    def np_batch_size(self, batch):
        """
        Returns the batch size of a numeric (numpy/scipy.sparse) batch.

        Parameters
        ----------
        batch : a numeric (numpy/scipy.sparse) batch that lies in this space.
        """
        return self._batch_size(is_numeric=True, batch=batch)

    def _batch_size(self, is_numeric, batch):
        """
        Shared implementation of batch_size() and np_batch_size():
        validates the batch, then defers to _batch_size_impl().
        """
        self._validate(is_numeric, batch)
        return self._batch_size_impl(is_numeric, batch)

    def _batch_size_impl(self, is_numeric, batch):
        """
        Returns the batch size of a batch.

        Parameters
        ----------
        batch : a validated batch that lies in this space.
        """
        raise NotImplementedError("%s does not implement batch_size" %
                                  type(self))

    def get_batch(self, data, start, end):
        """
        Returns a batch of data starting from index `start` to index `stop`

        Parameters
        ----------
        data : a batch of examples that lies in this space.
        start : int
            First example index (inclusive).
        end : int
            Last example index (exclusive).
        """
        raise NotImplementedError(str(type(self)) + " does not implement " +
                                  "get_batch")

    @staticmethod
    def _check_is_numeric(batch):
        """
        Raises a TypeError unless batch is a numeric
        (numpy/scipy.sparse) batch.
        """
        if not is_numeric_batch(batch):
            raise TypeError('Expected batch to be a numeric variable, but '
                            'instead it was of type "%s"' % type(batch))

    @staticmethod
    def _check_is_symbolic(batch):
        """
        Raises a TypeError unless batch is a symbolic (Theano) batch.
        """
        if not is_symbolic_batch(batch):
            raise TypeError('Expected batch to be a symbolic variable, but '
                            'instead it was of type "%s"' % type(batch))

    def _clean_dtype_arg(self, dtype):
        """
        Checks dtype string for validity, and returns it if it is.

        If dtype is 'floatX', returns the theano.config.floatX dtype (this will
        either be 'float32' or 'float64'.
        """
        if isinstance(dtype, np.dtype):
            dtype = str(dtype)

        if dtype == 'floatX':
            return theano.config.floatX

        if dtype is None or \
           dtype in tuple(x.dtype for x in theano.scalar.all_types):
            return dtype

        raise TypeError('Unrecognized value "%s" (type %s) for dtype arg' %
                        (dtype, type(dtype)))
class SimplyTypedSpace(Space):
    """
    An abstract base class for Spaces that use a numpy/theano dtype string for
    its .dtype property.

    Parameters
    ----------
    dtype : str, optional
        A numpy/theano dtype string, or 'floatX' for theano.config.floatX.
    kwargs : dict
        Passed on to the Space superclass constructor.
    """

    def __init__(self, dtype='floatX', **kwargs):
        super(SimplyTypedSpace, self).__init__(**kwargs)
        self._dtype = super(SimplyTypedSpace, self)._clean_dtype_arg(dtype)

    def _clean_dtype_arg(self, dtype):
        """
        if dtype is None, checks that self.dtype is not None.
        Otherwise, same as superclass' implementation.
        """
        if dtype is None:
            if self.dtype is None:
                raise TypeError("self.dtype is None, so you must provide a "
                                "non-None dtype argument to this method.")
            return self.dtype

        return super(SimplyTypedSpace, self)._clean_dtype_arg(dtype)

    def _validate_impl(self, is_numeric, batch):
        """
        Checks that batch is non-composite and that its dtype can be safely
        cast to this space's dtype; raises a TypeError otherwise.
        """
        if isinstance(batch, tuple):
            raise TypeError("This space only supports simple dtypes, but "
                            "received a composite batch.")

        # Checks for information-destroying casts.
        #
        # To be maximally strict, we'd guard against all loss of precision by
        # checking if np.can_cast(batch.dtype, self.dtype).
        #
        # Because this prohibits float64->float32, it breaks too much of the
        # codebase (float64 is default float, float32 is default CUDA float for
        # many graphics cards).
        #
        # Therefore, we only prohibit the following:
        #   * non-integral type to integral type
        #   * complex to non-complex

        def is_complex(dtype):
            # Use np.complexfloating rather than the removed np.complex
            # alias: np.complex was the builtin complex (i.e. complex128),
            # so complex64 batches were not recognized as complex and
            # slipped past the complex -> non-complex guard below.
            return np.issubdtype(dtype, np.complexfloating)

        def is_integral(dtype):
            return np.issubdtype(dtype, np.integer)

        if self.dtype is not None:
            if (is_complex(batch.dtype) and not is_complex(self.dtype)) or \
               (not is_integral(batch.dtype) and is_integral(self.dtype)):
                raise TypeError("Cannot safely cast batch dtype %s to "
                                "space's dtype %s. " %
                                (batch.dtype, self.dtype))

    @property
    def dtype(self):
        """
        This space's dtype, as a numpy/theano dtype string.
        """
        return self._dtype

    @dtype.setter
    def dtype(self, new_dtype):
        """
        Sets this space's dtype, validating/canonicalizing it first.
        """
        self._dtype = super(SimplyTypedSpace, self)._clean_dtype_arg(new_dtype)

    def __setstate__(self, state_dict):
        """
        Restores pickled state, filling in a default dtype for pickles
        made before Spaces had dtypes.
        """
        self.__dict__.update(state_dict)

        # When unpickling a Space that was pickled before Spaces had dtypes,
        # we need to set the _dtype to the default value.
        if '_dtype' not in state_dict:
            self._dtype = theano.config.floatX
class IndexSpace(SimplyTypedSpace):
    """
    A space representing indices, for example MNIST labels (0-10) or the
    indices of words in a dictionary for NLP tasks. A single space can
    contain multiple indices, for example the word indices of an n-gram.

    IndexSpaces can be converted to VectorSpaces in two ways: Either the
    labels are converted into one-hot vectors which are then concatenated,
    or they are converted into a single vector where 1s indicate labels
    present i.e. for 4 possible labels we have [0, 2] -> [1 0 1 0] or
    [0, 2] -> [1 0 0 0 0 0 1 0].

    Parameters
    ----------
    max_labels : int
        The number of possible classes/labels. This means that
        all labels should be < max_labels. Example: For MNIST
        there are 10 numbers and hence max_labels = 10.
    dim : int
        The number of indices in one space e.g. for MNIST there is
        one target label and hence dim = 1. If we have an n-gram
        of word indices as input to a neurel net language model, dim = n.
    dtype : str
        A numpy dtype string indicating this space's dtype.
        Must be an integer type e.g. int32 or int64.
    kwargs : dict
        Passes on to superclass constructor
    """

    def __init__(self, max_labels, dim, dtype='int64', **kwargs):
        if 'int' not in dtype:
            raise ValueError("The dtype of IndexSpace must be an integer type")

        super(IndexSpace, self).__init__(dtype, **kwargs)

        self.max_labels = max_labels
        self.dim = dim
        self.formatter = OneHotFormatter(self.max_labels)

    def __str__(self):
        """Return a string representation"""
        return ('%(classname)s(dim=%(dim)s, max_labels=%(max_labels)s, '
                'dtype=%(dtype)s)') % dict(classname=self.__class__.__name__,
                                           dim=self.dim,
                                           max_labels=self.max_labels,
                                           dtype=self.dtype)

    def __hash__(self):
        # Hash on the same fields that __eq__ compares.
        return hash((type(self), self.dim, self.max_labels, self.dtype))

    def __eq__(self, other):
        """
        Two IndexSpaces are equal iff their type, max_labels, dim, and
        dtype all match.
        """
        return (type(self) == type(other) and
                self.max_labels == other.max_labels and
                self.dim == other.dim and
                self.dtype == other.dtype)

    def __ne__(self, other):
        """
        Returns the negation of `self == other`.
        """
        return (not self == other)

    @functools.wraps(Space.get_total_dimension)
    def get_total_dimension(self):
        return self.dim

    @functools.wraps(Space.get_origin)
    def get_origin(self):
        return np.zeros((1, self.dim,))

    @functools.wraps(Space.get_origin_batch)
    def get_origin_batch(self, batch_size, dtype=None):
        dtype = self._clean_dtype_arg(dtype)
        return np.zeros((batch_size, self.dim), dtype=dtype)

    @functools.wraps(Space._check_sizes)
    def _check_sizes(self, space):
        if isinstance(space, VectorSpace):
            if space.dim not in (self.max_labels,  # merged onehots
                                 self.dim * self.max_labels):  # concatenated
                raise ValueError("Can't convert to VectorSpace of dim %d. "
                                 "Expected either dim=%d (merged one-hots) or "
                                 "%d (concatenated one-hots)" %
                                 (space.dim,
                                  self.max_labels,
                                  self.dim * self.max_labels))
        elif isinstance(space, IndexSpace):
            if space.dim != self.dim or space.max_labels != self.max_labels:
                # Report the *target* space's max_labels; previously this
                # interpolated self.max_labels, hiding the mismatched value.
                raise ValueError("Can't convert to IndexSpace of dim %d and "
                                 "max_labels %d." %
                                 (space.dim, space.max_labels))
        else:
            raise ValueError("Can't convert to " + str(space.__class__))

    @functools.wraps(Space._format_as_impl)
    def _format_as_impl(self, is_numeric, batch, space):
        if isinstance(space, VectorSpace):
            # 'merge' produces one max_labels-wide vector with 1s at all
            # present labels; 'concatenate' stacks dim one-hot vectors.
            if self.max_labels == space.dim:
                mode = 'merge'
            elif self.dim * self.max_labels == space.dim:
                mode = 'concatenate'
            else:
                raise ValueError("There is a bug. Couldn't format to a "
                                 "VectorSpace because it had an incorrect "
                                 "size, but this should've been caught in "
                                 "IndexSpace._check_sizes().")

            format_func = (self.formatter.format if is_numeric else
                           self.formatter.theano_expr)
            return _cast(format_func(batch, sparse=space.sparse, mode=mode),
                         space.dtype)
        elif isinstance(space, IndexSpace):
            if space.dim != self.dim or space.max_labels != self.max_labels:
                raise ValueError("The two IndexSpaces' dim and max_labels "
                                 "values don't match. This should have been "
                                 "caught by IndexSpace._check_sizes().")

            return _cast(batch, space.dtype)
        else:
            raise ValueError("Can't convert %s to %s"
                             % (self, space))

    @functools.wraps(Space.make_theano_batch)
    def make_theano_batch(self, name=None, dtype=None, batch_size=None):
        # NOTE(review): the dtype argument only affects the test value; the
        # returned variable is always int64 (lrow/lmatrix) — confirm whether
        # dtype should be honored here.
        if batch_size == 1:
            rval = tensor.lrow(name=name)
        else:
            rval = tensor.lmatrix(name=name)

        if theano.config.compute_test_value != 'off':
            if batch_size == 1:
                n = 1
            else:
                # TODO: try to extract constant scalar value from batch_size
                n = 4
            rval.tag.test_value = self.get_origin_batch(batch_size=n,
                                                        dtype=dtype)
        return rval

    @functools.wraps(Space._batch_size_impl)
    def _batch_size_impl(self, is_numeric, batch):
        return batch.shape[0]

    @functools.wraps(Space._validate_impl)
    def _validate_impl(self, is_numeric, batch):
        """
        Checks that batch is a 2D (batch_size x dim) integer batch; raises
        a TypeError or ValueError otherwise.
        """
        # checks that batch isn't a tuple, checks batch.type against self.dtype
        super(IndexSpace, self)._validate_impl(is_numeric, batch)

        if is_numeric:
            # Use the 'CudaNdarray' string to avoid importing
            # theano.sandbox.cuda when it is not available
            if not isinstance(batch, np.ndarray) \
               and str(type(batch)) != "<type 'CudaNdarray'>":
                raise TypeError("The value of a IndexSpace batch should be a "
                                "numpy.ndarray, or CudaNdarray, but is %s."
                                % str(type(batch)))
            if batch.ndim != 2:
                raise ValueError("The value of a IndexSpace batch must be "
                                 "2D, got %d dimensions for %s." % (batch.ndim,
                                                                    batch))
            if batch.shape[1] != self.dim:
                raise ValueError("The width of a IndexSpace batch must match "
                                 "with the space's dimension, but batch has "
                                 "shape %s and dim = %d." % (str(batch.shape),
                                                             self.dim))
        else:
            if not isinstance(batch, theano.gof.Variable):
                raise TypeError("IndexSpace batch should be a theano "
                                "Variable, got " + str(type(batch)))
            if not isinstance(batch.type, (theano.tensor.TensorType,
                                           CudaNdarrayType)):
                raise TypeError("IndexSpace batch should be TensorType or "
                                "CudaNdarrayType, got " + str(batch.type))
            if batch.ndim != 2:
                raise ValueError('IndexSpace batches must be 2D, got %d '
                                 'dimensions' % batch.ndim)
            for val in get_debug_values(batch):
                self.np_validate(val)
class VectorSpace(SimplyTypedSpace):
"""
A space whose points are defined as fixed-length vectors.
Parameters
----------
dim : int
Dimensionality of a vector in this space.
sparse : bool, optional
Sparse vector or not
dtype : str, optional
A numpy dtype string (e.g. 'float32') indicating this space's
dtype, or None for a dtype-agnostic space.
kwargs : dict
Passed on to superclass constructor.
"""
def __init__(self,
dim,
sparse=False,
dtype='floatX',
**kwargs):
super(VectorSpace, self).__init__(dtype, **kwargs)
self.dim = dim
self.sparse = sparse
def __str__(self):
"""
.. todo::
WRITEME
"""
return ('%s(dim=%d%s, dtype=%s)' %
(self.__class__.__name__,
self.dim,
', sparse' if self.sparse else '',
self.dtype))
@functools.wraps(Space.get_origin)
def get_origin(self):
return np.zeros((self.dim,))
@functools.wraps(Space.get_origin_batch)
def get_origin_batch(self, batch_size, dtype=None):
dtype = self._clean_dtype_arg(dtype)
if self.sparse:
return scipy.sparse.csr_matrix((batch_size, self.dim), dtype=dtype)
else:
return np.zeros((batch_size, self.dim), dtype=dtype)
@functools.wraps(Space._batch_size_impl)
def _batch_size_impl(self, is_numeric, batch):
return batch.shape[0]
    @functools.wraps(Space.make_theano_batch)
    def make_theano_batch(self, name=None, dtype=None, batch_size=None):
        dtype = self._clean_dtype_arg(dtype)

        if self.sparse:
            if batch_size is not None:
                raise NotImplementedError("batch_size not implemented "
                                          "for sparse case")

            rval = theano.sparse.csr_matrix(name=name, dtype=dtype)
        else:
            # A row (batch_size == 1) is broadcastable along the batch axis,
            # unlike a general matrix.
            if batch_size == 1:
                rval = tensor.row(name=name, dtype=dtype)
            else:
                rval = tensor.matrix(name=name, dtype=dtype)

        if theano.config.compute_test_value != 'off':
            if batch_size == 1:
                n = 1
            else:
                # TODO: try to extract constant scalar value from batch_size
                n = 4
            # Attach a zero batch so theano's test-value machinery can
            # propagate shapes/dtypes through the graph.
            rval.tag.test_value = self.get_origin_batch(batch_size=n,
                                                        dtype=dtype)
        return rval
@functools.wraps(Space.get_total_dimension)
def get_total_dimension(self):
return self.dim
@functools.wraps(Space._format_as_impl)
def _format_as_impl(self, is_numeric, batch, space):
to_type = None
def is_sparse(batch):
return (isinstance(batch, theano.sparse.SparseVariable) or
scipy.sparse.issparse(batch))
if not isinstance(space, IndexSpace):
my_dimension = self.get_total_dimension()
other_dimension = space.get_total_dimension()
if my_dimension != other_dimension:
raise ValueError(str(self) + " with total dimension " +
str(my_dimension) +
" can't format a batch into " +
str(space) +
"because its total dimension is " +
str(other_dimension))
if isinstance(space, CompositeSpace):
if isinstance(batch, theano.sparse.SparseVariable):
warnings.warn('Formatting from a sparse VectorSpace to a '
'CompositeSpace is currently (2 Jan 2014) a '
'non-differentiable action. This is because it '
'calls slicing operations on a sparse batch '
'(e.g. "my_matrix[r:R, c:C]", which Theano does '
'not yet have a gradient operator for. If '
'autodifferentiation is reporting an error, '
'this may be why. Formatting batch type %s '
'from space %s to space %s' %
(type(batch), self, space))
pos = 0
pieces = []
for component in space.components:
width = component.get_total_dimension()
subtensor = batch[:, pos:pos + width]
pos += width
vector_subspace = VectorSpace(dim=width,
dtype=self.dtype,
sparse=self.sparse)
formatted = vector_subspace._format_as(is_numeric,
subtensor,
component)
pieces.append(formatted)
result = tuple(pieces)
elif isinstance(space, Conv2DSpace):
if is_sparse(batch):
raise TypeError("Formatting a SparseVariable to a Conv2DSpace "
"is not supported, since neither scipy nor "
"Theano has sparse tensors with more than 2 "
"dimensions. We need 4 dimensions to "
"represent a Conv2DSpace batch")
dims = {'b': batch.shape[0],
'c': space.num_channels,
0: space.shape[0],
1: space.shape[1]}
if space.axes != space.default_axes:
# Always use default_axes, so conversions like
# Conv2DSpace(c01b) -> VectorSpace -> Conv2DSpace(b01c) work
shape = [dims[ax] for ax in space.default_axes]
batch = _reshape(batch, shape)
batch = batch.transpose(*[space.default_axes.index(ax)
for ax in space.axes])
result = batch
else:
shape = tuple([dims[elem] for elem in space.axes])
result = _reshape(batch, shape)
to_type = space.dtype
elif isinstance(space, VectorSpace):
if self.dim != space.dim:
raise ValueError("Can't convert between VectorSpaces of "
"different sizes (%d to %d)."
% (self.dim, space.dim))
if space.sparse != is_sparse(batch):
if space.sparse:
batch = _dense_to_sparse(batch)
elif isinstance(batch, theano.sparse.SparseVariable):
batch = theano.sparse.dense_from_sparse(batch)
elif scipy.sparse.issparse(batch):
batch = batch.todense()
else:
assert False, ("Unplanned-for branch in if-elif-elif "
"chain. This is a bug in the code.")
result = batch
to_type = space.dtype
else:
raise NotImplementedError("%s doesn't know how to format as %s" %
(self, space))
return _cast(result, dtype=to_type)
@functools.wraps(Space._undo_format_as_impl)
def _undo_format_as_impl(self, batch, space):
def is_sparse(batch):
return isinstance(batch, theano.sparse.SparseVariable)
if not isinstance(space, IndexSpace):
my_dimension = self.get_total_dimension()
other_dimension = space.get_total_dimension()
if my_dimension != other_dimension:
raise ValueError(str(self) + " with total dimension " +
str(my_dimension) +
" can't undo format a batch from " +
str(space) +
"because its total dimension is " +
str(other_dimension))
if isinstance(space, CompositeSpace):
if isinstance(batch, theano.sparse.SparseVariable):
warnings.warn('Undo formatting from a sparse VectorSpace to a '
'CompositeSpace is currently (2 Jan 2014) a '
'non-differentiable action. This is because it '
'calls slicing operations on a sparse batch '
'(e.g. "my_matrix[r:R, c:C]", which Theano does '
'not yet have a gradient operator for. If '
'autodifferentiation is reporting an error, '
'this may be why. Formatting batch type %s '
'from space %s to space %s' %
(type(batch), self, space))
# Recursively try and find a non-Composite, non-Null space
# to extract underlying theano variable
def extract_vector_variable(composite_space, batch_tuple):
found = False
for sp, el in safe_zip(composite_space.components,
batch_tuple):
dim = sp.get_total_dimension()
if not isinstance(sp, NullSpace) and dim > 0:
if isinstance(sp, CompositeSpace):
var, found = extract_vector_variable(sp, el)
var = var.owner.inputs[0]
else:
dummy_sp = VectorSpace(dim=dim,
sparse=sp.sparse,
dtype=sp.dtype
)
var = dummy_sp.undo_format_as(el, sp)
found = True
if found:
break
return var, found
var, found = extract_vector_variable(space, batch)
batch = var
if not found:
raise TypeError("Could not find a valid space "
"to undo format from in the "
"CompositeSpace.")
else:
# Undo subtensor slice
owner = batch.owner
assert 'Subtensor' in str(owner.op)
batch = owner.inputs[0]
elif isinstance(space, Conv2DSpace):
if is_sparse(batch):
raise TypeError("Undo formatting a SparseVariable to a "
"Conv2DSpace is not supported, since "
"neither scipy nor Theano has sparse "
"tensors with more than 2 dimensions. "
"We need 4 dimensions to represent a "
"Conv2DSpace batch")
# Check for cast
batch = _undo_op(batch, 'Cast')
# Undo axes shuffle
if space.axes != space.default_axes:
batch = _undo_op(batch, 'DimShuffle', strict=True)
# Undo reshape
batch = _undo_op(batch, 'Reshape{4}', strict=True)
elif isinstance(space, VectorSpace):
if self.dim != space.dim:
raise ValueError("Can't convert between VectorSpaces of "
"different sizes (%d to %d)."
% (self.dim, space.dim))
# Check for cast
batch = _undo_op(batch, 'Cast')
# Undo any sparse-dense switches
if self.sparse != is_sparse(batch):
if space.sparse:
batch = _undo_op(batch, 'SparseFromDense', strict=True)
elif isinstance(batch, theano.sparse.SparseVariable):
batch = _undo_op(batch, 'DenseFromSparse', strict=True)
else:
assert False, ("Unplanned-for branch in if-elif "
"chain. This is a bug in the code.")
else:
raise NotImplementedError("%s doesn't know how to format as %s" %
(self, space))
return batch
def __eq__(self, other):
"""
.. todo::
WRITEME
"""
return (type(self) == type(other) and
self.dim == other.dim and
self.sparse == other.sparse and
self.dtype == other.dtype)
def __hash__(self):
"""
.. todo::
WRITEME
"""
return hash((type(self), self.dim, self.sparse, self.dtype))
@functools.wraps(Space._validate_impl)
def _validate_impl(self, is_numeric, batch):
"""
.. todo::
WRITEME
"""
# checks that batch isn't a tuple, checks batch.type against self.dtype
super(VectorSpace, self)._validate_impl(is_numeric, batch)
if isinstance(batch, theano.gof.Variable):
if self.sparse:
if not isinstance(batch.type, theano.sparse.SparseType):
raise TypeError('This VectorSpace is%s sparse, but the '
'provided batch is not. (batch type: "%s")'
% ('' if self.sparse else ' not',
type(batch)))
elif not isinstance(batch.type, (theano.tensor.TensorType,
CudaNdarrayType)):
raise TypeError("VectorSpace batch should be TensorType or "
"CudaNdarrayType, got " + str(batch.type))
if batch.ndim != 2:
raise ValueError('VectorSpace batches must be 2D, got %d '
'dimensions' % batch.ndim)
for val in get_debug_values(batch):
self.np_validate(val) # sic; val is numeric, not symbolic
else:
# Use the 'CudaNdarray' string to avoid importing
# theano.sandbox.cuda when it is not available
if (not self.sparse
and not isinstance(batch, np.ndarray)
and type(batch) != 'CudaNdarray'):
raise TypeError("The value of a VectorSpace batch should be a "
"numpy.ndarray, or CudaNdarray, but is %s."
% str(type(batch)))
if self.sparse:
if not theano.sparse.enable_sparse:
raise TypeError("theano.sparse is not enabled, cannot "
"have a value for a sparse VectorSpace.")
if not scipy.sparse.issparse(batch):
raise TypeError("The value of a sparse VectorSpace batch "
"should be a sparse scipy matrix, got %s "
"of type %s." % (batch, type(batch)))
if batch.ndim != 2:
raise ValueError("The value of a VectorSpace batch must be "
"2D, got %d dimensions for %s." % (batch.ndim,
batch))
if batch.shape[1] != self.dim:
raise ValueError("The width of a VectorSpace batch must match "
"with the space's dimension, but batch has "
"shape %s and dim = %d." %
(str(batch.shape), self.dim))
class VectorSequenceSpace(SimplyTypedSpace):
    """
    A space representing a single, variable-length sequence of fixed-sized
    vectors.

    Parameters
    ----------
    dim : int
        Vector size
    dtype : str, optional
        A numpy dtype string indicating this space's dtype.
    kwargs : dict
        Passes on to superclass constructor
    """

    def __init__(self, dim, dtype='floatX', **kwargs):
        super(VectorSequenceSpace, self).__init__(dtype, **kwargs)
        self.dim = dim

    def __str__(self):
        """Return a string representation"""
        return ('%(classname)s(dim=%(dim)s, dtype=%(dtype)s)' %
                dict(classname=self.__class__.__name__,
                     dim=self.dim,
                     dtype=self.dtype))

    @wraps(Space.__eq__)
    def __eq__(self, other):
        return (type(self) == type(other) and
                self.dim == other.dim and
                self.dtype == other.dtype)

    def __hash__(self):
        """Hash consistent with __eq__ (added for parity with VectorSpace)."""
        return hash((type(self), self.dim, self.dtype))

    @wraps(Space._check_sizes)
    def _check_sizes(self, space):
        if not isinstance(space, VectorSequenceSpace):
            raise ValueError("Can't convert to " + str(space.__class__))
        else:
            if space.dim != self.dim:
                raise ValueError("Can't convert to VectorSequenceSpace of "
                                 "dim %d" %
                                 (space.dim,))

    @wraps(Space._format_as_impl)
    def _format_as_impl(self, is_numeric, batch, space):
        if isinstance(space, VectorSequenceSpace):
            if space.dim != self.dim:
                raise ValueError("The two VectorSequenceSpaces' dim "
                                 "values don't match. This should have been "
                                 "caught by "
                                 "VectorSequenceSpace._check_sizes().")
            # Only a dtype cast is needed between equally-sized spaces.
            return _cast(batch, space.dtype)
        else:
            raise ValueError("Can't convert %s to %s" % (self, space))

    @wraps(Space.make_theano_batch)
    def make_theano_batch(self, name=None, dtype=None, batch_size=None):
        if batch_size == 1:
            return tensor.matrix(name=name)
        else:
            # BUGFIX: this used to *return* the ValueError instead of
            # raising it, silently handing callers an exception object.
            raise ValueError("VectorSequenceSpace does not support batches "
                             "of sequences.")

    @wraps(Space._batch_size_impl)
    def _batch_size_impl(self, is_numeric, batch):
        # Only batch size of 1 is supported
        return 1

    @wraps(Space._validate_impl)
    def _validate_impl(self, is_numeric, batch):
        # checks that batch isn't a tuple, checks batch.type against self.dtype
        super(VectorSequenceSpace, self)._validate_impl(is_numeric, batch)

        if is_numeric:
            # Use the 'CudaNdarray' string to avoid importing
            # theano.sandbox.cuda when it is not available
            if not isinstance(batch, np.ndarray) \
               and str(type(batch)) != "<type 'CudaNdarray'>":
                raise TypeError("The value of a VectorSequenceSpace batch "
                                "should be a numpy.ndarray, or CudaNdarray, "
                                "but is %s." % str(type(batch)))
            if batch.ndim != 2:
                raise ValueError("The value of a VectorSequenceSpace batch "
                                 "must be 2D, got %d dimensions for %s."
                                 % (batch.ndim, batch))
            if batch.shape[1] != self.dim:
                raise ValueError("The width of a VectorSequenceSpace 'batch' "
                                 "must match with the space's window"
                                 "dimension, but batch has dim %d and "
                                 "this space's dim is %d."
                                 % (batch.shape[1], self.dim))
        else:
            if not isinstance(batch, theano.gof.Variable):
                raise TypeError("VectorSequenceSpace batch should be a theano "
                                "Variable, got " + str(type(batch)))
            if not isinstance(batch.type, (theano.tensor.TensorType,
                                           CudaNdarrayType)):
                raise TypeError("VectorSequenceSpace batch should be "
                                "TensorType or CudaNdarrayType, got " +
                                str(batch.type))
            if batch.ndim != 2:
                raise ValueError("VectorSequenceSpace 'batches' must be 2D, "
                                 "got %d dimensions" % batch.ndim)
            for val in get_debug_values(batch):
                self.np_validate(val)
class IndexSequenceSpace(SimplyTypedSpace):
    """
    A space representing a single, variable-length sequence of indexes.

    Parameters
    ----------
    max_labels : int
        The number of possible classes/labels. This means that
        all labels should be < max_labels.
    dim : int
        The number of indices in one element of the sequence
    dtype : str
        A numpy dtype string indicating this space's dtype.
        Must be an integer type e.g. int32 or int64.
    kwargs : dict
        Passes on to superclass constructor
    """

    def __init__(self, max_labels, dim, dtype='int64', **kwargs):
        if 'int' not in dtype:
            raise ValueError("The dtype of IndexSequenceSpace must be an "
                             "integer type")

        super(IndexSequenceSpace, self).__init__(dtype, **kwargs)

        self.max_labels = max_labels
        self.dim = dim
        # Used to expand index batches into one-hot representations.
        self.formatter = OneHotFormatter(self.max_labels)

    def __str__(self):
        """Return a string representation"""
        return ('%(classname)s(dim=%(dim)s, max_labels=%(max_labels)s, '
                'dtype=%(dtype)s)') % dict(classname=self.__class__.__name__,
                                           dim=self.dim,
                                           max_labels=self.max_labels,
                                           dtype=self.dtype)

    def __eq__(self, other):
        """Equal iff type, max_labels, dim and dtype all match."""
        return (type(self) == type(other) and
                self.max_labels == other.max_labels and
                self.dim == other.dim and
                self.dtype == other.dtype)

    def __hash__(self):
        """Hash consistent with __eq__ (added for parity with VectorSpace)."""
        return hash((type(self), self.max_labels, self.dim, self.dtype))

    @wraps(Space._check_sizes)
    def _check_sizes(self, space):
        if isinstance(space, VectorSequenceSpace):
            # self.max_labels -> merged onehots
            # self.dim * self.max_labels -> concatenated
            if space.dim not in (self.max_labels, self.dim * self.max_labels):
                raise ValueError("Can't convert to VectorSequenceSpace of "
                                 "dim %d. Expected either "
                                 "dim=%d (merged one-hots) or %d "
                                 "(concatenated one-hots)" %
                                 (space.dim,
                                  self.max_labels,
                                  self.dim * self.max_labels))
        elif isinstance(space, IndexSequenceSpace):
            if space.dim != self.dim or space.max_labels != self.max_labels:
                # BUGFIX: message previously reported self.max_labels, not
                # the target space's max_labels that failed the check.
                raise ValueError("Can't convert to IndexSequenceSpace of "
                                 "dim %d and max_labels %d." %
                                 (space.dim, space.max_labels))
        else:
            raise ValueError("Can't convert to " + str(space.__class__))

    @wraps(Space._format_as_impl)
    def _format_as_impl(self, is_numeric, batch, space):
        if isinstance(space, VectorSequenceSpace):
            if self.max_labels == space.dim:
                mode = 'merge'
            elif self.dim * self.max_labels == space.dim:
                mode = 'concatenate'
            else:
                raise ValueError("There is a bug. Couldn't format to a "
                                 "VectorSequenceSpace because it had an "
                                 "incorrect size, but this should've been "
                                 "caught in "
                                 "IndexSequenceSpace._check_sizes().")

            format_func = (self.formatter.format if is_numeric else
                           self.formatter.theano_expr)
            return _cast(format_func(batch, mode=mode), space.dtype)
        elif isinstance(space, IndexSequenceSpace):
            if space.dim != self.dim or space.max_labels != self.max_labels:
                raise ValueError("The two IndexSequenceSpaces' dim and "
                                 "max_labels values don't match. This should "
                                 "have been caught by "
                                 "IndexSequenceSpace._check_sizes().")
            return _cast(batch, space.dtype)
        else:
            raise ValueError("Can't convert %s to %s"
                             % (self, space))

    @wraps(Space.make_theano_batch)
    def make_theano_batch(self, name=None, dtype=None, batch_size=None):
        if batch_size == 1:
            return tensor.matrix(name=name)
        else:
            # BUGFIX: this used to *return* the ValueError instead of
            # raising it, silently handing callers an exception object.
            raise ValueError("IndexSequenceSpace does not support batches "
                             "of sequences.")

    @wraps(Space._batch_size_impl)
    def _batch_size_impl(self, is_numeric, batch):
        # Only batch size of 1 is supported
        return 1

    @wraps(Space._validate_impl)
    def _validate_impl(self, is_numeric, batch):
        # checks that batch isn't a tuple, checks batch.type against self.dtype
        super(IndexSequenceSpace, self)._validate_impl(is_numeric, batch)

        if is_numeric:
            # Use the 'CudaNdarray' string to avoid importing
            # theano.sandbox.cuda when it is not available
            if not isinstance(batch, np.ndarray) \
               and str(type(batch)) != "<type 'CudaNdarray'>":
                raise TypeError("The value of a IndexSequenceSpace batch "
                                "should be a numpy.ndarray, or CudaNdarray, "
                                "but is %s." % str(type(batch)))
            if batch.ndim != 2:
                raise ValueError("The value of a IndexSequenceSpace batch "
                                 "must be 2D, got %d dimensions for %s." %
                                 (batch.ndim, batch))
            if batch.shape[1] != self.dim:
                raise ValueError("The width of a IndexSequenceSpace batch "
                                 "must match with the space's dimension, but "
                                 "batch has shape %s and dim = %d." %
                                 (str(batch.shape), self.dim))
        else:
            if not isinstance(batch, theano.gof.Variable):
                raise TypeError("IndexSequenceSpace batch should be a theano "
                                "Variable, got " + str(type(batch)))
            if not isinstance(batch.type, (theano.tensor.TensorType,
                                           CudaNdarrayType)):
                raise TypeError("IndexSequenceSpace batch should be "
                                "TensorType or CudaNdarrayType, got " +
                                str(batch.type))
            if batch.ndim != 2:
                raise ValueError('IndexSequenceSpace batches must be 2D, got '
                                 '%d dimensions' % batch.ndim)
            for val in get_debug_values(batch):
                self.np_validate(val)
class Conv2DSpace(SimplyTypedSpace):
    """
    A space whose points are 3-D tensors representing (potentially
    multi-channel) images.

    Parameters
    ----------
    shape : sequence, length 2
        The shape of a single image, i.e. (rows, cols).
    num_channels : int (synonym: channels)
        Number of channels in the image, i.e. 3 if RGB.
    axes : tuple
        A tuple indicating the semantics of each axis, containing the
        following elements in some order:

        - 'b' : this axis is the batch index of a minibatch.
        - 'c' : this axis the channel index of a minibatch.
        - 0 : topological axis 0 (rows)
        - 1 : topological axis 1 (columns)

        For example, a PIL image has axes (0, 1, 'c') or (0, 1).
        The pylearn2 image displaying functionality uses
        ('b', 0, 1, 'c') for batches and (0, 1, 'c') for images.
        theano's conv2d operator uses ('b', 'c', 0, 1) images.
    dtype : str
        A numpy dtype string (e.g. 'float32') indicating this space's
        dtype, or None for a dtype-agnostic space.
    kwargs : dict
        Passed on to superclass constructor
    """

    # Assume pylearn2's get_topological_view format, since this is how
    # data is currently served up. If we make better iterators change
    # default to ('b', 'c', 0, 1) for theano conv2d
    default_axes = ('b', 0, 1, 'c')

    def __init__(self,
                 shape,
                 channels=None,
                 num_channels=None,
                 axes=None,
                 dtype='floatX',
                 **kwargs):
        super(Conv2DSpace, self).__init__(dtype, **kwargs)

        # Exactly one of `channels` / `num_channels` must be provided.
        assert (channels is None) + (num_channels is None) == 1
        if num_channels is None:
            num_channels = channels

        assert isinstance(num_channels, py_integer_types)

        if not hasattr(shape, '__len__'):
            raise ValueError("shape argument for Conv2DSpace must have a "
                             "length. Got %s." % str(shape))
        if len(shape) != 2:
            raise ValueError("shape argument to Conv2DSpace must be length 2, "
                             "not %d" % len(shape))
        assert all(isinstance(elem, py_integer_types) for elem in shape)
        assert all(elem > 0 for elem in shape)
        assert isinstance(num_channels, py_integer_types)
        assert num_channels > 0
        # Converts shape to a tuple, so it can be hashable, and self can be too
        self.shape = tuple(shape)
        self.num_channels = num_channels
        if axes is None:
            axes = self.default_axes
        assert len(axes) == 4
        self.axes = tuple(axes)

    def __str__(self):
        """Returns a human-readable description of this space."""
        return ("%s(shape=%s, num_channels=%d, axes=%s, dtype=%s)" %
                (self.__class__.__name__,
                 str(self.shape),
                 self.num_channels,
                 str(self.axes),
                 self.dtype))

    def __eq__(self, other):
        """Equal iff type, shape, num_channels, axes and dtype all match."""
        assert isinstance(self.axes, tuple)

        if isinstance(other, Conv2DSpace):
            assert isinstance(other.axes, tuple)

        return (type(self) == type(other) and
                self.shape == other.shape and
                self.num_channels == other.num_channels and
                self.axes == other.axes and
                self.dtype == other.dtype)

    def __hash__(self):
        """Hash consistent with __eq__."""
        return hash((type(self),
                     self.shape,
                     self.num_channels,
                     self.axes,
                     self.dtype))

    @functools.wraps(Space.get_batch_axis)
    def get_batch_axis(self):
        return self.axes.index('b')

    @functools.wraps(Space.get_origin)
    def get_origin(self):
        dims = {0: self.shape[0], 1: self.shape[1], 'c': self.num_channels}
        shape = [dims[elem] for elem in self.axes if elem != 'b']
        return np.zeros(shape, dtype=self.dtype)

    @functools.wraps(Space.get_origin_batch)
    def get_origin_batch(self, batch_size, dtype=None):
        dtype = self._clean_dtype_arg(dtype)

        if not isinstance(batch_size, py_integer_types):
            raise TypeError("Conv2DSpace.get_origin_batch expects an int, "
                            "got %s of type %s" % (str(batch_size),
                                                   type(batch_size)))
        assert batch_size > 0

        dims = {'b': batch_size,
                0: self.shape[0],
                1: self.shape[1],
                'c': self.num_channels}

        shape = [dims[elem] for elem in self.axes]
        return np.zeros(shape, dtype=dtype)

    @functools.wraps(Space.make_theano_batch)
    def make_theano_batch(self, name=None, dtype=None, batch_size=None):
        dtype = self._clean_dtype_arg(dtype)

        # An axis is broadcastable iff its length is known to be 1.
        broadcastable = [False] * 4
        broadcastable[self.axes.index('c')] = (self.num_channels == 1)
        broadcastable[self.axes.index('b')] = (batch_size == 1)
        broadcastable = tuple(broadcastable)

        rval = TensorType(dtype=dtype,
                          broadcastable=broadcastable
                          )(name=name)
        if theano.config.compute_test_value != 'off':
            if batch_size == 1:
                n = 1
            else:
                # TODO: try to extract constant scalar value from batch_size
                n = 4
            rval.tag.test_value = self.get_origin_batch(batch_size=n,
                                                        dtype=dtype)
        return rval

    @functools.wraps(Space._batch_size_impl)
    def _batch_size_impl(self, is_numeric, batch):
        return batch.shape[self.axes.index('b')]

    @staticmethod
    def convert(tensor, src_axes, dst_axes):
        """
        Returns a view of tensor using the axis semantics defined
        by dst_axes. (If src_axes matches dst_axes, returns
        tensor itself)

        Useful for transferring tensors between different
        Conv2DSpaces.

        Parameters
        ----------
        tensor : tensor_like
            A 4-tensor representing a batch of images
        src_axes : tuple, length 4
            Axis semantics of tensor (see class docstring for the
            meaning of 'b', 'c', 0, 1).
        dst_axes : tuple, length 4
            Desired axis semantics of the returned view.
        """
        src_axes = tuple(src_axes)
        dst_axes = tuple(dst_axes)
        assert len(src_axes) == 4
        assert len(dst_axes) == 4

        if src_axes == dst_axes:
            return tensor

        shuffle = [src_axes.index(elem) for elem in dst_axes]

        if is_symbolic_batch(tensor):
            return tensor.dimshuffle(*shuffle)
        else:
            return tensor.transpose(*shuffle)

    @staticmethod
    def convert_numpy(tensor, src_axes, dst_axes):
        """Alias for `convert`; works on numeric batches as well."""
        return Conv2DSpace.convert(tensor, src_axes, dst_axes)

    @functools.wraps(Space.get_total_dimension)
    def get_total_dimension(self):
        # Patch old pickle files
        if not hasattr(self, 'num_channels'):
            self.num_channels = self.nchannels

        return self.shape[0] * self.shape[1] * self.num_channels

    @functools.wraps(Space._validate_impl)
    def _validate_impl(self, is_numeric, batch):
        # checks batch.type against self.dtype
        super(Conv2DSpace, self)._validate_impl(is_numeric, batch)

        if not is_numeric:
            if isinstance(batch, theano.sparse.SparseVariable):
                raise TypeError("Conv2DSpace cannot use SparseVariables, "
                                "since as of this writing (28 Dec 2013), "
                                "there is not yet a SparseVariable type with "
                                "4 dimensions")
            if not isinstance(batch, theano.gof.Variable):
                raise TypeError("Conv2DSpace batches must be theano "
                                "Variables, got " + str(type(batch)))
            if not isinstance(batch.type, (theano.tensor.TensorType,
                                           CudaNdarrayType)):
                raise TypeError('Expected TensorType or CudaNdArrayType, got '
                                '"%s"' % type(batch.type))
            if batch.ndim != 4:
                raise ValueError("The value of a Conv2DSpace batch must be "
                                 "4D, got %d dimensions for %s." %
                                 (batch.ndim, batch))
            for val in get_debug_values(batch):
                self.np_validate(val)
        else:
            if scipy.sparse.issparse(batch):
                raise TypeError("Conv2DSpace cannot use sparse batches, since "
                                "scipy.sparse does not support 4 dimensional "
                                "tensors currently (28 Dec 2013).")
            # Use the 'CudaNdarray' string to avoid importing
            # theano.sandbox.cuda when it is not available.
            # BUGFIX: comparing type(batch) to the *string* 'CudaNdarray' was
            # always True, so CudaNdarray batches were incorrectly rejected.
            # Compare str(type(batch)) instead, as done elsewhere in this file.
            if (not isinstance(batch, np.ndarray)) \
               and str(type(batch)) != "<type 'CudaNdarray'>":
                raise TypeError("The value of a Conv2DSpace batch should be a "
                                "numpy.ndarray, or CudaNdarray, but is %s."
                                % str(type(batch)))
            if batch.ndim != 4:
                raise ValueError("The value of a Conv2DSpace batch must be "
                                 "4D, got %d dimensions for %s." %
                                 (batch.ndim, batch))

            d = self.axes.index('c')
            actual_channels = batch.shape[d]
            if actual_channels != self.num_channels:
                raise ValueError("Expected axis %d to be number of channels "
                                 "(%d) but it is %d" %
                                 (d, self.num_channels, actual_channels))
            assert batch.shape[self.axes.index('c')] == self.num_channels

            for coord in [0, 1]:
                d = self.axes.index(coord)
                actual_shape = batch.shape[d]
                expected_shape = self.shape[coord]
                if actual_shape != expected_shape:
                    raise ValueError("Conv2DSpace with shape %s and axes %s "
                                     "expected dimension %s of a batch (%s) "
                                     "to have length %s but it has %s"
                                     % (str(self.shape),
                                        str(self.axes),
                                        str(d),
                                        str(batch),
                                        str(expected_shape),
                                        str(actual_shape)))

    @functools.wraps(Space._format_as_impl)
    def _format_as_impl(self, is_numeric, batch, space):
        if isinstance(space, VectorSpace):
            # We need to ensure that the resulting batch will always be
            # the same in `space`, no matter what the axes of `self` are.
            if self.axes != self.default_axes:
                # The batch index goes on the first axis
                assert self.default_axes[0] == 'b'
                batch = batch.transpose(*[self.axes.index(axis)
                                          for axis in self.default_axes])
            result = batch.reshape((batch.shape[0],
                                    self.get_total_dimension()))
            if space.sparse:
                result = _dense_to_sparse(result)
        elif isinstance(space, Conv2DSpace):
            result = Conv2DSpace.convert(batch, self.axes, space.axes)
        else:
            raise NotImplementedError("%s doesn't know how to format as %s"
                                      % (str(self), str(space)))

        return _cast(result, space.dtype)

    @functools.wraps(Space._undo_format_as_impl)
    def _undo_format_as_impl(self, batch, space):
        # Check for cast
        batch = _undo_op(batch, 'Cast')
        if isinstance(space, VectorSpace):
            # Check for SparseFromDense
            batch = _undo_op(batch, 'SparseFromDense')
            # Undo reshape op
            batch = _undo_op(batch, 'Reshape', strict=True)
            # Check to see if axis ordering was changed
            if self.axes != self.default_axes:
                batch = _undo_op(batch, 'DimShuffle', strict=True)
        elif isinstance(space, Conv2DSpace):
            # Check to see if axis ordering was changed
            if space.axes != self.axes:
                batch = _undo_op(batch, 'DimShuffle', strict=True)
        else:
            raise NotImplementedError("%s doesn't know how to format as %s"
                                      % (str(self), str(space)))
        return batch
class CompositeSpace(Space):
"""
A Space whose points are tuples of points in other spaces.
May be nested, in which case the points are nested tuples.
Parameters
----------
components : WRITEME
kwargs : dict
WRITEME
"""
def __init__(self, components, **kwargs):
super(CompositeSpace, self).__init__(**kwargs)
assert isinstance(components, (list, tuple))
for i, component in enumerate(components):
if not isinstance(component, Space):
raise TypeError("component %d is %s of type %s, expected "
"Space instance. " %
(i, str(component), str(type(component))))
self.components = list(components)
def __eq__(self, other):
"""
.. todo::
WRITEME
"""
return (type(self) == type(other) and
len(self.components) == len(other.components) and
all(my_component == other_component for
my_component, other_component in
zip(self.components, other.components)))
def __hash__(self):
"""
.. todo::
WRITEME
"""
return hash((type(self), tuple(self.components)))
def __str__(self):
"""
.. todo::
WRITEME
"""
return '%(classname)s(%(components)s)' % \
dict(classname=self.__class__.__name__,
components=', '.join([str(c) for c in self.components]))
@property
def dtype(self):
"""
Returns a nested tuple of dtype strings. NullSpaces will yield a bogus
dtype string (see NullSpace.dtype).
"""
def get_dtype_of_space(space):
if isinstance(space, CompositeSpace):
return tuple(get_dtype_of_space(c) for c in space.components)
elif isinstance(space, NullSpace):
return NullSpace().dtype
else:
return space.dtype
return get_dtype_of_space(self)
    @dtype.setter
    def dtype(self, new_dtype):
        """
        If new_dtype is None or a string, it will be applied to all components
        (except any NullSpaces).

        If new_dtype is a (nested) tuple, its elements will be applied to
        corresponding components.
        """
        if isinstance(new_dtype, tuple):
            # Element-wise assignment; safe_zip raises if the tuple's length
            # doesn't match the number of components.
            for component, new_dt in safe_zip(self.components, new_dtype):
                component.dtype = new_dt
        elif new_dtype is None or isinstance(new_dtype, str):
            for component in self.components:
                if not isinstance(component, NullSpace):
                    component.dtype = new_dtype
        # NOTE(review): any other new_dtype value falls through silently --
        # presumably intentional, but confirm; raising TypeError may be safer.
def restrict(self, subset):
"""
Returns a new Space containing only the components whose indices
are given in subset.
The new space will contain the components in the order given in the
subset list.
Parameters
----------
subset : WRITEME
Notes
-----
The returned Space may not be a CompositeSpace if `subset` contains
only one index.
"""
assert isinstance(subset, (list, tuple))
if len(subset) == 1:
idx, = subset
return self.components[idx]
return CompositeSpace([self.components[i] for i in subset])
def restrict_batch(self, batch, subset):
"""
Returns a batch containing only the components whose indices are
present in subset.
May not be a tuple anymore if there is only one index.
Outputs will be ordered in the order that they appear in subset.
Only supports symbolic batches.
Parameters
----------
batch : WRITEME
subset : WRITEME
"""
self._validate(is_numeric=False, batch=batch)
assert isinstance(subset, (list, tuple))
if len(subset) == 1:
idx, = subset
return batch[idx]
return tuple([batch[i] for i in subset])
@functools.wraps(Space.get_total_dimension)
def get_total_dimension(self):
return sum([component.get_total_dimension() for component in
self.components])
@functools.wraps(Space.make_shared_batch)
def make_shared_batch(self, batch_size, name=None, dtype=None):
dtype = self._clean_dtype_arg(dtype)
batch = self.get_origin_batch(batch_size, dtype)
def recursive_shared(batch):
if isinstance(batch, tuple):
return tuple(recursive_shared(b) for b in batch)
else:
return theano.shared(batch, name=name)
return recursive_shared(batch)
    @functools.wraps(Space._format_as_impl)
    def _format_as_impl(self, is_numeric, batch, space):
        """
        Supports formatting to a single VectorSpace, or to a CompositeSpace.

        CompositeSpace->VectorSpace:

        Traverses the nested components in depth-first order, serializing the
        leaf nodes (i.e. the non-composite subspaces) into the VectorSpace.

        CompositeSpace->CompositeSpace:

        Only works for two CompositeSpaces that have the same nested
        structure. Traverses both CompositeSpaces' nested components in
        parallel, converting between corresponding non-composite components
        in <self> and <space> as:

          `self_component._format_as(is_numeric,
                                     batch_component,
                                     space_component)`

        Parameters
        ----------
        batch : WRITEME
        space : WRITEME

        Returns
        -------
        WRITEME
        """
        if isinstance(space, VectorSpace):
            # Flatten each component into a VectorSpace slice of the target
            # dtype/sparsity, then concatenate the slices along axis 1.
            pieces = []
            for component, input_piece in zip(self.components, batch):
                subspace = VectorSpace(dim=component.get_total_dimension(),
                                       dtype=space.dtype,
                                       sparse=space.sparse)
                pieces.append(component._format_as(is_numeric,
                                                   input_piece,
                                                   subspace))

            # Pieces should all have the same dtype, before we concatenate them
            if len(pieces) > 0:
                for piece in pieces[1:]:
                    if pieces[0].dtype != piece.dtype:
                        # Mismatched piece dtypes can only arise when the
                        # target space is dtype-agnostic (dtype is None).
                        assert space.dtype is None
                        raise TypeError("Tried to format components with "
                                        "differing dtypes into a VectorSpace "
                                        "with no dtype of its own. "
                                        "dtypes: %s" %
                                        str(tuple(str(p.dtype)
                                                  for p in pieces)))

            # Concatenation op depends on symbolic vs numeric and
            # sparse vs dense.
            if is_symbolic_batch(batch):
                if space.sparse:
                    return theano.sparse.hstack(pieces)
                else:
                    return tensor.concatenate(pieces, axis=1)
            else:
                if space.sparse:
                    return scipy.sparse.hstack(pieces)
                else:
                    return np.concatenate(pieces, axis=1)

        if isinstance(space, CompositeSpace):
            def recursive_format_as(orig_space, batch, dest_space):
                if not (isinstance(orig_space, CompositeSpace) ==
                        isinstance(dest_space, CompositeSpace)):
                    raise TypeError("Can't convert between CompositeSpaces "
                                    "with different tree structures")

                # No need to check batch's tree structure. Space._format_as()
                # already did that by calling _validate(), before calling this
                # method.
                if isinstance(orig_space, CompositeSpace):
                    return tuple(recursive_format_as(os, bt, ds)
                                 for os, bt, ds
                                 in safe_zip(orig_space.components,
                                             batch,
                                             dest_space.components))
                else:
                    return orig_space._format_as(is_numeric, batch, dest_space)

            return recursive_format_as(self, batch, space)

        raise NotImplementedError(str(self) +
                                  " does not know how to format as " +
                                  str(space))
@functools.wraps(Space._undo_format_as_impl)
def _undo_format_as_impl(self, batch, space):
    """
    Undoes the formatting to a single VectorSpace, or to a CompositeSpace.

    VectorSpace target:
        The formatted batch was produced by concatenating (HStack / Join)
        the per-component batches, so this walks the symbolic graph back
        to the concatenation's inputs and un-formats each input with its
        corresponding component space.

    CompositeSpace target:
        Only works for two CompositeSpaces that have the same nested
        structure. Traverses both CompositeSpaces' nested components in
        parallel, undoing the conversion between corresponding
        non-composite components.

    Parameters
    ----------
    batch : WRITEME
        A batch previously produced by formatting `self` into `space`.
    space : WRITEME
        The space `batch` was formatted into.

    Returns
    -------
    WRITEME
        A (nested) tuple of batches mirroring `self.components`.
    """
    if isinstance(space, VectorSpace):
        # Undo join: recover the per-component batches that were
        # concatenated when formatting into the VectorSpace.
        owner = batch.owner
        assert owner is not None
        if space.sparse:
            assert 'HStack' in str(owner.op)
            batch = owner.inputs
        else:
            assert str(owner.op) == 'Join'
            # First input of Join is the join axis; skip it.
            batch = owner.inputs[1:]

        def extract_dtype(dtype):
            # A composite dtype may be a (nested) tuple; use the first
            # leaf dtype as the representative one.
            if isinstance(dtype, tuple):
                return extract_dtype(dtype[0])
            else:
                return dtype

        def compose_batch(composite_space, batch_list):
            # Un-format each flat piece back into its component space.
            # (A dead "if False" recursive branch was removed here; it
            # was unreachable and had no effect.)
            rval = ()
            for sp, bt in safe_zip(composite_space.components, batch_list):
                sparse = getattr(sp, 'sparse', False)
                dtype = extract_dtype(sp.dtype)
                new_sp = VectorSpace(dim=sp.get_total_dimension(),
                                     dtype=dtype,
                                     sparse=sparse)
                new_batch = sp.undo_format_as(bt, new_sp)
                rval += (new_batch,)
            return rval

        return compose_batch(self, batch)

    if isinstance(space, CompositeSpace):
        def recursive_undo_format_as(orig_space, batch, dest_space):
            if not (isinstance(orig_space, CompositeSpace) ==
                    isinstance(dest_space, CompositeSpace)):
                raise TypeError("Can't convert between CompositeSpaces "
                                "with different tree structures")

            # No need to check batch's tree structure.
            # Space.undo_format_as() already did that
            # by calling _validate(), before calling this
            # method.
            if isinstance(orig_space, CompositeSpace):
                return tuple(recursive_undo_format_as(os, bt, ds)
                             for os, bt, ds
                             in safe_zip(orig_space.components,
                                         batch,
                                         dest_space.components))
            else:
                return orig_space.undo_format_as(batch,
                                                 dest_space)

        return recursive_undo_format_as(self, batch, space)

    # Fixed copy-pasted message: this is the undo direction.
    raise NotImplementedError(str(self) +
                              " does not know how to undo formatting as " +
                              str(space))
@functools.wraps(Space._validate_impl)
def _validate_impl(self, is_numeric, batch):
    # A composite batch must be a tuple with exactly one element per
    # component space.
    if not isinstance(batch, tuple):
        raise TypeError("The value of a CompositeSpace batch should be a "
                        "tuple, but is %s of type %s." %
                        (batch, type(batch)))
    if len(batch) != len(self.components):
        raise ValueError("Expected %d elements in batch, got %d"
                         % (len(self.components), len(batch)))
    # Delegate validation of each element to its component space.
    for subspace, subbatch in zip(self.components, batch):
        subspace._validate(is_numeric, subbatch)
def get_origin_batch(self, batch_size, dtype=None):
    """
    Calls get_origin_batch on all subspaces, and returns a (nested)
    tuple containing their return values.

    Parameters
    ----------
    batch_size : int
        Batch size.
    dtype : str
        the dtype to use for all the get_origin_batch() calls on
        subspaces. If dtype is None, or a single dtype string, that will
        be used for all calls. If dtype is a (nested) tuple, it must
        mirror the tree structure of this CompositeSpace.
    """
    # Normalize dtype into a tuple tree that mirrors self.components.
    dtype = self._clean_dtype_arg(dtype)
    origins = []
    for component, component_dtype in safe_zip(self.components, dtype):
        origins.append(component.get_origin_batch(batch_size,
                                                  component_dtype))
    return tuple(origins)
@functools.wraps(Space.make_theano_batch)
def make_theano_batch(self,
                      name=None,
                      dtype=None,
                      batch_size=None):
    """
    Calls make_theano_batch on all subspaces, and returns a (nested)
    tuple containing their return values.

    Parameters
    ----------
    name : str
        Name of the symbolic variable
    dtype : str
        The dtype of the returned batch.
        If dtype is a string, it will be applied to all components.
        If dtype is None, C.dtype will be used for each component C.
        If dtype is a nested tuple, its elements will be applied to
        corresponding elements in the components.
    batch_size : int
        Batch size.
    """
    num_components = len(self.components)
    # Expand a scalar name into one indexed name per component.
    if name is None:
        names = [None] * num_components
    elif isinstance(name, (list, tuple)):
        names = name
    else:
        names = ['%s[%i]' % (name, i) for i in range(num_components)]

    dtype = self._clean_dtype_arg(dtype)

    assert isinstance(names, (list, tuple))
    assert isinstance(dtype, (list, tuple))

    return tuple(component.make_theano_batch(name=component_name,
                                             dtype=component_dtype,
                                             batch_size=batch_size)
                 for component, component_name, component_dtype
                 in safe_zip(self.components, names, dtype))
@functools.wraps(Space._batch_size_impl)
def _batch_size_impl(self, is_numeric, batch):
    """
    Returns the batch size shared by this space's non-empty components.
    """
    def has_no_data(space):
        """
        Returns True if space can contain no data.
        """
        # Bug fix: the original inspected the enclosing loop variable
        # `subspace` instead of its own `space` parameter. It only
        # worked by accident because it was always called with that
        # same loop variable.
        return (isinstance(space, NullSpace) or
                (isinstance(space, CompositeSpace) and
                 len(space.components) == 0))

    if is_symbolic_batch(batch):
        # Use the first component that can actually hold data.
        for subspace, subbatch in safe_zip(self.components, batch):
            if not has_no_data(subspace):
                return subspace._batch_size(is_numeric, subbatch)

        return 0  # TODO: shouldn't this line return a Theano object?
    else:
        result = None
        for subspace, subbatch in safe_zip(self.components, batch):
            batch_size = subspace._batch_size(is_numeric, subbatch)
            if has_no_data(subspace):
                # An empty component always reports a batch size of 0.
                assert batch_size == 0
            else:
                if result is None:
                    result = batch_size
                elif batch_size != result:
                    raise ValueError("All non-empty components of a "
                                     "CompositeSpace should have the same "
                                     "batch size, but we encountered "
                                     "components with size %s, then %s." %
                                     (result, batch_size))

        return 0 if result is None else result
def _clean_dtype_arg(self, dtype):
    """
    If dtype is None or a string, this returns a nested tuple that mirrors
    the tree structure of this CompositeSpace, with dtype at the leaves.

    If dtype is a nested tuple, this checks that it has the same tree
    structure as this CompositeSpace.
    """
    # Bind once so the nested helpers can reuse the superclass' scalar
    # dtype cleanup.
    parent = super(CompositeSpace, self)

    def fill_tree(dtype, space):
        """
        Builds a nested tuple mirroring <space>, populating the leaves
        with <dtype>.
        """
        if not isinstance(space, CompositeSpace):
            return parent._clean_dtype_arg(dtype)
        return tuple(fill_tree(dtype, subspace)
                     for subspace in space.components)

    def verify_tree(dtype, space):
        """
        Verifies that a dtype tree mirrors <space>, cleaning each leaf
        with Space._clean_dtype_arg.
        """
        if isinstance(space, CompositeSpace):
            if not isinstance(dtype, tuple):
                raise TypeError("Tree structure mismatch.")
            return tuple(verify_tree(subdtype, subspace)
                         for subdtype, subspace
                         in safe_zip(dtype, space.components))
        if dtype is not None and not isinstance(dtype, str):
            raise TypeError("Tree structure mismatch.")
        return parent._clean_dtype_arg(dtype)

    if dtype is None or isinstance(dtype, str):
        # Scalar dtype: clean it once, then broadcast over the tree.
        return fill_tree(parent._clean_dtype_arg(dtype), self)
    return verify_tree(dtype, self)
class NullSpace(Space):
    """
    A space that contains no data. As such, it has the following quirks:

    * Its validate()/np_validate() methods only accept None.
    * Its dtype string is "Nullspace's dtype".
    * The source name associated to this Space is the empty string ('').
    """

    # NullSpaces don't support validation callbacks, since they only take None
    # as data batches.
    def __init__(self):
        super(NullSpace, self).__init__()

    def __str__(self):
        """Returns the class name."""
        return "NullSpace"

    def __eq__(self, other):
        """All NullSpace instances are interchangeable, hence equal."""
        return type(self) == type(other)

    def __hash__(self):
        """Consistent with __eq__: hash only on the type."""
        return hash(type(self))

    @property
    def dtype(self):
        """A bogus dtype string; NullSpace batches carry no data."""
        return "%s's dtype" % self.__class__.__name__

    @dtype.setter
    def dtype(self, new_dtype):
        """Rejects everything except the bogus dtype string above."""
        if new_dtype != self.dtype:
            raise TypeError('%s can only take the bogus dtype "%s"' %
                            (self.__class__.__name__,
                             self.dtype))
        # otherwise, do nothing

    @functools.wraps(Space.make_theano_batch)
    def make_theano_batch(self, name=None, dtype=None, batch_size=None):
        # batch_size added for signature consistency with the wrapped
        # Space.make_theano_batch interface; it is irrelevant here since
        # the only batch a NullSpace knows is None.
        return None

    @functools.wraps(Space._validate_impl)
    def _validate_impl(self, is_numeric, batch):
        if batch is not None:
            raise TypeError('NullSpace only accepts None, as a dummy data '
                            'batch. Instead, got %s of type %s'
                            % (batch, type(batch)))

    @functools.wraps(Space._format_as_impl)
    def _format_as_impl(self, is_numeric, batch, space):
        # "No data" is represented the same way (None) in any NullSpace.
        assert isinstance(space, NullSpace)
        return None

    @functools.wraps(Space._batch_size_impl)
    def _batch_size_impl(self, is_numeric, batch):
        # There is no way to know how many examples would actually
        # have been in the batch, since it is empty. We return 0.
        self._validate(is_numeric, batch)
        return 0
|
modlinltd/django-advanced-filters | refs/heads/develop | tests/reps/admin.py | 3 | from django.contrib import admin
from .models import SalesRep
admin.site.register(SalesRep)
|
joerocklin/gem5 | refs/heads/master | src/python/m5/ticks.py | 57 | # Copyright (c) 2007 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Nathan Binkert
import sys
from m5.util import warn
tps = 1.0e12 # default to 1 THz (1 Tick == 1 ps)
tps_fixed = False # once set to true, can't be changed
# fix the global frequency and tell C++ about it
def fixGlobalFrequency():
    """Freeze the global tick frequency and propagate it to the C++ core.

    Idempotent: only the first call has any effect; once tps_fixed is
    True, setGlobalFrequency() refuses further changes.
    """
    import internal
    global tps, tps_fixed
    if not tps_fixed:
        tps_fixed = True
        # Hand the (integral) ticks-per-second value to the C++ side.
        internal.core.setClockFrequency(int(tps))
        print "Global frequency set at %d ticks per second" % int(tps)
def setGlobalFrequency(ticksPerSecond):
    """Set the global simulation tick frequency.

    Parameters
    ----------
    ticksPerSecond : int, long, float, or str
        Numeric values are used directly; strings (e.g. "1THz") are
        parsed via m5.util.convert.anyToFrequency and rounded.

    Raises
    ------
    AttributeError
        If fixGlobalFrequency() has already locked the frequency.
    TypeError
        If ticksPerSecond has an unsupported type.
    """
    from m5.util import convert

    global tps, tps_fixed
    if tps_fixed:
        raise AttributeError(
            "Global frequency already fixed at %f ticks/s." % tps)

    # The original int/long and float branches were identical; accept all
    # plain numeric types in one check.
    if isinstance(ticksPerSecond, (int, long, float)):
        tps = ticksPerSecond
    elif isinstance(ticksPerSecond, str):
        tps = round(convert.anyToFrequency(ticksPerSecond))
    else:
        raise TypeError(
            "wrong type '%s' for ticksPerSecond" % type(ticksPerSecond))
# how big does a rounding error need to be before we warn about it?
frequency_tolerance = 0.001 # 0.1%
def fromSeconds(value):
    """Convert a time in seconds (float) to an integral number of ticks.

    The global frequency must already have been fixed with
    fixGlobalFrequency(). Emits a warning when the rounding error
    exceeds frequency_tolerance.

    Raises
    ------
    TypeError
        If value is not a float.
    AttributeError
        If the global frequency has not been fixed yet.
    """
    if not isinstance(value, float):
        raise TypeError("can't convert '%s' to type tick" % type(value))

    # once someone needs to convert to seconds, the global frequency
    # had better be fixed
    if not tps_fixed:
        raise AttributeError(
            "In order to do conversions, the global frequency must be fixed")

    if value == 0:
        return 0

    # convert the value from time to ticks
    value *= tps

    int_value = int(round(value))
    err = (value - int_value) / value
    if err > frequency_tolerance:
        warn("rounding error > tolerance\n    %f rounded to %d", value,
             int_value)
    return int_value
__all__ = [ 'setGlobalFrequency', 'fixGlobalFrequency', 'fromSeconds',
'frequency_tolerance' ]
|
adedayo/intellij-community | refs/heads/master | python/helpers/profiler/thrift/server/TServer.py | 56 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import Queue
import os
import sys
import threading
import traceback
import logging
logger = logging.getLogger(__name__)
from thrift.Thrift import TProcessor
from thrift.protocol import TBinaryProtocol
from thrift.transport import TTransport
class TServer:
    """Base interface for a server, which must have a serve() method.

    Three constructors for all servers:
    1) (processor, serverTransport)
    2) (processor, serverTransport, transportFactory, protocolFactory)
    3) (processor, serverTransport,
        inputTransportFactory, outputTransportFactory,
        inputProtocolFactory, outputProtocolFactory)
    """

    def __init__(self, *args):
        # Dispatch on arity: expand the shorthand forms into the full
        # six-argument (in/out transport factory, in/out protocol
        # factory) form.
        if len(args) == 2:
            processor, server_transport = args
            self.__initArgs__(processor, server_transport,
                              TTransport.TTransportFactoryBase(),
                              TTransport.TTransportFactoryBase(),
                              TBinaryProtocol.TBinaryProtocolFactory(),
                              TBinaryProtocol.TBinaryProtocolFactory())
        elif len(args) == 4:
            # One transport factory and one protocol factory shared by
            # both directions.
            self.__initArgs__(args[0], args[1], args[2], args[2],
                              args[3], args[3])
        elif len(args) == 6:
            self.__initArgs__(*args)

    def __initArgs__(self, processor, serverTransport,
                     inputTransportFactory, outputTransportFactory,
                     inputProtocolFactory, outputProtocolFactory):
        # Store every collaborator; subclasses use these from serve().
        self.processor = processor
        self.serverTransport = serverTransport
        self.inputTransportFactory = inputTransportFactory
        self.outputTransportFactory = outputTransportFactory
        self.inputProtocolFactory = inputProtocolFactory
        self.outputProtocolFactory = outputProtocolFactory

    def serve(self):
        # Subclasses override this with their accept/dispatch loop.
        pass
class TSimpleServer(TServer):
    """Simple single-threaded server that just pumps around one transport."""

    def __init__(self, *args):
        TServer.__init__(self, *args)

    def serve(self):
        """Accept one client at a time and serve it until it disconnects."""
        self.serverTransport.listen()
        while True:
            client = self.serverTransport.accept()
            if not client:
                continue
            itrans = self.inputTransportFactory.getTransport(client)
            otrans = self.outputTransportFactory.getTransport(client)
            iprot = self.inputProtocolFactory.getProtocol(itrans)
            oprot = self.outputProtocolFactory.getProtocol(otrans)
            try:
                while True:
                    self.processor.process(iprot, oprot)
            # Modernized from the legacy "except E, v" form, which is a
            # syntax error on Python 3; "as" works on Python 2.6+ too.
            except TTransport.TTransportException:
                # Normal end of a client session (disconnect).
                pass
            except Exception as x:
                logger.exception(x)
            itrans.close()
            otrans.close()
class TThreadedServer(TServer):
    """Threaded server that spawns a new thread per each connection."""

    def __init__(self, *args, **kwargs):
        TServer.__init__(self, *args)
        # daemon=True lets the interpreter exit without joining handlers.
        self.daemon = kwargs.get("daemon", False)

    def serve(self):
        """Accept clients forever, handling each in its own thread."""
        self.serverTransport.listen()
        while True:
            try:
                client = self.serverTransport.accept()
                if not client:
                    continue
                t = threading.Thread(target=self.handle, args=(client,))
                t.setDaemon(self.daemon)
                t.start()
            except KeyboardInterrupt:
                # Let Ctrl-C terminate the accept loop.
                raise
            # Modernized from the legacy "except E, v" form; "as" works
            # on Python 2.6+ and 3.x alike.
            except Exception as x:
                logger.exception(x)

    def handle(self, client):
        """Thread body: process one client's requests until disconnect."""
        itrans = self.inputTransportFactory.getTransport(client)
        otrans = self.outputTransportFactory.getTransport(client)
        iprot = self.inputProtocolFactory.getProtocol(itrans)
        oprot = self.outputProtocolFactory.getProtocol(otrans)
        try:
            while True:
                self.processor.process(iprot, oprot)
        except TTransport.TTransportException:
            # Normal end of a client session (disconnect).
            pass
        except Exception as x:
            logger.exception(x)
        itrans.close()
        otrans.close()
class TThreadPoolServer(TServer):
    """Server with a fixed size pool of threads which service requests."""

    def __init__(self, *args, **kwargs):
        TServer.__init__(self, *args)
        # Shared work queue: serve() produces clients, workers consume.
        self.clients = Queue.Queue()
        self.threads = 10
        self.daemon = kwargs.get("daemon", False)

    def setNumThreads(self, num):
        """Set the number of worker threads that should be created"""
        self.threads = num

    def serveThread(self):
        """Loop around getting clients from the shared queue and process them."""
        while True:
            try:
                client = self.clients.get()
                self.serveClient(client)
            # Modernized from the legacy "except E, v" form; "as" works
            # on Python 2.6+ and 3.x alike.
            except Exception as x:
                logger.exception(x)

    def serveClient(self, client):
        """Process input/output from a client for as long as possible"""
        itrans = self.inputTransportFactory.getTransport(client)
        otrans = self.outputTransportFactory.getTransport(client)
        iprot = self.inputProtocolFactory.getProtocol(itrans)
        oprot = self.outputProtocolFactory.getProtocol(otrans)
        try:
            while True:
                self.processor.process(iprot, oprot)
        except TTransport.TTransportException:
            # Normal end of a client session (disconnect).
            pass
        except Exception as x:
            logger.exception(x)
        itrans.close()
        otrans.close()

    def serve(self):
        """Start a fixed number of worker threads and put client into a queue"""
        for i in range(self.threads):
            try:
                t = threading.Thread(target=self.serveThread)
                t.setDaemon(self.daemon)
                t.start()
            except Exception as x:
                logger.exception(x)

        # Pump the socket for clients
        self.serverTransport.listen()
        while True:
            try:
                client = self.serverTransport.accept()
                if not client:
                    continue
                self.clients.put(client)
            except Exception as x:
                logger.exception(x)
class TForkingServer(TServer):
    """A Thrift server that forks a new process for each request

    This is more scalable than the threaded server as it does not cause
    GIL contention.

    Note that this has different semantics from the threading server.
    Specifically, updates to shared variables will no longer be shared.
    It will also not work on windows.

    This code is heavily inspired by SocketServer.ForkingMixIn in the
    Python stdlib.
    """

    def __init__(self, *args):
        TServer.__init__(self, *args)
        # PIDs of live child processes; reaped in collect_children().
        self.children = []

    def serve(self):
        def try_close(file):
            # Best-effort close; a failure here must not kill the server.
            # "except E, v" modernized to "as" (Python 2.6+/3.x).
            try:
                file.close()
            except IOError as e:
                logger.warning(e, exc_info=True)

        self.serverTransport.listen()
        while True:
            client = self.serverTransport.accept()
            if not client:
                continue
            try:
                pid = os.fork()

                if pid:  # parent
                    # add before collect, otherwise you race w/ waitpid
                    self.children.append(pid)
                    self.collect_children()

                    # Parent must close socket or the connection may not get
                    # closed promptly
                    itrans = self.inputTransportFactory.getTransport(client)
                    otrans = self.outputTransportFactory.getTransport(client)
                    try_close(itrans)
                    try_close(otrans)
                else:
                    # Child: serve this one client, then _exit() so the
                    # parent's cleanup handlers never run twice.
                    itrans = self.inputTransportFactory.getTransport(client)
                    otrans = self.outputTransportFactory.getTransport(client)

                    iprot = self.inputProtocolFactory.getProtocol(itrans)
                    oprot = self.outputProtocolFactory.getProtocol(otrans)

                    ecode = 0
                    try:
                        try:
                            while True:
                                self.processor.process(iprot, oprot)
                        except TTransport.TTransportException:
                            # Normal client disconnect.
                            pass
                        except Exception as e:
                            logger.exception(e)
                            ecode = 1
                    finally:
                        try_close(itrans)
                        try_close(otrans)
                        os._exit(ecode)

            except TTransport.TTransportException:
                pass
            except Exception as x:
                logger.exception(x)

    def collect_children(self):
        """Reap any exited child processes without blocking."""
        while self.children:
            try:
                pid, status = os.waitpid(0, os.WNOHANG)
            except os.error:
                pid = None

            if pid:
                self.children.remove(pid)
            else:
                break
|
Therp/odoo | refs/heads/8.0 | addons/l10n_fr_hr_payroll/l10n_fr_hr_payroll.py | 340 | #-*- coding:utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011 OpenERP SA (<http://openerp.com>). All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
import openerp.addons.decimal_precision as dp
class res_company(osv.osv):
    # Company-level settings used by the French payroll rules.
    _inherit = 'res.company'

    _columns = {
        # Social security ceiling ("plafond de la Securite Sociale").
        'plafond_secu': fields.float('Plafond de la Securite Sociale', digits_compute=dp.get_precision('Payroll')),
        # Employee headcount.
        'nombre_employes': fields.integer('Nombre d\'employes'),
        # Employer contribution to the "prevoyance" (contingency) scheme.
        'cotisation_prevoyance': fields.float('Cotisation Patronale Prevoyance', digits_compute=dp.get_precision('Payroll')),
        # Social security body the company reports to.
        'org_ss': fields.char('Organisme de securite sociale'),
        # Applicable collective bargaining agreement.
        'conv_coll': fields.char('Convention collective'),
    }
class hr_contract(osv.osv):
    # French classification fields on the employment contract.
    _inherit = 'hr.contract'

    _columns = {
        # Job qualification, niveau (grade) and coefficient as defined by
        # the applicable collective bargaining agreement.
        'qualif': fields.char('Qualification'),
        'niveau': fields.char('Niveau'),
        'coef': fields.char('Coefficient'),
    }
class hr_payslip(osv.osv):
    # Adds the payment mode (e.g. bank transfer, cheque) to the payslip.
    _inherit = 'hr.payslip'

    _columns = {
        'payment_mode': fields.char('Mode de paiement'),
    }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
nfoti/StarCluster | refs/heads/develop | starcluster/commands/s3image.py | 19 | # Copyright 2009-2014 Justin Riley
#
# This file is part of StarCluster.
#
# StarCluster is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# StarCluster is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with StarCluster. If not, see <http://www.gnu.org/licenses/>.
import sys
import time
import warnings
from starcluster import exception
from starcluster.logger import log
from completers import InstanceCompleter
class CmdS3Image(InstanceCompleter):
    """
    s3image [options] <instance-id> <image_name> [<bucket>]

    Create a new instance-store (S3) AMI from a running EC2 instance

    Example:

        $ starcluster s3image i-999999 my-new-image mybucket

    NOTE: It should now be safe to create an image from an instance launched by
    StarCluster. If you have issues please submit a bug report to the mailing
    list.
    """
    names = ['s3image', 'simg', 'createimage']

    # Remembered so the KeyboardInterrupt handler can report which image
    # creation was cancelled.
    bucket = None
    image_name = None

    def addopts(self, parser):
        """Register the s3image-specific command-line options on parser."""
        parser.add_option(
            "-d", "--description", dest="description", action="store",
            type="string",
            default="Image created @ %s" % time.strftime("%Y%m%d%H%M"),
            help="short description of this AMI")
        parser.add_option(
            "-k", "--kernel-id", dest="kernel_id", action="store",
            type="string", default=None,
            help="kernel id for the new AMI")
        parser.add_option(
            "-R", "--ramdisk-id", dest="ramdisk_id", action="store",
            type="string", default=None,
            help="ramdisk id for the new AMI")
        parser.add_option(
            "-r", "--remove-image-files", dest="remove_image_files",
            action="store_true", default=False,
            help="Remove generated image files on the "
            "instance after registering (for S3 AMIs)")

    def execute(self, args):
        """Create the S3 AMI.

        args must be exactly (instance_id, image_name, bucket); anything
        else aborts via the option parser's error().
        """
        if "createimage" in sys.argv:
            warnings.warn("createimage is deprecated and will go away in the "
                          "next release. please use the s3image/ebsimage "
                          "commands instead", DeprecationWarning)
        if len(args) != 3:
            self.parser.error(
                'you must specify an instance-id, image name, and bucket')
        # (Removed a dead "bucket = None" assignment that was immediately
        # overwritten by the unpacking below.)
        instanceid, image_name, bucket = args
        self.bucket = bucket
        self.image_name = image_name
        i = self.ec2.get_instance(instanceid)
        key_location = self.cfg.get_key(i.key_name).get('key_location')
        aws_user_id = self.cfg.aws.get('aws_user_id')
        ec2_cert = self.cfg.aws.get('ec2_cert')
        ec2_private_key = self.cfg.aws.get('ec2_private_key')
        try:
            ami_id = self.ec2.create_s3_image(instanceid, key_location,
                                              aws_user_id, ec2_cert,
                                              ec2_private_key, bucket,
                                              image_name=image_name,
                                              **self.specified_options_dict)
            log.info("Your new AMI id is: %s" % ami_id)
        except KeyboardInterrupt:
            raise exception.CancelledS3ImageCreation(self.bucket,
                                                     self.image_name)
|
edcast-inc/edx-platform-edcast | refs/heads/master | lms/djangoapps/shoppingcart/migrations/0011_auto__add_invoice__add_field_courseregistrationcode_invoice.py | 114 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
    """Apply: create the shoppingcart_invoice table and link
    CourseRegistrationCode to it via a nullable foreign key."""
    # Adding model 'Invoice'
    db.create_table('shoppingcart_invoice', (
        ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
        ('total_amount', self.gf('django.db.models.fields.FloatField')()),
        ('purchaser_name', self.gf('django.db.models.fields.CharField')(max_length=255, db_index=True)),
        ('purchaser_contact', self.gf('django.db.models.fields.CharField')(max_length=255)),
        ('purchaser_email', self.gf('django.db.models.fields.CharField')(max_length=255)),
        ('tax_id', self.gf('django.db.models.fields.CharField')(max_length=64, null=True)),
        ('reference', self.gf('django.db.models.fields.CharField')(max_length=255, null=True)),
    ))
    db.send_create_signal('shoppingcart', ['Invoice'])

    # Adding field 'CourseRegistrationCode.invoice'
    # Nullable so existing registration-code rows stay valid.
    db.add_column('shoppingcart_courseregistrationcode', 'invoice',
                  self.gf('django.db.models.fields.related.ForeignKey')(to=orm['shoppingcart.Invoice'], null=True),
                  keep_default=False)
def backwards(self, orm):
# Deleting model 'Invoice'
db.delete_table('shoppingcart_invoice')
# Deleting field 'CourseRegistrationCode.invoice'
db.delete_column('shoppingcart_courseregistrationcode', 'invoice_id')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'shoppingcart.certificateitem': {
'Meta': {'object_name': 'CertificateItem', '_ormbases': ['shoppingcart.OrderItem']},
'course_enrollment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['student.CourseEnrollment']"}),
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '128', 'db_index': 'True'}),
'mode': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'orderitem_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shoppingcart.OrderItem']", 'unique': 'True', 'primary_key': 'True'})
},
'shoppingcart.coupon': {
'Meta': {'object_name': 'Coupon'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 6, 0, 0)'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'percentage_discount': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'shoppingcart.couponredemption': {
'Meta': {'object_name': 'CouponRedemption'},
'coupon': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Coupon']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Order']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'shoppingcart.courseregistrationcode': {
'Meta': {'object_name': 'CourseRegistrationCode'},
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 6, 0, 0)'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_by_user'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invoice': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Invoice']", 'null': 'True'}),
'transaction_group_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'shoppingcart.invoice': {
'Meta': {'object_name': 'Invoice'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'purchaser_contact': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'purchaser_email': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'purchaser_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'reference': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'tax_id': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'total_amount': ('django.db.models.fields.FloatField', [], {})
},
'shoppingcart.order': {
'Meta': {'object_name': 'Order'},
'bill_to_cardtype': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'bill_to_ccnum': ('django.db.models.fields.CharField', [], {'max_length': '8', 'blank': 'True'}),
'bill_to_city': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'bill_to_country': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'bill_to_first': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'bill_to_last': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'bill_to_postalcode': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'bill_to_state': ('django.db.models.fields.CharField', [], {'max_length': '8', 'blank': 'True'}),
'bill_to_street1': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'bill_to_street2': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'currency': ('django.db.models.fields.CharField', [], {'default': "'usd'", 'max_length': '8'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'processor_reply_dump': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'purchase_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'refunded_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'cart'", 'max_length': '32'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'shoppingcart.orderitem': {
'Meta': {'object_name': 'OrderItem'},
'currency': ('django.db.models.fields.CharField', [], {'default': "'usd'", 'max_length': '8'}),
'fulfilled_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line_desc': ('django.db.models.fields.CharField', [], {'default': "'Misc. Item'", 'max_length': '1024'}),
'list_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '30', 'decimal_places': '2'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Order']"}),
'qty': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'refund_requested_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'report_comments': ('django.db.models.fields.TextField', [], {'default': "''"}),
'service_fee': ('django.db.models.fields.DecimalField', [], {'default': '0.0', 'max_digits': '30', 'decimal_places': '2'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'cart'", 'max_length': '32', 'db_index': 'True'}),
'unit_cost': ('django.db.models.fields.DecimalField', [], {'default': '0.0', 'max_digits': '30', 'decimal_places': '2'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'shoppingcart.paidcourseregistration': {
'Meta': {'object_name': 'PaidCourseRegistration', '_ormbases': ['shoppingcart.OrderItem']},
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '128', 'db_index': 'True'}),
'mode': ('django.db.models.fields.SlugField', [], {'default': "'honor'", 'max_length': '50'}),
'orderitem_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shoppingcart.OrderItem']", 'unique': 'True', 'primary_key': 'True'})
},
'shoppingcart.paidcourseregistrationannotation': {
'Meta': {'object_name': 'PaidCourseRegistrationAnnotation'},
'annotation': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'course_id': ('xmodule_django.models.CourseKeyField', [], {'unique': 'True', 'max_length': '128', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'shoppingcart.registrationcoderedemption': {
'Meta': {'object_name': 'RegistrationCodeRedemption'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Order']"}),
'redeemed_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 6, 0, 0)', 'null': 'True'}),
'redeemed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'registration_code': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.CourseRegistrationCode']"})
},
'student.courseenrollment': {
'Meta': {'ordering': "('user', 'course_id')", 'unique_together': "(('user', 'course_id'),)", 'object_name': 'CourseEnrollment'},
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'mode': ('django.db.models.fields.CharField', [], {'default': "'honor'", 'max_length': '100'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
}
}
complete_apps = ['shoppingcart']
|
xuxiaoxin/micropython | refs/heads/master | tests/basics/self_type_check.py | 104 | # make sure type of first arg (self) to a builtin method is checked
# Accessing the unbound method itself must not raise.
list.append

# Calling the unbound method with no args, with a non-list self, or with a
# non-list self plus a value must all raise TypeError.
for args in ((), (1,), (1, 2)):
    try:
        list.append(*args)
    except TypeError:
        print("TypeError")

# A genuine list as self works and mutates in place.
l = []
list.append(l, 2)
print(l)

# The same checks apply when the method is fetched via getattr().
try:
    getattr(list, "append")(1, 2)
except TypeError:
    print("TypeError")

l = []
getattr(list, "append")(l, 2)
print(l)
|
sunjiawe/Stino | refs/heads/ST4ArduinoIDE | stino/pyarduino/base/zeroconf.py | 14 | from __future__ import absolute_import, division, print_function, unicode_literals
""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine
Copyright 2003 Paul Scott-Murphy, 2014 William McBrine
This module provides a framework for the use of DNS Service Discovery
using IP multicast.
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301
USA
"""
__author__ = 'Paul Scott-Murphy, William McBrine'
__maintainer__ = 'Jakub Stasiak <jakub@stasiak.at>'
__version__ = '0.15.1'
__license__ = 'LGPL'
import time
import struct
import socket
import threading
import select
import traceback
from functools import reduce
__all__ = ["Zeroconf", "ServiceInfo", "ServiceBrowser"]
# Python 2/3 compatibility shims: bind version-neutral names so the rest
# of the module does not need to branch on interpreter version.
try:
    xrange = xrange
except NameError:
    # Python 3: range is already lazy
    xrange = range

try:
    unicode
except NameError:
    # Python 3: str is the text type
    unicode = str

if isinstance(chr(8), unicode):
    # Python 3: chr() returns text, so build a one-byte bytes object instead
    byte_chr = lambda num: bytes([num])
else:
    byte_chr = chr

if isinstance(bytes([8])[0], int):
    # Python 3: indexing bytes already yields ints
    byte_ord = lambda x: x
else:
    byte_ord = ord

try:
    raw_input = raw_input
except NameError:
    # Python 3 renamed raw_input to input
    raw_input = input

# hook for threads
# Module-global shutdown flag polled by the Engine and Reaper threads.
_GLOBAL_DONE = False

# Some timing constants (milliseconds between retransmissions/checks)
_UNREGISTER_TIME = 125
_CHECK_TIME = 175
_REGISTER_TIME = 225
_LISTENER_TIME = 200
_BROWSER_TIME = 500

# Some DNS constants
_MDNS_ADDR = '224.0.0.251'
_MDNS_PORT = 5353
_DNS_PORT = 53
_DNS_TTL = 60 * 60 # one hour default TTL

_MAX_MSG_TYPICAL = 1460 # unused
_MAX_MSG_ABSOLUTE = 8972

_FLAGS_QR_MASK = 0x8000 # query response mask
_FLAGS_QR_QUERY = 0x0000 # query
_FLAGS_QR_RESPONSE = 0x8000 # response

_FLAGS_AA = 0x0400 # Authorative answer
_FLAGS_TC = 0x0200 # Truncated
_FLAGS_RD = 0x0100 # Recursion desired
# NOTE(review): _FLAGS_RA equals _FLAGS_QR_MASK (0x8000); the RA header bit
# is conventionally 0x0080 -- confirm this value is intentional (the
# constant is not referenced in this chunk).
_FLAGS_RA = 0x8000 # Recursion available

_FLAGS_Z = 0x0040 # Zero
_FLAGS_AD = 0x0020 # Authentic data
_FLAGS_CD = 0x0010 # Checking disabled

# DNS record classes
_CLASS_IN = 1
_CLASS_CS = 2
_CLASS_CH = 3
_CLASS_HS = 4
_CLASS_NONE = 254
_CLASS_ANY = 255
_CLASS_MASK = 0x7FFF
_CLASS_UNIQUE = 0x8000

# DNS record types
_TYPE_A = 1
_TYPE_NS = 2
_TYPE_MD = 3
_TYPE_MF = 4
_TYPE_CNAME = 5
_TYPE_SOA = 6
_TYPE_MB = 7
_TYPE_MG = 8
_TYPE_MR = 9
_TYPE_NULL = 10
_TYPE_WKS = 11
_TYPE_PTR = 12
_TYPE_HINFO = 13
_TYPE_MINFO = 14
_TYPE_MX = 15
_TYPE_TXT = 16
_TYPE_AAAA = 28
_TYPE_SRV = 33
_TYPE_ANY = 255

# Mapping constants to names
_CLASSES = {_CLASS_IN: "in",
            _CLASS_CS: "cs",
            _CLASS_CH: "ch",
            _CLASS_HS: "hs",
            _CLASS_NONE: "none",
            _CLASS_ANY: "any"}

_TYPES = {_TYPE_A: "a",
          _TYPE_NS: "ns",
          _TYPE_MD: "md",
          _TYPE_MF: "mf",
          _TYPE_CNAME: "cname",
          _TYPE_SOA: "soa",
          _TYPE_MB: "mb",
          _TYPE_MG: "mg",
          _TYPE_MR: "mr",
          _TYPE_NULL: "null",
          _TYPE_WKS: "wks",
          _TYPE_PTR: "ptr",
          _TYPE_HINFO: "hinfo",
          _TYPE_MINFO: "minfo",
          _TYPE_MX: "mx",
          _TYPE_TXT: "txt",
          _TYPE_AAAA: "quada",
          _TYPE_SRV: "srv",
          _TYPE_ANY: "any"}
# utility functions
def currentTimeMillis():
    """Return the current system time in milliseconds since the epoch."""
    millis_per_second = 1000
    return time.time() * millis_per_second
# Exceptions
class NonLocalNameException(Exception):
    """Raised for names outside the ".local." domain (the only check in this
    chunk, in DNSQuestion.__init__, is currently commented out)."""
    pass
class NonUniqueNameException(Exception):
    """Signals a name conflict (no raise sites in this chunk; presumably
    raised by the registration logic elsewhere in the module)."""
    pass
class NamePartTooLongException(Exception):
    """Raised by DNSOutgoing.writeUTF when a single name label exceeds
    64 encoded bytes."""
    pass
class AbstractMethodException(Exception):
    """Raised when an abstract method (e.g. DNSRecord.write) is called on
    the base class instead of a subclass override."""
    pass
class BadTypeInNameException(Exception):
    """Signals a malformed service type in a name (no raise sites in this
    chunk; presumably raised by service-name validation elsewhere)."""
    pass
# implementation classes
class DNSEntry(object):
    """Base DNS entry: a name plus record type and class.

    The wire-format class value is split on construction into the class
    proper (masked with _CLASS_MASK) and the cache-flush/"unique" bit
    (_CLASS_UNIQUE). ``key`` is the lower-cased name used for cache lookup.
    """
    def __init__(self, name, type, clazz):
        self.key = name.lower()
        self.name = name
        self.type = type
        self.clazz = clazz & _CLASS_MASK
        self.unique = bool(clazz & _CLASS_UNIQUE)
    def __eq__(self, other):
        """Entries compare equal on name, type and class."""
        if not isinstance(other, DNSEntry):
            return False
        return (self.name == other.name
                and self.type == other.type
                and self.clazz == other.clazz)
    def __ne__(self, other):
        """Inverse of __eq__."""
        return not (self == other)
    def getClazz(self, clazz):
        """Return the textual class name, or "?(n)" for unknown values."""
        try:
            return _CLASSES[clazz]
        except KeyError:
            return "?(%s)" % clazz
    def getType(self, t):
        """Return the textual type name, or "?(n)" for unknown values."""
        try:
            return _TYPES[t]
        except KeyError:
            return "?(%s)" % t
    def toString(self, hdr, other):
        """Render as "hdr[type,class(-unique),name(,other)]"."""
        parts = [hdr, "[", self.getType(self.type), ",",
                 self.getClazz(self.clazz)]
        parts.append("-unique," if self.unique else ",")
        parts.append(self.name)
        if other is not None:
            parts.append(",%s]" % (other,))
        else:
            parts.append("]")
        return "".join(parts)
class DNSQuestion(DNSEntry):
    """A single question from the question section of a DNS message."""
    def __init__(self, name, type, clazz):
        # Historical ".local."-only restriction, intentionally disabled:
        # if not name.endswith(".local."):
        #     raise NonLocalNameException
        DNSEntry.__init__(self, name, type, clazz)
    def answeredBy(self, rec):
        """Return True if record *rec* answers this question."""
        if self.clazz != rec.clazz:
            return False
        if self.type != rec.type and self.type != _TYPE_ANY:
            return False
        return self.name == rec.name
    def __repr__(self):
        """String representation"""
        return DNSEntry.toString(self, "question", None)
class DNSRecord(DNSEntry):
    """A DNS entry that additionally carries a TTL and a creation time.

    All times are in milliseconds, matching currentTimeMillis().
    """
    def __init__(self, name, type, clazz, ttl):
        DNSEntry.__init__(self, name, type, clazz)
        self.ttl = ttl
        self.created = currentTimeMillis()
    def __eq__(self, other):
        """Records compare equal on the DNSEntry part; TTL is ignored."""
        if not isinstance(other, DNSRecord):
            return False
        return DNSEntry.__eq__(self, other)
    def suppressedBy(self, msg):
        """Return True if some answer in message *msg* makes this record
        redundant."""
        return any(self.suppressedByAnswer(answer) for answer in msg.answers)
    def suppressedByAnswer(self, other):
        """True when *other* matches this record and still has more than
        half of this record's TTL."""
        if self == other:
            return other.ttl > (self.ttl / 2)
        return False
    def getExpirationTime(self, percent):
        """Absolute time (ms) at which *percent* of the TTL has elapsed.

        ttl is in seconds, so percent-of-ttl in ms is percent * ttl * 10.
        """
        elapsed_ms = percent * self.ttl * 10
        return self.created + elapsed_ms
    def getRemainingTTL(self, now):
        """Remaining lifetime in seconds at time *now*, floored at zero."""
        remaining = (self.getExpirationTime(100) - now) / 1000
        return remaining if remaining > 0 else 0
    def isExpired(self, now):
        """True once the full TTL has elapsed at time *now*."""
        return self.getExpirationTime(100) <= now
    def isStale(self, now):
        """True once at least half the TTL has elapsed at time *now*."""
        return self.getExpirationTime(50) <= now
    def resetTTL(self, other):
        """Adopt the TTL and creation time of record *other*."""
        self.created = other.created
        self.ttl = other.ttl
    def write(self, out):
        """Abstract: subclasses serialize their rdata here."""
        raise AbstractMethodException
    def toString(self, other):
        """String representation with TTL/remaining-TTL details."""
        details = "%s/%s,%s" % (self.ttl,
                                self.getRemainingTTL(currentTimeMillis()),
                                other)
        return DNSEntry.toString(self, "record", details)
class DNSAddress(DNSRecord):
    """A DNS address (A/AAAA) record holding the packed address bytes."""
    def __init__(self, name, type, clazz, ttl, address):
        DNSRecord.__init__(self, name, type, clazz, ttl)
        self.address = address
    def write(self, out):
        """Serialize the raw packed address into the outgoing packet."""
        out.writeString(self.address)
    def __eq__(self, other):
        """Addresses compare equal on their packed bytes."""
        if not isinstance(other, DNSAddress):
            return False
        return self.address == other.address
    def __repr__(self):
        """Dotted-quad form when the bytes parse as IPv4; raw value
        otherwise."""
        try:
            return socket.inet_ntoa(self.address)
        except Exception:  # TODO stop catching all Exceptions
            return self.address
class DNSHinfo(DNSRecord):
    """A DNS host information (HINFO) record.

    Carries two character-strings describing the host: the CPU type and
    the operating system, serialized in that order.
    """
    def __init__(self, name, type, clazz, ttl, cpu, os):
        DNSRecord.__init__(self, name, type, clazz, ttl)
        self.cpu = cpu
        self.os = os
    def write(self, out):
        """Used in constructing an outgoing packet"""
        out.writeString(self.cpu)
        # BUG FIX: previously wrote the non-existent attribute ``self.oso``,
        # which raised AttributeError whenever an HINFO record was
        # serialized (__init__ stores the value as ``self.os``).
        out.writeString(self.os)
    def __eq__(self, other):
        """Tests equality on cpu and os"""
        return (isinstance(other, DNSHinfo) and
                self.cpu == other.cpu and self.os == other.os)
    def __repr__(self):
        """String representation"""
        return self.cpu + " " + self.os
class DNSPointer(DNSRecord):
    """A DNS pointer (PTR) record whose rdata is another name (*alias*)."""
    def __init__(self, name, type, clazz, ttl, alias):
        DNSRecord.__init__(self, name, type, clazz, ttl)
        self.alias = alias
    def write(self, out):
        """Serialize the alias as a (possibly compressed) domain name."""
        out.writeName(self.alias)
    def __eq__(self, other):
        """Pointers compare equal on their alias."""
        if not isinstance(other, DNSPointer):
            return False
        return self.alias == other.alias
    def __repr__(self):
        """String representation"""
        return self.toString(self.alias)
class DNSText(DNSRecord):
    """A DNS text (TXT) record.

    *text* is the raw rdata payload and must be bytes (or None).
    """
    def __init__(self, name, type_, clazz, ttl, text):
        assert isinstance(text, (bytes, type(None)))
        DNSRecord.__init__(self, name, type_, clazz, ttl)
        self.text = text
    def write(self, out):
        """Used in constructing an outgoing packet"""
        out.writeString(self.text)
    def __eq__(self, other):
        """Tests equality on text"""
        return isinstance(other, DNSText) and self.text == other.text
    def __repr__(self):
        """String representation, truncating long payloads."""
        if len(self.text) > 10:
            # BUG FIX: the ellipsis must be bytes -- ``self.text`` is
            # asserted to be bytes, and with unicode_literals in effect
            # ``"..."`` is text, so bytes + str raised TypeError on
            # Python 3.
            return self.toString(self.text[:7] + b"...")
        else:
            return self.toString(self.text)
class DNSService(DNSRecord):
    """A DNS service (SRV) record: priority, weight, port and target
    server name."""
    def __init__(self, name, type, clazz, ttl, priority, weight, port, server):
        DNSRecord.__init__(self, name, type, clazz, ttl)
        self.priority = priority
        self.weight = weight
        self.port = port
        self.server = server
    def write(self, out):
        """Serialize priority, weight and port, then the target name."""
        for short_field in (self.priority, self.weight, self.port):
            out.writeShort(short_field)
        out.writeName(self.server)
    def __eq__(self, other):
        """SRV records compare equal on all four service fields."""
        if not isinstance(other, DNSService):
            return False
        return (self.priority == other.priority and
                self.weight == other.weight and
                self.port == other.port and
                self.server == other.server)
    def __repr__(self):
        """String representation"""
        return self.toString("%s:%s" % (self.server, self.port))
class DNSIncoming(object):
    """Object representation of an incoming DNS packet.

    Parsing is eager: the constructor decodes the header, question
    section, and all resource records, advancing ``self.offset`` through
    ``self.data`` as it goes.
    """
    def __init__(self, data):
        """Constructor from string holding bytes of packet"""
        self.offset = 0
        self.data = data
        self.questions = []
        self.answers = []
        self.numQuestions = 0
        self.numAnswers = 0
        self.numAuthorities = 0
        self.numAdditionals = 0
        self.readHeader()
        self.readQuestions()
        self.readOthers()
    def unpack(self, format):
        """Unpack *format* at the current offset and advance past it."""
        length = struct.calcsize(format)
        info = struct.unpack(format, self.data[self.offset:self.offset + length])
        self.offset += length
        return info
    def readHeader(self):
        """Reads header portion of packet"""
        # Six network-order unsigned shorts: id, flags, and the four
        # section counts.
        (self.id, self.flags, self.numQuestions, self.numAnswers,
         self.numAuthorities, self.numAdditionals) = self.unpack(b'!6H')
    def readQuestions(self):
        """Reads questions section of packet"""
        for i in xrange(self.numQuestions):
            name = self.readName()
            type, clazz = self.unpack(b'!HH')
            question = DNSQuestion(name, type, clazz)
            self.questions.append(question)
    def readInt(self):
        """Reads an integer from the packet"""
        return self.unpack(b'!I')[0]
    def readCharacterString(self):
        """Reads a character string from the packet"""
        # A character-string is a single length byte followed by that many
        # bytes of data.
        length = byte_ord(self.data[self.offset])
        self.offset += 1
        return self.readString(length)
    def readString(self, length):
        """Reads a string of a given length from the packet"""
        info = self.data[self.offset:self.offset + length]
        self.offset += length
        return info
    def readUnsignedShort(self):
        """Reads an unsigned short from the packet"""
        return self.unpack(b'!H')[0]
    def readOthers(self):
        """Reads the answers, authorities and additionals section of the
        packet"""
        n = self.numAnswers + self.numAuthorities + self.numAdditionals
        for i in xrange(n):
            domain = self.readName()
            type, clazz, ttl, length = self.unpack(b'!HHiH')
            rec = None
            if type == _TYPE_A:
                rec = DNSAddress(domain, type, clazz, ttl, self.readString(4))
            elif type == _TYPE_CNAME or type == _TYPE_PTR:
                rec = DNSPointer(domain, type, clazz, ttl, self.readName())
            elif type == _TYPE_TXT:
                rec = DNSText(domain, type, clazz, ttl, self.readString(length))
            elif type == _TYPE_SRV:
                rec = DNSService(domain, type, clazz, ttl,
                    self.readUnsignedShort(), self.readUnsignedShort(),
                    self.readUnsignedShort(), self.readName())
            elif type == _TYPE_HINFO:
                rec = DNSHinfo(domain, type, clazz, ttl,
                    self.readCharacterString(), self.readCharacterString())
            elif type == _TYPE_AAAA:
                rec = DNSAddress(domain, type, clazz, ttl, self.readString(16))
            else:
                # Try to ignore types we don't know about
                # Skip the payload for the resource record so the next
                # records can be parsed correctly
                self.offset += length
            if rec is not None:
                self.answers.append(rec)
    def isQuery(self):
        """Returns true if this is a query"""
        return (self.flags & _FLAGS_QR_MASK) == _FLAGS_QR_QUERY
    def isResponse(self):
        """Returns true if this is a response"""
        return (self.flags & _FLAGS_QR_MASK) == _FLAGS_QR_RESPONSE
    def readUTF(self, offset, length):
        """Reads a UTF-8 string of a given length from the packet"""
        return unicode(self.data[offset:offset + length], 'utf-8', 'replace')
    def readName(self):
        """Reads a domain name from the packet.

        Handles DNS name compression: a length byte whose top two bits are
        set (0xC0) marks a 14-bit pointer to an earlier offset in the
        packet. ``first`` tracks the lowest offset seen to detect pointer
        loops; ``next`` remembers where parsing resumes after the first
        pointer jump.
        """
        result = ''
        off = self.offset
        next = -1
        first = off
        while True:
            length = byte_ord(self.data[off])
            off += 1
            if length == 0:
                break
            t = length & 0xC0
            if t == 0x00:
                # Plain label: *length* bytes of UTF-8 text.
                result = ''.join((result, self.readUTF(off, length) + '.'))
                off += length
            elif t == 0xC0:
                # Compression pointer: follow it, guarding against cycles.
                if next < 0:
                    next = off + 1
                off = ((length & 0x3F) << 8) | byte_ord(self.data[off])
                if off >= first:
                    # TODO raise more specific exception
                    raise Exception("Bad domain name (circular) at %s" % (off,))
                first = off
            else:
                # TODO raise more specific exception
                raise Exception("Bad domain name at %s" % (off,))
        if next >= 0:
            self.offset = next
        else:
            self.offset = off
        return result
class DNSOutgoing(object):
    """Object representation of an outgoing DNS packet.

    Sections are accumulated via the add* methods; packet() serializes
    everything and prepends the header. ``self.size`` tracks the byte
    length written so far (starting at 12 for the header) so that name
    compression offsets are correct.
    """
    def __init__(self, flags, multicast=True):
        self.finished = False
        self.id = 0
        self.multicast = multicast
        self.flags = flags
        self.names = {}  # name -> offset of first occurrence (for compression)
        self.data = []   # list of byte chunks, joined in packet()
        self.size = 12   # header is 12 bytes and is prepended in packet()
        self.questions = []
        self.answers = []
        self.authorities = []
        self.additionals = []
    def addQuestion(self, record):
        """Adds a question"""
        self.questions.append(record)
    def addAnswer(self, inp, record):
        """Adds an answer"""
        if not record.suppressedBy(inp):
            self.addAnswerAtTime(record, 0)
    def addAnswerAtTime(self, record, now):
        """Adds an answer if if does not expire by a certain time"""
        # now == 0 means "include unconditionally with the full TTL".
        if record is not None:
            if now == 0 or not record.isExpired(now):
                self.answers.append((record, now))
    def addAuthorativeAnswer(self, record):
        """Adds an authoritative answer"""
        self.authorities.append(record)
    def addAdditionalAnswer(self, record):
        """Adds an additional answer"""
        self.additionals.append(record)
    def pack(self, format, value):
        """Append one struct-packed value and grow the running size."""
        self.data.append(struct.pack(format, value))
        self.size += struct.calcsize(format)
    def writeByte(self, value):
        """Writes a single byte to the packet"""
        self.pack(b'!c', byte_chr(value))
    def insertShort(self, index, value):
        """Inserts an unsigned short in a certain position in the packet"""
        self.data.insert(index, struct.pack(b'!H', value))
        self.size += 2
    def writeShort(self, value):
        """Writes an unsigned short to the packet"""
        self.pack(b'!H', value)
    def writeInt(self, value):
        """Writes an unsigned integer to the packet"""
        self.pack(b'!I', int(value))
    def writeString(self, value):
        """Writes a string to the packet"""
        assert isinstance(value, bytes)
        self.data.append(value)
        self.size += len(value)
    def writeUTF(self, s):
        """Writes a UTF-8 string of a given length to the packet"""
        utfstr = s.encode('utf-8')
        length = len(utfstr)
        # Single name labels are limited to 64 encoded bytes here.
        if length > 64:
            raise NamePartTooLongException
        self.writeByte(length)
        self.writeString(utfstr)
    def writeName(self, name):
        """Writes a domain name to the packet"""
        if name in self.names:
            # Find existing instance of this name in packet
            #
            index = self.names[name]
            # An index was found, so write a pointer to it
            # (two bytes with the top two bits set: 0xC0 | offset).
            #
            self.writeByte((index >> 8) | 0xC0)
            self.writeByte(index & 0xFF)
        else:
            # No record of this name already, so write it
            # out as normal, recording the location of the name
            # for future pointers to it.
            #
            self.names[name] = self.size
            parts = name.split('.')
            if parts[-1] == '':
                parts = parts[:-1]
            for part in parts:
                self.writeUTF(part)
            self.writeByte(0)
    def writeQuestion(self, question):
        """Writes a question to the packet"""
        self.writeName(question.name)
        self.writeShort(question.type)
        self.writeShort(question.clazz)
    def writeRecord(self, record, now):
        """Writes a record (answer, authoritative answer, additional) to
        the packet.

        The rdata length short is back-patched: the record body is written
        first, its byte length measured, and the length inserted in front.
        """
        self.writeName(record.name)
        self.writeShort(record.type)
        if record.unique and self.multicast:
            # Set the cache-flush bit on unique records in mDNS responses.
            self.writeShort(record.clazz | _CLASS_UNIQUE)
        else:
            self.writeShort(record.clazz)
        if now == 0:
            self.writeInt(record.ttl)
        else:
            self.writeInt(record.getRemainingTTL(now))
        index = len(self.data)
        # Adjust size for the short we will write before this record
        # (so names inside the rdata get correct compression offsets).
        #
        self.size += 2
        record.write(self)
        self.size -= 2
        length = len(b''.join(self.data[index:]))
        self.insertShort(index, length) # Here is the short we adjusted for
    def packet(self):
        """Returns a string containing the packet's bytes

        No further parts should be added to the packet once this
        is done.  Header fields are prepended one at a time at index 0,
        i.e. inserted in reverse of their on-the-wire order.
        """
        if not self.finished:
            self.finished = True
            for question in self.questions:
                self.writeQuestion(question)
            for answer, time_ in self.answers:
                self.writeRecord(answer, time_)
            for authority in self.authorities:
                self.writeRecord(authority, 0)
            for additional in self.additionals:
                self.writeRecord(additional, 0)
            self.insertShort(0, len(self.additionals))
            self.insertShort(0, len(self.authorities))
            self.insertShort(0, len(self.answers))
            self.insertShort(0, len(self.questions))
            self.insertShort(0, self.flags)
            if self.multicast:
                # Multicast DNS always uses a zero transaction id.
                self.insertShort(0, 0)
            else:
                self.insertShort(0, self.id)
        return b''.join(self.data)
class DNSCache(object):
    """A cache of DNS entries, bucketed by each entry's lower-cased key."""
    def __init__(self):
        self.cache = {}
    def add(self, entry):
        """Append *entry* to the bucket for its key, creating it if needed."""
        self.cache.setdefault(entry.key, []).append(entry)
    def remove(self, entry):
        """Remove *entry*; entries that are not cached are ignored."""
        try:
            self.cache[entry.key].remove(entry)
        except Exception:  # TODO stop catching all Exceptions
            pass
    def get(self, entry):
        """Return the cached entry equal to *entry*, or None when absent."""
        try:
            bucket = self.cache[entry.key]
            return bucket[bucket.index(entry)]
        except Exception:  # TODO stop catching all Exceptions
            return None
    def getByDetails(self, name, type, clazz):
        """Look up an entry by name, type and class; None when absent."""
        return self.get(DNSEntry(name, type, clazz))
    def entriesWithName(self, name):
        """Return the bucket for *name* (empty list when unknown)."""
        try:
            return self.cache[name]
        except Exception:  # TODO stop catching all Exceptions
            return []
    def entries(self):
        """Return a flat list of every cached entry."""
        try:
            return reduce(lambda acc, bucket: acc + bucket,
                          self.cache.values())
        except Exception:  # TODO stop catching all Exceptions
            # reduce() raises on an empty cache; report no entries.
            return []
class Engine(threading.Thread):
    """An engine wraps read access to sockets, allowing objects that
    need to receive data from sockets to be called back when the
    sockets are ready.
    A reader needs a handle_read() method, which is called when the socket
    it is interested in is ready for reading.
    Writers are not implemented here, because we only send short
    packets.
    """
    def __init__(self, zc):
        threading.Thread.__init__(self)
        self.daemon = True
        self.zc = zc
        self.readers = {} # maps socket to reader
        self.timeout = 5
        # Guards self.readers and doubles as the wake-up signal for run().
        self.condition = threading.Condition()
        # The thread starts immediately; run() loops until the module-global
        # _GLOBAL_DONE flag is set.
        self.start()
    def run(self):
        while not _GLOBAL_DONE:
            rs = self.getReaders()
            if len(rs) == 0:
                # No sockets to manage, but we wait for the timeout
                # or addition of a socket
                #
                self.condition.acquire()
                self.condition.wait(self.timeout)
                self.condition.release()
            else:
                try:
                    rr, wr, er = select.select(rs, [], [], self.timeout)
                    for socket_ in rr:
                        try:
                            self.readers[socket_].handle_read()
                        except Exception: # TODO stop catching all Exceptions
                            traceback.print_exc()
                except Exception: # TODO stop catching all Exceptions
                    pass
    def getReaders(self):
        # Fetch the reader sockets under the condition lock.
        # NOTE(review): on Python 3, dict.keys() returns a live view, not a
        # copy -- confirm callers tolerate concurrent mutation of
        # self.readers while iterating.
        result = []
        self.condition.acquire()
        result = self.readers.keys()
        self.condition.release()
        return result
    def addReader(self, reader, socket):
        # Register *reader* for *socket* and wake the run() loop.
        self.condition.acquire()
        self.readers[socket] = reader
        self.condition.notify()
        self.condition.release()
    def delReader(self, socket):
        # Unregister the reader for *socket* and wake the run() loop.
        self.condition.acquire()
        del(self.readers[socket])
        self.condition.notify()
        self.condition.release()
    def notify(self):
        # Wake the run() loop (e.g. so it can observe _GLOBAL_DONE).
        self.condition.acquire()
        self.condition.notify()
        self.condition.release()
class Listener(object):
    """A Listener is used by this module to listen on the multicast
    group to which DNS messages are sent, allowing the implementation
    to cache information as it arrives.
    It requires registration with an Engine object in order to have
    the read() method called when a socket is available for reading."""
    def __init__(self, zc):
        # zc is the owning Zeroconf instance; register with its engine so
        # handle_read() runs whenever the mDNS socket becomes readable.
        self.zc = zc
        self.zc.engine.addReader(self, self.zc.socket)
    def handle_read(self):
        # Pull one datagram off the multicast socket and dispatch it as
        # either a query or a response.
        try:
            data, (addr, port) = self.zc.socket.recvfrom(_MAX_MSG_ABSOLUTE)
        except socket.error as e:
            # If the socket was closed by another thread -- which happens
            # regularly on shutdown -- an EBADF exception is thrown here.
            # Ignore it.
            if e.errno == socket.EBADF:
                return
            else:
                raise e
        self.data = data
        msg = DNSIncoming(data)
        if msg.isQuery():
            # Always multicast responses
            #
            if port == _MDNS_PORT:
                self.zc.handleQuery(msg, _MDNS_ADDR, _MDNS_PORT)
            # If it's not a multicast query, reply via unicast
            # and multicast
            #
            elif port == _DNS_PORT:
                self.zc.handleQuery(msg, addr, port)
                self.zc.handleQuery(msg, _MDNS_ADDR, _MDNS_PORT)
        else:
            self.zc.handleResponse(msg)
class Reaper(threading.Thread):
    """A Reaper is used by this module to remove cache entries that
    have expired."""
    def __init__(self, zc):
        threading.Thread.__init__(self)
        # Daemon thread: must not keep the interpreter alive on exit.
        self.daemon = True
        self.zc = zc
        self.start()
    def run(self):
        # Wake roughly every 10 seconds (zc.wait takes milliseconds) and
        # purge expired records from the cache.
        while True:
            self.zc.wait(10 * 1000)
            if _GLOBAL_DONE:
                return
            now = currentTimeMillis()
            for record in self.zc.cache.entries():
                if record.isExpired(now):
                    # Notify listeners of the expiry first, then drop the
                    # record from the cache.
                    self.zc.updateRecord(now, record)
                    self.zc.cache.remove(record)
class ServiceBrowser(threading.Thread):
    """Used to browse for a service of a specific type.
    The listener object will have its addService() and
    removeService() methods called when this browser
    discovers changes in the services availability."""
    def __init__(self, zc, type, listener):
        """Creates a browser for a specific type"""
        threading.Thread.__init__(self)
        self.daemon = True
        self.zc = zc
        self.type = type
        self.listener = listener
        self.services = {}  # alias (lowercased) -> PTR record
        self.nextTime = currentTimeMillis()  # when to send the next query
        self.delay = _BROWSER_TIME  # query backoff, doubled up to 20s
        self.list = []  # queued add/remove callbacks, run on this thread
        self.done = False
        self.zc.addListener(self, DNSQuestion(self.type, _TYPE_PTR, _CLASS_IN))
        self.start()
    def updateRecord(self, zc, now, record):
        """Callback invoked by Zeroconf when new information arrives.
        Updates information required by browser in the Zeroconf cache."""
        # Only PTR records for the browsed type are of interest here.
        if record.type == _TYPE_PTR and record.name == self.type:
            expired = record.isExpired(now)
            try:
                # Known service: refresh its TTL, or queue a removeService
                # callback if the record arrived already expired.
                oldrecord = self.services[record.alias.lower()]
                if not expired:
                    oldrecord.resetTTL(record)
                else:
                    del(self.services[record.alias.lower()])
                    callback = lambda x: self.listener.removeService(x,
                        self.type, record.alias)
                    self.list.append(callback)
                    return
            except Exception:  # TODO stop catching all Exceptions
                # Unknown service (KeyError above): queue an addService
                # callback unless the record is already expired.
                if not expired:
                    self.services[record.alias.lower()] = record
                    callback = lambda x: self.listener.addService(x,
                        self.type, record.alias)
                    self.list.append(callback)
            # Re-query before the record reaches 75% of its TTL.
            expires = record.getExpirationTime(75)
            if expires < self.nextTime:
                self.nextTime = expires
    def cancel(self):
        # Flag the run() loop to exit and wake any waiters.
        self.done = True
        self.zc.notifyAll()
    def run(self):
        # Periodically (with exponential backoff, capped at 20s) send a
        # PTR query for the browsed type, and dispatch queued add/remove
        # callbacks on this thread.
        while True:
            event = None
            now = currentTimeMillis()
            if len(self.list) == 0 and self.nextTime > now:
                self.zc.wait(self.nextTime - now)
            if _GLOBAL_DONE or self.done:
                return
            now = currentTimeMillis()
            if self.nextTime <= now:
                out = DNSOutgoing(_FLAGS_QR_QUERY)
                out.addQuestion(DNSQuestion(self.type, _TYPE_PTR, _CLASS_IN))
                # Known-answer suppression: include unexpired records we
                # already hold so responders stay quiet about them.
                for record in self.services.values():
                    if not record.isExpired(now):
                        out.addAnswerAtTime(record, now)
                self.zc.send(out)
                self.nextTime = now + self.delay
                self.delay = min(20 * 1000, self.delay * 2)
            # Run at most one queued listener callback per iteration.
            if len(self.list) > 0:
                event = self.list.pop(0)
            if event is not None:
                event(self.zc)
class ServiceInfo(object):
    """Service information"""
    def __init__(self, type, name, address=None, port=None, weight=0,
                 priority=0, properties=None, server=None):
        """Create a service description.
        type: fully qualified service type name
        name: fully qualified service name
        address: IP address as unsigned short, network byte order
        port: port that the service runs on
        weight: weight of the service
        priority: priority of the service
        properties: dictionary of properties (or a string holding the
        bytes for the text field)
        server: fully qualified name for service host (defaults to name)"""
        # The instance name must end with its service type.
        if not name.endswith(type):
            raise BadTypeInNameException
        self.type = type
        self.name = name
        self.address = address
        self.port = port
        self.weight = weight
        self.priority = priority
        if server:
            self.server = server
        else:
            self.server = name
        self.setProperties(properties)
    def setProperties(self, properties):
        """Sets properties and text of this info from a dictionary"""
        # Encode a dict into the DNS TXT wire format: a sequence of
        # one-byte-length-prefixed 'key=value' byte strings.
        if isinstance(properties, dict):
            self.properties = properties
            list = []
            result = b''
            for key in properties:
                value = properties[key]
                if isinstance(key, unicode):
                    key = key.encode('utf-8')
                # None -> bare key; text -> utf-8; int -> 'true'/'false'.
                if value is None:
                    suffix = b''
                elif isinstance(value, unicode):
                    suffix = value.encode('utf-8')
                elif isinstance(value, int):
                    if value:
                        suffix = b'true'
                    else:
                        suffix = b'false'
                else:
                    suffix = b''
                list.append(b'='.join((key, suffix)))
            for item in list:
                # Prefix each entry with its one-byte length.
                result = b''.join((result, byte_chr(len(item)), item))
            self.text = result
        else:
            # Not a dict: assume it is already an encoded text field.
            self.text = properties
    def setText(self, text):
        """Sets properties and text given a text field"""
        # Inverse of setProperties(): parse the TXT wire format back into
        # a properties dict. Any parse failure leaves properties as None.
        self.text = text
        try:
            result = {}
            end = len(text)
            index = 0
            strs = []
            # First pass: split the blob into length-prefixed strings.
            while index < end:
                length = byte_ord(text[index])
                index += 1
                strs.append(text[index:index + length])
                index += length
            # Second pass: split each entry at the first '='.
            for s in strs:
                try:
                    key, value = s.split('=', 1)
                    if value == 'true':
                        value = True
                    elif value == 'false' or not value:
                        value = False
                except Exception:  # TODO stop catching all Exceptions
                    # No equals sign at all
                    key = s
                    value = False
                # Only update non-existent properties
                if key and result.get(key) is None:
                    result[key] = value
            self.properties = result
        except Exception:  # TODO stop catching all Exceptions
            traceback.print_exc()
            self.properties = None
    def getType(self):
        """Type accessor"""
        return self.type
    def getName(self):
        """Name accessor: the instance name with the type suffix removed."""
        if self.type is not None and self.name.endswith("." + self.type):
            return self.name[:len(self.name) - len(self.type) - 1]
        return self.name
    def getAddress(self):
        """Address accessor"""
        return self.address
    def getPort(self):
        """Port accessor"""
        return self.port
    def getPriority(self):
        """Priority accessor"""
        return self.priority
    def getWeight(self):
        """Weight accessor"""
        return self.weight
    def getProperties(self):
        """Properties accessor"""
        return self.properties
    def getText(self):
        """Text accessor"""
        return self.text
    def getServer(self):
        """Server accessor"""
        return self.server
    def updateRecord(self, zc, now, record):
        """Updates service information from a DNS record"""
        # Fill in address/server/port/text from matching A, SRV and TXT
        # records as they arrive.
        if record is not None and not record.isExpired(now):
            if record.type == _TYPE_A:
                # if record.name == self.name:
                if record.name == self.server:
                    self.address = record.address
            elif record.type == _TYPE_SRV:
                if record.name == self.name:
                    self.server = record.server
                    self.port = record.port
                    self.weight = record.weight
                    self.priority = record.priority
                    # self.address = None
                    # The SRV record names the host; try to resolve its
                    # address from any cached A record.
                    self.updateRecord(zc, now,
                        zc.cache.getByDetails(self.server, _TYPE_A, _CLASS_IN))
            elif record.type == _TYPE_TXT:
                if record.name == self.name:
                    self.setText(record.text)
    def request(self, zc, timeout):
        """Returns true if the service could be discovered on the
        network, and updates this object with details discovered.
        """
        # Poll for SRV/TXT/A records with exponentially increasing delay
        # until all details are known or *timeout* milliseconds elapse.
        now = currentTimeMillis()
        delay = _LISTENER_TIME
        next = now + delay
        last = now + timeout
        result = False
        try:
            zc.addListener(self, DNSQuestion(self.name, _TYPE_ANY, _CLASS_IN))
            while (self.server is None or self.address is None or
                   self.text is None):
                if last <= now:
                    return False
                if next <= now:
                    out = DNSOutgoing(_FLAGS_QR_QUERY)
                    # Pair each question with any cached answer we already
                    # have (known-answer suppression).
                    out.addQuestion(DNSQuestion(self.name, _TYPE_SRV,
                                                _CLASS_IN))
                    out.addAnswerAtTime(zc.cache.getByDetails(self.name,
                        _TYPE_SRV, _CLASS_IN), now)
                    out.addQuestion(DNSQuestion(self.name, _TYPE_TXT,
                                                _CLASS_IN))
                    out.addAnswerAtTime(zc.cache.getByDetails(self.name,
                        _TYPE_TXT, _CLASS_IN), now)
                    # The A question needs the host name from the SRV
                    # record, so only ask once we know it.
                    if self.server is not None:
                        out.addQuestion(DNSQuestion(self.server,
                                                    _TYPE_A, _CLASS_IN))
                        out.addAnswerAtTime(zc.cache.getByDetails(self.server,
                            _TYPE_A, _CLASS_IN), now)
                    zc.send(out)
                    next = now + delay
                    delay = delay * 2
                zc.wait(min(next, last) - now)
                now = currentTimeMillis()
            result = True
        finally:
            # Always unregister, whether we succeeded or timed out.
            zc.removeListener(self)
        return result
    def __eq__(self, other):
        """Tests equality of service name"""
        if isinstance(other, ServiceInfo):
            return other.name == self.name
        return False
    def __ne__(self, other):
        """Non-equality test"""
        return not self.__eq__(other)
    def __repr__(self):
        """String representation"""
        result = "service[%s,%s:%s," % (self.name,
            socket.inet_ntoa(self.getAddress()), self.port)
        if self.text is None:
            result += "None"
        else:
            if len(self.text) < 20:
                result += self.text
            else:
                result += self.text[:17] + "..."
        result += "]"
        return result
class Zeroconf(object):
    """Implementation of Zeroconf Multicast DNS Service Discovery
    Supports registration, unregistration, queries and browsing.
    """
    def __init__(self, bindaddress=None):
        """Creates an instance of the Zeroconf class, establishing
        multicast communications, listening and reaping threads."""
        global _GLOBAL_DONE
        _GLOBAL_DONE = False
        if bindaddress is None:
            try:
                # Discover our outgoing interface address by "connecting"
                # a UDP socket to a public IP (no packet is actually sent).
                s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
                s.connect(('4.2.2.1', 123))
                self.intf = s.getsockname()[0]
            except Exception:  # TODO stop catching all Exceptions
                self.intf = socket.gethostbyname(socket.gethostname())
        else:
            self.intf = bindaddress
        self.group = ('', _MDNS_PORT)
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        try:
            self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
        except Exception:  # TODO stop catching all Exceptions
            # SO_REUSEADDR should be equivalent to SO_REUSEPORT for
            # multicast UDP sockets (p 731, "TCP/IP Illustrated,
            # Volume 2"), but some BSD-derived systems require
            # SO_REUSEPORT to be specified explicity. Also, not all
            # versions of Python have SO_REUSEPORT available.
            #
            pass
        self.socket.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 255)
        self.socket.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_LOOP, 1)
        try:
            self.socket.bind(self.group)
        except Exception:  # TODO stop catching all Exceptions
            # Some versions of linux raise an exception even though
            # the SO_REUSE* options have been set, so ignore it
            #
            pass
        # self.socket.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_IF,
        #     socket.inet_aton(self.intf) + socket.inet_aton('0.0.0.0'))
        self.socket.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP,
            socket.inet_aton(_MDNS_ADDR) + socket.inet_aton('0.0.0.0'))
        self.listeners = []
        self.browsers = []
        self.services = {}
        self.servicetypes = {}
        self.cache = DNSCache()
        self.condition = threading.Condition()
        # Background machinery: socket polling (Engine), datagram
        # dispatch (Listener) and cache expiry (Reaper).
        self.engine = Engine(self)
        self.listener = Listener(self)
        self.reaper = Reaper(self)
    def isLoopback(self):
        return self.intf.startswith("127.0.0.1")
    def isLinklocal(self):
        return self.intf.startswith("169.254.")
    def wait(self, timeout):
        """Calling thread waits for a given number of milliseconds or
        until notified."""
        self.condition.acquire()
        self.condition.wait(timeout / 1000)
        self.condition.release()
    def notifyAll(self):
        """Notifies all waiting threads"""
        self.condition.acquire()
        self.condition.notifyAll()
        self.condition.release()
    def getServiceInfo(self, type, name, timeout=3000):
        """Returns network's service information for a particular
        name and type, or None if no service matches by the timeout,
        which defaults to 3 seconds."""
        info = ServiceInfo(type, name)
        if info.request(self, timeout):
            return info
        return None
    def addServiceListener(self, type, listener):
        """Adds a listener for a particular service type. This object
        will then have its updateRecord method called when information
        arrives for that type."""
        # Remove any existing browser for this listener first so a
        # listener is never registered twice.
        self.removeServiceListener(listener)
        self.browsers.append(ServiceBrowser(self, type, listener))
    def removeServiceListener(self, listener):
        """Removes a listener from the set that is currently listening."""
        for browser in self.browsers:
            if browser.listener == listener:
                browser.cancel()
                del(browser)
    def registerService(self, info, ttl=_DNS_TTL):
        """Registers service information to the network with a default TTL
        of 60 seconds. Zeroconf will then respond to requests for
        information for that service. The name of the service may be
        changed if needed to make it unique on the network."""
        self.checkService(info)
        self.services[info.name.lower()] = info
        if info.type in self.servicetypes:
            self.servicetypes[info.type] += 1
        else:
            self.servicetypes[info.type] = 1
        now = currentTimeMillis()
        nextTime = now
        i = 0
        # Announce the service three times, _REGISTER_TIME ms apart.
        while i < 3:
            if now < nextTime:
                self.wait(nextTime - now)
                now = currentTimeMillis()
                continue
            out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
            out.addAnswerAtTime(DNSPointer(info.type, _TYPE_PTR,
                _CLASS_IN, ttl, info.name), 0)
            out.addAnswerAtTime(DNSService(info.name, _TYPE_SRV,
                _CLASS_IN, ttl, info.priority, info.weight, info.port,
                info.server), 0)
            out.addAnswerAtTime(DNSText(info.name, _TYPE_TXT, _CLASS_IN,
                ttl, info.text), 0)
            if info.address:
                out.addAnswerAtTime(DNSAddress(info.server, _TYPE_A,
                    _CLASS_IN, ttl, info.address), 0)
            self.send(out)
            i += 1
            nextTime += _REGISTER_TIME
    def unregisterService(self, info):
        """Unregister a service."""
        try:
            del(self.services[info.name.lower()])
            if self.servicetypes[info.type] > 1:
                self.servicetypes[info.type] -= 1
            else:
                del self.servicetypes[info.type]
        except Exception:  # TODO stop catching all Exceptions
            pass
        now = currentTimeMillis()
        nextTime = now
        i = 0
        # Send three goodbye announcements (TTL 0) _UNREGISTER_TIME apart.
        while i < 3:
            if now < nextTime:
                self.wait(nextTime - now)
                now = currentTimeMillis()
                continue
            out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
            out.addAnswerAtTime(DNSPointer(info.type, _TYPE_PTR,
                _CLASS_IN, 0, info.name), 0)
            out.addAnswerAtTime(DNSService(info.name, _TYPE_SRV,
                _CLASS_IN, 0, info.priority, info.weight, info.port,
                info.name), 0)
            out.addAnswerAtTime(DNSText(info.name, _TYPE_TXT, _CLASS_IN,
                0, info.text), 0)
            if info.address:
                out.addAnswerAtTime(DNSAddress(info.server, _TYPE_A,
                    _CLASS_IN, 0, info.address), 0)
            self.send(out)
            i += 1
            nextTime += _UNREGISTER_TIME
    def unregisterAllServices(self):
        """Unregister all registered services."""
        if len(self.services) > 0:
            now = currentTimeMillis()
            nextTime = now
            i = 0
            # As above: three goodbye rounds covering every service.
            while i < 3:
                if now < nextTime:
                    self.wait(nextTime - now)
                    now = currentTimeMillis()
                    continue
                out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
                for info in self.services.values():
                    out.addAnswerAtTime(DNSPointer(info.type, _TYPE_PTR,
                        _CLASS_IN, 0, info.name), 0)
                    out.addAnswerAtTime(DNSService(info.name, _TYPE_SRV,
                        _CLASS_IN, 0, info.priority, info.weight,
                        info.port, info.server), 0)
                    out.addAnswerAtTime(DNSText(info.name, _TYPE_TXT,
                        _CLASS_IN, 0, info.text), 0)
                    if info.address:
                        out.addAnswerAtTime(DNSAddress(info.server,
                            _TYPE_A, _CLASS_IN, 0, info.address), 0)
                self.send(out)
                i += 1
                nextTime += _UNREGISTER_TIME
    def checkService(self, info):
        """Checks the network for a unique service name, modifying the
        ServiceInfo passed in if it is not unique."""
        now = currentTimeMillis()
        nextTime = now
        i = 0
        # Probe three times; if a cached PTR already claims this name,
        # either rename (first conflict) or give up.
        while i < 3:
            for record in self.cache.entriesWithName(info.type):
                if (record.type == _TYPE_PTR and
                        not record.isExpired(now) and
                        record.alias == info.name):
                    # A plain (undecorated) name gets a [address:port]
                    # suffix and is re-checked; a decorated name that
                    # still conflicts is a hard error.
                    if info.name.find('.') < 0:
                        info.name = '%s.[%s:%s].%s' % (info.name,
                            info.address, info.port, info.type)
                        self.checkService(info)
                        return
                    raise NonUniqueNameException
            if now < nextTime:
                self.wait(nextTime - now)
                now = currentTimeMillis()
                continue
            out = DNSOutgoing(_FLAGS_QR_QUERY | _FLAGS_AA)
            self.debug = out
            out.addQuestion(DNSQuestion(info.type, _TYPE_PTR, _CLASS_IN))
            out.addAuthorativeAnswer(DNSPointer(info.type, _TYPE_PTR,
                _CLASS_IN, _DNS_TTL, info.name))
            self.send(out)
            i += 1
            nextTime += _CHECK_TIME
    def addListener(self, listener, question):
        """Adds a listener for a given question. The listener will have
        its updateRecord method called when information is available to
        answer the question."""
        now = currentTimeMillis()
        self.listeners.append(listener)
        if question is not None:
            # Immediately replay any cached records that already answer
            # the question.
            for record in self.cache.entriesWithName(question.name):
                if question.answeredBy(record) and not record.isExpired(now):
                    listener.updateRecord(self, now, record)
        self.notifyAll()
    def removeListener(self, listener):
        """Removes a listener."""
        try:
            self.listeners.remove(listener)
            self.notifyAll()
        except Exception:  # TODO stop catching all Exceptions
            pass
    def updateRecord(self, now, rec):
        """Used to notify listeners of new information that has updated
        a record."""
        for listener in self.listeners:
            listener.updateRecord(self, now, rec)
        self.notifyAll()
    def handleResponse(self, msg):
        """Deal with incoming response packets. All answers
        are held in the cache, and listeners are notified."""
        now = currentTimeMillis()
        for record in msg.answers:
            expired = record.isExpired(now)
            if record in self.cache.entries():
                if expired:
                    self.cache.remove(record)
                else:
                    # Refresh the TTL of the cached copy and make sure
                    # listeners see the cached instance, not the new one.
                    entry = self.cache.get(record)
                    if entry is not None:
                        entry.resetTTL(record)
                        record = entry
            else:
                self.cache.add(record)
            self.updateRecord(now, record)
    def handleQuery(self, msg, addr, port):
        """Deal with incoming query packets. Provides a response if
        possible."""
        out = None
        # Support unicast client responses
        #
        if port != _MDNS_PORT:
            out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA, False)
            for question in msg.questions:
                out.addQuestion(question)
        for question in msg.questions:
            if question.type == _TYPE_PTR:
                # Service-type enumeration: list every type we host.
                if question.name == "_services._dns-sd._udp.local.":
                    for stype in self.servicetypes.keys():
                        if out is None:
                            out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
                        out.addAnswer(msg,
                            DNSPointer("_services._dns-sd._udp.local.",
                                _TYPE_PTR, _CLASS_IN, _DNS_TTL, stype))
                for service in self.services.values():
                    if question.name == service.type:
                        if out is None:
                            out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
                        out.addAnswer(msg,
                            DNSPointer(service.type, _TYPE_PTR,
                                _CLASS_IN, _DNS_TTL, service.name))
            else:
                try:
                    if out is None:
                        out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
                    # Answer A record queries for any service addresses we know
                    if question.type in (_TYPE_A, _TYPE_ANY):
                        for service in self.services.values():
                            if service.server == question.name.lower():
                                out.addAnswer(msg, DNSAddress(question.name,
                                    _TYPE_A, _CLASS_IN | _CLASS_UNIQUE,
                                    _DNS_TTL, service.address))
                    service = self.services.get(question.name.lower(), None)
                    if not service:
                        continue
                    if question.type in (_TYPE_SRV, _TYPE_ANY):
                        out.addAnswer(msg, DNSService(question.name,
                            _TYPE_SRV, _CLASS_IN | _CLASS_UNIQUE,
                            _DNS_TTL, service.priority, service.weight,
                            service.port, service.server))
                    if question.type in (_TYPE_TXT, _TYPE_ANY):
                        out.addAnswer(msg, DNSText(question.name,
                            _TYPE_TXT, _CLASS_IN | _CLASS_UNIQUE,
                            _DNS_TTL, service.text))
                    # SRV answers carry the host's A record as an extra.
                    if question.type == _TYPE_SRV:
                        out.addAdditionalAnswer(DNSAddress(service.server,
                            _TYPE_A, _CLASS_IN | _CLASS_UNIQUE,
                            _DNS_TTL, service.address))
                except Exception:  # TODO stop catching all Exceptions
                    traceback.print_exc()
        if out is not None and out.answers:
            out.id = msg.id
            self.send(out, addr, port)
    def send(self, out, addr=_MDNS_ADDR, port=_MDNS_PORT):
        """Sends an outgoing packet."""
        packet = out.packet()
        try:
            # Loop in case of a partial send (unlikely for UDP, but safe).
            while packet:
                bytes_sent = self.socket.sendto(packet, 0, (addr, port))
                if bytes_sent < 0:
                    break
                packet = packet[bytes_sent:]
        except Exception:  # TODO stop catching all Exceptions
            # Ignore this, it may be a temporary loss of network connection
            pass
    def close(self):
        """Ends the background threads, and prevent this instance from
        servicing further queries."""
        global _GLOBAL_DONE
        if not _GLOBAL_DONE:
            _GLOBAL_DONE = True
            # Wake every waiting thread so they notice the shutdown flag.
            self.notifyAll()
            self.engine.notify()
            self.unregisterAllServices()
            self.socket.setsockopt(socket.IPPROTO_IP,
                                   socket.IP_DROP_MEMBERSHIP,
                                   socket.inet_aton(_MDNS_ADDR) +
                                   socket.inet_aton('0.0.0.0'))
            self.socket.close()
# Test a few module features, including service registration, service
# query (for Zoe), and service unregistration.
# (Manual smoke test: run this module directly; it needs a working
# network interface and multicast support.)
if __name__ == '__main__':
    print("Multicast DNS Service Discovery for Python, version %s" % __version__)
    r = Zeroconf()
    print("1. Testing registration of a service...")
    desc = {'version': '0.10', 'a': 'test value', 'b': 'another value'}
    info = ServiceInfo("_http._tcp.local.",
                       "My Service Name._http._tcp.local.",
                       socket.inet_aton("127.0.0.1"), 1234, 0, 0, desc)
    print(" Registering service...")
    r.registerService(info)
    print(" Registration done.")
    print("2. Testing query of service information...")
    print(" Getting ZOE service: %s" % (
        r.getServiceInfo("_http._tcp.local.", "ZOE._http._tcp.local.")))
    print(" Query done.")
    print("3. Testing query of own service...")
    print(" Getting self: %s" % (
        r.getServiceInfo("_http._tcp.local.", "My Service Name._http._tcp.local.")),)
    print(" Query done.")
    print("4. Testing unregister of service information...")
    r.unregisterService(info)
    print(" Unregister done.")
    r.close()
|
kylerbrown/spikechef | refs/heads/master | gen_klusta_command.py | 1 | from subprocess import call
import os.path
from math import ceil
import argparse
def subset(filebase, directory, shank_num, max_spikes):
    """Return the integer subsampling factor that keeps at most
    *max_spikes* spikes per KlustaKwik run, based on the number of
    lines in ``<directory>/<filebase>.clu.<shank_num>``."""
    clu_filename = "{}.clu.{}".format(os.path.join(directory, filebase),
                                      shank_num)
    # Context manager closes the file promptly; the original left the
    # handle open for the garbage collector.
    with open(clu_filename) as clu_file:
        n_spikes = float(sum(1 for line in clu_file))
    return int(ceil(n_spikes / max_spikes))
def klustakwik_strings(filebase, directory, shank_num, nchannels, max_spikes):
    """Build the MaskedKlustaKwik command line (as an argv list) for one
    shank, subsampling so at most max_spikes spikes are clustered."""
    subsample_factor = subset(filebase, directory, shank_num, max_spikes)
    # NOTE(review): minclus/maxclus are only referenced by the
    # commented-out -MaskStarts / -MaxPossibleClusters options below and
    # are currently unused; kept for easy re-enabling of those options.
    minclus = 3 * nchannels
    maxclus = 8 * nchannels
    klus_args = ['MaskedKlustaKwik',
                 filebase,
                 str(shank_num),
                 #'-MaskStarts', str(minclus),
                 '-PenaltyK', '1',
                 '-PenaltyKLogN', '0',
                 '-UseDistributional', '1',
                 #'-SplitFirst', '40',
                 #'-SplitEvery', '100',
                 #'-MaxIter', '400',
                 #'-MaxPossibleClusters', str(maxclus),
                 '-UseMaskedInitialConditions', '1',
                 '-Subset', str(subsample_factor)
                 ]
    return klus_args
def main(filebase, directory,
         shank_num, nchannels=32, max_spikes=800000, torque=False):
    """Write an executable shell script that runs MaskedKlustaKwik for
    one shank. *torque* may be 'beast' or 'beagle' to emit the matching
    PBS headers; any other value yields a plain script."""
    filebase = os.path.split(filebase)[-1]
    klus_args = klustakwik_strings(filebase, directory,
                                   shank_num, nchannels, max_spikes)
    scriptname = "{}.{}.sh".format(os.path.join(directory, filebase),
                                   shank_num)
    # Removed leftover debug statement `print torque` (Python 2 syntax,
    # and it only cluttered stdout).
    with open(scriptname, 'w') as f:
        if torque == 'beast':
            f.write('#PBS -N {}{}\n'.format(filebase[-15:], shank_num))
            f.write('#PBS -o {}_{}_err.txt\n'.format(filebase, shank_num))
            f.write('#PBS -l nodes=1:ppn=5\n')
            f.write('#PBS -l walltime=192:00:00\n')
            f.write('#PBS -V\n')
            f.write('cd $PBS_O_WORKDIR\n')
            f.write(" ".join(klus_args) + '\n')
        elif torque == 'beagle':
            f.write('#PBS -N {}{}\n'.format(filebase[-15:], shank_num))
            f.write('#PBS -o {}_{}_err.txt\n'.format(filebase, shank_num))
            f.write('#PBS -l mppwidth=1\n')
            f.write('#PBS -l walltime=192:00:00\n')
            f.write('cd $PBS_O_WORKDIR\n')
            f.write("aprun -n 1 -N 1 ")
            f.write(" ".join(klus_args) + '\n')
        else:
            f.write(" ".join(klus_args) + '\n')
    # Make the generated script executable for the owner.
    call(['chmod', 'u+x', scriptname])
if __name__ == "__main__":
    description = '''
    convenience program for running klustakwik,
    generates a script, which you should execute on your desired machine
    '''
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('-f', '--filebase')
    parser.add_argument('-d', '--directory',
                        help="directory containing the files",
                        default='./')
    parser.add_argument('-s', '--shank-num', help="shank number",
                        default=1, type=int)
    parser.add_argument('-n', '--n-channels', help='number of channels on shank',
                        default=32, type=int)
    # Expose main()'s existing max_spikes parameter on the command line;
    # the default matches main()'s default, so old invocations behave
    # identically.
    parser.add_argument('-m', '--max-spikes',
                        help='maximum number of spikes per KlustaKwik run',
                        default=800000, type=int)
    parser.add_argument('-t', '--torque', help="for running on beast or beagle, \
    say 'beast' or 'beagle'.")
    args = parser.parse_args()
    main(args.filebase, args.directory,
         args.shank_num, args.n_channels, args.max_spikes,
         torque=args.torque)
|
tsl143/zamboni | refs/heads/master | mkt/receipts/utils.py | 20 | import calendar
import time
from urllib import urlencode
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.core.urlresolvers import reverse
import jwt
from nose.tools import nottest
from receipts.receipts import Receipt
from lib.crypto import receipt
from lib.utils import static_url
from mkt.access import acl
from mkt.site.helpers import absolutify
def get_uuid(app, user):
    """
    Return the user's purchase uuid for this app, looked up in the
    purchase table, or the string 'none' when no purchase exists.

    :params app: the app record.
    :params user: the UserProfile record.
    """
    try:
        purchase = app.addonpurchase_set.get(user=user)
    except ObjectDoesNotExist:
        return 'none'
    return purchase.uuid
def sign(data):
    """
    Returns a signed receipt. If the separate signing server is present then
    it will use that. Otherwise just uses JWT.

    :params data: the receipt data (a dict) to be signed.
    """
    if settings.SIGNING_SERVER_ACTIVE:
        return receipt.sign(data)
    else:
        # Local fallback: RS512-sign with the key from get_key().
        return jwt.encode(data, get_key(), u'RS512')
def create_receipt(webapp, user, uuid, flavour=None, contrib=None):
    """Build receipt data for this purchase and return it signed."""
    data = create_receipt_data(webapp, user, uuid, flavour=flavour,
                               contrib=contrib)
    return sign(data)
def create_receipt_data(webapp, user, uuid, flavour=None, contrib=None):
    """
    Creates receipt data for use in payments.

    :params webapp: the app record.
    :params user: the UserProfile record.
    :params uuid: a uuid placed in the user field for this purchase.
    :params flavour: None, developer, inapp, or reviewer - the flavour
                     of receipt.
    :param: contrib: the Contribution object for the purchase.
    """
    # Unflavo(u)red receipts are for plain ol' vanilla app purchases.
    assert flavour in (None, 'developer', 'inapp', 'reviewer'), (
        'Invalid flavour: %s' % flavour)
    time_ = calendar.timegm(time.gmtime())
    typ = 'purchase-receipt'
    storedata = {'id': int(webapp.pk)}
    # Generate different receipts for reviewers or developers.
    expiry = time_ + settings.WEBAPPS_RECEIPT_EXPIRY_SECONDS
    verify = static_url('WEBAPPS_RECEIPT_URL')
    if flavour == 'inapp':
        if not contrib:
            raise ValueError(
                'a contribution object is required for in-app receipts')
        if not contrib.inapp_product:
            raise ValueError(
                'contribution {c} does not link to an in-app product'
                .format(c=contrib))
        storedata['contrib'] = int(contrib.pk)
        storedata['inapp_id'] = contrib.inapp_product.guid
    elif flavour in ('developer', 'reviewer'):
        if not (acl.action_allowed_user(user, 'Apps', 'Review') or
                webapp.has_author(user)):
            raise ValueError('User %s is not a reviewer or developer' %
                             user.pk)
        # Developer and reviewer receipts should expire after 24 hours.
        expiry = time_ + (60 * 60 * 24)
        typ = flavour + '-receipt'
        verify = absolutify(reverse('receipt.verify', args=[webapp.guid]))
    product = {'storedata': urlencode(storedata),
               # Packaged and hosted apps should have an origin. If there
               # isn't one, fallback to the SITE_URL.
               'url': webapp.origin or settings.SITE_URL}
    reissue = absolutify(reverse('receipt.reissue'))
    # Named receipt_data (not "receipt") so it does not shadow the
    # lib.crypto.receipt module imported at the top of this file.
    receipt_data = dict(exp=expiry, iat=time_,
                        iss=settings.SITE_URL, nbf=time_, product=product,
                        # TODO: This is temporary until detail pages get added.
                        # TODO: bug 1020997, bug 1020999
                        detail=absolutify(reissue),  # Currently this is a 404.
                        reissue=absolutify(reissue),
                        typ=typ,
                        user={'type': 'directed-identifier',
                              'value': uuid},
                        verify=verify)
    return receipt_data
def create_inapp_receipt(contrib):
    """
    Create (and sign) a receipt for an in-app purchase.

    :params contrib: the Contribution object for the purchase.
    """
    # Real purchases get a normal in-app-flavoured receipt.
    if not contrib.is_inapp_simulation():
        return create_receipt(contrib.addon, None, 'anonymous-user',
                              flavour='inapp', contrib=contrib)
    # Simulated purchases get a test receipt instead.
    storedata = {'id': 0, 'contrib': int(contrib.pk),
                 'inapp_id': contrib.inapp_product.guid}
    return create_test_receipt(settings.SITE_URL, 'ok',
                               storedata=storedata)
def reissue_receipt(receipt):
    """
    Reissue an existing receipt: refresh its timestamps and re-sign it.
    Requires a well formatted receipt, but does not verify the receipt
    contents.

    :params receipt: an existing receipt
    """
    now = calendar.timegm(time.gmtime())
    data = Receipt(receipt).receipt_decoded()
    data['exp'] = now + settings.WEBAPPS_RECEIPT_EXPIRY_SECONDS
    data['iat'] = now
    data['nbf'] = now
    return sign(data)
@nottest
def create_test_receipt(root, status, storedata=None):
    """Build and sign a 24-hour test receipt pointing verification at the
    test verify view for *status*."""
    storedata = storedata or {'id': 0}
    now = calendar.timegm(time.gmtime())
    detail = absolutify(reverse('receipt.test.details'))
    verify_url = absolutify(reverse('receipt.test.verify',
                                    kwargs={'status': status}))
    test_receipt = {
        'detail': absolutify(detail),
        'exp': now + (60 * 60 * 24),
        'iat': now,
        'iss': settings.SITE_URL,
        'nbf': now,
        'product': {
            'storedata': urlencode(storedata),
            'url': root,
        },
        'reissue': detail,
        'typ': 'test-receipt',
        'user': {
            'type': 'directed-identifier',
            'value': 'none'
        },
        'verify': verify_url,
    }
    return sign(test_receipt)
def get_key():
    """Return the RSA key used to sign receipts locally (see sign())."""
    # Loads the key file named by settings.WEBAPPS_RECEIPT_KEY.
    return jwt.rsa_load(settings.WEBAPPS_RECEIPT_KEY)
|
tectronics/coot | refs/heads/master | rcrane/rotamerSeq.py | 4 | #!/usr/bin/env python
"""Determine the sequence of rotamers for a structure using a hidden Markov model"""
# Copyright 2010 Kevin Keating
#
# Licensed under the Educational Community License, Version 2.0 (the
# "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
#
# http://www.osedu.org/licenses/ECL-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS"
# BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing
# permissions and limitations under the License.
import os.path
from math import log
from pseudoPredictor import PseudoPredictor
from puckerList import puckerList, rotList, rotsByPucker
from safelog import ln, negInf
#initialize a PseudoPredictor object when this module is loaded
# (module-level side effect: the CSV training data shipped in the
# package's data/ directory is read at import time)
dataPath = os.path.dirname(os.path.abspath(__file__))
dataPath = os.path.join(dataPath, "data")
pseudoPredic = PseudoPredictor( thetaEta = os.path.join(dataPath, "thetaEtaClusts.csv"),
                                pucker = os.path.join(dataPath, "smoothedPuckerDist.csv"),
                                sugarDist = os.path.join(dataPath, "sugarDists.csv"),
                                startPhosDist = os.path.join(dataPath, "startingPDists.csv"),
                                endPhosDist = os.path.join(dataPath, "endingPDists.csv"))
def determineRotamerSeq(builtChain):
    """Determine the best sequence of rotamers for a structure.
    ARGUMENTS:
        builtChain - a Chain object containing phosphate and base coordinates
    RETURNS:
        bestPath       - a list of the most likely rotamer for each suite
        predictedProbs - the probability for each suite for each rotamer formatted as a list of dictionaries
            predictedProbs[suiteNum][rotamer] = probability
    """
    #score every suite against all rotamers...
    predictedProbs = [pseudoPredic.calcProb(theta = curSuite.theta(),
                                            eta = curSuite.eta(),
                                            startPperp = curSuite.startingPperp(),
                                            endPperp = curSuite.endingPperp(),
                                            sugarDist = curSuite.sugarDist(),
                                            startPhosDist = curSuite.startingPhosDist(),
                                            endPhosDist = curSuite.endingPhosDist())
                      for curSuite in builtChain.suites()]
    #...then decode the most likely rotamer string with the HMM
    bestPath = rotamerHMM(predictedProbs)
    return (bestPath, predictedProbs)
def rotamerHMM(predictedProbs):
    """Given rotamer likelihoods for all suites, predict the most likely rotamer string using a Hidden Markov Model
    ARGUMENTS:
        predictedProbs - the probability for each suite for each rotamer formatted as a list of dictionaries
            predictedProbs[suiteNum][rotamer] = probability
    RETURNS:
        bestPath - a list of the most likely rotamer for each suite
    """
    # Standard Viterbi decode in log space: emissions come from
    # predictedProbs; transitions (see __transitionProb) only allow
    # rotamer pairs whose sugar puckers are compatible.
    numSuites = len(predictedProbs)
    pathProbs = [{} for i in xrange(numSuites)] #the log probability of having followed a given path (the delta or phi array)
    path = [{} for i in xrange(numSuites)] #the path followed for pathProbs (the psi array)
    #initialize the pathProbs list
    for curRot in rotList:
        pathProbs[0][curRot] = ln(predictedProbs[0][curRot])
    for curPos in xrange(1, numSuites): #for each suite
        for curRot in rotList: #for each rotamer
            #figure out what the best previous rotamer is for ending up at the current rotamer
            bestPrevRot = max(pathProbs[curPos-1], key = lambda prevRot: pathProbs[curPos-1][prevRot] + __transitionProb(prevRot, curRot))
            path[curPos][curRot] = bestPrevRot
            pathProbs[curPos][curRot] = pathProbs[curPos-1][bestPrevRot] + __transitionProb(bestPrevRot, curRot) + ln(predictedProbs[curPos][curRot])
    #initialize bestPath to the appropriate length
    bestPath = [None] * numSuites
    #figure out the best last position
    curPos = numSuites - 1
    bestPath[curPos] = max(pathProbs[curPos], key = lambda curRot: pathProbs[curPos][curRot])
    #follow the path back to figure out what the best path was (backtrace)
    for curPos in xrange(numSuites-1, 0, -1):
        bestPath[curPos-1] = path[curPos][bestPath[curPos]]
    return bestPath
def __transitionProb(startingRot, endingRot):
    """Calculate the log of the transition probability between two rotamers
    ARGUMENTS:
        startingRot - the rotamer to transition from
        endingRot   - the rotamer to transition to
    RETURNS:
        0 if the ending pucker of the starting rotamer matches the starting
        pucker of the ending rotamer; negative infinity otherwise
    """
    # Consecutive suites share a sugar, so the transition is only possible
    # when the puckers agree across the boundary.
    compatible = puckerList[startingRot][1] == puckerList[endingRot][0]
    return 0 if compatible else negInf
def determinePucker(pperp):
    """Predict only the sugar pucker (used when just one nucleotide is built)
    ARGUMENTS:
        pperp - the base-phosphate perpendicular (P-perp) distance
    RETURNS:
        3 if a C3'-endo sugar pucker is more likely, 2 otherwise
    NOTE:
        This is a thin wrapper around calcPucker() on the module-level
        PseudoPredictor, so callers such as the traceGui module do not have
        to import and initialize the pseudoPredictor module themselves.
    """
    return pseudoPredic.calcPucker(pperp)
def determineAlternateConf(leadingPucker, endingPucker, suiteNum, predictedProbs):
    """Determine the best conformer for a suite given required starting and ending sugar puckers
    ARGUMENTS:
        leadingPucker  - the pucker of the starting sugar of the suite (either 2 or 3)
        endingPucker   - the pucker of the ending sugar of the suite (either 2 or 3)
        suiteNum       - the number of the suite
        predictedProbs - per-suite rotamer probabilities, as returned by determineRotamerSeq
    RETURNS:
        the most likely conformer
    """
    # Restrict the candidates to rotamers consistent with the requested
    # pucker pair, then take the one with the highest predicted probability.
    suiteProbs = predictedProbs[suiteNum]
    candidates = rotsByPucker[leadingPucker][endingPucker]
    return max(candidates, key = suiteProbs.__getitem__)
|
Passtechsoft/TPEAlpGen | refs/heads/master | blender/release/scripts/templates_py/operator_simple.py | 9 | import bpy
def main(context):
    """Print every object in the context's active scene."""
    scene_objects = context.scene.objects
    for scene_object in scene_objects:
        print(scene_object)
class SimpleOperator(bpy.types.Operator):
    """Print every object in the current scene to the console."""
    # Identifier used from scripts and keymaps: bpy.ops.object.simple_operator
    bl_idname = "object.simple_operator"
    # Human-readable name shown in menus and the operator search.
    bl_label = "Simple Object Operator"
    @classmethod
    def poll(cls, context):
        # The operator is only available while some object is active.
        return context.active_object is not None
    def execute(self, context):
        main(context)
        # 'FINISHED' tells Blender the operator completed successfully.
        return {'FINISHED'}
def register():
    """Register the operator class so Blender can find and invoke it."""
    bpy.utils.register_class(SimpleOperator)
def unregister():
    """Remove the operator class from Blender's registry."""
    bpy.utils.unregister_class(SimpleOperator)
if __name__ == "__main__":
    register()
    # test call
    bpy.ops.object.simple_operator()
|
sander76/home-assistant | refs/heads/dev | homeassistant/components/vallox/fan.py | 5 | """Support for the Vallox ventilation unit fan."""
import logging
from homeassistant.components.fan import FanEntity
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from . import (
DOMAIN,
METRIC_KEY_MODE,
METRIC_KEY_PROFILE_FAN_SPEED_AWAY,
METRIC_KEY_PROFILE_FAN_SPEED_BOOST,
METRIC_KEY_PROFILE_FAN_SPEED_HOME,
SIGNAL_VALLOX_STATE_UPDATE,
)
_LOGGER = logging.getLogger(__name__)
# Device attributes
# Each mapping ties the state-attribute name this entity exposes
# ("description") to the Vallox metric backing it ("metric_key").
ATTR_PROFILE_FAN_SPEED_HOME = {
    "description": "fan_speed_home",
    "metric_key": METRIC_KEY_PROFILE_FAN_SPEED_HOME,
}
ATTR_PROFILE_FAN_SPEED_AWAY = {
    "description": "fan_speed_away",
    "metric_key": METRIC_KEY_PROFILE_FAN_SPEED_AWAY,
}
ATTR_PROFILE_FAN_SPEED_BOOST = {
    "description": "fan_speed_boost",
    "metric_key": METRIC_KEY_PROFILE_FAN_SPEED_BOOST,
}
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up the fan device."""
    # Only set up via discovery from the vallox component, never from YAML.
    if discovery_info is None:
        return
    domain_data = hass.data[DOMAIN]
    client = domain_data["client"]
    # The mode metric is written when toggling the fan, so mark it settable.
    client.set_settable_address(METRIC_KEY_MODE, int)
    fan = ValloxFan(domain_data["name"], client, domain_data["state_proxy"])
    async_add_entities([fan], update_before_add=False)
class ValloxFan(FanEntity):
    """Representation of the fan."""
    def __init__(self, name, client, state_proxy):
        """Initialize the fan.

        name        -- display name of the entity
        client      -- client object used to write values to the unit
        state_proxy -- shared proxy used to read cached metric values
        """
        self._name = name
        self._client = client
        self._state_proxy = state_proxy
        # False until the first successful update; reset on I/O errors.
        self._available = False
        # True when the unit is in regular operation (mode metric == 0).
        self._state = None
        # Per-profile fan speeds; populated by async_update().
        self._fan_speed_home = None
        self._fan_speed_away = None
        self._fan_speed_boost = None
    @property
    def should_poll(self):
        """Do not poll the device."""
        # Updates are pushed via the SIGNAL_VALLOX_STATE_UPDATE dispatcher
        # signal instead (see async_added_to_hass).
        return False
    @property
    def name(self):
        """Return the name of the device."""
        return self._name
    @property
    def available(self):
        """Return if state is known."""
        return self._available
    @property
    def is_on(self):
        """Return if device is on."""
        return self._state
    @property
    def extra_state_attributes(self):
        """Return device specific state attributes."""
        return {
            ATTR_PROFILE_FAN_SPEED_HOME["description"]: self._fan_speed_home,
            ATTR_PROFILE_FAN_SPEED_AWAY["description"]: self._fan_speed_away,
            ATTR_PROFILE_FAN_SPEED_BOOST["description"]: self._fan_speed_boost,
        }
    async def async_added_to_hass(self):
        """Call to update."""
        # Subscribe to push updates; the listener is removed automatically
        # when the entity is removed from Home Assistant.
        self.async_on_remove(
            async_dispatcher_connect(
                self.hass, SIGNAL_VALLOX_STATE_UPDATE, self._update_callback
            )
        )
    @callback
    def _update_callback(self):
        """Call update method."""
        self.async_schedule_update_ha_state(True)
    async def async_update(self):
        """Fetch state from the device."""
        try:
            # Fetch if the whole device is in regular operation state.
            mode = self._state_proxy.fetch_metric(METRIC_KEY_MODE)
            if mode == 0:
                self._state = True
            else:
                self._state = False
            # Fetch the profile fan speeds.
            self._fan_speed_home = int(
                self._state_proxy.fetch_metric(
                    ATTR_PROFILE_FAN_SPEED_HOME["metric_key"]
                )
            )
            self._fan_speed_away = int(
                self._state_proxy.fetch_metric(
                    ATTR_PROFILE_FAN_SPEED_AWAY["metric_key"]
                )
            )
            self._fan_speed_boost = int(
                self._state_proxy.fetch_metric(
                    ATTR_PROFILE_FAN_SPEED_BOOST["metric_key"]
                )
            )
            self._available = True
        except (OSError, KeyError) as err:
            # KeyError: metric missing from the proxy; OSError: I/O failure.
            # Either way the entity becomes unavailable until the next update.
            self._available = False
            _LOGGER.error("Error updating fan: %s", err)
    #
    # The fan entity model has changed to use percentages and preset_modes
    # instead of speeds.
    #
    # Please review
    # https://developers.home-assistant.io/docs/core/entity/fan/
    #
    async def async_turn_on(
        self,
        speed: str = None,
        percentage: int = None,
        preset_mode: str = None,
        **kwargs,
    ) -> None:
        """Turn the device on."""
        _LOGGER.debug("Turn on: %s", speed)
        # Only the case speed == None equals the GUI toggle switch being
        # activated.
        if speed is not None:
            return
        if self._state is False:
            try:
                await self._client.set_values({METRIC_KEY_MODE: 0})
                # This state change affects other entities like sensors. Force
                # an immediate update that can be observed by all parties
                # involved.
                await self._state_proxy.async_update(None)
            except OSError as err:
                self._available = False
                _LOGGER.error("Error turning on: %s", err)
        else:
            _LOGGER.error("Already on")
    async def async_turn_off(self, **kwargs) -> None:
        """Turn the device off."""
        if self._state is True:
            try:
                # Mode 5 takes the unit out of regular operation.
                await self._client.set_values({METRIC_KEY_MODE: 5})
                # Same as for turn_on method.
                await self._state_proxy.async_update(None)
            except OSError as err:
                self._available = False
                _LOGGER.error("Error turning off: %s", err)
        else:
            _LOGGER.error("Already off")
|
duducosmos/pgs4a | refs/heads/master | python-install/lib/python2.7/test/test_weakset.py | 29 | import unittest
from test import test_support
from weakref import proxy, ref, WeakSet
import operator
import copy
import string
import os
from random import randrange, shuffle
import sys
import warnings
import collections
import gc
import contextlib
class Foo:
    """Bare class used as a garbage-collectable, weak-referenceable test
    element; instances receive arbitrary attributes in the GC tests."""
    pass
class SomeClass(object):
    """Hashable wrapper around a value, usable as a weak-referenceable set
    element.  Equality and the hash derive from the wrapped value, so two
    distinct instances wrapping equal values compare equal."""
    def __init__(self, value):
        self.value = value
    def __eq__(self, other):
        # Only instances of exactly the same class can compare equal.
        if type(other) is not type(self):
            return False
        return other.value == self.value
    def __ne__(self, other):
        return not (self == other)
    def __hash__(self):
        # Mix the class itself into the hash so it does not collide with a
        # plain hash(value).
        return hash((SomeClass, self.value))
class TestWeakSet(unittest.TestCase):
    """Exercise weakref.WeakSet: the full set API, plus weak-reference
    specific behaviour (elements disappearing when their last strong
    reference dies, iteration during implicit removal, GC interaction)."""
    def setUp(self):
        # need to keep references to them
        self.items = [SomeClass(c) for c in ('a', 'b', 'c')]
        self.items2 = [SomeClass(c) for c in ('x', 'y', 'z')]
        self.letters = [SomeClass(c) for c in string.ascii_letters]
        self.s = WeakSet(self.items)
        self.d = dict.fromkeys(self.items)
        self.obj = SomeClass('F')
        self.fs = WeakSet([self.obj])
    def test_methods(self):
        # WeakSet must mirror the public API of the built-in set.
        weaksetmethods = dir(WeakSet)
        for method in dir(set):
            if method == 'test_c_api' or method.startswith('_'):
                continue
            self.assertIn(method, weaksetmethods,
                          "WeakSet missing method " + method)
    def test_new_or_init(self):
        self.assertRaises(TypeError, WeakSet, [], 2)
    def test_len(self):
        self.assertEqual(len(self.s), len(self.d))
        self.assertEqual(len(self.fs), 1)
        # Dropping the only strong reference shrinks the weak set.
        del self.obj
        self.assertEqual(len(self.fs), 0)
    def test_contains(self):
        for c in self.letters:
            self.assertEqual(c in self.s, c in self.d)
        # 1 is not weakref'able, but that TypeError is caught by __contains__
        self.assertNotIn(1, self.s)
        self.assertIn(self.obj, self.fs)
        del self.obj
        self.assertNotIn(SomeClass('F'), self.fs)
    def test_union(self):
        u = self.s.union(self.items2)
        for c in self.letters:
            self.assertEqual(c in u, c in self.d or c in self.items2)
        self.assertEqual(self.s, WeakSet(self.items))
        self.assertEqual(type(u), WeakSet)
        self.assertRaises(TypeError, self.s.union, [[]])
        for C in set, frozenset, dict.fromkeys, list, tuple:
            x = WeakSet(self.items + self.items2)
            c = C(self.items2)
            self.assertEqual(self.s.union(c), x)
    def test_or(self):
        i = self.s.union(self.items2)
        self.assertEqual(self.s | set(self.items2), i)
        self.assertEqual(self.s | frozenset(self.items2), i)
    def test_intersection(self):
        i = self.s.intersection(self.items2)
        for c in self.letters:
            self.assertEqual(c in i, c in self.d and c in self.items2)
        self.assertEqual(self.s, WeakSet(self.items))
        self.assertEqual(type(i), WeakSet)
        for C in set, frozenset, dict.fromkeys, list, tuple:
            x = WeakSet([])
            self.assertEqual(self.s.intersection(C(self.items2)), x)
    def test_isdisjoint(self):
        self.assertTrue(self.s.isdisjoint(WeakSet(self.items2)))
        self.assertTrue(not self.s.isdisjoint(WeakSet(self.letters)))
    def test_and(self):
        i = self.s.intersection(self.items2)
        self.assertEqual(self.s & set(self.items2), i)
        self.assertEqual(self.s & frozenset(self.items2), i)
    def test_difference(self):
        i = self.s.difference(self.items2)
        for c in self.letters:
            self.assertEqual(c in i, c in self.d and c not in self.items2)
        self.assertEqual(self.s, WeakSet(self.items))
        self.assertEqual(type(i), WeakSet)
        self.assertRaises(TypeError, self.s.difference, [[]])
    def test_sub(self):
        i = self.s.difference(self.items2)
        self.assertEqual(self.s - set(self.items2), i)
        self.assertEqual(self.s - frozenset(self.items2), i)
    def test_symmetric_difference(self):
        i = self.s.symmetric_difference(self.items2)
        for c in self.letters:
            self.assertEqual(c in i, (c in self.d) ^ (c in self.items2))
        self.assertEqual(self.s, WeakSet(self.items))
        self.assertEqual(type(i), WeakSet)
        self.assertRaises(TypeError, self.s.symmetric_difference, [[]])
    def test_xor(self):
        i = self.s.symmetric_difference(self.items2)
        self.assertEqual(self.s ^ set(self.items2), i)
        self.assertEqual(self.s ^ frozenset(self.items2), i)
    def test_sub_and_super(self):
        pl, ql, rl = map(lambda s: [SomeClass(c) for c in s], ['ab', 'abcde', 'def'])
        p, q, r = map(WeakSet, (pl, ql, rl))
        self.assertTrue(p < q)
        self.assertTrue(p <= q)
        self.assertTrue(q <= q)
        self.assertTrue(q > p)
        self.assertTrue(q >= p)
        self.assertFalse(q < r)
        self.assertFalse(q <= r)
        self.assertFalse(q > r)
        self.assertFalse(q >= r)
        self.assertTrue(set('a').issubset('abc'))
        self.assertTrue(set('abc').issuperset('a'))
        self.assertFalse(set('a').issubset('cbs'))
        self.assertFalse(set('cbs').issuperset('a'))
    def test_gc(self):
        # Create a nest of cycles to exercise overall ref count check
        s = WeakSet(Foo() for i in range(1000))
        for elem in s:
            elem.cycle = s
            elem.sub = elem
            elem.set = WeakSet([elem])
    def test_subclass_with_custom_hash(self):
        # Bug #1257731
        class H(WeakSet):
            def __hash__(self):
                return int(id(self) & 0x7fffffff)
        s=H()
        f=set()
        f.add(s)
        self.assertIn(s, f)
        f.remove(s)
        f.add(s)
        f.discard(s)
    def test_init(self):
        # __init__ on an existing WeakSet must replace its contents.
        s = WeakSet()
        s.__init__(self.items)
        self.assertEqual(s, self.s)
        s.__init__(self.items2)
        self.assertEqual(s, WeakSet(self.items2))
        self.assertRaises(TypeError, s.__init__, s, 2);
        self.assertRaises(TypeError, s.__init__, 1);
    def test_constructor_identity(self):
        s = WeakSet(self.items)
        t = WeakSet(s)
        self.assertNotEqual(id(s), id(t))
    def test_hash(self):
        # WeakSets are mutable and therefore unhashable.
        self.assertRaises(TypeError, hash, self.s)
    def test_clear(self):
        self.s.clear()
        self.assertEqual(self.s, WeakSet([]))
        self.assertEqual(len(self.s), 0)
    def test_copy(self):
        dup = self.s.copy()
        self.assertEqual(self.s, dup)
        self.assertNotEqual(id(self.s), id(dup))
    def test_add(self):
        x = SomeClass('Q')
        self.s.add(x)
        self.assertIn(x, self.s)
        dup = self.s.copy()
        self.s.add(x)
        self.assertEqual(self.s, dup)
        self.assertRaises(TypeError, self.s.add, [])
        self.fs.add(Foo())
        self.assertTrue(len(self.fs) == 1)
        self.fs.add(self.obj)
        self.assertTrue(len(self.fs) == 1)
    def test_remove(self):
        x = SomeClass('a')
        self.s.remove(x)
        self.assertNotIn(x, self.s)
        self.assertRaises(KeyError, self.s.remove, x)
        self.assertRaises(TypeError, self.s.remove, [])
    def test_discard(self):
        a, q = SomeClass('a'), SomeClass('Q')
        self.s.discard(a)
        self.assertNotIn(a, self.s)
        self.s.discard(q)
        self.assertRaises(TypeError, self.s.discard, [])
    def test_pop(self):
        for i in range(len(self.s)):
            elem = self.s.pop()
            self.assertNotIn(elem, self.s)
        self.assertRaises(KeyError, self.s.pop)
    def test_update(self):
        retval = self.s.update(self.items2)
        self.assertEqual(retval, None)
        for c in (self.items + self.items2):
            self.assertIn(c, self.s)
        self.assertRaises(TypeError, self.s.update, [[]])
    def test_update_set(self):
        self.s.update(set(self.items2))
        for c in (self.items + self.items2):
            self.assertIn(c, self.s)
    def test_ior(self):
        self.s |= set(self.items2)
        for c in (self.items + self.items2):
            self.assertIn(c, self.s)
    def test_intersection_update(self):
        retval = self.s.intersection_update(self.items2)
        self.assertEqual(retval, None)
        for c in (self.items + self.items2):
            if c in self.items2 and c in self.items:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)
        self.assertRaises(TypeError, self.s.intersection_update, [[]])
    def test_iand(self):
        self.s &= set(self.items2)
        for c in (self.items + self.items2):
            if c in self.items2 and c in self.items:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)
    def test_difference_update(self):
        retval = self.s.difference_update(self.items2)
        self.assertEqual(retval, None)
        for c in (self.items + self.items2):
            if c in self.items and c not in self.items2:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)
        self.assertRaises(TypeError, self.s.difference_update, [[]])
        self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]])
    def test_isub(self):
        self.s -= set(self.items2)
        for c in (self.items + self.items2):
            if c in self.items and c not in self.items2:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)
    def test_symmetric_difference_update(self):
        retval = self.s.symmetric_difference_update(self.items2)
        self.assertEqual(retval, None)
        for c in (self.items + self.items2):
            if (c in self.items) ^ (c in self.items2):
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)
        self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]])
    def test_ixor(self):
        self.s ^= set(self.items2)
        for c in (self.items + self.items2):
            if (c in self.items) ^ (c in self.items2):
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)
    def test_inplace_on_self(self):
        # In-place operators with the set itself as the operand.
        t = self.s.copy()
        t |= t
        self.assertEqual(t, self.s)
        t &= t
        self.assertEqual(t, self.s)
        t -= t
        self.assertEqual(t, WeakSet())
        t = self.s.copy()
        t ^= t
        self.assertEqual(t, WeakSet())
    def test_eq(self):
        # issue 5964
        self.assertTrue(self.s == self.s)
        self.assertTrue(self.s == WeakSet(self.items))
        self.assertFalse(self.s == set(self.items))
        self.assertFalse(self.s == list(self.items))
        self.assertFalse(self.s == tuple(self.items))
        self.assertFalse(self.s == 1)
    def test_weak_destroy_while_iterating(self):
        # Issue #7105: iterators shouldn't crash when a key is implicitly removed
        # Create new items to be sure no-one else holds a reference
        items = [SomeClass(c) for c in ('a', 'b', 'c')]
        s = WeakSet(items)
        it = iter(s)
        next(it)             # Trigger internal iteration
        # Destroy an item
        del items[-1]
        gc.collect()    # just in case
        # We have removed either the first consumed items, or another one
        self.assertIn(len(list(it)), [len(items), len(items) - 1])
        del it
        # The removal has been committed
        self.assertEqual(len(s), len(items))
    def test_weak_destroy_and_mutate_while_iterating(self):
        # Issue #7105: iterators shouldn't crash when a key is implicitly removed
        items = [SomeClass(c) for c in string.ascii_letters]
        s = WeakSet(items)
        @contextlib.contextmanager
        def testcontext():
            try:
                it = iter(s)
                next(it)
                # Schedule an item for removal and recreate it
                u = SomeClass(str(items.pop()))
                gc.collect()      # just in case
                yield u
            finally:
                it = None           # should commit all removals
        with testcontext() as u:
            self.assertNotIn(u, s)
        with testcontext() as u:
            self.assertRaises(KeyError, s.remove, u)
            self.assertNotIn(u, s)
        with testcontext() as u:
            s.add(u)
            self.assertIn(u, s)
        t = s.copy()
        with testcontext() as u:
            s.update(t)
            self.assertEqual(len(s), len(t))
        with testcontext() as u:
            s.clear()
            self.assertEqual(len(s), 0)
def test_main(verbose=None):
    """Entry point for Python 2's regrtest driver: run the WeakSet suite."""
    test_support.run_unittest(TestWeakSet)
if __name__ == "__main__":
    test_main(verbose=True)
|
Reflexe/doc_to_pdf | refs/heads/master | Windows/program/python-core-3.5.0/lib/nturl2path.py | 19 | """Convert a NT pathname to a file URL and vice versa."""
def url2pathname(url):
    """OS-specific conversion from a relative URL of the 'file' scheme
    to a file system path; not recommended for general use."""
    # Examples:
    #   ///C|/foo/bar/spam.foo  and  ///C:/foo/bar/spam.foo
    # both become
    #   C:\foo\bar\spam.foo
    import string, urllib.parse
    # Windows itself uses ":" even in URLs.
    url = url.replace(':', '|')
    if '|' not in url:
        # No drive specifier, just convert slashes.
        if url[:4] == '////':
            # ////host/path/on/remote/host becomes \\host\path\on\remote\host
            # (note the halving of slashes at the start of the path).
            url = url[2:]
        # Unquote each component separately so quoted slashes survive.
        return urllib.parse.unquote('\\'.join(url.split('/')))
    parts = url.split('|')
    if len(parts) != 2 or parts[0][-1] not in string.ascii_letters:
        raise OSError('Bad URL: ' + url)
    drive = parts[0][-1].upper()
    path = drive + ':'
    for piece in parts[1].split('/'):
        if piece:
            path = path + '\\' + urllib.parse.unquote(piece)
    # Issue #11474 - handing url such as |c/|
    if path.endswith(':') and url.endswith('/'):
        path += '\\'
    return path
def pathname2url(p):
    """OS-specific conversion from a file system path to a relative URL
    of the 'file' scheme; not recommended for general use."""
    # Example:
    #   C:\foo\bar\spam.foo  becomes  ///C:/foo/bar/spam.foo
    import urllib.parse
    if ':' not in p:
        # No drive specifier, just convert slashes and quote the name.
        if p[:2] == '\\\\':
            # \\host\path\on\remote\host becomes ////host/path/on/remote/host
            # (note the doubling of slashes at the start of the path).
            p = '\\\\' + p
        return urllib.parse.quote('/'.join(p.split('\\')))
    parts = p.split(':')
    if len(parts) != 2 or len(parts[0]) > 1:
        raise OSError('Bad path: ' + p)
    drive = urllib.parse.quote(parts[0].upper())
    url = '///' + drive + ':'
    for segment in parts[1].split('\\'):
        if segment:
            url = url + '/' + urllib.parse.quote(segment)
    return url
|
ly0/xxadmin | refs/heads/master | xadmin/plugins/relate.py | 1 | # coding=UTF-8
from django.core.urlresolvers import reverse
import sys
if sys.version_info.major < 3:
from django.utils.encoding import force_unicode as force_text
from django.utils.encoding import smart_str as smart_text
else:
from django.utils.encoding import force_text
from django.utils.encoding import smart_bytes, smart_text
from django.utils.safestring import mark_safe
from django.db.models.sql.query import LOOKUP_SEP
from django import get_version
# Django 1.8 renamed RelatedObject to ForeignObjectRel; import the matching
# name and record which API is in use so the code below can adapt.
# NOTE(review): the string comparison v[:3] > '1.7' assumes an "X.Y" version
# prefix and would misorder versions like '1.10' — confirm the supported
# Django range.
v = get_version()
if v[:3] > '1.7':
    from django.db.models.fields.related import ForeignObjectRel
    VERSION_LT_1_8 = False
else:
    from django.db.models.related import RelatedObject as ForeignObjectRel
    VERSION_LT_1_8 = True
from django.utils.translation import ugettext as _
from django.db import models
from xadmin.sites import site
from xadmin.views import BaseAdminPlugin, ListAdminView, CreateAdminView, UpdateAdminView, DeleteAdminView
RELATE_PREFIX = '_rel_'
class RelateMenuPlugin(BaseAdminPlugin):
    """Change-list plugin that appends a per-row dropdown menu linking each
    object to the filtered changelists / add forms of its related models."""
    # Accessor names to include; an empty list means "all related models".
    related_list = []
    use_related_menu = True
    def get_related_list(self):
        """Collect (relation, has_view_perm, has_add_perm) triples for every
        related model registered in this admin site; cached on the plugin."""
        if hasattr(self, '_related_acts'):
            return self._related_acts
        _related_acts = []
        for r in self.opts.get_all_related_objects() + self.opts.get_all_related_many_to_many_objects():
            if self.related_list and (r.get_accessor_name() not in self.related_list):
                continue
            if VERSION_LT_1_8:
                # Pre-1.8 RelatedObject exposes the target model as .model.
                r.related_model = r.model
            if r.related_model not in self.admin_site._registry.keys():
                continue
            has_view_perm = self.has_model_perm(r.related_model, 'view')
            has_add_perm = self.has_model_perm(r.related_model, 'add')
            if not (has_view_perm or has_add_perm):
                continue
            _related_acts.append((r, has_view_perm, has_add_perm))
        self._related_acts = _related_acts
        return self._related_acts
    def related_link(self, instance):
        """Render the dropdown-menu HTML for one change-list row, with a
        filtered changelist link and an add link per related model."""
        links = []
        for r, view_perm, add_perm in self.get_related_list():
            label = r.opts.app_label
            model_name = r.opts.model_name
            f = r.field
            rel_name = f.rel.get_related_field().name
            verbose_name = force_text(r.opts.verbose_name)
            # Lookup passed through RELATE_PREFIX, resolved by RelateObject.
            lookup_name = '%s__%s__exact' % (f.name, rel_name)
            link = ''.join(('<li class="with_menu_btn">',
                            '<a href="%s?%s=%s" title="%s"><i class="icon fa fa-th-list"></i> %s</a>' %
                            (
                                reverse('%s:%s_%s_changelist' % (
                                    self.admin_site.app_name, label, model_name)),
                                RELATE_PREFIX + lookup_name, str(instance.pk), verbose_name, verbose_name) if view_perm else
                            '<a><span class="text-muted"><i class="icon fa fa-blank"></i> %s</span></a>' % verbose_name,
                            '<a class="add_link dropdown-menu-btn" href="%s?%s=%s"><i class="icon fa fa-plus pull-right"></i></a>' %
                            (
                                reverse('%s:%s_%s_add' % (
                                    self.admin_site.app_name, label, model_name)),
                                RELATE_PREFIX + lookup_name, str(
                                    instance.pk)) if add_perm else "",
                            '</li>'))
            links.append(link)
        ul_html = '<ul class="dropdown-menu" role="menu">%s</ul>' % ''.join(
            links)
        return '<div class="dropdown related_menu pull-right"><a title="%s" class="relate_menu dropdown-toggle" data-toggle="dropdown"><i class="icon fa fa-list"></i></a>%s</div>' % (_('Related Objects'), ul_html)
    # Presentation flags consumed by xadmin's change-list renderer.
    related_link.short_description = '&nbsp;'
    related_link.allow_tags = True
    related_link.allow_export = False
    related_link.is_column = False
    def get_list_display(self, list_display):
        # Append the menu as a pseudo-column when enabled and non-empty.
        if self.use_related_menu and len(self.get_related_list()):
            list_display.append('related_link')
            self.admin_view.related_link = self.related_link
        return list_display
class RelateObject(object):
    """Resolve a RELATE_PREFIX lookup (e.g. ``author__id__exact`` with a
    value) into the related model, the target instances and a queryset
    filter usable by the display plugins."""
    def __init__(self, admin_view, lookup, value):
        self.admin_view = admin_view
        self.org_model = admin_view.model
        self.opts = admin_view.opts
        self.lookup = lookup
        self.value = value
        # The first lookup part names the relation field on this model.
        parts = lookup.split(LOOKUP_SEP)
        field = self.opts.get_field_by_name(parts[0])[0]
        if not hasattr(field, 'rel') and not isinstance(field, ForeignObjectRel):
            raise Exception(u'Relate Lookup field must a related field')
        if hasattr(field, 'rel'):
            # Forward relation: follow .rel to the target model and field.
            self.to_model = field.rel.to
            self.rel_name = field.rel.get_related_field().name
            self.is_m2m = isinstance(field.rel, models.ManyToManyRel)
        else:
            # Reverse relation: the target is the related model's primary key.
            self.to_model = field.model
            self.rel_name = self.to_model._meta.pk.name
            self.is_m2m = False
        to_qs = self.to_model._default_manager.get_query_set()
        # Concrete target instance(s) the relate parameter points at.
        self.to_objs = to_qs.filter(**{self.rel_name: value}).all()
        self.field = field
    def filter(self, queryset):
        """Restrict *queryset* to rows related to the target value."""
        return queryset.filter(**{self.lookup: self.value})
    def get_brand_name(self):
        """Breadcrumb-style heading: "<target> > <model plural name>"."""
        if len(self.to_objs) == 1:
            to_model_name = str(self.to_objs[0])
        else:
            to_model_name = force_text(self.to_model._meta.verbose_name)
        return mark_safe(u"<span class='rel-brand'>%s <i class='fa fa-caret-right'></i></span> %s" % (to_model_name, force_text(self.opts.verbose_name_plural)))
class BaseRelateDisplayPlugin(BaseAdminPlugin):
    """Shared behaviour for views reached with a ``_rel_...`` parameter:
    parse it once per request and provide helpers to propagate it."""
    def init_request(self, *args, **kwargs):
        # Activate this plugin only when a RELATE_PREFIX parameter is present
        # in the request; the first match wins.
        self.relate_obj = None
        for k, v in self.request.REQUEST.items():
            if smart_text(k).startswith(RELATE_PREFIX):
                self.relate_obj = RelateObject(
                    self.admin_view, smart_text(k)[len(RELATE_PREFIX):], v)
                break
        return bool(self.relate_obj)
    def _get_relate_params(self):
        # (query parameter name, value) pair for the active relate lookup.
        return RELATE_PREFIX + self.relate_obj.lookup, self.relate_obj.value
    def _get_input(self):
        # Hidden form input carrying the relate parameter through POSTs.
        return '<input type="hidden" name="%s" value="%s" />' % self._get_relate_params()
    def _get_url(self, url):
        # Append the relate parameter to a URL, honouring an existing query.
        return url + ('&' if url.find('?') > 0 else '?') + ('%s=%s' % self._get_relate_params())
class ListRelateDisplayPlugin(BaseRelateDisplayPlugin):
    """Change-list plugin: restricts the list to objects related to the
    current relate target and propagates the relate parameter on links."""
    def get_list_queryset(self, queryset):
        """Filter the changelist queryset down to related rows."""
        if self.relate_obj:
            queryset = self.relate_obj.filter(queryset)
        return queryset
    def url_for_result(self, url, result):
        """Keep the relate parameter on each result's edit link."""
        return self._get_url(url)
    def get_context(self, context):
        """Expose the relate target in the template context and carry the
        relate parameter on the "add" link."""
        context['brand_name'] = self.relate_obj.get_brand_name()
        context['rel_objs'] = self.relate_obj.to_objs
        if 'add_url' in context:
            context['add_url'] = self._get_url(context['add_url'])
        return context
    def get_list_display(self, list_display):
        """Hide the relate column itself: for a to-one relation every listed
        row has the same value, so the column carries no information."""
        if not self.relate_obj.is_m2m:
            # Narrowed from a bare ``except Exception`` that swallowed every
            # error: list.remove only raises ValueError when the name is
            # absent, and reverse-relation fields may lack ``.name``.
            try:
                list_display.remove(self.relate_obj.field.name)
            except (ValueError, AttributeError):
                pass
        return list_display
class EditRelateDisplayPlugin(BaseRelateDisplayPlugin):
    """Add/change-view plugin: pre-fills the relate field on new objects and
    keeps the relate parameter across redirects and the delete link."""
    def get_form_datas(self, datas):
        # Pre-select the relate target when rendering an empty add form.
        if self.admin_view.org_obj is None and self.admin_view.request_method == 'get':
            datas['initial'][
                self.relate_obj.field.name] = self.relate_obj.value
        return datas
    def post_response(self, response):
        # String responses are redirect URLs; keep the relate parameter on
        # them, except when returning to the index page.
        if isinstance(response, str) and response != self.get_admin_url('index'):
            return self._get_url(response)
        return response
    def get_context(self, context):
        if 'delete_url' in context:
            context['delete_url'] = self._get_url(context['delete_url'])
        return context
    def block_after_fieldsets(self, context, nodes):
        # Inject a hidden input so form POSTs carry the relate parameter.
        return self._get_input()
class DeleteRelateDisplayPlugin(BaseRelateDisplayPlugin):
    """Delete-view plugin: keeps the relate parameter through the
    confirmation form and the post-delete redirect."""
    def post_response(self, response):
        # String responses are redirect URLs (see EditRelateDisplayPlugin).
        if isinstance(response, str) and response != self.get_admin_url('index'):
            return self._get_url(response)
        return response
    def block_form_fields(self, context, nodes):
        # Hidden input so the confirmation POST carries the relate parameter.
        return self._get_input()
# Wire the plugins into their admin views: the related-objects menu and the
# relate filter on change lists, and relate-parameter propagation on the
# add, edit and delete views.
site.register_plugin(RelateMenuPlugin, ListAdminView)
site.register_plugin(ListRelateDisplayPlugin, ListAdminView)
site.register_plugin(EditRelateDisplayPlugin, CreateAdminView)
site.register_plugin(EditRelateDisplayPlugin, UpdateAdminView)
site.register_plugin(DeleteRelateDisplayPlugin, DeleteAdminView)
|
fo2rist/infra-strike | refs/heads/master | backend/venv/Lib/encodings/cp1250.py | 272 | """ Python Character Mapping Codec cp1250 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1250.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    # Stateless codec: both directions are single-pass charmap lookups
    # against the module-level encoding_table / decoding_table.
    def encode(self,input,errors='strict'):
        return codecs.charmap_encode(input,errors,encoding_table)
    def decode(self,input,errors='strict'):
        return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    # Charmap encoding is context-free, so each chunk can be encoded
    # independently; the 'final' flag needs no special handling.
    def encode(self, input, final=False):
        return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    # Single-byte charset: no multi-byte sequences can straddle chunk
    # boundaries, so each chunk decodes independently of 'final'.
    def decode(self, input, final=False):
        return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    # Pure mixin combination: Codec supplies encode(), StreamWriter the I/O.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Pure mixin combination: Codec supplies decode(), StreamReader the I/O.
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo record the codecs registry uses for 'cp1250'."""
    return codecs.CodecInfo(
        name='cp1250',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
'\x00' # 0x00 -> NULL
'\x01' # 0x01 -> START OF HEADING
'\x02' # 0x02 -> START OF TEXT
'\x03' # 0x03 -> END OF TEXT
'\x04' # 0x04 -> END OF TRANSMISSION
'\x05' # 0x05 -> ENQUIRY
'\x06' # 0x06 -> ACKNOWLEDGE
'\x07' # 0x07 -> BELL
'\x08' # 0x08 -> BACKSPACE
'\t' # 0x09 -> HORIZONTAL TABULATION
'\n' # 0x0A -> LINE FEED
'\x0b' # 0x0B -> VERTICAL TABULATION
'\x0c' # 0x0C -> FORM FEED
'\r' # 0x0D -> CARRIAGE RETURN
'\x0e' # 0x0E -> SHIFT OUT
'\x0f' # 0x0F -> SHIFT IN
'\x10' # 0x10 -> DATA LINK ESCAPE
'\x11' # 0x11 -> DEVICE CONTROL ONE
'\x12' # 0x12 -> DEVICE CONTROL TWO
'\x13' # 0x13 -> DEVICE CONTROL THREE
'\x14' # 0x14 -> DEVICE CONTROL FOUR
'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
'\x16' # 0x16 -> SYNCHRONOUS IDLE
'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
'\x18' # 0x18 -> CANCEL
'\x19' # 0x19 -> END OF MEDIUM
'\x1a' # 0x1A -> SUBSTITUTE
'\x1b' # 0x1B -> ESCAPE
'\x1c' # 0x1C -> FILE SEPARATOR
'\x1d' # 0x1D -> GROUP SEPARATOR
'\x1e' # 0x1E -> RECORD SEPARATOR
'\x1f' # 0x1F -> UNIT SEPARATOR
' ' # 0x20 -> SPACE
'!' # 0x21 -> EXCLAMATION MARK
'"' # 0x22 -> QUOTATION MARK
'#' # 0x23 -> NUMBER SIGN
'$' # 0x24 -> DOLLAR SIGN
'%' # 0x25 -> PERCENT SIGN
'&' # 0x26 -> AMPERSAND
"'" # 0x27 -> APOSTROPHE
'(' # 0x28 -> LEFT PARENTHESIS
')' # 0x29 -> RIGHT PARENTHESIS
'*' # 0x2A -> ASTERISK
'+' # 0x2B -> PLUS SIGN
',' # 0x2C -> COMMA
'-' # 0x2D -> HYPHEN-MINUS
'.' # 0x2E -> FULL STOP
'/' # 0x2F -> SOLIDUS
'0' # 0x30 -> DIGIT ZERO
'1' # 0x31 -> DIGIT ONE
'2' # 0x32 -> DIGIT TWO
'3' # 0x33 -> DIGIT THREE
'4' # 0x34 -> DIGIT FOUR
'5' # 0x35 -> DIGIT FIVE
'6' # 0x36 -> DIGIT SIX
'7' # 0x37 -> DIGIT SEVEN
'8' # 0x38 -> DIGIT EIGHT
'9' # 0x39 -> DIGIT NINE
':' # 0x3A -> COLON
';' # 0x3B -> SEMICOLON
'<' # 0x3C -> LESS-THAN SIGN
'=' # 0x3D -> EQUALS SIGN
'>' # 0x3E -> GREATER-THAN SIGN
'?' # 0x3F -> QUESTION MARK
'@' # 0x40 -> COMMERCIAL AT
'A' # 0x41 -> LATIN CAPITAL LETTER A
'B' # 0x42 -> LATIN CAPITAL LETTER B
'C' # 0x43 -> LATIN CAPITAL LETTER C
'D' # 0x44 -> LATIN CAPITAL LETTER D
'E' # 0x45 -> LATIN CAPITAL LETTER E
'F' # 0x46 -> LATIN CAPITAL LETTER F
'G' # 0x47 -> LATIN CAPITAL LETTER G
'H' # 0x48 -> LATIN CAPITAL LETTER H
'I' # 0x49 -> LATIN CAPITAL LETTER I
'J' # 0x4A -> LATIN CAPITAL LETTER J
'K' # 0x4B -> LATIN CAPITAL LETTER K
'L' # 0x4C -> LATIN CAPITAL LETTER L
'M' # 0x4D -> LATIN CAPITAL LETTER M
'N' # 0x4E -> LATIN CAPITAL LETTER N
'O' # 0x4F -> LATIN CAPITAL LETTER O
'P' # 0x50 -> LATIN CAPITAL LETTER P
'Q' # 0x51 -> LATIN CAPITAL LETTER Q
'R' # 0x52 -> LATIN CAPITAL LETTER R
'S' # 0x53 -> LATIN CAPITAL LETTER S
'T' # 0x54 -> LATIN CAPITAL LETTER T
'U' # 0x55 -> LATIN CAPITAL LETTER U
'V' # 0x56 -> LATIN CAPITAL LETTER V
'W' # 0x57 -> LATIN CAPITAL LETTER W
'X' # 0x58 -> LATIN CAPITAL LETTER X
'Y' # 0x59 -> LATIN CAPITAL LETTER Y
'Z' # 0x5A -> LATIN CAPITAL LETTER Z
'[' # 0x5B -> LEFT SQUARE BRACKET
'\\' # 0x5C -> REVERSE SOLIDUS
']' # 0x5D -> RIGHT SQUARE BRACKET
'^' # 0x5E -> CIRCUMFLEX ACCENT
'_' # 0x5F -> LOW LINE
'`' # 0x60 -> GRAVE ACCENT
'a' # 0x61 -> LATIN SMALL LETTER A
'b' # 0x62 -> LATIN SMALL LETTER B
'c' # 0x63 -> LATIN SMALL LETTER C
'd' # 0x64 -> LATIN SMALL LETTER D
'e' # 0x65 -> LATIN SMALL LETTER E
'f' # 0x66 -> LATIN SMALL LETTER F
'g' # 0x67 -> LATIN SMALL LETTER G
'h' # 0x68 -> LATIN SMALL LETTER H
'i' # 0x69 -> LATIN SMALL LETTER I
'j' # 0x6A -> LATIN SMALL LETTER J
'k' # 0x6B -> LATIN SMALL LETTER K
'l' # 0x6C -> LATIN SMALL LETTER L
'm' # 0x6D -> LATIN SMALL LETTER M
'n' # 0x6E -> LATIN SMALL LETTER N
'o' # 0x6F -> LATIN SMALL LETTER O
'p' # 0x70 -> LATIN SMALL LETTER P
'q' # 0x71 -> LATIN SMALL LETTER Q
'r' # 0x72 -> LATIN SMALL LETTER R
's' # 0x73 -> LATIN SMALL LETTER S
't' # 0x74 -> LATIN SMALL LETTER T
'u' # 0x75 -> LATIN SMALL LETTER U
'v' # 0x76 -> LATIN SMALL LETTER V
'w' # 0x77 -> LATIN SMALL LETTER W
'x' # 0x78 -> LATIN SMALL LETTER X
'y' # 0x79 -> LATIN SMALL LETTER Y
'z' # 0x7A -> LATIN SMALL LETTER Z
'{' # 0x7B -> LEFT CURLY BRACKET
'|' # 0x7C -> VERTICAL LINE
'}' # 0x7D -> RIGHT CURLY BRACKET
'~' # 0x7E -> TILDE
'\x7f' # 0x7F -> DELETE
'\u20ac' # 0x80 -> EURO SIGN
'\ufffe' # 0x81 -> UNDEFINED
'\u201a' # 0x82 -> SINGLE LOW-9 QUOTATION MARK
'\ufffe' # 0x83 -> UNDEFINED
'\u201e' # 0x84 -> DOUBLE LOW-9 QUOTATION MARK
'\u2026' # 0x85 -> HORIZONTAL ELLIPSIS
'\u2020' # 0x86 -> DAGGER
'\u2021' # 0x87 -> DOUBLE DAGGER
'\ufffe' # 0x88 -> UNDEFINED
'\u2030' # 0x89 -> PER MILLE SIGN
'\u0160' # 0x8A -> LATIN CAPITAL LETTER S WITH CARON
'\u2039' # 0x8B -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK
'\u015a' # 0x8C -> LATIN CAPITAL LETTER S WITH ACUTE
'\u0164' # 0x8D -> LATIN CAPITAL LETTER T WITH CARON
'\u017d' # 0x8E -> LATIN CAPITAL LETTER Z WITH CARON
'\u0179' # 0x8F -> LATIN CAPITAL LETTER Z WITH ACUTE
'\ufffe' # 0x90 -> UNDEFINED
'\u2018' # 0x91 -> LEFT SINGLE QUOTATION MARK
'\u2019' # 0x92 -> RIGHT SINGLE QUOTATION MARK
'\u201c' # 0x93 -> LEFT DOUBLE QUOTATION MARK
'\u201d' # 0x94 -> RIGHT DOUBLE QUOTATION MARK
'\u2022' # 0x95 -> BULLET
'\u2013' # 0x96 -> EN DASH
'\u2014' # 0x97 -> EM DASH
'\ufffe' # 0x98 -> UNDEFINED
'\u2122' # 0x99 -> TRADE MARK SIGN
'\u0161' # 0x9A -> LATIN SMALL LETTER S WITH CARON
'\u203a' # 0x9B -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
'\u015b' # 0x9C -> LATIN SMALL LETTER S WITH ACUTE
'\u0165' # 0x9D -> LATIN SMALL LETTER T WITH CARON
'\u017e' # 0x9E -> LATIN SMALL LETTER Z WITH CARON
'\u017a' # 0x9F -> LATIN SMALL LETTER Z WITH ACUTE
'\xa0' # 0xA0 -> NO-BREAK SPACE
'\u02c7' # 0xA1 -> CARON
'\u02d8' # 0xA2 -> BREVE
'\u0141' # 0xA3 -> LATIN CAPITAL LETTER L WITH STROKE
'\xa4' # 0xA4 -> CURRENCY SIGN
'\u0104' # 0xA5 -> LATIN CAPITAL LETTER A WITH OGONEK
'\xa6' # 0xA6 -> BROKEN BAR
'\xa7' # 0xA7 -> SECTION SIGN
'\xa8' # 0xA8 -> DIAERESIS
'\xa9' # 0xA9 -> COPYRIGHT SIGN
'\u015e' # 0xAA -> LATIN CAPITAL LETTER S WITH CEDILLA
'\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xac' # 0xAC -> NOT SIGN
'\xad' # 0xAD -> SOFT HYPHEN
'\xae' # 0xAE -> REGISTERED SIGN
'\u017b' # 0xAF -> LATIN CAPITAL LETTER Z WITH DOT ABOVE
'\xb0' # 0xB0 -> DEGREE SIGN
'\xb1' # 0xB1 -> PLUS-MINUS SIGN
'\u02db' # 0xB2 -> OGONEK
'\u0142' # 0xB3 -> LATIN SMALL LETTER L WITH STROKE
'\xb4' # 0xB4 -> ACUTE ACCENT
'\xb5' # 0xB5 -> MICRO SIGN
'\xb6' # 0xB6 -> PILCROW SIGN
'\xb7' # 0xB7 -> MIDDLE DOT
'\xb8' # 0xB8 -> CEDILLA
'\u0105' # 0xB9 -> LATIN SMALL LETTER A WITH OGONEK
'\u015f' # 0xBA -> LATIN SMALL LETTER S WITH CEDILLA
'\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
'\u013d' # 0xBC -> LATIN CAPITAL LETTER L WITH CARON
'\u02dd' # 0xBD -> DOUBLE ACUTE ACCENT
'\u013e' # 0xBE -> LATIN SMALL LETTER L WITH CARON
'\u017c' # 0xBF -> LATIN SMALL LETTER Z WITH DOT ABOVE
'\u0154' # 0xC0 -> LATIN CAPITAL LETTER R WITH ACUTE
'\xc1' # 0xC1 -> LATIN CAPITAL LETTER A WITH ACUTE
'\xc2' # 0xC2 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
'\u0102' # 0xC3 -> LATIN CAPITAL LETTER A WITH BREVE
'\xc4' # 0xC4 -> LATIN CAPITAL LETTER A WITH DIAERESIS
'\u0139' # 0xC5 -> LATIN CAPITAL LETTER L WITH ACUTE
'\u0106' # 0xC6 -> LATIN CAPITAL LETTER C WITH ACUTE
'\xc7' # 0xC7 -> LATIN CAPITAL LETTER C WITH CEDILLA
'\u010c' # 0xC8 -> LATIN CAPITAL LETTER C WITH CARON
'\xc9' # 0xC9 -> LATIN CAPITAL LETTER E WITH ACUTE
'\u0118' # 0xCA -> LATIN CAPITAL LETTER E WITH OGONEK
'\xcb' # 0xCB -> LATIN CAPITAL LETTER E WITH DIAERESIS
'\u011a' # 0xCC -> LATIN CAPITAL LETTER E WITH CARON
'\xcd' # 0xCD -> LATIN CAPITAL LETTER I WITH ACUTE
'\xce' # 0xCE -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
'\u010e' # 0xCF -> LATIN CAPITAL LETTER D WITH CARON
'\u0110' # 0xD0 -> LATIN CAPITAL LETTER D WITH STROKE
'\u0143' # 0xD1 -> LATIN CAPITAL LETTER N WITH ACUTE
'\u0147' # 0xD2 -> LATIN CAPITAL LETTER N WITH CARON
'\xd3' # 0xD3 -> LATIN CAPITAL LETTER O WITH ACUTE
'\xd4' # 0xD4 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
'\u0150' # 0xD5 -> LATIN CAPITAL LETTER O WITH DOUBLE ACUTE
'\xd6' # 0xD6 -> LATIN CAPITAL LETTER O WITH DIAERESIS
'\xd7' # 0xD7 -> MULTIPLICATION SIGN
'\u0158' # 0xD8 -> LATIN CAPITAL LETTER R WITH CARON
'\u016e' # 0xD9 -> LATIN CAPITAL LETTER U WITH RING ABOVE
'\xda' # 0xDA -> LATIN CAPITAL LETTER U WITH ACUTE
'\u0170' # 0xDB -> LATIN CAPITAL LETTER U WITH DOUBLE ACUTE
'\xdc' # 0xDC -> LATIN CAPITAL LETTER U WITH DIAERESIS
'\xdd' # 0xDD -> LATIN CAPITAL LETTER Y WITH ACUTE
'\u0162' # 0xDE -> LATIN CAPITAL LETTER T WITH CEDILLA
'\xdf' # 0xDF -> LATIN SMALL LETTER SHARP S
'\u0155' # 0xE0 -> LATIN SMALL LETTER R WITH ACUTE
'\xe1' # 0xE1 -> LATIN SMALL LETTER A WITH ACUTE
'\xe2' # 0xE2 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
'\u0103' # 0xE3 -> LATIN SMALL LETTER A WITH BREVE
'\xe4' # 0xE4 -> LATIN SMALL LETTER A WITH DIAERESIS
'\u013a' # 0xE5 -> LATIN SMALL LETTER L WITH ACUTE
'\u0107' # 0xE6 -> LATIN SMALL LETTER C WITH ACUTE
'\xe7' # 0xE7 -> LATIN SMALL LETTER C WITH CEDILLA
'\u010d' # 0xE8 -> LATIN SMALL LETTER C WITH CARON
'\xe9' # 0xE9 -> LATIN SMALL LETTER E WITH ACUTE
'\u0119' # 0xEA -> LATIN SMALL LETTER E WITH OGONEK
'\xeb' # 0xEB -> LATIN SMALL LETTER E WITH DIAERESIS
'\u011b' # 0xEC -> LATIN SMALL LETTER E WITH CARON
'\xed' # 0xED -> LATIN SMALL LETTER I WITH ACUTE
'\xee' # 0xEE -> LATIN SMALL LETTER I WITH CIRCUMFLEX
'\u010f' # 0xEF -> LATIN SMALL LETTER D WITH CARON
'\u0111' # 0xF0 -> LATIN SMALL LETTER D WITH STROKE
'\u0144' # 0xF1 -> LATIN SMALL LETTER N WITH ACUTE
'\u0148' # 0xF2 -> LATIN SMALL LETTER N WITH CARON
'\xf3' # 0xF3 -> LATIN SMALL LETTER O WITH ACUTE
'\xf4' # 0xF4 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
'\u0151' # 0xF5 -> LATIN SMALL LETTER O WITH DOUBLE ACUTE
'\xf6' # 0xF6 -> LATIN SMALL LETTER O WITH DIAERESIS
'\xf7' # 0xF7 -> DIVISION SIGN
'\u0159' # 0xF8 -> LATIN SMALL LETTER R WITH CARON
'\u016f' # 0xF9 -> LATIN SMALL LETTER U WITH RING ABOVE
'\xfa' # 0xFA -> LATIN SMALL LETTER U WITH ACUTE
'\u0171' # 0xFB -> LATIN SMALL LETTER U WITH DOUBLE ACUTE
'\xfc' # 0xFC -> LATIN SMALL LETTER U WITH DIAERESIS
'\xfd' # 0xFD -> LATIN SMALL LETTER Y WITH ACUTE
'\u0163' # 0xFE -> LATIN SMALL LETTER T WITH CEDILLA
'\u02d9' # 0xFF -> DOT ABOVE
)
### Encoding table
# Inverse mapping (unicode character -> byte) derived from decoding_table above.
encoding_table=codecs.charmap_build(decoding_table)
|
spool/django-allauth | refs/heads/master | allauth/socialaccount/providers/weixin/views.py | 6 | import requests
from allauth.account import app_settings
from allauth.compat import reverse
from allauth.socialaccount.providers.oauth2.views import (
OAuth2Adapter,
OAuth2CallbackView,
OAuth2LoginView,
)
from allauth.utils import build_absolute_uri
from .client import WeixinOAuth2Client
from .provider import WeixinProvider
class WeixinOAuth2Adapter(OAuth2Adapter):
    """OAuth2 adapter for Weixin (WeChat) QR-code login."""
    provider_id = WeixinProvider.id
    access_token_url = 'https://api.weixin.qq.com/sns/oauth2/access_token'
    profile_url = 'https://api.weixin.qq.com/sns/userinfo'

    @property
    def authorize_url(self):
        # The authorize endpoint may be overridden via provider settings.
        return self.get_provider().get_settings().get(
            'AUTHORIZE_URL', 'https://open.weixin.qq.com/connect/qrconnect')

    def complete_login(self, request, app, token, **kwargs):
        """Fetch the Weixin user profile and build a social login from it."""
        query = {'access_token': token.token,
                 'openid': kwargs.get('response', {}).get('openid')}
        extra_data = requests.get(self.profile_url, params=query).json()
        nickname = extra_data.get('nickname')
        if nickname:
            # Weixin delivers the nickname with raw escapes; re-decode as UTF-8.
            extra_data['nickname'] = nickname.encode(
                'raw_unicode_escape').decode('utf-8')
        provider = self.get_provider()
        return provider.sociallogin_from_response(request, extra_data)
class WeixinOAuth2ClientMixin(object):
    """Mixin that substitutes the Weixin-specific OAuth2 client."""

    def get_client(self, request, app):
        # Turn the provider callback route into an absolute URL, using the
        # adapter's protocol when set, otherwise the configured default.
        relative_callback = reverse(self.adapter.provider_id + "_callback")
        scheme = (
            self.adapter.redirect_uri_protocol or
            app_settings.DEFAULT_HTTP_PROTOCOL)
        absolute_callback = build_absolute_uri(
            request, relative_callback, protocol=scheme)
        scope = self.adapter.get_provider().get_scope(request)
        return WeixinOAuth2Client(
            self.request, app.client_id, app.secret,
            self.adapter.access_token_method,
            self.adapter.access_token_url,
            absolute_callback,
            scope)
class WeixinOAuth2LoginView(WeixinOAuth2ClientMixin, OAuth2LoginView):
    # Standard OAuth2 login view; the mixin supplies the Weixin client.
    pass
class WeixinOAuth2CallbackView(WeixinOAuth2ClientMixin, OAuth2CallbackView):
    # Standard OAuth2 callback view; the mixin supplies the Weixin client.
    pass
# View callables wired into allauth's provider urlpatterns.
oauth2_login = WeixinOAuth2LoginView.adapter_view(WeixinOAuth2Adapter)
oauth2_callback = WeixinOAuth2CallbackView.adapter_view(WeixinOAuth2Adapter)
|
qu6d83fu/Python | refs/heads/master | test/threading/condition2.py | 1 | from threading import *
import time
class itemX:
    """Counter of available products shared between producer/consumer threads.

    Callers must hold the coordinating Condition while mutating the count.
    """

    def __init__(self):
        # Number of products currently in stock.
        self.cnt = 0

    def produce(self, num=1):
        """Add *num* products (the original implementation ignored *num*
        and always added exactly one)."""
        self.cnt += num

    def consume(self, num=1):
        """Remove *num* products if available, else warn and leave the
        count untouched (the original ignored *num* as well)."""
        if self.cnt >= num:
            self.cnt -= num
        else:
            # Same warning as before; single-arg print works in py2 and py3.
            print('WARNING******WARNING')

    def isEmpty(self):
        """Return True when no products are in stock."""
        return not self.cnt

    def getCount(self):
        """Return the current stock count."""
        return self.cnt
class Producer(Thread):
    """Thread that adds one product to the shared item every `sleeptime` seconds."""
    def __init__(self, condition, item, sleeptime=2):
        Thread.__init__(self)
        # Condition shared with the consumers; item is the shared stock counter.
        self.con=condition
        self.item=item
        self.sleeptime=sleeptime
    def run(self):
        # Produce forever, waking all waiting consumers after each product.
        while(True):
            time.sleep(self.sleeptime)
            self.con.acquire()
            self.item.produce()
            print 'produce 1 product\r\n'
            print self.item.getCount()
            self.con.notifyAll()
            self.con.release()
class Consumer(Thread):
    """Thread that blocks until stock exists and consumes one product per cycle."""
    def __init__(self, condition, item, sleeptime=2):
        Thread.__init__(self)
        # Condition shared with the producer; item is the shared stock counter.
        self.con=condition
        self.item=item
        self.sleeptime=sleeptime
    def run(self):
        while(True):
            time.sleep(self.sleeptime)
            self.con.acquire()
            print '({0})enter'.format(self.getName())
            # wait() releases the lock until the producer notifies; re-check
            # emptiness in a loop to guard against spurious/competing wakeups.
            while self.item.isEmpty():
                print '({0})wait'.format(self.getName())
                self.con.wait()
            self.item.consume()
            print '({0})consume 1 product\r\n'.format(self.getName())
            print self.item.getCount()
            self.con.release()
if __name__=="__main__":
    # One producer and two consumers coordinate on a shared Condition.
    X=itemX()
    cond = Condition()
    Producer(cond,X).start()
    Consumer(cond,X).start()
    Consumer(cond,X).start()
    # Keep the main thread alive without busy-spinning: the original
    # `while True: pass` pinned a CPU core at 100%.
    while (True):
        time.sleep(1)
|
jmesteve/saas3 | refs/heads/master | openerpcommand/main.py | 16 | import openerpcommand
def run():
    """Main entry point for the openerp-command tool.

    Parses the command line and invokes the handler bound to the
    selected sub-command.
    """
    parser = openerpcommand.main_parser()
    namespace = parser.parse_args()
    namespace.run(namespace)
|
koparasy/faultinjection-gem5 | refs/heads/master | src/mem/slicc/ast/TransitionDeclAST.py | 9 | # Copyright (c) 1999-2008 Mark D. Hill and David A. Wood
# Copyright (c) 2009 The Hewlett-Packard Development Company
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from slicc.ast.DeclAST import DeclAST
from slicc.symbols import Transition
class TransitionDeclAST(DeclAST):
    """AST node for a SLICC transition declaration.

    Expands the cross product of states and events into individual
    Transition symbols registered on the enclosing state machine.
    """

    def __init__(self, slicc, states, events, next_state, pairs, actions):
        super(TransitionDeclAST, self).__init__(slicc, pairs)
        self.states = states
        self.events = events
        self.next_state = next_state
        self.actions = actions

    def __repr__(self):
        return "[TransitionDecl: ]"

    def generate(self):
        machine = self.symtab.state_machine
        if machine is None:
            self.error("Transition declaration not part of a machine.")

        # Every referenced action must belong to the machine.
        for action in self.actions:
            if action not in machine.actions:
                self.error("Invalid action: %s is not part of machine: %s" %
                           (action, machine))

        for state in self.states:
            if state not in machine.states:
                self.error("Invalid state: %s is not part of machine: %s" %
                           (state, machine))
            # Without an explicit next state, the transition stays in `state`.
            target = self.next_state or state
            for event in self.events:
                if event not in machine.events:
                    self.error("Invalid event: %s is not part of machine: %s" %
                               (event, machine))
                machine.addTransition(
                    Transition(self.symtab, machine, state, event, target,
                               self.actions, self.location, self.pairs))
|
elena/django | refs/heads/master | tests/queries/test_q.py | 5 | from django.db.models import F, Q
from django.test import SimpleTestCase
class QTests(SimpleTestCase):
    """Tests for Q object combination (&, |) and (de)construction for migrations."""

    def test_combine_and_empty(self):
        # Combining with an empty Q() is a no-op in either operand position.
        q = Q(x=1)
        self.assertEqual(q & Q(), q)
        self.assertEqual(Q() & q, q)
        # dict_keys view: a non-list iterable lookup value must survive combining.
        q = Q(x__in={}.keys())
        self.assertEqual(q & Q(), q)
        self.assertEqual(Q() & q, q)

    def test_combine_and_both_empty(self):
        self.assertEqual(Q() & Q(), Q())

    def test_combine_or_empty(self):
        q = Q(x=1)
        self.assertEqual(q | Q(), q)
        self.assertEqual(Q() | q, q)
        q = Q(x__in={}.keys())
        self.assertEqual(q | Q(), q)
        self.assertEqual(Q() | q, q)

    def test_combine_or_both_empty(self):
        self.assertEqual(Q() | Q(), Q())

    def test_combine_not_q_object(self):
        # Combining with a non-Q operand raises TypeError naming the operand.
        obj = object()
        q = Q(x=1)
        with self.assertRaisesMessage(TypeError, str(obj)):
            q | obj
        with self.assertRaisesMessage(TypeError, str(obj)):
            q & obj

    def test_deconstruct(self):
        # deconstruct() returns (dotted path, args, kwargs) for migrations.
        q = Q(price__gt=F('discounted_price'))
        path, args, kwargs = q.deconstruct()
        self.assertEqual(path, 'django.db.models.Q')
        self.assertEqual(args, ())
        self.assertEqual(kwargs, {'price__gt': F('discounted_price')})

    def test_deconstruct_negated(self):
        # Negation is carried through the special '_negated' kwarg.
        q = ~Q(price__gt=F('discounted_price'))
        path, args, kwargs = q.deconstruct()
        self.assertEqual(args, ())
        self.assertEqual(kwargs, {
            'price__gt': F('discounted_price'),
            '_negated': True,
        })

    def test_deconstruct_or(self):
        # OR-combined Qs deconstruct to positional children + '_connector'.
        q1 = Q(price__gt=F('discounted_price'))
        q2 = Q(price=F('discounted_price'))
        q = q1 | q2
        path, args, kwargs = q.deconstruct()
        self.assertEqual(args, (
            ('price__gt', F('discounted_price')),
            ('price', F('discounted_price')),
        ))
        self.assertEqual(kwargs, {'_connector': 'OR'})

    def test_deconstruct_and(self):
        # AND is the default connector, so no '_connector' kwarg is emitted.
        q1 = Q(price__gt=F('discounted_price'))
        q2 = Q(price=F('discounted_price'))
        q = q1 & q2
        path, args, kwargs = q.deconstruct()
        self.assertEqual(args, (
            ('price__gt', F('discounted_price')),
            ('price', F('discounted_price')),
        ))
        self.assertEqual(kwargs, {})

    def test_deconstruct_multiple_kwargs(self):
        # Multiple kwargs are emitted as sorted positional pairs.
        q = Q(price__gt=F('discounted_price'), price=F('discounted_price'))
        path, args, kwargs = q.deconstruct()
        self.assertEqual(args, (
            ('price', F('discounted_price')),
            ('price__gt', F('discounted_price')),
        ))
        self.assertEqual(kwargs, {})

    def test_deconstruct_nested(self):
        # A nested Q is preserved as a single positional child.
        q = Q(Q(price__gt=F('discounted_price')))
        path, args, kwargs = q.deconstruct()
        self.assertEqual(args, (Q(price__gt=F('discounted_price')),))
        self.assertEqual(kwargs, {})

    def test_reconstruct(self):
        # Round trip: Q(*args, **kwargs) rebuilds an equal object.
        q = Q(price__gt=F('discounted_price'))
        path, args, kwargs = q.deconstruct()
        self.assertEqual(Q(*args, **kwargs), q)

    def test_reconstruct_negated(self):
        q = ~Q(price__gt=F('discounted_price'))
        path, args, kwargs = q.deconstruct()
        self.assertEqual(Q(*args, **kwargs), q)

    def test_reconstruct_or(self):
        q1 = Q(price__gt=F('discounted_price'))
        q2 = Q(price=F('discounted_price'))
        q = q1 | q2
        path, args, kwargs = q.deconstruct()
        self.assertEqual(Q(*args, **kwargs), q)

    def test_reconstruct_and(self):
        q1 = Q(price__gt=F('discounted_price'))
        q2 = Q(price=F('discounted_price'))
        q = q1 & q2
        path, args, kwargs = q.deconstruct()
        self.assertEqual(Q(*args, **kwargs), q)
|
leafclick/intellij-community | refs/heads/master | plugins/hg4idea/testData/bin/mercurial/dispatch.py | 91 | # dispatch.py - command dispatching for mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from i18n import _
import os, sys, atexit, signal, pdb, socket, errno, shlex, time, traceback, re
import util, commands, hg, fancyopts, extensions, hook, error
import cmdutil, encoding
import ui as uimod
class request(object):
    """Everything needed to run one hg command: argv, ui, repo, and streams."""
    def __init__(self, args, ui=None, repo=None, fin=None, fout=None,
                 ferr=None):
        self.args = args
        self.ui = ui
        self.repo = repo
        # input/output/error streams (override the ui defaults when set)
        self.fin = fin
        self.fout = fout
        self.ferr = ferr
def run():
    "run the command in sys.argv"
    # Mask to 8 bits: shells only see the low byte of the exit status.
    sys.exit((dispatch(request(sys.argv[1:])) or 0) & 255)
def dispatch(req):
    "run the command specified in req.args"
    # Pick the most specific error stream available before a ui exists.
    if req.ferr:
        ferr = req.ferr
    elif req.ui:
        ferr = req.ui.ferr
    else:
        ferr = sys.stderr

    try:
        if not req.ui:
            req.ui = uimod.ui()
        if '--traceback' in req.args:
            req.ui.setconfig('ui', 'traceback', 'on')

        # set ui streams from the request
        if req.fin:
            req.ui.fin = req.fin
        if req.fout:
            req.ui.fout = req.fout
        if req.ferr:
            req.ui.ferr = req.ferr
    except util.Abort, inst:
        ferr.write(_("abort: %s\n") % inst)
        if inst.hint:
            ferr.write(_("(%s)\n") % inst.hint)
        return -1
    except error.ParseError, inst:
        if len(inst.args) > 1:
            ferr.write(_("hg: parse error at %s: %s\n") %
                       (inst.args[1], inst.args[0]))
        else:
            ferr.write(_("hg: parse error: %s\n") % inst.args[0])
        return -1

    # Quote args containing spaces so the logged command line is unambiguous.
    msg = ' '.join(' ' in a and repr(a) or a for a in req.args)
    starttime = time.time()
    ret = None
    try:
        ret = _runcatch(req)
        return ret
    finally:
        # Log the command, its exit status, and wall-clock duration.
        duration = time.time() - starttime
        req.ui.log("commandfinish", "%s exited %s after %0.2f seconds\n",
                   msg, ret or 0, duration)
def _runcatch(req):
    """Run _dispatch(req), translating every known exception into a message
    on req.ui plus a -1 (or specific) return code; unknown exceptions are
    reported with version/extension info and re-raised."""
    def catchterm(*args):
        raise error.SignalInterrupt

    ui = req.ui
    try:
        # Convert termination signals into SignalInterrupt exceptions.
        for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
            num = getattr(signal, name, None)
            if num:
                signal.signal(num, catchterm)
    except ValueError:
        pass # happens if called in a thread

    try:
        try:
            # enter the debugger before command execution
            if '--debugger' in req.args:
                ui.warn(_("entering debugger - "
                          "type c to continue starting hg or h for help\n"))
                pdb.set_trace()
            try:
                return _dispatch(req)
            finally:
                ui.flush()
        except: # re-raises
            # enter the debugger when we hit an exception
            if '--debugger' in req.args:
                traceback.print_exc()
                pdb.post_mortem(sys.exc_info()[2])
            ui.traceback()
            raise

    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.AmbiguousCommand, inst:
        ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
                (inst.args[0], " ".join(inst.args[1])))
    except error.ParseError, inst:
        if len(inst.args) > 1:
            ui.warn(_("hg: parse error at %s: %s\n") %
                    (inst.args[1], inst.args[0]))
        else:
            ui.warn(_("hg: parse error: %s\n") % inst.args[0])
        return -1
    except error.LockHeld, inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %s') % inst.locker
        else:
            reason = _('lock held by %s') % inst.locker
        ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
    except error.LockUnavailable, inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or inst.filename, inst.strerror))
    except error.CommandError, inst:
        if inst.args[0]:
            ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
            commands.help_(ui, inst.args[0], full=False, command=True)
        else:
            ui.warn(_("hg: %s\n") % inst.args[1])
            commands.help_(ui, 'shortlist')
    except error.OutOfBandError, inst:
        ui.warn(_("abort: remote error:\n"))
        ui.warn(''.join(inst.args))
    except error.RepoError, inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError, inst:
        ui.warn(_("abort: %s") % inst.args[0])
        if not isinstance(inst.args[1], basestring):
            ui.warn(" %r\n" % (inst.args[1],))
        elif not inst.args[1]:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
    except error.RevlogError, inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.SignalInterrupt:
        ui.warn(_("killed!\n"))
    except error.UnknownCommand, inst:
        ui.warn(_("hg: unknown command '%s'\n") % inst.args[0])
        try:
            # check if the command is in a disabled extension
            # (but don't check for extensions themselves)
            commands.help_(ui, inst.args[0], unknowncmd=True)
        except error.UnknownCommand:
            commands.help_(ui, 'shortlist')
    except error.InterventionRequired, inst:
        ui.warn("%s\n" % inst)
        return 1
    except util.Abort, inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError, inst:
        ui.warn(_("abort: %s!\n") % inst)
        # Give targeted hints for the common broken-install import failures.
        m = str(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError, inst:
        if util.safehasattr(inst, "code"):
            ui.warn(_("abort: %s\n") % inst)
        elif util.safehasattr(inst, "reason"):
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            ui.warn(_("abort: error: %s\n") % reason)
        elif util.safehasattr(inst, "args") and inst.args[0] == errno.EPIPE:
            if ui.debugflag:
                ui.warn(_("broken pipe\n"))
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
            else:
                ui.warn(_("abort: %s\n") % inst.strerror)
        else:
            raise
    except OSError, inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (inst.strerror, inst.filename))
        else:
            ui.warn(_("abort: %s\n") % inst.strerror)
    except KeyboardInterrupt:
        try:
            ui.warn(_("interrupted!\n"))
        except IOError, inst:
            if inst.errno == errno.EPIPE:
                if ui.debugflag:
                    ui.warn(_("\nbroken pipe\n"))
            else:
                raise
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit, inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and and pass exit code to caller.
        return inst.code
    except socket.error, inst:
        ui.warn(_("abort: %s\n") % inst.args[-1])
    except: # re-raises
        myver = util.version()
        # For compatibility checking, we discard the portion of the hg
        # version after the + on the assumption that if a "normal
        # user" is running a build with a + in it the packager
        # probably built from fairly close to a tag and anyone with a
        # 'make local' copy of hg (where the version number can be out
        # of date) will be clueful enough to notice the implausible
        # version number and try updating.
        compare = myver.split('+')[0]
        ct = tuplever(compare)
        worst = None, ct, ''
        # Blame the extension whose tested-with range is furthest from us.
        for name, mod in extensions.extensions():
            testedwith = getattr(mod, 'testedwith', '')
            report = getattr(mod, 'buglink', _('the extension author.'))
            if not testedwith.strip():
                # We found an untested extension. It's likely the culprit.
                worst = name, 'unknown', report
                break
            if compare not in testedwith.split() and testedwith != 'internal':
                tested = [tuplever(v) for v in testedwith.split()]
                lower = [t for t in tested if t < ct]
                nearest = max(lower or tested)
                if worst[0] is None or nearest < worst[1]:
                    worst = name, nearest, report
        if worst[0] is not None:
            name, testedwith, report = worst
            if not isinstance(testedwith, str):
                testedwith = '.'.join([str(c) for c in testedwith])
            warning = (_('** Unknown exception encountered with '
                         'possibly-broken third-party extension %s\n'
                         '** which supports versions %s of Mercurial.\n'
                         '** Please disable %s and try your action again.\n'
                         '** If that fixes the bug please report it to %s\n')
                       % (name, testedwith, name, report))
        else:
            warning = (_("** unknown exception encountered, "
                         "please report by visiting\n") +
                       _("** http://mercurial.selenic.com/wiki/BugTracker\n"))
        warning += ((_("** Python %s\n") % sys.version.replace('\n', '')) +
                    (_("** Mercurial Distributed SCM (version %s)\n") % myver) +
                    (_("** Extensions loaded: %s\n") %
                     ", ".join([x[0] for x in extensions.extensions()])))
        ui.log("commandexception", "%s\n%s\n", warning, traceback.format_exc())
        ui.warn(warning)
        raise

    return -1
def tuplever(v):
    """Turn a dotted version string into a tuple of ints, or () if any
    component is not a plain integer (e.g. '2.3rc1')."""
    try:
        return tuple(int(part) for part in v.split('.'))
    except ValueError:
        return tuple()
def aliasargs(fn, givenargs):
    """Combine an alias function's stored args with the user-given args,
    substituting $1..$n positional placeholders from givenargs."""
    args = getattr(fn, 'args', [])
    if args:
        cmd = ' '.join(map(util.shellquote, args))

        nums = []
        def replacer(m):
            # Replace $N with the N-th given argument (1-based).
            num = int(m.group(1)) - 1
            nums.append(num)
            if num < len(givenargs):
                return givenargs[num]
            raise util.Abort(_('too few arguments for command alias'))
        cmd = re.sub(r'\$(\d+|\$)', replacer, cmd)
        # Arguments consumed by a placeholder are not appended again below.
        givenargs = [x for i, x in enumerate(givenargs)
                     if i not in nums]
        args = shlex.split(cmd)
    return args + givenargs
class cmdalias(object):
    """A callable command built from an [alias] config entry.

    Resolves the alias definition against cmdtable; supports '!' shell
    aliases and records whether the alias shadows a real command or is
    broken (badalias)."""
    def __init__(self, name, definition, cmdtable):
        self.name = self.cmd = name
        self.cmdname = ''
        self.definition = definition
        self.args = []
        self.opts = []
        self.help = ''
        self.norepo = True
        self.optionalrepo = False
        self.badalias = False

        try:
            aliases, entry = cmdutil.findcmd(self.name, cmdtable)
            for alias, e in cmdtable.iteritems():
                if e is entry:
                    self.cmd = alias
                    break
            self.shadows = True
        except error.UnknownCommand:
            self.shadows = False

        if not self.definition:
            def fn(ui, *args):
                ui.warn(_("no definition for alias '%s'\n") % self.name)
                return 1
            self.fn = fn
            self.badalias = True
            return

        if self.definition.startswith('!'):
            # Shell alias: run the rest of the definition via the system shell.
            self.shell = True
            def fn(ui, *args):
                env = {'HG_ARGS': ' '.join((self.name,) + args)}
                def _checkvar(m):
                    if m.groups()[0] == '$':
                        return m.group()
                    elif int(m.groups()[0]) <= len(args):
                        return m.group()
                    else:
                        ui.debug("No argument found for substitution "
                                 "of %i variable in alias '%s' definition."
                                 % (int(m.groups()[0]), self.name))
                        return ''
                cmd = re.sub(r'\$(\d+|\$)', _checkvar, self.definition[1:])
                replace = dict((str(i + 1), arg) for i, arg in enumerate(args))
                replace['0'] = self.name
                replace['@'] = ' '.join(args)
                cmd = util.interpolate(r'\$', replace, cmd, escape_prefix=True)
                return util.system(cmd, environ=env, out=ui.fout)
            self.fn = fn
            return

        args = shlex.split(self.definition)
        self.cmdname = cmd = args.pop(0)
        args = map(util.expandpath, args)

        # These options would change global state, so they may only be
        # given on the real command line, not baked into an alias.
        for invalidarg in ("--cwd", "-R", "--repository", "--repo", "--config"):
            if _earlygetopt([invalidarg], args):
                def fn(ui, *args):
                    ui.warn(_("error in definition for alias '%s': %s may only "
                              "be given on the command line\n")
                            % (self.name, invalidarg))
                    return 1
                self.fn = fn
                self.badalias = True
                return

        try:
            tableentry = cmdutil.findcmd(cmd, cmdtable, False)[1]
            if len(tableentry) > 2:
                self.fn, self.opts, self.help = tableentry
            else:
                self.fn, self.opts = tableentry

            self.args = aliasargs(self.fn, args)
            if cmd not in commands.norepo.split(' '):
                self.norepo = False
            if cmd in commands.optionalrepo.split(' '):
                self.optionalrepo = True
            if self.help.startswith("hg " + cmd):
                # drop prefix in old-style help lines so hg shows the alias
                self.help = self.help[4 + len(cmd):]
            self.__doc__ = self.fn.__doc__

        except error.UnknownCommand:
            def fn(ui, *args):
                ui.warn(_("alias '%s' resolves to unknown command '%s'\n") \
                        % (self.name, cmd))
                try:
                    # check if the command is in a disabled extension
                    commands.help_(ui, cmd, unknowncmd=True)
                except error.UnknownCommand:
                    pass
                return 1
            self.fn = fn
            self.badalias = True
        except error.AmbiguousCommand:
            def fn(ui, *args):
                ui.warn(_("alias '%s' resolves to ambiguous command '%s'\n") \
                        % (self.name, cmd))
                return 1
            self.fn = fn
            self.badalias = True

    def __call__(self, ui, *args, **opts):
        if self.shadows:
            ui.debug("alias '%s' shadows command '%s'\n" %
                     (self.name, self.cmdname))

        if util.safehasattr(self, 'shell'):
            return self.fn(ui, *args, **opts)
        else:
            try:
                util.checksignature(self.fn)(ui, *args, **opts)
            except error.SignatureError:
                # Report the expanded form so the user can see what failed.
                args = ' '.join([self.cmdname] + self.args)
                ui.debug("alias '%s' expands to '%s'\n" % (self.name, args))
                raise
def addaliases(ui, cmdtable):
    """Install every [alias] config entry into cmdtable as a cmdalias."""
    # aliases are processed after extensions have been loaded, so they
    # may use extension commands. Aliases can also use other alias definitions,
    # but only if they have been defined prior to the current definition.
    for alias, definition in ui.configitems('alias'):
        aliasdef = cmdalias(alias, definition, cmdtable)

        try:
            # Skip re-registering an identical, already-installed alias.
            olddef = cmdtable[aliasdef.cmd][0]
            if olddef.definition == aliasdef.definition:
                continue
        except (KeyError, AttributeError):
            # definition might not exist or it might not be a cmdalias
            pass

        cmdtable[aliasdef.name] = (aliasdef, aliasdef.opts, aliasdef.help)
        if aliasdef.norepo:
            commands.norepo += ' %s' % alias
        if aliasdef.optionalrepo:
            commands.optionalrepo += ' %s' % alias
def _parse(ui, args):
    """Parse global and per-command options from args.

    Returns (cmd, cmd function or None, remaining args, global options,
    command options)."""
    options = {}
    cmdoptions = {}

    try:
        args = fancyopts.fancyopts(args, commands.globalopts, options)
    except fancyopts.getopt.GetoptError, inst:
        raise error.CommandError(None, inst)

    if args:
        cmd, args = args[0], args[1:]
        aliases, entry = cmdutil.findcmd(cmd, commands.table,
                                         ui.configbool("ui", "strict"))
        cmd = aliases[0]
        args = aliasargs(entry[0], args)
        # Prepend the user's [defaults] for this command, if any.
        defaults = ui.config("defaults", cmd)
        if defaults:
            args = map(util.expandpath, shlex.split(defaults)) + args
        c = list(entry[1])
    else:
        cmd = None
        c = []

    # combine global options into local
    for o in commands.globalopts:
        c.append((o[0], o[1], options[o[1]], o[3]))

    try:
        args = fancyopts.fancyopts(args, c, cmdoptions, True)
    except fancyopts.getopt.GetoptError, inst:
        raise error.CommandError(cmd, inst)

    # separate global options back out
    for o in commands.globalopts:
        n = o[1]
        options[n] = cmdoptions[n]
        del cmdoptions[n]

    return (cmd, cmd and entry[0] or None, args, options, cmdoptions)
def _parseconfig(ui, config):
    """parse the --config options from the command line"""
    configs = []
    for cfg in config:
        try:
            # Each entry must look like section.name=value.
            name, value = cfg.split('=', 1)
            section, name = name.split('.', 1)
            if not section or not name:
                raise IndexError
            ui.setconfig(section, name, value)
            configs.append((section, name, value))
        except (IndexError, ValueError):
            raise util.Abort(_('malformed --config option: %r '
                               '(use --config section.name=value)') % cfg)

    return configs
def _earlygetopt(aliases, args):
    """Return list of values for an option (or aliases).

    The values are listed in the order they appear in args.
    The options and values are removed from args.

    >>> args = ['x', '--cwd', 'foo', 'y']
    >>> _earlygetopt(['--cwd'], args), args
    (['foo'], ['x', 'y'])

    >>> args = ['x', '--cwd=bar', 'y']
    >>> _earlygetopt(['--cwd'], args), args
    (['bar'], ['x', 'y'])

    >>> args = ['x', '-R', 'foo', 'y']
    >>> _earlygetopt(['-R'], args), args
    (['foo'], ['x', 'y'])

    >>> args = ['x', '-Rbar', 'y']
    >>> _earlygetopt(['-R'], args), args
    (['bar'], ['x', 'y'])
    """
    try:
        # Everything after "--" is a positional argument, never an option.
        argcount = args.index("--")
    except ValueError:
        argcount = len(args)
    shortopts = [opt for opt in aliases if len(opt) == 2]
    values = []
    pos = 0
    while pos < argcount:
        fullarg = arg = args[pos]
        # Split "--opt=value" into the option part for matching.
        equals = arg.find('=')
        if equals > -1:
            arg = arg[:equals]
        if arg in aliases:
            del args[pos]
            if equals > -1:
                values.append(fullarg[equals + 1:])
                argcount -= 1
            else:
                if pos + 1 >= argcount:
                    # ignore and let getopt report an error if there is no value
                    break
                values.append(args.pop(pos))
                argcount -= 2
        elif arg[:2] in shortopts:
            # short option can have no following space, e.g. hg log -Rfoo
            values.append(args.pop(pos)[2:])
            argcount -= 1
        else:
            pos += 1
    return values
def runcommand(lui, repo, cmd, fullargs, ui, options, d, cmdpats, cmdoptions):
    """Run callable d for cmd, wrapped in its pre-/post- command hooks."""
    # run pre-hook, and abort if it fails
    hook.hook(lui, repo, "pre-%s" % cmd, True, args=" ".join(fullargs),
              pats=cmdpats, opts=cmdoptions)
    ret = _runcommand(ui, options, cmd, d)
    # run post-hook, passing command result
    hook.hook(lui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
              result=ret, pats=cmdpats, opts=cmdoptions)
    return ret
def _getlocal(ui, rpath):
    """Return (path, local ui object) for the given target path.

    Takes paths in [cwd]/.hg/hgrc into account.
    """
    try:
        wd = os.getcwd()
    except OSError, e:
        raise util.Abort(_("error getting current working directory: %s") %
                         e.strerror)
    path = cmdutil.findrepo(wd) or ""
    if not path:
        lui = ui
    else:
        # Layer the repository's own hgrc on top of the global config.
        lui = ui.copy()
        lui.readconfig(os.path.join(path, ".hg", "hgrc"), path)

    if rpath and rpath[-1]:
        # An explicit -R/--repository path overrides the detected repo.
        path = lui.expandpath(rpath[-1])
        lui = ui.copy()
        lui.readconfig(os.path.join(path, ".hg", "hgrc"), path)

    return path, lui
def _checkshellalias(lui, ui, args):
    """If args invoke a shell ('!') alias, return a runner callable for it;
    otherwise return None and leave the command tables untouched."""
    options = {}

    try:
        args = fancyopts.fancyopts(args, commands.globalopts, options)
    except fancyopts.getopt.GetoptError:
        return

    if not args:
        return

    # Save the global command-table state so it can be restored if this
    # turns out not to be a shell alias.
    norepo = commands.norepo
    optionalrepo = commands.optionalrepo
    def restorecommands():
        commands.norepo = norepo
        commands.optionalrepo = optionalrepo

    cmdtable = commands.table.copy()
    addaliases(lui, cmdtable)

    cmd = args[0]
    try:
        aliases, entry = cmdutil.findcmd(cmd, cmdtable,
                                         lui.configbool("ui", "strict"))
    except (error.AmbiguousCommand, error.UnknownCommand):
        restorecommands()
        return

    cmd = aliases[0]
    fn = entry[0]

    if cmd and util.safehasattr(fn, 'shell'):
        # Shell aliases bypass normal dispatch; run them via runcommand.
        d = lambda: fn(ui, *args[1:])
        return lambda: runcommand(lui, None, cmd, args[:1], ui, options, d,
                                  [], {})

    restorecommands()
_loaded = set()
def _dispatch(req):
    """Parse global options from ``req.args``, load extensions, locate the
    repository and run the requested command under its hooks.

    Statement order is significant throughout: --config/--cwd/-R must be
    consumed before any config or repository is read.
    """
    args = req.args
    ui = req.ui

    # read --config before doing anything else
    # (e.g. to change trust settings for reading .hg/hgrc)
    cfgs = _parseconfig(ui, _earlygetopt(['--config'], args))

    # check for cwd
    cwd = _earlygetopt(['--cwd'], args)
    if cwd:
        os.chdir(cwd[-1])

    rpath = _earlygetopt(["-R", "--repository", "--repo"], args)
    path, lui = _getlocal(ui, rpath)

    # Now that we're operating in the right directory/repository with
    # the right config settings, check for shell aliases
    shellaliasfn = _checkshellalias(lui, ui, args)
    if shellaliasfn:
        return shellaliasfn()

    # Configure extensions in phases: uisetup, extsetup, cmdtable, and
    # reposetup. Programs like TortoiseHg will call _dispatch several
    # times so we keep track of configured extensions in _loaded.
    extensions.loadall(lui)
    exts = [ext for ext in extensions.extensions() if ext[0] not in _loaded]
    # Propagate any changes to lui.__class__ by extensions
    ui.__class__ = lui.__class__

    # (uisetup and extsetup are handled in extensions.loadall)

    for name, module in exts:
        cmdtable = getattr(module, 'cmdtable', {})
        overrides = [cmd for cmd in cmdtable if cmd in commands.table]
        if overrides:
            ui.warn(_("extension '%s' overrides commands: %s\n")
                    % (name, " ".join(overrides)))
        commands.table.update(cmdtable)
        _loaded.add(name)

    # (reposetup is handled in hg.repository)

    addaliases(lui, commands.table)

    # check for fallback encoding
    fallback = lui.config('ui', 'fallbackencoding')
    if fallback:
        encoding.fallbackencoding = fallback

    fullargs = args
    cmd, func, args, options, cmdoptions = _parse(lui, args)

    # these options were already consumed by _earlygetopt above; seeing
    # them here means they were abbreviated or attached incorrectly
    if options["config"]:
        raise util.Abort(_("option --config may not be abbreviated!"))
    if options["cwd"]:
        raise util.Abort(_("option --cwd may not be abbreviated!"))
    if options["repository"]:
        raise util.Abort(_(
            "option -R has to be separated from other options (e.g. not -qR) "
            "and --repository may only be abbreviated as --repo!"))

    if options["encoding"]:
        encoding.encoding = options["encoding"]
    if options["encodingmode"]:
        encoding.encodingmode = options["encodingmode"]
    if options["time"]:
        def get_times():
            t = os.times()
            if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
                t = (t[0], t[1], t[2], t[3], time.clock())
            return t
        s = get_times()
        def print_time():
            t = get_times()
            ui.warn(_("time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
                    (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
        atexit.register(print_time)

    uis = set([ui, lui])

    if req.repo:
        uis.add(req.repo.ui)

        # copy configs that were passed on the cmdline (--config) to the repo ui
        for cfg in cfgs:
            req.repo.ui.setconfig(*cfg)

    if options['verbose'] or options['debug'] or options['quiet']:
        for opt in ('verbose', 'debug', 'quiet'):
            val = str(bool(options[opt]))
            for ui_ in uis:
                ui_.setconfig('ui', opt, val)

    if options['traceback']:
        for ui_ in uis:
            ui_.setconfig('ui', 'traceback', 'on')

    if options['noninteractive']:
        for ui_ in uis:
            ui_.setconfig('ui', 'interactive', 'off')

    if cmdoptions.get('insecure', False):
        for ui_ in uis:
            ui_.setconfig('web', 'cacerts', '')

    if options['version']:
        return commands.version_(ui)
    if options['help']:
        return commands.help_(ui, cmd)
    elif not cmd:
        return commands.help_(ui, 'shortlist')

    repo = None
    cmdpats = args[:]
    if cmd not in commands.norepo.split():
        # use the repo from the request only if we don't have -R
        if not rpath and not cwd:
            repo = req.repo

        if repo:
            # set the descriptors of the repo ui to those of ui
            repo.ui.fin = ui.fin
            repo.ui.fout = ui.fout
            repo.ui.ferr = ui.ferr
        else:
            try:
                repo = hg.repository(ui, path=path)
                if not repo.local():
                    raise util.Abort(_("repository '%s' is not local") % path)
                if options['hidden']:
                    repo = repo.unfiltered()
                repo.ui.setconfig("bundle", "mainreporoot", repo.root)
            except error.RequirementError:
                raise
            except error.RepoError:
                if cmd not in commands.optionalrepo.split():
                    if (cmd in commands.inferrepo.split() and
                        args and not path): # try to infer -R from command args
                        repos = map(cmdutil.findrepo, args)
                        guess = repos[0]
                        if guess and repos.count(guess) == len(repos):
                            req.args = ['--repository', guess] + fullargs
                            return _dispatch(req)
                    if not path:
                        raise error.RepoError(_("no repository found in '%s'"
                                                " (.hg not found)")
                                              % os.getcwd())
                    raise
        if repo:
            ui = repo.ui
            args.insert(0, repo)
        elif rpath:
            ui.warn(_("warning: --repository ignored\n"))

    # log the fully-quoted command line before running it
    msg = ' '.join(' ' in a and repr(a) or a for a in fullargs)
    ui.log("command", '%s\n', msg)
    d = lambda: util.checksignature(func)(ui, *args, **cmdoptions)
    try:
        return runcommand(lui, repo, cmd, fullargs, ui, options, d,
                          cmdpats, cmdoptions)
    finally:
        if repo and repo != req.repo:
            repo.close()
def lsprofile(ui, func, fp):
    """Run ``func`` under the lsprof profiler, writing the report to ``fp``.

    Output format, sort field and row limits come from the [profiling]
    config section. Returns whatever ``func`` returns.
    """
    format = ui.config('profiling', 'format', default='text')
    field = ui.config('profiling', 'sort', default='inlinetime')
    limit = ui.configint('profiling', 'limit', default=30)
    climit = ui.configint('profiling', 'nested', default=5)

    if format not in ['text', 'kcachegrind']:
        ui.warn(_("unrecognized profiling format '%s'"
                  " - Ignored\n") % format)
        format = 'text'

    try:
        from mercurial import lsprof
    except ImportError:
        raise util.Abort(_(
            'lsprof not available - install from '
            'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
    p = lsprof.Profiler()
    p.enable(subcalls=True)
    try:
        return func()
    finally:
        # the report is emitted in the finally block so it is produced
        # even when func() raises
        p.disable()

        if format == 'kcachegrind':
            import lsprofcalltree
            calltree = lsprofcalltree.KCacheGrind(p)
            calltree.output(fp)
        else:
            # format == 'text'
            stats = lsprof.Stats(p.getstats())
            stats.sort(field)
            stats.pprint(limit=limit, file=fp, climit=climit)
def statprofile(ui, func, fp):
    """Run ``func`` under the statprof sampling profiler, reporting to ``fp``.

    Sampling frequency comes from [profiling] freq; non-positive values
    are warned about and statprof's default is kept.
    """
    try:
        import statprof
    except ImportError:
        raise util.Abort(_(
            'statprof not available - install using "easy_install statprof"'))

    freq = ui.configint('profiling', 'freq', default=1000)
    if freq > 0:
        statprof.reset(freq)
    else:
        ui.warn(_("invalid sampling frequency '%s' - ignoring\n") % freq)

    statprof.start()
    try:
        return func()
    finally:
        # stop sampling and emit the report even if func() raised
        statprof.stop()
        statprof.display(fp)
def _runcommand(ui, options, cmd, cmdfunc):
    """Invoke ``cmdfunc``, optionally under a profiler.

    The profiler is selected by the HGPROF environment variable or the
    [profiling] type config ('ls' or 'stat'); its report goes to the
    [profiling] output file, or stderr when unset.
    """
    def checkargs():
        # translate a bad command signature into a user-facing error
        try:
            return cmdfunc()
        except error.SignatureError:
            raise error.CommandError(cmd, _("invalid arguments"))

    if options['profile']:
        profiler = os.getenv('HGPROF')
        if profiler is None:
            profiler = ui.config('profiling', 'type', default='ls')
        if profiler not in ('ls', 'stat'):
            ui.warn(_("unrecognized profiler '%s' - ignored\n") % profiler)
            profiler = 'ls'

        output = ui.config('profiling', 'output')

        if output:
            path = ui.expandpath(output)
            fp = open(path, 'wb')
        else:
            fp = sys.stderr

        try:
            if profiler == 'ls':
                return lsprofile(ui, checkargs, fp)
            else:
                return statprofile(ui, checkargs, fp)
        finally:
            # only close files we opened ourselves, never stderr
            if output:
                fp.close()
    else:
        return checkargs()
|
itucsdb1603/itucsdb1603 | refs/heads/master | placesB.py | 1 | import os
import json
import re
import psycopg2 as dbapi2
from flask import Blueprint, render_template
from flask import redirect
from flask.helpers import url_for
from flask import current_app, request
from place import Place
from placelist import PlaceList
from flask import current_app as app
from _sqlite3 import Row
places = Blueprint('places', __name__)
@places.route('/places', methods = ['GET', 'POST'])
def places_page():
    """List all places (GET) or insert a new place area (POST)."""
    if request.method == 'GET':
        places = current_app.placelist.get_places()
        return render_template('places.html', places=sorted(places.items()))
    else:
        area = str(request.form['area'])
        with dbapi2.connect(app.config['dsn']) as connection:
            cursor = connection.cursor()
            statement = """INSERT INTO PLACES (AREA) VALUES (%s)"""
            cursor.execute(statement, [area])
            connection.commit()
        # mirror the new row in the in-memory place list
        place = Place(area)
        current_app.placelist.add_place(place)
        return redirect(url_for('places.places_page', place_id=place._id))
@places.route('/places/delete', methods=['GET', 'POST'])
def delete_place():
    """Render the delete form (GET) or delete places by area name (POST).

    POST deletes every PLACES row whose AREA equals the submitted value,
    both from the database and from the in-memory place list, then
    redirects back to the place listing.
    """
    if request.method == 'GET':
        return render_template('delete_place.html')
    else:
        area = str(request.form['area'])
        with dbapi2.connect(app.config['dsn']) as connection:
            cursor = connection.cursor()
            statement = """SELECT AREA_ID, AREA FROM PLACES WHERE (AREA = (%s))"""
            cursor.execute(statement, (area,))
            # materialize the ids first: the same cursor is reused for DELETEs
            matching_ids = [row[0] for row in cursor.fetchall()]
            statement = """DELETE FROM PLACES WHERE (AREA_ID = (%s))"""
            for area_id in matching_ids:
                cursor.execute(statement, (area_id,))
                connection.commit()
                current_app.placelist.delete_place(area_id)
        # BUG FIX: the old code did redirect(url_for(...), place_id=place._id)
        # where ``place`` was never defined (NameError) and ``place_id`` is not
        # a redirect() argument (TypeError). Redirect plainly instead.
        return redirect(url_for('places.places_page'))
@places.route('/places/update', methods=['GET', 'POST'])
def update_place():
    """Render the update form (GET) or rename a place area (POST).

    POST renames every PLACES row matching the submitted area, then
    synchronizes the in-memory place list with the renamed rows and
    redirects back to the place listing.
    """
    if request.method == 'GET':
        return render_template('update_place.html')
    else:
        area = str(request.form['area'])
        new_area = str(request.form['new_area'])
        with dbapi2.connect(app.config['dsn']) as connection:
            cursor = connection.cursor()
            statement = """UPDATE PLACES
                           SET AREA = (%s)
                           WHERE AREA = (%s)"""
            cursor.execute(statement, (new_area, area,))
            connection.commit()
            # re-select the renamed rows to update the in-memory copies
            cursor = connection.cursor()
            statement = """SELECT AREA_ID, AREA FROM PLACES WHERE (AREA = (%s))"""
            cursor.execute(statement, (new_area,))
            connection.commit()
            for row in cursor:
                area_id, _row_area = row
                updated_place = current_app.placelist.get_place(area_id)
                updated_place.update_place(new_area)
        # BUG FIX: the old code did redirect(url_for(...), place_id=place._id)
        # where ``place`` was never defined (NameError) and ``place_id`` is not
        # a redirect() argument (TypeError). Redirect plainly instead.
        return redirect(url_for('places.places_page'))
def get_places():
    """Return every row of the PLACES table as a list of tuples."""
    connection = dbapi2.connect(app.config['dsn'])
    with connection:
        db_cursor = connection.cursor()
        db_cursor.execute("SELECT * FROM PLACES")
        rows = db_cursor.fetchall()
        connection.commit()
        return rows
@places.route('/initplaces')
def init_places_db():
    """(Re)create the PLACES table, dropping any existing one, then redirect
    to the home page. Visiting this route destroys all stored places."""
    with dbapi2.connect(app.config['dsn']) as connection:
        cursor = connection.cursor()
        query = """DROP TABLE IF EXISTS PLACES CASCADE"""
        cursor.execute(query)
        query = """CREATE TABLE PLACES (
        AREA_ID SERIAL,
        AREA VARCHAR(300),
        PRIMARY KEY(AREA_ID)
        )"""
        cursor.execute(query)
        connection.commit()
    return redirect(url_for('site.home_page'))
|
EdwardMoyse/django-indigorestwrapper | refs/heads/master | indigorestwrapper/apps.py | 2 | from __future__ import unicode_literals
from django.apps import AppConfig
class IndigorestwrapperConfig(AppConfig):
    """Django application configuration for the indigorestwrapper app."""
    # label under which Django registers this application
    name = 'indigorestwrapper'
|
kenshay/ImageScript | refs/heads/master | ProgramData/SystemFiles/Python/Lib/site-packages/OpenGL/raw/GLX/OML/sync_control.py | 8 | '''Autogenerated by xml_generate script, do not edit!'''
from OpenGL import platform as _p, arrays
# Code generation uses this
from OpenGL.raw.GLX import _types as _cs
# End users want this...
from OpenGL.raw.GLX._types import *
from OpenGL.raw.GLX import _errors
from OpenGL.constant import Constant as _C
import ctypes
_EXTENSION_NAME = 'GLX_OML_sync_control'
def _f( function ):
    # Bind a stub as a GLX_OML_sync_control extension entry point with the
    # standard GLX error checker attached.
    return _p.createFunction( function,_p.PLATFORM.GLX,'GLX_OML_sync_control',error_checker=_errors._error_checker)

# Generated extension entry points; each def is only a typed stub whose body
# is replaced by the platform loader via the decorators above it.
@_f
@_p.types(_cs.Bool,ctypes.POINTER(_cs.Display),_cs.GLXDrawable,ctypes.POINTER(_cs.int32_t),ctypes.POINTER(_cs.int32_t))
def glXGetMscRateOML(dpy,drawable,numerator,denominator):pass
@_f
@_p.types(_cs.Bool,ctypes.POINTER(_cs.Display),_cs.GLXDrawable,ctypes.POINTER(_cs.int64_t),ctypes.POINTER(_cs.int64_t),ctypes.POINTER(_cs.int64_t))
def glXGetSyncValuesOML(dpy,drawable,ust,msc,sbc):pass
@_f
@_p.types(_cs.int64_t,ctypes.POINTER(_cs.Display),_cs.GLXDrawable,_cs.int64_t,_cs.int64_t,_cs.int64_t)
def glXSwapBuffersMscOML(dpy,drawable,target_msc,divisor,remainder):pass
@_f
@_p.types(_cs.Bool,ctypes.POINTER(_cs.Display),_cs.GLXDrawable,_cs.int64_t,_cs.int64_t,_cs.int64_t,ctypes.POINTER(_cs.int64_t),ctypes.POINTER(_cs.int64_t),ctypes.POINTER(_cs.int64_t))
def glXWaitForMscOML(dpy,drawable,target_msc,divisor,remainder,ust,msc,sbc):pass
@_f
@_p.types(_cs.Bool,ctypes.POINTER(_cs.Display),_cs.GLXDrawable,_cs.int64_t,ctypes.POINTER(_cs.int64_t),ctypes.POINTER(_cs.int64_t),ctypes.POINTER(_cs.int64_t))
def glXWaitForSbcOML(dpy,drawable,target_sbc,ust,msc,sbc):pass
|
basicthinker/THNVM | refs/heads/master | src/arch/x86/isa/insts/simd64/integer/data_transfer/__init__.py | 91 | # Copyright (c) 2007 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
categories = ["move",
              "move_non_temporal",
              "move_mask"]

microcode = '''
# 64 bit multimedia instructions
'''

# Concatenate the microcode text contributed by each category submodule.
# Python 2: ``exec`` is a statement here; each imported module is expected
# to expose a ``microcode`` string.
for category in categories:
    exec "import %s as cat" % category
    microcode += cat.microcode
|
nekulin/arangodb | refs/heads/devel | 3rdParty/V8-4.3.61/build/gyp/test/actions-subdir/src/make-file.py | 489 | #!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys

# Fixed payload written by this gyp test helper.
contents = 'Hello from make-file.py\n'

# argv[1] is the output path supplied by the gyp action being tested.
open(sys.argv[1], 'wb').write(contents)
|
johngian/remo | refs/heads/master | remo/reports/api/serializers.py | 3 | from rest_framework import serializers
from remo.base.helpers import absolutify
from remo.events.api.serializers import EventSerializer
from remo.profiles.api.serializers import (FunctionalAreaSerializer,
UserSerializer)
from remo.reports.models import NGReport
class ActivitiesSerializer(serializers.ModelSerializer):
    """Serializer for the NGReport model."""
    # flatten the related activity to its name
    activity = serializers.ReadOnlyField(source='activity.name')

    class Meta:
        model = NGReport
        fields = ['activity', '_url']
class ActivitiesDetailedSerializer(serializers.HyperlinkedModelSerializer):
    """Detailed serializer for the NGReport model."""
    user = UserSerializer()
    # read-only projections of related objects onto flat fields
    activity = serializers.ReadOnlyField(source='activity.name')
    initiative = serializers.ReadOnlyField(source='campaign.name')
    mentor = UserSerializer()
    passive_report = serializers.ReadOnlyField(source='is_passive')
    event = EventSerializer()
    functional_areas = FunctionalAreaSerializer(many=True)
    # computed by get_remo_url below
    remo_url = serializers.SerializerMethodField()

    class Meta:
        model = NGReport
        fields = ['user', 'activity', 'initiative', 'functional_areas',
                  'activity_description', 'report_date', 'mentor', 'location',
                  'longitude', 'latitude', 'link', 'link_description',
                  'passive_report', 'event', 'remo_url']

    def get_remo_url(self, obj):
        """
        Default method for fetching the activity url in ReMo portal.
        """
        return absolutify(obj.get_absolute_url())
|
dmoliveira/networkx | refs/heads/master | networkx/algorithms/connectivity/tests/test_kcutsets.py | 43 | # Jordi Torrents
# Test for k-cutsets
from operator import itemgetter
from nose.tools import assert_equal, assert_false, assert_true, assert_raises
import networkx as nx
from networkx.algorithms.connectivity.kcutsets import _is_separating_set
from networkx.algorithms.flow import (
edmonds_karp,
shortest_augmenting_path,
preflow_push,
)
##
## Some nice synthetic graphs
##
def graph_example_1():
    """Build a synthetic test graph: a 5x5 grid whose four corner regions
    are each wired to a Petersen graph, a K5 and a second K5 sharing a
    node, giving known connectivity structure for the cut tests."""
    G = nx.convert_node_labels_to_integers(nx.grid_graph([5,5]),
                                           label_attribute='labels')
    rlabels = nx.get_node_attributes(G, 'labels')
    labels = dict((v, k) for k, v in rlabels.items())

    for nodes in [(labels[(0,0)], labels[(1,0)]),
                  (labels[(0,4)], labels[(1,4)]),
                  (labels[(3,0)], labels[(4,0)]),
                  (labels[(3,4)], labels[(4,4)]) ]:
        new_node = G.order()+1
        # Petersen graph is triconnected
        P = nx.petersen_graph()
        G = nx.disjoint_union(G,P)
        # Add two edges between the grid and P
        G.add_edge(new_node+1, nodes[0])
        G.add_edge(new_node, nodes[1])
        # K5 is 4-connected
        K = nx.complete_graph(5)
        G = nx.disjoint_union(G,K)
        # Add three edges between P and K5
        G.add_edge(new_node+2,new_node+11)
        G.add_edge(new_node+3,new_node+12)
        G.add_edge(new_node+4,new_node+13)
        # Add another K5 sharing a node
        G = nx.disjoint_union(G,K)
        nbrs = G[new_node+10]
        G.remove_node(new_node+10)
        for nbr in nbrs:
            G.add_edge(new_node+17, nbr)
        G.add_edge(new_node+16, new_node+5)
    G.name = 'Example graph for connectivity'
    return G
def torrents_and_ferraro_graph():
    """Build the Torrents & Ferraro example graph: like graph_example_1,
    but the second K5 shares one node on two corners and two nodes on the
    other two corners of the grid."""
    G = nx.convert_node_labels_to_integers(nx.grid_graph([5,5]),
                                           label_attribute='labels')
    rlabels = nx.get_node_attributes(G, 'labels')
    labels = dict((v, k) for k, v in rlabels.items())

    for nodes in [ (labels[(0,4)], labels[(1,4)]),
                   (labels[(3,4)], labels[(4,4)]) ]:
        new_node = G.order()+1
        # Petersen graph is triconnected
        P = nx.petersen_graph()
        G = nx.disjoint_union(G,P)
        # Add two edges between the grid and P
        G.add_edge(new_node+1, nodes[0])
        G.add_edge(new_node, nodes[1])
        # K5 is 4-connected
        K = nx.complete_graph(5)
        G = nx.disjoint_union(G,K)
        # Add three edges between P and K5
        G.add_edge(new_node+2,new_node+11)
        G.add_edge(new_node+3,new_node+12)
        G.add_edge(new_node+4,new_node+13)
        # Add another K5 sharing a node
        G = nx.disjoint_union(G,K)
        nbrs = G[new_node+10]
        G.remove_node(new_node+10)
        for nbr in nbrs:
            G.add_edge(new_node+17, nbr)
        # Commenting this makes the graph not biconnected !!
        # This stupid mistake make one reviewer very angry :P
        G.add_edge(new_node+16, new_node+8)

    for nodes in [(labels[(0,0)], labels[(1,0)]),
                  (labels[(3,0)], labels[(4,0)])]:
        new_node = G.order()+1
        # Petersen graph is triconnected
        P = nx.petersen_graph()
        G = nx.disjoint_union(G,P)
        # Add two edges between the grid and P
        G.add_edge(new_node+1, nodes[0])
        G.add_edge(new_node, nodes[1])
        # K5 is 4-connected
        K = nx.complete_graph(5)
        G = nx.disjoint_union(G,K)
        # Add three edges between P and K5
        G.add_edge(new_node+2,new_node+11)
        G.add_edge(new_node+3,new_node+12)
        G.add_edge(new_node+4,new_node+13)
        # Add another K5 sharing two nodes
        G = nx.disjoint_union(G,K)
        nbrs = G[new_node+10]
        G.remove_node(new_node+10)
        for nbr in nbrs:
            G.add_edge(new_node+17, nbr)
        nbrs2 = G[new_node+9]
        G.remove_node(new_node+9)
        for nbr in nbrs2:
            G.add_edge(new_node+18, nbr)
    G.name = 'Example graph for connectivity'
    return G
# Helper function
def _check_separating_sets(G):
    # For each nontrivial connected component, every minimum node cut must
    # have cardinality equal to the component's node connectivity and its
    # removal must disconnect the component.
    for Gc in nx.connected_component_subgraphs(G):
        if len(Gc) < 3:
            continue
        for cut in nx.all_node_cuts(Gc):
            assert_equal(nx.node_connectivity(Gc), len(cut))
            H = Gc.copy()
            H.remove_nodes_from(cut)
            assert_false(nx.is_connected(H))
# Each test below builds a graph fixture and checks its minimum node cuts
# via _check_separating_sets.

def test_torrents_and_ferraro_graph():
    G = torrents_and_ferraro_graph()
    _check_separating_sets(G)


def test_example_1():
    G = graph_example_1()
    _check_separating_sets(G)


def test_random_gnp():
    G = nx.gnp_random_graph(100, 0.1)
    _check_separating_sets(G)


def test_shell():
    constructor=[(20,80,0.8),(80,180,0.6)]
    G = nx.random_shell_graph(constructor)
    _check_separating_sets(G)


def test_configuration():
    # configuration_model may create self-loops; strip them first
    deg_seq = nx.utils.create_degree_sequence(100,nx.utils.powerlaw_sequence)
    G = nx.Graph(nx.configuration_model(deg_seq))
    G.remove_edges_from(G.selfloop_edges())
    _check_separating_sets(G)


def test_karate():
    G = nx.karate_club_graph()
    _check_separating_sets(G)
def _generate_no_biconnected(max_attempts=50):
    """Yield random graphs that are connected but not biconnected.

    Gives up with an Exception after ``max_attempts`` consecutive
    unsuitable graphs.
    """
    attempts = 0
    while True:
        G = nx.fast_gnp_random_graph(100, 0.0575)
        if nx.is_connected(G) and not nx.is_biconnected(G):
            attempts = 0
            yield G
        else:
            if attempts >= max_attempts:
                # BUG FIX: the old code applied the % operator twice
                # (msg = "..." % attempts; raise Exception(msg % max_attempts)),
                # which raised TypeError instead of the intended message.
                msg = "Tried %d times: no suitable Graph." % max_attempts
                raise Exception(msg)
            else:
                attempts += 1
def test_articulation_points():
    # In a connected, non-biconnected graph every minimum node cut must be
    # a single articulation point.
    Ggen = _generate_no_biconnected()
    for i in range(2):
        G = next(Ggen)
        articulation_points = list({a} for a in nx.articulation_points(G))
        for cut in nx.all_node_cuts(G):
            assert_true(cut in articulation_points)


def test_grid_2d_graph():
    # All minimum node cuts of a 2d grid
    # are the four pairs of nodes that are
    # neighbors of the four corner nodes.
    G = nx.grid_2d_graph(5, 5)
    solution = [
        set([(0, 1), (1, 0)]),
        set([(3, 0), (4, 1)]),
        set([(3, 4), (4, 3)]),
        set([(0, 3), (1, 4)]),
    ]
    for cut in nx.all_node_cuts(G):
        assert_true(cut in solution)


def test_disconnected_graph():
    # all_node_cuts is only defined for connected graphs
    G = nx.fast_gnp_random_graph(100, 0.01)
    cuts = nx.all_node_cuts(G)
    assert_raises(nx.NetworkXError, next, cuts)


def test_alternative_flow_functions():
    # The cuts found must not depend on the max-flow backend used.
    flow_funcs = [edmonds_karp, shortest_augmenting_path, preflow_push]
    graph_funcs = [graph_example_1, nx.davis_southern_women_graph]
    for graph_func in graph_funcs:
        G = graph_func()
        for flow_func in flow_funcs:
            for cut in nx.all_node_cuts(G, flow_func=flow_func):
                assert_equal(nx.node_connectivity(G), len(cut))
                H = G.copy()
                H.remove_nodes_from(cut)
                assert_false(nx.is_connected(H))
def test_is_separating_set_complete_graph():
    # In K5, any 4 nodes form a separating set (only one node remains).
    G = nx.complete_graph(5)
    assert_true(_is_separating_set(G, {0, 1, 2, 3}))


def test_is_separating_set():
    # Removing the hub of a star graph disconnects the leaves.
    for i in [5, 10, 15]:
        G = nx.star_graph(i)
        max_degree_node = max(G, key=G.degree)
        assert_true(_is_separating_set(G, {max_degree_node}))


def test_non_repeated_cuts():
    # The algorithm was repeating the cut {0, 1} for the giant biconnected
    # component of the Karate club graph.
    K = nx.karate_club_graph()
    G = max(list(nx.biconnected_component_subgraphs(K)), key=len)
    solution = [{32, 33}, {2, 33}, {0, 3}, {0, 1}, {29, 33}]
    cuts = list(nx.all_node_cuts(G))
    if len(solution) != len(cuts):
        print(nx.info(G))
        print("Solution: {}".format(solution))
        print("Result: {}".format(cuts))
    assert_true(len(solution) == len(cuts))
    for cut in cuts:
        assert_true(cut in solution)
|
broferek/ansible | refs/heads/devel | test/units/modules/network/fortios/test_fortios_system_ips_urlfilter_dns.py | 21 | # Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
from mock import ANY
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
try:
from ansible.modules.network.fortios import fortios_system_ips_urlfilter_dns
except ImportError:
pytest.skip("Could not load required modules for testing", allow_module_level=True)
@pytest.fixture(autouse=True)
def connection_mock(mocker):
    # Patch the module's Connection class so no real FortiOS device is
    # contacted by any test in this file.
    connection_class_mock = mocker.patch('ansible.modules.network.fortios.fortios_system_ips_urlfilter_dns.Connection')
    return connection_class_mock


# Handler shared by all tests; built on the (mocked) connection class.
fos_instance = FortiOSHandler(connection_mock)
def test_system_ips_urlfilter_dns_creation(mocker):
    # A successful POST through FortiOSHandler.set must report changed=True
    # and translate underscored option names to hyphenated API field names.
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')

    set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)

    input_data = {
        'username': 'admin',
        'state': 'present',
        'system_ips_urlfilter_dns': {
            'address': 'test_value_3',
            'ipv6_capability': 'enable',
            'status': 'enable'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_system_ips_urlfilter_dns.fortios_system(input_data, fos_instance)

    expected_data = {
        'address': 'test_value_3',
        'ipv6-capability': 'enable',
        'status': 'enable'
    }

    set_method_mock.assert_called_with('system', 'ips-urlfilter-dns', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200


def test_system_ips_urlfilter_dns_creation_fails(mocker):
    # An HTTP 500 from the device must surface as is_error=True, changed=False.
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')

    set_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)

    input_data = {
        'username': 'admin',
        'state': 'present',
        'system_ips_urlfilter_dns': {
            'address': 'test_value_3',
            'ipv6_capability': 'enable',
            'status': 'enable'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_system_ips_urlfilter_dns.fortios_system(input_data, fos_instance)

    expected_data = {
        'address': 'test_value_3',
        'ipv6-capability': 'enable',
        'status': 'enable'
    }

    set_method_mock.assert_called_with('system', 'ips-urlfilter-dns', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 500
def test_system_ips_urlfilter_dns_removal(mocker):
    # state=absent must issue a DELETE keyed on the object's mkey.
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')

    delete_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
    delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)

    input_data = {
        'username': 'admin',
        'state': 'absent',
        'system_ips_urlfilter_dns': {
            'address': 'test_value_3',
            'ipv6_capability': 'enable',
            'status': 'enable'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_system_ips_urlfilter_dns.fortios_system(input_data, fos_instance)

    delete_method_mock.assert_called_with('system', 'ips-urlfilter-dns', mkey=ANY, vdom='root')
    schema_method_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200


def test_system_ips_urlfilter_dns_deletion_fails(mocker):
    # A failed delete (HTTP 500) surfaces as is_error=True, changed=False.
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')

    delete_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
    delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)

    input_data = {
        'username': 'admin',
        'state': 'absent',
        'system_ips_urlfilter_dns': {
            'address': 'test_value_3',
            'ipv6_capability': 'enable',
            'status': 'enable'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_system_ips_urlfilter_dns.fortios_system(input_data, fos_instance)

    delete_method_mock.assert_called_with('system', 'ips-urlfilter-dns', mkey=ANY, vdom='root')
    schema_method_mock.assert_not_called()
    assert is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 500
def test_system_ips_urlfilter_dns_idempotent(mocker):
    # A 404 "already in desired state" response must report changed=False.
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')

    set_method_result = {'status': 'error', 'http_method': 'DELETE', 'http_status': 404}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)

    input_data = {
        'username': 'admin',
        'state': 'present',
        'system_ips_urlfilter_dns': {
            'address': 'test_value_3',
            'ipv6_capability': 'enable',
            'status': 'enable'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_system_ips_urlfilter_dns.fortios_system(input_data, fos_instance)

    expected_data = {
        'address': 'test_value_3',
        'ipv6-capability': 'enable',
        'status': 'enable'
    }

    set_method_mock.assert_called_with('system', 'ips-urlfilter-dns', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert not is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 404


def test_system_ips_urlfilter_dns_filter_foreign_attributes(mocker):
    # Unknown option keys must be filtered out of the payload sent upstream.
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')

    set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)

    input_data = {
        'username': 'admin',
        'state': 'present',
        'system_ips_urlfilter_dns': {
            'random_attribute_not_valid': 'tag',
            'address': 'test_value_3',
            'ipv6_capability': 'enable',
            'status': 'enable'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_system_ips_urlfilter_dns.fortios_system(input_data, fos_instance)

    expected_data = {
        'address': 'test_value_3',
        'ipv6-capability': 'enable',
        'status': 'enable'
    }

    set_method_mock.assert_called_with('system', 'ips-urlfilter-dns', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
|
datalogics-robb/scons | refs/heads/master | src/test_strings.py | 2 | #!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Verify that we have proper strings like Copyright notices on all the
right files in our distributions.
Note that this is a source file and packaging test, not a functional test,
so the name of this script doesn't end in *Tests.py.
"""
import fnmatch
import os
import os.path
import re
import string
import TestCmd
import TestSCons
# Use TestCmd, not TestSCons, so we don't chdir to a temporary directory.
test = TestCmd.TestCmd()
scons_version = TestSCons.SConsVersion
def build_path(*args):
    """Join *args* onto the top-level 'build' directory."""
    # Argument-splat call replaces the deprecated Python 2 apply() builtin;
    # identical semantics on both Python 2 and 3.
    return os.path.join('build', *args)
build_scons = build_path('scons')
build_local = build_path('scons-local', 'scons-local-'+scons_version)
build_src = build_path('scons-src')
class Checker:
    """Walk a directory tree and report files missing required strings.

    Subclasses must supply a class attribute 'expressions' (a list of
    compiled regular expressions, all of which must match each checked
    file) and a must_be_built() method saying whether the directory is
    required to exist.
    """
    def __init__(self, directory,
                 search_list = [],
                 remove_list = [],
                 remove_patterns = []):
        """
        directory        -- root of the tree to check
        search_list      -- fnmatch patterns of paths to check; if empty,
                            every regular file is checked
        remove_list      -- paths (relative to directory) to prune
        remove_patterns  -- fnmatch patterns of file names to prune
        """
        self.directory = directory
        self.search_list = search_list
        self.remove_dict = {}
        for r in remove_list:
            self.remove_dict[os.path.join(directory, r)] = 1
        self.remove_patterns = remove_patterns
    def directory_exists(self):
        """Return true if the configured directory exists."""
        return os.path.exists(self.directory)
    def remove_this(self, name, path):
        """Return 1 if this name/path should be pruned from the walk."""
        if self.remove_dict.get(path):
            return 1
        else:
            for pattern in self.remove_patterns:
                if fnmatch.fnmatch(name, pattern):
                    return 1
            return 0
    def search_this(self, path):
        """Return true if the file at path should have its contents checked."""
        if self.search_list:
            for pattern in self.search_list:
                if fnmatch.fnmatch(path, pattern):
                    return 1
            return None
        else:
            return os.path.isfile(path)
    def visit(self, result, dirname, names):
        """Walk callback: prune unwanted names, check the rest, and append
        a message to result for every missing expression."""
        make_path_tuple = lambda n, d=dirname: (n, os.path.join(d, n))
        # Materialize the map() result so removing entries from names while
        # iterating is safe (also future-proofs against Python 3's lazy map()).
        for name, path in list(map(make_path_tuple, names)):
            if self.remove_this(name, path):
                names.remove(name)
            elif self.search_this(path):
                # Close the file explicitly instead of leaking the handle.
                f = open(path, 'r')
                body = f.read()
                f.close()
                for expr in self.expressions:
                    if not expr.search(body):
                        msg = '%s: missing %s' % (path, repr(expr.pattern))
                        result.append(msg)
    def find_missing(self):
        """Walk the whole tree and return the list of failure messages."""
        result = []
        # NOTE: os.path.walk was removed in Python 3; this script targets
        # Python 2.
        os.path.walk(self.directory, self.visit, result)
        return result
class CheckUnexpandedStrings(Checker):
    """Check checked-in source trees for the unexpanded keyword strings
    that get substituted at packaging time."""
    expressions = [
        re.compile('__COPYRIGHT__'),
        re.compile('__FILE__ __REVISION__ __DATE__ __DEVELOPER__'),
    ]
    def must_be_built(self):
        # Source directories always exist; nothing has to be built first.
        return None
class CheckExpandedCopyright(Checker):
    """Check built distribution trees for the expanded copyright notice."""
    expressions = [
        re.compile('Copyright.*The SCons Foundation'),
    ]
    def must_be_built(self):
        # These trees only exist after a packaging build has been run.
        return 1
# The checks to run.  Checked-in source trees ('src', 'test') must contain
# the unexpanded keyword strings; built trees must contain the expanded
# copyright notice.  remove_list/remove_patterns entries are third-party
# or generated files that are exempt from the check.
check_list = [
    CheckUnexpandedStrings(
        'src',
        search_list = [ '*.py' ],
        remove_list = [
            'engine/SCons/compat/_scons_sets.py',
            'engine/SCons/compat/_scons_sets15.py',
            'engine/SCons/compat/_scons_subprocess.py',
            'engine/SCons/Conftest.py',
            'engine/SCons/dblite.py',
            'engine/SCons/Optik',
        ],
    ),
    CheckUnexpandedStrings(
        'test',
        search_list = [ '*.py' ],
    ),
    CheckExpandedCopyright(
        build_scons,
        remove_list = [
            'build',
            'build-stamp',
            'configure-stamp',
            'debian',
            'dist',
            'gentoo',
            'engine/SCons/compat/_scons_sets.py',
            'engine/SCons/compat/_scons_sets15.py',
            'engine/SCons/compat/_scons_subprocess.py',
            'engine/SCons/Conftest.py',
            'engine/SCons/dblite.py',
            'engine/SCons/Optik',
            'MANIFEST',
            'os_spawnv_fix.diff',
            'setup.cfg',
        ],
        # We run epydoc on the *.py files, which generates *.pyc files.
        remove_patterns = [
            '*.pyc'
        ]
    ),
    CheckExpandedCopyright(
        build_local,
        remove_list = [
            'SCons/compat/_scons_sets.py',
            'SCons/compat/_scons_sets15.py',
            'SCons/compat/_scons_subprocess.py',
            'SCons/Conftest.py',
            'SCons/dblite.py',
            'SCons/Optik',
        ],
    ),
    CheckExpandedCopyright(
        build_src,
        remove_list = [
            'bin',
            'config',
            'debian',
            'gentoo',
            'doc/design',
            'doc/MANIFEST',
            'doc/python10',
            'doc/reference',
            'doc/developer/MANIFEST',
            'doc/man/MANIFEST',
            'doc/user/cons.pl',
            'doc/user/MANIFEST',
            'doc/user/SCons-win32-install-1.jpg',
            'doc/user/SCons-win32-install-2.jpg',
            'doc/user/SCons-win32-install-3.jpg',
            'doc/user/SCons-win32-install-4.jpg',
            'gentoo',
            'QMTest/classes.qmc',
            'QMTest/configuration',
            'QMTest/TestCmd.py',
            'QMTest/TestCommon.py',
            'QMTest/unittest.py',
            'src/os_spawnv_fix.diff',
            'src/MANIFEST.in',
            'src/setup.cfg',
            'src/engine/MANIFEST.in',
            'src/engine/MANIFEST-xml.in',
            'src/engine/setup.cfg',
            'src/engine/SCons/compat/_scons_sets.py',
            'src/engine/SCons/compat/_scons_sets15.py',
            'src/engine/SCons/compat/_scons_subprocess.py',
            'src/engine/SCons/Conftest.py',
            'src/engine/SCons/dblite.py',
            'src/engine/SCons/Optik',
            'src/script/MANIFEST.in',
            'src/script/setup.cfg',
        ],
    ),
]
# Run every configured check, accumulating failure messages and the list
# of distribution trees that have not been built.
missing_strings = []
not_built = []
for collector in check_list:
    if collector.directory_exists():
        missing_strings.extend(collector.find_missing())
    elif collector.must_be_built():
        not_built.append(collector.directory)
# Any file missing a required string is a test failure.
if missing_strings:
    print "Found the following files with missing strings:"
    print "\t" + string.join(missing_strings, "\n\t")
    test.fail_test(1)
# Missing built trees are reported as "no result", not a failure.
if not_built:
    print "Cannot check all strings, the following have apparently not been built:"
    print "\t" + string.join(not_built, "\n\t")
    test.no_result(1)
test.pass_test()
|
darkwing/kuma | refs/heads/master | vendor/packages/translate/convert/po2xliff.py | 25 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2005, 2006 Zuza Software Foundation
#
# This file is part of translate.
#
# translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
"""Convert Gettext PO localization files to XLIFF localization files.
See: http://docs.translatehouse.org/projects/translate-toolkit/en/latest/commands/xliff2po.html
for examples and usage instructions.
"""
from translate.storage import po, poxliff
class po2xliff:
    """Convert a PO store, unit by unit, into a PoXliffFile."""
    def convertunit(self, outputstore, inputunit, filename):
        """Create a trans-unit node in outputstore for inputunit and
        return it.  Header units become XLIFF header units; normal units
        carry over target text, fuzzy/approved state and PO comments as
        context groups and notes."""
        source = inputunit.source
        target = inputunit.target
        if inputunit.isheader():
            unit = outputstore.addheaderunit(target, filename)
        else:
            unit = outputstore.addsourceunit(source, filename, True)
            unit.target = target
            #Explicitly marking the fuzzy state will ensure that normal (translated)
            #units in the PO file end up as approved in the XLIFF file.
            if target:
                unit.markfuzzy(inputunit.isfuzzy())
            else:
                unit.markapproved(False)
            #Handle #: location comments
            for location in inputunit.getlocations():
                unit.createcontextgroup("po-reference", self.contextlist(location), purpose="location")
            #Handle #. automatic comments
            comment = inputunit.getnotes("developer")
            if comment:
                unit.createcontextgroup("po-entry", [("x-po-autocomment", comment)], purpose="information")
                unit.addnote(comment, origin="developer")
            #TODO: x-format, etc.
            #Handle # other comments
            comment = inputunit.getnotes("translator")
            if comment:
                unit.createcontextgroup("po-entry", [("x-po-trancomment", comment)], purpose="information")
                unit.addnote(comment, origin="po-translator")
        return unit
    def contextlist(self, location):
        """Split a PO location like 'file.c:123' into context tuples
        [("sourcefile", ...), ("linenumber", ...)]; the linenumber tuple
        is omitted when the location has no line number."""
        contexts = []
        if ":" in location:
            sourcefile, linenumber = location.split(":", 1)
        else:
            sourcefile, linenumber = location, None
        contexts.append(("sourcefile", sourcefile))
        if linenumber:
            contexts.append(("linenumber", linenumber))
        return contexts
    def convertstore(self, inputstore, templatefile=None, **kwargs):
        """Convert a .po store to .xlf format and return it as a string."""
        if templatefile is None:
            outputstore = poxliff.PoXliffFile(**kwargs)
        else:
            outputstore = poxliff.PoXliffFile(templatefile, **kwargs)
        filename = inputstore.filename
        for inputunit in inputstore.units:
            if inputunit.isblank():
                continue
            # The created unit is added to outputstore as a side effect;
            # the previous unused 'transunitnode' binding has been dropped.
            self.convertunit(outputstore, inputunit, filename)
        return str(outputstore)
def convertpo(inputfile, outputfile, templatefile):
    """Read a PO file from inputfile, convert it to XLIFF and write the
    result to outputfile.  Returns 1 on success, 0 if the input is empty."""
    store = po.pofile(inputfile)
    if store.isempty():
        return 0
    outputfile.write(po2xliff().convertstore(store, templatefile))
    return 1
def main(argv=None):
    """Command-line entry point: run the PO -> XLIFF converter."""
    from translate.convert import convert
    # Accept both bare .po input and an explicit (po, xlf) template pair;
    # both map to the same converter and output extension.
    xlf_output = ("xlf", convertpo)
    formats = {"po": xlf_output, ("po", "xlf"): xlf_output}
    parser = convert.ConvertOptionParser(formats, usetemplates=True,
                                         description=__doc__)
    parser.run(argv)
# Allow running the converter directly from the command line.
if __name__ == '__main__':
    main()
|
zengenti/ansible | refs/heads/devel | lib/ansible/modules/cloud/amazon/lambda_facts.py | 21 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Standard Ansible module metadata consumed by the ansible-doc tooling.
ANSIBLE_METADATA = {'status': ['preview'],
                    'supported_by': 'community',
                    'version': '1.0'}
DOCUMENTATION = '''
---
module: lambda_facts
short_description: Gathers AWS Lambda function details as Ansible facts
description:
- Gathers various details related to Lambda functions, including aliases, versions and event source mappings.
Use module M(lambda) to manage the lambda function itself, M(lambda_alias) to manage function aliases and
M(lambda_event) to manage lambda event source mappings.
version_added: "2.2"
options:
query:
description:
- Specifies the resource type for which to gather facts. Leave blank to retrieve all facts.
required: true
choices: [ "aliases", "all", "config", "mappings", "policy", "versions" ]
default: "all"
function_name:
description:
- The name of the lambda function for which facts are requested.
required: false
default: null
aliases: [ "function", "name"]
event_source_arn:
description:
- For query type 'mappings', this is the Amazon Resource Name (ARN) of the Amazon Kinesis or DynamoDB stream.
default: null
required: false
author: Pierre Jodouin (@pjodouin)
requirements:
- boto3
extends_documentation_fragment:
- aws
'''
EXAMPLES = '''
---
# Simple example of listing all info for a function
- name: List all for a specific function
lambda_facts:
query: all
function_name: myFunction
register: my_function_details
# List all versions of a function
- name: List function versions
lambda_facts:
query: versions
function_name: myFunction
register: my_function_versions
# List all lambda function versions
- name: List all function
lambda_facts:
query: all
max_items: 20
- name: show Lambda facts
debug:
var: lambda_facts
'''
RETURN = '''
---
lambda_facts:
description: lambda facts
returned: success
type: dict
lambda_facts.function:
description: lambda function list
returned: success
type: dict
lambda_facts.function.TheName:
description: lambda function information, including event, mapping, and version information
returned: success
type: dict
'''
import datetime
import sys
try:
import boto3
from botocore.exceptions import ClientError
HAS_BOTO3 = True
except ImportError:
HAS_BOTO3 = False
def fix_return(node):
    """
    Recursively convert datetime.datetime values inside a nested structure
    of dicts and lists into their string form, leaving every other value
    untouched, so the result is JSON-serializable for Ansible.

    :param node: value (possibly nested dicts/lists) to fix up
    :return: the same structure with datetimes stringified
    """
    if isinstance(node, datetime.datetime):
        return str(node)
    if isinstance(node, list):
        return [fix_return(element) for element in node]
    if isinstance(node, dict):
        return dict([(key, fix_return(node[key])) for key in node.keys()])
    return node
def alias_details(client, module):
    """
    Returns list of aliases for a specified function.

    :param client: AWS API client reference (boto3)
    :param module: Ansible module reference
    :return dict: {function_name: {'aliases': [...]}} with keys snake_cased
    """
    lambda_facts = dict()
    function_name = module.params.get('function_name')
    if function_name:
        params = dict()
        # Optional pagination controls.
        # NOTE(review): 'max_items'/'next_marker' are not declared in
        # main()'s argument_spec below -- verify they can actually be set.
        if module.params.get('max_items'):
            params['MaxItems'] = module.params.get('max_items')
        if module.params.get('next_marker'):
            params['Marker'] = module.params.get('next_marker')
        try:
            lambda_facts.update(aliases=client.list_aliases(FunctionName=function_name, **params)['Aliases'])
        except ClientError as e:
            # A missing function simply yields an empty alias list.
            if e.response['Error']['Code'] == 'ResourceNotFoundException':
                lambda_facts.update(aliases=[])
            else:
                module.fail_json(msg='Unable to get {0} aliases, error: {1}'.format(function_name, e))
    else:
        module.fail_json(msg='Parameter function_name required for query=aliases.')
    return {function_name: camel_dict_to_snake_dict(lambda_facts)}
def all_details(client, module):
    """
    Gather every category of Lambda facts (config, aliases, policy,
    versions and event source mappings) for one function, or just the
    configuration of all functions when no function name was given.

    :param client: AWS API client reference (boto3)
    :param module: Ansible module reference
    :return dict:
    """
    if module.params.get('max_items') or module.params.get('next_marker'):
        module.fail_json(msg='Cannot specify max_items nor next_marker for query=all.')
    facts = dict()
    function_name = module.params.get('function_name')
    if not function_name:
        facts.update(config_details(client, module))
        return facts
    # Merge each detail category for the named function, in the same
    # order the categories were originally gathered.
    facts[function_name] = {}
    for getter in (config_details, alias_details, policy_details,
                   version_details, mapping_details):
        facts[function_name].update(getter(client, module)[function_name])
    return facts
def config_details(client, module):
    """
    Returns configuration details for one or all lambda functions.

    :param client: AWS API client reference (boto3)
    :param module: Ansible module reference
    :return dict: one function's config keyed by its name, or a dict of
                  all functions' configs keyed by function name
    """
    lambda_facts = dict()
    function_name = module.params.get('function_name')
    if function_name:
        try:
            lambda_facts.update(client.get_function_configuration(FunctionName=function_name))
        except ClientError as e:
            # A missing function yields an empty config rather than a failure.
            if e.response['Error']['Code'] == 'ResourceNotFoundException':
                lambda_facts.update(function={})
            else:
                module.fail_json(msg='Unable to get {0} configuration, error: {1}'.format(function_name, e))
    else:
        # No name given: list every function, with optional pagination.
        params = dict()
        if module.params.get('max_items'):
            params['MaxItems'] = module.params.get('max_items')
        if module.params.get('next_marker'):
            params['Marker'] = module.params.get('next_marker')
        try:
            lambda_facts.update(function_list=client.list_functions(**params)['Functions'])
        except ClientError as e:
            if e.response['Error']['Code'] == 'ResourceNotFoundException':
                lambda_facts.update(function_list=[])
            else:
                module.fail_json(msg='Unable to get function list, error: {0}'.format(e))
        # Re-key the flat list by function name for the fact tree.
        functions = dict()
        for func in lambda_facts.pop('function_list', []):
            functions[func['FunctionName']] = camel_dict_to_snake_dict(func)
        return functions
    return {function_name: camel_dict_to_snake_dict(lambda_facts)}
def mapping_details(client, module):
    """
    Returns all lambda event source mappings.

    :param client: AWS API client reference (boto3)
    :param module: Ansible module reference
    :return dict: mappings keyed under the function name when one was
                  given, otherwise a flat {'mappings': [...]} dict
    """
    lambda_facts = dict()
    params = dict()
    function_name = module.params.get('function_name')
    if function_name:
        params['FunctionName'] = module.params.get('function_name')
    # Optional filter and pagination controls.
    if module.params.get('event_source_arn'):
        params['EventSourceArn'] = module.params.get('event_source_arn')
    if module.params.get('max_items'):
        params['MaxItems'] = module.params.get('max_items')
    if module.params.get('next_marker'):
        params['Marker'] = module.params.get('next_marker')
    try:
        lambda_facts.update(mappings=client.list_event_source_mappings(**params)['EventSourceMappings'])
    except ClientError as e:
        if e.response['Error']['Code'] == 'ResourceNotFoundException':
            lambda_facts.update(mappings=[])
        else:
            module.fail_json(msg='Unable to get source event mappings, error: {0}'.format(e))
    # Shape of the result mirrors the other *_details helpers when a
    # function name was supplied.
    if function_name:
        return {function_name: camel_dict_to_snake_dict(lambda_facts)}
    return camel_dict_to_snake_dict(lambda_facts)
def policy_details(client, module):
    """
    Returns policy attached to a lambda function.

    :param client: AWS API client reference (boto3)
    :param module: Ansible module reference
    :return dict: {function_name: {'policy': {...}}}
    """
    if module.params.get('max_items') or module.params.get('next_marker'):
        module.fail_json(msg='Cannot specify max_items nor next_marker for query=policy.')
    lambda_facts = dict()
    function_name = module.params.get('function_name')
    if function_name:
        try:
            # get_policy returns a JSON string so must convert to dict before reassigning to its key
            # NOTE(review): 'json' is not imported here; it is presumably
            # provided by the wildcard module_utils imports at the bottom
            # of this file -- verify.
            lambda_facts.update(policy=json.loads(client.get_policy(FunctionName=function_name)['Policy']))
        except ClientError as e:
            if e.response['Error']['Code'] == 'ResourceNotFoundException':
                lambda_facts.update(policy={})
            else:
                module.fail_json(msg='Unable to get {0} policy, error: {1}'.format(function_name, e))
    else:
        module.fail_json(msg='Parameter function_name required for query=policy.')
    return {function_name: camel_dict_to_snake_dict(lambda_facts)}
def version_details(client, module):
    """
    Returns all lambda function versions.

    :param client: AWS API client reference (boto3)
    :param module: Ansible module reference
    :return dict: {function_name: {'versions': [...]}} with keys snake_cased
    """
    facts = dict()
    function_name = module.params.get('function_name')
    if function_name:
        # Build the API call arguments, including optional pagination.
        api_params = dict(FunctionName=function_name)
        if module.params.get('max_items'):
            api_params['MaxItems'] = module.params.get('max_items')
        if module.params.get('next_marker'):
            api_params['Marker'] = module.params.get('next_marker')
        try:
            facts.update(versions=client.list_versions_by_function(**api_params)['Versions'])
        except ClientError as e:
            if e.response['Error']['Code'] == 'ResourceNotFoundException':
                facts.update(versions=[])
            else:
                module.fail_json(msg='Unable to get {0} versions, error: {1}'.format(function_name, e))
    else:
        module.fail_json(msg='Parameter function_name required for query=versions.')
    return {function_name: camel_dict_to_snake_dict(facts)}
def main():
    """
    Main entry point: validate parameters, build a boto3 Lambda client,
    dispatch to the requested *_details helper and exit with the facts.

    :return dict: ansible facts
    """
    argument_spec = ec2_argument_spec()
    argument_spec.update(
        dict(
            function_name=dict(required=False, default=None, aliases=['function', 'name']),
            query=dict(required=False, choices=['aliases', 'all', 'config', 'mappings', 'policy', 'versions'], default='all'),
            event_source_arn=dict(required=False, default=None)
        )
    )
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
        mutually_exclusive=[],
        required_together=[]
    )
    # validate dependencies
    if not HAS_BOTO3:
        module.fail_json(msg='boto3 is required for this module.')
    # validate function_name if present
    function_name = module.params['function_name']
    if function_name:
        if not re.search("^[\w\-:]+$", function_name):
            module.fail_json(
                msg='Function name {0} is invalid. Names must contain only alphanumeric characters and hyphens.'.format(function_name)
            )
        if len(function_name) > 64:
            module.fail_json(msg='Function name "{0}" exceeds 64 character limit'.format(function_name))
    # Build the boto3 'lambda' client from the standard AWS connection args.
    try:
        region, endpoint, aws_connect_kwargs = get_aws_connection_info(module, boto3=True)
        aws_connect_kwargs.update(dict(region=region,
                                       endpoint=endpoint,
                                       conn_type='client',
                                       resource='lambda'
                                       ))
        client = boto3_conn(module, **aws_connect_kwargs)
    except ClientError as e:
        module.fail_json(msg="Can't authorize connection - {0}".format(e))
    # Dispatch by query type: look the helper up by name on this module.
    this_module = sys.modules[__name__]
    invocations = dict(
        aliases='alias_details',
        all='all_details',
        config='config_details',
        mappings='mapping_details',
        policy='policy_details',
        versions='version_details',
    )
    this_module_function = getattr(this_module, invocations[module.params['query']])
    # fix_return() stringifies datetimes so the facts are serializable.
    all_facts = fix_return(this_module_function(client, module))
    results = dict(ansible_facts={'lambda_facts': {'function': all_facts}}, changed=False)
    if module.check_mode:
        results['msg'] = 'Check mode set but ignored for fact gathering only.'
    module.exit_json(**results)
# ansible import module(s) kept at ~eof as recommended
# NOTE(review): these wildcard imports presumably supply AnsibleModule,
# json, re, camel_dict_to_snake_dict, ec2_argument_spec,
# get_aws_connection_info and boto3_conn used above -- verify against the
# module_utils versions in use.
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
if __name__ == '__main__':
    main()
|
ymcagodme/Norwalk-Judo | refs/heads/master | django/contrib/localflavor/no/forms.py | 309 | """
Norwegian-specific Form helpers
"""
import re, datetime
from django.core.validators import EMPTY_VALUES
from django.forms import ValidationError
from django.forms.fields import Field, RegexField, Select
from django.utils.translation import ugettext_lazy as _
class NOZipCodeField(RegexField):
    """A form field that validates input as a Norwegian zip code:
    exactly four digits."""
    default_error_messages = {
        'invalid': _('Enter a zip code in the format XXXX.'),
    }
    def __init__(self, *args, **kwargs):
        super(NOZipCodeField, self).__init__(r'^\d{4}$',
            max_length=None, min_length=None, *args, **kwargs)
class NOMunicipalitySelect(Select):
    """
    A Select widget that uses a list of Norwegian municipalities (fylker)
    as its choices.
    """
    def __init__(self, attrs=None):
        # Explicit relative import (PEP 328): the implicit form
        # ("from no_municipalities import ...") only works on Python 2.
        from .no_municipalities import MUNICIPALITY_CHOICES
        super(NOMunicipalitySelect, self).__init__(attrs, choices=MUNICIPALITY_CHOICES)
class NOSocialSecurityNumber(Field):
    """
    A form field validating Norwegian social security numbers
    (11 digits: DDMMYY + 3-digit individual number + 2 check digits).
    Algorithm is documented at http://no.wikipedia.org/wiki/Personnummer

    As side effects of a successful clean(), sets self.birthday (a
    datetime.date) and self.gender ('F' or 'M') on the field instance.
    """
    default_error_messages = {
        'invalid': _(u'Enter a valid Norwegian social security number.'),
    }
    def clean(self, value):
        super(NOSocialSecurityNumber, self).clean(value)
        if value in EMPTY_VALUES:
            return u''
        if not re.match(r'^\d{11}$', value):
            raise ValidationError(self.error_messages['invalid'])
        # Decompose: day, month, two-digit year, 3-digit individual number.
        day = int(value[:2])
        month = int(value[2:4])
        year2 = int(value[4:6])
        inum = int(value[6:9])
        self.birthday = None
        # The individual-number range determines the century of birth;
        # an impossible date raises ValueError inside datetime.date.
        try:
            if 000 <= inum < 500:
                self.birthday = datetime.date(1900+year2, month, day)
            if 500 <= inum < 750 and year2 > 54:
                self.birthday = datetime.date(1800+year2, month, day)
            if 500 <= inum < 1000 and year2 < 40:
                self.birthday = datetime.date(2000+year2, month, day)
            if 900 <= inum < 1000 and year2 > 39:
                self.birthday = datetime.date(1900+year2, month, day)
        except ValueError:
            raise ValidationError(self.error_messages['invalid'])
        # Ninth digit encodes gender: even = female, odd = male.
        sexnum = int(value[8])
        if sexnum % 2 == 0:
            self.gender = 'F'
        else:
            self.gender = 'M'
        # NOTE(review): under Python 3, map() returns a one-shot iterator,
        # so the second multiply_reduce() call below would see an empty
        # sequence; this code targets Python 2, where map() is a list.
        digits = map(int, list(value))
        # Two weighted mod-11 checksums over all 11 digits.
        weight_1 = [3, 7, 6, 1, 8, 9, 4, 5, 2, 1, 0]
        weight_2 = [5, 4, 3, 2, 7, 6, 5, 4, 3, 2, 1]
        def multiply_reduce(aval, bval):
            return sum([(a * b) for (a, b) in zip(aval, bval)])
        if multiply_reduce(digits, weight_1) % 11 != 0:
            raise ValidationError(self.error_messages['invalid'])
        if multiply_reduce(digits, weight_2) % 11 != 0:
            raise ValidationError(self.error_messages['invalid'])
        return value
|
gladsonvm/haystackdemo | refs/heads/master | lib/python2.7/site-packages/pip/vendor/distlib/scripts.py | 79 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2013 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
import logging
import os
import re
import struct
import sys
from . import DistlibException
from .compat import sysconfig, fsencode, detect_encoding
from .resources import finder
from .util import FileOperator, get_export_entry, convert_path, get_executable
logger = logging.getLogger(__name__)
# check if Python is called on the first line with this expression
FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$')
SCRIPT_TEMPLATE = '''%(shebang)s
if __name__ == '__main__':
import sys, re
def _resolve(module, func):
__import__(module)
mod = sys.modules[module]
parts = func.split('.')
result = getattr(mod, parts.pop(0))
for p in parts:
result = getattr(result, p)
return result
try:
sys.argv[0] = re.sub('-script.pyw?$', '', sys.argv[0])
func = _resolve('%(module)s', '%(func)s')
rc = func() # None interpreted as 0
except Exception as e: # only supporting Python >= 2.6
sys.stderr.write('%%s\\n' %% e)
rc = 1
sys.exit(rc)
'''
class ScriptMaker(object):
    """
    A class to copy or create scripts from source scripts or callable
    specifications.
    """
    script_template = SCRIPT_TEMPLATE
    executable = None # for shebangs
    def __init__(self, source_dir, target_dir, add_launchers=True,
                 dry_run=False, fileop=None):
        """
        source_dir    -- where scripts to be copied are looked up
        target_dir    -- where scripts are written
        add_launchers -- whether to emit .exe launchers on Windows
        fileop        -- optional FileOperator to perform the actual file
                         operations (a new one is created if not given)
        """
        self.source_dir = source_dir
        self.target_dir = target_dir
        self.add_launchers = add_launchers
        self.force = False
        self.set_mode = False
        self._fileop = fileop or FileOperator(dry_run)
    def _get_alternate_executable(self, executable, flags):
        # On Windows, GUI scripts are pointed at pythonw so no console
        # window is opened.
        if 'gui' in flags and os.name == 'nt':
            dn, fn = os.path.split(executable)
            fn = fn.replace('python', 'pythonw')
            executable = os.path.join(dn, fn)
        return executable
    def _get_shebang(self, encoding, post_interp=b'', flags=None):
        """Build the shebang line (as bytes) for a generated/copied script,
        validating that it is representable in the script's encoding."""
        if self.executable:
            executable = self.executable
        elif not sysconfig.is_python_build():
            executable = get_executable()
        elif hasattr(sys, 'base_prefix') and sys.prefix != sys.base_prefix:
            # prefix != base_prefix -- presumably a venv over a source
            # build; point at its scripts directory.
            executable = os.path.join(
                sysconfig.get_path('scripts'),
               'python%s' % sysconfig.get_config_var('EXE'))
        else:
            executable = os.path.join(
                sysconfig.get_config_var('BINDIR'),
               'python%s%s' % (sysconfig.get_config_var('VERSION'),
                               sysconfig.get_config_var('EXE')))
        if flags:
            executable = self._get_alternate_executable(executable, flags)
        executable = fsencode(executable)
        shebang = b'#!' + executable + post_interp + b'\n'
        # Python parser starts to read a script using UTF-8 until
        # it gets a #coding:xxx cookie. The shebang has to be the
        # first line of a file, the #coding:xxx cookie cannot be
        # written before. So the shebang has to be decodable from
        # UTF-8.
        try:
            shebang.decode('utf-8')
        except UnicodeDecodeError:
            raise ValueError(
                'The shebang (%r) is not decodable from utf-8' % shebang)
        # If the script is encoded to a custom encoding (use a
        # #coding:xxx cookie), the shebang has to be decodable from
        # the script encoding too.
        if encoding != 'utf-8':
            try:
                shebang.decode(encoding)
            except UnicodeDecodeError:
                raise ValueError(
                    'The shebang (%r) is not decodable '
                    'from the script encoding (%r)' % (shebang, encoding))
        return shebang
    def _get_script_text(self, shebang, entry):
        # Fill the module/function wrapper template for a callable entry.
        return self.script_template % dict(shebang=shebang,
                                           module=entry.prefix,
                                           func=entry.suffix)
    def _make_script(self, entry, filenames):
        """Generate a wrapper script for an export entry, plus a Windows
        launcher when configured; append written paths to filenames."""
        shebang = self._get_shebang('utf-8', flags=entry.flags).decode('utf-8')
        script = self._get_script_text(shebang, entry)
        outname = os.path.join(self.target_dir, entry.name)
        use_launcher = self.add_launchers and os.name == 'nt'
        if use_launcher:
            exename = '%s.exe' % outname
            if 'gui' in entry.flags:
                ext = 'pyw'
                launcher = self._get_launcher('w')
            else:
                ext = 'py'
                launcher = self._get_launcher('t')
            # The launcher .exe runs the adjacent -script.py/-script.pyw.
            outname = '%s-script.%s' % (outname, ext)
        self._fileop.write_text_file(outname, script, 'utf-8')
        if self.set_mode:
            self._fileop.set_executable_mode([outname])
        filenames.append(outname)
        if use_launcher:
            self._fileop.write_binary_file(exename, launcher)
            filenames.append(exename)
    def _copy_script(self, script, filenames):
        """Copy a source script to the target dir, rewriting its shebang
        (and adding a launcher on Windows) when the first line invokes
        Python; append written paths to filenames."""
        adjust = False
        script = convert_path(script)
        outname = os.path.join(self.target_dir, os.path.basename(script))
        filenames.append(outname)
        script = os.path.join(self.source_dir, script)
        if not self.force and not self._fileop.newer(script, outname):
            logger.debug('not copying %s (up-to-date)', script)
            return
        # Always open the file, but ignore failures in dry-run mode --
        # that way, we'll get accurate feedback if we can read the
        # script.
        try:
            f = open(script, 'rb')
        except IOError:
            if not self.dry_run:
                raise
            f = None
        else:
            encoding, lines = detect_encoding(f.readline)
            f.seek(0)
            first_line = f.readline()
            if not first_line:
                logger.warning('%s: %s is an empty file (skipping)',
                               self.get_command_name(), script)
                return
            match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n'))
            if match:
                adjust = True
                post_interp = match.group(1) or b''
        if not adjust:
            # Not a Python shebang: plain byte-for-byte copy.
            if f:
                f.close()
            self._fileop.copy_file(script, outname)
        else:
            logger.info('copying and adjusting %s -> %s', script,
                        self.target_dir)
            if not self._fileop.dry_run:
                shebang = self._get_shebang(encoding, post_interp)
                use_launcher = self.add_launchers and os.name == 'nt'
                if use_launcher:
                    n, e = os.path.splitext(outname)
                    exename = n + '.exe'
                    if b'pythonw' in first_line:
                        launcher = self._get_launcher('w')
                        suffix = '-script.pyw'
                    else:
                        launcher = self._get_launcher('t')
                        suffix = '-script.py'
                    outname = n + suffix
                    filenames[-1] = outname
                # New shebang + the remainder of the original script.
                self._fileop.write_binary_file(outname, shebang + f.read())
                if use_launcher:
                    self._fileop.write_binary_file(exename, launcher)
                    filenames.append(exename)
            if f:
                f.close()
            if self.set_mode:
                self._fileop.set_executable_mode([outname])
    @property
    def dry_run(self):
        # Delegates to the underlying FileOperator.
        return self._fileop.dry_run
    @dry_run.setter
    def dry_run(self, value):
        self._fileop.dry_run = value
    if os.name == 'nt':
        # Executable launcher support.
        # Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/
        def _get_launcher(self, kind):
            """Return the launcher executable bytes: kind 't' (console) or
            'w' (GUI), matching the current process pointer size."""
            if struct.calcsize('P') == 8:   # 64-bit
                bits = '64'
            else:
                bits = '32'
            name = '%s%s.exe' % (kind, bits)
            result = finder('distlib').find(name).bytes
            return result
    # Public API follows
    def make(self, specification):
        """
        Make a script.

        :param specification: The specification, which is either a valid export
                              entry specification (to make a script from a
                              callable) or a filename (to make a script by
                              copying from a source location).
        :return: A list of all absolute pathnames written to,
        """
        filenames = []
        entry = get_export_entry(specification)
        if entry is None:
            self._copy_script(specification, filenames)
        else:
            self._make_script(entry, filenames)
        return filenames
    def make_multiple(self, specifications):
        """
        Take a list of specifications and make scripts from them,

        :param specifications: A list of specifications.
        :return: A list of all absolute pathnames written to,
        """
        filenames = []
        for specification in specifications:
            filenames.extend(self.make(specification))
        return filenames
|
jing-bao/pa-chromium | refs/heads/master | tools/json_schema_compiler/PRESUBMIT.py | 127 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Presubmit script for changes affecting tools/json_schema_compiler/
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into gcl.
"""
# Regular-expression patterns selecting which files run as unit tests.
WHITELIST = [ r'.+_test.py$' ]
def CheckChangeOnUpload(input_api, output_api):
  """Runs this directory's unit tests when a change is uploaded."""
  checks = input_api.canned_checks
  return checks.RunUnitTestsInDirectory(
      input_api, output_api, '.', whitelist=WHITELIST)
def CheckChangeOnCommit(input_api, output_api):
  """Runs this directory's unit tests when a change is committed."""
  checks = input_api.canned_checks
  return checks.RunUnitTestsInDirectory(
      input_api, output_api, '.', whitelist=WHITELIST)
|
tempbottle/restcommander | refs/heads/master | play-1.2.4/python/Lib/nntplib.py | 156 | """An NNTP client class based on RFC 977: Network News Transfer Protocol.
Example:
>>> from nntplib import NNTP
>>> s = NNTP('news')
>>> resp, count, first, last, name = s.group('comp.lang.python')
>>> print 'Group', name, 'has', count, 'articles, range', first, 'to', last
Group comp.lang.python has 51 articles, range 5770 to 5821
>>> resp, subs = s.xhdr('subject', first + '-' + last)
>>> resp = s.quit()
>>>
Here 'resp' is the server response line.
Error responses are turned into exceptions.
To post an article from a file:
>>> f = open(filename, 'r') # file containing article, including header
>>> resp = s.post(f)
>>>
For descriptions of all methods, read the comments in the code below.
Note that all arguments and return values representing article numbers
are strings, not numbers, since they are rarely used for calculations.
"""
# RFC 977 by Brian Kantor and Phil Lapsley.
# xover, xgtitle, xpath, date methods by Kevan Heydon
# Imports
import re
import socket
__all__ = ["NNTP","NNTPReplyError","NNTPTemporaryError",
"NNTPPermanentError","NNTPProtocolError","NNTPDataError",
"error_reply","error_temp","error_perm","error_proto",
"error_data",]
# Exceptions raised when an error or invalid response is received
class NNTPError(Exception):
    """Base class for all nntplib exceptions"""
    def __init__(self, *args):
        Exception.__init__(self, *args)
        # The first positional argument, when present, is the raw server
        # response line; otherwise record a placeholder.
        if args:
            self.response = args[0]
        else:
            self.response = 'No response given'
class NNTPReplyError(NNTPError):
    """Unexpected [123]xx reply"""
    pass
class NNTPTemporaryError(NNTPError):
    """4xx errors"""
    pass
class NNTPPermanentError(NNTPError):
    """5xx errors"""
    pass
class NNTPProtocolError(NNTPError):
    """Response does not begin with [1-5]"""
    pass
class NNTPDataError(NNTPError):
    """Error in response data"""
    pass
# for backwards compatibility
# (older code used these lowercase names; keep them as aliases)
error_reply = NNTPReplyError
error_temp = NNTPTemporaryError
error_perm = NNTPPermanentError
error_proto = NNTPProtocolError
error_data = NNTPDataError
# Protocol constants (per RFC 977, cited in the module docstring).
# Standard port used by NNTP servers
NNTP_PORT = 119
# Response numbers that are followed by additional text (e.g. article)
LONGRESP = ['100', '215', '220', '221', '222', '224', '230', '231', '282']
# Line terminators (we always output CRLF, but accept any of CRLF, CR, LF)
CRLF = '\r\n'
# The class itself
class NNTP:
    """Client for one NNTP (RFC 977) connection over a TCP socket.

    Commands return the raw server response line (plus, for multi-line
    responses, the following text); error replies are raised as the
    NNTP*Error exceptions defined above.  All article numbers are handled
    as strings, not ints (see the module docstring).
    NOTE(review): this is Python 2 source (print statements and the
    ``except E, e`` syntax); it will not run under Python 3.
    """
    def __init__(self, host, port=NNTP_PORT, user=None, password=None,
                 readermode=None, usenetrc=True):
        """Initialize an instance.  Arguments:
        - host: hostname to connect to
        - port: port to connect to (default the standard NNTP port)
        - user: username to authenticate with
        - password: password to use with username
        - readermode: if true, send 'mode reader' command after
                      connecting.
        readermode is sometimes necessary if you are connecting to an
        NNTP server on the local machine and intend to call
        reader-specific commands, such as `group'.  If you get
        unexpected NNTPPermanentErrors, you might need to set
        readermode.
        """
        self.host = host
        self.port = port
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.sock.connect((self.host, self.port))
        # Buffered file object over the socket; used for readline().
        self.file = self.sock.makefile('rb')
        self.debugging = 0
        # Greeting line; its code (200/201) encodes whether posting is allowed.
        self.welcome = self.getresp()
        # 'mode reader' is sometimes necessary to enable 'reader' mode.
        # However, the order in which 'mode reader' and 'authinfo' need to
        # arrive differs between some NNTP servers. Try to send
        # 'mode reader', and if it fails with an authorization failed
        # error, try again after sending authinfo.
        readermode_afterauth = 0
        if readermode:
            try:
                self.welcome = self.shortcmd('mode reader')
            except NNTPPermanentError:
                # error 500, probably 'not implemented'
                pass
            except NNTPTemporaryError, e:
                # 480 means the server requires authentication first.
                if user and e.response[:3] == '480':
                    # Need authorization before 'mode reader'
                    readermode_afterauth = 1
                else:
                    raise
        # If no login/password was specified, try to get them from ~/.netrc
        # Presume that if .netc has an entry, NNRP authentication is required.
        try:
            if usenetrc and not user:
                import netrc
                credentials = netrc.netrc()
                auth = credentials.authenticators(host)
                if auth:
                    user = auth[0]
                    password = auth[2]
        except IOError:
            # No usable ~/.netrc; continue without authentication.
            pass
        # Perform NNRP authentication if needed.
        if user:
            resp = self.shortcmd('authinfo user '+user)
            # 381 = password required to complete the AUTHINFO exchange.
            if resp[:3] == '381':
                if not password:
                    raise NNTPReplyError(resp)
                else:
                    resp = self.shortcmd(
                            'authinfo pass '+password)
                    if resp[:3] != '281':
                        raise NNTPPermanentError(resp)
            if readermode_afterauth:
                try:
                    self.welcome = self.shortcmd('mode reader')
                except NNTPPermanentError:
                    # error 500, probably 'not implemented'
                    pass
    # Get the welcome message from the server
    # (this is read and squirreled away by __init__()).
    # If the response code is 200, posting is allowed;
    # if it 201, posting is not allowed
    def getwelcome(self):
        """Get the welcome message from the server
        (this is read and squirreled away by __init__()).
        If the response code is 200, posting is allowed;
        if it 201, posting is not allowed."""
        if self.debugging: print '*welcome*', repr(self.welcome)
        return self.welcome
    def set_debuglevel(self, level):
        """Set the debugging level.  Argument 'level' means:
        0: no debugging output (default)
        1: print commands and responses but not body text etc.
        2: also print raw lines read and sent before stripping CR/LF"""
        self.debugging = level
    # Short alias kept for backward compatibility.
    debug = set_debuglevel
    def putline(self, line):
        """Internal: send one line to the server, appending CRLF."""
        line = line + CRLF
        if self.debugging > 1: print '*put*', repr(line)
        self.sock.sendall(line)
    def putcmd(self, line):
        """Internal: send one command to the server (through putline())."""
        if self.debugging: print '*cmd*', repr(line)
        self.putline(line)
    def getline(self):
        """Internal: return one line from the server, stripping CRLF.
        Raise EOFError if the connection is closed."""
        line = self.file.readline()
        if self.debugging > 1:
            print '*get*', repr(line)
        if not line: raise EOFError
        # Accept CRLF, bare CR or bare LF as the line terminator.
        if line[-2:] == CRLF: line = line[:-2]
        elif line[-1:] in CRLF: line = line[:-1]
        return line
    def getresp(self):
        """Internal: get a response from the server.
        Raise various errors if the response indicates an error."""
        resp = self.getline()
        if self.debugging: print '*resp*', repr(resp)
        # The first digit classifies the reply: 4xx temporary, 5xx permanent,
        # anything else outside 1xx-3xx is a protocol violation.
        c = resp[:1]
        if c == '4':
            raise NNTPTemporaryError(resp)
        if c == '5':
            raise NNTPPermanentError(resp)
        if c not in '123':
            raise NNTPProtocolError(resp)
        return resp
    def getlongresp(self, file=None):
        """Internal: get a response plus following text from the server.
        Raise various errors if the response indicates an error.

        If 'file' is a string it is opened for writing; if it is a file-like
        object the lines are written to it (and the returned list is empty).
        """
        openedFile = None
        try:
            # If a string was passed then open a file with that name
            if isinstance(file, str):
                openedFile = file = open(file, "w")
            resp = self.getresp()
            if resp[:3] not in LONGRESP:
                raise NNTPReplyError(resp)
            # NOTE: 'list' shadows the builtin for the rest of this method.
            list = []
            while 1:
                line = self.getline()
                # A lone '.' terminates the multi-line response.
                if line == '.':
                    break
                # Undo RFC 977 dot-stuffing ('..' at start -> '.').
                if line[:2] == '..':
                    line = line[1:]
                if file:
                    file.write(line + "\n")
                else:
                    list.append(line)
        finally:
            # If this method created the file, then it must close it
            if openedFile:
                openedFile.close()
        return resp, list
    def shortcmd(self, line):
        """Internal: send a command and get the response."""
        self.putcmd(line)
        return self.getresp()
    def longcmd(self, line, file=None):
        """Internal: send a command and get the response plus following text."""
        self.putcmd(line)
        return self.getlongresp(file)
    def newgroups(self, date, time, file=None):
        """Process a NEWGROUPS command.  Arguments:
        - date: string 'yymmdd' indicating the date
        - time: string 'hhmmss' indicating the time
        Return:
        - resp: server response if successful
        - list: list of newsgroup names"""
        return self.longcmd('NEWGROUPS ' + date + ' ' + time, file)
    def newnews(self, group, date, time, file=None):
        """Process a NEWNEWS command.  Arguments:
        - group: group name or '*'
        - date: string 'yymmdd' indicating the date
        - time: string 'hhmmss' indicating the time
        Return:
        - resp: server response if successful
        - list: list of message ids"""
        cmd = 'NEWNEWS ' + group + ' ' + date + ' ' + time
        return self.longcmd(cmd, file)
    def list(self, file=None):
        """Process a LIST command.  Return:
        - resp: server response if successful
        - list: list of (group, last, first, flag) (strings)"""
        resp, list = self.longcmd('LIST', file)
        for i in range(len(list)):
            # Parse lines into "group last first flag"
            list[i] = tuple(list[i].split())
        return resp, list
    def description(self, group):
        """Get a description for a single group.  If more than one
        group matches ('group' is a pattern), return the first.  If no
        group matches, return an empty string.
        This elides the response code from the server, since it can
        only be '215' or '285' (for xgtitle) anyway.  If the response
        code is needed, use the 'descriptions' method.
        NOTE: This neither checks for a wildcard in 'group' nor does
        it check whether the group actually exists."""
        resp, lines = self.descriptions(group)
        if len(lines) == 0:
            return ""
        else:
            return lines[0][1]
    def descriptions(self, group_pattern):
        """Get descriptions for a range of groups."""
        # Each line is "name<ws>description"; capture both parts.
        line_pat = re.compile("^(?P<group>[^ \t]+)[ \t]+(.*)$")
        # Try the more std (acc. to RFC2980) LIST NEWSGROUPS first
        resp, raw_lines = self.longcmd('LIST NEWSGROUPS ' + group_pattern)
        if resp[:3] != "215":
            # Now the deprecated XGTITLE.  This either raises an error
            # or succeeds with the same output structure as LIST
            # NEWSGROUPS.
            resp, raw_lines = self.longcmd('XGTITLE ' + group_pattern)
        lines = []
        for raw_line in raw_lines:
            match = line_pat.search(raw_line.strip())
            if match:
                lines.append(match.group(1, 2))
        return resp, lines
    def group(self, name):
        """Process a GROUP command.  Argument:
        - group: the group name
        Returns:
        - resp: server response if successful
        - count: number of articles (string)
        - first: first article number (string)
        - last: last article number (string)
        - name: the group name"""
        resp = self.shortcmd('GROUP ' + name)
        if resp[:3] != '211':
            raise NNTPReplyError(resp)
        words = resp.split()
        # NOTE(review): defaults are int 0 although the documented return
        # type is string; callers only see ints on malformed 211 replies.
        count = first = last = 0
        n = len(words)
        if n > 1:
            count = words[1]
            if n > 2:
                first = words[2]
                if n > 3:
                    last = words[3]
                    if n > 4:
                        name = words[4].lower()
        return resp, count, first, last, name
    def help(self, file=None):
        """Process a HELP command.  Returns:
        - resp: server response if successful
        - list: list of strings"""
        return self.longcmd('HELP',file)
    def statparse(self, resp):
        """Internal: parse the response of a STAT, NEXT or LAST command."""
        # All three commands answer with a 22x code.
        if resp[:2] != '22':
            raise NNTPReplyError(resp)
        words = resp.split()
        nr = 0
        id = ''
        n = len(words)
        if n > 1:
            nr = words[1]
            if n > 2:
                id = words[2]
        return resp, nr, id
    def statcmd(self, line):
        """Internal: process a STAT, NEXT or LAST command."""
        resp = self.shortcmd(line)
        return self.statparse(resp)
    def stat(self, id):
        """Process a STAT command.  Argument:
        - id: article number or message id
        Returns:
        - resp: server response if successful
        - nr:   the article number
        - id:   the message id"""
        return self.statcmd('STAT ' + id)
    def next(self):
        """Process a NEXT command.  No arguments.  Return as for STAT."""
        return self.statcmd('NEXT')
    def last(self):
        """Process a LAST command.  No arguments.  Return as for STAT."""
        return self.statcmd('LAST')
    def artcmd(self, line, file=None):
        """Internal: process a HEAD, BODY or ARTICLE command."""
        resp, list = self.longcmd(line, file)
        resp, nr, id = self.statparse(resp)
        return resp, nr, id, list
    def head(self, id):
        """Process a HEAD command.  Argument:
        - id: article number or message id
        Returns:
        - resp: server response if successful
        - nr: article number
        - id: message id
        - list: the lines of the article's header"""
        return self.artcmd('HEAD ' + id)
    def body(self, id, file=None):
        """Process a BODY command.  Argument:
        - id: article number or message id
        - file: Filename string or file object to store the article in
        Returns:
        - resp: server response if successful
        - nr: article number
        - id: message id
        - list: the lines of the article's body or an empty list
                if file was used"""
        return self.artcmd('BODY ' + id, file)
    def article(self, id):
        """Process an ARTICLE command.  Argument:
        - id: article number or message id
        Returns:
        - resp: server response if successful
        - nr: article number
        - id: message id
        - list: the lines of the article"""
        return self.artcmd('ARTICLE ' + id)
    def slave(self):
        """Process a SLAVE command.  Returns:
        - resp: server response if successful"""
        return self.shortcmd('SLAVE')
    def xhdr(self, hdr, str, file=None):
        """Process an XHDR command (optional server extension).  Arguments:
        - hdr: the header type (e.g. 'subject')
        - str: an article nr, a message id, or a range nr1-nr2
        Returns:
        - resp: server response if successful
        - list: list of (nr, value) strings"""
        # NOTE: the parameter 'str' shadows the builtin within this method.
        pat = re.compile('^([0-9]+) ?(.*)\n?')
        resp, lines = self.longcmd('XHDR ' + hdr + ' ' + str, file)
        for i in range(len(lines)):
            line = lines[i]
            m = pat.match(line)
            if m:
                lines[i] = m.group(1, 2)
        return resp, lines
    def xover(self, start, end, file=None):
        """Process an XOVER command (optional server extension) Arguments:
        - start: start of range
        - end: end of range
        Returns:
        - resp: server response if successful
        - list: list of (art-nr, subject, poster, date,
                         id, references, size, lines)"""
        resp, lines = self.longcmd('XOVER ' + start + '-' + end, file)
        xover_lines = []
        for line in lines:
            # Overview lines are tab-separated; fewer than 8 fields is an
            # overview-database error.
            elem = line.split("\t")
            try:
                xover_lines.append((elem[0],
                                    elem[1],
                                    elem[2],
                                    elem[3],
                                    elem[4],
                                    elem[5].split(),
                                    elem[6],
                                    elem[7]))
            except IndexError:
                raise NNTPDataError(line)
        return resp,xover_lines
    def xgtitle(self, group, file=None):
        """Process an XGTITLE command (optional server extension) Arguments:
        - group: group name wildcard (i.e. news.*)
        Returns:
        - resp: server response if successful
        - list: list of (name,title) strings"""
        line_pat = re.compile("^([^ \t]+)[ \t]+(.*)$")
        resp, raw_lines = self.longcmd('XGTITLE ' + group, file)
        lines = []
        for raw_line in raw_lines:
            match = line_pat.search(raw_line.strip())
            if match:
                lines.append(match.group(1, 2))
        return resp, lines
    def xpath(self,id):
        """Process an XPATH command (optional server extension) Arguments:
        - id: Message id of article
        Returns:
        resp: server response if successful
        path: directory path to article"""
        resp = self.shortcmd("XPATH " + id)
        if resp[:3] != '223':
            raise NNTPReplyError(resp)
        try:
            [resp_num, path] = resp.split()
        except ValueError:
            raise NNTPReplyError(resp)
        else:
            return resp, path
    def date (self):
        """Process the DATE command. Arguments:
        None
        Returns:
        resp: server response if successful
        date: Date suitable for newnews/newgroups commands etc.
        time: Time suitable for newnews/newgroups commands etc."""
        resp = self.shortcmd("DATE")
        if resp[:3] != '111':
            raise NNTPReplyError(resp)
        elem = resp.split()
        if len(elem) != 2:
            raise NNTPDataError(resp)
        # Server sends 'yyyymmddhhmmss'; slice out 'yymmdd' and 'hhmmss'.
        date = elem[1][2:8]
        time = elem[1][-6:]
        if len(date) != 6 or len(time) != 6:
            raise NNTPDataError(resp)
        return resp, date, time
    def post(self, f):
        """Process a POST command.  Arguments:
        - f: file containing the article
        Returns:
        - resp: server response if successful"""
        resp = self.shortcmd('POST')
        # Raises error_??? if posting is not allowed
        if resp[0] != '3':
            raise NNTPReplyError(resp)
        while 1:
            line = f.readline()
            if not line:
                break
            if line[-1] == '\n':
                line = line[:-1]
            # Dot-stuff lines starting with '.' per RFC 977.
            if line[:1] == '.':
                line = '.' + line
            self.putline(line)
        # Lone '.' terminates the article text.
        self.putline('.')
        return self.getresp()
    def ihave(self, id, f):
        """Process an IHAVE command.  Arguments:
        - id: message-id of the article
        - f:  file containing the article
        Returns:
        - resp: server response if successful
        Note that if the server refuses the article an exception is raised."""
        resp = self.shortcmd('IHAVE ' + id)
        # Raises error_??? if the server already has it
        if resp[0] != '3':
            raise NNTPReplyError(resp)
        while 1:
            line = f.readline()
            if not line:
                break
            if line[-1] == '\n':
                line = line[:-1]
            # Dot-stuff lines starting with '.' per RFC 977.
            if line[:1] == '.':
                line = '.' + line
            self.putline(line)
        # Lone '.' terminates the article text.
        self.putline('.')
        return self.getresp()
    def quit(self):
        """Process a QUIT command and close the socket.  Returns:
        - resp: server response if successful"""
        resp = self.shortcmd('QUIT')
        self.file.close()
        self.sock.close()
        # Drop references so further use fails loudly.
        del self.file, self.sock
        return resp
# Test retrieval when run as a script.
# Assumption: if there's a local news server, it's called 'news'.
# Assumption: if user queries a remote news server, it's named
# in the environment variable NNTPSERVER (used by slrn and kin)
# and we want readermode off.
if __name__ == '__main__':
    # Smoke test: connect to a news server, select comp.lang.python and
    # print the subject of every article in the group.  (Python 2 prints.)
    import os
    # NOTE(review): ``'news' and os.environ[...]`` always evaluates the
    # right-hand side because 'news' is truthy, so NNTPSERVER must be set
    # or this raises KeyError -- the 'news' default is never used.  Verify
    # whether os.environ.get("NNTPSERVER", "news") was intended.
    newshost = 'news' and os.environ["NNTPSERVER"]
    # A dotted hostname is presumed remote -> no readermode (see module
    # comments above).
    if newshost.find('.') == -1:
        mode = 'readermode'
    else:
        mode = None
    s = NNTP(newshost, readermode=mode)
    resp, count, first, last, name = s.group('comp.lang.python')
    print resp
    print 'Group', name, 'has', count, 'articles, range', first, 'to', last
    resp, subs = s.xhdr('subject', first + '-' + last)
    print resp
    for item in subs:
        print "%7s %s" % item
    resp = s.quit()
    print resp
|
HiroIshikawa/21playground | refs/heads/master | visualizer/_app_boilerplate/venv/lib/python3.5/site-packages/requests/packages/chardet/universaldetector.py | 1775 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from . import constants
import sys
import codecs
from .latin1prober import Latin1Prober # windows-1252
from .mbcsgroupprober import MBCSGroupProber # multi-byte character sets
from .sbcsgroupprober import SBCSGroupProber # single-byte character sets
from .escprober import EscCharSetProber # ISO-2122, etc.
import re
# A prober must report at least this confidence for close() to accept
# its answer.
MINIMUM_THRESHOLD = 0.20
# Input-state constants for the feed() state machine.
ePureAscii = 0   # only 7-bit ASCII bytes seen so far
eEscAscii = 1    # escape sequences seen (ESC byte or '~{')
eHighbyte = 2    # bytes with the high bit set seen
class UniversalDetector:
    """Incremental character-encoding detector.

    Feed chunks of bytes via feed(), then call close() to obtain
    ``{'encoding': name-or-None, 'confidence': float}``.  BOM-prefixed
    UTF inputs are recognized immediately; otherwise the bytes are routed
    to escape-sequence, multi-byte, single-byte and Latin-1 probers.
    """
    def __init__(self):
        # Matches any byte with the high bit set (non-ASCII).
        self._highBitDetector = re.compile(b'[\x80-\xFF]')
        # Matches an ESC byte or '~{' (escape-style encodings).
        self._escDetector = re.compile(b'(\033|~{)')
        self._mEscCharSetProber = None
        self._mCharSetProbers = []
        self.reset()
    def reset(self):
        """Reset the detector (and all existing probers) so it can be
        reused for a new document."""
        self.result = {'encoding': None, 'confidence': 0.0}
        self.done = False
        self._mStart = True
        self._mGotData = False
        self._mInputState = ePureAscii
        # Last byte of the previous chunk, kept so an escape sequence split
        # across two feed() calls is still detected.
        self._mLastChar = b''
        if self._mEscCharSetProber:
            self._mEscCharSetProber.reset()
        for prober in self._mCharSetProbers:
            prober.reset()
    def feed(self, aBuf):
        """Feed one chunk of bytes to the detector.

        Sets self.done (and self.result) as soon as a BOM or a prober
        gives a definitive answer; further calls are then no-ops.
        """
        if self.done:
            return
        aLen = len(aBuf)
        if not aLen:
            return
        if not self._mGotData:
            # If the data starts with BOM, we know it is UTF
            if aBuf[:3] == codecs.BOM_UTF8:
                # EF BB BF  UTF-8 with BOM
                self.result = {'encoding': "UTF-8-SIG", 'confidence': 1.0}
            elif aBuf[:4] == codecs.BOM_UTF32_LE:
                # FF FE 00 00  UTF-32, little-endian BOM
                self.result = {'encoding': "UTF-32LE", 'confidence': 1.0}
            elif aBuf[:4] == codecs.BOM_UTF32_BE:
                # 00 00 FE FF  UTF-32, big-endian BOM
                self.result = {'encoding': "UTF-32BE", 'confidence': 1.0}
            elif aBuf[:4] == b'\xFE\xFF\x00\x00':
                # FE FF 00 00  UCS-4, unusual octet order BOM (3412)
                self.result = {
                    'encoding': "X-ISO-10646-UCS-4-3412",
                    'confidence': 1.0
                }
            elif aBuf[:4] == b'\x00\x00\xFF\xFE':
                # 00 00 FF FE  UCS-4, unusual octet order BOM (2143)
                self.result = {
                    'encoding': "X-ISO-10646-UCS-4-2143",
                    'confidence': 1.0
                }
            elif aBuf[:2] == codecs.BOM_LE:
                # FF FE  UTF-16, little endian BOM
                self.result = {'encoding': "UTF-16LE", 'confidence': 1.0}
            elif aBuf[:2] == codecs.BOM_BE:
                # FE FF  UTF-16, big endian BOM
                self.result = {'encoding': "UTF-16BE", 'confidence': 1.0}
        self._mGotData = True
        if self.result['encoding'] and (self.result['confidence'] > 0.0):
            self.done = True
            return
        # Promote the input state: pure ASCII -> high-byte or escape-ASCII.
        if self._mInputState == ePureAscii:
            if self._highBitDetector.search(aBuf):
                self._mInputState = eHighbyte
            elif ((self._mInputState == ePureAscii) and
                    self._escDetector.search(self._mLastChar + aBuf)):
                self._mInputState = eEscAscii
        self._mLastChar = aBuf[-1:]
        if self._mInputState == eEscAscii:
            # Lazily create the escape-sequence prober.
            if not self._mEscCharSetProber:
                self._mEscCharSetProber = EscCharSetProber()
            if self._mEscCharSetProber.feed(aBuf) == constants.eFoundIt:
                self.result = {'encoding': self._mEscCharSetProber.get_charset_name(),
                               'confidence': self._mEscCharSetProber.get_confidence()}
                self.done = True
        elif self._mInputState == eHighbyte:
            # Lazily create the group probers; first definitive hit wins.
            if not self._mCharSetProbers:
                self._mCharSetProbers = [MBCSGroupProber(), SBCSGroupProber(),
                                         Latin1Prober()]
            for prober in self._mCharSetProbers:
                if prober.feed(aBuf) == constants.eFoundIt:
                    self.result = {'encoding': prober.get_charset_name(),
                                   'confidence': prober.get_confidence()}
                    self.done = True
                    break
    def close(self):
        """Finish detection and return self.result (best guess).

        NOTE(review): returns None both when no data was ever fed and when
        the input was escape-style but no prober reached a verdict; callers
        should read self.result rather than rely on the return value.
        """
        if self.done:
            return
        if not self._mGotData:
            if constants._debug:
                sys.stderr.write('no data received!\n')
            return
        self.done = True
        if self._mInputState == ePureAscii:
            # Nothing but 7-bit bytes: plain ASCII with full confidence.
            self.result = {'encoding': 'ascii', 'confidence': 1.0}
            return self.result
        if self._mInputState == eHighbyte:
            # Pick the prober with the highest confidence, if it clears
            # MINIMUM_THRESHOLD.
            proberConfidence = None
            maxProberConfidence = 0.0
            maxProber = None
            for prober in self._mCharSetProbers:
                if not prober:
                    continue
                proberConfidence = prober.get_confidence()
                if proberConfidence > maxProberConfidence:
                    maxProberConfidence = proberConfidence
                    maxProber = prober
            if maxProber and (maxProberConfidence > MINIMUM_THRESHOLD):
                self.result = {'encoding': maxProber.get_charset_name(),
                               'confidence': maxProber.get_confidence()}
                return self.result
        if constants._debug:
            sys.stderr.write('no probers hit minimum threshhold\n')
            for prober in self._mCharSetProbers[0].mProbers:
                if not prober:
                    continue
                sys.stderr.write('%s confidence = %s\n' %
                                 (prober.get_charset_name(),
                                  prober.get_confidence()))
|
ghchinoy/tensorflow | refs/heads/master | tensorflow/contrib/distributions/python/kernel_tests/transformed_distribution_test.py | 25 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for TransformedDistribution."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from scipy import stats
from tensorflow.contrib import distributions
from tensorflow.contrib.distributions.python.ops import bijectors
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops.linalg import linalg
from tensorflow.python.platform import test
# Short aliases used throughout this test module.
bs = bijectors
ds = distributions
la = linalg
class DummyMatrixTransform(bs.Bijector):
  """Tractable matrix transformation.

  This is a non-sensical bijector that has forward/inverse_min_event_ndims=2.
  The main use is to check that transformed distribution calculations are done
  appropriately.
  """

  def __init__(self):
    super(DummyMatrixTransform, self).__init__(
        forward_min_event_ndims=2,
        is_constant_jacobian=False,
        validate_args=False,
        name="dummy")

  def _forward(self, x):
    # Identity map; only the (fake) Jacobians are interesting.
    return x

  def _inverse(self, y):
    return y

  # Note: These jacobians don't make sense.
  def _forward_log_det_jacobian(self, x):
    return -linalg_ops.matrix_determinant(x)

  def _inverse_log_det_jacobian(self, x):
    return linalg_ops.matrix_determinant(x)
class TransformedDistributionTest(test.TestCase):
  """Checks ds.TransformedDistribution against scipy.stats references."""

  def _cls(self):
    # Indirection point so subclasses can test a different implementation.
    return ds.TransformedDistribution

  def _make_unimplemented(self, name):
    # Returns a stub that raises, used to prove a code path is NOT taken.
    def _unimplemented(self, *args):  # pylint: disable=unused-argument
      raise NotImplementedError("{} not implemented".format(name))
    return _unimplemented

  def testTransformedDistribution(self):
    g = ops.Graph()
    with g.as_default():
      mu = 3.0
      sigma = 2.0
      # Note: the Jacobian callable only works for this example; more generally
      # you may or may not need a reduce_sum.
      log_normal = self._cls()(
          distribution=ds.Normal(loc=mu, scale=sigma),
          bijector=bs.Exp())
      # scipy lognorm parameterization: s=sigma, scale=exp(mu).
      sp_dist = stats.lognorm(s=sigma, scale=np.exp(mu))

      # sample
      sample = log_normal.sample(100000, seed=235)
      self.assertAllEqual([], log_normal.event_shape)
      with self.session(graph=g):
        self.assertAllEqual([], log_normal.event_shape_tensor().eval())
        self.assertAllClose(
            sp_dist.mean(), np.mean(sample.eval()), atol=0.0, rtol=0.05)

      # pdf, log_pdf, cdf, etc...
      # The mean of the lognormal is around 148.
      test_vals = np.linspace(0.1, 1000., num=20).astype(np.float32)
      for func in [[log_normal.log_prob, sp_dist.logpdf],
                   [log_normal.prob, sp_dist.pdf],
                   [log_normal.log_cdf, sp_dist.logcdf],
                   [log_normal.cdf, sp_dist.cdf],
                   [log_normal.survival_function, sp_dist.sf],
                   [log_normal.log_survival_function, sp_dist.logsf]]:
        actual = func[0](test_vals)
        expected = func[1](test_vals)
        with self.session(graph=g):
          self.assertAllClose(expected, actual.eval(), atol=0, rtol=0.01)

  def testNonInjectiveTransformedDistribution(self):
    g = ops.Graph()
    with g.as_default():
      mu = 1.
      sigma = 2.0
      # AbsoluteValue is not injective: prob sums over both preimages.
      abs_normal = self._cls()(
          distribution=ds.Normal(loc=mu, scale=sigma),
          bijector=bs.AbsoluteValue())
      sp_normal = stats.norm(mu, sigma)

      # sample
      sample = abs_normal.sample(100000, seed=235)
      self.assertAllEqual([], abs_normal.event_shape)
      with self.session(graph=g):
        sample_ = sample.eval()
        self.assertAllEqual([], abs_normal.event_shape_tensor().eval())

        # Abs > 0, duh!
        np.testing.assert_array_less(0, sample_)

        # Let X ~ Normal(mu, sigma), Y := |X|, then
        # P[Y < 0.77] = P[-0.77 < X < 0.77]
        self.assertAllClose(
            sp_normal.cdf(0.77) - sp_normal.cdf(-0.77),
            (sample_ < 0.77).mean(), rtol=0.01)

        # p_Y(y) = p_X(-y) + p_X(y),
        self.assertAllClose(
            sp_normal.pdf(1.13) + sp_normal.pdf(-1.13),
            abs_normal.prob(1.13).eval())

        # Log[p_Y(y)] = Log[p_X(-y) + p_X(y)]
        self.assertAllClose(
            np.log(sp_normal.pdf(2.13) + sp_normal.pdf(-2.13)),
            abs_normal.log_prob(2.13).eval())

  def testQuantile(self):
    with self.cached_session() as sess:
      logit_normal = self._cls()(
          distribution=ds.Normal(loc=0., scale=1.),
          bijector=bs.Sigmoid(),
          validate_args=True)
      grid = [0., 0.25, 0.5, 0.75, 1.]
      q = logit_normal.quantile(grid)
      # cdf(quantile(p)) == p checks the two are inverses.
      cdf = logit_normal.cdf(q)
      cdf_ = sess.run(cdf)
      self.assertAllClose(grid, cdf_, rtol=1e-6, atol=0.)

  def testCachedSamples(self):
    # log_prob(sample(...)) must use the cached pre-image, so the bijector's
    # inverse machinery is replaced with stubs that raise if ever called.
    exp_forward_only = bs.Exp()
    exp_forward_only._inverse = self._make_unimplemented(
        "inverse")
    exp_forward_only._inverse_event_shape_tensor = self._make_unimplemented(
        "inverse_event_shape_tensor ")
    exp_forward_only._inverse_event_shape = self._make_unimplemented(
        "inverse_event_shape ")
    exp_forward_only._inverse_log_det_jacobian = self._make_unimplemented(
        "inverse_log_det_jacobian ")

    with self.cached_session() as sess:
      mu = 3.0
      sigma = 0.02
      log_normal = self._cls()(
          distribution=ds.Normal(loc=mu, scale=sigma),
          bijector=exp_forward_only)

      sample = log_normal.sample([2, 3], seed=42)
      sample_val, log_pdf_val = sess.run([sample, log_normal.log_prob(sample)])
      expected_log_pdf = stats.lognorm.logpdf(
          sample_val, s=sigma, scale=np.exp(mu))
      self.assertAllClose(expected_log_pdf, log_pdf_val, rtol=1e-4, atol=0.)

  def testCachedSamplesInvert(self):
    # Same idea as testCachedSamples, but via bs.Invert: the forward side
    # of the wrapped bijector is stubbed out instead.
    exp_inverse_only = bs.Exp()
    exp_inverse_only._forward = self._make_unimplemented(
        "forward")
    exp_inverse_only._forward_event_shape_tensor = self._make_unimplemented(
        "forward_event_shape_tensor ")
    exp_inverse_only._forward_event_shape = self._make_unimplemented(
        "forward_event_shape ")
    exp_inverse_only._forward_log_det_jacobian = self._make_unimplemented(
        "forward_log_det_jacobian ")

    log_forward_only = bs.Invert(exp_inverse_only)

    with self.cached_session() as sess:
      # The log bijector isn't defined over the whole real line, so we make
      # sigma sufficiently small so that the draws are positive.
      mu = 2.
      sigma = 1e-2
      exp_normal = self._cls()(
          distribution=ds.Normal(loc=mu, scale=sigma),
          bijector=log_forward_only)

      sample = exp_normal.sample([2, 3], seed=42)
      sample_val, log_pdf_val = sess.run([sample, exp_normal.log_prob(sample)])
      expected_log_pdf = sample_val + stats.norm.logpdf(
          np.exp(sample_val), loc=mu, scale=sigma)
      self.assertAllClose(expected_log_pdf, log_pdf_val, atol=0.)

  def testShapeChangingBijector(self):
    with self.cached_session():
      softmax = bs.SoftmaxCentered()
      standard_normal = ds.Normal(loc=0., scale=1.)
      # SoftmaxCentered maps event_shape [1] -> [2].
      multi_logit_normal = self._cls()(
          distribution=standard_normal,
          bijector=softmax,
          event_shape=[1])
      x = [[[-np.log(3.)], [0.]],
           [[np.log(3)], [np.log(5)]]]
      y = softmax.forward(x).eval()
      expected_log_pdf = (
          np.squeeze(stats.norm(loc=0., scale=1.).logpdf(x)) -
          np.sum(np.log(y), axis=-1))
      self.assertAllClose(expected_log_pdf,
                          multi_logit_normal.log_prob(y).eval())
      self.assertAllClose(
          [1, 2, 3, 2],
          array_ops.shape(multi_logit_normal.sample([1, 2, 3])).eval())
      self.assertAllEqual([2], multi_logit_normal.event_shape)
      self.assertAllEqual([2], multi_logit_normal.event_shape_tensor().eval())

  def testCastLogDetJacobian(self):
    """Test log_prob when Jacobian and log_prob dtypes do not match."""
    with self.cached_session():
      # Create an identity bijector whose jacobians have dtype int32
      int_identity = bs.Inline(
          forward_fn=array_ops.identity,
          inverse_fn=array_ops.identity,
          inverse_log_det_jacobian_fn=(
              lambda y: math_ops.cast(0, dtypes.int32)),
          forward_log_det_jacobian_fn=(
              lambda x: math_ops.cast(0, dtypes.int32)),
          forward_min_event_ndims=0,
          is_constant_jacobian=True)
      normal = self._cls()(
          distribution=ds.Normal(loc=0., scale=1.),
          bijector=int_identity,
          validate_args=True)

      y = normal.sample()
      # Only checks these evaluate without a dtype error.
      normal.log_prob(y).eval()
      normal.prob(y).eval()
      normal.entropy().eval()

  def testEntropy(self):
    with self.cached_session():
      shift = np.array([[-1, 0, 1], [-1, -2, -3]], dtype=np.float32)
      diag = np.array([[1, 2, 3], [2, 3, 2]], dtype=np.float32)
      actual_mvn_entropy = np.concatenate([
          [stats.multivariate_normal(shift[i], np.diag(diag[i]**2)).entropy()]
          for i in range(len(diag))])
      # Standard MVN pushed through an affine map == MVN(shift, diag).
      fake_mvn = self._cls()(
          ds.MultivariateNormalDiag(
              loc=array_ops.zeros_like(shift),
              scale_diag=array_ops.ones_like(diag),
              validate_args=True),
          bs.AffineLinearOperator(
              shift,
              scale=la.LinearOperatorDiag(diag, is_non_singular=True),
              validate_args=True),
          validate_args=True)
      self.assertAllClose(actual_mvn_entropy,
                          fake_mvn.entropy().eval())

  def testScalarBatchScalarEventIdentityScale(self):
    with self.cached_session() as sess:
      exp2 = self._cls()(
          ds.Exponential(rate=0.25),
          bijector=ds.bijectors.AffineScalar(scale=2.)
      )
      log_prob = exp2.log_prob(1.)
      log_prob_ = sess.run(log_prob)
      # Exponential(0.25) log-density at 0.5, minus log|scale|.
      base_log_prob = -0.5 * 0.25 + np.log(0.25)
      ildj = np.log(2.)
      self.assertAllClose(base_log_prob - ildj, log_prob_, rtol=1e-6, atol=0.)
class ScalarToMultiTest(test.TestCase):
def _cls(self):
return ds.TransformedDistribution
def setUp(self):
self._shift = np.array([-1, 0, 1], dtype=np.float32)
self._tril = np.array([[[1., 0, 0],
[2, 1, 0],
[3, 2, 1]],
[[2, 0, 0],
[3, 2, 0],
[4, 3, 2]]],
dtype=np.float32)
def _testMVN(self,
base_distribution_class,
base_distribution_kwargs,
batch_shape=(),
event_shape=(),
not_implemented_message=None):
with self.cached_session() as sess:
# Overriding shapes must be compatible w/bijector; most bijectors are
# batch_shape agnostic and only care about event_ndims.
# In the case of `Affine`, if we got it wrong then it would fire an
# exception due to incompatible dimensions.
batch_shape_pl = array_ops.placeholder(
dtypes.int32, name="dynamic_batch_shape")
event_shape_pl = array_ops.placeholder(
dtypes.int32, name="dynamic_event_shape")
feed_dict = {batch_shape_pl: np.array(batch_shape, dtype=np.int32),
event_shape_pl: np.array(event_shape, dtype=np.int32)}
fake_mvn_dynamic = self._cls()(
distribution=base_distribution_class(validate_args=True,
**base_distribution_kwargs),
bijector=bs.Affine(shift=self._shift, scale_tril=self._tril),
batch_shape=batch_shape_pl,
event_shape=event_shape_pl,
validate_args=True)
fake_mvn_static = self._cls()(
distribution=base_distribution_class(validate_args=True,
**base_distribution_kwargs),
bijector=bs.Affine(shift=self._shift, scale_tril=self._tril),
batch_shape=batch_shape,
event_shape=event_shape,
validate_args=True)
actual_mean = np.tile(self._shift, [2, 1]) # Affine elided this tile.
actual_cov = np.matmul(self._tril, np.transpose(self._tril, [0, 2, 1]))
def actual_mvn_log_prob(x):
return np.concatenate([
[stats.multivariate_normal(
actual_mean[i], actual_cov[i]).logpdf(x[:, i, :])]
for i in range(len(actual_cov))]).T
actual_mvn_entropy = np.concatenate([
[stats.multivariate_normal(
actual_mean[i], actual_cov[i]).entropy()]
for i in range(len(actual_cov))])
self.assertAllEqual([3], fake_mvn_static.event_shape)
self.assertAllEqual([2], fake_mvn_static.batch_shape)
self.assertAllEqual(tensor_shape.TensorShape(None),
fake_mvn_dynamic.event_shape)
self.assertAllEqual(tensor_shape.TensorShape(None),
fake_mvn_dynamic.batch_shape)
x = fake_mvn_static.sample(5, seed=0).eval()
for unsupported_fn in (fake_mvn_static.log_cdf,
fake_mvn_static.cdf,
fake_mvn_static.survival_function,
fake_mvn_static.log_survival_function):
with self.assertRaisesRegexp(NotImplementedError,
not_implemented_message):
unsupported_fn(x)
num_samples = 5e3
for fake_mvn, feed_dict in ((fake_mvn_static, {}),
(fake_mvn_dynamic, feed_dict)):
# Ensure sample works by checking first, second moments.
y = fake_mvn.sample(int(num_samples), seed=0)
x = y[0:5, ...]
sample_mean = math_ops.reduce_mean(y, 0)
centered_y = array_ops.transpose(y - sample_mean, [1, 2, 0])
sample_cov = math_ops.matmul(
centered_y, centered_y, transpose_b=True) / num_samples
[
sample_mean_,
sample_cov_,
x_,
fake_event_shape_,
fake_batch_shape_,
fake_log_prob_,
fake_prob_,
fake_entropy_,
] = sess.run([
sample_mean,
sample_cov,
x,
fake_mvn.event_shape_tensor(),
fake_mvn.batch_shape_tensor(),
fake_mvn.log_prob(x),
fake_mvn.prob(x),
fake_mvn.entropy(),
], feed_dict=feed_dict)
self.assertAllClose(actual_mean, sample_mean_, atol=0.1, rtol=0.1)
self.assertAllClose(actual_cov, sample_cov_, atol=0., rtol=0.1)
# Ensure all other functions work as intended.
self.assertAllEqual([5, 2, 3], x_.shape)
self.assertAllEqual([3], fake_event_shape_)
self.assertAllEqual([2], fake_batch_shape_)
self.assertAllClose(actual_mvn_log_prob(x_), fake_log_prob_,
atol=0., rtol=1e-6)
self.assertAllClose(np.exp(actual_mvn_log_prob(x_)), fake_prob_,
atol=0., rtol=1e-5)
self.assertAllClose(actual_mvn_entropy, fake_entropy_,
atol=0., rtol=1e-6)
  def testScalarBatchScalarEvent(self):
    """Scalar Normal base: both batch_shape and event_shape are overridden."""
    self._testMVN(
        base_distribution_class=ds.Normal,
        base_distribution_kwargs={"loc": 0., "scale": 1.},
        batch_shape=[2],
        event_shape=[3],
        not_implemented_message="not implemented when overriding event_shape")
  def testScalarBatchNonScalarEvent(self):
    """MVNDiag base (non-scalar event): only batch_shape may be overridden."""
    self._testMVN(
        base_distribution_class=ds.MultivariateNormalDiag,
        base_distribution_kwargs={"loc": [0., 0., 0.],
                                  "scale_diag": [1., 1, 1]},
        batch_shape=[2],
        not_implemented_message="not implemented")

    with self.cached_session():
      # Can't override event_shape for scalar batch, non-scalar event.
      with self.assertRaisesRegexp(ValueError, "base distribution not scalar"):
        self._cls()(
            distribution=ds.MultivariateNormalDiag(loc=[0.], scale_diag=[1.]),
            bijector=bs.Affine(shift=self._shift, scale_tril=self._tril),
            batch_shape=[2],
            event_shape=[3],
            validate_args=True)
  def testNonScalarBatchScalarEvent(self):
    """Vector Normal base (non-scalar batch): only event_shape override is valid."""
    self._testMVN(
        base_distribution_class=ds.Normal,
        base_distribution_kwargs={"loc": [0., 0], "scale": [1., 1]},
        event_shape=[3],
        not_implemented_message="not implemented when overriding event_shape")

    with self.cached_session():
      # Can't override batch_shape for non-scalar batch, scalar event.
      with self.assertRaisesRegexp(ValueError, "base distribution not scalar"):
        self._cls()(
            distribution=ds.Normal(loc=[0.], scale=[1.]),
            bijector=bs.Affine(shift=self._shift, scale_tril=self._tril),
            batch_shape=[2],
            event_shape=[3],
            validate_args=True)
  def testNonScalarBatchNonScalarEvent(self):
    """Non-scalar batch AND event: no shape override is permitted at all."""
    with self.cached_session():
      # Can't override event_shape and/or batch_shape for non_scalar batch,
      # non-scalar event.
      with self.assertRaisesRegexp(ValueError, "base distribution not scalar"):
        self._cls()(
            distribution=ds.MultivariateNormalDiag(loc=[[0.]],
                                                   scale_diag=[[1.]]),
            bijector=bs.Affine(shift=self._shift, scale_tril=self._tril),
            batch_shape=[2],
            event_shape=[3],
            validate_args=True)
  def testMatrixEvent(self):
    """Checks a matrix-valued event shape through a matrix bijector.

    Builds the same transformed distribution twice -- once with static
    batch/event shapes and once with placeholder (dynamic) shapes -- and
    verifies sampling, shape tensors, log_prob and prob against a NumPy
    reference computed from scipy.stats.norm plus the bijector's
    determinant-based Jacobian correction.
    """
    with self.cached_session() as sess:
      batch_shape = [2]
      event_shape = [2, 3, 3]
      # Placeholders exercise the fully-dynamic shape code path.
      batch_shape_pl = array_ops.placeholder(
          dtypes.int32, name="dynamic_batch_shape")
      event_shape_pl = array_ops.placeholder(
          dtypes.int32, name="dynamic_event_shape")
      feed_dict = {batch_shape_pl: np.array(batch_shape, dtype=np.int32),
                   event_shape_pl: np.array(event_shape, dtype=np.int32)}

      scale = 2.
      loc = 0.
      fake_mvn_dynamic = self._cls()(
          distribution=ds.Normal(
              loc=loc,
              scale=scale),
          bijector=DummyMatrixTransform(),
          batch_shape=batch_shape_pl,
          event_shape=event_shape_pl,
          validate_args=True)

      fake_mvn_static = self._cls()(
          distribution=ds.Normal(
              loc=loc,
              scale=scale),
          bijector=DummyMatrixTransform(),
          batch_shape=batch_shape,
          event_shape=event_shape,
          validate_args=True)

      def actual_mvn_log_prob(x):
        # This distribution is the normal PDF, reduced over the
        # last 3 dimensions + a jacobian term which corresponds
        # to the determinant of x.
        return (np.sum(
            stats.norm(loc, scale).logpdf(x), axis=(-1, -2, -3)) +
                np.sum(np.linalg.det(x), axis=-1))

      # Static construction knows its shapes; dynamic construction does not.
      self.assertAllEqual([2, 3, 3], fake_mvn_static.event_shape)
      self.assertAllEqual([2], fake_mvn_static.batch_shape)

      self.assertAllEqual(tensor_shape.TensorShape(None),
                          fake_mvn_dynamic.event_shape)
      self.assertAllEqual(tensor_shape.TensorShape(None),
                          fake_mvn_dynamic.batch_shape)

      num_samples = 5e3
      for fake_mvn, feed_dict in ((fake_mvn_static, {}),
                                  (fake_mvn_dynamic, feed_dict)):
        # Ensure sample works by checking first, second moments.
        y = fake_mvn.sample(int(num_samples), seed=0)
        x = y[0:5, ...]
        [
            x_,
            fake_event_shape_,
            fake_batch_shape_,
            fake_log_prob_,
            fake_prob_,
        ] = sess.run([
            x,
            fake_mvn.event_shape_tensor(),
            fake_mvn.batch_shape_tensor(),
            fake_mvn.log_prob(x),
            fake_mvn.prob(x),
        ], feed_dict=feed_dict)

        # Ensure all other functions work as intended.
        self.assertAllEqual([5, 2, 2, 3, 3], x_.shape)
        self.assertAllEqual([2, 3, 3], fake_event_shape_)
        self.assertAllEqual([2], fake_batch_shape_)
        self.assertAllClose(actual_mvn_log_prob(x_), fake_log_prob_,
                            atol=0., rtol=1e-6)
        self.assertAllClose(np.exp(actual_mvn_log_prob(x_)), fake_prob_,
                            atol=0., rtol=1e-5)
# Run every test in this module via the TensorFlow test runner.
if __name__ == "__main__":
  test.main()
|
yloiseau/Watson | refs/heads/master | tests/test_watson.py | 1 | import sys
import json
import os
import datetime
try:
from unittest import mock
except ImportError:
import mock
try:
from io import StringIO
except ImportError:
from StringIO import StringIO
import py
import pytest
import requests
import arrow
from dateutil.tz.tz import tzutc
from click import get_app_dir
from watson import Watson, WatsonError
from watson.watson import ConfigurationError, ConfigParser
from watson.utils import get_start_time_for_period
TEST_FIXTURE_DIR = py.path.local(
os.path.dirname(
os.path.realpath(__file__)
)
) / 'resources'
PY2 = sys.version_info[0] == 2
if not PY2:
builtins = 'builtins'
else:
builtins = '__builtin__'
def mock_datetime(dt, dt_module):
    """Return a patcher that freezes ``dt_module.datetime`` at *dt*.

    ``now``/``utcnow``/``today`` all return the fixed *dt*.  The metaclass
    keeps ``isinstance(x, datetime.datetime)`` working for real datetime
    objects while the module attribute is patched.
    """
    class DateTimeMeta(type):

        @classmethod
        def __instancecheck__(mcs, obj):
            # Real datetimes must still pass isinstance() against the mock.
            return isinstance(obj, datetime.datetime)

    class BaseMockedDateTime(datetime.datetime):

        @classmethod
        def now(cls, tz=None):
            return dt.replace(tzinfo=tz)

        @classmethod
        def utcnow(cls):
            return dt

        @classmethod
        def today(cls):
            return dt

    # Instantiate the metaclass directly so the mock class is named
    # 'datetime', matching the attribute it replaces.
    MockedDateTime = DateTimeMeta('datetime', (BaseMockedDateTime,), {})

    return mock.patch.object(dt_module, 'datetime', MockedDateTime)
@pytest.fixture
def config_dir(tmpdir):
    """Create and return a fresh per-test 'config' directory path."""
    created = tmpdir.mkdir('config')
    return str(created)
def mock_read(content):
    """Build a ConfigParser.read replacement that parses *content* directly."""
    def fake_read(self, name):
        # Feed the canned string through the parser's internal reader
        # instead of touching the filesystem.
        return self._read(StringIO(content), name)
    return fake_read
@pytest.fixture
def watson(config_dir):
    """Provide a Watson instance backed by the per-test config directory."""
    instance = Watson(config_dir=config_dir)
    return instance
# current
def test_current(watson):
content = json.dumps({'project': 'foo', 'start': 0, 'tags': ['A', 'B']})
with mock.patch('%s.open' % builtins, mock.mock_open(read_data=content)):
assert watson.current['project'] == 'foo'
assert watson.current['start'] == arrow.get(0)
assert watson.current['tags'] == ['A', 'B']
def test_current_with_empty_file(watson):
with mock.patch('%s.open' % builtins, mock.mock_open(read_data="")):
with mock.patch('os.path.getsize', return_value=0):
assert watson.current == {}
def test_current_with_nonexistent_file(watson):
with mock.patch('%s.open' % builtins, side_effect=IOError):
assert watson.current == {}
def test_current_watson_non_valid_json(watson):
content = "{'foo': bar}"
with mock.patch('%s.open' % builtins, mock.mock_open(read_data=content)):
with mock.patch('os.path.getsize', return_value=len(content)):
with pytest.raises(WatsonError):
watson.current
def test_current_with_given_state(config_dir):
content = json.dumps({'project': 'foo', 'start': 0})
watson = Watson(current={'project': 'bar', 'start': 0},
config_dir=config_dir)
with mock.patch('%s.open' % builtins, mock.mock_open(read_data=content)):
assert watson.current['project'] == 'bar'
def test_current_with_empty_given_state(config_dir):
content = json.dumps({'project': 'foo', 'start': 0})
watson = Watson(current=[], config_dir=config_dir)
with mock.patch('%s.open' % builtins, mock.mock_open(read_data=content)):
assert watson.current == {}
# last_sync
def test_last_sync(watson):
now = arrow.get(123)
content = json.dumps(now.timestamp)
with mock.patch('%s.open' % builtins, mock.mock_open(read_data=content)):
assert watson.last_sync == now
def test_last_sync_with_empty_file(watson):
with mock.patch('%s.open' % builtins, mock.mock_open(read_data="")):
with mock.patch('os.path.getsize', return_value=0):
assert watson.last_sync == arrow.get(0)
def test_last_sync_with_nonexistent_file(watson):
with mock.patch('%s.open' % builtins, side_effect=IOError):
assert watson.last_sync == arrow.get(0)
def test_last_sync_watson_non_valid_json(watson):
content = "{'foo': bar}"
with mock.patch('%s.open' % builtins, mock.mock_open(read_data=content)):
with mock.patch('os.path.getsize', return_value=len(content)):
with pytest.raises(WatsonError):
watson.last_sync
def test_last_sync_with_given_state(config_dir):
content = json.dumps(123)
now = arrow.now()
watson = Watson(last_sync=now, config_dir=config_dir)
with mock.patch('%s.open' % builtins, mock.mock_open(read_data=content)):
assert watson.last_sync == now
def test_last_sync_with_empty_given_state(config_dir):
content = json.dumps(123)
watson = Watson(last_sync=None, config_dir=config_dir)
with mock.patch('%s.open' % builtins, mock.mock_open(read_data=content)):
assert watson.last_sync == arrow.get(0)
# frames
def test_frames(watson):
content = json.dumps([[0, 10, 'foo', None, ['A', 'B', 'C']]])
with mock.patch('%s.open' % builtins, mock.mock_open(read_data=content)):
assert len(watson.frames) == 1
assert watson.frames[0].project == 'foo'
assert watson.frames[0].start == arrow.get(0)
assert watson.frames[0].stop == arrow.get(10)
assert watson.frames[0].tags == ['A', 'B', 'C']
def test_frames_without_tags(watson):
content = json.dumps([[0, 10, 'foo', None]])
with mock.patch('%s.open' % builtins, mock.mock_open(read_data=content)):
assert len(watson.frames) == 1
assert watson.frames[0].project == 'foo'
assert watson.frames[0].start == arrow.get(0)
assert watson.frames[0].stop == arrow.get(10)
assert watson.frames[0].tags == []
def test_frames_with_empty_file(watson):
with mock.patch('%s.open' % builtins, mock.mock_open(read_data="")):
with mock.patch('os.path.getsize', return_value=0):
assert len(watson.frames) == 0
def test_frames_with_nonexistent_file(watson):
with mock.patch('%s.open' % builtins, side_effect=IOError):
assert len(watson.frames) == 0
def test_frames_watson_non_valid_json(watson):
    """Invalid JSON in the frames file must raise WatsonError."""
    content = "{'foo': bar}"

    with mock.patch('%s.open' % builtins, mock.mock_open(read_data=content)):
        # Report a non-zero file size so the content is actually parsed.
        # The previous code did ``mock_getsize.return_value(len(content))``,
        # which *calls* the auto-created return-value Mock instead of
        # setting it, so getsize returned a Mock rather than an int.  Use
        # the same form as the sibling *_non_valid_json tests.
        with mock.patch('os.path.getsize', return_value=len(content)):
            with pytest.raises(WatsonError):
                watson.frames
def test_given_frames(config_dir):
content = json.dumps([[0, 10, 'foo', None, ['A']]])
watson = Watson(frames=[[0, 10, 'bar', None, ['A', 'B']]],
config_dir=config_dir)
with mock.patch('%s.open' % builtins, mock.mock_open(read_data=content)):
assert len(watson.frames) == 1
assert watson.frames[0].project == 'bar'
assert watson.frames[0].tags == ['A', 'B']
def test_frames_with_empty_given_state(config_dir):
content = json.dumps([[0, 10, 'foo', None, ['A']]])
watson = Watson(frames=[], config_dir=config_dir)
with mock.patch('%s.open' % builtins, mock.mock_open(read_data=content)):
assert len(watson.frames) == 0
# config
def test_empty_config_dir():
watson = Watson()
assert watson._dir == get_app_dir('watson')
def test_wrong_config(watson):
content = u"""
toto
"""
with mock.patch.object(ConfigParser, 'read', mock_read(content)):
with pytest.raises(ConfigurationError):
watson.config
def test_empty_config(watson):
with mock.patch.object(ConfigParser, 'read', mock_read(u'')):
assert len(watson.config.sections()) == 0
def test_config_get(watson):
content = u"""
[backend]
url = foo
token =
"""
with mock.patch.object(ConfigParser, 'read', mock_read(content)):
config = watson.config
assert config.get('backend', 'url') == 'foo'
assert config.get('backend', 'token') == ''
assert config.get('backend', 'foo') is None
assert config.get('backend', 'foo', 'bar') == 'bar'
assert config.get('option', 'spamm') is None
assert config.get('option', 'spamm', 'eggs') == 'eggs'
def test_config_getboolean(watson):
content = u"""
[options]
flag1 = 1
flag2 = ON
flag3 = True
flag4 = yes
flag5 = false
flag6 =
"""
with mock.patch.object(ConfigParser, 'read', mock_read(content)):
config = watson.config
assert config.getboolean('options', 'flag1') is True
assert config.getboolean('options', 'flag1', False) is True
assert config.getboolean('options', 'flag2') is True
assert config.getboolean('options', 'flag3') is True
assert config.getboolean('options', 'flag4') is True
assert config.getboolean('options', 'flag5') is False
assert config.getboolean('options', 'flag6') is False
assert config.getboolean('options', 'flag6', True) is True
assert config.getboolean('options', 'missing') is False
assert config.getboolean('options', 'missing', True) is True
def test_config_getint(watson):
content = u"""
[options]
value1 = 42
value2 = spamm
value3 =
"""
with mock.patch.object(ConfigParser, 'read', mock_read(content)):
config = watson.config
assert config.getint('options', 'value1') == 42
assert config.getint('options', 'value1', 666) == 42
assert config.getint('options', 'missing') is None
assert config.getint('options', 'missing', 23) == 23
# default is not converted!
assert config.getint('options', 'missing', '42') == '42'
assert config.getint('options', 'missing', 6.66) == 6.66
with pytest.raises(ValueError):
config.getint('options', 'value2')
with pytest.raises(ValueError):
config.getint('options', 'value3')
def test_config_getfloat(watson):
content = u"""
[options]
value1 = 3.14
value2 = 42
value3 = spamm
value4 =
"""
with mock.patch.object(ConfigParser, 'read', mock_read(content)):
config = watson.config
assert config.getfloat('options', 'value1') == 3.14
assert config.getfloat('options', 'value1', 6.66) == 3.14
assert config.getfloat('options', 'value2') == 42.0
assert isinstance(config.getfloat('options', 'value2'), float)
assert config.getfloat('options', 'missing') is None
assert config.getfloat('options', 'missing', 3.14) == 3.14
# default is not converted!
assert config.getfloat('options', 'missing', '3.14') == '3.14'
with pytest.raises(ValueError):
config.getfloat('options', 'value3')
with pytest.raises(ValueError):
config.getfloat('options', 'value4')
def test_config_getlist(watson):
content = u"""
# empty lines in option values (including the first one) are discarded
[options]
value1 =
one
two three
four
five six
# multiple inner space preserved
value2 = one "two three" four 'five six'
value3 = one
two three
# outer space stripped
value4 = one
two three
four
# hash char not at start of line does not start comment
value5 = one
two #three
four # five
"""
with mock.patch.object(ConfigParser, 'read', mock_read(content)):
gl = watson.config.getlist
assert gl('options', 'value1') == ['one', 'two three', 'four',
'five six']
assert gl('options', 'value2') == ['one', 'two three', 'four',
'five six']
assert gl('options', 'value3') == ['one', 'two three']
assert gl('options', 'value4') == ['one', 'two three', 'four']
assert gl('options', 'value5') == ['one', 'two #three', 'four # five']
# default values
assert gl('options', 'novalue') == []
assert gl('options', 'novalue', None) == []
assert gl('options', 'novalue', 42) == 42
assert gl('nosection', 'dummy') == []
assert gl('nosection', 'dummy', None) == []
assert gl('nosection', 'dummy', 42) == 42
default = gl('nosection', 'dummy')
default.append(42)
assert gl('nosection', 'dummy') != [42], (
"Modifying default return value should not have side effect.")
def test_set_config(watson):
    """Assigning a new ConfigParser replaces the loaded configuration."""
    config = ConfigParser()
    config.set('foo', 'bar', 'lol')
    watson.config = config

    # The original final line lacked ``assert``: the comparison was
    # evaluated and discarded, so this test could never fail.
    assert watson.config.get('foo', 'bar') == 'lol'
# start
def test_start_new_project(watson):
watson.start('foo', ['A', 'B'])
assert watson.current != {}
assert watson.is_started is True
assert watson.current.get('project') == 'foo'
assert isinstance(watson.current.get('start'), arrow.Arrow)
assert watson.current.get('tags') == ['A', 'B']
def test_start_new_project_without_tags(watson):
watson.start('foo')
assert watson.current != {}
assert watson.is_started is True
assert watson.current.get('project') == 'foo'
assert isinstance(watson.current.get('start'), arrow.Arrow)
assert watson.current.get('tags') == []
def test_start_two_projects(watson):
watson.start('foo')
with pytest.raises(WatsonError):
watson.start('bar')
assert watson.current != {}
assert watson.current['project'] == 'foo'
assert watson.is_started is True
# stop
def test_stop_started_project(watson):
watson.start('foo', tags=['A', 'B'])
watson.stop()
assert watson.current == {}
assert watson.is_started is False
assert len(watson.frames) == 1
assert watson.frames[0].project == 'foo'
assert isinstance(watson.frames[0].start, arrow.Arrow)
assert isinstance(watson.frames[0].stop, arrow.Arrow)
assert watson.frames[0].tags == ['A', 'B']
def test_stop_started_project_without_tags(watson):
watson.start('foo')
watson.stop()
assert watson.current == {}
assert watson.is_started is False
assert len(watson.frames) == 1
assert watson.frames[0].project == 'foo'
assert isinstance(watson.frames[0].start, arrow.Arrow)
assert isinstance(watson.frames[0].stop, arrow.Arrow)
assert watson.frames[0].tags == []
def test_stop_no_project(watson):
with pytest.raises(WatsonError):
watson.stop()
# cancel
def test_cancel_started_project(watson):
watson.start('foo')
watson.cancel()
assert watson.current == {}
assert len(watson.frames) == 0
def test_cancel_no_project(watson):
with pytest.raises(WatsonError):
watson.cancel()
# save
def test_save_without_changes(watson):
with mock.patch('%s.open' % builtins, mock.mock_open()):
with mock.patch('json.dump') as json_mock:
watson.save()
assert not json_mock.called
def test_save_current(watson):
watson.start('foo', ['A', 'B'])
with mock.patch('%s.open' % builtins, mock.mock_open()):
with mock.patch('json.dump') as json_mock:
watson.save()
assert json_mock.call_count == 1
result = json_mock.call_args[0][0]
assert result['project'] == 'foo'
assert isinstance(result['start'], (int, float))
assert result['tags'] == ['A', 'B']
def test_save_current_without_tags(watson):
watson.start('foo')
with mock.patch('%s.open' % builtins, mock.mock_open()):
with mock.patch('json.dump') as json_mock:
watson.save()
assert json_mock.call_count == 1
result = json_mock.call_args[0][0]
assert result['project'] == 'foo'
assert isinstance(result['start'], (int, float))
assert result['tags'] == []
dump_args = json_mock.call_args[1]
assert dump_args['ensure_ascii'] is False
def test_save_empty_current(config_dir):
watson = Watson(current={'project': 'foo', 'start': 0},
config_dir=config_dir)
watson.current = {}
with mock.patch('%s.open' % builtins, mock.mock_open()):
with mock.patch('json.dump') as json_mock:
watson.save()
assert json_mock.call_count == 1
result = json_mock.call_args[0][0]
assert result == {}
def test_save_frames_no_change(config_dir):
watson = Watson(frames=[[0, 10, 'foo', None]],
config_dir=config_dir)
with mock.patch('%s.open' % builtins, mock.mock_open()):
with mock.patch('json.dump') as json_mock:
watson.save()
assert not json_mock.called
def test_save_added_frame(config_dir):
watson = Watson(frames=[[0, 10, 'foo', None]], config_dir=config_dir)
watson.frames.add('bar', 10, 20, ['A'])
with mock.patch('%s.open' % builtins, mock.mock_open()):
with mock.patch('json.dump') as json_mock:
watson.save()
assert json_mock.call_count == 1
result = json_mock.call_args[0][0]
assert len(result) == 2
assert result[0][2] == 'foo'
assert result[0][4] == []
assert result[1][2] == 'bar'
assert result[1][4] == ['A']
def test_save_changed_frame(config_dir):
watson = Watson(frames=[[0, 10, 'foo', None, ['A']]],
config_dir=config_dir)
watson.frames[0] = ('bar', 0, 10, ['A', 'B'])
with mock.patch('%s.open' % builtins, mock.mock_open()):
with mock.patch('json.dump') as json_mock:
watson.save()
assert json_mock.call_count == 1
result = json_mock.call_args[0][0]
assert len(result) == 1
assert result[0][2] == 'bar'
assert result[0][4] == ['A', 'B']
dump_args = json_mock.call_args[1]
assert dump_args['ensure_ascii'] is False
def test_save_config_no_changes(watson):
with mock.patch('%s.open' % builtins, mock.mock_open()):
with mock.patch.object(ConfigParser, 'write') as write_mock:
watson.save()
assert not write_mock.called
def test_save_config(watson):
with mock.patch('%s.open' % builtins, mock.mock_open()):
with mock.patch.object(ConfigParser, 'write') as write_mock:
watson.config = ConfigParser()
watson.save()
assert write_mock.call_count == 1
def test_save_last_sync(watson):
now = arrow.now()
watson.last_sync = now
with mock.patch('%s.open' % builtins, mock.mock_open()):
with mock.patch('json.dump') as json_mock:
watson.save()
assert json_mock.call_count == 1
assert json_mock.call_args[0][0] == now.timestamp
def test_save_empty_last_sync(config_dir):
watson = Watson(last_sync=arrow.now(), config_dir=config_dir)
watson.last_sync = None
with mock.patch('%s.open' % builtins, mock.mock_open()):
with mock.patch('json.dump') as json_mock:
watson.save()
assert json_mock.call_count == 1
assert json_mock.call_args[0][0] == 0
# push
def test_push_with_no_config(watson):
config = ConfigParser()
watson.config = config
with pytest.raises(WatsonError):
watson.push(arrow.now())
def test_push_with_no_url(watson):
config = ConfigParser()
config.add_section('backend')
config.set('backend', 'token', 'bar')
watson.config = config
with pytest.raises(WatsonError):
watson.push(arrow.now())
def test_push_with_no_token(watson):
config = ConfigParser()
config.add_section('backend')
config.set('backend', 'url', 'http://foo.com')
watson.config = config
with pytest.raises(WatsonError):
watson.push(arrow.now())
def test_push(watson, monkeypatch):
config = ConfigParser()
config.add_section('backend')
config.set('backend', 'url', 'http://foo.com')
config.set('backend', 'token', 'bar')
watson.frames.add('foo', 1, 2)
watson.frames.add('foo', 3, 4)
watson.last_sync = arrow.now()
watson.frames.add('bar', 1, 2, ['A', 'B'])
watson.frames.add('lol', 1, 2)
last_pull = arrow.now()
watson.frames.add('foo', 1, 2)
watson.frames.add('bar', 3, 4)
monkeypatch.setattr(watson, '_get_remote_projects', lambda *args: [
{'name': 'foo', 'url': '/projects/1/'},
{'name': 'bar', 'url': '/projects/2/'},
{'name': 'lol', 'url': '/projects/3/'},
])
class Response:
def __init__(self):
self.status_code = 201
with mock.patch('requests.post') as mock_put:
mock_put.return_value = Response()
with mock.patch.object(
Watson, 'config', new_callable=mock.PropertyMock
) as mock_config:
mock_config.return_value = config
watson.push(last_pull)
requests.post.assert_called_once_with(
mock.ANY,
mock.ANY,
headers={
'content-type': 'application/json',
'Authorization': "Token " + config.get('backend', 'token')
}
)
frames_sent = json.loads(mock_put.call_args[0][1])
assert len(frames_sent) == 2
assert frames_sent[0].get('project') == '/projects/2/'
assert frames_sent[0].get('tags') == ['A', 'B']
assert frames_sent[1].get('project') == '/projects/3/'
assert frames_sent[1].get('tags') == []
# pull
def test_pull_with_no_config(watson):
config = ConfigParser()
watson.config = config
with pytest.raises(ConfigurationError):
watson.pull()
def test_pull_with_no_url(watson):
config = ConfigParser()
config.add_section('backend')
config.set('backend', 'token', 'bar')
watson.config = config
with pytest.raises(ConfigurationError):
watson.pull()
def test_pull_with_no_token(watson):
config = ConfigParser()
config.add_section('backend')
config.set('backend', 'url', 'http://foo.com')
watson.config = config
with pytest.raises(ConfigurationError):
watson.pull()
def test_pull(watson, monkeypatch):
config = ConfigParser()
config.add_section('backend')
config.set('backend', 'url', 'http://foo.com')
config.set('backend', 'token', 'bar')
watson.last_sync = arrow.now()
watson.frames.add('foo', 1, 2, ['A', 'B'], id='1')
monkeypatch.setattr(watson, '_get_remote_projects', lambda *args: [
{'name': 'foo', 'url': '/projects/1/'},
{'name': 'bar', 'url': '/projects/2/'},
])
class Response:
def __init__(self):
self.status_code = 200
def json(self):
return [
{'project': '/projects/1/', 'start': 3, 'stop': 4, 'id': '1',
'tags': ['A']},
{'project': '/projects/2/', 'start': 4, 'stop': 5, 'id': '2',
'tags': []}
]
with mock.patch('requests.get') as mock_get:
mock_get.return_value = Response()
with mock.patch.object(
Watson, 'config', new_callable=mock.PropertyMock
) as mock_config:
mock_config.return_value = config
watson.pull()
requests.get.assert_called_once_with(
mock.ANY,
params={'last_sync': watson.last_sync},
headers={
'content-type': 'application/json',
'Authorization': "Token " + config.get('backend', 'token')
}
)
assert len(watson.frames) == 2
assert watson.frames[0].id == '1'
assert watson.frames[0].project == 'foo'
assert watson.frames[0].start.timestamp == 3
assert watson.frames[0].stop.timestamp == 4
assert watson.frames[0].tags == ['A']
assert watson.frames[1].id == '2'
assert watson.frames[1].project == 'bar'
assert watson.frames[1].start.timestamp == 4
assert watson.frames[1].stop.timestamp == 5
assert watson.frames[1].tags == []
# projects
def test_projects(watson):
    """Projects are reported alphabetically and without duplicates."""
    sample_names = ['foo', 'bar', 'bar', 'bar', 'foo', 'lol']
    for project_name in sample_names:
        watson.frames.add(project_name, 0, 0)

    assert watson.projects == ['bar', 'foo', 'lol']
def test_projects_no_frames(watson):
assert watson.projects == []
# tags
def test_tags(watson):
    """Tags are reported alphabetically, deduplicated across all frames."""
    samples = (
        ('foo', ('A', 'D')),
        ('bar', ('A', 'C')),
        ('foo', ('B', 'C')),
        ('lol', ()),
        # ``('C')`` is just the string 'C', not a tuple -- it only worked
        # because iterating a 1-char string yields that char.  Use a real
        # 1-tuple so every sample has the same shape.
        ('bar', ('C',))
    )

    for name, tags in samples:
        watson.frames.add(name, 0, 0, tags)

    assert watson.tags == ['A', 'B', 'C', 'D']
def test_tags_no_frames(watson):
assert watson.tags == []
# merge
@pytest.mark.datafiles(
TEST_FIXTURE_DIR / 'frames-with-conflict',
)
def test_merge_report(watson, datafiles):
# Get report
watson.frames.add('foo', 0, 15, id='1', updated_at=15)
watson.frames.add('bar', 20, 45, id='2', updated_at=45)
conflicting, merging = watson.merge_report(
str(datafiles) + '/frames-with-conflict')
assert len(conflicting) == 1
assert len(merging) == 1
assert conflicting[0].id == '2'
assert merging[0].id == '3'
# report/log
_dt = datetime.datetime
_tz = {'tzinfo': tzutc()}
@pytest.mark.parametrize('now, mode, start_time', [
(_dt(2016, 6, 2, **_tz), 'year', _dt(2016, 1, 1, **_tz)),
(_dt(2016, 6, 2, **_tz), 'month', _dt(2016, 6, 1, **_tz)),
(_dt(2016, 6, 2, **_tz), 'week', _dt(2016, 5, 30, **_tz)),
(_dt(2016, 6, 2, **_tz), 'day', _dt(2016, 6, 2, **_tz)),
(_dt(2012, 2, 24, **_tz), 'year', _dt(2012, 1, 1, **_tz)),
(_dt(2012, 2, 24, **_tz), 'month', _dt(2012, 2, 1, **_tz)),
(_dt(2012, 2, 24, **_tz), 'week', _dt(2012, 2, 20, **_tz)),
(_dt(2012, 2, 24, **_tz), 'day', _dt(2012, 2, 24, **_tz)),
])
def test_get_start_time_for_period(now, mode, start_time):
with mock_datetime(now, datetime):
assert get_start_time_for_period(mode).datetime == start_time
|
bak1an/django | refs/heads/master | django/contrib/auth/forms.py | 15 | import unicodedata
from django import forms
from django.contrib.auth import (
authenticate, get_user_model, password_validation,
)
from django.contrib.auth.hashers import (
UNUSABLE_PASSWORD_PREFIX, identify_hasher,
)
from django.contrib.auth.models import User
from django.contrib.auth.tokens import default_token_generator
from django.contrib.sites.shortcuts import get_current_site
from django.core.mail import EmailMultiAlternatives
from django.template import loader
from django.utils.encoding import force_bytes
from django.utils.http import urlsafe_base64_encode
from django.utils.text import capfirst
from django.utils.translation import gettext, gettext_lazy as _
UserModel = get_user_model()
class ReadOnlyPasswordHashWidget(forms.Widget):
    """Widget that renders a summary of a hashed password, not an input."""
    template_name = 'auth/widgets/read_only_password_hash.html'

    def get_context(self, name, value, attrs):
        context = super().get_context(name, value, attrs)
        context['summary'] = self._build_summary(value)
        return context

    def _build_summary(self, value):
        # An empty or explicitly unusable hash gets a single explanatory row.
        if not value or value.startswith(UNUSABLE_PASSWORD_PREFIX):
            return [{'label': gettext("No password set.")}]
        try:
            hasher = identify_hasher(value)
        except ValueError:
            return [{'label': gettext("Invalid password format or unknown hashing algorithm.")}]
        return [
            {'label': gettext(key), 'value': hash_part}
            for key, hash_part in hasher.safe_summary(value).items()
        ]
class ReadOnlyPasswordHashField(forms.Field):
    """Form field backing ReadOnlyPasswordHashWidget; its value never changes."""
    widget = ReadOnlyPasswordHashWidget

    def __init__(self, *args, **kwargs):
        # The hash display is informational, so default to optional unless
        # the caller explicitly says otherwise.
        if "required" not in kwargs:
            kwargs["required"] = False
        super().__init__(*args, **kwargs)

    def bound_data(self, data, initial):
        # The widget never renders an input element, so no submitted data
        # can exist; the initial hash is always the authoritative value.
        return initial

    def has_changed(self, initial, data):
        # A read-only field can never be changed by form submission.
        return False
class UsernameField(forms.CharField):
    def to_python(self, value):
        # Apply NFKC Unicode normalization so visually identical usernames
        # (e.g. full-width vs ASCII characters) map to a single canonical form.
        return unicodedata.normalize('NFKC', super().to_python(value))
class UserCreationForm(forms.ModelForm):
    """
    A form that creates a user, with no privileges, from the given username and
    password.
    """
    error_messages = {
        'password_mismatch': _("The two password fields didn't match."),
    }
    # Two plain-text password inputs; strip=False preserves significant
    # leading/trailing whitespace in passwords.
    password1 = forms.CharField(
        label=_("Password"),
        strip=False,
        widget=forms.PasswordInput,
        help_text=password_validation.password_validators_help_text_html(),
    )
    password2 = forms.CharField(
        label=_("Password confirmation"),
        widget=forms.PasswordInput,
        strip=False,
        help_text=_("Enter the same password as before, for verification."),
    )

    class Meta:
        model = User
        fields = ("username",)
        field_classes = {'username': UsernameField}

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Focus the username field on page load, if a subclass kept it.
        if self._meta.model.USERNAME_FIELD in self.fields:
            self.fields[self._meta.model.USERNAME_FIELD].widget.attrs.update({'autofocus': True})

    def clean_password2(self):
        """Check both passwords match and satisfy the configured validators."""
        password1 = self.cleaned_data.get("password1")
        password2 = self.cleaned_data.get("password2")
        if password1 and password2 and password1 != password2:
            raise forms.ValidationError(
                self.error_messages['password_mismatch'],
                code='password_mismatch',
            )
        # Set username on the instance first so validators that compare the
        # password against user attributes (e.g. similarity) can see it.
        self.instance.username = self.cleaned_data.get('username')
        password_validation.validate_password(self.cleaned_data.get('password2'), self.instance)
        return password2

    def save(self, commit=True):
        """Save the user with a properly hashed password."""
        user = super().save(commit=False)
        user.set_password(self.cleaned_data["password1"])
        if commit:
            user.save()
        return user
class UserChangeForm(forms.ModelForm):
    """Admin form for editing an existing user; the password is read-only."""
    password = ReadOnlyPasswordHashField(
        label=_("Password"),
        help_text=_(
            "Raw passwords are not stored, so there is no way to see this "
            "user's password, but you can change the password using "
            "<a href=\"../password/\">this form</a>."
        ),
    )

    class Meta:
        model = User
        fields = '__all__'
        field_classes = {'username': UsernameField}

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        f = self.fields.get('user_permissions')
        if f is not None:
            # Prefetch content types to avoid one query per permission when
            # rendering the permissions widget.
            f.queryset = f.queryset.select_related('content_type')

    def clean_password(self):
        # Regardless of what the user provides, return the initial value.
        # This is done here, rather than on the field, because the
        # field does not have access to the initial value
        return self.initial["password"]
class AuthenticationForm(forms.Form):
    """
    Base class for authenticating users. Extend this to get a form that accepts
    username/password logins.
    """
    username = UsernameField(
        max_length=254,
        widget=forms.TextInput(attrs={'autofocus': True}),
    )
    password = forms.CharField(
        label=_("Password"),
        strip=False,
        widget=forms.PasswordInput,
    )

    error_messages = {
        'invalid_login': _(
            "Please enter a correct %(username)s and password. Note that both "
            "fields may be case-sensitive."
        ),
        'inactive': _("This account is inactive."),
    }

    def __init__(self, request=None, *args, **kwargs):
        """
        The 'request' parameter is set for custom auth use by subclasses.
        The form data comes in via the standard 'data' kwarg.
        """
        self.request = request
        # Filled in by clean() on successful authentication.
        self.user_cache = None
        super().__init__(*args, **kwargs)

        # Set the label for the "username" field.
        self.username_field = UserModel._meta.get_field(UserModel.USERNAME_FIELD)
        if self.fields['username'].label is None:
            self.fields['username'].label = capfirst(self.username_field.verbose_name)

    def clean(self):
        """Authenticate the credentials; raise 'invalid_login' on failure."""
        username = self.cleaned_data.get('username')
        password = self.cleaned_data.get('password')

        if username is not None and password:
            self.user_cache = authenticate(self.request, username=username, password=password)
            if self.user_cache is None:
                raise forms.ValidationError(
                    self.error_messages['invalid_login'],
                    code='invalid_login',
                    params={'username': self.username_field.verbose_name},
                )
            else:
                self.confirm_login_allowed(self.user_cache)

        return self.cleaned_data

    def confirm_login_allowed(self, user):
        """
        Controls whether the given User may log in. This is a policy setting,
        independent of end-user authentication. This default behavior is to
        allow login by active users, and reject login by inactive users.

        If the given user cannot log in, this method should raise a
        ``forms.ValidationError``.

        If the given user may log in, this method should return None.
        """
        if not user.is_active:
            raise forms.ValidationError(
                self.error_messages['inactive'],
                code='inactive',
            )

    def get_user_id(self):
        # Return the authenticated user's id, or None before authentication.
        if self.user_cache:
            return self.user_cache.id
        return None

    def get_user(self):
        # The user authenticated by clean(), or None.
        return self.user_cache
class PasswordResetForm(forms.Form):
    """Form that emails a one-use password-reset link to matching users."""

    email = forms.EmailField(label=_("Email"), max_length=254)

    def send_mail(self, subject_template_name, email_template_name,
                  context, from_email, to_email, html_email_template_name=None):
        """
        Send a django.core.mail.EmailMultiAlternatives to `to_email`.
        """
        subject = loader.render_to_string(subject_template_name, context)
        # Email subject *must not* contain newlines
        subject = ''.join(subject.splitlines())
        body = loader.render_to_string(email_template_name, context)

        email_message = EmailMultiAlternatives(subject, body, from_email, [to_email])
        if html_email_template_name is not None:
            html_email = loader.render_to_string(html_email_template_name, context)
            email_message.attach_alternative(html_email, 'text/html')

        email_message.send()

    def get_users(self, email):
        """Given an email, return matching user(s) who should receive a reset.

        This allows subclasses to more easily customize the default policies
        that prevent inactive users and users with unusable passwords from
        resetting their password.
        """
        # Case-insensitive match on the configured email field, active only.
        active_users = UserModel._default_manager.filter(**{
            '%s__iexact' % UserModel.get_email_field_name(): email,
            'is_active': True,
        })
        # Generator: excludes users whose password hash is marked unusable.
        return (u for u in active_users if u.has_usable_password())

    def save(self, domain_override=None,
             subject_template_name='registration/password_reset_subject.txt',
             email_template_name='registration/password_reset_email.html',
             use_https=False, token_generator=default_token_generator,
             from_email=None, request=None, html_email_template_name=None,
             extra_email_context=None):
        """
        Generate a one-use only link for resetting password and send it to the
        user.
        """
        email = self.cleaned_data["email"]
        for user in self.get_users(email):
            if not domain_override:
                current_site = get_current_site(request)
                site_name = current_site.name
                domain = current_site.domain
            else:
                site_name = domain = domain_override
            context = {
                'email': email,
                'domain': domain,
                'site_name': site_name,
                # Primary key is base64-encoded so the link survives any pk type.
                'uid': urlsafe_base64_encode(force_bytes(user.pk)).decode(),
                'user': user,
                'token': token_generator.make_token(user),
                'protocol': 'https' if use_https else 'http',
            }
            if extra_email_context is not None:
                context.update(extra_email_context)
            self.send_mail(
                subject_template_name, email_template_name, context, from_email,
                email, html_email_template_name=html_email_template_name,
            )
class SetPasswordForm(forms.Form):
    """
    A form that lets a user change set their password without entering the old
    password
    """
    error_messages = {
        'password_mismatch': _("The two password fields didn't match."),
    }
    new_password1 = forms.CharField(
        label=_("New password"),
        widget=forms.PasswordInput,
        # Whitespace is significant in passwords; never strip it.
        strip=False,
        help_text=password_validation.password_validators_help_text_html(),
    )
    new_password2 = forms.CharField(
        label=_("New password confirmation"),
        strip=False,
        widget=forms.PasswordInput,
    )

    def __init__(self, user, *args, **kwargs):
        # The user whose password is being set; also passed to the validators.
        self.user = user
        super().__init__(*args, **kwargs)

    def clean_new_password2(self):
        """Ensure both entries match, then run the configured validators."""
        password1 = self.cleaned_data.get('new_password1')
        password2 = self.cleaned_data.get('new_password2')
        if password1 and password2:
            if password1 != password2:
                raise forms.ValidationError(
                    self.error_messages['password_mismatch'],
                    code='password_mismatch',
                )
        # May raise ValidationError from any configured password validator.
        password_validation.validate_password(password2, self.user)
        return password2

    def save(self, commit=True):
        """Hash and store the new password; persist the user if commit=True."""
        password = self.cleaned_data["new_password1"]
        self.user.set_password(password)
        if commit:
            self.user.save()
        return self.user
class PasswordChangeForm(SetPasswordForm):
    """
    A form that lets a user change their password by entering their old
    password.
    """
    error_messages = dict(SetPasswordForm.error_messages, **{
        'password_incorrect': _("Your old password was entered incorrectly. Please enter it again."),
    })
    old_password = forms.CharField(
        label=_("Old password"),
        strip=False,
        widget=forms.PasswordInput(attrs={'autofocus': True}),
    )

    # Ask for the old password before the two new-password fields.
    field_order = ['old_password', 'new_password1', 'new_password2']

    def clean_old_password(self):
        """
        Validate that the old_password field is correct.
        """
        old_password = self.cleaned_data["old_password"]
        if not self.user.check_password(old_password):
            raise forms.ValidationError(
                self.error_messages['password_incorrect'],
                code='password_incorrect',
            )
        return old_password
class AdminPasswordChangeForm(forms.Form):
    """
    A form used to change the password of a user in the admin interface.
    """
    error_messages = {
        'password_mismatch': _("The two password fields didn't match."),
    }
    required_css_class = 'required'
    password1 = forms.CharField(
        label=_("Password"),
        widget=forms.PasswordInput(attrs={'autofocus': True}),
        strip=False,
        help_text=password_validation.password_validators_help_text_html(),
    )
    password2 = forms.CharField(
        label=_("Password (again)"),
        widget=forms.PasswordInput,
        strip=False,
        help_text=_("Enter the same password as before, for verification."),
    )

    def __init__(self, user, *args, **kwargs):
        # The user whose password the admin is changing.
        self.user = user
        super().__init__(*args, **kwargs)

    def clean_password2(self):
        """Check both entries match and satisfy the configured validators."""
        password1 = self.cleaned_data.get('password1')
        password2 = self.cleaned_data.get('password2')
        if password1 and password2:
            if password1 != password2:
                raise forms.ValidationError(
                    self.error_messages['password_mismatch'],
                    code='password_mismatch',
                )
        password_validation.validate_password(password2, self.user)
        return password2

    def save(self, commit=True):
        """Save the new password."""
        password = self.cleaned_data["password1"]
        self.user.set_password(password)
        if commit:
            self.user.save()
        return self.user

    @property
    def changed_data(self):
        # Report the single pseudo-field 'password' as changed only when
        # *every* field changed (i.e. a complete password change was
        # submitted); otherwise report nothing changed.
        data = super().changed_data
        for name in self.fields.keys():
            if name not in data:
                return []
        return ['password']
|
jrrembert/django | refs/heads/master | django/contrib/gis/gdal/srs.py | 366 | """
The Spatial Reference class, represents OGR Spatial Reference objects.
Example:
>>> from django.contrib.gis.gdal import SpatialReference
>>> srs = SpatialReference('WGS84')
>>> print(srs)
GEOGCS["WGS 84",
DATUM["WGS_1984",
SPHEROID["WGS 84",6378137,298.257223563,
AUTHORITY["EPSG","7030"]],
TOWGS84[0,0,0,0,0,0,0],
AUTHORITY["EPSG","6326"]],
PRIMEM["Greenwich",0,
AUTHORITY["EPSG","8901"]],
UNIT["degree",0.01745329251994328,
AUTHORITY["EPSG","9122"]],
AUTHORITY["EPSG","4326"]]
>>> print(srs.proj)
+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs
>>> print(srs.ellipsoid)
(6378137.0, 6356752.3142451793, 298.25722356300003)
>>> print(srs.projected, srs.geographic)
False True
>>> srs.import_epsg(32140)
>>> print(srs.name)
NAD83 / Texas South Central
"""
from ctypes import byref, c_char_p, c_int
from django.contrib.gis.gdal.base import GDALBase
from django.contrib.gis.gdal.error import SRSException
from django.contrib.gis.gdal.prototypes import srs as capi
from django.utils import six
from django.utils.encoding import force_bytes, force_text
class SpatialReference(GDALBase):
    """
    A wrapper for the OGRSpatialReference object. According to the GDAL Web site,
    the SpatialReference object "provide[s] services to represent coordinate
    systems (projections and datums) and to transform between them."
    """

    def __init__(self, srs_input='', srs_type='user'):
        """
        Creates a GDAL OSR Spatial Reference object from the given input.
        The input may be string of OGC Well Known Text (WKT), an integer
        EPSG code, a PROJ.4 string, and/or a projection "well known" shorthand
        string (one of 'WGS84', 'WGS72', 'NAD27', 'NAD83').
        """
        if srs_type == 'wkt':
            # WKT input: create an empty SRS and import the WKT into it.
            self.ptr = capi.new_srs(c_char_p(b''))
            self.import_wkt(srs_input)
            return
        elif isinstance(srs_input, six.string_types):
            # Encoding to ASCII if unicode passed in.
            if isinstance(srs_input, six.text_type):
                srs_input = srs_input.encode('ascii')
            try:
                # If SRID is a string, e.g., '4326', then make acceptable
                # as user input.
                srid = int(srs_input)
                srs_input = 'EPSG:%d' % srid
            except ValueError:
                pass
        elif isinstance(srs_input, six.integer_types):
            # EPSG integer code was input.
            srs_type = 'epsg'
        elif isinstance(srs_input, self.ptr_type):
            # A raw OGR SRS pointer was passed in directly.
            srs = srs_input
            srs_type = 'ogr'
        else:
            raise TypeError('Invalid SRS type "%s"' % srs_type)

        if srs_type == 'ogr':
            # Input is already an SRS pointer.
            srs = srs_input
        else:
            # Creating a new SRS pointer, using the string buffer.
            buf = c_char_p(b'')
            srs = capi.new_srs(buf)

        # If the pointer is NULL, throw an exception.
        if not srs:
            raise SRSException('Could not create spatial reference from: %s' % srs_input)
        else:
            self.ptr = srs

        # Importing from either the user input string or an integer SRID.
        if srs_type == 'user':
            self.import_user_input(srs_input)
        elif srs_type == 'epsg':
            self.import_epsg(srs_input)

    def __del__(self):
        "Destroys this spatial reference."
        # Guard on `capi` too: module globals may already be torn down
        # during interpreter shutdown.
        if self._ptr and capi:
            capi.release_srs(self._ptr)

    def __getitem__(self, target):
        """
        Returns the value of the given string attribute node, None if the node
        doesn't exist.  Can also take a tuple as a parameter, (target, child),
        where child is the index of the attribute in the WKT.  For example:

        >>> wkt = 'GEOGCS["WGS 84", DATUM["WGS_1984, ... AUTHORITY["EPSG","4326"]]'
        >>> srs = SpatialReference(wkt) # could also use 'WGS84', or 4326
        >>> print(srs['GEOGCS'])
        WGS 84
        >>> print(srs['DATUM'])
        WGS_1984
        >>> print(srs['AUTHORITY'])
        EPSG
        >>> print(srs['AUTHORITY', 1]) # The authority value
        4326
        >>> print(srs['TOWGS84', 4]) # the fourth value in this wkt
        0
        >>> print(srs['UNIT|AUTHORITY']) # For the units authority, have to use the pipe symbole.
        EPSG
        >>> print(srs['UNIT|AUTHORITY', 1]) # The authority value for the units
        9122
        """
        if isinstance(target, tuple):
            return self.attr_value(*target)
        else:
            return self.attr_value(target)

    def __str__(self):
        "The string representation uses 'pretty' WKT."
        return self.pretty_wkt

    # #### SpatialReference Methods ####
    def attr_value(self, target, index=0):
        """
        The attribute value for the given target node (e.g. 'PROJCS'). The index
        keyword specifies an index of the child node to return.
        """
        if not isinstance(target, six.string_types) or not isinstance(index, int):
            raise TypeError
        return capi.get_attr_value(self.ptr, force_bytes(target), index)

    def auth_name(self, target):
        "Returns the authority name for the given string target node."
        return capi.get_auth_name(self.ptr, force_bytes(target))

    def auth_code(self, target):
        "Returns the authority code for the given string target node."
        return capi.get_auth_code(self.ptr, force_bytes(target))

    def clone(self):
        "Returns a clone of this SpatialReference object."
        return SpatialReference(capi.clone_srs(self.ptr))

    def from_esri(self):
        "Morphs this SpatialReference from ESRI's format to EPSG."
        capi.morph_from_esri(self.ptr)

    def identify_epsg(self):
        """
        This method inspects the WKT of this SpatialReference, and will
        add EPSG authority nodes where an EPSG identifier is applicable.
        """
        capi.identify_epsg(self.ptr)

    def to_esri(self):
        "Morphs this SpatialReference to ESRI's format."
        capi.morph_to_esri(self.ptr)

    def validate(self):
        "Checks to see if the given spatial reference is valid."
        capi.srs_validate(self.ptr)

    # #### Name & SRID properties ####
    @property
    def name(self):
        "Returns the name of this Spatial Reference."
        if self.projected:
            return self.attr_value('PROJCS')
        elif self.geographic:
            return self.attr_value('GEOGCS')
        elif self.local:
            return self.attr_value('LOCAL_CS')
        else:
            return None

    @property
    def srid(self):
        "Returns the SRID of top-level authority, or None if undefined."
        try:
            return int(self.attr_value('AUTHORITY', 1))
        except (TypeError, ValueError):
            return None

    # #### Unit Properties ####
    @property
    def linear_name(self):
        "Returns the name of the linear units."
        units, name = capi.linear_units(self.ptr, byref(c_char_p()))
        return name

    @property
    def linear_units(self):
        "Returns the value of the linear units."
        units, name = capi.linear_units(self.ptr, byref(c_char_p()))
        return units

    @property
    def angular_name(self):
        "Returns the name of the angular units."
        units, name = capi.angular_units(self.ptr, byref(c_char_p()))
        return name

    @property
    def angular_units(self):
        "Returns the value of the angular units."
        units, name = capi.angular_units(self.ptr, byref(c_char_p()))
        return units

    @property
    def units(self):
        """
        Returns a 2-tuple of the units value and the units name,
        and will automatically determines whether to return the linear
        or angular units.
        """
        units, name = None, None
        if self.projected or self.local:
            units, name = capi.linear_units(self.ptr, byref(c_char_p()))
        elif self.geographic:
            units, name = capi.angular_units(self.ptr, byref(c_char_p()))
        if name is not None:
            name = force_text(name)
        return (units, name)

    # #### Spheroid/Ellipsoid Properties ####
    @property
    def ellipsoid(self):
        """
        Returns a tuple of the ellipsoid parameters:
         (semimajor axis, semiminor axis, and inverse flattening)
        """
        return (self.semi_major, self.semi_minor, self.inverse_flattening)

    @property
    def semi_major(self):
        "Returns the Semi Major Axis for this Spatial Reference."
        return capi.semi_major(self.ptr, byref(c_int()))

    @property
    def semi_minor(self):
        "Returns the Semi Minor Axis for this Spatial Reference."
        return capi.semi_minor(self.ptr, byref(c_int()))

    @property
    def inverse_flattening(self):
        "Returns the Inverse Flattening for this Spatial Reference."
        return capi.invflattening(self.ptr, byref(c_int()))

    # #### Boolean Properties ####
    @property
    def geographic(self):
        """
        Returns True if this SpatialReference is geographic
         (root node is GEOGCS).
        """
        return bool(capi.isgeographic(self.ptr))

    @property
    def local(self):
        "Returns True if this SpatialReference is local (root node is LOCAL_CS)."
        return bool(capi.islocal(self.ptr))

    @property
    def projected(self):
        """
        Returns True if this SpatialReference is a projected coordinate system
         (root node is PROJCS).
        """
        return bool(capi.isprojected(self.ptr))

    # #### Import Routines #####
    def import_epsg(self, epsg):
        "Imports the Spatial Reference from the EPSG code (an integer)."
        capi.from_epsg(self.ptr, epsg)

    def import_proj(self, proj):
        "Imports the Spatial Reference from a PROJ.4 string."
        capi.from_proj(self.ptr, proj)

    def import_user_input(self, user_input):
        "Imports the Spatial Reference from the given user input string."
        capi.from_user_input(self.ptr, force_bytes(user_input))

    def import_wkt(self, wkt):
        "Imports the Spatial Reference from OGC WKT (string)"
        capi.from_wkt(self.ptr, byref(c_char_p(wkt)))

    def import_xml(self, xml):
        "Imports the Spatial Reference from an XML string."
        capi.from_xml(self.ptr, xml)

    # #### Export Properties ####
    @property
    def wkt(self):
        "Returns the WKT representation of this Spatial Reference."
        return capi.to_wkt(self.ptr, byref(c_char_p()))

    @property
    def pretty_wkt(self, simplify=0):
        # NOTE(review): a @property getter cannot receive extra arguments
        # through attribute access, so `simplify` always takes its default.
        "Returns the 'pretty' representation of the WKT."
        return capi.to_pretty_wkt(self.ptr, byref(c_char_p()), simplify)

    @property
    def proj(self):
        "Returns the PROJ.4 representation for this Spatial Reference."
        return capi.to_proj(self.ptr, byref(c_char_p()))

    @property
    def proj4(self):
        "Alias for proj()."
        return self.proj

    @property
    def xml(self, dialect=''):
        # NOTE(review): as with pretty_wkt, `dialect` is unreachable via
        # property access and is always the empty-string default.
        "Returns the XML representation of this Spatial Reference."
        return capi.to_xml(self.ptr, byref(c_char_p()), dialect)
class CoordTransform(GDALBase):
    "The coordinate system transformation object."

    def __init__(self, source, target):
        "Initializes on a source and target SpatialReference objects."
        # Both endpoints must be SpatialReference instances.
        if not (isinstance(source, SpatialReference) and
                isinstance(target, SpatialReference)):
            raise TypeError('source and target must be of type SpatialReference')
        self.ptr = capi.new_ct(source._ptr, target._ptr)
        # Remember the endpoint names for the string representation.
        self._srs1_name = source.name
        self._srs2_name = target.name

    def __del__(self):
        "Deletes this Coordinate Transformation object."
        # Guard on `capi` as well, since module globals may be gone at
        # interpreter shutdown.
        if capi and self._ptr:
            capi.destroy_ct(self._ptr)

    def __str__(self):
        return 'Transform from "{}" to "{}"'.format(self._srs1_name, self._srs2_name)
|
renegelinas/mi-instrument | refs/heads/master | mi/idk/platform/switch_driver.py | 11 | """
@file coi-services/mi/idk/platform/switch_driver.py
@author Bill French
@brief Main script class for running the switch_driver process
"""
from os.path import exists, join, isdir
from os import listdir
from mi.idk.metadata import Metadata
from mi.idk.comm_config import CommConfig
from mi.idk.config import Config
from mi.idk.exceptions import DriverDoesNotExist
from mi.core.log import get_logger ; log = get_logger()
import os
import re
from glob import glob
import subprocess
from mi.idk import prompt
import mi.idk.switch_driver
import mi.idk.platform.metadata
class SwitchDriver(mi.idk.switch_driver.SwitchDriver):
    """
    Main class for running the switch driver process.

    Platform-driver variant of the generic IDK SwitchDriver: drivers are
    identified by a repository path plus a version string, and versions are
    discovered from git release tags.  (Python 2 code: note the print
    statements below.)
    """
    def __init__(self, path=None, version=None):
        # Both may be None; fetch_metadata() prompts for missing values.
        self.driver_path = path
        self.driver_version = version

    def get_base_name(self):
        # e.g. path 'a/b' version '1.2.3' -> 'platform_a_b_1_2_3'
        return 'platform_%s_%s' % (self.driver_path.replace('/', '_'),
                                   self.driver_version.replace('.', '_'))

    def get_metadata(self):
        # Loads (and caches on self) the platform metadata for driver_path.
        self.metadata = mi.idk.platform.metadata.Metadata(self.driver_path)
        return self.metadata

    def fetch_metadata(self):
        """
        @brief collect metadata from the user
        """
        if not (self.driver_path):
            self.driver_path = prompt.text( 'Driver Path' )
        self.get_metadata()
        self.driver_version = prompt.text('Driver Version', self.metadata.version)

    def fetch_comm_config(self):
        """
        @brief No comm config for dsa
        """
        pass

    @staticmethod
    def list_drivers():
        """
        @brief Print a list of all the different drivers and their versions
        """
        drivers = SwitchDriver.get_drivers()

        for driver in sorted(drivers.keys()):
            for version in sorted(drivers[driver]):
                print "%s %s" % (driver, version)

    @staticmethod
    def get_drivers():
        """
        @brief Get a list of all the different drivers and their versions
        """
        result = {}

        driver_dir = join(Config().get("working_repo"), 'mi', 'platform', 'driver')
        log.debug("Driver Dir: %s", driver_dir)

        files = []
        # Recursively collect every metadata.yml under the driver directory.
        for dirname,_,_ in os.walk(driver_dir):
            files.extend(glob(os.path.join(dirname,"metadata.yml")))

        log.debug("Files: %s", files)

        for f in files:
            # Extract the driver path relative to driver_dir.
            matcher = re.compile( "%s/(.*)/metadata.yml" % driver_dir )
            match = matcher.match(f)
            path = match.group(1)
            result[path] = SwitchDriver.get_versions(path)

        return result

    @staticmethod
    def get_versions(path):
        """
        @brief Get all versions for this driver from the tags
        @param path - the driver path
        """
        # get all tags that start with this instrument
        # NOTE(review): `path` is interpolated into a shell=True command;
        # this is only safe because paths come from the local repository
        # tree, not untrusted input — confirm before exposing more widely.
        cmd = 'git tag -l ' + 'release_platform_' + path.replace('/', '_') + '*'
        log.debug("git cmd: %s", cmd)
        output = subprocess.check_output(cmd, shell=True)

        # 'master' is always an implicit version.
        version_list = ['master']

        if len(output) > 0:
            # Tags look like: release_platform_<path>_<major>_<minor>_<patch>
            tag_regex = re.compile(r'release_platform_[a-z0-9_]+(\d+_\d+_\d+)')
            tag_iter = tag_regex.finditer(output)
            for tag_match in tag_iter:
                version_list.append(tag_match.group(1))

        return version_list
|
Adward-R/SwayMini | refs/heads/master | lib/python2.7/site-packages/pip/vcs/subversion.py | 86 | from __future__ import absolute_import
import logging
import os
import re
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip.index import Link
from pip.utils import rmtree, display_path, call_subprocess
from pip.utils.logging import indent_log
from pip.vcs import vcs, VersionControl
_svn_xml_url_re = re.compile('url="([^"]+)"')
_svn_rev_re = re.compile('committed-rev="(\d+)"')
_svn_url_re = re.compile(r'URL: (.+)')
_svn_revision_re = re.compile(r'Revision: (.+)')
_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')
_svn_info_xml_url_re = re.compile(r'<url>(.*)</url>')
logger = logging.getLogger(__name__)
class Subversion(VersionControl):
    """Subversion backend for pip's VCS support.

    Wraps the ``svn`` command line client for checkout/export/update and
    for introspecting existing working copies (URL, revision, tags).
    """
    name = 'svn'
    dirname = '.svn'
    repo_name = 'checkout'
    schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https', 'svn+svn')

    def get_info(self, location):
        """Returns (url, revision), where both are strings"""
        assert not location.rstrip('/').endswith(self.dirname), \
            'Bad directory: %s' % location
        output = call_subprocess(
            [self.cmd, 'info', location],
            show_stdout=False,
            extra_environ={'LANG': 'C'},
        )
        match = _svn_url_re.search(output)
        if not match:
            logger.warning(
                'Cannot determine URL of svn checkout %s',
                display_path(location),
            )
            logger.debug('Output that cannot be parsed: \n%s', output)
            return None, None
        url = match.group(1).strip()
        match = _svn_revision_re.search(output)
        if not match:
            logger.warning(
                'Cannot determine revision of svn checkout %s',
                display_path(location),
            )
            logger.debug('Output that cannot be parsed: \n%s', output)
            return url, None
        return url, match.group(1)

    def export(self, location):
        """Export the svn repository at the url to the destination location"""
        url, rev = self.get_url_rev()
        rev_options = get_rev_options(url, rev)
        logger.info('Exporting svn repository %s to %s', url, location)
        with indent_log():
            if os.path.exists(location):
                # Subversion doesn't like to check out over an existing
                # directory --force fixes this, but was only added in svn 1.5
                rmtree(location)
            call_subprocess(
                [self.cmd, 'export'] + rev_options + [url, location],
                filter_stdout=self._filter, show_stdout=False)

    def switch(self, dest, url, rev_options):
        """Switch the working copy at `dest` to a new repository URL."""
        call_subprocess(
            [self.cmd, 'switch'] + rev_options + [url, dest])

    def update(self, dest, rev_options):
        """Update the working copy at `dest` to the requested revision."""
        call_subprocess(
            [self.cmd, 'update'] + rev_options + [dest])

    def obtain(self, dest):
        """Check out the repository to `dest`, switching/updating as needed."""
        url, rev = self.get_url_rev()
        rev_options = get_rev_options(url, rev)
        if rev:
            rev_display = ' (to revision %s)' % rev
        else:
            rev_display = ''
        if self.check_destination(dest, url, rev_options, rev_display):
            logger.info(
                'Checking out %s%s to %s',
                url,
                rev_display,
                display_path(dest),
            )
            call_subprocess(
                [self.cmd, 'checkout', '-q'] + rev_options + [url, dest])

    def get_location(self, dist, dependency_links):
        """Return the dependency link matching `dist`, or None."""
        for url in dependency_links:
            egg_fragment = Link(url).egg_fragment
            if not egg_fragment:
                continue
            if '-' in egg_fragment:
                # FIXME: will this work when a package has - in the name?
                key = '-'.join(egg_fragment.split('-')[:-1]).lower()
            else:
                key = egg_fragment
            if key == dist.key:
                return url.split('#', 1)[0]
        return None

    def get_revision(self, location):
        """
        Return the maximum revision for all files under a given location
        """
        # Note: taken from setuptools.command.egg_info
        revision = 0

        for base, dirs, files in os.walk(location):
            if self.dirname not in dirs:
                dirs[:] = []
                continue    # no sense walking uncontrolled subdirs
            dirs.remove(self.dirname)
            entries_fn = os.path.join(base, self.dirname, 'entries')
            if not os.path.exists(entries_fn):
                # FIXME: should we warn?
                continue

            dirurl, localrev = self._get_svn_url_rev(base)

            if base == location:
                base_url = dirurl + '/'   # save the root url
            elif not dirurl or not dirurl.startswith(base_url):
                # NOTE(review): base_url is only bound once the walk has
                # visited `location` itself successfully; if the root's
                # entries file is missing this branch would raise
                # NameError — confirm whether that path can occur.
                dirs[:] = []
                continue    # not part of the same svn tree, skip it
            revision = max(revision, localrev)
        return revision

    def get_url_rev(self):
        # hotfix the URL scheme after removing svn+ from svn+ssh:// readd it
        url, rev = super(Subversion, self).get_url_rev()
        if url.startswith('ssh://'):
            url = 'svn+' + url
        return url, rev

    def get_url(self, location):
        """Return the repository URL for the working copy at `location`."""
        # In cases where the source is in a subdirectory, not alongside
        # setup.py we have to look up in the location until we find a real
        # setup.py
        orig_location = location
        while not os.path.exists(os.path.join(location, 'setup.py')):
            last_location = location
            location = os.path.dirname(location)
            if location == last_location:
                # We've traversed up to the root of the filesystem without
                # finding setup.py
                logger.warning(
                    "Could not find setup.py for directory %s (tried all "
                    "parent directories)",
                    orig_location,
                )
                return None

        return self._get_svn_url_rev(location)[0]

    def _get_svn_url_rev(self, location):
        """Parse (url, max_revision) out of a working copy's metadata."""
        from pip.exceptions import InstallationError

        with open(os.path.join(location, self.dirname, 'entries')) as f:
            data = f.read()

        if (data.startswith('8')
                or data.startswith('9')
                or data.startswith('10')):
            # svn <= 1.6 flat-text entries format.
            data = list(map(str.splitlines, data.split('\n\x0c\n')))
            del data[0][0]  # get rid of the '8'
            url = data[0][3]
            revs = [int(d[9]) for d in data if len(d) > 9 and d[9]] + [0]
        elif data.startswith('<?xml'):
            # Pre-1.4 XML entries format.
            match = _svn_xml_url_re.search(data)
            if not match:
                raise ValueError('Badly formatted data: %r' % data)
            url = match.group(1)    # get repository URL
            revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0]
        else:
            try:
                # subversion >= 1.7 keeps metadata in a database; ask the
                # client instead of parsing files directly.
                xml = call_subprocess(
                    [self.cmd, 'info', '--xml', location],
                    show_stdout=False,
                )
                url = _svn_info_xml_url_re.search(xml).group(1)
                revs = [
                    int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)
                ]
            except InstallationError:
                url, revs = None, []

        if revs:
            rev = max(revs)
        else:
            rev = 0

        return url, rev

    def get_tag_revs(self, svn_tag_url):
        """Return [(tag, revision), ...] for every tag under `svn_tag_url`."""
        stdout = call_subprocess(
            [self.cmd, 'ls', '-v', svn_tag_url], show_stdout=False)
        results = []
        for line in stdout.splitlines():
            parts = line.split()
            rev = int(parts[0])
            tag = parts[-1].strip('/')
            results.append((tag, rev))
        return results

    def find_tag_match(self, rev, tag_revs):
        """Return the tag whose revision is the smallest one above `rev`."""
        best_match_rev = None
        best_tag = None
        for tag, tag_rev in tag_revs:
            if (tag_rev > rev and
                    (best_match_rev is None or best_match_rev > tag_rev)):
                # FIXME: Is best_match > tag_rev really possible?
                # or is it a sign something is wacky?
                best_match_rev = tag_rev
                best_tag = tag
        return best_tag

    def get_src_requirement(self, dist, location, find_tags=False):
        """Build an 'svn+URL@rev#egg=name' requirement string for `dist`."""
        repo = self.get_url(location)
        if repo is None:
            return None
        parts = repo.split('/')
        # FIXME: why not project name?
        egg_project_name = dist.egg_name().split('-', 1)[0]
        rev = self.get_revision(location)
        if parts[-2] in ('tags', 'tag'):
            # It's a tag, perfect!
            full_egg_name = '%s-%s' % (egg_project_name, parts[-1])
        elif parts[-2] in ('branches', 'branch'):
            # It's a branch :(
            full_egg_name = '%s-%s-r%s' % (dist.egg_name(), parts[-1], rev)
        elif parts[-1] == 'trunk':
            # Trunk :-/
            full_egg_name = '%s-dev_r%s' % (dist.egg_name(), rev)
            if find_tags:
                tag_url = '/'.join(parts[:-1]) + '/tags'
                tag_revs = self.get_tag_revs(tag_url)
                match = self.find_tag_match(rev, tag_revs)
                if match:
                    # BUG FIX: the message has two %s placeholders but only
                    # one argument was being supplied, which makes the
                    # logging module raise a formatting error; pass both
                    # the checkout location and the matched tag.
                    logger.info(
                        'trunk checkout %s seems to be equivalent to tag %s',
                        display_path(location),
                        match,
                    )
                    repo = '%s/%s' % (tag_url, match)
                    full_egg_name = '%s-%s' % (egg_project_name, match)
        else:
            # Don't know what it is
            logger.warning(
                'svn URL does not fit normal structure (tags/branches/trunk): '
                '%s',
                repo,
            )
            full_egg_name = '%s-dev_r%s' % (egg_project_name, rev)
        return 'svn+%s@%s#egg=%s' % (repo, rev, full_egg_name)
def get_rev_options(url, rev):
    """Build svn command-line options for checking out `url` at `rev`.

    Returns a list containing ``['-r', rev]`` when a revision was given,
    followed by ``--username``/``--password`` options extracted from the
    URL's network location, if present.
    """
    rev_options = ['-r', rev] if rev else []

    # urlsplit results have exposed .username/.password since Python 2.5,
    # so the old hasattr() fallback that re-parsed the netloc by hand was
    # unreachable on any interpreter this module supports; it has been
    # removed.
    r = urllib_parse.urlsplit(url)
    username, password = r.username, r.password

    if username:
        rev_options += ['--username', username]
    if password:
        rev_options += ['--password', password]
    return rev_options
vcs.register(Subversion)
|
MUNDO-platform/srccode | refs/heads/master | ckan-extensions/ckanext-dbstore/ckanext/dbstore/logic/__init__.py | 1 | __author__="Tomasz Janisiewicz <tomasz.janisiewicz@orange.com>"
__date__ ="$2015-02-11 13:06:33$" |
nopjmp/SickRage | refs/heads/master | sickbeard/properFinder.py | 5 | # coding=utf-8
# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: https://sickrage.github.io
# Git: https://github.com/SickRage/SickRage.git
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function, unicode_literals
import datetime
import operator
import re
import threading
import time
import traceback
import sickbeard
from sickbeard import db, helpers, logger
from sickbeard.common import cpu_presets, DOWNLOADED, Quality, SNATCHED, SNATCHED_PROPER
from sickbeard.name_parser.parser import InvalidNameException, InvalidShowException, NameParser
from sickbeard.search import pickBestResult, snatchEpisode
from sickrage.helper.exceptions import AuthException, ex
from sickrage.show.History import History
class ProperFinder(object): # pylint: disable=too-few-public-methods
def __init__(self):
self.amActive = False
def run(self, force=False): # pylint: disable=unused-argument
"""
Start looking for new propers
:param force: Start even if already running (currently not used, defaults to False)
"""
logger.log("Beginning the search for new propers")
self.amActive = True
propers = self._getProperList()
if propers:
self._downloadPropers(propers)
self._set_lastProperSearch(datetime.datetime.today().toordinal())
run_at = ""
if None is sickbeard.properFinderScheduler.start_time:
run_in = sickbeard.properFinderScheduler.lastRun + sickbeard.properFinderScheduler.cycleTime - datetime.datetime.now()
hours, remainder = divmod(run_in.seconds, 3600)
minutes, seconds = divmod(remainder, 60)
run_at = ", next check in approx. " + (
"{0:d}h, {1:d}m".format(hours, minutes) if hours > 0 else "{0:d}m, {1:d}s".format(minutes, seconds))
logger.log("Completed the search for new propers{0}".format(run_at))
self.amActive = False
def _getProperList(self): # pylint: disable=too-many-locals, too-many-branches, too-many-statements
"""
Walk providers for propers
"""
propers = {}
search_date = datetime.datetime.today() - datetime.timedelta(days=2)
# for each provider get a list of the
origThreadName = threading.currentThread().name
providers = [x for x in sickbeard.providers.sortedProviderList(sickbeard.RANDOMIZE_PROVIDERS) if x.is_active()]
for curProvider in providers:
threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]"
logger.log("Searching for any new PROPER releases from " + curProvider.name)
try:
curPropers = curProvider.find_propers(search_date)
except AuthException as e:
logger.log("Authentication error: " + ex(e), logger.WARNING)
continue
except Exception as e:
logger.log("Exception while searching propers in " + curProvider.name + ", skipping: " + ex(e), logger.ERROR)
logger.log(traceback.format_exc(), logger.DEBUG)
continue
# if they haven't been added by a different provider than add the proper to the list
for x in curPropers:
if not re.search(r'\b(proper|repack|real)\b', x.name, re.I):
logger.log('find_propers returned a non-proper, we have caught and skipped it.', logger.DEBUG)
continue
name = self._genericName(x.name)
if name not in propers:
logger.log("Found new proper: " + x.name, logger.DEBUG)
x.provider = curProvider
propers[name] = x
threading.currentThread().name = origThreadName
# take the list of unique propers and get it sorted by
sortedPropers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True)
finalPropers = []
for curProper in sortedPropers:
try:
parse_result = NameParser(False).parse(curProper.name)
except (InvalidNameException, InvalidShowException) as error:
logger.log("{0}".format(error), logger.DEBUG)
continue
if not parse_result.series_name:
continue
if not parse_result.episode_numbers:
logger.log(
"Ignoring " + curProper.name + " because it's for a full season rather than specific episode",
logger.DEBUG)
continue
logger.log(
"Successful match! Result " + parse_result.original_name + " matched to show " + parse_result.show.name,
logger.DEBUG)
# set the indexerid in the db to the show's indexerid
curProper.indexerid = parse_result.show.indexerid
# set the indexer in the db to the show's indexer
curProper.indexer = parse_result.show.indexer
# populate our Proper instance
curProper.show = parse_result.show
curProper.season = parse_result.season_number if parse_result.season_number is not None else 1
curProper.episode = parse_result.episode_numbers[0]
curProper.release_group = parse_result.release_group
curProper.version = parse_result.version
curProper.quality = Quality.nameQuality(curProper.name, parse_result.is_anime)
curProper.content = None
# filter release
bestResult = pickBestResult(curProper, parse_result.show)
if not bestResult:
logger.log("Proper " + curProper.name + " were rejected by our release filters.", logger.DEBUG)
continue
# only get anime proper if it has release group and version
if bestResult.show.is_anime and not bestResult.release_group and bestResult.version == -1:
logger.log("Proper " + bestResult.name + " doesn't have a release group and version, ignoring it",
logger.DEBUG)
continue
# check if we actually want this proper (if it's the right quality)
main_db_con = db.DBConnection()
sql_results = main_db_con.select("SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
[bestResult.indexerid, bestResult.season, bestResult.episode])
if not sql_results:
continue
# only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
oldStatus, oldQuality = Quality.splitCompositeStatus(int(sql_results[0][b"status"]))
if oldStatus not in (DOWNLOADED, SNATCHED) or oldQuality != bestResult.quality:
continue
# check if we actually want this proper (if it's the right release group and a higher version)
if bestResult.show.is_anime:
main_db_con = db.DBConnection()
sql_results = main_db_con.select(
"SELECT release_group, version FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
[bestResult.indexerid, bestResult.season, bestResult.episode])
oldVersion = int(sql_results[0][b"version"])
oldRelease_group = (sql_results[0][b"release_group"])
if -1 < oldVersion < bestResult.version:
logger.log("Found new anime v" + str(bestResult.version) + " to replace existing v" + str(oldVersion))
else:
continue
if oldRelease_group != bestResult.release_group:
logger.log(
"Skipping proper from release group: " + bestResult.release_group + ", does not match existing release group: " + oldRelease_group)
continue
# if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers
if bestResult.indexerid != -1 and (bestResult.indexerid, bestResult.season, bestResult.episode) not in {(p.indexerid, p.season, p.episode) for p in finalPropers}:
logger.log("Found a proper that we need: " + str(bestResult.name))
finalPropers.append(bestResult)
return finalPropers
def _downloadPropers(self, properList):
    """
    Download (snatch) each proper in properList.

    For every candidate proper this:
      1. verifies that the same episode was snatched/downloaded at the same
         quality within the last 30 days (otherwise the proper is skipped,
         since we would not know which quality to replace);
      2. skips the proper if an identically-named release is already in history;
      3. builds a search result from the proper's provider and snatches it
         with status SNATCHED_PROPER.

    :param properList: iterable of proper objects (as produced by
        _getProperList); each carries show/episode identifiers, quality,
        release_group, version, url and provider.
    """
    for curProper in properList:

        # Only consider history entries from the last 30 days.
        historyLimit = datetime.datetime.today() - datetime.timedelta(days=30)

        # make sure the episode has been downloaded before
        main_db_con = db.DBConnection()
        historyResults = main_db_con.select(
            "SELECT resource FROM history " +
            "WHERE showid = ? AND season = ? AND episode = ? AND quality = ? AND date >= ? " +
            "AND action IN (" + ",".join([str(x) for x in Quality.SNATCHED + Quality.DOWNLOADED]) + ")",
            [curProper.indexerid, curProper.season, curProper.episode, curProper.quality,
             historyLimit.strftime(History.date_format)])

        # if we didn't download this episode in the first place we don't know what quality to use for the proper so we can't do it
        if not historyResults:
            logger.log(
                "Unable to find an original history entry for proper " + curProper.name + " so I'm not downloading it.")
            continue

        else:

            # make sure that none of the existing history downloads are the same proper we're trying to download
            # (comparison is done on names normalized by _genericName with release-group junk stripped)
            clean_proper_name = self._genericName(helpers.remove_non_release_groups(curProper.name))
            isSame = False
            for curResult in historyResults:
                # if the result exists in history already we need to skip it
                if self._genericName(helpers.remove_non_release_groups(curResult[b"resource"])) == clean_proper_name:
                    isSame = True
                    break
            if isSame:
                logger.log("This proper is already in history, skipping it", logger.DEBUG)
                continue

            # get the episode object
            epObj = curProper.show.getEpisode(curProper.season, curProper.episode)

            # make the result object, copying metadata from the proper
            result = curProper.provider.get_result([epObj])
            result.show = curProper.show
            result.url = curProper.url
            result.name = curProper.name
            result.quality = curProper.quality
            result.release_group = curProper.release_group
            result.version = curProper.version
            result.content = curProper.content

            # snatch it
            snatchEpisode(result, SNATCHED_PROPER)
            # throttle between snatches according to the configured CPU preset
            time.sleep(cpu_presets[sickbeard.CPU_PRESET])
@staticmethod
def _genericName(name):
    """Normalize a release name for comparison: separators ('.', '-', '_')
    become spaces and the result is lower-cased."""
    normalized = name
    for separator in (".", "-", "_"):
        normalized = normalized.replace(separator, " ")
    return normalized.lower()
@staticmethod
def _set_lastProperSearch(when):
    """
    Record the time of the last proper search in the DB (info table).

    Inserts the info row if it does not exist yet, otherwise updates it.

    :param when: When the last proper search happened (stored stringified,
        matching the existing insert path).
    """
    logger.log("Setting the last Proper search in the DB to " + str(when), logger.DEBUG)

    main_db_con = db.DBConnection()
    sql_results = main_db_con.select("SELECT last_proper_search FROM info")

    if not sql_results:
        main_db_con.action("INSERT INTO info (last_backlog, last_indexer, last_proper_search) VALUES (?,?,?)",
                           [0, 0, str(when)])
    else:
        # Fix: use a parameterized query instead of string concatenation,
        # consistent with the INSERT branch above (avoids malformed SQL and
        # keeps all values going through the driver's binding).
        main_db_con.action("UPDATE info SET last_proper_search=?", [str(when)])
@staticmethod
def _get_lastProperSearch():
    """
    Read the date of the last proper search back from the DB.

    Returns datetime.date.fromordinal(1) when no info row exists or the
    stored value cannot be parsed.
    """
    main_db_con = db.DBConnection()
    rows = main_db_con.select("SELECT last_proper_search FROM info")

    try:
        return datetime.date.fromordinal(int(rows[0][b"last_proper_search"]))
    except Exception:
        # Missing/unparsable value: fall back to the earliest possible date.
        return datetime.date.fromordinal(1)
|
nfco/netforce | refs/heads/master | netforce_stock/netforce_stock/models/delivery_slot_capacity.py | 4 | from netforce.model import Model,fields,get_model
class DeliverySlotCapacity(Model):
    """Netforce model: per-weekday capacity limit attached to a delivery slot."""
    _name="delivery.slot.capacity"
    _string="Delivery Slot Capacity"
    _fields={
        # Parent slot; capacity rows are deleted together with their slot.
        "slot_id": fields.Many2One("delivery.slot","Delivery Slot",required=True,on_delete="cascade"),
        # Day of week the capacity applies to (stored as a string code "0".."6").
        "weekday": fields.Selection([["0","Monday"],["1","Tuesday"],["2","Wednesday"],["3","Thursday"],["4","Friday"],["5","Saturday"],["6","Sunday"]],"Weekday"),
        # Maximum number of deliveries for this slot/weekday.
        "capacity": fields.Integer("Capacity",required=True),
        # Free-text list of postal codes excluded from this capacity — format
        # presumably parsed by the consumer; not visible here, confirm at call site.
        "exclude_postal_codes": fields.Text("Exclude Postal Codes"),
    }

DeliverySlotCapacity.register()
|
divyamodi128/django_comments | refs/heads/master | comment/migrations/0001_initial.py | 1 | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-06-19 21:02
from __future__ import unicode_literals
import comment.models
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated initial migration: creates the Comment model with an FK
    # to posts.Post (CASCADE) and an FK to the user model that is reset to a
    # sentinel user on account deletion (models.SET(get_sentinel_user)).

    initial = True

    dependencies = [
        ('posts', '0001_initial'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Comment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('content', models.TextField()),
                # timestamp is set once on insert; updated refreshes on every save.
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('post', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='posts.Post')),
                ('user', models.ForeignKey(on_delete=models.SET(comment.models.get_sentinel_user), to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
|
qrealka/skia-hc | refs/heads/master | tools/test_gpuveto.py | 142 | #!/usr/bin/env python
# Copyright 2014 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Script to test out suitableForGpuRasterization (via gpuveto)"""
import argparse
import glob
import os
import re
import subprocess
import sys
# Set the PYTHONPATH to include the tools directory.
sys.path.append(
os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir))
import find_run_binary
def list_files(dir_or_file):
    """Expand *dir_or_file* into a list of file paths.

    The argument may contain glob patterns (useful on win32 where the shell
    does not expand them). Each match that is a directory contributes its
    immediate regular files (non-recursive); each match that is a file is
    included as-is.

    @param dir_or_file: either a directory, a file, or a glob pattern
    @returns a list of file paths
    """
    collected = []
    for globbedpath in glob.iglob(dir_or_file):  # useful on win32
        if os.path.isdir(globbedpath):
            children = (os.path.join(globbedpath, child)
                        for child in os.listdir(globbedpath))
            collected.extend(child for child in children if os.path.isfile(child))
        elif os.path.isfile(globbedpath):
            collected.append(globbedpath)
    return collected
def execute_program(args):
    """Execute a process and wait for it to complete.

    stderr is merged into stdout so the caller gets one combined stream.

    @param args: argument list passed straight into subprocess.Popen().
    @returns a tuple (returncode, combined stdout+stderr output)
    """
    proc = subprocess.Popen(args,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT)
    captured, _ = proc.communicate()
    return (proc.returncode, captured)
class GpuVeto(object):
    """Evaluate gpuveto's suitableForGpuRasterization verdicts.

    For each SKP, asks the gpuveto tool whether the picture is GPU-suitable,
    then times it with bench_pictures in both the 8888 (raster) and gpu
    configs, and tallies the comparison as a confusion matrix
    (TP/FP/TN/FN plus an 'indeterminate' bucket for near-ties).
    """

    def __init__(self):
        # Locate the two helper binaries in the build output.
        self.bench_pictures = find_run_binary.find_path_to_program(
            'bench_pictures')
        sys.stdout.write('Running: %s\n' % (self.bench_pictures))
        self.gpuveto = find_run_binary.find_path_to_program('gpuveto')
        assert os.path.isfile(self.bench_pictures)
        assert os.path.isfile(self.gpuveto)

        # Confusion-matrix counters, updated by process_skp().
        self.indeterminate = 0   # raster and gpu times within tolerance of each other
        self.truePositives = 0   # predicted suitable, gpu actually faster
        self.falsePositives = 0  # predicted suitable, raster actually faster
        self.trueNegatives = 0   # predicted unsuitable, raster actually faster
        self.falseNegatives = 0  # predicted unsuitable, gpu actually faster

    def process_skps(self, dir_or_file):
        """Classify every SKP in *dir_or_file* (a list of paths) and print totals."""
        for skp in enumerate(dir_or_file):
            self.process_skp(skp[1])

        sys.stdout.write('TP %d FP %d TN %d FN %d IND %d\n' % (self.truePositives,
                                                               self.falsePositives,
                                                               self.trueNegatives,
                                                               self.falseNegatives,
                                                               self.indeterminate))

    def process_skp(self, skp_file):
        """Classify a single SKP and update the counters.

        Silently skips the file (records nothing) on any subprocess failure,
        unparsable timer output, or a zero gpu time.
        """
        assert os.path.isfile(skp_file)
        #print skp_file

        # run gpuveto on the skp
        args = [self.gpuveto, '-r', skp_file]
        returncode, output = execute_program(args)
        if (returncode != 0):
            return

        # 'unsuitable' must be tested first: 'suitable' is a substring of it.
        if ('unsuitable' in output):
            suitable = False
        else:
            assert 'suitable' in output
            suitable = True

        # run raster config
        args = [self.bench_pictures, '-r', skp_file,
                '--repeat', '20',
                '--timers', 'w',
                '--config', '8888']
        returncode, output = execute_program(args)
        if (returncode != 0):
            return

        # Expect exactly one wall-timer float in the bench output.
        matches = re.findall('[\d]+\.[\d]+', output)
        if len(matches) != 1:
            return

        rasterTime = float(matches[0])

        # run gpu config
        args2 = [self.bench_pictures, '-r', skp_file,
                 '--repeat', '20',
                 '--timers', 'w',
                 '--config', 'gpu']
        returncode, output = execute_program(args2)
        if (returncode != 0):
            return

        matches = re.findall('[\d]+\.[\d]+', output)
        if len(matches) != 1:
            return

        gpuTime = float(matches[0])

        # happens if page is too big it will not render
        if 0 == gpuTime:
            return

        # Times within +/-5% of the gpu time count as a tie (indeterminate).
        tolerance = 0.05
        tol_range = tolerance * gpuTime

        if rasterTime > gpuTime - tol_range and rasterTime < gpuTime + tol_range:
            result = "NONE"
            self.indeterminate += 1
        elif suitable:
            if gpuTime < rasterTime:
                self.truePositives += 1
                result = "TP"
            else:
                self.falsePositives += 1
                result = "FP"
        else:
            if gpuTime < rasterTime:
                self.falseNegatives += 1
                result = "FN"
            else:
                self.trueNegatives += 1
                result = "TN"

        sys.stdout.write('%s: gpuveto: %d raster %.2f gpu: %.2f Result: %s\n' % (
            skp_file, suitable, rasterTime, gpuTime, result))
def main(main_argv):
    """Parse command-line options and run the gpuveto comparison.

    @param main_argv: argument list to parse (e.g. sys.argv[1:]). Historically
        this parameter was accepted but silently ignored; it is now honored
        when it is an actual list. Any non-list value (the old callers passed
        assorted junk) falls back to parsing sys.argv, preserving the old
        behavior.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--skp_path',
                        help='Path to the SKP(s). Can either be a directory ' \
                             'containing SKPs or a single SKP.',
                        required=True)

    # Fix: use main_argv instead of unconditionally ignoring it, so the
    # function is reusable/testable without mutating sys.argv.
    args = parser.parse_args(main_argv if isinstance(main_argv, list) else None)
    GpuVeto().process_skps(list_files(args.skp_path))
if __name__ == '__main__':
    # Fix: pass the argument list (argv minus the program name) rather than
    # sys.argv[1], which raised IndexError when the script was run with no
    # arguments and handed main() a single string instead of a list.
    sys.exit(main(sys.argv[1:]))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.