gt stringclasses 1 value | context stringlengths 2.49k 119k |
|---|---|
#!/usr/bin/env python
import requests
from raco.language.myrialang import *
from raco.catalog import Catalog
from raco import scheme
from raco.algebra import DEFAULT_CARDINALITY
from threading import Lock
from raco.myrial import parser as MyrialParser
from raco.myrial import interpreter as MyrialInterpreter
from raco.relation_key import RelationKey
from raco import expression
from raco.expression import UnnamedAttributeRef
from raco import types
import myria
import json
nar = UnnamedAttributeRef
def pretty_json(obj):
    """Serialize *obj* as deterministically ordered, 4-space-indented JSON.

    Keys are sorted so the output is stable across runs; the (',', ':')
    separators drop the space after ':' that json.dumps adds by default.
    """
    compact_separators = (',', ':')
    return json.dumps(obj, indent=4, sort_keys=True,
                      separators=compact_separators)
# We need a (global) lock on the Myrial parser because yacc is not Threadsafe.
# .. see uwescience/datalogcompiler#39
# .. (https://github.com/uwescience/datalogcompiler/issues/39)
myrial_parser_lock = Lock()
myrial_parser = MyrialParser.Parser()
# need to be initiated
connection = None
# constants
NANO_IN_ONE_SEC = 1000000000
def get_plan(query, language, plan_type, connection, phys_algebra=None):
    """Parse a MyriaL query and return its logical or physical plan.

    Arguments:
        query - MyriaL query text
        language - must be "myrial" (only MyriaL is supported)
        plan_type - 'logical' or 'physical'
        connection - myria connection used to build the catalog
        phys_algebra - physical algebra; defaults to a fresh
            MyriaLeftDeepTreeAlgebra per call
    Raises NotImplementedError for an unknown plan_type.
    """
    # only support MyriaL
    assert language == "myrial"
    if phys_algebra is None:
        # Instantiated per call: the old `phys_algebra=MyriaLeftDeepTreeAlgebra()`
        # default was evaluated once at def time and shared across all calls.
        phys_algebra = MyriaLeftDeepTreeAlgebra()
    # We need a (global) lock on the Myrial parser because yacc
    # .. is not Threadsafe and App Engine uses multiple threads.
    with myrial_parser_lock:
        parsed = myrial_parser.parse(query)
    processor = MyrialInterpreter.StatementProcessor(
        MyriaCatalog(connection))
    processor.evaluate(parsed)
    if plan_type == 'logical':
        return processor.get_logical_plan()
    if plan_type == 'physical':
        return processor.get_physical_plan(phys_algebra)
    raise NotImplementedError('Myria plan type %s' % plan_type)
def get_physical_algebra(phys_algebra_str, connection):
    """Return the physical algebra instance named by *phys_algebra_str*.

    Known names: RS_HJ, HC_HJ, BR_HJ, RS_LFJ, HC_LFJ, BR_LFJ.
    Raises ValueError for anything else.
    """
    catalog = MyriaCatalog(connection)
    # Lazily-constructed dispatch table: only the selected algebra is built.
    factories = {
        'RS_HJ': lambda: MyriaLeftDeepTreeAlgebra(),
        'HC_HJ': lambda: MyriaHyperCubeLeftDeepTreeJoinAlgebra(catalog),
        'BR_HJ': lambda: MyriaBroadcastLeftDeepTreeJoinAlgebra(catalog),
        'RS_LFJ': lambda: MyriaRegularShuffleLeapFrogAlgebra(),
        'HC_LFJ': lambda: MyriaHyperCubeAlgebra(catalog),
        'BR_LFJ': lambda: MyriaBroadCastLeapFrogJoinAlgebra(catalog),
    }
    try:
        make_algebra = factories[phys_algebra_str]
    except KeyError:
        raise ValueError("{} is not valid.".format(phys_algebra_str))
    return make_algebra()
class MyriaCatalog(Catalog):
    """raco Catalog implementation backed by a live Myria connection."""

    def __init__(self, connection):
        # myria.MyriaConnection used for all metadata lookups; may be None,
        # in which case every lookup raises RuntimeError.
        self.connection = connection

    @staticmethod
    def _relation_args(rel_key):
        """Build the REST argument dict that identifies *rel_key*."""
        return {
            'userName': rel_key.user,
            'programName': rel_key.program,
            'relationName': rel_key.relation,
        }

    def get_scheme(self, rel_key):
        """Return the scheme (column names and types) of *rel_key*.

        Raises RuntimeError if there is no connection, ValueError if the
        relation is not in the catalog.
        """
        if not self.connection:
            raise RuntimeError(
                "no schema for relation %s because no connection" % rel_key)
        try:
            dataset_info = self.connection.dataset(
                self._relation_args(rel_key))
        except myria.MyriaError:
            raise ValueError('No relation {} in the catalog'.format(rel_key))
        schema = dataset_info['schema']
        return scheme.Scheme(zip(schema['columnNames'], schema['columnTypes']))

    def get_num_servers(self):
        """Return the number of currently alive Myria workers."""
        if not self.connection:
            raise RuntimeError("no connection.")
        return len(self.connection.workers_alive())

    def num_tuples(self, rel_key):
        """Return the cardinality of *rel_key*.

        Falls back to DEFAULT_CARDINALITY when the server reports -1
        (datasets loaded before tuple counts were recorded).
        """
        if not self.connection:
            raise RuntimeError(
                "no cardinality of %s because no connection" % rel_key)
        try:
            dataset_info = self.connection.dataset(
                self._relation_args(rel_key))
        except myria.MyriaError:
            # Was a bare ValueError(rel_key); message now matches get_scheme.
            raise ValueError('No relation {} in the catalog'.format(rel_key))
        num_tuples = dataset_info['numTuples']
        assert isinstance(num_tuples, int)
        # Workaround: numTuples is -1 if the dataset is old.
        if num_tuples != -1:
            assert num_tuples >= 0
            return num_tuples
        return DEFAULT_CARDINALITY
def execute_query(query, workers="ALL"):
"""
query - (language, phyiscal_algebra, profiling_mode, query_str, query_name)
"""
language, phys_algebra, profilingMode, query_str, query_name = query
print "executing query {}".format(query_name)
if connection is None:
raise Exception("connection is not initiated.")
physical_algebra = get_physical_algebra(phys_algebra, connection)
try:
# Generate logical plan
logical_plan = get_plan(
query_str, language, "logical", connection, physical_algebra)
# Generate physical plan
physical_plan = get_plan(
query_str, language, "physical", connection, physical_algebra)
# compile to json
compiled = compile_to_json(
query_str, logical_plan, physical_plan, language)
compiled['profilingMode'] = profilingMode
if workers != "ALL":
for fragment in compiled["plan"]["fragments"]:
fragment["workers"] = workers
# execute the query util it is finished (or errored)
query_status = connection.execute_query(compiled)
if query_status["status"] == 'SUCCESS':
return 'success', query_status
else:
return query_status["status"], query_status
except myria.MyriaError as e:
print "myrial error: {}".format(e)
return 'fail', 'MyriaError'
except requests.ConnectionError as e:
print e
return 'fail', 'ConnectionError'
def execute_json(json_query):
    """Execute an already-compiled JSON query plan.

    Argument:
        json_query - json query plan (as produced by compile_to_json)
    Returns the same (status, details) pairs as execute_query.
    Raises Exception if init_connection() has not been called.
    """
    # check connections
    if connection is None:
        raise Exception("connection is not initiated.")
    try:
        # execute the query until it is finished (or errored)
        query_status = connection.execute_query(json_query)
        if query_status["status"] == 'SUCCESS':
            return 'success', query_status
        return query_status["status"], query_status
    except myria.MyriaError as e:
        # print() with one argument is valid in both Python 2 and 3;
        # the old print statements were Python-2-only.
        print("myrial error: {}".format(e))
        return 'fail', 'MyriaError'
    except requests.ConnectionError as e:
        print(e)
        return 'fail', 'ConnectionError'
def execute_physical_plan(phys_str, logical_plan="LP", raw_query='query'):
    """Evaluate a physical-plan expression string and execute it.

    Arguments:
        phys_str - Python expression string that evaluates to a physical plan
        logical_plan, raw_query - labels embedded in the compiled JSON
    Returns the (status, details) pair from execute_json.

    SECURITY NOTE: phys_str is passed to eval() and can run arbitrary code;
    only call this with trusted, locally-generated plan strings.
    """
    physical_plan = eval(phys_str)
    json_plan = compile_to_json(
        str(raw_query), str(logical_plan), physical_plan)
    json_plan["profilingMode"] = "QUERY"
    # print() with one argument works identically under Python 2 and 3.
    print(pretty_json(json_plan))
    return execute_json(json_plan)
def init_connection(hostname, port):
    # Initialize the module-level `connection` global that execute_query(),
    # execute_json() and execute_physical_plan() all depend on. Must be
    # called once before any of them.
    global connection
    connection = myria.MyriaConnection(hostname=hostname, port=port)
| |
from __future__ import print_function
from __future__ import absolute_import
import os
import re
import sys
import traceback
from .printer import print_err, colors
from typing import cast, Any, Callable, Dict, List, Optional, Tuple
def build_custom_checkers(by_lang):
# type: (Dict[str, List[str]]) -> Tuple[Callable[[], bool], Callable[[], bool]]
RuleList = List[Dict[str, Any]]
def custom_check_file(fn, identifier, rules, skip_rules=None, max_length=None):
    # type: (str, str, RuleList, Optional[Any], Optional[int]) -> bool
    """Lint the file *fn* against *rules*; return True if any rule fired.

    identifier - short tag (e.g. 'py', 'js') used to label error output
    skip_rules - regexes; a line matching any of them is exempt from rules
    max_length - if set, also flag lines longer than this many characters
    """
    failed = False
    color = next(colors)

    # Pass 1: collect the lines we will actually check, as tuples of
    # (index, raw line, newline-stripped line, fully-stripped line).
    line_tups = []
    for i, line in enumerate(open(fn)):
        line_newline_stripped = line.strip('\n')
        line_fully_stripped = line_newline_stripped.strip()
        skip = False
        for rule in skip_rules or []:
            if re.match(rule, line):
                skip = True
        # Lines marked `# nolint` are exempt from every rule.
        if line_fully_stripped.endswith(' # nolint'):
            continue
        if skip:
            continue
        tup = (i, line, line_newline_stripped, line_fully_stripped)
        line_tups.append(tup)

    # Narrow the rule list down to those applicable to this file, honoring
    # each rule's 'exclude' set and 'include_only' substring filter.
    rules_to_apply = []
    fn_dirname = os.path.dirname(fn)
    for rule in rules:
        exclude_list = rule.get('exclude', set())
        if fn in exclude_list or fn_dirname in exclude_list:
            continue
        if rule.get("include_only"):
            found = False
            for item in rule.get("include_only", set()):
                if item in fn:
                    found = True
            if not found:
                continue
        rules_to_apply.append(rule)

    for rule in rules_to_apply:
        # Per-line exclusions for this file; entries are consumed as they
        # match so that stale exclusions can be reported afterwards.
        exclude_lines = {
            line for
            (exclude_fn, line) in rule.get('exclude_line', set())
            if exclude_fn == fn
        }
        pattern = rule['pattern']
        for (i, line, line_newline_stripped, line_fully_stripped) in line_tups:
            if line_fully_stripped in exclude_lines:
                exclude_lines.remove(line_fully_stripped)
                continue
            try:
                # By default the pattern runs against the fully stripped
                # line; rules with 'strip': '\n' keep leading whitespace
                # (needed by e.g. indentation/trailing-whitespace rules).
                line_to_check = line_fully_stripped
                if rule.get('strip') is not None:
                    if rule['strip'] == '\n':
                        line_to_check = line_newline_stripped
                    else:
                        raise Exception("Invalid strip rule")
                if re.search(pattern, line_to_check):
                    print_err(identifier, color, '{} at {} line {}:'.format(
                        rule['description'], fn, i+1))
                    print_err(identifier, color, line)
                    failed = True
            except Exception:
                # A broken regex (or strip rule) should not abort the whole
                # lint run; report it and keep going.
                print("Exception with %s at %s line %s" % (rule['pattern'], fn, i+1))
                traceback.print_exc()
        if exclude_lines:
            # Any exclusion not consumed above no longer matches the file.
            print('Please remove exclusions for file %s: %s' % (fn, exclude_lines))

    # Pass 2: line-length check plus final-newline check.
    lastLine = None
    for (i, line, line_newline_stripped, line_fully_stripped) in line_tups:
        if isinstance(line, bytes):
            # Measure in characters, not bytes, for non-ASCII content.
            line_length = len(line.decode("utf-8"))
        else:
            line_length = len(line)
        if (max_length is not None and line_length > max_length and
                '# type' not in line and 'test' not in fn and 'example' not in fn and
                not re.match("\[[ A-Za-z0-9_:,&()-]*\]: http.*", line) and
                not re.match("`\{\{ external_api_uri_subdomain \}\}[^`]+`", line) and
                "#ignorelongline" not in line and 'migrations' not in fn):
            print("Line too long (%s) at %s line %s: %s" % (len(line), fn, i+1, line_newline_stripped))
            failed = True
        lastLine = line
    # The final line of line_tups still contains '\n' iff the file ends
    # with a newline.
    if lastLine and ('\n' not in lastLine):
        print("No newline at the end of file. Fix with `sed -i '$a\\' %s`" % (fn,))
        failed = True

    return failed
# Generic whitespace rules shared by most rule sets.
# This linter should be first since bash_rules depends on it.
# Regex patterns use raw strings: '\s' in a plain literal is a deprecated
# unknown escape (a future SyntaxError), though it matched the same text.
whitespace_rules = [
    {'pattern': r'\s+$',
     'strip': '\n',
     'description': 'Fix trailing whitespace'},
    {'pattern': '\t',
     'strip': '\n',
     'exclude': {'zerver/lib/bugdown/codehilite.py',
                 'tools/travis/success-http-headers.txt'},
     'description': 'Fix tab-based whitespace'},
]  # type: RuleList
# Markdown variant of whitespace_rules: drops the generic trailing-whitespace
# rule (the list() wrapper around the comprehension was redundant) and
# replaces it with a markdown-aware one.
markdown_whitespace_rules = [
    rule for rule in whitespace_rules if rule['pattern'] != r'\s+$'
] + [
    # Two spaces trailing a line with other content is okay--it's a markdown line break.
    # This rule finds one space trailing a non-space, three or more trailing spaces, and
    # spaces on an empty line.
    {'pattern': r'((?<!\s)\s$)|(\s\s\s+$)|(^\s+$)',
     'strip': '\n',
     'description': 'Fix trailing whitespace'},
    {'pattern': '^#+[A-Za-z0-9]',
     'strip': '\n',
     'description': 'Missing space after # in heading'},
]  # type: RuleList
js_rules = cast(RuleList, [
{'pattern': '[^_]function\(',
'description': 'The keyword "function" should be followed by a space'},
{'pattern': '.*blueslip.warning\(.*',
'description': 'The module blueslip has no function warning, try using blueslip.warn'},
{'pattern': '[)]{$',
'description': 'Missing space between ) and {'},
{'pattern': '["\']json/',
'description': 'Relative URL for JSON route not supported by i18n'},
# This rule is constructed with + to avoid triggering on itself
{'pattern': " =" + '[^ =>~"]',
'description': 'Missing whitespace after "="'},
{'pattern': '^[ ]*//[A-Za-z0-9]',
'description': 'Missing space after // in comment'},
{'pattern': 'if[(]',
'description': 'Missing space between if and ('},
{'pattern': 'else{$',
'description': 'Missing space between else and {'},
{'pattern': '^else {$',
'description': 'Write JS else statements on same line as }'},
{'pattern': '^else if',
'description': 'Write JS else statements on same line as }'},
{'pattern': 'console[.][a-z]',
'exclude': set(['static/js/blueslip.js',
'frontend_tests/zjsunit',
'frontend_tests/casper_lib/common.js',
'frontend_tests/node_tests',
'static/js/debug.js']),
'description': 'console.log and similar should not be used in webapp'},
{'pattern': 'i18n[.]t',
'include_only': set(['static/js/portico/']),
'description': 'i18n.t is not available in portico pages yet'},
{'pattern': '[.]text\(["\'][a-zA-Z]',
'description': 'Strings passed to $().text should be wrapped in i18n.t() for internationalization'},
{'pattern': 'compose_error\(["\']',
'description': 'Argument to compose_error should be a literal string enclosed '
'by i18n.t()'},
{'pattern': 'ui.report_success\(',
'description': 'Deprecated function, use ui_report.success.'},
{'pattern': 'report.success\(["\']',
'description': 'Argument to report_success should be a literal string enclosed '
'by i18n.t()'},
{'pattern': 'ui.report_error\(',
'description': 'Deprecated function, use ui_report.error.'},
{'pattern': 'report.error\(["\']',
'description': 'Argument to report_error should be a literal string enclosed '
'by i18n.t()'},
]) + whitespace_rules
python_rules = cast(RuleList, [
{'pattern': '^(?!#)@login_required',
'description': '@login_required is unsupported; use @zulip_login_required'},
{'pattern': '".*"%\([a-z_].*\)?$',
'description': 'Missing space around "%"'},
{'pattern': "'.*'%\([a-z_].*\)?$",
'exclude': set(['analytics/lib/counts.py',
'analytics/tests/test_counts.py',
]),
'exclude_line': set([
('zerver/views/users.py',
"return json_error(_(\"Email '%(email)s' not allowed for realm '%(realm)s'\") %"),
('zproject/settings.py',
"'format': '%(asctime)s %(levelname)-8s %(message)s'"),
('static/templates/settings/bot-settings.handlebars',
"'https://hostname.example.com/bots/followup'"),
]),
'description': 'Missing space around "%"'},
# This rule is constructed with + to avoid triggering on itself
{'pattern': " =" + '[^ =>~"]',
'description': 'Missing whitespace after "="'},
{'pattern': '":\w[^"]*$',
'description': 'Missing whitespace after ":"'},
{'pattern': "':\w[^']*$",
'description': 'Missing whitespace after ":"'},
{'pattern': "^\s+[#]\w",
'strip': '\n',
'description': 'Missing whitespace after "#"'},
{'pattern': "assertEquals[(]",
'description': 'Use assertEqual, not assertEquals (which is deprecated).'},
{'pattern': "== None",
'description': 'Use `is None` to check whether something is None'},
{'pattern': "type:[(]",
'description': 'Missing whitespace after ":" in type annotation'},
{'pattern': "# type [(]",
'description': 'Missing : after type in type annotation'},
{'pattern': "#type",
'description': 'Missing whitespace after "#" in type annotation'},
{'pattern': 'if[(]',
'description': 'Missing space between if and ('},
{'pattern': ", [)]",
'description': 'Unnecessary whitespace between "," and ")"'},
{'pattern': "% [(]",
'description': 'Unnecessary whitespace between "%" and "("'},
# This next check could have false positives, but it seems pretty
# rare; if we find any, they can be added to the exclude list for
# this rule.
{'pattern': ' % [a-zA-Z0-9_.]*\)?$',
'exclude_line': set([
('tools/tests/test_template_parser.py', '{% foo'),
]),
'description': 'Used % comprehension without a tuple'},
{'pattern': '.*%s.* % \([a-zA-Z0-9_.]*\)$',
'description': 'Used % comprehension without a tuple'},
{'pattern': 'django.utils.translation',
'include_only': set(['test/']),
'description': 'Test strings should not be tagged for translationx'},
{'pattern': 'json_success\({}\)',
'description': 'Use json_success() to return nothing'},
# To avoid json_error(_variable) and json_error(_(variable))
{'pattern': '\Wjson_error\(_\(?\w+\)',
'exclude': set(['zerver/tests']),
'description': 'Argument to json_error should be a literal string enclosed by _()'},
{'pattern': '\Wjson_error\([\'"].+[),]$',
'exclude': set(['zerver/tests']),
'exclude_line': set([
# We don't want this string tagged for translation.
('zerver/views/compatibility.py', 'return json_error("Client is too old")'),
]),
'description': 'Argument to json_error should a literal string enclosed by _()'},
# To avoid JsonableError(_variable) and JsonableError(_(variable))
{'pattern': '\WJsonableError\(_\(?\w.+\)',
'exclude': set(['zerver/tests']),
'description': 'Argument to JsonableError should be a literal string enclosed by _()'},
{'pattern': '\WJsonableError\(["\'].+\)',
'exclude': set(['zerver/tests']),
'description': 'Argument to JsonableError should be a literal string enclosed by _()'},
{'pattern': '([a-zA-Z0-9_]+)=REQ\([\'"]\\1[\'"]',
'description': 'REQ\'s first argument already defaults to parameter name'},
{'pattern': 'self\.client\.(get|post|patch|put|delete)',
'exclude': set(['zilencer/tests.py']),
'description': \
'''Do not call self.client directly for put/patch/post/get.
See WRAPPER_COMMENT in test_helpers.py for details.
'''},
# Directly fetching Message objects in e.g. views code is often a security bug.
{'pattern': '[^r][M]essage.objects.get',
'exclude': set(["zerver/tests", "zerver/worker/queue_processors.py"]),
'description': 'Please use access_message() to fetch Message objects',
},
{'pattern': '[S]tream.objects.get',
'include_only': set(["zerver/views/"]),
'description': 'Please use access_stream_by_*() to fetch Stream objects',
},
{'pattern': 'get_stream[(]',
'include_only': set(["zerver/views/", "zerver/lib/actions.py"]),
# messages.py needs to support accessing invite-only streams
# that you are no longer subscribed to, so need get_stream.
'exclude': set(['zerver/views/messages.py']),
'exclude_line': set([
# This is a check for whether a stream rename is invalid because it already exists
('zerver/lib/actions.py', 'get_stream(new_name, stream.realm)'),
# This one in check_message is kinda terrible, since it's
# how most instances are written, but better to exclude something than nothing
('zerver/lib/actions.py', 'stream = get_stream(stream_name, realm)'),
('zerver/lib/actions.py', 'get_stream(signups_stream, admin_realm)'),
]),
'description': 'Please use access_stream_by_*() to fetch Stream objects',
},
{'pattern': '[S]tream.objects.filter',
'include_only': set(["zerver/views/"]),
'description': 'Please use access_stream_by_*() to fetch Stream objects',
},
{'pattern': '^from (zerver|analytics|confirmation)',
'include_only': set(["/migrations/"]),
'exclude': set(['zerver/migrations/0032_verify_all_medium_avatar_images.py',
'zerver/migrations/0041_create_attachments_for_old_messages.py',
'zerver/migrations/0060_move_avatars_to_be_uid_based.py']),
'description': "Don't import models or other code in migrations; see docs/schema-migrations.md",
},
{'pattern': 'datetime[.](now|utcnow)',
'include_only': set(["zerver/", "analytics/"]),
'description': "Don't use datetime in backend code.\n"
"See https://zulip.readthedocs.io/en/latest/code-style.html#naive-datetime-objects",
},
{'pattern': 'render_to_response\(',
'description': "Use render() instead of render_to_response().",
},
# This rule might give false positives in virtualenv setup files which should be excluded,
# and comments which should be rewritten to avoid use of "python2", "python3", etc.
{'pattern': 'python[23]',
'exclude': set(['tools/lib/provision.py',
'tools/setup/setup_venvs.py',
'scripts/lib/setup_venv.py']),
'description': 'Explicit python invocations should not include a version'},
{'pattern': '(^|\s)open\s*\(',
'description': 'open() should not be used in Zulip\'s bots. Use functions'
' provided by the bots framework to access the filesystem.',
'include_only': set(['api/bots/']),
'exclude': set(['api/bots/john/john.py'])},
]) + whitespace_rules
bash_rules = [
{'pattern': '#!.*sh [-xe]',
'description': 'Fix shebang line with proper call to /usr/bin/env for Bash path, change -x|-e switches'
' to set -x|set -e'},
] + whitespace_rules[0:1] # type: RuleList
css_rules = cast(RuleList, [
{'pattern': '^[^:]*:\S[^:]*;$',
'description': "Missing whitespace after : in CSS"},
{'pattern': '[a-z]{',
'description': "Missing whitespace before '{' in CSS."},
{'pattern': 'https://',
'description': "Zulip CSS should have no dependencies on external resources"},
{'pattern': '^[ ][ ][a-zA-Z0-9]',
'description': "Incorrect 2-space indentation in CSS",
'exclude': set(['static/third/thirdparty-fonts.css']),
'strip': '\n'},
{'pattern': '{\w',
'description': "Missing whitespace after '{' in CSS (should be newline)."},
{'pattern': ' thin[; ]',
'description': "thin CSS attribute is under-specified, please use 1px."},
{'pattern': ' medium[; ]',
'description': "medium CSS attribute is under-specified, please use pixels."},
{'pattern': ' thick[; ]',
'description': "thick CSS attribute is under-specified, please use pixels."},
]) + whitespace_rules # type: RuleList
prose_style_rules = [
{'pattern': '[^\/\#\-\"]([jJ]avascript)', # exclude usage in hrefs/divs
'description': "javascript should be spelled JavaScript"},
{'pattern': '[^\/\-\.\"\'\_\=\>]([gG]ithub)[^\.\-\_\"\<]', # exclude usage in hrefs/divs
'description': "github should be spelled GitHub"},
{'pattern': '[oO]rganisation', # exclude usage in hrefs/divs
'description': "Organization is spelled with a z"},
{'pattern': '!!! warning',
'description': "!!! warning is invalid; it's spelled '!!! warn'"},
] # type: RuleList
html_rules = whitespace_rules + prose_style_rules + [
{'pattern': 'placeholder="[^{]',
'description': "`placeholder` value should be translatable.",
'exclude_line': [('templates/zerver/register.html', 'placeholder="acme"'),
('templates/zerver/register.html', 'placeholder="Acme"'),
('static/templates/settings/realm-domains-modal.handlebars',
'<td><input type="text" class="new-realm-domain" placeholder="acme.com"></input></td>')],
'exclude': set(["static/templates/settings/emoji-settings-admin.handlebars",
"static/templates/settings/realm-filter-settings-admin.handlebars",
"static/templates/settings/bot-settings.handlebars"])},
{'pattern': "placeholder='[^{]",
'description': "`placeholder` value should be translatable."},
{'pattern': 'script src="http',
'description': "Don't directly load dependencies from CDNs. See docs/front-end-build-process.md"},
{'pattern': "title='[^{]",
'description': "`title` value should be translatable."},
{'pattern': 'title="[^{\:]',
'exclude_line': set([
('templates/zerver/markdown_help.html',
'<td><img alt=":heart:" class="emoji" src="/static/generated/emoji/images/emoji/heart.png" title=":heart:" /></td>')
]),
'exclude': set(["templates/zerver/emails"]),
'description': "`title` value should be translatable."},
{'pattern': '\Walt=["\'][^{"\']',
'description': "alt argument should be enclosed by _() or it should be an empty string.",
'exclude': set(['static/templates/settings/display-settings.handlebars',
'templates/zerver/keyboard_shortcuts.html',
'templates/zerver/markdown_help.html']),
},
{'pattern': '\Walt=["\']{{ ?["\']',
'description': "alt argument should be enclosed by _().",
},
] # type: RuleList
handlebars_rules = html_rules + [
{'pattern': "[<]script",
'description': "Do not use inline <script> tags here; put JavaScript in static/js instead."},
{'pattern': "{{t '.*' }}[\.\?!]",
'description': "Period should be part of the translatable string."},
{'pattern': '{{t ".*" }}[\.\?!]',
'description': "Period should be part of the translatable string."},
{'pattern': "{{/tr}}[\.\?!]",
'description': "Period should be part of the translatable string."},
]
jinja2_rules = html_rules + [
{'pattern': "{% endtrans %}[\.\?!]",
'description': "Period should be part of the translatable string."},
{'pattern': "{{ _(.+) }}[\.\?!]",
'description': "Period should be part of the translatable string."},
]
json_rules = [] # type: RuleList # fix newlines at ends of files
# It is okay that json_rules is empty, because the empty list
# ensures we'll still check JSON files for whitespace.
markdown_rules = markdown_whitespace_rules + prose_style_rules + [
{'pattern': '\[(?P<url>[^\]]+)\]\((?P=url)\)',
'description': 'Linkified markdown URLs should use cleaner <http://example.com> syntax.'}
]
help_markdown_rules = markdown_rules + [
{'pattern': '[a-z][.][A-Z]',
'description': "Likely missing space after end of sentence"},
{'pattern': '[rR]ealm',
'description': "Realms are referred to as Organizations in user-facing docs."},
]
txt_rules = whitespace_rules
def check_custom_checks_py():
    # type: () -> bool
    """Run the Python rule set over every .py file; the linter itself
    (custom_check.py) is exempt. Returns True if any file failed."""
    failed = False
    python_files = (
        fn for fn in by_lang['py'] if 'custom_check.py' not in fn)
    for path in python_files:
        failed = custom_check_file(
            path, 'py', python_rules, max_length=140) or failed
    return failed
def check_custom_checks_nonpy():
    # type: () -> bool
    """Run the language-specific rule sets over all non-Python files.

    Returns True if any file failed any check.
    """
    failed = False

    # Languages checked uniformly: (by_lang key, identifier, rule set).
    # Order matches the historical sequence of per-language loops.
    uniform_checks = [
        ('js', 'js', js_rules),
        ('sh', 'sh', bash_rules),
        ('css', 'css', css_rules),
        ('handlebars', 'handlebars', handlebars_rules),
        ('html', 'html', jinja2_rules),
        ('json', 'json', json_rules),
    ]
    for lang_key, identifier, lang_rules in uniform_checks:
        for fn in by_lang[lang_key]:
            if custom_check_file(fn, identifier, lang_rules):
                failed = True

    # Markdown needs per-file choices: a length limit (unless the doc is on
    # the exclude list) and the help-specific rule set for help pages.
    markdown_docs_length_exclude = {
        "api/bots/converter/readme.md",
        "docs/bots-guide.md",
        "docs/dev-env-first-time-contributors.md",
        "docs/webhook-walkthrough.md",
        "docs/life-of-a-request.md",
        "docs/logging.md",
        "docs/migration-renumbering.md",
        "docs/readme-symlink.md",
        "README.md",
        "zerver/webhooks/helloworld/doc.md",
        "zerver/webhooks/trello/doc.md",
        "templates/zerver/integrations/perforce.md",
    }
    for fn in by_lang['md']:
        max_length = None if fn in markdown_docs_length_exclude else 120
        if fn.startswith("templates/zerver/help"):
            rules = help_markdown_rules
        else:
            rules = markdown_rules
        if custom_check_file(fn, 'md', rules, max_length=max_length):
            failed = True

    # Plain-text and YAML files share the whitespace-only txt rules.
    for fn in by_lang['txt'] + by_lang['text']:
        if custom_check_file(fn, 'txt', txt_rules):
            failed = True
    for fn in by_lang['yaml']:
        if custom_check_file(fn, 'yaml', txt_rules):
            failed = True
    return failed
return (check_custom_checks_py, check_custom_checks_nonpy)
| |
"""
.. module:: django_core_models.contats.tests.factories
:synopsis: contats application unit test factories module.
*contats* application unit test factories module.
"""
from __future__ import absolute_import, print_function
import factory
from django_core_models.core.tests.factories import (
AnnotationModelFactory, CategoryModelFactory)
from django_core_models.images.tests.factories import (
ImageReferenceModelFactory)
from django_core_models.locations.tests.factories import (
GeographicLocationModelFactory, LanguageModelFactory,
TimezoneModelFactory, USAddressModelFactory)
from django_core_models.organizations.tests.factories import (
OrganizationModelFactory, RoleModelFactory,
TitleModelFactory)
from django_core_models.social_media.tests.factories import (
EmailModelFactory, FormattedNameModelFactory,
GroupModelFactory, InstantMessagingModelFactory,
NameModelFactory, NicknameModelFactory,
PhoneModelFactory, UrlModelFactory)
from django_core_utils.tests.factories import (
NamedModelFactory, VersionedModelFactory, UserFactory)
from .. import models
class ContactTypeModelFactory(NamedModelFactory):
    """ContactType model factory class."""
    # Default name for factory-built instances.
    name = "Personal"

    class Meta(object):
        """Model meta class."""
        model = models.ContactType


class ContactRelationshipTypeModelFactory(NamedModelFactory):
    """ContactRelationshipType model factory class."""
    name = "Colleague"

    class Meta(object):
        """Model meta class."""
        model = models.ContactRelationshipType


class ContactsModelFactory(VersionedModelFactory):
    """Abstract base factory for contact association models."""

    class Meta(object):
        """Model meta class."""
        # Abstract: concrete Contact* association factories subclass this.
        abstract = True
        model = models.ContactsModel
class ContactModelFactory(ContactsModelFactory):
    """Contact model factory class."""
    # Each Contact is built with a generated Name instance.
    name = factory.SubFactory(NameModelFactory)

    class Meta(object):
        """Model meta class."""
        model = models.Contact


class ContactAddressModelFactory(ContactsModelFactory):
    """ContactAddress association model factory class."""
    contact = factory.SubFactory(ContactModelFactory)
    address = factory.SubFactory(USAddressModelFactory)

    class Meta(object):
        """Model meta class."""
        model = models.ContactAddress


class ContactAnnotationModelFactory(ContactsModelFactory):
    """ContactAnnotation association model factory class."""
    contact = factory.SubFactory(ContactModelFactory)
    annotation = factory.SubFactory(AnnotationModelFactory)

    class Meta(object):
        """Model meta class."""
        model = models.ContactAnnotation


class ContactCategoryModelFactory(ContactsModelFactory):
    """ContactCategory association model factory class."""
    contact = factory.SubFactory(ContactModelFactory)
    category = factory.SubFactory(CategoryModelFactory)

    class Meta(object):
        """Model meta class."""
        model = models.ContactCategory


class ContactEmailModelFactory(ContactsModelFactory):
    """ContactEmail association model factory class."""
    contact = factory.SubFactory(ContactModelFactory)
    email = factory.SubFactory(EmailModelFactory)

    class Meta(object):
        """Model meta class."""
        model = models.ContactEmail


class ContactFormattedNameModelFactory(ContactsModelFactory):
    """ContactFormattedName association model factory class."""
    contact = factory.SubFactory(ContactModelFactory)
    name = factory.SubFactory(FormattedNameModelFactory)

    class Meta(object):
        """Model meta class."""
        model = models.ContactFormattedName


class ContactGeographicLocationModelFactory(ContactsModelFactory):
    """ContactGeographicLocation association model factory class."""
    # The original docstring said "ContactGroup" -- a copy/paste slip;
    # this factory builds Contact<->GeographicLocation associations.
    contact = factory.SubFactory(ContactModelFactory)
    geographic_location = factory.SubFactory(GeographicLocationModelFactory)

    class Meta(object):
        """Model meta class."""
        model = models.ContactGeographicLocation
class ContactGroupModelFactory(ContactsModelFactory):
    """ContactGroup association model factory class."""
    contact = factory.SubFactory(ContactModelFactory)
    group = factory.SubFactory(GroupModelFactory)

    class Meta(object):
        """Model meta class."""
        model = models.ContactGroup


class ContactInstantMessagingModelFactory(ContactsModelFactory):
    """ContactInstantMessaging association model factory class."""
    contact = factory.SubFactory(ContactModelFactory)
    instant_messaging = factory.SubFactory(InstantMessagingModelFactory)

    class Meta(object):
        """Model meta class."""
        model = models.ContactInstantMessaging


class ContactLanguageModelFactory(ContactsModelFactory):
    """ContactLanguage association model factory class."""
    contact = factory.SubFactory(ContactModelFactory)
    language = factory.SubFactory(LanguageModelFactory)

    class Meta(object):
        """Model meta class."""
        model = models.ContactLanguage


class ContactLogoModelFactory(ContactsModelFactory):
    """ContactLogo association model factory class."""
    contact = factory.SubFactory(ContactModelFactory)
    # Logos are stored as image references, same as photos below.
    image_reference = factory.SubFactory(ImageReferenceModelFactory)

    class Meta(object):
        """Model meta class."""
        model = models.ContactLogo


class ContactNameModelFactory(ContactsModelFactory):
    """ContactName association model factory class."""
    contact = factory.SubFactory(ContactModelFactory)
    name = factory.SubFactory(NameModelFactory)

    class Meta(object):
        """Model meta class."""
        model = models.ContactName


class ContactNicknameModelFactory(ContactsModelFactory):
    """ContactNickname association model factory class."""
    contact = factory.SubFactory(ContactModelFactory)
    name = factory.SubFactory(NicknameModelFactory)

    class Meta(object):
        """Model meta class."""
        model = models.ContactNickname


class ContactOrganizationModelFactory(ContactsModelFactory):
    """ContactOrganization association model factory class."""
    contact = factory.SubFactory(ContactModelFactory)
    organization = factory.SubFactory(OrganizationModelFactory)

    class Meta(object):
        """Model meta class."""
        model = models.ContactOrganization


class ContactPhoneModelFactory(ContactsModelFactory):
    """ContactPhone association model factory class."""
    contact = factory.SubFactory(ContactModelFactory)
    phone = factory.SubFactory(PhoneModelFactory)

    class Meta(object):
        """Model meta class."""
        model = models.ContactPhone


class ContactPhotoModelFactory(ContactsModelFactory):
    """ContactPhoto association model factory class."""
    contact = factory.SubFactory(ContactModelFactory)
    image_reference = factory.SubFactory(ImageReferenceModelFactory)

    class Meta(object):
        """Model meta class."""
        model = models.ContactPhoto
class RelatedContactModelFactory(ContactsModelFactory):
"""RelatedContact association model factory class.
"""
from_contact = factory.SubFactory(ContactModelFactory)
to_contact = factory.SubFactory(ContactModelFactory)
contact_relationship_type = factory.SubFactory(
ContactRelationshipTypeModelFactory)
class Meta(object):
"""Model meta class."""
model = models.RelatedContact
class ContactRoleModelFactory(ContactsModelFactory):
"""ContactRole association model factory class.
"""
contact = factory.SubFactory(ContactModelFactory)
role = factory.SubFactory(RoleModelFactory)
class Meta(object):
"""Model meta class."""
model = models.ContactRole
class ContactTimezoneModelFactory(ContactsModelFactory):
"""ContactTimezone association model factory class.
"""
contact = factory.SubFactory(ContactModelFactory)
timezone = factory.SubFactory(TimezoneModelFactory)
class Meta(object):
"""Model meta class."""
model = models.ContactTimezone
class ContactTitleModelFactory(ContactsModelFactory):
    """ContactTitle association model factory class.

    Builds a ContactTitle row linking a Contact to a Title.
    """
    contact = factory.SubFactory(ContactModelFactory)
    title = factory.SubFactory(TitleModelFactory)

    class Meta(object):
        """Model meta class."""
        model = models.ContactTitle
class ContactUrlModelFactory(ContactsModelFactory):
    """ContactUrl association model factory class.

    Builds a ContactUrl row linking a Contact to a Url.
    """
    contact = factory.SubFactory(ContactModelFactory)
    url = factory.SubFactory(UrlModelFactory)

    class Meta(object):
        """Model meta class."""
        model = models.ContactUrl
class UserProfileModelFactory(factory.DjangoModelFactory):
    """UserProfile model factory class.

    Builds a UserProfile whose owning user is created via UserFactory.
    Note: unlike the association factories above, this derives directly
    from factory.DjangoModelFactory, not ContactsModelFactory.
    """
    user = factory.SubFactory(UserFactory)

    class Meta(object):
        """Model meta class."""
        model = models.UserProfile
| |
#!/usr/bin/env python
"""Doxygen XML to SWIG docstring converter.
Converts Doxygen generated XML files into a file containing docstrings
that can be used by SWIG-1.3.x. Note that you need to get SWIG
version > 1.3.23 or use Robin Dunn's docstring patch to be able to use
the resulting output.
Usage:
doxy2swig.py input.xml output.i
input.xml is your doxygen generated XML file and output.i is where the
output will be written (the file will be clobbered).
"""
# This code is implemented using Mark Pilgrim's code as a guideline:
# http://www.faqs.org/docs/diveintopython/kgp_divein.html
#
# Author: Prabhu Ramachandran
# License: BSD style
from __future__ import print_function
from xml.dom import minidom
import re
import textwrap
import sys
import types
import os.path
def my_open_read(source):
    """Return a readable file object for *source*.

    *source* may already be a file-like object (anything with a ``read``
    attribute), in which case it is returned unchanged; otherwise it is
    treated as a filename and opened for reading.
    """
    return source if hasattr(source, "read") else open(source)
def my_open_write(dest, mode='w'):
    """Return a writable file object for *dest*.

    *dest* may already be a file-like object (anything with a ``write``
    attribute), in which case it is returned unchanged; otherwise it is
    treated as a filename and opened with *mode*.
    """
    return dest if hasattr(dest, "write") else open(dest, mode)
class Doxy2SWIG:
    """Converts Doxygen generated XML files into a file containing
    docstrings that can be used by SWIG-1.3.x that have support for
    feature("docstring").  Once the data is parsed it is stored in
    self.pieces.

    Typical use: ``p = Doxy2SWIG(src); p.generate(); p.write(dest)``.
    """

    def __init__(self, src):
        """Initialize the instance given a source object (file or
        filename).
        """
        f = my_open_read(src)
        self.my_dir = os.path.dirname(f.name)
        self.xmldoc = minidom.parse(f).documentElement
        f.close()

        self.pieces = []
        self.pieces.append('\n// File: %s\n'%\
                           os.path.basename(f.name))

        self.space_re = re.compile(r'\s+')
        self.lead_spc = re.compile(r'^(%feature\S+\s+\S+\s*?)"\s+(\S)')
        # Set to 1 when processing an index of several files (see
        # do_doxygenindex); affects how write() cleans the output.
        self.multi = 0
        # Doxygen tags whose contents are useless in a docstring.
        self.ignores = ('inheritancegraph', 'param', 'listofallmembers',
                        'innerclass', 'name', 'declname', 'incdepgraph',
                        'invincdepgraph', 'programlisting', 'type',
                        'references', 'referencedby', 'location',
                        'collaborationgraph', 'reimplements',
                        'reimplementedby', 'derivedcompoundref',
                        'basecompoundref')
        #self.generics = []

    def generate(self):
        """Parses the file set in the initialization.  The resulting
        data is stored in `self.pieces`.
        """
        self.parse(self.xmldoc)

    def parse(self, node):
        """Parse a given node.  This function in turn calls the
        `parse_<nodeType>` functions which handle the respective
        nodes.
        """
        pm = getattr(self, "parse_%s"%node.__class__.__name__)
        pm(node)

    def parse_Document(self, node):
        """Parse a Document node by descending into its root element."""
        self.parse(node.documentElement)

    def parse_Text(self, node):
        """Escape and emit a text node, skipping pure whitespace."""
        txt = node.data
        txt = txt.replace('\\', r'\\\\')
        txt = txt.replace('"', r'\"')
        # ignore pure whitespace
        m = self.space_re.match(txt)
        if m and len(m.group()) == len(txt):
            pass
        else:
            self.add_text(textwrap.fill(txt))

    def parse_Element(self, node):
        """Parse an `ELEMENT_NODE`.  This calls specific
        `do_<tagName>` handlers for different elements.  If no handler
        is available the `generic_parse` method is called.  All
        tagNames specified in `self.ignores` are simply ignored.
        """
        name = node.tagName
        ignores = self.ignores
        if name in ignores:
            return
        attr = "do_%s" % name
        if hasattr(self, attr):
            handlerMethod = getattr(self, attr)
            handlerMethod(node)
        else:
            self.generic_parse(node)
            #if name not in self.generics: self.generics.append(name)

    def add_text(self, value):
        """Adds text corresponding to `value` into `self.pieces`."""
        # FIX: types.ListType/TupleType were removed in Python 3;
        # isinstance() with the builtin types works on both 2 and 3.
        if isinstance(value, (list, tuple)):
            self.pieces.extend(value)
        else:
            self.pieces.append(value)

    def get_specific_nodes(self, node, names):
        """Given a node and a sequence of strings in `names`, return a
        dictionary containing the names as keys and child
        `ELEMENT_NODEs`, that have a `tagName` equal to the name.
        """
        nodes = [(x.tagName, x) for x in node.childNodes
                 if x.nodeType == x.ELEMENT_NODE and
                 x.tagName in names]
        return dict(nodes)

    def generic_parse(self, node, pad=0):
        """A Generic parser for arbitrary tags in a node.

        Parameters:

        - node:  A node in the DOM.
        - pad: `int` (default: 0)

          If 0 the node data is not padded with newlines.  If 1 it
          appends a newline after parsing the childNodes.  If 2 it
          pads before and after the nodes are processed.  Defaults to
          0.
        """
        npiece = 0
        if pad:
            npiece = len(self.pieces)
            if pad == 2:
                self.add_text('\n')
        for n in node.childNodes:
            self.parse(n)
        if pad:
            # Only pad if the children actually produced output.
            if len(self.pieces) > npiece:
                self.add_text('\n')

    def space_parse(self, node):
        """Emit a single space, then parse the node generically."""
        self.add_text(' ')
        self.generic_parse(node)

    # Inline markup tags all just need a separating space.
    do_ref = space_parse
    do_emphasis = space_parse
    do_bold = space_parse
    do_computeroutput = space_parse
    do_formula = space_parse

    def do_compoundname(self, node):
        """Open a %feature("docstring") entry for the named compound."""
        self.add_text('\n\n')
        data = node.firstChild.data
        self.add_text('%%feature("docstring") %s "\n'%data)

    def do_compounddef(self, node):
        """Emit docstrings for public classes/structs; for files and
        namespaces recurse into their section definitions only."""
        kind = node.attributes['kind'].value
        if kind in ('class', 'struct'):
            prot = node.attributes['prot'].value
            # FIX: `<>` is Python-2-only syntax (SyntaxError in py3).
            if prot != 'public':
                return
            names = ('compoundname', 'briefdescription',
                     'detaileddescription', 'includes')
            first = self.get_specific_nodes(node, names)
            for n in names:
                # FIX: dict.has_key() was removed in Python 3.
                if n in first:
                    self.parse(first[n])
            self.add_text(['";','\n'])
            for n in node.childNodes:
                if n not in first.values():
                    self.parse(n)
        elif kind in ('file', 'namespace'):
            nodes = node.getElementsByTagName('sectiondef')
            for n in nodes:
                self.parse(n)

    def do_includes(self, node):
        """Emit the C++ include information for a compound."""
        self.add_text('C++ includes: ')
        self.generic_parse(node, pad=1)

    def do_parameterlist(self, node):
        """Emit a 'Parameters:' section header and its entries."""
        self.add_text(['\n', '\n', 'Parameters:', '\n'])
        self.generic_parse(node, pad=1)

    def do_para(self, node):
        """Emit a paragraph, separated by a newline."""
        self.add_text('\n')
        self.generic_parse(node, pad=1)

    def do_parametername(self, node):
        """Emit a 'name: ' prefix for a parameter description."""
        self.add_text('\n')
        self.add_text("%s: "%node.firstChild.data)

    def do_parameterdefinition(self, node):
        self.generic_parse(node, pad=1)

    def do_detaileddescription(self, node):
        self.generic_parse(node, pad=1)

    def do_briefdescription(self, node):
        self.generic_parse(node, pad=1)

    def do_memberdef(self, node):
        """Emit a %feature("docstring") entry for a public member,
        qualified with its namespace or class name as appropriate."""
        prot = node.attributes['prot'].value
        kind = node.attributes['kind'].value
        tmp = node.parentNode.parentNode.parentNode
        compdef = tmp.getElementsByTagName('compounddef')[0]
        cdef_kind = compdef.attributes['kind'].value

        if prot == 'public':
            first = self.get_specific_nodes(node, ('definition', 'name'))
            name = first['name'].firstChild.data
            if name[:8] == 'operator':  # Don't handle operators yet.
                return

            defn = first['definition'].firstChild.data
            self.add_text('\n')
            self.add_text('%feature("docstring") ')

            anc = node.parentNode.parentNode
            if cdef_kind in ('file', 'namespace'):
                ns_node = anc.getElementsByTagName('innernamespace')
                if not ns_node and cdef_kind == 'namespace':
                    ns_node = anc.getElementsByTagName('compoundname')
                if ns_node:
                    ns = ns_node[0].firstChild.data
                    self.add_text(' %s::%s "\n%s'%(ns, name, defn))
                else:
                    self.add_text(' %s "\n%s'%(name, defn))
            elif cdef_kind in ('class', 'struct'):
                # Get the full function name.
                anc_node = anc.getElementsByTagName('compoundname')
                cname = anc_node[0].firstChild.data
                self.add_text(' %s::%s "\n%s'%(cname, name, defn))

            for n in node.childNodes:
                if n not in first.values():
                    self.parse(n)
            self.add_text(['";', '\n'])

    def do_definition(self, node):
        """Emit a definition verbatim as its own docstring entry."""
        data = node.firstChild.data
        self.add_text('%s "\n%s'%(data, data))

    def do_sectiondef(self, node):
        """Only public/free function sections are documented."""
        kind = node.attributes['kind'].value
        if kind in ('public-func', 'func'):
            self.generic_parse(node)

    def do_simplesect(self, node):
        """Render simple sections; dates/versions are dropped, warnings
        and see-also references get explicit prefixes."""
        kind = node.attributes['kind'].value
        if kind in ('date', 'rcs', 'version'):
            pass
        elif kind == 'warning':
            self.add_text(['\n', 'WARNING: '])
            self.generic_parse(node)
        elif kind == 'see':
            self.add_text('\n')
            self.add_text('See: ')
            self.generic_parse(node)
        else:
            self.generic_parse(node)

    def do_argsstring(self, node):
        self.generic_parse(node, pad=1)

    def do_member(self, node):
        """Only namespace-level functions are documented from <member>."""
        kind = node.attributes['kind'].value
        refid = node.attributes['refid'].value
        if kind == 'function' and refid[:9] == 'namespace':
            self.generic_parse(node)

    def do_doxygenindex(self, node):
        """Process an index file: recursively convert every referenced
        compound XML file and append its cleaned output."""
        self.multi = 1
        comps = node.getElementsByTagName('compound')
        for c in comps:
            refid = c.attributes['refid'].value
            fname = refid + '.xml'
            if not os.path.exists(fname):
                # Referenced files live next to the index file.
                fname = os.path.join(self.my_dir, fname)
            print("parsing file: %s"%fname)
            p = Doxy2SWIG(fname)
            p.generate()
            self.pieces.extend(self.clean_pieces(p.pieces))

    def write(self, fname, mode='w'):
        """Write the collected docstrings to *fname* (a filename or a
        file-like object).  Pieces from an index run (`self.multi`) are
        already cleaned, so they are written as-is."""
        o = my_open_write(fname, mode)
        if self.multi:
            o.write("".join(self.pieces))
        else:
            o.write("".join(self.clean_pieces(self.pieces)))
        o.close()

    def clean_pieces(self, pieces):
        """Cleans the list of strings given as `pieces`.  It replaces
        multiple newlines by a maximum of 2 and returns a new list.
        It also wraps the paragraphs nicely.
        """
        ret = []
        count = 0
        for i in pieces:
            if i == '\n':
                count = count + 1
            else:
                if i == '";':
                    if count:
                        ret.append('\n')
                elif count > 2:
                    ret.append('\n\n')
                elif count:
                    ret.append('\n'*count)
                count = 0
                ret.append(i)

        _data = "".join(ret)
        ret = []
        for i in _data.split('\n\n'):
            if i == 'Parameters:':
                ret.extend(['Parameters:\n-----------', '\n\n'])
            elif i.find('// File:') > -1:  # leave comments alone.
                ret.extend([i, '\n'])
            else:
                _tmp = textwrap.fill(i.strip())
                _tmp = self.lead_spc.sub(r'\1"\2', _tmp)
                ret.extend([_tmp, '\n\n'])
        return ret
def main(input, output):
    """Convert the Doxygen XML file *input* into SWIG docstrings and
    write them to *output* (clobbering any existing file)."""
    converter = Doxy2SWIG(input)
    converter.generate()
    converter.write(output)
if __name__ == '__main__':
    # Command-line entry point: exactly two arguments are required
    # (input XML file, output .i file); otherwise print usage and exit.
    args = sys.argv[1:]
    if len(args) != 2:
        print(__doc__)
        sys.exit(1)
    main(args[0], args[1])
| |
from itertools import product, combinations_with_replacement
import numpy as np
from numba import jit, typeof
from numba.core.compiler import compile_isolated
from numba.tests.support import TestCase, MemoryLeakMixin, tag
import unittest
# Trivial wrapper functions over the NumPy reductions under test.  Each
# wrapper is compiled by Numba and its result compared against the pure
# NumPy/Python result (see run_comparative and install_generated_tests
# below).  The "_global" variants exercise the free-function form
# (np.sum(arr)) as opposed to the method form (arr.sum()).
def array_all(arr):
    return arr.all()

def array_all_global(arr):
    return np.all(arr)

def array_any(arr):
    return arr.any()

def array_any_global(arr):
    return np.any(arr)

def array_cumprod(arr):
    return arr.cumprod()

def array_cumprod_global(arr):
    return np.cumprod(arr)

def array_nancumprod(arr):
    return np.nancumprod(arr)

def array_cumsum(arr):
    return arr.cumsum()

def array_cumsum_global(arr):
    return np.cumsum(arr)

def array_nancumsum(arr):
    return np.nancumsum(arr)

def array_sum(arr):
    return arr.sum()

def array_sum_global(arr):
    return np.sum(arr)

def array_prod(arr):
    return arr.prod()

def array_prod_global(arr):
    return np.prod(arr)

def array_mean(arr):
    return arr.mean()

def array_mean_global(arr):
    return np.mean(arr)

def array_var(arr):
    return arr.var()

def array_var_global(arr):
    return np.var(arr)

def array_std(arr):
    return arr.std()

def array_std_global(arr):
    return np.std(arr)

def array_min(arr):
    return arr.min()

def array_min_global(arr):
    return np.min(arr)

def array_max(arr):
    return arr.max()

def array_max_global(arr):
    return np.max(arr)

def array_argmin(arr):
    return arr.argmin()

def array_argmin_global(arr):
    return np.argmin(arr)

def array_argmax(arr):
    return arr.argmax()

def array_argmax_global(arr):
    return np.argmax(arr)

def array_median_global(arr):
    return np.median(arr)

def array_nanmin(arr):
    return np.nanmin(arr)

def array_nanmax(arr):
    return np.nanmax(arr)

def array_nanmean(arr):
    return np.nanmean(arr)

def array_nansum(arr):
    return np.nansum(arr)

def array_nanprod(arr):
    return np.nanprod(arr)

def array_nanstd(arr):
    return np.nanstd(arr)

def array_nanvar(arr):
    return np.nanvar(arr)

def array_nanmedian_global(arr):
    return np.nanmedian(arr)

def array_percentile_global(arr, q):
    return np.percentile(arr, q)

def array_nanpercentile_global(arr, q):
    return np.nanpercentile(arr, q)

def array_ptp_global(a):
    return np.ptp(a)

def array_quantile_global(arr, q):
    return np.quantile(arr, q)

def array_nanquantile_global(arr, q):
    return np.nanquantile(arr, q)
def base_test_arrays(dtype):
    """Return sample arrays of *dtype*: a 1D array, a 2D array and a
    non-contiguous 3D array."""
    if dtype == np.bool_:
        def make(n):
            assert n % 2 == 0
            return np.bool_([0, 1] * (n // 2))
    else:
        def make(n):
            return np.arange(n, dtype=dtype) + 1
    one_d = make(10)
    two_d = make(10).reshape(2, 5)
    # The prod() of this array fits in a 32-bit int; reversing before
    # reshaping with order='A' yields a non-contiguous layout.
    three_d = make(12)[::-1].reshape((2, 3, 2), order='A')
    assert not (three_d.flags.c_contiguous or three_d.flags.f_contiguous)
    return [one_d, two_d, three_d]

def full_test_arrays(dtype):
    """Return base_test_arrays(dtype) plus dtype-specific variations:
    fractional values for float32, conjugate-mixed values for complex64."""
    arrays = base_test_arrays(dtype)
    if dtype == np.float32:
        # Add floats with some mantissa
        arrays += [a / 10 for a in arrays]
    if dtype == np.complex64:
        # add imaginary part
        extra = []
        for a in arrays:
            variant = a / 10 + 1j * a / 11
            variant[::2] = np.conj(variant[::2])
            extra.append(variant)
        arrays.extend(extra)
    for a in arrays:
        assert a.dtype == np.dtype(dtype)
    return arrays
def run_comparative(compare_func, test_array):
    """Compile *compare_func* with Numba for *test_array*'s type and
    return the pair (numpy_result, numba_result) for comparison."""
    compiled = compile_isolated(compare_func, [typeof(test_array)])
    expected = compare_func(test_array)
    actual = compiled.entry_point(test_array)
    return expected, actual
class TestArrayReductions(MemoryLeakMixin, TestCase):
    """
    Test array reduction methods and functions such as .sum(), .max(), etc.
    """

    def setUp(self):
        super(TestArrayReductions, self).setUp()
        # Fixed seed so the shuffled/random test data below is reproducible.
        np.random.seed(42)

    def check_reduction_basic(self, pyfunc, **kwargs):
        """Compare compiled vs. pure-Python *pyfunc* on a battery of 1-d
        float64 arrays including +-0.0, infinities and NaNs."""
        # Basic reduction checks on 1-d float64 arrays
        cfunc = jit(nopython=True)(pyfunc)
        def check(arr):
            self.assertPreciseEqual(pyfunc(arr), cfunc(arr), **kwargs)
        arr = np.float64([1.0, 2.0, 0.0, -0.0, 1.0, -1.5])
        check(arr)
        arr = np.float64([-0.0, -1.5])
        check(arr)
        arr = np.float64([-1.5, 2.5, 'inf'])
        check(arr)
        arr = np.float64([-1.5, 2.5, '-inf'])
        check(arr)
        arr = np.float64([-1.5, 2.5, 'inf', '-inf'])
        check(arr)
        arr = np.float64(['nan', -1.5, 2.5, 'nan', 3.0])
        check(arr)
        arr = np.float64(['nan', -1.5, 2.5, 'nan', 'inf', '-inf', 3.0])
        check(arr)
        arr = np.float64([5.0, 'nan', -1.5, 'nan'])
        check(arr)
        # Only NaNs
        arr = np.float64(['nan', 'nan'])
        check(arr)

    def test_all_basic(self, pyfunc=array_all):
        cfunc = jit(nopython=True)(pyfunc)
        def check(arr):
            self.assertPreciseEqual(pyfunc(arr), cfunc(arr))
        arr = np.float64([1.0, 0.0, float('inf'), float('nan')])
        check(arr)
        arr[1] = -0.0
        check(arr)
        arr[1] = 1.5
        check(arr)
        # 2-d and reversed (non-contiguous) layouts
        arr = arr.reshape((2, 2))
        check(arr)
        check(arr[::-1])

    def test_any_basic(self, pyfunc=array_any):
        cfunc = jit(nopython=True)(pyfunc)
        def check(arr):
            self.assertPreciseEqual(pyfunc(arr), cfunc(arr))
        arr = np.float64([0.0, -0.0, 0.0, 0.0])
        check(arr)
        arr[2] = float('nan')
        check(arr)
        arr[2] = float('inf')
        check(arr)
        arr[2] = 1.5
        check(arr)
        # 2-d and reversed (non-contiguous) layouts
        arr = arr.reshape((2, 2))
        check(arr)
        check(arr[::-1])

    def test_sum_basic(self):
        self.check_reduction_basic(array_sum)

    def test_mean_basic(self):
        self.check_reduction_basic(array_mean)

    def test_var_basic(self):
        self.check_reduction_basic(array_var, prec='double')

    def test_std_basic(self):
        self.check_reduction_basic(array_std)

    def test_min_basic(self):
        self.check_reduction_basic(array_min)

    def test_max_basic(self):
        self.check_reduction_basic(array_max)

    def test_argmin_basic(self):
        self.check_reduction_basic(array_argmin)

    def test_argmax_basic(self):
        self.check_reduction_basic(array_argmax)

    def test_nanmin_basic(self):
        self.check_reduction_basic(array_nanmin)

    def test_nanmax_basic(self):
        self.check_reduction_basic(array_nanmax)

    def test_nanmean_basic(self):
        self.check_reduction_basic(array_nanmean)

    def test_nansum_basic(self):
        self.check_reduction_basic(array_nansum)

    def test_nanprod_basic(self):
        self.check_reduction_basic(array_nanprod)

    def test_nanstd_basic(self):
        self.check_reduction_basic(array_nanstd)

    def test_nanvar_basic(self):
        self.check_reduction_basic(array_nanvar, prec='double')

    def check_median_basic(self, pyfunc, array_variations):
        """Check median-style *pyfunc* on odd- and even-sized arrays in
        1-d, 2-d and transposed form, over *array_variations*."""
        cfunc = jit(nopython=True)(pyfunc)
        def check(arr):
            expected = pyfunc(arr)
            got = cfunc(arr)
            self.assertPreciseEqual(got, expected)
        # Odd sizes
        def check_odd(a):
            check(a)
            a = a.reshape((9, 7))
            check(a)
            check(a.T)
        for a in array_variations(np.arange(63) + 10.5):
            check_odd(a)
        # Even sizes
        def check_even(a):
            check(a)
            a = a.reshape((4, 16))
            check(a)
            check(a.T)
        for a in array_variations(np.arange(64) + 10.5):
            check_even(a)

    @staticmethod
    def _array_variations(a):
        # Sorted, reversed, random, many duplicates, many NaNs, all NaNs
        yield a
        a = a[::-1].copy()
        yield a
        np.random.shuffle(a)
        yield a
        a[a % 4 >= 1] = 3.5
        yield a
        a[a % 4 >= 2] = np.nan
        yield a
        a[:] = np.nan
        yield a

    def test_median_basic(self):
        pyfunc = array_median_global
        def variations(a):
            # Sorted, reversed, random, many duplicates
            # (no NaN variations: plain median has no NaN handling)
            yield a
            a = a[::-1].copy()
            yield a
            np.random.shuffle(a)
            yield a
            a[a % 4 >= 1] = 3.5
            yield a
        self.check_median_basic(pyfunc, variations)

    def check_percentile_and_quantile(self, pyfunc, q_upper_bound):
        """Shared checks for percentile/quantile-like *pyfunc*;
        q_upper_bound is 100 for percentile, 1 for quantile."""
        cfunc = jit(nopython=True)(pyfunc)
        def check(a, q, abs_tol=1e-12):
            expected = pyfunc(a, q)
            got = cfunc(a, q)
            self.assertPreciseEqual(got, expected, abs_tol=abs_tol)
        a = self.random.randn(27).reshape(3, 3, 3)
        q = np.linspace(0, q_upper_bound, 14)[::-1]
        check(a, q)
        check(a, 0)
        check(a, q_upper_bound / 2)
        check(a, q_upper_bound)
        not_finite = [np.nan, -np.inf, np.inf]
        a.flat[:10] = self.random.choice(not_finite, 10)
        self.random.shuffle(a)
        self.random.shuffle(q)
        check(a, q)
        # list and tuple inputs
        a = a.flatten().tolist()
        q = q.flatten().tolist()
        check(a, q)
        check(tuple(a), tuple(q))
        a = self.random.choice([1, 2, 3, 4], 10)
        q = np.linspace(0, q_upper_bound, 5)
        check(a, q)
        # tests inspired by
        # https://github.com/numpy/numpy/blob/345b2f6e/numpy/lib/tests/test_function_base.py
        x = np.arange(8) * 0.5
        np.testing.assert_equal(cfunc(x, 0), 0.)
        np.testing.assert_equal(cfunc(x, q_upper_bound), 3.5)
        np.testing.assert_equal(cfunc(x, q_upper_bound / 2), 1.75)
        x = np.arange(12).reshape(3, 4)
        q = np.array((0.25, 0.5, 1.0)) * q_upper_bound
        np.testing.assert_equal(cfunc(x, q), [2.75, 5.5, 11.0])
        x = np.arange(3 * 4 * 5 * 6).reshape(3, 4, 5, 6)
        q = np.array((0.25, 0.50)) * q_upper_bound
        np.testing.assert_equal(cfunc(x, q).shape, (2,))
        q = np.array((0.25, 0.50, 0.75)) * q_upper_bound
        np.testing.assert_equal(cfunc(x, q).shape, (3,))
        x = np.arange(12).reshape(3, 4)
        np.testing.assert_equal(cfunc(x, q_upper_bound / 2), 5.5)
        self.assertTrue(np.isscalar(cfunc(x, q_upper_bound / 2)))
        np.testing.assert_equal(cfunc([1, 2, 3], 0), 1)
        # the input array must not be mutated
        a = np.array([2, 3, 4, 1])
        cfunc(a, [q_upper_bound / 2])
        np.testing.assert_equal(a, np.array([2, 3, 4, 1]))

    def check_percentile_edge_cases(self, pyfunc, q_upper_bound=100):
        """Edge-case checks: exhaustive small combinations with
        non-finite values, plus scalar/bool inputs."""
        cfunc = jit(nopython=True)(pyfunc)
        def check(a, q, abs_tol=1e-14):
            expected = pyfunc(a, q)
            got = cfunc(a, q)
            self.assertPreciseEqual(got, expected, abs_tol=abs_tol)
        def convert_to_float_and_check(a, q, abs_tol=1e-14):
            expected = pyfunc(a, q).astype(np.float64)
            got = cfunc(a, q)
            self.assertPreciseEqual(got, expected, abs_tol=abs_tol)
        def _array_combinations(elements):
            for i in range(1, 10):
                for comb in combinations_with_replacement(elements, i):
                    yield np.array(comb)
        # high number of combinations, many including non-finite values
        q = (0, 0.1 * q_upper_bound, 0.2 * q_upper_bound, q_upper_bound)
        element_pool = (1, -1, np.nan, np.inf, -np.inf)
        for a in _array_combinations(element_pool):
            check(a, q)
        # edge cases - numpy exhibits behavioural differences across
        # platforms, see: https://github.com/numpy/numpy/issues/13272
        if q_upper_bound == 1:
            _check = convert_to_float_and_check
        else:
            _check = check
        a = np.array(5)
        q = np.array(1)
        _check(a, q)
        a = True
        q = False
        _check(a, q)
        a = np.array([False, True, True])
        q = a
        _check(a, q)
        a = 5
        q = q_upper_bound / 2
        _check(a, q)

    def check_percentile_exceptions(self, pyfunc):
        """Invalid q values must raise ValueError; complex input must be
        rejected at typing time."""
        cfunc = jit(nopython=True)(pyfunc)
        def check_err(a, q):
            with self.assertRaises(ValueError) as raises:
                cfunc(a, q)
            self.assertEqual(
                "Percentiles must be in the range [0, 100]",
                str(raises.exception)
            )
        # Exceptions leak references
        self.disable_leak_check()
        a = np.arange(5)
        check_err(a, -5)  # q less than 0
        check_err(a, (1, 10, 105))  # q contains value greater than 100
        check_err(a, (1, 10, np.nan))  # q contains nan
        with self.assertTypingError() as e:
            a = np.arange(5) * 1j
            q = 0.1
            cfunc(a, q)
        self.assertIn('Not supported for complex dtype', str(e.exception))

    def check_quantile_exceptions(self, pyfunc):
        """Same as check_percentile_exceptions but for quantile's [0, 1]
        valid range and error message."""
        cfunc = jit(nopython=True)(pyfunc)
        def check_err(a, q):
            with self.assertRaises(ValueError) as raises:
                cfunc(a, q)
            self.assertEqual(
                "Quantiles must be in the range [0, 1]",
                str(raises.exception)
            )
        # Exceptions leak references
        self.disable_leak_check()
        a = np.arange(5)
        check_err(a, -0.5)  # q less than 0
        check_err(a, (0.1, 0.10, 1.05))  # q contains value greater than 1
        check_err(a, (0.1, 0.10, np.nan))  # q contains nan
        with self.assertTypingError() as e:
            a = np.arange(5) * 1j
            q = 0.1
            cfunc(a, q)
        self.assertIn('Not supported for complex dtype', str(e.exception))

    def test_percentile_basic(self):
        pyfunc = array_percentile_global
        self.check_percentile_and_quantile(pyfunc, q_upper_bound=100)
        self.check_percentile_edge_cases(pyfunc, q_upper_bound=100)
        self.check_percentile_exceptions(pyfunc)

    def test_nanpercentile_basic(self):
        pyfunc = array_nanpercentile_global
        self.check_percentile_and_quantile(pyfunc, q_upper_bound=100)
        self.check_percentile_edge_cases(pyfunc, q_upper_bound=100)
        self.check_percentile_exceptions(pyfunc)

    def test_quantile_basic(self):
        pyfunc = array_quantile_global
        self.check_percentile_and_quantile(pyfunc, q_upper_bound=1)
        self.check_percentile_edge_cases(pyfunc, q_upper_bound=1)
        self.check_quantile_exceptions(pyfunc)

    def test_nanquantile_basic(self):
        pyfunc = array_nanquantile_global
        self.check_percentile_and_quantile(pyfunc, q_upper_bound=1)
        self.check_percentile_edge_cases(pyfunc, q_upper_bound=1)
        self.check_quantile_exceptions(pyfunc)

    def test_nanmedian_basic(self):
        pyfunc = array_nanmedian_global
        self.check_median_basic(pyfunc, self._array_variations)

    def test_array_sum_global(self):
        arr = np.arange(10, dtype=np.int32)
        arrty = typeof(arr)
        self.assertEqual(arrty.ndim, 1)
        self.assertEqual(arrty.layout, 'C')
        cres = compile_isolated(array_sum_global, [arrty])
        cfunc = cres.entry_point
        self.assertEqual(np.sum(arr), cfunc(arr))

    def test_array_prod_int_1d(self):
        arr = np.arange(10, dtype=np.int32) + 1
        arrty = typeof(arr)
        self.assertEqual(arrty.ndim, 1)
        self.assertEqual(arrty.layout, 'C')
        cres = compile_isolated(array_prod, [arrty])
        cfunc = cres.entry_point
        self.assertEqual(arr.prod(), cfunc(arr))

    def test_array_prod_float_1d(self):
        arr = np.arange(10, dtype=np.float32) + 1 / 10
        arrty = typeof(arr)
        self.assertEqual(arrty.ndim, 1)
        self.assertEqual(arrty.layout, 'C')
        cres = compile_isolated(array_prod, [arrty])
        cfunc = cres.entry_point
        np.testing.assert_allclose(arr.prod(), cfunc(arr))

    def test_array_prod_global(self):
        arr = np.arange(10, dtype=np.int32)
        arrty = typeof(arr)
        self.assertEqual(arrty.ndim, 1)
        self.assertEqual(arrty.layout, 'C')
        cres = compile_isolated(array_prod_global, [arrty])
        cfunc = cres.entry_point
        np.testing.assert_allclose(np.prod(arr), cfunc(arr))

    def check_cumulative(self, pyfunc):
        """Compare cumulative reductions on int, float and 2-d inputs."""
        arr = np.arange(2, 10, dtype=np.int16)
        expected, got = run_comparative(pyfunc, arr)
        self.assertPreciseEqual(got, expected)
        arr = np.linspace(2, 8, 6)
        expected, got = run_comparative(pyfunc, arr)
        self.assertPreciseEqual(got, expected)
        arr = arr.reshape((3, 2))
        expected, got = run_comparative(pyfunc, arr)
        self.assertPreciseEqual(got, expected)

    def test_array_cumsum(self):
        self.check_cumulative(array_cumsum)

    def test_array_cumsum_global(self):
        self.check_cumulative(array_cumsum_global)

    def test_array_cumprod(self):
        self.check_cumulative(array_cumprod)

    def test_array_cumprod_global(self):
        self.check_cumulative(array_cumprod_global)

    def check_aggregation_magnitude(self, pyfunc, is_prod=False):
        """
        Check that integer overflows are avoided (issue #931).
        """
        # Overflows are avoided here (ints are cast either to intp
        # or float64).
        n_items = 2 if is_prod else 10  # avoid overflow on prod()
        arr = (np.arange(n_items) + 40000).astype('int16')
        npr, nbr = run_comparative(pyfunc, arr)
        self.assertPreciseEqual(npr, nbr)
        # Overflows are avoided for functions returning floats here.
        # Other functions may wrap around.
        arr = (np.arange(10) + 2**60).astype('int64')
        npr, nbr = run_comparative(pyfunc, arr)
        self.assertPreciseEqual(npr, nbr)
        arr = arr.astype('uint64')
        npr, nbr = run_comparative(pyfunc, arr)
        self.assertPreciseEqual(npr, nbr)

    def test_sum_magnitude(self):
        self.check_aggregation_magnitude(array_sum)
        self.check_aggregation_magnitude(array_sum_global)

    def test_cumsum_magnitude(self):
        self.check_aggregation_magnitude(array_cumsum)
        self.check_aggregation_magnitude(array_cumsum_global)

    def test_nancumsum_magnitude(self):
        self.check_aggregation_magnitude(array_nancumsum, is_prod=True)

    def test_prod_magnitude(self):
        self.check_aggregation_magnitude(array_prod, is_prod=True)
        self.check_aggregation_magnitude(array_prod_global, is_prod=True)

    def test_cumprod_magnitude(self):
        self.check_aggregation_magnitude(array_cumprod, is_prod=True)
        self.check_aggregation_magnitude(array_cumprod_global, is_prod=True)

    def test_nancumprod_magnitude(self):
        self.check_aggregation_magnitude(array_nancumprod, is_prod=True)

    def test_mean_magnitude(self):
        self.check_aggregation_magnitude(array_mean)
        self.check_aggregation_magnitude(array_mean_global)

    def test_var_magnitude(self):
        self.check_aggregation_magnitude(array_var)
        self.check_aggregation_magnitude(array_var_global)

    def test_std_magnitude(self):
        self.check_aggregation_magnitude(array_std)
        self.check_aggregation_magnitude(array_std_global)

    def _do_check_nptimedelta(self, pyfunc, arr):
        """Check *pyfunc* on a datetime64/timedelta64 array, including
        NaT handling (skipped for median, which has known NaT issues)."""
        arrty = typeof(arr)
        cfunc = jit(nopython=True)(pyfunc)

        self.assertPreciseEqual(cfunc(arr), pyfunc(arr))
        # Even vs. odd size, for np.median
        self.assertPreciseEqual(cfunc(arr[:-1]), pyfunc(arr[:-1]))
        # Test with different orders, for np.median
        arr = arr[::-1].copy()  # Keep 'C' layout
        self.assertPreciseEqual(cfunc(arr), pyfunc(arr))
        np.random.shuffle(arr)
        self.assertPreciseEqual(cfunc(arr), pyfunc(arr))
        # Test with a NaT
        arr[arr.size // 2] = 'NaT'
        self.assertPreciseEqual(cfunc(arr), pyfunc(arr))
        if 'median' not in pyfunc.__name__:
            # Test with (val, NaT)^N (and with the random NaT from above)
            # use a loop, there's some weird thing/bug with arr[1::2] = 'NaT'
            # Further Numba has bug(s) relating to NaN/NaT handling in anything
            # using a partition such as np.median
            for x in range(1, len(arr), 2):
                arr[x] = 'NaT'
            self.assertPreciseEqual(cfunc(arr), pyfunc(arr))
            # Test with all NaTs
            arr.fill(arrty.dtype('NaT'))
            self.assertPreciseEqual(cfunc(arr), pyfunc(arr))

    def check_npdatetime(self, pyfunc):
        arr = np.arange(10).astype(dtype='M8[Y]')
        self._do_check_nptimedelta(pyfunc, arr)

    def check_nptimedelta(self, pyfunc):
        arr = np.arange(10).astype(dtype='m8[s]')
        self._do_check_nptimedelta(pyfunc, arr)

    def test_min_npdatetime(self):
        self.check_npdatetime(array_min)
        self.check_nptimedelta(array_min)

    def test_max_npdatetime(self):
        self.check_npdatetime(array_max)
        self.check_nptimedelta(array_max)

    def test_argmin_npdatetime(self):
        self.check_npdatetime(array_argmin)
        self.check_nptimedelta(array_argmin)

    def test_argmax_npdatetime(self):
        self.check_npdatetime(array_argmax)
        self.check_nptimedelta(array_argmax)

    def test_median_npdatetime(self):
        self.check_nptimedelta(array_median_global)

    def test_sum_npdatetime(self):
        self.check_nptimedelta(array_sum)

    def test_cumsum_npdatetime(self):
        self.check_nptimedelta(array_cumsum)

    def test_mean_npdatetime(self):
        self.check_nptimedelta(array_mean)

    def check_nan_cumulative(self, pyfunc):
        """Check nancumsum/nancumprod on varied layouts, with ~50% NaNs,
        and on empty/all-NaN/complex edge cases."""
        cfunc = jit(nopython=True)(pyfunc)
        def check(a):
            expected = pyfunc(a)
            got = cfunc(a)
            self.assertPreciseEqual(expected, got)
        def _set_some_values_to_nan(a):
            p = a.size // 2  # set approx half elements to NaN
            np.put(a, np.random.choice(range(a.size), p, replace=False), np.nan)
            return a
        def a_variations():
            yield np.linspace(-1, 3, 60).reshape(3, 4, 5)
            yield np.array([np.inf, 3, 4])
            yield np.array([True, True, True, False])
            yield np.arange(1, 10)
            yield np.asfortranarray(np.arange(1, 64) - 33.3)
            yield np.arange(1, 10, dtype=np.float32)[::-1]
        for a in a_variations():
            check(a)  # no nans
            check(_set_some_values_to_nan(a.astype(np.float64)))  # about 50% nans
        # edge cases
        check(np.array([]))
        check(np.full(10, np.nan))
        parts = np.array([np.nan, 2, np.nan, 4, 5, 6, 7, 8, 9])
        a = parts + 1j * parts[::-1]
        a = a.reshape(3, 3)
        check(a)

    def test_nancumprod_basic(self):
        self.check_cumulative(array_nancumprod)
        self.check_nan_cumulative(array_nancumprod)

    def test_nancumsum_basic(self):
        self.check_cumulative(array_nancumsum)
        self.check_nan_cumulative(array_nancumsum)

    def test_ptp_basic(self):
        pyfunc = array_ptp_global
        cfunc = jit(nopython=True)(pyfunc)
        def check(a):
            expected = pyfunc(a)
            got = cfunc(a)
            self.assertPreciseEqual(expected, got)
        def a_variations():
            # arrays, tuples, lists, scalars, complex values, NaN/inf
            yield np.arange(10)
            yield np.array([-1.1, np.nan, 2.2])
            yield np.array([-np.inf, 5])
            yield (4, 2, 5)
            yield (1,)
            yield np.full(5, 5)
            yield [2.2, -2.3, 0.1]
            a = np.linspace(-10, 10, 16).reshape(4, 2, 2)
            yield a
            yield np.asfortranarray(a)
            yield a[::-1]
            np.random.RandomState(0).shuffle(a)
            yield a
            yield 6
            yield 6.5
            yield -np.inf
            yield 1 + 4j
            yield [2.2, np.nan]
            yield [2.2, np.inf]
            yield ((4.1, 2.0, -7.6), (4.3, 2.7, 5.2))
            yield np.full(5, np.nan)
            yield 1 + np.nan * 1j
            yield np.nan + np.nan * 1j
            yield np.nan
        for a in a_variations():
            check(a)

    def test_ptp_complex(self):
        pyfunc = array_ptp_global
        cfunc = jit(nopython=True)(pyfunc)
        def check(a):
            expected = pyfunc(a)
            got = cfunc(a)
            self.assertPreciseEqual(expected, got)
        def make_array(real_nan=False, imag_nan=False):
            real = np.linspace(-4, 4, 25)
            if real_nan:
                real[4:9] = np.nan
            imag = np.linspace(-5, 5, 25)
            if imag_nan:
                imag[7:12] = np.nan
            return (real + 1j * imag).reshape(5, 5)
        for real_nan, imag_nan in product([True, False], repeat=2):
            comp = make_array(real_nan, imag_nan)
            check(comp)
        real = np.ones(8)
        imag = np.arange(-4, 4)
        comp = real + 1j * imag
        check(comp)
        comp = real - 1j * imag
        check(comp)
        comp = np.full((4, 4), fill_value=(1 - 1j))
        check(comp)

    def test_ptp_exceptions(self):
        pyfunc = array_ptp_global
        cfunc = jit(nopython=True)(pyfunc)
        # Exceptions leak references
        self.disable_leak_check()
        with self.assertTypingError() as e:
            cfunc(np.array((True, True, False)))
        msg = "Boolean dtype is unsupported (as per NumPy)"
        self.assertIn(msg, str(e.exception))
        with self.assertRaises(ValueError) as e:
            cfunc(np.array([]))
        msg = "zero-size array reduction not possible"
        self.assertIn(msg, str(e.exception))

    def test_min_max_complex_basic(self):
        pyfuncs = array_min_global, array_max_global
        for pyfunc in pyfuncs:
            cfunc = jit(nopython=True)(pyfunc)
            def check(a):
                expected = pyfunc(a)
                got = cfunc(a)
                self.assertPreciseEqual(expected, got)
            real = np.linspace(-10, 10, 40)
            real[:4] = real[-1]
            imag = real * 2
            a = real - imag * 1j
            check(a)
            for _ in range(10):
                self.random.shuffle(real)
                self.random.shuffle(imag)
                dtype = self.random.choice([np.complex64, np.complex128])
                a = real - imag * 1j
                a[:4] = a[-1]
                check(a.astype(dtype))

    def test_nanmin_nanmax_complex_basic(self):
        pyfuncs = array_nanmin, array_nanmax
        for pyfunc in pyfuncs:
            cfunc = jit(nopython=True)(pyfunc)
            def check(a):
                expected = pyfunc(a)
                got = cfunc(a)
                self.assertPreciseEqual(expected, got)
            real = np.linspace(-10, 10, 40)
            real[:4] = real[-1]
            real[5:9] = np.nan
            imag = real * 2
            imag[7:12] = np.nan
            a = real - imag * 1j
            check(a)
            for _ in range(10):
                self.random.shuffle(real)
                self.random.shuffle(imag)
                a = real - imag * 1j
                a[:4] = a[-1]
                check(a)

    def test_nanmin_nanmax_non_array_inputs(self):
        pyfuncs = array_nanmin, array_nanmax
        # NOTE: check() closes over pyfunc/cfunc by late binding — it is
        # only called inside the loop below, so it always sees the
        # current pair.
        def check(a):
            expected = pyfunc(a)
            got = cfunc(a)
            self.assertPreciseEqual(expected, got)
        def a_variations():
            yield [1, 6, 4, 2]
            yield ((-10, 4, -12), (5, 200, -30))
            yield np.array(3)
            yield (2,)
            yield 3.142
            yield False
            yield (np.nan, 3.142, -5.2, 3.0)
            yield [np.inf, np.nan, -np.inf]
            yield [(np.nan, 1.1), (-4.4, 8.7)]
        for pyfunc in pyfuncs:
            cfunc = jit(nopython=True)(pyfunc)
            for a in a_variations():
                check(a)

    @classmethod
    def install_generated_tests(cls):
        """Generate one test method per (reduction function, dtype,
        sample array) combination and attach it to the class."""
        # These form a testing product where each of the combinations are tested
        # these function are tested in real and complex space
        reduction_funcs = [array_sum, array_sum_global,
                           array_prod, array_prod_global,
                           array_mean, array_mean_global,
                           array_var, array_var_global,
                           array_std, array_std_global,
                           array_all, array_all_global,
                           array_any, array_any_global,
                           array_min, array_min_global,
                           array_max, array_max_global,
                           array_nanmax, array_nanmin,
                           array_nansum,
                           ]
        # these functions only work in real space as no complex comparison
        # operator is implemented
        reduction_funcs_rspace = [array_argmin, array_argmin_global,
                                  array_argmax, array_argmax_global]

        reduction_funcs += [array_nanmean, array_nanstd, array_nanvar]
        reduction_funcs += [array_nanprod]

        dtypes_to_test = [np.int32, np.float32, np.bool_, np.complex64]

        def install_tests(dtypes, funcs):
            # Install tests on class
            for dt in dtypes:
                test_arrays = full_test_arrays(dt)
                for red_func, test_array in product(funcs, test_arrays):
                    # Create the name for the test function
                    test_name = "test_{0}_{1}_{2}d"
                    test_name = test_name.format(red_func.__name__,
                                                 test_array.dtype.name,
                                                 test_array.ndim)

                    def new_test_function(self, redFunc=red_func,
                                          testArray=test_array,
                                          testName=test_name):
                        ulps = 1
                        # NOTE(review): this condition reads the loop
                        # variable red_func via late binding instead of
                        # the bound default redFunc — by the time tests
                        # run, red_func holds the last loop value.
                        # TODO confirm whether redFunc was intended.
                        if 'prod' in red_func.__name__ and \
                                np.iscomplexobj(testArray):
                            # prod family accumulate slightly more error on
                            # some architectures (power, 32bit) for complex input
                            ulps = 3
                        npr, nbr = run_comparative(redFunc, testArray)
                        self.assertPreciseEqual(npr, nbr, msg=testName,
                                                prec="single", ulps=ulps)
                    # Install it into the class
                    setattr(cls, test_name, new_test_function)

        # install tests for reduction functions that only work in real space
        install_tests(dtypes_to_test[:-1], reduction_funcs_rspace)

        # install tests for reduction functions
        install_tests(dtypes_to_test, reduction_funcs)
# Generate and attach the product-of-combinations reduction tests at import.
TestArrayReductions.install_generated_tests()
class TestArrayReductionsExceptions(MemoryLeakMixin, TestCase):
    """Check the error messages raised by reductions on zero-size arrays."""

    # int64, size 0
    zero_size = np.arange(0)

    def check_exception(self, pyfunc, msg):
        """Compile pyfunc; assert NumPy raises and numba raises with msg."""
        cfunc = jit(nopython=True)(pyfunc)
        # make sure NumPy raises consistently/no behaviour change
        with self.assertRaises(BaseException):
            pyfunc(self.zero_size)
        # check numba impl raises expected
        with self.assertRaises(ValueError) as e:
            cfunc(self.zero_size)
        self.assertIn(msg, str(e.exception))

    @classmethod
    def install(cls):
        """Install one generated test per reduction function."""
        fn_to_msg = dict()
        empty_seq = "attempt to get {0} of an empty sequence"
        op_no_ident = ("zero-size array to reduction operation "
                       "{0}")
        for x in [array_argmax, array_argmax_global, array_argmin,
                  array_argmin_global]:
            fn_to_msg[x] = empty_seq
        # BUG FIX: the original list repeated array_max/array_min, so the
        # *_global variants were never registered (duplicate dict keys
        # collapsed); the '_global' strip in lmsg below shows they were
        # intended to be covered.
        for x in [array_max, array_max_global, array_min, array_min_global]:
            fn_to_msg[x] = op_no_ident

        name_template = "test_zero_size_array_{0}"
        for fn, msg in fn_to_msg.items():
            test_name = name_template.format(fn.__name__)
            lmsg = msg.format(fn.__name__)
            # strip wrapper-name noise so the message matches numba's text
            lmsg = lmsg.replace('array_', '').replace('_global', '')

            def test_fn(self, func=fn, message=lmsg):
                self.check_exception(func, message)

            setattr(cls, test_name, test_fn)
# Attach the zero-size-array exception tests at import.
TestArrayReductionsExceptions.install()
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| |
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import time
import _socket, poplib,imaplib
import frappe
from frappe import _
from frappe.utils import extract_email_id, convert_utc_to_user_timezone, now, cint, cstr, strip
from frappe.utils.scheduler import log
from email_reply_parser import EmailReplyParser
from email.header import decode_header
from frappe.utils.file_manager import get_random_filename
# Raised when a single incoming email exceeds the configured max_email_size.
class EmailSizeExceededError(frappe.ValidationError): pass
# Raised when cumulative POP/IMAP read time exceeds the pop_timeout budget.
class EmailTimeoutError(frappe.ValidationError): pass
# Raised when the combined size of pulled emails exceeds the session budget.
class TotalSizeExceededError(frappe.ValidationError): pass
# Raised when the mail server reports its login limit has been exceeded.
class LoginLimitExceeded(frappe.ValidationError): pass
class POP3Server:
    """Wrapper for POP server to pull emails.

    NOTE(review): despite the name, IMAP is also handled here when
    ``settings.use_imap`` is truthy (see ``connect_imap``); ``self.pop``
    then holds an IMAP connection.
    """
    def __init__(self, args=None):
        self.setup(args)

    def setup(self, args=None):
        # override
        self.settings = args or frappe._dict()

    def check_mails(self):
        # override; return False to skip pulling entirely
        return True

    def process_message(self, mail):
        # override
        pass

    def connect(self):
        """Connect to **Email Account**. Returns True on success, False on a
        temporary server-side problem."""
        if cint(self.settings.use_imap):
            self.connect_imap()
            return True
        else:
            try:
                if cint(self.settings.use_ssl):
                    self.pop = Timed_POP3_SSL(self.settings.host, timeout=frappe.conf.get("pop_timeout"))
                else:
                    self.pop = Timed_POP3(self.settings.host, timeout=frappe.conf.get("pop_timeout"))
                self.pop.user(self.settings.username)
                self.pop.pass_(self.settings.password)

                # connection established!
                return True

            except _socket.error:
                # Invalid mail server -- due to refusing connection
                frappe.msgprint(_('Invalid Mail Server. Please rectify and try again.'))
                raise

            except poplib.error_proto, e:
                # tolerate known transient server failures; anything else is
                # treated as bad credentials
                if self.is_temporary_system_problem(e):
                    return False
                else:
                    frappe.msgprint(_('Invalid User Name or Support Password. Please rectify and try again.'))
                    raise

    def connect_imap(self):
        """Connect to **Email Account** over IMAP."""
        try:
            if cint(self.settings.use_ssl):
                self.pop = Timed_IMAP4_SSL(self.settings.host, timeout=frappe.conf.get("pop_timeout"))
            else:
                self.pop = Timed_IMAP4(self.settings.host, timeout=frappe.conf.get("pop_timeout"))
            self.pop.login(self.settings.username,self.settings.password)

            # connection established!
            return True

        except _socket.error:
            # Invalid mail server -- due to refusing connection
            frappe.msgprint(_('Invalid Mail Server. Please rectify and try again.'))
            raise
        except :
            # NOTE(review): bare except -- any failure is reported as bad
            # credentials; narrowing to imaplib.IMAP4.error would be safer.
            frappe.msgprint(_('Invalid User Name or Support Password. Please rectify and try again.'))
            raise

    def get_messages(self):
        """Returns new email messages in a list."""
        if not self.check_mails():
            return # nothing to do

        frappe.db.commit()

        if not self.connect():
            return []

        try:
            # track if errors arised
            self.errors = False
            self.latest_messages = []

            if cint(self.settings.use_imap):
                self.pop.select("Inbox")
                responce, message = self.pop.uid('search',None, "UNSEEN") # search and return Uids
                pop_list = message[0].split()
            else:
                pop_list = self.pop.list()[1]

            num = num_copy = len(pop_list)

            # WARNING: Hard coded max no. of messages to be popped
            if num > 20: num = 20

            # size limits
            self.total_size = 0
            self.max_email_size = cint(frappe.local.conf.get("max_email_size"))
            self.max_total_size = 5 * self.max_email_size

            for i, pop_meta in enumerate(pop_list):
                # do not pull more than NUM emails
                if (i+1) > num:
                    break

                try:
                    # IMAP is addressed by UID, POP by 1-based message number
                    if cint(self.settings.use_imap):
                        self.retrieve_message(pop_meta)
                    else:
                        self.retrieve_message(pop_meta, i+1)
                except (TotalSizeExceededError, EmailTimeoutError, LoginLimitExceeded):
                    break

            # WARNING: Mark as read - message number 101 onwards from the pop list
            # This is to avoid having too many messages entering the system
            num = num_copy
            if num > 100 and not self.errors and not cint(self.settings.use_imap):
                for m in xrange(101, num+1):
                    self.pop.dele(m)

        except Exception, e:
            if self.has_login_limit_exceeded(e):
                # swallow: partial results in latest_messages are still usable
                pass
            else:
                raise

        finally:
            # no matter the exception, pop should quit if connected
            if cint(self.settings.use_imap):
                self.pop.logout()
            else:
                self.pop.quit()

        return self.latest_messages

    def retrieve_message(self, pop_meta,msg_num=None):
        """Fetch one message and append its raw bytes to latest_messages.

        :param pop_meta: IMAP UID, or POP 'num size' listing entry.
        :param msg_num: POP message number (unused for IMAP)."""
        incoming_mail = None
        try:
            self.validate_pop(pop_meta)

            if cint(self.settings.use_imap):
                status,message = self.pop.uid('fetch', pop_meta, '(RFC822)')
                self.latest_messages.append(message[0][1])
            else:
                msg = self.pop.retr(msg_num)
                self.latest_messages.append(b'\n'.join(msg[1]))
        except (TotalSizeExceededError, EmailTimeoutError):
            # propagate this error to break the loop
            self.errors = True
            raise
        except Exception, e:
            if self.has_login_limit_exceeded(e):
                self.errors = True
                raise LoginLimitExceeded, e
            else:
                # log performs rollback and logs error in scheduler log
                log("receive.get_messages", self.make_error_msg(msg_num, incoming_mail))
                self.errors = True
                frappe.db.rollback()

                if not cint(self.settings.use_imap):
                    # delete the failing message so it is not retried forever
                    self.pop.dele(msg_num)
        else:
            if not cint(self.settings.use_imap):
                # delete retrieved message from server (POP pull semantics)
                self.pop.dele(msg_num)

    def has_login_limit_exceeded(self, e):
        # matches the raw -ERR response text sent by some POP servers
        return "-ERR Exceeded the login limit" in strip(cstr(e.message))

    def is_temporary_system_problem(self, e):
        # known transient failure strings from server response or socket error
        messages = (
            "-ERR [SYS/TEMP] Temporary system problem. Please try again later.",
            "Connection timed out",
        )
        for message in messages:
            if message in strip(cstr(e.message)) or message in strip(cstr(getattr(e, 'strerror', ''))):
                return True
        return False

    def validate_pop(self, pop_meta):
        # throttle based on email size
        if not self.max_email_size:
            return

        # POP LIST entries look like 'msg_num size_in_octets'
        m, size = pop_meta.split()
        size = cint(size)

        if size < self.max_email_size:
            self.total_size += size
            if self.total_size > self.max_total_size:
                raise TotalSizeExceededError
        else:
            raise EmailSizeExceededError

    def make_error_msg(self, msg_num, incoming_mail):
        """Build a readable error message, including headers when available."""
        error_msg = "Error in retrieving email."
        if not incoming_mail:
            try:
                # retrieve headers
                incoming_mail = Email(b'\n'.join(self.pop.top(msg_num, 5)[1]))
            except:
                # best-effort only; a bare error message is acceptable
                pass

        if incoming_mail:
            error_msg += "\nDate: {date}\nFrom: {from_email}\nSubject: {subject}\n".format(
                date=incoming_mail.date, from_email=incoming_mail.from_email, subject=incoming_mail.subject)

        return error_msg
class Email:
    """Wrapper for an email: parses headers, body and attachments from a raw
    RFC822 message string."""
    def __init__(self, content):
        """Parses headers, content, attachments from given raw message.

        :param content: Raw message."""
        import email, email.utils
        import datetime

        self.raw = content
        self.mail = email.message_from_string(self.raw)

        self.text_content = ''
        self.html_content = ''
        self.attachments = []

        self.parse()
        self.set_content_and_type()
        self.set_subject()

        self.from_email = extract_email_id(self.mail["From"])
        self.from_real_name = email.utils.parseaddr(self.mail["From"])[0]

        if self.mail["Date"]:
            # Date header carries a tz offset; normalize to the user timezone
            utc = email.utils.mktime_tz(email.utils.parsedate_tz(self.mail["Date"]))
            utc_dt = datetime.datetime.utcfromtimestamp(utc)
            self.date = convert_utc_to_user_timezone(utc_dt).strftime('%Y-%m-%d %H:%M:%S')
        else:
            # no Date header: fall back to the current time
            self.date = now()

    def parse(self):
        """Walk and process multi-part email."""
        for part in self.mail.walk():
            self.process_part(part)

    def set_subject(self):
        """Parse and decode `Subject` header."""
        import email.header
        _subject = email.header.decode_header(self.mail.get("Subject", "No Subject"))
        self.subject = _subject[0][0] or ""
        if _subject[0][1]:
            # decode using the charset declared in the encoded-word
            self.subject = self.subject.decode(_subject[0][1])
        else:
            # assume that the encoding is utf-8
            self.subject = self.subject.decode("utf-8")

        if not self.subject:
            self.subject = "No Subject"

    def set_content_and_type(self):
        """Prefer HTML body over plain text; default to a blank placeholder."""
        self.content, self.content_type = '[Blank Email]', 'text/plain'
        if self.html_content:
            self.content, self.content_type = self.html_content, 'text/html'
        else:
            # strip quoted reply trails from plain-text bodies
            self.content, self.content_type = EmailReplyParser.parse_reply(self.text_content), 'text/plain'

    def process_part(self, part):
        """Parse email `part` and set it to `text_content`, `html_content` or `attachments`."""
        content_type = part.get_content_type()
        charset = part.get_content_charset()
        if not charset: charset = self.get_charset(part)

        if content_type == 'text/plain':
            self.text_content += self.get_payload(part, charset)

        if content_type == 'text/html':
            self.html_content += self.get_payload(part, charset)

        if part.get_filename():
            self.get_attachment(part, charset)

    def get_charset(self, part):
        """Detect charset, sniffing with chardet when it is not declared."""
        charset = part.get_content_charset()
        if not charset:
            import chardet
            charset = chardet.detect(str(part))['encoding']

        return charset

    def get_payload(self, part, charset):
        """Decode a MIME part to unicode, ignoring undecodable bytes."""
        try:
            return unicode(part.get_payload(decode=True),str(charset),"ignore")
        except LookupError:
            # unknown charset name: return the payload as-is
            return part.get_payload()

    def get_attachment(self, part, charset):
        """Collect a file attachment part into self.attachments."""
        fcontent = part.get_payload(decode=True)
        if fcontent:
            content_type = part.get_content_type()
            fname = part.get_filename()
            if fname:
                try:
                    fname = cstr(decode_header(fname)[0][0])
                except:
                    # undecodable filename: generate one from the content type
                    fname = get_random_filename(content_type=content_type)
            else:
                fname = get_random_filename(content_type=content_type)

            self.attachments.append({
                'content_type': content_type,
                'fname': fname,
                'fcontent': fcontent,
            })

    def save_attachments_in_doc(self, doc):
        """Save email attachments in given document."""
        from frappe.utils.file_manager import save_file, MaxFileSizeReachedError
        saved_attachments = []
        for attachment in self.attachments:
            try:
                file_data = save_file(attachment['fname'], attachment['fcontent'],
                    doc.doctype, doc.name)
                saved_attachments.append(file_data.file_name)
            except MaxFileSizeReachedError:
                # WARNING: bypass max file size exception
                pass
            except frappe.DuplicateEntryError:
                # same file attached twice??
                pass
        return saved_attachments

    def get_thread_id(self):
        """Extract thread ID from `[]` in the subject, e.g. '[ABC-123] Re: ...'."""
        import re
        l = re.findall('(?<=\[)[\w/-]+', self.subject)
        return l and l[0] or None
class TimerMixin(object):
    """Mixin adding a cumulative read-time budget to poplib/imaplib clients.

    Concrete subclasses set ``_super`` to the wrapped stdlib base class; the
    mixin delegates to it explicitly instead of using cooperative super().
    """
    def __init__(self, *args, **kwargs):
        # total time budget (seconds) for the session; 0 disables the check
        self.timeout = kwargs.pop('timeout', 0.0)
        self.elapsed_time = 0.0
        self._super.__init__(self, *args, **kwargs)
        if self.timeout:
            # set per operation timeout to one-fifth of total pop timeout
            self.sock.settimeout(self.timeout / 5.0)

    def _getline(self, *args, **kwargs):
        # time every server line read; abort once the total budget is spent
        start_time = time.time()
        ret = self._super._getline(self, *args, **kwargs)

        self.elapsed_time += time.time() - start_time
        if self.timeout and self.elapsed_time > self.timeout:
            raise EmailTimeoutError

        return ret

    def quit(self, *args, **kwargs):
        # reset the clock so a reused connection object starts fresh
        self.elapsed_time = 0.0
        return self._super.quit(self, *args, **kwargs)
# Concrete timed connection classes: TimerMixin wraps the stdlib client and
# `_super` names the stdlib base so the mixin can delegate to it explicitly.
class Timed_POP3(TimerMixin, poplib.POP3):
    _super = poplib.POP3

class Timed_POP3_SSL(TimerMixin, poplib.POP3_SSL):
    _super = poplib.POP3_SSL

class Timed_IMAP4(TimerMixin, imaplib.IMAP4):
    _super = imaplib.IMAP4

class Timed_IMAP4_SSL(TimerMixin, imaplib.IMAP4_SSL):
    _super = imaplib.IMAP4_SSL
| |
r"""
===============================================================================
Submodule -- generic_source_term
===============================================================================
"""
import numpy as _np
import scipy as _sp
def linear(physics, phase, A1='', A2='', x='', return_rate=True, **kwargs):
    r"""
    For the following source term:
        .. math::
            r = A_{1} x + A_{2}
    If return_rate is True, it returns the value of source term for the
    provided x in each pore.
    If return_rate is False, it calculates the slope and intercept for the
    following linear form:
        .. math::
            r = S_{1} x + S_{2}

    Parameters
    ----------
    A1 , A2 : string
        The property name of the coefficients in the source term model.
        With A2 set to zero this equation takes on the familiar form of r=kx.
    x : string or float/int or array/list
        The property name or numerical value or array for the main quantity

    Notes
    -----
    Because this source term is linear in concentration (x) it is not
    necessary to iterate during the solver step. Thus, when using the
    ``set_source_term`` method for an algorithm, it is recommended to set the
    ``maxiter`` argument to 0. This will save one unnecessary solution of the
    system, since the solution would converge after the first pass anyway.
    """
    # NOTE: literals are compared with ``==`` (the original used ``is``,
    # which relies on string interning and warns on modern CPython), and
    # array math uses numpy directly (scipy's top-level aliases are removed
    # in recent SciPy releases).
    if isinstance(x, str):
        if x == '':
            # No quantity supplied: emit NaN placeholders, one per pore.
            X = _np.ones(physics.Np) * _np.nan
        else:
            x = 'pore.' + x.split('.')[-1]
            try:
                X = physics[x]
            except KeyError:
                raise Exception(physics.name +
                                ' does not have the pore property :' + x + '!')
    else:
        X = _np.array(x)
        length_X = _np.size(X)
        if length_X != physics.Np:
            if length_X == 1:
                # Scalar value: broadcast to every pore of this physics.
                X = X * _np.ones(physics.Np)
            elif length_X >= phase.Np:
                # Phase-sized array: restrict to this physics' pores.
                X = X[physics.map_pores()]
            else:
                raise Exception('Wrong size for the numerical array of x!')
    # Resolve coefficients: '' means 0, otherwise a pore property name.
    a = {}
    for ind, A in enumerate([A1, A2]):
        key = str(ind + 1)
        if isinstance(A, str):
            if A == '':
                a[key] = 0
            else:
                prop = 'pore.' + A.split('.')[-1]
                try:
                    a[key] = physics[prop]
                except KeyError:
                    raise Exception(physics.name + '/' + phase.name +
                                    ' does not have the pore property :' +
                                    prop + '!')
        else:
            raise Exception('source_term parameters can only be string '
                            'type!')
    if return_rate:
        return a['1'] * X + a['2']
    else:
        # Already linear: slope is A1 and intercept is A2.
        S1 = a['1']
        S2 = a['2']
        return _np.vstack((S1, S2)).T
def power_law(physics, phase, A1='', A2='', A3='', x='',
              return_rate=True, **kwargs):
    r"""
    For the following source term:
        .. math::
            r = A_{1} x^{A_{2}} + A_{3}
    If return_rate is True, it returns the value of source term for the
    provided x in each pore.
    If return_rate is False, it calculates the slope and intercept for the
    following linear form (a first-order Taylor linearization about x):
        .. math::
            r = S_{1} x + S_{2}

    Parameters
    ----------
    A1 -> A3 : string
        The property name of the coefficients in the source term model
    x : string or float/int or array/list
        The property name or numerical value or array for the main quantity

    Notes
    -----
    """
    # Literal comparisons use ``==`` (not ``is``) and array math uses numpy
    # directly; see `linear` for rationale.
    if isinstance(x, str):
        if x == '':
            # No quantity supplied: emit NaN placeholders, one per pore.
            X = _np.ones(physics.Np) * _np.nan
        else:
            x = 'pore.' + x.split('.')[-1]
            try:
                X = physics[x]
            except KeyError:
                raise Exception(physics.name +
                                ' does not have the pore property :' + x + '!')
    else:
        X = _np.array(x)
        length_X = _np.size(X)
        if length_X != physics.Np:
            if length_X == 1:
                # Scalar value: broadcast to every pore of this physics.
                X = X * _np.ones(physics.Np)
            elif length_X >= phase.Np:
                # Phase-sized array: restrict to this physics' pores.
                X = X[physics.map_pores()]
            else:
                raise Exception('Wrong size for the numerical array of x!')
    # Resolve coefficients: '' means 0, otherwise a pore property name.
    a = {}
    for ind, A in enumerate([A1, A2, A3]):
        key = str(ind + 1)
        if isinstance(A, str):
            if A == '':
                a[key] = 0
            else:
                prop = 'pore.' + A.split('.')[-1]
                try:
                    a[key] = physics[prop]
                except KeyError:
                    raise Exception(physics.name + '/' + phase.name +
                                    ' does not have the pore property :' +
                                    prop + '!')
        else:
            raise Exception('source_term parameters can only be string '
                            'type!')
    if return_rate:
        return a['1'] * X ** a['2'] + a['3']
    else:
        # Tangent-line linearization: S1 = dr/dx, S2 = r - S1*x.
        S1 = a['1'] * a['2'] * X ** (a['2'] - 1)
        S2 = a['1'] * X ** a['2'] * (1 - a['2']) + a['3']
        return _np.vstack((S1, S2)).T
def exponential(physics, phase, A1='', A2='', A3='', A4='', A5='', A6='',
                x='', return_rate=True, **kwargs):
    r"""
    For the following source term:
        .. math::
            r = A_{1} A_{2}^{( A_{3} x^{ A_{4} } + A_{5})} + A_{6}
    If return_rate is True, it returns the value of source term for the
    provided x in each pore.
    If return_rate is False, it calculates the slope and intercept for the
    following linear form (a first-order Taylor linearization about x):
        .. math::
            r = S_{1} x + S_{2}

    Parameters
    ----------
    A1 -> A6 : string
        The property name of the coefficients in the source term model
    x : string or float/int or array/list
        The property name or numerical value or array for the main quantity

    Notes
    -----
    """
    # Literal comparisons use ``==`` (not ``is``) and array math uses numpy
    # directly; see `linear` for rationale.
    if isinstance(x, str):
        if x == '':
            # No quantity supplied: emit NaN placeholders, one per pore.
            X = _np.ones(physics.Np) * _np.nan
        else:
            x = 'pore.' + x.split('.')[-1]
            try:
                X = physics[x]
            except KeyError:
                raise Exception(physics.name +
                                ' does not have the pore property :' + x + '!')
    else:
        X = _np.array(x)
        length_X = _np.size(X)
        if length_X != physics.Np:
            if length_X == 1:
                # Scalar value: broadcast to every pore of this physics.
                X = X * _np.ones(physics.Np)
            elif length_X >= phase.Np:
                # Phase-sized array: restrict to this physics' pores.
                X = X[physics.map_pores()]
            else:
                raise Exception('Wrong size for the numerical array of x!')
    # Resolve coefficients: the multiplicative A1 defaults to 1, the rest
    # default to 0; otherwise values are looked up as pore properties.
    a = {}
    for ind, A in enumerate([A1, A2, A3, A4, A5, A6]):
        key = str(ind + 1)
        if isinstance(A, str):
            if A == '':
                a[key] = 1 if ind == 0 else 0
            else:
                prop = 'pore.' + A.split('.')[-1]
                try:
                    a[key] = physics[prop]
                except KeyError:
                    raise Exception(physics.name + '/' + phase.name +
                                    ' does not have the pore property :' +
                                    prop + '!')
        else:
            raise Exception('source_term parameters can only be string '
                            'type!')
    if return_rate:
        return a['1'] * a['2'] ** (a['3'] * X ** a['4'] + a['5']) + a['6']
    else:
        # Tangent-line linearization: S1 = dr/dx, S2 = r - S1*x.
        S1 = a['1'] * a['3'] * a['4'] * \
            X ** (a['4'] - 1) * _np.log(a['2']) * \
            a['2'] ** (a['3'] * X ** a['4'] + a['5'])
        S2 = a['1'] * a['2'] ** (a['3'] * X ** a['4'] + a['5']) * \
            (1 - a['3'] * a['4'] * _np.log(a['2']) * X ** a['4']) + a['6']
        return _np.vstack((S1, S2)).T
def natural_exponential(physics, phase, A1='', A2='', A3='', A4='', A5='',
                        x='', return_rate=True, **kwargs):
    r"""
    For the following source term:
        .. math::
            r = A_{1} exp( A_{2} x^{ A_{3} } + A_{4} )+ A_{5}
    If return_rate is True, it returns the value of source term for the
    provided x in each pore.
    If return_rate is False, it calculates the slope and intercept for the
    following linear form (a first-order Taylor linearization about x):
        .. math::
            r = S_{1} x + S_{2}

    Parameters
    ----------
    A1 -> A5 : string
        The property name of the coefficients in the source term model
    x : string or float/int or array/list
        The property name or numerical value or array for the main quantity

    Notes
    -----
    """
    # Literal comparisons use ``==`` (not ``is``) and array math uses numpy
    # directly; see `linear` for rationale.
    if isinstance(x, str):
        if x == '':
            # No quantity supplied: emit NaN placeholders, one per pore.
            X = _np.ones(physics.Np) * _np.nan
        else:
            x = 'pore.' + x.split('.')[-1]
            try:
                X = physics[x]
            except KeyError:
                raise Exception(physics.name +
                                ' does not have the pore property :' + x + '!')
    else:
        X = _np.array(x)
        length_X = _np.size(X)
        if length_X != physics.Np:
            if length_X == 1:
                # Scalar value: broadcast to every pore of this physics.
                X = X * _np.ones(physics.Np)
            elif length_X >= phase.Np:
                # Phase-sized array: restrict to this physics' pores.
                X = X[physics.map_pores()]
            else:
                raise Exception('Wrong size for the numerical array of x!')
    # Resolve coefficients: the multiplicative A1 defaults to 1, the rest
    # default to 0; otherwise values are looked up as pore properties.
    a = {}
    for ind, A in enumerate([A1, A2, A3, A4, A5]):
        key = str(ind + 1)
        if isinstance(A, str):
            if A == '':
                a[key] = 1 if ind == 0 else 0
            else:
                prop = 'pore.' + A.split('.')[-1]
                try:
                    a[key] = physics[prop]
                except KeyError:
                    raise Exception(physics.name + '/' + phase.name +
                                    ' does not have the pore property :' +
                                    prop + '!')
        else:
            raise Exception('source_term parameters can only be string '
                            'type!')
    if return_rate:
        return a['1'] * _np.exp(a['2'] * X ** a['3'] + a['4']) + a['5']
    else:
        # Tangent-line linearization: S1 = dr/dx, S2 = r - S1*x.
        S1 = a['1'] * a['2'] * \
            a['3'] * X ** (a['3'] - 1) * \
            _np.exp(a['2'] * X ** a['3'] + a['4'])
        S2 = a['1'] * (1 - a['2'] * a['3'] * X ** a['3']) * \
            _np.exp(a['2'] * X ** a['3'] + a['4']) + a['5']
        return _np.vstack((S1, S2)).T
def logarithm(physics, phase, A1='', A2='', A3='', A4='', A5='', A6='',
              x='', return_rate=True, **kwargs):
    r"""
    For the following source term:
        .. math::
            r = A_{1} Log_{ A_{2} }( A_{3} x^{ A_{4} }+ A_{5})+ A_{6}
    If return_rate is True, it returns the value of source term for the
    provided x in each pore.
    If return_rate is False, it calculates the slope and intercept for the
    following linear form (a first-order Taylor linearization about x):
        .. math::
            r = S_{1} x + S_{2}

    Parameters
    ----------
    A1 -> A6 : string
        The property name of the coefficients in the source term model
    x : string or float/int or array/list
        The property name or numerical value or array for the main quantity

    Notes
    -----
    """
    # Literal comparisons use ``==`` (not ``is``) and array math uses numpy
    # directly; see `linear` for rationale.
    if isinstance(x, str):
        if x == '':
            # No quantity supplied: emit NaN placeholders, one per pore.
            X = _np.ones(physics.Np) * _np.nan
        else:
            x = 'pore.' + x.split('.')[-1]
            try:
                X = physics[x]
            except KeyError:
                raise Exception(physics.name +
                                ' does not have the pore property :' + x + '!')
    else:
        X = _np.array(x)
        length_X = _np.size(X)
        if length_X != physics.Np:
            if length_X == 1:
                # Scalar value: broadcast to every pore of this physics.
                X = X * _np.ones(physics.Np)
            elif length_X >= phase.Np:
                # Phase-sized array: restrict to this physics' pores.
                X = X[physics.map_pores()]
            else:
                raise Exception('Wrong size for the numerical array of x!')
    # Resolve coefficients: the multiplicative A1 defaults to 1, the rest
    # default to 0; otherwise values are looked up as pore properties.
    a = {}
    for ind, A in enumerate([A1, A2, A3, A4, A5, A6]):
        key = str(ind + 1)
        if isinstance(A, str):
            if A == '':
                a[key] = 1 if ind == 0 else 0
            else:
                prop = 'pore.' + A.split('.')[-1]
                try:
                    a[key] = physics[prop]
                except KeyError:
                    raise Exception(physics.name + '/' + phase.name +
                                    ' does not have the pore property :' +
                                    prop + '!')
        else:
            raise Exception('source_term parameters can only be string '
                            'type!')
    if return_rate:
        # log base a2 computed via the change-of-base identity
        return (a['1'] * _np.log(a['3'] * X ** a['4'] + a['5']) /
                _np.log(a['2']) + a['6'])
    else:
        # Tangent-line linearization: S1 = dr/dx, S2 = r - S1*x.
        S1 = a['1'] * a['3'] * a['4'] * \
            X ** (a['4'] - 1) / \
            (_np.log(a['2']) * (a['3'] * X ** a['4'] + a['5']))
        S2 = a['1'] * _np.log(a['3'] * X ** a['4'] + a['5']) / \
            _np.log(a['2']) + a['6'] - a['1'] * a['3'] * \
            a['4'] * X ** a['4'] / \
            (_np.log(a['2']) * (a['3'] * X ** a['4'] + a['5']))
        return _np.vstack((S1, S2)).T
def natural_logarithm(physics, phase, A1='', A2='', A3='', A4='', A5='',
                      x='', return_rate=True, **kwargs):
    r"""
    For the following source term:
        .. math::
            r = A_{1} Ln( A_{2} x^{ A_{3} }+ A_{4})+ A_{5}
    If return_rate is True, it returns the value of source term for the
    provided x in each pore.
    If return_rate is False, it calculates the slope and intercept for the
    following linear form (a first-order Taylor linearization about x):
        .. math::
            r = S_{1} x + S_{2}

    Parameters
    ----------
    A1 -> A5 : string
        The property name of the coefficients in the source term model
    x : string or float/int or array/list
        The property name or numerical value or array for the main quantity

    Notes
    -----
    """
    # Literal comparisons use ``==`` (not ``is``) and array math uses numpy
    # directly; see `linear` for rationale.
    if isinstance(x, str):
        if x == '':
            # No quantity supplied: emit NaN placeholders, one per pore.
            X = _np.ones(physics.Np) * _np.nan
        else:
            x = 'pore.' + x.split('.')[-1]
            try:
                X = physics[x]
            except KeyError:
                raise Exception(physics.name +
                                ' does not have the pore property :' + x + '!')
    else:
        X = _np.array(x)
        length_X = _np.size(X)
        if length_X != physics.Np:
            if length_X == 1:
                # Scalar value: broadcast to every pore of this physics.
                X = X * _np.ones(physics.Np)
            elif length_X >= phase.Np:
                # Phase-sized array: restrict to this physics' pores.
                X = X[physics.map_pores()]
            else:
                raise Exception('Wrong size for the numerical array of x!')
    # Resolve coefficients: the multiplicative A1 defaults to 1, the rest
    # default to 0; otherwise values are looked up as pore properties.
    a = {}
    for ind, A in enumerate([A1, A2, A3, A4, A5]):
        key = str(ind + 1)
        if isinstance(A, str):
            if A == '':
                a[key] = 1 if ind == 0 else 0
            else:
                prop = 'pore.' + A.split('.')[-1]
                try:
                    a[key] = physics[prop]
                except KeyError:
                    raise Exception(physics.name + '/' + phase.name +
                                    ' does not have the pore property :' +
                                    prop + '!')
        else:
            raise Exception('source_term parameters can only be string '
                            'type!')
    if return_rate:
        return a['1'] * _np.log(a['2'] * X ** a['3'] + a['4']) + a['5']
    else:
        # Tangent-line linearization: S1 = dr/dx, S2 = r - S1*x.
        S1 = a['1'] * a['2'] * a['3'] * \
            X ** (a['3'] - 1) / \
            (a['2'] * X ** a['3'] + a['4'])
        S2 = a['1'] * _np.log(a['2'] * X ** a['3'] + a['4']) + \
            a['5'] - a['1'] * a['2'] * a['3'] * \
            X ** a['3'] / (a['2'] * X ** a['3'] + a['4'])
        return _np.vstack((S1, S2)).T
| |
#!/usr/bin/env python
"""
=============================================
dMRI: Connectivity - Camino, CMTK, FreeSurfer
=============================================
Introduction
============
This script, connectivity_tutorial.py, demonstrates the ability to perform connectivity mapping
using Nipype for pipelining, Freesurfer for Reconstruction / Parcellation, Camino for tensor-fitting
and tractography, and the Connectome Mapping Toolkit (CMTK) for connectivity analysis.
python connectivity_tutorial.py
We perform this analysis using the FSL course data, which can be acquired from here:
* http://www.fmrib.ox.ac.uk/fslcourse/fsl_course_data2.tar.gz
This pipeline also requires the Freesurfer directory for 'subj1' from the FSL course data.
To save time, this data can be downloaded from here:
* http://dl.dropbox.com/u/315714/subj1.zip?dl=1
A data package containing the outputs of this pipeline can be obtained from here:
* http://db.tt/1vx4vLeP
Along with Camino (http://web4.cs.ucl.ac.uk/research/medic/camino/pmwiki/pmwiki.php?n=Main.HomePage),
Camino-Trackvis (http://www.nitrc.org/projects/camino-trackvis/), FSL (http://www.fmrib.ox.ac.uk/fsl/),
and Freesurfer (http://surfer.nmr.mgh.harvard.edu/), you must also have the Connectome File Format
library installed as well as the Connectome Mapper.
These are written by Stephan Gerhard and can be obtained from:
http://www.cmtk.org/
Or on github at:
CFFlib: https://github.com/LTS5/cfflib
CMP: https://github.com/LTS5/cmp
Output data can be visualized in the ConnectomeViewer
ConnectomeViewer: https://github.com/LTS5/connectomeviewer
First, we import the necessary modules from nipype.
"""
import nipype.interfaces.io as nio # Data i/o
import nipype.interfaces.utility as util # utility
import nipype.pipeline.engine as pe # pypeline engine
import nipype.interfaces.camino as camino
import nipype.interfaces.fsl as fsl
import nipype.interfaces.camino2trackvis as cam2trk
import nipype.interfaces.freesurfer as fs # freesurfer
import nipype.interfaces.cmtk as cmtk
import nipype.algorithms.misc as misc
import inspect
import os.path as op # system functions
import cmp # connectome mapper
"""
We define the following functions to scrape the voxel and data dimensions of the input images. This allows the
pipeline to be flexible enough to accept and process images of varying size. The SPM Face tutorial
(fmri_spm_face.py) also implements this inference of voxel size from the data. We also define functions to
select the proper parcellation/segregation file from Freesurfer's output for each subject. For the mapping in
this tutorial, we use the aparc+aseg.mgz file. While it is possible to change this to use the regions defined in
aparc.a2009s+aseg.mgz, one would also have to write/obtain a network resolution map defining the nodes based on those
regions.
"""
def get_vox_dims(volume):
    """Return the first three voxel dimensions of *volume* as floats.

    Accepts a filename or a list of filenames (first entry used), since
    nipype may pass either through a connected Function node. The import
    is kept inside the function so nipype can serialize it standalone.
    """
    import nibabel as nb
    path = volume[0] if isinstance(volume, list) else volume
    zooms = nb.load(path).get_header().get_zooms()
    return [float(zooms[i]) for i in range(3)]
def get_data_dims(volume):
    """Return the first three data-array dimensions of *volume* as ints.

    Accepts a filename or a list of filenames (first entry used). The
    import stays inside the function for nipype Function-node serialization.
    """
    import nibabel as nb
    path = volume[0] if isinstance(volume, list) else volume
    shape = nb.load(path).get_header().get_data_shape()
    return [int(shape[i]) for i in range(3)]
def get_affine(volume):
    """Return the affine matrix of the image at *volume*."""
    import nibabel as nb
    return nb.load(volume).get_affine()
def select_aparc(list_of_files):
    """Return the 'aparc+aseg.mgz' entry from a FreeSurfer output file list.

    Returns None (implicitly) when no entry matches, mirroring the
    original behaviour.
    """
    for candidate in list_of_files:
        if 'aparc+aseg.mgz' in candidate:
            return candidate
def select_aparc_annot(list_of_files):
    """Return the first '.aparc.annot' file (e.g. lh.aparc.annot) from a list.

    More specific parcellations such as lh.aparc.a2009s.annot do not
    match the substring and are skipped.  Returns None when no matching
    file is present.  Returning the matched element directly replaces a
    redundant list.index() lookup.
    """
    for in_file in list_of_files:
        if '.aparc.annot' in in_file:
            return in_file
    return None
"""
These need to point to the main Freesurfer directory as well as the freesurfer subjects directory.
No assumptions are made about where the directory of subjects is placed.
Recon-all must have been run on subj1 from the FSL course data.
"""
# Root of the FreeSurfer installation; adapt to your local setup.
fs_dir = op.abspath('/usr/local/freesurfer')
# Directory holding the recon-all output for each subject.
subjects_dir = op.abspath(op.join(op.curdir,'./subjects'))
# Emit uncompressed NIFTI from every FSL interface used below.
fsl.FSLCommand.set_default_output_type('NIFTI')
"""
This needs to point to the fdt folder you can find after extracting
http://www.fmrib.ox.ac.uk/fslcourse/fsl_course_data2.tar.gz
"""
data_dir = op.abspath('fsl_course_data/fdt/')
fs.FSCommand.set_default_subjects_dir(subjects_dir)
# Subjects to process; add names here to run more subjects.
subject_list = ['subj1']
"""
An infosource node is used to loop through the subject list and define the input files.
For our purposes, these are the diffusion-weighted MR image, b vectors, and b values.
The info dictionary is used to provide a template of the naming of these files. For instance,
the 4D nifti diffusion image is stored in the FSL course data as data.nii.gz.
"""
# Iterates over the subject list, emitting one 'subject_id' per run.
infosource = pe.Node(interface=util.IdentityInterface(fields=['subject_id']), name="infosource")
infosource.iterables = ('subject_id', subject_list)
# Maps each DataGrabber outfield to its template arguments:
# the "%s/%s" template expands to <subject_id>/<basename>.
info = dict(dwi=[['subject_id', 'data']],
            bvecs=[['subject_id','bvecs']],
            bvals=[['subject_id','bvals']])
"""
A datasource node is used to perform the actual data grabbing.
Templates for the associated images are used to obtain the correct images.
The data are assumed to lie in data_dir/subject_id/.
"""
# Grabs dwi/bvecs/bvals for each subject from data_dir/<subject_id>/.
datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'],
                                               outfields=info.keys()),
                     name = 'datasource')
datasource.inputs.template = "%s/%s"
datasource.inputs.base_directory = data_dir
# The diffusion image is gzipped NIFTI, so it needs its own template;
# bvecs/bvals fall back to the generic "%s/%s" template above.
datasource.inputs.field_template = dict(dwi='%s/%s.nii.gz')
datasource.inputs.template_args = info
"""
FreeSurferSource nodes are used to retrieve a number of image
files that were automatically generated by the recon-all process.
Here we use three of these nodes, two of which are defined to return files for solely the left and right hemispheres.
"""
# FreeSurferSource nodes pull recon-all outputs; the LH/RH variants
# restrict surface outputs to a single hemisphere.
FreeSurferSource = pe.Node(interface=nio.FreeSurferSource(), name='fssource')
FreeSurferSource.inputs.subjects_dir = subjects_dir
FreeSurferSourceLH = pe.Node(interface=nio.FreeSurferSource(), name='fssourceLH')
FreeSurferSourceLH.inputs.subjects_dir = subjects_dir
FreeSurferSourceLH.inputs.hemi = 'lh'
FreeSurferSourceRH = pe.Node(interface=nio.FreeSurferSource(), name='fssourceRH')
FreeSurferSourceRH.inputs.subjects_dir = subjects_dir
FreeSurferSourceRH.inputs.hemi = 'rh'
"""
Since the b values and b vectors come from the FSL course, we must convert it to a scheme file
for use in Camino.
"""
fsl2scheme = pe.Node(interface=camino.FSL2Scheme(), name="fsl2scheme")
fsl2scheme.inputs.usegradmod = True
"""
FSL's Brain Extraction tool is used to create a mask from the b0 image
"""
b0Strip = pe.Node(interface=fsl.BET(mask = True), name = 'bet_b0')
"""
FSL's FLIRT function is used to coregister the b0 mask and the structural image.
A convert_xfm node is then used to obtain the inverse of the transformation matrix.
FLIRT is used once again to apply the inverse transformation to the parcellated brain image.
"""
coregister = pe.Node(interface=fsl.FLIRT(dof=6), name = 'coregister')
coregister.inputs.cost = ('corratio')
convertxfm = pe.Node(interface=fsl.ConvertXFM(), name = 'convertxfm')
convertxfm.inputs.invert_xfm = True
inverse = pe.Node(interface=fsl.FLIRT(), name = 'inverse')
# Nearest-neighbour interpolation preserves integer parcellation labels.
inverse.inputs.interp = ('nearestneighbour')
inverse_AparcAseg = pe.Node(interface=fsl.FLIRT(), name = 'inverse_AparcAseg')
inverse_AparcAseg.inputs.interp = ('nearestneighbour')
"""
A number of conversion operations are required to obtain NIFTI files from the FreesurferSource for each subject.
Nodes are used to convert the following:
* Original structural image to NIFTI
* Parcellated white matter image to NIFTI
* Parcellated whole-brain image to NIFTI
* Pial, white, inflated, and spherical surfaces for both the left and right hemispheres
are converted to GIFTI for visualization in ConnectomeViewer
* Parcellated annotation files for the left and right hemispheres are also converted to GIFTI
"""
mri_convert_Brain = pe.Node(interface=fs.MRIConvert(), name='mri_convert_Brain')
mri_convert_Brain.inputs.out_type = 'nii'
# Clones reuse the same conversion settings for the other volumes/surfaces.
mri_convert_WMParc = mri_convert_Brain.clone('mri_convert_WMParc')
mri_convert_AparcAseg = mri_convert_Brain.clone('mri_convert_AparcAseg')
mris_convertLH = pe.Node(interface=fs.MRIsConvert(), name='mris_convertLH')
mris_convertLH.inputs.out_datatype = 'gii'
mris_convertRH = mris_convertLH.clone('mris_convertRH')
mris_convertRHwhite = mris_convertLH.clone('mris_convertRHwhite')
mris_convertLHwhite = mris_convertLH.clone('mris_convertLHwhite')
mris_convertRHinflated = mris_convertLH.clone('mris_convertRHinflated')
mris_convertLHinflated = mris_convertLH.clone('mris_convertLHinflated')
mris_convertRHsphere = mris_convertLH.clone('mris_convertRHsphere')
mris_convertLHsphere = mris_convertLH.clone('mris_convertLHsphere')
mris_convertLHlabels = mris_convertLH.clone('mris_convertLHlabels')
mris_convertRHlabels = mris_convertLH.clone('mris_convertRHlabels')
"""
An inputnode is used to pass the data obtained by the data grabber to the actual processing functions
"""
inputnode = pe.Node(interface=util.IdentityInterface(fields=["dwi", "bvecs", "bvals", "subject_id"]), name="inputnode")
"""
In this section we create the nodes necessary for diffusion analysis.
First, the diffusion image is converted to voxel order, since this is the format in which Camino does
its processing.
"""
image2voxel = pe.Node(interface=camino.Image2Voxel(), name="image2voxel")
"""
Second, diffusion tensors are fit to the voxel-order data.
If desired, these tensors can be converted to a Nifti tensor image using the DT2NIfTI interface.
"""
dtifit = pe.Node(interface=camino.DTIFit(),name='dtifit')
"""
Next, a lookup table is generated from the schemefile and the
signal-to-noise ratio (SNR) of the unweighted (q=0) data.
"""
dtlutgen = pe.Node(interface=camino.DTLUTGen(), name="dtlutgen")
dtlutgen.inputs.snr = 16.0
dtlutgen.inputs.inversion = 1
"""
In this tutorial we implement probabilistic tractography using the PICo algorithm.
PICo tractography requires an estimate of the fibre direction and a model of its uncertainty in each voxel;
this probabilitiy distribution map is produced using the following node.
"""
picopdfs = pe.Node(interface=camino.PicoPDFs(), name="picopdfs")
picopdfs.inputs.inputmodel = 'dt'
"""
Finally, tractography is performed. In this tutorial, we will use only one iteration for time-saving purposes.
It is important to note that we use the TrackPICo interface here. This interface now expects the files required
for PICo tracking (i.e. the output from picopdfs). Similar interfaces exist for alternative types of tracking,
such as Bayesian tracking with Dirac priors (TrackBayesDirac).
"""
track = pe.Node(interface=camino.TrackPICo(), name="track")
track.inputs.iterations = 1
"""
Currently, the best program for visualizing tracts is TrackVis. For this reason, a node is included to
convert the raw tract data to .trk format. Solely for testing purposes, another node is added to perform the reverse.
"""
camino2trackvis = pe.Node(interface=cam2trk.Camino2Trackvis(), name="camino2trk")
camino2trackvis.inputs.min_length = 30
camino2trackvis.inputs.voxel_order = 'LAS'
trk2camino = pe.Node(interface=cam2trk.Trackvis2Camino(), name="trk2camino")
"""
Tracts can also be converted to VTK and OOGL formats, for use in programs such as GeomView and Paraview,
using the following two nodes.
"""
vtkstreamlines = pe.Node(interface=camino.VtkStreamlines(), name="vtkstreamlines")
procstreamlines = pe.Node(interface=camino.ProcStreamlines(), name="procstreamlines")
procstreamlines.inputs.outputtracts = 'oogl'
"""
We can easily produce a variety of scalar values from our fitted tensors. The following nodes generate the
fractional anisotropy and diffusivity trace maps and their associated headers, and then merge them back
into a single .nii file.
"""
fa = pe.Node(interface=camino.ComputeFractionalAnisotropy(),name='fa')
trace = pe.Node(interface=camino.ComputeTensorTrace(),name='trace')
dteig = pe.Node(interface=camino.ComputeEigensystem(), name='dteig')
analyzeheader_fa = pe.Node(interface=camino.AnalyzeHeader(),name='analyzeheader_fa')
analyzeheader_fa.inputs.datatype = 'double'
analyzeheader_trace = pe.Node(interface=camino.AnalyzeHeader(),name='analyzeheader_trace')
analyzeheader_trace.inputs.datatype = 'double'
fa2nii = pe.Node(interface=misc.CreateNifti(),name='fa2nii')
trace2nii = fa2nii.clone("trace2nii")
"""
This section adds the Connectome Mapping Toolkit (CMTK) nodes.
These interfaces are fairly experimental and may not function properly.
In order to perform connectivity mapping using CMTK, the parcellated structural data is rewritten
using the indices and parcellation scheme from the connectome mapper (CMP). This process has been
written into the ROIGen interface, which will output a remapped aparc+aseg image as well as a
dictionary of label information (i.e. name, display colours) pertaining to the original and remapped regions.
These label values are input from a user-input lookup table, if specified, and otherwise the default
Freesurfer LUT (/freesurfer/FreeSurferColorLUT.txt).
"""
roigen = pe.Node(interface=cmtk.ROIGen(), name="ROIGen")
cmp_config = cmp.configuration.PipelineConfiguration(parcellation_scheme = "NativeFreesurfer")
cmp_config.parcellation_scheme = "NativeFreesurfer"
roigen.inputs.LUT_file = cmp_config.get_freeview_lut("NativeFreesurfer")['freesurferaparc']
roigen_structspace = roigen.clone('ROIGen_structspace')
"""
The CreateMatrix interface takes in the remapped aparc+aseg image as well as the label dictionary and fiber tracts
and outputs a number of different files. The most important of which is the connectivity network itself, which is stored
as a 'gpickle' and can be loaded using Python's NetworkX package (see CreateMatrix docstring). Also outputted are various
NumPy arrays containing detailed tract information, such as the start and endpoint regions, and statistics on the mean and
standard deviation for the fiber length of each connection. These matrices can be used in the ConnectomeViewer to plot the
specific tracts that connect between user-selected regions.
"""
creatematrix = pe.Node(interface=cmtk.CreateMatrix(), name="CreateMatrix")
creatematrix.inputs.count_region_intersections = True
createnodes = pe.Node(interface=cmtk.CreateNodes(), name="CreateNodes")
createnodes.inputs.resolution_network_file = cmp_config.parcellation['freesurferaparc']['node_information_graphml']
"""
Here we define the endpoint of this tutorial, which is the CFFConverter node, as well as a few nodes which use
the Nipype Merge utility. These are useful for passing lists of the files we want packaged in our CFF file.
"""
CFFConverter = pe.Node(interface=cmtk.CFFConverter(), name="CFFConverter")
giftiSurfaces = pe.Node(interface=util.Merge(8), name="GiftiSurfaces")
giftiLabels = pe.Node(interface=util.Merge(2), name="GiftiLabels")
niftiVolumes = pe.Node(interface=util.Merge(3), name="NiftiVolumes")
fiberDataArrays = pe.Node(interface=util.Merge(4), name="FiberDataArrays")
gpickledNetworks = pe.Node(interface=util.Merge(1), name="NetworkFiles")
"""
Since we have now created all our nodes, we can define our workflow and start making connections.
"""
# The inner workflow: all processing for a single subject.
mapping = pe.Workflow(name='mapping')
"""
First, we connect the input node to the early conversion functions.
FreeSurfer input nodes:
"""
mapping.connect([(inputnode, FreeSurferSource,[("subject_id","subject_id")])])
mapping.connect([(inputnode, FreeSurferSourceLH,[("subject_id","subject_id")])])
mapping.connect([(inputnode, FreeSurferSourceRH,[("subject_id","subject_id")])])
"""
Required conversions for processing in Camino:
"""
mapping.connect([(inputnode, image2voxel, [("dwi", "in_file")]),
                 (inputnode, fsl2scheme, [("bvecs", "bvec_file"),
                                          ("bvals", "bval_file")]),
                 (image2voxel, dtifit,[['voxel_order','in_file']]),
                 (fsl2scheme, dtifit,[['scheme','scheme_file']])
                 ])
"""
Nifti conversions for the parcellated white matter image (used in Camino's conmap),
and the subject's stripped brain image from Freesurfer:
"""
mapping.connect([(FreeSurferSource, mri_convert_WMParc,[('wmparc','in_file')])])
mapping.connect([(FreeSurferSource, mri_convert_Brain,[('brain','in_file')])])
"""
Surface conversions to GIFTI (pial, white, inflated, and sphere for both hemispheres)
"""
mapping.connect([(FreeSurferSourceLH, mris_convertLH,[('pial','in_file')])])
mapping.connect([(FreeSurferSourceRH, mris_convertRH,[('pial','in_file')])])
mapping.connect([(FreeSurferSourceLH, mris_convertLHwhite,[('white','in_file')])])
mapping.connect([(FreeSurferSourceRH, mris_convertRHwhite,[('white','in_file')])])
mapping.connect([(FreeSurferSourceLH, mris_convertLHinflated,[('inflated','in_file')])])
mapping.connect([(FreeSurferSourceRH, mris_convertRHinflated,[('inflated','in_file')])])
mapping.connect([(FreeSurferSourceLH, mris_convertLHsphere,[('sphere','in_file')])])
mapping.connect([(FreeSurferSourceRH, mris_convertRHsphere,[('sphere','in_file')])])
"""
The annotation files are converted using the pial surface as a map via the MRIsConvert interface.
One of the functions defined earlier is used to select the lh.aparc.annot and rh.aparc.annot files
specifically (rather than i.e. rh.aparc.a2009s.annot) from the output list given by the FreeSurferSource.
"""
mapping.connect([(FreeSurferSourceLH, mris_convertLHlabels,[('pial','in_file')])])
mapping.connect([(FreeSurferSourceRH, mris_convertRHlabels,[('pial','in_file')])])
mapping.connect([(FreeSurferSourceLH, mris_convertLHlabels, [(('annot', select_aparc_annot), 'annot_file')])])
mapping.connect([(FreeSurferSourceRH, mris_convertRHlabels, [(('annot', select_aparc_annot), 'annot_file')])])
"""
This section coregisters the diffusion-weighted and parcellated white-matter / whole brain images.
At present the conmap node connection is left commented, as there have been recent changes in Camino
code that have presented some users with errors.
"""
mapping.connect([(inputnode, b0Strip,[('dwi','in_file')])])
mapping.connect([(b0Strip, coregister,[('out_file','in_file')])])
mapping.connect([(mri_convert_Brain, coregister,[('out_file','reference')])])
mapping.connect([(coregister, convertxfm,[('out_matrix_file','in_file')])])
mapping.connect([(b0Strip, inverse,[('out_file','reference')])])
mapping.connect([(convertxfm, inverse,[('out_file','in_matrix_file')])])
mapping.connect([(mri_convert_WMParc, inverse,[('out_file','in_file')])])
"""
The tractography pipeline consists of the following nodes. Further information about the tractography
can be found in nipype/examples/dmri_camino_dti.py.
"""
mapping.connect([(b0Strip, track,[("mask_file","seed_file")])])
mapping.connect([(fsl2scheme, dtlutgen,[("scheme","scheme_file")])])
mapping.connect([(dtlutgen, picopdfs,[("dtLUT","luts")])])
mapping.connect([(dtifit, picopdfs,[("tensor_fitted","in_file")])])
mapping.connect([(picopdfs, track,[("pdfs","in_file")])])
"""
Connecting the Fractional Anisotropy and Trace nodes is simple, as they obtain their input from the
tensor fitting. This is also where our voxel- and data-grabbing functions come in. We pass these functions,
along with the original DWI image from the input node, to the header-generating nodes. This ensures that the
files will be correct and readable.
"""
mapping.connect([(dtifit, fa,[("tensor_fitted","in_file")])])
mapping.connect([(fa, analyzeheader_fa,[("fa","in_file")])])
mapping.connect([(inputnode, analyzeheader_fa,[(('dwi', get_vox_dims), 'voxel_dims'),
                                               (('dwi', get_data_dims), 'data_dims')])])
mapping.connect([(fa, fa2nii,[('fa','data_file')])])
mapping.connect([(inputnode, fa2nii,[(('dwi', get_affine), 'affine')])])
mapping.connect([(analyzeheader_fa, fa2nii,[('header', 'header_file')])])
mapping.connect([(dtifit, trace,[("tensor_fitted","in_file")])])
mapping.connect([(trace, analyzeheader_trace,[("trace","in_file")])])
mapping.connect([(inputnode, analyzeheader_trace,[(('dwi', get_vox_dims), 'voxel_dims'),
                                                  (('dwi', get_data_dims), 'data_dims')])])
mapping.connect([(trace, trace2nii,[('trace','data_file')])])
mapping.connect([(inputnode, trace2nii,[(('dwi', get_affine), 'affine')])])
mapping.connect([(analyzeheader_trace, trace2nii,[('header', 'header_file')])])
mapping.connect([(dtifit, dteig,[("tensor_fitted","in_file")])])
"""
The output tracts are converted to Trackvis format (and back). Here we also use the voxel- and data-grabbing
functions defined at the beginning of the pipeline.
"""
# Tract format conversions (Camino -> TrackVis -> back, plus VTK).
mapping.connect([(track, camino2trackvis, [('tracked','in_file')]),
                 (track, vtkstreamlines,[['tracked','in_file']]),
                 (camino2trackvis, trk2camino,[['trackvis','in_file']])
                 ])
mapping.connect([(inputnode, camino2trackvis,[(('dwi', get_vox_dims), 'voxel_dims'),
                                              (('dwi', get_data_dims), 'data_dims')])])
"""
Here the CMTK connectivity mapping nodes are connected.
The original aparc+aseg image is converted to NIFTI, then registered to
the diffusion image and delivered to the ROIGen node. The remapped parcellation,
original tracts, and label file are then given to CreateMatrix.
"""
mapping.connect(createnodes, 'node_network',
                creatematrix, 'resolution_network_file')
mapping.connect([(FreeSurferSource, mri_convert_AparcAseg, [(('aparc_aseg', select_aparc), 'in_file')])])
mapping.connect([(b0Strip, inverse_AparcAseg,[('out_file','reference')])])
mapping.connect([(convertxfm, inverse_AparcAseg,[('out_file','in_matrix_file')])])
mapping.connect([(mri_convert_AparcAseg, inverse_AparcAseg,[('out_file','in_file')])])
mapping.connect([(mri_convert_AparcAseg, roigen_structspace,[('out_file','aparc_aseg_file')])])
mapping.connect([(roigen_structspace, createnodes,[("roi_file","roi_file")])])
mapping.connect([(inverse_AparcAseg, roigen,[("out_file","aparc_aseg_file")])])
mapping.connect([(roigen, creatematrix,[("roi_file","roi_file")])])
mapping.connect([(camino2trackvis, creatematrix,[("trackvis","tract_file")])])
# The subject id doubles as the output matrix basename.
mapping.connect([(inputnode, creatematrix,[("subject_id","out_matrix_file")])])
mapping.connect([(inputnode, creatematrix,[("subject_id","out_matrix_mat_file")])])
"""
The merge nodes defined earlier are used here to create lists of the files which are
destined for the CFFConverter.
"""
mapping.connect([(creatematrix, gpickledNetworks,[("matrix_files","in1")])])
mapping.connect([(mris_convertLH, giftiSurfaces,[("converted","in1")])])
mapping.connect([(mris_convertRH, giftiSurfaces,[("converted","in2")])])
mapping.connect([(mris_convertLHwhite, giftiSurfaces,[("converted","in3")])])
mapping.connect([(mris_convertRHwhite, giftiSurfaces,[("converted","in4")])])
mapping.connect([(mris_convertLHinflated, giftiSurfaces,[("converted","in5")])])
mapping.connect([(mris_convertRHinflated, giftiSurfaces,[("converted","in6")])])
mapping.connect([(mris_convertLHsphere, giftiSurfaces,[("converted","in7")])])
mapping.connect([(mris_convertRHsphere, giftiSurfaces,[("converted","in8")])])
mapping.connect([(mris_convertLHlabels, giftiLabels,[("converted","in1")])])
mapping.connect([(mris_convertRHlabels, giftiLabels,[("converted","in2")])])
mapping.connect([(roigen, niftiVolumes,[("roi_file","in1")])])
mapping.connect([(inputnode, niftiVolumes,[("dwi","in2")])])
mapping.connect([(mri_convert_Brain, niftiVolumes,[("out_file","in3")])])
mapping.connect([(creatematrix, fiberDataArrays,[("endpoint_file","in1")])])
mapping.connect([(creatematrix, fiberDataArrays,[("endpoint_file_mm","in2")])])
mapping.connect([(creatematrix, fiberDataArrays,[("fiber_length_file","in3")])])
mapping.connect([(creatematrix, fiberDataArrays,[("fiber_label_file","in4")])])
"""
This block actually connects the merged lists to the CFF converter. We pass the surfaces
and volumes that are to be included, as well as the tracts and the network itself. The currently
running pipeline (dmri_connectivity.py) is also scraped and included in the CFF file. This
makes it easy for the user to examine the entire processing pathway used to generate the end
product.
"""
# Embed this very script in the CFF file for provenance.
CFFConverter.inputs.script_files = op.abspath(inspect.getfile(inspect.currentframe()))
mapping.connect([(giftiSurfaces, CFFConverter,[("out","gifti_surfaces")])])
mapping.connect([(giftiLabels, CFFConverter,[("out","gifti_labels")])])
mapping.connect([(gpickledNetworks, CFFConverter,[("out","gpickled_networks")])])
mapping.connect([(niftiVolumes, CFFConverter,[("out","nifti_volumes")])])
mapping.connect([(fiberDataArrays, CFFConverter,[("out","data_files")])])
mapping.connect([(creatematrix, CFFConverter,[("filtered_tractographies","tract_files")])])
mapping.connect([(inputnode, CFFConverter,[("subject_id","title")])])
"""
Finally, we create another higher-level workflow to connect our mapping workflow with the info and datagrabbing nodes
declared at the beginning. Our tutorial is now extensible to any arbitrary number of subjects by simply adding
their names to the subject list and their data to the proper folders.
"""
connectivity = pe.Workflow(name="connectivity")
connectivity.base_dir = op.abspath('dmri_connectivity')
connectivity.connect([
                    (infosource,datasource,[('subject_id', 'subject_id')]),
                    (datasource,mapping,[('dwi','inputnode.dwi'),
                                         ('bvals','inputnode.bvals'),
                                         ('bvecs','inputnode.bvecs')
                                         ]),
                    (infosource,mapping,[('subject_id','inputnode.subject_id')])
                ])
"""
The following functions run the whole workflow and produce graphs describing the processing pipeline.
By default, write_graph outputs a .dot file and a .png image, but here we set it to output the image
as a vector graphic, by passing the format='eps' argument.
"""
if __name__ == '__main__':
    connectivity.run()
    connectivity.write_graph(format='eps')
"""
The output CFF file of this pipeline can be loaded in the Connectome Viewer (http://www.cmtk.org)
After loading the network into memory it can be examined in 3D or as a connectivity matrix
using the default scripts produced by the Code Oracle.
To compare networks, one must use the MergeCNetworks interface to merge two networks into
a single CFF file. Statistics can then be run using the Network Brain Statistics (NBS) plugin
Surfaces can also be loaded along with their labels from the aparc+aseg file. The tractography
is included in the file so that region-to-region fibers can be individually plotted using the
Code Oracle.
"""
| |
# coding: utf-8
"""
Onshape REST API
The Onshape REST API consumed by all clients. # noqa: E501
The version of the OpenAPI document: 1.113
Contact: api-support@onshape.zendesk.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
import sys # noqa: F401
# python 2 and python 3 compatibility library
import six
from onshape_client.oas.api_client import ApiClient
from onshape_client.oas.exceptions import ApiTypeError, ApiValueError
from onshape_client.oas.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
int,
none_type,
str,
validate_and_convert_types,
)
from onshape_client.oas.models import bt_translation_request_info
from onshape_client.oas.models import form_data_body_part
from onshape_client.oas.models import form_data_content_disposition
from onshape_client.oas.models import bt_model_format_full_info
from onshape_client.oas.models import bt_list_response_bt_translation_request_info
class TranslationsApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
        # NOTE(review): auto-generated by OpenAPI Generator; keep edits to
        # comments only and regenerate the client to change behaviour.
        def __create_translation(self, did, wid, **kwargs):
            """Create translation from upload. # noqa: E501
            This method makes a synchronous HTTP request by default. To make an
            asynchronous HTTP request, please pass async_req=True
            >>> thread = api.create_translation(did, wid, async_req=True)
            >>> result = thread.get()
            Args:
                did (str):
                wid (str):
            Keyword Args:
                allow_faulty_parts (bool): [optional]
                create_composite (bool): [optional]
                create_drawing_if_possible (bool): [optional]
                encoded_filename (str): [optional]
                extract_assembly_hierarchy (bool): [optional]
                file (file_type): [optional]
                file_body_with_details (form_data_body_part.FormDataBodyPart): [optional]
                file_content_length (int): [optional]
                file_detail (form_data_content_disposition.FormDataContentDisposition): [optional]
                flatten_assemblies (bool): [optional]
                format_name (str): [optional]
                isy_axis_is_up (bool): [optional]
                join_adjacent_surfaces (bool): [optional]
                location_element_id (str): [optional]
                location_group_id (str): [optional]
                location_position (int): [optional]
                notify_user (bool): [optional]
                owner_id (str): [optional]
                owner_type (str): [optional]
                parent_id (str): [optional]
                project_id (str): [optional]
                public (bool): [optional]
                split_assemblies_into_multiple_documents (bool): [optional]
                store_in_document (bool): [optional]
                translate (bool): [optional]
                unit (str): [optional]
                upload_id (str): [optional]
                version_string (str): [optional]
                _return_http_data_only (bool): response data without head status
                    code and headers. Default is True.
                _preload_content (bool): if False, the urllib3.HTTPResponse object
                    will be returned without reading/decoding response data.
                    Default is True.
                _request_timeout (float/tuple): timeout setting for this request. If one
                    number provided, it will be total request timeout. It can also
                    be a pair (tuple) of (connection, read) timeouts.
                    Default is None.
                _check_input_type (bool): specifies if type checking
                    should be done on the data sent to the server.
                    Default is True.
                _check_return_type (bool): specifies if type checking
                    should be done on the data received from the server.
                    Default is True.
                _host_index (int): specifies the index of the server
                    that we want to use.
                    Default is 0.
                async_req (bool): execute request asynchronously
            Returns:
                bt_translation_request_info.BTTranslationRequestInfo
                    If the method is called asynchronously, returns the request
                    thread.
            """
            # Fill in the standard transport options so call_with_http_info
            # always receives a complete kwargs set.
            kwargs["async_req"] = kwargs.get("async_req", False)
            kwargs["_return_http_data_only"] = kwargs.get(
                "_return_http_data_only", True
            )
            kwargs["_preload_content"] = kwargs.get("_preload_content", True)
            kwargs["_request_timeout"] = kwargs.get("_request_timeout", None)
            kwargs["_check_input_type"] = kwargs.get("_check_input_type", True)
            kwargs["_check_return_type"] = kwargs.get("_check_return_type", True)
            kwargs["_host_index"] = kwargs.get("_host_index", 0)
            kwargs["did"] = did
            kwargs["wid"] = wid
            # Here 'self' is the Endpoint object this function is attached to
            # (it is passed as the callable when the Endpoint is constructed).
            return self.call_with_http_info(**kwargs)
self.create_translation = Endpoint(
settings={
"response_type": (
bt_translation_request_info.BTTranslationRequestInfo,
),
"auth": ["OAuth2"],
"endpoint_path": "/api/translations/d/{did}/w/{wid}",
"operation_id": "create_translation",
"http_method": "POST",
"servers": [],
},
params_map={
"all": [
"did",
"wid",
"allow_faulty_parts",
"create_composite",
"create_drawing_if_possible",
"encoded_filename",
"extract_assembly_hierarchy",
"file",
"file_body_with_details",
"file_content_length",
"file_detail",
"flatten_assemblies",
"format_name",
"isy_axis_is_up",
"join_adjacent_surfaces",
"location_element_id",
"location_group_id",
"location_position",
"notify_user",
"owner_id",
"owner_type",
"parent_id",
"project_id",
"public",
"split_assemblies_into_multiple_documents",
"store_in_document",
"translate",
"unit",
"upload_id",
"version_string",
],
"required": ["did", "wid",],
"nullable": [],
"enum": ["owner_type",],
"validation": [],
},
root_map={
"validations": {},
"allowed_values": {
("owner_type",): {
"USER": "USER",
"COMPANY": "COMPANY",
"ONSHAPE": "ONSHAPE",
},
},
"openapi_types": {
"did": (str,),
"wid": (str,),
"allow_faulty_parts": (bool,),
"create_composite": (bool,),
"create_drawing_if_possible": (bool,),
"encoded_filename": (str,),
"extract_assembly_hierarchy": (bool,),
"file": (file_type,),
"file_body_with_details": (form_data_body_part.FormDataBodyPart,),
"file_content_length": (int,),
"file_detail": (
form_data_content_disposition.FormDataContentDisposition,
),
"flatten_assemblies": (bool,),
"format_name": (str,),
"isy_axis_is_up": (bool,),
"join_adjacent_surfaces": (bool,),
"location_element_id": (str,),
"location_group_id": (str,),
"location_position": (int,),
"notify_user": (bool,),
"owner_id": (str,),
"owner_type": (str,),
"parent_id": (str,),
"project_id": (str,),
"public": (bool,),
"split_assemblies_into_multiple_documents": (bool,),
"store_in_document": (bool,),
"translate": (bool,),
"unit": (str,),
"upload_id": (str,),
"version_string": (str,),
},
"attribute_map": {
"did": "did",
"wid": "wid",
"allow_faulty_parts": "allowFaultyParts",
"create_composite": "createComposite",
"create_drawing_if_possible": "createDrawingIfPossible",
"encoded_filename": "encodedFilename",
"extract_assembly_hierarchy": "extractAssemblyHierarchy",
"file": "file",
"file_body_with_details": "fileBodyWithDetails",
"file_content_length": "fileContentLength",
"file_detail": "fileDetail",
"flatten_assemblies": "flattenAssemblies",
"format_name": "formatName",
"isy_axis_is_up": "isyAxisIsUp",
"join_adjacent_surfaces": "joinAdjacentSurfaces",
"location_element_id": "locationElementId",
"location_group_id": "locationGroupId",
"location_position": "locationPosition",
"notify_user": "notifyUser",
"owner_id": "ownerId",
"owner_type": "ownerType",
"parent_id": "parentId",
"project_id": "projectId",
"public": "public",
"split_assemblies_into_multiple_documents": "splitAssembliesIntoMultipleDocuments",
"store_in_document": "storeInDocument",
"translate": "translate",
"unit": "unit",
"upload_id": "uploadId",
"version_string": "versionString",
},
"location_map": {
"did": "path",
"wid": "path",
"allow_faulty_parts": "form",
"create_composite": "form",
"create_drawing_if_possible": "form",
"encoded_filename": "form",
"extract_assembly_hierarchy": "form",
"file": "form",
"file_body_with_details": "form",
"file_content_length": "form",
"file_detail": "form",
"flatten_assemblies": "form",
"format_name": "form",
"isy_axis_is_up": "form",
"join_adjacent_surfaces": "form",
"location_element_id": "form",
"location_group_id": "form",
"location_position": "form",
"notify_user": "form",
"owner_id": "form",
"owner_type": "form",
"parent_id": "form",
"project_id": "form",
"public": "form",
"split_assemblies_into_multiple_documents": "form",
"store_in_document": "form",
"translate": "form",
"unit": "form",
"upload_id": "form",
"version_string": "form",
},
"collection_format_map": {},
},
headers_map={
"accept": ["application/vnd.onshape.v1+json;charset=UTF-8;qs=0.1"],
"content_type": ["multipart/form-data"],
},
api_client=api_client,
callable=__create_translation,
)
def __delete_translation(self, tid, **kwargs):
    """delete_translation  # noqa: E501

    Delete the translation request with the given id. The call is
    synchronous by default; pass async_req=True for an asynchronous
    request.

    >>> thread = api.delete_translation(tid, async_req=True)
    >>> result = thread.get()

    Args:
        tid (str):

    Keyword Args:
        _return_http_data_only (bool): return response data without the
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            response data. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int): index of the server to use. Default is 0.
        async_req (bool): execute request asynchronously.

    Returns:
        None
        If the method is called asynchronously, returns the request
        thread.
    """
    # Fill in the standard per-call options without clobbering values
    # the caller supplied explicitly.
    for option, default in (
        ("async_req", False),
        ("_return_http_data_only", True),
        ("_preload_content", True),
        ("_request_timeout", None),
        ("_check_input_type", True),
        ("_check_return_type", True),
        ("_host_index", 0),
    ):
        kwargs.setdefault(option, default)
    kwargs["tid"] = tid
    return self.call_with_http_info(**kwargs)

self.delete_translation = Endpoint(
    settings={
        "response_type": None,
        "auth": ["OAuth2"],
        "endpoint_path": "/api/translations/{tid}",
        "operation_id": "delete_translation",
        "http_method": "DELETE",
        "servers": [],
    },
    params_map={
        "all": ["tid",],
        "required": ["tid",],
        "nullable": [],
        "enum": [],
        "validation": [],
    },
    root_map={
        "validations": {},
        "allowed_values": {},
        "openapi_types": {"tid": (str,),},
        "attribute_map": {"tid": "tid",},
        "location_map": {"tid": "path",},
        "collection_format_map": {},
    },
    headers_map={
        "accept": ["application/vnd.onshape.v1+json;charset=UTF-8;qs=0.1"],
        "content_type": [],
    },
    api_client=api_client,
    callable=__delete_translation,
)
def __get_all_translator_formats(self, **kwargs):
    """get_all_translator_formats  # noqa: E501

    List all translation formats the service supports. The call is
    synchronous by default; pass async_req=True for an asynchronous
    request.

    >>> thread = api.get_all_translator_formats(async_req=True)
    >>> result = thread.get()

    Keyword Args:
        _return_http_data_only (bool): return response data without the
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            response data. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int): index of the server to use. Default is 0.
        async_req (bool): execute request asynchronously.

    Returns:
        [bt_model_format_full_info.BTModelFormatFullInfo]
        If the method is called asynchronously, returns the request
        thread.
    """
    # Fill in the standard per-call options without clobbering values
    # the caller supplied explicitly.
    for option, default in (
        ("async_req", False),
        ("_return_http_data_only", True),
        ("_preload_content", True),
        ("_request_timeout", None),
        ("_check_input_type", True),
        ("_check_return_type", True),
        ("_host_index", 0),
    ):
        kwargs.setdefault(option, default)
    return self.call_with_http_info(**kwargs)

self.get_all_translator_formats = Endpoint(
    settings={
        "response_type": ([bt_model_format_full_info.BTModelFormatFullInfo],),
        "auth": [],
        "endpoint_path": "/api/translations/translationformats",
        "operation_id": "get_all_translator_formats",
        "http_method": "GET",
        "servers": [],
    },
    params_map={
        "all": [],
        "required": [],
        "nullable": [],
        "enum": [],
        "validation": [],
    },
    root_map={
        "validations": {},
        "allowed_values": {},
        "openapi_types": {},
        "attribute_map": {},
        "location_map": {},
        "collection_format_map": {},
    },
    headers_map={
        "accept": ["application/vnd.onshape.v1+json;charset=UTF-8;qs=0.1"],
        "content_type": [],
    },
    api_client=api_client,
    callable=__get_all_translator_formats,
)
def __get_document_translations(self, did, **kwargs):
    """get_document_translations  # noqa: E501

    List translation requests made against a document. The call is
    synchronous by default; pass async_req=True for an asynchronous
    request.

    >>> thread = api.get_document_translations(did, async_req=True)
    >>> result = thread.get()

    Args:
        did (str):

    Keyword Args:
        offset (int): [optional] if omitted the server will use the default value of 0
        limit (int): [optional] if omitted the server will use the default value of 20
        _return_http_data_only (bool): return response data without the
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            response data. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int): index of the server to use. Default is 0.
        async_req (bool): execute request asynchronously.

    Returns:
        bt_list_response_bt_translation_request_info.BTListResponseBTTranslationRequestInfo
        If the method is called asynchronously, returns the request
        thread.
    """
    # Fill in the standard per-call options without clobbering values
    # the caller supplied explicitly.
    for option, default in (
        ("async_req", False),
        ("_return_http_data_only", True),
        ("_preload_content", True),
        ("_request_timeout", None),
        ("_check_input_type", True),
        ("_check_return_type", True),
        ("_host_index", 0),
    ):
        kwargs.setdefault(option, default)
    kwargs["did"] = did
    return self.call_with_http_info(**kwargs)

self.get_document_translations = Endpoint(
    settings={
        "response_type": (
            bt_list_response_bt_translation_request_info.BTListResponseBTTranslationRequestInfo,
        ),
        "auth": ["OAuth2"],
        "endpoint_path": "/api/translations/d/{did}",
        "operation_id": "get_document_translations",
        "http_method": "GET",
        "servers": [],
    },
    params_map={
        "all": ["did", "offset", "limit",],
        "required": ["did",],
        "nullable": [],
        "enum": [],
        "validation": [],
    },
    root_map={
        "validations": {},
        "allowed_values": {},
        "openapi_types": {"did": (str,), "offset": (int,), "limit": (int,),},
        "attribute_map": {"did": "did", "offset": "offset", "limit": "limit",},
        "location_map": {"did": "path", "offset": "query", "limit": "query",},
        "collection_format_map": {},
    },
    headers_map={
        "accept": ["application/vnd.onshape.v1+json;charset=UTF-8;qs=0.1"],
        "content_type": [],
    },
    api_client=api_client,
    callable=__get_document_translations,
)
def __get_translation(self, tid, **kwargs):
    """get_translation  # noqa: E501

    Fetch the translation request with the given id. The call is
    synchronous by default; pass async_req=True for an asynchronous
    request.

    >>> thread = api.get_translation(tid, async_req=True)
    >>> result = thread.get()

    Args:
        tid (str):

    Keyword Args:
        _return_http_data_only (bool): return response data without the
            status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            response data. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) pair of timeouts. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the
            server. Default is True.
        _host_index (int): index of the server to use. Default is 0.
        async_req (bool): execute request asynchronously.

    Returns:
        bt_translation_request_info.BTTranslationRequestInfo
        If the method is called asynchronously, returns the request
        thread.
    """
    # Fill in the standard per-call options without clobbering values
    # the caller supplied explicitly.
    for option, default in (
        ("async_req", False),
        ("_return_http_data_only", True),
        ("_preload_content", True),
        ("_request_timeout", None),
        ("_check_input_type", True),
        ("_check_return_type", True),
        ("_host_index", 0),
    ):
        kwargs.setdefault(option, default)
    kwargs["tid"] = tid
    return self.call_with_http_info(**kwargs)

self.get_translation = Endpoint(
    settings={
        "response_type": (
            bt_translation_request_info.BTTranslationRequestInfo,
        ),
        "auth": ["OAuth2"],
        "endpoint_path": "/api/translations/{tid}",
        "operation_id": "get_translation",
        "http_method": "GET",
        "servers": [],
    },
    params_map={
        "all": ["tid",],
        "required": ["tid",],
        "nullable": [],
        "enum": [],
        "validation": [],
    },
    root_map={
        "validations": {},
        "allowed_values": {},
        "openapi_types": {"tid": (str,),},
        "attribute_map": {"tid": "tid",},
        "location_map": {"tid": "path",},
        "collection_format_map": {},
    },
    headers_map={
        "accept": ["application/vnd.onshape.v1+json;charset=UTF-8;qs=0.1"],
        "content_type": [],
    },
    api_client=api_client,
    callable=__get_translation,
)
class Endpoint(object):
    """Generic description of a single HTTP API operation.

    One instance is created per operation in the api-class constructor
    above; calling the instance invokes the per-operation wrapper
    function stored in ``callable``, which in turn calls
    ``call_with_http_info``.
    """

    def __init__(
        self,
        settings=None,
        params_map=None,
        root_map=None,
        headers_map=None,
        api_client=None,
        callable=None,
    ):
        """Creates an endpoint
        Args:
            settings (dict): see below key value pairs
                'response_type' (tuple/None): response type
                'auth' (list): a list of auth type keys
                'endpoint_path' (str): the endpoint path
                'operation_id' (str): endpoint string identifier
                'http_method' (str): POST/PUT/PATCH/GET etc
                'servers' (list): list of str servers that this endpoint is at
            params_map (dict): see below key value pairs
                'all' (list): list of str endpoint parameter names
                'required' (list): list of required parameter names
                'nullable' (list): list of nullable parameter names
                'enum' (list): list of parameters with enum values
                'validation' (list): list of parameters with validations
            root_map
                'validations' (dict): the dict mapping endpoint parameter tuple
                    paths to their validation dictionaries
                'allowed_values' (dict): the dict mapping endpoint parameter
                    tuple paths to their allowed_values (enum) dictionaries
                'openapi_types' (dict): param_name to openapi type
                'attribute_map' (dict): param_name to camelCase name
                'location_map' (dict): param_name to 'body', 'file', 'form',
                    'header', 'path', 'query'
                collection_format_map (dict): param_name to `csv` etc.
            headers_map (dict): see below key value pairs
                'accept' (list): list of Accept header strings
                'content_type' (list): list of Content-Type header strings
            api_client (ApiClient) api client instance
            callable (function): the function which is invoked when the
                Endpoint is called
        """
        self.settings = settings
        self.params_map = params_map
        # Every operation also accepts the standard per-call options, so
        # add them to the set of accepted parameter names.
        self.params_map["all"].extend(
            [
                "async_req",
                "_host_index",
                "_preload_content",
                "_request_timeout",
                "_return_http_data_only",
                "_check_input_type",
                "_check_return_type",
            ]
        )
        # _request_timeout=None is a legitimate value (no timeout).
        self.params_map["nullable"].extend(["_request_timeout"])
        self.validations = root_map["validations"]
        self.allowed_values = root_map["allowed_values"]
        self.openapi_types = root_map["openapi_types"]
        # Types of the standard options, merged into the per-operation
        # parameter types so __validate_inputs can check everything
        # uniformly.
        extra_types = {
            "async_req": (bool,),
            "_host_index": (int,),
            "_preload_content": (bool,),
            "_request_timeout": (none_type, int, (int,), [int]),
            "_return_http_data_only": (bool,),
            "_check_input_type": (bool,),
            "_check_return_type": (bool,),
        }
        self.openapi_types.update(extra_types)
        self.attribute_map = root_map["attribute_map"]
        self.location_map = root_map["location_map"]
        self.collection_format_map = root_map["collection_format_map"]
        self.headers_map = headers_map
        self.api_client = api_client
        self.callable = callable

    def __validate_inputs(self, kwargs):
        """Check enum membership and validations; optionally type-check
        and coerce every supplied value in-place."""
        for param in self.params_map["enum"]:
            if param in kwargs:
                check_allowed_values(self.allowed_values, (param,), kwargs[param])
        for param in self.params_map["validation"]:
            if param in kwargs:
                check_validations(self.validations, (param,), kwargs[param])
        # Skip the (potentially expensive) type validation when the
        # caller opted out.
        if kwargs["_check_input_type"] is False:
            return
        for key, value in six.iteritems(kwargs):
            fixed_val = validate_and_convert_types(
                value,
                self.openapi_types[key],
                [key],
                False,
                kwargs["_check_input_type"],
                configuration=self.api_client.configuration,
            )
            kwargs[key] = fixed_val

    def __gather_params(self, kwargs):
        """Sort kwargs into body/file/form/header/path/query buckets
        keyed (where applicable) by the wire-level attribute name."""
        params = {
            "body": None,
            "collection_format": {},
            "file": {},
            "form": [],
            "header": {},
            "path": {},
            "query": [],
        }
        for param_name, param_value in six.iteritems(kwargs):
            param_location = self.location_map.get(param_name)
            if param_location is None:
                # Standard options (async_req etc.) have no location and
                # are not sent to the server.
                continue
            if param_location:
                if param_location == "body":
                    params["body"] = param_value
                    continue
                base_name = self.attribute_map[param_name]
                # A single file value is wrapped in a list; a parameter
                # declared as a list of files is passed through as-is.
                if param_location == "form" and self.openapi_types[param_name] == (
                    file_type,
                ):
                    params["file"][param_name] = [param_value]
                elif param_location == "form" and self.openapi_types[param_name] == (
                    [file_type],
                ):
                    # param_value is already a list
                    params["file"][param_name] = param_value
                elif param_location in {"form", "query"}:
                    # form/query buckets are lists of (name, value) pairs.
                    param_value_full = (base_name, param_value)
                    params[param_location].append(param_value_full)
                if param_location not in {"form", "query"}:
                    # header/path buckets are dicts keyed by wire name.
                    params[param_location][base_name] = param_value
                collection_format = self.collection_format_map.get(param_name)
                if collection_format:
                    params["collection_format"][base_name] = collection_format
        return params

    def __call__(self, *args, **kwargs):
        """ This method is invoked when endpoints are called
        Example:
        pet_api = PetApi()
        pet_api.add_pet  # this is an instance of the class Endpoint
        pet_api.add_pet()  # this invokes pet_api.add_pet.__call__()
        which then invokes the callable functions stored in that endpoint at
        pet_api.add_pet.callable or self.callable in this class
        """
        return self.callable(self, *args, **kwargs)

    def call_with_http_info(self, **kwargs):
        """Validate kwargs, gather them into request parts and dispatch
        the request through the api client."""
        try:
            _host = self.settings["servers"][kwargs["_host_index"]]
        except IndexError:
            if self.settings["servers"]:
                raise ApiValueError(
                    "Invalid host index. Must be 0 <= index < %s"
                    % len(self.settings["servers"])
                )
            # No per-operation servers configured: fall back to the
            # client's default host.
            _host = None
        for key, value in six.iteritems(kwargs):
            if key not in self.params_map["all"]:
                raise ApiTypeError(
                    "Got an unexpected parameter '%s'"
                    " to method `%s`" % (key, self.settings["operation_id"])
                )
            # only throw this nullable ApiValueError if _check_input_type
            # is False, if _check_input_type==True we catch this case
            # in self.__validate_inputs
            if (
                key not in self.params_map["nullable"]
                and value is None
                and kwargs["_check_input_type"] is False
            ):
                raise ApiValueError(
                    "Value may not be None for non-nullable parameter `%s`"
                    " when calling `%s`" % (key, self.settings["operation_id"])
                )
        for key in self.params_map["required"]:
            if key not in kwargs.keys():
                raise ApiValueError(
                    "Missing the required parameter `%s` when calling "
                    "`%s`" % (key, self.settings["operation_id"])
                )
        self.__validate_inputs(kwargs)
        params = self.__gather_params(kwargs)
        accept_headers_list = self.headers_map["accept"]
        if accept_headers_list:
            params["header"]["Accept"] = self.api_client.select_header_accept(
                accept_headers_list
            )
        content_type_headers_list = self.headers_map["content_type"]
        if content_type_headers_list:
            header_list = self.api_client.select_header_content_type(
                content_type_headers_list
            )
            params["header"]["Content-Type"] = header_list
        return self.api_client.call_api(
            self.settings["endpoint_path"],
            self.settings["http_method"],
            params["path"],
            params["query"],
            params["header"],
            body=params["body"],
            post_params=params["form"],
            files=params["file"],
            response_type=self.settings["response_type"],
            auth_settings=self.settings["auth"],
            async_req=kwargs["async_req"],
            _check_type=kwargs["_check_return_type"],
            _return_http_data_only=kwargs["_return_http_data_only"],
            _preload_content=kwargs["_preload_content"],
            _request_timeout=kwargs["_request_timeout"],
            _host=_host,
            collection_formats=params["collection_format"],
        )
| |
import re
from django.db.models import Q
from rest_framework import generics
from rest_framework.exceptions import NotFound, PermissionDenied, NotAuthenticated
from rest_framework import permissions as drf_permissions
from website.models import PreprintService
from framework.auth.oauth_scopes import CoreScopes
from api.base.exceptions import Conflict
from api.base.views import JSONAPIBaseView
from api.base.filters import DjangoFilterMixin
from api.base.parsers import (
JSONAPIMultipleRelationshipsParser,
JSONAPIMultipleRelationshipsParserForRegularJSON,
)
from api.base.utils import get_object_or_error, get_user_auth
from api.base import permissions as base_permissions
from api.citations.utils import render_citation, preprint_csl
from api.preprints.serializers import (
PreprintSerializer,
PreprintCreateSerializer,
PreprintCitationSerializer,
)
from api.nodes.serializers import (
NodeCitationStyleSerializer,
)
from api.nodes.views import NodeMixin, WaterButlerMixin
from api.nodes.permissions import ContributorOrPublic
from api.preprints.permissions import PreprintPublishedOrAdmin
class PreprintMixin(NodeMixin):
    """Mixin that resolves a PreprintService from the URL kwargs."""

    serializer_class = PreprintSerializer
    preprint_lookup_url_kwarg = 'preprint_id'

    def get_preprint(self, check_object_permissions=True):
        """Return the preprint named in the URL, optionally running the
        view's object-level permission checks against it."""
        lookup_value = self.kwargs[self.preprint_lookup_url_kwarg]
        preprint = get_object_or_error(
            PreprintService,
            lookup_value,
            display_name='preprint'
        )
        if not preprint:
            raise NotFound
        if check_object_permissions:
            # May raise a permission denied
            self.check_object_permissions(self.request, preprint)
        return preprint
class PreprintList(JSONAPIBaseView, generics.ListCreateAPIView, DjangoFilterMixin):
    """Preprints that represent a special kind of preprint node. *Writeable*.
    Paginated list of preprints ordered by their `date_created`. Each resource contains a representation of the
    preprint.
    ##Preprint Attributes
    OSF Preprint entities have the "preprints" `type`.
        name                            type                                description
        ====================================================================================
        date_created                    iso8601 timestamp                   timestamp that the preprint was created
        date_modified                   iso8601 timestamp                   timestamp that the preprint was last modified
        date_published                  iso8601 timestamp                   timestamp when the preprint was published
        is_published                    boolean                             whether or not this preprint is published
        is_preprint_orphan              boolean                             whether or not this preprint is orphaned
        subjects                        list of lists of dictionaries       ids of Subject in the PLOS taxonomy. Dictionary, containing the subject text and subject ID
        doi                             string                              bare DOI for the manuscript, as entered by the user
    ##Relationships
    ###Node
    The node that this preprint was created for
    ###Primary File
    The file that is designated as the preprint's primary file, or the manuscript of the preprint.
    ###Provider
    Link to preprint_provider detail for this preprint
    ##Links
    - `self` -- Preprint detail page for the current preprint
    - `html` -- Project on the OSF corresponding to the current preprint
    - `doi` -- URL representation of the DOI entered by the user for the preprint manuscript
    See the [JSON-API spec regarding pagination](http://jsonapi.org/format/1.0/#fetching-pagination).
    ##Query Params
    + `page=<Int>` -- page number of results to view, default 1
    + `filter[<fieldname>]=<Str>` -- fields and values to filter the search results on.
    Preprints may be filtered by their `id`, `is_published`, `date_created`, `date_modified`, `provider`
    Most are string fields and will be filtered using simple substring matching.
    ###Creating New Preprints
    Create a new preprint by posting to the guid of the existing **node**, including the file_id for the
    file you'd like to make the primary preprint file. Note that the **node id** will not be accessible via the
    preprints detail view until after the preprint has been created.
    Method:        POST
    URL:           /preprints/
    Query Params:  <none>
    Body (JSON):   {
                    "data": {
                        "attributes": {},
                        "relationships": {
                            "node": {                           # required
                                "data": {
                                    "type": "nodes",
                                    "id": {node_id}
                                }
                            },
                            "primary_file": {                   # required
                                "data": {
                                    "type": "primary_files",
                                    "id": {file_id}
                                }
                            },
                            "provider": {                       # required
                                "data": {
                                    "type": "providers",
                                    "id": {provider_id}
                                }
                            },
                        }
                    }
                }
    Success:       201 CREATED + preprint representation
    New preprints are created by issuing a POST request to this endpoint, along with the guid for the node to create a preprint from.
    Provider defaults to osf.
    #This Request/Response
    """
    # These permissions are not checked for the list of preprints, permissions handled by the query
    permission_classes = (
        drf_permissions.IsAuthenticatedOrReadOnly,
        base_permissions.TokenHasScope,
        ContributorOrPublic,
    )

    parser_classes = (JSONAPIMultipleRelationshipsParser, JSONAPIMultipleRelationshipsParserForRegularJSON,)

    required_read_scopes = [CoreScopes.NODE_PREPRINTS_READ]
    required_write_scopes = [CoreScopes.NODE_PREPRINTS_WRITE]

    serializer_class = PreprintSerializer

    # BUG FIX: ('-date_created') is just a parenthesized string, not a
    # tuple -- the trailing comma makes this a proper one-element tuple
    # of ordering fields.
    ordering = ('-date_created',)
    view_category = 'preprints'
    view_name = 'preprint-list'

    # overrides FilterMixin
    def postprocess_query_param(self, key, field_name, operation):
        """Rewrite filter field names onto the DB columns they live at."""
        if field_name == 'provider':
            operation['source_field_name'] = 'provider___id'
        if field_name == 'id':
            operation['source_field_name'] = 'guids___id'

    def get_serializer_class(self):
        # Creation uses a serializer that accepts the node/file/provider
        # relationships; everything else uses the plain serializer.
        if self.request.method == 'POST':
            return PreprintCreateSerializer
        else:
            return PreprintSerializer

    # overrides DjangoFilterMixin
    def get_default_django_query(self):
        """Build the visibility query for the requesting user.

        Anonymous users see only published preprints on public nodes;
        authenticated users additionally see preprints they can read as
        a contributor, and all preprints on nodes they administer.
        """
        auth = get_user_auth(self.request)
        auth_user = getattr(auth, 'user', None)

        # Permissions on the list objects are handled by the query
        default_query = Q(node__isnull=False, node__is_deleted=False)
        no_user_query = Q(is_published=True, node__is_public=True)

        if auth_user:
            contrib_user_query = Q(is_published=True, node__contributor__user_id=auth_user.id, node__contributor__read=True)
            admin_user_query = Q(node__contributor__user_id=auth_user.id, node__contributor__admin=True)
            return (default_query & (no_user_query | contrib_user_query | admin_user_query))
        return (default_query & no_user_query)

    # overrides ListAPIView
    def get_queryset(self):
        # distinct() because the contributor joins can multiply rows.
        return PreprintService.objects.filter(self.get_query_from_request()).distinct()
class PreprintDetail(JSONAPIBaseView, generics.RetrieveUpdateDestroyAPIView, PreprintMixin, WaterButlerMixin):
    """Preprint Detail  *Writeable*.
    ##Preprint Attributes
    OSF Preprint entities have the "preprints" `type`.
        name                            type                                description
        ====================================================================================
        date_created                    iso8601 timestamp                   timestamp that the preprint was created
        date_modified                   iso8601 timestamp                   timestamp that the preprint was last modified
        date_published                  iso8601 timestamp                   timestamp when the preprint was published
        is_published                    boolean                             whether or not this preprint is published
        is_preprint_orphan              boolean                             whether or not this preprint is orphaned
        subjects                        array of tuples of dictionaries     ids of Subject in the PLOS taxonomy. Dictionary, containing the subject text and subject ID
        doi                             string                              bare DOI for the manuscript, as entered by the user
    ##Relationships
    ###Node
    The node that this preprint was created for
    ###Primary File
    The file that is designated as the preprint's primary file, or the manuscript of the preprint.
    ###Provider
    Link to preprint_provider detail for this preprint
    ##Links
    - `self` -- Preprint detail page for the current preprint
    - `html` -- Project on the OSF corresponding to the current preprint
    - `doi` -- URL representation of the DOI entered by the user for the preprint manuscript
    ##Updating Preprints
    Update a preprint by sending a patch request to the guid of the existing preprint node that you'd like to update.
    Method:        PATCH
    URL:           /preprints/{node_id}/
    Query Params:  <none>
    Body (JSON):   {
                    "data": {
                        "id": node_id,
                        "attributes": {
                            "subjects":      [({root_subject_id}, {child_subject_id}), ...]  # optional
                            "is_published":  true,                                           # optional
                            "doi":           {valid_doi}                                     # optional
                        },
                        "relationships": {
                            "primary_file": {                                                # optional
                                "data": {
                                    "type": "primary_files",
                                    "id": {file_id}
                                }
                            }
                        }
                    }
                }
    Success:       200 OK + preprint representation
    #This Request/Response
    """
    permission_classes = (
        drf_permissions.IsAuthenticatedOrReadOnly,
        base_permissions.TokenHasScope,
        ContributorOrPublic,
        PreprintPublishedOrAdmin,
    )
    parser_classes = (JSONAPIMultipleRelationshipsParser, JSONAPIMultipleRelationshipsParserForRegularJSON,)

    required_read_scopes = [CoreScopes.NODE_PREPRINTS_READ]
    required_write_scopes = [CoreScopes.NODE_PREPRINTS_WRITE]

    serializer_class = PreprintSerializer

    view_category = 'preprints'
    view_name = 'preprint-detail'

    def get_object(self):
        # Object-level permission checks run inside get_preprint.
        return self.get_preprint()

    def perform_destroy(self, instance):
        # Published preprints are immutable from the API's point of
        # view; deleting one is rejected with a Conflict error.
        if instance.is_published:
            raise Conflict('Published preprints cannot be deleted.')
        PreprintService.remove_one(instance)
class PreprintCitationDetail(JSONAPIBaseView, generics.RetrieveAPIView, PreprintMixin):
    """ The citation details for a preprint, in CSL format *Read Only*
    ##PreprintCitationDetail Attributes
        name                     type                description
        =================================================================================
        id                       string               unique ID for the citation
        title                    string               title of project or component
        author                   list                 list of authors for the preprint
        publisher                string               publisher - the preprint provider
        type                     string               type of citation - web
        doi                      string               doi of the resource
    """
    permission_classes = (
        drf_permissions.IsAuthenticatedOrReadOnly,
        base_permissions.TokenHasScope,
    )

    required_read_scopes = [CoreScopes.NODE_CITATIONS_READ]
    required_write_scopes = [CoreScopes.NULL]

    serializer_class = PreprintCitationSerializer
    view_category = 'preprints'
    view_name = 'preprint-citation'

    def get_object(self):
        """Return the CSL citation data if the requester may see it."""
        preprint = self.get_preprint()
        auth = get_user_auth(self.request)
        citation_visible = (
            preprint.node.is_public
            or preprint.node.can_view(auth)
            or preprint.is_published
        )
        if not citation_visible:
            # 403 for logged-in users, 401 for anonymous requests.
            raise PermissionDenied if auth.user else NotAuthenticated
        return preprint_csl(preprint, preprint.node)
class PreprintCitationStyleDetail(JSONAPIBaseView, generics.RetrieveAPIView, PreprintMixin):
    """ The citation for a preprint in a specific style's format. *Read Only*
    ##NodeCitationDetail Attributes
        name                     type                description
        =================================================================================
        citation                string               complete citation for a preprint in the given style
    """
    permission_classes = (
        drf_permissions.IsAuthenticatedOrReadOnly,
        base_permissions.TokenHasScope,
    )

    required_read_scopes = [CoreScopes.NODE_CITATIONS_READ]
    required_write_scopes = [CoreScopes.NULL]

    serializer_class = NodeCitationStyleSerializer

    # NOTE(review): sibling preprint views use view_category = 'preprints'
    # (plural). Left unchanged because the category participates in
    # view-name lookups elsewhere -- confirm before renaming.
    view_category = 'preprint'
    view_name = 'preprint-citation'

    def get_object(self):
        """Render the preprint's citation in the requested style.

        Raises:
            NotFound: the requested style is unknown.
            PermissionDenied/NotAuthenticated: requester may not view
                the preprint.
        """
        preprint = self.get_preprint()
        auth = get_user_auth(self.request)
        style = self.kwargs.get('style_id')
        if not (preprint.node.is_public or preprint.node.can_view(auth) or preprint.is_published):
            raise PermissionDenied if auth.user else NotAuthenticated
        try:
            citation = render_citation(node=preprint, style=style)
        except ValueError as err:  # style requested could not be found
            # FIXES: raw string for the regex (r'...'); str(err) instead
            # of the Python-2-only err.message; and a fallback so a
            # message without a '.csl' name no longer raises IndexError.
            matches = re.findall(r'[a-zA-Z]+\.csl', str(err))
            csl_name = matches[0] if matches else style
            raise NotFound('{} is not a known style.'.format(csl_name))
        return {'citation': citation, 'id': style}
| |
"""
This implements the common managers that are used by the
abstract models in dbobjects.py (and which are thus shared by
all Attributes and TypedObjects).
"""
from functools import update_wrapper
from django.db.models import Q
from evennia.utils import idmapper
from evennia.utils.utils import make_iter, variable_from_module
__all__ = ("TypedObjectManager", )
_GA = object.__getattribute__
_Tag = None
#
# Decorators
#
def returns_typeclass_list(method):
    """
    Decorator: Always returns a list, even if it is empty.

    If the wrapped call is made with `raw_queryset=True`, the result of
    the wrapped method is handed back untouched instead.
    """
    def wrapper(self, *args, **kwargs):
        self.__doc__ = method.__doc__
        want_raw = kwargs.pop('raw_queryset', False)
        matches = method(self, *args, **kwargs)
        return matches if want_raw else list(matches)
    return update_wrapper(wrapper, method)
def returns_typeclass(method):
    """
    Decorator: Returns a single typeclass match or None.

    A non-iterable result from the wrapped method is passed through
    unchanged; an iterable is reduced to its first element (or None
    when empty).
    """
    def wrapper(self, *args, **kwargs):
        self.__doc__ = method.__doc__
        match = method(self, *args, **kwargs)
        if not hasattr(match, "__iter__"):
            return match
        found = list(match)
        return found[0] if found else None
    return update_wrapper(wrapper, method)
# Managers
class TypedObjectManager(idmapper.manager.SharedMemoryManager):
"""
Common ObjectManager for all dbobjects.
"""
# common methods for all typed managers. These are used
# in other methods. Returns querysets.
# Attribute manager methods
def get_attribute(self, key=None, category=None, value=None, strvalue=None, obj=None, attrtype=None):
    """
    Return Attribute objects matching any combination of key, category,
    value, strvalue, owning object and attribute type.

    Attrs:
        key (str, optional): Attribute key to match.
        category (str, optional): Attribute category to match.
        value (str, optional): Attribute value to match. This queries
            against a pickled entity and is therefore not very
            efficient. Mutually exclusive to `strvalue`.
        strvalue (str, optional): The str-value to match. Most
            Attributes will not have strvalue set. Mutually exclusive
            to `value` and takes precedence if both are given.
        obj (Object, optional): The object the Attribute is stored on.
        attrtype (str, optional): Attribute-type to search for. By
            default this is either `None` (normal Attributes) or
            `"nick"`.

    Returns:
        attributes (list): The matching Attributes.
    """
    filters = {"attribute__db_attrtype": attrtype}
    if obj:
        filters["%s__id" % self.model.__name__.lower()] = obj.id
    if key:
        filters["attribute__db_key"] = key
    if category:
        filters["attribute__db_category"] = category
    if strvalue:
        filters["attribute__db_strvalue"] = strvalue
    elif value:
        # strvalue and value are mutually exclusive
        filters["attribute__db_value"] = value
    through_objs = self.model.db_attributes.through.objects
    return [link.attribute for link in through_objs.filter(**filters)]
def get_nick(self, key=None, category=None, value=None, strvalue=None, obj=None):
    """
    Get a nick, in parallel to `get_attribute`.

    Attrs:
        key (str, optional): The nicks's key to search for
        category (str, optional): The category of the nicks(s) to search for.
        value (str, optional): The attribute value to search for. Note that this
            is not a very efficient operation since it will query for a pickled
            entity. Mutually exclusive to `strvalue`.
        strvalue (str, optional): The str-value to search for. Most Attributes
            will not have strvalue set. This is mutually exclusive to the `value`
            keyword and will take precedence if given.
        obj (Object, optional): On which object the Attribute to search for is.

    Returns:
        nicks (list): The matching Nicks.
    """
    # BUG FIX: nicks are stored as Attributes with db_attrtype="nick"
    # (see get_by_nick, which filters on exactly that). Without passing
    # attrtype="nick" here, this searched normal Attributes
    # (db_attrtype=None) and could never return a nick.
    return self.get_attribute(
        key=key, category=category, value=value, strvalue=strvalue,
        obj=obj, attrtype="nick")
@returns_typeclass_list
def get_by_attribute(self, key=None, category=None, value=None, strvalue=None, attrtype=None):
    """
    Return objects owning attributes that match the given key,
    category, value, strvalue or any combination of those criteria.

    Args:
        key (str, optional): Attribute key to match.
        category (str, optional): Attribute category to match.
        value (str, optional): Attribute value to match. This queries
            against a pickled entity and is therefore not very
            efficient. Mutually exclusive to `strvalue`.
        strvalue (str, optional): The str-value to match. Most
            Attributes will not have strvalue set. Mutually exclusive
            to `value` and takes precedence if both are given.
        attrtype (str, optional): Attribute-type to search for. By
            default this is either `None` (normal Attributes) or
            `"nick"`.

    Returns:
        obj (list): Objects having the matching Attributes.
    """
    filters = {"db_attributes__db_attrtype": attrtype}
    if key:
        filters["db_attributes__db_key"] = key
    if category:
        filters["db_attributes__db_category"] = category
    if strvalue:
        filters["db_attributes__db_strvalue"] = strvalue
    elif value:
        # strvalue and value are mutually exclusive
        filters["db_attributes__db_value"] = value
    return self.filter(**filters)
def get_by_nick(self, key=None, nick=None, category="inputline"):
    """
    Get object based on its key or nick.

    Args:
        key (str, optional): The attribute's key to search for.
        nick (str, optional): The nickname to search for.
        category (str, optional): The category of the nick
            to search for.

    Returns:
        obj (list): Objects having the matching Nicks.

    """
    # a nick is an Attribute of attrtype "nick" whose strvalue holds
    # the nickname itself
    return self.get_by_attribute(
        key=key, category=category, strvalue=nick, attrtype="nick")
# Tag manager methods
def get_tag(self, key=None, category=None, obj=None, tagtype=None, global_search=False):
    """
    Return Tag objects by key, by category, by object (it is
    stored on) or with a combination of those criteria.

    Args:
        key (str, optional): The Tag's key to search for.
        category (str, optional): The Tag's category to search for.
        obj (Object, optional): On which object the Tag to
            search for is.
        tagtype (str, optional): One of `None` (normal tags),
            "alias" or "permission".
        global_search (bool, optional): Include all possible tags,
            not just tags on this object.

    Returns:
        tag (list): The matching Tags.

    """
    global _Tag
    if not _Tag:
        # lazy import to avoid a circular import at module load
        from evennia.typeclasses.models import Tag as _Tag
    if global_search:
        # search every tag in the database via the Tag model itself
        filters = {"db_tagtype": tagtype}
        if obj:
            filters["id"] = obj.id
        if key:
            filters["db_key"] = key
        if category:
            filters["db_category"] = category
        return _Tag.objects.filter(**filters)
    # search only among tags stored on this model, going through the
    # many-to-many through-table
    filters = {"tag__db_tagtype": tagtype}
    if obj:
        filters["%s__id" % self.model.__name__.lower()] = obj.id
    if key:
        filters["tag__db_key"] = key
    if category:
        filters["tag__db_category"] = category
    return [th.tag for th in self.model.db_tags.through.objects.filter(**filters)]
def get_permission(self, key=None, category=None, obj=None):
    """
    Get a permission from the database.

    Args:
        key (str, optional): The permission's identifier.
        category (str, optional): The permission's category.
        obj (object, optional): The object on which this Tag is set.

    Returns:
        permission (list): Permission objects.

    """
    # permissions are simply Tags with the tagtype "permission"
    return self.get_tag(key=key, category=category, obj=obj, tagtype="permission")
def get_alias(self, key=None, category=None, obj=None):
    """
    Get an alias from the database.

    Args:
        key (str, optional): The alias' identifier.
        category (str, optional): The alias' category.
        obj (object, optional): The object on which this Tag is set.

    Returns:
        alias (list): Alias objects.

    """
    # aliases are simply Tags with the tagtype "alias"
    return self.get_tag(key=key, category=category, obj=obj, tagtype="alias")
@returns_typeclass_list
def get_by_tag(self, key=None, category=None, tagtype=None):
    """
    Return objects having tags with a given key or category or
    combination of the two.

    Args:
        key (str, optional): Tag key. Not case sensitive.
        category (str, optional): Tag category. Not case sensitive.
        tagtype (str or None, optional): 'type' of Tag, by default
            this is either `None` (a normal Tag), `alias` or
            `permission`.

    Returns:
        objects (list): Objects with matching tag.

    """
    # tags are stored lowercase, so lowercase the search criteria too
    filters = {"db_tags__db_tagtype": tagtype}
    if key:
        filters["db_tags__db_key"] = key.lower()
    if category:
        filters["db_tags__db_category"] = category.lower()
    return self.filter(**filters)
def get_by_permission(self, key=None, category=None):
    """
    Return objects having permissions with a given key or category or
    combination of the two.

    Args:
        key (str, optional): Permission key. Not case sensitive.
        category (str, optional): Permission category. Not case sensitive.

    Returns:
        objects (list): Objects with matching permission.

    """
    # delegate to the generic tag lookup with the "permission" tagtype
    return self.get_by_tag(key=key, category=category, tagtype="permission")
def get_by_alias(self, key=None, category=None):
    """
    Return objects having aliases with a given key or category or
    combination of the two.

    Args:
        key (str, optional): Alias key. Not case sensitive.
        category (str, optional): Alias category. Not case sensitive.

    Returns:
        objects (list): Objects with matching alias.

    """
    # delegate to the generic tag lookup with the "alias" tagtype
    return self.get_by_tag(key=key, category=category, tagtype="alias")
def create_tag(self, key=None, category=None, data=None, tagtype=None):
    """
    Create a new Tag of the base type associated with this
    object. This makes sure to create case-insensitive tags.
    If the exact same tag configuration (key+category+tagtype)
    exists on the model, a new tag will not be created, but an old
    one returned.

    Args:
        key (str, optional): Tag key. Not case sensitive.
        category (str, optional): Tag category. Not case sensitive.
        data (str, optional): Extra information about the tag.
        tagtype (str or None, optional): 'type' of Tag, by default
            this is either `None` (a normal Tag), `alias` or
            `permission`.

    Returns:
        tag (Tag): The created or re-used Tag.

    Notes:
        The `data` field is not part of the uniqueness of the tag:
        Setting `data` on an existing tag will overwrite the old
        data field. It is intended only as a way to carry
        information about the tag (like a help text), not to carry
        any information about the tagged objects themselves.

    """
    data = str(data) if data is not None else None
    # try to get old tag
    # NOTE(review): get_tag(global_search=True) appears to return a
    # queryset (see get_tag); setting db_data on it below assumes a
    # single Tag instance - confirm how multiple matches are handled.
    tag = self.get_tag(key=key, category=category, tagtype=tagtype, global_search=True)
    if tag and data is not None:
        # overload data on tag
        tag.db_data = data
        tag.save()
    elif not tag:
        # create a new tag; the Tag model is imported lazily to avoid
        # a circular import at module load time
        global _Tag
        if not _Tag:
            from evennia.typeclasses.models import Tag as _Tag
        tag = _Tag.objects.create(
            db_key=key.strip().lower() if key is not None else None,
            # NOTE(review): category is only stored when `key` is also
            # given - this coupling looks unintentional; confirm.
            db_category=category.strip().lower() if category and key is not None else None,
            db_data=data,
            db_tagtype=tagtype.strip().lower() if tagtype is not None else None)
        tag.save()
    # make_iter guards against tag being a list/queryset here
    return make_iter(tag)[0]
# object-manager methods
def dbref(self, dbref, reqhash=True):
    """
    Determine if input is a valid dbref.

    Args:
        dbref (str or int): A possible dbref.
        reqhash (bool, optional): If the "#" is required for this
            to be considered a valid dbref.

    Returns:
        dbref (int, str or None): The validated dbref (with any
            leading "#" stripped), or `None` if invalid. Note that a
            string input is returned as a string of digits, not
            converted to int.

    Notes:
        Valid forms of dbref (database reference number) are
        either a string '#N' or an integer N.

    """
    is_string = isinstance(dbref, basestring)
    if reqhash and not (is_string and dbref.startswith("#")):
        return None
    if is_string:
        dbref = dbref.lstrip('#')
    try:
        # negative ids are never valid; non-numeric input raises
        if int(dbref) < 0:
            return None
    except Exception:
        return None
    return dbref
@returns_typeclass
def get_id(self, dbref):
    """
    Find object with given dbref.

    Args:
        dbref (str or int): The id to search for.

    Returns:
        object (TypedObject): The matched object, or `None` if
            no match was found.

    """
    dbid = self.dbref(dbref, reqhash=False)
    try:
        return self.get(id=dbid)
    except self.model.DoesNotExist:
        # no object with this id in the database
        return None
def dbref_search(self, dbref):
    """
    Alias to get_id.

    Args:
        dbref (str or int): The id to search for.

    Returns:
        object (TypedObject): The matched object, or `None`.

    """
    return self.get_id(dbref)
@returns_typeclass_list
def get_dbref_range(self, min_dbref=None, max_dbref=None):
    """
    Get objects within a certain range of dbrefs.

    Args:
        min_dbref (int): Start of dbref range.
        max_dbref (int): End of dbref range (inclusive).

    Returns:
        objects (list): TypedObjects with dbrefs within
            the given dbref ranges.

    """
    queryset = super(TypedObjectManager, self).all()
    # each bound is optional; filter only on the ones supplied
    if min_dbref is not None:
        queryset = queryset.filter(id__gte=self.dbref(min_dbref, reqhash=False))
    if max_dbref is not None:
        queryset = queryset.filter(id__lte=self.dbref(max_dbref, reqhash=False))
    return queryset
def object_totals(self):
    """
    Get info about database statistics.

    Returns:
        census (dict): A dictionary `{typeclass_path: number, ...}` with
            all the typeclasses active in-game as well as the number
            of such objects defined (i.e. the number of database
            objects having that typeclass set on themselves).

    """
    from collections import Counter
    # count all paths in a single pass over one values_list query;
    # the previous implementation issued one extra count() query per
    # unique typeclass path (N+1 queries)
    return dict(Counter(self.values_list('db_typeclass_path', flat=True)))
@returns_typeclass_list
def typeclass_search(self, typeclass, include_children=False, include_parents=False):
    """
    Searches through all objects returning those which has a
    certain typeclass.

    Args:
        typeclass (str or class): A typeclass class or a python path to a typeclass.
        include_children (bool, optional): Return objects with
            given typeclass *and* all children inheriting from this
            typeclass. Mutually exclusive to `include_parents`.
        include_parents (bool, optional): Return objects with
            given typeclass *and* all parents to this typeclass.
            Mutually exclusive to `include_children`.

    Returns:
        objects (list): The objects found with the given typeclasses.

    """
    # normalize `typeclass` into a python-path string
    if callable(typeclass):
        # NOTE(review): classes are also callable, and for a class
        # argument `typeclass.__class__` is its *metaclass*, which
        # would build the wrong path here - confirm this branch is
        # only meant for callable instances.
        cls = typeclass.__class__
        typeclass = "%s.%s" % (cls.__module__, cls.__name__)
    elif not isinstance(typeclass, basestring) and hasattr(typeclass, "path"):
        typeclass = typeclass.path
    # query objects of exact typeclass
    query = Q(db_typeclass_path__exact=typeclass)
    if include_children:
        # build requests for child typeclass objects
        clsmodule, clsname = typeclass.rsplit(".", 1)
        cls = variable_from_module(clsmodule, clsname)
        subclasses = cls.__subclasses__()
        if subclasses:
            # OR in an exact-path match for every direct child class
            for child in (child for child in subclasses if hasattr(child, "path")):
                query = query | Q(db_typeclass_path__exact=child.path)
    elif include_parents:
        # build requests for parent typeclass objects
        clsmodule, clsname = typeclass.rsplit(".", 1)
        cls = variable_from_module(clsmodule, clsname)
        parents = cls.__mro__
        if parents:
            # OR in an exact-path match for every parent in the MRO
            for parent in (parent for parent in parents if hasattr(parent, "path")):
                query = query | Q(db_typeclass_path__exact=parent.path)
    # actually query the database
    return self.filter(query)
class TypeclassManager(TypedObjectManager):
    """
    Manager for the typeclasses. The main purpose of this manager is
    to limit database queries to the given typeclass despite all
    typeclasses technically being defined in the same core database
    model.
    """

    def get(self, *args, **kwargs):
        """
        Overload the standard get. This will limit itself to only
        return the current typeclass.

        Args:
            args (any): These are passed on as arguments to the default
                django get method.
        Kwargs:
            kwargs (any): These are passed on as normal arguments
                to the default django get method.
        Returns:
            object (object): The object found.
        Raises:
            ObjectNotFound: The exact name of this exception depends
                on the model base used.

        """
        kwargs.update({"db_typeclass_path": self.model.path})
        # NOTE(review): super() is given TypedObjectManager (not
        # TypeclassManager), deliberately skipping TypedObjectManager
        # itself in the MRO - confirm before changing.
        return super(TypedObjectManager, self).get(**kwargs)

    def filter(self, *args, **kwargs):
        """
        Overload of the standard filter function. This filter will
        limit itself to only the current typeclass.

        Args:
            args (any): These are passed on as arguments to the default
                django filter method.
        Kwargs:
            kwargs (any): These are passed on as normal arguments
                to the default django filter method.
        Returns:
            objects (queryset): The objects found.

        """
        kwargs.update({"db_typeclass_path": self.model.path})
        return super(TypedObjectManager, self).filter(*args, **kwargs)

    def all(self):
        """
        Overload method to return all matches, filtering for typeclass.

        Returns:
            objects (queryset): The objects found.

        """
        return super(TypedObjectManager, self).all().filter(db_typeclass_path=self.model.path)

    def _get_subclasses(self, cls):
        """
        Recursively get all subclasses to a class.

        Args:
            cls (class): A class to get subclasses from.

        Returns:
            subclasses (list): All subclasses of `cls`, to any depth.

        """
        # bugfix: the original extended the very list it was iterating
        # over, so subclasses deeper than two levels were both iterated
        # again and re-added, producing duplicates in the result.
        direct = cls.__subclasses__()
        subclasses = list(direct)
        for subclass in direct:
            subclasses.extend(self._get_subclasses(subclass))
        return subclasses

    def _family_paths(self):
        """
        Build typeclass paths for this model's class and all subclasses.

        Returns:
            paths (list): Typeclass python-paths, the model's own path first.

        """
        return [self.model.path] + ["%s.%s" % (cls.__module__, cls.__name__)
                                    for cls in self._get_subclasses(self.model)]

    def get_family(self, **kwargs):
        """
        Variation of get that not only returns the current typeclass
        but also all subclasses of that typeclass.

        Kwargs:
            kwargs (any): These are passed on as normal arguments
                to the default django get method.
        Returns:
            objects (list): The objects found.
        Raises:
            ObjectNotFound: The exact name of this exception depends
                on the model base used.

        """
        kwargs.update({"db_typeclass_path__in": self._family_paths()})
        return super(TypedObjectManager, self).get(**kwargs)

    def filter_family(self, *args, **kwargs):
        """
        Variation of filter that allows results both from typeclass
        and from subclasses of typeclass.

        Args:
            args (any): These are passed on as arguments to the default
                django filter method.
        Kwargs:
            kwargs (any): These are passed on as normal arguments
                to the default django filter method.
        Returns:
            objects (list): The objects found.

        """
        # query, including all subclasses
        kwargs.update({"db_typeclass_path__in": self._family_paths()})
        return super(TypedObjectManager, self).filter(*args, **kwargs)

    def all_family(self):
        """
        Return all matches, allowing matches from all subclasses of
        the typeclass.

        Returns:
            objects (list): The objects found.

        """
        return super(TypedObjectManager, self).all().filter(
            db_typeclass_path__in=self._family_paths())
| |
"""
Climate on Zigbee Home Automation networks.
For more details on this platform, please refer to the documentation
at https://home-assistant.io/components/zha.climate/
"""
from __future__ import annotations
from datetime import datetime, timedelta
import enum
import functools
from random import randint
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
ATTR_HVAC_MODE,
ATTR_TARGET_TEMP_HIGH,
ATTR_TARGET_TEMP_LOW,
CURRENT_HVAC_COOL,
CURRENT_HVAC_FAN,
CURRENT_HVAC_HEAT,
CURRENT_HVAC_IDLE,
CURRENT_HVAC_OFF,
DOMAIN,
FAN_AUTO,
FAN_ON,
HVAC_MODE_COOL,
HVAC_MODE_DRY,
HVAC_MODE_FAN_ONLY,
HVAC_MODE_HEAT,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_OFF,
PRESET_AWAY,
PRESET_BOOST,
PRESET_COMFORT,
PRESET_ECO,
PRESET_NONE,
SUPPORT_FAN_MODE,
SUPPORT_PRESET_MODE,
SUPPORT_TARGET_TEMPERATURE,
SUPPORT_TARGET_TEMPERATURE_RANGE,
)
from homeassistant.const import ATTR_TEMPERATURE, PRECISION_TENTHS, TEMP_CELSIUS
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.event import async_track_time_interval
import homeassistant.util.dt as dt_util
from .core import discovery
from .core.const import (
CHANNEL_FAN,
CHANNEL_THERMOSTAT,
DATA_ZHA,
DATA_ZHA_DISPATCHERS,
PRESET_COMPLEX,
PRESET_SCHEDULE,
SIGNAL_ADD_ENTITIES,
SIGNAL_ATTR_UPDATED,
)
from .core.registries import ZHA_ENTITIES
from .entity import ZhaEntity
# Home Assistant platform dependency declaration.
DEPENDENCIES = ["zha"]
# Names of the extra state attributes exposed by Thermostat.extra_state_attributes.
ATTR_SYS_MODE = "system_mode"
ATTR_RUNNING_MODE = "running_mode"
ATTR_SETPT_CHANGE_SRC = "setpoint_change_source"
ATTR_SETPT_CHANGE_AMT = "setpoint_change_amount"
ATTR_OCCUPANCY = "occupancy"
ATTR_PI_COOLING_DEMAND = "pi_cooling_demand"
ATTR_PI_HEATING_DEMAND = "pi_heating_demand"
ATTR_OCCP_COOL_SETPT = "occupied_cooling_setpoint"
ATTR_OCCP_HEAT_SETPT = "occupied_heating_setpoint"
ATTR_UNOCCP_HEAT_SETPT = "unoccupied_heating_setpoint"
ATTR_UNOCCP_COOL_SETPT = "unoccupied_cooling_setpoint"
# Decorator registering entity classes with the ZHA entity registry.
STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, DOMAIN)
# Thermostat running_mode attribute value -> HA HVAC mode.
RUNNING_MODE = {0x00: HVAC_MODE_OFF, 0x03: HVAC_MODE_COOL, 0x04: HVAC_MODE_HEAT}
class ThermostatFanMode(enum.IntEnum):
    """Fan channel enum for thermostat Fans.

    Values are written to the fan channel via ``async_set_speed``
    (see ``Thermostat.async_set_fan_mode``).
    """

    OFF = 0x00
    ON = 0x04
    AUTO = 0x05
class RunningState(enum.IntFlag):
    """ZCL Running state enum.

    Bit flags - several may be set at once (e.g. heating while the
    fan runs); they are tested with bitwise AND in the hvac_action
    properties of the thermostat entities.
    """

    HEAT = 0x0001
    COOL = 0x0002
    FAN = 0x0004
    HEAT_STAGE_2 = 0x0008
    COOL_STAGE_2 = 0x0010
    FAN_STAGE_2 = 0x0020
    FAN_STAGE_3 = 0x0040
# Thermostat "control sequence of operation" attribute value -> the tuple of
# HVAC modes the device supports (consumed by Thermostat.hvac_modes).
SEQ_OF_OPERATION = {
    0x00: (HVAC_MODE_OFF, HVAC_MODE_COOL),  # cooling only
    0x01: (HVAC_MODE_OFF, HVAC_MODE_COOL),  # cooling with reheat
    0x02: (HVAC_MODE_OFF, HVAC_MODE_HEAT),  # heating only
    0x03: (HVAC_MODE_OFF, HVAC_MODE_HEAT),  # heating with reheat
    # cooling and heating 4-pipes
    0x04: (HVAC_MODE_OFF, HVAC_MODE_HEAT_COOL, HVAC_MODE_COOL, HVAC_MODE_HEAT),
    # cooling and heating 4-pipes (NOTE(review): likely "with reheat" -
    # this comment was duplicated from 0x04; confirm against the ZCL spec)
    0x05: (HVAC_MODE_OFF, HVAC_MODE_HEAT_COOL, HVAC_MODE_COOL, HVAC_MODE_HEAT),
    0x06: (HVAC_MODE_COOL, HVAC_MODE_HEAT, HVAC_MODE_OFF),  # centralite specific
    0x07: (HVAC_MODE_HEAT_COOL, HVAC_MODE_OFF),  # centralite specific
}
class SystemMode(enum.IntEnum):
    """ZCL System Mode attribute enum.

    Device-side system_mode values; translated to and from HA HVAC
    modes via HVAC_MODE_2_SYSTEM and SYSTEM_MODE_2_HVAC below.
    """

    OFF = 0x00
    HEAT_COOL = 0x01
    COOL = 0x03
    HEAT = 0x04
    AUX_HEAT = 0x05
    PRE_COOL = 0x06
    FAN_ONLY = 0x07
    DRY = 0x08
    SLEEP = 0x09
# HA HVAC mode -> ZCL system_mode value to write to the device.
HVAC_MODE_2_SYSTEM = {
    HVAC_MODE_OFF: SystemMode.OFF,
    HVAC_MODE_HEAT_COOL: SystemMode.HEAT_COOL,
    HVAC_MODE_COOL: SystemMode.COOL,
    HVAC_MODE_HEAT: SystemMode.HEAT,
    HVAC_MODE_FAN_ONLY: SystemMode.FAN_ONLY,
    HVAC_MODE_DRY: SystemMode.DRY,
}
# ZCL system_mode value -> HA HVAC mode. Not a strict inverse of the map
# above: several device modes collapse onto one HA mode (AUX_HEAT and
# SLEEP have no HA equivalents of their own).
SYSTEM_MODE_2_HVAC = {
    SystemMode.OFF: HVAC_MODE_OFF,
    SystemMode.HEAT_COOL: HVAC_MODE_HEAT_COOL,
    SystemMode.COOL: HVAC_MODE_COOL,
    SystemMode.HEAT: HVAC_MODE_HEAT,
    SystemMode.AUX_HEAT: HVAC_MODE_HEAT,
    SystemMode.PRE_COOL: HVAC_MODE_COOL,  # this is 'precooling'. is it the same?
    SystemMode.FAN_ONLY: HVAC_MODE_FAN_ONLY,
    SystemMode.DRY: HVAC_MODE_DRY,
    SystemMode.SLEEP: HVAC_MODE_OFF,
}
# ZCL temperature attributes are expressed in 0.01 degC units.
ZCL_TEMP = 100
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up the Zigbee Home Automation sensor from config entry."""
    entities_to_create = hass.data[DATA_ZHA][DOMAIN]
    # defer entity creation to the discovery helper; it fires whenever
    # the ZHA gateway signals that new entities are available
    add_entities = functools.partial(
        discovery.async_add_entities, async_add_entities, entities_to_create
    )
    unsub = async_dispatcher_connect(hass, SIGNAL_ADD_ENTITIES, add_entities)
    hass.data[DATA_ZHA][DATA_ZHA_DISPATCHERS].append(unsub)
@STRICT_MATCH(channel_names=CHANNEL_THERMOSTAT, aux_channels=CHANNEL_FAN)
class Thermostat(ZhaEntity, ClimateEntity):
    """Representation of a ZHA Thermostat device."""

    DEFAULT_MAX_TEMP = 35
    DEFAULT_MIN_TEMP = 7

    _domain = DOMAIN
    value_attribute = 0x0000

    def __init__(self, unique_id, zha_device, channels, **kwargs):
        """Initialize ZHA Thermostat instance."""
        super().__init__(unique_id, zha_device, channels, **kwargs)
        self._thrm = self.cluster_channels.get(CHANNEL_THERMOSTAT)
        self._preset = PRESET_NONE
        self._presets = []
        self._supported_flags = SUPPORT_TARGET_TEMPERATURE
        self._fan = self.cluster_channels.get(CHANNEL_FAN)

    @property
    def current_temperature(self):
        """Return the current temperature."""
        if self._thrm.local_temp is None:
            return None
        # ZCL reports temperature in 0.01 degC units
        return self._thrm.local_temp / ZCL_TEMP

    @property
    def extra_state_attributes(self):
        """Return device specific state attributes."""
        data = {}
        if self.hvac_mode:
            mode = SYSTEM_MODE_2_HVAC.get(self._thrm.system_mode, "unknown")
            data[ATTR_SYS_MODE] = f"[{self._thrm.system_mode}]/{mode}"
        if self._thrm.occupancy is not None:
            data[ATTR_OCCUPANCY] = self._thrm.occupancy
        if self._thrm.occupied_cooling_setpoint is not None:
            data[ATTR_OCCP_COOL_SETPT] = self._thrm.occupied_cooling_setpoint
        if self._thrm.occupied_heating_setpoint is not None:
            data[ATTR_OCCP_HEAT_SETPT] = self._thrm.occupied_heating_setpoint
        if self._thrm.pi_heating_demand is not None:
            data[ATTR_PI_HEATING_DEMAND] = self._thrm.pi_heating_demand
        if self._thrm.pi_cooling_demand is not None:
            data[ATTR_PI_COOLING_DEMAND] = self._thrm.pi_cooling_demand
        unoccupied_cooling_setpoint = self._thrm.unoccupied_cooling_setpoint
        if unoccupied_cooling_setpoint is not None:
            # bugfix: the cooling setpoint was previously exported under
            # the *heating* attribute key (and vice versa below)
            data[ATTR_UNOCCP_COOL_SETPT] = unoccupied_cooling_setpoint
        unoccupied_heating_setpoint = self._thrm.unoccupied_heating_setpoint
        if unoccupied_heating_setpoint is not None:
            data[ATTR_UNOCCP_HEAT_SETPT] = unoccupied_heating_setpoint
        return data

    @property
    def fan_mode(self) -> str | None:
        """Return current FAN mode."""
        if self._thrm.running_state is None:
            return FAN_AUTO
        if self._thrm.running_state & (
            RunningState.FAN | RunningState.FAN_STAGE_2 | RunningState.FAN_STAGE_3
        ):
            return FAN_ON
        return FAN_AUTO

    @property
    def fan_modes(self) -> list[str] | None:
        """Return supported FAN modes."""
        if not self._fan:
            return None
        return [FAN_AUTO, FAN_ON]

    @property
    def hvac_action(self) -> str | None:
        """Return the current HVAC action."""
        # prefer the pi_*_demand attributes when the device reports them;
        # fall back to running mode/state otherwise
        if (
            self._thrm.pi_heating_demand is None
            and self._thrm.pi_cooling_demand is None
        ):
            return self._rm_rs_action
        return self._pi_demand_action

    @property
    def _rm_rs_action(self) -> str | None:
        """Return the current HVAC action based on running mode and running state."""
        running_mode = self._thrm.running_mode
        if running_mode == SystemMode.HEAT:
            return CURRENT_HVAC_HEAT
        if running_mode == SystemMode.COOL:
            return CURRENT_HVAC_COOL
        running_state = self._thrm.running_state
        if running_state and running_state & (
            RunningState.FAN | RunningState.FAN_STAGE_2 | RunningState.FAN_STAGE_3
        ):
            return CURRENT_HVAC_FAN
        if self.hvac_mode != HVAC_MODE_OFF and running_mode == SystemMode.OFF:
            return CURRENT_HVAC_IDLE
        return CURRENT_HVAC_OFF

    @property
    def _pi_demand_action(self) -> str | None:
        """Return the current HVAC action based on pi_demands."""
        heating_demand = self._thrm.pi_heating_demand
        if heating_demand is not None and heating_demand > 0:
            return CURRENT_HVAC_HEAT
        cooling_demand = self._thrm.pi_cooling_demand
        if cooling_demand is not None and cooling_demand > 0:
            return CURRENT_HVAC_COOL
        if self.hvac_mode != HVAC_MODE_OFF:
            return CURRENT_HVAC_IDLE
        return CURRENT_HVAC_OFF

    @property
    def hvac_mode(self) -> str | None:
        """Return HVAC operation mode."""
        return SYSTEM_MODE_2_HVAC.get(self._thrm.system_mode)

    @property
    def hvac_modes(self) -> tuple[str, ...]:
        """Return the list of available HVAC operation modes."""
        return SEQ_OF_OPERATION.get(self._thrm.ctrl_seqe_of_oper, (HVAC_MODE_OFF,))

    @property
    def precision(self):
        """Return the precision of the system."""
        return PRECISION_TENTHS

    @property
    def preset_mode(self) -> str | None:
        """Return current preset mode."""
        return self._preset

    @property
    def preset_modes(self) -> list[str] | None:
        """Return supported preset modes."""
        return self._presets

    @property
    def supported_features(self):
        """Return the list of supported features."""
        features = self._supported_flags
        if HVAC_MODE_HEAT_COOL in self.hvac_modes:
            features |= SUPPORT_TARGET_TEMPERATURE_RANGE
        if self._fan is not None:
            # bugfix: OR the flag into the returned value; previously
            # self._supported_flags was mutated instead, so the first
            # call never reported fan support and the property had a
            # lasting side effect
            features |= SUPPORT_FAN_MODE
        return features

    @property
    def target_temperature(self):
        """Return the temperature we try to reach."""
        temp = None
        if self.hvac_mode == HVAC_MODE_COOL:
            if self.preset_mode == PRESET_AWAY:
                temp = self._thrm.unoccupied_cooling_setpoint
            else:
                temp = self._thrm.occupied_cooling_setpoint
        elif self.hvac_mode == HVAC_MODE_HEAT:
            if self.preset_mode == PRESET_AWAY:
                temp = self._thrm.unoccupied_heating_setpoint
            else:
                temp = self._thrm.occupied_heating_setpoint
        if temp is None:
            return temp
        return round(temp / ZCL_TEMP, 1)

    @property
    def target_temperature_high(self):
        """Return the upper bound temperature we try to reach."""
        if self.hvac_mode != HVAC_MODE_HEAT_COOL:
            return None
        if self.preset_mode == PRESET_AWAY:
            temp = self._thrm.unoccupied_cooling_setpoint
        else:
            temp = self._thrm.occupied_cooling_setpoint
        if temp is None:
            return temp
        return round(temp / ZCL_TEMP, 1)

    @property
    def target_temperature_low(self):
        """Return the lower bound temperature we try to reach."""
        if self.hvac_mode != HVAC_MODE_HEAT_COOL:
            return None
        if self.preset_mode == PRESET_AWAY:
            temp = self._thrm.unoccupied_heating_setpoint
        else:
            temp = self._thrm.occupied_heating_setpoint
        if temp is None:
            return temp
        return round(temp / ZCL_TEMP, 1)

    @property
    def temperature_unit(self):
        """Return the unit of measurement used by the platform."""
        return TEMP_CELSIUS

    @property
    def max_temp(self) -> float:
        """Return the maximum temperature."""
        temps = []
        if HVAC_MODE_HEAT in self.hvac_modes:
            temps.append(self._thrm.max_heat_setpoint_limit)
        if HVAC_MODE_COOL in self.hvac_modes:
            temps.append(self._thrm.max_cool_setpoint_limit)
        if not temps:
            return self.DEFAULT_MAX_TEMP
        return round(max(temps) / ZCL_TEMP, 1)

    @property
    def min_temp(self) -> float:
        """Return the minimum temperature."""
        temps = []
        if HVAC_MODE_HEAT in self.hvac_modes:
            temps.append(self._thrm.min_heat_setpoint_limit)
        if HVAC_MODE_COOL in self.hvac_modes:
            temps.append(self._thrm.min_cool_setpoint_limit)
        if not temps:
            return self.DEFAULT_MIN_TEMP
        return round(min(temps) / ZCL_TEMP, 1)

    async def async_added_to_hass(self):
        """Run when about to be added to hass."""
        await super().async_added_to_hass()
        self.async_accept_signal(
            self._thrm, SIGNAL_ATTR_UPDATED, self.async_attribute_updated
        )

    async def async_attribute_updated(self, record):
        """Handle attribute update from device."""
        if (
            record.attr_name in (ATTR_OCCP_COOL_SETPT, ATTR_OCCP_HEAT_SETPT)
            and self.preset_mode == PRESET_AWAY
        ):
            # occupancy attribute is an unreportable attribute, but if we get
            # an attribute update for an "occupied" setpoint, there's a chance
            # occupancy has changed
            occupancy = await self._thrm.get_occupancy()
            if occupancy is True:
                self._preset = PRESET_NONE
        self.debug("Attribute '%s' = %s update", record.attr_name, record.value)
        self.async_write_ha_state()

    async def async_set_fan_mode(self, fan_mode: str) -> None:
        """Set fan mode."""
        if fan_mode not in self.fan_modes:
            self.warning("Unsupported '%s' fan mode", fan_mode)
            return
        if fan_mode == FAN_ON:
            mode = ThermostatFanMode.ON
        else:
            mode = ThermostatFanMode.AUTO
        await self._fan.async_set_speed(mode)

    async def async_set_hvac_mode(self, hvac_mode: str) -> None:
        """Set new target operation mode."""
        if hvac_mode not in self.hvac_modes:
            self.warning(
                "can't set '%s' mode. Supported modes are: %s",
                hvac_mode,
                self.hvac_modes,
            )
            return
        if await self._thrm.async_set_operation_mode(HVAC_MODE_2_SYSTEM[hvac_mode]):
            self.async_write_ha_state()

    async def async_set_preset_mode(self, preset_mode: str) -> None:
        """Set new preset mode."""
        if preset_mode not in self.preset_modes:
            self.debug("preset mode '%s' is not supported", preset_mode)
            return
        # turn off the currently active preset (if any) before enabling
        # the new one; bail out if either step fails on the device
        if (
            self.preset_mode
            not in (
                preset_mode,
                PRESET_NONE,
            )
            and not await self.async_preset_handler(self.preset_mode, enable=False)
        ):
            self.debug("Couldn't turn off '%s' preset", self.preset_mode)
            return
        if preset_mode != PRESET_NONE and not await self.async_preset_handler(
            preset_mode, enable=True
        ):
            self.debug("Couldn't turn on '%s' preset", preset_mode)
            return
        self._preset = preset_mode
        self.async_write_ha_state()

    async def async_set_temperature(self, **kwargs):
        """Set new target temperature."""
        low_temp = kwargs.get(ATTR_TARGET_TEMP_LOW)
        high_temp = kwargs.get(ATTR_TARGET_TEMP_HIGH)
        temp = kwargs.get(ATTR_TEMPERATURE)
        hvac_mode = kwargs.get(ATTR_HVAC_MODE)

        if hvac_mode is not None:
            await self.async_set_hvac_mode(hvac_mode)

        thrm = self._thrm
        if self.hvac_mode == HVAC_MODE_HEAT_COOL:
            success = True
            if low_temp is not None:
                low_temp = int(low_temp * ZCL_TEMP)
                success = success and await thrm.async_set_heating_setpoint(
                    low_temp, self.preset_mode == PRESET_AWAY
                )
                self.debug("Setting heating %s setpoint: %s", low_temp, success)
            if high_temp is not None:
                high_temp = int(high_temp * ZCL_TEMP)
                success = success and await thrm.async_set_cooling_setpoint(
                    high_temp, self.preset_mode == PRESET_AWAY
                )
                # bugfix: log the cooling (high) setpoint, not low_temp
                self.debug("Setting cooling %s setpoint: %s", high_temp, success)
        elif temp is not None:
            temp = int(temp * ZCL_TEMP)
            if self.hvac_mode == HVAC_MODE_COOL:
                success = await thrm.async_set_cooling_setpoint(
                    temp, self.preset_mode == PRESET_AWAY
                )
            elif self.hvac_mode == HVAC_MODE_HEAT:
                success = await thrm.async_set_heating_setpoint(
                    temp, self.preset_mode == PRESET_AWAY
                )
            else:
                self.debug("Not setting temperature for '%s' mode", self.hvac_mode)
                return
        else:
            self.debug("incorrect %s setting for '%s' mode", kwargs, self.hvac_mode)
            return

        if success:
            self.async_write_ha_state()

    async def async_preset_handler(self, preset: str, enable: bool = False) -> bool:
        """Set the preset mode via handler."""
        handler = getattr(self, f"async_preset_handler_{preset}")
        return await handler(enable)
@STRICT_MATCH(
    channel_names={CHANNEL_THERMOSTAT, "sinope_manufacturer_specific"},
    manufacturers="Sinope Technologies",
)
class SinopeTechnologiesThermostat(Thermostat):
    """Sinope Technologies Thermostat."""

    manufacturer = 0x119C
    # randomized per import so a fleet of thermostats does not sync time
    # simultaneously
    update_time_interval = timedelta(minutes=randint(45, 75))

    def __init__(self, unique_id, zha_device, channels, **kwargs):
        """Initialize ZHA Thermostat instance."""
        super().__init__(unique_id, zha_device, channels, **kwargs)
        self._manufacturer_ch = self.cluster_channels["sinope_manufacturer_specific"]
        self._presets = [PRESET_AWAY, PRESET_NONE]
        self._supported_flags |= SUPPORT_PRESET_MODE

    @callback
    def _async_update_time(self, timestamp=None) -> None:
        """Update thermostat's time display."""
        epoch_2k = datetime(2000, 1, 1)
        secs_2k = (dt_util.now().replace(tzinfo=None) - epoch_2k).total_seconds()
        self.debug("Updating time: %s", secs_2k)
        cluster = self._manufacturer_ch.cluster
        cluster.create_catching_task(
            cluster.write_attributes(
                {"secs_since_2k": secs_2k}, manufacturer=self.manufacturer
            )
        )

    async def async_added_to_hass(self):
        """Run when about to be added to Hass."""
        await super().async_added_to_hass()
        async_track_time_interval(
            self.hass, self._async_update_time, self.update_time_interval
        )
        self._async_update_time()

    async def async_preset_handler_away(self, is_away: bool = False) -> bool:
        """Set occupancy."""
        occupancy = 0 if is_away else 1
        res = await self._thrm.write_attributes(
            {"set_occupancy": occupancy},
            manufacturer=self._zha_device.manufacturer_code,
        )
        self.debug("set occupancy to %s. Status: %s", occupancy, res)
        return res
@STRICT_MATCH(
    channel_names=CHANNEL_THERMOSTAT,
    aux_channels=CHANNEL_FAN,
    manufacturers="Zen Within",
)
class ZenWithinThermostat(Thermostat):
    """Zen Within Thermostat implementation."""

    @property
    def _rm_rs_action(self) -> str | None:
        """Return the current HVAC action based on running mode and running state."""
        state = self._thrm.running_state
        if state is None:
            return None
        # check the bit groups in priority order: heating, cooling, fan
        action_masks = (
            (RunningState.HEAT | RunningState.HEAT_STAGE_2, CURRENT_HVAC_HEAT),
            (RunningState.COOL | RunningState.COOL_STAGE_2, CURRENT_HVAC_COOL),
            (
                RunningState.FAN
                | RunningState.FAN_STAGE_2
                | RunningState.FAN_STAGE_3,
                CURRENT_HVAC_FAN,
            ),
        )
        for mask, action in action_masks:
            if state & mask:
                return action
        if self.hvac_mode != HVAC_MODE_OFF:
            return CURRENT_HVAC_IDLE
        return CURRENT_HVAC_OFF
# Behaves exactly like the Zen Within implementation; registered
# separately to match the Centralite 3157100 model.
@STRICT_MATCH(
    channel_names=CHANNEL_THERMOSTAT,
    aux_channels=CHANNEL_FAN,
    manufacturers="Centralite",
    models="3157100",
)
class CentralitePearl(ZenWithinThermostat):
    """Centralite Pearl Thermostat implementation."""
@STRICT_MATCH(
    channel_names=CHANNEL_THERMOSTAT,
    manufacturers={
        "_TZE200_ckud7u2l",
        "_TZE200_ywdxldoj",
        "_TZE200_cwnjrr72",
        "_TZE200_b6wax7g0",
        "_TYST11_ckud7u2l",
        "_TYST11_ywdxldoj",
        "_TYST11_cwnjrr72",
        "_TYST11_b6wax7g0",
    },
)
class MoesThermostat(Thermostat):
    """Moes Thermostat implementation."""

    # device "operation_preset" attribute value -> HA preset name
    _PRESET_BY_VALUE = {
        0: PRESET_AWAY,
        1: PRESET_SCHEDULE,
        2: PRESET_NONE,
        3: PRESET_COMFORT,
        4: PRESET_ECO,
        5: PRESET_BOOST,
        6: PRESET_COMPLEX,
    }
    # HA preset name -> device "operation_preset" value to enable it
    # (PRESET_NONE/2 is only ever written when *disabling* a preset)
    _VALUE_BY_PRESET = {
        PRESET_AWAY: 0,
        PRESET_SCHEDULE: 1,
        PRESET_COMFORT: 3,
        PRESET_ECO: 4,
        PRESET_BOOST: 5,
        PRESET_COMPLEX: 6,
    }

    def __init__(self, unique_id, zha_device, channels, **kwargs):
        """Initialize ZHA Thermostat instance."""
        super().__init__(unique_id, zha_device, channels, **kwargs)
        self._presets = [
            PRESET_NONE,
            PRESET_AWAY,
            PRESET_SCHEDULE,
            PRESET_COMFORT,
            PRESET_ECO,
            PRESET_BOOST,
            PRESET_COMPLEX,
        ]
        self._supported_flags |= SUPPORT_PRESET_MODE

    @property
    def hvac_modes(self) -> tuple[str, ...]:
        """Return only the heat mode, because the device can't be turned off."""
        return (HVAC_MODE_HEAT,)

    async def async_attribute_updated(self, record):
        """Handle attribute update from device."""
        if record.attr_name == "operation_preset":
            preset = self._PRESET_BY_VALUE.get(record.value)
            if preset is not None:
                self._preset = preset
        await super().async_attribute_updated(record)

    async def async_preset_handler(self, preset: str, enable: bool = False) -> bool:
        """Set the preset mode."""
        mfg_code = self._zha_device.manufacturer_code
        if not enable:
            # writing 2 ("none") clears any active preset
            return await self._thrm.write_attributes(
                {"operation_preset": 2}, manufacturer=mfg_code
            )
        value = self._VALUE_BY_PRESET.get(preset)
        if value is None:
            return False
        return await self._thrm.write_attributes(
            {"operation_preset": value}, manufacturer=mfg_code
        )
| |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
| This file is part of the web2py Web Framework
| Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>
| License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
CONTENT_TYPE dictionary created against freedesktop.org's shared mime info
database version 1.1.
Deviations from official standards:
- .md: application/x-genesis-rom --> text/x-markdown
- .png: image/x-apple-ios-png --> image/png
Additions:
- .load: text/html
- .json: application/json
- .jsonp: application/jsonp
- .pickle: application/python-pickle
- .w2p: application/w2p
"""
__all__ = ['contenttype']
CONTENT_TYPE = {
'.123': 'application/vnd.lotus-1-2-3',
'.3ds': 'image/x-3ds',
'.3g2': 'video/3gpp2',
'.3ga': 'video/3gpp',
'.3gp': 'video/3gpp',
'.3gp2': 'video/3gpp2',
'.3gpp': 'video/3gpp',
'.3gpp2': 'video/3gpp2',
'.602': 'application/x-t602',
'.669': 'audio/x-mod',
'.7z': 'application/x-7z-compressed',
'.a': 'application/x-archive',
'.aac': 'audio/aac',
'.abw': 'application/x-abiword',
'.abw.crashed': 'application/x-abiword',
'.abw.gz': 'application/x-abiword',
'.ac3': 'audio/ac3',
'.ace': 'application/x-ace',
'.adb': 'text/x-adasrc',
'.ads': 'text/x-adasrc',
'.afm': 'application/x-font-afm',
'.ag': 'image/x-applix-graphics',
'.ai': 'application/illustrator',
'.aif': 'audio/x-aiff',
'.aifc': 'audio/x-aifc',
'.aiff': 'audio/x-aiff',
'.aiffc': 'audio/x-aifc',
'.al': 'application/x-perl',
'.alz': 'application/x-alz',
'.amr': 'audio/amr',
'.amz': 'audio/x-amzxml',
'.ani': 'application/x-navi-animation',
'.anim[1-9j]': 'video/x-anim',
'.anx': 'application/annodex',
'.ape': 'audio/x-ape',
'.apk': 'application/vnd.android.package-archive',
'.ar': 'application/x-archive',
'.arj': 'application/x-arj',
'.arw': 'image/x-sony-arw',
'.as': 'application/x-applix-spreadsheet',
'.asc': 'text/plain',
'.asf': 'video/x-ms-asf',
'.asp': 'application/x-asp',
'.ass': 'text/x-ssa',
'.asx': 'audio/x-ms-asx',
'.atom': 'application/atom+xml',
'.au': 'audio/basic',
'.avf': 'video/x-msvideo',
'.avi': 'video/x-msvideo',
'.aw': 'application/x-applix-word',
'.awb': 'audio/amr-wb',
'.awk': 'application/x-awk',
'.axa': 'audio/annodex',
'.axv': 'video/annodex',
'.bak': 'application/x-trash',
'.bcpio': 'application/x-bcpio',
'.bdf': 'application/x-font-bdf',
'.bdm': 'video/mp2t',
'.bdmv': 'video/mp2t',
'.bib': 'text/x-bibtex',
'.bin': 'application/octet-stream',
'.blend': 'application/x-blender',
'.blender': 'application/x-blender',
'.bmp': 'image/bmp',
'.bz': 'application/x-bzip',
'.bz2': 'application/x-bzip',
'.c': 'text/x-csrc',
'.c++': 'text/x-c++src',
'.cab': 'application/vnd.ms-cab-compressed',
'.cap': 'application/vnd.tcpdump.pcap',
'.cb7': 'application/x-cb7',
'.cbl': 'text/x-cobol',
'.cbr': 'application/x-cbr',
'.cbt': 'application/x-cbt',
'.cbz': 'application/x-cbz',
'.cc': 'text/x-c++src',
'.ccmx': 'application/x-ccmx',
'.cdf': 'application/x-netcdf',
'.cdr': 'application/vnd.corel-draw',
'.cer': 'application/pkix-cert',
'.cert': 'application/x-x509-ca-cert',
'.cgm': 'image/cgm',
'.chm': 'application/vnd.ms-htmlhelp',
'.chrt': 'application/x-kchart',
'.class': 'application/x-java',
'.clpi': 'video/mp2t',
'.cls': 'text/x-tex',
'.cmake': 'text/x-cmake',
'.cob': 'text/x-cobol',
'.cpi': 'video/mp2t',
'.cpio': 'application/x-cpio',
'.cpio.gz': 'application/x-cpio-compressed',
'.cpp': 'text/x-c++src',
'.cr2': 'image/x-canon-cr2',
'.crl': 'application/pkix-crl',
'.crt': 'application/x-x509-ca-cert',
'.crw': 'image/x-canon-crw',
'.cs': 'text/x-csharp',
'.csh': 'application/x-csh',
'.css': 'text/css',
'.cssl': 'text/css',
'.csv': 'text/csv',
'.cue': 'application/x-cue',
'.cur': 'image/x-win-bitmap',
'.cxx': 'text/x-c++src',
'.d': 'text/x-dsrc',
'.dar': 'application/x-dar',
'.dbf': 'application/x-dbf',
'.dc': 'application/x-dc-rom',
'.dcl': 'text/x-dcl',
'.dcm': 'application/dicom',
'.dcr': 'image/x-kodak-dcr',
'.dds': 'image/x-dds',
'.deb': 'application/x-deb',
'.der': 'application/x-x509-ca-cert',
'.desktop': 'application/x-desktop',
'.di': 'text/x-dsrc',
'.dia': 'application/x-dia-diagram',
'.diff': 'text/x-patch',
'.divx': 'video/x-msvideo',
'.djv': 'image/vnd.djvu',
'.djvu': 'image/vnd.djvu',
'.dmg': 'application/x-apple-diskimage',
'.dmp': 'application/vnd.tcpdump.pcap',
'.dng': 'image/x-adobe-dng',
'.doc': 'application/msword',
'.docbook': 'application/x-docbook+xml',
'.docm': 'application/vnd.ms-word.document.macroenabled.12',
'.docx': 'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
'.dot': 'text/vnd.graphviz',
'.dotm': 'application/vnd.ms-word.template.macroenabled.12',
'.dotx': 'application/vnd.openxmlformats-officedocument.wordprocessingml.template',
'.dsl': 'text/x-dsl',
'.dtd': 'application/xml-dtd',
'.dts': 'audio/vnd.dts',
'.dtshd': 'audio/vnd.dts.hd',
'.dtx': 'text/x-tex',
'.dv': 'video/dv',
'.dvi': 'application/x-dvi',
'.dvi.bz2': 'application/x-bzdvi',
'.dvi.gz': 'application/x-gzdvi',
'.dwg': 'image/vnd.dwg',
'.dxf': 'image/vnd.dxf',
'.e': 'text/x-eiffel',
'.egon': 'application/x-egon',
'.eif': 'text/x-eiffel',
'.el': 'text/x-emacs-lisp',
'.emf': 'image/x-emf',
'.eml': 'message/rfc822',
'.emp': 'application/vnd.emusic-emusic_package',
'.ent': 'application/xml-external-parsed-entity',
'.eps': 'image/x-eps',
'.eps.bz2': 'image/x-bzeps',
'.eps.gz': 'image/x-gzeps',
'.epsf': 'image/x-eps',
'.epsf.bz2': 'image/x-bzeps',
'.epsf.gz': 'image/x-gzeps',
'.epsi': 'image/x-eps',
'.epsi.bz2': 'image/x-bzeps',
'.epsi.gz': 'image/x-gzeps',
'.epub': 'application/epub+zip',
'.erl': 'text/x-erlang',
'.es': 'application/ecmascript',
'.etheme': 'application/x-e-theme',
'.etx': 'text/x-setext',
'.exe': 'application/x-ms-dos-executable',
'.exr': 'image/x-exr',
'.ez': 'application/andrew-inset',
'.f': 'text/x-fortran',
'.f4a': 'audio/mp4',
'.f4b': 'audio/x-m4b',
'.f4v': 'video/mp4',
'.f90': 'text/x-fortran',
'.f95': 'text/x-fortran',
'.fb2': 'application/x-fictionbook+xml',
'.fig': 'image/x-xfig',
'.fits': 'image/fits',
'.fl': 'application/x-fluid',
'.flac': 'audio/flac',
'.flc': 'video/x-flic',
'.fli': 'video/x-flic',
'.flv': 'video/x-flv',
'.flw': 'application/x-kivio',
'.fo': 'text/x-xslfo',
'.fodg': 'application/vnd.oasis.opendocument.graphics-flat-xml',
'.fodp': 'application/vnd.oasis.opendocument.presentation-flat-xml',
'.fods': 'application/vnd.oasis.opendocument.spreadsheet-flat-xml',
'.fodt': 'application/vnd.oasis.opendocument.text-flat-xml',
'.for': 'text/x-fortran',
'.fxm': 'video/x-javafx',
'.g3': 'image/fax-g3',
'.gb': 'application/x-gameboy-rom',
'.gba': 'application/x-gba-rom',
'.gcrd': 'text/vcard',
'.ged': 'application/x-gedcom',
'.gedcom': 'application/x-gedcom',
'.gem': 'application/x-tar',
'.gen': 'application/x-genesis-rom',
'.gf': 'application/x-tex-gf',
'.gg': 'application/x-sms-rom',
'.gif': 'image/gif',
'.glade': 'application/x-glade',
'.gml': 'application/gml+xml',
'.gmo': 'application/x-gettext-translation',
'.gnc': 'application/x-gnucash',
'.gnd': 'application/gnunet-directory',
'.gnucash': 'application/x-gnucash',
'.gnumeric': 'application/x-gnumeric',
'.gnuplot': 'application/x-gnuplot',
'.go': 'text/x-go',
'.gp': 'application/x-gnuplot',
'.gpg': 'application/pgp-encrypted',
'.gplt': 'application/x-gnuplot',
'.gra': 'application/x-graphite',
'.gsf': 'application/x-font-type1',
'.gsm': 'audio/x-gsm',
'.gtar': 'application/x-tar',
'.gv': 'text/vnd.graphviz',
'.gvp': 'text/x-google-video-pointer',
'.gz': 'application/gzip',
'.h': 'text/x-chdr',
'.h++': 'text/x-c++hdr',
'.h4': 'application/x-hdf',
'.h5': 'application/x-hdf',
'.hdf': 'application/x-hdf',
'.hdf4': 'application/x-hdf',
'.hdf5': 'application/x-hdf',
'.hh': 'text/x-c++hdr',
'.hlp': 'application/winhlp',
'.hp': 'text/x-c++hdr',
'.hpgl': 'application/vnd.hp-hpgl',
'.hpp': 'text/x-c++hdr',
'.hs': 'text/x-haskell',
'.htm': 'text/html',
'.html': 'text/html',
'.hwp': 'application/x-hwp',
'.hwt': 'application/x-hwt',
'.hxx': 'text/x-c++hdr',
'.ica': 'application/x-ica',
'.icb': 'image/x-tga',
'.icc': 'application/vnd.iccprofile',
'.icm': 'application/vnd.iccprofile',
'.icns': 'image/x-icns',
'.ico': 'image/vnd.microsoft.icon',
'.ics': 'text/calendar',
'.idl': 'text/x-idl',
'.ief': 'image/ief',
'.iff': 'image/x-ilbm',
'.ilbm': 'image/x-ilbm',
'.ime': 'text/x-imelody',
'.imy': 'text/x-imelody',
'.ins': 'text/x-tex',
'.iptables': 'text/x-iptables',
'.iso': 'application/x-cd-image',
'.iso9660': 'application/x-cd-image',
'.it': 'audio/x-it',
'.it87': 'application/x-it87',
'.j2k': 'image/jp2',
'.jad': 'text/vnd.sun.j2me.app-descriptor',
'.jar': 'application/x-java-archive',
'.java': 'text/x-java',
'.jceks': 'application/x-java-jce-keystore',
'.jks': 'application/x-java-keystore',
'.jng': 'image/x-jng',
'.jnlp': 'application/x-java-jnlp-file',
'.jp2': 'image/jp2',
'.jpc': 'image/jp2',
'.jpe': 'image/jpeg',
'.jpeg': 'image/jpeg',
'.jpf': 'image/jp2',
'.jpg': 'image/jpeg',
'.jpr': 'application/x-jbuilder-project',
'.jpx': 'image/jp2',
'.js': 'application/javascript',
'.json': 'application/json',
'.jsonp': 'application/jsonp',
'.k25': 'image/x-kodak-k25',
'.kar': 'audio/midi',
'.karbon': 'application/x-karbon',
'.kdc': 'image/x-kodak-kdc',
'.kdelnk': 'application/x-desktop',
'.kexi': 'application/x-kexiproject-sqlite3',
'.kexic': 'application/x-kexi-connectiondata',
'.kexis': 'application/x-kexiproject-shortcut',
'.kfo': 'application/x-kformula',
'.kil': 'application/x-killustrator',
'.kino': 'application/smil',
'.kml': 'application/vnd.google-earth.kml+xml',
'.kmz': 'application/vnd.google-earth.kmz',
'.kon': 'application/x-kontour',
'.kpm': 'application/x-kpovmodeler',
'.kpr': 'application/x-kpresenter',
'.kpt': 'application/x-kpresenter',
'.kra': 'application/x-krita',
'.ks': 'application/x-java-keystore',
'.ksp': 'application/x-kspread',
'.kud': 'application/x-kugar',
'.kwd': 'application/x-kword',
'.kwt': 'application/x-kword',
'.la': 'application/x-shared-library-la',
'.latex': 'text/x-tex',
'.lbm': 'image/x-ilbm',
'.ldif': 'text/x-ldif',
'.lha': 'application/x-lha',
'.lhs': 'text/x-literate-haskell',
'.lhz': 'application/x-lhz',
'.load' : 'text/html',
'.log': 'text/x-log',
'.lrz': 'application/x-lrzip',
'.ltx': 'text/x-tex',
'.lua': 'text/x-lua',
'.lwo': 'image/x-lwo',
'.lwob': 'image/x-lwo',
'.lwp': 'application/vnd.lotus-wordpro',
'.lws': 'image/x-lws',
'.ly': 'text/x-lilypond',
'.lyx': 'application/x-lyx',
'.lz': 'application/x-lzip',
'.lzh': 'application/x-lha',
'.lzma': 'application/x-lzma',
'.lzo': 'application/x-lzop',
'.m': 'text/x-matlab',
'.m15': 'audio/x-mod',
'.m1u': 'video/vnd.mpegurl',
'.m2t': 'video/mp2t',
'.m2ts': 'video/mp2t',
'.m3u': 'application/vnd.apple.mpegurl',
'.m3u8': 'application/vnd.apple.mpegurl',
'.m4': 'application/x-m4',
'.m4a': 'audio/mp4',
'.m4b': 'audio/x-m4b',
'.m4u': 'video/vnd.mpegurl',
'.m4v': 'video/mp4',
'.mab': 'application/x-markaby',
'.mak': 'text/x-makefile',
'.man': 'application/x-troff-man',
'.manifest': 'text/cache-manifest',
'.markdown': 'text/x-markdown',
'.mbox': 'application/mbox',
'.md': 'text/x-markdown',
'.mdb': 'application/vnd.ms-access',
'.mdi': 'image/vnd.ms-modi',
'.me': 'text/x-troff-me',
'.med': 'audio/x-mod',
'.meta4': 'application/metalink4+xml',
'.metalink': 'application/metalink+xml',
'.mgp': 'application/x-magicpoint',
'.mht': 'application/x-mimearchive',
'.mhtml': 'application/x-mimearchive',
'.mid': 'audio/midi',
'.midi': 'audio/midi',
'.mif': 'application/x-mif',
'.minipsf': 'audio/x-minipsf',
'.mk': 'text/x-makefile',
'.mka': 'audio/x-matroska',
'.mkd': 'text/x-markdown',
'.mkv': 'video/x-matroska',
'.ml': 'text/x-ocaml',
'.mli': 'text/x-ocaml',
'.mm': 'text/x-troff-mm',
'.mmf': 'application/x-smaf',
'.mml': 'application/mathml+xml',
'.mng': 'video/x-mng',
'.mo': 'text/x-modelica',
'.mo3': 'audio/x-mo3',
'.mobi': 'application/x-mobipocket-ebook',
'.moc': 'text/x-moc',
'.mod': 'audio/x-mod',
'.mof': 'text/x-mof',
'.moov': 'video/quicktime',
'.mov': 'video/quicktime',
'.movie': 'video/x-sgi-movie',
'.mp+': 'audio/x-musepack',
'.mp2': 'video/mpeg',
'.mp3': 'audio/mpeg',
'.mp4': 'video/mp4',
'.mpc': 'audio/x-musepack',
'.mpe': 'video/mpeg',
'.mpeg': 'video/mpeg',
'.mpg': 'video/mpeg',
'.mpga': 'audio/mpeg',
'.mpl': 'video/mp2t',
'.mpls': 'video/mp2t',
'.mpp': 'audio/x-musepack',
'.mrl': 'text/x-mrml',
'.mrml': 'text/x-mrml',
'.mrw': 'image/x-minolta-mrw',
'.ms': 'text/x-troff-ms',
'.msi': 'application/x-msi',
'.msod': 'image/x-msod',
'.msx': 'application/x-msx-rom',
'.mtm': 'audio/x-mod',
'.mts': 'video/mp2t',
'.mup': 'text/x-mup',
'.mxf': 'application/mxf',
'.mxu': 'video/vnd.mpegurl',
'.n64': 'application/x-n64-rom',
'.nb': 'application/mathematica',
'.nc': 'application/x-netcdf',
'.nds': 'application/x-nintendo-ds-rom',
'.nef': 'image/x-nikon-nef',
'.nes': 'application/x-nes-rom',
'.nfo': 'text/x-nfo',
'.not': 'text/x-mup',
'.nsc': 'application/x-netshow-channel',
'.nsv': 'video/x-nsv',
'.nzb': 'application/x-nzb',
'.o': 'application/x-object',
'.obj': 'application/x-tgif',
'.ocl': 'text/x-ocl',
'.oda': 'application/oda',
'.odb': 'application/vnd.oasis.opendocument.database',
'.odc': 'application/vnd.oasis.opendocument.chart',
'.odf': 'application/vnd.oasis.opendocument.formula',
'.odg': 'application/vnd.oasis.opendocument.graphics',
'.odi': 'application/vnd.oasis.opendocument.image',
'.odm': 'application/vnd.oasis.opendocument.text-master',
'.odp': 'application/vnd.oasis.opendocument.presentation',
'.ods': 'application/vnd.oasis.opendocument.spreadsheet',
'.odt': 'application/vnd.oasis.opendocument.text',
'.oga': 'audio/ogg',
'.ogg': 'application/ogg',
'.ogm': 'video/x-ogm+ogg',
'.ogv': 'video/ogg',
'.ogx': 'application/ogg',
'.old': 'application/x-trash',
'.oleo': 'application/x-oleo',
'.ooc': 'text/x-ooc',
'.opml': 'text/x-opml+xml',
'.oprc': 'application/vnd.palm',
'.ora': 'image/openraster',
'.orf': 'image/x-olympus-orf',
'.otc': 'application/vnd.oasis.opendocument.chart-template',
'.otf': 'application/x-font-otf',
'.otg': 'application/vnd.oasis.opendocument.graphics-template',
'.oth': 'application/vnd.oasis.opendocument.text-web',
'.otp': 'application/vnd.oasis.opendocument.presentation-template',
'.ots': 'application/vnd.oasis.opendocument.spreadsheet-template',
'.ott': 'application/vnd.oasis.opendocument.text-template',
'.owl': 'application/rdf+xml',
'.oxps': 'application/oxps',
'.oxt': 'application/vnd.openofficeorg.extension',
'.p': 'text/x-pascal',
'.p10': 'application/pkcs10',
'.p12': 'application/x-pkcs12',
'.p7b': 'application/x-pkcs7-certificates',
'.p7c': 'application/pkcs7-mime',
'.p7m': 'application/pkcs7-mime',
'.p7s': 'application/pkcs7-signature',
'.p8': 'application/pkcs8',
'.pack': 'application/x-java-pack200',
'.pak': 'application/x-pak',
'.par2': 'application/x-par2',
'.pas': 'text/x-pascal',
'.patch': 'text/x-patch',
'.pbm': 'image/x-portable-bitmap',
'.pcap': 'application/vnd.tcpdump.pcap',
'.pcd': 'image/x-photo-cd',
'.pcf': 'application/x-cisco-vpn-settings',
'.pcf.gz': 'application/x-font-pcf',
'.pcf.z': 'application/x-font-pcf',
'.pcl': 'application/vnd.hp-pcl',
'.pct': 'image/x-pict',
'.pcx': 'image/x-pcx',
'.pdb': 'chemical/x-pdb',
'.pdc': 'application/x-aportisdoc',
'.pdf': 'application/pdf',
'.pdf.bz2': 'application/x-bzpdf',
'.pdf.gz': 'application/x-gzpdf',
'.pdf.xz': 'application/x-xzpdf',
'.pef': 'image/x-pentax-pef',
'.pem': 'application/x-x509-ca-cert',
'.perl': 'application/x-perl',
'.pfa': 'application/x-font-type1',
'.pfb': 'application/x-font-type1',
'.pfx': 'application/x-pkcs12',
'.pgm': 'image/x-portable-graymap',
'.pgn': 'application/x-chess-pgn',
'.pgp': 'application/pgp-encrypted',
'.php': 'application/x-php',
'.php3': 'application/x-php',
'.php4': 'application/x-php',
'.php5': 'application/x-php',
'.phps': 'application/x-php',
'.pict': 'image/x-pict',
'.pict1': 'image/x-pict',
'.pict2': 'image/x-pict',
'.pk': 'application/x-tex-pk',
'.pkipath': 'application/pkix-pkipath',
'.pkr': 'application/pgp-keys',
'.pl': 'application/x-perl',
'.pla': 'audio/x-iriver-pla',
'.pln': 'application/x-planperfect',
'.pls': 'audio/x-scpls',
'.pm': 'application/x-perl',
'.png': 'image/png',
'.pnm': 'image/x-portable-anymap',
'.pntg': 'image/x-macpaint',
'.po': 'text/x-gettext-translation',
'.por': 'application/x-spss-por',
'.pot': 'text/x-gettext-translation-template',
'.potm': 'application/vnd.ms-powerpoint.template.macroenabled.12',
'.potx': 'application/vnd.openxmlformats-officedocument.presentationml.template',
'.ppam': 'application/vnd.ms-powerpoint.addin.macroenabled.12',
'.ppm': 'image/x-portable-pixmap',
'.pps': 'application/vnd.ms-powerpoint',
'.ppsm': 'application/vnd.ms-powerpoint.slideshow.macroenabled.12',
'.ppsx': 'application/vnd.openxmlformats-officedocument.presentationml.slideshow',
'.ppt': 'application/vnd.ms-powerpoint',
'.pptm': 'application/vnd.ms-powerpoint.presentation.macroenabled.12',
'.pptx': 'application/vnd.openxmlformats-officedocument.presentationml.presentation',
'.ppz': 'application/vnd.ms-powerpoint',
'.pqa': 'application/vnd.palm',
'.prc': 'application/vnd.palm',
'.ps': 'application/postscript',
'.ps.bz2': 'application/x-bzpostscript',
'.ps.gz': 'application/x-gzpostscript',
'.psd': 'image/vnd.adobe.photoshop',
'.psf': 'audio/x-psf',
'.psf.gz': 'application/x-gz-font-linux-psf',
'.psflib': 'audio/x-psflib',
'.psid': 'audio/prs.sid',
'.psw': 'application/x-pocket-word',
'.pw': 'application/x-pw',
'.py': 'text/x-python',
'.pyc': 'application/x-python-bytecode',
'.pickle': 'application/python-pickle',
'.pyo': 'application/x-python-bytecode',
'.qif': 'image/x-quicktime',
'.qml': 'text/x-qml',
'.qt': 'video/quicktime',
'.qti': 'application/x-qtiplot',
'.qti.gz': 'application/x-qtiplot',
'.qtif': 'image/x-quicktime',
'.qtl': 'application/x-quicktime-media-link',
'.qtvr': 'video/quicktime',
'.ra': 'audio/vnd.rn-realaudio',
'.raf': 'image/x-fuji-raf',
'.ram': 'application/ram',
'.rar': 'application/x-rar',
'.ras': 'image/x-cmu-raster',
'.raw': 'image/x-panasonic-raw',
'.rax': 'audio/vnd.rn-realaudio',
'.rb': 'application/x-ruby',
'.rdf': 'application/rdf+xml',
'.rdfs': 'application/rdf+xml',
'.reg': 'text/x-ms-regedit',
'.rej': 'text/x-reject',
'.rgb': 'image/x-rgb',
'.rle': 'image/rle',
'.rm': 'application/vnd.rn-realmedia',
'.rmj': 'application/vnd.rn-realmedia',
'.rmm': 'application/vnd.rn-realmedia',
'.rms': 'application/vnd.rn-realmedia',
'.rmvb': 'application/vnd.rn-realmedia',
'.rmx': 'application/vnd.rn-realmedia',
'.rnc': 'application/relax-ng-compact-syntax',
'.rng': 'application/xml',
'.roff': 'text/troff',
'.rp': 'image/vnd.rn-realpix',
'.rpm': 'application/x-rpm',
'.rss': 'application/rss+xml',
'.rt': 'text/vnd.rn-realtext',
'.rtf': 'application/rtf',
'.rtx': 'text/richtext',
'.rv': 'video/vnd.rn-realvideo',
'.rvx': 'video/vnd.rn-realvideo',
'.rw2': 'image/x-panasonic-raw2',
'.s3m': 'audio/x-s3m',
'.sam': 'application/x-amipro',
'.sami': 'application/x-sami',
'.sav': 'application/x-spss-sav',
'.scala': 'text/x-scala',
'.scm': 'text/x-scheme',
'.sda': 'application/vnd.stardivision.draw',
'.sdc': 'application/vnd.stardivision.calc',
'.sdd': 'application/vnd.stardivision.impress',
'.sdp': 'application/sdp',
'.sds': 'application/vnd.stardivision.chart',
'.sdw': 'application/vnd.stardivision.writer',
'.sgf': 'application/x-go-sgf',
'.sgi': 'image/x-sgi',
'.sgl': 'application/vnd.stardivision.writer',
'.sgm': 'text/sgml',
'.sgml': 'text/sgml',
'.sh': 'application/x-shellscript',
'.shape': 'application/x-dia-shape',
'.shar': 'application/x-shar',
'.shn': 'application/x-shorten',
'.siag': 'application/x-siag',
'.sid': 'audio/prs.sid',
'.sik': 'application/x-trash',
'.sis': 'application/vnd.symbian.install',
'.sisx': 'x-epoc/x-sisx-app',
'.sit': 'application/x-stuffit',
'.siv': 'application/sieve',
'.sk': 'image/x-skencil',
'.sk1': 'image/x-skencil',
'.skr': 'application/pgp-keys',
'.sldm': 'application/vnd.ms-powerpoint.slide.macroenabled.12',
'.sldx': 'application/vnd.openxmlformats-officedocument.presentationml.slide',
'.slk': 'text/spreadsheet',
'.smaf': 'application/x-smaf',
'.smc': 'application/x-snes-rom',
'.smd': 'application/vnd.stardivision.mail',
'.smf': 'application/vnd.stardivision.math',
'.smi': 'application/x-sami',
'.smil': 'application/smil',
'.sml': 'application/smil',
'.sms': 'application/x-sms-rom',
'.snd': 'audio/basic',
'.so': 'application/x-sharedlib',
'.spc': 'application/x-pkcs7-certificates',
'.spd': 'application/x-font-speedo',
'.spec': 'text/x-rpm-spec',
'.spl': 'application/x-shockwave-flash',
'.spm': 'application/x-source-rpm',
'.spx': 'audio/x-speex',
'.sql': 'text/x-sql',
'.sr2': 'image/x-sony-sr2',
'.src': 'application/x-wais-source',
'.src.rpm': 'application/x-source-rpm',
'.srf': 'image/x-sony-srf',
'.srt': 'application/x-subrip',
'.ss': 'text/x-scheme',
'.ssa': 'text/x-ssa',
'.stc': 'application/vnd.sun.xml.calc.template',
'.std': 'application/vnd.sun.xml.draw.template',
'.sti': 'application/vnd.sun.xml.impress.template',
'.stm': 'audio/x-stm',
'.stw': 'application/vnd.sun.xml.writer.template',
'.sty': 'text/x-tex',
'.sub': 'text/x-subviewer',
'.sun': 'image/x-sun-raster',
'.sv': 'text/x-svsrc',
'.sv4cpio': 'application/x-sv4cpio',
'.sv4crc': 'application/x-sv4crc',
'.svg': 'image/svg+xml',
'.svgz': 'image/svg+xml-compressed',
'.svh': 'text/x-svhdr',
'.swf': 'application/x-shockwave-flash',
'.swm': 'application/x-ms-wim',
'.sxc': 'application/vnd.sun.xml.calc',
'.sxd': 'application/vnd.sun.xml.draw',
'.sxg': 'application/vnd.sun.xml.writer.global',
'.sxi': 'application/vnd.sun.xml.impress',
'.sxm': 'application/vnd.sun.xml.math',
'.sxw': 'application/vnd.sun.xml.writer',
'.sylk': 'text/spreadsheet',
'.t': 'text/troff',
'.t2t': 'text/x-txt2tags',
'.tar': 'application/x-tar',
'.tar.bz': 'application/x-bzip-compressed-tar',
'.tar.bz2': 'application/x-bzip-compressed-tar',
'.tar.gz': 'application/x-compressed-tar',
'.tar.lrz': 'application/x-lrzip-compressed-tar',
'.tar.lzma': 'application/x-lzma-compressed-tar',
'.tar.lzo': 'application/x-tzo',
'.tar.xz': 'application/x-xz-compressed-tar',
'.tar.z': 'application/x-tarz',
'.taz': 'application/x-tarz',
'.tb2': 'application/x-bzip-compressed-tar',
'.tbz': 'application/x-bzip-compressed-tar',
'.tbz2': 'application/x-bzip-compressed-tar',
'.tcl': 'text/x-tcl',
'.tex': 'text/x-tex',
'.texi': 'text/x-texinfo',
'.texinfo': 'text/x-texinfo',
'.tga': 'image/x-tga',
'.tgz': 'application/x-compressed-tar',
'.theme': 'application/x-theme',
'.themepack': 'application/x-windows-themepack',
'.tif': 'image/tiff',
'.tiff': 'image/tiff',
'.tk': 'text/x-tcl',
'.tlrz': 'application/x-lrzip-compressed-tar',
'.tlz': 'application/x-lzma-compressed-tar',
'.tnef': 'application/vnd.ms-tnef',
'.tnf': 'application/vnd.ms-tnef',
'.toc': 'application/x-cdrdao-toc',
'.torrent': 'application/x-bittorrent',
'.tpic': 'image/x-tga',
'.tr': 'text/troff',
'.ts': 'video/mp2t',
'.tsv': 'text/tab-separated-values',
'.tta': 'audio/x-tta',
'.ttc': 'application/x-font-ttf',
'.ttf': 'application/x-font-ttf',
'.ttx': 'application/x-font-ttx',
'.txt': 'text/plain',
'.txz': 'application/x-xz-compressed-tar',
'.tzo': 'application/x-tzo',
'.ufraw': 'application/x-ufraw',
'.ui': 'application/x-gtk-builder',
'.uil': 'text/x-uil',
'.ult': 'audio/x-mod',
'.uni': 'audio/x-mod',
'.url': 'application/x-mswinurl',
'.ustar': 'application/x-ustar',
'.uue': 'text/x-uuencode',
'.v': 'text/x-verilog',
'.vala': 'text/x-vala',
'.vapi': 'text/x-vala',
'.vcard': 'text/vcard',
'.vcf': 'text/vcard',
'.vcs': 'text/calendar',
'.vct': 'text/vcard',
'.vda': 'image/x-tga',
'.vhd': 'text/x-vhdl',
'.vhdl': 'text/x-vhdl',
'.viv': 'video/vivo',
'.vivo': 'video/vivo',
'.vlc': 'audio/x-mpegurl',
'.vob': 'video/mpeg',
'.voc': 'audio/x-voc',
'.vor': 'application/vnd.stardivision.writer',
'.vrm': 'model/vrml',
'.vrml': 'model/vrml',
'.vsd': 'application/vnd.visio',
'.vss': 'application/vnd.visio',
'.vst': 'image/x-tga',
'.vsw': 'application/vnd.visio',
'.vtt': 'text/vtt',
'.w2p': 'application/w2p',
'.wav': 'audio/x-wav',
'.wax': 'audio/x-ms-asx',
'.wb1': 'application/x-quattropro',
'.wb2': 'application/x-quattropro',
'.wb3': 'application/x-quattropro',
'.wbmp': 'image/vnd.wap.wbmp',
'.wcm': 'application/vnd.ms-works',
'.wdb': 'application/vnd.ms-works',
'.webm': 'video/webm',
'.wim': 'application/x-ms-wim',
'.wk1': 'application/vnd.lotus-1-2-3',
'.wk3': 'application/vnd.lotus-1-2-3',
'.wk4': 'application/vnd.lotus-1-2-3',
'.wks': 'application/vnd.ms-works',
'.wma': 'audio/x-ms-wma',
'.wmf': 'image/x-wmf',
'.wml': 'text/vnd.wap.wml',
'.wmls': 'text/vnd.wap.wmlscript',
'.wmv': 'video/x-ms-wmv',
'.wmx': 'audio/x-ms-asx',
'.woff': 'application/font-woff',
'.wp': 'application/vnd.wordperfect',
'.wp4': 'application/vnd.wordperfect',
'.wp5': 'application/vnd.wordperfect',
'.wp6': 'application/vnd.wordperfect',
'.wpd': 'application/vnd.wordperfect',
'.wpg': 'application/x-wpg',
'.wpl': 'application/vnd.ms-wpl',
'.wpp': 'application/vnd.wordperfect',
'.wps': 'application/vnd.ms-works',
'.wri': 'application/x-mswrite',
'.wrl': 'model/vrml',
'.wsgi': 'text/x-python',
'.wv': 'audio/x-wavpack',
'.wvc': 'audio/x-wavpack-correction',
'.wvp': 'audio/x-wavpack',
'.wvx': 'audio/x-ms-asx',
'.wwf': 'application/x-wwf',
'.x3f': 'image/x-sigma-x3f',
'.xac': 'application/x-gnucash',
'.xbel': 'application/x-xbel',
'.xbl': 'application/xml',
'.xbm': 'image/x-xbitmap',
'.xcf': 'image/x-xcf',
'.xcf.bz2': 'image/x-compressed-xcf',
'.xcf.gz': 'image/x-compressed-xcf',
'.xhtml': 'application/xhtml+xml',
'.xi': 'audio/x-xi',
'.xla': 'application/vnd.ms-excel',
'.xlam': 'application/vnd.ms-excel.addin.macroenabled.12',
'.xlc': 'application/vnd.ms-excel',
'.xld': 'application/vnd.ms-excel',
'.xlf': 'application/x-xliff',
'.xliff': 'application/x-xliff',
'.xll': 'application/vnd.ms-excel',
'.xlm': 'application/vnd.ms-excel',
'.xlr': 'application/vnd.ms-works',
'.xls': 'application/vnd.ms-excel',
'.xlsb': 'application/vnd.ms-excel.sheet.binary.macroenabled.12',
'.xlsm': 'application/vnd.ms-excel.sheet.macroenabled.12',
'.xlsx': 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
'.xlt': 'application/vnd.ms-excel',
'.xltm': 'application/vnd.ms-excel.template.macroenabled.12',
'.xltx': 'application/vnd.openxmlformats-officedocument.spreadsheetml.template',
'.xlw': 'application/vnd.ms-excel',
'.xm': 'audio/x-xm',
'.xmf': 'audio/x-xmf',
'.xmi': 'text/x-xmi',
'.xml': 'application/xml',
'.xpi': 'application/x-xpinstall',
'.xpm': 'image/x-xpixmap',
'.xps': 'application/oxps',
'.xsd': 'application/xml',
'.xsl': 'application/xslt+xml',
'.xslfo': 'text/x-xslfo',
'.xslm' : 'application/vnd.ms-excel.sheet.macroEnabled.12',
'.xslt': 'application/xslt+xml',
'.xspf': 'application/xspf+xml',
'.xul': 'application/vnd.mozilla.xul+xml',
'.xwd': 'image/x-xwindowdump',
'.xyz': 'chemical/x-pdb',
'.xz': 'application/x-xz',
'.yaml': 'application/x-yaml',
'.yml': 'application/x-yaml',
'.z': 'application/x-compress',
'.zabw': 'application/x-abiword',
'.zip': 'application/zip',
'.zoo': 'application/x-zoo',
}
def contenttype(filename, default='text/plain'):
    """
    Return the Content-Type string matching the extension of the given
    filename. The last extension is looked up first; a compound extension
    (such as '.tar.gz') overrides it when it has its own entry. Types in
    the text/* family are tagged with a UTF-8 charset.
    """
    last_dot = filename.rfind('.')
    if last_dot >= 0:
        # Single extension, e.g. '.gz'.
        default = CONTENT_TYPE.get(filename[last_dot:].lower(), default)
        prev_dot = filename.rfind('.', 0, last_dot)
        if prev_dot >= 0:
            # Compound extension, e.g. '.tar.gz', takes precedence.
            default = CONTENT_TYPE.get(filename[prev_dot:].lower(), default)
    if default.startswith('text/'):
        default += '; charset=utf-8'
    return default
| |
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Configuration variable management for the cr tool.
This holds the classes that support the hierarchical variable management used
in the cr tool to provide all the command configuration controls.
"""
import string
import cr.visitor
# Values that ParseValue recognizes as typed constants rather than strings.
_PARSE_CONSTANT_VALUES = [None, True, False]
# Maps the string form ('None', 'True', 'False') back to the typed value;
# used by Config.ParseValue when interpreting environment-style input.
_PARSE_CONSTANTS = dict((str(value), value) for value in _PARSE_CONSTANT_VALUES)
# GLOBALS is the singleton used to tie static global configuration objects
# together.
GLOBALS = []
class _MissingToErrorFormatter(string.Formatter):
"""A string formatter used in value resolve.
The main extra it adds is a new conversion specifier 'e' that throws a
KeyError if it could not find the value.
This allows a string value to use {A_KEY!e} to indicate that it is a
formatting error if A_KEY is not present.
"""
def convert_field(self, value, conversion):
if conversion == 'e':
result = str(value)
if not result:
raise KeyError('unknown')
return result
return super(_MissingToErrorFormatter, self).convert_field(
value, conversion)
class _Tracer(object):
"""Traces variable lookups.
This adds a hook to a config object, and uses it to track all variable
lookups that happen and add them to a trail. When done, it removes the hook
again. This is used to provide debugging information about what variables are
used in an operation.
"""
def __init__(self, config):
self.config = config
self.trail = []
def __enter__(self):
self.config.fixup_hooks.append(self._Trace)
return self
def __exit__(self, *_):
self.config.fixup_hooks.remove(self._Trace)
self.config.trail = self.trail
return False
def _Trace(self, _, key, value):
self.trail.append((key, value))
return value
class Config(cr.visitor.Node, cr.loader.AutoExport):
  """The main variable holding class.
  This holds a set of unresolved key value pairs, and the set of child Config
  objects that should be referenced when looking up a key.
  Key search is done in a pre-order traversal, and new children are prepended.
  This means parents override children, and the most recently added child
  overrides the rest.
  Values can be simple python types, callable dynamic values, or strings.
  If the value is a string, it is assumed to be a standard python format string
  where the root config object is used to resolve the keys. This allows values
  to refer to variables that are overridden in another part of the hierarchy.
  """
@classmethod
def From(cls, *args, **kwargs):
"""Builds an unnamed config object from a set of key,value args."""
return Config('??').Apply(args, kwargs)
@classmethod
def If(cls, condition, true_value, false_value=''):
"""Returns a config value that selects a value based on the condition.
Args:
condition: The variable name to select a value on.
true_value: The value to use if the variable is True.
false_value: The value to use if the resolved variable is False.
Returns:
A dynamic value.
"""
def Resolve(base):
test = base.Get(condition)
if test:
value = true_value
else:
value = false_value
return base.Substitute(value)
return Resolve
@classmethod
def Optional(cls, value, alternate=''):
"""Returns a dynamic value that defaults to an alternate.
Args:
value: The main value to resolve.
alternate: The value to use if the main value does not resolve.
Returns:
value if it resolves, alternate otherwise.
"""
def Resolve(base):
try:
return base.Substitute(value)
except KeyError:
return base.Substitute(alternate)
return Resolve
def __init__(self, name='--', literal=False, export=None, enabled=True):
super(Config, self).__init__(name=name, enabled=enabled, export=export)
self._literal = literal
self._formatter = _MissingToErrorFormatter()
self.fixup_hooks = []
self.trail = []
@property
def literal(self):
return self._literal
def Substitute(self, value):
return self._formatter.vformat(str(value), (), self)
def Resolve(self, visitor, key, value):
"""Resolves a value to it's final form.
Raw values can be callable, simple values, or contain format strings.
Args:
visitor: The visitor asking to resolve a value.
key: The key being visited.
value: The unresolved value associated with the key.
Returns:
the fully resolved value.
"""
error = None
if callable(value):
value = value(self)
# Using existence of value.swapcase as a proxy for is a string
elif hasattr(value, 'swapcase'):
if not visitor.current_node.literal:
try:
value = self.Substitute(value)
except KeyError as e:
error = e
return self.Fixup(key, value), error
def Fixup(self, key, value):
for hook in self.fixup_hooks:
value = hook(self, key, value)
return value
def Missing(self, key):
for hook in self.fixup_hooks:
hook(self, key, None)
raise KeyError(key)
@staticmethod
def ParseValue(value):
"""Converts a string to a value.
Takes a string from something like an environment variable, and tries to
build an internal typed value. Recognizes Null, booleans, and numbers as
special.
Args:
value: The the string value to interpret.
Returns:
the parsed form of the value.
"""
if value in _PARSE_CONSTANTS:
return _PARSE_CONSTANTS[value]
try:
return int(value)
except ValueError:
pass
try:
return float(value)
except ValueError:
pass
return value
def _Set(self, key, value):
# early out if the value did not change, so we don't call change callbacks
if value == self._values.get(key, None):
return
self._values[key] = value
self.NotifyChanged()
return self
def ApplyMap(self, arg):
for key, value in arg.items():
self._Set(key, value)
return self
def Apply(self, args, kwargs):
"""Bulk set variables from arguments.
Intended for internal use by the Set and From methods.
Args:
args: must be either a dict or something that can build a dict.
kwargs: must be a dict.
Returns:
self for easy chaining.
"""
if len(args) == 1:
arg = args[0]
if isinstance(arg, dict):
self.ApplyMap(arg)
else:
self.ApplyMap(dict(arg))
elif len(args) > 1:
self.ApplyMap(dict(args))
self.ApplyMap(kwargs)
return self
def Set(self, *args, **kwargs):
return self.Apply(args, kwargs)
def Trace(self):
return _Tracer(self)
def __getitem__(self, key):
return self.Get(key)
def __setitem__(self, key, value):
self._Set(key, value)
def __contains__(self, key):
return self.Find(key) is not None
| |
"""Support for the Netatmo devices."""
import logging
from datetime import timedelta
from urllib.error import HTTPError
import voluptuous as vol
from homeassistant.const import (
CONF_API_KEY, CONF_PASSWORD, CONF_USERNAME, CONF_DISCOVERY, CONF_URL,
EVENT_HOMEASSISTANT_STOP)
from homeassistant.helpers import discovery
import homeassistant.helpers.config_validation as cv
from homeassistant.util import Throttle
REQUIREMENTS = ['pyatmo==1.8']
DEPENDENCIES = ['webhook']
_LOGGER = logging.getLogger(__name__)
# Keys into hass.data.
DATA_PERSONS = 'netatmo_persons'
DATA_WEBHOOK_URL = 'netatmo_webhook_url'
# Configuration keys specific to this component.
CONF_SECRET_KEY = 'secret_key'
CONF_WEBHOOKS = 'webhooks'
DOMAIN = 'netatmo'
# Service names registered in setup().
SERVICE_ADDWEBHOOK = 'addwebhook'
SERVICE_DROPWEBHOOK = 'dropwebhook'
# Module-level state populated by setup().
NETATMO_AUTH = None
# NOTE(review): NETATMO_WEBHOOK_URL appears unused in this module
# (DATA_WEBHOOK_URL in hass.data is used instead) -- confirm before removing.
NETATMO_WEBHOOK_URL = None
DEFAULT_PERSON = 'Unknown'
DEFAULT_DISCOVERY = True
DEFAULT_WEBHOOKS = False
# Event types as sent by the Netatmo webhook payload.
EVENT_PERSON = 'person'
EVENT_MOVEMENT = 'movement'
EVENT_HUMAN = 'human'
EVENT_ANIMAL = 'animal'
EVENT_VEHICLE = 'vehicle'
# Corresponding Home Assistant event bus names fired by handle_webhook().
EVENT_BUS_PERSON = 'netatmo_person'
EVENT_BUS_MOVEMENT = 'netatmo_movement'
EVENT_BUS_HUMAN = 'netatmo_human'
EVENT_BUS_ANIMAL = 'netatmo_animal'
EVENT_BUS_VEHICLE = 'netatmo_vehicle'
EVENT_BUS_OTHER = 'netatmo_other'
# Attribute keys used both for reading webhook data and for published events.
ATTR_ID = 'id'
ATTR_PSEUDO = 'pseudo'
ATTR_NAME = 'name'
ATTR_EVENT_TYPE = 'event_type'
ATTR_MESSAGE = 'message'
ATTR_CAMERA_ID = 'camera_id'
ATTR_HOME_NAME = 'home_name'
ATTR_PERSONS = 'persons'
ATTR_IS_KNOWN = 'is_known'
ATTR_FACE_URL = 'face_url'
ATTR_SNAPSHOT_URL = 'snapshot_url'
ATTR_VIGNETTE_URL = 'vignette_url'
# Throttle intervals for CameraData.update / update_event.
MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=10)
MIN_TIME_BETWEEN_EVENT_UPDATES = timedelta(seconds=10)
CONFIG_SCHEMA = vol.Schema({
    DOMAIN: vol.Schema({
        vol.Required(CONF_API_KEY): cv.string,
        vol.Required(CONF_PASSWORD): cv.string,
        vol.Required(CONF_SECRET_KEY): cv.string,
        vol.Required(CONF_USERNAME): cv.string,
        vol.Optional(CONF_WEBHOOKS, default=DEFAULT_WEBHOOKS): cv.boolean,
        vol.Optional(CONF_DISCOVERY, default=DEFAULT_DISCOVERY): cv.boolean,
    })
}, extra=vol.ALLOW_EXTRA)
# Service schemas: addwebhook takes an optional URL, dropwebhook takes nothing.
SCHEMA_SERVICE_ADDWEBHOOK = vol.Schema({
    vol.Optional(CONF_URL): cv.string,
})
SCHEMA_SERVICE_DROPWEBHOOK = vol.Schema({})
def setup(hass, config):
    """Set up the Netatmo devices.

    Authenticates against the Netatmo API, optionally loads the discovered
    platforms, optionally registers a webhook, and registers the
    addwebhook/dropwebhook services.

    Returns:
        True on success, False if authentication fails.
    """
    import pyatmo
    global NETATMO_AUTH
    # Person-id -> pseudo map, filled later by CameraData.get_persons().
    hass.data[DATA_PERSONS] = {}
    try:
        NETATMO_AUTH = pyatmo.ClientAuth(
            config[DOMAIN][CONF_API_KEY], config[DOMAIN][CONF_SECRET_KEY],
            config[DOMAIN][CONF_USERNAME], config[DOMAIN][CONF_PASSWORD],
            'read_station read_camera access_camera '
            'read_thermostat write_thermostat '
            'read_presence access_presence read_homecoach')
    except HTTPError:
        _LOGGER.error("Unable to connect to Netatmo API")
        return False
    if config[DOMAIN][CONF_DISCOVERY]:
        for component in 'camera', 'sensor', 'binary_sensor', 'climate':
            discovery.load_platform(hass, component, DOMAIN, {}, config)
    if config[DOMAIN][CONF_WEBHOOKS]:
        webhook_id = hass.components.webhook.async_generate_id()
        hass.data[
            DATA_WEBHOOK_URL] = hass.components.webhook.async_generate_url(
                webhook_id)
        hass.components.webhook.async_register(
            DOMAIN, 'Netatmo', webhook_id, handle_webhook)
        NETATMO_AUTH.addwebhook(hass.data[DATA_WEBHOOK_URL])
        # NOTE(review): bus listeners are invoked with the event object, so
        # dropwebhook's parameter actually receives the stop event, not hass;
        # harmless here because dropwebhook ignores its argument.
        hass.bus.listen_once(
            EVENT_HOMEASSISTANT_STOP, dropwebhook)
    def _service_addwebhook(service):
        """Service to (re)add webhooks during runtime."""
        url = service.data.get(CONF_URL)
        if url is None:
            # Fall back to the webhook URL generated during setup.
            url = hass.data[DATA_WEBHOOK_URL]
        _LOGGER.info("Adding webhook for URL: %s", url)
        NETATMO_AUTH.addwebhook(url)
    hass.services.register(
        DOMAIN, SERVICE_ADDWEBHOOK, _service_addwebhook,
        schema=SCHEMA_SERVICE_ADDWEBHOOK)
    def _service_dropwebhook(service):
        """Service to drop webhooks during runtime."""
        _LOGGER.info("Dropping webhook")
        NETATMO_AUTH.dropwebhook()
    hass.services.register(
        DOMAIN, SERVICE_DROPWEBHOOK, _service_dropwebhook,
        schema=SCHEMA_SERVICE_DROPWEBHOOK)
    return True
def dropwebhook(hass):
    """Drop the webhook subscription.

    Registered as an EVENT_HOMEASSISTANT_STOP listener in setup(), so the
    argument is actually the event object despite its name; it is unused.
    """
    NETATMO_AUTH.dropwebhook()
async def handle_webhook(hass, webhook_id, request):
    """Handle webhook callback.

    Parses the Netatmo webhook JSON payload and republishes it on the Home
    Assistant event bus under a netatmo_* event name keyed by the payload's
    event_type. Unknown event types are forwarded raw as EVENT_BUS_OTHER.

    Returns:
        None (also on malformed/non-JSON payloads, which are ignored).
    """
    try:
        data = await request.json()
    except ValueError:
        # Malformed payload: nothing to publish.
        return None
    _LOGGER.debug("Got webhook data: %s", data)
    published_data = {
        ATTR_EVENT_TYPE: data.get(ATTR_EVENT_TYPE),
        ATTR_HOME_NAME: data.get(ATTR_HOME_NAME),
        ATTR_CAMERA_ID: data.get(ATTR_CAMERA_ID),
        ATTR_MESSAGE: data.get(ATTR_MESSAGE)
    }
    event_type = data.get(ATTR_EVENT_TYPE)
    if event_type == EVENT_PERSON:
        # One bus event per detected person, enriched with the locally known
        # pseudo (fetched earlier via CameraData.get_persons()).
        for person in data[ATTR_PERSONS]:
            published_data[ATTR_ID] = person.get(ATTR_ID)
            published_data[ATTR_NAME] = hass.data[DATA_PERSONS].get(
                published_data[ATTR_ID], DEFAULT_PERSON)
            published_data[ATTR_IS_KNOWN] = person.get(ATTR_IS_KNOWN)
            published_data[ATTR_FACE_URL] = person.get(ATTR_FACE_URL)
            hass.bus.async_fire(EVENT_BUS_PERSON, published_data)
    elif event_type in (EVENT_MOVEMENT, EVENT_HUMAN, EVENT_ANIMAL,
                        EVENT_VEHICLE):
        # Bug fix: the vehicle branch previously fired the bus event BEFORE
        # adding the vignette/snapshot URLs, so listeners never saw them.
        # All four detection events now attach the URLs first, then fire.
        published_data[ATTR_VIGNETTE_URL] = data.get(ATTR_VIGNETTE_URL)
        published_data[ATTR_SNAPSHOT_URL] = data.get(ATTR_SNAPSHOT_URL)
        bus_event = {
            EVENT_MOVEMENT: EVENT_BUS_MOVEMENT,
            EVENT_HUMAN: EVENT_BUS_HUMAN,
            EVENT_ANIMAL: EVENT_BUS_ANIMAL,
            EVENT_VEHICLE: EVENT_BUS_VEHICLE,
        }[event_type]
        hass.bus.async_fire(bus_event, published_data)
    else:
        # Unrecognized event type: forward the raw payload.
        hass.bus.async_fire(EVENT_BUS_OTHER, data)
class CameraData:
    """Get the latest data from Netatmo."""
    def __init__(self, hass, auth, home=None):
        """Initialize the data object."""
        self._hass = hass
        self.auth = auth
        self.camera_data = None
        self.camera_names = []
        self.module_names = []
        self.home = home
        self.camera_type = None
    def get_camera_names(self):
        """Return all camera available on the API as a list."""
        self.camera_names = []
        self.update()
        cameras = self.camera_data.cameras
        if not self.home:
            # No home filter: collect cameras across every home.
            self.camera_names = [
                camera['name']
                for home in cameras
                for camera in cameras[home].values()
            ]
        else:
            self.camera_names = [
                camera['name'] for camera in cameras[self.home].values()
            ]
        return self.camera_names
    def get_module_names(self, camera_name):
        """Return all module available on the API as a list."""
        self.module_names = []
        self.update()
        cam_id = self.camera_data.cameraByName(camera=camera_name,
                                               home=self.home)['id']
        self.module_names = [
            module['name']
            for module in self.camera_data.modules.values()
            if module['cam_id'] == cam_id
        ]
        return self.module_names
    def get_camera_type(self, camera=None, home=None, cid=None):
        """Return camera type for a camera, cid has preference over camera."""
        self.camera_type = self.camera_data.cameraType(
            camera=camera, home=home, cid=cid)
        return self.camera_type
    def get_persons(self):
        """Gather person data for webhooks."""
        known_persons = self._hass.data[DATA_PERSONS]
        for person_id, person_data in self.camera_data.persons.items():
            known_persons[person_id] = person_data.get(ATTR_PSEUDO)
    @Throttle(MIN_TIME_BETWEEN_UPDATES)
    def update(self):
        """Call the Netatmo API to update the data."""
        import pyatmo
        self.camera_data = pyatmo.CameraData(self.auth, size=100)
    @Throttle(MIN_TIME_BETWEEN_EVENT_UPDATES)
    def update_event(self):
        """Call the Netatmo API to update the events."""
        self.camera_data.updateEvent(
            home=self.home, cameratype=self.camera_type)
| |
from framework.celery_tasks.handlers import enqueue_task
import hashlib
from api.providers.tasks import prepare_for_registration_bulk_creation
from django.db.models import Case, CharField, Q, Value, When, IntegerField
from django.http import JsonResponse
from guardian.shortcuts import get_objects_for_user
from rest_framework.exceptions import ValidationError
from rest_framework import generics
from rest_framework import permissions as drf_permissions
from rest_framework.exceptions import NotAuthenticated, NotFound
from rest_framework.views import APIView
from rest_framework.parsers import FileUploadParser
from rest_framework.response import Response
from api.actions.serializers import RegistrationActionSerializer
from api.base import permissions as base_permissions
from osf.models.action import RegistrationAction
from api.base.exceptions import InvalidFilterValue, InvalidFilterOperator, Conflict
from api.base.filters import PreprintFilterMixin, ListFilterMixin
from api.base.views import JSONAPIBaseView, DeprecatedView
from api.base.metrics import MetricsViewMixin
from api.base.pagination import MaxSizePagination, IncreasedPageSizePagination
from api.base.utils import get_object_or_error, get_user_auth, is_truthy
from api.licenses.views import LicenseList
from api.collections.permissions import CanSubmitToCollectionOrPublic
from api.collections.serializers import CollectionSubmissionSerializer, CollectionSubmissionCreateSerializer
from api.preprints.permissions import PreprintPublishedOrAdmin
from api.preprints.serializers import PreprintSerializer
from api.providers.permissions import CanAddModerator, CanDeleteModerator, CanUpdateModerator, CanSetUpProvider, MustBeModerator
from api.providers.serializers import (
CollectionProviderSerializer,
PreprintProviderSerializer,
PreprintModeratorSerializer,
RegistrationProviderSerializer,
RegistrationModeratorSerializer,
)
from api.registrations import annotations as registration_annotations
from api.registrations.serializers import RegistrationSerializer
from api.requests.serializers import PreprintRequestSerializer, RegistrationRequestSerializer
from api.schemas.serializers import RegistrationSchemaSerializer
from api.subjects.views import SubjectList
from api.subjects.serializers import SubjectSerializer
from api.taxonomies.serializers import TaxonomySerializer
from api.taxonomies.utils import optimize_subject_query
from framework.auth.oauth_scopes import CoreScopes
from api.base.settings import BULK_SETTINGS
from osf.models import (
AbstractNode,
CollectionProvider,
CollectionSubmission,
NodeLicense,
OSFUser,
RegistrationProvider,
Subject,
PreprintRequest,
PreprintProvider,
WhitelistedSHAREPreprintProvider,
NodeRequest,
Registration,
RegistrationBulkUploadJob,
)
from osf.utils.permissions import REVIEW_PERMISSIONS, ADMIN
from osf.utils.workflows import RequestTypes
from osf.metrics import PreprintDownload, PreprintView
from osf.registrations.utils import BulkRegistrationUpload, InvalidHeadersError
class ProviderMixin:
    """Resolves the provider a view operates on from its URL kwargs."""
    provider_class = None
    def get_provider(self):
        # used in perms class
        assert self.provider_class is not None, 'must define provider class to use ProviderMixin'
        provider_id = self.kwargs.get('provider_id')
        if provider_id:
            return get_object_or_error(
                self.provider_class,
                provider_id,
                self.request,
                display_name=self.provider_class.__name__,
            )
        node_id = self.kwargs.get('node_id')
        if node_id:
            # Fall back to resolving the provider through a node.
            node = get_object_or_error(
                AbstractNode,
                node_id,
                self.request,
                display_name=AbstractNode.__name__,
            )
            return node.provider
class GenericProviderList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin):
    """Base list view for providers; subclasses set model_class and serializer_class."""
    permission_classes = (
        drf_permissions.IsAuthenticatedOrReadOnly,
        base_permissions.TokenHasScope,
    )
    required_read_scopes = [CoreScopes.ALWAYS_PUBLIC]
    required_write_scopes = [CoreScopes.NULL]
    pagination_class = MaxSizePagination
    ordering = ('name', )
    def get_default_queryset(self):
        """Return every provider of the subclass's model."""
        return self.model_class.objects.all()
    # overrides ListAPIView
    def get_queryset(self):
        return self.get_queryset_from_request()
class CollectionProviderList(GenericProviderList):
    """List of collection providers."""
    model_class = CollectionProvider
    serializer_class = CollectionProviderSerializer
    view_category = 'collection-providers'
    view_name = 'collection-providers-list'
class RegistrationProviderList(GenericProviderList):
    """List of registration providers."""
    model_class = RegistrationProvider
    serializer_class = RegistrationProviderSerializer
    view_category = 'registration-providers'
    view_name = 'registration-providers-list'
class PreprintProviderList(MetricsViewMixin, GenericProviderList):
    """The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/preprint_provider_list).
    """
    model_class = PreprintProvider
    serializer_class = PreprintProviderSerializer
    view_category = 'preprint-providers'
    view_name = 'preprint-providers-list'
    # Maps metric query-param names to the metric classes that back them.
    metric_map = {
        'downloads': PreprintDownload,
        'views': PreprintView,
    }
    # overrides MetricsViewMixin
    def get_annotated_queryset_with_metrics(self, queryset, metric_class, metric_name, after):
        """Annotate each provider in the queryset with its metric count."""
        return metric_class.get_top_by_count(
            qs=queryset,
            model_field='_id',
            metric_field='provider_id',
            annotation=metric_name,
            after=after,
            size=None,
        )
    def get_renderer_context(self):
        """Add the SHARE-whitelisted provider names to response meta."""
        context = super(PreprintProviderList, self).get_renderer_context()
        context['meta'] = {
            'whitelisted_providers': WhitelistedSHAREPreprintProvider.objects.all().values_list('provider_name', flat=True),
        }
        return context
    def build_query_from_field(self, field_name, operation):
        """Support filter[permissions]=... restricted to the 'eq' operator.

        Raises:
            InvalidFilterOperator: for any operator other than 'eq'.
            NotAuthenticated: if no user is logged in.
            InvalidFilterValue: if a requested permission is not a review permission.
        """
        if field_name == 'permissions':
            if operation['op'] != 'eq':
                raise InvalidFilterOperator(value=operation['op'], valid_operators=['eq'])
            auth = get_user_auth(self.request)
            auth_user = getattr(auth, 'user', None)
            if not auth_user:
                raise NotAuthenticated()
            # Accept both "a,b" and "[a,b]" style values.
            value = operation['value'].lstrip('[').rstrip(']')
            permissions = [v.strip() for v in value.split(',')]
            perm_options = [perm[0] for perm in REVIEW_PERMISSIONS]
            if not set(permissions).issubset(set(perm_options)):
                valid_permissions = ', '.join(perm_options)
                raise InvalidFilterValue('Invalid permission! Valid values are: {}'.format(valid_permissions))
            # Only providers on which the user holds any of the permissions.
            return Q(id__in=get_objects_for_user(auth_user, permissions, PreprintProvider, any_perm=True))
        return super(PreprintProviderList, self).build_query_from_field(field_name, operation)
class GenericProviderDetail(JSONAPIBaseView, generics.RetrieveAPIView):
    """Base detail view for a single provider; subclasses set model_class."""
    permission_classes = (
        drf_permissions.IsAuthenticatedOrReadOnly,
        base_permissions.TokenHasScope,
    )
    required_read_scopes = [CoreScopes.ALWAYS_PUBLIC]
    required_write_scopes = [CoreScopes.PROVIDERS_WRITE]
    def get_object(self):
        """Fetch the provider by URL id and enforce object permissions."""
        provider = get_object_or_error(self.model_class, self.kwargs['provider_id'], self.request, display_name=self.model_class.__name__)
        self.check_object_permissions(self.request, provider)
        return provider
class CollectionProviderDetail(GenericProviderDetail):
    """Detail view for a single collection provider."""
    model_class = CollectionProvider
    serializer_class = CollectionProviderSerializer
    view_category = 'collection-providers'
    view_name = 'collection-provider-detail'
class RegistrationProviderDetail(GenericProviderDetail):
    """Detail view for a single registration provider."""
    model_class = RegistrationProvider
    serializer_class = RegistrationProviderSerializer
    view_category = 'registration-providers'
    view_name = 'registration-provider-detail'
class PreprintProviderDetail(GenericProviderDetail, generics.UpdateAPIView):
    """The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/preprint_provider_detail).
    """
    permission_classes = (
        drf_permissions.IsAuthenticatedOrReadOnly,
        base_permissions.TokenHasScope,
        CanSetUpProvider,
    )
    model_class = PreprintProvider
    serializer_class = PreprintProviderSerializer
    view_category = 'preprint-providers'
    view_name = 'preprint-provider-detail'
    def perform_update(self, serializer):
        """Apply the update unless reviews settings were already configured.

        Raises:
            Conflict: if the provider's reviews settings are already set.
        """
        if serializer.instance.is_reviewed:
            raise Conflict('Reviews settings may be set only once. Contact support@osf.io if you need to update them.')
        super(PreprintProviderDetail, self).perform_update(serializer)
class GenericProviderTaxonomies(JSONAPIBaseView, generics.ListAPIView):
    """Lists a provider's taxonomy subjects, optionally filtered by parent."""
    permission_classes = (
        drf_permissions.IsAuthenticatedOrReadOnly,
        base_permissions.TokenHasScope,
    )
    required_read_scopes = [CoreScopes.ALWAYS_PUBLIC]
    required_write_scopes = [CoreScopes.NULL]
    serializer_class = TaxonomySerializer
    pagination_class = IncreasedPageSizePagination
    view_name = 'taxonomy-list'
    ordering = ('-id',)
    def get_queryset(self):
        params = self.request.query_params
        # 'filter[parents]' (legacy plural) takes precedence over 'filter[parent]'.
        parent = params.get('filter[parents]', None) or params.get('filter[parent]', None)
        provider = get_object_or_error(
            self.provider_class, self.kwargs['provider_id'], self.request,
            display_name=self.provider_class.__name__)
        if not parent:
            return optimize_subject_query(provider.all_subjects)
        if parent == 'null':
            # 'null' means: only subjects with no parent (top level).
            return provider.top_level_subjects
        return optimize_subject_query(provider.all_subjects.filter(parent___id=parent))
class CollectionProviderTaxonomies(DeprecatedView, GenericProviderTaxonomies):
    """
    To be deprecated: In favor of CollectionProviderSubjects
    """
    view_category = 'collection-providers'
    provider_class = CollectionProvider  # Not actually the model being serialized, privatize to avoid issues
    max_version = '2.14'  # last API version that serves this deprecated endpoint
class RegistrationProviderTaxonomies(DeprecatedView, GenericProviderTaxonomies):
    """
    To be deprecated: In favor of RegistrationProviderSubjects
    """
    view_category = 'registration-providers'
    provider_class = RegistrationProvider  # Not actually the model being serialized, privatize to avoid issues
    max_version = '2.14'  # last API version that serves this deprecated endpoint
class PreprintProviderTaxonomies(GenericProviderTaxonomies):
    """
    To be deprecated: In favor of PreprintProviderSubjects
    """
    view_category = 'preprint-providers'
    provider_class = PreprintProvider  # Not actually the model being serialized, privatize to avoid issues
    max_version = '2.14'  # last API version that serves this deprecated endpoint
class BaseProviderSubjects(SubjectList):
    """Lists a provider's subjects, optionally filtered by parent subject."""
    pagination_class = IncreasedPageSizePagination
    view_name = 'subject-list'
    def get_default_queryset(self):
        parent = self.request.query_params.get('filter[parent]', None)
        provider = get_object_or_error(
            self.provider_class, self.kwargs['provider_id'], self.request,
            display_name=self.provider_class.__name__)
        if not parent:
            return optimize_subject_query(provider.all_subjects)
        if parent == 'null':
            # 'null' means: only subjects with no parent (top level).
            return provider.top_level_subjects
        return optimize_subject_query(provider.all_subjects.filter(parent___id=parent))
class CollectionProviderSubjects(BaseProviderSubjects):
    """Subjects available under a collection provider."""
    view_category = 'collection-providers'
    provider_class = CollectionProvider  # Not actually the model being serialized, privatize to avoid issues
class RegistrationProviderSubjects(BaseProviderSubjects):
    """Subjects available under a registration provider."""
    view_category = 'registration-providers'
    provider_class = RegistrationProvider  # Not actually the model being serialized, privatize to avoid issues
class PreprintProviderSubjects(BaseProviderSubjects):
    """The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/preprint_provider_subjects_list).
    """
    view_category = 'preprint-providers'
    provider_class = PreprintProvider  # Not actually the model being serialized, privatize to avoid issues
class GenericProviderHighlightedTaxonomyList(JSONAPIBaseView, generics.ListAPIView):
    """Lists a provider's highlighted subjects, ordered by subject text."""
    permission_classes = (
        drf_permissions.IsAuthenticatedOrReadOnly,
        base_permissions.TokenHasScope,
    )
    view_name = 'highlighted-taxonomy-list'
    required_read_scopes = [CoreScopes.ALWAYS_PUBLIC]
    required_write_scopes = [CoreScopes.NULL]
    serializer_class = TaxonomySerializer
    def get_queryset(self):
        provider = get_object_or_error(
            self.provider_class, self.kwargs['provider_id'], self.request,
            display_name=self.provider_class.__name__)
        highlighted_ids = [subject.id for subject in provider.highlighted_subjects]
        return optimize_subject_query(
            Subject.objects.filter(id__in=highlighted_ids).order_by('text'))
class CollectionProviderHighlightedTaxonomyList(DeprecatedView, GenericProviderHighlightedTaxonomyList):
    """
    To be deprecated: In favor of CollectionProviderHighlightedSubjectList
    """
    view_category = 'collection-providers'
    provider_class = CollectionProvider
    max_version = '2.14'  # last API version that serves this deprecated endpoint
class RegistrationProviderHighlightedTaxonomyList(DeprecatedView, GenericProviderHighlightedTaxonomyList):
    """
    To be deprecated: In favor of RegistrationProviderHighlightedSubjectList
    """
    view_category = 'registration-providers'
    provider_class = RegistrationProvider
    max_version = '2.14'  # last API version that serves this deprecated endpoint
class PreprintProviderHighlightedTaxonomyList(DeprecatedView, GenericProviderHighlightedTaxonomyList):
    """
    To be deprecated: In favor of PreprintProviderHighlightedSubjectList
    """
    view_category = 'preprint-providers'
    provider_class = PreprintProvider
    max_version = '2.14'  # last API version that serves this deprecated endpoint
class GenericProviderHighlightedSubjectList(GenericProviderHighlightedTaxonomyList):
    """Replacement for the deprecated highlighted-taxonomy views, using the subject serializer."""
    view_name = 'highlighted-subject-list'
    serializer_class = SubjectSerializer
class CollectionProviderHighlightedSubjectList(GenericProviderHighlightedSubjectList):
    """Highlighted subjects for a collection provider."""
    view_category = 'collection-providers'
    provider_class = CollectionProvider
class RegistrationProviderHighlightedSubjectList(GenericProviderHighlightedSubjectList):
    """Highlighted subjects for a registration provider."""
    view_category = 'registration-providers'
    provider_class = RegistrationProvider
class PreprintProviderHighlightedSubjectList(GenericProviderHighlightedSubjectList):
    """Highlighted subjects for a preprint provider."""
    view_category = 'preprint-providers'
    provider_class = PreprintProvider
class GenericProviderLicenseList(LicenseList):
    """The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/preprint_provider_licenses_list)
    """
    ordering = ()  # TODO: should be ordered once the frontend for selecting default licenses no longer relies on order
    def get_default_queryset(self):
        """
        Returns provider.acceptable_licenses if they exist, otherwise returns all licenses.
        The provider's default_license is also included in the queryset if one exists.
        """
        provider = get_object_or_error(
            self.provider_class,
            self.kwargs['provider_id'],
            self.request,
            display_name=self.provider_class.__name__,
        )
        if provider.licenses_acceptable.count():
            licenses = provider.licenses_acceptable.get_queryset()
        else:
            # No explicit whitelist: everything is acceptable.
            licenses = NodeLicense.objects.all()
        default = provider.default_license
        if default:
            licenses |= NodeLicense.objects.filter(id=default.id)
        # Since default_license could also be in acceptable_licenses, filtering
        # this way to avoid duplicates without .distinct() usage
        license_ids = licenses.values_list('id', flat=True)
        return NodeLicense.objects.filter(
            Q(id__in=license_ids),
        )
class CollectionProviderLicenseList(GenericProviderLicenseList):
    """Licenses acceptable to a collection provider."""
    view_category = 'collection-providers'
    provider_class = CollectionProvider
class RegistrationProviderLicenseList(GenericProviderLicenseList):
    """Licenses acceptable to a registration provider."""
    view_category = 'registration-providers'
    provider_class = RegistrationProvider
class PreprintProviderLicenseList(GenericProviderLicenseList):
    """Licenses acceptable to a preprint provider."""
    view_category = 'preprint-providers'
    provider_class = PreprintProvider
class PreprintProviderPreprintList(JSONAPIBaseView, generics.ListAPIView, PreprintFilterMixin, ProviderMixin):
    """The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/preprint_providers_preprints_list).
    """
    provider_class = PreprintProvider
    permission_classes = (
        drf_permissions.IsAuthenticatedOrReadOnly,
        base_permissions.TokenHasScope,
        PreprintPublishedOrAdmin,
    )
    # Bug fix: was ('-created') -- a plain str because of the missing comma.
    # DRF happens to accept a string, but every other view in this module uses
    # a tuple; make it a real one-element tuple.
    ordering = ('-created', )
    serializer_class = PreprintSerializer
    model_class = AbstractNode
    required_read_scopes = [CoreScopes.NODE_PREPRINTS_READ]
    required_write_scopes = [CoreScopes.NULL]
    view_category = 'preprint-providers'
    view_name = 'preprints-list'
    def _auth_user(self):
        """Return the authenticated user for this request, or None."""
        return getattr(get_user_auth(self.request), 'user', None)
    def get_default_queryset(self):
        """Return the provider's preprints visible to the current user."""
        # Permissions on the list objects are handled by the query
        return self.preprints_queryset(self.get_provider().preprints.all(), self._auth_user())
    # overrides ListAPIView
    def get_queryset(self):
        return self.get_queryset_from_request()
    # overrides APIView
    def get_renderer_context(self):
        """Attach reviews-state counts to meta when requested by a moderator."""
        context = super(PreprintProviderPreprintList, self).get_renderer_context()
        show_counts = is_truthy(self.request.query_params.get('meta[reviews_state_counts]', False))
        if show_counts:
            auth_user = self._auth_user()
            provider = self.get_provider()
            if auth_user and auth_user.has_perm('view_submissions', provider):
                context['meta'] = {
                    'reviews_state_counts': provider.get_reviewable_state_counts(),
                }
        return context
class CollectionProviderSubmissionList(JSONAPIBaseView, generics.ListCreateAPIView, ListFilterMixin, ProviderMixin):
    """List/create submissions to a collection provider's primary collection."""
    provider_class = CollectionProvider
    permission_classes = (
        drf_permissions.IsAuthenticatedOrReadOnly,
        CanSubmitToCollectionOrPublic,
        base_permissions.TokenHasScope,
    )
    required_read_scopes = [CoreScopes.COLLECTED_META_READ]
    required_write_scopes = [CoreScopes.COLLECTED_META_WRITE]
    model_class = CollectionSubmission
    serializer_class = CollectionSubmissionSerializer
    view_category = 'collected-metadata'
    view_name = 'provider-collected-metadata-list'
    def get_serializer_class(self):
        # Creation goes through a dedicated serializer.
        if self.request.method == 'POST':
            return CollectionSubmissionCreateSerializer
        return CollectionSubmissionSerializer
    def get_default_queryset(self):
        provider = self.get_provider()
        if not (provider and provider.primary_collection):
            return CollectionSubmission.objects.none()
        return provider.primary_collection.collectionsubmission_set.all()
    def get_queryset(self):
        return self.get_queryset_from_request()
    def perform_create(self, serializer):
        user = self.request.user
        provider = self.get_provider()
        if not (provider and provider.primary_collection):
            raise ValidationError('Provider {} has no primary collection to submit to.'.format(provider.name))
        return serializer.save(creator=user, collection=provider.primary_collection)
class RegistrationProviderSubmissionList(JSONAPIBaseView, generics.ListCreateAPIView, ListFilterMixin, ProviderMixin):
    """List/create submissions to a registration provider's primary collection."""
    provider_class = RegistrationProvider
    permission_classes = (
        drf_permissions.IsAuthenticatedOrReadOnly,
        CanSubmitToCollectionOrPublic,
        base_permissions.TokenHasScope,
    )
    required_read_scopes = [CoreScopes.COLLECTED_META_READ]
    required_write_scopes = [CoreScopes.COLLECTED_META_WRITE]
    model_class = CollectionSubmission
    serializer_class = CollectionSubmissionSerializer
    view_category = 'collected-metadata'
    view_name = 'provider-collected-registration-metadata-list'
    def get_serializer_class(self):
        """Use the create serializer for POST, the read serializer otherwise."""
        if self.request.method == 'POST':
            return CollectionSubmissionCreateSerializer
        else:
            return CollectionSubmissionSerializer
    def get_default_queryset(self):
        """Return submissions in the provider's primary collection, if any."""
        provider = self.get_provider()
        if provider and provider.primary_collection:
            return provider.primary_collection.collectionsubmission_set.all()
        return CollectionSubmission.objects.none()
    def get_queryset(self):
        return self.get_queryset_from_request()
    def perform_create(self, serializer):
        """Create a submission in the provider's primary collection.

        Raises:
            ValidationError: if the provider has no primary collection.
        """
        user = self.request.user
        # Consistency fix: resolve the provider via ProviderMixin.get_provider()
        # (as CollectionProviderSubmissionList does) instead of duplicating the
        # get_object_or_error() call with a hard-coded model and display name.
        provider = self.get_provider()
        if provider and provider.primary_collection:
            return serializer.save(creator=user, collection=provider.primary_collection)
        raise ValidationError('Provider {} has no primary collection to submit to.'.format(provider.name))
class PreprintProviderWithdrawRequestList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin, ProviderMixin):
    """Withdrawal requests targeting a preprint provider; moderator-only."""
    provider_class = PreprintProvider
    permission_classes = (
        drf_permissions.IsAuthenticated,
        base_permissions.TokenHasScope,
        MustBeModerator,
    )
    view_category = 'requests'
    view_name = 'provider-withdrawal-request-list'
    required_read_scopes = [CoreScopes.PREPRINT_REQUESTS_READ]
    required_write_scopes = [CoreScopes.NULL]
    serializer_class = PreprintRequestSerializer
    def get_default_queryset(self):
        # Only withdrawal requests against live, public preprints of this provider.
        filters = {
            'request_type': RequestTypes.WITHDRAWAL.value,
            'target__provider_id': self.get_provider().id,
            'target__is_public': True,
            'target__deleted__isnull': True,
        }
        return PreprintRequest.objects.filter(**filters)
    def get_renderer_context(self):
        context = super(PreprintProviderWithdrawRequestList, self).get_renderer_context()
        show_counts = is_truthy(self.request.query_params.get('meta[requests_state_counts]', False))
        if show_counts:
            auth_user = getattr(get_user_auth(self.request), 'user', None)
            provider = self.get_provider()
            if auth_user and auth_user.has_perm('view_submissions', provider):
                context['meta'] = {
                    'requests_state_counts': provider.get_request_state_counts(),
                }
        return context
    def get_queryset(self):
        return self.get_queryset_from_request()
class ModeratorMixin(ProviderMixin):
    """Shared behavior for provider moderator views.

    Subclasses set ``provider_type`` to the concrete provider model
    (PreprintProvider or RegistrationProvider).
    """
    provider_class = PreprintProvider
    model_class = OSFUser
    def get_provider(self):
        """Resolve the provider from the URL's ``provider_id``.

        Bug fix: display_name was hard-coded to 'PreprintProvider', which
        mislabelled 404/410 errors for the RegistrationProvider subclasses;
        derive it from ``provider_type`` instead.
        """
        return get_object_or_error(self.provider_type, self.kwargs['provider_id'], self.request, display_name=self.provider_type.__name__)
    def get_serializer_context(self, *args, **kwargs):
        """Expose the resolved provider to serializers."""
        ctx = super(ModeratorMixin, self).get_serializer_context(*args, **kwargs)
        ctx.update({'provider': self.get_provider()})
        return ctx
class ProviderModeratorsList(ModeratorMixin, JSONAPIBaseView, generics.ListCreateAPIView, ListFilterMixin):
    """Lists a provider's admins and moderators; POST adds a moderator."""
    permission_classes = (
        drf_permissions.IsAuthenticated,
        base_permissions.TokenHasScope,
        MustBeModerator,
        CanAddModerator,
    )
    view_name = 'provider-moderator-list'
    required_read_scopes = [CoreScopes.MODERATORS_READ]
    required_write_scopes = [CoreScopes.MODERATORS_WRITE]
    def get_default_queryset(self):
        """Union of admin and moderator group members, each annotated with
        a 'permission_group' of either ADMIN or 'moderator'."""
        provider = self.get_provider()
        admin_group = provider.get_group(ADMIN)
        mod_group = provider.get_group('moderator')
        # Users in both groups are labelled ADMIN (the When clause wins over
        # the default); '|' on querysets de-duplicates the union.
        return (admin_group.user_set.all() | mod_group.user_set.all()).annotate(permission_group=Case(
            When(groups=admin_group, then=Value(ADMIN)),
            default=Value('moderator'),
            output_field=CharField(),
        )).order_by('fullname')
    def get_queryset(self):
        return self.get_queryset_from_request()
class ProviderModeratorsDetail(ModeratorMixin, JSONAPIBaseView, generics.RetrieveUpdateDestroyAPIView):
    """Retrieve, update, or remove a single moderator of a provider."""
    permission_classes = (
        drf_permissions.IsAuthenticated,
        base_permissions.TokenHasScope,
        MustBeModerator,
        CanUpdateModerator,
        CanDeleteModerator,
    )
    view_name = 'provider-moderator-detail'

    required_read_scopes = [CoreScopes.MODERATORS_READ]
    required_write_scopes = [CoreScopes.MODERATORS_WRITE]

    def get_object(self):
        """Return the moderator user annotated with ``permission_group``.

        Raises NotFound when the user has no moderator/admin group on this
        provider.
        """
        provider = self.get_provider()
        user = get_object_or_error(OSFUser, self.kwargs['moderator_id'], self.request, display_name='OSFUser')
        # The provider's auth groups share a common name prefix; the first
        # match (sorted by name) determines this user's permission group.
        group_name_prefix = self.provider_type.group_format.format(self=provider, group='')
        moderator_group = user.groups.filter(name__contains=group_name_prefix).order_by('name').first()
        if moderator_group is None:
            # Fix: the original wrapped the whole chained expression in
            # ``except AttributeError``, which could also mask unrelated
            # AttributeErrors; test the missing group explicitly instead.
            raise NotFound
        user.permission_group = moderator_group.name.split('_')[-1]
        return user

    def perform_destroy(self, instance):
        # Surface group-membership errors as API validation errors.
        try:
            self.get_provider().remove_from_group(instance, instance.permission_group)
        except ValueError as e:
            raise ValidationError(str(e))
class PreprintProviderModeratorsList(ProviderModeratorsList):
    """Moderator list view specialized for preprint providers."""
    provider_type = PreprintProvider
    serializer_class = PreprintModeratorSerializer
    view_category = 'preprint-providers'
class PreprintProviderModeratorsDetail(ProviderModeratorsDetail):
    """Moderator detail view specialized for preprint providers."""
    provider_type = PreprintProvider
    serializer_class = PreprintModeratorSerializer
    view_category = 'preprint-providers'
class RegistrationProviderModeratorsList(ProviderModeratorsList):
    """Moderator list view specialized for registration providers."""
    provider_type = RegistrationProvider
    serializer_class = RegistrationModeratorSerializer
    view_category = 'registration-providers'
class RegistrationProviderModeratorsDetail(ProviderModeratorsDetail):
    """Moderator detail view specialized for registration providers."""
    provider_type = RegistrationProvider
    serializer_class = RegistrationModeratorSerializer
    view_category = 'registration-providers'
class RegistrationProviderSchemaList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin, ProviderMixin):
    """List the active registration schemas offered by a registration provider."""
    provider_class = RegistrationProvider
    permission_classes = (
        drf_permissions.IsAuthenticatedOrReadOnly,
        base_permissions.TokenHasScope,
    )
    view_category = 'registration-providers'
    view_name = 'registration-schema-list'

    required_read_scopes = [CoreScopes.SCHEMA_READ]
    required_write_scopes = [CoreScopes.NULL]
    serializer_class = RegistrationSchemaSerializer

    def get_default_queryset(self):
        # Latest active schema versions; the provider's default schema (if
        # any) is sorted to the front via the annotation below.
        provider = self.get_provider()
        default_schema_id = provider.default_schema.id if provider.default_schema else None
        schemas = provider.schemas.get_latest_versions(request=self.request, invisible=True).filter(active=True)
        if not default_schema_id:
            return schemas
        filtered = schemas.annotate(default_schema_ordering=Case(
            When(id=default_schema_id, then=Value(1)),
            default=Value(0),
            output_field=IntegerField(),
        )).order_by('-default_schema_ordering', 'name')
        return filtered

    def get_queryset(self):
        # Apply request query-param filtering on top of the default queryset.
        return self.get_queryset_from_request()
class RegistrationProviderRegistrationList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin, ProviderMixin):
    """List registrations belonging to a registration provider (moderators only)."""
    provider_class = RegistrationProvider
    permission_classes = (
        drf_permissions.IsAuthenticated,
        base_permissions.TokenHasScope,
        MustBeModerator,
    )
    # Fix: ('-created') without a trailing comma was just the string
    # '-created', not a tuple; DRF tolerates a string here, but the intent
    # (and the project convention) is a one-element tuple.
    ordering = ('-created',)
    serializer_class = RegistrationSerializer
    required_read_scopes = [CoreScopes.NODE_REGISTRATIONS_READ]
    required_write_scopes = [CoreScopes.NULL]
    view_category = 'registration-providers'
    view_name = 'registrations-list'

    def get_default_queryset(self):
        # Registrations for this provider, annotated with their revision state.
        provider = self.get_provider()
        return Registration.objects.filter(
            provider=provider,
        ).annotate(
            revision_state=registration_annotations.REVISION_STATE,
        )

    # overrides ListAPIView
    def get_queryset(self):
        return self.get_queryset_from_request()

    # overrides APIView
    def get_renderer_context(self):
        # Optionally include reviewable-state counts when requested via
        # ?meta[reviews_state_counts]=true.
        context = super().get_renderer_context()
        if is_truthy(self.request.query_params.get('meta[reviews_state_counts]', False)):
            provider = self.get_provider()
            context['meta'] = {
                'reviews_state_counts': provider.get_reviewable_state_counts(),
            }
        return context
class RegistrationProviderRequestList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin, ProviderMixin):
    """List node requests targeting a registration provider (moderators only)."""
    provider_class = RegistrationProvider
    permission_classes = (
        drf_permissions.IsAuthenticated,
        base_permissions.TokenHasScope,
        MustBeModerator,
    )
    view_category = 'requests'
    view_name = 'registration-provider-request-list'

    required_read_scopes = [CoreScopes.REGISTRATION_REQUESTS_READ]
    required_write_scopes = [CoreScopes.NULL]
    serializer_class = RegistrationRequestSerializer

    def get_default_queryset(self):
        # Requests whose target belongs to this provider and is not deleted.
        return NodeRequest.objects.filter(
            target__provider_id=self.get_provider().id,
            target__deleted__isnull=True,
        )

    def get_queryset(self):
        # Apply request query-param filtering on top of the default queryset.
        return self.get_queryset_from_request()
class RegistrationProviderActionList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin, ProviderMixin):
    """List registration actions for a registration provider (moderators only)."""
    provider_class = RegistrationProvider
    permission_classes = (
        drf_permissions.IsAuthenticated,
        base_permissions.TokenHasScope,
        MustBeModerator,
    )
    view_category = 'actions'
    view_name = 'registration-provider-action-list'

    required_read_scopes = [CoreScopes.ACTIONS_READ]
    required_write_scopes = [CoreScopes.ACTIONS_WRITE]
    serializer_class = RegistrationActionSerializer

    def get_default_queryset(self):
        # Actions whose target belongs to this provider and is not deleted.
        return RegistrationAction.objects.filter(
            target__provider_id=self.get_provider().id,
            target__deleted__isnull=True,
        )

    def get_queryset(self):
        # Apply request query-param filtering on top of the default queryset.
        return self.get_queryset_from_request()
class RegistrationBulkCreate(APIView, ProviderMixin):
    """Accept a CSV upload describing registrations to create in bulk."""
    provider_class = RegistrationProvider
    parser_classes = [FileUploadParser]
    permission_classes = (
        drf_permissions.IsAuthenticated,
        base_permissions.TokenHasScope,
        CanUpdateModerator,
    )

    def get_hash(self, file_obj):
        """Return the MD5 hex digest of *file_obj*, reading 64 KiB blocks.

        The file position is rewound to the start afterwards so the payload
        can be re-read by the validator.
        """
        BLOCK_SIZE = 2**16
        file_hash = hashlib.md5()
        block = file_obj.read(BLOCK_SIZE)
        while len(block) > 0:
            file_hash.update(block)
            block = file_obj.read(BLOCK_SIZE)
        file_obj.seek(0)
        return file_hash.hexdigest()

    def put(self, request, *args, **kwargs):
        """Validate the uploaded CSV and enqueue the bulk-creation task.

        Returns 204 on success; error responses carry a typed ``errors``
        payload that clients dispatch on.
        """
        provider_id = kwargs['provider_id']
        user_id = self.request.user._id
        file_size_limit = BULK_SETTINGS['DEFAULT_BULK_LIMIT'] * 10000
        file_obj = request.data['file']
        if file_obj.size > file_size_limit:
            return JsonResponse(
                {'errors': [{'type': 'sizeExceedsLimit'}]},
                status=413,
                content_type='application/vnd.api+json; application/json',
            )
        if file_obj.content_type != 'text/csv':
            # Fix: a wrong content type is not "Payload Too Large" (413);
            # 415 Unsupported Media Type is the correct status here.
            return JsonResponse(
                {'errors': [{'type': 'invalidFileType'}]},
                status=415,
                content_type='application/vnd.api+json; application/json',
            )
        file_md5 = self.get_hash(file_obj)
        # Reject an identical payload that was already submitted.
        if RegistrationBulkUploadJob.objects.filter(payload_hash=file_md5).exists():
            return JsonResponse(
                {'errors': [{'type': 'bulkUploadJobExists'}]},
                status=409,
                content_type='application/vnd.api+json; application/json',
            )
        try:
            upload = BulkRegistrationUpload(file_obj, provider_id)
            upload.validate()
            errors = upload.errors
        except InvalidHeadersError as e:
            invalid_headers = [str(detail) for detail in e.detail['invalid_headers']]
            missing_headers = [str(detail) for detail in e.detail['missing_headers']]
            return JsonResponse(
                {'errors': [{'type': 'invalidColumnId', 'invalidHeaders': invalid_headers, 'missingHeaders': missing_headers}]},
                status=400,
                content_type='application/vnd.api+json; application/json',
            )
        except NotFound:
            return JsonResponse(
                {'errors': [{'type': 'invalidSchemaId'}]},
                status=404,
                content_type='application/vnd.api+json; application/json',
            )
        if errors:
            return JsonResponse(
                {'errors': errors},
                status=400,
                content_type='application/vnd.api+json; application/json',
            )
        parsed = upload.get_parsed()
        # Hand off to the task queue; dry_run=False performs the creation.
        enqueue_task(prepare_for_registration_bulk_creation.s(file_md5, user_id, provider_id, parsed, dry_run=False))
        return Response(status=204)
| |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
# Copyright (c) 2012 X.commerce, a business unit of eBay Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
from django import http
from mox import IsA
from novaclient.v1_1 import servers
from horizon import api
from horizon import test
class ServerWrapperTests(test.TestCase):
    """Tests for the api.nova.Server wrapper object."""

    def test_get_base_attribute(self):
        server = api.nova.Server(self.servers.first(), self.request)
        self.assertEqual(server.id, self.servers.first().id)

    def test_image_name(self):
        image = self.images.first()
        self.mox.StubOutWithMock(api.glance, 'image_get')
        api.glance.image_get(IsA(http.HttpRequest),
                             image.id).AndReturn(image)
        self.mox.ReplayAll()
        # Fix: use the fully-qualified api.nova.Server for consistency with
        # test_get_base_attribute (the unqualified alias pointed to the
        # same class).
        server = api.nova.Server(self.servers.first(), self.request)
        self.assertEqual(server.image_name, image.name)
class ComputeApiTests(test.APITestCase):
    """Tests for the nova API wrappers (horizon.api.nova).

    Fixes applied: deprecated ``assertEquals`` replaced with ``assertEqual``;
    the local ``servers`` variable in test_server_list no longer shadows the
    imported novaclient ``servers`` module; all wrapper references are
    consistently qualified as ``api.nova.*``.
    """

    def test_server_reboot(self):
        server = self.servers.first()
        HARDNESS = servers.REBOOT_HARD
        novaclient = self.stub_novaclient()
        novaclient.servers = self.mox.CreateMockAnything()
        novaclient.servers.get(server.id).AndReturn(server)
        novaclient.servers.reboot(server.id, HARDNESS)
        self.mox.ReplayAll()

        ret_val = api.nova.server_reboot(self.request, server.id)
        self.assertIsNone(ret_val)

    def test_server_vnc_console(self):
        server = self.servers.first()
        console = self.servers.console_data
        console_type = console["console"]["type"]
        novaclient = self.stub_novaclient()
        novaclient.servers = self.mox.CreateMockAnything()
        novaclient.servers.get_vnc_console(server.id,
                                           console_type).AndReturn(console)
        self.mox.ReplayAll()

        ret_val = api.nova.server_vnc_console(self.request,
                                              server.id,
                                              console_type)
        self.assertIsInstance(ret_val, api.nova.VNCConsole)

    def test_server_list(self):
        # renamed from ``servers`` to avoid shadowing the novaclient
        # ``servers`` module imported at the top of this file
        server_list = self.servers.list()
        novaclient = self.stub_novaclient()
        novaclient.servers = self.mox.CreateMockAnything()
        novaclient.servers.list(True, {'all_tenants': True}).AndReturn(server_list)
        self.mox.ReplayAll()

        ret_val = api.nova.server_list(self.request, all_tenants=True)
        for server in ret_val:
            self.assertIsInstance(server, api.nova.Server)

    def test_usage_get(self):
        novaclient = self.stub_novaclient()
        novaclient.usage = self.mox.CreateMockAnything()
        novaclient.usage.get(self.tenant.id,
                             'start',
                             'end').AndReturn(self.usages.first())
        self.mox.ReplayAll()

        ret_val = api.nova.usage_get(self.request, self.tenant.id, 'start', 'end')
        self.assertIsInstance(ret_val, api.nova.Usage)

    def test_usage_list(self):
        usages = self.usages.list()
        novaclient = self.stub_novaclient()
        novaclient.usage = self.mox.CreateMockAnything()
        novaclient.usage.list('start', 'end', True).AndReturn(usages)
        self.mox.ReplayAll()

        ret_val = api.nova.usage_list(self.request, 'start', 'end')
        for usage in ret_val:
            self.assertIsInstance(usage, api.nova.Usage)

    def test_server_get(self):
        server = self.servers.first()
        novaclient = self.stub_novaclient()
        novaclient.servers = self.mox.CreateMockAnything()
        novaclient.servers.get(server.id).AndReturn(server)
        self.mox.ReplayAll()

        ret_val = api.nova.server_get(self.request, server.id)
        self.assertIsInstance(ret_val, api.nova.Server)

    def test_server_remove_floating_ip(self):
        server = api.nova.Server(self.servers.first(), self.request)
        floating_ip = self.floating_ips.first()
        novaclient = self.stub_novaclient()
        novaclient.servers = self.mox.CreateMockAnything()
        novaclient.floating_ips = self.mox.CreateMockAnything()
        novaclient.servers.get(server.id).AndReturn(server)
        novaclient.floating_ips.get(floating_ip.id).AndReturn(floating_ip)
        novaclient.servers.remove_floating_ip(server.id, floating_ip.ip) \
                          .AndReturn(server)
        self.mox.ReplayAll()

        server = api.nova.server_remove_floating_ip(self.request,
                                                    server.id,
                                                    floating_ip.id)
        self.assertIsInstance(server, api.nova.Server)

    def test_server_add_floating_ip(self):
        server = api.nova.Server(self.servers.first(), self.request)
        floating_ip = self.floating_ips.first()
        novaclient = self.stub_novaclient()
        novaclient.floating_ips = self.mox.CreateMockAnything()
        novaclient.servers = self.mox.CreateMockAnything()
        novaclient.servers.get(server.id).AndReturn(server)
        novaclient.floating_ips.get(floating_ip.id).AndReturn(floating_ip)
        novaclient.servers.add_floating_ip(server.id, floating_ip.ip) \
                          .AndReturn(server)
        self.mox.ReplayAll()

        server = api.nova.server_add_floating_ip(self.request,
                                                 server.id,
                                                 floating_ip.id)
        self.assertIsInstance(server, api.nova.Server)

    @test.create_stubs({api.nova: ('volume_list',
                                   'server_list',
                                   'flavor_list',
                                   'tenant_floating_ip_list',
                                   'tenant_quota_get',)})
    def test_tenant_quota_usages(self):
        api.nova.flavor_list(IsA(http.HttpRequest)) \
                .AndReturn(self.flavors.list())
        api.nova.tenant_quota_get(IsA(http.HttpRequest), '1') \
                .AndReturn(self.quotas.first())
        api.nova.tenant_floating_ip_list(IsA(http.HttpRequest)) \
                .AndReturn(self.floating_ips.list())
        api.nova.server_list(IsA(http.HttpRequest)) \
                .AndReturn(self.servers.list())
        api.nova.volume_list(IsA(http.HttpRequest)) \
                .AndReturn(self.volumes.list())
        self.mox.ReplayAll()

        quota_usages = api.nova.tenant_quota_usages(self.request)

        expected_output = {'gigabytes': {
                               'used': 80,
                               'flavor_fields': [],
                               'quota': 1000},
                           'ram': {
                               'available': 8976,
                               'used': 1024,
                               'flavor_fields': ['ram'],
                               'quota': 10000},
                           'floating_ips': {
                               'used': 2,
                               'flavor_fields': [],
                               'quota': 1},
                           'instances': {
                               'used': 2,
                               'flavor_fields': [],
                               'quota': 10},
                           'volumes': {
                               'used': 3,
                               'flavor_fields': [],
                               'quota': 1},
                           'cores': {
                               'used': 2,
                               'flavor_fields': ['vcpus'],
                               'quota': 10}}
        self.assertEqual(quota_usages, expected_output)
| |
# implementation of structured perceptron model for ubuntu disentanglement project
EPOCHS = 10

import re
import random
import sys
import string
import numpy
import collections

# Fix: numpy.set_printoptions(threshold=numpy.nan) raises on modern numpy
# (threshold must be an int); sys.maxsize is the documented way to disable
# array summarization when printing.
numpy.set_printoptions(threshold=sys.maxsize)
class Post(object):
    """One line of an IRC log: author, tokenized body, and reply links."""

    def __init__(self, username, message_words, message, postid, raw_line):
        # name of the posting user
        self.username = username
        # set of tokens in the body, with and without surrounding punctuation
        self.message_words = self.strip_words(message_words)
        # full message body
        self.message = message
        # id (line number) assigned to this post
        self.postid = postid
        # the raw input line this post was parsed from
        self.raw_line = raw_line
        # line numbers of the posts this one is linked to (gold annotations)
        self.links = set()
        # line numbers predicted by the model
        self.predictions = set()

    def __str__(self):
        return f'Username: {self.username}, Post ID: {self.postid}\nMessage: {self.message}'

    def strip_words(self, message_words_list):
        """Return each word both as-is and with leading/trailing punctuation stripped."""
        stripped = {word.strip(string.punctuation) for word in message_words_list}
        return stripped | set(message_words_list)
class TrainingExample:
    """A candidate (previous message, response message) pair with its features."""

    def __init__(self, response_line, prev_line, features, correct_prediction=0, prediction=-1):
        self.response_line = response_line            # line number of the later message
        self.prev_line = prev_line                    # line number of the candidate antecedent
        self.features = features                      # feature vector for the pair
        self.correct_prediction = correct_prediction  # gold label
        self.prediction = prediction                  # model output (-1 = none yet)
def file_processing(entries, file_in):
    """Parse IRC-style log lines from *file_in* into Post objects on *entries*.

    Example of typical line:
    [08:11] <Peppernrino> Hello everyone!
    The username (Peppernrino) and message body (Hello everyone!) are
    extracted. Lines that do not match the pattern produce empty placeholder
    Posts so that post ids stay aligned with line numbers.
    """
    for post_num, line in enumerate(file_in):
        header = re.search(r'\[.+?\]\s<(.+?)>', line)
        if header is None:
            # non-post line (join/part/etc.): keep an empty placeholder
            entries.append(Post("", [], "", post_num, ""))
            continue
        body = re.findall(r'\[.+?\]\s<.+?>\s(.*)', line)[0]
        entries.append(Post(header.group(1), body.split(), body, post_num, line))
def add_to_dictionary(d_file, master_dictionary):
    """Add every word of the data file *d_file* to *master_dictionary*.

    New words are assigned consecutive indices from the module-level
    ``dictionary_index`` counter (set up in the __main__ block).
    """
    global dictionary_index
    entries = []
    # Fix: use a context manager so the file handle is closed
    # (the original leaked it).
    with open(d_file, 'r') as data_file:
        file_processing(entries, data_file)
    for post in entries:
        for word in post.message_words:
            if word not in master_dictionary:
                master_dictionary[word] = dictionary_index
                dictionary_index += 1
def annotation_processing(entries, annotation_file_in):
    """Record gold reply links from the annotation file onto *entries*.

    Each annotation line lists a source line number followed by the line
    numbers it links to; all numbers found on the line are used.
    """
    for line in annotation_file_in:
        numbers = re.findall(r'\d+', line)
        if not numbers:
            continue
        source = int(numbers[0])
        for target in numbers[1:]:
            entries[source].links.add(int(target))
def generate_features(current_post, prev_post, post_file, compiled_dictionary, linked_pairs):
    """Build the 18-dimensional binary feature vector for a message pair.

    Returns an all-zero vector when current_post is None (the "no link"
    case). compiled_dictionary and linked_pairs are accepted for interface
    compatibility but unused by the active feature set.
    """
    features = numpy.zeros(18)
    if current_post is None:
        return features

    features[0] = 1  # bias term

    # distance bucket: exactly 1 / <6 / <21 / <51 / >=51 posts apart
    distance = current_post.postid - prev_post.postid
    if distance == 1:
        features[1] = 1
    elif distance < 6:
        features[2] = 1
    elif distance < 21:
        features[3] = 1
    elif distance < 51:
        features[4] = 1
    else:
        features[5] = 1

    # username-based features
    if prev_post.username == current_post.username:
        features[6] = 1
    if prev_post.username in current_post.message_words:
        features[7] = 1
    if current_post.username in prev_post.message_words:
        features[8] = 1

    # shared-vocabulary bucket: 0 / 1 / <6 / <15 / >=15 common words
    overlap = len(prev_post.message_words & current_post.message_words)
    if overlap == 0:
        features[9] = 1
    elif overlap == 1:
        features[10] = 1
    elif overlap < 6:
        features[11] = 1
    elif overlap < 15:
        features[12] = 1
    else:
        features[13] = 1

    # bucket for number of intervening posts by the previous author
    posts_between = sum(
        1 for offset in range(1, current_post.postid - prev_post.postid)
        if post_file[prev_post.postid + offset].username == prev_post.username
    )
    if posts_between == 0:
        features[14] = 1
    elif posts_between == 1:
        features[15] = 1
    elif posts_between < 5:
        features[16] = 1
    else:
        features[17] = 1

    return features
def create_message_file(d_file, a_file):
    """Parse a data file and its annotation file into a list of linked Posts.

    Returns the list of Post objects with their ``links`` sets populated
    from the annotation file.
    """
    entries = []
    # Fix: context managers close both handles (the original leaked them);
    # the unused local ``dim`` was removed.
    with open(d_file, 'r') as data_file:
        file_processing(entries, data_file)
    with open(a_file, 'r') as annotation_file:
        annotation_processing(entries, annotation_file)
    return entries
def make_structured_prediction(weights, msg_file, msg_line, compiled_dictionary, linked_pairs):
    """Return the index of the earlier message most likely linked to *msg_line*.

    Returns -1 when no candidate scores above zero. Candidates are scanned
    in reverse so that ties are broken in favor of the closest message.
    """
    best_score = 0
    best_index = -1
    for candidate in reversed(range(msg_line)):
        feats = generate_features(msg_file[msg_line], msg_file[candidate], msg_file, compiled_dictionary, linked_pairs)
        score = numpy.dot(weights, feats)
        if score > best_score:
            best_score = score
            best_index = candidate
    return best_index
def train_perceptron(weights, enumerated_examples, files, compiled_dictionary, linked_pairs):
    """One shuffled pass over the training examples, updating *weights* in place.

    For each example the structured prediction is compared against a correct
    antecedent drawn from the gold links; weights move toward the correct
    pair's features and away from the predicted pair's. Prints training
    accuracy/precision/recall/fscore for the pass.
    """
    # used for keeping track of training statistics
    correct_matches = 0
    true_pos = 0
    false_pos = 0
    false_neg = 0
    true_neg = 0
    num_examples = 0
    random.shuffle(enumerated_examples)
    for i in range(len(enumerated_examples)):
        # print status update at certain intervals
        # NOTE(review): the divisor is 0 when there are fewer than 100
        # examples, which would raise ZeroDivisionError -- confirm inputs.
        if i % ((len(enumerated_examples) - (len(enumerated_examples) % 100)) // 10) == 0:
            print("training on example " + str(i))
        temp_prediction_set = set()
        file_num = enumerated_examples[i][0]
        msg_line = enumerated_examples[i][1]
        # every earlier message is a candidate, so msg_line candidates exist
        num_examples += msg_line
        # generate prediction index
        prediction_index = make_structured_prediction(weights, files[file_num], msg_line, compiled_dictionary, linked_pairs)
        if prediction_index > -1:
            temp_prediction_set.add(prediction_index)
        # "correct" antecedent: the prediction itself when it is a gold link,
        # otherwise a random gold link; -1 when the message has no links
        correct_index = -1
        if files[file_num][msg_line].links:
            if prediction_index in files[file_num][msg_line].links:
                correct_index = prediction_index
            else:
                correct_index = random.choice(tuple(files[file_num][msg_line].links))
        correct_features = 0
        prediction_features = 0
        # -1 maps to the all-zero "no link" feature vector
        if correct_index == -1:
            correct_features = generate_features(None, None, None, None, None)
        else:
            correct_features = generate_features(files[file_num][msg_line], files[file_num][correct_index], files[file_num], compiled_dictionary, linked_pairs)
        if prediction_index == -1:
            prediction_features = generate_features(None, None, None, None, None)
        else:
            prediction_features = generate_features(files[file_num][msg_line], files[file_num][prediction_index], files[file_num], compiled_dictionary, linked_pairs)
        # perceptron update (a no-op when prediction == correct)
        weights += correct_features - prediction_features
        #TODO
        '''
        # first incorrect case... where there was a link
        if files[file_num][msg_line].links and prediction_index not in files[file_num][msg_line].links:
            # false negative
            if prediction_index == -1:
                correct_features = generate_features(files[file_num][msg_line], files[file_num][correct_index], files[file_num], compiled_dictionary, linked_pairs)
                weights += correct_features
            else:
                correct_features = generate_features(files[file_num][msg_line], files[file_num][correct_index], files[file_num], compiled_dictionary, linked_pairs)
                predicted_features = generate_features(files[file_num][msg_line], files[file_num][prediction_index], files[file_num], compiled_dictionary, linked_pairs)
                weights += correct_features - predicted_features
        # second incorrect case... where there wasn't a link
        elif correct_index == -1 and prediction_index != -1:
            predicted_features = generate_features(files[file_num][msg_line], files[file_num][prediction_index], files[file_num], compiled_dictionary, linked_pairs)
            weights -= predicted_features
        '''
        # accumulate confusion counts for this example
        true_pos += len(temp_prediction_set.intersection(files[file_num][msg_line].links))
        false_pos += len(temp_prediction_set.difference(files[file_num][msg_line].links))
        false_neg += len(files[file_num][msg_line].links.difference(temp_prediction_set))
        true_neg += msg_line - len(temp_prediction_set.union(files[file_num][msg_line].links))
    # NOTE(review): these divisions raise ZeroDivisionError when the counts
    # are zero (e.g. no predictions at all) -- confirm that is acceptable.
    accuracy = (true_pos + true_neg)/num_examples
    precision = true_pos / (true_pos + false_pos)
    recall = true_pos / (true_pos + false_neg)
    fscore = 2 * precision * recall / (precision + recall)
    print ("Training Accuracy: " + str(accuracy) + "\nTraining Precision: " + str(precision) + "\nTraining Recall: " + str(recall) + "\nTraining Fscore:" + str(fscore))
def calculate_predictions(weights, testing_list, compiled_dictionary, linked_pairs):
    """Evaluate *weights* on the test <data, annotation> file pairs.

    Prints aggregate statistics and returns the fscore.
    """
    correct_matches = 0
    true_pos = 0
    false_pos = 0
    false_neg = 0
    true_neg = 0
    num_examples = 0
    # testing_list holds alternating <data_file, annotation_file> names
    for k in range(0, len(testing_list), 2):
        print("Working on test file " + str(k//2 + 1) + " of " + str(len(testing_list)//2))
        test_set = create_message_file(testing_list[k], testing_list[k+1])
        # the first 100 messages are skipped -- presumably reserved as
        # context, matching the training setup; confirm
        for message in range(100, len(test_set)):
            num_examples += message
            prediction_index = make_structured_prediction(weights, test_set, message, compiled_dictionary, linked_pairs)
            if prediction_index > -1:
                test_set[message].predictions.add(prediction_index)
            '''
            print("prediction index")
            print(prediction_index)
            print("potential correct options")
            print(test_set[message].links)
            '''
            #print (test_set_files[0][message].predictions, test_set_files[0][message].links)
            # accumulate confusion counts against the gold links
            true_pos += len(test_set[message].predictions.intersection(test_set[message].links))
            false_pos += len(test_set[message].predictions.difference(test_set[message].links))
            false_neg += len(test_set[message].links.difference(test_set[message].predictions))
            true_neg += message - len(test_set[message].predictions.union(test_set[message].links))
    print (correct_matches, num_examples, true_pos, false_pos, false_neg, true_neg)
    # NOTE(review): these divisions raise ZeroDivisionError when no
    # predictions or no gold links exist -- confirm that is acceptable.
    accuracy = (true_pos + true_neg)/num_examples
    precision = true_pos / (true_pos + false_pos)
    recall = true_pos / (true_pos + false_neg)
    fscore = 2 * precision * recall / (precision + recall)
    print ("Accuracy: " + str(accuracy) + "\nPrecision: " + str(precision) + "\nRecall: " + str(recall) + "\nFscore:" + str(fscore))
    return fscore
def generate_annotation_file(weights, benchmark_list, compiled_dictionary, linked_pairs):
    """Write predicted reply links for every benchmark <data, annotation> pair.

    Output lines have the form ``<line> - <link> [<link> ...]`` and go to a
    single file named after sys.argv[-2].

    Fixes: the original reopened the output file with mode 'w' inside the
    loop, truncating earlier iterations' results, and never closed it; the
    unused local ``output_dict`` was removed.
    """
    output_file_name = sys.argv[len(sys.argv) - 2] + ".annotated.structured.final"
    with open(output_file_name, 'w') as f:
        for k in range(0, len(benchmark_list), 2):
            test_set = create_message_file(benchmark_list[k], benchmark_list[k+1])
            # predict links but do not update weights
            for message in range(100, len(test_set)):
                prediction_index = make_structured_prediction(weights, test_set, message, compiled_dictionary, linked_pairs)
                if prediction_index > -1:
                    test_set[message].predictions.add(prediction_index)
                if test_set[message].predictions:
                    output = str(message) + ' -'
                    for val in test_set[message].predictions:
                        output += ' ' + str(val)
                    output += '\n'
                    f.write(output)
def create_pairs(enumerated_examples, linked_pairs, files):
    """Count cross-message word pairs occurring in gold-linked message pairs.

    For each linked (antecedent, response) pair, every (word_from_antecedent,
    word_from_response) combination -- excluding the respective usernames --
    is recorded in *linked_pairs* as ``pair -> [slot_index, count]``.
    """
    next_slot = 0
    for file_num, msg_line in enumerated_examples:
        post = files[file_num][msg_line]
        for prev_line in range(msg_line):
            if prev_line not in post.links:
                continue
            prev_post = files[file_num][prev_line]
            for word_a in prev_post.message_words:
                # skip the responder's username appearing in the antecedent
                if word_a == post.username:
                    continue
                for word_b in post.message_words:
                    # skip the antecedent author's username in the response
                    if word_b == prev_post.username:
                        continue
                    pair = (word_a, word_b)
                    if pair in linked_pairs:
                        linked_pairs[pair][1] += 1
                    else:
                        linked_pairs[pair] = [next_slot, 1]
                        next_slot += 1
def remove_overfitted_pairs(linked_pairs):
    """Keep only word pairs seen more than 10 times, re-indexed densely.

    Returns an OrderedDict mapping each surviving pair to a new consecutive
    index, preserving the original insertion order.
    """
    survivors = collections.OrderedDict()
    frequent = (pair for pair in linked_pairs if linked_pairs[pair][1] > 10)
    for new_index, pair in enumerate(frequent):
        survivors[pair] = new_index
    return survivors
if __name__ == "__main__":
    # Usage: <training list> <testing list> <benchmark list>; each list file
    # names data/annotation files in alternating <data, annotation> pairs.
    training_files = []
    compiled_dictionary = collections.OrderedDict()
    dictionary_index = 0
    linked_pairs = collections.OrderedDict()
    training_list_file = open(sys.argv[1], 'r')
    testing_list_file = open(sys.argv[2], 'r')
    benchmark_list_file = open(sys.argv[3], 'r')
    training_list = []
    testing_list = []
    benchmark_list = []
    # read file names (one per line) from each list file
    for line in training_list_file:
        lines = line.strip()
        training_list.append(lines)
    for line in testing_list_file:
        lines = line.strip()
        testing_list.append(lines)
    for line in benchmark_list_file:
        lines = line.strip()
        benchmark_list.append(lines)
    # index of every data file from argv
    # command line arguments are in <data_file, annotation_file>
    # pairs with the last pair of files being the test set
    print ("creating dictionary")
    # build the global word dictionary from every training data file
    for k in range(0, len(training_list), 2):
        add_to_dictionary(training_list[k], compiled_dictionary)
    print ("size of structured compiled dictionary:")
    print(len(compiled_dictionary))
    # parse each <data, annotation> pair into a list of linked Post objects
    for k in range(0, len(training_list), 2):
        training_files.append(create_message_file(training_list[k], training_list[k+1]))
    enumerated_examples = []
    # examples are (file index, message line) pairs; the first 100 messages
    # of each file are skipped -- presumably reserved as context; confirm
    for file in range(len(training_files)):
        for message in range(100, len(training_files[file])):
            enumerated_examples.append((file, message))
    create_pairs(enumerated_examples, linked_pairs, training_files)
    print("number of linked pairs is " + str(len(linked_pairs)))
    linked_pairs = remove_overfitted_pairs(linked_pairs)
    print("number of training examples is " + str(len(enumerated_examples)))
    print("number of reduced pairs is " + str(len(linked_pairs)))
    #for line in linked_pairs:
    #    print(line[0] + "\t" + line[1])
    # size the weight vector from a representative feature vector
    dummy_feature_vector = generate_features(training_files[0][100], training_files[0][3], training_files[0], compiled_dictionary, linked_pairs)
    weights = numpy.zeros(len(dummy_feature_vector))
    best_weights = 0
    best_fscore = -1
    # train for EPOCHS passes, keeping the weights with the best test fscore
    for i in range(EPOCHS):
        print ("\nepoch " + str(i + 1) + " of perceptron training")
        train_perceptron(weights, enumerated_examples, training_files, compiled_dictionary, linked_pairs)
        print("\ntesting perceptron and calculating predictions for epoch " + str(i + 1))
        fscore = calculate_predictions(weights, testing_list, compiled_dictionary, linked_pairs)
        if fscore > best_fscore:
            best_weights = numpy.copy(weights)
            best_fscore = fscore
    #print("testing perceptron and calculating predictions for epoch " + str(i) + "\n")
    #calculate_predictions(weights, sys.argv[len(sys.argv)-2], sys.argv[len(sys.argv)-1], compiled_dictionary)
    # final evaluation and annotation output on the benchmark files
    print("\nrunning perceptron on benchmark files using best weights:")
    calculate_predictions(best_weights, benchmark_list, compiled_dictionary, linked_pairs)
    print("\ngenerating annotation files")
    generate_annotation_file(best_weights, benchmark_list, compiled_dictionary, linked_pairs)
    print(best_weights)
| |
# Copyright 2017, OpenCensus Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
from collections.abc import MutableMapping
from collections.abc import Sequence
except ImportError:
from collections import MutableMapping
from collections import Sequence
import threading
from collections import OrderedDict, deque
from datetime import datetime
from itertools import chain
from opencensus.common import utils
from opencensus.trace import attributes as attributes_module
from opencensus.trace import base_span
from opencensus.trace import link as link_module
from opencensus.trace import stack_trace as stack_trace_module
from opencensus.trace import status as status_module
from opencensus.trace import time_event
from opencensus.trace.span_context import generate_span_id
from opencensus.trace.tracers import base
# https://github.com/census-instrumentation/opencensus-specs/blob/master/trace/TraceConfig.md # noqa
# Per-span collection limits from the OpenCensus trace config spec above.
MAX_NUM_ATTRIBUTES = 32
MAX_NUM_ANNOTATIONS = 32
MAX_NUM_MESSAGE_EVENTS = 128
MAX_NUM_LINKS = 32
class BoundedList(Sequence):
    """An append-only sequence holding at most *maxlen* items.

    Once full, each new item evicts the oldest one (FIFO); ``dropped``
    counts how many items have been discarded.
    """

    def __init__(self, maxlen):
        self.dropped = 0                    # number of items evicted so far
        self._dq = deque(maxlen=maxlen)
        self._lock = threading.Lock()

    def __repr__(self):
        return "{}({}, maxlen={})".format(
            type(self).__name__, list(self._dq), self._dq.maxlen)

    def __getitem__(self, index):
        return self._dq[index]

    def __len__(self):
        return len(self._dq)

    def __iter__(self):
        return iter(self._dq)

    def append(self, item):
        """Add *item*, evicting the oldest entry when already full."""
        with self._lock:
            if len(self._dq) == self._dq.maxlen:
                self.dropped += 1
            self._dq.append(item)

    def extend(self, seq):
        """Add every item of *seq*, counting how many old entries fall off."""
        with self._lock:
            overflow = len(seq) + len(self._dq) - self._dq.maxlen
            if overflow > 0:
                self.dropped += overflow
            self._dq.extend(seq)

    @classmethod
    def from_seq(cls, maxlen, seq):
        """Build a BoundedList pre-populated with *seq* (must fit in maxlen)."""
        seq = tuple(seq)
        if len(seq) > maxlen:
            raise ValueError
        instance = cls(maxlen)
        instance._dq = deque(seq, maxlen=maxlen)
        return instance
class BoundedDict(MutableMapping):
    """A mapping that holds at most *maxlen* entries.

    Setting a new key on a full dict evicts the oldest entry (insertion
    order) and increments ``dropped``; re-setting an existing key refreshes
    its position without dropping anything.
    """

    def __init__(self, maxlen):
        self.maxlen = maxlen
        self.dropped = 0                    # number of evicted entries
        self._dict = OrderedDict()
        self._lock = threading.Lock()

    def __repr__(self):
        return "{}({}, maxlen={})".format(
            type(self).__name__, dict(self._dict), self.maxlen)

    def __getitem__(self, key):
        return self._dict[key]

    def __setitem__(self, key, value):
        with self._lock:
            if key in self._dict:
                # refresh: remove so reinsertion moves the key to the end
                del self._dict[key]
            elif len(self._dict) == self.maxlen:
                # full: evict the oldest entry
                oldest = next(iter(self._dict.keys()))
                del self._dict[oldest]
                self.dropped += 1
            self._dict[key] = value

    def __delitem__(self, key):
        del self._dict[key]

    def __iter__(self):
        return iter(self._dict)

    def __len__(self):
        return len(self._dict)

    @classmethod
    def from_map(cls, maxlen, mapping):
        """Build a BoundedDict pre-populated from *mapping* (must fit)."""
        mapping = OrderedDict(mapping)
        if len(mapping) > maxlen:
            raise ValueError
        instance = cls(maxlen)
        instance._dict = mapping
        return instance
class SpanKind(object):
    """Integer constants describing the span's role, if any (e.g. in an RPC)."""
    UNSPECIFIED = 0
    SERVER = 1
    CLIENT = 2
class Span(base_span.BaseSpan):
    """A span is an individual timed event which forms a node of the trace
    tree. Each span has its name, span id and parent id. The parent id
    indicates the causal relationships between the individual spans in a
    single distributed trace. Span that does not have a parent id is called
    root span. All spans associated with a specific trace also share a common
    trace id. Spans do not need to be continuous, there can be gaps between
    two spans.

    :type name: str
    :param name: The name of the span.

    :type parent_span: :class:`~opencensus.trace.span.Span`
    :param parent_span: (Optional) Parent span.

    :type attributes: dict
    :param attributes: Collection of attributes associated with the span.
                       Attribute keys must be less than 128 bytes.
                       Attribute values must be less than 16 kilobytes.

    :type start_time: str
    :param start_time: (Optional) Start of the time interval (inclusive)
                       during which the trace data was collected from the
                       application.

    :type end_time: str
    :param end_time: (Optional) End of the time interval (inclusive) during
                     which the trace data was collected from the application.

    :type span_id: int
    :param span_id: Identifier for the span, unique within a trace.

    :type stack_trace: :class: `~opencensus.trace.stack_trace.StackTrace`
    :param stack_trace: (Optional) A call stack appearing in a trace

    :type annotations: list(:class:`opencensus.trace.time_event.Annotation`)
    :param annotations: (Optional) The list of span annotations.

    :type message_events:
        list(:class:`opencensus.trace.time_event.MessageEvent`)
    :param message_events: (Optional) The list of span message events.

    :type links: list
    :param links: (Optional) Links associated with the span. You can have up
                  to 128 links per Span.

    :type status: :class: `~opencensus.trace.status.Status`
    :param status: (Optional) An optional final status for this span.

    :type same_process_as_parent_span: bool
    :param same_process_as_parent_span: (Optional) A highly recommended but not
                                        required flag that identifies when a
                                        trace crosses a process boundary.
                                        True when the parent_span belongs to
                                        the same process as the current span.

    :type context_tracer: :class:`~opencensus.trace.tracers.context_tracer.
                          ContextTracer`
    :param context_tracer: The tracer that holds a stack of spans. If this is
                           not None, then when exiting a span, use the end_span
                           method in the tracer class to finish a span. If no
                           tracer is passed in, then just finish the span using
                           the finish method in the Span class.

    :type span_kind: int
    :param span_kind: (Optional) Highly recommended flag that denotes the type
                      of span (valid values defined by :class:
                      `opencensus.trace.span.SpanKind`)
    """

    # Callbacks invoked (with the new span) every time a Span is created.
    _on_create_callbacks = []

    def __init__(
            self,
            name,
            parent_span=None,
            attributes=None,
            start_time=None,
            end_time=None,
            span_id=None,
            stack_trace=None,
            annotations=None,
            message_events=None,
            links=None,
            status=None,
            same_process_as_parent_span=None,
            context_tracer=None,
            span_kind=SpanKind.UNSPECIFIED):
        self.name = name
        self.parent_span = parent_span
        self.start_time = start_time
        self.end_time = end_time

        if span_id is None:
            span_id = generate_span_id()

        if attributes is None:
            self.attributes = BoundedDict(MAX_NUM_ATTRIBUTES)
        else:
            self.attributes = BoundedDict.from_map(
                MAX_NUM_ATTRIBUTES, attributes)

        # Do not manipulate spans directly using the methods in Span Class,
        # make sure to use the Tracer.
        if parent_span is None:
            # NOTE(review): this rebinds only the local name; self.parent_span
            # (assigned above) remains None -- confirm this is intentional.
            parent_span = base.NullContextManager()

        if annotations is None:
            self.annotations = BoundedList(MAX_NUM_ANNOTATIONS)
        else:
            # BUGFIX: previously bounded by MAX_NUM_LINKS; annotations have
            # their own limit (MAX_NUM_ANNOTATIONS).
            self.annotations = BoundedList.from_seq(
                MAX_NUM_ANNOTATIONS, annotations)

        if message_events is None:
            self.message_events = BoundedList(MAX_NUM_MESSAGE_EVENTS)
        else:
            # BUGFIX: previously bounded by MAX_NUM_LINKS (32), which wrongly
            # raised ValueError for 33-128 message events (limit is 128).
            self.message_events = BoundedList.from_seq(
                MAX_NUM_MESSAGE_EVENTS, message_events)

        if links is None:
            self.links = BoundedList(MAX_NUM_LINKS)
        else:
            self.links = BoundedList.from_seq(MAX_NUM_LINKS, links)

        if status is None:
            self.status = status_module.Status.as_ok()
        else:
            self.status = status

        self.span_id = span_id
        self.stack_trace = stack_trace
        self.same_process_as_parent_span = same_process_as_parent_span
        self._child_spans = []
        self.context_tracer = context_tracer
        self.span_kind = span_kind
        for callback in Span._on_create_callbacks:
            callback(self)

    @staticmethod
    def on_create(callback):
        """Register *callback* to be invoked with every newly created Span."""
        Span._on_create_callbacks.append(callback)

    @property
    def children(self):
        """The child spans of the current span."""
        return self._child_spans

    def span(self, name='child_span'):
        """Create a child span for the current span and append it to the child
        spans list.

        :type name: str
        :param name: (Optional) The name of the child span.

        :rtype: :class: `~opencensus.trace.span.Span`
        :returns: A child Span to be added to the current span.
        """
        child_span = Span(name, parent_span=self)
        self._child_spans.append(child_span)
        return child_span

    def add_attribute(self, attribute_key, attribute_value):
        """Add attribute to span.

        :type attribute_key: str
        :param attribute_key: Attribute key.

        :type attribute_value: str
        :param attribute_value: Attribute value.
        """
        self.attributes[attribute_key] = attribute_value

    def add_annotation(self, description, **attrs):
        """Add an annotation to span.

        :type description: str
        :param description: A user-supplied message describing the event.
                            The maximum length for the description is 256
                            bytes.

        :type attrs: kwargs
        :param attrs: keyworded arguments e.g. failed=True, name='Caching'
        """
        self.annotations.append(time_event.Annotation(
            datetime.utcnow(),
            description,
            attributes_module.Attributes(attrs)
        ))

    def add_message_event(self, message_event):
        """Add a message event to this span.

        :type message_event: :class:`opencensus.trace.time_event.MessageEvent`
        :param message_event: The message event to attach to this span.
        """
        self.message_events.append(message_event)

    def add_link(self, link):
        """Add a Link.

        :type link: :class: `~opencensus.trace.link.Link`
        :param link: A Link object.

        :raises TypeError: If *link* is not a Link instance.
        """
        if isinstance(link, link_module.Link):
            self.links.append(link)
        else:
            raise TypeError("Type Error: received {}, but requires Link.".
                            format(type(link).__name__))

    def set_status(self, status):
        """Sets span status.

        :type status: :class: `~opencensus.trace.status.Status`
        :param status: A Status object.

        :raises TypeError: If *status* is not a Status instance.
        """
        if isinstance(status, status_module.Status):
            self.status = status
        else:
            raise TypeError("Type Error: received {}, but requires Status.".
                            format(type(status).__name__))

    def start(self):
        """Set the start time for a span."""
        self.start_time = utils.to_iso_str()

    def finish(self):
        """Set the end time for a span."""
        self.end_time = utils.to_iso_str()

    def __iter__(self):
        """Iterate through the span tree: all descendants first, then self."""
        for span in chain.from_iterable(map(iter, self.children)):
            yield span
        yield self

    def __enter__(self):
        """Start a span."""
        self.start()
        return self

    def __exit__(self, exception_type, exception_value, traceback):
        """Finish a span, recording any in-flight exception on it."""
        if traceback is not None:
            self.stack_trace = \
                stack_trace_module.StackTrace.from_traceback(traceback)
        if exception_value is not None:
            self.status = status_module.Status.from_exception(exception_value)

        # When a context tracer manages this span, delegate completion to it;
        # its end_span handles finishing. Otherwise finish directly.
        if self.context_tracer is not None:
            self.context_tracer.end_span()
            return

        self.finish()
def format_span_json(span):
    """Helper to format a Span in JSON format.

    :type span: :class:`~opencensus.trace.span.Span`
    :param span: A Span to be transferred to JSON format.

    :rtype: dict
    :returns: Formatted Span.
    """
    span_json = {
        'displayName': utils.get_truncatable_str(span.name),
        'spanId': span.span_id,
        'startTime': span.start_time,
        'endTime': span.end_time,
        'childSpanCount': len(span._child_spans),
    }

    # Only emit parentSpanId when there is a parent with a real id.
    parent = span.parent_span
    if parent is not None and parent.span_id is not None:
        span_json['parentSpanId'] = parent.span_id

    if span.attributes:
        span_json['attributes'] = attributes_module.Attributes(
            span.attributes).format_attributes_json()

    if span.stack_trace is not None:
        span_json['stackTrace'] = span.stack_trace.format_stack_trace_json()

    # Annotations and message events share one 'timeEvents' section.
    time_events = []
    if span.annotations:
        for annotation in span.annotations:
            time_events.append({
                'time': annotation.timestamp,
                'annotation': annotation.format_annotation_json(),
            })
    if span.message_events:
        for event in span.message_events:
            time_events.append({
                'time': event.timestamp,
                'message_event': event.format_message_event_json(),
            })
    if time_events:
        span_json['timeEvents'] = {'timeEvent': time_events}

    if span.links:
        span_json['links'] = {
            'link': [item.format_link_json() for item in span.links],
        }

    if span.status is not None:
        span_json['status'] = span.status.format_status_json()

    if span.same_process_as_parent_span is not None:
        span_json['sameProcessAsParentSpan'] = (
            span.same_process_as_parent_span)

    return span_json
| |
#! /usr/bin/python
#
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# TODO(robinson): Flesh this out considerably. We focused on reflection_test.py
# first, since it's testing the subtler code, and since it provides decent
# indirect testing of the protocol compiler output.
"""Unittest that directly tests the output of the pure-Python protocol
compiler. See //google/protobuf/reflection_test.py for a test which
further ensures that we can use Python protocol message objects as we expect.
"""
__author__ = 'robinson@google.com (Will Robinson)'
from google.apputils import basetest
from google.protobuf.internal import test_bad_identifiers_pb2
from google.protobuf import unittest_custom_options_pb2
from google.protobuf import unittest_import_pb2
from google.protobuf import unittest_import_public_pb2
from google.protobuf import unittest_mset_pb2
from google.protobuf import unittest_no_generic_services_pb2
from google.protobuf import unittest_pb2
from google.protobuf import service
from google.protobuf import symbol_database
MAX_EXTENSION = 536870912
class GeneratorTest(basetest.TestCase):
  """Directly tests the descriptors and values produced by the pure-Python
  protocol compiler output (see module docstring)."""

  def testNestedMessageDescriptor(self):
    field_name = 'optional_nested_message'
    proto_type = unittest_pb2.TestAllTypes
    self.assertEqual(
        proto_type.NestedMessage.DESCRIPTOR,
        proto_type.DESCRIPTOR.fields_by_name[field_name].message_type)

  def testEnums(self):
    # We test only module-level enums here.
    # TODO(robinson): Examine descriptors directly to check
    # enum descriptor output.
    self.assertEqual(4, unittest_pb2.FOREIGN_FOO)
    self.assertEqual(5, unittest_pb2.FOREIGN_BAR)
    self.assertEqual(6, unittest_pb2.FOREIGN_BAZ)
    proto = unittest_pb2.TestAllTypes()
    self.assertEqual(1, proto.FOO)
    self.assertEqual(1, unittest_pb2.TestAllTypes.FOO)
    self.assertEqual(2, proto.BAR)
    self.assertEqual(2, unittest_pb2.TestAllTypes.BAR)
    self.assertEqual(3, proto.BAZ)
    self.assertEqual(3, unittest_pb2.TestAllTypes.BAZ)

  def testExtremeDefaultValues(self):
    message = unittest_pb2.TestExtremeDefaultValues()

    # Python pre-2.6 does not have isinf() or isnan() functions, so we have
    # to provide our own.
    def isnan(val):
      # NaN is never equal to itself.
      return val != val

    def isinf(val):
      # Infinity times zero equals NaN.
      return not isnan(val) and isnan(val * 0)

    self.assertTrue(isinf(message.inf_double))
    self.assertTrue(message.inf_double > 0)
    self.assertTrue(isinf(message.neg_inf_double))
    self.assertTrue(message.neg_inf_double < 0)
    self.assertTrue(isnan(message.nan_double))

    self.assertTrue(isinf(message.inf_float))
    self.assertTrue(message.inf_float > 0)
    self.assertTrue(isinf(message.neg_inf_float))
    self.assertTrue(message.neg_inf_float < 0)
    self.assertTrue(isnan(message.nan_float))
    self.assertEqual("? ? ?? ?? ??? ??/ ??-", message.cpp_trigraph)

  def testHasDefaultValues(self):
    desc = unittest_pb2.TestAllTypes.DESCRIPTOR
    expected_has_default_by_name = {
        'optional_int32': False,
        'repeated_int32': False,
        'optional_nested_message': False,
        'default_int32': True,
    }
    has_default_by_name = dict(
        [(f.name, f.has_default_value)
         for f in desc.fields
         if f.name in expected_has_default_by_name])
    self.assertEqual(expected_has_default_by_name, has_default_by_name)

  def testContainingTypeBehaviorForExtensions(self):
    self.assertEqual(unittest_pb2.optional_int32_extension.containing_type,
                     unittest_pb2.TestAllExtensions.DESCRIPTOR)
    self.assertEqual(unittest_pb2.TestRequired.single.containing_type,
                     unittest_pb2.TestAllExtensions.DESCRIPTOR)

  def testExtensionScope(self):
    self.assertEqual(unittest_pb2.optional_int32_extension.extension_scope,
                     None)
    self.assertEqual(unittest_pb2.TestRequired.single.extension_scope,
                     unittest_pb2.TestRequired.DESCRIPTOR)

  def testIsExtension(self):
    self.assertTrue(unittest_pb2.optional_int32_extension.is_extension)
    self.assertTrue(unittest_pb2.TestRequired.single.is_extension)

    message_descriptor = unittest_pb2.TestRequired.DESCRIPTOR
    non_extension_descriptor = message_descriptor.fields_by_name['a']
    # assertFalse instead of assertTrue(not ...).
    self.assertFalse(non_extension_descriptor.is_extension)

  def testOptions(self):
    proto = unittest_mset_pb2.TestMessageSet()
    self.assertTrue(proto.DESCRIPTOR.GetOptions().message_set_wire_format)

  def testMessageWithCustomOptions(self):
    proto = unittest_custom_options_pb2.TestMessageWithCustomOptions()
    enum_options = proto.DESCRIPTOR.enum_types_by_name['AnEnum'].GetOptions()
    self.assertIsNotNone(enum_options)
    # TODO(gps): We really should test for the presence of the enum_opt1
    # extension and for its value to be set to -789.

  def testNestedTypes(self):
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(
        set(unittest_pb2.TestAllTypes.DESCRIPTOR.nested_types),
        set([
            unittest_pb2.TestAllTypes.NestedMessage.DESCRIPTOR,
            unittest_pb2.TestAllTypes.OptionalGroup.DESCRIPTOR,
            unittest_pb2.TestAllTypes.RepeatedGroup.DESCRIPTOR,
        ]))
    self.assertEqual(unittest_pb2.TestEmptyMessage.DESCRIPTOR.nested_types, [])
    self.assertEqual(
        unittest_pb2.TestAllTypes.NestedMessage.DESCRIPTOR.nested_types, [])

  def testContainingType(self):
    self.assertIsNone(
        unittest_pb2.TestEmptyMessage.DESCRIPTOR.containing_type)
    self.assertIsNone(
        unittest_pb2.TestAllTypes.DESCRIPTOR.containing_type)
    self.assertEqual(
        unittest_pb2.TestAllTypes.NestedMessage.DESCRIPTOR.containing_type,
        unittest_pb2.TestAllTypes.DESCRIPTOR)
    # BUGFIX: the NestedMessage check was duplicated (copy-paste); the
    # intended coverage -- matching testNestedTypes -- is OptionalGroup.
    self.assertEqual(
        unittest_pb2.TestAllTypes.OptionalGroup.DESCRIPTOR.containing_type,
        unittest_pb2.TestAllTypes.DESCRIPTOR)
    self.assertEqual(
        unittest_pb2.TestAllTypes.RepeatedGroup.DESCRIPTOR.containing_type,
        unittest_pb2.TestAllTypes.DESCRIPTOR)

  def testContainingTypeInEnumDescriptor(self):
    self.assertIsNone(unittest_pb2._FOREIGNENUM.containing_type)
    self.assertEqual(unittest_pb2._TESTALLTYPES_NESTEDENUM.containing_type,
                     unittest_pb2.TestAllTypes.DESCRIPTOR)

  def testPackage(self):
    self.assertEqual(
        unittest_pb2.TestAllTypes.DESCRIPTOR.file.package,
        'protobuf_unittest')
    desc = unittest_pb2.TestAllTypes.NestedMessage.DESCRIPTOR
    self.assertEqual(desc.file.package, 'protobuf_unittest')
    self.assertEqual(
        unittest_import_pb2.ImportMessage.DESCRIPTOR.file.package,
        'protobuf_unittest_import')
    self.assertEqual(
        unittest_pb2._FOREIGNENUM.file.package, 'protobuf_unittest')
    self.assertEqual(
        unittest_pb2._TESTALLTYPES_NESTEDENUM.file.package,
        'protobuf_unittest')
    self.assertEqual(
        unittest_import_pb2._IMPORTENUM.file.package,
        'protobuf_unittest_import')

  def testExtensionRange(self):
    self.assertEqual(
        unittest_pb2.TestAllTypes.DESCRIPTOR.extension_ranges, [])
    self.assertEqual(
        unittest_pb2.TestAllExtensions.DESCRIPTOR.extension_ranges,
        [(1, MAX_EXTENSION)])
    self.assertEqual(
        unittest_pb2.TestMultipleExtensionRanges.DESCRIPTOR.extension_ranges,
        [(42, 43), (4143, 4244), (65536, MAX_EXTENSION)])

  def testFileDescriptor(self):
    self.assertEqual(unittest_pb2.DESCRIPTOR.name,
                     'google/protobuf/unittest.proto')
    self.assertEqual(unittest_pb2.DESCRIPTOR.package, 'protobuf_unittest')
    self.assertIsNotNone(unittest_pb2.DESCRIPTOR.serialized_pb)
    self.assertEqual(unittest_pb2.DESCRIPTOR.dependencies,
                     [unittest_import_pb2.DESCRIPTOR])
    self.assertEqual(unittest_import_pb2.DESCRIPTOR.dependencies,
                     [unittest_import_public_pb2.DESCRIPTOR])

  def testNoGenericServices(self):
    self.assertTrue(hasattr(unittest_no_generic_services_pb2, "TestMessage"))
    self.assertTrue(hasattr(unittest_no_generic_services_pb2, "FOO"))
    self.assertTrue(hasattr(unittest_no_generic_services_pb2, "test_extension"))

    # Make sure unittest_no_generic_services_pb2 has no services subclassing
    # Proto2 Service class.
    if hasattr(unittest_no_generic_services_pb2, "TestService"):
      self.assertFalse(issubclass(unittest_no_generic_services_pb2.TestService,
                                  service.Service))

  def testMessageTypesByName(self):
    file_type = unittest_pb2.DESCRIPTOR
    self.assertEqual(
        unittest_pb2._TESTALLTYPES,
        file_type.message_types_by_name[unittest_pb2._TESTALLTYPES.name])

    # Nested messages shouldn't be included in the message_types_by_name
    # dictionary (like in the C++ API).
    self.assertNotIn(
        unittest_pb2._TESTALLTYPES_NESTEDMESSAGE.name,
        file_type.message_types_by_name)

  def testEnumTypesByName(self):
    file_type = unittest_pb2.DESCRIPTOR
    self.assertEqual(
        unittest_pb2._FOREIGNENUM,
        file_type.enum_types_by_name[unittest_pb2._FOREIGNENUM.name])

  def testExtensionsByName(self):
    file_type = unittest_pb2.DESCRIPTOR
    self.assertEqual(
        unittest_pb2.my_extension_string,
        file_type.extensions_by_name[unittest_pb2.my_extension_string.name])

  def testPublicImports(self):
    # Test public imports as embedded message.
    all_type_proto = unittest_pb2.TestAllTypes()
    self.assertEqual(0, all_type_proto.optional_public_import_message.e)

    # PublicImportMessage is actually defined in unittest_import_public_pb2
    # module, and is public imported by unittest_import_pb2 module.
    public_import_proto = unittest_import_pb2.PublicImportMessage()
    self.assertEqual(0, public_import_proto.e)
    self.assertIs(unittest_import_public_pb2.PublicImportMessage,
                  unittest_import_pb2.PublicImportMessage)

  def testBadIdentifiers(self):
    # We're just testing that the code was imported without problems.
    message = test_bad_identifiers_pb2.TestBadIdentifiers()
    self.assertEqual(message.Extensions[test_bad_identifiers_pb2.message],
                     "foo")
    self.assertEqual(message.Extensions[test_bad_identifiers_pb2.descriptor],
                     "bar")
    self.assertEqual(message.Extensions[test_bad_identifiers_pb2.reflection],
                     "baz")
    self.assertEqual(message.Extensions[test_bad_identifiers_pb2.service],
                     "qux")

  def testOneof(self):
    desc = unittest_pb2.TestAllTypes.DESCRIPTOR
    self.assertEqual(1, len(desc.oneofs))
    self.assertEqual('oneof_field', desc.oneofs[0].name)
    self.assertEqual(0, desc.oneofs[0].index)
    self.assertIs(desc, desc.oneofs[0].containing_type)
    self.assertIs(desc.oneofs[0], desc.oneofs_by_name['oneof_field'])
    nested_names = set(['oneof_uint32', 'oneof_nested_message',
                        'oneof_string', 'oneof_bytes'])
    self.assertSameElements(
        nested_names,
        [field.name for field in desc.oneofs[0].fields])
    # .items() works on both Python 2 and 3 (iteritems() is Python 2 only).
    for field_name, field_desc in desc.fields_by_name.items():
      if field_name in nested_names:
        self.assertIs(desc.oneofs[0], field_desc.containing_oneof)
      else:
        self.assertIsNone(field_desc.containing_oneof)
class SymbolDatabaseRegistrationTest(basetest.TestCase):
  """Checks that messages, enums and files are correctly registered."""

  def testGetSymbol(self):
    # assertEquals is a deprecated alias; use assertEqual throughout.
    self.assertEqual(
        unittest_pb2.TestAllTypes, symbol_database.Default().GetSymbol(
            'protobuf_unittest.TestAllTypes'))
    self.assertEqual(
        unittest_pb2.TestAllTypes.NestedMessage,
        symbol_database.Default().GetSymbol(
            'protobuf_unittest.TestAllTypes.NestedMessage'))
    # A nested message is not registered under its bare (unqualified) name.
    with self.assertRaises(KeyError):
      symbol_database.Default().GetSymbol('protobuf_unittest.NestedMessage')
    self.assertEqual(
        unittest_pb2.TestAllTypes.OptionalGroup,
        symbol_database.Default().GetSymbol(
            'protobuf_unittest.TestAllTypes.OptionalGroup'))
    self.assertEqual(
        unittest_pb2.TestAllTypes.RepeatedGroup,
        symbol_database.Default().GetSymbol(
            'protobuf_unittest.TestAllTypes.RepeatedGroup'))

  def testEnums(self):
    self.assertEqual(
        'protobuf_unittest.ForeignEnum',
        symbol_database.Default().pool.FindEnumTypeByName(
            'protobuf_unittest.ForeignEnum').full_name)
    self.assertEqual(
        'protobuf_unittest.TestAllTypes.NestedEnum',
        symbol_database.Default().pool.FindEnumTypeByName(
            'protobuf_unittest.TestAllTypes.NestedEnum').full_name)

  def testFindFileByName(self):
    self.assertEqual(
        'google/protobuf/unittest.proto',
        symbol_database.Default().pool.FindFileByName(
            'google/protobuf/unittest.proto').name)


if __name__ == '__main__':
  basetest.main()
| |
import unittest
from zipper import (
Zipper,
)
# Tests adapted from `problem-specifications//canonical-data.json`
class ZipperTest(unittest.TestCase):
    """Exercises the Zipper binary-tree zipper implementation.

    Every test operates on the same four-node tree, built fresh per use by
    ``_tree()`` so that no test can observe another test's mutations.
    """

    @staticmethod
    def _tree():
        """Return a fresh copy of the standard test tree.

        Shape:  1
               / \\
              2   4
               \\
                3
        """
        return {
            "value": 1,
            "left": {
                "value": 2,
                "left": None,
                "right": {"value": 3, "left": None, "right": None},
            },
            "right": {"value": 4, "left": None, "right": None},
        }

    def test_data_is_retained(self):
        zipper = Zipper.from_tree(self._tree())
        self.assertEqual(zipper.to_tree(), self._tree())

    def test_left_right_and_value(self):
        zipper = Zipper.from_tree(self._tree())
        self.assertEqual(zipper.left().right().value(), 3)

    def test_dead_end(self):
        # Node 2 has no left child, so descending twice left is None.
        zipper = Zipper.from_tree(self._tree())
        self.assertIsNone(zipper.left().left())

    def test_tree_from_deep_focus(self):
        # to_tree() reconstructs the whole tree regardless of focus depth.
        zipper = Zipper.from_tree(self._tree())
        self.assertEqual(zipper.left().right().to_tree(), self._tree())

    def test_traversing_up_from_top(self):
        zipper = Zipper.from_tree(self._tree())
        self.assertIsNone(zipper.up())

    def test_left_right_and_up(self):
        zipper = Zipper.from_tree(self._tree())
        result = zipper.left().up().right().up().left().right().value()
        self.assertEqual(result, 3)

    def test_test_ability_to_descend_multiple_levels_and_return(self):
        zipper = Zipper.from_tree(self._tree())
        self.assertEqual(zipper.left().right().up().up().value(), 1)

    def test_set_value(self):
        expected = self._tree()
        expected["left"]["value"] = 5
        zipper = Zipper.from_tree(self._tree())
        self.assertEqual(zipper.left().set_value(5).to_tree(), expected)

    def test_set_value_after_traversing_up(self):
        expected = self._tree()
        expected["left"]["value"] = 5
        zipper = Zipper.from_tree(self._tree())
        result = zipper.left().right().up().set_value(5).to_tree()
        self.assertEqual(result, expected)

    def test_set_left_with_leaf(self):
        leaf = {"value": 5, "left": None, "right": None}
        expected = self._tree()
        expected["left"]["left"] = {"value": 5, "left": None, "right": None}
        zipper = Zipper.from_tree(self._tree())
        self.assertEqual(zipper.left().set_left(leaf).to_tree(), expected)

    def test_set_right_with_null(self):
        expected = self._tree()
        expected["left"]["right"] = None
        zipper = Zipper.from_tree(self._tree())
        self.assertEqual(zipper.left().set_right(None).to_tree(), expected)

    def test_set_right_with_subtree(self):
        subtree = {
            "value": 6,
            "left": {"value": 7, "left": None, "right": None},
            "right": {"value": 8, "left": None, "right": None},
        }
        expected = self._tree()
        expected["right"] = {
            "value": 6,
            "left": {"value": 7, "left": None, "right": None},
            "right": {"value": 8, "left": None, "right": None},
        }
        zipper = Zipper.from_tree(self._tree())
        self.assertEqual(zipper.set_right(subtree).to_tree(), expected)

    def test_set_value_on_deep_focus(self):
        expected = self._tree()
        expected["left"]["right"]["value"] = 5
        zipper = Zipper.from_tree(self._tree())
        result = zipper.left().right().set_value(5).to_tree()
        self.assertEqual(result, expected)

    def test_different_paths_to_same_zipper(self):
        result = Zipper.from_tree(self._tree()).left().up().right().to_tree()
        expected = Zipper.from_tree(self._tree()).right().to_tree()
        self.assertEqual(result, expected)


if __name__ == "__main__":
    unittest.main()
| |
#
# Copyright 2013 - Tom Alessi
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains all of the escalation functions of SSD."""
import logging
from django.core.cache import cache
from django.db import IntegrityError
from ssd.dashboard.decorators import staff_member_required_ssd
from django.shortcuts import render_to_response
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseBadRequest
from django.template import RequestContext
from django.db.models import F
from django.contrib import messages
from ssd.dashboard.models import Config_Escalation, Escalation
from ssd.dashboard.forms import AddContactForm, EscalationConfigForm, XEditableModifyForm, SwitchContactForm, RemoveContactForm
# Get an instance of the ssd logger
logger = logging.getLogger(__name__)
def escalation(request):
    """Escalation page

    Print an escalation page should a user want additional information
    on who to contact when incidents occur
    """

    logger.debug('%s view being executed.' % 'escalation.escalation')

    # If this functionality is disabled in the admin, let the user know
    enable_escalation = cache.get('enable_escalation')
    if enable_escalation is None:
        # Cache miss: read the flag from the single Config_Escalation row
        # and prime the cache for subsequent requests.
        enable_escalation = Config_Escalation.objects.filter(id=Config_Escalation.objects.values('id')[0]['id']).values('enabled')[0]['enabled']
        cache.set('enable_escalation', enable_escalation)
    if enable_escalation == 0:
        # Escalation is disabled, send them to the homepage with an error message
        messages.add_message(request, messages.ERROR, 'Your system administrator has disabled the escalation path functionality')
        return HttpResponseRedirect('/')

    # Obtain the escalation contacts (hidden ones excluded), in display order
    contacts = Escalation.objects.filter(hidden=False).values('id', 'name', 'contact_details').order_by('order')

    # Print the page
    return render_to_response(
        'escalation/escalation.html',
        {
            'title': 'System Status Dashboard | Escalation Path',
            'contacts': contacts,
            'instructions': Config_Escalation.objects.filter(id=Config_Escalation.objects.values('id')[0]['id']).values('instructions')[0]['instructions']
        },
        context_instance=RequestContext(request)
    )
@staff_member_required_ssd
def escalation_config(request):
    """Escalation configuration admin view.

    On a valid POST, persist the escalation configuration (the
    Config_Escalation table holds exactly one row) and invalidate the
    cached enable flag; otherwise render the current configuration.
    """

    logger.debug('%s view being executed.' % 'escalation.escalation_config')

    # If this is a POST, then validate the form and save the data
    if request.method == 'POST':

        # Check the form elements
        form = EscalationConfigForm(request.POST)
        logger.debug('Form submit (POST): %s, with result: %s' % ('EscalationConfigForm', form))

        if form.is_valid():
            # Obtain the cleaned data
            enabled = form.cleaned_data['enabled']
            instructions = form.cleaned_data['instructions']

            # There should only ever be one record in this table
            Config_Escalation.objects.filter(id=Config_Escalation.objects.values('id')[0]['id']).update(enabled=enabled, instructions=instructions)

            # Clear the cache so the new 'enabled' value takes effect
            cache.delete('enable_escalation')

            # Set a success message
            messages.add_message(request, messages.SUCCESS, 'Escalation configuration saved successfully')
        else:
            messages.add_message(request, messages.ERROR, 'Invalid data entered, please correct the errors below:')

    # Not a POST or a failed form submit
    else:
        # Create a blank form.
        # BUGFIX: was `form = EscalationConfigForm` (the class object, not an
        # instance); instantiate it so the template renders a real form.
        form = EscalationConfigForm()

    # Obtain the escalation config
    escalation_config = Config_Escalation.objects.filter(id=Config_Escalation.objects.values('id')[0]['id']).values('enabled', 'instructions')

    # Print the page
    return render_to_response(
        'escalation/config.html',
        {
            'title': 'System Status Dashboard | Escalation Admin',
            'escalation_config': escalation_config,
            'form': form,
            'nav_section': 'escalation',
            'nav_sub': 'escalation_config'
        },
        context_instance=RequestContext(request)
    )
@staff_member_required_ssd
def escalation_contacts(request):
    """View and add escalation contacts.

    On POST, validates AddContactForm and appends a new contact at the end
    of the escalation order (hidden by default); otherwise lists the
    current contacts.
    """
    logger.debug('%s view being executed.' % 'escalation.escalation_contacts')

    # If this is a POST, then validate the form and save the data
    if request.method == 'POST':

        # Check the form elements
        form = AddContactForm(request.POST)
        logger.debug('Form submit (POST): %s, with result: %s' % ('AddContactForm',form))

        if form.is_valid():
            name = form.cleaned_data['name']
            contact_details = form.cleaned_data['contact_details']

            # Obtain the last entry so the new contact goes at the end
            order = Escalation.objects.values('order').order_by('-order')[:1]

            # If there are no entries, this will be 1
            if not order:
                order = 1
            # Increase the order by 1
            else:
                order = order[0]['order'] + 1

            # Don't allow duplicates
            try:
                Escalation(order=order,name=name,contact_details=contact_details,hidden=True).save()
            except IntegrityError:
                pass

            # Send them back so they can see the newly created contact
            return HttpResponseRedirect('/admin/escalation_contacts')

        # Invalid form
        else:
            # CONSISTENCY FIX: report through the module logger like every
            # other view in this file (the original used a bare print).
            logger.error('Invalid form: AddContactForm: %s' % form.errors)

    # Not a POST
    else:
        # Create a blank form
        form = AddContactForm()

    # Obtain all current contacts
    contacts = Escalation.objects.values('id','order','name','contact_details','hidden').order_by('order')

    # Print the page
    return render_to_response(
       'escalation/contacts.html',
       {
          'title':'System Status Dashboard | Manage Escalation Contacts',
          'form':form,
          'contacts':contacts,
          'nav_section':'escalation',
          'nav_sub':'escalation_contacts'
       },
       context_instance=RequestContext(request)
    )
@staff_member_required_ssd
def contact_switch(request):
    """Switch Contacts Around or Hide Them

    Expects a GET carrying ``id`` and an ``action`` in
    {'up', 'down', 'hide', 'show'}. 'up'/'down' swap the contact's
    ``order`` with its neighbour; 'hide'/'show' toggle the hidden flag.
    Always redirects back to the contacts listing.
    """
    logger.debug('%s view being executed.' % 'escalation.contact_switch')

    # If this is a GET, then validate the form and save the data, otherise send them
    # to the main escalation page
    if request.method == 'GET':

        # Check the form elements
        form = SwitchContactForm(request.GET)
        logger.debug('Form submit (GET): %s, with result: %s' % ('SwitchContactForm',form))

        if form.is_valid():
            id = form.cleaned_data['id']
            action = form.cleaned_data['action']

            # Obtain all id's and orders and put into a dictionary
            # NOTE(review): 'orders' is never read below -- confirm it can be removed.
            orders = Escalation.objects.values('id','order').order_by('order')

            # Run through the orders and see if we need to change anything
            # If we are moving up, switch places with the previous
            # If we are moving down, switch places with the next
            # If we are hiding, remove the order
            # If we are unhiding, add to the end

            # Move this up, meaning decrease the order (only if greater than 1)
            if action == 'up':
                # Get the order
                id_order = Escalation.objects.filter(id=id).values('order')[0]['order']
                # If the order if greater than 1, move it
                if id_order > 1:
                    # Get the id of the one before this one so we can switch places with it
                    after_order = id_order - 1
                    after_id = Escalation.objects.filter(order=after_order).values('id')[0]['id']
                    # Switch places (F() performs the arithmetic in the database)
                    Escalation.objects.filter(id=id).update(order=F('order')-1)
                    Escalation.objects.filter(id=after_id).update(order=F('order')+1)
                    # Set a success message
                    messages.add_message(request, messages.SUCCESS, 'Escalation contacts successfully modified.')

            # Move this down, meaning increase the order
            elif action == "down":
                # Get the order
                id_order = Escalation.objects.filter(id=id).values('order')[0]['order']

                # If it's already at the bottom, don't do anything
                # Get a count of contacts
                contacts_count = Escalation.objects.count()

                # If the order is less than the total, move it down (otherwise it's already at the bottom)
                if id_order < contacts_count:
                    # Get the id of the one after this one so we can switch places with it
                    after_order = id_order + 1
                    after_id = Escalation.objects.filter(order=after_order).values('id')[0]['id']
                    # Switch places
                    Escalation.objects.filter(id=id).update(order=F('order')+1)
                    Escalation.objects.filter(id=after_id).update(order=F('order')-1)
                    # Set a success message
                    messages.add_message(request, messages.SUCCESS, 'Escalation contacts successfully modified.')

            # Hide
            elif action == 'hide':
                Escalation.objects.filter(id=id).update(hidden=True)
                # Set a success message
                messages.add_message(request, messages.SUCCESS, 'Escalation contacts successfully modified.')

            # Show
            elif action == 'show':
                Escalation.objects.filter(id=id).update(hidden=False)
                # Set a success message
                messages.add_message(request, messages.SUCCESS, 'Escalation contacts successfully modified.')

            # Unknown request
            else:
                # Set an error message
                messages.add_message(request, messages.ERROR, 'Unknown request type - contact not modified.')

        # Invalid form
        else:
            messages.add_message(request, messages.ERROR, 'There was an error processing your request: %s' % form.errors)

    # Send them back so they can see the newly updated services list
    return HttpResponseRedirect('/admin/escalation_contacts')
@staff_member_required_ssd
def contact_delete(request):
    """Remove a contact (POST) or confirm the removal first (GET).

    After a successful delete, the surviving contacts are renumbered
    1..N so the escalation order stays contiguous.
    """
    logger.debug('%s view being executed.' % 'escalation.contact_delete')

    # If it's a POST, then we are going to delete it after confirmation
    if request.method == 'POST':

        # Check the form elements
        form = RemoveContactForm(request.POST)
        logger.debug('Form submit (POST): %s, with result: %s' % ('RemoveContactForm',form))

        if form.is_valid():
            id = form.cleaned_data['id']

            # Delete the contact
            Escalation.objects.filter(id=id).delete()

            # Renumber whatever is left so orders run 1..N with no gaps.
            # BUG FIX: the original compared the queryset itself to an int
            # ('if orders > 1'), and its else-branch updated the order of
            # the id that was just deleted (a no-op). Renumbering
            # unconditionally is correct for any remaining count.
            orders = Escalation.objects.values('id','order').order_by('order')
            for counter, contact in enumerate(orders, start=1):
                Escalation.objects.filter(id=contact['id']).update(order=counter)

            # Set a message that delete was successful
            messages.add_message(request, messages.SUCCESS, 'Contact successfully removed.')

        # Redirect to the escalation contacts page
        return HttpResponseRedirect('/admin/escalation_contacts')

    # If we get this far, it's a GET and we are confirming that the contact should be removed.
    # Make sure we have an ID
    form = RemoveContactForm(request.GET)
    logger.debug('Form submit (GET): %s, with result: %s' % ('RemoveContactForm',form))

    if form.is_valid():
        # Obtain the cleaned data
        id = form.cleaned_data['id']

        # Obtain the contact name
        contact_name = Escalation.objects.filter(id=id).values('name')

        # If someone already deleted it, set an error message and send back to the listing
        if not contact_name:
            messages.add_message(request, messages.ERROR, 'That contact has already been removed, perhaps someone else deleted it?')
            return HttpResponseRedirect('/admin/escalation_contacts')

        # Print the page (confirm they want to delete the contact)
        return render_to_response(
           'escalation/contact_delete.html',
           {
              'title':'System Status Dashboard | Confirm Delete',
              'id':id,
              'contact_name':contact_name,
              'nav_section':'escalation',
              'nav_sub':'contact_delete'
           },
           context_instance=RequestContext(request)
        )

    # Invalid request
    else:
        # Set a message that the delete failed and send back to the contacts page
        messages.add_message(request, messages.ERROR, 'Invalid request.')
        return HttpResponseRedirect('/admin/escalation_contacts')
@staff_member_required_ssd
def contact_modify(request):
    """Modify contact properties.

    Called only via AJAX (POST) from the escalation_contacts view.
    Updates a single whitelisted column ('name' or 'contact_details')
    of one contact, identified by ``pk``.
    """
    logger.debug('%s view being executed.' % 'escalation.contact_modify')

    # If this is a POST, then validate the form and save the data, otherise do nothing
    if request.method == 'POST':

        # Check the form elements
        form = XEditableModifyForm(request.POST)
        logger.debug('Form submit (POST): %s, with result: %s' % ('XEditableModifyForm',form))

        if form.is_valid():
            pk = form.cleaned_data['pk']
            name = form.cleaned_data['name']
            value = form.cleaned_data['value']

            # Only allow specific columns to be updated (guard clause
            # replaces the original 'pass/else' inversion)
            if name not in ('name', 'contact_details'):
                logger.error('Invalid column specified during contact modification: %s' % name)
                return HttpResponseBadRequest('An error was encountered with this request.')

            # IDIOM FIX: renamed from 'filter' so the builtin is not shadowed
            update_fields = {name: value}

            # Update it
            try:
                Escalation.objects.filter(id=pk).update(**update_fields)
            except Exception as e:
                logger.error('%s: Error saving update: %s' % ('escalation.contact_modify',e))
                return HttpResponseBadRequest('An error was encountered with this request.')

            return HttpResponse('Value successfully modified')

        else:
            logger.error('%s: invalid form: %s' % ('escalation.contact_modify',form.errors))
            return HttpResponseBadRequest('Invalid request')

    else:
        logger.error('%s: Invalid request: GET received but only POST accepted.' % ('escalation.contact_modify'))
        messages.add_message(request, messages.ERROR, 'Invalid request.')
        return HttpResponseRedirect('/admin/escalation_contacts')
| |
#!/usr/bin/env python
import sys, re
from collections import Counter
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
import matplotlib
matplotlib.style.use('ggplot')
import wordcloud as wc
import pdb # pdb.set_trace() when needed
# Matches any int or float, optionally signed and with a scientific-notation
# exponent. NOTE(review): group 1 (what callers read via match[0]) excludes
# the leading sign and the exponent, so '-2e3' is read as 2.0 -- confirm
# that magnitude-only extraction is intended.
RE_ALL_NUM = re.compile(r'[+-]?(\d+(\.\d*)?|\.\d+)([eE][+-]?\d+)?')
# A run of digits at the start of a string, used as a repeat count.
RE_START_INT = re.compile(r'^\d+') # Purposefully don't handle decimals
# Utility transformation functions
# Derive total hour slept by handling AM vs PM
def sleep_duration(row):
    """Return hours slept between row['Bed'] and row['Wakeup'].

    Relies on timedelta.seconds being the non-negative within-day
    remainder: a bed time later in the day than the wakeup time
    (e.g. 23:00 -> 07:00) still yields the correct positive duration.
    """
    delta = row['Wakeup'] - row['Bed']
    return delta.seconds / 3600.0
# Just a sum of every number found in a text blob, 0 otherwise
def sum_all_nums(field):
    """Return a row function summing every number found in row[field].

    Nulls contribute 0.0. NOTE(review): RE_ALL_NUM's first group omits
    the sign and exponent, so magnitudes are summed.
    """
    def sum_row_vals(row):
        text = row[field]
        if not pd.notnull(text):
            return 0.0
        return sum(float(match[0]) for match in RE_ALL_NUM.findall(text))
    return sum_row_vals
# Filter a list to only keep text matching a given hashtag
# IF None is passed in it will take everything
def extract_hashtag(field, hashtag):
    """Return a row function that collects items from the text in row[field].

    Only lines containing ``hashtag`` are kept (every line when ``hashtag``
    is None). Lines are split on ', '; an item with a leading integer is
    repeated that many times. Results are lowercased and stripped; empty
    items and null fields yield [].
    """
    def get_hashtag_contents(row):
        blob = row[field]
        if not pd.notnull(blob):
            return []
        collected = []
        for line in blob.split("\n"):
            # Keep the line's content only if it matches the filter.
            if hashtag is None:  # Just take everything
                cleaned = line.replace('- ', '').strip()
            elif hashtag in line:
                cleaned = line.replace(hashtag, '').replace('- ', '').strip()
            else:
                cleaned = ''
            for entry in (piece.strip() for piece in cleaned.split(", ")):
                # A leading integer means "this item, N times".
                leading = [int(x) for x in RE_START_INT.findall(entry)]
                repeat = leading[0] if leading else 1
                collected.extend((RE_START_INT.sub('', entry).strip(),) * repeat)
        return [item.strip().lower() for item in collected if item != '']
    return get_hashtag_contents
def count_values(field, value):
    """Return a row function totalling occurrences of ``value`` in row[field].

    row[field] is expected to be a list of strings; each entry containing
    ``value`` contributes its first embedded number, or 1.0 when it
    carries no number.
    """
    def get_value_counts(row):
        total = 0.0
        for entry in row[field]:
            if value not in entry:
                continue
            nums = RE_ALL_NUM.findall(entry)
            total += float(nums[0][0]) if nums else 1.0
        return total
    return get_value_counts
class Analyze:
    """Load a personal-log CSV, derive columns, and emit plots/word clouds.

    The CSV is assumed to contain at least 'Date', 'Bed', 'Wakeup',
    'Food' and 'Drinks' columns (inferred from the derived columns
    below -- confirm against the actual data).
    """

    def __init__(self, fn):
        # Path of the CSV file to analyze.
        self.fn = fn

    def read_file(self):
        """Read the CSV at ``self.fn`` into the DataFrame ``self.d``."""
        # BUG FIX: read self.fn; the original referenced the module-global
        # 'fn', which broke any use of Analyze outside the __main__ path.
        self.d = pd.read_csv(self.fn,
                             parse_dates=['Date', 'Bed', 'Wakeup'],
                             index_col=0,
                             low_memory=False)

    def add_cols(self):
        """Derive per-day columns (sleep, meals, drinks) from the raw text."""
        d = self.d
        d['SleepDuration'] = d.apply(sleep_duration, axis=1)
        d['Breakfast'] = d.apply(extract_hashtag('Food', '#breakfast'), axis=1)
        d['Lunch'] = d.apply(extract_hashtag('Food', '#lunch'), axis=1)
        d['Dinner'] = d.apply(extract_hashtag('Food', '#dinner'), axis=1)
        d['Snack'] = d.apply(extract_hashtag('Food', '#snack'), axis=1)
        d['DrinksList'] = d.apply(extract_hashtag('Drinks', None), axis=1)
        d['Coffee'] = d.apply(count_values('DrinksList', 'coffee'), axis=1)
        d['Tea'] = d.apply(count_values('DrinksList', 'tea'), axis=1)
        d['Coke'] = d.apply(count_values('DrinksList', 'coke'), axis=1)
        d['Beer'] = d.apply(count_values('DrinksList', 'beer'), axis=1)
        d['Wine'] = d.apply(count_values('DrinksList', 'wine'), axis=1)
        d['Cocktail'] = d.apply(count_values('DrinksList', 'cocktail'), axis=1)
        d['Alcohol'] = d['Beer'] + d['Wine'] + d['Cocktail']
        # Previous day's alcohol total, for lagged correlation with sleep.
        d['AlcoholLag'] = d['Alcohol'].shift()

    # Various plotting functions
    def generate_wordcloud(self, column):
        """Render a word-cloud PNG from the list-valued ``column``."""
        d = self.d
        # Strip out numbers and extra spaces
        text = ", ".join([", ".join(x) for x in d[column]])
        text = text.lower().replace('.', ' ')
        # Raw strings for the regexes (avoids invalid-escape warnings).
        text = re.sub(r'\d+', ' ', text)
        text = re.sub(r'\s+', ' ', text)
        freq = Counter(text.split(", "))
        wordcloud = wc.WordCloud(stopwords=None, mask=None,
                                 width=1000, height=1000, font_path=None,
                                 margin=10, relative_scaling=0.0,
                                 color_func=wc.random_color_func,
                                 background_color='black')
        wordcloud.generate_from_frequencies(freq)
        image = wordcloud.to_image()
        out_fn = ('wordcloud-' + column + '.png').lower()
        image.save(out_fn, format='png')

    def do_fit_and_plot(self, columns):
        """Scatter-plot y vs x with a degree-1 least-squares fit overlaid.

        ``columns`` must be a two-element sequence (y, x); rows with NaN
        in either column are dropped before fitting.
        """
        d = self.d
        if len(columns) != 2:
            print('Need to pass in two columns for now y, x')
            return
        y, x = columns
        print('Fitting ', y, 'vs', x)
        f = d.dropna(axis=0, how='any', subset=columns)
        plt.figure()
        ax = f.plot(kind='scatter', x=x, y=y)
        z = np.polyfit(x=f[x], y=f[y], deg=1, full=True)
        p = np.poly1d(z[0])
        f['fit'] = p(f[x])
        f.set_index(x, inplace=True)
        f['fit'].sort_index(ascending=False).plot(ax=ax)
        plt.gca().invert_xaxis()
        plt.savefig((x + '-vs-' + y + '.png').lower())
        plt.close()
        print(z)
        print(p)
        # NOTE(review): this prints SS_explained / SS_residual, not the
        # textbook R^2 (SS_explained / SS_total) -- confirm intent.
        y_mean = np.sum(f['fit'])/len(f['fit'])
        ssr = np.sum((y_mean - f['fit'])**2)
        sst = np.sum((f[y] - f['fit'])**2)
        rsq = ssr / sst
        print(rsq)

    def plot_weekly(self):
        """Box-plot weekly totals of the main drink columns."""
        by_week = self.d.groupby(self.d.index.week).sum()
        print(by_week.describe())
        plt.figure()
        by_week.boxplot(column=['Coffee', 'Tea', 'Alcohol', 'Coke'])
        plt.savefig('coffee-tea-alcohol-coke-weekly.png')
        plt.close()

    def plot(self):
        """Produce the full set of plots and word clouds as PNG files."""
        self.plot_weekly()
        plt.figure()
        self.d.boxplot(column=['SleepDuration'])
        plt.savefig('sleep-duration.png')
        plt.close()
        plt.figure()
        self.d.boxplot(column=['Coffee', 'Tea', 'Alcohol', 'Coke'])
        plt.savefig('coffee-tea-alcohol-coke-daily.png')
        plt.close()
        plt.figure()
        self.d.plot(kind='scatter', x='Alcohol', y='SleepDuration')
        plt.savefig('sleep-vs-alcohol.png')
        plt.close()
        self.do_fit_and_plot(['SleepDuration', 'Alcohol'])
        self.do_fit_and_plot(['SleepDuration', 'AlcoholLag'])
        for col in ['Breakfast', 'Lunch', 'Dinner', 'Snack', 'DrinksList']:
            self.generate_wordcloud(col)

    def run(self):
        """Read the file, derive columns, print summaries, and plot."""
        self.read_file()
        print('Before column addition')
        print(self.d)
        print(self.d.describe())
        self.add_cols()
        print('After column addition')
        print(self.d)
        print(self.d.describe())
        print('Starting plots')
        self.plot()
if __name__ == '__main__':
    # Require exactly one argument: the CSV file to analyze.
    if len(sys.argv) != 2:
        print('Specify a filename')
        exit(1)
    fn = sys.argv[1]  # kept as a module-level name for compatibility
    analyzer = Analyze(fn)
    analyzer.run()
| |
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the importmulti RPC."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class ImportMultiTest (BitcoinTestFramework):
    """Functional test for the importmulti RPC.

    Exercises the supported combinations of address/scriptPubKey, public
    keys, private keys, redeem scripts and the internal/watchonly flags,
    asserting both the RPC result and the resulting wallet state on
    node 1 (node 0 is only used to generate addresses and keys).
    """

    def __init__(self):
        # Two nodes on a fresh chain: node 0 creates addresses/keys,
        # node 1 performs the imports.
        super().__init__()
        self.num_nodes = 2
        self.setup_clean_chain = True

    def setup_network(self):
        """Start the nodes; they are not connected (no sync needed)."""
        self.setup_nodes()

    def run_test (self):
        """Run every importmulti scenario sequentially against node 1."""
        self.log.info("Mining blocks...")
        self.nodes[0].generate(1)
        self.nodes[1].generate(1)
        # Median time of node 1's tip; imports made with "now" are
        # expected to record this timestamp on the address.
        timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']

        # keyword definition
        PRIV_KEY = 'privkey'
        PUB_KEY = 'pubkey'
        ADDRESS_KEY = 'address'
        SCRIPT_KEY = 'script'

        node0_address1 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        node0_address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        node0_address3 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())

        #Check only one address
        assert_equal(node0_address1['ismine'], True)

        #Node 1 sync test
        assert_equal(self.nodes[1].getblockcount(),1)

        #Address Test - before import
        address_info = self.nodes[1].validateaddress(node0_address1['address'])
        assert_equal(address_info['iswatchonly'], False)
        assert_equal(address_info['ismine'], False)

        # RPC importmulti -----------------------------------------------

        # Bitcoin Address
        self.log.info("Should import an address")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": address['address']
            },
            "timestamp": "now",
        }])
        assert_equal(result[0]['success'], True)
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], True)
        assert_equal(address_assert['ismine'], False)
        assert_equal(address_assert['timestamp'], timestamp)
        # Remembered for the timestamp-replacement check at the end.
        watchonly_address = address['address']
        watchonly_timestamp = timestamp

        self.log.info("Should not import an invalid address")
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": "not valid address",
            },
            "timestamp": "now",
        }])
        assert_equal(result[0]['success'], False)
        assert_equal(result[0]['error']['code'], -5)
        assert_equal(result[0]['error']['message'], 'Invalid address')

        # ScriptPubKey + internal
        self.log.info("Should import a scriptPubKey with internal flag")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        result = self.nodes[1].importmulti([{
            "scriptPubKey": address['scriptPubKey'],
            "timestamp": "now",
            "internal": True
        }])
        assert_equal(result[0]['success'], True)
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], True)
        assert_equal(address_assert['ismine'], False)
        assert_equal(address_assert['timestamp'], timestamp)

        # ScriptPubKey + !internal
        self.log.info("Should not import a scriptPubKey without internal flag")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        result = self.nodes[1].importmulti([{
            "scriptPubKey": address['scriptPubKey'],
            "timestamp": "now",
        }])
        assert_equal(result[0]['success'], False)
        assert_equal(result[0]['error']['code'], -8)
        assert_equal(result[0]['error']['message'], 'Internal must be set for hex scriptPubKey')
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], False)
        assert_equal(address_assert['ismine'], False)
        assert_equal('timestamp' in address_assert, False)

        # Address + Public key + !Internal
        self.log.info("Should import an address with public key")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": address['address']
            },
            "timestamp": "now",
            "pubkeys": [ address['pubkey'] ]
        }])
        assert_equal(result[0]['success'], True)
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], True)
        assert_equal(address_assert['ismine'], False)
        assert_equal(address_assert['timestamp'], timestamp)

        # ScriptPubKey + Public key + internal
        self.log.info("Should import a scriptPubKey with internal and with public key")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        request = [{
            "scriptPubKey": address['scriptPubKey'],
            "timestamp": "now",
            "pubkeys": [ address['pubkey'] ],
            "internal": True
        }]
        result = self.nodes[1].importmulti(request)
        assert_equal(result[0]['success'], True)
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], True)
        assert_equal(address_assert['ismine'], False)
        assert_equal(address_assert['timestamp'], timestamp)

        # ScriptPubKey + Public key + !internal
        self.log.info("Should not import a scriptPubKey without internal and with public key")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        request = [{
            "scriptPubKey": address['scriptPubKey'],
            "timestamp": "now",
            "pubkeys": [ address['pubkey'] ]
        }]
        result = self.nodes[1].importmulti(request)
        assert_equal(result[0]['success'], False)
        assert_equal(result[0]['error']['code'], -8)
        assert_equal(result[0]['error']['message'], 'Internal must be set for hex scriptPubKey')
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], False)
        assert_equal(address_assert['ismine'], False)
        assert_equal('timestamp' in address_assert, False)

        # Address + Private key + !watchonly
        self.log.info("Should import an address with private key")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": address['address']
            },
            "timestamp": "now",
            "keys": [ self.nodes[0].dumpprivkey(address['address']) ]
        }])
        assert_equal(result[0]['success'], True)
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], False)
        assert_equal(address_assert['ismine'], True)
        assert_equal(address_assert['timestamp'], timestamp)

        # Address + Private key + watchonly
        self.log.info("Should not import an address with private key and with watchonly")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": address['address']
            },
            "timestamp": "now",
            "keys": [ self.nodes[0].dumpprivkey(address['address']) ],
            "watchonly": True
        }])
        assert_equal(result[0]['success'], False)
        assert_equal(result[0]['error']['code'], -8)
        assert_equal(result[0]['error']['message'], 'Incompatibility found between watchonly and keys')
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], False)
        assert_equal(address_assert['ismine'], False)
        assert_equal('timestamp' in address_assert, False)

        # ScriptPubKey + Private key + internal
        self.log.info("Should import a scriptPubKey with internal and with private key")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        result = self.nodes[1].importmulti([{
            "scriptPubKey": address['scriptPubKey'],
            "timestamp": "now",
            "keys": [ self.nodes[0].dumpprivkey(address['address']) ],
            "internal": True
        }])
        assert_equal(result[0]['success'], True)
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], False)
        assert_equal(address_assert['ismine'], True)
        assert_equal(address_assert['timestamp'], timestamp)

        # ScriptPubKey + Private key + !internal
        self.log.info("Should not import a scriptPubKey without internal and with private key")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        result = self.nodes[1].importmulti([{
            "scriptPubKey": address['scriptPubKey'],
            "timestamp": "now",
            "keys": [ self.nodes[0].dumpprivkey(address['address']) ]
        }])
        assert_equal(result[0]['success'], False)
        assert_equal(result[0]['error']['code'], -8)
        assert_equal(result[0]['error']['message'], 'Internal must be set for hex scriptPubKey')
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], False)
        assert_equal(address_assert['ismine'], False)
        assert_equal('timestamp' in address_assert, False)

        # P2SH address
        # Fund a fresh 2-of-3 multisig so the watch-only import can be
        # checked against listunspent.
        sig_address_1 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        sig_address_2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        sig_address_3 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['address'], sig_address_2['address'], sig_address_3['pubkey']])
        self.nodes[1].generate(100)
        transactionid = self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
        self.nodes[1].generate(1)
        timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
        transaction = self.nodes[1].gettransaction(transactionid)

        self.log.info("Should import a p2sh")
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": multi_sig_script['address']
            },
            "timestamp": "now",
        }])
        assert_equal(result[0]['success'], True)
        address_assert = self.nodes[1].validateaddress(multi_sig_script['address'])
        assert_equal(address_assert['isscript'], True)
        assert_equal(address_assert['iswatchonly'], True)
        assert_equal(address_assert['timestamp'], timestamp)
        p2shunspent = self.nodes[1].listunspent(0,999999, [multi_sig_script['address']])[0]
        assert_equal(p2shunspent['spendable'], False)
        # Without the redeem script the output is not solvable.
        assert_equal(p2shunspent['solvable'], False)

        # P2SH + Redeem script
        sig_address_1 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        sig_address_2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        sig_address_3 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['address'], sig_address_2['address'], sig_address_3['pubkey']])
        self.nodes[1].generate(100)
        transactionid = self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
        self.nodes[1].generate(1)
        timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
        transaction = self.nodes[1].gettransaction(transactionid)

        self.log.info("Should import a p2sh with respective redeem script")
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": multi_sig_script['address']
            },
            "timestamp": "now",
            "redeemscript": multi_sig_script['redeemScript']
        }])
        assert_equal(result[0]['success'], True)
        address_assert = self.nodes[1].validateaddress(multi_sig_script['address'])
        assert_equal(address_assert['timestamp'], timestamp)
        p2shunspent = self.nodes[1].listunspent(0,999999, [multi_sig_script['address']])[0]
        assert_equal(p2shunspent['spendable'], False)
        # With the redeem script the output becomes solvable.
        assert_equal(p2shunspent['solvable'], True)

        # P2SH + Redeem script + Private Keys + !Watchonly
        sig_address_1 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        sig_address_2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        sig_address_3 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['address'], sig_address_2['address'], sig_address_3['pubkey']])
        self.nodes[1].generate(100)
        transactionid = self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
        self.nodes[1].generate(1)
        timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
        transaction = self.nodes[1].gettransaction(transactionid)

        self.log.info("Should import a p2sh with respective redeem script and private keys")
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": multi_sig_script['address']
            },
            "timestamp": "now",
            "redeemscript": multi_sig_script['redeemScript'],
            "keys": [ self.nodes[0].dumpprivkey(sig_address_1['address']), self.nodes[0].dumpprivkey(sig_address_2['address'])]
        }])
        assert_equal(result[0]['success'], True)
        address_assert = self.nodes[1].validateaddress(multi_sig_script['address'])
        assert_equal(address_assert['timestamp'], timestamp)
        p2shunspent = self.nodes[1].listunspent(0,999999, [multi_sig_script['address']])[0]
        assert_equal(p2shunspent['spendable'], False)
        assert_equal(p2shunspent['solvable'], True)

        # P2SH + Redeem script + Private Keys + Watchonly
        sig_address_1 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        sig_address_2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        sig_address_3 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['address'], sig_address_2['address'], sig_address_3['pubkey']])
        self.nodes[1].generate(100)
        transactionid = self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
        self.nodes[1].generate(1)
        timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
        transaction = self.nodes[1].gettransaction(transactionid)

        self.log.info("Should import a p2sh with respective redeem script and private keys")
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": multi_sig_script['address']
            },
            "timestamp": "now",
            "redeemscript": multi_sig_script['redeemScript'],
            "keys": [ self.nodes[0].dumpprivkey(sig_address_1['address']), self.nodes[0].dumpprivkey(sig_address_2['address'])],
            "watchonly": True
        }])
        assert_equal(result[0]['success'], False)
        assert_equal(result[0]['error']['code'], -8)
        assert_equal(result[0]['error']['message'], 'Incompatibility found between watchonly and keys')

        # Address + Public key + !Internal + Wrong pubkey
        self.log.info("Should not import an address with a wrong public key")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": address['address']
            },
            "timestamp": "now",
            "pubkeys": [ address2['pubkey'] ]
        }])
        assert_equal(result[0]['success'], False)
        assert_equal(result[0]['error']['code'], -5)
        assert_equal(result[0]['error']['message'], 'Consistency check failed')
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], False)
        assert_equal(address_assert['ismine'], False)
        assert_equal('timestamp' in address_assert, False)

        # ScriptPubKey + Public key + internal + Wrong pubkey
        self.log.info("Should not import a scriptPubKey with internal and with a wrong public key")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        request = [{
            "scriptPubKey": address['scriptPubKey'],
            "timestamp": "now",
            "pubkeys": [ address2['pubkey'] ],
            "internal": True
        }]
        result = self.nodes[1].importmulti(request)
        assert_equal(result[0]['success'], False)
        assert_equal(result[0]['error']['code'], -5)
        assert_equal(result[0]['error']['message'], 'Consistency check failed')
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], False)
        assert_equal(address_assert['ismine'], False)
        assert_equal('timestamp' in address_assert, False)

        # Address + Private key + !watchonly + Wrong private key
        self.log.info("Should not import an address with a wrong private key")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": address['address']
            },
            "timestamp": "now",
            "keys": [ self.nodes[0].dumpprivkey(address2['address']) ]
        }])
        assert_equal(result[0]['success'], False)
        assert_equal(result[0]['error']['code'], -5)
        assert_equal(result[0]['error']['message'], 'Consistency check failed')
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], False)
        assert_equal(address_assert['ismine'], False)
        assert_equal('timestamp' in address_assert, False)

        # ScriptPubKey + Private key + internal + Wrong private key
        self.log.info("Should not import a scriptPubKey with internal and with a wrong private key")
        address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
        result = self.nodes[1].importmulti([{
            "scriptPubKey": address['scriptPubKey'],
            "timestamp": "now",
            "keys": [ self.nodes[0].dumpprivkey(address2['address']) ],
            "internal": True
        }])
        assert_equal(result[0]['success'], False)
        assert_equal(result[0]['error']['code'], -5)
        assert_equal(result[0]['error']['message'], 'Consistency check failed')
        address_assert = self.nodes[1].validateaddress(address['address'])
        assert_equal(address_assert['iswatchonly'], False)
        assert_equal(address_assert['ismine'], False)
        assert_equal('timestamp' in address_assert, False)

        # Importing existing watch only address with new timestamp should replace saved timestamp.
        assert_greater_than(timestamp, watchonly_timestamp)
        self.log.info("Should replace previously saved watch only timestamp.")
        result = self.nodes[1].importmulti([{
            "scriptPubKey": {
                "address": watchonly_address,
            },
            "timestamp": "now",
        }])
        assert_equal(result[0]['success'], True)
        address_assert = self.nodes[1].validateaddress(watchonly_address)
        assert_equal(address_assert['iswatchonly'], True)
        assert_equal(address_assert['ismine'], False)
        assert_equal(address_assert['timestamp'], timestamp)
        watchonly_timestamp = timestamp

        # restart nodes to check for proper serialization/deserialization of watch only address
        self.stop_nodes()
        self.nodes = self.start_nodes(2, self.options.tmpdir)
        address_assert = self.nodes[1].validateaddress(watchonly_address)
        assert_equal(address_assert['iswatchonly'], True)
        assert_equal(address_assert['ismine'], False)
        assert_equal(address_assert['timestamp'], watchonly_timestamp)

        # Bad or missing timestamps
        self.log.info("Should throw on invalid or missing timestamp values")
        assert_raises_message(JSONRPCException, 'Missing required timestamp field for key',
            self.nodes[1].importmulti, [{
                "scriptPubKey": address['scriptPubKey'],
            }])
        assert_raises_message(JSONRPCException, 'Expected number or "now" timestamp value for key. got type string',
            self.nodes[1].importmulti, [{
                "scriptPubKey": address['scriptPubKey'],
                "timestamp": "",
            }])
if __name__ == '__main__':
    ImportMultiTest().main()
| |
import numpy as np
import Tile
import random
import Move
from copy import deepcopy
class Board:
    ''' the Board class. to be treated as an abstract class. Use a subclass for the actual board with more
    specific behaviour '''
    # integer identifiers for the four robots/colours
    BLUE = 0
    RED = 1
    GREEN = 2
    YELLOW = 3
    # maps the textual colour names used in board files to the integer codes above
    conversionDict = {"BLUE":0, "RED":1, "GREEN":2, "YELLOW":3}
    def __init__(self, rows, cols):
        self.rows = rows
        self.cols = cols
        self.allMoves = Move.AllMoves()
        self.previousMove = None
    def initializeTiles(self):
        ''' this method initializes the array of tiles randomly.
        this includes wall placement but not robots or targets '''
        raise NotImplementedError("Please implement this method")
    def initializeTargetPositions(self):
        ''' this method places the 17 targets on the board.
        consider an abstract method '''
        raise NotImplementedError("Please implement this method")
    def setTarget(self):
        ''' this method will pick a random target from the target list and make that the currentTarget for the game '''
        self.currentTarget = self.targetPositions.pop()
        self.array[self.currentTarget[0], self.currentTarget[1]].target = True
    def initializeRobotPositions(self):
        ''' this method places the four robots randomly on the board.
        self.array gets updated such that the four tiles know they posses a robot, and
        a dictionary containing the robot positions is returned '''
        robotPositions = dict()
        for robot in xrange(4):
            # the while loop ensures that robots have unique positions.
            while(True):
                iCoord = random.randint(0, self.rows-1)
                jCoord = random.randint(0, self.cols-1)
                if (iCoord, jCoord) in robotPositions.values():
                    continue # another robot is already at this position so try again
                elif self.array[iCoord, jCoord].target is not None:
                    continue # never place a robot on a target tile
                else:
                    robotPositions[robot] = (iCoord, jCoord)
                    self.array[iCoord, jCoord].robot = robot
                    break # move onto the next robot in the outer for loop
        return robotPositions
    def printBoard(self):
        ''' this method displays a board state on stdout (robots, target, walls) '''
        result = "*"
        for i in xrange(self.cols):
            result += "-*"
        print(result)
        for i in xrange(self.rows):
            result1 ="|"
            result2 = "*"
            for j in xrange (self.cols):
                '''checking tile content'''
                if (self.array[i,j].robot==None):
                    if (self.array[i,j].target == True):
                        result1 += "X"
                    else:
                        result1 +=" "
                elif (self.array[i,j].robot==0):
                    result1 += "B"
                elif (self.array[i,j].robot==1):
                    result1 += "R"
                elif (self.array[i,j].robot==2):
                    result1 += "G"
                elif (self.array[i,j].robot==3):
                    result1 += "Y"
                '''checking east wall'''
                if(self.array[i,j].wallDict["EAST"]==True):
                    result1+="|"
                else :
                    result1+= " "
                '''checking south wall'''
                if(self.array[i,j].wallDict["SOUTH"]==True):
                    result2+="-*"
                else :
                    result2+= " *"
            print(result1)
            print (result2)
    def validMove(self, moveInt):
        ''' returns whether the move is valid or not.
        i.e. will the robot actual move at all. '''
        move = self.allMoves.getMoveAtIndex(moveInt)
        position = self.robotPositions[move.colour]
        tile = self.array[position]
        if tile.wallDict[move.direction]:
            return False # there is a wall in the way!
        adjacentTile = self.getAdjacentTile(tile, move.direction)
        if adjacentTile is None:
            return False # just to be safe, however at this point since there is no wall in that direction, the
                         # adjacent tile shouldn't be none
        if adjacentTile.robot is not None:
            return False # a robot in the way!
        return True # nothing in the way
    def makeMoveByInt(self, moveInt):
        ''' this method will take an integer and make the move that that integer corresponds to.
        in this way, we can store moves as just an integer and convert them as needed.
        board will contain an AllMoves object where the move is grabbed from '''
        move = self.allMoves.getMoveAtIndex(moveInt)
        self.previousMove = moveInt # set the previous move
        return self.makeMove(move)
    def makeRandomMove(self):
        ''' this method will make a random VALID move and return the move number.
        we can call this move instead of doing a possible "useless" move '''
        # NOTE(review): if no valid non-opposite move exists this loops forever;
        # assumed not to happen on a legal board — confirm with callers.
        opposite = self.allMoves.getOppositeMove(self.previousMove)
        while True:
            moveToMake = random.randint(0,15)
            if moveToMake == opposite:
                continue # would be foolish to move one way then back again right after
            if self.validMove(moveToMake):
                break
            # otherwise not valid so keep looking
        # found a valid move that isn't the opposite of the previous move, so make it
        self.makeMoveByInt(moveToMake)
        return moveToMake
    def makeMoveByIntInverse(self, moveInt):
        ''' this method will take an integer and make the inverse move that that integer corresponds to.
        in this way, we can store moves as just an integer and convert them as needed.
        board will contain an AllMoves object where the move is grabbed from '''
        # BUGFIX: flip a copy of the move; mutating the object returned by
        # getMoveAtIndex permanently reversed the shared Move stored in AllMoves,
        # corrupting every later use of that move index.
        move = deepcopy(self.allMoves.getMoveAtIndex(moveInt))
        if(move.direction == 'NORTH'):
            move.direction='SOUTH'
        elif (move.direction == 'WEST'):
            move.direction='EAST'
        elif (move.direction == 'SOUTH'):
            move.direction='NORTH'
        else:
            move.direction='WEST'
        return self.makeMove(move)
    def makeMove(self, move):
        ''' given a move, make it on the board (so move the colour in the direction and update the array)
        if no movement is possible then False is returned to indicate nothing happened, otherwise True is returned'''
        startPosition = self.robotPositions[move.colour] # initalize the endPosition to be the starting position
        # now see how far the robot can move in the direction
        currentTile = self.array[startPosition]
        somethingHappend = False
        while True:
            if currentTile.wallDict[move.direction]:
                # there is a wall in the direction we need to move, so final spot
                break
            # since an edge tile will always have a wall, if we made it to here then we know we can find the adjacent tile
            adjacentTile = self.getAdjacentTile(currentTile, move.direction)
            if adjacentTile.robot != None:
                # there is a robot blocking us, so final spot
                break
            somethingHappend = True # the robot was able to move at least one tile
            # no wall or robot in the way, so move the robot onto the adjacent tile
            adjacentTile.robot = currentTile.robot
            currentTile.robot = None
            self.robotPositions[move.colour] = adjacentTile.position
            currentTile = adjacentTile
        # the currentTile is the ending position
        return somethingHappend
    def getAdjacentTile(self, tile, direction):
        ''' given a tile and a direction (name or integer code 0=N,1=E,2=S,3=W),
        this returns the tile adjacent in that direction.
        return None if an edge '''
        i, j = tile.position
        if direction == "NORTH" or direction ==0:
            i -= 1
        elif direction == "SOUTH" or direction == 2:
            i += 1
        elif direction == "EAST" or direction ==1:
            j += 1
        elif direction == "WEST" or direction ==3:
            j -= 1
        if i < 0 or i >= self.rows or j < 0 or j >= self.cols:
            # out of bounds
            return None
        return self.array[i,j]
    def endState(self):
        ''' returns boolean of whether the board is in an end state (i.e. is the right robot at the target '''
        try:
            return self.currentTarget == self.robotPositions[0] #we assume blue is always the target robot
        except (AttributeError, KeyError):
            # BUGFIX: narrowed from a bare except, which hid unrelated errors.
            # currentTarget (or the blue robot) not set yet.
            print("currentTarget not yet set!")
            return False
    def resetRobots(self, resetPositions):
        ''' this method takes a dictionary with robot positions to set self.robot positions to.
        it does that as well as updating the tiles in the array to reflect the change. '''
        for i in xrange(4):
            originalPosition = self.robotPositions[i]
            originalTile = self.array[originalPosition]
            # only clear the tile if no robot is being reset onto it
            if not originalTile.position in resetPositions.values():
                originalTile.robot = None
            newPosition = resetPositions[i]
            newTile = self.array[newPosition]
            newTile.robot = i
            self.robotPositions[i] = resetPositions[i]
    def validateMoveSequence(self, sequence):
        ''' takes a move sequence (as integers!) as input as validates if it results in an end state.
        the board is restored to its original robot layout before returning '''
        resetPositions = deepcopy(self.robotPositions)
        for i in sequence:
            self.makeMoveByInt(i)
        valid = self.endState()
        self.resetRobots(resetPositions)
        return valid
    def getTileOfRobot(self, robot):
        ''' given a robot (integer) returns the tile which it is occupying '''
        position = self.robotPositions[robot]
        return self.array[position]
    def correctRobotTiles(self):
        ''' a debugging method to see if all 4 robots are accounted for on a tile '''
        robots = {}
        # BUGFIX: scan the actual board dimensions instead of a hard-coded 16x16
        for j in xrange(self.rows):
            for k in xrange(self.cols):
                if self.array[j,k].robot != None:
                    robots[self.array[j,k].robot] = (j,k)
        if len(robots) != 4:
            print("Too few tiles think they have a robot! {0}".format(robots))
            return False
        return True
    def getRay(self,r,c,direction):
        ''' returns a list of tile that casts from (r,c) in a direction
        until a wall is present
        not including tile(r,c) itself
        0 = NORTH
        1 = EAST
        2 = SOUTH
        3 = WEST
        '''
        ray =[]
        currentTile = self.array[r,c]
        if (direction ==0 and not currentTile.wallDict["NORTH"] ): #NORTH
            currentTile = self.array[r-1,c]
            while (not currentTile.wallDict["NORTH"]):
                ray.append(currentTile)
                currentTile = self.array[currentTile.position[0]-1,c]
            ray.append(currentTile)
        elif (direction == 1 and not currentTile.wallDict["EAST"]): # EAST
            currentTile = self.array[r,c+1]
            while (not currentTile.wallDict["EAST"]):
                ray.append(currentTile)
                currentTile = self.array[r,currentTile.position[1]+1]
            ray.append(currentTile)
        elif (direction == 2 and not currentTile.wallDict["SOUTH"]): # SOUTH
            currentTile = self.array[r+1,c]
            while (not currentTile.wallDict["SOUTH"]):
                ray.append(currentTile)
                currentTile = self.array[currentTile.position[0]+1,c]
            ray.append(currentTile)
        elif (direction == 3 and not currentTile.wallDict["WEST"]): # WEST
            currentTile = self.array[r,c-1]
            # BUGFIX: the loop checked the EAST wall while walking west, which
            # overran the board edge (and wrapped around via negative indexing).
            while (not currentTile.wallDict["WEST"]):
                ray.append(currentTile)
                currentTile = self.array[r,currentTile.position[1]-1]
            ray.append(currentTile)
        return ray
    def paintLB(self,tileList, LB):
        '''recursive function to calculate lowerbound on each tile,
        each tile should starte with score -1
        at the end of the function, if some tile was not visited ,
        that means it is not reachable from the target tile
        thus would also have a score of -1
        input arguement should be the a list of first four lines of tiles in the four direction of the target tile
        '''
        if (not tileList):# if the queue is empty return none
            return
        newList =[]
        for tile in tileList:# each tile at this level (distance away from target)
            r = tile.position[0]
            c = tile.position[1]
            if (not tile.wallDict["NORTH"]):## check all the tile to the north direction of the current tile
                temp = self.array[r-1,c] ## move one tile to the north
                if (temp.lowerBound==-1 or temp.lowerBound == LB): ## if -1 then unvisited, if LB some other tile from this same level has visited this tile before but in a different direction, so we need to check
                    while(not temp.wallDict["NORTH"]): ##loop keep going north
                        if (temp.lowerBound == -1): ## unvisited
                            temp.lowerBound = LB
                            newList.append(temp) ##add to queue
                        temp=self.array[temp.position[0]-1,c] ## loop to next
                    if (temp.lowerBound==-1): ## loop stoped one tile before the last tile , now is checking
                        temp.lowerBound=LB
                        newList.append(temp)
            if (not tile.wallDict["EAST"]):## check all tiles to the east of current tile
                temp = self.array[r,c+1]
                if (temp.lowerBound==-1 or temp.lowerBound == LB):
                    while(not temp.wallDict["EAST"]):
                        if (temp.lowerBound == -1):
                            temp.lowerBound = LB
                            newList.append(temp)
                        temp=self.array[r,temp.position[1]+1]
                    if (temp.lowerBound==-1):
                        temp.lowerBound=LB
                        newList.append(temp)
            if (not tile.wallDict["SOUTH"]):## check all the tiles to the south of the current tile
                temp = self.array[r+1,c]
                if (temp.lowerBound==-1 or temp.lowerBound == LB):
                    while(not temp.wallDict["SOUTH"]):
                        if (temp.lowerBound == -1):
                            temp.lowerBound = LB
                            newList.append(temp)
                        temp=self.array[temp.position[0]+1,c]
                    if (temp.lowerBound==-1):
                        temp.lowerBound=LB
                        newList.append(temp)
            if (not tile.wallDict["WEST"]):## check all the tiles to the west
                temp = self.array[r,c-1]
                if (temp.lowerBound==-1 or temp.lowerBound == LB):
                    while(not temp.wallDict["WEST"]):
                        if (temp.lowerBound == -1):
                            temp.lowerBound = LB
                            newList.append(temp)
                        temp=self.array[r,temp.position[1]-1]
                    if (temp.lowerBound==-1):
                        temp.lowerBound=LB
                        newList.append(temp)
        self.paintLB(newList,LB+1)## proceed to the next level with new list contains the queue
    def lowerBoundPreProc(self):
        '''pre-process the board with lower bound heuristics'''
        row,col =self.currentTarget
        self.array[row,col].lowerBound=0
        initList =[]
        for i in xrange(4):
            initList += self.getRay(row,col,i)
        for t in initList:
            t.lowerBound = 1
        self.paintLB(initList,2)
    def printLBs(self):
        '''this function displays the board lower-bound heuristics'''
        for i in xrange(self.rows):
            a =""
            for j in xrange(self.cols):
                a+=str(self.array[i,j].lowerBound)
            print (a)
    def printRBs(self):
        '''this function displays the board reachability heuristics'''
        for i in xrange(self.rows):
            a =""
            for j in xrange(self.cols):
                # NOTE(review): both branches appear to append identical padding;
                # kept as-is since this is a debug display only — confirm intent.
                if (self.array[i,j].reachable>=10):
                    a+=" "+str(self.array[i,j].reachable)+" "
                else:
                    a+=" "+str(self.array[i,j].reachable)+" "
            print (a)
    def CalcReachability(self,firsttime):
        ''' calculate reachability
        input boolean firsttime indicate if erase previous information, true = erase
        return n s
        n: number of tiles can be reached from this target
        S : sum of increase and decrease from a previous state of the board.
        increase decrease in respect of number of moves to reach target, the tile.reachable
        '''
        r,c =self.currentTarget
        if(firsttime): ## clean RB
            for i in xrange(self.rows):
                for j in xrange(self.cols):
                    self.array[i,j].reachable=-1
        for i in xrange(self.rows): ## clean visited flag, is only used in paintRB
            for j in xrange(self.cols):
                self.array[i,j].check =False
        a=[]
        self.array[r,c].reachable =0
        self.array[r,c].check = True
        a.append(self.array[r,c])
        n,s = self.paintRB(a,1)
        return n,s
    def shootRay(self,direction,r,c,RB,newList):
        ''' helper for paintRB: walk from (r,c) in the given direction, scoring every
        unvisited tile with RB and collecting "interesting" tiles (wall asymmetry or
        adjacent non-blue robot) into newList for the next BFS level.
        returns (number of newly reached tiles, score delta vs previous state) '''
        # NOTE(review): the `robot > 0` comparisons rely on Python 2 semantics where
        # None > 0 is False; under Python 3 they would raise TypeError when the
        # tile has no robot — confirm before porting.
        direct = ["NORTH","EAST","SOUTH","WEST"]
        pace =[[-1,0],[0,1],[1,0],[0,-1]]
        nums=0
        sums=0
        tile = self.array[r,c]
        if (not tile.wallDict[direct[direction]] and (tile.wallDict[direct[(direction+2)%4]] or self.getAdjacentTile(tile,(direction+2)%4).robot>0) and (tile.robot==None or tile.robot ==0)):
            temp = self.array[r+pace[direction][0],c+pace[direction][1]] ## one step
            while(not temp.wallDict[direct[direction]] and (temp.robot == None or temp.robot==0)): ## looping
                if (temp.check == False):
                    temp.check =True ## calc score
                    nums +=1
                    if(temp.reachable<RB):
                        sums -= 1
                    elif (temp.reachable >RB):
                        sums+=1
                    temp.reachable = RB
                    tile1 = self.getAdjacentTile(temp,(direction+1)%4)
                    tile2 = self.getAdjacentTile(temp,(direction+3)%4)
                    if (temp.wallDict[direct[(direction+1)%4]] != temp.wallDict[direct[(direction+3)%4]] or (tile1!=None and tile1.robot!=0) or (tile2!=None and tile2.robot!=0)):
                        newList.append(temp)
                temp=self.array[temp.position[0]+pace[direction][0],temp.position[1]+pace[direction][1]]
            if (temp.check == False and (temp.robot == None or temp.robot==0)):## last tile after the loop
                temp.check =True
                nums +=1
                if(temp.reachable<RB):
                    sums -= 1
                elif (temp.reachable >RB):
                    sums +=1
                temp.reachable = RB
                tile1 = self.getAdjacentTile(temp,(direction+1)%4)
                tile2 = self.getAdjacentTile(temp,(direction+3)%4)
                if (temp.wallDict[direct[(direction+1)%4]] != temp.wallDict[direct[(direction+3)%4]] or (tile1!=None and tile1.robot!=0) or (tile2!=None and tile2.robot!=0)):
                    newList.append(temp)
        return nums,sums
    def paintRB(self,tileList,RB):
        '''recursive function to calculate Reachability on each tile,
        each tile should starte with score -1 only if it was the first time painting
        at the end of the function, if some tile was not visited ,
        that means it is not reachable from the target tile
        thus would also have a score of -1
        input arguement should be the a list of tiles
        '''
        nums =0 ## number of tiles can be reached
        sums =0 ## sum of score : +1 for RB decrease from previous state, -1 for RB increase from previous state
        if (not tileList):
            return nums,sums
        newList =[]
        for tile in tileList:
            r = tile.position[0]
            c = tile.position[1]
            for i in xrange(4):
                n,s =self.shootRay(i,r,c,RB,newList)
                nums+=n
                sums+=s
        num1, sum1 = self.paintRB(newList,RB+1)
        nums += num1
        sums += sum1
        return nums, sums
    def correctWall(self):
        ''' this function checks the current board is legal and corrects any walls with
        illegal placement: interior walls must be mirrored on both adjacent tiles and
        every edge tile must have a wall facing outwards '''
        r = self.rows
        c = self.cols
        for i in xrange(r):
            for j in xrange(c):
                if(i>0):
                    t = self.array[i,j].wallDict["NORTH"] or self.array[i-1,j].wallDict["SOUTH"]
                    self.array[i,j].wallDict["NORTH"]=t
                    self.array[i-1,j].wallDict["SOUTH"] = t
                else:
                    self.array[i,j].wallDict["NORTH"]=True
                if(i<r-1):
                    t = self.array[i+1,j].wallDict["NORTH"] or self.array[i,j].wallDict["SOUTH"]
                    self.array[i+1,j].wallDict["NORTH"]=t
                    self.array[i,j].wallDict["SOUTH"] = t
                else:
                    self.array[i,j].wallDict["SOUTH"]=True
                if(j>0):
                    t = self.array[i,j].wallDict["WEST"] or self.array[i,j-1].wallDict["EAST"]
                    self.array[i,j].wallDict["WEST"]=t
                    self.array[i,j-1].wallDict["EAST"] =t
                else:
                    self.array[i,j].wallDict["WEST"]=True
                if(j<c-1):
                    t = self.array[i,j+1].wallDict["WEST"] or self.array[i,j].wallDict["EAST"]
                    self.array[i,j+1].wallDict["WEST"]=t
                    self.array[i,j].wallDict["EAST"] = t
                else:
                    self.array[i,j].wallDict["EAST"]=True
############################## RandomBoard Subclass ####################################
class RandomBoard(Board):
    ''' the random board which initializes the tiles randomly '''
    def __init__(self, rows, cols):
        Board.__init__(self, rows, cols) # call super constructor
        self.array = self.initializeTiles()
        self.targetPositions = self.initializeTargetPositions()
        self.robotPositions = self.initializeRobotPositions()
        self.correctWall()
    def reinitializeTileWithPercentage(self,percent):
        ''' regenerate the tile array using corner walls with the given probability
        (percent out of 100), then re-seed target, heuristics and robots '''
        self.array = self.genTileWithCorner(percent)
        self.correctWall()
        self.targetPositions = self.initializeTargetPositions()
        self.setTarget()
        self.lowerBoundPreProc()
        self.robotPositions = self.initializeRobotPositions()
    def initializeTiles(self):
        ''' this method initializes the array of tiles randomly.
        this includes wall placement but not robots or targets '''
        result = np.empty((self.rows, self.cols), dtype=object)
        # for each position on the board, generate a random tile (the wall placement)
        for i in xrange(self.rows):
            for j in xrange(self.cols):
                result[i,j] = self.generateRandomTile((i,j))
        return result
    def initializeRobotPositionsWithReachability(self):
        '''this function initializes robots within the reachable tiles only
        (tiles whose lowerBound heuristic is not -1)'''
        # BUGFIX: 'self' was missing from the signature, so calling this method
        # always raised a TypeError.
        robotPositions = dict()
        for robot in xrange(4):
            # the while loop ensures that robots have unique positions.
            while(True):
                iCoord = random.randint(0, self.rows-1)
                jCoord = random.randint(0, self.cols-1)
                if (iCoord, jCoord) in robotPositions.values():
                    continue # another robot is already at this position so try again
                elif self.array[iCoord, jCoord].target is not None:
                    continue # never place a robot on the target tile
                elif self.array[iCoord, jCoord].lowerBound == -1:
                    continue # tile is unreachable from the target
                else:
                    robotPositions[robot] = (iCoord, jCoord)
                    self.array[iCoord, jCoord].robot = robot
                    break # move onto the next robot in the outer for loop
        return robotPositions
    def generateRandomTile(self, position):
        ''' build a tile at *position* with each of its four walls independently
        present or absent at random; no robot and no target '''
        wallDict = dict()
        for direction in ["NORTH", "SOUTH", "EAST", "WEST"]:
            wallDict[direction] = random.choice([True, False])
        return Tile.Tile(position, None, None, wallDict) # return a tile with random walls and None robot/target
    def initializeTargetPositions(self):
        ''' this method places a single target randomly on the board and
        returns it as a one-element list '''
        # generalized: use the actual board dimensions instead of hard-coded 16x16
        result = [(random.randint(0, self.rows-1), random.randint(0, self.cols-1))]
        return result
    def genTileWithCorner(self,percent):
        ''' this method initializes the array of tiles randomly.
        this includes wall placement but not robots or targets
        only produces corner walls with input probability out of 100'''
        result = np.empty((self.rows, self.cols), dtype=object)
        # for each position on the board, generate a random tile (the wall placement)
        for i in xrange(self.rows):
            for j in xrange(self.cols):
                x = random.randint(0,100)
                if (x < percent):
                    result[i,j] = self.getConner((i,j))
                else:
                    result[i,j] =Tile.Tile((i,j), None, None, {"NORTH" : False, "EAST" : False,"WEST" : False, "SOUTH":False})
        return result
    def getConner(self,position):
        ''' return a tile at *position* with one of the four L-shaped corner wall
        configurations chosen at random '''
        a = [{"NORTH" : True, "EAST" : True,"WEST" : False, "SOUTH":False},
             {"NORTH" : True, "EAST" : False,"WEST" : True, "SOUTH":False},
             {"NORTH" : False, "EAST" : False,"WEST" : True, "SOUTH":True},
             {"NORTH" : False, "EAST" : True,"WEST" : False, "SOUTH":True}]
        x = random.randint(0,3)
        return Tile.Tile(position, None, None, a[x])
########################### StandardBoard Subclass ##############################
class StandardBoard(Board):
    ''' the standard board hard codes the board array to be a built-in one from the game '''
    # bit values of each wall direction inside a board-file hex digit:
    # 1 = NORTH, 2 = EAST, 4 = SOUTH, 8 = WEST
    WALL_BITS = ((1, "NORTH"), (2, "EAST"), (4, "SOUTH"), (8, "WEST"))
    def __init__(self, rows, cols, inputFileName):
        Board.__init__(self, rows, cols) # call super constructor
        # set the array, robot, and target positions to be empty for now
        self.array = np.empty((self.rows, self.cols), dtype=object)
        self.targetPositions = []
        self.robotPositions = dict()
        # now read the board information from the input file
        self.readFromFile(inputFileName)
    def readFromFile(self, inputFileName):
        ''' standard board reads an input text file with a board representation to set the board.
        hex numbers 0-f each represent one of the 2^4 different wall configurations. A text file
        consists of 16 lines with a hex number for each of the 16 tiles, followed by the robot and target positions.'''
        lineCount = 0
        # renamed from 'file': avoid shadowing the builtin
        with open(inputFileName, 'r') as boardFile:
            for line in boardFile:
                line = line.strip()
                if lineCount < self.rows:
                    self.processArrayLine(line, lineCount)
                elif lineCount < self.rows+4:
                    self.processRobotLine(line)
                else:
                    self.processTargetLine(line)
                lineCount += 1
    def processArrayLine(self, line, lineCount):
        ''' pass an array line from the input file. This method sets that line's tiles in the np array '''
        for j in xrange(self.cols):
            position = (lineCount, j)
            self.array[lineCount, j] = self.generateTileFromNumber(line[j], position)
    def processRobotLine(self, line):
        ''' called on a line from the input board that is giving the location of a robot,
        formatted as COLOUR=i,j '''
        for j in xrange(len(line)):
            if line[j] == "=":
                iCoord, jCoord = line[j+1:].split(",")
                robotINT = Board.conversionDict[line[:j]]
                # BUGFIX: the numpy array must be indexed with integers; the raw
                # string coordinates from the file raised an IndexError here.
                position = (int(iCoord), int(jCoord))
                self.robotPositions[robotINT] = position
                self.array[position].robot = robotINT
    def processTargetLine(self, line):
        ''' called on a line from the input board that is giving the locations of the targets,
        formatted as T:i,j i,j ... '''
        # also guards against an empty line (line[0] raised IndexError before)
        if not line.startswith("T"):
            # called on the wrong line
            return
        targetCoords = line[2:].split(" ")
        for coord in targetCoords:
            iCoord, jCoord = coord.split(",")
            # BUGFIX: integer indices here as well, not the raw strings
            position = (int(iCoord), int(jCoord))
            self.targetPositions.append(position)
            self.array[position].target = True
    def generateTileFromNumber(self, number, positon):
        ''' when reading from the txt file, this is called to process a given number.
        Each hex digit encodes a wall bitmask (see WALL_BITS). A tile with this
        configuration is returned, or None (after logging) for invalid input. '''
        try:
            walls = int(number, 16)
        except (ValueError, TypeError):
            print("Error no wall configuration for this input number = {}!".format(number))
            return None
        if not 0 <= walls <= 15:
            print("Error no wall configuration for this input number = {}!".format(number))
            return None
        directions = [d for bit, d in StandardBoard.WALL_BITS if walls & bit]
        return self.generateTileFromDirections(directions, positon)
    def generateTileFromDirections(self, directionsList, position):
        ''' pass a list of directions which posses a wall. this method returns a tile object with those wall
        directions set to True in its wallDict '''
        wallDict = dict()
        # first initialize no walls (all False)
        for direction in ["NORTH", "SOUTH", "EAST", "WEST"]:
            wallDict[direction] = False
        #now for the directions passed to the function, set them to True
        for direction in directionsList:
            wallDict[direction] = True
        return Tile.Tile(position, None, False, wallDict) # return a tile with walls and None robot and False target
| |
#!/usr/bin/env python
"""
Weboob main Python wrapper
This file is a wrapper around Weboob, which is spawned by Kresus backend and
prints fetched data as a JSON export on stdout, so that it could be imported
easily in Kresus' NodeJS backend.
.. note:: Useful environment variables are
- ``WEBOOB_DIR`` to specify the path to the root Weboob folder (with
modules and Weboob code)
- ``KRESUS_DIR`` to specify the path to Kresus data dir.
- ``WEBOOB_SOURCES_LIST`` to specify a Weboob sources.list to use instead
of the default one.
Commands are parsed from ``argv``. Available commands are:
* ``version`` to get the Weboob version.
* ``test`` to test Weboob is installed and a working connector can be
built.
* ``update`` to update Weboob modules.
* ``accounts --module BANK --login LOGIN EXTRA_CONFIG`` to get accounts from bank
``BANK`` using the provided credentials and the given extra
configuration options for the Weboob module (passed as --field NAME VALUE, NAME being the name
of the field and VALUE its value). The password is passed by the environment variable
``KRESUS_WEBOOB_PWD``.
* ``operations --module BANK --login LOGIN EXTRA_CONFIG`` to get a list of
operations from bank ``BANK`` using the provided credentials and given
extra configuration options (passed as for ``account`` command).
"""
from __future__ import print_function, unicode_literals
import json
import logging
import os
import shutil
import sys
import traceback
import argparse
import io
from copy import deepcopy
from datetime import datetime
from requests import ConnectionError, HTTPError # pylint: disable=redefined-builtin
# Ensure unicode is also defined in python 3.
# Under Python 2 the self-assignment is a no-op; under Python 3 the name does
# not exist, so the NameError branch aliases it to str. Later code can then
# call unicode(...) on either interpreter.
try:
    unicode = unicode # pylint: disable=redefined-builtin,invalid-name,self-assigning-variable
except NameError:
    unicode = str # pylint: disable=invalid-name
def fail(error_code, error_short, error_long):
    """
    Log error, return error JSON on stdin and exit with non-zero error code.
    :param error_code: Kresus-specific error code. See ``shared/errors.json``.
    :param error_short: Short error string description.
    :param error_long: Long error string description.
    """
    # Compose the long message from both parts when a long description exists.
    if error_long is None:
        error_message = error_short
    else:
        error_message = "%s\n%s" % (error_short, error_long)
    print(json.dumps({
        'error_code': error_code,
        'error_short': error_short,
        'error_message': error_message
    }))
    sys.exit(1)
# Load errors description
# The JSON file lives in the Kresus source tree and is shared with the NodeJS
# backend, so both sides agree on the numeric error codes.
ERRORS_PATH = os.path.join(
    os.path.dirname(os.path.abspath(__file__)), # This script directory
    '..', 'shared', 'errors.json'
)
with open(ERRORS_PATH, 'r') as f:
    ERRORS = json.load(f)
# Module-level aliases for the error codes used throughout this script.
ACTION_NEEDED = ERRORS['ACTION_NEEDED']
AUTH_METHOD_NYI = ERRORS['AUTH_METHOD_NYI']
UNKNOWN_MODULE = ERRORS['UNKNOWN_WEBOOB_MODULE']
INVALID_PASSWORD = ERRORS['INVALID_PASSWORD']
EXPIRED_PASSWORD = ERRORS['EXPIRED_PASSWORD']
GENERIC_EXCEPTION = ERRORS['GENERIC_EXCEPTION']
INVALID_PARAMETERS = ERRORS['INVALID_PARAMETERS']
NO_ACCOUNTS = ERRORS['NO_ACCOUNTS']
WEBOOB_NOT_INSTALLED = ERRORS['WEBOOB_NOT_INSTALLED']
INTERNAL_ERROR = ERRORS['INTERNAL_ERROR']
NO_PASSWORD = ERRORS['NO_PASSWORD']
CONNECTION_ERROR = ERRORS['CONNECTION_ERROR']
BROWSER_QUESTION = ERRORS['BROWSER_QUESTION']
def fail_unset_field(field, error_type=INVALID_PARAMETERS):
    """
    Wrapper around ``fail`` for the specific case where a required field is not
    set.
    :param field: The name of the required field.
    :param error_type: A possibility to overload the type of error thrown.
                       Defaults to ``INVALID_PARAMETERS``.
    """
    message = '%s shall be set to a non empty string' % field
    fail(error_type, message, None)
# Put the weboob path at the top of the current python path.
# This lets a local Weboob checkout (WEBOOB_DIR) take precedence over any
# system-wide installation.
if 'WEBOOB_DIR' in os.environ and os.path.isdir(os.environ['WEBOOB_DIR']):
    sys.path.insert(0, os.environ['WEBOOB_DIR'])
# Import Weboob core
# On failure, report a WEBOOB_NOT_INSTALLED error on stdout and exit (fail()
# never returns), so Kresus gets a structured error instead of a traceback.
try:
    from weboob.capabilities.base import empty
    from weboob.capabilities.bank import Transaction
    from weboob.core import Weboob
    from weboob.core.repositories import IProgress
    from weboob.exceptions import (
        ActionNeeded,
        AuthMethodNotImplemented,
        BrowserIncorrectPassword,
        BrowserPasswordExpired,
        BrowserQuestion,
        NoAccountsException,
        ModuleInstallError,
        ModuleLoadError
    )
    from weboob.tools.backend import Module
    from weboob.tools.log import createColoredFormatter
    from weboob.tools.json import WeboobEncoder
except ImportError as exc:
    fail(
        WEBOOB_NOT_INSTALLED,
        ('Is weboob correctly installed? Unknown exception raised: %s.' %
         unicode(exc)),
        traceback.format_exc()
    )
def init_logging(level, is_prod):
    """
    Initialize loggers.
    :param level: Minimal severity to log.
    :param is_prod: whether we're running in production or not.
    """
    fmt = (
        '%(asctime)s:%(levelname)s:%(name)s:%(filename)s:'
        '%(lineno)d:%(funcName)s %(message)s'
    )
    handler = logging.StreamHandler(sys.stderr)
    # Only output colored logging if not running in production.
    if is_prod:
        formatter = logging.Formatter(fmt)
    else:
        formatter = createColoredFormatter(sys.stderr, fmt)
    handler.setFormatter(formatter)
    root_logger = logging.getLogger()
    root_logger.setLevel(level)
    root_logger.addHandler(handler)
class DictStorage():
    """
    In-memory stand-in for the Weboob Storage class, backed by a plain dict.
    """
    # Sentinel distinguishing "path not found" from a stored None value.
    _MISSING = object()

    def __init__(self, obj):
        self.values = deepcopy(obj)

    def load(self, *args, **kwargs):
        """
        The load method is meaningless when a 'dict' storage is used.
        """
        pass  # pylint: disable=unnecessary-pass

    def save(self, *args, **kwargs):
        """
        The save method is meaningless when a 'dict' storage is used.
        """
        pass  # pylint: disable=unnecessary-pass

    def _descend(self, path):
        """
        Walk *path* (a sequence of keys) from the root of the storage.
        Returns the node found, or ``_MISSING`` if the path does not exist.
        """
        node = self.values
        for key in path:
            try:
                node = node[key]
            except KeyError:
                return DictStorage._MISSING
        return node

    def set(self, *args):
        """
        This method allows to set a value at a given path in the storage.
        :param: ('path', 'to', 'the', 'value', value)
        sets self.values['path']['to']['the']['value'] = value
        """
        node = self.values
        # Create intermediate dicts as needed down to the parent of the leaf.
        for key in args[:-2]:
            node = node.setdefault(key, {})
        node[args[-2]] = args[-1]

    def delete(self, *args):
        """
        This method allows to delete a value at a given path in the storage.
        :param: ('path', 'to', 'the', 'value')
        deletes self.values['path']['to']['the']['value']
        """
        parent = self._descend(args[:-1])
        if parent is not DictStorage._MISSING:
            parent.pop(args[-1], None)

    def get(self, *args, **kwargs):
        """
        This method allows to get a value at a given path in the storage.
        :param: ('path', 'to', 'the', 'value')
        :param default: The default value to be returned if the path does not exist.
        returns self.values['path']['to']['the']['value']
        """
        found = self._descend(args)
        if found is DictStorage._MISSING:
            return kwargs.get('default')
        return found

    def dump(self):
        """
        Returns the full storage.
        """
        return self.values
class DummyProgress(IProgress):
    """
    Silent progress reporter, used to mute the messages weboob emits while
    installing or updating modules.
    """
    def progress(self, percent, message):
        """
        Swallow progress updates entirely.
        """
        pass  # pylint: disable=unnecessary-pass
    def error(self, message):
        """
        Forward error messages to the logger and keep going.
        """
        logging.error(message)
        return True
    def prompt(self, message):  # pylint: disable=no-self-use
        """
        Log prompts instead of asking the user, and accept them.
        """
        logging.info(message)
        return True
class KresusEncoder(WeboobEncoder):
    """
    JSON encoder that knows how to serialize bytes (session cookies under
    python 3) as UTF-8 strings, deferring everything else to WeboobEncoder.
    """
    def default(self, o):  # pylint: disable=method-hidden
        if not isinstance(o, bytes):
            return super(KresusEncoder, self).default(o)
        return o.decode('utf-8')
class Connector():
    """
    Connector is a tool that connects to common websites like bank website,
    phone operator website... and that grabs personal data from there.
    Credentials are required to make this operation.
    Technically, connectors are weboob backend wrappers.
    """
    @staticmethod
    def version():
        """
        Get the version of the installed Weboob.
        """
        return Weboob.VERSION
    def __init__(self, weboob_data_path, fakemodules_path, sources_list_content, is_prod):
        """
        Create a Weboob instance.
        :param weboob_data_path: Weboob path to use.
        :param fakemodules_path: Path to the fake modules directory in user
        data.
        :param sources_list_content: Optional content of the sources.list file,
        as an array of lines, or None if not present.
        :param is_prod: whether we're running in production or not.
        """
        # By default, consider we don't need to update the repositories.
        self.needs_update = False
        self.fakemodules_path = fakemodules_path
        self.sources_list_content = sources_list_content
        if not os.path.isdir(weboob_data_path):
            os.makedirs(weboob_data_path)
        # Set weboob data directory and sources.list file.
        self.weboob_data_path = weboob_data_path
        # Sibling '<data dir>.bak' directory used by backup_data_dir /
        # restore_data_dir below.
        self.weboob_backup_path = os.path.normpath('%s.bak' % weboob_data_path)
        self.write_weboob_sources_list()
        # Create a Weboob object.
        self.weboob = Weboob(workdir=weboob_data_path,
                             datadir=weboob_data_path)
        self.backend = None
        self.storage = None
        # To make development more pleasant, always copy the fake modules in
        # non-production modes.
        if not is_prod:
            self.copy_fakemodules()
        # Update the weboob repos only if new repos are included.
        if self.needs_update:
            self.update()
    def copy_fakemodules(self):
        """
        Copies the fake modules files into the default fakemodules user-data
        directory.
        When Weboob updates modules, it might want to write within the
        fakemodules directory, which might not be writable by the current
        user. To prevent this, first copy the fakemodules directory in
        a directory we have write access to, and then use that directory
        in the sources list file.
        """
        fakemodules_src = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'fakemodules')
        # Start from a clean copy: drop any previous content first.
        if os.path.isdir(self.fakemodules_path):
            shutil.rmtree(self.fakemodules_path)
        shutil.copytree(fakemodules_src, self.fakemodules_path)
    def write_weboob_sources_list(self):
        """
        Ensure the Weboob sources.list file contains the required entries from
        Kresus.

        Sets ``self.needs_update`` to True when the file content changed, so
        the caller knows the repositories must be refreshed.
        """
        sources_list_path = os.path.join(self.weboob_data_path, 'sources.list')
        # Determine the new content of the sources.list file.
        new_sources_list_content = []
        if self.sources_list_content is not None:
            new_sources_list_content = self.sources_list_content
        else:
            # Default content of the sources.list file.
            new_sources_list_content = [
                unicode('https://updates.weboob.org/%(version)s/main/'),
                unicode('file://%s' % self.fakemodules_path)
            ]
        # Read the content of existing sources.list, if it exists.
        original_sources_list_content = []
        if os.path.isfile(sources_list_path):
            with io.open(sources_list_path, encoding="utf-8") as fh:
                original_sources_list_content = fh.read().splitlines()
        # Update the source.list content and update the repository, only if the
        # content has changed.
        if set(original_sources_list_content) != set(new_sources_list_content):
            with io.open(sources_list_path, 'w', encoding="utf-8") as sources_list_file:
                sources_list_file.write('\n'.join(new_sources_list_content))
            self.needs_update = True
    def backup_data_dir(self):
        """
        Backups modules.
        """
        # shutil.copytree expects the destination path to not exist.
        if os.path.isdir(self.weboob_backup_path):
            shutil.rmtree(self.weboob_backup_path)
        shutil.copytree(self.weboob_data_path, self.weboob_backup_path)
    def restore_data_dir(self):
        """
        Restores modules to their initial path.
        """
        if os.path.isdir(self.weboob_backup_path):
            # Ensure the target directory is clean.
            if os.path.isdir(self.weboob_data_path):
                shutil.rmtree(self.weboob_data_path)
            # Replace the invalid data with the backup.
            shutil.move(os.path.join(self.weboob_backup_path), self.weboob_data_path)
    def clean_data_dir_backup(self):
        """
        Cleans the backup.
        """
        if os.path.isdir(self.weboob_backup_path):
            shutil.rmtree(self.weboob_backup_path)
    def update(self):
        """
        Update Weboob modules.

        On a non-network failure, wipes the data directory and retries once;
        if the retry fails too, the pre-update backup is restored and the
        exception is re-raised.
        """
        self.copy_fakemodules()
        # Weboob has an offending print statement when it "Rebuilds index",
        # which happen at every run if the user has a local repository. We need
        # to silence it, hence the temporary redirect of stdout.
        sys.stdout = open(os.devnull, "w")
        # Create the backup before doing anything.
        self.backup_data_dir()
        try:
            self.weboob.update(progress=DummyProgress())
        except (ConnectionError, HTTPError) as exc:
            # Do not delete the repository if there is a connection error or the repo has problems.
            raise exc
        except Exception:
            # Try to remove the data directory, to see if it changes a thing.
            # This is especially useful when a new version of Weboob is
            # published and/or the keyring changes.
            shutil.rmtree(self.weboob_data_path)
            os.makedirs(self.weboob_data_path)
            # Recreate the Weboob object as the directories are created
            # on creating the Weboob object.
            self.weboob = Weboob(workdir=self.weboob_data_path,
                                 datadir=self.weboob_data_path)
            # Rewrite sources.list file
            self.write_weboob_sources_list()
            # Retry update
            try:
                self.weboob.update(progress=DummyProgress())
            except Exception as exc:
                # If it still fails, just restore the previous state.
                self.restore_data_dir()
                # Re-throw the exception so that the user is warned of the problem.
                raise exc
        finally:
            # Restore stdout
            sys.stdout = sys.__stdout__
        # Clean the backup. Only reached on success: any re-raised exception
        # above skips this line, keeping the backup around.
        self.clean_data_dir_backup()
    def create_backend(self, modulename, parameters, session):
        """
        Create a Weboob backend for a given module, ready to be used to fetch
        data.
        :param modulename: The name of the module from which backend should be
        created.
        :param parameters: A dict of parameters to pass to the module. It
        should at least contain ``login`` and ``password`` fields, but can
        contain additional values depending on the module.
        :param session: an object representing the browser state.
        """
        # Install the module if required.
        repositories = self.weboob.repositories
        minfo = repositories.get_module_info(modulename)
        if (
                minfo is not None and not minfo.is_installed() and
                not minfo.is_local()
        ):
            # We cannot install a locally available module, this would
            # result in a ModuleInstallError.
            try:
                repositories.install(minfo, progress=DummyProgress())
            except ModuleInstallError:
                fail(
                    GENERIC_EXCEPTION,
                    "Unable to install module %s." % modulename,
                    traceback.format_exc()
                )
        # Initialize the Storage.
        self.storage = DictStorage(session)
        # Initialize the backend.
        self.backend = self.weboob.build_backend(
            modulename,
            parameters,
            storage=self.storage
        )
    def delete_backend(self):
        """
        Delete a created backend for the given module.
        """
        if self.backend:
            with self.backend:
                self.backend.deinit()
        self.backend = None
        self.storage = None
    def get_accounts(self):
        """
        Fetch accounts data from Weboob, using the backend previously built
        by ``create_backend``.
        :returns: A list of dicts representing the available accounts.
        """
        results = []
        with self.backend:
            for account in list(self.backend.iter_accounts()):
                # The minimum dict keys for an account are :
                # 'id', 'label', 'balance' and 'type'
                # Retrieve extra information for the account.
                account = self.backend.fillobj(account, ['iban', 'currency'])
                iban = None
                if not empty(account.iban):
                    iban = account.iban
                currency = None
                if not empty(account.currency):
                    currency = unicode(account.currency)
                results.append({
                    'vendorAccountId': account.id,
                    'label': account.label,
                    'balance': account.balance,
                    'iban': iban,
                    'currency': currency,
                    'type': account.type,
                })
        return results
    def get_operations(self, from_date=None):
        """
        Fetch operations data from Weboob.
        :param from_date: The date until (in the past) which the transactions should be fetched.
        Optional, if not provided all transactions are returned.
        :returns: A list of dicts representing the available operations.
        """
        results = []
        with self.backend:
            for account in list(self.backend.iter_accounts()):
                # Get all operations for this account.
                nyi_methods = []
                operations = []
                try:
                    for histop in self.backend.iter_history(account):
                        operations.append(histop)
                        # Prefer the real payment date over the debit date
                        # when deciding whether we went past from_date.
                        op_date = histop.date
                        if histop.rdate and histop.rdate > op_date:
                            op_date = histop.rdate
                        if from_date and op_date and op_date < from_date:
                            logging.debug(
                                'Stopped fetch because op date (%s) is before from_date (%s)',
                                op_date.isoformat(),
                                from_date.isoformat()
                            )
                            break
                except NotImplementedError:
                    nyi_methods.append('iter_history')
                try:
                    # Only keep deferred-card operations from the "coming"
                    # transactions list.
                    operations += [
                        op for op in self.backend.iter_coming(account)
                        if op.type in [
                            Transaction.TYPE_DEFERRED_CARD,
                            Transaction.TYPE_CARD_SUMMARY
                        ]
                    ]
                except NotImplementedError:
                    nyi_methods.append('iter_coming')
                for method_name in nyi_methods:
                    logging.error(
                        ('%s not implemented for this account: %s.'),
                        method_name,
                        account.id
                    )
                # Build an operation dict for each operation.
                for operation in operations:
                    label = None
                    if not empty(operation.label):
                        label = unicode(operation.label)
                    raw_label = None
                    if not empty(operation.raw):
                        raw_label = unicode(operation.raw)
                    elif label:
                        raw_label = label
                    if raw_label and not label:
                        label = raw_label
                    # Handle date
                    if operation.rdate:
                        # Use date of the payment (real date) if available.
                        date = operation.rdate
                    elif operation.date:
                        # Otherwise, use debit date, on the bank statement.
                        date = operation.date
                    else:
                        logging.error(
                            'No known date property in operation line: %s.',
                            raw_label or "no label"
                        )
                        date = datetime.now()
                    isodate = date.isoformat()
                    # NOTE(review): the fallback just above suggests
                    # operation.date can be empty, in which case this
                    # .isoformat() call would raise -- confirm that a debit
                    # date is always set here.
                    debit_date = operation.date.isoformat()
                    results.append({
                        'account': account.id,
                        'amount': operation.amount,
                        'rawLabel': raw_label,
                        'type': operation.type,
                        'date': isodate,
                        'debit_date': debit_date,
                        'label': label
                    })
        return results
    def fetch(self, which, from_date=None):
        """
        Wrapper to fetch data from the Weboob connector.
        This wrapper fetches the required data from Weboob and returns it. It
        handles the translation between Weboob exceptions and Kresus error
        codes stored in the JSON response.
        :param which: The type of data to fetch. Can be either ``accounts`` or
        ``operations``.
        :param from_date: The date until (in the past) which the transactions should be fetched.
        Optional, if not provided all transactions are returned.
        :returns: A dict of the fetched data, in a ``values`` keys. Errors are
        described under ``error_code``, ``error_short`` and ``error_message``
        keys.
        """
        results = {}
        try:
            if which == 'accounts':
                results['values'] = self.get_accounts()
            elif which == 'operations':
                results['values'] = self.get_operations(from_date)
            else:
                raise Exception('Invalid fetch command.')
        except NoAccountsException:
            results['error_code'] = NO_ACCOUNTS
        except ModuleLoadError:
            results['error_code'] = UNKNOWN_MODULE
        except BrowserPasswordExpired:
            results['error_code'] = EXPIRED_PASSWORD
        except BrowserQuestion:
            results['error_code'] = BROWSER_QUESTION
        except AuthMethodNotImplemented:
            results['error_code'] = AUTH_METHOD_NYI
        except ActionNeeded as exc:
            # This `except` clause is not in alphabetic order and cannot be,
            # because BrowserPasswordExpired and AuthMethodNotImplemented
            # (above) inherits from it in Weboob 1.4.
            results['error_code'] = ACTION_NEEDED
            results['error_message'] = unicode(exc)
        except BrowserIncorrectPassword:
            # This `except` clause is not in alphabetic order and cannot be,
            # because BrowserPasswordExpired (above) inherits from it in
            # Weboob 1.3.
            results['error_code'] = INVALID_PASSWORD
        except Module.ConfigError as exc:
            results['error_code'] = INVALID_PARAMETERS
            results['error_message'] = unicode(exc)
        except ConnectionError as exc:
            results['error_code'] = CONNECTION_ERROR
            results['error_message'] = unicode(exc)
        except Exception as exc:
            fail(
                GENERIC_EXCEPTION,
                'Unknown error: %s.' % unicode(exc),
                traceback.format_exc()
            )
        # Return session information for future use.
        results['session'] = self.storage.dump()
        return results
def main():
    """
    Guess what? It's the main function!

    Parses CLI arguments and environment variables, builds a Connector, runs
    the requested command ('test', 'version', 'accounts' or 'operations') and
    prints its JSON-encoded result on standard output.
    """
    parser = argparse.ArgumentParser(description='Process CLI arguments for Kresus')
    parser.add_argument('command',
                        choices=['test', 'version', 'operations', 'accounts'],
                        help='The command to be executed by the script')
    parser.add_argument('--module', help="The weboob module name.")
    parser.add_argument('--login', help="The login for the access.")
    parser.add_argument('--field', nargs=2, action='append',
                        help="Custom fields. Can be set several times.",
                        metavar=('NAME', 'VALUE'))
    parser.add_argument('--fromDate', help="An optional datetime (UNIX timestamp in seconds) until "
                        "which the transactions fetch must happen.")
    parser.add_argument('--debug', action='store_true',
                        help="If set, the debug mode is activated.")
    parser.add_argument(
        '--update', action='store_true',
        help=("If set, the repositories will be updated prior to command "
              "accounts or operations.")
    )
    # Parse command from standard input.
    options = parser.parse_args()
    # Handle logging
    is_prod = os.environ.get('NODE_ENV', 'production') == 'production'
    if options.debug:
        init_logging(logging.DEBUG, is_prod)
    else:
        init_logging(logging.WARNING, is_prod)
    kresus_dir = os.environ.get('KRESUS_DIR', None)
    if kresus_dir is None:
        # NOTE(review): fail() presumably terminates the process (execution
        # below relies on it not returning) -- confirm. Also, no exception is
        # active here, so format_exc() only yields a 'NoneType: None'
        # placeholder.
        fail(
            INTERNAL_ERROR,
            "KRESUS_DIR must be set to use the weboob cli tool.",
            traceback.format_exc()
        )
    sources_list_content = None
    if (
            'WEBOOB_SOURCES_LIST' in os.environ and
            os.path.isfile(os.environ['WEBOOB_SOURCES_LIST'])
    ):
        # Read the new content from the sources.list provided as env
        # variable.
        with io.open(os.environ['WEBOOB_SOURCES_LIST'], encoding="utf-8") as fh:
            sources_list_content = fh.read().splitlines()
    # Build a Weboob connector.
    try:
        weboob_connector = Connector(
            weboob_data_path=os.path.join(kresus_dir, 'weboob-data'),
            fakemodules_path=os.path.join(kresus_dir, 'fakemodules'),
            sources_list_content=sources_list_content,
            is_prod=is_prod
        )
    except ConnectionError as exc:
        fail(
            CONNECTION_ERROR,
            'The connection seems down: %s' % unicode(exc),
            traceback.format_exc()
        )
    except Exception as exc:
        fail(
            WEBOOB_NOT_INSTALLED,
            ('Is weboob installed? Unknown exception raised: %s.' %
             unicode(exc)),
            traceback.format_exc()
        )
    # Handle the command and output the expected result on standard output, as
    # JSON encoded string.
    command = options.command
    if command == 'version':
        # Return Weboob version.
        obj = {
            'values': weboob_connector.version()
        }
        print(json.dumps(obj))
        sys.exit()
    if options.update:
        # Update Weboob modules.
        try:
            weboob_connector.update()
        except ConnectionError as exc:
            fail(
                CONNECTION_ERROR,
                'Exception when updating weboob: %s.' % unicode(exc),
                traceback.format_exc()
            )
        except Exception as exc:
            fail(
                GENERIC_EXCEPTION,
                'Exception when updating weboob: %s.' % unicode(exc),
                traceback.format_exc()
            )
    if command == 'test':
        # Do nothing, just check we arrived so far.
        print(json.dumps({}))
        sys.exit()
    if command in ['accounts', 'operations']:
        if not options.module:
            fail_unset_field('Module')
        if not options.login:
            fail_unset_field('Login')
        # The password is passed through the environment, never on the
        # command line.
        password = os.environ.get('KRESUS_WEBOOB_PWD', None)
        if not password:
            fail_unset_field('Password', error_type=NO_PASSWORD)
        # Format parameters for the Weboob connector.
        bank_module = options.module
        params = {
            'login': options.login,
            'username': options.login,
            'password': password
        }
        if options.fromDate:
            params['from_date'] = datetime.fromtimestamp(float(options.fromDate))
        if options.field is not None:
            for name, value in options.field:
                if not name:
                    fail_unset_field('Name of custom field')
                if value:
                    params[name] = value
                else:
                    logging.warning('No value specified for custom field %s', name)
        # Session management.
        session = os.environ.get('KRESUS_WEBOOB_SESSION', '{}')
        try:
            session = json.loads(session)
        except ValueError:
            logging.error('Invalid session stringified JSON, resetting the session.')
            session = dict()
        # Create a Weboob backend, fetch data and delete the module.
        try:
            weboob_connector.create_backend(bank_module, params, session)
        except Module.ConfigError:
            fail(
                INVALID_PARAMETERS,
                "Unable to load module %s." % bank_module,
                traceback.format_exc()
            )
        except ModuleLoadError:
            fail(
                UNKNOWN_MODULE,
                "Unable to load module %s." % bank_module,
                traceback.format_exc()
            )
        content = weboob_connector.fetch(command, params.get('from_date'))
        weboob_connector.delete_backend()
        # Output the fetched data as JSON.
        print(json.dumps(content, cls=KresusEncoder))
    sys.exit()
# Standard script entry point.
if __name__ == '__main__':
    main()
| |
""" Utility functions for dealing with URLs in pyramid """
import os
import warnings
from repoze.lru import lru_cache
from pyramid.interfaces import (
IResourceURL,
IRoutesMapper,
IStaticURLInfo,
)
from pyramid.compat import (
bytes_,
string_types,
)
from pyramid.encode import (
url_quote,
urlencode,
)
from pyramid.path import caller_package
from pyramid.threadlocal import get_current_registry
from pyramid.traversal import (
ResourceURL,
quote_path_segment,
)
# Characters left unescaped when quoting each URL component below.
PATH_SAFE = '/:@&+$,' # from webob
QUERY_SAFE = '/?:@!$&\'()*+,;=' # RFC 3986
ANCHOR_SAFE = QUERY_SAFE
def parse_url_overrides(kw):
    """Pop the special URL-generation arguments out of ``kw``.

    ``kw`` is mutated in place: every recognized ``_``-prefixed key is
    removed. Returns a 6-tuple of the format ``(app_url, scheme, host,
    port, qs, anchor)``.
    """
    qs = ''
    anchor = ''
    # A string _query is quoted as-is; anything else truthy is encoded as
    # x-www-form-urlencoded pairs.
    if '_query' in kw:
        query = kw.pop('_query')
        if isinstance(query, string_types):
            qs = '?' + url_quote(query, QUERY_SAFE)
        elif query:
            qs = '?' + urlencode(query, doseq=True)
    if '_anchor' in kw:
        anchor = '#' + url_quote(kw.pop('_anchor'), ANCHOR_SAFE)
    # The remaining overrides default to None when absent.
    app_url = kw.pop('_app_url', None)
    host = kw.pop('_host', None)
    scheme = kw.pop('_scheme', None)
    port = kw.pop('_port', None)
    return app_url, scheme, host, port, qs, anchor
class URLMethodsMixin(object):
""" Request methods mixin for BaseRequest having to do with URL
generation """
    def _partial_application_url(self, scheme=None, host=None, port=None):
        """
        Construct the URL defined by request.application_url, replacing any
        of the default scheme, host, or port portions with user-supplied
        variants.
        If ``scheme`` is passed as ``https``, and the ``port`` is *not*
        passed, the ``port`` value is assumed to ``443``. Likewise, if
        ``scheme`` is passed as ``http`` and ``port`` is not passed, the
        ``port`` value is assumed to be ``80``.
        """
        e = self.environ
        if scheme is None:
            scheme = e['wsgi.url_scheme']
        else:
            # An explicit scheme implies its default port, unless the caller
            # supplied a port override as well.
            if scheme == 'https':
                if port is None:
                    port = '443'
            if scheme == 'http':
                if port is None:
                    port = '80'
        url = scheme + '://'
        if port is not None:
            port = str(port)
        # Prefer the Host header; fall back to the server's own name.
        if host is None:
            host = e.get('HTTP_HOST')
        if host is None:
            host = e['SERVER_NAME']
        if port is None:
            # No explicit port: reuse the one embedded in the host value,
            # falling back to the server's own port.
            if ':' in host:
                host, port = host.split(':', 1)
            else:
                port = e['SERVER_PORT']
        else:
            # Explicit port: drop any port embedded in the host value.
            if ':' in host:
                host, _ = host.split(':', 1)
        # Elide the port entirely when it is the default for the scheme.
        if scheme == 'https':
            if port == '443':
                port = None
        elif scheme == 'http':
            if port == '80':
                port = None
        url += host
        if port:
            url += ':%s' % port
        url_encoding = getattr(self, 'url_encoding', 'utf-8') # webob 1.2b3+
        bscript_name = bytes_(self.script_name, url_encoding)
        # The script name (path prefix) is quoted with path-safe characters.
        return url + url_quote(bscript_name, PATH_SAFE)
    def route_url(self, route_name, *elements, **kw):
        """Generates a fully qualified URL for a named :app:`Pyramid`
        :term:`route configuration`.
        Use the route's ``name`` as the first positional argument.
        Additional positional arguments (``*elements``) are appended to the
        URL as path segments after it is generated.
        Use keyword arguments to supply values which match any dynamic
        path elements in the route definition. Raises a :exc:`KeyError`
        exception if the URL cannot be generated for any reason (not
        enough arguments, for example).
        For example, if you've defined a route named "foobar" with the path
        ``{foo}/{bar}/*traverse``::
            request.route_url('foobar',
                               foo='1')             => <KeyError exception>
            request.route_url('foobar',
                               foo='1',
                               bar='2')             => <KeyError exception>
            request.route_url('foobar',
                               foo='1',
                               bar='2',
                               traverse=('a','b'))  => http://e.com/1/2/a/b
            request.route_url('foobar',
                               foo='1',
                               bar='2',
                               traverse='/a/b')     => http://e.com/1/2/a/b
        Values replacing ``:segment`` arguments can be passed as strings
        or Unicode objects. They will be encoded to UTF-8 and URL-quoted
        before being placed into the generated URL.
        Values replacing ``*remainder`` arguments can be passed as strings
        *or* tuples of Unicode/string values. If a tuple is passed as a
        ``*remainder`` replacement value, its values are URL-quoted and
        encoded to UTF-8. The resulting strings are joined with slashes
        and rendered into the URL. If a string is passed as a
        ``*remainder`` replacement value, it is tacked on to the URL
        after being URL-quoted-except-for-embedded-slashes.
        If no ``_query`` keyword argument is provided, the request query string
        will be returned in the URL. If it is present, it will be used to
        compose a query string that will be tacked on to the end of the URL,
        replacing any request query string. The value of ``_query`` may be a
        sequence of two-tuples *or* a data structure with an ``.items()``
        method that returns a sequence of two-tuples (presumably a dictionary).
        This data structure will be turned into a query string per the
        documentation of :func:`pyramid.url.urlencode` function. This will
        produce a query string in the ``x-www-form-urlencoded`` format. A
        non-``x-www-form-urlencoded`` query string may be used by passing a
        *string* value as ``_query`` in which case it will be URL-quoted
        (e.g. query="foo bar" will become "foo%20bar"). However, the result
        will not need to be in ``k=v`` form as required by
        ``x-www-form-urlencoded``. After the query data is turned into a query
        string, a leading ``?`` is prepended, and the resulting string is
        appended to the generated URL.
        .. note::
           Python data structures that are passed as ``_query`` which are
           sequences or dictionaries are turned into a string under the same
           rules as when run through :func:`urllib.urlencode` with the ``doseq``
           argument equal to ``True``. This means that sequences can be passed
           as values, and a k=v pair will be placed into the query string for
           each value.
        .. versionchanged:: 1.5
           Allow the ``_query`` option to be a string to enable alternative
           encodings.
        If a keyword argument ``_anchor`` is present, its string
        representation will be quoted per :rfc:`3986#section-3.5` and used as
        a named anchor in the generated URL
        (e.g. if ``_anchor`` is passed as ``foo`` and the route URL is
        ``http://example.com/route/url``, the resulting generated URL will
        be ``http://example.com/route/url#foo``).
        .. note::
           If ``_anchor`` is passed as a string, it should be UTF-8 encoded. If
           ``_anchor`` is passed as a Unicode object, it will be converted to
           UTF-8 before being appended to the URL.
        .. versionchanged:: 1.5
           The ``_anchor`` option will be escaped instead of using
           its raw string representation.
        If both ``_anchor`` and ``_query`` are specified, the anchor
        element will always follow the query element,
        e.g. ``http://example.com?foo=1#bar``.
        If any of the keyword arguments ``_scheme``, ``_host``, or ``_port``
        is passed and is non-``None``, the provided value will replace the
        named portion in the generated URL. For example, if you pass
        ``_host='foo.com'``, and the URL that would have been generated
        without the host replacement is ``http://example.com/a``, the result
        will be ``http://foo.com/a``.
        Note that if ``_scheme`` is passed as ``https``, and ``_port`` is not
        passed, the ``_port`` value is assumed to have been passed as
        ``443``. Likewise, if ``_scheme`` is passed as ``http`` and
        ``_port`` is not passed, the ``_port`` value is assumed to have been
        passed as ``80``. To avoid this behavior, always explicitly pass
        ``_port`` whenever you pass ``_scheme``.
        If a keyword ``_app_url`` is present, it will be used as the
        protocol/hostname/port/leading path prefix of the generated URL.
        For example, using an ``_app_url`` of
        ``http://example.com:8080/foo`` would cause the URL
        ``http://example.com:8080/foo/fleeb/flub`` to be returned from
        this function if the expansion of the route pattern associated
        with the ``route_name`` expanded to ``/fleeb/flub``. If
        ``_app_url`` is not specified, the result of
        ``request.application_url`` will be used as the prefix (the
        default).
        If both ``_app_url`` and any of ``_scheme``, ``_host``, or ``_port``
        are passed, ``_app_url`` takes precedence and any values passed for
        ``_scheme``, ``_host``, and ``_port`` will be ignored.
        This function raises a :exc:`KeyError` if the URL cannot be
        generated due to missing replacement names. Extra replacement
        names are ignored.
        If the route object which matches the ``route_name`` argument has
        a :term:`pregenerator`, the ``*elements`` and ``**kw``
        arguments passed to this function might be augmented or changed.
        """
        try:
            reg = self.registry
        except AttributeError:
            reg = get_current_registry() # b/c
        mapper = reg.getUtility(IRoutesMapper)
        route = mapper.get_route(route_name)
        if route is None:
            raise KeyError('No such route named %s' % route_name)
        if route.pregenerator is not None:
            # A pregenerator may rewrite both the positional elements and
            # the keyword overrides before generation.
            elements, kw = route.pregenerator(self, elements, kw)
        app_url, scheme, host, port, qs, anchor = parse_url_overrides(kw)
        if app_url is None:
            if (scheme is not None or host is not None or port is not None):
                # Partial overrides: rebuild the application URL from the
                # user-supplied scheme/host/port pieces.
                app_url = self._partial_application_url(scheme, host, port)
            else:
                app_url = self.application_url
        path = route.generate(kw) # raises KeyError if generate fails
        if elements:
            suffix = _join_elements(elements)
            if not path.endswith('/'):
                suffix = '/' + suffix
        else:
            suffix = ''
        return app_url + path + suffix + qs + anchor
def route_path(self, route_name, *elements, **kw):
"""
Generates a path (aka a 'relative URL', a URL minus the host, scheme,
and port) for a named :app:`Pyramid` :term:`route configuration`.
This function accepts the same argument as
:meth:`pyramid.request.Request.route_url` and performs the same duty.
It just omits the host, port, and scheme information in the return
value; only the script_name, path, query parameters, and anchor data
are present in the returned string.
For example, if you've defined a route named 'foobar' with the path
``/{foo}/{bar}``, this call to ``route_path``::
request.route_path('foobar', foo='1', bar='2')
Will return the string ``/1/2``.
.. note::
Calling ``request.route_path('route')`` is the same as calling
``request.route_url('route', _app_url=request.script_name)``.
:meth:`pyramid.request.Request.route_path` is, in fact,
implemented in terms of :meth:`pyramid.request.Request.route_url`
in just this way. As a result, any ``_app_url`` passed within the
``**kw`` values to ``route_path`` will be ignored.
"""
kw['_app_url'] = self.script_name
return self.route_url(route_name, *elements, **kw)
def resource_url(self, resource, *elements, **kw):
    """
    Generate a string representing the absolute URL of the
    :term:`resource` object based on the ``wsgi.url_scheme``,
    ``HTTP_HOST`` or ``SERVER_NAME`` in the request, plus any
    ``SCRIPT_NAME``.  The overall result of this method is always a
    UTF-8 encoded string.

    Examples::

        request.resource_url(resource) =>
                                   http://example.com/
        request.resource_url(resource, 'a.html') =>
                                   http://example.com/a.html
        request.resource_url(resource, 'a.html', query={'q':'1'}) =>
                                   http://example.com/a.html?q=1
        request.resource_url(resource, 'a.html', anchor='abc') =>
                                   http://example.com/a.html#abc
        request.resource_url(resource, app_url='') =>
                                   /

    Any positional arguments passed in as ``elements`` must be strings,
    Unicode objects, or integer objects.  These will be joined by slashes
    and appended to the generated resource URL.  Each of the elements
    passed in is URL-quoted before being appended; if any element is
    Unicode, it will be converted to a UTF-8 bytestring before being
    URL-quoted.  If any element is an integer, it will be converted to
    its string representation before being URL-quoted.

    .. warning:: if no ``elements`` arguments are specified, the resource
       URL will end with a trailing slash.  If any ``elements`` are used,
       the generated URL will *not* end in a trailing slash.

    If a keyword argument ``query`` is present, it will be used to compose
    a query string that will be tacked on to the end of the URL.  The
    value of ``query`` may be a sequence of two-tuples *or* a data
    structure with an ``.items()`` method that returns a sequence of
    two-tuples (presumably a dictionary).  This data structure will be
    turned into a query string per the documentation of the
    :func:`pyramid.url.urlencode` function.  This will produce a query
    string in the ``x-www-form-urlencoded`` encoding.  A
    non-``x-www-form-urlencoded`` query string may be used by passing a
    *string* value as ``query`` in which case it will be URL-quoted
    (e.g. query="foo bar" will become "foo%20bar").  However, the result
    will not need to be in ``k=v`` form as required by
    ``x-www-form-urlencoded``.  After the query data is turned into a
    query string, a leading ``?`` is prepended, and the resulting string
    is appended to the generated URL.

    .. note::

       Python data structures that are passed as ``query`` which are
       sequences or dictionaries are turned into a string under the same
       rules as when run through :func:`urllib.urlencode` with the
       ``doseq`` argument equal to ``True``.  This means that sequences
       can be passed as values, and a k=v pair will be placed into the
       query string for each value.

    .. versionchanged:: 1.5
       Allow the ``query`` option to be a string to enable alternative
       encodings.

    If a keyword argument ``anchor`` is present, its string
    representation will be used as a named anchor in the generated URL
    (e.g. if ``anchor`` is passed as ``foo`` and the resource URL is
    ``http://example.com/resource/url``, the resulting generated URL will
    be ``http://example.com/resource/url#foo``).

    .. note::

       If ``anchor`` is passed as a string, it should be UTF-8 encoded.
       If ``anchor`` is passed as a Unicode object, it will be converted
       to UTF-8 before being appended to the URL.

    .. versionchanged:: 1.5
       The ``anchor`` option will be escaped instead of using its raw
       string representation.

    If both ``anchor`` and ``query`` are specified, the anchor element
    will always follow the query element,
    e.g. ``http://example.com?foo=1#bar``.

    If any of the keyword arguments ``scheme``, ``host``, or ``port`` is
    passed and is non-``None``, the provided value will replace the named
    portion in the generated URL.  For example, if you pass
    ``host='foo.com'``, and the URL that would have been generated
    without the host replacement is ``http://example.com/a``, the result
    will be ``http://foo.com/a``.

    If ``scheme`` is passed as ``https``, and an explicit ``port`` is not
    passed, the ``port`` value is assumed to have been passed as ``443``.
    Likewise, if ``scheme`` is passed as ``http`` and ``port`` is not
    passed, the ``port`` value is assumed to have been passed as
    ``80``.  To avoid this behavior, always explicitly pass ``port``
    whenever you pass ``scheme``.

    If a keyword argument ``app_url`` is passed and is not ``None``, it
    should be a string that will be used as the port/hostname/initial
    path portion of the generated URL instead of the default request
    application URL.  For example, if ``app_url='http://foo'``, then the
    resulting url of a resource that has a path of ``/baz/bar`` will be
    ``http://foo/baz/bar``.  If you want to generate completely relative
    URLs with no leading scheme, host, port, or initial path, you can
    pass ``app_url=''``.  Passing ``app_url=''`` when the resource path
    is ``/baz/bar`` will return ``/baz/bar``.

    .. versionadded:: 1.3
       ``app_url``

    If ``app_url`` is passed and any of ``scheme``, ``port``, or ``host``
    are also passed, ``app_url`` will take precedence and the values
    passed for ``scheme``, ``host``, and/or ``port`` will be ignored.

    If the ``resource`` passed in has a ``__resource_url__`` method, it
    will be used to generate the URL (scheme, host, port, path) for the
    base resource which is operated upon by this function.

    .. seealso::

       See also :ref:`overriding_resource_url_generation`.

    .. versionadded:: 1.5
       ``route_name``, ``route_kw``, and ``route_remainder_name``

    If ``route_name`` is passed, this function will delegate its URL
    production to the ``route_url`` function.  Calling
    ``resource_url(someresource, 'element1', 'element2', query={'a':1},
    route_name='blogentry')`` is roughly equivalent to doing::

        remainder_path = request.resource_path(someobject)
        url = request.route_url(
            'blogentry',
            'element1',
            'element2',
            _query={'a':'1'},
            traverse=traversal_path,
            )

    It is only sensible to pass ``route_name`` if the route being named
    has a ``*remainder`` stararg value such as ``*traverse``.  The
    remainder value will be ignored in the output otherwise.

    By default, the resource path value will be passed as the name
    ``traverse`` when ``route_url`` is called.  You can influence this by
    passing a different ``route_remainder_name`` value if the route has a
    different ``*stararg`` value at its end.  For example if the route
    pattern you want to replace has a ``*subpath`` stararg ala
    ``/foo*subpath``::

        request.resource_url(
            resource,
            route_name='myroute',
            route_remainder_name='subpath'
            )

    If ``route_name`` is passed, it is also permissible to pass
    ``route_kw``, which will be passed as additional keyword arguments to
    ``route_url``.  Saying ``resource_url(someresource, 'element1',
    'element2', route_name='blogentry', route_kw={'id':'4'},
    _query={'a':'1'})`` is roughly equivalent to::

        remainder_path = request.resource_path_tuple(someobject)
        kw = {'id':'4', '_query':{'a':'1'}, 'traverse':traversal_path}
        url = request.route_url(
            'blogentry',
            'element1',
            'element2',
            **kw,
            )

    If ``route_kw`` or ``route_remainder_name`` is passed, but
    ``route_name`` is not passed, both ``route_kw`` and
    ``route_remainder_name`` will be ignored.  If ``route_name``
    is passed, the ``__resource_url__`` method of the resource passed is
    ignored unconditionally.  This feature is incompatible with
    resources which generate their own URLs.

    .. note::

       If the :term:`resource` used is the result of a :term:`traversal`,
       it must be :term:`location`-aware.  The resource can also be the
       context of a :term:`URL dispatch`; contexts found this way do not
       need to be location-aware.

    .. note::

       If a 'virtual root path' is present in the request environment
       (the value of the WSGI environ key ``HTTP_X_VHM_ROOT``), and the
       resource was obtained via :term:`traversal`, the URL path will not
       include the virtual root prefix (it will be stripped off the left
       hand side of the generated URL).

    .. note::

       For backwards compatibility purposes, this method is also aliased
       as the ``model_url`` method of request.
    """
    try:
        reg = self.registry
    except AttributeError:
        reg = get_current_registry()  # b/c
    # Prefer a registered IResourceURL adapter for (resource, request);
    # fall back to the default ResourceURL implementation.
    url_adapter = reg.queryMultiAdapter((resource, self), IResourceURL)
    if url_adapter is None:
        url_adapter = ResourceURL(resource, self)
    virtual_path = getattr(url_adapter, 'virtual_path', None)
    if virtual_path is None:
        # old-style IContextURL adapter (Pyramid 1.2 and previous):
        # it builds the complete base URL itself, so the app_url/host/
        # port/scheme keyword handling below is bypassed entirely.
        warnings.warn(
            'Pyramid is using an IContextURL adapter to generate a '
            'resource URL; any "app_url", "host", "port", or "scheme" '
            'arguments passed to resource_url are being ignored. To '
            'avoid this behavior, as of Pyramid 1.3, register an '
            'IResourceURL adapter instead of an IContextURL '
            'adapter for the resource type(s). IContextURL adapters '
            'will be ignored in a later major release of Pyramid.',
            DeprecationWarning,
            2)
        resource_url = url_adapter()
    else:
        # IResourceURL adapter (Pyramid 1.3 and after)
        app_url = None
        scheme = None
        host = None
        port = None
        if 'route_name' in kw:
            # Delegate URL production to route_url: translate the
            # resource_url keywords into their route_url equivalents.
            newkw = {}
            route_name = kw['route_name']
            remainder = getattr(url_adapter, 'virtual_path_tuple', None)
            if remainder is None:
                # older user-supplied IResourceURL adapter without 1.5
                # virtual_path_tuple
                remainder = tuple(url_adapter.virtual_path.split('/'))
            remainder_name = kw.get('route_remainder_name', 'traverse')
            newkw[remainder_name] = remainder
            # route_url spells these keywords with a leading underscore.
            for name in (
                'app_url', 'scheme', 'host', 'port', 'query', 'anchor'
            ):
                val = kw.get(name, None)
                if val is not None:
                    newkw['_' + name] = val
            if 'route_kw' in kw:
                route_kw = kw.get('route_kw')
                if route_kw is not None:
                    newkw.update(route_kw)
            return self.route_url(route_name, *elements, **newkw)
        if 'app_url' in kw:
            app_url = kw['app_url']
        if 'scheme' in kw:
            scheme = kw['scheme']
        if 'host' in kw:
            host = kw['host']
        if 'port' in kw:
            port = kw['port']
        if app_url is None:
            # app_url (when given) wins over scheme/host/port overrides.
            if scheme or host or port:
                app_url = self._partial_application_url(scheme, host, port)
            else:
                app_url = self.application_url
        resource_url = None
        local_url = getattr(resource, '__resource_url__', None)
        if local_url is not None:
            # the resource handles its own url generation
            d = dict(
                virtual_path = virtual_path,
                physical_path = url_adapter.physical_path,
                app_url = app_url,
                )
            # allow __resource_url__ to punt by returning None
            resource_url = local_url(self, d)
        if resource_url is None:
            # the resource did not handle its own url generation or the
            # __resource_url__ function returned None
            resource_url = app_url + virtual_path
    # Query string and anchor handling is shared by both adapter styles.
    qs = ''
    anchor = ''
    if 'query' in kw:
        query = kw['query']
        if isinstance(query, string_types):
            # string query: quote as-is, no k=v structure imposed
            qs = '?' + url_quote(query, QUERY_SAFE)
        elif query:
            qs = '?' + urlencode(query, doseq=True)
    if 'anchor' in kw:
        anchor = kw['anchor']
        anchor = url_quote(anchor, ANCHOR_SAFE)
        anchor = '#' + anchor
    if elements:
        suffix = _join_elements(elements)
    else:
        suffix = ''
    return resource_url + suffix + qs + anchor

model_url = resource_url  # b/w compat forever
def resource_path(self, resource, *elements, **kw):
    """Generate a path (aka a 'relative URL', a URL minus the host, scheme,
    and port) for a :term:`resource`.

    This method accepts the same arguments as
    :meth:`pyramid.request.Request.resource_url` and performs the same
    duty, but omits the host, port, and scheme information from the
    result; only the script_name, path, query parameters, and anchor data
    are present in the returned string.

    .. note::

       This is implemented as ``resource_url`` with ``app_url`` forced to
       ``request.script_name``; any ``app_url`` supplied in ``**kw`` is
       therefore ignored, as are ``scheme``, ``host``, and ``port``.
    """
    # Override (rather than merge) any caller-supplied app_url.
    path_kw = dict(kw, app_url=self.script_name)
    return self.resource_url(resource, *elements, **path_kw)
def static_url(self, path, **kw):
    """Generate a fully qualified URL for a static :term:`asset`.

    The asset must live within a location defined via the
    :meth:`pyramid.config.Configurator.add_static_view`
    :term:`configuration declaration` (see :ref:`static_assets_section`).

    Example::

        request.static_url('mypackage:static/foo.css') =>
                                http://example.com/static/foo.css

    ``path`` points at a file or directory on disk for which a URL should
    be generated.  It may be a relative path (``static/foo.css``), an
    absolute path (``/abspath/to/static/foo.css``), or an :term:`asset
    specification` (``mypackage:static/foo.css``).  A relative path is
    interpreted relative to the package of the calling module.

    The ``**kw`` argument has the same purpose as the
    :meth:`pyramid.request.Request.route_url` ``**kw`` argument; see that
    method's documentation.  Typically nothing needs to be passed.

    Raises a :exc:`ValueError` if no static view definition matches the
    path specification.
    """
    if not os.path.isabs(path) and ':' not in path:
        # Neither a package:relative/name nor an /absolute/path, so it is
        # a bare relative/path: qualify it with the caller's package.
        package = caller_package()
        path = '%s:%s' % (package.__name__, path)
    try:
        reg = self.registry
    except AttributeError:
        reg = get_current_registry()  # b/c
    info = reg.queryUtility(IStaticURLInfo)
    if info is None:
        raise ValueError('No static URL definition matching %s' % path)
    return info.generate(path, self, **kw)
def static_path(self, path, **kw):
    """Generate a path (aka a 'relative URL', a URL minus the host, scheme,
    and port) for a static resource.

    This method accepts the same arguments as
    :meth:`pyramid.request.Request.static_url` and performs the same
    duty, but omits the host, port, and scheme information from the
    result; only the script_name, path, query parameters, and anchor data
    are present in the returned string.

    Example::

        request.static_path('mypackage:static/foo.css') =>
                                        /static/foo.css

    .. note::

       This is implemented as ``static_url`` with ``_app_url`` forced to
       ``request.script_name``; any ``_app_url`` supplied in ``**kw`` is
       therefore ignored.
    """
    if not os.path.isabs(path) and ':' not in path:
        # Neither a package:relative/name nor an /absolute/path, so it is
        # a bare relative/path: qualify it with the caller's package.
        package = caller_package()
        path = '%s:%s' % (package.__name__, path)
    path_kw = dict(kw, _app_url=self.script_name)
    return self.static_url(path, **path_kw)
def current_route_url(self, *elements, **kw):
    """Generate a fully qualified URL for a named :app:`Pyramid`
    :term:`route configuration` based on the 'current route'.

    This method supplements :meth:`pyramid.request.Request.route_url`: it
    is an easy way to build a URL for the route which matched when the
    request was generated.  Arguments have the same meaning as the
    identically named arguments of ``route_url``; additionally the extra
    keyword ``_route_name`` (which ``route_url`` does not accept) selects
    a route explicitly instead of using ``request.matched_route``.

    Keys and values from the current request :term:`matchdict` are
    combined with the ``kw`` arguments (``kw`` winning) and, unless a
    ``_query`` override is supplied, the current request's query string
    (``request.GET``) is carried over.  The combined values are then fed
    to ``request.route_url(route_name, *elements, **newkw)``.

    Raises :exc:`ValueError` when no ``_route_name`` is given and the
    current request did not match a route.
    """
    try:
        route_name = kw.pop('_route_name')
    except KeyError:
        matched = getattr(self, 'matched_route', None)
        route_name = getattr(matched, 'name', None)
        if route_name is None:
            raise ValueError('Current request matches no route')
    # Carry the current query string over unless explicitly overridden.
    kw.setdefault('_query', self.GET)
    merged = dict(self.matchdict)
    merged.update(kw)
    return self.route_url(route_name, *elements, **merged)
def current_route_path(self, *elements, **kw):
    """Generate a path (aka a 'relative URL', a URL minus the host, scheme,
    and port) for the :app:`Pyramid` :term:`route configuration` matched
    by the current request.

    This method accepts the same arguments as
    :meth:`pyramid.request.Request.current_route_url` and performs the
    same duty, but omits the host, port, and scheme information from the
    result; only the script_name, path, query parameters, and anchor data
    are present in the returned string.

    For example, if the matched route has the pattern ``/{foo}/{bar}``,
    ``request.current_route_path(foo='1', bar='2')`` returns ``/1/2``.

    .. note::

       This is implemented as ``current_route_url`` with ``_app_url``
       forced to ``request.script_name``; any ``_app_url`` supplied in
       ``**kw`` is therefore ignored.
    """
    overrides = dict(kw, _app_url=self.script_name)
    return self.current_route_url(*elements, **overrides)
def route_url(route_name, request, *elements, **kw):
    """Backwards compatibility function; equivalent to calling
    ``request.route_url(route_name, *elements, **kw)``.

    .. seealso:: :meth:`pyramid.request.Request.route_url`
    """
    url_for = request.route_url
    return url_for(route_name, *elements, **kw)
def route_path(route_name, request, *elements, **kw):
    """Backwards compatibility function; equivalent to calling
    ``request.route_path(route_name, *elements, **kw)``.

    .. seealso:: :meth:`pyramid.request.Request.route_path`
    """
    path_for = request.route_path
    return path_for(route_name, *elements, **kw)
def resource_url(resource, request, *elements, **kw):
    """Backwards compatibility function; equivalent to calling
    ``request.resource_url(resource, *elements, **kw)``.

    .. seealso:: :meth:`pyramid.request.Request.resource_url`
    """
    url_for = request.resource_url
    return url_for(resource, *elements, **kw)

model_url = resource_url # b/w compat (forever)
def static_url(path, request, **kw):
    """Backwards compatibility function; equivalent to calling
    ``request.static_url(path, **kw)``, with a relative ``path`` first
    qualified against the calling module's package.

    .. seealso:: :meth:`pyramid.request.Request.static_url`
    """
    if not (os.path.isabs(path) or ':' in path):
        # Neither a package:relative/name nor an /absolute/path, so it is
        # a bare relative/path: qualify it with the caller's package.
        path = '%s:%s' % (caller_package().__name__, path)
    return request.static_url(path, **kw)
def static_path(path, request, **kw):
    """Backwards compatibility function; equivalent to calling
    ``request.static_path(path, **kw)``, with a relative ``path`` first
    qualified against the calling module's package.

    .. seealso:: :meth:`pyramid.request.Request.static_path`
    """
    if not (os.path.isabs(path) or ':' in path):
        # Neither a package:relative/name nor an /absolute/path, so it is
        # a bare relative/path: qualify it with the caller's package.
        path = '%s:%s' % (caller_package().__name__, path)
    return request.static_path(path, **kw)
def current_route_url(request, *elements, **kw):
    """Backwards compatibility function; equivalent to calling
    ``request.current_route_url(*elements, **kw)``.

    .. seealso:: :meth:`pyramid.request.Request.current_route_url`
    """
    url_for = request.current_route_url
    return url_for(*elements, **kw)
def current_route_path(request, *elements, **kw):
    """Backwards compatibility function; equivalent to calling
    ``request.current_route_path(*elements, **kw)``.

    .. seealso:: :meth:`pyramid.request.Request.current_route_path`
    """
    path_for = request.current_route_path
    return path_for(*elements, **kw)
@lru_cache(1000)
def _join_elements(elements):
    """Quote each URL element and join them with slashes.

    Cached because the same tuple of elements is typically joined many
    times over the life of a process.
    """
    quoted = (quote_path_segment(segment, safe=':@&+$,') for segment in elements)
    return '/'.join(quoted)
| |
"""Script for generating test data to Scout
!!!!!!!! NOT COMPLETE !!!!!!!!
"""
import logging
import os
from pprint import pprint as pp
import click
import coloredlogs
from scout.demo.resources import (
exac_reduced_path,
genemap2_reduced_path,
genes37_reduced_path,
genes38_reduced_path,
hgnc_reduced_path,
hpo_phenotype_to_terms_reduced_path,
hpogenes_reduced_path,
hpoterms_reduced_path,
mim2gene_reduced_path,
transcripts37_reduced_path,
transcripts38_reduced_path,
)
from scout.parse.ensembl import parse_ensembl_exons, parse_ensembl_genes, parse_ensembl_transcripts
from scout.parse.exac import parse_exac_genes
from scout.parse.hgnc import parse_hgnc_line
from scout.parse.omim import parse_genemap2, parse_mim2gene
from scout.utils.scout_requests import (
fetch_ensembl_exons,
fetch_ensembl_genes,
fetch_ensembl_transcripts,
fetch_exac_constraint,
fetch_hgnc,
fetch_hpo_files,
fetch_hpo_genes,
fetch_hpo_terms,
fetch_mim_files,
)
LOG = logging.getLogger(__name__)
def get_reduced_hpo_terms(hpo_terms):
    """Yield the lines of a reduced version of the HPO terms file.

    Fetches the full HPO term lines and keeps the header (everything
    before the first ``[Term]`` stanza) plus every stanza whose ``id`` is
    in ``hpo_terms``.

    Args:
        hpo_terms(set(str)): Set of chosen term ids that should be included

    Yields:
        hpo_line(str): A line with hpo information
    """
    hpo_lines = fetch_hpo_terms()
    term_lines = []
    # We want to keep the header lines, so the first buffered stanza
    # (the header) is kept unconditionally.
    # (Removed the unused ``begining`` flag that was never read.)
    keep = True
    nr_terms = 0
    nr_kept = 0
    for line in hpo_lines:
        # When we encounter a new term we yield all lines of the previous term
        if line.startswith("[Term]"):
            nr_terms += 1
            if keep:
                nr_kept += 1
                for hpo_line in term_lines:
                    yield hpo_line
            keep = False
            term_lines = []
        elif line.startswith("id"):
            # Line looks like "id: HP:0000001" -> the id starts at offset 4
            hpo_id = line[4:]
            if hpo_id in hpo_terms:
                keep = True
        term_lines.append(line)
    # Flush the final buffered stanza
    if keep:
        for hpo_line in term_lines:
            yield hpo_line
    LOG.info("Nr of terms in file %s", nr_terms)
    LOG.info("Nr of terms kept: %s", nr_kept)
def remove_file(path):
    """Remove the file at ``path`` if it exists.

    Removal is best effort: a missing file (or any other OSError) is
    logged and swallowed.

    Args:
        path(str): Path to the file that should be removed
    """
    LOG.info("Removing file %s", path)
    try:
        os.remove(path)
        LOG.info("File %s removed", path)
    except OSError:
        # The bound exception was never used; grammar of message fixed.
        LOG.info("File %s does not exist", path)
def generate_hgnc(genes):
    """Yield the lines of a reduced hgnc file.

    Fetch the latest complete hgnc file and keep the header line plus the
    lines for the genes present in ``genes``.

    Args:
        genes(dict): A dictionary with hgnc_id as key and hgnc_symbol as value

    Yields:
        print_line(str): Lines from the reduced file
    """
    LOG.info("Generating new hgnc reduced file")
    # fetch the latest hgnc file here
    hgnc_gene_lines = fetch_hgnc()
    header = None
    genes_found = 0
    for index, raw_line in enumerate(hgnc_gene_lines):
        stripped = raw_line.rstrip()
        # Skip empty lines
        if not stripped:
            continue
        # The first line is the header; always keep it
        if index == 0:
            header = stripped.split("\t")
            yield stripped
            continue
        gene = parse_hgnc_line(stripped, header)
        if not gene:
            continue
        # Keep the line only when the gene is part of the reduced set
        if int(gene["hgnc_id"]) in genes:
            genes_found += 1
            yield stripped
    LOG.info("Number of genes printed to file: %s", genes_found)
def generate_genemap2(genes, api_key):
    """Yield the lines of a reduced omim genemap2 file.

    Args:
        genes(dict): A dictionary with hgnc_symbol as key and hgnc_id as value
        api_key(str): OMIM api key

    Yields:
        print_line(str): Lines from the reduced file
    """
    genemap2_lines = fetch_mim_files(api_key, genemap2=True)["genemap2"]
    # The leading comment lines make up the header; keep them all
    for header_line in genemap2_lines:
        if not header_line.startswith("#"):
            break
        yield header_line
    # Keep the raw line of every entry whose symbol is in the reduced set
    for gene_info in parse_genemap2(genemap2_lines):
        hgnc_symbol = gene_info.get("hgnc_symbol")
        if hgnc_symbol and hgnc_symbol in genes:
            yield gene_info["raw"]
def generate_mim2genes(genes, api_key):
    """Yield the lines of a reduced omim mim2gene file.

    Args:
        genes(dict): A dictionary with hgnc_symbol as key and hgnc_id as value
        api_key(str): OMIM api key

    Yields:
        print_line(str): Lines from the reduced file
    """
    mim2gene_lines = fetch_mim_files(api_key, mim2genes=True)["mim2genes"]
    # The leading comment lines make up the header; keep them all
    for header_line in mim2gene_lines:
        if not header_line.startswith("#"):
            break
        yield header_line
    # Keep the raw line of every entry whose symbol is in the reduced set
    for gene_info in parse_mim2gene(mim2gene_lines):
        symbol = gene_info.get("hgnc_symbol")
        if symbol and symbol in genes:
            yield gene_info["raw"]
def generate_exac_genes(genes):
    """Yield the lines of a reduced ExAC gene constraint file.

    Args:
        genes(dict): A dictionary with hgnc_symbol as key and hgnc_id as value

    Yields:
        print_line(str): Lines from the reduced file
    """
    exac_lines = fetch_exac_constraint()
    # The first line is the header; always keep it
    yield exac_lines[0]
    # Keep the raw line of every entry whose symbol is in the reduced set
    for gene_info in parse_exac_genes(exac_lines):
        symbol = gene_info.get("hgnc_symbol")
        if symbol and symbol in genes:
            yield gene_info["raw"]
def generate_ensembl_genes(genes, silent=False, build=None):
    """Yield the lines of a reduced ensembl gene file for a genome build.

    Args:
        genes(dict): A dictionary with hgnc_symbol as key and hgnc_id as value
        silent(bool): Unused; kept for backwards compatibility
        build(str): What build to use. Defaults to "37"

    Yields:
        print_line(str): Lines from the reduced file
    """
    build = build or "37"
    # Convert genes to map from id to symbol
    id_to_symbol = {genes[hgnc_symbol]: hgnc_symbol for hgnc_symbol in genes}
    ensembl_header = [
        "Chromosome/scaffold name",
        "Gene start (bp)",
        "Gene end (bp)",
        "Gene stable ID",
        "HGNC symbol",
        "HGNC ID",
    ]
    yield "\t".join(ensembl_header)
    ensembl_genes = fetch_ensembl_genes(build=build)
    nr_genes = 0
    # This function will yield dictionaries with ensembl info.
    # BUGFIX: was ``parse_ensembl_gene`` which is neither defined nor
    # imported (the import is ``parse_ensembl_genes``) -> NameError.
    for gene_info in parse_ensembl_genes(ensembl_genes):
        hgnc_id = gene_info.get("hgnc_id")
        if not hgnc_id:
            continue
        if hgnc_id in id_to_symbol:
            print_line = [
                gene_info["chrom"],
                str(gene_info["gene_start"]),
                str(gene_info["gene_end"]),
                gene_info["ensembl_gene_id"],
                gene_info["hgnc_symbol"],
                str(gene_info["hgnc_id"]),
            ]
            yield "\t".join(print_line)
            nr_genes += 1
    LOG.info("Nr genes collected for build %s: %s", build, nr_genes)
def generate_ensembl_transcripts(ensembl_genes, build=None):
    """Yield the lines of a reduced ensembl transcript file.

    Args:
        ensembl_genes(dict): A dictionary with ensembl_gene_id as key
        build(str): What build to use. Defaults to "37"

    Yields:
        print_line(str): Lines from the reduced file
    """
    build = build or "37"
    transcripts = fetch_ensembl_transcripts(build=build)
    header_columns = [
        "Chromosome/scaffold name",
        "Gene stable ID",
        "Transcript stable ID",
        "Transcript start (bp)",
        "Transcript end (bp)",
        "RefSeq mRNA ID",
        "RefSeq mRNA predicted ID",
        "RefSeq ncRNA ID",
    ]
    yield "\t".join(header_columns)
    for tx_info in parse_ensembl_transcripts(transcripts):
        # Only keep transcripts that belong to one of the selected genes
        if tx_info["ensembl_gene_id"] not in ensembl_genes:
            continue
        fields = [
            tx_info["chrom"],
            tx_info["ensembl_gene_id"],
            tx_info["ensembl_transcript_id"],
            str(tx_info["transcript_start"]),
            str(tx_info["transcript_end"]),
            tx_info["refseq_mrna"] or "",
            tx_info["refseq_mrna_predicted"] or "",
            tx_info["refseq_ncrna"] or "",
        ]
        yield "\t".join(fields)
def generate_hpo_genes(genes):
    """Yield the lines of a reduced hpo genes file.

    Args:
        genes(dict): A map from hgnc_symbol to hgnc_id

    Yields:
        line(str): Lines from hpo with connection to genes
    """
    for index, raw_line in enumerate(fetch_hpo_genes()):
        line = raw_line.rstrip()
        if len(line) <= 1:
            continue
        # The first line is the header; always keep it
        if index == 0:
            yield line
            continue
        # The second column holds the hgnc symbol
        if line.split("\t")[1] in genes:
            yield line
def generate_hpo_terms(genes):
    """Yield the lines of a reduced hpo terms file.

    Args:
        genes(dict): A map from hgnc_symbol to hgnc_id

    Yields:
        line(str): Lines from hpo with connection to genes
    """
    # NOTE(review): this fetches the HPO *genes* file (same call as
    # generate_hpo_genes), not the terms file -- looks like a copy/paste;
    # confirm whether fetch_hpo_terms was intended here.
    hpo_lines = fetch_hpo_genes()
    nr_terms = 0
    for i, line in enumerate(hpo_lines):
        line = line.rstrip()
        if not len(line) > 1:
            continue
        # Header line
        if i == 0:
            yield line
            continue
        splitted_line = line.split("\t")
        hgnc_symbol = splitted_line[1]
        if hgnc_symbol in genes:
            # BUGFIX: was a bare ``nr_terms`` expression (a no-op);
            # the counter was never incremented.
            nr_terms += 1
            yield line
def generate_hpo_files(genes):
    """Write files with hpo reduced information.

    Keeps the header of each fetched HPO file plus the lines whose hgnc
    symbol is in ``genes``.

    Args:
        genes(dict): A map from hgnc_symbol to hgnc_id
    """
    hpo_files = fetch_hpo_files(
        hpogenes=True, hpoterms=True, phenotype_to_terms=True, hpodisease=False
    )
    file_names = {
        "hpogenes": hpogenes_reduced_path,
        "hpoterms": hpoterms_reduced_path,
        "phenotype_to_terms": hpo_phenotype_to_terms_reduced_path,
    }
    # Index of the column that holds the hgnc symbol for each file type
    symbol_columns = {"hpogenes": 1, "hpoterms": 3, "phenotype_to_terms": 1}
    for name in file_names:
        hpo_lines = hpo_files[name]
        out_path = file_names[name]
        LOG.info("Writing file %s", out_path)
        # BUGFIX: use a context manager so the file handle is always
        # closed (it was previously left open).
        with open(out_path, "w") as outfile:
            for i, line in enumerate(hpo_lines):
                line = line.rstrip()
                if not len(line) > 1:
                    continue
                if i == 0:  # Header line
                    outfile.write(line + "\n")
                    continue
                splitted_line = line.split("\t")
                hgnc_symbol = splitted_line[symbol_columns[name]]
                if hgnc_symbol in genes:
                    outfile.write(line + "\n")
        LOG.info("File ready")
def read_panel_file(lines):
    """Read a panel file with gene ids and names into a map.

    Each non-comment line is tab separated: first column hgnc id, second
    column hgnc symbol.  Lines with fewer than three columns are skipped.

    Args:
        lines(iterable(str)): Lines of the panel file

    Returns:
        genes(dict): A map from hgnc_symbol to hgnc_id
    """
    genes = {}
    for raw_line in lines:
        # Skip comment lines
        if raw_line.startswith("#"):
            continue
        columns = raw_line.split("\t")
        if len(columns) < 3:
            continue
        genes[columns[1]] = int(columns[0])
    return genes
@click.group()
@click.pass_context
def cli(ctx):
    """Generate test data for scout"""
    # Configure colored log output once for every subcommand
    coloredlogs.install(level="INFO")
@cli.command()
@click.argument("genes", type=click.File("r"))
@click.option("-b", "--build", type=click.Choice(["37", "38"]), default="37", show_default=True)
@click.option("-c", "--chromosome")
@click.option("-e", "--exons", type=click.File("r"), help="If exon information is in a file")
@click.pass_context
def exons(ctx, genes, build, exons, chromosome):
    """Generate exons scout. Need to have a ensemble gene file generated from above"""
    # NOTE(review): the ``-e/--exons`` option is accepted but never used
    # below -- confirm whether reading exons from a file was meant to be
    # supported, or drop the option.
    if chromosome:
        # fetch_ensembl_exons below takes a list of chromosomes
        chromosome = [chromosome]
    # Map ensembl gene ids to hgnc ids for the genes in the reduced file
    ensg_to_hgncid = {}
    for gene_info in parse_ensembl_genes(genes):
        ensgid = gene_info["ensembl_gene_id"]
        hgncid = gene_info["hgnc_id"]
        ensg_to_hgncid[ensgid] = hgncid
    for i, line in enumerate(fetch_ensembl_exons(build=build, chromosomes=chromosome)):
        if i == 0:
            # First line holds the column names; echo it and remember it
            header = line.rstrip().split("\t")
            click.echo(line)
            continue
        exon_line = line.rstrip().split("\t")
        exon_info = dict(zip(header, exon_line))
        gene_id = exon_info["Gene stable ID"]
        # Only keep exons that belong to one of the selected genes
        if not gene_id in ensg_to_hgncid:
            continue
        click.echo(line)
# Script entry point: run the click command group when executed directly
if __name__ == "__main__":
    cli()
| |
from __future__ import absolute_import, unicode_literals
from unittest import TestCase as UnitTestCase
import django
from django.contrib.contenttypes.models import ContentType
from django.core import serializers
from django.core.exceptions import ImproperlyConfigured, ValidationError
from django.db import connection
from django.test import TestCase, TransactionTestCase
from django.test.utils import override_settings
from django.utils.encoding import force_text
from .forms import CustomPKFoodForm, DirectFoodForm, FoodForm, OfficialFoodForm
from .models import (Article, Child, CustomManager, CustomPKFood,
CustomPKHousePet, CustomPKPet, DirectFood,
DirectHousePet, DirectPet, Food, HousePet, Movie,
OfficialFood, OfficialHousePet, OfficialPet,
OfficialTag, OfficialThroughModel, Pet, Photo,
TaggedCustomPKFood, TaggedCustomPKPet, TaggedFood,
TaggedPet)
from taggit.managers import _model_name, _TaggableManager, TaggableManager
from taggit.models import Tag, TaggedItem
from taggit.utils import edit_string_for_tags, parse_tags
try:
from unittest import skipIf, skipUnless
except ImportError:
from django.utils.unittest import skipIf, skipUnless
class BaseTaggingTest(object):
    """Shared assertion helpers mixed into the tagging test cases."""

    def assert_tags_equal(self, qs, tags, sort=True, attr="name"):
        """Assert that queryset *qs* carries exactly *tags*.

        Each object in *qs* is reduced to its *attr* value. When *sort*
        is true the comparison is order-insensitive.
        """
        got = [getattr(obj, attr) for obj in qs]
        if sort:
            # Compare sorted copies rather than sorting in place, so the
            # caller's ``tags`` list is never mutated as a side effect.
            got = sorted(got)
            tags = sorted(tags)
        self.assertEqual(got, tags)

    def _get_form_str(self, form_str):
        """Fill the help-text placeholders in *form_str* for this Django
        version (Django >= 1.3 wraps help text in a helptext <span>)."""
        if django.VERSION >= (1, 3):
            form_str %= {
                "help_start": '<span class="helptext">',
                "help_stop": "</span>"
            }
        else:
            form_str %= {
                "help_start": "",
                "help_stop": ""
            }
        return form_str

    def assert_form_renders(self, form, html):
        """Assert *form* renders as *html* after placeholder substitution."""
        self.assertHTMLEqual(str(form), self._get_form_str(html))
class BaseTaggingTestCase(TestCase, BaseTaggingTest):
    """Standard TestCase flavour of the tagging assertion mixin."""
    pass
class BaseTaggingTransactionTestCase(TransactionTestCase, BaseTaggingTest):
    """TransactionTestCase flavour, for tests that need real commits."""
    pass
class TagModelTestCase(BaseTaggingTransactionTestCase):
    """Behaviour of the Tag model itself (slugs, saving, add semantics).

    Subclasses rerun these tests against the direct / custom-PK /
    official-through-model variants.
    """
    food_model = Food
    tag_model = Tag

    def test_unique_slug(self):
        # "Red" and "red" slugify to the same value; adding both must not
        # trip the unique slug constraint.
        apple = self.food_model.objects.create(name="apple")
        apple.tags.add("Red", "red")

    def test_update(self):
        # Re-saving an existing tag must be a no-op, not an error.
        special = self.tag_model.objects.create(name="special")
        special.save()

    def test_add(self):
        # Tag instances (not just strings) are accepted by add().
        apple = self.food_model.objects.create(name="apple")
        yummy = self.tag_model.objects.create(name="yummy")
        apple.tags.add(yummy)

    def test_slugify(self):
        # Article uses a tag class whose slugify prefixes "category-";
        # a duplicate ("AWESOME" vs "awesome") gets a numeric suffix.
        a = Article.objects.create(title="django-taggit 1.0 Released")
        a.tags.add("awesome", "release", "AWESOME")
        self.assert_tags_equal(a.tags.all(), [
            "category-awesome",
            "category-release",
            "category-awesome-1"
        ], attr="slug")

    def test_integers(self):
        """Adding an integer as a tag should raise a ValueError (#237)."""
        # The regex covers both py2 ("type") and py3 ("class") reprs.
        apple = self.food_model.objects.create(name="apple")
        with self.assertRaisesRegexp(ValueError, (
                r"Cannot add 1 \(<(type|class) 'int'>\). "
                r"Expected <class 'django.db.models.base.ModelBase'> or str.")):
            apple.tags.add(1)
class TagModelDirectTestCase(TagModelTestCase):
    """TagModelTestCase rerun against the direct-FK through model."""
    food_model = DirectFood
    tag_model = Tag
class TagModelCustomPKTestCase(TagModelTestCase):
    """TagModelTestCase rerun against models with a custom primary key."""
    food_model = CustomPKFood
    tag_model = Tag
class TagModelOfficialTestCase(TagModelTestCase):
    """TagModelTestCase rerun against the custom tag/through models."""
    food_model = OfficialFood
    tag_model = OfficialTag
class TaggableManagerTestCase(BaseTaggingTestCase):
    """Core behaviour of the taggable manager: add/remove, lookups,
    prefetching and form/field integration.

    Subclasses rerun the whole suite against the direct, custom-PK and
    official-through-model variants by overriding the model attributes.
    """
    food_model = Food
    pet_model = Pet
    housepet_model = HousePet
    taggeditem_model = TaggedItem
    tag_model = Tag

    def test_add_tag(self):
        # Tags are shared across instances; the model-level manager sees
        # the union, and most_common() orders by usage count.
        apple = self.food_model.objects.create(name="apple")
        self.assertEqual(list(apple.tags.all()), [])
        self.assertEqual(list(self.food_model.tags.all()), [])
        apple.tags.add('green')
        self.assert_tags_equal(apple.tags.all(), ['green'])
        self.assert_tags_equal(self.food_model.tags.all(), ['green'])
        pear = self.food_model.objects.create(name="pear")
        pear.tags.add('green')
        self.assert_tags_equal(pear.tags.all(), ['green'])
        self.assert_tags_equal(self.food_model.tags.all(), ['green'])
        apple.tags.add('red')
        self.assert_tags_equal(apple.tags.all(), ['green', 'red'])
        self.assert_tags_equal(self.food_model.tags.all(), ['green', 'red'])
        self.assert_tags_equal(
            self.food_model.tags.most_common(),
            ['green', 'red'],
            sort=False
        )
        apple.tags.remove('green')
        self.assert_tags_equal(apple.tags.all(), ['red'])
        self.assert_tags_equal(self.food_model.tags.all(), ['green', 'red'])
        tag = self.tag_model.objects.create(name="delicious")
        apple.tags.add(tag)
        self.assert_tags_equal(apple.tags.all(), ["red", "delicious"])
        # Deleting the object removes its tag links; "green" survives via pear.
        apple.delete()
        self.assert_tags_equal(self.food_model.tags.all(), ["green"])

    def test_add_queries(self):
        # The expected counts below are tied to the exact ORM calls made
        # by tags.add(); they differ across Django versions (savepoints).
        # Prefill content type cache:
        ContentType.objects.get_for_model(self.food_model)
        apple = self.food_model.objects.create(name="apple")
        # 1 query to see which tags exist
        # + 3 queries to create the tags.
        # + 6 queries to create the intermediary things (including SELECTs, to
        # make sure we don't double create.
        # + 12 on Django 1.6 for save points.
        queries = 22
        if django.VERSION < (1, 6):
            queries -= 12
        self.assertNumQueries(queries, apple.tags.add, "red", "delicious", "green")
        pear = self.food_model.objects.create(name="pear")
        # 1 query to see which tags exist
        # + 4 queries to create the intermeidary things (including SELECTs, to
        # make sure we dont't double create.
        # + 4 on Django 1.6 for save points.
        queries = 9
        if django.VERSION < (1, 6):
            queries -= 4
        self.assertNumQueries(queries, pear.tags.add, "green", "delicious")
        # Adding nothing must not hit the database at all.
        self.assertNumQueries(0, pear.tags.add)

    def test_require_pk(self):
        # An unsaved instance has no pk, so tag access must fail loudly.
        food_instance = self.food_model()
        self.assertRaises(ValueError, lambda: food_instance.tags.all())

    def test_delete_obj(self):
        # Deleting one tagged object must not disturb another's tags.
        apple = self.food_model.objects.create(name="apple")
        apple.tags.add("red")
        self.assert_tags_equal(apple.tags.all(), ["red"])
        strawberry = self.food_model.objects.create(name="strawberry")
        strawberry.tags.add("red")
        apple.delete()
        self.assert_tags_equal(strawberry.tags.all(), ["red"])

    def test_delete_bulk(self):
        # kitty deliberately shares apple's pk: bulk-deleting all foods
        # must only remove food tag links, not the pet's.
        apple = self.food_model.objects.create(name="apple")
        kitty = self.pet_model.objects.create(pk=apple.pk, name="kitty")
        apple.tags.add("red", "delicious", "fruit")
        kitty.tags.add("feline")
        self.food_model.objects.all().delete()
        self.assert_tags_equal(kitty.tags.all(), ["feline"])

    def test_lookup_by_tag(self):
        # Filtering by tags__name spans models; note the JOIN can produce
        # duplicates, hence the .distinct() check below.
        apple = self.food_model.objects.create(name="apple")
        apple.tags.add("red", "green")
        pear = self.food_model.objects.create(name="pear")
        pear.tags.add("green")
        self.assertEqual(
            list(self.food_model.objects.filter(tags__name__in=["red"])),
            [apple]
        )
        self.assertEqual(
            list(self.food_model.objects.filter(tags__name__in=["green"])),
            [apple, pear]
        )
        kitty = self.pet_model.objects.create(name="kitty")
        kitty.tags.add("fuzzy", "red")
        dog = self.pet_model.objects.create(name="dog")
        dog.tags.add("woof", "red")
        self.assertEqual(
            list(self.food_model.objects.filter(tags__name__in=["red"]).distinct()),
            [apple]
        )
        tag = self.tag_model.objects.get(name="woof")
        self.assertEqual(list(self.pet_model.objects.filter(tags__in=[tag])), [dog])
        cat = self.housepet_model.objects.create(name="cat", trained=True)
        cat.tags.add("fuzzy")
        pks = self.pet_model.objects.filter(tags__name__in=["fuzzy"])
        model_name = self.pet_model.__name__
        self.assertQuerysetEqual(pks,
                                 ['<{0}: kitty>'.format(model_name),
                                  '<{0}: cat>'.format(model_name)],
                                 ordered=False)

    def test_lookup_bulk(self):
        # bulk_lookup_kwargs should use a subquery for a queryset (1 query)
        # but fall back to listing pks for a plain list (2 queries).
        apple = self.food_model.objects.create(name="apple")
        pear = self.food_model.objects.create(name="pear")
        apple.tags.add('fruit', 'green')
        pear.tags.add('fruit', 'yummie')

        def lookup_qs():
            # New fix: directly allow WHERE object_id IN (SELECT id FROM ..)
            objects = self.food_model.objects.all()
            lookup = self.taggeditem_model.bulk_lookup_kwargs(objects)
            list(self.taggeditem_model.objects.filter(**lookup))

        def lookup_list():
            # Simulate old situation: iterate over a list.
            objects = list(self.food_model.objects.all())
            lookup = self.taggeditem_model.bulk_lookup_kwargs(objects)
            list(self.taggeditem_model.objects.filter(**lookup))

        self.assertNumQueries(1, lookup_qs)
        self.assertNumQueries(2, lookup_list)

    def test_exclude(self):
        # exclude() must also keep completely untagged objects (guava).
        apple = self.food_model.objects.create(name="apple")
        apple.tags.add("red", "green", "delicious")
        pear = self.food_model.objects.create(name="pear")
        pear.tags.add("green", "delicious")
        self.food_model.objects.create(name="guava")
        pks = self.food_model.objects.exclude(tags__name__in=["red"])
        model_name = self.food_model.__name__
        self.assertQuerysetEqual(pks,
                                 ['<{0}: pear>'.format(model_name),
                                  '<{0}: guava>'.format(model_name)],
                                 ordered=False)

    def test_similarity_by_tag(self):
        """Test that pears are more similar to apples than watermelons"""
        apple = self.food_model.objects.create(name="apple")
        apple.tags.add("green", "juicy", "small", "sour")
        pear = self.food_model.objects.create(name="pear")
        pear.tags.add("green", "juicy", "small", "sweet")
        watermelon = self.food_model.objects.create(name="watermelon")
        watermelon.tags.add("green", "juicy", "large", "sweet")
        similar_objs = apple.tags.similar_objects()
        self.assertEqual(similar_objs, [pear, watermelon])
        # similar_tags is annotated with the number of shared tags.
        self.assertEqual([obj.similar_tags for obj in similar_objs],
                         [3, 2])

    def test_tag_reuse(self):
        # Adding the same tag twice in one call must only create it once.
        apple = self.food_model.objects.create(name="apple")
        apple.tags.add("juicy", "juicy")
        self.assert_tags_equal(apple.tags.all(), ['juicy'])

    def test_query_traverse(self):
        # Reverse traversal: filter tags by the tagged model's fields.
        spot = self.pet_model.objects.create(name='Spot')
        spike = self.pet_model.objects.create(name='Spike')
        spot.tags.add('scary')
        spike.tags.add('fluffy')
        lookup_kwargs = {
            '%s__name' % _model_name(self.pet_model): 'Spot'
        }
        self.assert_tags_equal(
            self.tag_model.objects.filter(**lookup_kwargs),
            ['scary']
        )

    def test_taggeditem_unicode(self):
        # The through model's text representation names object and tag.
        apple = self.food_model.objects.create(name="apple")
        apple.tags.add("juicy")
        self.assertEqual(
            force_text(self.taggeditem_model.objects.all()[0]),
            "apple tagged with juicy"
        )

    def test_abstract_subclasses(self):
        # Tag managers declared on an abstract base work per subclass.
        p = Photo.objects.create()
        p.tags.add("outdoors", "pretty")
        self.assert_tags_equal(
            p.tags.all(),
            ["outdoors", "pretty"]
        )
        m = Movie.objects.create()
        m.tags.add("hd")
        self.assert_tags_equal(
            m.tags.all(),
            ["hd"],
        )

    def test_field_api(self):
        # Check if tag field, which simulates m2m, has django-like api.
        field = self.food_model._meta.get_field('tags')
        self.assertTrue(hasattr(field, 'rel'))
        self.assertTrue(hasattr(field.rel, 'to'))
        self.assertTrue(hasattr(field, 'related'))
        # This API has changed in Django 1.8
        # https://code.djangoproject.com/ticket/21414
        if django.VERSION >= (1, 8):
            self.assertEqual(self.food_model, field.model)
            self.assertEqual(self.tag_model, field.related.model)
        else:
            self.assertEqual(self.food_model, field.related.model)

    def test_names_method(self):
        # names() yields flat tag names instead of Tag instances.
        apple = self.food_model.objects.create(name="apple")
        apple.tags.add('green')
        apple.tags.add('red')
        self.assertEqual(list(apple.tags.names()), ['green', 'red'])

    def test_slugs_method(self):
        # slugs() yields the slugified names.
        apple = self.food_model.objects.create(name="apple")
        apple.tags.add('green and juicy')
        apple.tags.add('red')
        self.assertEqual(list(apple.tags.slugs()), ['green-and-juicy', 'red'])

    def test_serializes(self):
        # Serialization must not choke on the pseudo-m2m tags field.
        apple = self.food_model.objects.create(name="apple")
        serializers.serialize("json", (apple,))

    def test_prefetch_related(self):
        # 2 queries for the prefetch, then 0 for cached tag access.
        apple = self.food_model.objects.create(name="apple")
        apple.tags.add('1', '2')
        orange = self.food_model.objects.create(name="orange")
        orange.tags.add('2', '4')
        with self.assertNumQueries(2):
            l = list(self.food_model.objects.prefetch_related('tags').all())  # noqa: E741
        with self.assertNumQueries(0):
            foods = dict((f.name, set(t.name for t in f.tags.all())) for f in l)
            self.assertEqual(foods, {
                'orange': set(['2', '4']),
                'apple': set(['1', '2'])
            })

    def test_internal_type_is_manytomany(self):
        self.assertEqual(
            TaggableManager().get_internal_type(), 'ManyToManyField'
        )

    def test_prefetch_no_extra_join(self):
        # Guard against a regression where the prefetch SQL contained a
        # duplicated JOIN on the through table.
        apple = self.food_model.objects.create(name="apple")
        apple.tags.add('1', '2')
        with self.assertNumQueries(2):
            l = list(self.food_model.objects.prefetch_related('tags').all())  # noqa: E741
            join_clause = 'INNER JOIN "%s"' % self.taggeditem_model._meta.db_table
            self.assertEqual(connection.queries[-1]['sql'].count(join_clause), 1, connection.queries[-2:])

    @override_settings(TAGGIT_CASE_INSENSITIVE=True)
    def test_with_case_insensitive_option(self):
        # 'spain' should match the existing 'Spain' tag instead of
        # creating a lowercase duplicate.
        spain = self.tag_model.objects.create(name="Spain", slug="spain")
        orange = self.food_model.objects.create(name="orange")
        orange.tags.add('spain')
        self.assertEqual(list(orange.tags.all()), [spain])
class TaggableManagerDirectTestCase(TaggableManagerTestCase):
    """Manager tests rerun against the direct-FK through models."""
    food_model = DirectFood
    pet_model = DirectPet
    housepet_model = DirectHousePet
    taggeditem_model = TaggedFood
class TaggableManagerCustomPKTestCase(TaggableManagerTestCase):
    """Manager tests rerun against models with a character primary key."""
    food_model = CustomPKFood
    pet_model = CustomPKPet
    housepet_model = CustomPKHousePet
    taggeditem_model = TaggedCustomPKFood

    def test_require_pk(self):
        # TODO with a charfield pk, pk is never None, so taggit has no way to
        # tell if the instance is saved or not
        pass
class TaggableManagerOfficialTestCase(TaggableManagerTestCase):
    """Manager tests rerun against the custom tag and through models."""
    food_model = OfficialFood
    pet_model = OfficialPet
    housepet_model = OfficialHousePet
    taggeditem_model = OfficialThroughModel
    tag_model = OfficialTag

    def test_extra_fields(self):
        # Extra fields declared on the custom tag model (``official``)
        # are usable in lookups through the manager.
        self.tag_model.objects.create(name="red")
        self.tag_model.objects.create(name="delicious", official=True)
        apple = self.food_model.objects.create(name="apple")
        apple.tags.add("delicious", "red")
        pear = self.food_model.objects.create(name="Pear")
        pear.tags.add("delicious")
        self.assertEqual(apple, self.food_model.objects.get(tags__official=False))
class TaggableManagerInitializationTestCase(TaggableManagerTestCase):
    """Make sure manager override defaults and sets correctly."""
    food_model = Food
    custom_manager_model = CustomManager

    def test_default_manager(self):
        # Without an override, the stock _TaggableManager is used.
        self.assertEqual(self.food_model.tags.__class__, _TaggableManager)

    def test_custom_manager(self):
        # A model-supplied manager class replaces the default.
        self.assertEqual(self.custom_manager_model.tags.__class__, CustomManager.Foo)
class TaggableFormTestCase(BaseTaggingTestCase):
    """ModelForm integration: rendering, saving and re-editing tags.

    The expected HTML below uses %(help_start)s/%(help_stop)s placeholders
    filled per Django version by ``assert_form_renders``.
    """
    form_class = FoodForm
    food_model = Food

    def test_form(self):
        self.assertEqual(list(self.form_class.base_fields), ['name', 'tags'])
        f = self.form_class({'name': 'apple', 'tags': 'green, red, yummy'})
        self.assert_form_renders(f, """<tr><th><label for="id_name">Name:</label></th><td><input id="id_name" type="text" name="name" value="apple" maxlength="50" /></td></tr>
<tr><th><label for="id_tags">Tags:</label></th><td><input type="text" name="tags" value="green, red, yummy" id="id_tags" /><br />%(help_start)sA comma-separated list of tags.%(help_stop)s</td></tr>""")
        f.save()
        apple = self.food_model.objects.get(name='apple')
        self.assert_tags_equal(apple.tags.all(), ['green', 'red', 'yummy'])
        # Saving with an extended tag string updates, not duplicates.
        f = self.form_class({'name': 'apple', 'tags': 'green, red, yummy, delicious'}, instance=apple)
        f.save()
        apple = self.food_model.objects.get(name='apple')
        self.assert_tags_equal(apple.tags.all(), ['green', 'red', 'yummy', 'delicious'])
        self.assertEqual(self.food_model.objects.count(), 1)
        # tags is required by default, so a form without it is invalid.
        f = self.form_class({"name": "raspberry"})
        self.assertFalse(f.is_valid())
        # Rendering a bound instance shows tags sorted alphabetically.
        f = self.form_class(instance=apple)
        self.assert_form_renders(f, """<tr><th><label for="id_name">Name:</label></th><td><input id="id_name" type="text" name="name" value="apple" maxlength="50" /></td></tr>
<tr><th><label for="id_tags">Tags:</label></th><td><input type="text" name="tags" value="delicious, green, red, yummy" id="id_tags" /><br />%(help_start)sA comma-separated list of tags.%(help_stop)s</td></tr>""")
        # Tags containing commas or spaces are double-quoted when rendered.
        apple.tags.add('has,comma')
        f = self.form_class(instance=apple)
        self.assert_form_renders(f, """<tr><th><label for="id_name">Name:</label></th><td><input id="id_name" type="text" name="name" value="apple" maxlength="50" /></td></tr>
<tr><th><label for="id_tags">Tags:</label></th><td><input type="text" name="tags" value=""has,comma", delicious, green, red, yummy" id="id_tags" /><br />%(help_start)sA comma-separated list of tags.%(help_stop)s</td></tr>""")
        apple.tags.add('has space')
        f = self.form_class(instance=apple)
        self.assert_form_renders(f, """<tr><th><label for="id_name">Name:</label></th><td><input id="id_name" type="text" name="name" value="apple" maxlength="50" /></td></tr>
<tr><th><label for="id_tags">Tags:</label></th><td><input type="text" name="tags" value=""has space", "has,comma", delicious, green, red, yummy" id="id_tags" /><br />%(help_start)sA comma-separated list of tags.%(help_stop)s</td></tr>""")

    def test_formfield(self):
        # Custom verbose_name/help_text/blank flow through to the form field.
        tm = TaggableManager(verbose_name='categories', help_text='Add some categories', blank=True)
        ff = tm.formfield()
        self.assertEqual(ff.label, 'Categories')
        self.assertEqual(ff.help_text, 'Add some categories')
        self.assertEqual(ff.required, False)
        self.assertEqual(ff.clean(""), [])
        # Default manager: field is required, empty input is invalid.
        tm = TaggableManager()
        ff = tm.formfield()
        self.assertRaises(ValidationError, ff.clean, "")
class TaggableFormDirectTestCase(TaggableFormTestCase):
    """Form tests rerun against the direct-FK through model."""
    form_class = DirectFoodForm
    food_model = DirectFood
class TaggableFormCustomPKTestCase(TaggableFormTestCase):
    """Form tests rerun against the custom primary key models."""
    form_class = CustomPKFoodForm
    food_model = CustomPKFood
class TaggableFormOfficialTestCase(TaggableFormTestCase):
    """Form tests rerun against the custom tag/through models."""
    form_class = OfficialFoodForm
    food_model = OfficialFood
class TagStringParseTestCase(UnitTestCase):
    """
    Ported from Jonathan Buchanan's `django-tagging
    <http://django-tagging.googlecode.com/>`_

    Covers ``parse_tags`` (string -> sorted, de-duplicated tag names) and
    ``edit_string_for_tags`` (tags -> editable string, quoting as needed).
    """
    def test_with_simple_space_delimited_tags(self):
        """
        Test with simple space-delimited tags.
        """
        # Results are sorted and de-duplicated.
        self.assertEqual(parse_tags('one'), ['one'])
        self.assertEqual(parse_tags('one two'), ['one', 'two'])
        self.assertEqual(parse_tags('one two three'), ['one', 'three', 'two'])
        self.assertEqual(parse_tags('one one two two'), ['one', 'two'])

    def test_with_comma_delimited_multiple_words(self):
        """
        Test with comma-delimited multiple words.
        An unquoted comma in the input will trigger this.
        """
        self.assertEqual(parse_tags(',one'), ['one'])
        self.assertEqual(parse_tags(',one two'), ['one two'])
        self.assertEqual(parse_tags(',one two three'), ['one two three'])
        self.assertEqual(parse_tags('a-one, a-two and a-three'),
                         ['a-one', 'a-two and a-three'])

    def test_with_double_quoted_multiple_words(self):
        """
        Test with double-quoted multiple words.
        A completed quote will trigger this.  Unclosed quotes are ignored.
        """
        self.assertEqual(parse_tags('"one'), ['one'])
        self.assertEqual(parse_tags('"one two'), ['one', 'two'])
        self.assertEqual(parse_tags('"one two three'), ['one', 'three', 'two'])
        self.assertEqual(parse_tags('"one two"'), ['one two'])
        self.assertEqual(parse_tags('a-one "a-two and a-three"'),
                         ['a-one', 'a-two and a-three'])

    def test_with_no_loose_commas(self):
        """
        Test with no loose commas -- split on spaces.
        """
        self.assertEqual(parse_tags('one two "thr,ee"'), ['one', 'thr,ee', 'two'])

    def test_with_loose_commas(self):
        """
        Loose commas - split on commas
        """
        self.assertEqual(parse_tags('"one", two three'), ['one', 'two three'])

    def test_tags_with_double_quotes_can_contain_commas(self):
        """
        Double quotes can contain commas
        """
        self.assertEqual(parse_tags('a-one "a-two, and a-three"'),
                         ['a-one', 'a-two, and a-three'])
        self.assertEqual(parse_tags('"two", one, one, two, "one"'),
                         ['one', 'two'])

    def test_with_naughty_input(self):
        """
        Test with naughty input.
        """
        # Bad users! Naughty users!
        self.assertEqual(parse_tags(None), [])
        self.assertEqual(parse_tags(''), [])
        self.assertEqual(parse_tags('"'), [])
        self.assertEqual(parse_tags('""'), [])
        self.assertEqual(parse_tags('"' * 7), [])
        self.assertEqual(parse_tags(',,,,,,'), [])
        self.assertEqual(parse_tags('",",",",",",","'), [','])
        self.assertEqual(parse_tags('a-one "a-two" and "a-three'),
                         ['a-one', 'a-three', 'a-two', 'and'])

    def test_recreation_of_tag_list_string_representations(self):
        # Round-trip direction: names with spaces or commas get quoted,
        # and the output is sorted.
        plain = Tag.objects.create(name='plain')
        spaces = Tag.objects.create(name='spa ces')
        comma = Tag.objects.create(name='com,ma')
        self.assertEqual(edit_string_for_tags([plain]), 'plain')
        self.assertEqual(edit_string_for_tags([plain, spaces]), '"spa ces", plain')
        self.assertEqual(edit_string_for_tags([plain, spaces, comma]), '"com,ma", "spa ces", plain')
        self.assertEqual(edit_string_for_tags([plain, comma]), '"com,ma", plain')
        self.assertEqual(edit_string_for_tags([comma, spaces]), '"com,ma", "spa ces"')
@skipIf(django.VERSION < (1, 7), "not relevant for Django < 1.7")
class DeconstructTestCase(UnitTestCase):
    """Migrations support: deconstruct() must round-trip the kwargs."""

    def test_deconstruct_kwargs_kept(self):
        # Rebuilding the manager from its deconstructed args must keep
        # the custom through model and the target reference.
        instance = TaggableManager(through=OfficialThroughModel, to='dummy.To')
        name, path, args, kwargs = instance.deconstruct()
        new_instance = TaggableManager(*args, **kwargs)
        self.assertEqual('tests.OfficialThroughModel', new_instance.rel.through)
        self.assertEqual('dummy.To', new_instance.rel.to)
@skipUnless(django.VERSION < (1, 7), "test only applies to 1.6 and below")
class SouthSupportTests(TestCase):
    """Importing taggit.migrations without South configured must raise a
    helpful ImproperlyConfigured error mentioning the setting to use."""

    def test_import_migrations_module(self):
        try:
            from taggit.migrations import __doc__  # noqa
        except ImproperlyConfigured as e:
            exception = e
        else:
            # Previously a silent fall-through here left ``exception``
            # unbound, turning an unexpected success into a NameError.
            self.fail("ImproperlyConfigured was not raised")
        self.assertIn("SOUTH_MIGRATION_MODULES", exception.args[0])
class InheritedPrefetchTests(TestCase):
    """Tags declared on a parent model must work through multi-table
    inheritance, with and without prefetch_related."""

    def test_inherited_tags_with_prefetch(self):
        child = Child()
        child.save()
        child.tags.add('tag 1', 'tag 2', 'tag 3', 'tag 4')
        child = Child.objects.get()
        no_prefetch_tags = child.tags.all()
        # ``assertEquals`` is a deprecated alias; use ``assertEqual``.
        self.assertEqual(4, no_prefetch_tags.count())
        child = Child.objects.prefetch_related('tags').get()
        prefetch_tags = child.tags.all()
        self.assertEqual(4, prefetch_tags.count())
        # Prefetched and lazily-fetched tag sets must agree.
        self.assertEqual(set([t.name for t in no_prefetch_tags]),
                         set([t.name for t in prefetch_tags]))
| |
# Copyright 2014 Alcatel-Lucent USA Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Ronak Shah, Aniket Dandekar, Nuage Networks, Alcatel-Lucent USA Inc.
import contextlib
import os
import mock
from oslo.config import cfg
from webob import exc
from neutron.extensions import external_net
from neutron.extensions import portbindings
from neutron.plugins.nuage import extensions
from neutron.plugins.nuage import plugin as nuage_plugin
from neutron.tests.unit import _test_extension_portbindings as test_bindings
from neutron.tests.unit.nuage import fake_nuageclient
from neutron.tests.unit import test_db_plugin
from neutron.tests.unit import test_extension_extraroute as extraroute_test
from neutron.tests.unit import test_l3_plugin
# Filesystem path holding the Nuage API extension definitions.
API_EXT_PATH = os.path.dirname(extensions.__file__)
FAKE_DEFAULT_ENT = 'default'
# Dotted module path of the plugin under test.
NUAGE_PLUGIN_PATH = 'neutron.plugins.nuage.plugin'
# Fake VSD connection settings passed to the FakeNuageClient stub.
FAKE_SERVER = '1.1.1.1'
FAKE_SERVER_AUTH = 'user:pass'
FAKE_SERVER_SSL = False
FAKE_BASE_URI = '/base/'
FAKE_AUTH_RESOURCE = '/auth'
FAKE_ORGANIZATION = 'fake_org'
# Full class path used by the DB-plugin test harness to load the plugin.
_plugin_name = ('%s.NuagePlugin' % NUAGE_PLUGIN_PATH)
class NuagePluginV2TestCase(test_db_plugin.NeutronDbPluginV2TestCase):
    """Base test case that boots the Nuage plugin with a fake VSD client.

    ``nuageclient_init`` is patched so the plugin talks to
    FakeNuageClient instead of a real backend; the assertion helpers are
    shared by the L3/floating-IP test subclasses below.
    """

    def setUp(self, plugin=_plugin_name,
              ext_mgr=None, service_plugins=None):

        def mock_nuageClient_init(self):
            # Replacement for NuagePlugin.nuageclient_init: wire up the
            # fake client with the module-level fake connection settings.
            server = FAKE_SERVER
            serverauth = FAKE_SERVER_AUTH
            serverssl = FAKE_SERVER_SSL
            base_uri = FAKE_BASE_URI
            auth_resource = FAKE_AUTH_RESOURCE
            organization = FAKE_ORGANIZATION
            self.nuageclient = None
            self.nuageclient = fake_nuageclient.FakeNuageClient(server,
                                                                base_uri,
                                                                serverssl,
                                                                serverauth,
                                                                auth_resource,
                                                                organization)

        # Patch must be active while the parent setUp instantiates the
        # plugin, otherwise a real VSD connection would be attempted.
        with mock.patch.object(nuage_plugin.NuagePlugin,
                               'nuageclient_init', new=mock_nuageClient_init):
            cfg.CONF.set_override('api_extensions_path',
                                  API_EXT_PATH)
            super(NuagePluginV2TestCase, self).setUp(plugin=plugin,
                                                     ext_mgr=ext_mgr)

    def _assert_no_assoc_fip(self, fip):
        """Assert the floating IP *fip* is not associated with any port."""
        body = self._show('floatingips',
                          fip['floatingip']['id'])
        self.assertIsNone(body['floatingip']['port_id'])
        self.assertIsNone(
            body['floatingip']['fixed_ip_address'])

    def _associate_and_assert_fip(self, fip, port, allow=True):
        """Associate *fip* with *port*.

        With ``allow=True`` the update must succeed and the router id is
        returned; otherwise the API is expected to fail with a 500.
        """
        port_id = port['port']['id']
        ip_address = (port['port']['fixed_ips']
                      [0]['ip_address'])
        if allow:
            body = self._update(
                'floatingips', fip['floatingip']['id'],
                {'floatingip': {'port_id': port_id}})
            self.assertEqual(
                body['floatingip']['port_id'], port_id)
            self.assertEqual(
                body['floatingip']['fixed_ip_address'],
                ip_address)
            return body['floatingip']['router_id']
        else:
            code = exc.HTTPInternalServerError.code
            self._update(
                'floatingips', fip['floatingip']['id'],
                {'floatingip': {'port_id': port_id}},
                expected_code=code)

    def _test_floatingip_update_different_router(self):
        # Two private subnets on different routers sharing one public
        # subnet: re-associating a fip across routers must be rejected
        # until the first association is removed.
        with contextlib.nested(self.subnet(cidr='10.0.0.0/24'),
                               self.subnet(cidr='10.0.1.0/24')) as (
                                   s1, s2):
            with contextlib.nested(self.port(subnet=s1),
                                   self.port(subnet=s2)) as (p1, p2):
                private_sub1 = {'subnet':
                                {'id':
                                 p1['port']['fixed_ips'][0]['subnet_id']}}
                private_sub2 = {'subnet':
                                {'id':
                                 p2['port']['fixed_ips'][0]['subnet_id']}}
                with self.subnet(cidr='12.0.0.0/24') as public_sub:
                    with contextlib.nested(
                            self.floatingip_no_assoc_with_public_sub(
                                private_sub1, public_sub=public_sub),
                            self.floatingip_no_assoc_with_public_sub(
                                private_sub2, public_sub=public_sub)) as (
                                    (fip1, r1), (fip2, r2)):
                        self._assert_no_assoc_fip(fip1)
                        self._assert_no_assoc_fip(fip2)
                        fip1_r1_res = self._associate_and_assert_fip(fip1, p1)
                        self.assertEqual(fip1_r1_res, r1['router']['id'])
                        # The following operation will associate the floating
                        # ip to a different router and should fail
                        self._associate_and_assert_fip(fip1, p2, allow=False)
                        # disassociate fip1
                        self._update(
                            'floatingips', fip1['floatingip']['id'],
                            {'floatingip': {'port_id': None}})
                        fip2_r2_res = self._associate_and_assert_fip(fip2, p2)
                        self.assertEqual(fip2_r2_res, r2['router']['id'])

    def _test_network_update_external_failure(self):
        # A network serving as a router's external gateway cannot have
        # its external flag cleared; the plugin returns a 500.
        with self.router() as r:
            with self.subnet() as s1:
                self._set_net_external(s1['subnet']['network_id'])
                self._add_external_gateway_to_router(
                    r['router']['id'],
                    s1['subnet']['network_id'])
                self._update('networks', s1['subnet']['network_id'],
                             {'network': {external_net.EXTERNAL: False}},
                             expected_code=exc.HTTPInternalServerError.code)
                self._remove_external_gateway_from_router(
                    r['router']['id'],
                    s1['subnet']['network_id'])
class TestNuageBasicGet(NuagePluginV2TestCase,
                        test_db_plugin.TestBasicGet):
    """Stock basic-GET tests run against the Nuage plugin."""
    pass
class TestNuageV2HTTPResponse(NuagePluginV2TestCase,
                              test_db_plugin.TestV2HTTPResponse):
    """Stock HTTP response-code tests run against the Nuage plugin."""
    pass
class TestNuageNetworksV2(NuagePluginV2TestCase,
                          test_db_plugin.TestNetworksV2):
    """Stock network CRUD tests run against the Nuage plugin."""
    pass
class TestNuageSubnetsV2(NuagePluginV2TestCase,
                         test_db_plugin.TestSubnetsV2):
    """Stock subnet tests run against the Nuage plugin.

    Features the plugin does not implement (host routes, the no-gateway
    option) have their inherited tests skipped below.
    """

    def test_create_subnet_bad_hostroutes(self):
        self.skipTest("Plugin does not support Neutron Subnet host-routes")

    def test_create_subnet_inconsistent_ipv4_hostroute_dst_v6(self):
        self.skipTest("Plugin does not support Neutron Subnet host-routes")

    def test_create_subnet_inconsistent_ipv4_hostroute_np_v6(self):
        self.skipTest("Plugin does not support Neutron Subnet host-routes")

    def test_update_subnet_adding_additional_host_routes_and_dns(self):
        self.skipTest("Plugin does not support Neutron Subnet host-routes")

    def test_update_subnet_inconsistent_ipv6_hostroute_dst_v4(self):
        self.skipTest("Plugin does not support Neutron Subnet host-routes")

    def test_update_subnet_inconsistent_ipv6_hostroute_np_v4(self):
        self.skipTest("Plugin does not support Neutron Subnet host-routes")

    def test_create_subnet_with_one_host_route(self):
        self.skipTest("Plugin does not support Neutron Subnet host-routes")

    def test_create_subnet_with_two_host_routes(self):
        self.skipTest("Plugin does not support Neutron Subnet host-routes")

    def test_create_subnet_with_too_many_routes(self):
        self.skipTest("Plugin does not support Neutron Subnet host-routes")

    def test_update_subnet_route(self):
        self.skipTest("Plugin does not support Neutron Subnet host-routes")

    def test_update_subnet_route_to_None(self):
        self.skipTest("Plugin does not support Neutron Subnet host-routes")

    def test_update_subnet_route_with_too_many_entries(self):
        self.skipTest("Plugin does not support Neutron Subnet host-routes")

    def test_delete_subnet_with_route(self):
        self.skipTest("Plugin does not support Neutron Subnet host-routes")

    def test_delete_subnet_with_dns_and_route(self):
        self.skipTest("Plugin does not support Neutron Subnet host-routes")

    def test_validate_subnet_host_routes_exhausted(self):
        self.skipTest("Plugin does not support Neutron Subnet host-routes")

    def test_validate_subnet_dns_nameservers_exhausted(self):
        self.skipTest("Plugin does not support Neutron Subnet host-routes")

    def test_create_subnet_with_none_gateway(self):
        self.skipTest("Plugin does not support "
                      "Neutron Subnet no-gateway option")

    def test_create_subnet_nonzero_cidr(self):
        self.skipTest("Plugin does not support "
                      "Neutron Subnet no-gateway option")

    def test_create_subnet_with_none_gateway_fully_allocated(self):
        self.skipTest("Plugin does not support Neutron "
                      "Subnet no-gateway option")

    def test_create_subnet_with_none_gateway_allocation_pool(self):
        self.skipTest("Plugin does not support Neutron "
                      "Subnet no-gateway option")
class TestNuagePluginPortBinding(NuagePluginV2TestCase,
                                 test_bindings.PortBindingsTestCase):
    """Port-binding tests; the Nuage plugin binds ports as OVS VIFs."""
    VIF_TYPE = portbindings.VIF_TYPE_OVS

    def setUp(self):
        # NOTE(review): this override only delegates to super() and could
        # be removed without changing behaviour.
        super(TestNuagePluginPortBinding, self).setUp()
class TestNuagePortsV2(NuagePluginV2TestCase,
                       test_db_plugin.TestPortsV2):
    """Stock port CRUD tests run against the Nuage plugin."""

    def test_no_more_port_exception(self):
        # The previous skip reason ("Subnet no-gateway option") was
        # copy-pasted from the subnet tests and did not describe this
        # test, which exercises the IP-exhaustion error on port create.
        self.skipTest("Plugin does not support 'no more ports' exception")
class TestNuageL3NatTestCase(NuagePluginV2TestCase,
                             test_l3_plugin.L3NatDBIntTestCase):
    """L3/NAT tests; delegates to the shared helpers on the base class
    for the plugin-specific floating-IP and external-network behaviour."""

    def test_floatingip_update_different_router(self):
        self._test_floatingip_update_different_router()

    def test_network_update_external_failure(self):
        self._test_network_update_external_failure()
class TestNuageExtrarouteTestCase(NuagePluginV2TestCase,
                                  extraroute_test.ExtraRouteDBIntTestCase):
    """Extra-route extension tests run against the Nuage plugin."""

    def test_router_update_with_dup_destination_address(self):
        # Two routes with the same destination but different nexthops
        # must be rejected with a 400 by the plugin.
        with self.router() as r:
            with self.subnet(cidr='10.0.1.0/24') as s:
                with self.port(subnet=s, do_delete=False) as p:
                    self._router_interface_action('add',
                                                  r['router']['id'],
                                                  None,
                                                  p['port']['id'])
                    routes = [{'destination': '135.207.0.0/16',
                               'nexthop': '10.0.1.3'},
                              {'destination': '135.207.0.0/16',
                               'nexthop': '10.0.1.5'}]
                    self._update('routers', r['router']['id'],
                                 {'router': {'routes':
                                             routes}},
                                 expected_code=exc.HTTPBadRequest.code)
                    # clean-up
                    self._router_interface_action('remove',
                                                  r['router']['id'],
                                                  None,
                                                  p['port']['id'])

    def test_floatingip_update_different_router(self):
        self._test_floatingip_update_different_router()

    def test_network_update_external_failure(self):
        self._test_network_update_external_failure()
| |
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from argparse import Namespace
import os
import unittest
import uuid
import github3
import mock
import pytest
from artman.tasks.publish import github
from artman.utils.logger import logger
class CreateGitHubBranchTests(unittest.TestCase):
    """Tests for the ``github.CreateGitHubBranch`` task.

    Every test patches ``exec_command`` (so no git/shell commands actually
    run), ``os.chdir`` (so the process working directory is untouched) and
    ``uuid.uuid4`` (pinned to all zeros so the generated branch and temp
    directory names are deterministic), then verifies the exact sequence of
    commands the task would have executed.
    """

    def _assert_commands(self, exec_command, expected_commands):
        # Helper shared by all tests: assert that exec_command was invoked
        # once per expected command, in order, with exactly that argv.
        assert exec_command.call_count == len(expected_commands)
        for cmd, exec_call in zip(expected_commands, exec_command.mock_calls):
            _, args, _ = exec_call
            assert ' '.join(args[0]) == cmd

    @mock.patch.object(github.CreateGitHubBranch, 'exec_command')
    @mock.patch.object(os, 'chdir')
    @mock.patch.object(uuid, 'uuid4')
    def test_execute(self, uuid4, chdir, exec_command):
        uuid4.return_value = uuid.UUID('00000000-0000-0000-0000-000000000000')

        # Run the task. (The return value was previously bound to an unused
        # local; it is intentionally discarded here.)
        task = github.CreateGitHubBranch()
        task.execute(
            api_name='pubsub',
            api_version='v1',
            gapic_code_dir='/path/to/code',
            git_repo={
                'location': 'git@github.com:me/repo.git',
                'paths': ['generated/ruby/gapic-google-cloud-pubsub-v1'],
            },
            github={
                'username': 'test',
                'token': 'TOKEN',
            },
            language='ruby',
            output_dir='/path/to',
        )

        # List the commands that should have been executed.
        expected_commands = (
            'git clone https://test:TOKEN@github.com/me/repo.git /tmp/00000000',
            'git checkout -b pubsub-ruby-v1-00000000',
            ' '.join([
                'git rm -r --force --ignore-unmatch',
                'generated/ruby/gapic-google-cloud-pubsub-v1',
            ]),
            'cp -rf /path/to/code/. generated/ruby/gapic-google-cloud-pubsub-v1',
            'git add generated/ruby/gapic-google-cloud-pubsub-v1',
            'git commit --allow-empty -m Ruby GAPIC: Pubsub v1',  # Close enough
            'git push origin pubsub-ruby-v1-00000000',
            'rm -rf /path/to',
            'rm -rf /tmp/00000000',
        )

        # Now prove that they were.
        self._assert_commands(exec_command, expected_commands)

    @mock.patch.object(github.CreateGitHubBranch, 'exec_command')
    @mock.patch.object(os, 'chdir')
    @mock.patch.object(uuid, 'uuid4')
    def test_execute_with_non_master_base(self, uuid4, chdir, exec_command):
        uuid4.return_value = uuid.UUID('00000000-0000-0000-0000-000000000000')

        # Run the task with an explicit base branch ('pubsub' instead of
        # the default master); an extra checkout --track is expected.
        task = github.CreateGitHubBranch()
        task.execute(
            api_name='pubsub',
            api_version='v1',
            gapic_code_dir='/path/to/code',
            git_repo={
                'location': 'git@github.com:me/repo.git',
                'branch': 'pubsub',
                'paths': ['generated/ruby/gapic-google-cloud-pubsub-v1'],
            },
            github={
                'username': 'test',
                'token': 'TOKEN',
            },
            language='ruby',
            output_dir='/path/to',
        )

        # List the commands that should have been executed.
        expected_commands = (
            'git clone https://test:TOKEN@github.com/me/repo.git /tmp/00000000',
            'git checkout --track -b pubsub origin/pubsub',
            'git checkout -b pubsub-ruby-v1-00000000',
            ' '.join([
                'git rm -r --force --ignore-unmatch',
                'generated/ruby/gapic-google-cloud-pubsub-v1',
            ]),
            'cp -rf /path/to/code/. generated/ruby/gapic-google-cloud-pubsub-v1',
            'git add generated/ruby/gapic-google-cloud-pubsub-v1',
            'git commit --allow-empty -m Ruby GAPIC: Pubsub v1',
            'git push origin pubsub-ruby-v1-00000000',
            'rm -rf /path/to',
            'rm -rf /tmp/00000000',
        )

        # Now prove that they were.
        self._assert_commands(exec_command, expected_commands)

    @mock.patch.object(github.CreateGitHubBranch, 'exec_command')
    @mock.patch.object(os, 'chdir')
    @mock.patch.object(uuid, 'uuid4')
    def test_execute_with_grpc(self, uuid4, chdir, exec_command):
        uuid4.return_value = uuid.UUID('00000000-0000-0000-0000-000000000000')

        # Run the task with a grpc code dir; the 'grpc' artifact path entry
        # should produce a second rm/cp/add command group.
        task = github.CreateGitHubBranch()
        task.execute(
            api_name='pubsub',
            api_version='v1',
            gapic_code_dir='/path/to/code',
            grpc_code_dir='/path/to/grpc_code',
            git_repo={
                'location': 'git@github.com:me/repo.git',
                'paths': [
                    'generated/python/gapic-pubsub-v1',
                    {
                        'artifact': 'grpc',
                        'dest': 'generated/python/proto-pubsub-v1',
                    },
                ],
            },
            github={
                'username': 'test',
                'token': 'TOKEN',
            },
            language='python',
            output_dir='/path/to',
        )

        # List the commands that should have been executed.
        expected_commands = (
            'git clone https://test:TOKEN@github.com/me/repo.git /tmp/00000000',
            'git checkout -b pubsub-python-v1-00000000',
            ' '.join([
                'git rm -r --force --ignore-unmatch',
                'generated/python/gapic-pubsub-v1',
            ]),
            'cp -rf /path/to/code/. generated/python/gapic-pubsub-v1',
            'git add generated/python/gapic-pubsub-v1',
            ' '.join([
                'git rm -r --force --ignore-unmatch',
                'generated/python/proto-pubsub-v1',
            ]),
            'cp -rf /path/to/grpc_code/. generated/python/proto-pubsub-v1',
            'git add generated/python/proto-pubsub-v1',
            'git commit --allow-empty -m Python GAPIC: Pubsub v1',
            'git push origin pubsub-python-v1-00000000',
            'rm -rf /path/to',
            'rm -rf /tmp/00000000',
        )

        # Now prove that they were.
        self._assert_commands(exec_command, expected_commands)

    @mock.patch.object(github.CreateGitHubBranch, 'exec_command')
    @mock.patch.object(os, 'chdir')
    @mock.patch.object(uuid, 'uuid4')
    def test_execute_with_grpc_explicit_src(self, uuid4, chdir, exec_command):
        uuid4.return_value = uuid.UUID('00000000-0000-0000-0000-000000000000')

        # Run the task with explicit 'src' subdirectories for both path
        # entries; the cp commands should copy from those subdirectories.
        task = github.CreateGitHubBranch()
        task.execute(
            api_name='pubsub',
            api_version='v1',
            gapic_code_dir='/path/to/code',
            grpc_code_dir='/path/to/grpc_code',
            git_repo={
                'location': 'git@github.com:me/repo.git',
                'paths': [{
                    'src': 'gapic',
                    'dest': 'generated/python/gapic-pubsub-v1',
                }, {
                    'artifact': 'grpc',
                    'src': 'proto',
                    'dest': 'generated/python/proto-pubsub-v1',
                }],
            },
            github={
                'username': 'test',
                'token': 'TOKEN',
            },
            language='python',
            output_dir='/path/to',
        )

        # List the commands that should have been executed.
        expected_commands = (
            'git clone https://test:TOKEN@github.com/me/repo.git /tmp/00000000',
            'git checkout -b pubsub-python-v1-00000000',
            ' '.join([
                'git rm -r --force --ignore-unmatch',
                'generated/python/gapic-pubsub-v1',
            ]),
            'cp -rf /path/to/code/gapic/. generated/python/gapic-pubsub-v1',
            'git add generated/python/gapic-pubsub-v1',
            ' '.join([
                'git rm -r --force --ignore-unmatch',
                'generated/python/proto-pubsub-v1',
            ]),
            'cp -rf /path/to/grpc_code/proto/. generated/python/proto-pubsub-v1',
            'git add generated/python/proto-pubsub-v1',
            'git commit --allow-empty -m Python GAPIC: Pubsub v1',
            'git push origin pubsub-python-v1-00000000',
            'rm -rf /path/to',
            'rm -rf /tmp/00000000',
        )

        # Now prove that they were.
        self._assert_commands(exec_command, expected_commands)
class CreateGitHubPullRequestTests(unittest.TestCase):
    """Tests for the ``github.CreateGitHubPullRequest`` task."""

    def setUp(self):
        # Baseline keyword arguments shared by every test; individual
        # tests override entries as needed.
        self.task_kwargs = {
            'api_name': 'pubsub',
            'api_version': 'v1',
            'branch_name': 'pubsub-python-v1',
            'git_repo': {
                'location': 'git@github.com:me/repo.git',
            },
            'github': {
                'username': 'lukesneeringer',
                'token': '1335020400',
            },
            'language': 'python',
        }

    @mock.patch.object(github3, 'login')
    def test_with_ssh_repo(self, login):
        # Stub out the GitHub API client and the pull request object that
        # creating a PR will return.
        api = mock.MagicMock(spec=github3.github.GitHub)
        login.return_value = api
        expected_url = 'https://github.com/me/repo/pulls/1/'
        api.repository().create_pull.return_value = Namespace(
            html_url=expected_url)

        # Run the task and check that the resulting PR points at the
        # expected URL.
        pr = github.CreateGitHubPullRequest().execute(**self.task_kwargs)
        assert pr.html_url == expected_url

        # Verify the API interactions: login, repo lookup, PR creation.
        login.assert_called_once_with('lukesneeringer', '1335020400')
        api.repository.assert_called_with('me', 'repo')
        api.repository().create_pull.assert_called_once_with(
            base='master',
            body='This pull request was generated by artman. '
                 'Please review it thoroughly before merging.',
            head='pubsub-python-v1',
            title='Python GAPIC: Pubsub v1',
        )

    @mock.patch.object(github3, 'login')
    def test_with_http_url(self, login):
        # Stub out the GitHub API client and the pull request object.
        api = mock.MagicMock(spec=github3.github.GitHub)
        login.return_value = api
        expected_url = 'https://github.com/me/repo/pulls/1/'
        api.repository().create_pull.return_value = Namespace(
            html_url=expected_url)

        # Run the task with an http(s) repo location instead of ssh.
        kwargs = dict(self.task_kwargs)
        kwargs['git_repo'] = {
            'location': 'https://github/me/repo/',
        }
        pr = github.CreateGitHubPullRequest().execute(**kwargs)

        # The PR is still created and the owner/repo pair is still
        # extracted correctly from the URL.
        assert pr.html_url == expected_url
        api.repository.assert_called_with('me', 'repo')

    @mock.patch.object(github3, 'login')
    def test_pr_failure(self, login):
        # Simulate the GitHub API declining to create the pull request.
        api = mock.MagicMock(spec=github3.github.GitHub)
        login.return_value = api
        api.repository().create_pull.return_value = None

        # The task should surface the failure as a RuntimeError.
        with pytest.raises(RuntimeError):
            github.CreateGitHubPullRequest().execute(**self.task_kwargs)
| |
## Copyright 2004-2006 Luc Saffre
## This file is part of the Lino project.
## Lino is free software; you can redistribute it and/or modify it
## under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
## Lino is distributed in the hope that it will be useful, but WITHOUT
## ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
## or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
## License for more details.
## You should have received a copy of the GNU General Public License
## along with Lino; if not, write to the Free Software Foundation,
## Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import sys, os
import zipfile
import tempfile
#from timtools.ui import console
from timtools.oogen import elements
#from timtools.oogen.generators import OoText, OoSpreadsheet
from timtools.oogen.ifiles import IFILES
class Document:
    """Base class for an OpenOffice.org 1.x document.

    Builds the in-memory element tree (fonts, styles, auto styles, master
    styles and the body) and can serialize itself to a zipped office file
    (see :meth:`save`).

    Subclasses must override the class attributes `extension`, `mimetype`
    and `officeClass`; the ``NotImplementedError`` class objects act as
    sentinels that fail loudly if a subclass forgets to do so.
    """
    extension = NotImplementedError
    mimetype = NotImplementedError
    officeClass = NotImplementedError
    #bodyClass = NotImplementedError

    def __init__(self, filename):
        """Create an empty document that will be saved as `filename`.

        The subclass extension is appended automatically when missing.
        """
        self.body = elements.Body(self)  # self.bodyClass(self)
        self.fonts = elements.Fonts()
        self.styles = elements.Styles()
        self.autoStyles = elements.AutoStyles()
        self.masterStyles = elements.MasterStyles()
        self.createFonts()
        self.createStyles()
        self.createAutoStyles()
        self.createMasterStyles()
        self.elements = elements  # used in pds scripts
        self.tempDir = tempfile.gettempdir()
        if not filename.lower().endswith(self.extension):
            filename += self.extension
        self.filename = filename
        # One IFile instance per member file of the zip archive.
        self.ifiles = tuple([cl(self) for cl in IFILES])

    def addFont(self, **kw):
        """Create a Font declaration, register it and return it."""
        x = elements.Font(**kw)
        self.fonts.append(x)
        return x

    def createFonts(self):
        """Declare the default set of fonts used by the standard styles."""
        self.addFont(
            name="Tahoma1",
            fontFamily="Tahoma",
        )
        self.addFont(
            name="Lucida Sans Unicode",
            fontFamily="'Lucida Sans Unicode'",
            fontPitch="variable",
        )
        self.addFont(
            name="Tahoma",
            fontFamily="Tahoma",
            fontPitch="variable",
        )
        self.addFont(
            name="Courier New",
            fontFamily="'Courier New'",
            fontFamilyGeneric="modern",
            fontPitch="fixed",
        )
        self.addFont(
            name="Times New Roman",
            fontFamily="'Times New Roman'",
            fontFamilyGeneric="roman",
            fontPitch="variable",
        )
        self.addFont(
            name="Arial",
            fontFamily="Arial",
            fontFamilyGeneric="swiss",
            fontPitch="variable",
        )

    def addStyle(self, **kw):
        """Create a named Style, register it and return it."""
        s = elements.Style(**kw)
        self.styles.append(s)
        return s

    def addAutoStyle(self, **kw):
        """Create an automatic Style, register it and return it."""
        s = elements.Style(**kw)
        self.autoStyles.append(s)
        return s

    def getStyle(self, name, family):
        """Look up a style by (name, family), automatic styles first."""
        try:
            return self.autoStyles.peek(name, family)
        except elements.InvalidRequest:
            return self.styles.peek(name, family)

    def createStyles(self):
        """Populate the default named styles (paragraph, table-cell,
        number/currency data styles and heading/result cell styles)."""
        s = elements.DefaultStyle(family="paragraph")
        s.append(elements.Properties(useWindowFontColor=True,
                                     fontName="Times New Roman",
                                     fontSize="12pt",
                                     language="en", country="US",
                                     tabStopDistance="1.251cm",
                                     writingMode="page",
                                     hyphenate=False,
                                     # NOTE(review): the "hypenation*"
                                     # spellings below look like typos for
                                     # "hyphenation*" -- confirm against the
                                     # elements module's attribute-name
                                     # mapping before renaming them.
                                     hypenationRemainCharCount=2,
                                     hypenationPushCharCount=2,
                                     hypenationLadderCount="no-limit",
                                     textAutospace="ideograph-alpha",
                                     punctuationWrap="hanging",
                                     lineBreak="strict",
                                     ))
        self.styles.append(s)
        s = elements.Style(name="Standard",
                           family="paragraph", className="text")
        self.styles.append(s)
        s = elements.Style(name="Text body",
                           family="paragraph",
                           parentStyleName="Standard",
                           className="text")
        s.append(elements.Properties(marginTop="0cm",
                                     marginBottom="0.212cm"))
        self.styles.append(s)
        s = elements.DefaultStyle(family="table-cell")
        s.append(elements.Properties(decimalPlaces=2,
                                     fontName="Arial",
                                     language="en", country="US",
                                     tabStopDistance="1.25cm"))
        self.styles.append(s)
        s = elements.NumberStyle(name="N0", family="data-style")
        s.append(elements.Number(minIntegerDigits=1))
        self.styles.append(s)
        s = elements.CurrencyStyle(name="N106P0",
                                   family="data-style",
                                   volatile=True)
        s.append(elements.Number(decimalPlaces=2, minIntegerDigits=1,
                                 grouping=True))
        s.append(elements.Text("\n"))
        s.append(elements.CurrencySymbol("EUR", language="fr", country="BE"))
        self.styles.append(s)
        s = elements.CurrencyStyle(name="N106",
                                   family="data-style", volatile=True)
        s.append(elements.Number(decimalPlaces=2,
                                 minIntegerDigits=1,
                                 grouping=True))
        s.append(elements.Text(""))
        s.append(elements.CurrencySymbol(
            "EUR", language="fr", country="BE"))
        self.styles.append(s)
        self.styles.append(elements.Style(
            name="Default", family="table-cell", volatile=True))
        s = elements.Style(
            name="Result", family="table-cell",
            parentStyleName="Default")
        s.append(elements.Properties(
            fontStyle="italic",
            textUnderline="single",
            textUnderlineColor="font-color",
            fontWeight="bold"))
        self.styles.append(s)
        self.styles.append(elements.Style(
            name="Result2", family="table-cell",
            parentStyleName="Default", dataStyle="N106"))
        s = elements.Style(
            name="Heading", family="table-cell",
            parentStyleName="Default")
        s.append(elements.Properties(
            textAlign="center", textAlignSource="fix",
            fontSize="16pt", fontStyle="italic",
            fontWeight="bold"))
        self.styles.append(s)
        s = elements.Style(
            name="Heading1", family="table-cell",
            parentStyleName="Heading")
        s.append(elements.Properties(direction="ltr",
                                     rotationAngle=90))
        self.styles.append(s)

    def setPageProperties(self, **kw):
        """Override attributes of the page properties (size, margins...)."""
        self.pageProperties.setAttribs(**kw)

    def setHeaderProperties(self, **kw):
        """Override attributes of the page-header properties."""
        self.headerProperties.setAttribs(**kw)

    def setFooterProperties(self, **kw):
        """Override attributes of the page-footer properties."""
        self.footerProperties.setAttribs(**kw)

    def createAutoStyles(self):
        """Create the automatic page-master style ("pm1") with default A4
        portrait geometry plus header and footer styles."""
        pm = elements.PageMaster(name="pm1")
        self.autoStyles.append(pm)
        #pm.append(elements.Properties(writingMode="lr-tb"))
        self.pageProperties = elements.Properties(
            pageWidth="20.999cm",
            pageHeight="29.699cm",
            numFormat="1",
            printOrientation="portrait",
            marginTop="2cm",
            marginBottom="2cm",
            marginLeft="2cm",
            marginRight="2cm",
            footnoteMaxHeight="0cm",
            writingMode="lr-tb",
        )
        pm.append(self.pageProperties)
        pm.append(elements.FootnoteSep(
            width="0.018cm", distanceBeforeSep="0.101cm",
            distanceAfterSep="0.101cm", adjustment="left",
            relWidth="25%", color="#000000"))
        h = elements.HeaderStyle()
        self.headerProperties = elements.Properties(
            minHeight="0.751cm",
            marginLeft="0cm", marginRight="0cm",
            marginBottom="0.25cm")
        h.append(self.headerProperties)
        pm.append(h)
        h = elements.FooterStyle()
        self.footerProperties = elements.Properties(
            minHeight="0.751cm", marginLeft="0cm",
            marginRight="0cm", marginBottom="0.25cm")
        h.append(self.footerProperties)
        pm.append(h)
        # Disabled draft of a second page master ("pm2") with boxed,
        # shaded header/footer; kept for reference.
        if False:
            pm = elements.PageMaster(name="pm2")
            self.autoStyles.append(pm)
            pm.append(elements.Properties(writingMode="lr-tb"))
            h = elements.HeaderStyle()
            pm.append(h)
            p = elements.Properties(
                minHeight="0.751cm", marginLeft="0cm",
                marginRight="0cm", marginBottom="0.25cm",
                border="0.088cm solid #000000",
                padding="0.018cm", backgroundColor="#c0c0c0")
            p.append(elements.BackgroundImage())
            h.append(p)
            h = elements.FooterStyle()
            pm.append(h)
            p = elements.Properties(
                minHeight="0.751cm", marginLeft="0cm",
                marginRight="0cm", marginBottom="0.25cm",
                border="0.088cm solid #000000",
                padding="0.018cm", backgroundColor="#c0c0c0")
            p.append(elements.BackgroundImage())
            h.append(p)

    def getHeader(self):
        """Return the page-header content element.

        Examples:

            h = doc.getHeader()
            h.p("This is a simple paragraph in the header")

        Writer ignores the header if it contains regions!
        How to validate this?!
        """
        return self.headerContent

    def getFooter(self):
        """Return the page-footer content element."""
        return self.footerContent

    def createMasterStyles(self):
        """Create the "Default" master page and its (empty) header and
        footer content containers."""
        mp = elements.MasterPage(name="Default", pageMasterName="pm1")
        self.masterStyles.append(mp)
        h = elements.Header(self)
        self.headerContent = h
        mp.append(h)
        #h.append(elements.P(elements.SheetName("???")))
##         if False:
##             h.append(elements.P("Here is a simple header"))
##         else:
##             h.append(elements.RegionLeft(elements.P("left header")))
##             h.append(elements.RegionCenter(elements.P("center header")))
##             h.append(elements.RegionRight(elements.P("right header")))
        mp.append(elements.HeaderLeft(self, display=False))
        f = elements.Footer(self)
        self.footerContent = f
        mp.append(f)
        #f.append(elements.P("Page ",elements.PageNumber("1")))
        mp.append(elements.FooterLeft(self, display=False))
        if False:
            mp = elements.MasterPage(name="Report",
                                     pageMasterName="pm2")
            self.masterStyles.append(mp)
            h = elements.Footer()
            mp.append(h)
            r = elements.RegionLeft(
                elements.P(elements.SheetName("???"),
                           "(",
                           elements.Title("???"), ")"))
            h.append(r)
            r = elements.RegionRight(
                elements.P(
                    elements.Date("20/05/2004",
                                  dataStyleName="N2",
                                  dateValue="0-00-00"),
                    ",",
                    elements.Time("13:59:08")
                ))
            h.append(r)

##     def report(self,**kw):
##         from timtools.reports.oo import OoReport
##         return OoReport(self,**kw)

##     def generator(self,filename=None):
##         if filename is not None:
##             for cl in (OoText,OoSpreadsheet):
##                 if filename.lower().endswith(cl.extension):
##                     return cl(self,filename)
##         if len(self.tables) == len(self.children):
##             return OoSpreadsheet(self,filename)
##         return OoText(self,filename)

    def save(self):  # ,showOutput=False):
        """Write all member files to the temp dir, then zip them into
        ``self.filename`` (the actual office file)."""
        #sess.status("Writing "+self.filename)
        for f in self.ifiles:
            f.writeFile()
        zf = zipfile.ZipFile(self.filename, 'w', zipfile.ZIP_DEFLATED)
        for f in self.ifiles:
            zf.write(os.path.join(self.tempDir, f.filename),
                     f.filename)
        zf.close()
        #sess.status()
##         if showOutput and sess.isInteractive():
##             if sys.platform == "win32":
##                 os.system("start %s" % self.filename)
##             else:
##                 sess.warning("Don't know how to start %s", \
##                              self.filename)

    def report(self, rpt, name=None, *args, **kw):
        """Render a timtools report `rpt` as a table in this document.

        `name` defaults to the report's label.
        """
        if name is None:
            name = rpt.getLabel()
        rpt.beginReport(self)
        t = self.table(name=name)
        for col in rpt.columns:
            t.column()
        l = [col.getLabel() for col in rpt.columns]
        # BUGFIX: the header and data rows must be added to the table
        # instance `t`; the original code called `self.table.headerRow`
        # and `self.table.row`, i.e. attributes of the bound *method*
        # `self.table`, which would raise AttributeError at runtime.
        t.headerRow(*l)
        for row in rpt.iterator:
            cells = rpt.processRow(self, row)
            l = []
            for c in cells:
                if c.value is None:
                    l.append("")
                else:
                    l.append(c.col.format(c.value))
            t.row(*l)
        rpt.endReport(self)
class TextDocument(Document):
    """A Writer text document (.sxw).

    Unlike a spreadsheet, a text body may freely mix paragraphs, headers
    and tables; tables created through :meth:`table` are additionally
    tracked in ``self.tables``.
    """
    extension = ".sxw"
    officeClass = "text"
    mimetype = "application/vnd.sun.xml.writer"
    #bodyClass = elements.TextBody

    def __init__(self, *args, **kw):
        Document.__init__(self, *args, **kw)
        self.tables = []

    def getTables(self):
        """Return the tables created so far, in creation order."""
        return self.tables

    def table(self, *args, **kw):
        """Add a table to the body, remember it, and return it."""
        newTable = self.body.table(*args, **kw)
        self.tables.append(newTable)
        return newTable

    def par(self, *args, **kw):
        """Add a paragraph to the body and return it."""
        return self.body.par(*args, **kw)

    def header(self, *args, **kw):
        """Add a heading to the body and return it."""
        return self.body.header(*args, **kw)
class SpreadsheetDocument(Document):
    """A Calc spreadsheet document (.sxc).

    The spreadsheet body may contain only tables, so :meth:`par` and
    :meth:`header` raise ``elements.InvalidRequest``.
    """
    extension = ".sxc"
    officeClass = "spreadsheet"
    mimetype = "application/vnd.sun.xml.calc"
    #bodyClass = elements.SpreadsheetBody

    def getTables(self):
        """Return the body's tables (the body holds nothing else)."""
        return self.body.content

    def table(self, *args, **kw):
        """Add a table (sheet) to the body and return it."""
        return self.body.table(*args, **kw)

    def par(self, *args, **kw):
        # BUGFIX: was `element.InvalidRequest` -- a NameError, since the
        # module is imported as `elements` (compare header() below).
        raise elements.InvalidRequest(
            "Spreadsheet body contains only tables")

    def header(self, *args, **kw):
        raise elements.InvalidRequest(
            "Spreadsheet body contains only tables")
| |
"""
Font Bakery CheckRunner is the driver of a font bakery suite of checks.
Separation of Concerns Disclaimer:
While created specifically for checking fonts and font-families this
module has no domain knowledge about fonts. It can be used for any kind
of (document) checking. Please keep it so. It will be valuable for other
domains as well.
Domain specific knowledge should be encoded only in the Profile (Checks,
Conditions) and MAYBE in *customized* reporters e.g. subclasses.
"""
import os
import types
from collections import OrderedDict, Counter
import importlib
import inspect
from typing import Dict, Any
from fontbakery.callable import (
FontbakeryCallable,
FontBakeryCheck,
FontBakeryCondition,
FontBakeryExpectedValue,
)
from fontbakery.message import Message
from fontbakery.profile import Profile, get_module_profile
from fontbakery.utils import is_negated
from fontbakery.errors import (
APIViolationError,
CircularDependencyError,
FailedCheckError,
FailedConditionError,
FailedDependenciesError,
MissingConditionError,
SetupError,
MissingValueError,
CircularAliasError,
NamespaceError,
ValueValidationError,
)
from fontbakery.section import Section
from fontbakery.status import (
Status,
DEBUG,
PASS,
SKIP,
INFO,
WARN,
FAIL,
ERROR,
START,
STARTCHECK,
ENDCHECK,
SECTIONSUMMARY,
END,
)
class CheckRunner:
    """Executes a Profile's checks against a set of input values.

    Resolves each check's arguments and conditions lazily (optionally
    cached) and yields protocol events (START ... STARTCHECK/ENDCHECK ...
    END) that reporters consume.
    """

    def __init__(
        self,
        profile,
        values,
        config,
        values_can_override_profile_names=True,
        use_cache=True,
    ):
        # TODO: transform all iterables that are list like to tuples
        # to make sure that they won't change anymore.
        # Also remove duplicates from list like iterables

        # Add the profile's config values "underneath" the user's config
        self.config = profile.merge_default_config(config)
        self._custom_order = config["custom_order"]
        self._explicit_checks = config["explicit_checks"]
        self._exclude_checks = config["exclude_checks"]
        # Maps each iterarg's singular name to the number of values supplied
        # under its plural name.
        self._iterargs = OrderedDict()
        for singular, plural in profile.iterargs.items():
            if plural in values:
                # freeze the iterable so it cannot change under us
                values[plural] = tuple(values[plural])
                self._iterargs[singular] = len(values[plural])
        if not values_can_override_profile_names:
            for name in values:
                if profile.has(name) and profile.get_type(name) != "expected_values":
                    # Of course values can override
                    # expected_values, that's their purpose!
                    raise SetupError(
                        f'Values entry "{name}" collides with profile'
                        f" namespace as a {profile.get_type(name)}"
                    )
        self._profile = profile
        self._profile.test_dependencies()
        valid, message = self._profile.validate_values(values)
        if not valid:
            raise ValueValidationError(
                f"Validation of expected values failed:\n" f"{message}"
            )
        self._values = values
        self.use_cache = use_cache
        # 'conditions' caches (name, used_iterargs) -> (error, value);
        # 'order' caches the computed execution order tuple.
        self._cache = {"conditions": {}, "order": None}

    def clearCache(self):
        """Drop all cached condition results (e.g. after values changed)."""
        # no need to clear 'order' cache IMO
        self._cache["conditions"] = {}

    @property
    def iterargs(self):
        """ uses the singular name as key """
        iterargs = OrderedDict()
        for name in self._iterargs:
            plural = self._profile.iterargs[name]
            iterargs[name] = tuple(self._values[plural])
        return iterargs

    @property
    def profile(self):
        """The Profile this runner executes."""
        return self._profile

    def _check_result(self, result):
        """Check that the check returned a well formed result:
        A tuple (<Status>, message)

        A boolean Status is allowed and will be transformed to:
        True <Status: PASS>, False <Status: FAIL>

        Checks will be implemented by other parties. This is to
        help implementors creating good checks, to spot erroneous
        implementations early and to make it easier to handle
        the results tuple.
        """
        if not isinstance(result, tuple):
            msg = f"Result must be a tuple but it is {type(result)}."
            return (FAIL, APIViolationError(msg, result))

        if len(result) != 2:
            msg = f"Result must have 2 items, but it has {len(result)}."
            return (FAIL, APIViolationError(msg, result))

        status, message = result
        # Allow booleans, but there's no way to issue a WARNING
        if isinstance(status, bool):
            # normalize
            status = PASS if status else FAIL
            result = (status, message)

        if not isinstance(status, Status):
            msg = (
                f"Result item `status` must be an instance of Status,"
                f" but it is {status} and its type is {type(status)}."
            )
            return (FAIL, APIViolationError(msg, result))

        return result

    def _exec_check(self, check: FontbakeryCallable, args: Dict[str, Any]):
        """Yields check sub results.

        Each check result is a tuple of: (<Status>, mixed message)
        `status`: must be an instance of Status.
        If one of the `status` entries in one of the results
        is FAIL, the whole check is considered failed.
        WARN is most likely a PASS in a non strict mode and a
        FAIL in a strict mode.

        `message`:
        * If it is an `Exception` type we expect `status`
        not to be PASS
        * If it is a `string` it's a description of what passed
        or failed.
        * we'll think of an AdvancedMessageType as well, so that
        we can connect the check result with more in depth
        knowledge from the check definition.
        """
        if check.configs:
            # Inject per-check configuration values as globals of the
            # check function.
            new_globals = {
                varname: self.config.get(check.id, {}).get(varname)
                for varname in check.configs
            }
            check.inject_globals(new_globals)

        try:
            # A check can be either a normal function that returns one Status or a
            # generator that yields one or more. The latter will return a generator
            # object that we can detect with types.GeneratorType.
            result = check(**args)  # Might raise.
            if isinstance(result, types.GeneratorType):
                # Iterate over sub-results one-by-one, list(result) would abort on
                # encountering the first exception.
                for sub_result in result:  # Might raise.
                    yield self._check_result(sub_result)
                return  # Do not fall through to rest of method.
        except Exception as e:
            error = FailedCheckError(e)
            result = (ERROR, error)

        yield self._check_result(result)

    def _evaluate_condition(self, name, iterargs, path=None):
        """Evaluate condition `name` for the given `iterargs`.

        Returns a tuple ``(error, value)`` where exactly one side is
        meaningful. `path` tracks the chain of nested condition
        evaluations to detect circular dependencies.
        """
        if path is None:
            # top level call
            path = []
        if name in path:
            dependencies = " -> ".join(path)
            msg = f'Condition "{name}" is' f" a circular dependency in {dependencies}"
            raise CircularDependencyError(msg)

        path.append(name)

        try:
            condition = self._profile.conditions[name]
        except KeyError as err:
            error = MissingConditionError(name, err)
            return error, None

        try:
            args = self._get_args(condition, iterargs, path)
        except Exception as err:
            error = FailedConditionError(condition, err)
            return error, None

        path.pop()

        try:
            return None, condition(**args)
        except Exception as err:
            error = FailedConditionError(condition, err)
            return error, None

    def _filter_condition_used_iterargs(self, name, iterargs):
        """Reduce `iterargs` to those used (transitively) by condition
        `name`, so cache keys do not depend on irrelevant iterargs."""
        allArgs = set()
        names = list(self._profile.conditions[name].args)
        while names:
            name = names.pop()
            if name in allArgs:
                continue
            allArgs.add(name)
            if name in self._profile.conditions:
                names += self._profile.conditions[name].args
        return tuple((name, value) for name, value in iterargs if name in allArgs)

    def _get_condition(self, name, iterargs, path=None):
        # conditions are evaluated lazily
        used_iterargs = self._filter_condition_used_iterargs(name, iterargs)
        key = (name, used_iterargs)
        if not self.use_cache or key not in self._cache["conditions"]:
            err, val = self._evaluate_condition(name, used_iterargs, path)
            if self.use_cache:
                self._cache["conditions"][key] = err, val
        else:
            err, val = self._cache["conditions"][key]
        return err, val

    def get(self, key, iterargs, *args):
        """Public lookup of a value/condition; *args may carry a fallback."""
        return self._get(key, iterargs, None, *args)

    def get_iterarg(self, name, index):
        """ Used by e.g. reporters """
        plural = self._profile.iterargs[name]
        return self._values[plural][index]

    def _generate_iterargs(self, requirements):
        """Yield every combination of (name, index) pairs for the given
        [(name, length), ...] requirements (cartesian product)."""
        if not requirements:
            yield tuple()
            return
        name, length = requirements[0]
        for index in range(length):
            current = (name, index)
            for tail in self._generate_iterargs(requirements[1:]):
                yield (current,) + tail

    def _derive_iterable_condition(self, name, simple=False, path=None):
        # returns a generator, which is better for memory critical situations
        # than a list containing all results of the used conditions
        condition = self._profile.conditions[name]
        iterargs = self._profile.get_iterargs(condition)

        if not iterargs:
            # without this, we would return just an empty tuple
            raise TypeError(f'Condition "{name}" uses no iterargs.')

        # like [('font', 10), ('other', 22)]
        requirements = [(singular, self._iterargs[singular]) for singular in iterargs]
        for iterargs in self._generate_iterargs(requirements):
            error, value = self._get_condition(name, iterargs, path)
            if error:
                raise error
            if simple:
                yield value
            else:
                yield (iterargs, value)

    def _get(self, name, iterargs, path, *args):
        """Resolve `name` to a value: explicit values first, then (after
        alias resolution) expected values, iterargs, conditions, derived
        iterables or config. Raises MissingValueError (or returns the
        optional fallback in *args) when nothing matches."""
        iterargsDict = dict(iterargs)
        has_fallback = bool(len(args))
        if has_fallback:
            fallback = args[0]

        # try this once before resolving aliases and once after
        if name in self._values:
            return self._values[name]

        original_name = name
        name = self._profile.resolve_alias(name)
        if name != original_name and name in self._values:
            return self._values[name]

        nametype = self._profile.get_type(name, None)

        if name in self._values:
            return self._values[name]

        if nametype == "expected_values":
            # No need to validate
            expected_value = self._profile.get(name)
            if expected_value.has_default:
                # has no default: fallback or MissingValueError
                return expected_value.default

        if nametype == "iterargs" and name in iterargsDict:
            index = iterargsDict[name]
            plural = self._profile.get(name)
            return self._values[plural][index]

        if nametype == "conditions":
            error, value = self._get_condition(name, iterargs, path)
            if error:
                raise error
            return value

        if nametype == "derived_iterables":
            condition_name, simple = self._profile.get(name)
            return self._derive_iterable_condition(condition_name, simple, path)

        if nametype == "config":
            return self.config

        if has_fallback:
            return fallback

        if original_name != name:
            report_name = f'"{original_name}" as "{name}"'
        else:
            report_name = f'"{name}"'
        raise MissingValueError(f"Value {report_name} is undefined.")

    def _get_args(self, item, iterargs, path=None):
        # iterargs can't be optional arguments yet, we wouldn't generate
        # an execution with an empty list. I don't know if that would be even
        # feasible, so I don't add this complication for the sake of clarity.
        # If this is needed for anything useful, we'll have to figure this out.
        args = {}
        for name in item.args:
            if name in args:
                continue
            try:
                args[name] = self._get(name, iterargs, path)
            except MissingValueError:
                if name not in item.optionalArgs:
                    raise
        return args

    def _get_check_dependencies(self, check, iterargs):
        """Evaluate the check's conditions and resolve its arguments.

        Returns ``(status, None)`` when the check must be skipped or
        errored, or ``(None, args)`` when it can run.
        """
        unfulfilled_conditions = []
        for condition in check.conditions:
            negate, name = is_negated(condition)
            if name in self._values:
                # this is a handy way to set flags from the outside
                err, val = None, self._values[name]
            else:
                err, val = self._get_condition(name, iterargs)
            if negate:
                val = not val
            if err:
                status = (ERROR, err)
                return (status, None)

            # An annoying FutureWarning here (Python 3.8.3) on stderr:
            # "FutureWarning: The behavior of this method will change in future
            # versions. Use specific 'len(elem)' or 'elem is not None' test instead."
            # Actually not sure how to tackle this, since val is very unspecific
            # here intentionally. Where is the documentation for the change?
            if val is None:
                bool_val = False
            else:
                try:
                    _len = len(val)
                    bool_val = not _len == 0
                except TypeError:
                    # TypeError: object of type 'bool' has no len()
                    bool_val = bool(val)

            if not bool_val:
                unfulfilled_conditions.append(condition)
        if unfulfilled_conditions:
            # This will make the check neither pass nor fail
            comma_separated = ", ".join(unfulfilled_conditions)
            status = (SKIP, f"Unfulfilled Conditions: {comma_separated}")
            return (status, None)

        try:
            args = self._get_args(check, iterargs)
            # Run the generators now, so we can test if they're empty
            for k,v in args.items():
                if inspect.isgenerator(v) or inspect.isgeneratorfunction(v):
                    args[k] = list(v)
            if all(not x for x in args.values()):
                status = (SKIP, "No applicable arguments")
                return (status, None)
            return None, args
        except Exception as error:
            status = (ERROR, FailedDependenciesError(check, error))
            return (status, None)

    def _run_check(self, check, iterargs):
        """Run one check for one iterargs combination, yielding protocol
        events: STARTCHECK, the check's sub-results (or a SKIP/ERROR
        tuple), then ENDCHECK with the summary status."""
        summary_status = None
        # A check is more than just a function, it carries
        # a lot of meta-data for us, in this case we can use
        # meta-data to learn how to call the check (via
        # configuration or inspection, where inspection would be
        # the default and configuration could be used to override
        # inspection results).

        skipped = None
        if self._profile.check_skip_filter:
            iterargsDict = {
                key: self.get_iterarg(key, index) for key, index in iterargs
            }
            accepted, message = self._profile.check_skip_filter(
                check.id, **iterargsDict
            )
            if not accepted:
                skipped = (SKIP, "Filtered: {}".format(message or "(no message)"))

        if not skipped:
            skipped, args = self._get_check_dependencies(check, iterargs)

        # FIXME: check is not a message
        # so, to use it as a message, it should have a "message-interface"
        # TODO: describe generic "message-interface"
        yield STARTCHECK, None
        if skipped is not None:
            summary_status = skipped[0]
            # `skipped` is a normal result tuple (status, message)
            # where `status` is either FAIL for unmet dependencies
            # or SKIP for unmet conditions or ERROR. A status of SKIP is
            # never a failed check.
            # ERROR is either a missing dependency or a condition that raised
            # an exception. This shouldn't happen when everyting is set up
            # correctly.
            yield skipped
        else:
            for sub_result in self._exec_check(check, args):
                status, _ = sub_result
                if summary_status is None or status >= summary_status:
                    summary_status = status
                yield sub_result
            # The only reason to yield this is to make it testable
            # that a check ran to its end, or, if we start to allow
            # nestable subchecks. Otherwise, a STARTCHECK would end the
            # previous check implicitly.
            # We can also use it to display status updates to the user.
        if summary_status is None:
            summary_status = ERROR
            yield ERROR, (f"The check {check} did not yield any status")
        elif summary_status < PASS:
            summary_status = ERROR
            # got to yield it,so we can see it in the report
            yield ERROR, (
                f"The most significant status of {check}"
                f" was only {summary_status} but"
                f" the minimum is {PASS}"
            )

        yield ENDCHECK, summary_status

    @property
    def order(self):
        """The (cached) tuple of (section, check, iterargs) identities in
        execution order."""
        order = self._cache.get("order", None)
        if order is None:
            order = []
            # section, check, iterargs = identity
            for identity in self._profile.execution_order(
                self._iterargs,
                custom_order=self._custom_order,
                explicit_checks=self._explicit_checks,
                exclude_checks=self._exclude_checks,
            ):
                order.append(identity)
            self._cache["order"] = order = tuple(order)
        return order

    def check_order(self, order):
        """
        order must be a subset of self.order
        """
        own_order = self.order
        for item in order:
            if item not in own_order:
                raise ValueError(f"Order item {item} not found.")
        return order

    def _check_protocol_generator(self, next_check_identity):
        """Run one check identity and yield full protocol events, i.e.
        (status, message, (section, check, iterargs)) triples."""
        section, check, iterargs = next_check_identity
        for status, message in self._run_check(check, iterargs):
            yield status, message, (section, check, iterargs)

    def session_protocol_generator(self, order=None):
        """Wrap the module-level session protocol around this runner."""
        order = order if order is not None else self.order
        yield from session_protocol_generator(self._check_protocol_generator, order)

    def run(self, order=None):
        """Run all checks (optionally a custom `order`), yielding events."""
        order = order if order is not None else self.order
        session_gen = self.session_protocol_generator(order)
        next_check_gen = iter(order)
        yield from drive_session_protocol(session_gen, next_check_gen)

    def run_externally_controlled(self, receive_result_fn, next_check_gen, order=None):
        """Like :meth:`run`, but the caller supplies the check identities
        via `next_check_gen` and receives events via `receive_result_fn`."""
        order = order if order is not None else self.order
        session_gen = self.session_protocol_generator(order)
        for result in drive_session_protocol(session_gen, next_check_gen):
            receive_result_fn(result)
def drive_session_protocol(session_gen, next_check_gen):
    """Pump *session_gen*, feeding it check identities from *next_check_gen*.

    Yields every result produced by the session generator. After a START
    or ENDCHECK event, the next check identity (or None, which makes the
    protocol wrap up) is sent into the generator.
    """
    # can't send anything but None on first iteration
    value = None
    try:
        while True:
            result = session_gen.send(value)
            yield result
            value = None
            if result[0] in (START, ENDCHECK):
                # get the next check, if None protocol will wrap up
                value = next(next_check_gen, None)
    except StopIteration:
        pass
def session_protocol_generator(check_protocol_generator, order):
    """Generator implementing the full check-session protocol.

    Yields ``(status, message, (section, check, iterargs))`` events:
    START first, then per check the events produced by
    *check_protocol_generator* (each check ends with ENDCHECK), then one
    SECTIONSUMMARY per section, and finally END carrying a Counter that
    summarizes the whole run.

    The caller drives it by sending the next check identity (or None to
    finish) after START and after each ENDCHECK — see
    drive_session_protocol.
    """
    # init
    # Could use self.order, but the truth is, we don't know.
    # However, self.order still should contain each check_identity
    # just to make sure we can actually run the check!
    # Let's just assume that _run_check will fail otherwise...
    sections = OrderedDict()
    next_check_identity = yield START, order, (None, None, None)
    while next_check_identity:
        for event in check_protocol_generator(next_check_identity):
            # send(check_identity) always after ENDCHECK
            next_check_identity = yield event
        # after _run_check the last status must be ENDCHECK
        status, message, (section, check, iterargs) = event
        event = None
        assert status == ENDCHECK
        # message is the summary_status of the check when status is ENDCHECK
        section_key = str(section)
        if section_key not in sections:
            sections[section_key] = ([], Counter(), section)
        section_order, section_summary, _ = sections[section_key]
        section_order.append((check, iterargs))
        # message is the summary_status of the check when status is ENDCHECK
        section_summary[message.name] += 1
    checkrun_summary = Counter()
    for _, (section_order, section_summary, section) in sections.items():
        yield SECTIONSUMMARY, (section_order, section_summary), (section, None, None)
        checkrun_summary.update(section_summary)
    yield END, checkrun_summary, (None, None, None)
def distribute_generator(gen, targets_callbacks):
    """Fan out every item of *gen* to each callback in *targets_callbacks*."""
    for value in gen:
        for callback in targets_callbacks:
            callback(value)
FILE_MODULE_NAME_PREFIX = "."


def get_module_from_file(filename):
    """Load *filename* as a Python module and return the module object.

    The module name is the file's basename with dots replaced by
    underscores, prefixed with FILE_MODULE_NAME_PREFIX so it can later be
    recognized as a file-loaded module.

    e.g. filename = 'my/path/to/file.py' -> module name '.file_py'
    """
    # the former `format(...)` wrapper was redundant: basename/replace
    # already yield a str, and the f-string formats it anyway
    basename = os.path.basename(filename).replace(".", "_")
    module_name = f"{FILE_MODULE_NAME_PREFIX}{basename}"
    module_spec = importlib.util.spec_from_file_location(module_name, filename)
    module = importlib.util.module_from_spec(module_spec)
    module_spec.loader.exec_module(module)
    # assert module.__file__ == filename
    return module
def _get_module_from_locator(module_locator):
    """Resolve a module locator dict to an imported module object."""
    name = module_locator["name"]
    if name.startswith(FILE_MODULE_NAME_PREFIX):
        # module was originally loaded from a plain file path
        return get_module_from_file(module_locator["origin"])
    # Fails with an appropriate ImportError.
    return importlib.import_module(name, package=None)
def get_profile_from_module_locator(module_locator):
    """Load the module described by *module_locator* and return its profile."""
    module = _get_module_from_locator(module_locator)
    return get_module_profile(module)
| |
#!/usr/bin/env python
# Copyright (c) 2011-2018, wradlib developers.
# Distributed under the MIT License. See LICENSE.txt for more info.
"""
Read RADOLAN and DX
^^^^^^^^^^^^^^^^^^^
Reading DX and RADOLAN data from German Weather Service
.. autosummary::
:nosignatures:
:toctree: generated/
read_dx
read_radolan_composite
get_radolan_filehandle
read_radolan_header
parse_dwd_composite_header
read_radolan_binary_array
decode_radolan_runlength_array
"""
# standard libraries
from __future__ import absolute_import
import datetime as dt
try:
from StringIO import StringIO
import io
except ImportError:
from io import StringIO # noqa
import io
import re
import warnings
# site packages
import numpy as np
from .. import util as util
# current DWD file naming pattern (2008) for example:
# raa00-dx_10488-200608050000-drs---bin
dwdpattern = re.compile('raa..-(..)[_-]([0-9]{5})-([0-9]*)-(.*?)---bin')
def _get_timestamp_from_filename(filename):
"""Helper function doing the actual work of get_dx_timestamp"""
time = dwdpattern.search(filename).group(3)
if len(time) == 10:
time = '20' + time
return dt.datetime.strptime(time, '%Y%m%d%H%M')
def get_dx_timestamp(name):
    """Converts a dx-timestamp (as part of a dx-product filename) to a
    python datetime.object.

    Parameters
    ----------
    name : string
        representing a DWD product name

    Returns
    -------
    time : timezone-aware datetime.datetime object
    """
    naive = _get_timestamp_from_filename(name)
    # DWD product stamps are given in UTC; attach the timezone explicitly
    return naive.replace(tzinfo=util.UTC())
def unpack_dx(raw):
    """Expand the DWD-DX product's bit-13 zero run-length packing.

    Each 16-bit value carries data in bits 1-12; bit 13 flags a
    compressed run of zeros whose length is stored in the data bits.
    Returns the expanded beam as a numpy array.
    """
    data_mask = 4095   # bits 1-12 hold the actual value
    zero_flag = 4096   # bit 13 marks a compressed zero run
    # indices of all compressed (zero-run) entries
    flagged = np.where(raw & zero_flag)[0]
    if flagged.size == 0:
        # no compression present; a full beam must then be 128 bins
        assert raw.size == 128
        return raw
    beam = []
    # everything before the first flag is plain data
    beam.extend(raw[:flagged[0]])
    # expand every run except the last, copying the data between runs
    for current, following in zip(flagged[:-1], flagged[1:]):
        beam.extend([0] * (raw[current] & data_mask))
        beam.extend(raw[current + 1:following])
    # expand the final run and append the trailing data
    beam.extend([0] * (raw[flagged[-1]] & data_mask))
    beam.extend(raw[flagged[-1] + 1:])
    return np.array(beam)
def parse_dx_header(header):
    """Internal function to retrieve and interpret the ASCII header of a DWD
    DX product file.

    Parameters
    ----------
    header : string
        string representation of DX header

    Returns
    -------
    out : dict
        metadata parsed from the header: producttype, datetime, radarid,
        bytes, version, cluttermap, dopplerfilter, statfilter,
        elevprofile, message
    """
    # empty container
    out = {}
    # RADOLAN product type def
    out["producttype"] = header[0:2]
    # time stamp from file header as Python datetime object
    out["datetime"] = dt.datetime.strptime(header[2:8] + header[13:17] + "00",
                                           "%d%H%M%m%y%S")
    # Make it aware of its time zone (UTC)
    out["datetime"] = out["datetime"].replace(tzinfo=util.UTC())
    # radar location ID (always 10000 for composites)
    out["radarid"] = header[8:13]
    # locate the two-letter tokens that introduce each header field
    pos_by = header.find("BY")
    pos_vs = header.find("VS")
    pos_co = header.find("CO")
    pos_cd = header.find("CD")
    pos_cs = header.find("CS")
    pos_ep = header.find("EP")
    pos_ms = header.find("MS")
    # total product length in bytes (including the header)
    out['bytes'] = int(header[pos_by + 2:pos_by + 7])
    out['version'] = header[pos_vs + 2:pos_vs + 4]
    out['cluttermap'] = int(header[pos_co + 2:pos_co + 3])
    out['dopplerfilter'] = int(header[pos_cd + 2:pos_cd + 3])
    out['statfilter'] = int(header[pos_cs + 2:pos_cs + 3])
    # eight 3-character elevation values, one per 45-degree sector
    out['elevprofile'] = [float(header[pos_ep + 2 + 3 * i:pos_ep + 2 + 3 * (i + 1)]) for i in range(8)]  # noqa
    # free-text message; its length is encoded in the 3 chars after "MS"
    out['message'] = header[pos_ms + 5:pos_ms + 5 + int(header[pos_ms + 2:pos_ms + 5])]  # noqa
    return out
def read_dx(filename):
    """Data reader for German Weather Service DX product raw radar data files.

    This product uses a simple algorithm to compress zero values to reduce data
    file size.

    Notes
    -----
    While the format appears to be well defined, there have been reports on DX-
    files that seem to produce errors. e.g. while one file usually contains a
    360 degree by 128 1km range bins, there are files, that contain 361 beams.
    Also, while usually azimuths are stored monotonously in ascending order,
    this is not guaranteed by the format. This routine does not (yet) check
    for this and directly returns the data in the order found in the file.
    If you are in doubt, check the 'azim' attribute.

    Be aware that this function does no extensive checking on its output.
    If e.g. beams contain different numbers of range bins, the resulting data
    will not be a 2-D array but a 1-D array of objects, which will most
    probably break calling code. It was decided to leave the handling of these
    (hopefully) rare events to the user, who might still be able to retrieve
    some reasonable data, instead of raising an exception, making it impossible
    to get any data from a file containing errors.

    Parameters
    ----------
    filename : string
        binary file of DX raw data

    Returns
    -------
    data : :func:`numpy:numpy.array`
        of image data [dBZ]; shape (360,128)
    attributes : dict
        dictionary of attributes - currently implemented keys:

        - 'azim' - azimuths np.array of shape (360,)
        - 'elev' - elevations (1 per azimuth); np.array of shape (360,)
        - 'clutter' - clutter mask; boolean array of same shape as `data`;
          corresponds to bit 15 set in each dataset.
        - 'bytes'- the total product length (including header).
          Apparently, this value may be off by one byte for unknown reasons
        - 'version'- a product version string - use unknown
        - 'cluttermap' - number of the (DWD internal) cluttermap used
        - 'dopplerfilter' - number of the dopplerfilter used (DWD internal)
        - 'statfilter' - number of a statistical filter used (DWD internal)
        - 'elevprofile' - as stated in the format description, this list
          indicates the elevations in the eight 45 degree sectors. These
          sectors need not start at 0 degrees north, so it is advised to
          explicitly evaluate the `elev` attribute, if elevation information
          is needed.
        - 'message' - additional text stored in the header.

    Examples
    --------
    See :ref:`/notebooks/fileio/wradlib_reading_dx.ipynb`.
    """
    # bit 14 marks the start of a new beam record
    azimuthbitmask = 2 ** (14 - 1)
    # bits 1-12 carry the azimuth/elevation payload in beam records
    databitmask = 2 ** (13 - 1) - 1
    # bit 16 marks a clutter-flagged bin
    clutterflag = 2 ** 15
    # bits 1-13 carry the reflectivity payload in data records
    dataflag = 2 ** 13 - 1
    f = get_radolan_filehandle(filename)
    # header string for later processing
    header = ''
    atend = False
    # read header
    while True:
        mychar = f.read(1)
        # 0x03 signals the end of the header but sometimes there might be
        # an additional 0x03 char after that
        if mychar == b'\x03':
            atend = True
        if mychar != b'\x03' and atend:
            break
        header += str(mychar.decode())
    attrs = parse_dx_header(header)
    # position file at end of header
    f.seek(len(header))
    # read number of bytes as declared in the header
    # intermediate fix:
    # if product length is uneven but header is even (e.g. because it has two
    # chr(3) at the end, read one byte less
    buflen = attrs['bytes'] - len(header)
    if (buflen % 2) != 0:
        # make sure that this is consistent with our assumption
        # i.e. contact DWD again, if DX files show up with uneven byte lengths
        # *and* only one 0x03 character
        # assert header[-2] == chr(3)
        buflen -= 1
    buf = f.read(buflen)
    # we can interpret the rest directly as a 1-D array of 16 bit unsigned ints
    raw = np.frombuffer(buf, dtype='uint16')
    # reading finished, close file, but only if we opened it.
    if isinstance(f, io.IOBase):
        f.close()
    # a new ray/beam starts with bit 14 set
    # careful! where always returns its results in a tuple, so in order to get
    # the indices we have to retrieve element 0 of this tuple
    newazimuths = np.where(raw == azimuthbitmask)[0]  # contact Thomas!
    # for the following calculations it is necessary to have the end of the
    # data as the last index
    newazimuths = np.append(newazimuths, len(raw))
    # initialize our list of rays/beams
    beams = []
    # initialize our list of elevations
    elevs = []
    # initialize our list of azimuths
    azims = []
    # iterate over all beams; each record is [start, azimuth, elevation, data...]
    for i in range(newazimuths.size - 1):
        # unpack zeros
        beam = unpack_dx(raw[newazimuths[i] + 3:newazimuths[i + 1]])
        beams.append(beam)
        # azimuth/elevation are stored in tenths of a degree
        elevs.append((raw[newazimuths[i] + 2] & databitmask) / 10.)
        azims.append((raw[newazimuths[i] + 1] & databitmask) / 10.)
    beams = np.array(beams)
    # attrs = {}
    attrs['elev'] = np.array(elevs)
    attrs['azim'] = np.array(azims)
    attrs['clutter'] = (beams & clutterflag) != 0
    # converting the DWD rvp6-format into dBZ data and return as numpy array
    # together with attributes
    return (beams & dataflag) * 0.5 - 32.5, attrs
def get_radolan_header_token():
    """Return dict with known header token of radolan composites

    Returns
    -------
    head : dict
        with known header token, value set to None
    """
    tokens = ('BY', 'VS', 'SW', 'PR', 'INT', 'GP', 'MS', 'LV',
              'CS', 'MX', 'BG', 'ST', 'VV', 'MF', 'QN', 'VR', 'U')
    return {token: None for token in tokens}
def get_radolan_header_token_pos(header):
    """Get Token and positions from DWD radolan header

    Parameters
    ----------
    header : string
        (ASCII header)

    Returns
    -------
    head : dictionary
        with found header tokens and positions
    """
    positions = get_radolan_header_token()
    # locate each known token (rightmost occurrence) in the header
    for token in positions:
        found = header.rfind(token)
        if found > -1:
            positions[token] = found
    # start offsets of all tokens actually present
    present = {k: v for k, v in positions.items() if v is not None}
    head = {}
    for token, pos in positions.items():
        if pos is None:
            head[token] = None
            continue
        value_start = pos + len(token)
        # a token's value ends where the next token begins
        # (or at the end of the header)
        later = [p for p in present.values() if p > pos]
        value_stop = min(later) if later else len(header)
        head[token] = (value_start, value_stop)
    return head
def parse_dwd_composite_header(header):
    """Parses the ASCII header of a DWD quantitative composite file

    Parameters
    ----------
    header : string
        (ASCII header)

    Returns
    -------
    output : dictionary
        of metadata retrieved from file header
    """
    # empty container
    out = {}
    # RADOLAN product type def
    out["producttype"] = header[0:2]
    # file time stamp as Python datetime object
    out["datetime"] = dt.datetime.strptime(header[2:8] + header[13:17] + "00",
                                           "%d%H%M%m%y%S")
    # radar location ID (always 10000 for composites)
    out["radarid"] = header[8:13]
    # get dict of header token with positions
    head = get_radolan_header_token_pos(header)
    # iterate over token and fill output dict accordingly
    for k, v in head.items():
        if v:
            if k == 'BY':
                # net payload size: product length minus header and ETX byte
                out['datasize'] = int(header[v[0]:v[1]]) - len(header) - 1
            if k == 'VS':
                out["maxrange"] = {0: "100 km and 128 km (mixed)",
                                   1: "100 km",
                                   2: "128 km",
                                   3: "150 km"}.get(int(header[v[0]:v[1]]),
                                                    "100 km")
            if k == 'SW':
                out["radolanversion"] = header[v[0]:v[1]].strip()
            if k == 'PR':
                # precision is given as an exponent, e.g. "E-01" -> 0.1
                out["precision"] = float('1' + header[v[0]:v[1]].strip())
            if k == 'INT':
                out["intervalseconds"] = int(header[v[0]:v[1]]) * 60
            if k == 'U':
                out["intervalunit"] = int(header[v[0]:v[1]])
                # unit 1 means the INT value is in days, not minutes
                if out["intervalunit"] == 1:
                    out["intervalseconds"] *= 1440
            if k == 'GP':
                dimstrings = header[v[0]:v[1]].strip().split("x")
                out["nrow"] = int(dimstrings[0])
                out["ncol"] = int(dimstrings[1])
            if k == 'BG':
                dimstrings = header[v[0]:v[1]]
                dimstrings = (dimstrings[:int(len(dimstrings) / 2)],
                              dimstrings[int(len(dimstrings) / 2):])
                out["nrow"] = int(dimstrings[0])
                out["ncol"] = int(dimstrings[1])
            if k == 'LV':
                lv = header[v[0]:v[1]].split()
                # use the builtin int: `np.int` was deprecated in NumPy 1.20
                # and removed in 1.24 (it was a plain alias of builtin int)
                out['nlevel'] = int(lv[0])
                out['level'] = np.array(lv[1:]).astype('float')
            if k == 'MS':
                locationstring = (header[v[0]:].strip().split("<")[1].
                                  split(">")[0])
                out["radarlocations"] = locationstring.split(",")
            if k == 'ST':
                locationstring = (header[v[0]:].strip().split("<")[1].
                                  split(">")[0])
                out["radardays"] = locationstring.split(",")
            if k == 'CS':
                out['indicator'] = {0: "near ground level",
                                    1: "maximum",
                                    2: "tops"}.get(int(header[v[0]:v[1]]))
            if k == 'MX':
                out['imagecount'] = int(header[v[0]:v[1]])
            if k == 'VV':
                out['predictiontime'] = int(header[v[0]:v[1]])
            if k == 'MF':
                out['moduleflag'] = int(header[v[0]:v[1]])
            if k == 'QN':
                out['quantification'] = int(header[v[0]:v[1]])
            if k == 'VR':
                out['reanalysisversion'] = header[v[0]:v[1]].strip()
    return out
def decode_radolan_runlength_line(line, attrs):
    """Decodes one line of runlength coded binary data of DWD
    composite file and returns decoded array

    Parameters
    ----------
    line : :func:`numpy:numpy.array`
        of byte values
    attrs : dict
        dictionary of attributes derived from file header

    Returns
    -------
    arr : :func:`numpy:numpy.array`
        of decoded values
    """
    # byte '0' is line number, we don't need it
    # so we start with offset byte,
    lo = 1
    byte = line[lo]
    # line empty condition, lf directly behind line number
    if byte == 10:
        return np.ones(attrs['ncol'], dtype=np.uint8) * attrs['nodataflag']
    offset = byte - 16
    # check if offset byte is 255 and take next byte(s)
    # also for the offset
    while byte == 255:
        lo += 1
        byte = line[lo]
        offset += byte - 16
    # just take the rest
    dline = line[lo + 1:]
    # this could be optimized
    # iterate over line string, until lf (10) is reached
    for lo, byte in enumerate(dline):
        if byte == 10:
            break
        # high nibble encodes the run width, low nibble the pixel value
        width = (byte & 0xF0) >> 4
        val = byte & 0x0F
        # the "offset pixel" are "not measured" values
        # so we set them to 'nodata'
        if lo == 0:
            arr = np.ones(offset, dtype=np.uint8) * attrs['nodataflag']
        arr = np.append(arr, np.ones(width, dtype=np.uint8) * val)
    # NOTE(review): if the very first data byte were lf (10), `arr` would be
    # unbound here — presumably that cannot occur in valid files; verify.
    # pad short lines with 'nodata'; truncate lines that overran ncol
    trailing = attrs['ncol'] - len(arr)
    if trailing > 0:
        arr = np.append(arr, np.ones(trailing,
                                     dtype=np.uint8) * attrs['nodataflag'])
    elif trailing < 0:
        arr = dline[:trailing]
    return arr
def read_radolan_runlength_line(fid):
    """Reads one line of runlength coded binary data of DWD
    composite file and returns it as numpy array

    Parameters
    ----------
    fid : object
        file/buffer id

    Returns
    -------
    line : :func:`numpy:numpy.array`
        of coded values, or None at the end-of-transfer marker (0x04)
    """
    raw = fid.readline()
    # check if eot
    if raw == b'\x04':
        return None
    # convert input buffer to np.uint8 array
    return np.frombuffer(raw, np.uint8).astype(np.uint8)
def decode_radolan_runlength_array(binarr, attrs):
    """Decodes the binary runlength coded section from DWD composite
    file and return decoded numpy array with correct shape

    Parameters
    ----------
    binarr : string
        Buffer
    attrs : dict
        Attribute dict of file header

    Returns
    -------
    arr : :func:`numpy:numpy.array`
        of decoded values
    """
    buf = io.BytesIO(binarr)
    # read and decode first line
    arr = decode_radolan_runlength_line(read_radolan_runlength_line(buf), attrs)
    # stack every following line below the accumulated array
    nextline = read_radolan_runlength_line(buf)
    while nextline is not None:
        decoded = decode_radolan_runlength_line(nextline, attrs)
        arr = np.vstack((arr, decoded))
        nextline = read_radolan_runlength_line(buf)
    # return upside down because first line read is top line
    return np.flipud(arr)
def read_radolan_binary_array(fid, size):
    """Read binary data from file given by filehandle

    Parameters
    ----------
    fid : object
        file handle
    size : int
        number of bytes to read

    Returns
    -------
    binarr : string
        array of binary data

    Raises
    ------
    IOError
        if fewer than *size* bytes could be read
    """
    payload = fid.read(size)
    # the handle is no longer needed once the data block is consumed
    fid.close()
    if len(payload) != size:
        raise IOError('{0}: File corruption while reading {1}! \nCould not '
                      'read enough data!'.format(__name__, fid.name))
    return payload
def get_radolan_filehandle(fname):
    """Opens radolan file and returns file handle

    Parameters
    ----------
    fname : string
        filename

    Returns
    -------
    f : object
        filehandle
    """
    gzip = util.import_optional('gzip')
    # probe as gzip first; fall back to a plain binary open when the
    # test read fails (file is not gzip compressed)
    try:
        handle = gzip.open(fname, 'rb')
        handle.read(1)
    except IOError:
        handle = open(fname, 'rb')
        handle.read(1)
    # rewind so the caller starts at the beginning of the file
    handle.seek(0, 0)
    return handle
def read_radolan_header(fid):
    """Reads radolan ASCII header and returns it as string

    Parameters
    ----------
    fid : object
        file handle

    Returns
    -------
    header : string

    Raises
    ------
    EOFError
        if the ETX terminator (0x03) is never encountered
    """
    chars = []
    while True:
        byte = fid.read(1)
        if not byte:
            raise EOFError('Unexpected EOF detected while reading '
                           'RADOLAN header')
        if byte == b'\x03':
            # ETX terminates the ASCII header
            break
        chars.append(byte.decode())
    return ''.join(chars)
def read_radolan_composite(f, missing=-9999, loaddata=True):
    """Read quantitative radar composite format of the German Weather Service

    The quantitative composite format of the DWD (German Weather Service) was
    established in the course of the
    RADOLAN project and includes several file
    types, e.g. RX, RO, RK, RZ, RP, RT, RC, RI, RG, PC, PG and many, many more.
    (see format description on the RADOLAN project homepage :cite:`DWD2009`).

    At the moment, the national RADOLAN composite is a 900 x 900 grid with 1 km
    resolution and in polar-stereographic projection. There are other grid
    resolutions for different composites (eg. PC, PG)

    Warning
    -------
    This function already evaluates and applies the so-called
    PR factor which is specified in the header section of the RADOLAN files.
    The raw values in an RY file are in the unit 0.01 mm/5min, while
    read_radolan_composite returns values in mm/5min (i. e. factor 100 higher).
    The factor is also returned as part of attrs dictionary under
    keyword "precision".

    Parameters
    ----------
    f : string or file handle
        path to the composite file or file handle
    missing : int
        value assigned to no-data cells
    loaddata : bool
        True | False, If False function returns (None, attrs)

    Returns
    -------
    output : tuple
        tuple of two items (data, attrs):
            - data : :func:`numpy:numpy.array` of shape (number of rows,
              number of columns)
            - attrs : dictionary of metadata information from the file header

    Examples
    --------
    See :ref:`/notebooks/radolan/radolan_format.ipynb`.
    """
    NODATA = missing
    mask = 0xFFF  # max value integer
    # If a file name is supplied, get a file handle
    try:
        header = read_radolan_header(f)
    except AttributeError:
        f = get_radolan_filehandle(f)
        header = read_radolan_header(f)
    attrs = parse_dwd_composite_header(header)
    if not loaddata:
        f.close()
        return None, attrs
    attrs["nodataflag"] = NODATA
    if not attrs["radarid"] == "10000":
        # message fixed: it previously rendered as
        # "...function ewradlib.io.read_RADOLAN_composit..." (stray 'e',
        # missing space, wrong function name)
        warnings.warn("WARNING: You are using function "
                      "wradlib.io.read_radolan_composite for a non "
                      "composite file.\n "
                      "This might work...but please check the validity "
                      "of the results")
    # read the actual data
    indat = read_radolan_binary_array(f, attrs['datasize'])
    if attrs['producttype'] in ['RX', 'EX', 'WX']:
        # convert to 8bit integer
        arr = np.frombuffer(indat, np.uint8).astype(np.uint8)
        # 250 is the product's no-data value, 249 flags clutter
        arr = np.where(arr == 250, NODATA, arr)
        attrs['cluttermask'] = np.where(arr == 249)[0]
    elif attrs['producttype'] in ['PG', 'PC']:
        arr = decode_radolan_runlength_array(indat, attrs)
    else:
        # convert to 16-bit integers
        arr = np.frombuffer(indat, np.uint16).astype(np.uint16)
        # evaluate bits 13, 14, 15 and 16
        attrs['secondary'] = np.where(arr & 0x1000)[0]
        nodata = np.where(arr & 0x2000)[0]
        negative = np.where(arr & 0x4000)[0]
        attrs['cluttermask'] = np.where(arr & 0x8000)[0]
        # mask out the last 4 bits
        arr &= mask
        # consider negative flag if product is RD (differences from adjustment)
        if attrs['producttype'] == 'RD':
            # NOT TESTED, YET
            arr[negative] = -arr[negative]
        # apply precision factor
        # this promotes arr to float if precision is float
        arr = arr * attrs['precision']
        # set nodata value
        arr[nodata] = NODATA
    # anyway, bring it into right shape
    arr = arr.reshape((attrs['nrow'], attrs['ncol']))
    return arr, attrs
| |
"""This module provides general utility methods."""
from collections import ChainMap
from contextlib import contextmanager
from functools import lru_cache, wraps
import locale
import logging
import os
import re
import shutil
import subprocess
import time
import threading
import sublime
from . import events
MYPY = False
if MYPY:
from typing import Iterator, List, MutableMapping, Optional, TypeVar, Union
T = TypeVar('T')
logger = logging.getLogger(__name__)
STREAM_STDOUT = 1
STREAM_STDERR = 2
STREAM_BOTH = STREAM_STDOUT + STREAM_STDERR
ANSI_COLOR_RE = re.compile(r'\033\[[0-9;]*m')
@events.on('settings_changed')
def on_settings_changed(settings, **kwargs):
    """Invalidate the memoized PATH whenever the settings change."""
    get_augmented_path.cache_clear()
@contextmanager
def print_runtime(message):
    """Context manager that prints how long its body took, in ms."""
    started = time.perf_counter()
    yield
    elapsed_ms = round((time.perf_counter() - started) * 1000)
    # the first letter of the thread name is enough to tell threads apart
    thread_initial = threading.current_thread().name[0]
    print('{} took {}ms [{}]'.format(message, elapsed_ms, thread_initial))
def show_message(message, window=None):
    """Display *message* in the SublimeLinter panel of *window*."""
    target = window if window is not None else sublime.active_window()
    target.run_command("sublime_linter_display_panel", {"msg": message})
def clear_message():
    """Hide the SublimeLinter panel in the active window."""
    active = sublime.active_window()
    active.run_command("sublime_linter_remove_panel")
def flash(view, msg):
    # type: (sublime.View, str) -> None
    """Briefly show *msg* in the status bar of the view's window."""
    target = view.window() or sublime.active_window()
    target.status_message(msg)
def distinct_until_buffer_changed(method):
    """Decorator: skip calls whose (buffer, change_count) equals the last one.

    Sublime has problems to hold the distinction between buffers and views.
    It usually emits multiple identical events if you have multiple views
    into the same buffer.
    """
    previous = None

    @wraps(method)
    def wrapper(self, view):
        nonlocal previous
        current = (view.buffer_id(), view.change_count())
        if current == previous:
            return
        previous = current
        method(self, view)

    return wrapper
def canonical_filename(view):
    """Basename of the view's file, or a placeholder for unsaved buffers."""
    filename = view.file_name()
    if filename:
        return os.path.basename(filename)
    return '<untitled {}>'.format(view.buffer_id())
def get_filename(view):
    # type: (sublime.View) -> str
    """Full path of the view's file, or a placeholder for unsaved buffers."""
    filename = view.file_name()
    if filename:
        return filename
    return '<untitled {}>'.format(view.buffer_id())
def get_syntax(view):
    """
    Return the view's syntax.

    or the syntax it is mapped to in the "syntax_map" setting.
    """
    syntax_re = re.compile(r'(?i)/([^/]+)\.(?:tmLanguage|sublime-syntax)$')
    raw_syntax = view.settings().get('syntax') or ''
    if not raw_syntax:
        return ''
    match = syntax_re.search(raw_syntax)
    if match is None:
        # unrecognized syntax path layout; fall back to the raw value
        return raw_syntax
    name = match.group(1).lower()
    from .persist import settings
    mapped = settings.get('syntax_map', {}).get(name, '').lower()
    return mapped or name
def is_lintable(view):
    """
    Return True when *view* is a buffer we may lint.

    Scratch, read-only, widget and REPL views are excluded.

    There is a bug (or feature) in the current ST3 where the Find panel
    is not marked scratch but has no window.

    There is also a bug where settings files opened from within .sublime-package
    files are not marked scratch during the initial on_modified event, so we have
    to check that a view with a filename actually exists on disk if the file
    being opened is in the Sublime Text packages directory.
    """
    if not view.window():
        return False
    if view.is_scratch() or view.is_read_only():
        return False
    if view.settings().get("repl") or view.settings().get('is_widget'):
        return False
    filename = view.file_name()
    if (
        filename and
        filename.startswith(sublime.packages_path() + os.path.sep) and
        not os.path.exists(filename)
    ):
        return False
    return True
# file/directory/environment utils
@lru_cache(maxsize=1)  # print once every time the path changes
def debug_print_env(path):
    """Log *path* at INFO level, one entry per line."""
    import textwrap
    entries = path.replace(os.pathsep, '\n')
    logger.info('PATH:\n{}'.format(textwrap.indent(entries, '    ')))
def create_environment():
    # type: () -> MutableMapping[str, str]
    """Return a dict with os.environ augmented with a better PATH.

    Platforms paths are added to PATH by getting the "paths" user settings
    for the current platform.
    """
    overlay = {'PATH': get_augmented_path()}
    return ChainMap(overlay, os.environ)
@lru_cache(maxsize=1)
def get_augmented_path():
    # type: () -> str
    """PATH with the user's configured per-platform paths prepended."""
    from . import persist
    configured = persist.settings.get('paths', {}).get(sublime.platform(), [])
    paths = [os.path.expanduser(entry) for entry in configured]  # type: List[str]
    augmented_path = os.pathsep.join(paths + [os.environ['PATH']])
    if logger.isEnabledFor(logging.INFO):
        debug_print_env(augmented_path)
    return augmented_path
def which(cmd):
    # type: (str) -> Optional[str]
    """Return the full path to an executable searching PATH."""
    search_path = get_augmented_path()
    return shutil.which(cmd, path=search_path)
def where(executable):
    # type: (str) -> Iterator[str]
    """Yield full paths to given executable."""
    for directory in get_augmented_path().split(os.pathsep):
        hit = shutil.which(executable, path=directory)
        if hit:
            yield hit
# popen utils
def check_output(cmd, cwd=None):
    """Short wrapper around subprocess.check_output."""
    logger.info('Running `{}`'.format(' '.join(cmd)))
    env = create_environment()
    try:
        raw = subprocess.check_output(
            cmd, env=env, cwd=cwd,
            stderr=subprocess.STDOUT,
            startupinfo=create_startupinfo()
        )
    except Exception as err:
        import textwrap
        # attach the (indented) captured output, if any, to the log entry
        output_ = getattr(err, 'output', '')
        if output_:
            output_ = process_popen_output(output_)
            output_ = textwrap.indent(output_, '  ')
            output_ = "\n  ...\n{}".format(output_)
        logger.warning(
            "Executing `{}` failed\n  {}{}".format(
                ' '.join(cmd), str(err), output_
            )
        )
        raise
    else:
        return process_popen_output(raw)
class popen_output(str):
    """Hybrid of a Popen process and its output.

    Small compatibility layer: It is both the decoded output
    as str and partially the Popen object.
    """
    stdout = ''  # type: Optional[str]
    stderr = ''  # type: Optional[str]
    combined_output = ''

    def __new__(cls, proc, stdout, stderr):
        decoded_out = process_popen_output(stdout) if stdout is not None else None
        decoded_err = process_popen_output(stderr) if stderr is not None else None
        combined = ''.join(part for part in (decoded_out, decoded_err) if part)
        instance = super().__new__(cls, combined)  # type: ignore
        instance.combined_output = combined
        instance.stdout = decoded_out
        instance.stderr = decoded_err
        instance.proc = proc
        instance.pid = proc.pid
        instance.returncode = proc.returncode
        return instance
def process_popen_output(output):
    """Decode *output*, normalize newlines, and strip ANSI color codes."""
    text = decode(output)
    text = text.replace('\r\n', '\n').replace('\r', '\n')
    return ANSI_COLOR_RE.sub('', text)
def decode(bytes):
    """
    Decode and return a byte string using utf8, falling back to system's encoding if that fails.

    So far we only have to do this because javac is so utterly hopeless it uses CP1252
    for its output on Windows instead of UTF8, even if the input encoding is specified as UTF8.
    Brilliant! But then what else would you expect from Oracle?
    """
    data = bytes  # keep the (interface-mandated) shadowing local
    if not data:
        return ''
    try:
        return data.decode('utf8')
    except UnicodeError:
        fallback = locale.getpreferredencoding()
        return data.decode(fallback, errors='replace')
def create_startupinfo():
    """STARTUPINFO hiding the console window on Windows; None elsewhere."""
    if os.name != 'nt':
        return None
    info = subprocess.STARTUPINFO()
    info.dwFlags |= subprocess.STARTF_USESTDHANDLES | subprocess.STARTF_USESHOWWINDOW
    info.wShowWindow = subprocess.SW_HIDE
    return info
def get_creationflags():
    """Creation flags detaching the child process group on Windows; 0 elsewhere."""
    return subprocess.CREATE_NEW_PROCESS_GROUP if os.name == 'nt' else 0
# misc utils
def ensure_list(value):
    # type: (Union[T, List[T]]) -> List[T]
    """Wrap *value* in a list unless it already is one."""
    if isinstance(value, list):
        return value
    return [value]
def load_json(*segments, from_sl_dir=False):
    """Load and decode a JSON resource addressed by path *segments*."""
    prefix = "Packages/SublimeLinter/" if from_sl_dir else ""
    resource_path = prefix + "/".join(segments)
    return sublime.decode_value(sublime.load_resource(resource_path))
def get_sl_version():
    """Version string from the package metadata, or 'unknown'."""
    try:
        metadata = load_json("package-metadata.json", from_sl_dir=True)
        return metadata.get("version")
    except Exception:
        # best effort: metadata is absent in development installs
        return "unknown"
| |
from __future__ import unicode_literals
import copy
from collections import OrderedDict
from django.apps import AppConfig
from django.apps.registry import Apps, apps as global_apps
from django.conf import settings
from django.db import models
from django.db.models.fields.proxy import OrderWrt
from django.db.models.fields.related import (
RECURSIVE_RELATIONSHIP_CONSTANT, do_pending_lookups,
)
from django.db.models.options import DEFAULT_NAMES, normalize_together
from django.utils import six
from django.utils.encoding import force_text, smart_text
from django.utils.functional import cached_property
from django.utils.module_loading import import_string
from django.utils.version import get_docs_version
class InvalidBasesError(ValueError):
    """A model's bases are invalid and cannot be resolved."""
    pass
def _get_app_label_and_model_name(model, app_label=''):
    """Return (app_label, model_name) for *model*.

    *model* may be a model class or a string, either "app_label.ModelName"
    or a bare "ModelName" (in which case *app_label* is used).
    """
    if not isinstance(model, six.string_types):
        return model._meta.app_label, model._meta.model_name
    parts = model.split('.', 1)
    if len(parts) == 2:
        return tuple(parts)
    return (app_label, parts[0])
def get_related_models_recursive(model):
    """
    Returns all models that have a direct or indirect relationship
    to the given model.

    The result is a set of (app_label, model_name) tuples; the given
    model itself is excluded.
    """
    def _related_models(m):
        # models reached via concrete relational fields, plus direct subclasses
        return [
            f.related_model for f in m._meta.get_fields(include_parents=True, include_hidden=True)
            if f.is_relation and not isinstance(f.related_model, six.string_types)
        ] + [
            subclass for subclass in m.__subclasses__()
            if issubclass(subclass, models.Model)
        ]
    seen = set()
    queue = _related_models(model)
    # breadth-first walk: extending `queue` while iterating is intentional;
    # newly discovered relatives are appended and visited later
    for rel_mod in queue:
        rel_app_label, rel_model_name = rel_mod._meta.app_label, rel_mod._meta.model_name
        if (rel_app_label, rel_model_name) in seen:
            continue
        seen.add((rel_app_label, rel_model_name))
        queue.extend(_related_models(rel_mod))
    return seen - {(model._meta.app_label, model._meta.model_name)}
class ProjectState(object):
    """
    Represents the entire project's overall state.
    This is the item that is passed around - we do it here rather than at the
    app level so that cross-app FKs/etc. resolve properly.
    """

    def __init__(self, models=None, real_apps=None):
        # Mapping of (app_label, model_name_lower) -> ModelState.
        self.models = models or {}
        # Apps to include from main registry, usually unmigrated ones
        self.real_apps = real_apps or []

    def add_model(self, model_state):
        """Register a ModelState, re-rendering it if apps is materialized."""
        app_label, model_name = model_state.app_label, model_state.name_lower
        self.models[(app_label, model_name)] = model_state
        if 'apps' in self.__dict__:  # hasattr would cache the property
            self.reload_model(app_label, model_name)

    def remove_model(self, app_label, model_name):
        """Drop a ModelState and unregister its rendered counterpart."""
        del self.models[app_label, model_name]
        if 'apps' in self.__dict__:  # hasattr would cache the property
            self.apps.unregister_model(app_label, model_name)

    def reload_model(self, app_label, model_name):
        """Re-render one model and every model related to it.

        Only does work when the `apps` registry has already been
        materialized; otherwise rendering is deferred until first access.
        """
        if 'apps' in self.__dict__:  # hasattr would cache the property
            try:
                old_model = self.apps.get_model(app_label, model_name)
            except LookupError:
                related_models = set()
            else:
                # Get all relations to and from the old model before reloading,
                # as _meta.apps may change
                related_models = get_related_models_recursive(old_model)
            # Get all outgoing references from the model to be rendered
            model_state = self.models[(app_label, model_name)]
            for name, field in model_state.fields:
                if field.is_relation:
                    if field.rel.to == RECURSIVE_RELATIONSHIP_CONSTANT:
                        continue
                    rel_app_label, rel_model_name = _get_app_label_and_model_name(field.rel.to, app_label)
                    related_models.add((rel_app_label, rel_model_name.lower()))
            # Unregister all related models
            for rel_app_label, rel_model_name in related_models:
                self.apps.unregister_model(rel_app_label, rel_model_name)
            # Unregister the current model
            self.apps.unregister_model(app_label, model_name)
            # Gather all models states of those models that will be rerendered.
            # This includes:
            # 1. The current model
            try:
                model_state = self.models[app_label, model_name]
            except KeyError:
                states_to_be_rendered = []
            else:
                states_to_be_rendered = [model_state]
            # 2. All related models of unmigrated apps
            for model_state in self.apps.real_models:
                if (model_state.app_label, model_state.name_lower) in related_models:
                    states_to_be_rendered.append(model_state)
            # 3. All related models of migrated apps
            for rel_app_label, rel_model_name in related_models:
                try:
                    model_state = self.models[rel_app_label, rel_model_name]
                except KeyError:
                    pass
                else:
                    states_to_be_rendered.append(model_state)
            # Render all models
            self.apps.render_multiple(states_to_be_rendered)

    def clone(self):
        "Returns an exact copy of this ProjectState"
        new_state = ProjectState(
            models={k: v.clone() for k, v in self.models.items()},
            real_apps=self.real_apps,
        )
        # Only clone the registry if it was already materialized; cloning it
        # lazily would force an expensive render.
        if 'apps' in self.__dict__:
            new_state.apps = self.apps.clone()
        return new_state

    @cached_property
    def apps(self):
        # Lazily rendered app registry for this state; cached on first use.
        return StateApps(self.real_apps, self.models)

    @property
    def concrete_apps(self):
        # Rebuild the registry ignoring unresolved swappable-model lookups
        # (assignment overwrites the cached_property value).
        self.apps = StateApps(self.real_apps, self.models, ignore_swappable=True)
        return self.apps

    @classmethod
    def from_apps(cls, apps):
        "Takes in an Apps and returns a ProjectState matching it"
        app_models = {}
        for model in apps.get_models(include_swapped=True):
            model_state = ModelState.from_model(model)
            app_models[(model_state.app_label, model_state.name_lower)] = model_state
        return cls(app_models)

    def __eq__(self, other):
        # Equal iff both the model keys/states and the real_apps sets match.
        if set(self.models.keys()) != set(other.models.keys()):
            return False
        if set(self.real_apps) != set(other.real_apps):
            return False
        return all(model == other.models[key] for key, model in self.models.items())

    def __ne__(self, other):
        return not (self == other)
class AppConfigStub(AppConfig):
    """
    Stubs a Django AppConfig. Only provides a label, and a dict of models.
    """
    # Not used, but required by AppConfig.__init__
    path = ''

    def __init__(self, label):
        self.label = label
        # App-label and app-name are not the same thing, so technically passing
        # in the label here is wrong. In practice, migrations don't care about
        # the app name, but we need something unique, and the label works fine.
        super(AppConfigStub, self).__init__(label, None)

    def import_models(self, all_models):
        # Bypass the real AppConfig.import_models() module import: the model
        # classes are supplied directly by the migration state machinery.
        self.models = all_models
class StateApps(Apps):
    """
    Subclass of the global Apps registry class to better handle dynamic model
    additions and removals.
    """

    def __init__(self, real_apps, models, ignore_swappable=False):
        # Any apps in self.real_apps should have all their models included
        # in the render. We don't use the original model instances as there
        # are some variables that refer to the Apps object.
        # FKs/M2Ms from real apps are also not included as they just
        # mess things up with partial states (due to lack of dependencies)
        self.real_models = []
        for app_label in real_apps:
            app = global_apps.get_app_config(app_label)
            for model in app.get_models():
                self.real_models.append(ModelState.from_model(model, exclude_rels=True))
        # Populate the app registry with a stub for each application.
        app_labels = {model_state.app_label for model_state in models.values()}
        app_configs = [AppConfigStub(label) for label in sorted(real_apps + list(app_labels))]
        super(StateApps, self).__init__(app_configs)
        self.render_multiple(list(models.values()) + self.real_models)
        # If there are some lookups left, see if we can first resolve them
        # ourselves - sometimes fields are added after class_prepared is sent
        for lookup_model, operations in self._pending_lookups.items():
            try:
                model = self.get_model(lookup_model[0], lookup_model[1])
            except LookupError:
                app_label = "%s.%s" % (lookup_model[0], lookup_model[1])
                # Unresolved swappable user model is tolerated when asked to.
                if app_label == settings.AUTH_USER_MODEL and ignore_swappable:
                    continue
                # Raise an error with a best-effort helpful message
                # (only for the first issue). Error message should look like:
                # "ValueError: Lookup failed for model referenced by
                # field migrations.Book.author: migrations.Author"
                msg = "Lookup failed for model referenced by field {field}: {model[0]}.{model[1]}"
                raise ValueError(msg.format(field=operations[0][1], model=lookup_model))
            else:
                do_pending_lookups(model)

    def render_multiple(self, model_states):
        """Render model states, retrying ones whose bases aren't ready yet."""
        # We keep trying to render the models in a loop, ignoring invalid
        # base errors, until the size of the unrendered models doesn't
        # decrease by at least one, meaning there's a base dependency loop/
        # missing base.
        unrendered_models = model_states
        while unrendered_models:
            new_unrendered_models = []
            for model in unrendered_models:
                try:
                    model.render(self)
                except InvalidBasesError:
                    new_unrendered_models.append(model)
            if len(new_unrendered_models) == len(unrendered_models):
                # No progress this pass: the remaining models can never render.
                raise InvalidBasesError(
                    "Cannot resolve bases for %r\nThis can happen if you are inheriting models from an "
                    "app with migrations (e.g. contrib.auth)\n in an app with no migrations; see "
                    "https://docs.djangoproject.com/en/%s/topics/migrations/#dependencies "
                    "for more" % (new_unrendered_models, get_docs_version())
                )
            unrendered_models = new_unrendered_models

    def clone(self):
        """
        Return a clone of this registry, mainly used by the migration framework.
        """
        clone = StateApps([], {})
        clone.all_models = copy.deepcopy(self.all_models)
        clone.app_configs = copy.deepcopy(self.app_configs)
        # No need to actually clone them, they'll never change
        clone.real_models = self.real_models
        return clone

    def register_model(self, app_label, model):
        """Insert a rendered model into the registry, creating a stub
        AppConfig for its app on first use."""
        self.all_models[app_label][model._meta.model_name] = model
        if app_label not in self.app_configs:
            self.app_configs[app_label] = AppConfigStub(app_label)
            self.app_configs[app_label].models = OrderedDict()
        self.app_configs[app_label].models[model._meta.model_name] = model
        self.clear_cache()

    def unregister_model(self, app_label, model_name):
        """Remove a model from the registry; missing models are ignored."""
        try:
            del self.all_models[app_label][model_name]
            del self.app_configs[app_label].models[model_name]
        except KeyError:
            pass
        self.clear_cache()
class ModelState(object):
    """
    Represents a Django Model. We don't use the actual Model class
    as it's not designed to have its options changed - instead, we
    mutate this one and then render it into a Model as required.
    Note that while you are allowed to mutate .fields, you are not allowed
    to mutate the Field instances inside there themselves - you must instead
    assign new ones, as these are not detached during a clone.
    """

    def __init__(self, app_label, name, fields, options=None, bases=None, managers=None):
        self.app_label = app_label
        self.name = force_text(name)
        # fields: ordered list of (name, unbound Field instance) 2-tuples.
        self.fields = fields
        self.options = options or {}
        # bases: tuple of base classes or "app_label.modelname" strings.
        self.bases = bases or (models.Model, )
        self.managers = managers or []
        # Sanity-check that fields is NOT a dict. It must be ordered.
        if isinstance(self.fields, dict):
            raise ValueError("ModelState.fields cannot be a dict - it must be a list of 2-tuples.")
        # Sanity-check that fields are NOT already bound to a model.
        for name, field in fields:
            if hasattr(field, 'model'):
                raise ValueError(
                    'ModelState.fields cannot be bound to a model - "%s" is.' % name
                )

    @cached_property
    def name_lower(self):
        # Lowercased model name; the canonical dictionary lookup key.
        return self.name.lower()

    @classmethod
    def from_model(cls, model, exclude_rels=False):
        """
        Feed me a model, get a ModelState representing it out.

        With exclude_rels=True, relational fields and field-listing options
        are dropped, producing a stub suitable for unmigrated apps.
        """
        # Deconstruct the fields
        fields = []
        for field in model._meta.local_fields:
            if getattr(field, "rel", None) and exclude_rels:
                continue
            if isinstance(field, OrderWrt):
                continue
            name, path, args, kwargs = field.deconstruct()
            field_class = import_string(path)
            try:
                fields.append((name, field_class(*args, **kwargs)))
            except TypeError as e:
                raise TypeError("Couldn't reconstruct field %s on %s.%s: %s" % (
                    name,
                    model._meta.app_label,
                    model._meta.object_name,
                    e,
                ))
        if not exclude_rels:
            for field in model._meta.local_many_to_many:
                name, path, args, kwargs = field.deconstruct()
                field_class = import_string(path)
                try:
                    fields.append((name, field_class(*args, **kwargs)))
                except TypeError as e:
                    raise TypeError("Couldn't reconstruct m2m field %s on %s: %s" % (
                        name,
                        model._meta.object_name,
                        e,
                    ))
        # Extract the options
        options = {}
        for name in DEFAULT_NAMES:
            # Ignore some special options
            if name in ["apps", "app_label"]:
                continue
            elif name in model._meta.original_attrs:
                if name == "unique_together":
                    ut = model._meta.original_attrs["unique_together"]
                    options[name] = set(normalize_together(ut))
                elif name == "index_together":
                    it = model._meta.original_attrs["index_together"]
                    options[name] = set(normalize_together(it))
                else:
                    options[name] = model._meta.original_attrs[name]
        # Force-convert all options to text_type (#23226)
        options = cls.force_text_recursive(options)
        # If we're ignoring relationships, remove all field-listing model
        # options (that option basically just means "make a stub model")
        if exclude_rels:
            for key in ["unique_together", "index_together", "order_with_respect_to"]:
                if key in options:
                    del options[key]

        def flatten_bases(model):
            # Expand abstract bases in place so only concrete classes (or
            # non-model classes) remain.
            bases = []
            for base in model.__bases__:
                if hasattr(base, "_meta") and base._meta.abstract:
                    bases.extend(flatten_bases(base))
                else:
                    bases.append(base)
            return bases

        # We can't rely on __mro__ directly because we only want to flatten
        # abstract models and not the whole tree. However by recursing on
        # __bases__ we may end up with duplicates and ordering issues, we
        # therefore discard any duplicates and reorder the bases according
        # to their index in the MRO.
        flattened_bases = sorted(set(flatten_bases(model)), key=lambda x: model.__mro__.index(x))
        # Make our record
        bases = tuple(
            (
                "%s.%s" % (base._meta.app_label, base._meta.model_name)
                if hasattr(base, "_meta") else
                base
            )
            for base in flattened_bases
        )
        # Ensure at least one base inherits from models.Model
        if not any((isinstance(base, six.string_types) or issubclass(base, models.Model)) for base in bases):
            bases = (models.Model,)
        # Constructs all managers on the model
        managers = {}

        def reconstruct_manager(mgr):
            # Rebuild a manager (or a queryset-derived manager) from its
            # deconstructed form and record it keyed by name.
            as_manager, manager_path, qs_path, args, kwargs = mgr.deconstruct()
            if as_manager:
                qs_class = import_string(qs_path)
                instance = qs_class.as_manager()
            else:
                manager_class = import_string(manager_path)
                instance = manager_class(*args, **kwargs)
            # We rely on the ordering of the creation_counter of the original
            # instance
            managers[mgr.name] = (mgr.creation_counter, instance)

        if hasattr(model, "_default_manager"):
            default_manager_name = model._default_manager.name
            # Make sure the default manager is always the first
            if model._default_manager.use_in_migrations:
                reconstruct_manager(model._default_manager)
            else:
                # Force this manager to be the first and thus default
                managers[default_manager_name] = (0, models.Manager())
            # Sort all managers by their creation counter
            for _, manager, _ in sorted(model._meta.managers):
                if manager.name == "_base_manager" or not manager.use_in_migrations:
                    continue
                reconstruct_manager(manager)
            # Sort all managers by their creation counter but take only name and
            # instance for further processing
            managers = [
                (name, instance) for name, (cc, instance) in
                sorted(managers.items(), key=lambda v: v[1])
            ]
            # A lone default plain Manager carries no state worth keeping.
            if managers == [(default_manager_name, models.Manager())]:
                managers = []
        else:
            managers = []
        # Construct the new ModelState
        return cls(
            model._meta.app_label,
            model._meta.object_name,
            fields,
            options,
            bases,
            managers,
        )

    @classmethod
    def force_text_recursive(cls, value):
        """Recursively coerce strings inside common containers to text."""
        if isinstance(value, six.string_types):
            return smart_text(value)
        elif isinstance(value, list):
            return [cls.force_text_recursive(x) for x in value]
        elif isinstance(value, tuple):
            return tuple(cls.force_text_recursive(x) for x in value)
        elif isinstance(value, set):
            return set(cls.force_text_recursive(x) for x in value)
        elif isinstance(value, dict):
            return {
                cls.force_text_recursive(k): cls.force_text_recursive(v)
                for k, v in value.items()
            }
        return value

    def construct_fields(self):
        "Deep-clone the fields using deconstruction"
        for name, field in self.fields:
            _, path, args, kwargs = field.deconstruct()
            field_class = import_string(path)
            yield name, field_class(*args, **kwargs)

    def construct_managers(self):
        "Deep-clone the managers using deconstruction"
        # Sort all managers by their creation counter
        sorted_managers = sorted(self.managers, key=lambda v: v[1].creation_counter)
        for mgr_name, manager in sorted_managers:
            as_manager, manager_path, qs_path, args, kwargs = manager.deconstruct()
            if as_manager:
                qs_class = import_string(qs_path)
                yield mgr_name, qs_class.as_manager()
            else:
                manager_class = import_string(manager_path)
                yield mgr_name, manager_class(*args, **kwargs)

    def clone(self):
        "Returns an exact copy of this ModelState"
        return self.__class__(
            app_label=self.app_label,
            name=self.name,
            fields=list(self.construct_fields()),
            options=dict(self.options),
            bases=self.bases,
            managers=list(self.construct_managers()),
        )

    def render(self, apps):
        "Creates a Model object from our current state into the given apps"
        # First, make a Meta object
        meta_contents = {'app_label': self.app_label, "apps": apps}
        meta_contents.update(self.options)
        meta = type(str("Meta"), tuple(), meta_contents)
        # Then, work out our bases
        try:
            bases = tuple(
                (apps.get_model(base) if isinstance(base, six.string_types) else base)
                for base in self.bases
            )
        except LookupError:
            raise InvalidBasesError("Cannot resolve one or more bases from %r" % (self.bases,))
        # Turn fields into a dict for the body, add other bits
        body = dict(self.construct_fields())
        body['Meta'] = meta
        body['__module__'] = "__fake__"
        # Restore managers
        body.update(self.construct_managers())
        # Then, make a Model object (apps.register_model is called in __new__)
        return type(
            str(self.name),
            bases,
            body,
        )

    def get_field_by_name(self, name):
        """Return the Field instance for *name*, or raise ValueError."""
        for fname, field in self.fields:
            if fname == name:
                return field
        raise ValueError("No field called %s on model %s" % (name, self.name))

    def __repr__(self):
        return "<ModelState: '%s.%s'>" % (self.app_label, self.name)

    def __eq__(self, other):
        # Fields compare via their deconstructed forms (name/path dropped),
        # so two independently reconstructed states can still be equal.
        return (
            (self.app_label == other.app_label) and
            (self.name == other.name) and
            (len(self.fields) == len(other.fields)) and
            all((k1 == k2 and (f1.deconstruct()[1:] == f2.deconstruct()[1:]))
                for (k1, f1), (k2, f2) in zip(self.fields, other.fields)) and
            (self.options == other.options) and
            (self.bases == other.bases) and
            (self.managers == other.managers)
        )

    def __ne__(self, other):
        return not (self == other)
| |
import logging
import os
import re
from io import BytesIO
from PIL import Image
from assets.common import replace_colors
from assets.common import unpack_color_directives
from assets.common import make_color_directives
def read_default_color(species_data):
    """Return [group, value] pairs from the first palette entry.

    species_data is a list whose first element is either a mapping of
    color-group name -> value, or a plain string (in which case there is
    no default color and [] is returned).
    """
    first = species_data[0]
    if type(first) is str:
        return []
    return [[group, first[group]] for group in first.keys()]
class Species():
    """Species lookups and full player sprite rendering, backed by an
    indexed Starbound assets database (``self.assets``)."""

    def __init__(self, assets):
        # assets exposes read()/db and factories items()/images()/frames().
        self.assets = assets
        self.starbound_folder = assets.starbound_folder
        # Shared character-generation config (personalities etc.).
        self.humanoid_config = self.assets.read("/humanoid.config",
                                                self.assets.vanilla_assets)

    def is_species(self, key):
        """Return True if the asset key names a .species file."""
        if key.endswith(".species"):
            return True
        else:
            return False

    def index_data(self, asset):
        """Build an index row (key, path, type, category, name, desc) for a
        species asset; returns None if the asset can't be read or lacks a
        "kind" key."""
        key = asset[0]
        path = asset[1]
        asset_data = self.assets.read(key, path)
        if asset_data is None:
            return
        if "kind" in asset_data:
            return (key, path, "species", "", asset_data["kind"].lower(), "")
        else:
            logging.warning("Species missing kind key: %s in %s" % (key, path))

    def get_species_list(self):
        """Return a formatted list of all species."""
        c = self.assets.db.cursor()
        c.execute("select distinct name from assets where type = 'species' order by name")
        names = [x[0] for x in c.fetchall()]
        formatted = []
        for s in names:
            if s == "dummy":
                continue
            try:
                # Capitalize the first letter for display.
                formatted.append(s[0].upper() + s[1:])
            except IndexError:
                # Empty name: keep it as-is but log for diagnosis.
                formatted.append(s)
                logging.exception("Unable to format species: %s", s)
        return formatted

    def get_species(self, name):
        """Look up a species from the index and return contents of species
        files."""
        c = self.assets.db.cursor()
        c.execute("select * from assets where type = 'species' and name = ?",
                  (name.lower(),))
        species = c.fetchone()
        if species is None:
            # species is not indexed
            logging.warning("Unable to load species: %s", name)
            return None
        species_data = self.assets.read(species[0], species[1])
        if species_data is None:
            # corrupt save, no race set
            logging.warning("No race set on player")
            return None
        else:
            # (index row, parsed species data) pair.
            return species, species_data

    def get_appearance_data(self, name, gender, key):
        """Return the per-gender appearance list for *key*, always as a
        sequence (possibly empty)."""
        species = self.get_species(name)
        # there is another json extension here where strings that have a , on
        # the end are treated as 1 item lists. there are also some species with
        # missing keys
        try:
            results = self.get_gender_data(species, gender)[key]
        except KeyError:
            return []
        if type(results) is str:
            return (results,)
        else:
            return results

    def get_facial_hair_types(self, name, gender, group):
        return self.get_appearance_data(name, gender, "facialHair")

    def get_facial_hair_groups(self, name, gender):
        return self.get_appearance_data(name, gender, "facialHairGroup")

    def get_facial_mask_types(self, name, gender, group):
        return self.get_appearance_data(name, gender, "facialMask")

    def get_facial_mask_groups(self, name, gender):
        return self.get_appearance_data(name, gender, "facialMaskGroup")

    def get_hair_types(self, name, gender, group):
        return self.get_appearance_data(name, gender, "hair")

    def get_hair_groups(self, name, gender):
        """Return hair groups, defaulting to ("hair",) when none defined."""
        groups = self.get_appearance_data(name, gender, "hairGroup")
        if len(groups) == 0:
            return ("hair",)
        else:
            return groups

    def get_personality(self):
        """Return the list of personalities from humanoid.config."""
        return self.humanoid_config["charGen"]["personalities"]

    def get_gender_data(self, species_data, gender):
        # genders[0] is male, genders[1] is female in species files.
        if gender == "male":
            return species_data[1]["genders"][0]
        else:
            return species_data[1]["genders"][1]

    def get_default_colors(self, species):
        """Return default color directive strings per body part."""
        # just use first option
        species_data = self.get_species(species)[1]

        def val(key):
            # Directive string for the first palette of *key*, or "".
            if key in species_data.keys() and species_data[key] is not None:
                default = read_default_color(species_data[key])
                if default == []:
                    return ""
                else:
                    replace = make_color_directives([default])
                    return replace
            else:
                return ""

        colors = {
            "bodyColor": val("bodyColor"),
            "undyColor": val("undyColor"),
            "hairColor": val("hairColor")
        }
        # TODO: there is an unbelievably complicated method for choosing
        # default player colors. i'm not sure if it's worth going into too much
        # considering it will only be used if a player switches species
        # it might be easier to just leave this out entirely. let user
        # add/remove their own directive colors
        directives = {
            "body": [colors["bodyColor"]],
            "emote": [colors["bodyColor"], colors["undyColor"]],
            "hair": [colors["hairColor"]],
            "facial_hair": [colors["bodyColor"]],
            "facial_mask": [colors["bodyColor"]]
        }
        return directives

    def get_preview_image(self, name, gender):
        """Return raw image data for species placeholder pic.
        I don't think this is actually used anywhere in game. Some mods don't
        include it."""
        species = self.get_species(name.lower())
        try:
            try:
                key = self.get_gender_data(species, gender)["characterImage"]
            except TypeError:
                # get_species returned None (unindexed species).
                return None
            return self.assets.read(key, species[0][1], image=True)
        except FileNotFoundError:
            # corrupt save, no race set
            logging.warning("No race set on player")
            return None

    def render_part(self, player, player_image, part, slot):
        """Lookup, crop and color given item slot and apply to player render."""
        gender = player.get_gender()
        stance = player.get_personality()
        # NOTE(review): frame_key is only bound for these three parts; any
        # other `part` value would raise NameError below — confirm callers
        # only pass "head"/"legs"/"back".
        if part == "head":
            frame_key = "head", "normal"
        elif part == "legs":
            frame_key = "pants"+gender[0], stance
        elif part == "back":
            frame_key = "back", stance
        frame = self.assets.frames().lookup_frame(*frame_key)
        if slot is None:
            return player_image
        item = self.assets.items().get_item(slot["name"])
        if (item is None or
                not gender + "Frames" in item[0]):
            return player_image
        item_img_path = item[0][gender + "Frames"]
        # Resolve relative paths against the item's own folder.
        if item_img_path[0] != "/":
            item_img_path = os.path.dirname(item[1]) + "/" + item_img_path
        item_img = self.assets.images().get_image(item_img_path)
        if item_img is None:
            return player_image
        item_img = item_img.crop(frame)
        item_img = self.assets.images().color_image(item_img, slot["parameters"])
        player_image.paste(item_img, mask=item_img)
        return player_image

    def render_chest(self, player, player_image, slot, part):
        """Lookup, crop and color given chest slot and apply to player render."""
        gender = player.get_gender()
        stance = player.get_personality()
        if slot is None:
            return player_image
        item = self.assets.items().get_item(slot["name"])
        if (item is None or not (gender + "Frames") in item[0]):
            return player_image
        frame_paths = item[0][gender + "Frames"]
        # Resolve relative sprite paths against the item's own folder.
        for k, v in frame_paths.items():
            if v[0] != "/":
                frame_paths[k] = os.path.dirname(item[1]) + "/" + v
        # Frame names: [front sleeve, chest, back sleeve] per gender.
        files = ["fsleeve", "chestm", "bsleeve"]
        if gender == "female":
            files = ["fsleevef", "chestf", "bsleevef"]
        color = lambda x: self.assets.images().color_image(x, slot["parameters"])
        if part == "fsleeve":
            fsleeve = self.assets.images().get_image(frame_paths["frontSleeve"])
            if fsleeve is None:
                return player_image
            fsleeve_frame = self.assets.frames().lookup_frame(files[0], stance)
            fsleeve = fsleeve.crop(fsleeve_frame)
            fsleeve = color(fsleeve)
            player_image.paste(fsleeve, mask=fsleeve)
        elif part == "bsleeve":
            bsleeve = self.assets.images().get_image(frame_paths["backSleeve"])
            if bsleeve is None:
                return player_image
            bsleeve_frame = self.assets.frames().lookup_frame(files[2], stance)
            bsleeve = bsleeve.crop(bsleeve_frame)
            bsleeve = color(bsleeve)
            player_image.paste(bsleeve, mask=bsleeve)
        elif part == "body":
            body = self.assets.images().get_image(frame_paths["body"])
            if body is None:
                return player_image
            body_frame = self.assets.frames().lookup_frame(files[1], stance)
            body = body.crop(body_frame)
            body = color(body)
            player_image.paste(body, mask=body)
        return player_image

    def render_player(self, player, armor=True):
        """Return an Image of a fully rendered player from a save."""
        name = player.get_race()
        gender = player.get_gender()
        species = self.get_species(name.lower())
        if species is None:
            # Unknown race: fall back to the generic "missing" icon.
            return Image.open(BytesIO(self.assets.items().missing_icon()))
        asset_loc = species[0][1]

        # crop the spritesheets and replace colours
        def grab_sprite(sheet_path, rect, directives):
            sheet = self.assets.read(sheet_path, asset_loc, True)
            img = Image.open(BytesIO(sheet)).convert("RGBA").crop(rect)
            if directives != "":
                img = replace_colors(img, unpack_color_directives(directives))
            return img

        default_rect = (43, 0, 86, 43)
        # TODO: should use the .bbox to figure this out
        # Personality names end in a digit which selects the 43px-wide
        # column in the body spritesheet.
        personality = player.get_personality()
        personality_offset = int(re.search("\d$", personality).group(0)) * 43
        body_rect = (personality_offset, 0, personality_offset+43, 43)
        body_img = grab_sprite("/humanoid/%s/%sbody.png" % (name, gender),
                               body_rect,
                               player.get_body_directives())
        frontarm_img = grab_sprite("/humanoid/%s/frontarm.png" % name,
                                   body_rect,
                                   player.get_body_directives())
        backarm_img = grab_sprite("/humanoid/%s/backarm.png" % name,
                                  body_rect,
                                  player.get_body_directives())
        head_img = grab_sprite("/humanoid/%s/%shead.png" % (name, gender),
                               default_rect,
                               player.get_body_directives())
        hair = player.get_hair()
        hair_img = None
        if hair[0] != "":
            hair_img = self.get_hair_image(
                name, hair[0],
                hair[1], gender,
                player.get_hair_directives()
            )
        facial_hair = player.get_facial_hair()
        facial_hair_img = None
        if facial_hair[0] != "":
            facial_hair_img = self.get_hair_image(
                name, facial_hair[0],
                facial_hair[1], gender,
                player.get_facial_hair_directives()
            )
        facial_mask = player.get_facial_mask()
        facial_mask_img = None
        if facial_mask[0] != "":
            facial_mask_img = self.get_hair_image(
                name, facial_mask[0],
                facial_mask[1], gender,
                player.get_facial_mask_directives()
            )
        head_slot = player.get_visible("head")
        chest_slot = player.get_visible("chest")
        legs_slot = player.get_visible("legs")
        back_slot = player.get_visible("back")
        do_head = armor and head_slot is not None
        # new blank canvas!
        base_size = 43
        base = Image.new("RGBA", (base_size, base_size))
        # the order of these is important!
        # back arm
        base.paste(backarm_img)
        if armor and chest_slot is not None:
            base = self.render_chest(player, base, chest_slot, "bsleeve")
        # backpack
        if armor and back_slot is not None:
            base = self.render_part(player, base, "back", back_slot)
        # then the head
        base.paste(head_img, mask=head_img)
        # TODO: support mask on head items
        if hair_img is not None:
            try:
                base.paste(hair_img, mask=hair_img)
            except ValueError:
                logging.exception("Bad hair image: %s, %s", hair[0], hair[1])
        # body
        base.paste(body_img, mask=body_img)
        if armor and legs_slot is not None:
            base = self.render_part(player, base, "legs", legs_slot)
        if armor and chest_slot is not None:
            base = self.render_chest(player, base, chest_slot, "body")
        # front arm
        base.paste(frontarm_img, mask=frontarm_img)
        if armor and chest_slot is not None:
            base = self.render_chest(player, base, chest_slot, "fsleeve")
        # facial mask if set
        if facial_mask_img is not None:
            try:
                base.paste(facial_mask_img, mask=facial_mask_img)
            except ValueError:
                logging.exception("Bad facial mask image: %s, %s",
                                  facial_mask[0], facial_mask[1])
        # facial hair if set
        if facial_hair_img is not None:
            try:
                base.paste(facial_hair_img, mask=facial_hair_img)
            except ValueError:
                logging.exception("Bad facial hair image: %s, %s",
                                  facial_hair[0], facial_hair[1])
        if do_head:
            base = self.render_part(player, base, "head", head_slot)
        # Upscale 3x for display.
        return base.resize((base_size*3, base_size*3))

    def get_hair_image(self, name, hair_type, hair_group, gender, directives):
        """Return a colored, cropped hair/mask sprite, or None if missing."""
        # TODO: bbox is from .frame file, need a way to read them still
        species = self.get_species(name.lower())
        image_path = "/humanoid/%s/%s/%s.png" % (name, hair_type, hair_group)
        try:
            image = self.assets.read(image_path, species[0][1], image=True)
            image = Image.open(BytesIO(image)).convert("RGBA").crop((43, 0,
                                                                     86, 43))
            return replace_colors(image, unpack_color_directives(directives))
        except OSError:
            logging.exception("Missing hair image: %s", image_path)
            return
| |
# Copyright 2012 OpenStack Foundation.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Prefer the stdlib json module; very old Pythons fall back to simplejson.
try:
    import json
except ImportError:
    import simplejson as json
import logging
import os
import urlparse  # Python 2 module (urllib.parse on Python 3)
# Python 2.5 compat fix
if not hasattr(urlparse, 'parse_qsl'):
    import cgi
    urlparse.parse_qsl = cgi.parse_qsl
import httplib2
from neutronclient.common import exceptions
from neutronclient.common import utils
_logger = logging.getLogger(__name__)
# Opt-in debug logging to stderr, enabled via environment variable.
if os.environ.get('NEUTRONCLIENT_DEBUG'):
    ch = logging.StreamHandler()
    _logger.setLevel(logging.DEBUG)
    _logger.addHandler(ch)
class ServiceCatalog(object):
    """Helper methods for dealing with a Keystone Service Catalog."""

    def __init__(self, resource_dict):
        self.catalog = resource_dict

    def get_token(self):
        """Extract token details from the service catalog."""
        access = self.catalog['access']
        token = {
            'id': access['token']['id'],
            'expires': access['token']['expires'],
        }
        try:
            token['user_id'] = access['user']['id']
            token['tenant_id'] = access['token']['tenant']['id']
        except Exception:
            # Tenant/user details are optional; omit whatever is missing.
            pass
        return token

    def url_for(self, attr=None, filter_value=None,
                service_type='network', endpoint_type='publicURL'):
        """Fetch the URL from the Neutron service for
        a particular endpoint type. If none given, return
        publicURL.
        """
        matches = [
            endpoint
            for service in self.catalog['access'].get('serviceCatalog', [])
            if service['type'] == service_type
            for endpoint in service['endpoints']
            if not filter_value or endpoint.get(attr) == filter_value
        ]
        if not matches:
            raise exceptions.EndpointNotFound()
        if len(matches) > 1:
            raise exceptions.AmbiguousEndpoints(message=matches)
        chosen = matches[0]
        if endpoint_type not in chosen:
            raise exceptions.EndpointTypeNotFound(message=endpoint_type)
        return chosen[endpoint_type]
class HTTPClient(httplib2.Http):
"""Handles the REST calls and responses, include authn."""
USER_AGENT = 'python-neutronclient'
def __init__(self, username=None, tenant_name=None, tenant_id=None,
             password=None, auth_url=None,
             token=None, region_name=None, timeout=None,
             endpoint_url=None, insecure=False,
             endpoint_type='publicURL',
             auth_strategy='keystone', ca_cert=None, log_credentials=False,
             **kwargs):
    """Store connection/auth settings and configure the httplib2 base.

    Either a pre-obtained `token` + `endpoint_url`, or keystone
    credentials (`username`/`password` with `tenant_name` or `tenant_id`
    and `auth_url`) may be supplied. `insecure=True` disables SSL
    certificate validation; `log_credentials` controls whether request
    logging redacts the password.
    """
    super(HTTPClient, self).__init__(timeout=timeout, ca_certs=ca_cert)
    self.username = username
    self.tenant_name = tenant_name
    self.tenant_id = tenant_id
    self.password = password
    # Normalize: auth URLs are joined with "/tokens" later.
    self.auth_url = auth_url.rstrip('/') if auth_url else None
    self.endpoint_type = endpoint_type
    self.region_name = region_name
    self.auth_token = token
    self.content_type = 'application/json'
    self.endpoint_url = endpoint_url
    self.auth_strategy = auth_strategy
    self.log_credentials = log_credentials
    # httplib2 overrides
    self.disable_ssl_certificate_validation = insecure
def _cs_request(self, *args, **kwargs):
    """Perform one HTTP request with standard headers and logging.

    Builds headers (User-Agent, Content-Type/Accept) and optional body
    from `kwargs`, logs the request (credentials stripped unless
    `log_credentials`), and returns (response, body). Raises
    SslCertificateValidationError on SSL handshake failure,
    ConnectionFailed on other transport errors, and
    Unauthorized/Forbidden on 401/403 responses.
    """
    kargs = {}
    kargs.setdefault('headers', kwargs.get('headers', {}))
    kargs['headers']['User-Agent'] = self.USER_AGENT
    if 'content_type' in kwargs:
        kargs['headers']['Content-Type'] = kwargs['content_type']
        kargs['headers']['Accept'] = kwargs['content_type']
    else:
        kargs['headers']['Content-Type'] = self.content_type
        kargs['headers']['Accept'] = self.content_type
    if 'body' in kwargs:
        kargs['body'] = kwargs['body']
    # Ensure str (not unicode) values before handing to httplib2.
    args = utils.safe_encode_list(args)
    kargs = utils.safe_encode_dict(kargs)
    if self.log_credentials:
        log_kargs = kargs
    else:
        log_kargs = self._strip_credentials(kargs)
    utils.http_log_req(_logger, args, log_kargs)
    try:
        resp, body = self.request(*args, **kargs)
    except httplib2.SSLHandshakeError as e:
        raise exceptions.SslCertificateValidationError(reason=e)
    except Exception as e:
        # Wrap the low-level connection error (socket timeout, redirect
        # limit, decompression error, etc) into our custom high-level
        # connection exception (it is excepted in the upper layers of code)
        raise exceptions.ConnectionFailed(reason=e)
    finally:
        # Temporary Fix for gate failures. RPC calls and HTTP requests
        # seem to be stepping on each other resulting in bogus fd's being
        # picked up for making http requests
        self.connections.clear()
    utils.http_log_resp(_logger, resp, body)
    status_code = self.get_status_code(resp)
    if status_code == 401:
        raise exceptions.Unauthorized(message=body)
    elif status_code == 403:
        raise exceptions.Forbidden(message=body)
    return resp, body
def _strip_credentials(self, kwargs):
if kwargs.get('body') and self.password:
log_kwargs = kwargs.copy()
log_kwargs['body'] = kwargs['body'].replace(self.password,
'REDACTED')
return log_kwargs
else:
return kwargs
def authenticate_and_fetch_endpoint_url(self):
if not self.auth_token:
self.authenticate()
elif not self.endpoint_url:
self.endpoint_url = self._get_endpoint_url()
def do_request(self, url, method, **kwargs):
self.authenticate_and_fetch_endpoint_url()
# Perform the request once. If we get a 401 back then it
# might be because the auth token expired, so try to
# re-authenticate and try again. If it still fails, bail.
try:
kwargs.setdefault('headers', {})
kwargs['headers']['X-Auth-Token'] = self.auth_token
resp, body = self._cs_request(self.endpoint_url + url, method,
**kwargs)
return resp, body
except exceptions.Unauthorized:
self.authenticate()
kwargs.setdefault('headers', {})
kwargs['headers']['X-Auth-Token'] = self.auth_token
resp, body = self._cs_request(
self.endpoint_url + url, method, **kwargs)
return resp, body
def _extract_service_catalog(self, body):
"""Set the client's service catalog from the response data."""
self.service_catalog = ServiceCatalog(body)
try:
sc = self.service_catalog.get_token()
self.auth_token = sc['id']
self.auth_tenant_id = sc.get('tenant_id')
self.auth_user_id = sc.get('user_id')
except KeyError:
raise exceptions.Unauthorized()
if not self.endpoint_url:
self.endpoint_url = self.service_catalog.url_for(
attr='region', filter_value=self.region_name,
endpoint_type=self.endpoint_type)
    def authenticate(self):
        """Authenticate against Keystone and cache the service catalog.

        Builds a passwordCredentials request (scoped by tenant id when
        available, otherwise tenant name), POSTs it to the Keystone
        tokens endpoint, and hands the parsed response to
        _extract_service_catalog().

        Raises:
            exceptions.Unauthorized: if the auth strategy is not
                'keystone' or Keystone does not return HTTP 200.
        """
        if self.auth_strategy != 'keystone':
            raise exceptions.Unauthorized(message='unknown auth strategy')
        # Keystone accepts either a tenant id or a tenant name alongside
        # the credentials; prefer the id when we have one.
        if self.tenant_id:
            body = {'auth': {'passwordCredentials':
                             {'username': self.username,
                              'password': self.password, },
                             'tenantId': self.tenant_id, }, }
        else:
            body = {'auth': {'passwordCredentials':
                             {'username': self.username,
                              'password': self.password, },
                             'tenantName': self.tenant_name, }, }
        token_url = self.auth_url + "/tokens"
        # Make sure we follow redirects when trying to reach Keystone,
        # restoring the previous setting afterwards even on failure.
        tmp_follow_all_redirects = self.follow_all_redirects
        self.follow_all_redirects = True
        try:
            resp, body = self._cs_request(token_url, "POST",
                                          body=json.dumps(body),
                                          content_type="application/json")
        finally:
            self.follow_all_redirects = tmp_follow_all_redirects
        status_code = self.get_status_code(resp)
        if status_code != 200:
            raise exceptions.Unauthorized(message=body)
        # Tolerate a non-JSON body: pass the raw string through unchanged.
        if body:
            try:
                body = json.loads(body)
            except ValueError:
                pass
        else:
            body = None
        self._extract_service_catalog(body)
def _get_endpoint_url(self):
url = self.auth_url + '/tokens/%s/endpoints' % self.auth_token
try:
resp, body = self._cs_request(url, "GET")
except exceptions.Unauthorized:
# rollback to authenticate() to handle case when neutron client
# is initialized just before the token is expired
self.authenticate()
return self.endpoint_url
body = json.loads(body)
for endpoint in body.get('endpoints', []):
if (endpoint['type'] == 'network' and
endpoint.get('region') == self.region_name):
if self.endpoint_type not in endpoint:
raise exceptions.EndpointTypeNotFound(
message=self.endpoint_type)
return endpoint[self.endpoint_type]
raise exceptions.EndpointNotFound()
def get_auth_info(self):
return {'auth_token': self.auth_token,
'auth_tenant_id': self.auth_tenant_id,
'auth_user_id': self.auth_user_id,
'endpoint_url': self.endpoint_url}
def get_status_code(self, response):
"""Returns the integer status code from the response.
Either a Webob.Response (used in testing) or httplib.Response
is returned.
"""
if hasattr(response, 'status_int'):
return response.status_int
else:
return response.status
| |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
from sentry.utils.db import is_postgres
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Drop NOT NULL from Release.project_id and ReleaseFile.project_id.

        NOTE(review): on PostgreSQL the constraint is dropped with raw
        ALTER TABLE statements instead of South's alter_column --
        presumably to avoid South's heavier column-rewrite path; verify
        against the project's migration conventions.
        """
        if is_postgres():
            # Changing field 'Release.project_id'
            db.execute("ALTER TABLE sentry_release ALTER COLUMN project_id DROP NOT NULL")
            # Changing field 'ReleaseFile.project_id'
            db.execute("ALTER TABLE sentry_releasefile ALTER COLUMN project_id DROP NOT NULL")
        else:
            # Non-postgres backends go through South's generic path with
            # the field redefined as null=True.
            # Changing field 'Release.project_id'
            db.alter_column(
                'sentry_release',
                'project_id',
                self.gf('sentry.db.models.fields.bounded.BoundedPositiveIntegerField')(null=True)
            )
            # Changing field 'ReleaseFile.project_id'
            db.alter_column(
                'sentry_releasefile',
                'project_id',
                self.gf('sentry.db.models.fields.bounded.BoundedPositiveIntegerField')(null=True)
            )
    def backwards(self, orm):
        """Refuse to reverse: NULL project_id values cannot be restored.

        This migration is intentionally irreversible; it always raises.
        The statements after the first raise are deliberately unreachable
        reference code, kept as a template for anyone writing a manual
        reverse migration.
        """
        # User chose to not deal with backwards NULL issues for 'Release.project_id'
        raise RuntimeError(
            "Cannot reverse this migration. 'Release.project_id' and its values cannot be restored."
        )

        # The following code is provided here to aid in writing a correct migration
        # Changing field 'Release.project_id'
        db.alter_column(
            'sentry_release', 'project_id',
            self.gf('sentry.db.models.fields.bounded.BoundedPositiveIntegerField')()
        )

        # User chose to not deal with backwards NULL issues for 'ReleaseFile.project_id'
        raise RuntimeError(
            "Cannot reverse this migration. 'ReleaseFile.project_id' and its values cannot be restored."
        )

        # The following code is provided here to aid in writing a correct migration
        # Changing field 'ReleaseFile.project_id'
        db.alter_column(
            'sentry_releasefile', 'project_id',
            self.gf('sentry.db.models.fields.bounded.BoundedPositiveIntegerField')()
        )
models = {
'sentry.activity': {
'Meta': {
'object_name': 'Activity'
},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {
'null': 'True'
}),
'datetime':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'null': 'True'
}
)
},
'sentry.apikey': {
'Meta': {
'object_name': 'ApiKey'
},
'allowed_origins':
('django.db.models.fields.TextField', [], {
'null': 'True',
'blank': 'True'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '32'
}),
'label': (
'django.db.models.fields.CharField', [], {
'default': "'Default'",
'max_length': '64',
'blank': 'True'
}
),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'key_set'",
'to': "orm['sentry.Organization']"
}
),
'scopes': ('django.db.models.fields.BigIntegerField', [], {
'default': 'None'
}),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
)
},
'sentry.apitoken': {
'Meta': {
'object_name': 'ApiToken'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.ApiKey']",
'null': 'True'
}
),
'scopes': ('django.db.models.fields.BigIntegerField', [], {
'default': 'None'
}),
'token':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '64'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.auditlogentry': {
'Meta': {
'object_name': 'AuditLogEntry'
},
'actor': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'blank': 'True',
'related_name': "'audit_actors'",
'null': 'True',
'to': "orm['sentry.User']"
}
),
'actor_key': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.ApiKey']",
'null': 'True',
'blank': 'True'
}
),
'actor_label': (
'django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True',
'blank': 'True'
}
),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'datetime':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'event': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ip_address': (
'django.db.models.fields.GenericIPAddressField', [], {
'max_length': '39',
'null': 'True'
}
),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'target_object':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'target_user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'blank': 'True',
'related_name': "'audit_targets'",
'null': 'True',
'to': "orm['sentry.User']"
}
)
},
'sentry.authenticator': {
'Meta': {
'unique_together': "(('user', 'type'),)",
'object_name': 'Authenticator',
'db_table': "'auth_authenticator'"
},
'config': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {}),
'created_at':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {
'primary_key': 'True'
}),
'last_used_at': ('django.db.models.fields.DateTimeField', [], {
'null': 'True'
}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.authidentity': {
'Meta': {
'unique_together': "(('auth_provider', 'ident'), ('auth_provider', 'user'))",
'object_name': 'AuthIdentity'
},
'auth_provider': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.AuthProvider']"
}
),
'data': ('jsonfield.fields.JSONField', [], {
'default': '{}'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'last_synced':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'last_verified':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.authprovider': {
'Meta': {
'object_name': 'AuthProvider'
},
'config': ('jsonfield.fields.JSONField', [], {
'default': '{}'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'default_global_access':
('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'default_role':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '50'
}),
'default_teams': (
'django.db.models.fields.related.ManyToManyField', [], {
'to': "orm['sentry.Team']",
'symmetrical': 'False',
'blank': 'True'
}
),
'flags': ('django.db.models.fields.BigIntegerField', [], {
'default': '0'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_sync': ('django.db.models.fields.DateTimeField', [], {
'null': 'True'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']",
'unique': 'True'
}
),
'provider': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'sync_time':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
})
},
'sentry.broadcast': {
'Meta': {
'object_name': 'Broadcast'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'date_expires': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime(2017, 1, 19, 0, 0)',
'null': 'True',
'blank': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_active':
('django.db.models.fields.BooleanField', [], {
'default': 'True',
'db_index': 'True'
}),
'link': (
'django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True',
'blank': 'True'
}
),
'message': ('django.db.models.fields.CharField', [], {
'max_length': '256'
}),
'title': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'upstream_id': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'null': 'True',
'blank': 'True'
}
)
},
'sentry.broadcastseen': {
'Meta': {
'unique_together': "(('broadcast', 'user'),)",
'object_name': 'BroadcastSeen'
},
'broadcast': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Broadcast']"
}
),
'date_seen':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.commit': {
'Meta': {
'unique_together': "(('repository_id', 'key'),)",
'object_name': 'Commit',
'index_together': "(('repository_id', 'date_added'),)"
},
'author': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.CommitAuthor']",
'null': 'True'
}
),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'message': ('django.db.models.fields.TextField', [], {
'null': 'True'
}),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'repository_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.commitauthor': {
'Meta': {
'unique_together': "(('organization_id', 'email'),)",
'object_name': 'CommitAuthor'
},
'email': ('django.db.models.fields.EmailField', [], {
'max_length': '75'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name':
('django.db.models.fields.CharField', [], {
'max_length': '128',
'null': 'True'
}),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
)
},
'sentry.commitfilechange': {
'Meta': {
'unique_together': "(('commit', 'filename'),)",
'object_name': 'CommitFileChange'
},
'commit': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Commit']"
}
),
'filename': ('django.db.models.fields.CharField', [], {
'max_length': '255'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'type': ('django.db.models.fields.CharField', [], {
'max_length': '1'
})
},
'sentry.counter': {
'Meta': {
'object_name': 'Counter',
'db_table': "'sentry_projectcounter'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'unique': 'True'
}
),
'value': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.dsymbundle': {
'Meta': {
'object_name': 'DSymBundle'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'object': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.DSymObject']"
}
),
'sdk': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.DSymSDK']"
}
)
},
'sentry.dsymobject': {
'Meta': {
'object_name': 'DSymObject'
},
'cpu_name': ('django.db.models.fields.CharField', [], {
'max_length': '40'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'object_path': ('django.db.models.fields.TextField', [], {
'db_index': 'True'
}),
'uuid':
('django.db.models.fields.CharField', [], {
'max_length': '36',
'db_index': 'True'
}),
'vmaddr':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True'
}),
'vmsize':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True'
})
},
'sentry.dsymsdk': {
'Meta': {
'object_name':
'DSymSDK',
'index_together':
"[('version_major', 'version_minor', 'version_patchlevel', 'version_build')]"
},
'dsym_type':
('django.db.models.fields.CharField', [], {
'max_length': '20',
'db_index': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'sdk_name': ('django.db.models.fields.CharField', [], {
'max_length': '20'
}),
'version_build': ('django.db.models.fields.CharField', [], {
'max_length': '40'
}),
'version_major': ('django.db.models.fields.IntegerField', [], {}),
'version_minor': ('django.db.models.fields.IntegerField', [], {}),
'version_patchlevel': ('django.db.models.fields.IntegerField', [], {})
},
'sentry.dsymsymbol': {
'Meta': {
'unique_together': "[('object', 'address')]",
'object_name': 'DSymSymbol'
},
'address':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'db_index': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'object': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.DSymObject']"
}
),
'symbol': ('django.db.models.fields.TextField', [], {})
},
'sentry.environment': {
'Meta': {
'unique_together': "(('project_id', 'name'),)",
'object_name': 'Environment'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.event': {
'Meta': {
'unique_together': "(('project_id', 'event_id'),)",
'object_name': 'Event',
'db_table': "'sentry_message'",
'index_together': "(('group_id', 'datetime'),)"
},
'data':
('sentry.db.models.fields.node.NodeField', [], {
'null': 'True',
'blank': 'True'
}),
'datetime': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'event_id': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'null': 'True',
'db_column': "'message_id'"
}
),
'group_id': (
'sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True',
'blank': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'message': ('django.db.models.fields.TextField', [], {}),
'platform':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'project_id': (
'sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True',
'blank': 'True'
}
),
'time_spent':
('sentry.db.models.fields.bounded.BoundedIntegerField', [], {
'null': 'True'
})
},
'sentry.eventmapping': {
'Meta': {
'unique_together': "(('project_id', 'event_id'),)",
'object_name': 'EventMapping'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'event_id': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.eventtag': {
'Meta': {
'unique_together':
"(('event_id', 'key_id', 'value_id'),)",
'object_name':
'EventTag',
'index_together':
"(('project_id', 'key_id', 'value_id'), ('group_id', 'key_id', 'value_id'))"
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'event_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'group_id':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'value_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.eventuser': {
'Meta': {
'unique_together':
"(('project', 'ident'), ('project', 'hash'))",
'object_name':
'EventUser',
'index_together':
"(('project', 'email'), ('project', 'username'), ('project', 'ip_address'))"
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'email':
('django.db.models.fields.EmailField', [], {
'max_length': '75',
'null': 'True'
}),
'hash': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident':
('django.db.models.fields.CharField', [], {
'max_length': '128',
'null': 'True'
}),
'ip_address': (
'django.db.models.fields.GenericIPAddressField', [], {
'max_length': '39',
'null': 'True'
}
),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'username':
('django.db.models.fields.CharField', [], {
'max_length': '128',
'null': 'True'
})
},
'sentry.file': {
'Meta': {
'object_name': 'File'
},
'blob': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'legacy_blob'",
'null': 'True',
'to': "orm['sentry.FileBlob']"
}
),
'blobs': (
'django.db.models.fields.related.ManyToManyField', [], {
'to': "orm['sentry.FileBlob']",
'through': "orm['sentry.FileBlobIndex']",
'symmetrical': 'False'
}
),
'checksum':
('django.db.models.fields.CharField', [], {
'max_length': '40',
'null': 'True'
}),
'headers': ('jsonfield.fields.JSONField', [], {
'default': '{}'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'path': ('django.db.models.fields.TextField', [], {
'null': 'True'
}),
'size':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'timestamp': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'type': ('django.db.models.fields.CharField', [], {
'max_length': '64'
})
},
'sentry.fileblob': {
'Meta': {
'object_name': 'FileBlob'
},
'checksum':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '40'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'path': ('django.db.models.fields.TextField', [], {
'null': 'True'
}),
'size':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'timestamp': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
)
},
'sentry.fileblobindex': {
'Meta': {
'unique_together': "(('file', 'blob', 'offset'),)",
'object_name': 'FileBlobIndex'
},
'blob': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.FileBlob']"
}
),
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'offset': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.globaldsymfile': {
'Meta': {
'object_name': 'GlobalDSymFile'
},
'cpu_name': ('django.db.models.fields.CharField', [], {
'max_length': '40'
}),
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'object_name': ('django.db.models.fields.TextField', [], {}),
'uuid':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '36'
})
},
'sentry.group': {
'Meta': {
'unique_together': "(('project', 'short_id'),)",
'object_name': 'Group',
'db_table': "'sentry_groupedmessage'",
'index_together': "(('project', 'first_release'),)"
},
'active_at':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'db_index': 'True'
}),
'culprit': (
'django.db.models.fields.CharField', [], {
'max_length': '200',
'null': 'True',
'db_column': "'view'",
'blank': 'True'
}
),
'data': (
'sentry.db.models.fields.gzippeddict.GzippedDictField', [], {
'null': 'True',
'blank': 'True'
}
),
'first_release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']",
'null': 'True',
'on_delete': 'models.PROTECT'
}
),
'first_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_public': (
'django.db.models.fields.NullBooleanField', [], {
'default': 'False',
'null': 'True',
'blank': 'True'
}
),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'level': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '40',
'db_index': 'True',
'blank': 'True'
}
),
'logger': (
'django.db.models.fields.CharField', [], {
'default': "''",
'max_length': '64',
'db_index': 'True',
'blank': 'True'
}
),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'null': 'True'
}
),
'platform':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'resolved_at':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'db_index': 'True'
}),
'score': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {
'default': '0'
}),
'short_id':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True'
}),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
),
'time_spent_count':
('sentry.db.models.fields.bounded.BoundedIntegerField', [], {
'default': '0'
}),
'time_spent_total':
('sentry.db.models.fields.bounded.BoundedIntegerField', [], {
'default': '0'
}),
'times_seen': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '1',
'db_index': 'True'
}
)
},
'sentry.groupassignee': {
'Meta': {
'object_name': 'GroupAssignee',
'db_table': "'sentry_groupasignee'"
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'assignee_set'",
'unique': 'True',
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'assignee_set'",
'to': "orm['sentry.Project']"
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'sentry_assignee_set'",
'to': "orm['sentry.User']"
}
)
},
'sentry.groupbookmark': {
'Meta': {
'unique_together': "(('project', 'user', 'group'),)",
'object_name': 'GroupBookmark'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'bookmark_set'",
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'bookmark_set'",
'to': "orm['sentry.Project']"
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'sentry_bookmark_set'",
'to': "orm['sentry.User']"
}
)
},
'sentry.groupemailthread': {
'Meta': {
'unique_together': "(('email', 'group'), ('email', 'msgid'))",
'object_name': 'GroupEmailThread'
},
'date': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'email': ('django.db.models.fields.EmailField', [], {
'max_length': '75'
}),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'groupemail_set'",
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'msgid': ('django.db.models.fields.CharField', [], {
'max_length': '100'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'groupemail_set'",
'to': "orm['sentry.Project']"
}
)
},
'sentry.grouphash': {
'Meta': {
'unique_together': "(('project', 'hash'),)",
'object_name': 'GroupHash'
},
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'null': 'True'
}
),
'hash': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
)
},
'sentry.groupmeta': {
'Meta': {
'unique_together': "(('group', 'key'),)",
'object_name': 'GroupMeta'
},
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'value': ('django.db.models.fields.TextField', [], {})
},
'sentry.groupredirect': {
'Meta': {
'object_name': 'GroupRedirect'
},
'group_id':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'db_index': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'previous_group_id':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'unique': 'True'
})
},
'sentry.grouprelease': {
'Meta': {
'unique_together': "(('group_id', 'release_id', 'environment'),)",
'object_name': 'GroupRelease'
},
'environment':
('django.db.models.fields.CharField', [], {
'default': "''",
'max_length': '64'
}),
'first_seen':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'project_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'release_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
)
},
'sentry.groupresolution': {
'Meta': {
'object_name': 'GroupResolution'
},
'datetime': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'unique': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']"
}
),
'status':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
})
},
'sentry.grouprulestatus': {
'Meta': {
'unique_together': "(('rule', 'group'),)",
'object_name': 'GroupRuleStatus'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_active': ('django.db.models.fields.DateTimeField', [], {
'null': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'rule': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Rule']"
}
),
'status': ('django.db.models.fields.PositiveSmallIntegerField', [], {
'default': '0'
})
},
'sentry.groupseen': {
'Meta': {
'unique_together': "(('user', 'group'),)",
'object_name': 'GroupSeen'
},
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_seen':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'db_index': 'False'
}
)
},
'sentry.groupsnooze': {
'Meta': {
'object_name': 'GroupSnooze'
},
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'unique': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'until': ('django.db.models.fields.DateTimeField', [], {})
},
'sentry.groupsubscription': {
'Meta': {
'unique_together': "(('group', 'user'),)",
'object_name': 'GroupSubscription'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'subscription_set'",
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_active': ('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'subscription_set'",
'to': "orm['sentry.Project']"
}
),
'reason':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.grouptagkey': {
'Meta': {
'unique_together': "(('project', 'group', 'key'),)",
'object_name': 'GroupTagKey'
},
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'values_seen':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
})
},
'sentry.grouptagvalue': {
'Meta': {
'unique_together': "(('group', 'key', 'value'),)",
'object_name': 'GroupTagValue',
'db_table': "'sentry_messagefiltervalue'",
'index_together': "(('project', 'key', 'value', 'last_seen'),)"
},
'first_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'grouptag'",
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'grouptag'",
'null': 'True',
'to': "orm['sentry.Project']"
}
),
'times_seen':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'value': ('django.db.models.fields.CharField', [], {
'max_length': '200'
})
},
'sentry.lostpasswordhash': {
'Meta': {
'object_name': 'LostPasswordHash'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'hash': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'unique': 'True'
}
)
},
'sentry.option': {
'Meta': {
'object_name': 'Option'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '64'
}),
'last_updated':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.organization': {
'Meta': {
'object_name': 'Organization'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'default_role':
('django.db.models.fields.CharField', [], {
'default': "'member'",
'max_length': '32'
}),
'flags': ('django.db.models.fields.BigIntegerField', [], {
'default': '1'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'members': (
'django.db.models.fields.related.ManyToManyField', [], {
'related_name': "'org_memberships'",
'symmetrical': 'False',
'through': "orm['sentry.OrganizationMember']",
'to': "orm['sentry.User']"
}
),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'slug':
('django.db.models.fields.SlugField', [], {
'unique': 'True',
'max_length': '50'
}),
'status':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
})
},
'sentry.organizationaccessrequest': {
'Meta': {
'unique_together': "(('team', 'member'),)",
'object_name': 'OrganizationAccessRequest'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'member': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.OrganizationMember']"
}
),
'team': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Team']"
}
)
},
'sentry.organizationmember': {
'Meta': {
'unique_together': "(('organization', 'user'), ('organization', 'email'))",
'object_name': 'OrganizationMember'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'email': (
'django.db.models.fields.EmailField', [], {
'max_length': '75',
'null': 'True',
'blank': 'True'
}
),
'flags': ('django.db.models.fields.BigIntegerField', [], {
'default': '0'
}),
'has_global_access': ('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'member_set'",
'to': "orm['sentry.Organization']"
}
),
'role':
('django.db.models.fields.CharField', [], {
'default': "'member'",
'max_length': '32'
}),
'teams': (
'django.db.models.fields.related.ManyToManyField', [], {
'to': "orm['sentry.Team']",
'symmetrical': 'False',
'through': "orm['sentry.OrganizationMemberTeam']",
'blank': 'True'
}
),
'token': (
'django.db.models.fields.CharField', [], {
'max_length': '64',
'unique': 'True',
'null': 'True',
'blank': 'True'
}
),
'type': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '50',
'blank': 'True'
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'blank': 'True',
'related_name': "'sentry_orgmember_set'",
'null': 'True',
'to': "orm['sentry.User']"
}
)
},
'sentry.organizationmemberteam': {
'Meta': {
'unique_together': "(('team', 'organizationmember'),)",
'object_name': 'OrganizationMemberTeam',
'db_table': "'sentry_organizationmember_teams'"
},
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {
'primary_key': 'True'
}),
'is_active': ('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'organizationmember': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.OrganizationMember']"
}
),
'team': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Team']"
}
)
},
'sentry.organizationonboardingtask': {
'Meta': {
'unique_together': "(('organization', 'task'),)",
'object_name': 'OrganizationOnboardingTask'
},
'data': ('jsonfield.fields.JSONField', [], {
'default': '{}'
}),
'date_completed':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'project_id': (
'sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True',
'blank': 'True'
}
),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'task': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'null': 'True'
}
)
},
'sentry.organizationoption': {
'Meta': {
'unique_together': "(('organization', 'key'),)",
'object_name': 'OrganizationOption',
'db_table': "'sentry_organizationoptions'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.project': {
'Meta': {
'unique_together': "(('team', 'slug'), ('organization', 'slug'))",
'object_name': 'Project'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'first_event': ('django.db.models.fields.DateTimeField', [], {
'null': 'True'
}),
'forced_color': (
'django.db.models.fields.CharField', [], {
'max_length': '6',
'null': 'True',
'blank': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '200'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'public': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'slug': ('django.db.models.fields.SlugField', [], {
'max_length': '50',
'null': 'True'
}),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
),
'team': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Team']"
}
)
},
'sentry.projectbookmark': {
'Meta': {
'unique_together': "(('project_id', 'user'),)",
'object_name': 'ProjectBookmark'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project_id': (
'sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True',
'blank': 'True'
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.projectdsymfile': {
'Meta': {
'unique_together': "(('project', 'uuid'),)",
'object_name': 'ProjectDSymFile'
},
'cpu_name': ('django.db.models.fields.CharField', [], {
'max_length': '40'
}),
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'object_name': ('django.db.models.fields.TextField', [], {}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'uuid': ('django.db.models.fields.CharField', [], {
'max_length': '36'
})
},
'sentry.projectkey': {
'Meta': {
'object_name': 'ProjectKey'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'label': (
'django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True',
'blank': 'True'
}
),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'key_set'",
'to': "orm['sentry.Project']"
}
),
'public_key': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'unique': 'True',
'null': 'True'
}
),
'roles': ('django.db.models.fields.BigIntegerField', [], {
'default': '1'
}),
'secret_key': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'unique': 'True',
'null': 'True'
}
),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
)
},
'sentry.projectoption': {
'Meta': {
'unique_together': "(('project', 'key'),)",
'object_name': 'ProjectOption',
'db_table': "'sentry_projectoptions'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.projectplatform': {
'Meta': {
'unique_together': "(('project_id', 'platform'),)",
'object_name': 'ProjectPlatform'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_seen':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'platform': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.release': {
'Meta': {
'unique_together': "(('project_id', 'version'),)",
'object_name': 'Release'
},
'data': ('jsonfield.fields.JSONField', [], {
'default': '{}'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'date_released':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'blank': 'True'
}),
'date_started':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'blank': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'new_groups':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'owner': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'null': 'True',
'blank': 'True'
}
),
'project_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'projects': (
'django.db.models.fields.related.ManyToManyField', [], {
'related_name': "'releases'",
'symmetrical': 'False',
'through': "orm['sentry.ReleaseProject']",
'to': "orm['sentry.Project']"
}
),
'ref': (
'django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True',
'blank': 'True'
}
),
'url': (
'django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True',
'blank': 'True'
}
),
'version': ('django.db.models.fields.CharField', [], {
'max_length': '64'
})
},
'sentry.releasecommit': {
'Meta': {
'unique_together': "(('release', 'commit'), ('release', 'order'))",
'object_name': 'ReleaseCommit'
},
'commit': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Commit']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'order': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'project_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True',
'db_index': 'True'
}
),
'release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']"
}
)
},
'sentry.releaseenvironment': {
'Meta': {
'unique_together': "(('project_id', 'release_id', 'environment_id'),)",
'object_name': 'ReleaseEnvironment',
'db_table': "'sentry_environmentrelease'"
},
'environment_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'first_seen':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'project_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'release_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
)
},
'sentry.releasefile': {
'Meta': {
'unique_together': "(('release', 'ident'),)",
'object_name': 'ReleaseFile'
},
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident': ('django.db.models.fields.CharField', [], {
'max_length': '40'
}),
'name': ('django.db.models.fields.TextField', [], {}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'project_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']"
}
)
},
'sentry.releaseproject': {
'Meta': {
'unique_together': "(('project', 'release'),)",
'object_name': 'ReleaseProject',
'db_table': "'sentry_release_project'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']"
}
)
},
'sentry.repository': {
'Meta': {
'unique_together':
"(('organization_id', 'name'), ('organization_id', 'provider', 'external_id'))",
'object_name':
'Repository'
},
'config': ('jsonfield.fields.JSONField', [], {
'default': '{}'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'external_id':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '200'
}),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'provider':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
),
'url': ('django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True'
})
},
'sentry.rule': {
'Meta': {
'object_name': 'Rule'
},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'label': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
)
},
'sentry.savedsearch': {
'Meta': {
'unique_together': "(('project', 'name'),)",
'object_name': 'SavedSearch'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_default': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'query': ('django.db.models.fields.TextField', [], {})
},
'sentry.savedsearchuserdefault': {
'Meta': {
'unique_together': "(('project', 'user'),)",
'object_name': 'SavedSearchUserDefault',
'db_table': "'sentry_savedsearch_userdefault'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'savedsearch': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.SavedSearch']"
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.tagkey': {
'Meta': {
'unique_together': "(('project', 'key'),)",
'object_name': 'TagKey',
'db_table': "'sentry_filterkey'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'label':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'status':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'values_seen':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
})
},
'sentry.tagvalue': {
'Meta': {
'unique_together': "(('project', 'key', 'value'),)",
'object_name': 'TagValue',
'db_table': "'sentry_filtervalue'"
},
'data': (
'sentry.db.models.fields.gzippeddict.GzippedDictField', [], {
'null': 'True',
'blank': 'True'
}
),
'first_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'times_seen':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'value': ('django.db.models.fields.CharField', [], {
'max_length': '200'
})
},
'sentry.team': {
'Meta': {
'unique_together': "(('organization', 'slug'),)",
'object_name': 'Team'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'slug': ('django.db.models.fields.SlugField', [], {
'max_length': '50'
}),
'status':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
})
},
'sentry.user': {
'Meta': {
'object_name': 'User',
'db_table': "'auth_user'"
},
'date_joined':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'email':
('django.db.models.fields.EmailField', [], {
'max_length': '75',
'blank': 'True'
}),
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {
'primary_key': 'True'
}),
'is_active': ('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'is_managed': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'is_password_expired':
('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'is_staff': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'last_login':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'last_password_change': ('django.db.models.fields.DateTimeField', [], {
'null': 'True'
}),
'name': (
'django.db.models.fields.CharField', [], {
'max_length': '200',
'db_column': "'first_name'",
'blank': 'True'
}
),
'password': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'session_nonce':
('django.db.models.fields.CharField', [], {
'max_length': '12',
'null': 'True'
}),
'username':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '128'
})
},
'sentry.useravatar': {
'Meta': {
'object_name': 'UserAvatar'
},
'avatar_type':
('django.db.models.fields.PositiveSmallIntegerField', [], {
'default': '0'
}),
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']",
'unique': 'True',
'null': 'True',
'on_delete': 'models.SET_NULL'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident': (
'django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '32',
'db_index': 'True'
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'avatar'",
'unique': 'True',
'to': "orm['sentry.User']"
}
)
},
'sentry.useremail': {
'Meta': {
'unique_together': "(('user', 'email'),)",
'object_name': 'UserEmail'
},
'date_hash_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'email': ('django.db.models.fields.EmailField', [], {
'max_length': '75'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_verified': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'emails'",
'to': "orm['sentry.User']"
}
),
'validation_hash': (
'django.db.models.fields.CharField', [], {
'default': "u'ssVzZVyhkDOpmZUemjBvkkkRbhSwhPVT'",
'max_length': '32'
}
)
},
'sentry.useroption': {
'Meta': {
'unique_together': "(('user', 'project', 'key'),)",
'object_name': 'UserOption'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.userreport': {
'Meta': {
'unique_together': "(('project', 'event_id'),)",
'object_name': 'UserReport',
'index_together': "(('project', 'event_id'), ('project', 'date_added'))"
},
'comments': ('django.db.models.fields.TextField', [], {}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'email': ('django.db.models.fields.EmailField', [], {
'max_length': '75'
}),
'event_id': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
)
}
}
complete_apps = ['sentry']
| |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
# Generic placeholder for the deserialized return type of an operation.
T = TypeVar('T')
# Optional per-call hook (the `cls` kwarg of operations): receives the raw
# pipeline response, the deserialized body, and the response headers, and
# may return an arbitrary replacement value.
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
# Shared msrest serializer used to validate/encode URL, query, and header
# parameters. Client-side validation is disabled so that constraint
# violations surface as service errors rather than local ValidationErrors.
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_get_at_management_group_scope_request(
    group_id: str,
    deployment_name: str,
    operation_id: str,
    **kwargs: Any
) -> HttpRequest:
    """Build a GET request for a single deployment operation at management-group scope.

    :param group_id: Management group ID (1-90 characters).
    :param deployment_name: Deployment name (1-64 characters, restricted character set).
    :param operation_id: ID of the deployment operation to retrieve.
    :return: An :class:`~azure.core.rest.HttpRequest` ready to be sent by a pipeline.
    """
    api_version = "2019-05-01"
    accept = "application/json"

    # Resolve the URL template, validating each path value against its
    # service-side constraints (length limits and the deployment-name pattern).
    template = kwargs.pop("template_url", '/providers/Microsoft.Management/managementGroups/{groupId}/providers/Microsoft.Resources/deployments/{deploymentName}/operations/{operationId}')
    path_args = {
        "groupId": _SERIALIZER.url("group_id", group_id, 'str', max_length=90, min_length=1),
        "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        "operationId": _SERIALIZER.url("operation_id", operation_id, 'str'),
    }
    resolved_url = _format_url_section(template, **path_args)

    # Query string: caller-supplied params plus the pinned API version.
    query_params = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    # Headers: caller-supplied headers plus the Accept content type.
    header_map = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_map['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="GET",
        url=resolved_url,
        params=query_params,
        headers=header_map,
        **kwargs
    )
def build_list_at_management_group_scope_request(
    group_id: str,
    deployment_name: str,
    *,
    top: Optional[int] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build a GET request listing a deployment's operations at management-group scope.

    ``top`` optionally caps the number of results per page.
    """
    api_version = "2019-05-01"
    accept = "application/json"

    # Resolve the templated path, validating each segment's constraints.
    _url = kwargs.pop("template_url", '/providers/Microsoft.Management/managementGroups/{groupId}/providers/Microsoft.Resources/deployments/{deploymentName}/operations')
    _path_args = {
        "groupId": _SERIALIZER.url("group_id", group_id, 'str', max_length=90, min_length=1),
        "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
    }
    _url = _format_url_section(_url, **_path_args)

    # Query string: optional paging cap plus the service api-version.
    _query = kwargs.pop("params", {})  # type: Dict[str, Any]
    if top is not None:
        _query['$top'] = _SERIALIZER.query("top", top, 'int')
    _query['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    # Headers: advertise the JSON response format.
    _headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(method="GET", url=_url, params=_query, headers=_headers, **kwargs)
def build_get_at_subscription_scope_request(
    deployment_name: str,
    operation_id: str,
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    """Build a GET request for a single deployment operation at subscription scope."""
    api_version = "2019-05-01"
    accept = "application/json"

    # Resolve the templated path, validating each segment's constraints.
    _url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers/Microsoft.Resources/deployments/{deploymentName}/operations/{operationId}')
    _path_args = {
        "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        "operationId": _SERIALIZER.url("operation_id", operation_id, 'str'),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
    }
    _url = _format_url_section(_url, **_path_args)

    # Query string: only the service api-version is sent.
    _query = kwargs.pop("params", {})  # type: Dict[str, Any]
    _query['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    # Headers: advertise the JSON response format.
    _headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(method="GET", url=_url, params=_query, headers=_headers, **kwargs)
def build_list_at_subscription_scope_request(
    deployment_name: str,
    subscription_id: str,
    *,
    top: Optional[int] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build a GET request listing a deployment's operations at subscription scope.

    ``top`` optionally caps the number of results per page.
    """
    api_version = "2019-05-01"
    accept = "application/json"

    # Resolve the templated path, validating each segment's constraints.
    _url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers/Microsoft.Resources/deployments/{deploymentName}/operations')
    _path_args = {
        "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
    }
    _url = _format_url_section(_url, **_path_args)

    # Query string: optional paging cap plus the service api-version.
    _query = kwargs.pop("params", {})  # type: Dict[str, Any]
    if top is not None:
        _query['$top'] = _SERIALIZER.query("top", top, 'int')
    _query['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    # Headers: advertise the JSON response format.
    _headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(method="GET", url=_url, params=_query, headers=_headers, **kwargs)
def build_get_request(
    resource_group_name: str,
    deployment_name: str,
    operation_id: str,
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    """Build a GET request for a single deployment operation in a resource group."""
    api_version = "2019-05-01"
    accept = "application/json"

    # Resolve the templated path, validating each segment's constraints.
    _url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/deployments/{deploymentName}/operations/{operationId}')
    _path_args = {
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        "operationId": _SERIALIZER.url("operation_id", operation_id, 'str'),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
    }
    _url = _format_url_section(_url, **_path_args)

    # Query string: only the service api-version is sent.
    _query = kwargs.pop("params", {})  # type: Dict[str, Any]
    _query['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    # Headers: advertise the JSON response format.
    _headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(method="GET", url=_url, params=_query, headers=_headers, **kwargs)
def build_list_request(
    resource_group_name: str,
    deployment_name: str,
    subscription_id: str,
    *,
    top: Optional[int] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build a GET request listing the operations of a resource-group deployment.

    ``top`` optionally caps the number of results per page.
    """
    api_version = "2019-05-01"
    accept = "application/json"

    # Resolve the templated path, validating each segment's constraints.
    _url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/deployments/{deploymentName}/operations')
    _path_args = {
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, 'str', max_length=64, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
    }
    _url = _format_url_section(_url, **_path_args)

    # Query string: optional paging cap plus the service api-version.
    _query = kwargs.pop("params", {})  # type: Dict[str, Any]
    if top is not None:
        _query['$top'] = _SERIALIZER.query("top", top, 'int')
    _query['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    # Headers: advertise the JSON response format.
    _headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    _headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(method="GET", url=_url, params=_query, headers=_headers, **kwargs)
class DeploymentOperationsOperations(object):
    """DeploymentOperationsOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.resource.resources.v2019_05_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer):
        # Pipeline client, (de)serializers and configuration are injected by
        # the service client that owns this operation group.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    @distributed_trace
    def get_at_management_group_scope(
        self,
        group_id: str,
        deployment_name: str,
        operation_id: str,
        **kwargs: Any
    ) -> "_models.DeploymentOperation":
        """Gets a deployments operation.

        :param group_id: The management group ID.
        :type group_id: str
        :param deployment_name: The name of the deployment.
        :type deployment_name: str
        :param operation_id: The ID of the operation to get.
        :type operation_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: DeploymentOperation, or the result of cls(response)
        :rtype: ~azure.mgmt.resource.resources.v2019_05_01.models.DeploymentOperation
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DeploymentOperation"]
        # Map common auth/ARM status codes to typed exceptions; callers may
        # extend or override the mapping via the ``error_map`` keyword.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        # Build the request from the module-level builder and rebase it onto
        # the configured endpoint.
        request = build_get_at_management_group_scope_request(
            group_id=group_id,
            deployment_name=deployment_name,
            operation_id=operation_id,
            template_url=self.get_at_management_group_scope.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('DeploymentOperation', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    get_at_management_group_scope.metadata = {'url': '/providers/Microsoft.Management/managementGroups/{groupId}/providers/Microsoft.Resources/deployments/{deploymentName}/operations/{operationId}'}  # type: ignore

    @distributed_trace
    def list_at_management_group_scope(
        self,
        group_id: str,
        deployment_name: str,
        top: Optional[int] = None,
        **kwargs: Any
    ) -> Iterable["_models.DeploymentOperationsListResult"]:
        """Gets all deployments operations for a deployment.

        :param group_id: The management group ID.
        :type group_id: str
        :param deployment_name: The name of the deployment.
        :type deployment_name: str
        :param top: The number of results to return.
        :type top: int
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either DeploymentOperationsListResult or the result of
         cls(response)
        :rtype:
         ~azure.core.paging.ItemPaged[~azure.mgmt.resource.resources.v2019_05_01.models.DeploymentOperationsListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DeploymentOperationsListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        def prepare_request(next_link=None):
            # First page uses the templated URL; later pages reuse the
            # service-provided nextLink verbatim.
            if not next_link:
                request = build_list_at_management_group_scope_request(
                    group_id=group_id,
                    deployment_name=deployment_name,
                    top=top,
                    template_url=self.list_at_management_group_scope.metadata['url'],
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
            else:
                request = build_list_at_management_group_scope_request(
                    group_id=group_id,
                    deployment_name=deployment_name,
                    top=top,
                    template_url=next_link,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request

        def extract_data(pipeline_response):
            # Deserialize one page and return (continuation token, items).
            deserialized = self._deserialize("DeploymentOperationsListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            # Fetch a single page, failing fast on non-200 responses.
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list_at_management_group_scope.metadata = {'url': '/providers/Microsoft.Management/managementGroups/{groupId}/providers/Microsoft.Resources/deployments/{deploymentName}/operations'}  # type: ignore

    @distributed_trace
    def get_at_subscription_scope(
        self,
        deployment_name: str,
        operation_id: str,
        **kwargs: Any
    ) -> "_models.DeploymentOperation":
        """Gets a deployments operation.

        :param deployment_name: The name of the deployment.
        :type deployment_name: str
        :param operation_id: The ID of the operation to get.
        :type operation_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: DeploymentOperation, or the result of cls(response)
        :rtype: ~azure.mgmt.resource.resources.v2019_05_01.models.DeploymentOperation
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DeploymentOperation"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        # Subscription id comes from the client configuration, not the caller.
        request = build_get_at_subscription_scope_request(
            deployment_name=deployment_name,
            operation_id=operation_id,
            subscription_id=self._config.subscription_id,
            template_url=self.get_at_subscription_scope.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('DeploymentOperation', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    get_at_subscription_scope.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Resources/deployments/{deploymentName}/operations/{operationId}'}  # type: ignore

    @distributed_trace
    def list_at_subscription_scope(
        self,
        deployment_name: str,
        top: Optional[int] = None,
        **kwargs: Any
    ) -> Iterable["_models.DeploymentOperationsListResult"]:
        """Gets all deployments operations for a deployment.

        :param deployment_name: The name of the deployment.
        :type deployment_name: str
        :param top: The number of results to return.
        :type top: int
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either DeploymentOperationsListResult or the result of
         cls(response)
        :rtype:
         ~azure.core.paging.ItemPaged[~azure.mgmt.resource.resources.v2019_05_01.models.DeploymentOperationsListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DeploymentOperationsListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        def prepare_request(next_link=None):
            # First page uses the templated URL; later pages reuse the
            # service-provided nextLink verbatim.
            if not next_link:
                request = build_list_at_subscription_scope_request(
                    deployment_name=deployment_name,
                    subscription_id=self._config.subscription_id,
                    top=top,
                    template_url=self.list_at_subscription_scope.metadata['url'],
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
            else:
                request = build_list_at_subscription_scope_request(
                    deployment_name=deployment_name,
                    subscription_id=self._config.subscription_id,
                    top=top,
                    template_url=next_link,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request

        def extract_data(pipeline_response):
            # Deserialize one page and return (continuation token, items).
            deserialized = self._deserialize("DeploymentOperationsListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            # Fetch a single page, failing fast on non-200 responses.
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list_at_subscription_scope.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Resources/deployments/{deploymentName}/operations'}  # type: ignore

    @distributed_trace
    def get(
        self,
        resource_group_name: str,
        deployment_name: str,
        operation_id: str,
        **kwargs: Any
    ) -> "_models.DeploymentOperation":
        """Gets a deployments operation.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param deployment_name: The name of the deployment.
        :type deployment_name: str
        :param operation_id: The ID of the operation to get.
        :type operation_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: DeploymentOperation, or the result of cls(response)
        :rtype: ~azure.mgmt.resource.resources.v2019_05_01.models.DeploymentOperation
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DeploymentOperation"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        # Subscription id comes from the client configuration, not the caller.
        request = build_get_request(
            resource_group_name=resource_group_name,
            deployment_name=deployment_name,
            operation_id=operation_id,
            subscription_id=self._config.subscription_id,
            template_url=self.get.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('DeploymentOperation', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/deployments/{deploymentName}/operations/{operationId}'}  # type: ignore

    @distributed_trace
    def list(
        self,
        resource_group_name: str,
        deployment_name: str,
        top: Optional[int] = None,
        **kwargs: Any
    ) -> Iterable["_models.DeploymentOperationsListResult"]:
        """Gets all deployments operations for a deployment.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param deployment_name: The name of the deployment.
        :type deployment_name: str
        :param top: The number of results to return.
        :type top: int
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either DeploymentOperationsListResult or the result of
         cls(response)
        :rtype:
         ~azure.core.paging.ItemPaged[~azure.mgmt.resource.resources.v2019_05_01.models.DeploymentOperationsListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DeploymentOperationsListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        def prepare_request(next_link=None):
            # First page uses the templated URL; later pages reuse the
            # service-provided nextLink verbatim.
            if not next_link:
                request = build_list_request(
                    resource_group_name=resource_group_name,
                    deployment_name=deployment_name,
                    subscription_id=self._config.subscription_id,
                    top=top,
                    template_url=self.list.metadata['url'],
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
            else:
                request = build_list_request(
                    resource_group_name=resource_group_name,
                    deployment_name=deployment_name,
                    subscription_id=self._config.subscription_id,
                    top=top,
                    template_url=next_link,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request

        def extract_data(pipeline_response):
            # Deserialize one page and return (continuation token, items).
            deserialized = self._deserialize("DeploymentOperationsListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            # Fetch a single page, failing fast on non-200 responses.
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/deployments/{deploymentName}/operations'}  # type: ignore
| |
# coding: utf-8
"""
Generated by: https://openapi-generator.tech
"""
from dataclasses import dataclass
import re # noqa: F401
import sys # noqa: F401
import typing
import urllib3
from urllib3._collections import HTTPHeaderDict
from openapi_client import api_client, exceptions
import decimal # noqa: F401
from datetime import date, datetime # noqa: F401
from frozendict import frozendict # noqa: F401
from openapi_client.schemas import ( # noqa: F401
AnyTypeSchema,
ComposedSchema,
DictSchema,
ListSchema,
StrSchema,
IntSchema,
Int32Schema,
Int64Schema,
Float32Schema,
Float64Schema,
NumberSchema,
DateSchema,
DateTimeSchema,
DecimalSchema,
BoolSchema,
BinarySchema,
NoneSchema,
none_type,
InstantiationMetadata,
Unset,
unset,
ComposedBase,
ListBase,
DictBase,
NoneBase,
StrBase,
IntBase,
NumberBase,
DateBase,
DateTimeBase,
BoolBase,
BinaryBase,
Schema,
_SchemaValidator,
_SchemaTypeChecker,
_SchemaEnumMaker
)
from openapi_client.model.pipeline_impl import PipelineImpl
# path params
# Schemas for the three templated path segments; all are plain strings.
OrganizationSchema = StrSchema
PipelineSchema = StrSchema
FolderSchema = StrSchema
RequestRequiredPathParams = typing.TypedDict(
    'RequestRequiredPathParams',
    {
        'organization': OrganizationSchema,
        'pipeline': PipelineSchema,
        'folder': FolderSchema,
    }
)
RequestOptionalPathParams = typing.TypedDict(
    'RequestOptionalPathParams',
    {
    },
    total=False
)


class RequestPathParams(RequestRequiredPathParams, RequestOptionalPathParams):
    """Combined required + optional path parameters for this endpoint."""
    pass

# Serialization descriptors for each path parameter (simple style, required).
request_path_organization = api_client.PathParameter(
    name="organization",
    style=api_client.ParameterStyle.SIMPLE,
    schema=OrganizationSchema,
    required=True,
)
request_path_pipeline = api_client.PathParameter(
    name="pipeline",
    style=api_client.ParameterStyle.SIMPLE,
    schema=PipelineSchema,
    required=True,
)
request_path_folder = api_client.PathParameter(
    name="folder",
    style=api_client.ParameterStyle.SIMPLE,
    schema=FolderSchema,
    required=True,
)
# Endpoint constants: URL template, HTTP verb and the auth schemes it accepts.
_path = '/blue/rest/organizations/{organization}/pipelines/{folder}/pipelines/{pipeline}'
_method = 'GET'
_auth = [
    'jenkins_auth',
]
SchemaFor200ResponseBodyApplicationJson = PipelineImpl


@dataclass
class ApiResponseFor200(api_client.ApiResponse):
    """Deserialized 200 response: a PipelineImpl body."""
    response: urllib3.HTTPResponse
    body: typing.Union[
        SchemaFor200ResponseBodyApplicationJson,
    ]
    headers: Unset = unset


_response_for_200 = api_client.OpenApiResponse(
    response_cls=ApiResponseFor200,
    content={
        'application/json': api_client.MediaType(
            schema=SchemaFor200ResponseBodyApplicationJson),
    },
)


@dataclass
class ApiResponseFor401(api_client.ApiResponse):
    """401 Unauthorized response; no body is deserialized."""
    response: urllib3.HTTPResponse
    body: Unset = unset
    headers: Unset = unset


_response_for_401 = api_client.OpenApiResponse(
    response_cls=ApiResponseFor401,
)


@dataclass
class ApiResponseFor403(api_client.ApiResponse):
    """403 Forbidden response; no body is deserialized."""
    response: urllib3.HTTPResponse
    body: Unset = unset
    headers: Unset = unset


_response_for_403 = api_client.OpenApiResponse(
    response_cls=ApiResponseFor403,
)
# Dispatch table from HTTP status (as a string) to response deserializer.
_status_code_to_response = {
    '200': _response_for_200,
    '401': _response_for_401,
    '403': _response_for_403,
}
_all_accept_content_types = (
    'application/json',
)
class GetPipelineFolderPipeline(api_client.Api):
    """Operation class for GET {organization}/pipelines/{folder}/pipelines/{pipeline}."""

    def get_pipeline_folder_pipeline(
        self: api_client.Api,
        path_params: RequestPathParams = frozendict(),
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: bool = False,
    ) -> typing.Union[
        ApiResponseFor200,
        api_client.ApiResponseWithoutDeserialization
    ]:
        """
        :param skip_deserialization: If true then api_response.response will be set but
            api_response.body and api_response.headers will not be deserialized into schema
            class instances
        """
        self._verify_typed_dict_inputs(RequestPathParams, path_params)

        # Serialize each known path parameter that the caller supplied.
        _path_params = {}
        for parameter in (
            request_path_organization,
            request_path_pipeline,
            request_path_folder,
        ):
            parameter_data = path_params.get(parameter.name, unset)
            if parameter_data is unset:
                continue
            serialized_data = parameter.serialize(parameter_data)
            _path_params.update(serialized_data)

        _headers = HTTPHeaderDict()
        # TODO add cookie handling
        if accept_content_types:
            for accept_content_type in accept_content_types:
                _headers.add('Accept', accept_content_type)

        response = self.api_client.call_api(
            resource_path=_path,
            method=_method,
            path_params=_path_params,
            headers=_headers,
            auth_settings=_auth,
            stream=stream,
            timeout=timeout,
        )

        if skip_deserialization:
            api_response = api_client.ApiResponseWithoutDeserialization(response=response)
        else:
            # Pick the deserializer for the returned status; unknown statuses
            # fall back to the raw, undeserialized response.
            response_for_status = _status_code_to_response.get(str(response.status))
            if response_for_status:
                api_response = response_for_status.deserialize(response, self.api_client.configuration)
            else:
                api_response = api_client.ApiResponseWithoutDeserialization(response=response)

        if not 200 <= response.status <= 299:
            raise exceptions.ApiException(api_response=api_response)

        return api_response
| |
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A Transformed Distribution class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops.distributions import distribution as distribution_lib
from tensorflow.python.ops.distributions import identity_bijector
from tensorflow.python.ops.distributions import util as distribution_util
from tensorflow.python.util import deprecation
# Public API of this module.
__all__ = [
    "TransformedDistribution",
]


# The following helper functions attempt to statically perform a TF operation.
# These functions make debugging easier since we can do more validation during
# graph construction.
def _static_value(x):
  """Return the graph-construction-time value of `x`, or `None` if unknown."""
  tensor = ops.convert_to_tensor(x)
  return tensor_util.constant_value(tensor)
def _logical_and(*args):
  """Statically `reduce_all` when possible; otherwise defer to graph ops."""
  statics = [_static_value(a) for a in args]
  # Any statically-known falsy input decides the result immediately.
  if any(s is not None and not bool(s) for s in statics):
    return constant_op.constant(False)
  # All inputs known and truthy -> statically True.
  if all(s is not None and bool(s) for s in statics):
    return constant_op.constant(True)
  # Otherwise fall back to a graph op.
  return math_ops.logical_and(*args) if len(args) == 2 else math_ops.reduce_all(args)
def _logical_equal(x, y):
  """Statically compute `x == y` when both values are known."""
  x_static = _static_value(x)
  y_static = _static_value(y)
  if x_static is not None and y_static is not None:
    return constant_op.constant(np.array_equal(x_static, y_static))
  return math_ops.equal(x, y)
def _logical_not(x):
  """Statically apply `logical_not` when the value of `x` is known."""
  static_x = _static_value(x)
  return (math_ops.logical_not(x) if static_x is None
          else constant_op.constant(np.logical_not(static_x)))
def _concat_vectors(*args):
  """Concatenate input vectors, statically when every input is known."""
  statics = [_static_value(a) for a in args]
  if any(s is None for s in statics):
    # At least one value is graph-only: emit a runtime concat.
    return array_ops.concat(args, 0)
  flattened = [elem for vec in statics for elem in vec]
  return constant_op.constant(flattened)
def _pick_scalar_condition(pred, cond_true, cond_false):
  """Choose between two scalars based on `pred`, statically if possible."""
  # Note: only valid when pred, cond_true and cond_false are all scalars,
  # which makes the semantics closer to tf.cond than tf.where even though
  # tf.where is used for the dynamic case.
  static_pred = _static_value(pred)
  if static_pred is not None:
    return cond_true if static_pred else cond_false
  return array_ops.where_v2(pred, cond_true, cond_false)
def _ones_like(x):
  """Construct `ones_like(x)` statically when the shape is fully known."""
  # Intended for small vectors only.
  static_shape = x.get_shape()
  if static_shape.is_fully_defined():
    return array_ops.ones(static_shape.as_list(), dtype=x.dtype)
  return array_ops.ones_like(x)
def _ndims_from_shape(shape):
  """Return the `Tensor` rank implied by a shape `Tensor`, statically if possible."""
  shape_of_shape = shape.get_shape()
  # A valid shape vector must itself be 1-D and integer-typed.
  if shape_of_shape.ndims not in (None, 1):
    raise ValueError("input is not a valid shape: not 1D")
  if not shape.dtype.is_integer:
    raise TypeError("input is not a valid shape: wrong dtype")
  if shape_of_shape.is_fully_defined():
    return constant_op.constant(shape_of_shape.as_list()[0])
  return array_ops.shape(shape)[0]
def _is_scalar_from_shape(shape):
  """Return a boolean `Tensor` that is `True` iff `shape` implies a scalar."""
  ndims = _ndims_from_shape(shape)
  return _logical_equal(ndims, 0)
class TransformedDistribution(distribution_lib.Distribution):
"""A Transformed Distribution.
A `TransformedDistribution` models `p(y)` given a base distribution `p(x)`,
and a deterministic, invertible, differentiable transform, `Y = g(X)`. The
transform is typically an instance of the `Bijector` class and the base
distribution is typically an instance of the `Distribution` class.
A `Bijector` is expected to implement the following functions:
- `forward`,
- `inverse`,
- `inverse_log_det_jacobian`.
The semantics of these functions are outlined in the `Bijector` documentation.
We now describe how a `TransformedDistribution` alters the input/outputs of a
`Distribution` associated with a random variable (rv) `X`.
Write `cdf(Y=y)` for an absolutely continuous cumulative distribution function
of random variable `Y`; write the probability density function `pdf(Y=y) :=
d^k / (dy_1,...,dy_k) cdf(Y=y)` for its derivative wrt to `Y` evaluated at
`y`. Assume that `Y = g(X)` where `g` is a deterministic diffeomorphism,
i.e., a non-random, continuous, differentiable, and invertible function.
Write the inverse of `g` as `X = g^{-1}(Y)` and `(J o g)(x)` for the Jacobian
of `g` evaluated at `x`.
A `TransformedDistribution` implements the following operations:
* `sample`
Mathematically: `Y = g(X)`
Programmatically: `bijector.forward(distribution.sample(...))`
* `log_prob`
Mathematically: `(log o pdf)(Y=y) = (log o pdf o g^{-1})(y)
+ (log o abs o det o J o g^{-1})(y)`
Programmatically: `(distribution.log_prob(bijector.inverse(y))
+ bijector.inverse_log_det_jacobian(y))`
* `log_cdf`
Mathematically: `(log o cdf)(Y=y) = (log o cdf o g^{-1})(y)`
Programmatically: `distribution.log_cdf(bijector.inverse(x))`
* and similarly for: `cdf`, `prob`, `log_survival_function`,
`survival_function`.
A simple example constructing a Log-Normal distribution from a Normal
distribution:
```python
ds = tfp.distributions
log_normal = ds.TransformedDistribution(
distribution=ds.Normal(loc=0., scale=1.),
bijector=ds.bijectors.Exp(),
name="LogNormalTransformedDistribution")
```
A `LogNormal` made from callables:
```python
ds = tfp.distributions
log_normal = ds.TransformedDistribution(
distribution=ds.Normal(loc=0., scale=1.),
bijector=ds.bijectors.Inline(
forward_fn=tf.exp,
inverse_fn=tf.math.log,
inverse_log_det_jacobian_fn=(
lambda y: -tf.reduce_sum(tf.math.log(y), axis=-1)),
name="LogNormalTransformedDistribution")
```
Another example constructing a Normal from a StandardNormal:
```python
ds = tfp.distributions
normal = ds.TransformedDistribution(
distribution=ds.Normal(loc=0., scale=1.),
bijector=ds.bijectors.Affine(
shift=-1.,
scale_identity_multiplier=2.)
name="NormalTransformedDistribution")
```
A `TransformedDistribution`'s batch- and event-shape are implied by the base
distribution unless explicitly overridden by `batch_shape` or `event_shape`
arguments. Specifying an overriding `batch_shape` (`event_shape`) is
permitted only if the base distribution has scalar batch-shape (event-shape).
The bijector is applied to the distribution as if the distribution possessed
the overridden shape(s). The following example demonstrates how to construct a
multivariate Normal as a `TransformedDistribution`.
```python
ds = tfp.distributions
# We will create two MVNs with batch_shape = event_shape = 2.
mean = [[-1., 0], # batch:0
[0., 1]] # batch:1
chol_cov = [[[1., 0],
[0, 1]], # batch:0
[[1, 0],
[2, 2]]] # batch:1
mvn1 = ds.TransformedDistribution(
distribution=ds.Normal(loc=0., scale=1.),
bijector=ds.bijectors.Affine(shift=mean, scale_tril=chol_cov),
batch_shape=[2], # Valid because base_distribution.batch_shape == [].
event_shape=[2]) # Valid because base_distribution.event_shape == [].
mvn2 = ds.MultivariateNormalTriL(loc=mean, scale_tril=chol_cov)
# mvn1.log_prob(x) == mvn2.log_prob(x)
```
"""
@deprecation.deprecated(
    "2019-01-01",
    "The TensorFlow Distributions library has moved to "
    "TensorFlow Probability "
    "(https://github.com/tensorflow/probability). You "
    "should update all references to use `tfp.distributions` "
    "instead of `tf.distributions`.",
    warn_once=True)
def __init__(self,
             distribution,
             bijector=None,
             batch_shape=None,
             event_shape=None,
             validate_args=False,
             name=None):
    """Construct a Transformed Distribution.

    Args:
      distribution: The base distribution instance to transform. Typically an
        instance of `Distribution`.
      bijector: The object responsible for calculating the transformation.
        Typically an instance of `Bijector`. `None` means `Identity()`.
      batch_shape: `integer` vector `Tensor` which overrides `distribution`
        `batch_shape`; valid only if `distribution.is_scalar_batch()`.
      event_shape: `integer` vector `Tensor` which overrides `distribution`
        `event_shape`; valid only if `distribution.is_scalar_event()`.
      validate_args: Python `bool`, default `False`. When `True` distribution
        parameters are checked for validity despite possibly degrading runtime
        performance. When `False` invalid inputs may silently render incorrect
        outputs.
      name: Python `str` name prefixed to Ops created by this class. Default:
        `bijector.name + distribution.name`.
    """
    parameters = dict(locals())
    name = name or (("" if bijector is None else bijector.name) +
                    distribution.name)
    with ops.name_scope(name, values=[event_shape, batch_shape]) as name:
        # For convenience we define some handy constants.
        self._zero = constant_op.constant(0, dtype=dtypes.int32, name="zero")
        self._empty = constant_op.constant([], dtype=dtypes.int32, name="empty")

        # A `None` bijector means the identity transform.
        if bijector is None:
            bijector = identity_bijector.Identity(validate_args=validate_args)

        # We will keep track of a static and dynamic version of
        # self._is_{batch,event}_override. This way we can do more prior to graph
        # execution, including possibly raising Python exceptions.
        self._override_batch_shape = self._maybe_validate_shape_override(
            batch_shape, distribution.is_scalar_batch(), validate_args,
            "batch_shape")
        # Graph-time bool: a non-empty batch-shape override was supplied.
        self._is_batch_override = _logical_not(_logical_equal(
            _ndims_from_shape(self._override_batch_shape), self._zero))
        # Python-time bool: the override *might* be active (unknown statically,
        # or statically known to be non-empty).
        self._is_maybe_batch_override = bool(
            tensor_util.constant_value(self._override_batch_shape) is None or
            tensor_util.constant_value(self._override_batch_shape).size != 0)

        self._override_event_shape = self._maybe_validate_shape_override(
            event_shape, distribution.is_scalar_event(), validate_args,
            "event_shape")
        self._is_event_override = _logical_not(_logical_equal(
            _ndims_from_shape(self._override_event_shape), self._zero))
        self._is_maybe_event_override = bool(
            tensor_util.constant_value(self._override_event_shape) is None or
            tensor_util.constant_value(self._override_event_shape).size != 0)

        # To convert a scalar distribution into a multivariate distribution we
        # will draw dims from the sample dims, which are otherwise iid. This is
        # easy to do except in the case that the base distribution has batch dims
        # and we're overriding event shape. When that case happens the event dims
        # will incorrectly be to the left of the batch dims. In this case we'll
        # cyclically permute left the new dims.
        self._needs_rotation = _logical_and(
            self._is_event_override,
            _logical_not(self._is_batch_override),
            _logical_not(distribution.is_scalar_batch()))
        override_event_ndims = _ndims_from_shape(self._override_event_shape)
        self._rotate_ndims = _pick_scalar_condition(
            self._needs_rotation, override_event_ndims, 0)
        # We'll be reducing the head dims (if at all), i.e., this will be []
        # if we don't need to reduce.
        self._reduce_event_indices = math_ops.range(
            self._rotate_ndims - override_event_ndims, self._rotate_ndims)

    self._distribution = distribution
    self._bijector = bijector
    super(TransformedDistribution, self).__init__(
        dtype=self._distribution.dtype,
        reparameterization_type=self._distribution.reparameterization_type,
        validate_args=validate_args,
        allow_nan_stats=self._distribution.allow_nan_stats,
        parameters=parameters,
        # We let TransformedDistribution access _graph_parents since this class
        # is more like a baseclass than derived.
        graph_parents=(distribution._graph_parents +  # pylint: disable=protected-access
                       bijector.graph_parents),
        name=name)
@property
def distribution(self):
    """Base distribution, p(x), to which `bijector` is applied."""
    return self._distribution
@property
def bijector(self):
    """Function transforming x => y, i.e. the forward map of this distribution."""
    return self._bijector
def _event_shape_tensor(self):
    """Dynamic event shape, as seen through the bijector."""
    # Start from the override when it is active, else from the base
    # distribution's event shape.
    base_event_shape = distribution_util.pick_vector(
        self._is_event_override,
        self._override_event_shape,
        self.distribution.event_shape_tensor())
    # The bijector may change the event shape, so it gets the final say.
    return self.bijector.forward_event_shape_tensor(base_event_shape)
def _event_shape(self):
    """Static event shape, as seen through the bijector."""
    # If the event_shape may have been overridden, report what is statically
    # known about the override (`_is_maybe_event_override` means the static
    # override value is `None` or a non-empty list). Otherwise defer to the
    # base distribution. Either way the bijector gets the final say, since it
    # may change the event shape.
    if self._is_maybe_event_override:
        base = tensor_util.constant_value_as_shape(self._override_event_shape)
    else:
        base = self.distribution.event_shape
    return self.bijector.forward_event_shape(base)
def _batch_shape_tensor(self):
    # Dynamic batch shape: the override when one was supplied, otherwise the
    # base distribution's batch shape. The bijector never alters batch dims.
    return distribution_util.pick_vector(
        self._is_batch_override,
        self._override_batch_shape,
        self.distribution.batch_shape_tensor())
def _batch_shape(self):
    """Static batch shape; unlike event_shape, the bijector cannot alter it."""
    # If the batch_shape may have been overridden, report what is statically
    # known about the override (`_is_maybe_batch_override` means the static
    # override value is `None` or a non-empty list); otherwise use the base
    # distribution's batch shape directly.
    if self._is_maybe_batch_override:
        return tensor_util.constant_value_as_shape(self._override_batch_shape)
    return self.distribution.batch_shape
def _sample_n(self, n, seed=None):
    """Draw `n` raw (pre-bijector) samples from the base distribution."""
    # When rotation is needed, the sample dim is drawn last and then rotated
    # to the front by `_maybe_rotate_dims`; otherwise it leads.
    n_vec = [n]
    leading = distribution_util.pick_vector(
        self._needs_rotation, self._empty, n_vec)
    trailing = distribution_util.pick_vector(
        self._needs_rotation, n_vec, self._empty)
    sample_shape = _concat_vectors(
        leading,
        self._override_batch_shape,
        self._override_event_shape,
        trailing)
    raw = self.distribution.sample(sample_shape=sample_shape, seed=seed)
    # The bijector is applied later, in `_call_sample_n`, so caching works.
    return self._maybe_rotate_dims(raw)
def _call_sample_n(self, sample_shape, seed, name, **kwargs):
    # We override `_call_sample_n` rather than `_sample_n` so we can ensure that
    # the result of `self.bijector.forward` is not modified (and thus caching
    # works).
    with self._name_scope(name, values=[sample_shape]):
        sample_shape = ops.convert_to_tensor(
            sample_shape, dtype=dtypes.int32, name="sample_shape")
        sample_shape, n = self._expand_sample_shape_to_vector(
            sample_shape, "sample_shape")

        # First, generate samples. We will possibly generate extra samples in the
        # event that we need to reinterpret the samples as part of the
        # event_shape.
        x = self._sample_n(n, seed, **kwargs)

        # Next, we reshape `x` into its final form. We do this prior to the call
        # to the bijector to ensure that the bijector caching works.
        batch_event_shape = array_ops.shape(x)[1:]
        final_shape = array_ops.concat([sample_shape, batch_event_shape], 0)
        x = array_ops.reshape(x, final_shape)

        # Finally, we apply the bijector's forward transformation. For caching to
        # work, it is imperative that this is the last modification to the
        # returned result.
        y = self.bijector.forward(x, **kwargs)
        y = self._set_sample_static_shape(y, sample_shape)

        return y
def _log_prob(self, y):
    # For caching to work, it is imperative that the bijector is the first to
    # modify the input.
    x = self.bijector.inverse(y)
    event_ndims = self._maybe_get_static_event_ndims()

    ildj = self.bijector.inverse_log_det_jacobian(y, event_ndims=event_ndims)
    if self.bijector._is_injective:  # pylint: disable=protected-access
        return self._finish_log_prob_for_one_fiber(y, x, ildj, event_ndims)

    # Non-injective bijector: `inverse` returns a tuple of pre-images (one per
    # "fiber"); the total density is the sum of the per-fiber densities, which
    # in log space is a logsumexp over fibers.
    lp_on_fibers = [
        self._finish_log_prob_for_one_fiber(y, x_i, ildj_i, event_ndims)
        for x_i, ildj_i in zip(x, ildj)]
    return math_ops.reduce_logsumexp(array_ops.stack(lp_on_fibers), axis=0)
def _finish_log_prob_for_one_fiber(self, y, x, ildj, event_ndims):
    """Finish computation of log_prob on one element of the inverse image."""
    # Undo any event/batch-dim rotation before evaluating the base density.
    x = self._maybe_rotate_dims(x, rotate_right=True)
    log_prob = self.distribution.log_prob(x)
    if self._is_maybe_event_override:
        # Overridden event dims are iid draws from the base distribution, so
        # the joint log-density is the sum over those dims.
        log_prob = math_ops.reduce_sum(log_prob, self._reduce_event_indices)
    log_prob += math_ops.cast(ildj, log_prob.dtype)
    if self._is_maybe_event_override and isinstance(event_ndims, int):
        # Best-effort static shape: sample + batch dims only.
        log_prob.set_shape(
            array_ops.broadcast_static_shape(
                y.get_shape().with_rank_at_least(1)[:-event_ndims],
                self.batch_shape))
    return log_prob
def _prob(self, y):
    # For caching to work, the bijector must be the first to touch the input.
    x = self.bijector.inverse(y)
    event_ndims = self._maybe_get_static_event_ndims()
    ildj = self.bijector.inverse_log_det_jacobian(y, event_ndims=event_ndims)
    if self.bijector._is_injective:  # pylint: disable=protected-access
        return self._finish_prob_for_one_fiber(y, x, ildj, event_ndims)

    # Non-injective bijector: `inverse` returns one pre-image per "fiber";
    # total probability is the sum over fibers.
    prob_on_fibers = [
        self._finish_prob_for_one_fiber(y, x_i, ildj_i, event_ndims)
        for x_i, ildj_i in zip(x, ildj)]
    return sum(prob_on_fibers)
def _finish_prob_for_one_fiber(self, y, x, ildj, event_ndims):
    """Finish computation of prob on one element of the inverse image."""
    # Undo any event/batch-dim rotation before evaluating the base density.
    x = self._maybe_rotate_dims(x, rotate_right=True)
    prob = self.distribution.prob(x)
    if self._is_maybe_event_override:
        # Overridden event dims are iid, so the joint density is the product.
        prob = math_ops.reduce_prod(prob, self._reduce_event_indices)
    prob *= math_ops.exp(math_ops.cast(ildj, prob.dtype))
    if self._is_maybe_event_override and isinstance(event_ndims, int):
        # Best-effort static shape: sample + batch dims only.
        prob.set_shape(
            array_ops.broadcast_static_shape(
                y.get_shape().with_rank_at_least(1)[:-event_ndims],
                self.batch_shape))
    return prob
def _log_cdf(self, y):
    """log CDF via the base distribution; requires an injective bijector."""
    if self._is_maybe_event_override:
        raise NotImplementedError(
            "log_cdf is not implemented when overriding event_shape")
    if not self.bijector._is_injective:  # pylint: disable=protected-access
        raise NotImplementedError(
            "log_cdf is not implemented when bijector is not injective.")
    # P[Y <= y] = P[X <= g^{-1}(y)] for monotone g.
    return self.distribution.log_cdf(self.bijector.inverse(y))
def _cdf(self, y):
    """CDF via the base distribution; requires an injective bijector."""
    if self._is_maybe_event_override:
        raise NotImplementedError(
            "cdf is not implemented when overriding event_shape")
    if not self.bijector._is_injective:  # pylint: disable=protected-access
        raise NotImplementedError(
            "cdf is not implemented when bijector is not injective.")
    # P[Y <= y] = P[X <= g^{-1}(y)] for monotone g.
    return self.distribution.cdf(self.bijector.inverse(y))
def _log_survival_function(self, y):
    """log survival via the base distribution; requires an injective bijector."""
    if self._is_maybe_event_override:
        raise NotImplementedError(
            "log_survival_function is not implemented when "
            "overriding event_shape")
    if not self.bijector._is_injective:  # pylint: disable=protected-access
        raise NotImplementedError(
            "log_survival_function is not implemented when "
            "bijector is not injective.")
    # P[Y > y] = P[X > g^{-1}(y)] for monotone g.
    return self.distribution.log_survival_function(self.bijector.inverse(y))
def _survival_function(self, y):
    """Survival function via the base distribution; bijector must be injective."""
    if self._is_maybe_event_override:
        raise NotImplementedError(
            "survival_function is not implemented when "
            "overriding event_shape")
    if not self.bijector._is_injective:  # pylint: disable=protected-access
        raise NotImplementedError(
            "survival_function is not implemented when "
            "bijector is not injective.")
    # P[Y > y] = P[X > g^{-1}(y)] for monotone g.
    return self.distribution.survival_function(self.bijector.inverse(y))
def _quantile(self, value):
    """Quantile of Y = g(X): forward-transform the base quantile."""
    if self._is_maybe_event_override:
        raise NotImplementedError(
            "quantile is not implemented when overriding event_shape")
    if not self.bijector._is_injective:  # pylint: disable=protected-access
        raise NotImplementedError(
            "quantile is not implemented when bijector is not injective.")
    # x_q is the "qth quantile" of X iff q = P[X <= x_q]. Since X = g^{-1}(Y),
    # q = P[g^{-1}(Y) <= x_q] = P[Y <= g(x_q)], so the qth quantile of Y is
    # g(x_q).
    return self.bijector.forward(self.distribution.quantile(value))
def _entropy(self):
    """Entropy of the transformed distribution (constant-Jacobian bijectors only)."""
    if not self.bijector.is_constant_jacobian:
        raise NotImplementedError("entropy is not implemented")
    if not self.bijector._is_injective:  # pylint: disable=protected-access
        raise NotImplementedError("entropy is not implemented when "
                                  "bijector is not injective.")
    # Suppose Y = g(X) where g is a diffeomorphism and X is a continuous rv. It
    # can be shown that:
    #   H[Y] = H[X] + E_X[(log o abs o det o J o g)(X)].
    # If is_constant_jacobian then:
    #   E_X[(log o abs o det o J o g)(X)] = (log o abs o det o J o g)(c)
    # where c can be anything.
    entropy = self.distribution.entropy()
    if self._is_maybe_event_override:
        # H[X] = sum_i H[X_i] if X_i are mutually independent.
        # This means that a reduce_sum is a simple rescaling.
        entropy *= math_ops.cast(math_ops.reduce_prod(self._override_event_shape),
                                 dtype=entropy.dtype.base_dtype)
    if self._is_maybe_batch_override:
        # Tile the base entropy out to the overridden batch shape.
        new_shape = array_ops.concat([
            _ones_like(self._override_batch_shape),
            self.distribution.batch_shape_tensor()
        ], 0)
        entropy = array_ops.reshape(entropy, new_shape)
        multiples = array_ops.concat([
            self._override_batch_shape,
            _ones_like(self.distribution.batch_shape_tensor())
        ], 0)
        entropy = array_ops.tile(entropy, multiples)
    # The Jacobian correction is constant (checked above), so it may be
    # evaluated at a dummy all-zeros tensor of the full batch+event shape.
    dummy = array_ops.zeros(
        shape=array_ops.concat(
            [self.batch_shape_tensor(), self.event_shape_tensor()],
            0),
        dtype=self.dtype)
    event_ndims = (self.event_shape.ndims if self.event_shape.ndims is not None
                   else array_ops.size(self.event_shape_tensor()))
    ildj = self.bijector.inverse_log_det_jacobian(
        dummy, event_ndims=event_ndims)

    entropy -= math_ops.cast(ildj, entropy.dtype)
    entropy.set_shape(self.batch_shape)
    return entropy
def _maybe_validate_shape_override(self, override_shape, base_is_scalar,
                                   validate_args, name):
    """Helper to __init__ which ensures override batch/event_shape are valid."""
    if override_shape is None:
        override_shape = []

    override_shape = ops.convert_to_tensor(override_shape, dtype=dtypes.int32,
                                           name=name)

    if not override_shape.dtype.is_integer:
        raise TypeError("shape override must be an integer")

    # A statically-scalar (rank-0) override means "no override": normalize to
    # the canonical empty vector.
    override_is_scalar = _is_scalar_from_shape(override_shape)
    if tensor_util.constant_value(override_is_scalar):
        return self._empty

    # Collect graph-time assertions for whatever cannot be checked statically.
    dynamic_assertions = []

    if override_shape.get_shape().ndims is not None:
        if override_shape.get_shape().ndims != 1:
            raise ValueError("shape override must be a vector")
    elif validate_args:
        dynamic_assertions += [check_ops.assert_rank(
            override_shape, 1,
            message="shape override must be a vector")]

    if tensor_util.constant_value(override_shape) is not None:
        if any(s <= 0 for s in tensor_util.constant_value(override_shape)):
            raise ValueError("shape override must have positive elements")
    elif validate_args:
        dynamic_assertions += [check_ops.assert_positive(
            override_shape,
            message="shape override must have positive elements")]

    # Overriding is only permitted when the base dimension is scalar.
    is_both_nonscalar = _logical_and(_logical_not(base_is_scalar),
                                     _logical_not(override_is_scalar))
    if tensor_util.constant_value(is_both_nonscalar) is not None:
        if tensor_util.constant_value(is_both_nonscalar):
            raise ValueError("base distribution not scalar")
    elif validate_args:
        dynamic_assertions += [check_ops.assert_equal(
            is_both_nonscalar, False,
            message="base distribution not scalar")]

    if not dynamic_assertions:
        return override_shape
    # Tie the assertions to the returned tensor so they run whenever it is used.
    return control_flow_ops.with_dependencies(
        dynamic_assertions, override_shape)
def _maybe_rotate_dims(self, x, rotate_right=False):
    """Helper which rolls left event_dims left or right event_dims right."""
    needs_rotation_const = tensor_util.constant_value(self._needs_rotation)
    if needs_rotation_const is not None and not needs_rotation_const:
        # Statically known that no rotation is needed: pass through unchanged.
        return x
    ndims = array_ops.rank(x)
    # Number of leading dims to cycle to the back (right-rotation rotates the
    # complementary count, undoing a prior left-rotation).
    n = (ndims - self._rotate_ndims) if rotate_right else self._rotate_ndims
    return array_ops.transpose(
        x, _concat_vectors(math_ops.range(n, ndims), math_ops.range(0, n)))
def _maybe_get_static_event_ndims(self):
    """Return the event rank as a Python int when known statically, else a Tensor."""
    static_ndims = self.event_shape.ndims
    if static_ndims is not None:
        return static_ndims
    # Fall back to the dynamic rank, but still try to recover a static value
    # from it before giving up.
    dynamic_ndims = array_ops.size(self.event_shape_tensor())
    recovered = distribution_util.maybe_get_static_value(dynamic_ndims)
    return dynamic_ndims if recovered is None else recovered
| |
from PyQt5.QtCore import Qt, pyqtSignal, QRectF, QByteArray, QDataStream
from PyQt5.QtGui import QPainter, QPainterPath, QColor
from PyQt5.QtOpenGL import QGLWidget
from PyQt5.QtWidgets import QGraphicsObject
import pyqtgraph as pg
import numpy as np
import struct
from OpenGL import GL
__all__ = ['GraphItemBase', 'PathItem', 'PlotCurveItem']
class GraphItemBase(QGraphicsObject):
    """Base class for plot items that render via QPainter or raw OpenGL.

    `paint` dispatches to `paintGL` when the viewport widget is a QGLWidget
    (clipping GL output to the view box with a scissor test) and to `paint_`
    otherwise. Bounding rects are cached until `invalidateBoundingRect`.
    """

    hoveringChanged = pyqtSignal()
    touchable = False  # subclasses set True to take part in hover/touch picking
    label = None       # optional label text set by subclasses

    def __init__(self, view):
        super().__init__()
        self.view = view
        self.view.pixelRatioChanged.connect(self.pixelRatioChanged)
        self.pen = None
        self.brush = None
        self.hovering = False
        self.__boundingRect = None  # lazily-computed cache, see boundingRect()

    def setPenColor(self, color):
        # No-op when the item has no pen.
        if self.pen:
            self.pen.setColor(QColor(color))

    def pixelRatioChanged(self):
        # Hook: called when the view's pixel ratio changes; default does nothing.
        pass

    def addParam(self, name, param):
        # Expose `param` as an attribute and repaint whenever its value changes.
        setattr(self, name, param)
        param.valueChanged.connect(self.paramChanged)

    def paramChanged(self):
        self.prepareGeometryChange()
        self.invalidateBoundingRect()
        self.update()

    def paint(self, painter, option, widget):
        if isinstance(widget, QGLWidget):
            painter.beginNativePainting()
            GL.glDisable(GL.GL_STENCIL_TEST)
            # Restrict GL drawing to the plot's view box. GL window coordinates
            # have their origin at the bottom-left, hence the height flip.
            # NOTE(review): the +1/-1 pixel offsets look empirically tuned
            # (presumably for the view frame border) — confirm.
            vb = self.view.viewBox
            tl = vb.mapToParent(vb.boundingRect().topLeft())
            br = vb.mapToParent(vb.boundingRect().bottomRight())
            x, y = tl.x() + 1, self.view.size().height() - br.y() + 1
            w, h = br.x() - x, br.y() - tl.y() - 1
            GL.glEnable(GL.GL_SCISSOR_TEST)
            GL.glScissor(*map(int, (x, y, w, h)))
            try:
                self.paintGL()
            finally:
                painter.endNativePainting()
        else:
            painter.setRenderHint(QPainter.Antialiasing)
            if self.pen: painter.setPen(self.pen)
            if self.brush: painter.setBrush(self.brush)
            self.paint_(painter)

    def paint_(self, painter):
        # QPainter rendering hook; overridden by subclasses.
        pass

    def paintGL(self):
        # OpenGL rendering hook; overridden by subclasses.
        pass

    def dataBounds(self, ax, frac, orthoRange=None):
        # pyqtgraph auto-range hook; None means "no data bounds".
        return None

    def boundingRect(self):
        if self.__boundingRect is None:
            self.__boundingRect = self.boundingRect_()
        return self.__boundingRect

    def boundingRect_(self):
        # Subclasses return their actual bounds; default is an empty rect.
        return QRectF()

    def invalidateBoundingRect(self):
        self.__boundingRect = None

    def hoverEvent(self, ev):
        if ev.enter:
            self.hovering = True
        elif ev.exit:
            self.hovering = False
        self.hoveringChanged.emit()

    def GL_drawPath(self, x, y, mode=GL.GL_LINE_STRIP):
        """Draw the polyline/polygon given by x/y using legacy GL vertex arrays."""
        points = np.empty((len(x), 2))
        points[:,0] = x
        points[:,1] = y
        GL.glEnableClientState(GL.GL_VERTEX_ARRAY)
        try:
            GL.glVertexPointerf(points)
            if mode in (GL.GL_LINES, GL.GL_LINE_STRIP, GL.GL_LINE_LOOP):
                # Smooth, alpha-blended lines at the pen's width/color.
                GL.glEnable(GL.GL_LINE_SMOOTH)
                GL.glHint(GL.GL_LINE_SMOOTH_HINT, GL.GL_NICEST)
                GL.glEnable(GL.GL_BLEND)
                GL.glBlendFunc(GL.GL_SRC_ALPHA, GL.GL_ONE_MINUS_SRC_ALPHA)
                color = self.pen.color()
                GL.glLineWidth(self.pen.width())
            elif mode == GL.GL_TRIANGLE_FAN:
                color = self.brush.color()
            # NOTE(review): `color` is only assigned for the line modes and
            # GL_TRIANGLE_FAN; any other mode would raise NameError on the next
            # line — confirm callers never pass other modes.
            GL.glColor3f(color.red()/255, color.green()/255, color.blue()/255)
            GL.glDrawArrays(mode, 0, points.shape[0]);
        finally:
            GL.glDisableClientState(GL.GL_VERTEX_ARRAY);
class PathItem(GraphItemBase):
    """A polyline built from x/y arrays, drawable via QPainterPath or OpenGL."""

    # Widest stroke this item ever draws; used to pad the bounding rect.
    maxLineWidth = 2

    def __init__(self, view, color):
        super().__init__(view)
        self.pen = pg.mkPen(color, width=2)

    def paint_(self, painter):
        painter.drawPath(self.path)

    def paintGL(self):
        self.GL_drawPath(self.x, self.y)

    def setXY(self, x, y):
        """Replace the item's data arrays and schedule a repaint."""
        self.x = x
        self.y = y
        self.updatePath()
        self.prepareGeometryChange()
        self.invalidateBoundingRect()
        self.update()

    def updatePath(self):
        self.path = self.createPath(self.x, self.y)

    @classmethod
    def createPath(cls, x, y, fill=Qt.OddEvenFill):
        """Build a QPainterPath from x/y arrays via its QDataStream format.

        Rather than issuing N Python-level moveTo/lineTo calls, the element
        table is written into a numpy record array laid out exactly like
        QPainterPath's serialized form, then deserialized in one shot.
        """
        # https://code.woboq.org/qt5/qtbase/src/gui/painting/qpainterpath.cpp.html#_ZrsR11QDataStreamR12QPainterPath
        # http://doc.qt.io/qt-5/qpainterpath.html#ElementType-enum
        # http://doc.qt.io/qt-5/qt.html#FillRule-enum
        # QDataStream &QPainterPath::operator>>(QDataStream &s, QPainterPath &p)
        # offset        size type   description
        # 0             4    int32  element count (N)
        # 4             4    int32  element type (0 -- 3)
        # 8             8    double x
        # 16            8    double y
        # ...
        # 20*i+ 4       4    int32  element type (0 -- 3)
        # 20*i+ 8       8    double x
        # 20*i+16       8    double y
        # ...
        # 20*(N-1)+ 4   4    int32  element type (0 -- 3)
        # 20*(N-1)+ 8   8    double x
        # 20*(N-1)+16   8    double y
        # 20*(N-1)+20   4    int32  next starting i (N-1)
        # 20*(N-1)+24   4    int32  fill rule
        path = QPainterPath()
        N = x.shape[0]
        if N == 0:
            return path

        # N+2 records of 20 bytes: record 0 supplies 16 padding bytes plus the
        # leading int32 element count at offset 16; records 1..N hold the
        # elements (type 0 = MoveTo for the first point, 1 = LineTo for the
        # rest); the final 8 bytes carry the trailing int32 pair.
        data = np.empty(N+2, dtype=[('type', '<i4'), ('x', '<f8'), ('y', '<f8')])
        data[1]['type'] = 0
        data[2:N+1]['type'] = 1
        data[1:N+1]['x'] = x
        data[1:N+1]['y'] = y
        fpos = 20*(N+1)
        view = data.view(dtype=np.ubyte)
        view[:16] = 0
        view.data[16:20] = struct.pack('<i', N)
        view.data[fpos:fpos+8] = struct.pack('<ii', N-1, int(fill))
        # fromRawData avoids copying; the stream reads straight out of `data`.
        buf = QByteArray.fromRawData(view.data[16:fpos+8])
        ds = QDataStream(buf)
        ds.setByteOrder(ds.LittleEndian)
        ds >> path
        return path

    def dataBounds(self, ax, frac, orthoRange=None):
        # pyqtgraph auto-range: min/max extent along the requested axis.
        if ax == 0:
            return min(self.x), max(self.x)
        else:
            return min(self.y), max(self.y)

    def boundingRect_(self):
        # Pad the data extents by the maximum stroke width, converted from
        # pixels to data units via the view's pixel ratio.
        rx, ry = self.view.pixelRatio
        x1, x2 = min(self.x), max(self.x)
        y1, y2 = min(self.y), max(self.y)
        mx, my = self.maxLineWidth*rx, self.maxLineWidth*ry
        return QRectF(x1-mx/2, y1-my/2, x2-x1+mx, y2-y1+my)
class PlotCurveItem(PathItem):
    """A hoverable, highlightable curve; hit testing uses a widened outline."""

    touchable = True
    maxLineWidth = 4  # highlighted pen width; keeps boundingRect_ padding valid

    def __init__(self, x, y, view, color, label=None):
        super().__init__(view, color)
        self.label = label
        self.shapePath = None  # cached hit-test outline, see shape()
        self.setXY(x, y)
        self.setHighlighted(False)
        self.setAcceptHoverEvents(True)

    def pixelRatioChanged(self):
        super().pixelRatioChanged()
        # The shape is built in pixel space, so it must be recomputed.
        self.shapePath = None

    def updatePath(self):
        super().updatePath()
        self.shapePath = None

    def createShapePath(self):
        """Build a ribbon around the curve (w pixels wide) for hit testing."""
        rx, ry = self.view.pixelRatio
        w = 8
        dx = self.x[1:] - self.x[:-1]
        dy = self.y[1:] - self.y[:-1]
        # Angle of each segment's normal in pixel space: arctan2(dx/rx, -dy/ry)
        # is the segment direction rotated by 90 degrees.
        theta1 = np.arctan2(dx/rx, -dy/ry)
        theta2 = theta1 + np.pi
        # Offsets of w/2 pixels to either side, converted back to data units.
        dxf = np.cos(theta1)*w/2*rx
        dyf = np.sin(theta1)*w/2*ry
        dxr = np.cos(theta2)*w/2*rx
        dyr = np.sin(theta2)*w/2*ry
        # Duplicate each segment's endpoints so the ribbon follows every segment.
        xf = np.array([self.x[:-1]+dxf, self.x[1:]+dxf]).flatten('F')
        yf = np.array([self.y[:-1]+dyf, self.y[1:]+dyf]).flatten('F')
        xr = np.array([self.x[:-1]+dxr, self.x[1:]+dxr]).flatten('F')
        yr = np.array([self.y[:-1]+dyr, self.y[1:]+dyr]).flatten('F')
        # One side forward, the other reversed, closed into a single polygon.
        stroke = self.createPath(
            np.append(xf, xr[::-1]),
            np.append(yf, yr[::-1]),
            Qt.WindingFill
        )
        stroke.closeSubpath()
        return stroke

    def shape(self):
        if not self.shapePath:
            self.shapePath = self.createShapePath()
        return self.shapePath

    def setHighlighted(self, highlighted):
        self.update()
        self.pen.setWidth(4 if highlighted else 2)
| |
# Authors: Phani Vadrevu <pvadrevu@uga.edu>
# Roberto Perdisci <perdisci@cs.uga.edu>
import sys
from datetime import timedelta, date
import time
import simplejson
import logging
import logging.config
from config import *
import vt_api
import util
LOG_CONF_FILE = "logging.conf"
class VTSubmissions:
    """Submit file hashes to VirusTotal and collect the resulting reports.

    Candidate (md5, sha1) pairs are gathered once per day from the database
    (the source table depends on the ``vt_submissions`` config setting),
    submitted at a rate of at most ``QUERY_RATE_LIMIT`` requests per minute,
    and the scan reports are later fetched back into the
    ``virus_total_submissions`` table.
    """

    def __init__(self):
        # VirusTotal public API rate limit: requests allowed per minute.
        self.QUERY_RATE_LIMIT = 10
        self.ONE_MIN = 60
        logging.config.fileConfig(LOG_CONF_FILE)
        self.logger = logging.getLogger("amico_logger")
        util.setup_socks()
        self.conn = util.connect_to_db()
        self.cursor = self.conn.cursor()
        # Date strings (YYYY-MM-DD) used in the SQL date comparisons below.
        self.today = date.today().strftime("%Y-%m-%d")
        self.yesterday = (date.today() -
                          timedelta(days=1)).strftime("%Y-%m-%d")
        self.last_month = (date.today() -
                           timedelta(days=30)).strftime("%Y-%m-%d")

    def get_hashes_from_db(self):
        """Load yesterday's candidate (md5, sha1) pairs into ``self.hashes``."""
        if vt_submissions == "manual":
            hashes = self.get_hashes_from_db_manual()
        elif vt_submissions == "live":
            hashes = self.get_hashes_from_db_live()
        else:
            hashes = self.get_hashes_from_db_scans()
        self.logger.debug("get_hashes_from_db(): Yesterday's hashes: %s",
                          len(hashes))
        self.hashes = self.update_hashes(hashes)

    def update_hashes(self, hashes):
        """Reconcile candidates against previous submissions.

        Adds hashes submitted exactly 30 days ago (due for re-scan) and drops
        hashes already submitted within the past month or earlier today.
        """
        self.cursor.execute("""
            SELECT distinct md5, sha1
            FROM virus_total_submissions
            WHERE (submit_time::date) = %s
            """, (self.last_month,))
        if self.cursor.rowcount > 0:
            hashes = hashes.union(self.cursor.fetchall())
        self.cursor.execute("""
            SELECT distinct md5, sha1
            FROM virus_total_submissions
            WHERE (submit_time::date) > %s AND
                  (submit_time::date) < %s
            """, (self.last_month, self.yesterday))
        if self.cursor.rowcount > 0:
            hashes = hashes.difference(self.cursor.fetchall())
        self.cursor.execute("""
            SELECT distinct md5, sha1
            FROM virus_total_submissions
            WHERE (submit_time::date) = %s
            """, (self.today,))
        if self.cursor.rowcount > 0:
            hashes = hashes.difference(self.cursor.fetchall())
        self.logger.debug("update_hashes(): Number of hashes: %s", len(hashes))
        return hashes

    def get_hashes_from_db_scans(self):
        """Hashes that produced a VT scan report yesterday."""
        self.cursor.execute("""
            SELECT distinct md5, sha1
            FROM virus_total_scans
            WHERE json IS NOT NULL AND
                  query_time::date = %s
            """, (self.yesterday,))
        if self.cursor.rowcount > 0:
            return set(self.cursor.fetchall())
        return set()

    def get_hashes_from_db_live(self):
        """Hashes of PE files dumped from live traffic yesterday."""
        self.cursor.execute("""
            SELECT distinct md5, sha1
            FROM pe_dumps
            WHERE sha1 IS NOT NULL AND
                  timestamp::date = %s
            """, (self.yesterday,))
        if self.cursor.rowcount > 0:
            return set(self.cursor.fetchall())
        return set()

    def get_hashes_from_db_manual(self):
        """Hashes of yesterday's manual downloads that lack a referer."""
        self.logger.debug("entered get_hashes_from_db_manual()")
        self.cursor.execute("""
            SELECT distinct md5, sha1
            FROM manual_download_checksums
            WHERE referer_exists = 'f' AND
                  sha1 IS NOT NULL AND
                  timestamp::date = %s
            """, (self.yesterday,))
        if self.cursor.rowcount > 0:
            return set(self.cursor.fetchall())
        return set()

    def insert_scan(self, sha1, md5, response):
        """Record a successful submission; link any 30-day-old ancestor row."""
        self.logger.debug("entered insert_scan()")
        self.cursor.execute("""
            INSERT INTO virus_total_submissions
                (submit_time, sha1, md5, scan_id)
            VALUES (LOCALTIMESTAMP, %s, %s, %s)
            RETURNING vt_submit_id
            """, (sha1, md5, response['scan_id']))
        vt_submit_id = self.cursor.fetchone()[0]
        # Point the month-old submission of the same file (if any) at this
        # resubmission.
        self.cursor.execute("""
            UPDATE virus_total_submissions
            SET resubmit_id = %s
            WHERE sha1= %s AND
                  submit_time::date = %s
            """, (vt_submit_id, sha1, self.last_month))

    def check_report_exists(self, sha1):
        """Return True if a completed VT report is already stored for `sha1`."""
        self.cursor.execute("""
            SELECT * FROM virus_total_scans
            WHERE sha1 = %s AND
                  scan_time IS NOT NULL""", (sha1, ))
        if self.cursor.rowcount:
            return True
        self.cursor.execute("""
            SELECT * FROM virus_total_submissions
            WHERE sha1 = %s AND
                  json IS NOT NULL""", (sha1, ))
        return bool(self.cursor.rowcount)

    def make_request(self, md5, sha1):
        """Submit one hash to VT: rescan when a report exists, else upload.

        Returns True on a successful submission, False otherwise.
        """
        self.logger.debug("entered make_request()")
        self.logger.debug("sha1: %s", sha1)
        report_exists = self.check_report_exists(sha1)
        self.logger.debug("report_exists: %s", report_exists)
        json = None
        try:
            json = (vt_api.rescan_request(md5) if report_exists else
                    vt_api.send_file(md5))
            if json:
                response = simplejson.loads(json)
                if response["response_code"] == 1:
                    self.insert_scan(sha1, md5, response)
                    return True
                else:
                    self.logger.warning("make_request: Bad response code: %s",
                                        response["response_code"])
            else:
                self.logger.warning("make_request: No JSON response")
        except Exception as e:
            # Best-effort boundary: log full context and report failure.
            self.logger.exception("report_exists: %s", report_exists)
            self.logger.exception("json: %s", json)
            self.logger.exception("sha1: %s", sha1)
            self.logger.exception("make_request: Error %s", e)
        return False

    def submit_hashes(self):
        """Submit all pending hashes, respecting the per-minute rate limit."""
        self.logger.debug("entered submit_hashes()")
        query_count = 0
        done_hashes = set()
        for md5, sha1 in self.hashes:
            tries = 0
            # Make at most 3 attempts to send a scan request. (Fixed: the
            # previous `tries <= 3` condition allowed a fourth attempt.)
            while tries < 3:
                if query_count == self.QUERY_RATE_LIMIT:
                    self.logger.debug(
                        "Query limit reached. Sleeping for a min.")
                    time.sleep(self.ONE_MIN)
                    query_count = 0
                tries += 1
                query_count += 1
                if self.make_request(md5, sha1):
                    done_hashes.add((md5, sha1))
                    break
        if len(self.hashes):
            self.logger.debug("Submitted the hashes on: %s", date.today())
        # Keep only the hashes that still need a successful submission.
        self.hashes.difference_update(done_hashes)

    def update_table_with_report(self, scan_id, report, json):
        """Store a fetched VT report and its derived AV-label counts."""
        self.logger.debug("entered update_table_with_report()")
        scan_time = report["scan_date"]
        scans = report["scans"]
        num_av_labels = report["positives"]
        trusted_av_labels = 0
        # `.items()` instead of the Python-2-only `.iteritems()`.
        for vendor, result in scans.items():
            if result["detected"] is True and vendor in trusted_av_vendors:
                trusted_av_labels += 1
        # The scan_date string is treated as UTC; make that explicit so
        # Postgres parses it as a timestamptz literal.
        scan_time += " UTC"
        self.cursor.execute("""
            UPDATE virus_total_submissions
            SET trusted_av_labels = %s,
                num_av_labels = %s,
                scan_time = TIMESTAMP WITH TIME ZONE %s,
                json = %s
            WHERE scan_id = %s and json is NULL""",
                            (trusted_av_labels, num_av_labels, scan_time,
                             json, scan_id))

    def fetch_reports(self):
        """Fetch reports for submissions pending between 5 minutes and 3 days."""
        self.logger.debug("entered fetch_reports()")
        self.cursor.execute("""
            SELECT scan_id
            FROM virus_total_submissions
            WHERE json is NULL and
                  (LOCALTIMESTAMP - submit_time) > '5 minutes' and
                  (LOCALTIMESTAMP - submit_time) < '3 days'
            ORDER BY submit_time ASC""")
        scan_ids = [row[0] for row in self.cursor.fetchall()]
        self.logger.debug("fetch_reports(): %s scan reports to be fetched",
                          len(scan_ids))
        query_count = 0
        for scan_id in scan_ids:
            if query_count == self.QUERY_RATE_LIMIT:
                self.logger.debug(
                    "Query limit reached. Sleeping for a min.")
                time.sleep(self.ONE_MIN)
                query_count = 0
            query_count += 1
            try:
                json = vt_api.get_vt_report(scan_id)
                if not json:
                    self.logger.debug("No json")
                    continue
                report = simplejson.loads(json)
                # Sometimes we wrongly get an old report back; skip those.
                if (report["response_code"] != 1) or (
                        report['scan_id'] != scan_id):
                    self.logger.debug("Response code %s for scan_id %s",
                                      report["response_code"], scan_id)
                    continue
                self.update_table_with_report(scan_id, report, json)
            except Exception as e:
                self.logger.exception(
                    "Error in fetching report for scan_id %s: %s", scan_id, e)
                continue
def sleep_for_the_day():
    """Block until the calendar date changes, waking every 15 minutes to check."""
    start_day = date.today()
    while date.today() == start_day:
        time.sleep(15 * 60)
def vt_submissions_func():
    """Daemon loop: submit pending hashes and fetch reports every 15 minutes."""
    vt_submit = VTSubmissions()
    vt_submit.get_hashes_from_db()
    while True:
        try:
            vt_submit.submit_hashes()
            vt_submit.fetch_reports()
        except Exception as e:
            # On any unexpected failure, back off until the next calendar day.
            vt_submit.logger.exception(
                "Unexpected error! %s \n Sleeping for the rest of the day", e)
            sleep_for_the_day()
        vt_submit.logger.debug("main(): Sleeping for 15 min.")
        time.sleep(vt_submit.ONE_MIN * 15)
        # When the date rolls over, refresh the date window and reload the
        # candidate hash set.
        today = date.today().strftime("%Y-%m-%d")
        if today != vt_submit.today:
            vt_submit.today = today
            vt_submit.yesterday = (date.today() -
                                   timedelta(days=1)).strftime("%Y-%m-%d")
            vt_submit.last_month = (date.today() -
                                    timedelta(days=30)).strftime("%Y-%m-%d")
            vt_submit.get_hashes_from_db()
# Script entry point: run the VT submission daemon loop.
if __name__ == "__main__":
    vt_submissions_func()
| |
import time
import threading
import xmlrpc.client as xmlrpc
from ..hub import WebProfileDialog
from ..hub_proxy import SAMPHubProxy
from ..client import SAMPClient
from ..integrated_client import SAMPIntegratedClient
from ..utils import ServerProxyPool
from ..errors import SAMPClientError, SAMPHubError
class AlwaysApproveWebProfileDialog(WebProfileDialog):
    """Web-profile dialog that consents to every connection request.

    Intended for testing: no dialog is ever shown, every incoming web client
    is approved immediately. Run `poll` in a background thread and call
    `stop` to end polling.
    """

    def __init__(self):
        self.polling = True
        WebProfileDialog.__init__(self)

    def show_dialog(self, *args):
        # Approve unconditionally instead of prompting a user.
        self.consent()

    def poll(self):
        # Service queued consent requests until stop() clears the flag.
        while self.polling:
            self.handle_queue()
            time.sleep(0.1)

    def stop(self):
        self.polling = False
class SAMPWebHubProxy(SAMPHubProxy):
    """
    Proxy class to simplify the client interaction with a SAMP hub (via the web
    profile).

    In practice web clients should run from the browser, so this is provided as
    a means of testing a hub's support for the web profile from Python.
    """

    def connect(self, pool_size=20, web_port=21012):
        """
        Connect to the current SAMP Hub on localhost:web_port.

        Parameters
        ----------
        pool_size : int, optional
            The number of socket connections opened to communicate with the
            Hub.
        web_port : int, optional
            The localhost port on which the hub's web profile is listening.

        Raises
        ------
        SAMPHubError
            If the hub cannot be reached over XML-RPC.
        """
        self._connected = False

        try:
            self.proxy = ServerProxyPool(pool_size, xmlrpc.ServerProxy,
                                         'http://127.0.0.1:{0}'.format(web_port),
                                         allow_none=1)
            self.ping()
            self._connected = True
        except xmlrpc.ProtocolError as p:
            # Chain the original error so the underlying HTTP failure is
            # preserved in the traceback.
            raise SAMPHubError("Protocol Error {}: {}".format(
                p.errcode, p.errmsg)) from p

    @property
    def _samp_hub(self):
        """
        Property to abstract away the path to the hub, which allows this class
        to be used for both the standard and the web profile.
        """
        return self.proxy.samp.webhub

    def set_xmlrpc_callback(self, private_key, xmlrpc_addr):
        # The web profile delivers callbacks via pull_callbacks rather than a
        # client-side XML-RPC endpoint.
        raise NotImplementedError("set_xmlrpc_callback is not defined for the "
                                  "web profile")

    def register(self, identity_info):
        """
        Proxy to ``register`` SAMP Hub method.
        """
        return self._samp_hub.register(identity_info)

    def allow_reverse_callbacks(self, private_key, allow):
        """
        Proxy to ``allowReverseCallbacks`` SAMP Hub method.
        """
        return self._samp_hub.allowReverseCallbacks(private_key, allow)

    def pull_callbacks(self, private_key, timeout):
        """
        Proxy to ``pullCallbacks`` SAMP Hub method.
        """
        return self._samp_hub.pullCallbacks(private_key, timeout)
class SAMPWebClient(SAMPClient):
    """
    Utility class which provides facilities to create and manage a SAMP
    compliant XML-RPC server that acts as SAMP callable web client application.

    In practice web clients should run from the browser, so this is provided as
    a means of testing a hub's support for the web profile from Python.

    Parameters
    ----------
    hub : :class:`~astropy.samp.hub_proxy.SAMPWebHubProxy`
        An instance of :class:`~astropy.samp.hub_proxy.SAMPWebHubProxy` to
        be used for messaging with the SAMP Hub.
    name : str, optional
        Client name (corresponding to ``samp.name`` metadata keyword).
    description : str, optional
        Client description (corresponding to ``samp.description.text`` metadata
        keyword).
    metadata : dict, optional
        Client application metadata in the standard SAMP format.
    callable : bool, optional
        Whether the client can receive calls and notifications. If set to
        `False`, then the client can send notifications and calls, but can not
        receive any.
    """

    def __init__(self, hub, name=None, description=None, metadata=None,
                 callable=True):
        # GENERAL
        self._is_running = False
        self._is_registered = False

        if metadata is None:
            metadata = {}

        if name is not None:
            metadata["samp.name"] = name

        if description is not None:
            metadata["samp.description.text"] = description

        self._metadata = metadata
        self._callable = callable

        # HUB INTERACTION
        self.client = None
        self._public_id = None
        self._private_key = None
        self._hub_id = None
        self._notification_bindings = {}
        # Built-in callable bindings: respond to pings and environment queries.
        self._call_bindings = {"samp.app.ping": [self._ping, {}],
                               "client.env.get": [self._client_env_get, {}]}
        self._response_bindings = {}

        self.hub = hub

        if self._callable:
            # Daemon thread so a lingering poll loop never blocks interpreter
            # shutdown.
            self._thread = threading.Thread(target=self._serve_forever)
            self._thread.daemon = True

    def _serve_forever(self):
        # The web profile pushes callbacks via hub polling (pullCallbacks)
        # rather than via a client-side XML-RPC server.
        while self.is_running:

            # Watch for callbacks here
            if self._is_registered:
                results = self.hub.pull_callbacks(self.get_private_key(), 0)
                for result in results:
                    if result['samp.methodName'] == 'receiveNotification':
                        self.receive_notification(self._private_key,
                                                  *result['samp.params'])
                    elif result['samp.methodName'] == 'receiveCall':
                        self.receive_call(self._private_key,
                                          *result['samp.params'])
                    elif result['samp.methodName'] == 'receiveResponse':
                        self.receive_response(self._private_key,
                                              *result['samp.params'])

        self.hub.server_close()

    def register(self):
        """
        Register the client to the SAMP Hub.

        Raises
        ------
        SAMPClientError
            If the client is already registered, the hub proxy is not
            connected, or the hub returns an incomplete registration result.
        """
        if self.hub.is_connected:

            if self._private_key is not None:
                raise SAMPClientError("Client already registered")

            result = self.hub.register("Astropy SAMP Web Client")

            # Fixed typo in the two error messages below:
            # "Registation" -> "Registration".
            if result["samp.self-id"] == "":
                raise SAMPClientError("Registration failed - samp.self-id "
                                      "was not set by the hub.")

            if result["samp.private-key"] == "":
                raise SAMPClientError("Registration failed - samp.private-key "
                                      "was not set by the hub.")

            self._public_id = result["samp.self-id"]
            self._private_key = result["samp.private-key"]
            self._hub_id = result["samp.hub-id"]

            if self._callable:
                self._declare_subscriptions()
                # Required for the hub to queue callbacks for pullCallbacks.
                self.hub.allow_reverse_callbacks(self._private_key, True)

            if self._metadata != {}:
                self.declare_metadata()

            self._is_registered = True

        else:
            raise SAMPClientError("Unable to register to the SAMP Hub. Hub "
                                  "proxy not connected.")
class SAMPIntegratedWebClient(SAMPIntegratedClient):
    """
    A Simple SAMP web client.

    In practice web clients should run from the browser, so this is provided as
    a means of testing a hub's support for the web profile from Python.

    This class is meant to simplify the client usage providing a proxy class
    that merges the :class:`~astropy.samp.client.SAMPWebClient` and
    :class:`~astropy.samp.hub_proxy.SAMPWebHubProxy` functionalities in a
    simplified API.

    Parameters
    ----------
    name : str, optional
        Client name (corresponding to ``samp.name`` metadata keyword).
    description : str, optional
        Client description (corresponding to ``samp.description.text`` metadata
        keyword).
    metadata : dict, optional
        Client application metadata in the standard SAMP format.
    callable : bool, optional
        Whether the client can receive calls and notifications. If set to
        `False`, then the client can send notifications and calls, but can not
        receive any.
    """

    def __init__(self, name=None, description=None, metadata=None,
                 callable=True):
        # Build the web-profile hub proxy first, then a web client that
        # shares it for all hub messaging.
        self.hub = SAMPWebHubProxy()
        self.client = SAMPWebClient(self.hub, name=name,
                                    description=description,
                                    metadata=metadata,
                                    callable=callable)

    def connect(self, pool_size=20, web_port=21012):
        """
        Connect with the current or specified SAMP Hub, start and register the
        client.

        Parameters
        ----------
        pool_size : int, optional
            The number of socket connections opened to communicate with the
            Hub.
        web_port : int, optional
            The localhost port on which the hub's web profile listens.
        """
        # Order matters: the hub connection must exist before the client
        # thread starts, and the client must be running before registering.
        self.hub.connect(pool_size, web_port=web_port)
        self.client.start()
        self.client.register()
| |
import tkinter as tk
from tkinter import filedialog
from tkinter.filedialog import asksaveasfile
from tkinter.filedialog import askopenfile
import tkFontChooser
import urllib.request
import urllib.parse
from titlecase import titlecase
import wikipedia
from stackexchange import Site, StackOverflow
class WikiModule(tk.Frame):
    """Panel widget showing a four-sentence Wikipedia summary for an article,
    with a popup chooser when the article title is ambiguous."""

    title = None
    root = None
    summary = None
    url = None
    popup = None

    def __init__(self, master):
        """Create the panel pre-loaded with the 'Wikipedia' article summary.

        NOTE: this performs a network request at construction time.
        """
        self.title = tk.StringVar()
        self.title.set("Wikipedia")
        self.summary = tk.StringVar()
        self.summary.set(wikipedia.summary("Wikipedia", sentences=4))
        # Frame.__init__ returns None, so the original `root = ...`
        # assignment was meaningless and has been removed.
        tk.Frame.__init__(self, master, height=267, width=300)
        header = tk.Label(self, width=43, textvariable=self.title)
        body = tk.Message(self, width=300, textvariable=self.summary)
        header.grid(column=0, row=0)
        body.grid(column=0, row=1)

    def updateModule(self, article):
        """Look up *article* and display its summary; open a disambiguation
        chooser if Wikipedia reports the title is ambiguous."""
        print(article)
        self.title.set(titlecase(article))
        try:
            self.summary.set(wikipedia.summary(article, sentences=4))
        except wikipedia.exceptions.DisambiguationError as e:
            for i in e.options:
                print(i)
            print("Done")
            print(len(e.options))
            self.disambiguation(e.options)
            return

    def select(self, e):
        """Listbox callback: load the chosen title and close the popup."""
        w = e.widget
        index = int(w.curselection()[0])
        value = w.get(index)
        self.updateModule(value)
        self.popup.destroy()

    def disambiguation(self, options):
        """Open a scrollable popup listing the candidate article titles."""
        # Bug fix: use Toplevel instead of tk.Tk(). Creating a second Tk()
        # spawns a separate Tcl interpreter, so widgets/variables from the
        # main application cannot interoperate with the popup.
        self.popup = tk.Toplevel(self)
        scrollBar = tk.Scrollbar(self.popup)
        scrollBar.pack(side=tk.RIGHT, fill=tk.BOTH)
        listbox = tk.Listbox(self.popup, yscrollcommand=scrollBar.set)
        listbox.pack(side=tk.RIGHT, fill=tk.BOTH, expand=1)
        for i in options:
            listbox.insert(tk.END, i)
        self.popup.minsize(width=300, height=400)
        self.popup.maxsize(width=300, height=400)
        self.popup.title("Disambiguation")
        listbox.bind('<<ListboxSelect>>', self.select)
class StackModule(tk.Frame):
    """Panel widget showing the StackOverflow question most similar to a
    query, along with its URL."""

    title = None
    root = None
    summary = None
    body = None
    url = None

    # NOTE(review): hard-coded API key shipped in source; should be moved to
    # configuration. Kept here to preserve behavior.
    _API_KEY = 'qB5xmT87jDlGGf*OjXrawQ(('

    def _lookup(self, query):
        """Return ``(question, url)`` for the question most similar to
        *query* (deduplicates the identical logic previously copy-pasted in
        ``__init__`` and ``updateModule``)."""
        site = Site(StackOverflow, self._API_KEY)
        site.be_inclusive()
        questionList = site.similar(query)
        question = questionList[0]
        # Re-fetch by id to get the full question object (with url).
        question = site.question(question.question_id)
        return question, question.url

    def __init__(self, master):
        """Create the panel pre-loaded with results for 'Discrete Math'.

        NOTE: this performs network requests at construction time.
        """
        self.title = tk.StringVar()
        self.title.set("Stack Exchange")
        tk.Frame.__init__(self, master, height=267, width=300)
        header = tk.Label(self, width=43, textvariable=self.title)
        question, answer = self._lookup("Discrete Math")
        self.summary = tk.StringVar()
        self.summary.set(str(question) + '\n' + str(answer))
        body = tk.Message(self, width=300, textvariable=self.summary)
        header.grid(column=0, row=0)
        body.grid(column=0, row=1)

    def updateModule(self, newQuestion):
        """Search for *newQuestion* and display the best match and its URL."""
        print(newQuestion)
        question, answer = self._lookup(newQuestion)
        self.summary.set(str(question) + '\n' + str(answer))
        self.title.set(newQuestion)
class Application(tk.Frame):
    """NoteMaker main window: a text-editor pane plus Wikipedia and
    StackExchange lookup panels, with File/Edit menus and a right-click
    context menu."""

    filename = None
    root = None
    menuIbar = None
    text = None
    wiki = None

    def __init__(self, master=None):
        global root
        root = tk.Frame.__init__(self, master)
        self.grid()
        self.createWidgets()
        self.filename = "Untitled.txt"

    def newFile(self):
        """Clear the editor and reset to an untitled buffer."""
        self.filename = "Untitled.txt"
        self.text.delete(0.0, tk.END)

    def saveFile(self):
        """Write the buffer to the current filename."""
        t = self.text.get(0.0, tk.END)
        # Context manager guarantees the file is closed even if write fails
        # (the original leaked the handle on error).
        with open(self.filename, 'w') as f:
            f.write(t)

    def saveAs(self):
        """Prompt for a destination file and write the buffer to it."""
        f = asksaveasfile(defaultextension='.txt')
        if f is None:
            # User cancelled the dialog; original crashed on None here.
            return
        t = self.text.get(0.0, tk.END)
        try:
            f.write(t.rstrip())
        except Exception:
            # Fix: showerror was referenced but never imported at module
            # level; import it locally where it is needed.
            from tkinter.messagebox import showerror
            showerror(title="Oh No!", message="Unable to save file...")
        finally:
            f.close()

    def openFile(self):
        """Prompt for a file and load its contents into the editor."""
        file = askopenfile(parent=root, title='Select a File')
        if file is None:
            # User cancelled the dialog; original crashed on None here.
            return
        self.filename = file.name
        t = file.read()
        self.text.delete(0.0, tk.END)
        self.text.insert(0.0, t)
        file.close()

    def rightClick(self, e):
        """Show a context menu with clipboard actions and lookups that send
        the current selection to the Wikipedia/StackExchange panels."""
        try:
            def rightClick_Copy(e, apnd=0):
                e.widget.event_generate('<Control-c>')

            def rightClick_Cut(e):
                e.widget.event_generate('<Control-x>')

            def rightClick_Paste(e):
                e.widget.event_generate('<Control-v>')

            def rightClick_Wikipedia(e):
                self.wiki.updateModule(self.text.selection_get())

            def rightClick_StackExchange(e):
                self.stk.updateModule(self.text.selection_get())

            e.widget.focus()
            nclst = [
                (' Cut', lambda e=e: rightClick_Cut(e)),
                (' Copy', lambda e=e: rightClick_Copy(e)),
                (' Paste', lambda e=e: rightClick_Paste(e)),
                (' Wikipedia', lambda e=e: rightClick_Wikipedia(e)),
                (' StackExchange', lambda e=e: rightClick_StackExchange(e))
            ]
            rmenu = tk.Menu(None, tearoff=0, takefocus=0)
            for (txt, cmd) in nclst:
                rmenu.add_command(label=txt, command=cmd)
            rmenu.tk_popup(e.x_root + 40, e.y_root + 10, entry="0")
        except tk.TclError:
            # Fix: bare `TclError` was a NameError — the exception lives on
            # the tkinter module, imported here as `tk`.
            print(' - rClick menu, something wrong')
        return "break"

    def rClickbinder(self, r):
        """Bind the right-click context menu to common widget classes.

        Fixes the original, which was missing ``self`` and referenced an
        undefined ``rClicker`` function.
        """
        try:
            for b in ['Text', 'Entry', 'Listbox', 'Label']:
                r.bind_class(b, sequence='<Button-3>',
                             func=self.rightClick, add='')
        except tk.TclError:
            print(' - rClickbinder, something wrong')

    def createWidgets(self):
        """Build the paned layout, editor, lookup modules, and menu bar."""
        panes = tk.PanedWindow(self, width=1000, height=900)
        textFrame = tk.Frame(panes, width=600)
        webPane = tk.Frame(panes, bg="#333333")
        self.wiki = WikiModule(webPane)
        self.wiki.grid()
        self.stk = StackModule(webPane)
        self.stk.grid()
        panes.add(textFrame)
        panes.add(webPane, minsize=300)
        panes.grid(column=0, row=0)
        scrollbar = tk.Scrollbar(textFrame)
        self.text = tk.Text(textFrame, width=100, yscrollcommand=scrollbar.set)
        scrollbar.pack(side=tk.RIGHT, fill=tk.BOTH)
        self.text.pack(side=tk.RIGHT, fill=tk.BOTH, expand=1)
        # Keep the lookup panels at their fixed 300x267 size.
        self.wiki.grid_propagate(0)
        self.stk.grid_propagate(0)
        scrollbar.config(command=self.text.yview)
        self.menubar = tk.Menu(self)
        filemenu = tk.Menu(self.menubar)
        filemenu.add_command(label="New", command=self.newFile)
        filemenu.add_command(label="Open", command=self.openFile)
        filemenu.add_command(label="Save", command=self.saveFile)
        filemenu.add_command(label="Save As", command=self.saveAs)
        filemenu.add_separator()
        filemenu.add_command(label="Quit", command=self.quit)
        self.menubar.add_cascade(label="File", menu=filemenu)
        # NOTE(review): the Edit menu entries below are wired to unrelated
        # commands (e.g. "Undo" opens a file) and contain duplicates; kept
        # as-is pending a real clipboard/undo implementation.
        editmenu = tk.Menu(self.menubar)
        editmenu.add_command(label="Undo", command=self.openFile)
        editmenu.add_separator()
        editmenu.add_command(label="Cut", command=self.openFile)
        editmenu.add_command(label="Copy", command=self.saveFile)
        editmenu.add_command(label="Paste", command=self.saveAs)
        editmenu.add_command(label="Delete", command=self.quit)
        editmenu.add_separator()
        editmenu.add_command(label="Copy", command=self.saveFile)
        editmenu.add_command(label="Copy", command=self.saveFile)
        editmenu.add_command(label="Paste", command=self.saveAs)
        editmenu.add_separator()
        editmenu.add_command(label="Select All", command=self.openFile)
        editmenu.add_command(label="Time & Date", command=self.openFile)
        self.menubar.add_cascade(label="Edit", menu=editmenu)
# NOTE(review): the Application (and its Tk root window) is created at import
# time rather than under the __main__ guard, so importing this module has GUI
# side effects — confirm before moving it.
app = Application()
app.master.title('NoteMaker')
app.master.minsize(width=1000,height=801)
app.master.maxsize(width=1000,height=801)
if __name__ == '__main__':
    # Wire the global right-click handler and menu bar, then enter the
    # Tk event loop.
    app.master.bind('<Button-3>',app.rightClick)
    app.master.config(menu=app.menubar)
    app.mainloop()
| |
# -- coding: utf-8 --
# Created by LoginRadius Development Team
# Copyright 2019 LoginRadius Inc. All rights reserved.
#
class PINAuthenticationApi:
    """LoginRadius PIN Authentication API client.

    Wraps the ``identity/v2/auth/pin/*`` endpoints: PIN login, the
    forgot/reset-PIN flows (email, username, phone, OTP, security answers),
    PIN change, setting a PIN via PIN-auth token, and PIN session-token
    invalidation. The previously copy-pasted validation/query code is
    factored into the private helpers below; every public method keeps its
    original signature and behavior.
    """

    def __init__(self, lr_object):
        """
        :param lr_object: this is the reference to the parent LoginRadius object.
        """
        self._lr_object = lr_object

    # ------------------------------------------------------------------
    # Internal helpers shared by every endpoint wrapper.
    # ------------------------------------------------------------------

    def _require_model(self, model, name):
        """Raise if a required payload model is missing (None)."""
        if model is None:
            raise Exception(self._lr_object.get_validation_message(name))

    def _require_value(self, value, name):
        """Raise if a required string parameter is null or whitespace."""
        if self._lr_object.is_null_or_whitespace(value):
            raise Exception(self._lr_object.get_validation_message(name))

    def _base_query(self):
        """Return a fresh query-parameter dict holding only the API key."""
        return {"apiKey": self._lr_object.get_api_key()}

    # ------------------------------------------------------------------
    # Public endpoint wrappers.
    # ------------------------------------------------------------------

    def pin_login(self, login_by_pin_model, session_token):
        """This API is used to login a user by pin and session token.

        Args:
            login_by_pin_model: Model Class containing Definition of payload for LoginByPin API
            session_token: Session Token of user

        Returns:
            Response containing User Profile Data and access token
        9.22
        """
        self._require_model(login_by_pin_model, "login_by_pin_model")
        self._require_value(session_token, "session_token")

        query_parameters = self._base_query()
        query_parameters["session_token"] = session_token

        resource_path = "identity/v2/auth/login/pin"
        return self._lr_object.execute("POST", resource_path, query_parameters, login_by_pin_model)

    def send_forgot_pin_email_by_email(self, forgot_pin_link_by_email_model, email_template=None, reset_pin_url=None):
        """This API sends the reset pin email to specified email address.

        Args:
            forgot_pin_link_by_email_model: Model Class containing Definition for Forgot Pin Link By Email API
            email_template: Email template name
            reset_pin_url: Reset PIN Url

        Returns:
            Response containing Definition of Complete Validation data
        42.1
        """
        self._require_model(forgot_pin_link_by_email_model, "forgot_pin_link_by_email_model")

        query_parameters = self._base_query()
        if not self._lr_object.is_null_or_whitespace(email_template):
            query_parameters["emailTemplate"] = email_template
        if not self._lr_object.is_null_or_whitespace(reset_pin_url):
            query_parameters["resetPINUrl"] = reset_pin_url

        resource_path = "identity/v2/auth/pin/forgot/email"
        return self._lr_object.execute("POST", resource_path, query_parameters, forgot_pin_link_by_email_model)

    def send_forgot_pin_email_by_username(self, forgot_pin_link_by_user_name_model, email_template=None, reset_pin_url=None):
        """This API sends the reset pin email using username.

        Args:
            forgot_pin_link_by_user_name_model: Model Class containing Definition for Forgot Pin Link By UserName API
            email_template: Email template name
            reset_pin_url: Reset PIN Url

        Returns:
            Response containing Definition of Complete Validation data
        42.2
        """
        self._require_model(forgot_pin_link_by_user_name_model, "forgot_pin_link_by_user_name_model")

        query_parameters = self._base_query()
        if not self._lr_object.is_null_or_whitespace(email_template):
            query_parameters["emailTemplate"] = email_template
        if not self._lr_object.is_null_or_whitespace(reset_pin_url):
            query_parameters["resetPINUrl"] = reset_pin_url

        resource_path = "identity/v2/auth/pin/forgot/username"
        return self._lr_object.execute("POST", resource_path, query_parameters, forgot_pin_link_by_user_name_model)

    def reset_pin_by_reset_token(self, reset_pin_by_reset_token):
        """This API is used to reset pin using reset token.

        Args:
            reset_pin_by_reset_token: Model Class containing Definition of payload for Reset Pin By Reset Token API

        Returns:
            Response containing Definition of Complete Validation data
        42.3
        """
        self._require_model(reset_pin_by_reset_token, "reset_pin_by_reset_token")

        query_parameters = self._base_query()

        resource_path = "identity/v2/auth/pin/reset/token"
        return self._lr_object.execute("PUT", resource_path, query_parameters, reset_pin_by_reset_token)

    def reset_pin_by_email_and_security_answer(self, reset_pin_by_security_question_answer_and_email_model):
        """This API is used to reset pin using security question answer and email.

        Args:
            reset_pin_by_security_question_answer_and_email_model: Model Class containing Definition of payload for Reset Pin By Security Question and Email API

        Returns:
            Response containing Definition of Complete Validation data
        42.4
        """
        self._require_model(reset_pin_by_security_question_answer_and_email_model,
                            "reset_pin_by_security_question_answer_and_email_model")

        query_parameters = self._base_query()

        resource_path = "identity/v2/auth/pin/reset/securityanswer/email"
        return self._lr_object.execute("PUT", resource_path, query_parameters, reset_pin_by_security_question_answer_and_email_model)

    def reset_pin_by_username_and_security_answer(self, reset_pin_by_security_question_answer_and_username_model):
        """This API is used to reset pin using security question answer and username.

        Args:
            reset_pin_by_security_question_answer_and_username_model: Model Class containing Definition of payload for Reset Pin By Security Question and UserName API

        Returns:
            Response containing Definition of Complete Validation data
        42.5
        """
        self._require_model(reset_pin_by_security_question_answer_and_username_model,
                            "reset_pin_by_security_question_answer_and_username_model")

        query_parameters = self._base_query()

        resource_path = "identity/v2/auth/pin/reset/securityanswer/username"
        return self._lr_object.execute("PUT", resource_path, query_parameters, reset_pin_by_security_question_answer_and_username_model)

    def reset_pin_by_phone_and_security_answer(self, reset_pin_by_security_question_answer_and_phone_model):
        """This API is used to reset pin using security question answer and phone.

        Args:
            reset_pin_by_security_question_answer_and_phone_model: Model Class containing Definition of payload for Reset Pin By Security Question and Phone API

        Returns:
            Response containing Definition of Complete Validation data
        42.6
        """
        self._require_model(reset_pin_by_security_question_answer_and_phone_model,
                            "reset_pin_by_security_question_answer_and_phone_model")

        query_parameters = self._base_query()

        resource_path = "identity/v2/auth/pin/reset/securityanswer/phone"
        return self._lr_object.execute("PUT", resource_path, query_parameters, reset_pin_by_security_question_answer_and_phone_model)

    def send_forgot_pin_sms_by_phone(self, forgot_pin_otp_by_phone_model, sms_template=None):
        """This API sends the OTP to specified phone number

        Args:
            forgot_pin_otp_by_phone_model: Model Class containing Definition for Forgot Pin Otp By Phone API
            sms_template:

        Returns:
            Response Containing Validation Data and SMS Data
        42.7
        """
        self._require_model(forgot_pin_otp_by_phone_model, "forgot_pin_otp_by_phone_model")

        query_parameters = self._base_query()
        if not self._lr_object.is_null_or_whitespace(sms_template):
            query_parameters["smsTemplate"] = sms_template

        resource_path = "identity/v2/auth/pin/forgot/otp"
        return self._lr_object.execute("POST", resource_path, query_parameters, forgot_pin_otp_by_phone_model)

    def change_pin_by_access_token(self, access_token, change_pin_model):
        """This API is used to change a user's PIN using access token.

        Args:
            access_token: Uniquely generated identifier key by LoginRadius that is activated after successful authentication.
            change_pin_model: Model Class containing Definition for change PIN Property

        Returns:
            Response containing Definition of Complete Validation data
        42.8
        """
        self._require_value(access_token, "access_token")
        self._require_model(change_pin_model, "change_pin_model")

        query_parameters = self._base_query()
        query_parameters["access_token"] = access_token

        resource_path = "identity/v2/auth/pin/change"
        return self._lr_object.execute("PUT", resource_path, query_parameters, change_pin_model)

    def reset_pin_by_phone_and_otp(self, reset_pin_by_phone_and_otp_model):
        """This API is used to reset pin using phoneId and OTP.

        Args:
            reset_pin_by_phone_and_otp_model: Model Class containing Definition of payload for Reset Pin By Phone and Otp API

        Returns:
            Response containing Definition of Complete Validation data
        42.9
        """
        self._require_model(reset_pin_by_phone_and_otp_model, "reset_pin_by_phone_and_otp_model")

        query_parameters = self._base_query()

        resource_path = "identity/v2/auth/pin/reset/otp/phone"
        return self._lr_object.execute("PUT", resource_path, query_parameters, reset_pin_by_phone_and_otp_model)

    def reset_pin_by_email_and_otp(self, reset_pin_by_email_and_otp_model):
        """This API is used to reset pin using email and OTP.

        Args:
            reset_pin_by_email_and_otp_model: Model Class containing Definition of payload for Reset Pin By Email and Otp API

        Returns:
            Response containing Definition of Complete Validation data
        42.10
        """
        self._require_model(reset_pin_by_email_and_otp_model, "reset_pin_by_email_and_otp_model")

        query_parameters = self._base_query()

        resource_path = "identity/v2/auth/pin/reset/otp/email"
        return self._lr_object.execute("PUT", resource_path, query_parameters, reset_pin_by_email_and_otp_model)

    def reset_pin_by_username_and_otp(self, reset_pin_by_username_and_otp_model):
        """This API is used to reset pin using username and OTP.

        Args:
            reset_pin_by_username_and_otp_model: Model Class containing Definition of payload for Reset Pin By Username and Otp API

        Returns:
            Response containing Definition of Complete Validation data
        42.11
        """
        self._require_model(reset_pin_by_username_and_otp_model, "reset_pin_by_username_and_otp_model")

        query_parameters = self._base_query()

        resource_path = "identity/v2/auth/pin/reset/otp/username"
        return self._lr_object.execute("PUT", resource_path, query_parameters, reset_pin_by_username_and_otp_model)

    def set_pin_by_pin_auth_token(self, pin_required_model, pin_auth_token):
        """This API is used to change a user's PIN using Pin Auth token.

        Args:
            pin_required_model: Model Class containing Definition for PIN
            pin_auth_token: Pin Auth Token

        Returns:
            Response containing User Profile Data and access token
        42.12
        """
        self._require_model(pin_required_model, "pin_required_model")
        self._require_value(pin_auth_token, "pin_auth_token")

        query_parameters = self._base_query()
        query_parameters["pinAuthToken"] = pin_auth_token

        resource_path = "identity/v2/auth/pin/set/pinauthtoken"
        return self._lr_object.execute("POST", resource_path, query_parameters, pin_required_model)

    def in_validate_pin_session_token(self, session_token):
        """This API is used to invalidate pin session token.

        Args:
            session_token: Session Token of user

        Returns:
            Response containing Definition of Complete Validation data
        44.1
        """
        self._require_value(session_token, "session_token")

        query_parameters = self._base_query()
        query_parameters["session_token"] = session_token

        resource_path = "identity/v2/auth/session_token/invalidate"
        return self._lr_object.execute("GET", resource_path, query_parameters, {})
| |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""An extensible ASCII table reader and writer.
fixedwidth.py:
Read or write a table with fixed width columns.
:Copyright: Smithsonian Astrophysical Observatory (2011)
:Author: Tom Aldcroft (aldcroft@head.cfa.harvard.edu)
"""
from . import core
from .core import InconsistentTableError, DefaultSplitter
from . import basic
class FixedWidthSplitter(core.BaseSplitter):
    """
    Split line based on fixed start and end positions for each ``col`` in
    ``self.cols``.

    This class requires that the Header class will have defined ``col.start``
    and ``col.end`` for each column. The reference to the ``header.cols`` gets
    put in the splitter object by the base Reader.read() function just in time
    for splitting data lines by a ``data`` object.

    Note that the ``start`` and ``end`` positions are defined in the pythonic
    style so line[start:end] is the desired substring for a column. This splitter
    class does not have a hook for ``process_lines`` since that is generally not
    useful for fixed-width input.
    """
    delimiter_pad = ''
    bookend = False
    delimiter = '|'

    def __call__(self, lines):
        # Slice every line at the pre-computed per-column boundaries,
        # optionally running each raw value through process_val.
        for line in lines:
            raw_vals = [line[col.start:col.end] for col in self.cols]
            if self.process_val:
                yield [self.process_val(v) for v in raw_vals]
            else:
                yield raw_vals

    def join(self, vals, widths):
        pad = self.delimiter_pad or ''
        delim = self.delimiter or ''
        sep = pad + delim + pad
        if self.bookend:
            left, right = delim + pad, pad + delim
        else:
            left, right = '', ''
        # Right-justify each value to its column width before joining;
        # rjust leaves over-wide values untouched, matching manual padding.
        padded = [val.rjust(width) for val, width in zip(vals, widths)]
        return left + sep.join(padded) + right
class FixedWidthHeaderSplitter(DefaultSplitter):
    '''Splitter class that splits on ``|``.'''
    # Header lines in fixed-width tables are delimited by the bar character.
    delimiter = '|'
class FixedWidthHeader(basic.BasicHeader):
    """
    Fixed width table header reader.
    """
    splitter_class = FixedWidthHeaderSplitter
    """ Splitter class for splitting data lines into columns """
    position_line = None  # secondary header line position
    """ row index of line that specifies position (default = 1) """
    # Characters allowed (besides delimiter and space) on a position line
    # such as "--- ------- ---".
    set_of_position_line_characters = set(r'`~!#$%^&*-_+=\|":' + "'")

    def get_line(self, lines, index):
        # Return the index-th processed header line; raise if the table has
        # fewer processed lines than that.
        for i, line in enumerate(self.process_lines(lines)):
            if i == index:
                break
        else:  # No header line matching
            raise InconsistentTableError('No header line found in table')
        return line

    def get_cols(self, lines):
        """
        Initialize the header Column objects from the table ``lines``.

        Based on the previously set Header attributes find or create the column names.
        Sets ``self.cols`` with the list of Columns.

        Parameters
        ----------
        lines : list
            List of table lines
        """
        # See "else" clause below for explanation of start_line and position_line
        start_line = core._get_line_index(self.start_line, self.process_lines(lines))
        position_line = core._get_line_index(self.position_line, self.process_lines(lines))
        # If start_line is none then there is no header line.  Column positions are
        # determined from first data line and column names are either supplied by user
        # or auto-generated.
        if start_line is None:
            if position_line is not None:
                raise ValueError("Cannot set position_line without also setting header_start")
            data_lines = self.data.process_lines(lines)
            if not data_lines:
                raise InconsistentTableError(
                    'No data lines found so cannot autogenerate column names')
            vals, starts, ends = self.get_fixedwidth_params(data_lines[0])
            # Auto-generate names like "col1", "col2", ... via auto_format.
            self.names = [self.auto_format.format(i)
                          for i in range(1, len(vals) + 1)]
        else:
            # This bit of code handles two cases:
            # start_line = <index> and position_line = None
            #     Single header line where that line is used to determine both the
            #     column positions and names.
            # start_line = <index> and position_line = <index2>
            #     Two header lines where the first line defines the column names and
            #     the second line defines the column positions
            if position_line is not None:
                # Define self.col_starts and self.col_ends so that the call to
                # get_fixedwidth_params below will use those to find the header
                # column names.  Note that get_fixedwidth_params returns Python
                # slice col_ends but expects inclusive col_ends on input (for
                # more intuitive user interface).
                line = self.get_line(lines, position_line)
                if len(set(line) - set([self.splitter.delimiter, ' '])) != 1:
                    raise InconsistentTableError('Position line should only contain delimiters and one other character, e.g. "--- ------- ---".')
                # The line above lies. It accepts white space as well.
                # We don't want to encourage using three different
                # characters, because that can cause ambiguities, but white
                # spaces are so common everywhere that practicality beats
                # purity here.
                charset = self.set_of_position_line_characters.union(set([self.splitter.delimiter, ' ']))
                if not set(line).issubset(charset):
                    raise InconsistentTableError('Characters in position line must be part of {0}'.format(charset))
                vals, self.col_starts, col_ends = self.get_fixedwidth_params(line)
                # Convert Python-slice ends back to the inclusive form that
                # get_fixedwidth_params expects as input.
                self.col_ends = [x - 1 if x is not None else None for x in col_ends]
            # Get the header column names and column positions
            line = self.get_line(lines, start_line)
            vals, starts, ends = self.get_fixedwidth_params(line)
            self.names = vals
        self._set_cols_from_names()
        # Set column start and end positions.
        for i, col in enumerate(self.cols):
            col.start = starts[i]
            col.end = ends[i]

    def get_fixedwidth_params(self, line):
        """
        Split ``line`` on the delimiter and determine column values and
        column start and end positions.  This might include null columns with
        zero length (e.g. for ``header row = "| col1 || col2 | col3 |"`` or
        ``header2_row = "----- ------- -----"``).  The null columns are
        stripped out.  Returns the values between delimiters and the
        corresponding start and end positions.

        Parameters
        ----------
        line : str
            Input line

        Returns
        -------
        vals : list
            List of values.
        starts : list
            List of starting indices.
        ends : list
            List of ending indices.
        """
        # If column positions are already specified then just use those.
        # If neither column starts or ends are given, figure out positions
        # between delimiters. Otherwise, either the starts or the ends have
        # been given, so figure out whichever wasn't given.
        if self.col_starts is not None and self.col_ends is not None:
            starts = list(self.col_starts)  # could be any iterable, e.g. np.array
            ends = [x + 1 if x is not None else None for x in self.col_ends]  # user supplies inclusive endpoint
            if len(starts) != len(ends):
                raise ValueError('Fixed width col_starts and col_ends must have the same length')
            vals = [line[start:end].strip() for start, end in zip(starts, ends)]
        elif self.col_starts is None and self.col_ends is None:
            # There might be a cleaner way to do this but it works...
            vals = line.split(self.splitter.delimiter)
            starts = [0]
            ends = []
            for val in vals:
                if val:
                    # Non-empty field: record its end and the next start
                    # (skipping the one-character delimiter).
                    ends.append(starts[-1] + len(val))
                    starts.append(ends[-1] + 1)
                else:
                    # Empty field from a doubled delimiter: shift the
                    # pending start past the delimiter only.
                    starts[-1] += 1
            starts = starts[:-1]
            vals = [x.strip() for x in vals if x]
            if len(vals) != len(starts) or len(vals) != len(ends):
                raise InconsistentTableError('Error parsing fixed width header')
        else:
            # exactly one of col_starts or col_ends is given...
            if self.col_starts is not None:
                starts = list(self.col_starts)
                ends = starts[1:] + [None]  # Assume each col ends where the next starts
            else:  # self.col_ends is not None
                ends = [x + 1 for x in self.col_ends]
                starts = [0] + ends[:-1]  # Assume each col starts where the last ended
            vals = [line[start:end].strip() for start, end in zip(starts, ends)]
        return vals, starts, ends

    def write(self, lines):
        # Header line not written until data are formatted.  Until then it is
        # not known how wide each column will be for fixed width.
        pass
class FixedWidthData(basic.BasicData):
    """
    Base table data reader.
    """
    splitter_class = FixedWidthSplitter
    """ Splitter class for splitting data lines into columns """

    def write(self, lines):
        """Append the formatted fixed-width table (header, optional position
        line, and data rows) to ``lines`` and return it.

        Column widths must be computed from the formatted data first, which
        is why the header cannot be written until the data are formatted.
        """
        vals_list = []
        col_str_iters = self.str_vals()
        for vals in zip(*col_str_iters):
            vals_list.append(vals)

        for i, col in enumerate(self.cols):
            # Use default=0 so a table with zero data rows does not raise
            # ValueError from max() on an empty sequence.
            col.width = max((len(vals[i]) for vals in vals_list), default=0)

            if self.header.start_line is not None:
                # Header names also contribute to the column width.
                col.width = max(col.width, len(col.info.name))

        widths = [col.width for col in self.cols]

        if self.header.start_line is not None:
            lines.append(self.splitter.join([col.info.name for col in self.cols],
                                            widths))

        if self.header.position_line is not None:
            char = self.header.position_char
            if len(char) != 1:
                raise ValueError('Position_char="{}" must be a single '
                                 'character'.format(char))
            vals = [char * col.width for col in self.cols]
            lines.append(self.splitter.join(vals, widths))

        for vals in vals_list:
            lines.append(self.splitter.join(vals, widths))

        return lines
class FixedWidth(basic.Basic):
    """
    Read or write a fixed width table with a single header line that defines column
    names and positions. Examples::

      # Bar delimiter in header and data
      | Col1 | Col2 | Col3 |
      | 1.2  | hello there | 3 |
      | 2.4  | many words  | 7 |

      # Bar delimiter in header only
      Col1 | Col2 | Col3
      1.2    hello there   3
      2.4    many words    7

      # No delimiter with column positions specified as input
      Col1       Col2Col3
      1.2hello there   3
      2.4many words    7

    See the :ref:`fixed_width_gallery` for specific usage examples.
    """
    _format_name = 'fixed_width'
    _description = 'Fixed width'

    header_class = FixedWidthHeader
    data_class = FixedWidthData

    def __init__(self, col_starts=None, col_ends=None, delimiter_pad=' ', bookend=True):
        super().__init__()
        # Column position hints are consumed by the header reader ...
        self.header.col_starts = col_starts
        self.header.col_ends = col_ends
        # ... while padding/bookend options configure the data splitter.
        self.data.splitter.delimiter_pad = delimiter_pad
        self.data.splitter.bookend = bookend
class FixedWidthNoHeaderHeader(FixedWidthHeader):
    '''Header reader for fixed width tables with no header line'''
    # No line in the input contains column names.
    start_line = None
class FixedWidthNoHeaderData(FixedWidthData):
    '''Data reader for fixed width tables with no header line'''
    # With no header present, data begin on the very first line.
    start_line = 0
class FixedWidthNoHeader(FixedWidth):
    """
    Read or write a fixed width table which has no header line. Column
    names are either input (``names`` keyword) or auto-generated. Column
    positions are determined either by input (``col_starts`` and ``col_ends``
    keywords) or by splitting the first data line. In the latter case a
    ``delimiter`` is required to split the data line.

    Examples::

      # Bar delimiter in header and data
      | 1.2  | hello there | 3 |
      | 2.4  | many words  | 7 |

      # Compact table having no delimiter and column positions specified as input
      1.2hello there3
      2.4many words 7

    This class is just a convenience wrapper around the ``FixedWidth`` reader
    but with ``header.start_line = None`` and ``data.start_line = 0``.

    See the :ref:`fixed_width_gallery` for specific usage examples.
    """
    _format_name = 'fixed_width_no_header'
    _description = 'Fixed width with no header'
    header_class = FixedWidthNoHeaderHeader
    data_class = FixedWidthNoHeaderData

    def __init__(self, col_starts=None, col_ends=None, delimiter_pad=' ', bookend=True):
        super().__init__(col_starts, col_ends, delimiter_pad=delimiter_pad,
                         bookend=bookend)
class FixedWidthTwoLineHeader(FixedWidthHeader):
    '''Header reader for fixed width tables splitting on whitespace.

    For fixed width tables with several header lines, there is typically
    a white-space delimited format line, so splitting on white space is
    needed.
    '''
    # Whitespace splitter (instead of position-based FixedWidthSplitter).
    splitter_class = DefaultSplitter
class FixedWidthTwoLineDataSplitter(FixedWidthSplitter):
    '''Splitter for fixed width tables splitting on ``' '``.'''
    delimiter = ' '
class FixedWidthTwoLineData(FixedWidthData):
    '''Data reader for fixed width tables with two header lines.'''
    splitter_class = FixedWidthTwoLineDataSplitter
class FixedWidthTwoLine(FixedWidth):
    """
    Read or write a fixed width table which has two header lines. The first
    header line defines the column names and the second implicitly defines the
    column positions.

    Examples::

      # Typical case with column extent defined by ---- under column names.
      col1    col2         <== header_start = 0
      -----  ------------  <== position_line = 1, position_char = "-"
      1       bee flies    <== data_start = 2
      2       fish swims

      # Pretty-printed table
      +------+------------+
      | Col1 |   Col2     |
      +------+------------+
      |  1.2 | "hello"    |
      |  2.4 | there world|
      +------+------------+

    See the :ref:`fixed_width_gallery` for specific usage examples.
    """
    _format_name = 'fixed_width_two_line'
    _description = 'Fixed width with second header line'
    data_class = FixedWidthTwoLineData
    header_class = FixedWidthTwoLineHeader

    def __init__(self, position_line=1, position_char='-', delimiter_pad=None, bookend=False):
        super().__init__(delimiter_pad=delimiter_pad, bookend=bookend)
        # Data start immediately after the position (underline) row.
        self.data.start_line = position_line + 1
        self.header.position_line = position_line
        self.header.position_char = position_char
| |
# Copyright 2022 The jax3d Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for jax3d.projects.nesf.nerfstatic.utils.gin_utils."""
import dataclasses
import pathlib
import textwrap
from typing import Any
import gin
from jax3d.projects.nesf.nerfstatic.utils import gin_utils
import pytest
@gin_utils.dataclass_configurable
@dataclasses.dataclass
class A:
  """Base configurable dataclass with a single required field."""
  a: int
@gin_utils.dataclass_configurable
@dataclasses.dataclass
class B(A):
  """Subclass of A adding a second required field."""
  b: int
@gin_utils.dataclass_configurable
@dataclasses.dataclass
class C(B):
  """Subclass of B adding a required field and a defaulted one."""
  c: int
  c2: int = 2
@gin_utils.dataclass_configurable
@dataclasses.dataclass
class Params:
  """Top-level params whose `model` ConfigField must always be provided."""
  model: Any = gin_utils.ConfigField(A, required=True)
@gin_utils.dataclass_configurable
@dataclasses.dataclass
class ChildParams(Params):
  """Params subclass adding two optional ConfigFields and a plain int."""
  x: Any = gin_utils.ConfigField(A)
  y: Any = gin_utils.ConfigField(A)
  z: int = 1
@gin_utils.dataclass_configurable
@dataclasses.dataclass
class NestedParamsOptionArgs:
  """Innermost leaf dataclass used in the nested-scope tests."""
  x: Any = 1
@gin_utils.dataclass_configurable
@dataclasses.dataclass
class NestedParamsOption:
  """Middle level: two ConfigFields pointing at the same leaf class."""
  args0: Any = gin_utils.ConfigField(NestedParamsOptionArgs)
  args1: Any = gin_utils.ConfigField(NestedParamsOptionArgs)
@gin_utils.dataclass_configurable
@dataclasses.dataclass
class NestedParams:
  """Outer level: two ConfigFields pointing at the same option class."""
  option0: Any = gin_utils.ConfigField(NestedParamsOption)
  option1: Any = gin_utils.ConfigField(NestedParamsOption)
@pytest.fixture(scope='function', autouse=True)
def _reset_gin():
  """Register the test scope names, clear gin config before every test,
  and validate scope usage afterwards."""
  gin_utils._VALID_SCOPES.update([
      'root',
      'child',
      'parent',
  ])
  gin.clear_config()
  yield
  # Ensure scopes set inside the test were valid
  gin_utils.validate_scope_names()
def test_log_params_to_disk(tmp_path: pathlib.Path):
  """Params logged to disk use the gin-operative repr (with # markers)."""
  gin.parse_config("""
  Params.model = 'B'
  A.a = 1
  B.b = 'hello'
  """)
  params = Params() # Ensure C.a is referenced. # pytype: disable=missing-parameter
  path = tmp_path / 'args.params.py'
  gin_utils.log_params_to_disk(path, params)
  assert path.read_text() == textwrap.dedent(
      """\
      Params(
          model=B(
              a=1, # Required
              b='hello', # Required
          ), # Default factory overwritten
      )
      """
  )
def test_config_field():
  """ConfigField resolves class-name strings and applies gin bindings."""
  # Create expected default values before the gin parsing
  default_a = A(a=1)
  default_b = B(a=1, b=2)
  default_c = C(a=1, b=2, c=3)
  gin.parse_config("""
  A.a = 1
  B.b = 2
  C.c = 3
  """)
  # By default, ConfigField is required
  with pytest.raises(TypeError, match='Missing 1 required argument'):
    _ = Params()
  params = Params(model='A')
  assert type(params.model) is A # pylint: disable=unidiomatic-typecheck
  assert params.model == default_a
  assert repr(params) == str(params)
  assert repr(params) == textwrap.dedent(
      """\
      Params(
          model=A(
              a=1,
          ),
      )"""
  )
  assert params.to_gin_operative_repr() == textwrap.dedent( # pytype: disable=attribute-error
      """\
      Params(
          model=A(
              a=1, # Required
          ),
      )"""
  )
  params = Params(model='B')
  assert type(params.model) is B # pylint: disable=unidiomatic-typecheck
  assert params.model == default_b
  params = Params(model='C')
  assert type(params.model) is C # pylint: disable=unidiomatic-typecheck
  assert params.model == default_c
  assert repr(params) == textwrap.dedent(
      """\
      Params(
          model=C(
              a=1,
              b=2,
              c=3,
              c2=2,
          ),
      )"""
  )
  # Values can be passed directly
  params = Params(model=C(b=234)) # pytype: disable=missing-parameter
  assert params.model == C(a=1, b=234, c=3)
  # Invalid value
  with pytest.raises(ValueError, match='Invalid param str'):
    Params(model='D')
def test_config_field_gin_error():
  """A binding duplicated between parent and child classes is rejected."""
  gin.parse_config("""
  A.a = 1
  B.b = 2
  C.b = 3 # b is defined in C
  C.c = 3
  """)
  with pytest.raises(ValueError, match='Duplicated gin bindings'):
    Params(model='C')
def test_gin_config_field_wrong_input():
  """Non-gin_utils configurables and non-dataclasses are rejected."""
  # gin.configurable instead of gin_utils
  @gin.configurable
  @dataclasses.dataclass
  class NotAConfigurable:
    x: int
  with pytest.raises(
      TypeError, match='should be a `gin_utils.dataclass_configurable`'):
    gin_utils.ConfigField(NotAConfigurable)
  with pytest.raises(TypeError, match='is not a dataclass'):
    @gin_utils.dataclass_configurable # pylint: disable=unused-variable
    class NotADataclass:
      a: int
  @gin.configurable # pylint: disable=unused-variable
  @dataclasses.dataclass
  class NotAConfigurableChild(A):
    x: int
  with pytest.raises(
      TypeError, match='should be a `gin_utils.dataclass_configurable`'):
    Params(model='NotAConfigurableChild')
def test_to_gin_operative_repr():
  """Operative repr annotates required fields and overwritten defaults."""
  gin.parse_config("""
  B.b = 2
  C.c = 3
  C.c2 = 4
  """)
  assert C(a=1).to_gin_operative_repr() == textwrap.dedent( # pytype: disable=missing-parameter,attribute-error
      """\
      C(
          a=1,
          b=2, # Required
          c=3, # Required
          c2=4, # Default: 2
      )"""
  )
def test_scope():
  """Field-specific scopes (Params_model/, ChildParams_y/) override globals."""
  gin.parse_config("""
  A.a = 1
  B.b = 2
  C.c = 3

  Params.model = 'C'
  Params_model/A.a = 10
  Params_model/B.b = 20

  ChildParams.x = 'C'
  ChildParams.y = 'B'
  ChildParams_y/A.a = 100
  ChildParams_y/B.b = 200
  """)
  params = Params()
  assert params == Params(model=C(a=10, b=20, c=3))
  assert params.model.a == 10
  assert params.model.b == 20
  assert params.model.c == 3
  assert params.to_gin_operative_repr() == textwrap.dedent( # pytype: disable=attribute-error
      """\
      Params(
          model=C(
              a=10, # Required
              b=20, # Required
              c=3, # Required
              c2=2,
          ), # Default factory overwritten
      )"""
  )
  params = ChildParams()
  assert params == ChildParams(
      model=C(a=10, b=20, c=3),
      x=C(a=1, b=2, c=3),
      y=B(a=100, b=200),
  )
  assert params.model.a == 10
  assert params.model.b == 20
  assert params.model.c == 3
  assert params.model.c2 == 2
  assert params.x.a == 1
  assert params.x.b == 2
  assert params.x.c == 3
  assert params.x.c2 == 2
  assert params.y.a == 100
  assert params.y.b == 200
  assert params.to_gin_operative_repr() == textwrap.dedent( # pytype: disable=attribute-error
      """\
      ChildParams(
          model=C(
              a=10, # Required
              b=20, # Required
              c=3, # Required
              c2=2,
          ), # Default factory overwritten
          x=C(
              a=1, # Required
              b=2, # Required
              c=3, # Required
              c2=2,
          ), # Default factory overwritten
          y=B(
              a=100, # Required
              b=200, # Required
          ), # Default factory overwritten
          z=1,
      )"""
  )
def test_scope_nested():
  """Scopes compose across nesting levels (outer/inner scope chains)."""
  gin.parse_config("""
  NestedParams.option0 = 'NestedParamsOption'
  NestedParamsOption.args1 = 'NestedParamsOptionArgs'
  NestedParams_option0/NestedParamsOption_args0/NestedParamsOptionArgs.x = '00'
  NestedParams_option1/NestedParamsOptionArgs.x = '1'
  NestedParams_option1/NestedParamsOption_args1/NestedParamsOptionArgs.x = '11'
  """)
  params = NestedParams()
  params_explicit = NestedParams(
      option0=NestedParamsOption(
          args0=NestedParamsOptionArgs(x='00'),
          args1=NestedParamsOptionArgs(x=1),
      ),
      option1=NestedParamsOption(
          args0=NestedParamsOptionArgs(x='1'),
          args1=NestedParamsOptionArgs(x='11'),
      ),
  )
  assert params == params_explicit
  assert params.option0.args0.x == '00'
  assert params.option0.args1.x == 1
  assert params.option1.args0.x == '1'
  assert params.option1.args1.x == '11'
  assert params_explicit.option0.args0.x == '00'
  assert params_explicit.option0.args1.x == 1
  assert params_explicit.option1.args0.x == '1'
  assert params_explicit.option1.args1.x == '11'
  assert repr(params) == textwrap.dedent(
      """\
      NestedParams(
          option0=NestedParamsOption(
              args0=NestedParamsOptionArgs(
                  x='00',
              ),
              args1=NestedParamsOptionArgs(
                  x=1,
              ),
          ),
          option1=NestedParamsOption(
              args0=NestedParamsOptionArgs(
                  x='1',
              ),
              args1=NestedParamsOptionArgs(
                  x='11',
              ),
          ),
      )"""
  )
  assert params.to_gin_operative_repr() == textwrap.dedent( # pytype: disable=attribute-error
      """\
      NestedParams(
          option0=NestedParamsOption(
              args0=NestedParamsOptionArgs(
                  x='00', # Default: 1
              ),
              args1=NestedParamsOptionArgs(
                  x=1,
              ), # Default factory overwritten
          ), # Default factory overwritten
          option1=NestedParamsOption(
              args0=NestedParamsOptionArgs(
                  x='1', # Default: 1
              ),
              args1=NestedParamsOptionArgs(
                  x='11', # Default: 1
              ), # Default factory overwritten
          ),
      )"""
  )
def test_scope_nested_inner_only():
  """Inner-only scope bindings act as defaults overridable by outer scopes."""
  # Only set the default inner values
  gin.parse_config("""
  NestedParamsOption_args0/NestedParamsOptionArgs.x = 'b_default'
  NestedParams_option0/NestedParamsOption_args0/NestedParamsOptionArgs.x = 'a'
  NestedParamsOption_args1/NestedParamsOptionArgs.x = 'b'
  """)
  params = NestedParams()
  params_explicit = NestedParams(
      option0=NestedParamsOption(
          args0=NestedParamsOptionArgs(x='a'),
          args1=NestedParamsOptionArgs(x='b'),
      ),
      option1=NestedParamsOption(
          args0=NestedParamsOptionArgs(x='b_default'),
          args1=NestedParamsOptionArgs(x='b'),
      ),
  )
  assert repr(params) == repr(params_explicit)
  assert params == params_explicit
def test_valid_scope_names():
  """Check that an error is raised if a wrong scope is used."""
  gin_utils.validate_scope_names()
  gin.clear_config()
  # typo: use `Params_model` -> `ChildParams_model`
  gin.parse_config("""
  ChildParams_model/C.c = 3
  """)
  with pytest.raises(ValueError, match='Invalid scope'):
    gin_utils.validate_scope_names()
  gin.clear_config()
  gin.parse_config("""
  Params_model/C.c = 3
  """)
  gin_utils.validate_scope_names()
def test_gin_macro_invalid():
  """Gin macros (@C / @C()) are forbidden as ConfigField values."""
  gin.parse_config("""
  Params.model = @C()
  """)
  with pytest.raises(ValueError, match='Gin macro forbidden'):
    Params()
  gin.clear_config()
  gin.parse_config("""
  Params.model = @C
  """)
  with pytest.raises(ValueError, match='Gin macro forbidden'):
    Params()
  # Also check with inheritance
  gin.clear_config()
  gin.parse_config("""
  Params.model = @C
  ChildParams.x = 'A'
  ChildParams.y = 'A'
  """)
  with pytest.raises(ValueError, match='Gin macro forbidden'):
    ChildParams()
def test_default_factory_create_cls():
  """A non-required ConfigField builds its default class from gin bindings."""
  @gin_utils.dataclass_configurable
  @dataclasses.dataclass
  class FactoryParamsCreateCls:
    params: Any = gin_utils.ConfigField(A)
  gin.parse_config("""
  A.a = 123
  """)
  assert FactoryParamsCreateCls().params == A(a=123)
def test_default_factory_required():
  """A required ConfigField must be set explicitly or bound through gin."""
  @gin_utils.dataclass_configurable
  @dataclasses.dataclass
  class FactoryParamsRequired:
    params: Any = gin_utils.ConfigField(A, required=True)
  gin.parse_config("""
  A.a = 123
  """)
  with pytest.raises(TypeError, match='Missing 1 required argument'):
    _ = FactoryParamsRequired()
  assert FactoryParamsRequired(params='A').params == A(a=123)
  gin.parse_config("""
  FactoryParamsRequired.params = 'A'
  """)
  assert FactoryParamsRequired().params == A(a=123)
def test_default_factory_explicit():
  """Default factories honor scoped bindings and can be overwritten."""
  @gin_utils.dataclass_configurable
  @dataclasses.dataclass
  class FactoryParamsExplicit:
    params: Any = gin_utils.ConfigField(B)
  gin.parse_config("""
  A.a = 123
  B.b = 234
  """)
  assert FactoryParamsExplicit().params == B(a=123, b=234)
  # Default factory is compatible with scope
  gin.parse_config("""
  FactoryParamsExplicit_params/A.a = 10
  FactoryParamsExplicit_params/B.b = 100
  """)
  assert FactoryParamsExplicit().params == B(a=10, b=100)
  # Default factory can be overwritten
  gin.parse_config("""
  FactoryParamsExplicit.params = 'A'
  """)
  assert FactoryParamsExplicit().params == A(a=10)
def test_get_topmost_parent_class():
  """The topmost configurable ancestor is found for each class."""
  assert gin_utils._get_topmost_parent_class(A) is A
  assert gin_utils._get_topmost_parent_class(B) is A
  assert gin_utils._get_topmost_parent_class(C) is A
  assert gin_utils._get_topmost_parent_class(Params) is Params
  assert gin_utils._get_topmost_parent_class(ChildParams) is Params
@gin.configurable
def fn_in_scope(x=1):
  """Trivial configurable used to probe gin scope resolution below."""
  return x
@pytest.mark.parametrize(
    'config', [
        # Nested scopes are correctly applied
        """
        fn_in_scope.x = 'expected'
        """,
        """
        child/fn_in_scope.x = 'expected'
        """,
        """
        parent/child/fn_in_scope.x = 'expected'
        """,
        """
        root/parent/child/fn_in_scope.x = 'expected'
        """,
        """
        root/fn_in_scope.x = 'expected'
        """,
        """
        root/parent/fn_in_scope.x = 'expected'
        """,
        # Missing inner scope should still work
        """
        root/child/fn_in_scope.x = 'expected'
        """,
        # More specific scope take precedence over more generic ones
        """
        fn_in_scope.x = 'bad'
        child/fn_in_scope.x = 'bad'
        parent/child/fn_in_scope.x = 'bad'
        root/parent/child/fn_in_scope.x = 'expected'
        """,
        """
        parent/child/fn_in_scope.x = 'expected'
        child/fn_in_scope.x = 'bad'
        fn_in_scope.x = 'bad'
        """,
        """
        fn_in_scope.x = 'bad'
        child/fn_in_scope.x = 'expected'
        """,
        """
        parent/fn_in_scope.x = 'bad'
        child/fn_in_scope.x = 'expected'
        """,
        """
        root/fn_in_scope.x = 'bad'
        child/fn_in_scope.x = 'expected'
        """,
        """
        parent/fn_in_scope.x = 'bad'
        root/child/fn_in_scope.x = 'expected'
        """,
        """
        root/child/fn_in_scope.x = 'bad'
        parent/child/fn_in_scope.x = 'expected'
        """,
    ],
)
def test_gin_scope_order(config):
  """The most specific matching scope wins regardless of binding order."""
  # Test that the scope order does not change
  gin.clear_config()
  gin.parse_config(config)
  with gin.config_scope('root'):
    with gin.config_scope('parent'):
      with gin.config_scope('child'):
        assert gin_utils._get_bindings('fn_in_scope') == {'x': 'expected'} # pytype: disable=wrong-arg-types
| |
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime as dt
import fire
import functools
import itertools
import multiprocessing as mp
import os
import sys
import random
import subprocess
import time
from collections import deque
sys.path.insert(0, '.')
from absl import flags
import tensorflow as tf
from tqdm import tqdm
import numpy as np
import preprocessing
from utils import timer, ensure_dir_exists
from rl_loop import fsdb
# TFRecord read options (compression settings) shared by all readers here.
READ_OPTS = preprocessing.TF_RECORD_CONFIG

# Local mirror of the remote selfplay game data.
LOCAL_DIR = "data/"

# How many positions to look at per generation.
# Per AGZ, 2048 minibatch * 1k = 2M positions/generation
EXAMPLES_PER_GENERATION = 2 ** 21

# Minimum number of new games to accumulate before flushing a chunk
# (presumably counted in games — TODO confirm).
MINIMUM_NEW_GAMES = 12000

# Rough expected number of games per model, used to size the window.
AVG_GAMES_PER_MODEL = 20000
def pick_examples_from_tfrecord(filename, sampling_frac=0.02):
    """Sample roughly `sampling_frac` of the serialized examples in a file."""
    # tf_record_iterator is deprecated. Silence those warnings for now.
    # TODO(tommadams): remove this once
    # https://github.com/tensorflow/minigo/issues/740 is fixed.
    verbosity = tf.logging.get_verbosity()
    tf.logging.set_verbosity(tf.logging.ERROR)
    protos = list(tf.python_io.tf_record_iterator(filename, READ_OPTS))
    tf.logging.set_verbosity(verbosity)

    # A Poisson draw keeps the expected sampled fraction correct even for
    # files with few examples.
    num_to_pick = np.random.poisson(len(protos) * sampling_frac)
    return random.sample(protos, min(len(protos), num_to_pick))
def choose(game, sampling_frac=0.02):
    """Return (timestamp, example) pairs sampled from one game record."""
    ts = file_timestamp(game)
    return [(ts, example)
            for example in pick_examples_from_tfrecord(game, sampling_frac)]
def file_timestamp(filename):
    """Return the integer unix timestamp encoded before the first '-' of a
    game file's basename, e.g. '1570000000-w-vs-b.tfrecord.zz' -> 1570000000."""
    basename = os.path.basename(filename)
    return int(basename.partition('-')[0])
def _ts_to_str(timestamp):
return dt.datetime.fromtimestamp(timestamp).strftime("%Y-%m-%d %H:%M:%S")
class ExampleBuffer():
    """Bounded buffer of (timestamp, serialized example) pairs sampled from
    selfplay game records; oldest entries are evicted via deque maxlen."""

    def __init__(self, max_size=2**21, sampling_frac=0.02):
        self.examples = deque(maxlen=max_size)
        self.max_size = max_size
        self.sampling_frac = sampling_frac
        # functools.partial (not a lambda) so it can be pickled for mp.Pool.
        self.func = functools.partial(choose, sampling_frac=sampling_frac)
        self.total_updates = 0

    def parallel_fill(self, games, threads=8):
        """ games is a list of .tfrecord.zz game records. """
        # Filenames start with a timestamp, so this sorts oldest-first.
        games.sort(key=os.path.basename)
        # A couple extra in case parsing fails
        # (assumes ~200 positions per game at the given sampling_frac —
        # TODO confirm the 200 estimate).
        max_games = int(self.max_size / self.sampling_frac / 200) + 480
        if len(games) > max_games:
            games = games[-max_games:]
        with mp.Pool(threads) as pool:
            res = tqdm(pool.imap(self.func, games), total=len(games))
            self.examples.extend(itertools.chain.from_iterable(res))
        print("Got", len(self.examples), "examples")

    def update(self, new_games):
        """ new_games is a list of .tfrecord.zz new game records. """
        new_games.sort(key=os.path.basename)
        first_new_game = None
        for idx, game in enumerate(new_games):
            timestamp = file_timestamp(game)
            # Skip games at or before the newest example already buffered.
            # NOTE(review): indexes self.examples[-1] — raises IndexError if
            # the buffer is empty; callers appear to fill first. Confirm.
            if timestamp <= self.examples[-1][0]:
                continue
            elif first_new_game is None:
                first_new_game = idx
                num_new_games = len(new_games) - idx
                print("Found {}/{} new games".format(
                    num_new_games, len(new_games)))
                self.total_updates += num_new_games
            self.examples.extend(self.func(game))
        if first_new_game is None:
            print("No new games", file_timestamp(
                new_games[-1]), self.examples[-1][0])

    def flush(self, path):
        """Shuffle the buffered examples, write them to `path`, and reset."""
        # random.shuffle on deque is O(n^2) convert to list for O(n)
        self.examples = list(self.examples)
        random.shuffle(self.examples)
        with timer("Writing examples to " + path):
            preprocessing.write_tf_examples(
                path, [ex[1] for ex in self.examples], serialize=False)
        self.examples.clear()
        self.examples = deque(maxlen=self.max_size)

    @property
    def count(self):
        # Number of buffered examples.
        return len(self.examples)

    def __str__(self):
        if self.count == 0:
            return "ExampleBuffer: 0 positions"
        return "ExampleBuffer: {} positions sampled from {} to {}".format(
            self.count,
            _ts_to_str(self.examples[0][0]),
            _ts_to_str(self.examples[-1][0]))
def files_for_model(model):
    """Glob all local .zz game files for a (num, name) model tuple."""
    pattern = os.path.join(LOCAL_DIR, model[1], '*.zz')
    return tf.gfile.Glob(pattern)
def smart_rsync(
        from_model_num=0,
        source_dir=None,
        dest_dir=LOCAL_DIR):
    """Rsync the game directories of every model numbered >= from_model_num."""
    source_dir = source_dir or fsdb.selfplay_dir()
    # Clamp negative model numbers to zero.
    from_model_num = max(0, from_model_num)
    recent = [m for m in fsdb.get_models() if m[0] >= from_model_num]
    for _, model_name in recent:
        _rsync_dir(os.path.join(source_dir, model_name),
                   os.path.join(dest_dir, model_name))
def time_rsync(from_date,
               source_dir=None,
               dest_dir=LOCAL_DIR):
    """Rsync each hourly selfplay directory from `from_date` up to now (UTC)."""
    source_dir = source_dir or fsdb.selfplay_dir()
    hour = from_date
    while hour < dt.datetime.utcnow():
        dirname = hour.strftime("%Y-%m-%d-%H")
        src = os.path.join(source_dir, dirname)
        # Hours with no uploaded games yet are skipped.
        if tf.gfile.Exists(src):
            _rsync_dir(src, os.path.join(dest_dir, dirname))
        hour = hour + dt.timedelta(hours=1)
def _rsync_dir(source_dir, dest_dir):
    """gsutil-rsync source_dir into dest_dir, appending stderr to .rsync_log."""
    ensure_dir_exists(dest_dir)
    cmd = ['gsutil', '-m', 'rsync', source_dir, dest_dir]
    with open('.rsync_log', 'ab') as rsync_log:
        subprocess.call(cmd, stderr=rsync_log)
def _determine_chunk_to_make(write_dir):
    """
    Returns the full path of the chunk to make (gs://...)
    and a boolean, indicating whether we should wait for a new model
    or if we're 'behind' and should just write out our current chunk immediately
    True == write immediately.
    """
    def chunk_path(model_num):
        # e.g. <write_dir>/123.tfrecord.zz
        return os.path.join(write_dir, str(model_num) + '.tfrecord.zz')

    models = fsdb.get_models()
    # Last model is N. N+1 (should be) training. We should gather games for N+2.
    chunk_to_make = chunk_path(models[-1][0] + 1)
    if not tf.gfile.Exists(chunk_to_make):
        # N+1 is missing. Write it out ASAP
        print("Making chunk ASAP:", chunk_to_make)
        return chunk_to_make, True

    chunk_to_make = chunk_path(models[-1][0] + 2)
    while tf.gfile.Exists(chunk_to_make):
        print("Chunk for next model ({}) already exists. Sleeping.".format(
            chunk_to_make))
        time.sleep(5 * 60)
        models = fsdb.get_models()
        chunk_to_make = chunk_path(models[-1][0] + 2)
    print("Making chunk:", chunk_to_make)
    return chunk_to_make, False
def get_window_size(chunk_num):
    """ Adjust the window size by how far we are through a run.

    At the start of the run, there's a benefit to 'expiring' the completely
    random games a little sooner, and scaling up to the 500k game window
    specified in the paper.
    """
    half_avg_games = AVG_GAMES_PER_MODEL // 2
    window = (chunk_num + 5) * half_avg_games
    return min(500000, window)
def fill_and_wait_time(bufsize=EXAMPLES_PER_GENERATION,
                       write_dir=None,
                       threads=32,
                       start_from=None):
    """Fill a buffer from recent hourly game dirs, then watch for new games.

    Rsyncs and buffers games, then loops until a new model is promoted AND at
    least MINIMUM_NEW_GAMES new games have arrived (unless the next chunk is
    already overdue, in which case it writes immediately), finally flushing
    the buffer as the next golden chunk.
    """
    start_from = start_from or dt.datetime.utcnow()
    write_dir = write_dir or fsdb.golden_chunk_dir()
    buf = ExampleBuffer(bufsize)
    chunk_to_make, fast_write = _determine_chunk_to_make(write_dir)
    hours = fsdb.get_hour_dirs()
    with timer("Rsync"):
        time_rsync(min(dt.datetime.strptime(
            hours[-1], "%Y-%m-%d-%H/"), start_from))
        start_from = dt.datetime.utcnow()
    hours = fsdb.get_hour_dirs()
    files = (tf.gfile.Glob(os.path.join(LOCAL_DIR, d, "*.zz"))
             for d in reversed(hours)
             if tf.gfile.Exists(os.path.join(LOCAL_DIR, d)))
    # BUG FIX: get_window_size() takes a chunk *number*; previously the chunk
    # *path* (a str) was passed, which always raised TypeError on
    # `chunk_num + 5`. Derive the number from ".../<num>.tfrecord.zz".
    chunk_num = int(os.path.basename(chunk_to_make).split('.')[0])
    # NOTE(review): islice limits the number of hour-directory globs, while
    # get_window_size() is sized in games — confirm the intended granularity.
    files = itertools.islice(files, get_window_size(chunk_num))
    models = fsdb.get_models()
    buf.parallel_fill(
        list(itertools.chain.from_iterable(files)), threads=threads)
    print("Filled buffer, watching for new games")
    while (fsdb.get_latest_model() == models[-1]
           or buf.total_updates < MINIMUM_NEW_GAMES):
        with timer("Rsync"):
            # Re-pull the last hour with some overlap to catch stragglers.
            time_rsync(start_from - dt.timedelta(minutes=60))
            start_from = dt.datetime.utcnow()
        hours = sorted(fsdb.get_hour_dirs(LOCAL_DIR))
        new_files = list(map(lambda d: tf.gfile.Glob(
            os.path.join(LOCAL_DIR, d, '*.zz')), hours[-2:]))
        buf.update(list(itertools.chain.from_iterable(new_files)))
        if fast_write:
            break
        time.sleep(30)
        if fsdb.get_latest_model() != models[-1]:
            print("New model! Waiting for games. Got",
                  buf.total_updates, "new games so far")
    latest = fsdb.get_latest_model()
    print("New model!", latest[1], "!=", models[-1][1])
    print(buf)
    buf.flush(chunk_to_make)
def fill_and_wait_models(bufsize=EXAMPLES_PER_GENERATION,
                         write_dir=None,
                         threads=8,
                         model_window=100,
                         skip_first_rsync=False):
    """ Fills a ringbuffer with positions from the most recent games, then
    continually rsync's and updates the buffer until a new model is promoted.
    Once it detects a new model, it then dumps its contents for training to
    immediately begin on the next model.
    """
    write_dir = write_dir or fsdb.golden_chunk_dir()
    buf = ExampleBuffer(bufsize)
    models = fsdb.get_models()[-model_window:]
    if not skip_first_rsync:
        with timer("Rsync"):
            # Pull games for the last few models in case we fell behind.
            smart_rsync(models[-1][0] - 6)
    files = tqdm(map(files_for_model, models), total=len(models))
    buf.parallel_fill(list(itertools.chain(*files)), threads=threads)
    print("Filled buffer, watching for new games")
    while fsdb.get_latest_model()[0] == models[-1][0]:
        with timer("Rsync"):
            smart_rsync(models[-1][0] - 2)
        # NOTE(review): only the two newest models are re-globbed here, yet
        # `total=len(models)` — the progress-bar total looks off; confirm.
        new_files = tqdm(map(files_for_model, models[-2:]), total=len(models))
        buf.update(list(itertools.chain(*new_files)))
        time.sleep(60)
    latest = fsdb.get_latest_model()
    print("New model!", latest[1], "!=", models[-1][1])
    print(buf)
    buf.flush(os.path.join(write_dir, str(latest[0] + 1) + '.tfrecord.zz'))
def make_chunk_for(output_dir=LOCAL_DIR,
                   local_dir=LOCAL_DIR,
                   game_dir=None,
                   model_num=1,
                   positions=EXAMPLES_PER_GENERATION,
                   threads=8,
                   sampling_frac=0.02):
    """
    Explicitly make a golden chunk for a given model `model_num`
    (not necessarily the most recent one).

    Walks backwards through the models preceding `model_num`, rsyncing and
    globbing their games until enough positions should be available, then
    samples and flushes the chunk.
    """
    game_dir = game_dir or fsdb.selfplay_dir()
    ensure_dir_exists(output_dir)
    buf = ExampleBuffer(positions, sampling_frac=sampling_frac)
    earlier_models = [m for m in fsdb.get_models() if m[0] < model_num]
    files = []
    # Newest models first: their games are the most relevant.
    for _, model_name in sorted(earlier_models, reverse=True):
        local_model_dir = os.path.join(local_dir, model_name)
        if not tf.gfile.Exists(local_model_dir):
            print("Rsyncing", model_name)
            _rsync_dir(os.path.join(game_dir, model_name), local_model_dir)
        files.extend(tf.gfile.Glob(os.path.join(local_model_dir, '*.zz')))
        print("{}: {} games".format(model_name, len(files)))
        # ~200 positions per game; stop once sampling should cover `positions`.
        if len(files) * 200 * sampling_frac > positions:
            break
    print("Filling from {} files".format(len(files)))
    buf.parallel_fill(files, threads=threads)
    print(buf)
    output = os.path.join(output_dir, str(model_num) + '.tfrecord.zz')
    print("Writing to", output)
    buf.flush(output)
if __name__ == "__main__":
    # `sys` is already imported at module level; the redundant local
    # `import sys` shadow has been removed.
    # Parse absl flags first, leaving unrecognized argv for Fire to dispatch.
    remaining_argv = flags.FLAGS(sys.argv, known_only=True)
    fire.Fire({
        'fill_and_wait_models': fill_and_wait_models,
        'fill_and_wait_time': fill_and_wait_time,
        'smart_rsync': smart_rsync,
        'make_chunk_for': make_chunk_for,
    }, remaining_argv[1:])
| |
from Tkinter import *
import tkFileDialog
import tkMessageBox
import os
import string
import time
import datetime
import csv
import numpy
import calendar
from scipy import stats
from scipy import optimize
from scipy import linspace
import math
import pylab as P
from matplotlib.backends.backend_pdf import PdfPages
import fnmatch
# Confidence interval (in percent) used for the linear regression analysis.
confidence_interval=90.0
def MergeResults():
    """Merge analyzer data files that overlap the sampling period into one
    tab-delimited results file (Python 2 script).

    Reads the sample-times CSV (set by askopenresultsfilename), finds every
    '*Data.dat' file under C:\UserData whose date falls within one day of the
    first/last sample, and writes the selected columns with corrected
    timestamps to a new merged results file in C:\DATA_TOOLS_UOFS.
    """
    global sampletimefilename
    global mergedresultsfilename
    global tempfilename
    # NOTE(review): Python 2 code — '\U' in these literals would be a syntax
    # error under Python 3; raw strings would be safer. Confirm before porting.
    inputfoldername = 'C:\UserData'
    resultsfoldername = 'C:\DATA_TOOLS_UOFS'
    opensampletimefile=open(sampletimefilename, 'rb')
    sampletimes = numpy.genfromtxt(opensampletimefile, delimiter=',', dtype=None, names=True)
    sampleepochtimes=[];
    c1=0
    # run through sampletimefile and find first and last date
    for row in sampletimes['SampleName']:
        # Assemble "Y M D H M S" from the per-column fields and convert to a
        # UTC epoch time via calendar.timegm.
        samplestartstr=str(sampletimes['Year'][c1])+" "+str(sampletimes['Month'][c1])+" "+str(sampletimes['Day'][c1])+" "+str(sampletimes['Hour'][c1])+" "+str(sampletimes['Minute'][c1]) +" "+str(sampletimes['Second'][c1])
        samplestructtime=time.strptime(samplestartstr, "%Y %m %d %H %M %S")
        sampleepochtime=calendar.timegm(samplestructtime)
        sampleepochtimes.append(sampleepochtime)
        c1=c1+1
    sampleepochtimes=sorted(sampleepochtimes)
    firstsampletime=min(sampleepochtimes)
    lastsampletime=max(sampleepochtimes)
    print sampleepochtimes
    os.chdir(resultsfoldername)
    # Timestamped output filename, e.g. 20240101_120000_merged_results.txt
    mergedresultsfilename=datetime.datetime.now().strftime('%Y%m%d_%H%M%S') + '_merged_results.txt'
    openresultsfile=open(mergedresultsfilename, 'wb')
    resultswriter = csv.writer(openresultsfile, delimiter='\t')
    #[fd['DATE'][c1],fd['TIME'][c1],fd['EPOCH_TIME'][c1],fd['N2O'][c1],fd['d15N'][c1],fd['d15Nalpha'][c1],fd['d15Nbeta'][c1],fd['co2_conc'][c1],fd['H2O_conc'][c1])
    resultswriter.writerow(['Date','Local_Time','Epoch_Time','12CO2_ppmv','13CO2_ppmv', 'd13C', 'H2O_volperc'])
    os.chdir(inputfoldername)
    for dirpath, dirs, files in os.walk(inputfoldername):
        for filename in fnmatch.filter(files, '*Data.dat'):
            openinputfile=open(os.path.join(dirpath, filename),'rb')
            # Filenames look like "<prefix>-<YYYYMMDD>-<rest>".
            junk,datestr,junk2=filename.split("-",2)
            YMD=int(datestr)
            # Integer division (Python 2) splits YYYYMMDD into parts.
            yearoffile=YMD/10000
            monthoffile=(YMD-yearoffile*10000)/100
            dayoffile=YMD-yearoffile*10000-monthoffile*100
            filetimestr=str(yearoffile)+" "+str(monthoffile)+" "+str(dayoffile)+" 00 00 00"
            filestructtime=time.strptime(filetimestr, "%Y %m %d %H %M %S")
            fileepochtime=calendar.timegm(filestructtime)
            # Only merge files dated within one day of the sampling window.
            if fileepochtime > firstsampletime-86400 and fileepochtime < lastsampletime+86400:
                print filename
                print datestr
                fd = numpy.genfromtxt(openinputfile, dtype=None, names=True)
                c1=0
                for row in fd['TIME']:
                    # Drop fractional seconds before parsing.
                    temptime,junk=row.split(".",1)
                    datetimestr=str(fd['DATE'][c1])+" "+str(temptime)
                    tempstructtime=time.strptime(datetimestr, "%m/%d/%y %H:%M:%S")
                    # mktime interprets the struct as local time.
                    tempepochtime=time.mktime(tempstructtime)
                    tempepochtime=tempepochtime
                    tempstructtime=time.localtime(tempepochtime)
                    gooddate=time.strftime("%Y-%m-%d", tempstructtime)
                    goodtime=(time.strftime("%H:%M:%S", tempstructtime))
                    # Apply the user-entered UTC offset (hours) to the epoch.
                    resultswriter.writerow([gooddate,goodtime,tempepochtime+(int(UTCoffsetentry.get())*3600),fd['12CO2'][c1],fd['13CO2_Raw'][c1],fd['Delta_Raw'][c1],fd['H2O'][c1]])
                    c1=c1+1
            openinputfile.close()
    openresultsfile.close()
def askopenresultsfilename():
    # Ask the user for the switcherlog CSV (sample names + start times),
    # rebuild the merged analyzer file via MergeResults(), then for each
    # sample window compute means and linear regressions of the gas readings,
    # write one row per sample to a results CSV, and add one chart page per
    # sample to two multi-page PDFs (sample-only and whole-run views).
    # NOTE(review): Python 2 code (print statements, 'rb'/'wb' csv files).
    global sampletimefilename # file with the sample names and times (switcherlog)
    global mergedresultsfilename
    global tempfilename
    # get filename
    fileopen_opt = options = {}
    options['defaultextension'] = '.csv'
    options['filetypes'] = [('csv files', '.csv'),('all files', '.*')]
    options['initialdir'] = 'C:\SWITCH_CONTROL\SWITCHERLOG'
    options['initialfile'] = 'sample_times_names.csv'
    options['parent'] = root
    options['title'] = 'Choose a csv file with samplenames and times to open'
    sampletimefilename = tkFileDialog.askopenfilename(**fileopen_opt)
    # open file
    if sampletimefilename:
        # timestamp shared by every output file produced in this run
        tempfilename=datetime.datetime.now().strftime('%Y%m%d_%H%M%S')
        MergeResults()
        opensampletimefile=open(sampletimefilename, 'rb')
        resultsfoldername = 'C:\DATA_TOOLS_UOFS'
        os.chdir(resultsfoldername)
        openinputfile=open(mergedresultsfilename, 'rb')
        resultsfileName=tempfilename + '_results.csv'
        openresultsfile=open(resultsfileName, 'wb')
        pdffile1 = PdfPages(tempfilename +'_charts_sample_only.pdf')
        pdffile2 = PdfPages(tempfilename +'_charts_whole_run.pdf')
        sampletimes = numpy.genfromtxt(opensampletimefile, delimiter=',', dtype=None, names=True)
        print "amount samples"
        print len(sampletimes['SampleName'])
        iCO2data = numpy.genfromtxt(openinputfile, delimiter='\t', dtype=None, names=True)
        print "amount datalines"
        print len(iCO2data['Epoch_Time'])
        amountrows=len(iCO2data['Epoch_Time'])
        quality='unknown'
        resultswriter = csv.writer(openresultsfile, dialect='excel')
        resultswriter.writerow(['SampleName', 'Rundate','Runtime', 'Port', 'Quality', '12CO2mean', '12CO2slope', '12CO2intercept',
            '13CO2mean','13CO2slope','13CO2intercept', 'd13Cmean', 'd13Cslope', 'd13Cintercept','H2Omean'])
        stabilizesec=float(pretimeentry.get())
        sampletimesec=float(sampletimeentry.get())*60
        # just a counter c1 for keeping track of where we are in the samplelist file
        c1=0
        # just a counter c2 for keeping track of where we are in the results file
        c2=0
        for row in sampletimes['SampleName']:
            # xsec/y* hold readings outside the sampling window (charts only);
            # xsecs/y*s hold the readings the statistics are computed from
            xsec=[]; y12CO2=[]; y13CO2=[]; yd13C=[]; yH2O=[];
            xsecs=[]; y12CO2s=[]; y13CO2s=[]; yd13Cs=[]; yH2Os=[];
            samplestartstr=str(sampletimes['Year'][c1])+" "+str(sampletimes['Month'][c1])+" "+str(sampletimes['Day'][c1])+" "+str(sampletimes['Hour'][c1])+" "+str(sampletimes['Minute'][c1]) +" "+str(sampletimes['Second'][c1])
            samplestructtime=time.strptime(samplestartstr, "%Y %m %d %H %M %S")
            sampleepochtime=calendar.timegm(samplestructtime)
            print sampletimes['SampleName'][c1]
            print time.strftime("%d %b %Y %H:%M:%S ", samplestructtime)
            print sampleepochtime
            # discard data before sample is started and stabilized
            while sampleepochtime > iCO2data['Epoch_Time'][c2]:
                c2=c2+1
            # stabilization period: collected for the whole-run charts only
            while sampleepochtime+stabilizesec > iCO2data['Epoch_Time'][c2]:
                xsec.append(iCO2data['Epoch_Time'][c2]-sampleepochtime)
                y12CO2.append(iCO2data['12CO2_ppmv'][c2])
                y13CO2.append(iCO2data['13CO2_ppmv'][c2])
                yd13C.append(iCO2data['d13C'][c2])
                yH2O.append(iCO2data['H2O_volperc'][c2])
                c2=c2+1
            # actual sampling window: these readings feed the means/regressions
            while sampleepochtime+stabilizesec+sampletimesec > iCO2data['Epoch_Time'][c2]:
                xsecs.append(iCO2data['Epoch_Time'][c2]-sampleepochtime)
                y12CO2s.append(iCO2data['12CO2_ppmv'][c2])
                y13CO2s.append(iCO2data['13CO2_ppmv'][c2])
                yd13Cs.append(iCO2data['d13C'][c2])
                yH2Os.append(iCO2data['H2O_volperc'][c2])
                c2=c2+1
            # two extra minutes after the window, again for charts only
            while sampleepochtime+stabilizesec+sampletimesec+120 > iCO2data['Epoch_Time'][c2]:
                xsec.append(iCO2data['Epoch_Time'][c2]-sampleepochtime)
                y12CO2.append(iCO2data['12CO2_ppmv'][c2])
                y13CO2.append(iCO2data['13CO2_ppmv'][c2])
                yd13C.append(iCO2data['d13C'][c2])
                yH2O.append(iCO2data['H2O_volperc'][c2])
                c2=c2+1
            # rescan the data from the start for the next sample
            c2=0
            print 'amount readings for this sample:' + str(len(y12CO2s))
            rundate=time.strftime("%Y%m%d", samplestructtime)
            runtime=time.strftime("%H%M%S", samplestructtime)
            if len(y12CO2s)>2:
                _12CO2mean=numpy.mean(y12CO2s)
                _13CO2mean=numpy.mean(y13CO2s)
                d13Cmean=numpy.mean(yd13Cs)
                H2Omean=numpy.mean(yH2Os)
                # build a quality flag string for out-of-range concentrations
                if _12CO2mean > 1500:
                    quality='CO2 high'
                elif _12CO2mean < 300:
                    quality='CO2 low'
                else:
                    quality=''
                if _13CO2mean > 25:
                    quality=quality + ' 13CO2 high'
                elif _13CO2mean < 3:
                    quality=quality + ' 13CO2 low'
                if H2Omean > 3 :
                    quality= quality + ' H2O too high, DAMAGE POSSIBLE!'
                elif H2Omean > 1 :
                    quality= quality + ' H2O high'
                _12CO2slope, _12CO2intercept, _12CO2linr, _12CO2linp, _12CO2std_err = stats.linregress(xsecs,y12CO2s)
                _13CO2slope, _13CO2intercept, _13CO2linr, _13CO2linp, _13CO2std_err = stats.linregress(xsecs,y13CO2s)
                d13Cslope, d13Cintercept, d13Clinr, d13Clinp, d13Cstd_err = stats.linregress(xsecs,yd13Cs)
                resultswriter.writerow([sampletimes['SampleName'][c1],rundate,runtime, sampletimes['Port'][c1],
                    quality, _12CO2mean, _12CO2slope, _12CO2intercept, _13CO2mean, _13CO2slope, _13CO2intercept,
                    d13Cmean, d13Cslope, d13Cintercept, H2Omean])
                # # save fluxes to file
                xs = numpy.array(xsecs)
                y1s = numpy.array(y12CO2s)
                y2s = numpy.array(y13CO2s)
                y3s = numpy.array(yd13Cs)
                x = numpy.array(xsec)
                y1 = numpy.array(y12CO2)
                y2 = numpy.array(y13CO2)
                y3 = numpy.array(yd13C)
                #____________________________ SAMPLE ONLY PDF______________________________
                fig = P.figure(figsize=(16, 16))
                # fit lines on the sampling-window data (evaluated but unused here)
                (m,b)=P.polyfit(xs,y1s,1)
                y12 = P.polyval([m,b],x)
                (m,b)=P.polyfit(xs,y2s,1)
                y22 = P.polyval([m,b],x)
                (m,b)=P.polyfit(xs,y3s,1)
                y32 = P.polyval([m,b],x)
                line1=fig.add_subplot(311)
                line1.scatter(xs, y1s)
                line1.set_xlim(left=0)
                line1.grid()
                line1.set_title('Sample Name: '+str(sampletimes['SampleName'][c1])+' time: '+time.strftime("%d %b %Y %H:%M:%S ", samplestructtime))
                line1.set_ylabel('12CO2 concentration (ppmv)', color='b')
                line2=fig.add_subplot(312)
                line2.scatter(xs, y2s)
                line2.set_xlim(left=0)
                line2.grid()
                line2.set_ylabel('13CO2 concentration (ppmv)', color='b')
                line3=fig.add_subplot(313)
                line3.scatter(xs, y3s)
                line3.set_xlim(left=0)
                line3.grid()
                line3.set_ylabel('d13C permil', color='b')
                line3.set_xlabel('time (seconds)', color='b')
                pdffile1.savefig(dpi=150)
                P.close()
                #_____________________ whole run PDF___________________________________________
                fig = P.figure(figsize=(16, 16))
                (m,b)=P.polyfit(xs,y1s,1)
                y12 = P.polyval([m,b],x)
                (m,b)=P.polyfit(xs,y2s,1)
                y22 = P.polyval([m,b],x)
                (m,b)=P.polyfit(xs,y3s,1)
                y32 = P.polyval([m,b],x)
                line1=fig.add_subplot(311)
                line1.scatter(xs, y1s)
                # '+' markers show the readings outside the sampling window
                line1.scatter(x, y1, marker='+')
                line1.set_xlim(left=0)
                line1.grid()
                line1.set_title('Sample Name: '+str(sampletimes['SampleName'][c1])+' time: '+time.strftime("%d %b %Y %H:%M:%S ", samplestructtime))
                line1.set_ylabel('12CO2 concentration (ppmv)', color='b')
                line2=fig.add_subplot(312)
                line2.scatter(xs, y2s)
                line2.scatter(x, y2, marker='+')
                line2.set_xlim(left=0)
                line2.grid()
                line2.set_ylabel('13CO2 concentration (ppmv)', color='b')
                line3=fig.add_subplot(313)
                line3.scatter(xs, y3s)
                line3.scatter(x, y3, marker='+')
                line3.set_xlim(left=0)
                line3.grid()
                line3.set_ylabel('d13C permil', color='b')
                line3.set_xlabel('time (seconds)', color='b')
                pdffile2.savefig(dpi=150)
                P.close()
            else:
                # fewer than 3 readings: no statistics possible for this sample
                resultswriter.writerow([sampletimes['SampleName'][c1],rundate,runtime, sampletimes['Port'][c1],
                    'na', 'na', 'na', 'na','na','na',
                    'na', 'na', 'na', 'na'])
                print 'NO DATA FOUND FOR THIS SAMPLE'
            print '----------------------------------------------'
            c1=c1+1
        openinputfile.close()
        openresultsfile.close()
        pdffile1.close()
        pdffile2.close()
#____________________________________________________________________________________________________________
#--------------------GUI-----------------------------------------------------------------------------------
#_____________________________________________________________________________________________________________
# create a root TkInter frame
root = Tk()
root.title('iCO2 results calculator 20130715')
#__________________________________LOGO&TITLE________________________________________
bigtitle = Label(root, anchor=W, font=('times', 20, 'bold'), fg='white',bg='darkgreen', text="iCO2 calculator ")
bigtitle.grid(row=0,column=0,columnspan=10,sticky=[N,S,E,W])
#____________________________OPTIONS______________________________________________________
optionstitle = Label(root, anchor=W, font=('times', 12, 'bold'), text="options:")
optionstitle.grid(row=1,column=0, columnspan=3, sticky=[N,S,E,W])
# seconds of data discarded at the start of each sample (analyzer settling)
pretimeentrytitle = Label(root, anchor=W, text="stabilizing time to ignore at start (s):")
pretimeentrytitle.grid(row=3,column=0, columnspan=1, sticky=[E])
pretimeentry= Entry(root,width=4)
pretimeentry.insert(0,"270")
pretimeentry.grid(row=3,column=1, columnspan=1, sticky=[W])
# minutes of data per sample used for means and regressions
sampletimeentrytitle = Label(root, anchor=W, text="sampling time to include (min):")
sampletimeentrytitle.grid(row=4,column=0, columnspan=1, sticky=[E])
sampletimeentry= Entry(root,width=4)
sampletimeentry.insert(0,"5")
sampletimeentry.grid(row=4,column=1, columnspan=1, sticky=[W])
# hours added to epoch times written to the merged file (local-to-UTC shift)
UTCoffsettitle = Label(root, anchor=W, text="Offset local time UTC (SK: -6):")
UTCoffsettitle.grid(row=13,column=0, columnspan=1, sticky=[E])
UTCoffsetentry= Entry(root,width=4)
UTCoffsetentry.insert(0,"-6")
UTCoffsetentry.grid(row=13,column=1, columnspan=1, sticky=[W])
# doHMRfit=IntVar()
# doHMRapply = Checkbutton(root, text="Fit the exponential HMR model", variable=doHMRfit)
# doHMRapply.grid(row=12,column=0, columnspan=5, sticky=W)
# _______________________CALC INDIVIDUAL FLUXES_____________________________________________
f0=Frame(root,height=1, width=450, bg="grey")
f0.grid(row=24,column=0, columnspan=4, pady=5,sticky=S)
calcfluxtitle = Label(root, anchor=W, font=('times', 12, 'bold'), text="Calculate results")
calcfluxtitle.grid(row=25,column=0, columnspan=4, sticky=[N,S,E,W])
calcfluxhelp = Label(root, anchor=W, text="Open a merged results file")
calcfluxhelp.grid(row=26,column=0, columnspan=4, sticky=[N,S,E,W])
calcfluxhelp2 = Label(root, anchor=W, text="input concentrations in ppmv (=ul/l)")
calcfluxhelp2.grid(row=27,column=0, columnspan=4, sticky=[N,S,E,W])
buttonopenconcfile=Button(root, text='open sampletime and results file', command=askopenresultsfilename)
buttonopenconcfile.grid(row=28,column=1,columnspan=1,sticky=[W])
calcfluxhelp3 = Label(root, anchor=W, text="results are saved with same filename+results")
calcfluxhelp3.grid(row=29,column=0, columnspan=4, sticky=[N,S,E,W])
# #_____________________________________________________________________________________________________________
root.mainloop( )
| |
"""
Forked From https://github.com/theskumar/python-dotenv
"""
from __future__ import absolute_import
import codecs
import os
import sys
import warnings
import re
from collections import OrderedDict
# Decoder used to un-escape backslash sequences inside double-quoted values.
__escape_decoder = codecs.getdecoder('unicode_escape')
# Matches POSIX-style variable references such as ${NAME}.
# Raw string: '\$' and '\{' are invalid escape sequences in a normal string
# literal and raise SyntaxWarning (eventually SyntaxError) on modern Python.
__posix_variable = re.compile(r'\$\{[^\}]*\}')
def decode_escaped(escaped):
    """Return *escaped* with backslash escape sequences decoded."""
    decoded, _length = __escape_decoder(escaped)
    return decoded
def load_dotenv(dotenv_path):
    """
    Read a .env file and load into os.environ.
    """
    if not os.path.exists(dotenv_path):
        warnings.warn("Not loading %s - it doesn't exist." % dotenv_path)
        return None
    values = dotenv_values(dotenv_path)
    # setdefault: variables already present in the environment win over the file
    for key in values:
        os.environ.setdefault(key, values[key])
    return True
def get_key(dotenv_path, key_to_get):
    """
    Gets the value of a given key from the given .env

    If the .env path given doesn't exist, fails
    """
    key_to_get = str(key_to_get)
    if not os.path.exists(dotenv_path):
        warnings.warn("can't read %s - it doesn't exist." % dotenv_path)
        return None
    values = dotenv_values(dotenv_path)
    try:
        return values[key_to_get]
    except KeyError:
        warnings.warn("key %s not found in %s." % (key_to_get, dotenv_path))
        return None
def set_key(dotenv_path, key_to_set, value_to_set, quote_mode="always"):
    """
    Adds or Updates a key/value to the given .env

    If the .env path given doesn't exist, fails instead of risking creating
    an orphan .env somewhere in the filesystem
    """
    key_to_set = str(key_to_set)
    # drop any surrounding quotes the caller may have included
    value_to_set = str(value_to_set).strip("'").strip('"')
    if not os.path.exists(dotenv_path):
        warnings.warn("can't write to %s - it doesn't exist." % dotenv_path)
        return None, key_to_set, value_to_set
    values = OrderedDict(parse_dotenv(dotenv_path))
    values[key_to_set] = value_to_set
    success = flatten_and_write(dotenv_path, values, quote_mode)
    return success, key_to_set, value_to_set
def unset_key(dotenv_path, key_to_unset, quote_mode="always"):
    """
    Removes a given key from the given .env

    If the .env path given doesn't exist, fails
    If the given key doesn't exist in the .env, fails
    """
    key_to_unset = str(key_to_unset)
    if not os.path.exists(dotenv_path):
        warnings.warn("can't delete from %s - it doesn't exist." % dotenv_path)
        return None, key_to_unset
    values = dotenv_values(dotenv_path)
    try:
        del values[key_to_unset]
    except KeyError:
        warnings.warn("key %s not removed from %s - key doesn't exist." % (key_to_unset, dotenv_path))
        return None, key_to_unset
    success = flatten_and_write(dotenv_path, values, quote_mode)
    return success, key_to_unset
def dotenv_values(dotenv_path):
    """Return an OrderedDict of the key/value pairs in *dotenv_path*,
    with ${NAME} references expanded."""
    return resolve_nested_variables(OrderedDict(parse_dotenv(dotenv_path)))
def parse_dotenv(dotenv_path):
    """Yield (key, value) pairs from *dotenv_path*, skipping blank lines,
    comments and lines without an '='."""
    with open(dotenv_path) as stream:
        for raw_line in stream:
            entry = raw_line.strip()
            if not entry or entry.startswith('#') or '=' not in entry:
                continue
            key, _, value = entry.partition('=')
            # Remove any leading and trailing spaces in key, value
            key = key.strip()
            value = value.strip()
            # a value wrapped in double quotes is unquoted and unescaped
            if value and value[0] == '"' and value[-1] == '"':
                value = decode_escaped(value[1:-1])
            yield key, value
def resolve_nested_variables(values):
    """Expand ${NAME} references in *values* in place and return it.

    The environment takes precedence over the dotenv values; unknown names
    expand to the empty string.
    """
    def _expand(match_object):
        # strip the surrounding '${' and '}' to get the bare variable name
        name = match_object.group()[2:-1]
        return os.getenv(name, values.get(name, ""))

    for key in values:
        values[key] = __posix_variable.sub(_expand, values[key])
    return values
def flatten_and_write(dotenv_path, dotenv_as_dict, quote_mode="always"):
    """Serialize *dotenv_as_dict* to *dotenv_path*, one KEY=VALUE per line.

    quote_mode "always" double-quotes every value; "auto" quotes only values
    containing a space; anything else writes values unquoted.
    """
    with open(dotenv_path, "w") as stream:
        for key, value in dotenv_as_dict.items():
            if quote_mode == "always" or (quote_mode == "auto" and " " in value):
                stream.write('%s="%s"\n' % (key, value))
            else:
                stream.write('%s=%s\n' % (key, value))
    return True
def _walk_to_root(path):
"""
Yield directories starting from the given directory up to the root
"""
if not os.path.exists(path):
raise IOError('Starting path not found')
if os.path.isfile(path):
path = os.path.dirname(path)
last_dir = None
current_dir = os.path.abspath(path)
while last_dir != current_dir:
yield current_dir
parent_dir = os.path.abspath(os.path.join(current_dir, os.path.pardir))
last_dir, current_dir = current_dir, parent_dir
def find_dotenv(filename='.env', raise_error_if_not_found=False, usecwd=False):
    """
    Search in increasingly higher folders for the given file

    Returns path to the file if found, or an empty string otherwise
    """
    if usecwd or '__file__' not in globals():
        # should work without __file__, e.g. in REPL or IPython notebook
        start = os.getcwd()
    else:
        # will work for .py files
        caller_file = sys._getframe().f_back.f_code.co_filename
        start = os.path.dirname(os.path.abspath(caller_file))
    for folder in _walk_to_root(start):
        candidate = os.path.join(folder, filename)
        if os.path.exists(candidate):
            return candidate
    if raise_error_if_not_found:
        raise IOError('File not found')
    return ''
def _magic(dotenv_path):
    """
    dotenv [dotenv_path]

    Search in increasingly higher folders for the `dotenv_path`
    """
    # Locate the .env file
    target = dotenv_path if dotenv_path else '.env'
    try:
        target = find_dotenv(target, True, True)
    except IOError:
        print("cannot find .env file")
        return
    # Load the .env file
    load_dotenv(target)
def load_ipython_extension(ipython):
    """Register the %dotenv magic."""
    # expose the `_magic` handler to IPython under the name `dotenv`
    ipython.register_magic_function(_magic, magic_name='dotenv')
| |
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
from python_pachyderm.proto.v2.transaction import transaction_pb2 as python__pachyderm_dot_proto_dot_v2_dot_transaction_dot_transaction__pb2
class APIStub(object):
    """Missing associated documentation comment in .proto file."""

    # NOTE(review): generated by the gRPC protoc plugin ("DO NOT EDIT" header
    # above) -- regenerate from transaction.proto instead of hand-editing.
    # One unary-unary callable is created per RPC of transaction_v2.API.

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        self.BatchTransaction = channel.unary_unary(
                '/transaction_v2.API/BatchTransaction',
                request_serializer=python__pachyderm_dot_proto_dot_v2_dot_transaction_dot_transaction__pb2.BatchTransactionRequest.SerializeToString,
                response_deserializer=python__pachyderm_dot_proto_dot_v2_dot_transaction_dot_transaction__pb2.TransactionInfo.FromString,
                )
        self.StartTransaction = channel.unary_unary(
                '/transaction_v2.API/StartTransaction',
                request_serializer=python__pachyderm_dot_proto_dot_v2_dot_transaction_dot_transaction__pb2.StartTransactionRequest.SerializeToString,
                response_deserializer=python__pachyderm_dot_proto_dot_v2_dot_transaction_dot_transaction__pb2.Transaction.FromString,
                )
        self.InspectTransaction = channel.unary_unary(
                '/transaction_v2.API/InspectTransaction',
                request_serializer=python__pachyderm_dot_proto_dot_v2_dot_transaction_dot_transaction__pb2.InspectTransactionRequest.SerializeToString,
                response_deserializer=python__pachyderm_dot_proto_dot_v2_dot_transaction_dot_transaction__pb2.TransactionInfo.FromString,
                )
        self.DeleteTransaction = channel.unary_unary(
                '/transaction_v2.API/DeleteTransaction',
                request_serializer=python__pachyderm_dot_proto_dot_v2_dot_transaction_dot_transaction__pb2.DeleteTransactionRequest.SerializeToString,
                response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
                )
        self.ListTransaction = channel.unary_unary(
                '/transaction_v2.API/ListTransaction',
                request_serializer=python__pachyderm_dot_proto_dot_v2_dot_transaction_dot_transaction__pb2.ListTransactionRequest.SerializeToString,
                response_deserializer=python__pachyderm_dot_proto_dot_v2_dot_transaction_dot_transaction__pb2.TransactionInfos.FromString,
                )
        self.FinishTransaction = channel.unary_unary(
                '/transaction_v2.API/FinishTransaction',
                request_serializer=python__pachyderm_dot_proto_dot_v2_dot_transaction_dot_transaction__pb2.FinishTransactionRequest.SerializeToString,
                response_deserializer=python__pachyderm_dot_proto_dot_v2_dot_transaction_dot_transaction__pb2.TransactionInfo.FromString,
                )
        self.DeleteAll = channel.unary_unary(
                '/transaction_v2.API/DeleteAll',
                request_serializer=python__pachyderm_dot_proto_dot_v2_dot_transaction_dot_transaction__pb2.DeleteAllRequest.SerializeToString,
                response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
                )
class APIServicer(object):
    """Missing associated documentation comment in .proto file."""

    # NOTE(review): generated server-side base class; every handler returns
    # UNIMPLEMENTED until overridden. Do not hand-edit -- regenerate from
    # transaction.proto instead.

    def BatchTransaction(self, request, context):
        """Transaction rpcs
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def StartTransaction(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def InspectTransaction(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def DeleteTransaction(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def ListTransaction(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def FinishTransaction(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def DeleteAll(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_APIServicer_to_server(servicer, server):
    # NOTE(review): generated registration helper -- maps each RPC name of
    # transaction_v2.API to the servicer's handler and attaches them to the
    # given grpc server. Regenerate from transaction.proto; do not hand-edit.
    rpc_method_handlers = {
            'BatchTransaction': grpc.unary_unary_rpc_method_handler(
                    servicer.BatchTransaction,
                    request_deserializer=python__pachyderm_dot_proto_dot_v2_dot_transaction_dot_transaction__pb2.BatchTransactionRequest.FromString,
                    response_serializer=python__pachyderm_dot_proto_dot_v2_dot_transaction_dot_transaction__pb2.TransactionInfo.SerializeToString,
            ),
            'StartTransaction': grpc.unary_unary_rpc_method_handler(
                    servicer.StartTransaction,
                    request_deserializer=python__pachyderm_dot_proto_dot_v2_dot_transaction_dot_transaction__pb2.StartTransactionRequest.FromString,
                    response_serializer=python__pachyderm_dot_proto_dot_v2_dot_transaction_dot_transaction__pb2.Transaction.SerializeToString,
            ),
            'InspectTransaction': grpc.unary_unary_rpc_method_handler(
                    servicer.InspectTransaction,
                    request_deserializer=python__pachyderm_dot_proto_dot_v2_dot_transaction_dot_transaction__pb2.InspectTransactionRequest.FromString,
                    response_serializer=python__pachyderm_dot_proto_dot_v2_dot_transaction_dot_transaction__pb2.TransactionInfo.SerializeToString,
            ),
            'DeleteTransaction': grpc.unary_unary_rpc_method_handler(
                    servicer.DeleteTransaction,
                    request_deserializer=python__pachyderm_dot_proto_dot_v2_dot_transaction_dot_transaction__pb2.DeleteTransactionRequest.FromString,
                    response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
            ),
            'ListTransaction': grpc.unary_unary_rpc_method_handler(
                    servicer.ListTransaction,
                    request_deserializer=python__pachyderm_dot_proto_dot_v2_dot_transaction_dot_transaction__pb2.ListTransactionRequest.FromString,
                    response_serializer=python__pachyderm_dot_proto_dot_v2_dot_transaction_dot_transaction__pb2.TransactionInfos.SerializeToString,
            ),
            'FinishTransaction': grpc.unary_unary_rpc_method_handler(
                    servicer.FinishTransaction,
                    request_deserializer=python__pachyderm_dot_proto_dot_v2_dot_transaction_dot_transaction__pb2.FinishTransactionRequest.FromString,
                    response_serializer=python__pachyderm_dot_proto_dot_v2_dot_transaction_dot_transaction__pb2.TransactionInfo.SerializeToString,
            ),
            'DeleteAll': grpc.unary_unary_rpc_method_handler(
                    servicer.DeleteAll,
                    request_deserializer=python__pachyderm_dot_proto_dot_v2_dot_transaction_dot_transaction__pb2.DeleteAllRequest.FromString,
                    response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'transaction_v2.API', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class API(object):
    """Missing associated documentation comment in .proto file."""

    # NOTE(review): generated convenience client -- one static helper per RPC,
    # each delegating to grpc.experimental.unary_unary. Do not hand-edit;
    # regenerate from transaction.proto instead.

    @staticmethod
    def BatchTransaction(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/transaction_v2.API/BatchTransaction',
            python__pachyderm_dot_proto_dot_v2_dot_transaction_dot_transaction__pb2.BatchTransactionRequest.SerializeToString,
            python__pachyderm_dot_proto_dot_v2_dot_transaction_dot_transaction__pb2.TransactionInfo.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def StartTransaction(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/transaction_v2.API/StartTransaction',
            python__pachyderm_dot_proto_dot_v2_dot_transaction_dot_transaction__pb2.StartTransactionRequest.SerializeToString,
            python__pachyderm_dot_proto_dot_v2_dot_transaction_dot_transaction__pb2.Transaction.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def InspectTransaction(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/transaction_v2.API/InspectTransaction',
            python__pachyderm_dot_proto_dot_v2_dot_transaction_dot_transaction__pb2.InspectTransactionRequest.SerializeToString,
            python__pachyderm_dot_proto_dot_v2_dot_transaction_dot_transaction__pb2.TransactionInfo.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def DeleteTransaction(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/transaction_v2.API/DeleteTransaction',
            python__pachyderm_dot_proto_dot_v2_dot_transaction_dot_transaction__pb2.DeleteTransactionRequest.SerializeToString,
            google_dot_protobuf_dot_empty__pb2.Empty.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def ListTransaction(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/transaction_v2.API/ListTransaction',
            python__pachyderm_dot_proto_dot_v2_dot_transaction_dot_transaction__pb2.ListTransactionRequest.SerializeToString,
            python__pachyderm_dot_proto_dot_v2_dot_transaction_dot_transaction__pb2.TransactionInfos.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def FinishTransaction(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/transaction_v2.API/FinishTransaction',
            python__pachyderm_dot_proto_dot_v2_dot_transaction_dot_transaction__pb2.FinishTransactionRequest.SerializeToString,
            python__pachyderm_dot_proto_dot_v2_dot_transaction_dot_transaction__pb2.TransactionInfo.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def DeleteAll(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/transaction_v2.API/DeleteAll',
            python__pachyderm_dot_proto_dot_v2_dot_transaction_dot_transaction__pb2.DeleteAllRequest.SerializeToString,
            google_dot_protobuf_dot_empty__pb2.Empty.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
| |
from functools import partial, wraps
from mock import patch
import ast
from sigtools import support, modifiers, specifiers, signatures, _util, _autoforwards
from sigtools.tests.util import Fixtures, tup
# Shared fixture: a function with signature (x, y, *, z) built from a string.
_wrapped = support.f('x, y, *, z', name='_wrapped')

def func(*args, **kwargs):
    # Generic no-op sink used as a call target inside the fixtures below.
    pass
class AutoforwardsMarkerReprs(Fixtures):
    # Each class attribute below is an (object, expected_repr) pair; the
    # Fixtures metaclass turns each one into a test case that runs _test.
    def _test(self, obj, exp_repr):
        self.assertEqual(repr(obj), exp_repr)

    _af = _autoforwards

    name = _af.Name('spam'), "<name 'spam'>"
    attribute = _af.Attribute(_af.Name('ham'), 'eggs'), "<attribute <name 'ham'>.eggs>"
    arg = _af.Arg('eggs'), "<argument 'eggs'>"
    unknown_unsourced = _af.Unknown(), '<irrelevant>'
    unknown_marker = (
        _af.Unknown(_af.Name('spam')),
        "<unknown until runtime: <name 'spam'>>")
    unknown_ast = (
        _af.Unknown(ast.Pass()),
        "<unknown until runtime: Pass()>")
    unknown_ast_list = (
        _af.Unknown([ast.Pass(), ast.Pass()]),
        "<unknown until runtime: [Pass(), Pass()]>")
class AutoforwardsTests(Fixtures):
    def _test(self, func, expected, sources, incoherent=False):
        # Check the computed signature, its per-parameter sources, and (unless
        # flagged incoherent) that func is actually callable per that signature.
        sig = specifiers.signature(func)
        self.assertSigsEqual(sig, support.s(expected))
        self.assertSourcesEqual(sig.sources, sources, func)
        if not incoherent:
            support.test_func_sig_coherent(
                func, check_return=False, check_invalid=False)
    # Forwarding *args/**kwargs to a module-level global is followed.
    @tup('a, b, x, y, *, z',
         {'global_': ('a', 'b'), '_wrapped': ('x', 'y', 'z')})
    def global_(a, b, *args, **kwargs):
        return _wrapped(*args, **kwargs)
    def _make_closure():
        # Build a wrapper whose forwarding target is held in a closure cell.
        wrapped = _wrapped
        def wrapper(b, a, *args, **kwargs):
            return wrapped(*args, **kwargs)
        return wrapper
closure = (
_make_closure(),
'b, a, x, y, *, z', {'wrapper': 'ba', '_wrapped': 'xyz'})
    # Forwarded call mixes explicit arguments (a, z=b) with *args/**kwargs.
    @tup('a, b, y', {'args': 'ab', '_wrapped': 'y'})
    def args(a, b, *args, **kwargs):
        return _wrapped(a, *args, z=b, **kwargs)
    # A plain parameter (b) star-unpacked in the call is not a forwarded varargs.
    @tup('a, b, *, z', {'using_other_varargs': 'ab', '_wrapped': 'z'})
    def using_other_varargs(a, b, **kwargs):
        return _wrapped(a, *b, **kwargs)
# def test_external_args(self):
# def l1():
# a = None
# def l2(**kwargs):
# nonlocal a
# _wrapped(*a, **kwargs)
# return l2
# self._test_func(l1(), '*, z')
    # A keyword-only parameter forwarded as an explicit keyword argument.
    @tup('x, y, /, *, kwop', {'kwo': ['kwop'], '_wrapped': 'xy'})
    @modifiers.kwoargs('kwop')
    def kwo(kwop, *args):
        _wrapped(*args, z=kwop)
    # Forwarding from inside a nested function is still detected.
    @tup('a, b, y, *, z', {'subdef': 'ab', '_wrapped': 'yz'})
    def subdef(a, b, *args, **kwargs):
        def func():
            _wrapped(42, *args, **kwargs)
        func()
    # Same as subdef, but the nested scope is a lambda.
    @tup('a, b, y, *, z', {'subdef_lambda': 'ab', '_wrapped': 'yz'})
    def subdef_lambda(a, b, *args, **kwargs):
        (lambda: _wrapped(42, *args, **kwargs))()
    # args/kwargs rebound inside the nested def: forwarding cannot be trusted,
    # so the generic *args/**kwargs signature is kept (source key 0).
    @tup('a, b, x, y, *, z', {0: 'ab', '_wrapped': 'xyz'})
    def rebind_in_subdef(a, b, *args, **kwargs):
        def func():
            args = 1,
            kwargs = {'z': 2}
            _wrapped(42, *args, **kwargs)
        _wrapped(*args, **kwargs)
        func()
    # The nested def shadows args/kwargs as its own parameters; only the outer
    # forwarding call counts.
    @tup('a, b, x, y, *, z', {'rebind_subdef_param': 'ab', '_wrapped': 'xyz'})
    def rebind_subdef_param(a, b, *args, **kwargs):
        def func(*args, **kwargs):
            _wrapped(42, *args, **kwargs)
        _wrapped(*args, **kwargs)
        func(2, z=3)
    # Lambda parameters shadow args/kwargs, so no forwarding is detected at all.
    @tup('a, b, *args, **kwargs',
         {'rebind_subdef_lambda_param': ['a', 'b', 'args', 'kwargs']})
    def rebind_subdef_lambda_param(a, b, *args, **kwargs):
        f = lambda *args, **kwargs: _wrapped(*args, **kwargs)
        f(1, 2, z=3)
# @tup('a, b, x, y, *, z', {0: 'ab', '_wrapped': 'xyz'})
# def nonlocal_already_executed(a, b, *args, **kwargs):
# def make_ret2(args, kwargs):
# def ret2():
# _wrapped(*args, **kwargs)
# make_ret2(args, kwargs)
# def ret1():
# nonlocal args, kwargs
# args = ()
# kwargs = {}
    def test_nonlocal_outside(self):
        # A nonlocal binding to an outer-scope target still resolves to _wrapped.
        x = _wrapped
        def l1(*args, **kwargs):
            nonlocal x
            x(*args, **kwargs)
        self._test(l1, 'x, y, *, z', {_wrapped: 'xyz'})
    def test_partial(self):
        # functools.partial supplying the positional target is resolved, and
        # the source depths reflect partial -> wrapper -> wrapped.
        def _wrapper(wrapped, a, *args, **kwargs):
            return wrapped(*args, **kwargs)
        func = partial(_wrapper, _wrapped)
        sig = specifiers.signature(func)
        self.assertSigsEqual(sig, support.s('a, x, y, *, z'))
        self.assertEqual(sig.sources, {
            'a': [_wrapper],
            'x': [_wrapped], 'y': [_wrapped], 'z': [_wrapped],
            '+depths': {func: 0, _wrapper: 1, _wrapped: 2}
        })
        support.test_func_sig_coherent(
            func, check_return=False, check_invalid=False)
    # Helper for test_partial_kwo: the forwarding target arrives as a
    # keyword-only argument.
    @staticmethod
    @modifiers.kwoargs('wrapped')
    def _wrapped_kwoarg(a, wrapped, *args, **kwargs):
        return wrapped(*args, **kwargs)  # pragma: no cover
    def test_partial_kwo(self):
        """When given keyword arguments, functools.partial only makes them
        defaults. The full signature is therefore not fully determined, since
        the user can replace wrapped and change the meaning of *args, **kwargs.

        The substitution could be made in good faith that the user wouldn't
        change the value of the parameter, but this would potentially cause
        confusing documentation where a function description says remaining
        arguments will be forwarded to the given function, while the signature
        in the documentation only shows the default target's arguments.
        """
        func = partial(AutoforwardsTests._wrapped_kwoarg, wrapped=_wrapped)
        expected = support.s('a, *args, wrapped=w, **kwargs',
                             locals={'w': _wrapped})
        self.assertSigsEqual(specifiers.signature(func), expected)
# Plain function stored as a class attribute; the fixture below checks
# that forwarding through a dotted attribute lookup is followed.
_wrapped_attr = staticmethod(support.f('d, e, *, f'))

@tup('a, d, e, *, f', {0: 'a', 'func': 'def'})
def global_attribute(a, *args, **kwargs):
    AutoforwardsTests._wrapped_attr(*args, **kwargs)
def test_instance_attribute(self):
    # Forwarding via ``self.wrapped`` on a bound method: the target
    # method's remaining parameter (y) is merged into the signature.
    class A(object):
        def wrapped(self, x, y):
            pass

        def method(self, a, *args, **kwargs):
            self.wrapped(a, *args, **kwargs)
    a = A()
    self._test(a.method, 'a, y', {0: 'a', 'wrapped': 'y'})
def test_multiple_method_calls(self):
    # Two forwarding calls at the same depth: both targets contribute
    # sources, and both sit one level below the entry method.
    class A(object):
        def wrapped_1(self, x, y):
            pass

        def wrapped_2(self, x, y):
            pass

        def method(self, a, *args, **kwargs):
            self.wrapped_1(a, *args, **kwargs)
            self.wrapped_2(a, *args, **kwargs)
    self._test(A().method, 'a, y', _util.OrderedDict([
        (0, 'a'), ('method', 'a'),
        ('wrapped_1', 'y'), ('wrapped_2', 'y'),
        ('+depths', {'method': 0, 'wrapped_1': 1, 'wrapped_2': 1})]))
# Multi-level forwarding: deeparg hands _wrapped down through two
# intermediate static helpers, each of which adds one keyword-only
# parameter of its own along the way.
@staticmethod
@modifiers.kwoargs('b')
def _deeparg_l1(l2, b, *args, **kwargs):
    l2(*args, **kwargs)

@staticmethod
@modifiers.kwoargs('c')
def _deeparg_l2(l3, c, *args, **kwargs):
    l3(*args, **kwargs)

@tup('x, y, *, a, b, c, z', {
    0: 'a', '_deeparg_l1': 'b', '_deeparg_l2': 'c', _wrapped: 'xyz'})
@modifiers.kwoargs('a')
def deeparg(a, *args, **kwargs):
    AutoforwardsTests._deeparg_l1(
        AutoforwardsTests._deeparg_l2, _wrapped,
        *args, **kwargs)

# Same chain, but the next-level targets are passed as keyword
# arguments rather than positionally.
@staticmethod
@modifiers.kwoargs('l2')
def _deeparg_kwo_l1(l2, b, *args, **kwargs):
    l2(*args, **kwargs)

@staticmethod
@modifiers.kwoargs('l3')
def _deeparg_kwo_l2(l3, c, *args, **kwargs):
    l3(*args, **kwargs)

@tup('a, b, c, x, y, *, z', {
    0: 'a', '_deeparg_kwo_l1': 'b', '_deeparg_kwo_l2': 'c', _wrapped: 'xyz'})
def deeparg_kwo(a, *args, **kwargs):
    AutoforwardsTests._deeparg_kwo_l1(
        *args, l2=AutoforwardsTests._deeparg_kwo_l2, l3=_wrapped, **kwargs)
# The forwarding call may appear anywhere inside another call's argument
# list (positional, keyword, *splat or **splat) and is still detected.
@tup('a, x, y, *, z', {0: 'a', _wrapped: 'xyz'})
def call_in_args(a, *args, **kwargs):
    func(_wrapped(*args, **kwargs))

@tup('a, x, y, *, z', {0: 'a', _wrapped: 'xyz'})
def call_in_kwargs(a, *args, **kwargs):
    func(kw=_wrapped(*args, **kwargs))

@tup('a, x, y, *, z', {0: 'a', _wrapped: 'xyz'})
def call_in_varargs(a, *args, **kwargs):
    func(*_wrapped(*args, **kwargs))

@tup('a, x, y, *, z',
     {0: 'a', _wrapped: 'xyz', '+depths': ['call_in_varkwargs', '_wrapped']})
def call_in_varkwargs(a, *args, **kwargs):
    func(**_wrapped(*args, **kwargs))
def test_functools_wrapped(self):
    # functools.wraps copies _wrapped's metadata onto func; the analysis
    # must still be driven by the actual forwarding call in the body
    # (note the leading literal 1 consumes _wrapped's first parameter).
    @wraps(_wrapped)
    def func(a, *args, **kwargs):
        _wrapped(1, *args, **kwargs)
    sig = specifiers.signature(func)
    self.assertSigsEqual(sig, support.s('a, y, *, z'))
    self.assertEqual(sig.sources, {
        '+depths': {func: 0, _wrapped: 1},
        'a': [func],
        'y': [_wrapped], 'z': [_wrapped]
    })
    support.test_func_sig_coherent(
        func, check_return=False, check_invalid=False)
def test_decorator_wraps(self):
    # Typical decorator pattern: wraps + autokwoargs on the inner
    # function; the decorated function's own params (a, b=2) merge with
    # the target's remaining ones.
    def decorator(function):
        @wraps(function)
        @modifiers.autokwoargs
        def _decorated(a, b=2, *args, **kwargs):
            function(1, *args, **kwargs)
        return _decorated
    func = decorator(_wrapped)
    sig = specifiers.signature(func)
    self.assertSigsEqual(sig, support.s('a, y, *, b=2, z'))
    self.assertEqual(sig.sources, {
        '+depths': {func: 0, _wrapped: 1},
        'a': [func], 'b': [func],
        'y': [_wrapped], 'z': [_wrapped]
    })
    support.test_func_sig_coherent(
        func, check_return=False, check_invalid=False)
# ``args`` is rebound before the forwarding call, so the positional part
# can no longer be resolved; only **kwargs (-> z) gets substituted and
# *args stays in the reported signature.
@tup('a, b, *args, z',
     {'unknown_args': ['a', 'b', 'args'], '_wrapped': 'z'})
def unknown_args(a, b, *args, **kwargs):
    args = (1, 2)
    return _wrapped(*args, **kwargs)
# @tup('a, b, c, x, y, *, z', {0: 'ab', 'sub': 'c', '_wrapped': 'xyz'})
# def use_subdef(a, b, *args, **kwargs):
# def sub(c, *args, **kwargs):
# _wrapped(*args, **kwargs)
# sub(1, *args, **kwargs)
# Forwarding into functools.partial: the substituted parameters become
# optional (=None defaults), since the partial may never be called.
@tup('a, b, x=None, y=None, *, z=None', {0: 'ab', '_wrapped': 'xyz'})
def pass_to_partial(a, b, *args, **kwargs):
    partial(_wrapped, *args, **kwargs)

@tup('a, b, y=None', {0: 'ab', '_wrapped': 'y'})
def pass_to_partial_with_args(a, b, *args, **kwargs):
    partial(_wrapped, a, *args, z=b, **kwargs)

# Using args/kwargs as plain objects in another call, before or after
# the forwarding call, does not invalidate the substitution.
@tup('x, y, *, z', {'_wrapped': 'xyz'})
def kwargs_passed_to_func_after(*args, **kwargs):
    _wrapped(*args, **kwargs)
    func(kwargs)

@tup('x, y, *, z', {'_wrapped': 'xyz'})
def args_passed_to_func(*args, **kwargs):
    func(args)
    _wrapped(*args, **kwargs)
# Module-level non-callable sentinel, deliberately shadowed by the
# ``not_callable`` fixture inside the class below.
not_callable = None


class UnresolvableAutoforwardsTests(Fixtures):
    """Cases where forwarding cannot be resolved.

    The reported signature must fall back to the function's plain
    inspect signature; ``@tup(False)`` appears to mark fixtures whose
    fallback signature is still coherent with how the function can be
    called — TODO confirm against the Fixtures metaclass.
    """

    def _test(self, func, ensure_incoherent=True):
        # Fallback behaviour: specifiers.signature must equal the plain
        # signature when forwarding could not be analysed.
        self.assertSigsEqual(
            specifiers.signature(func),
            signatures.signature(func))
        if ensure_incoherent:
            # The naive signature genuinely mismatches real call behaviour.
            with self.assertRaises(AssertionError):
                support.test_func_sig_coherent(
                    func, check_return=False, check_invalid=False)

    @tup(False)
    def missing_global(a, b, *p, **k):
        return doesntexist(*p, **k)  # pyflakes: silence

    @tup()
    def builtin(a, b, *args, **kwargs):
        return iter(*args, **kwargs)

    def test_get_from_object(self):
        # Method retrieved through safe_get on an unrelated instance.
        class A(object):
            def wrapped(self, x, y):
                pass

            def method(self, a, *p, **k):
                self.wrapped(a, *p, **k)
        method = _util.safe_get(A.__dict__['method'], object(), type(A))
        self._test(method, ensure_incoherent=False)

    def test_unset_attribute(self):
        # Target attribute does not exist on the instance.
        class A(object):
            def method(self, a, *p, **k):
                self.wrapped(a, *p, **k)
        a = A()
        self._test(a.method, ensure_incoherent=False)

    @tup(False)
    def attribute_on_unset(*a, **k):
        doesntexist.method(*a, **k)  # pyflakes: silence

    @tup()
    def constant(a, *p, **k):
        None(*p, **k)

    @tup()
    def not_callable(a, *p, **k):
        not_callable(*p, **k)

    def test_no_sig(self):
        # The forwarding target has no retrievable signature: patch the
        # signature backend to raise for this one object.
        obj = object()
        sig = support.s('a, *p, **k')

        def sig_replace(obj_):
            if obj is obj_:
                raise ValueError("no sig for obj")
            else:
                return sig

        def func(a, *p, **k):
            obj(*p, **k)
        with patch.multiple(_util.funcsigs, signature=sig_replace):
            self.assertSigsEqual(specifiers.signature(func), sig)

    @tup()
    def nonforwardable(*args):
        # No **kwargs at all, so full forwarding is impossible.
        _wrapped(*args)

    def test_super_with_args(self):
        # Two-argument super(): the MRO-dependent target cannot be pinned
        # down statically.
        class Base(object):
            def method(self, x, y, z):
                pass

        class Derived(Base):
            def method(self, a, *args, **kwargs):
                super(Derived, self).method(*args, **kwargs)

        class MixIn(Base):
            def method(self, b, *args, **kwargs):
                super(MixIn, self).method(*args, **kwargs)

        class MixedIn(Derived, MixIn):
            pass
        for cls in [Derived, MixedIn]:
            with self.subTest(cls=cls.__name__):
                self._test(cls().method)

    # kwargs escaping or being mutated makes the substitution unsafe.
    @tup()
    def kwargs_passed_to_func(**kwargs):
        func(kwargs)
        _wrapped(**kwargs)

    @tup()
    def kwargs_method_called(**kwargs):
        kwargs.update({})
        _wrapped(**kwargs)

    @tup()
    def kwargs_item_added(**kwargs):
        kwargs['ham'] = 'spam'
        _wrapped(**kwargs)

    @tup(False)
    def kwargs_item_removed(**kwargs):
        del kwargs['ham']
        _wrapped(**kwargs)

    @tup()
    def kwargs_item_popped(**kwargs):
        kwargs.pop('ham', 'default')
        _wrapped(**kwargs)

    @tup(False)
    def kwargs_item_accessed(**kwargs):
        kwargs['ham']
        _wrapped(**kwargs)

    # Nested functions rebinding args/kwargs via ``nonlocal``.
    @tup()
    def rebind_subdef_nonlocal(a, b, *args, **kwargs):
        def func():
            nonlocal args, kwargs
            args = 2,
            kwargs = {'z': 3}
            _wrapped(42, *args, **kwargs)
        func()
        _wrapped(*args, **kwargs)

    @tup()
    def nonlocal_backchange(a, b, *args, **kwargs):
        def ret1():
            _wrapped(*args, **kwargs)

        def ret2():
            nonlocal args, kwargs
            args = ()
            kwargs = {}
        ret2()
        ret1()

    @tup()
    def nonlocal_deep(a, *args, **kwargs):
        def l1():
            def l2():
                nonlocal args, kwargs
                args = ()
                kwargs = {}
            l2()
        l1()
        _wrapped(*args, **kwargs)

    def test_missing_freevar(self):
        # Closure over a deleted local: the free variable is unbound.
        def make_closure():
            var = 1
            del var

            def func(a, *p, **k):
                var(*p, **k)  # pyflakes: silence
            return func
        self._test(make_closure(), ensure_incoherent=False)

    def test_deleted(self):
        # kwargs cell deleted before the closure escapes.
        def makef(**kwargs):
            def func():
                _wrapped(**kwargs)  # pyflakes: silence
            del kwargs
            return func
        self._test(makef, ensure_incoherent=False)

    def test_super_without_args(self):
        # Zero-argument super() is equally unresolvable statically.
        class Base:
            def method(self, x, y, *, z):
                pass

        class Derived(Base):
            def method(self, *args, a, **kwargs):
                super().method(*args, **kwargs)

        class MixIn(Base):
            def method(self, *args, b, **kwargs):
                super().method(*args, **kwargs)

        class MixedIn(Derived, MixIn):
            pass
        for cls in [Derived, MixedIn]:
            with self.subTest(cls=cls.__name__):
                self._test(cls().method)
class UnresolvableAutoforwardsWithSourcesTests(Fixtures):
    """Unresolvable forwarding where a signature is still produced: every
    parameter is attributed to the outer function itself (source 0)."""

    def _test(self, func, expected, expected_src):
        sig = specifiers.signature(func)
        self.assertSigsEqual(sig, support.s(expected))
        self.assertSourcesEqual(sig.sources, expected_src, func)
        with self.assertRaises(AssertionError):
            support.test_func_sig_coherent(
                func, check_return=False, check_invalid=False)

    # Splatting the same/the wrong object twice cannot be resolved.
    @tup('v, w, *a, **k', {0: 'vwak'})
    def double_starargs(v, w, *a, **k):
        _wrapped(*a, *a)

    @tup('v, w, *a, **k', {0: 'vwak'})
    def double_kwargs(v, w, *a, **k):
        _wrapped(**k, **w)
| |
from __future__ import absolute_import
import unittest
from pychron.experiment.utilities.frequency_generator import frequency_index_gen, parse_frequency_template, \
validate_frequency_template
from six.moves import range
class FrequencyTemplateTestCase(unittest.TestCase):
    """Parsing, validation and application of frequency templates.

    Review fix: ``test_template_start_end2`` and ``test_template_start_idx3``
    were each defined twice in this class; the second definition silently
    shadowed the first, so the earlier tests were never collected or run.
    The shadowed definitions are renamed (``test_template_start_end_two_groups``
    and ``test_template_start_idx_bounded``) so every case now executes.
    """

    # ---- parse_frequency_template -------------------------------------
    def test_parse1(self):
        self._test_parse('s', (True, False, False, None))

    def test_parse2(self):
        self._test_parse('s,e', (True, True, False, None))

    def test_parse3(self):
        self._test_parse('s,3,4,e', (True, True, False, [3, 4]))

    def test_parse4(self):
        self._test_parse('s,E', (True, True, True, None))

    def _test_parse(self, v, args):
        """Parse template *v* and compare the resulting tuple with *args*."""
        pargs = parse_frequency_template(v)
        self.assertTupleEqual(args, pargs)

    # ---- validate_frequency_template ----------------------------------
    def test_pass_validate1(self):
        self._test_pass('s')

    def test_pass_validate2(self):
        self._test_pass('s,e')

    def test_pass_validate3(self):
        self._test_pass('s,3,e')

    def test_pass_validate4(self):
        self._test_pass('s,3,4,e')

    def test_pass_validate5(self):
        self._test_pass('3')

    def test_pass_validate6(self):
        self._test_pass('3,4')

    def test_pass_validate7(self):
        self._test_pass('3,4,e')

    def test_pass_validate8(self):
        self._test_pass('e')

    def test_pass_validate9(self):
        self._test_pass('s,E')

    def test_pass_validate10(self):
        self._test_pass('s,3,E')

    def test_fail_validate1(self):
        self._test_fail('s,')

    def test_fail_validate2(self):
        self._test_fail('s,x')

    def test_fail_validate3(self):
        self._test_fail('s,3e')

    def test_fail_validate4(self):
        self._test_fail('s,3e,4')

    def test_fail_validate5(self):
        self._test_fail('3e')

    def test_fail_validate6(self):
        self._test_fail('3,')

    def test_fail_validate7(self):
        self._test_fail('3,4,')

    def test_fail_validate8(self):
        self._test_fail('e,')

    def test_fail_validate9(self):
        self._test_fail(',e')

    def _test_pass(self, v):
        """Valid templates validate truthy."""
        self.assertTrue(validate_frequency_template(v))

    def _test_fail(self, v):
        """Invalid templates validate to None."""
        self.assertIsNone(validate_frequency_template(v))

    # ---- template application -----------------------------------------
    def test_template_start(self):
        self._test_template('s', ['blank', 'unknown', 'unknown', 'unknown'])

    def test_template_end(self):
        self._test_template('e', ['unknown', 'unknown', 'unknown', 'blank'])

    def test_template_start_end(self):
        self._test_template('s,e', ['blank', 'unknown', 'unknown', 'unknown', 'blank'])

    def test_template_start_idx(self):
        self._test_template('s,2', ['blank', 'unknown', 'unknown', 'blank', 'unknown'])

    def test_template_start_end_idx(self):
        self._test_template('s,2,e', ['blank', 'unknown', 'unknown', 'blank', 'unknown', 'blank'])

    def test_template_start_idx2(self):
        self._test_template('s,2', ['blank', 'unknown', 'unknown', 'blank', 'unknown',
                                    'blank', 'unknown', 'unknown', 'blank', 'unknown'],
                            runs=self._get_runs())

    def test_template_start_end_idx2(self):
        self._test_template('s,2,e', ['blank', 'unknown', 'unknown', 'blank', 'unknown', 'blank',
                                      'blank', 'unknown', 'unknown', 'blank', 'unknown', 'blank'],
                            runs=self._get_runs())

    def test_template_start_idx_bounded(self):
        # Renamed from test_template_start_idx3 (was shadowed by a later
        # definition of the same name).
        self._test_template('s,2,10', ['blank', 'unknown', 'unknown', 'blank', 'unknown', 'blank'])

    def test_template_start2(self):
        self._test_template('s', ['blank', 'unknown', 'unknown', 'unknown',
                                  'blank', 'unknown', 'unknown', 'unknown', ],
                            runs=self._get_runs())

    def test_template_start_end_two_groups(self):
        # Renamed from test_template_start_end2 (was shadowed by a later
        # definition of the same name).
        self._test_template('s,e', ['blank', 'unknown', 'unknown', 'unknown', 'blank',
                                    'blank', 'unknown', 'unknown', 'unknown', 'blank'],
                            runs=self._get_runs())

    def test_template_end2(self):
        self._test_template('e', ['unknown', 'unknown', 'unknown', 'blank',
                                  'unknown', 'unknown', 'unknown', 'blank'],
                            runs=self._get_runs())

    def test_template_compress(self):
        # An interleaved 'air' run does not get an extra trailing blank.
        self._test_template('e', ['unknown', 'unknown', 'unknown', 'blank', 'air',
                                  'unknown', 'unknown', 'unknown', 'blank'],
                            runs=self._get_runs2())

    def test_template_start_end_ex(self):
        # 'E' variant: a single blank at the very end of the sequence.
        self._test_template('s,E', ['blank', 'unknown', 'unknown', 'unknown',
                                    'blank', 'unknown', 'unknown', 'unknown', 'blank'],
                            runs=self._get_runs())

    def test_template_start_end_ex2(self):
        self._test_template('s,E', ['blank', 'unknown', 'unknown', 'unknown', 'blank', 'air',
                                    'blank', 'unknown', 'unknown', 'unknown', 'blank'],
                            runs=self._get_runs2())

    def test_template_start_end_ex3(self):
        self._test_template('s,E', ['blank', 'unknown', 'unknown', 'unknown', 'blank', 'air',
                                    'blank', 'unknown', 'unknown', 'unknown', 'blank', 'air',
                                    'blank', 'unknown', 'unknown', 'unknown', 'blank'],
                            runs=self._get_runs3())

    def test_template_start_end2(self):
        self._test_template('s,e', ['blank', 'unknown', 'unknown', 'unknown', 'blank',
                                    'blank', 'unknown', 'unknown', 'unknown', 'blank',
                                    'blank', 'unknown', 'unknown', 'unknown', 'blank'],
                            runs=self._get_runs4())

    def test_template_start_idx3(self):
        # Runs flagged skip=True are ignored by the index generator.
        runs = [Run() for i in range(4)]
        runs[1].skip = True
        self._test_template('s,2',
                            ['blank', 'unknown', 'unknown', 'unknown', 'blank', 'unknown'],
                            runs=runs)

    def test_template_start_idx4(self):
        runs = [Run() for i in range(4)] + [Run(aliquot=1) for i in range(4)]
        runs[1].skip = True
        runs[5].skip = True
        self._test_template('s,2',
                            ['blank', 'unknown', 'unknown', 'unknown', 'blank', 'unknown',
                             'blank', 'unknown', 'unknown', 'unknown', 'blank', 'unknown'],
                            runs=runs)

    # ---- fixtures ------------------------------------------------------
    def _get_runs(self):
        # Two aliquot groups of three unknowns each.
        return [Run() for i in range(3)] + [Run(aliquot=1) for i in range(3)]

    def _get_runs2(self):
        # Two groups separated by an 'air' analysis.
        return [Run() for i in range(3)] + [Run(analysis_type='air')] + \
            [Run(aliquot=1) for i in range(3)]

    def _get_runs3(self):
        # Three groups with 'air' analyses between them.
        return [Run() for i in range(3)] + [Run(analysis_type='air')] + \
            [Run(aliquot=1) for i in range(3)] + [Run(analysis_type='air')] + \
            [Run(aliquot=2) for i in range(3)]

    def _get_runs4(self):
        # Three adjacent aliquot groups.
        return [Run() for i in range(3)] + \
            [Run(aliquot=1) for i in range(3)] + \
            [Run(aliquot=2) for i in range(3)]

    def _test_template(self, temp, exp, runs=None):
        """Apply template *temp* to *runs* (default: three unknowns) by
        inserting blank runs at the generated indices, then compare the
        resulting analysis-type sequence with *exp*."""
        if runs is None:
            runs = [Run() for i in range(3)]
        # Insert back-to-front so earlier indices remain valid.
        for i in reversed(list(frequency_index_gen(runs, temp, ('unknown', ), False, False))):
            r = Run()
            r.analysis_type = 'blank'
            runs.insert(i, r)
        atypes = [ri.analysis_type for ri in runs]
        self.assertListEqual(atypes, exp)
class Run(object):
    """Minimal stand-in for an automated run; models only the attributes
    the frequency generator inspects."""

    # Class-level defaults, shadowed per-instance in __init__ (except
    # ``skip``, which is only ever set by tests that need it).
    analysis_type = 'unknown'
    aliquot = 0
    skip = False

    def __init__(self, aliquot=0, analysis_type='unknown'):
        self.analysis_type = analysis_type
        self.aliquot = aliquot
class FrequencyTestCase(unittest.TestCase):
    """Blank insertion driven by numeric frequencies, covering the
    before/after flags and subset/offset (sidx) behaviour."""

    def setUp(self):
        self.runs = [Run() for i in range(10)]

    def _insert_blanks(self, index_gen):
        """Insert a blank Run into self.runs at each index yielded by
        *index_gen* and return the resulting analysis-type list.

        Indices are consumed up-front and applied back-to-front so that
        earlier insertion points stay valid.  Extracted in review: this
        loop was duplicated verbatim in every test method.
        """
        runs = self.runs
        for i in reversed(list(index_gen)):
            r = Run()
            r.analysis_type = 'blank'
            runs.insert(i, r)
        return [ri.analysis_type for ri in runs]

    def test_before(self):
        atypes = self._insert_blanks(
            frequency_index_gen(self.runs, 2, ('unknown', ), True, False))
        self.assertListEqual(atypes, ['blank', 'unknown', 'unknown',
                                      'blank', 'unknown', 'unknown',
                                      'blank', 'unknown', 'unknown',
                                      'blank', 'unknown', 'unknown',
                                      'blank', 'unknown', 'unknown', ])

    def test_after(self):
        atypes = self._insert_blanks(
            frequency_index_gen(self.runs, 2, ('unknown', ), False, True))
        self.assertListEqual(atypes, ['unknown', 'unknown', 'blank',
                                      'unknown', 'unknown', 'blank',
                                      'unknown', 'unknown', 'blank',
                                      'unknown', 'unknown', 'blank',
                                      'unknown', 'unknown', 'blank', ])

    def test_before_and_after(self):
        atypes = self._insert_blanks(
            frequency_index_gen(self.runs, 2, ('unknown', ), True, True))
        self.assertListEqual(atypes, ['blank', 'unknown', 'unknown', 'blank',
                                      'unknown', 'unknown', 'blank',
                                      'unknown', 'unknown', 'blank',
                                      'unknown', 'unknown', 'blank',
                                      'unknown', 'unknown', 'blank', ])

    def test_not_before_or_after(self):
        atypes = self._insert_blanks(
            frequency_index_gen(self.runs, 2, ('unknown', ), False, False))
        self.assertListEqual(atypes, ['unknown', 'unknown', 'blank',
                                      'unknown', 'unknown', 'blank',
                                      'unknown', 'unknown', 'blank',
                                      'unknown', 'unknown', 'blank',
                                      'unknown', 'unknown'])

    def test_not_before_or_after3(self):
        atypes = self._insert_blanks(
            frequency_index_gen(self.runs, 3, ('unknown', ), False, False))
        self.assertListEqual(atypes, ['unknown', 'unknown', 'unknown', 'blank',
                                      'unknown', 'unknown', 'unknown', 'blank',
                                      'unknown', 'unknown', 'unknown', 'blank',
                                      'unknown'])

    def test_after_subset1(self):
        # Only the first seven runs feed the generator; insertions still
        # apply to the full list.
        atypes = self._insert_blanks(
            frequency_index_gen(self.runs[:7], 3, ('unknown', ), False, False))
        self.assertListEqual(atypes, ['unknown', 'unknown', 'unknown', 'blank',
                                      'unknown', 'unknown', 'unknown', 'blank',
                                      'unknown', 'unknown', 'unknown',
                                      'unknown'])

    def test_after_subset2(self):
        # sidx offsets the generated indices to account for the slice.
        sidx = 3
        atypes = self._insert_blanks(
            frequency_index_gen(self.runs[sidx:], 3, ('unknown', ), False, True, sidx=sidx))
        self.assertListEqual(atypes, ['unknown', 'unknown', 'unknown',
                                      'unknown', 'unknown', 'unknown', 'blank',
                                      'unknown', 'unknown', 'unknown', 'blank',
                                      'unknown'])

    def test_after_subset3(self):
        sidx = 3
        atypes = self._insert_blanks(
            frequency_index_gen(self.runs[sidx:], 3, ('unknown', ), False, False, sidx=sidx))
        self.assertListEqual(atypes, ['unknown', 'unknown', 'unknown',
                                      'unknown', 'unknown', 'unknown', 'blank',
                                      'unknown', 'unknown', 'unknown',
                                      'unknown'])
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
| |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import uuid
import mock
from keystone.common import dependency
from keystone import config
from keystone.contrib.revoke import model
from keystone import exception
from keystone.openstack.common import timeutils
from keystone import tests
from keystone.tests import test_backend_sql
CONF = config.CONF
def _new_id():
return uuid.uuid4().hex
def _future_time():
    """Return a timestamp 1000 seconds in the future of 'now'."""
    return timeutils.utcnow() + datetime.timedelta(seconds=1000)
def _past_time():
    """Return a timestamp 1000 days in the past of 'now'."""
    return timeutils.utcnow() - datetime.timedelta(days=1000)
def _sample_blank_token():
    """Return blank token data issued two minutes in the past."""
    issued_at = timeutils.utcnow() - datetime.timedelta(minutes=2)
    return model.blank_token_data(issued_at)
def _matches(event, token_values):
"""See if the token matches the revocation event.
Used as a secondary check on the logic to Check
By Tree Below: This is abrute force approach to checking.
Compare each attribute from the event with the corresponding
value from the token. If the event does not have a value for
the attribute, a match is still possible. If the event has a
value for the attribute, and it does not match the token, no match
is possible, so skip the remaining checks.
:param event one revocation event to match
:param token_values dictionary with set of values taken from the
token
:returns if the token matches the revocation event, indicating the
token has been revoked
"""
# The token has three attributes that can match the user_id
if event.user_id is not None:
for attribute_name in ['user_id', 'trustor_id', 'trustee_id']:
if event.user_id == token_values[attribute_name]:
break
else:
return False
# The token has two attributes that can match the domain_id
if event.domain_id is not None:
dom_id_matched = False
for attribute_name in ['user_domain_id', 'project_domain_id']:
if event.domain_id == token_values[attribute_name]:
dom_id_matched = True
break
if not dom_id_matched:
return False
# If any one check does not match, the while token does
# not match the event. The numerous return False indicate
# that the token is still valid and short-circuits the
# rest of the logic.
attribute_names = ['project_id',
'expires_at', 'trust_id', 'consumer_id',
'access_token_id']
for attribute_name in attribute_names:
if getattr(event, attribute_name) is not None:
if (getattr(event, attribute_name) !=
token_values[attribute_name]):
return False
if event.role_id is not None:
roles = token_values['roles']
role_found = False
for role in roles:
if event.role_id == role:
role_found = True
break
if not role_found:
return False
if token_values['issued_at'] > event.issued_before:
return False
return True
@dependency.requires('revoke_api')
class RevokeTests(object):
    """Backend-agnostic revocation tests; mixed into a TestCase that
    loads a concrete revoke_api driver (SQL or KVS below)."""

    def test_list(self):
        # Each revocation adds exactly one event.
        self.revoke_api.revoke_by_user(user_id=1)
        self.assertEqual(1, len(self.revoke_api.get_events()))
        self.revoke_api.revoke_by_user(user_id=2)
        self.assertEqual(2, len(self.revoke_api.get_events()))

    def test_list_since(self):
        # get_events(t) only returns events issued after t.
        self.revoke_api.revoke_by_user(user_id=1)
        self.revoke_api.revoke_by_user(user_id=2)
        past = timeutils.utcnow() - datetime.timedelta(seconds=1000)
        self.assertEqual(2, len(self.revoke_api.get_events(past)))
        future = timeutils.utcnow() + datetime.timedelta(seconds=1000)
        self.assertEqual(0, len(self.revoke_api.get_events(future)))

    def test_past_expiry_are_removed(self):
        user_id = 1
        self.revoke_api.revoke_by_expiration(user_id, _future_time())
        self.assertEqual(1, len(self.revoke_api.get_events()))
        event = model.RevokeEvent()
        event.revoked_at = _past_time()
        self.revoke_api.revoke(event)
        # The already-expired event is dropped, so the count stays at 1.
        self.assertEqual(1, len(self.revoke_api.get_events()))

    @mock.patch.object(timeutils, 'utcnow')
    def test_expired_events_removed_validate_token_success(self, mock_utcnow):
        def _sample_token_values():
            token = _sample_blank_token()
            token['expires_at'] = timeutils.isotime(_future_time(),
                                                    subsecond=True)
            return token
        now = datetime.datetime.utcnow()
        now_plus_2h = now + datetime.timedelta(hours=2)
        mock_utcnow.return_value = now
        # Build a token and validate it. This will seed the cache for the
        # future 'synchronize' call.
        token_values = _sample_token_values()
        user_id = _new_id()
        self.revoke_api.revoke_by_user(user_id)
        token_values['user_id'] = user_id
        self.assertRaises(exception.TokenNotFound,
                          self.revoke_api.check_token,
                          token_values)
        # Move our clock forward by 2h, build a new token and validate it.
        # 'synchronize' should now be exercised and remove old expired events
        mock_utcnow.return_value = now_plus_2h
        self.revoke_api.revoke_by_expiration(_new_id(), now_plus_2h)
        # should no longer throw an exception
        self.revoke_api.check_token(token_values)
class SqlRevokeTests(test_backend_sql.SqlTests, RevokeTests):
    """RevokeTests against the SQL revocation backend."""

    def config_overrides(self):
        super(SqlRevokeTests, self).config_overrides()
        self.config_fixture.config(
            group='revoke',
            driver='keystone.contrib.revoke.backends.sql.Revoke')
        # NOTE(review): revoke_by_id=False presumably forces the
        # event-based revocation path under test — confirm.
        self.config_fixture.config(
            group='token',
            provider='keystone.token.providers.pki.Provider',
            revoke_by_id=False)
class KvsRevokeTests(tests.TestCase, RevokeTests):
    """RevokeTests against the KVS (in-memory) revocation backend."""

    def config_overrides(self):
        super(KvsRevokeTests, self).config_overrides()
        self.config_fixture.config(
            group='revoke',
            driver='keystone.contrib.revoke.backends.kvs.Revoke')
        self.config_fixture.config(
            group='token',
            provider='keystone.token.providers.pki.Provider',
            revoke_by_id=False)

    def setUp(self):
        super(KvsRevokeTests, self).setUp()
        # Unlike the SQL variant, backends are not loaded by a fixture
        # base class here, so load them explicitly.
        self.load_backends()
class RevokeTreeTests(tests.TestCase):
    """Direct tests of model.RevokeTree.

    Events are added to the tree and sample tokens checked against it;
    every check is cross-validated with the brute-force _matches()
    helper above via self.events.
    """

    def setUp(self):
        super(RevokeTreeTests, self).setUp()
        self.events = []  # mirror of the tree's events for _matches()
        self.tree = model.RevokeTree()
        self._sample_data()

    def _sample_data(self):
        """Create three users/projects/roles, three tokens that must stay
        valid, and one token targeted for revocation."""
        user_ids = []
        project_ids = []
        role_ids = []
        for i in range(0, 3):
            user_ids.append(_new_id())
            project_ids.append(_new_id())
            role_ids.append(_new_id())

        project_tokens = []
        i = len(project_tokens)
        project_tokens.append(_sample_blank_token())
        project_tokens[i]['user_id'] = user_ids[0]
        project_tokens[i]['project_id'] = project_ids[0]
        project_tokens[i]['roles'] = [role_ids[1]]
        i = len(project_tokens)
        project_tokens.append(_sample_blank_token())
        project_tokens[i]['user_id'] = user_ids[1]
        project_tokens[i]['project_id'] = project_ids[0]
        project_tokens[i]['roles'] = [role_ids[0]]
        i = len(project_tokens)
        project_tokens.append(_sample_blank_token())
        project_tokens[i]['user_id'] = user_ids[0]
        project_tokens[i]['project_id'] = project_ids[1]
        project_tokens[i]['roles'] = [role_ids[0]]

        # Differs from every token above in exactly one dimension.
        token_to_revoke = _sample_blank_token()
        token_to_revoke['user_id'] = user_ids[0]
        token_to_revoke['project_id'] = project_ids[0]
        token_to_revoke['roles'] = [role_ids[0]]

        self.project_tokens = project_tokens
        self.user_ids = user_ids
        self.project_ids = project_ids
        self.role_ids = role_ids
        self.token_to_revoke = token_to_revoke

    def _assertTokenRevoked(self, token_data):
        # Cross-check the tree's answer against the brute-force matcher.
        self.assertTrue(any([_matches(e, token_data) for e in self.events]))
        return self.assertTrue(self.tree.is_revoked(token_data),
                               'Token should be revoked')

    def _assertTokenNotRevoked(self, token_data):
        self.assertFalse(any([_matches(e, token_data) for e in self.events]))
        return self.assertFalse(self.tree.is_revoked(token_data),
                                'Token should not be revoked')

    def _revoke_by_user(self, user_id):
        # NOTE(review): unlike the helpers below, this one does NOT append
        # to self.events; callers append the returned event themselves.
        return self.tree.add_event(
            model.RevokeEvent(user_id=user_id))

    def _revoke_by_expiration(self, user_id, expires_at):
        event = self.tree.add_event(
            model.RevokeEvent(user_id=user_id,
                              expires_at=expires_at))
        self.events.append(event)
        return event

    def _revoke_by_grant(self, role_id, user_id=None,
                         domain_id=None, project_id=None):
        event = self.tree.add_event(
            model.RevokeEvent(user_id=user_id,
                              role_id=role_id,
                              domain_id=domain_id,
                              project_id=project_id))
        self.events.append(event)
        return event

    def _revoke_by_user_and_project(self, user_id, project_id):
        event = self.tree.add_event(
            model.RevokeEvent(project_id=project_id,
                              user_id=user_id))
        self.events.append(event)
        return event

    def _revoke_by_project_role_assignment(self, project_id, role_id):
        event = self.tree.add_event(
            model.RevokeEvent(project_id=project_id,
                              role_id=role_id))
        self.events.append(event)
        return event

    def _revoke_by_domain_role_assignment(self, domain_id, role_id):
        event = self.tree.add_event(
            model.RevokeEvent(domain_id=domain_id,
                              role_id=role_id))
        self.events.append(event)
        return event

    def _user_field_test(self, field_name):
        """Revoking by user must hit tokens carrying the user id in
        *field_name* (user_id/trustor_id/trustee_id), and removing the
        event must un-revoke them."""
        user_id = _new_id()
        event = self._revoke_by_user(user_id)
        self.events.append(event)
        token_data_u1 = _sample_blank_token()
        token_data_u1[field_name] = user_id
        self._assertTokenRevoked(token_data_u1)
        token_data_u2 = _sample_blank_token()
        token_data_u2[field_name] = _new_id()
        self._assertTokenNotRevoked(token_data_u2)
        self.tree.remove_event(event)
        self.events.remove(event)
        self._assertTokenNotRevoked(token_data_u1)

    def test_revoke_by_user(self):
        self._user_field_test('user_id')

    def test_revoke_by_user_matches_trustee(self):
        self._user_field_test('trustee_id')

    def test_revoke_by_user_matches_trustor(self):
        self._user_field_test('trustor_id')

    def test_by_user_expiration(self):
        # Revocation pinned to a specific expiry only affects tokens with
        # exactly that expires_at value.
        future_time = _future_time()
        user_id = 1
        event = self._revoke_by_expiration(user_id, future_time)
        token_data_1 = _sample_blank_token()
        token_data_1['user_id'] = user_id
        token_data_1['expires_at'] = future_time
        self._assertTokenRevoked(token_data_1)
        token_data_2 = _sample_blank_token()
        token_data_2['user_id'] = user_id
        expire_delta = datetime.timedelta(seconds=2000)
        future_time = timeutils.utcnow() + expire_delta
        token_data_2['expires_at'] = future_time
        self._assertTokenNotRevoked(token_data_2)
        self.removeEvent(event)
        self._assertTokenNotRevoked(token_data_1)

    def removeEvent(self, event):
        # Keep the brute-force mirror and the tree in sync.
        self.events.remove(event)
        self.tree.remove_event(event)

    def test_by_project_grant(self):
        token_to_revoke = self.token_to_revoke
        tokens = self.project_tokens

        self._assertTokenNotRevoked(token_to_revoke)
        for token in tokens:
            self._assertTokenNotRevoked(token)

        # Grant revocation hits only the token with matching user,
        # project AND role.
        event = self._revoke_by_grant(role_id=self.role_ids[0],
                                      user_id=self.user_ids[0],
                                      project_id=self.project_ids[0])

        self._assertTokenRevoked(token_to_revoke)
        for token in tokens:
            self._assertTokenNotRevoked(token)

        self.removeEvent(event)

        self._assertTokenNotRevoked(token_to_revoke)
        for token in tokens:
            self._assertTokenNotRevoked(token)

        # A multi-role token is revoked if ANY of its roles is revoked.
        token_to_revoke['roles'] = [self.role_ids[0],
                                    self.role_ids[1],
                                    self.role_ids[2]]

        event = self._revoke_by_grant(role_id=self.role_ids[0],
                                      user_id=self.user_ids[0],
                                      project_id=self.project_ids[0])
        self._assertTokenRevoked(token_to_revoke)
        self.removeEvent(event)
        self._assertTokenNotRevoked(token_to_revoke)

        event = self._revoke_by_grant(role_id=self.role_ids[1],
                                      user_id=self.user_ids[0],
                                      project_id=self.project_ids[0])
        self._assertTokenRevoked(token_to_revoke)
        self.removeEvent(event)
        self._assertTokenNotRevoked(token_to_revoke)

        # Several simultaneous grant revocations also match.
        self._revoke_by_grant(role_id=self.role_ids[0],
                              user_id=self.user_ids[0],
                              project_id=self.project_ids[0])
        self._revoke_by_grant(role_id=self.role_ids[1],
                              user_id=self.user_ids[0],
                              project_id=self.project_ids[0])
        self._revoke_by_grant(role_id=self.role_ids[2],
                              user_id=self.user_ids[0],
                              project_id=self.project_ids[0])
        self._assertTokenRevoked(token_to_revoke)

    def test_by_project_and_user_and_role(self):
        user_id1 = _new_id()
        user_id2 = _new_id()
        project_id = _new_id()
        self.events.append(self._revoke_by_user(user_id1))
        self.events.append(
            self._revoke_by_user_and_project(user_id2, project_id))
        token_data = _sample_blank_token()
        token_data['user_id'] = user_id2
        token_data['project_id'] = project_id
        self._assertTokenRevoked(token_data)

    def _assertEmpty(self, collection):
        return self.assertEqual(0, len(collection), "collection not empty")

    def _assertEventsMatchIteration(self, turn):
        """Verify the revoke_map's nested-dict sizes after *turn* rounds
        of test_cleanup's insertions."""
        self.assertEqual(1, len(self.tree.revoke_map))
        self.assertEqual(turn + 1, len(self.tree.revoke_map
                                       ['trust_id=*']
                                       ['consumer_id=*']
                                       ['access_token_id=*']))
        # two different functions add domain_ids, +1 for None
        self.assertEqual(2 * turn + 1, len(self.tree.revoke_map
                                           ['trust_id=*']
                                           ['consumer_id=*']
                                           ['access_token_id=*']
                                           ['expires_at=*']))
        # two different functions add project_ids, +1 for None
        self.assertEqual(2 * turn + 1, len(self.tree.revoke_map
                                           ['trust_id=*']
                                           ['consumer_id=*']
                                           ['access_token_id=*']
                                           ['expires_at=*']
                                           ['domain_id=*']))
        # 10 users added
        self.assertEqual(turn, len(self.tree.revoke_map
                                   ['trust_id=*']
                                   ['consumer_id=*']
                                   ['access_token_id=*']
                                   ['expires_at=*']
                                   ['domain_id=*']
                                   ['project_id=*']))

    def test_cleanup(self):
        # Add a batch of heterogeneous events each round, checking the
        # map shape, then verify removal empties the map completely.
        events = self.events
        self._assertEmpty(self.tree.revoke_map)
        for i in range(0, 10):
            events.append(
                self._revoke_by_user(_new_id()))

            events.append(
                self._revoke_by_expiration(_new_id(), _future_time()))
            events.append(
                self._revoke_by_project_role_assignment(_new_id(), _new_id()))
            events.append(
                self._revoke_by_domain_role_assignment(_new_id(), _new_id()))
            events.append(
                self._revoke_by_domain_role_assignment(_new_id(), _new_id()))
            events.append(
                self._revoke_by_user_and_project(_new_id(), _new_id()))
            self._assertEventsMatchIteration(i + 1)

        for event in self.events:
            self.tree.remove_event(event)
        self._assertEmpty(self.tree.revoke_map)
| |
from __future__ import unicode_literals
from six import b
import unittest
import mock
from bs4 import BeautifulSoup
import os.path
import django
from django.test import TestCase
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group, Permission
from django.core.urlresolvers import reverse
from django.core.files.base import ContentFile
from django.test.utils import override_settings
from django.conf import settings
from wagtail.tests.utils import WagtailTestUtils
from wagtail.wagtailcore.models import Page
from wagtail.tests.testapp.models import EventPage, EventPageRelatedLink
from wagtail.wagtaildocs.models import Document
from wagtail.wagtaildocs import models
from wagtail.wagtaildocs.rich_text import DocumentLinkHandler
class TestDocumentPermissions(TestCase):
    """Document.is_editable_by_user across the permission matrix:
    superuser, group editor, uploading owner, and an unrelated user."""

    def setUp(self):
        # Create some user accounts for testing permissions
        User = get_user_model()
        self.user = User.objects.create_user(username='user', email='user@email.com', password='password')
        self.owner = User.objects.create_user(username='owner', email='owner@email.com', password='password')
        self.editor = User.objects.create_user(username='editor', email='editor@email.com', password='password')
        self.editor.groups.add(Group.objects.get(name='Editors'))
        self.administrator = User.objects.create_superuser(username='administrator', email='administrator@email.com', password='password')

        # Owner user must have the add_document permission
        self.owner.user_permissions.add(Permission.objects.get(codename='add_document'))

        # Create a document for running tests on
        self.document = models.Document.objects.create(title="Test document", uploaded_by_user=self.owner)

    def test_administrator_can_edit(self):
        self.assertTrue(self.document.is_editable_by_user(self.administrator))

    def test_editor_can_edit(self):
        self.assertTrue(self.document.is_editable_by_user(self.editor))

    def test_owner_can_edit(self):
        # The uploader may edit their own document.
        self.assertTrue(self.document.is_editable_by_user(self.owner))

    def test_user_cant_edit(self):
        # Not an editor, not the uploader: no edit rights.
        self.assertFalse(self.document.is_editable_by_user(self.user))
# ===== ADMIN VIEWS =====
class TestDocumentIndexView(TestCase, WagtailTestUtils):
    """Exercise the document index listing: search, pagination, ordering."""

    def setUp(self):
        self.login()

    def test_simple(self):
        response = self.client.get(reverse('wagtaildocs_index'))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'wagtaildocs/documents/index.html')

    def test_search(self):
        response = self.client.get(reverse('wagtaildocs_index'), {'q': "Hello"})
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['query_string'], "Hello")

    def make_docs(self):
        # Enough documents to spread across several pages
        for n in range(50):
            models.Document.objects.create(title="Test " + str(n))

    def test_pagination(self):
        self.make_docs()
        response = self.client.get(reverse('wagtaildocs_index'), {'p': 2})
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'wagtaildocs/documents/index.html')
        # The page requested is the page served
        self.assertEqual(response.context['documents'].number, 2)

    def test_pagination_invalid(self):
        self.make_docs()
        response = self.client.get(
            reverse('wagtaildocs_index'), {'p': 'Hello World!'})
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'wagtaildocs/documents/index.html')
        # Junk page numbers fall back to the first page
        self.assertEqual(response.context['documents'].number, 1)

    def test_pagination_out_of_range(self):
        self.make_docs()
        response = self.client.get(reverse('wagtaildocs_index'), {'p': 99999})
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'wagtaildocs/documents/index.html')
        # Out-of-range requests are clamped to the final page
        docs = response.context['documents']
        self.assertEqual(docs.number, docs.paginator.num_pages)

    def test_ordering(self):
        for ordering in ('title', '-created_at'):
            response = self.client.get(
                reverse('wagtaildocs_index'), {'ordering': ordering})
            self.assertEqual(response.status_code, 200)
class TestDocumentAddView(TestCase, WagtailTestUtils):
    """Tests for the 'add document' admin view."""

    def setUp(self):
        self.login()

    def test_simple(self):
        response = self.client.get(reverse('wagtaildocs_add_document'))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'wagtaildocs/documents/add.html')

    def test_post(self):
        # An in-memory stand-in for an uploaded file
        upload = ContentFile(b("A boring example document"))
        upload.name = 'test.txt'

        response = self.client.post(
            reverse('wagtaildocs_add_document'),
            {'title': "Test document", 'file': upload})

        # Success redirects to the listing and persists the document
        self.assertRedirects(response, reverse('wagtaildocs_index'))
        self.assertTrue(
            models.Document.objects.filter(title="Test document").exists())
class TestDocumentEditView(TestCase, WagtailTestUtils):
    """Tests for the 'edit document' admin view."""

    def setUp(self):
        self.login()
        upload = ContentFile(b("A boring example document"))
        upload.name = 'test.txt'
        # Document that the tests below will edit
        self.document = models.Document.objects.create(
            title="Test document", file=upload)

    def test_simple(self):
        response = self.client.get(
            reverse('wagtaildocs_edit_document', args=(self.document.id,)))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'wagtaildocs/documents/edit.html')

    def test_post(self):
        replacement = ContentFile(b("A boring example document"))
        replacement.name = 'test.txt'

        response = self.client.post(
            reverse('wagtaildocs_edit_document', args=(self.document.id,)),
            {'title': "Test document changed!", 'file': replacement})

        # Redirect back to the index, and the rename must have stuck
        self.assertRedirects(response, reverse('wagtaildocs_index'))
        self.assertEqual(
            models.Document.objects.get(id=self.document.id).title,
            "Test document changed!")

    def test_with_missing_source_file(self):
        orphan_file = ContentFile(b("An ephemeral document"))
        orphan_file.name = 'to-be-deleted.txt'

        # Create a document, then delete its backing file from storage
        document = models.Document.objects.create(
            title="Test missing source document", file=orphan_file)
        document.file.delete(False)

        # The edit view should still render, flagging the missing file
        response = self.client.get(
            reverse('wagtaildocs_edit_document', args=(document.id,)), {})
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'wagtaildocs/documents/edit.html')
        self.assertContains(response, 'File not found')
class TestDocumentDeleteView(TestCase, WagtailTestUtils):
    """Tests for the 'delete document' admin view."""

    def setUp(self):
        self.login()
        self.document = models.Document.objects.create(title="Test document")

    def test_simple(self):
        response = self.client.get(
            reverse('wagtaildocs_delete_document', args=(self.document.id,)))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(
            response, 'wagtaildocs/documents/confirm_delete.html')

    def test_delete(self):
        # The POST body content is irrelevant; any POST confirms deletion
        response = self.client.post(
            reverse('wagtaildocs_delete_document', args=(self.document.id,)),
            {'foo': 'bar'})

        # Redirect to the index, and the document must be gone
        self.assertRedirects(response, reverse('wagtaildocs_index'))
        self.assertFalse(
            models.Document.objects.filter(id=self.document.id).exists())
class TestDocumentChooserView(TestCase, WagtailTestUtils):
    """Exercise the document chooser modal: listing, search, pagination."""

    def setUp(self):
        self.login()

    def test_simple(self):
        response = self.client.get(reverse('wagtaildocs_chooser'))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'wagtaildocs/chooser/chooser.html')
        self.assertTemplateUsed(response, 'wagtaildocs/chooser/chooser.js')

    def test_search(self):
        response = self.client.get(reverse('wagtaildocs_chooser'), {'q': "Hello"})
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['query_string'], "Hello")

    def make_docs(self):
        # Enough documents to spread across several pages
        for n in range(50):
            models.Document.objects.create(title="Test " + str(n))

    def test_pagination(self):
        self.make_docs()
        response = self.client.get(reverse('wagtaildocs_chooser'), {'p': 2})
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'wagtaildocs/documents/list.html')
        # The page requested is the page served
        self.assertEqual(response.context['documents'].number, 2)

    def test_pagination_invalid(self):
        self.make_docs()
        response = self.client.get(
            reverse('wagtaildocs_chooser'), {'p': 'Hello World!'})
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'wagtaildocs/documents/list.html')
        # Junk page numbers fall back to the first page
        self.assertEqual(response.context['documents'].number, 1)

    def test_pagination_out_of_range(self):
        self.make_docs()
        response = self.client.get(reverse('wagtaildocs_chooser'), {'p': 99999})
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'wagtaildocs/documents/list.html')
        # Out-of-range requests are clamped to the final page
        docs = response.context['documents']
        self.assertEqual(docs.number, docs.paginator.num_pages)
class TestDocumentChooserChosenView(TestCase, WagtailTestUtils):
    """Tests for the chooser's 'document chosen' response."""

    def setUp(self):
        self.login()
        # A document for the chooser to pick
        self.document = models.Document.objects.create(title="Test document")

    def test_simple(self):
        response = self.client.get(
            reverse('wagtaildocs_document_chosen', args=(self.document.id,)))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(
            response, 'wagtaildocs/chooser/document_chosen.js')
class TestDocumentChooserUploadView(TestCase, WagtailTestUtils):
    """Tests for uploading a document from within the chooser modal."""

    def setUp(self):
        self.login()

    def test_simple(self):
        response = self.client.get(reverse('wagtaildocs_chooser_upload'))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'wagtaildocs/chooser/chooser.html')
        self.assertTemplateUsed(response, 'wagtaildocs/chooser/chooser.js')

    def test_post(self):
        upload = ContentFile(b("A boring example document"))
        upload.name = 'test.txt'

        response = self.client.post(
            reverse('wagtaildocs_chooser_upload'),
            {'title': "Test document", 'file': upload})

        # The view answers with the JS telling the modal a document was
        # chosen, and the document itself is persisted
        self.assertTemplateUsed(
            response, 'wagtaildocs/chooser/document_chosen.js')
        self.assertContains(response, "modal.respond('documentChosen'")
        self.assertTrue(
            models.Document.objects.filter(title="Test document").exists())
class TestDocumentFilenameProperties(TestCase):
    """Check the filename/file_extension helpers, with and without suffix."""

    def setUp(self):
        self.document = models.Document(title="Test document")
        self.document.file.save(
            'example.doc', ContentFile("A boring example document"))

        self.extensionless_document = models.Document(title="Test document")
        self.extensionless_document.file.save(
            'example', ContentFile("A boring example document"))

    def tearDown(self):
        # Remove the files written to storage by setUp
        self.document.delete()
        self.extensionless_document.delete()

    def test_filename(self):
        self.assertEqual('example.doc', self.document.filename)
        self.assertEqual('example', self.extensionless_document.filename)

    def test_file_extension(self):
        self.assertEqual('doc', self.document.file_extension)
        self.assertEqual('', self.extensionless_document.file_extension)
class TestUsageCount(TestCase, WagtailTestUtils):
    """Tests for document usage counts and their display in the editor.

    Usage tracking is gated on the WAGTAIL_USAGE_COUNT_ENABLED setting,
    so most tests override it to True.
    """

    fixtures = ['test.json']

    def setUp(self):
        self.login()

    def _link_document_to_event_page(self):
        """Attach fixture document 1 to event page 4 via a related link,
        giving the document exactly one usage. Returns the document.

        Extracted helper: this block was previously copy-pasted into
        four test methods.
        """
        doc = Document.objects.get(id=1)
        page = EventPage.objects.get(id=4)
        event_page_related_link = EventPageRelatedLink()
        event_page_related_link.page = page
        event_page_related_link.link_document = doc
        event_page_related_link.save()
        return doc

    @override_settings(WAGTAIL_USAGE_COUNT_ENABLED=True)
    def test_unused_document_usage_count(self):
        doc = Document.objects.get(id=1)
        self.assertEqual(doc.get_usage().count(), 0)

    @override_settings(WAGTAIL_USAGE_COUNT_ENABLED=True)
    def test_used_document_usage_count(self):
        doc = self._link_document_to_event_page()
        self.assertEqual(doc.get_usage().count(), 1)

    def test_usage_count_does_not_appear(self):
        # Setting not enabled: the count must not be rendered
        self._link_document_to_event_page()
        response = self.client.get(reverse('wagtaildocs_edit_document',
                                           args=(1,)))
        self.assertNotContains(response, 'Used 1 time')

    @override_settings(WAGTAIL_USAGE_COUNT_ENABLED=True)
    def test_usage_count_appears(self):
        self._link_document_to_event_page()
        response = self.client.get(reverse('wagtaildocs_edit_document',
                                           args=(1,)))
        self.assertContains(response, 'Used 1 time')

    @override_settings(WAGTAIL_USAGE_COUNT_ENABLED=True)
    def test_usage_count_zero_appears(self):
        response = self.client.get(reverse('wagtaildocs_edit_document',
                                           args=(1,)))
        self.assertContains(response, 'Used 0 times')
class TestGetUsage(TestCase, WagtailTestUtils):
    """Tests for Document.get_usage() and the usage report page."""

    fixtures = ['test.json']

    def setUp(self):
        self.login()

    def _link_document_to_event_page(self):
        """Attach fixture document 1 to event page 4 via a related link,
        giving the document exactly one usage. Returns the document.

        Extracted helper: this block was previously copy-pasted into
        three test methods.
        """
        doc = Document.objects.get(id=1)
        page = EventPage.objects.get(id=4)
        event_page_related_link = EventPageRelatedLink()
        event_page_related_link.page = page
        event_page_related_link.link_document = doc
        event_page_related_link.save()
        return doc

    def test_document_get_usage_not_enabled(self):
        # With usage counting disabled, get_usage() is always empty
        doc = Document.objects.get(id=1)
        self.assertEqual(list(doc.get_usage()), [])

    @override_settings(WAGTAIL_USAGE_COUNT_ENABLED=True)
    def test_unused_document_get_usage(self):
        doc = Document.objects.get(id=1)
        self.assertEqual(list(doc.get_usage()), [])

    @override_settings(WAGTAIL_USAGE_COUNT_ENABLED=True)
    def test_used_document_get_usage(self):
        doc = self._link_document_to_event_page()
        self.assertTrue(issubclass(Page, type(doc.get_usage()[0])))

    @override_settings(WAGTAIL_USAGE_COUNT_ENABLED=True)
    def test_usage_page(self):
        self._link_document_to_event_page()
        response = self.client.get(reverse('wagtaildocs_document_usage',
                                           args=(1,)))
        self.assertContains(response, 'Christmas')

    @override_settings(WAGTAIL_USAGE_COUNT_ENABLED=True)
    def test_usage_page_no_usage(self):
        response = self.client.get(reverse('wagtaildocs_document_usage',
                                           args=(1,)))
        # There's no usage so there should be no table rows.
        # Raw bytes literal: '\s' inside a non-raw literal is an invalid
        # escape sequence (DeprecationWarning today, a SyntaxError in
        # future Python versions); br'...' yields the intended pattern.
        self.assertRegex(response.content, br'<tbody>(\s|\n)*</tbody>')
class TestIssue613(TestCase, WagtailTestUtils):
    # Regression tests for wagtail issue #613: document tags submitted via
    # the add/edit forms must end up indexed in the search backend.

    def get_elasticsearch_backend(self):
        """Return the configured Elasticsearch search backend.

        Raises unittest.SkipTest when no entry in WAGTAILSEARCH_BACKENDS
        uses the Elasticsearch backend path, so the whole class is skipped
        in environments without Elasticsearch configured.
        """
        from django.conf import settings
        from wagtail.wagtailsearch.backends import get_search_backend

        backend_path = 'wagtail.wagtailsearch.backends.elasticsearch'

        # Search WAGTAILSEARCH_BACKENDS for an entry that uses the given
        # backend path
        for backend_name, backend_conf in settings.WAGTAILSEARCH_BACKENDS.items():
            if backend_conf['BACKEND'] == backend_path:
                return get_search_backend(backend_name)
        else:
            # no conf entry found - skip tests for this backend
            # (the for-else fires because the loop exits via return on a hit)
            raise unittest.SkipTest("No WAGTAILSEARCH_BACKENDS entry for the backend %s" % backend_path)

    def setUp(self):
        self.search_backend = self.get_elasticsearch_backend()
        self.login()

    def add_document(self, **params):
        """Create a document through the add view and return the model.

        Extra keyword arguments (e.g. tags="hello") are merged into the
        POSTed form data before submission.
        """
        # Build a fake file
        fake_file = ContentFile(b("A boring example document"))
        fake_file.name = 'test.txt'

        # Submit
        post_data = {
            'title': "Test document",
            'file': fake_file,
        }
        post_data.update(params)
        response = self.client.post(reverse('wagtaildocs_add_document'), post_data)

        # User should be redirected back to the index
        self.assertRedirects(response, reverse('wagtaildocs_index'))

        # Document should be created
        doc = models.Document.objects.filter(title=post_data['title'])
        self.assertTrue(doc.exists())
        return doc.first()

    def edit_document(self, **params):
        """Create a document, then edit it through the edit view.

        Extra keyword arguments are merged into the POSTed form data;
        returns the updated Document instance.
        """
        # Build a fake file
        fake_file = ContentFile(b("A boring example document"))
        fake_file.name = 'test.txt'

        # Create a document without tags to edit
        document = models.Document.objects.create(title="Test document", file=fake_file)

        # Build another fake file
        another_fake_file = ContentFile(b("A boring example document"))
        another_fake_file.name = 'test.txt'

        # Submit
        post_data = {
            'title': "Test document changed!",
            'file': another_fake_file,
        }
        post_data.update(params)
        response = self.client.post(reverse('wagtaildocs_edit_document', args=(document.id,)), post_data)

        # User should be redirected back to the index
        self.assertRedirects(response, reverse('wagtaildocs_index'))

        # Document should be changed
        doc = models.Document.objects.filter(title=post_data['title'])
        self.assertTrue(doc.exists())
        return doc.first()

    def test_issue_613_on_add(self):
        # Reset the search index
        self.search_backend.reset_index()
        self.search_backend.add_type(Document)

        # Add a document with some tags
        document = self.add_document(tags="hello")
        self.search_backend.refresh_index()

        # Search for it by tag
        results = self.search_backend.search("hello", Document)

        # Check: the tag submitted on the add form was indexed
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0].id, document.id)

    def test_issue_613_on_edit(self):
        # Reset the search index
        self.search_backend.reset_index()
        self.search_backend.add_type(Document)

        # Add a document with some tags
        document = self.edit_document(tags="hello")
        self.search_backend.refresh_index()

        # Search for it by tag
        results = self.search_backend.search("hello", Document)

        # Check: the tag submitted on the edit form was indexed
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0].id, document.id)
class TestServeView(TestCase):
    """Tests for the document serve view's headers, body and signal."""

    def setUp(self):
        self.document = models.Document(title="Test document")
        self.document.file.save(
            'example.doc', ContentFile("A boring example document"))

    def get(self):
        # Fetch the test document through the serve view
        return self.client.get(
            reverse('wagtaildocs_serve',
                    args=(self.document.id, 'example.doc')))

    def test_response_code(self):
        self.assertEqual(self.get().status_code, 200)

    @unittest.expectedFailure  # Filename has a random string appended to it
    def test_content_disposition_header(self):
        self.assertEqual(
            self.get()['Content-Disposition'],
            'attachment; filename=example.doc')

    def test_content_length_header(self):
        self.assertEqual(self.get()['Content-Length'], '25')

    def test_content_type_header(self):
        self.assertEqual(self.get()['Content-Type'], 'application/msword')

    def test_is_streaming_response(self):
        self.assertTrue(self.get().streaming)

    def test_content(self):
        body = b"".join(self.get().streaming_content)
        self.assertEqual(body, b"A boring example document")

    def test_document_served_fired(self):
        # Serving the document must fire document_served exactly once,
        # with the Document class as sender and the instance attached
        handler = mock.MagicMock()
        models.document_served.connect(handler)
        self.get()
        self.assertEqual(handler.call_count, 1)
        self.assertEqual(handler.mock_calls[0][2]['sender'], models.Document)
        self.assertEqual(handler.mock_calls[0][2]['instance'], self.document)

    def test_with_nonexistent_document(self):
        response = self.client.get(
            reverse('wagtaildocs_serve', args=(1000, 'blahblahblah', )))
        self.assertEqual(response.status_code, 404)

    @unittest.expectedFailure
    def test_with_incorrect_filename(self):
        # A filename that does not match the document should 404
        response = self.client.get(
            reverse('wagtaildocs_serve',
                    args=(self.document.id, 'incorrectfilename')))
        self.assertEqual(response.status_code, 404)

    def clear_sendfile_cache(self):
        from wagtail.utils.sendfile import _get_sendfile
        _get_sendfile.clear()
class TestServeViewWithSendfile(TestCase):
    """Serve-view tests covering the various django-sendfile backends."""

    def setUp(self):
        # Skip the whole class when django-sendfile is not installed
        try:
            import sendfile  # noqa
        except ImportError:
            raise unittest.SkipTest("django-sendfile not installed")

        self.document = models.Document(title="Test document")
        self.document.file.save(
            'example.doc', ContentFile("A boring example document"))

    def get(self):
        return self.client.get(
            reverse('wagtaildocs_serve',
                    args=(self.document.id, 'example.doc')))

    def clear_sendfile_cache(self):
        # django-sendfile caches its backend; drop it so the
        # override_settings on each test takes effect
        from wagtail.utils.sendfile import _get_sendfile
        _get_sendfile.clear()

    @override_settings(SENDFILE_BACKEND='sendfile.backends.xsendfile')
    def test_sendfile_xsendfile_backend(self):
        self.clear_sendfile_cache()
        response = self.get()
        self.assertEqual(response.status_code, 200)
        self.assertEqual(
            response['X-Sendfile'],
            os.path.join(settings.MEDIA_ROOT, self.document.file.name))

    @unittest.skipIf(django.VERSION >= (1, 8), "Fails on Django 1.8")  # Under Django 1.8. It adds "http://" to beginning of Location when it shouldn't
    @override_settings(SENDFILE_BACKEND='sendfile.backends.mod_wsgi',
                       SENDFILE_ROOT=settings.MEDIA_ROOT,
                       SENDFILE_URL=settings.MEDIA_URL[:-1])
    def test_sendfile_mod_wsgi_backend(self):
        self.clear_sendfile_cache()
        response = self.get()
        self.assertEqual(response.status_code, 200)
        self.assertEqual(
            response['Location'],
            os.path.join(settings.MEDIA_URL, self.document.file.name))

    @override_settings(SENDFILE_BACKEND='sendfile.backends.nginx',
                       SENDFILE_ROOT=settings.MEDIA_ROOT,
                       SENDFILE_URL=settings.MEDIA_URL[:-1])
    def test_sendfile_nginx_backend(self):
        self.clear_sendfile_cache()
        response = self.get()
        self.assertEqual(response.status_code, 200)
        self.assertEqual(
            response['X-Accel-Redirect'],
            os.path.join(settings.MEDIA_URL, self.document.file.name))
class TestServeWithUnicodeFilename(TestCase):
    """Serving a document whose filename contains non-ASCII characters."""

    def setUp(self):
        self.document = models.Document(title="Test document")

        # Setting this filename in the content-disposition header fails on
        # Django <1.8, Python 2 due to
        # https://code.djangoproject.com/ticket/20889
        self.filename = (
            'docs\u0627\u0644\u0643\u0627\u062a\u062f\u0631\u0627\u064a'
            '\u064a\u0629_\u0648\u0627\u0644\u0633\u0648\u0642')
        try:
            self.document.file.save(
                self.filename, ContentFile("A boring example document"))
        except UnicodeEncodeError:
            raise unittest.SkipTest(
                "Filesystem doesn't support unicode filenames")

    def test_response_code(self):
        response = self.client.get(
            reverse('wagtaildocs_serve',
                    args=(self.document.id, self.filename)))
        self.assertEqual(response.status_code, 200)
class TestDocumentRichTextLinkHandler(TestCase):
    """Tests for DocumentLinkHandler's db <-> frontend attribute mapping."""

    fixtures = ['test.json']

    def test_get_db_attributes(self):
        # Name the parser explicitly: without it BeautifulSoup emits a
        # warning and picks whichever parser happens to be installed,
        # which can make the test environment-dependent.
        soup = BeautifulSoup(
            '<a data-id="test-id">foo</a>',
            'html.parser'
        )
        tag = soup.a
        result = DocumentLinkHandler.get_db_attributes(tag)
        self.assertEqual(result,
                         {'id': 'test-id'})

    def test_expand_db_attributes_document_does_not_exist(self):
        # An unknown id degrades to a bare anchor rather than raising
        result = DocumentLinkHandler.expand_db_attributes(
            {'id': 0},
            False
        )
        self.assertEqual(result, '<a>')

    def test_expand_db_attributes_for_editor(self):
        # for_editor=True adds the data-* attributes the editor needs
        result = DocumentLinkHandler.expand_db_attributes(
            {'id': 1},
            True
        )
        self.assertEqual(
            result,
            '<a data-linktype="document" data-id="1" href="/documents/1/test.pdf">')

    def test_expand_db_attributes_not_for_editor(self):
        result = DocumentLinkHandler.expand_db_attributes(
            {'id': 1},
            False
        )
        self.assertEqual(result,
                         '<a href="/documents/1/test.pdf">')
| |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2012, Nachi Ueno, NTT MCL, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import socket
import sys
import uuid
import mock
import unittest2 as unittest
from quantum.agent.common import config
from quantum.agent.linux import interface
from quantum.agent.linux import utils
from quantum.common import exceptions
from quantum.debug import commands
from quantum.debug.debug_agent import DEVICE_OWNER_PROBE, QuantumDebugAgent
from quantum.openstack.common import cfg
class MyApp(object):
    """Minimal stand-in for a cliff application.

    Only exposes the stdout stream that commands write their output to.
    """

    def __init__(self, _stdout):
        # Commands write user-facing output to app.stdout
        self.stdout = _stdout
class TestDebugCommands(unittest.TestCase):
def setUp(self):
cfg.CONF.register_opts(interface.OPTS)
cfg.CONF.register_opts(QuantumDebugAgent.OPTS)
cfg.CONF(args=['quantum-debug'], project='quantum')
cfg.CONF.set_override('use_namespaces', True)
cfg.CONF.root_helper = 'sudo'
self.addCleanup(mock.patch.stopall)
device_exists_p = mock.patch(
'quantum.agent.linux.ip_lib.device_exists', return_value=False)
device_exists_p.start()
namespace_p = mock.patch(
'quantum.agent.linux.ip_lib.IpNetnsCommand')
namespace_p.start()
ensure_namespace_p = mock.patch(
'quantum.agent.linux.ip_lib.IPWrapper.ensure_namespace')
ensure_namespace_p.start()
dvr_cls_p = mock.patch('quantum.agent.linux.interface.NullDriver')
driver_cls = dvr_cls_p.start()
mock_driver = mock.MagicMock()
mock_driver.DEV_NAME_LEN = (
interface.LinuxInterfaceDriver.DEV_NAME_LEN)
mock_driver.get_device_name.return_value = 'tap12345678-12'
driver_cls.return_value = mock_driver
self.driver = mock_driver
client_cls_p = mock.patch('quantumclient.v2_0.client.Client')
client_cls = client_cls_p.start()
client_inst = mock.Mock()
client_cls.return_value = client_inst
fake_network = {'network': {'id': 'fake_net',
'tenant_id': 'fake_tenant',
'subnets': ['fake_subnet']}}
fake_port = {'port':
{'id': 'fake_port',
'device_owner': 'fake_device',
'mac_address': 'aa:bb:cc:dd:ee:ffa',
'network_id': 'fake_net',
'fixed_ips':
[{'subnet_id': 'fake_subnet', 'ip_address':'10.0.0.3'}]
}}
fake_ports = {'ports': [fake_port['port']]}
self.fake_ports = fake_ports
allocation_pools = [{'start': '10.0.0.2',
'end': '10.0.0.254'}]
fake_subnet_v4 = {'subnet': {'name': 'fake_subnet_v4',
'id': 'fake_subnet',
'network_id': 'fake_net',
'gateway_ip': '10.0.0.1',
'dns_nameservers': ['10.0.0.2'],
'host_routes': [],
'cidr': '10.0.0.0/24',
'allocation_pools': allocation_pools,
'enable_dhcp': True,
'ip_version': 4}}
client_inst.list_ports.return_value = fake_ports
client_inst.create_port.return_value = fake_port
client_inst.show_port.return_value = fake_port
client_inst.show_network.return_value = fake_network
client_inst.show_subnet.return_value = fake_subnet_v4
self.client = client_inst
mock_std = mock.Mock()
self.app = MyApp(mock_std)
self.app.debug_agent = QuantumDebugAgent(cfg.CONF,
client_inst,
mock_driver)
def test_create_probe(self):
cmd = commands.CreateProbe(self.app, None)
cmd_parser = cmd.get_parser('create_probe')
args = ['fake_net']
parsed_args = cmd_parser.parse_args(args)
cmd.run(parsed_args)
fake_port = {'port':
{'device_owner': DEVICE_OWNER_PROBE,
'admin_state_up': True,
'network_id': 'fake_net',
'tenant_id': 'fake_tenant',
'fixed_ips': [{'subnet_id': 'fake_subnet'}],
'device_id': socket.gethostname()}}
namespace = 'qprobe-fake_port'
self.client.assert_has_calls([mock.call.show_network('fake_net'),
mock.call.show_subnet('fake_subnet'),
mock.call.create_port(fake_port),
mock.call.show_subnet('fake_subnet')])
self.driver.assert_has_calls([mock.call.get_device_name(mock.ANY),
mock.call.plug('fake_net',
'fake_port',
'tap12345678-12',
'aa:bb:cc:dd:ee:ffa',
bridge=None,
namespace=namespace),
mock.call.init_l3('tap12345678-12',
['10.0.0.3/24'],
namespace=namespace
)])
def test_create_probe_external(self):
fake_network = {'network': {'id': 'fake_net',
'tenant_id': 'fake_tenant',
'router:external': True,
'subnets': ['fake_subnet']}}
self.client.show_network.return_value = fake_network
cmd = commands.CreateProbe(self.app, None)
cmd_parser = cmd.get_parser('create_probe')
args = ['fake_net']
parsed_args = cmd_parser.parse_args(args)
cmd.run(parsed_args)
fake_port = {'port':
{'device_owner': DEVICE_OWNER_PROBE,
'admin_state_up': True,
'network_id': 'fake_net',
'tenant_id': 'fake_tenant',
'fixed_ips': [{'subnet_id': 'fake_subnet'}],
'device_id': socket.gethostname()}}
namespace = 'qprobe-fake_port'
self.client.assert_has_calls([mock.call.show_network('fake_net'),
mock.call.show_subnet('fake_subnet'),
mock.call.create_port(fake_port),
mock.call.show_subnet('fake_subnet')])
self.driver.assert_has_calls([mock.call.get_device_name(mock.ANY),
mock.call.plug('fake_net',
'fake_port',
'tap12345678-12',
'aa:bb:cc:dd:ee:ffa',
bridge='br-ex',
namespace=namespace),
mock.call.init_l3('tap12345678-12',
['10.0.0.3/24'],
namespace=namespace
)])
def test_delete_probe(self):
cmd = commands.DeleteProbe(self.app, None)
cmd_parser = cmd.get_parser('delete_probe')
args = ['fake_port']
parsed_args = cmd_parser.parse_args(args)
cmd.run(parsed_args)
namespace = 'qprobe-fake_port'
self.client.assert_has_calls([mock.call.show_port('fake_port'),
mock.call.show_network('fake_net'),
mock.call.show_subnet('fake_subnet'),
mock.call.delete_port('fake_port')])
self.driver.assert_has_calls([mock.call.get_device_name(mock.ANY),
mock.call.unplug('tap12345678-12',
namespace=namespace,
bridge=None)])
def test_delete_probe_external(self):
fake_network = {'network': {'id': 'fake_net',
'tenant_id': 'fake_tenant',
'router:external': True,
'subnets': ['fake_subnet']}}
self.client.show_network.return_value = fake_network
cmd = commands.DeleteProbe(self.app, None)
cmd_parser = cmd.get_parser('delete_probe')
args = ['fake_port']
parsed_args = cmd_parser.parse_args(args)
cmd.run(parsed_args)
namespace = 'qprobe-fake_port'
self.client.assert_has_calls([mock.call.show_port('fake_port'),
mock.call.show_network('fake_net'),
mock.call.show_subnet('fake_subnet'),
mock.call.delete_port('fake_port')])
self.driver.assert_has_calls([mock.call.get_device_name(mock.ANY),
mock.call.unplug('tap12345678-12',
namespace=namespace,
bridge='br-ex')])
def test_delete_probe_without_namespace(self):
cfg.CONF.set_override('use_namespaces', False)
cmd = commands.DeleteProbe(self.app, None)
cmd_parser = cmd.get_parser('delete_probe')
args = ['fake_port']
parsed_args = cmd_parser.parse_args(args)
cmd.run(parsed_args)
self.client.assert_has_calls([mock.call.show_port('fake_port'),
mock.call.show_network('fake_net'),
mock.call.show_subnet('fake_subnet'),
mock.call.delete_port('fake_port')])
self.driver.assert_has_calls([mock.call.get_device_name(mock.ANY),
mock.call.unplug('tap12345678-12',
bridge=None)])
def test_list_probe(self):
cmd = commands.ListProbe(self.app, None)
cmd_parser = cmd.get_parser('list_probe')
args = []
parsed_args = cmd_parser.parse_args(args)
cmd.run(parsed_args)
self.client.assert_has_calls(
[mock.call.list_ports(device_owner=DEVICE_OWNER_PROBE)])
def test_exec_command(self):
cmd = commands.ExecProbe(self.app, None)
cmd_parser = cmd.get_parser('exec_command')
args = ['fake_port', 'fake_command']
parsed_args = cmd_parser.parse_args(args)
with mock.patch('quantum.agent.linux.ip_lib.IpNetnsCommand') as ns:
cmd.run(parsed_args)
ns.assert_has_calls([mock.call.execute(mock.ANY)])
self.client.assert_has_calls([mock.call.show_port('fake_port')])
def test_exec_command_without_namespace(self):
cfg.CONF.set_override('use_namespaces', False)
cmd = commands.ExecProbe(self.app, None)
cmd_parser = cmd.get_parser('exec_command')
args = ['fake_port', 'fake_command']
parsed_args = cmd_parser.parse_args(args)
with mock.patch('quantum.agent.linux.utils.execute') as exe:
cmd.run(parsed_args)
exe.assert_has_calls([mock.call.execute(mock.ANY)])
self.client.assert_has_calls([mock.call.show_port('fake_port')])
def test_clear_probe(self):
cmd = commands.ClearProbe(self.app, None)
cmd_parser = cmd.get_parser('clear_probe')
args = []
parsed_args = cmd_parser.parse_args(args)
cmd.run(parsed_args)
namespace = 'qprobe-fake_port'
self.client.assert_has_calls([mock.call.list_ports(
device_id=socket.gethostname(),
device_owner=DEVICE_OWNER_PROBE),
mock.call.show_port('fake_port'),
mock.call.show_network('fake_net'),
mock.call.show_subnet('fake_subnet'),
mock.call.delete_port('fake_port')])
self.driver.assert_has_calls([mock.call.get_device_name(mock.ANY),
mock.call.unplug('tap12345678-12',
namespace=namespace,
bridge=None)])
def test_ping_all_with_ensure_port(self):
fake_ports = self.fake_ports
def fake_port_list(network_id=None, device_owner=None, device_id=None):
if network_id:
# In order to test ensure_port, return []
return {'ports': []}
return fake_ports
self.client.list_ports.side_effect = fake_port_list
cmd = commands.PingAll(self.app, None)
cmd_parser = cmd.get_parser('ping_all')
args = []
parsed_args = cmd_parser.parse_args(args)
namespace = 'qprobe-fake_port'
with mock.patch('quantum.agent.linux.ip_lib.IpNetnsCommand') as ns:
cmd.run(parsed_args)
ns.assert_has_calls([mock.call.execute(mock.ANY)])
fake_port = {'port':
{'device_owner': DEVICE_OWNER_PROBE,
'admin_state_up': True,
'network_id': 'fake_net',
'tenant_id': 'fake_tenant',
'fixed_ips': [{'subnet_id': 'fake_subnet'}],
'device_id': socket.gethostname()}}
expected = [mock.call.show_network('fake_net'),
mock.call.show_subnet('fake_subnet'),
mock.call.create_port(fake_port),
mock.call.show_subnet('fake_subnet')]
self.client.assert_has_calls(expected)
self.driver.assert_has_calls([mock.call.init_l3('tap12345678-12',
['10.0.0.3/24'],
namespace=namespace
)])
def test_ping_all(self):
cmd = commands.PingAll(self.app, None)
cmd_parser = cmd.get_parser('ping_all')
args = []
parsed_args = cmd_parser.parse_args(args)
with mock.patch('quantum.agent.linux.ip_lib.IpNetnsCommand') as ns:
cmd.run(parsed_args)
ns.assert_has_calls([mock.call.execute(mock.ANY)])
fake_port = {'port':
{'device_owner': DEVICE_OWNER_PROBE,
'admin_state_up': True,
'network_id': 'fake_net',
'tenant_id': 'fake_tenant',
'fixed_ips': [{'subnet_id': 'fake_subnet'}],
'device_id': socket.gethostname()}}
expected = [mock.call.list_ports(),
mock.call.list_ports(network_id='fake_net',
device_owner=DEVICE_OWNER_PROBE,
device_id=socket.gethostname()),
mock.call.show_subnet('fake_subnet'),
mock.call.show_port('fake_port')]
self.client.assert_has_calls(expected)
def test_ping_all_v6(self):
    """PingAll on an IPv6 subnet still lists ports and runs the ping."""
    # Make every subnet lookup report an IPv6 subnet.
    self.client.show_subnet.return_value = {
        'subnet': {'name': 'fake_v6', 'ip_version': 6}}
    command = commands.PingAll(self.app, None)
    parsed = command.get_parser('ping_all').parse_args([])
    with mock.patch('quantum.agent.linux.ip_lib.IpNetnsCommand') as netns:
        command.run(parsed)
        netns.assert_has_calls([mock.call.execute(mock.ANY)])
    self.client.assert_has_calls([mock.call.list_ports()])
| |
#!/usr/bin/env python
from ConfigParser import ConfigParser
import sys
import unittest
from afs.tests.BaseTest import parse_commandline
from afs.service.VolumeService import VolumeService
from afs.model.Volume import Volume
from afs.model.Historic import historic_Volume
from afs.util.DBManager import DBManager
import afs
class EvaluateTestResults(unittest.TestCase):
    """Shared result assertions for the VolumeService test classes below."""

    def eval_get_volume_by_name(self, res):
        """Assert *res* is exactly one Volume matching the configured fixture."""
        self.assertEqual(len(res), 1)
        volume = res[0]
        self.assertTrue(isinstance(volume, Volume))
        self.assertEqual(volume.vid, self.VolID)
        self.assertEqual(volume.servername, self.FSName)
        self.assertEqual(volume.partition, self.Part)

    def eval_get_volume_group_by_name(self, res):
        """Assert a volume group of one RW Volume, two ROs and no BK."""
        self.assertTrue(isinstance(res["RW"], Volume))
        self.assertEqual(len(res["RO"]), 2)
        self.assertEqual(res["BK"], None)
class TestVolServiceMethods(EvaluateTestResults):
    """Test VolService setter- and live- Methods."""

    @classmethod
    def setUp(self):
        """Create the VolumeService and load fixture values from the config."""
        # NOTE(review): setUp is declared @classmethod with its first argument
        # named 'self' (it actually receives the class); attributes therefore
        # land on the class object. Preserved as-is.
        config = ConfigParser()
        config.read(afs.CONFIG.setup)
        self.test_config = config
        self.volMng = VolumeService()
        self.VolID = int(config.get("VolService", "VolID"))
        self.VolName = config.get("VolService", "VolName")
        self.minCopy = int(config.get("VolService", "minCopy"))
        self.Owner = config.get("VolService", "Owner")
        self.FS = config.get("VolService", "FS")
        self.FSName = config.get("VolService", "FSName")
        self.Part = config.get("VolService", "Part")

    def test_get_volume_by_name(self):
        """Live (uncached) volume lookup by name."""
        self.eval_get_volume_by_name(
            self.volMng.get_volume(self.VolName, cached=False))

    def test_get_volume_group_by_name(self):
        """Live (uncached) volume-group lookup by name."""
        self.eval_get_volume_group_by_name(
            self.volMng.get_volume_group(self.VolName, cached=False))
class TestVolServiceMethods_async(EvaluateTestResults) :
"""
Test VolService setter- and live- Methods
"""
@classmethod
def setUp(self):
"""
setup VolService
"""
self.test_config = ConfigParser()
self.test_config.read(afs.CONFIG.setup)
self.volMng = VolumeService()
self.VolID = int(self.test_config.get("VolService", "VolID"))
self.VolName = self.test_config.get("VolService", "VolName")
self.minCopy = int(self.test_config.get("VolService", "minCopy"))
self.Owner = self.test_config.get("VolService", "Owner")
self.FS = self.test_config.get("VolService", "FS")
self.FSName = self.test_config.get("VolService", "FSName")
self.Part = self.test_config.get("VolService", "Part")
return
def test_get_volume_by_name(self) :
task_ident = self.volMng.get_volume(self.VolName, cached=False, async=True)
self.volMng.wait_for_task(task_ident)
res = self.volMng.get_task_result(task_ident)
self.eval_get_volume_by_name(res)
return
def test_get_volume_group_by_name(self) :
task_ident = self.volMng.get_volume_group(self.VolName, cached=False, async=True)
self.volMng.wait_for_task(task_ident)
res = self.volMng.get_task_result(task_ident)
self.eval_get_volume_group_by_name(res)
return
class TestVolServiceMethods_cached(EvaluateTestResults):
    """Tests VolService getter Methods (served from the DB cache)."""

    @classmethod
    def setUp(self):
        """Create the VolumeService and load fixture values from the config."""
        # NOTE(review): @classmethod setUp with 'self' naming the class,
        # as in the sibling test classes; preserved as-is.
        config = ConfigParser()
        config.read(afs.CONFIG.setup)
        self.test_config = config
        self.volMng = VolumeService()
        self.VolID = int(config.get("VolService", "VolID"))
        self.VolName = config.get("VolService", "VolName")
        self.minCopy = int(config.get("VolService", "minCopy"))
        self.Owner = config.get("VolService", "Owner")
        self.FS = config.get("VolService", "FS")
        self.FSName = config.get("VolService", "FSName")
        self.Part = config.get("VolService", "Part")

    def test_get_volume_by_name(self):
        """Cached volume lookup by name."""
        self.eval_get_volume_by_name(
            self.volMng.get_volume(self.VolName, cached=True))

    def test_get_volume_group_by_name(self):
        """Cached volume-group lookup by name."""
        self.eval_get_volume_group_by_name(
            self.volMng.get_volume_group(self.VolName, cached=True))
class TestVolServiceMethods_cached_async(EvaluateTestResults):
    """Tests VolService getter Methods (cached, asynchronous calls)."""

    @classmethod
    def setUp(self):
        """Create the VolumeService and load fixture values from the config."""
        # NOTE(review): @classmethod setUp with 'self' naming the class,
        # as in the sibling test classes; preserved as-is.
        self.test_config = ConfigParser()
        self.test_config.read(afs.CONFIG.setup)
        self.volMng = VolumeService()
        self.VolID = int(self.test_config.get("VolService", "VolID"))
        self.VolName = self.test_config.get("VolService", "VolName")
        self.minCopy = int(self.test_config.get("VolService", "minCopy"))
        self.Owner = self.test_config.get("VolService", "Owner")
        self.FS = self.test_config.get("VolService", "FS")
        self.FSName = self.test_config.get("VolService", "FSName")
        self.Part = self.test_config.get("VolService", "Part")

    def test_get_volume_by_name(self):
        """Async cached get_volume: submit, wait, evaluate the result."""
        # Fix: 'async' is a reserved keyword from Python 3.7 on; pass it via
        # **kwargs — identical call semantics, but the module still parses.
        task_ident = self.volMng.get_volume(
            self.VolName, cached=True, **{'async': True})
        self.volMng.wait_for_task(task_ident)
        res = self.volMng.get_task_result(task_ident)
        self.eval_get_volume_by_name(res)

    def test_get_volume_group_by_name(self):
        """Async cached get_volume_group: submit, wait, evaluate the result."""
        task_ident = self.volMng.get_volume_group(
            self.VolName, cached=True, **{'async': True})
        self.volMng.wait_for_task(task_ident)
        res = self.volMng.get_task_result(task_ident)
        self.eval_get_volume_group_by_name(res)
class TestVolServiceMethods_historic(EvaluateTestResults):
    """Tests querying archived (historic) volume records."""

    @classmethod
    def setUp(self):
        """Create the VolumeService and load fixture values from the config."""
        # NOTE(review): @classmethod setUp with 'self' naming the class,
        # as in the sibling test classes; preserved as-is.
        config = ConfigParser()
        config.read(afs.CONFIG.setup)
        self.test_config = config
        self.volMng = VolumeService()
        self.VolID = int(config.get("VolService", "VolID"))
        self.VolName = config.get("VolService", "VolName")
        self.minCopy = int(config.get("VolService", "minCopy"))
        self.Owner = config.get("VolService", "Owner")
        self.FS = config.get("VolService", "FS")
        self.FSName = config.get("VolService", "FSName")
        self.Part = config.get("VolService", "Part")

    def test_get_archived_volumes_by_name(self):
        """Expect one archived RW record and two archived RO records."""
        rw_records = self.volMng.get_archived(historic_Volume, name=self.VolName)
        self.assertEqual(len(rw_records), 1)
        ro_records = self.volMng.get_archived(
            historic_Volume, name="%s.readonly" % self.VolName)
        self.assertEqual(len(ro_records), 2)
if __name__ == '__main__':
    def _run_suite(test_case):
        """Load *test_case* and run it with a verbose text runner."""
        suite = unittest.TestLoader().loadTestsFromTestCase(test_case)
        unittest.TextTestRunner(verbosity=2).run(suite)

    parse_commandline()
    sys.stderr.write("Vacuum history tables\n")
    sys.stderr.write("==============================\n")
    # Disable history retention so vacuum_history really removes old rows.
    afs.CONFIG.DB_HISTORY_NUM_PER_DAY = 0
    afs.CONFIG.DB_HISTORY_NUM_DAYS = 0
    DBMng = DBManager()
    DBMng.vacuum_history(historic_Volume, 0)
    sys.stderr.write("Testing live methods to fill DB_CACHE\n")
    sys.stderr.write("==============================\n")
    _run_suite(TestVolServiceMethods)
    sys.stderr.write("Testing methods accessing DB_CACHE\n")
    sys.stderr.write("================================\n")
    if afs.CONFIG.DB_CACHE:
        _run_suite(TestVolServiceMethods_cached)
    else:
        sys.stderr.write("Skipped, because DB_CACHE is disabled.\n")
    sys.stderr.write("Testing live methods in async mode\n")
    sys.stderr.write("==================================\n")
    _run_suite(TestVolServiceMethods_async)
    sys.stderr.write("Testing methods accessing DB_CACHE in async mode\n")
    sys.stderr.write("===============================================\n")
    if afs.CONFIG.DB_CACHE:
        _run_suite(TestVolServiceMethods_cached_async)
    else:
        sys.stderr.write("Skipped, because DB_CACHE is disabled.\n")
    sys.stderr.write("Testing history in DB_CACHE \n")
    sys.stderr.write("============================\n")
    if afs.CONFIG.DB_CACHE and afs.CONFIG.DB_HISTORY:
        _run_suite(TestVolServiceMethods_historic)
    else:
        # Fix: the guard tests DB_CACHE *and* DB_HISTORY, but the old
        # message blamed only DB_CACHE even when DB_HISTORY was off.
        sys.stderr.write(
            "Skipped, because DB_CACHE or DB_HISTORY is disabled.\n")
| |
# Copyright (c) 2015, Ecole Polytechnique Federale de Lausanne, Blue Brain Project
# All rights reserved.
#
# This file is part of NeuroM <https://github.com/BlueBrain/NeuroM>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of
# its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Fast neuron IO module."""
import logging
from collections import defaultdict, namedtuple
import numpy as np
from neurom.core.dataformat import COLS, POINT_TYPE, ROOT_ID
L = logging.getLogger(__name__)

# Column indices into the integer "structure block" rows used below:
# TYPE is the point type, ID the point id, PID the parent point id.
TYPE, ID, PID = 0, 1, 2
class DataWrapper(object):
    """Holds a raw data block together with its section decomposition."""

    def __init__(self, data_block, fmt, sections=None):
        """Wrap *data_block* and (optionally pre-extracted) sections.

        Args:
            data_block: np.array-like whose columns are
                [X, Y, Z, R, TYPE, ID, P]: coordinates, radius, one of
                POINT_TYPE, a unique point id (ROOT_ID is -1) and the
                parent's ID.
            fmt: file format designation, eg: SWC.
            sections: already extracted sections; when None they are
                derived from data_block.

        Notes:
            - no ordering constraint: a child may reference a parent ID
              that appears later in the block
            - the IDs need not be dense
            - any number of rows may share a 'P'arent, i.e.
              multifurcations are allowed
        """
        self.data_block = data_block
        self.fmt = fmt
        # list of DataBlockSection
        if sections is None:
            sections = _extract_sections(data_block)
        self.sections = sections

    def neurite_root_section_ids(self):
        """Section IDs of non-soma sections whose parent section is soma."""
        secs = self.sections
        roots = []
        for idx, section in enumerate(secs):
            if section.pid <= -1:
                continue
            if (secs[section.pid].ntype == POINT_TYPE.SOMA
                    and section.ntype != POINT_TYPE.SOMA):
                roots.append(idx)
        return roots

    def soma_points(self):
        """Rows of the data block whose point type is soma."""
        block = self.data_block
        mask = block[:, COLS.TYPE] == POINT_TYPE.SOMA
        return block[mask]
def _merge_sections(sec_a, sec_b):
"""Merge two sections.
Merges sec_a into sec_b and sets sec_a attributes to default
"""
sec_b.ids = list(sec_a.ids) + list(sec_b.ids[1:])
sec_b.ntype = sec_a.ntype
sec_b.pid = sec_a.pid
sec_a.ids = []
sec_a.pid = -1
sec_a.ntype = 0
def _section_end_points(structure_block, id_map):
    """Get the section end-points.

    Returns the set of structure_block row positions at which a section
    must terminate: soma points that parent at least one neurite row,
    plus every point with zero or multiple children (leaf or
    multifurcation nodes).

    Args:
        structure_block: integer array with [TYPE, ID, PID] columns.
        id_map: dict mapping SWC ID -> row position in structure_block.
    """
    soma_idx = structure_block[:, TYPE] == POINT_TYPE.SOMA
    soma_ids = structure_block[soma_idx, ID]
    neurite_idx = structure_block[:, TYPE] != POINT_TYPE.SOMA
    neurite_rows = structure_block[neurite_idx, :]
    # Soma points referenced as parent by at least one neurite row.
    soma_end_pts = set(id_map[id_]
                       for id_ in soma_ids[np.in1d(soma_ids, neurite_rows[:, PID])])
    # end points have either no children or more than one
    # ie: leaf or multifurcation nodes
    n_children = defaultdict(int)
    for row in structure_block:
        n_children[row[PID]] += 1
    end_pts = set(i for i, row in enumerate(structure_block)
                  if n_children[row[ID]] != 1)
    return end_pts.union(soma_end_pts)
class DataBlockSection(object):
    """A section: a list of point ids, a point type and a parent id."""

    def __init__(self, ids=None, ntype=0, pid=-1):
        """Create a section; *ids* defaults to a fresh empty list."""
        if ids is None:
            ids = []
        self.ids = ids
        self.ntype = ntype
        self.pid = pid

    def __eq__(self, other):
        """Sections are equal when ids, type and parent id all match."""
        return (self.ids == other.ids
                and self.ntype == other.ntype
                and self.pid == other.pid)

    def __str__(self):
        """Human-readable summary; the id list is shown only as a count."""
        return ('%s: ntype=%s, pid=%s: n_ids=%d'
                % (self.__class__, self.ntype, self.pid, len(self.ids)))

    __repr__ = __str__
def _extract_sections(data_block):
    """Make a list of sections from an SWC-style data wrapper block.

    Walks the rows in file order, closing the current section at every
    pre-computed end-point, then stitches parent links (and interleaved
    'gap' sections) in a final pass.

    Args:
        data_block: np.array-like with [X, Y, Z, R, TYPE, ID, P] columns.

    Returns:
        list of DataBlockSection (some may be left empty — see TODO).
    """
    # Fix: np.int was a deprecated alias of the builtin int and was removed
    # in NumPy 1.24; astype(int) is the equivalent, portable spelling.
    structure_block = data_block[:, COLS.TYPE:COLS.COL_COUNT].astype(int)
    # SWC ID -> structure_block position
    id_map = {-1: -1}
    for i, row in enumerate(structure_block):
        id_map[row[ID]] = i
    # end points have either no children, more than one, or are the start
    # of a new gap
    sec_end_pts = _section_end_points(structure_block, id_map)
    # a 'gap' is when a section has part of its segments interleaved
    # with those of another section
    gap_sections = set()
    sections = []

    def new_section():
        """Append a fresh empty section and return it."""
        sections.append(DataBlockSection())
        return sections[-1]

    curr_section = new_section()
    parent_section = {-1: -1}
    for row in structure_block:
        row_id = id_map[row[ID]]
        parent_id = id_map[row[PID]]
        if not curr_section.ids:
            # first in section point is parent
            curr_section.ids.append(parent_id)
            curr_section.ntype = row[TYPE]
        gap = parent_id != curr_section.ids[-1]
        # If parent is not the previous point, create a section end-point.
        # Else add the point to this section
        if gap:
            sec_end_pts.add(row_id)
        else:
            curr_section.ids.append(row_id)
        if row_id in sec_end_pts:
            parent_section[curr_section.ids[-1]] = len(sections) - 1
            # Parent-child discontinuity section
            if gap:
                curr_section = new_section()
                curr_section.ids.extend((parent_id, row_id))
                curr_section.ntype = row[TYPE]
                # remember the just-closed section for the merge pass below
                gap_sections.add(len(sections) - 2)
            elif row_id != len(data_block) - 1:
                # avoid creating an extra DataBlockSection for last row
                # if it's a leaf
                curr_section = new_section()
    for sec in sections:
        # get the section parent ID from the id of the first point.
        if sec.ids:
            sec.pid = parent_section[sec.ids[0]]
        # join gap sections and "disable" first half
        if sec.pid in gap_sections:
            _merge_sections(sections[sec.pid], sec)
    # TODO find a way to remove empty sections. Currently they are
    # required to maintain tree integrity.
    return sections
class BlockNeuronBuilder(object):
    """Helper to create DataWrapper for 'block' sections.

    This helps create a new DataWrapper when one already has 'blocks'
    (ie: contiguous points, forming all the segments) of a section, and they
    just need to connect them together based on their parent.

    Example:
        >>> builder = BlockNeuronBuilder()
        >>> builder.add_section(segment_id, parent_id, segment_type, points)
        ...
        >>> morph = builder.get_datawrapper()

    Note:
        This will re-number the IDs if they are not 'dense' (ie: have gaps)
    """
    BlockSection = namedtuple('BlockSection', 'parent_id section_type points')

    def __init__(self):
        """Initialize a BlockNeuronBuilder object."""
        # id -> BlockSection(parent_id, section_type, points)
        self.sections = {}

    def add_section(self, id_, parent_id, section_type, points):
        """Add a section.

        Args:
            id_(int): identifying number of the section
            parent_id(int): identifying number of the parent of this section
            section_type(int): the section type as defined by POINT_TYPE
            points: an array of [X, Y, Z, R]

        Raises:
            AssertionError: if *id_* was already added.
        """
        assert id_ not in self.sections, 'id %s already exists in sections' % id_
        self.sections[id_] = BlockNeuronBuilder.BlockSection(
            parent_id, section_type, points)

    def _make_datablock(self):
        """Make a data_block and sections list as required by DataWrapper."""
        section_ids = sorted(self.sections)
        # create all insertion id's, this needs to be done ahead of time
        # as some of the children may have a lower id than their parents
        id_to_insert_id = {}
        row_count = 0
        for section_id in section_ids:
            row_count += len(self.sections[section_id].points)
            # children of this section attach to its last row
            id_to_insert_id[section_id] = row_count - 1
        # Fix: np.float was a deprecated alias of the builtin float and was
        # removed in NumPy 1.24; dtype=float is the equivalent spelling.
        datablock = np.empty((row_count, COLS.COL_COUNT), dtype=float)
        datablock[:, COLS.ID] = np.arange(len(datablock))
        # default parent is the previous row; patched per-section below
        datablock[:, COLS.P] = datablock[:, COLS.ID] - 1
        sections = []
        insert_index = 0
        for id_ in section_ids:
            sec = self.sections[id_]
            points, section_type, parent_id = (
                sec.points, sec.section_type, sec.parent_id)
            idx = slice(insert_index, insert_index + len(points))
            datablock[idx, COLS.XYZR] = points
            datablock[idx, COLS.TYPE] = section_type
            # the first row of each section points at its parent's last row
            datablock[idx.start, COLS.P] = id_to_insert_id.get(parent_id,
                                                               ROOT_ID)
            sections.append(DataBlockSection(idx, section_type, parent_id))
            insert_index = idx.stop
        return datablock, sections

    def _check_consistency(self):
        """See if the sections have obvious errors."""
        type_count = defaultdict(int)
        for _, section in sorted(self.sections.items()):
            type_count[section.section_type] += 1
        if type_count[POINT_TYPE.SOMA] != 1:
            L.info('Have %d somas, expected 1', type_count[POINT_TYPE.SOMA])

    def get_datawrapper(self, file_format='BlockNeuronBuilder',
                        data_wrapper=DataWrapper):
        """Check consistency, build the datablock and return it wrapped."""
        self._check_consistency()
        datablock, sections = self._make_datablock()
        return data_wrapper(datablock, file_format, sections)
| |
# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unittests for chromite.lib.patch."""
from __future__ import print_function
import copy
import contextlib
import itertools
import mock
import os
import shutil
import time
from chromite.cbuildbot import constants
from chromite.lib import cros_build_lib
from chromite.lib import cros_build_lib_unittest
from chromite.lib import cros_test_lib
from chromite.lib import gerrit
from chromite.lib import git
from chromite.lib import osutils
from chromite.lib import patch as cros_patch
# Python 2 bound-method idiom: each call returns the next integer
# (0, 1, 2, ...); used to generate unique file contents in tests.
_GetNumber = iter(itertools.count()).next

# Canned gerrit query-result JSON used to build fake patch objects.
FAKE_PATCH_JSON = {
    'project': 'tacos/chromite',
    'branch': 'master',
    'id': 'Iee5c89d929f1850d7d4e1a4ff5f21adda800025f',
    'currentPatchSet': {
        'number': '2',
        'ref': gerrit.GetChangeRef(1112, 2),
        'revision': 'ff10979dd360e75ff21f5cf53b7f8647578785ef',
    },
    'number': '1112',
    'subject': 'chromite commit',
    'owner': {
        'name': 'Chromite Master',
        'email': 'chromite@chromium.org',
    },
    'url': 'https://chromium-review.googlesource.com/1112',
    'lastUpdated': 1311024529,
    'sortKey': '00166e8700001052',
    'open': True,
    'status': 'NEW',
}

# Change-ID of a known open change in public gerrit.
GERRIT_OPEN_CHANGEID = '8366'
GERRIT_MERGED_CHANGEID = '3'
GERRIT_ABANDONED_CHANGEID = '2'
class GitRepoPatchTestCase(cros_test_lib.TempDirTestCase):
    """Helper TestCase class for writing test cases."""
    # No mock bits are to be used in this class's tests.
    # This needs to actually validate git output, and git behaviour, rather
    # than test our assumptions about git's behaviour/output.
    patch_kls = cros_patch.GitRepoPatch

    # Commit-message template; %(extra)s and %(change-id)s are filled in
    # by CommitChangeIdFile below.
    COMMIT_TEMPLATE = """\
commit abcdefgh
Author: Fake person
Date: Tue Oct 99
I am the first commit.
%(extra)s
%(change-id)s
"""

    # Boolean controlling whether the target class natively knows its
    # ChangeId; only GerritPatches do.
    has_native_change_id = False

    DEFAULT_TRACKING = 'refs/remotes/%s/master' % constants.EXTERNAL_REMOTE

    def _CreateSourceRepo(self, path):
        """Generate a new repo with a single commit."""
        tmp_path = '%s-tmp' % path
        os.mkdir(path)
        os.mkdir(tmp_path)
        # --separate-git-dir leaves the bare repo at *path* once the
        # temporary working tree is deleted.
        self._run(['git', 'init', '--separate-git-dir', path], cwd=tmp_path)
        # Add an initial commit then wipe the working tree.
        self._run(['git', 'commit', '--allow-empty', '-m', 'initial commit'],
                  cwd=tmp_path)
        shutil.rmtree(tmp_path)

    def setUp(self):
        """Create a fresh source repo and an unwritable default cwd."""
        # Create an empty repo to work from.
        self.source = os.path.join(self.tempdir, 'source.git')
        self._CreateSourceRepo(self.source)
        self.default_cwd = os.path.join(self.tempdir, 'unwritable')
        self.original_cwd = os.getcwd()
        os.mkdir(self.default_cwd)
        os.chdir(self.default_cwd)
        # Disallow write so as to smoke out any invalid writes to
        # cwd.
        os.chmod(self.default_cwd, 0o500)

    def tearDown(self):
        """Restore the working directory changed by setUp."""
        if hasattr(self, 'original_cwd'):
            os.chdir(self.original_cwd)

    def _MkPatch(self, source, sha1, ref='refs/heads/master', **kwargs):
        """Build a self.patch_kls patch object for *source* at *sha1*."""
        return self.patch_kls(source, 'chromiumos/chromite', ref,
                              '%s/master' % constants.EXTERNAL_REMOTE,
                              kwargs.pop('remote', constants.EXTERNAL_REMOTE),
                              sha1=sha1, **kwargs)

    def _run(self, cmd, cwd=None):
        """Run *cmd* and return its stripped stdout."""
        # Note that cwd is intentionally set to a location the user can't write
        # to; this flushes out any bad usage in the tests that would work by
        # fluke of being invoked from w/in a git repo.
        if cwd is None:
            cwd = self.default_cwd
        return cros_build_lib.RunCommand(
            cmd, cwd=cwd, print_cmd=False, capture_output=True).output.strip()

    def _GetSha1(self, cwd, refspec):
        """Resolve *refspec* to a single sha1 in the repo at *cwd*."""
        return self._run(['git', 'rev-list', '-n1', refspec], cwd=cwd)

    def _MakeRepo(self, name, clone, remote=None, alternates=True):
        """Clone *clone* into tempdir/*name*; share objects when asked.

        With alternates=True the clone borrows *clone*'s object store via
        git's --reference/alternates mechanism instead of copying objects.
        """
        path = os.path.join(self.tempdir, name)
        cmd = ['git', 'clone', clone, path]
        if alternates:
            cmd += ['--reference', clone]
        if remote is None:
            remote = constants.EXTERNAL_REMOTE
        cmd += ['--origin', remote]
        self._run(cmd)
        return path

    def _MakeCommit(self, repo, commit=None):
        """Commit all tracked changes in *repo*; return the new sha1."""
        if commit is None:
            commit = 'commit at %s' % (time.time(),)
        self._run(['git', 'commit', '-a', '-m', commit], repo)
        return self._GetSha1(repo, 'HEAD')

    def CommitFile(self, repo, filename, content, commit=None, **kwargs):
        """Write and commit *filename*; return a patch for the new commit."""
        osutils.WriteFile(os.path.join(repo, filename), content)
        self._run(['git', 'add', filename], repo)
        sha1 = self._MakeCommit(repo, commit=commit)
        # Classes without a native ChangeId must not receive one.
        if not self.has_native_change_id:
            kwargs.pop('ChangeId', None)
        patch = self._MkPatch(repo, sha1, **kwargs)
        self.assertEqual(patch.sha1, sha1)
        return patch

    def _CommonGitSetup(self):
        """Return two clones of the source repo plus one committed patch."""
        git1 = self._MakeRepo('git1', self.source)
        git2 = self._MakeRepo('git2', self.source)
        patch = self.CommitFile(git1, 'monkeys', 'foon')
        return git1, git2, patch

    def MakeChangeId(self, how_many=1):
        """Return one fresh Change-Id, or a list when how_many > 1."""
        l = [cros_patch.MakeChangeId() for _ in xrange(how_many)]
        if how_many == 1:
            return l[0]
        return l

    def CommitChangeIdFile(self, repo, changeid=None, extra=None,
                           filename='monkeys', content='flinging',
                           raw_changeid_text=None, **kwargs):
        """Commit a file whose message carries a Change-Id footer."""
        template = self.COMMIT_TEMPLATE
        if changeid is None:
            changeid = self.MakeChangeId()
        if raw_changeid_text is None:
            raw_changeid_text = 'Change-Id: %s' % (changeid,)
        if extra is None:
            extra = ''
        commit = template % {'change-id': raw_changeid_text, 'extra': extra}
        return self.CommitFile(repo, filename, content, commit=commit,
                               ChangeId=changeid, **kwargs)
class TestGitRepoPatch(GitRepoPatchTestCase):
"""Unittests for git patch related methods."""
def testGetDiffStatus(self):
    """GetDiffStatus reports per-file A/M/D status against the parent."""
    git1, _, patch1 = self._CommonGitSetup()
    # Ensure that it can work on the first commit, even if it
    # doesn't report anything (no delta; it's the first files).
    patch1 = self._MkPatch(git1, self._GetSha1(git1, self.DEFAULT_TRACKING))
    self.assertEqual({}, patch1.GetDiffStatus(git1))
    patch2 = self.CommitFile(git1, 'monkeys', 'blah')
    self.assertEqual({'monkeys': 'M'}, patch2.GetDiffStatus(git1))
    # A rename shows up as a delete plus an add.
    git.RunGit(git1, ['mv', 'monkeys', 'monkeys2'])
    patch3 = self._MkPatch(git1, self._MakeCommit(git1, commit='mv'))
    self.assertEqual({'monkeys': 'D', 'monkeys2': 'A'},
                     patch3.GetDiffStatus(git1))
    patch4 = self.CommitFile(git1, 'monkey2', 'blah')
    self.assertEqual({'monkey2': 'A'}, patch4.GetDiffStatus(git1))
def testFetch(self):
    """Fetch pulls the patch's sha1 into the target repo's FETCH_HEAD."""
    _, git2, patch = self._CommonGitSetup()
    patch.Fetch(git2)
    self.assertEqual(patch.sha1, self._GetSha1(git2, 'FETCH_HEAD'))
    # Verify reuse; specifically that Fetch doesn't actually run since
    # the rev is already available locally via alternates.
    patch.project_url = '/dev/null'
    git3 = self._MakeRepo('git3', git2)
    patch.Fetch(git3)
    self.assertEqual(patch.sha1, self._GetSha1(git3, patch.sha1))
def testFetchFirstPatchInSeries(self):
    """Fetching an older patch still works after newer commits exist."""
    git1, git2, patch = self._CommonGitSetup()
    # Add a newer commit on top of the patch's commit in the source repo.
    self.CommitFile(git1, 'monkeys', 'foon2')
    patch.Fetch(git2)
def testFetchWithoutSha1(self):
    """Fetch resolves and stores the sha1 when it starts out unset."""
    git1, git2, _ = self._CommonGitSetup()
    patch2 = self.CommitFile(git1, 'monkeys', 'foon2')
    # Remember the real sha1, then clear it on the patch object.
    sha1, patch2.sha1 = patch2.sha1, None
    patch2.Fetch(git2)
    self.assertEqual(sha1, patch2.sha1)
def testAlreadyApplied(self):
    """Applying a change already present raises PatchIsEmpty."""
    git1 = self._MakeRepo('git1', self.source)
    patch1 = self._MkPatch(git1, self._GetSha1(git1, 'HEAD'))
    # Already in the tracking branch -> reported as not inflight.
    self.assertRaises2(cros_patch.PatchIsEmpty, patch1.Apply, git1,
                       self.DEFAULT_TRACKING, check_attrs={'inflight': False})
    # Only present in the working branch -> reported as inflight.
    patch2 = self.CommitFile(git1, 'monkeys', 'rule')
    self.assertRaises2(cros_patch.PatchIsEmpty, patch2.Apply, git1,
                       self.DEFAULT_TRACKING, check_attrs={'inflight': True})
def testDeleteEbuildTwice(self):
    """Test that double-deletes of ebuilds are flagged as conflicts."""
    # Create monkeys.ebuild for testing.
    git1 = self._MakeRepo('git1', self.source)
    patch1 = self.CommitFile(git1, 'monkeys.ebuild', 'rule')
    git.RunGit(git1, ['rm', 'monkeys.ebuild'])
    patch2 = self._MkPatch(git1, self._MakeCommit(git1, commit='rm'))
    # Delete an ebuild that does not exist in TOT.
    check_attrs = {'inflight': False, 'files': ('monkeys.ebuild',)}
    self.assertRaises2(cros_patch.EbuildConflict, patch2.Apply, git1,
                       self.DEFAULT_TRACKING, check_attrs=check_attrs)
    # Delete an ebuild that exists in TOT, but does not exist in the current
    # patch series.
    check_attrs['inflight'] = True
    self.assertRaises2(cros_patch.EbuildConflict, patch2.Apply, git1,
                       patch1.sha1, check_attrs=check_attrs)
def testCleanlyApply(self):
    """Apply succeeds cleanly, and object reuse via alternates works."""
    _, git2, patch = self._CommonGitSetup()
    # Clone git3 before we modify git2; else we'll just wind up
    # cloning its master.
    git3 = self._MakeRepo('git3', git2)
    patch.Apply(git2, self.DEFAULT_TRACKING)
    # Verify reuse; specifically that Fetch doesn't actually run since
    # the object is available in alternates. testFetch partially
    # validates this; the Apply usage here fully validates it via
    # ensuring that the attempted Apply goes boom if it can't get the
    # required sha1.
    patch.project_url = '/dev/null'
    patch.Apply(git3, self.DEFAULT_TRACKING)
def testFailsApply(self):
    """Conflicting inflight content makes Apply raise ApplyPatchException."""
    _, git2, patch1 = self._CommonGitSetup()
    patch2 = self.CommitFile(git2, 'monkeys', 'not foon')
    # Note that Apply creates it's own branch, resetting to master
    # thus we have to re-apply (even if it looks stupid, it's right).
    patch2.Apply(git2, self.DEFAULT_TRACKING)
    self.assertRaises2(cros_patch.ApplyPatchException,
                       patch1.Apply, git2, self.DEFAULT_TRACKING,
                       exact_kls=True, check_attrs={'inflight': True})
def testTrivial(self):
    """Exercise trivial-merge semantics of Apply, at ToT and inflight.

    trivial=True forbids content merging; patches that would merge with
    fuzz must instead raise (PatchIsEmpty when the result is a no-op,
    ApplyPatchException on a real conflict).
    """
    _, git2, patch1 = self._CommonGitSetup()
    # Throw in a bunch of newlines so that content-merging would work.
    content = 'not foon%s' % ('\n' * 100)
    patch1 = self._MkPatch(git2, self._GetSha1(git2, 'HEAD'))
    patch1 = self.CommitFile(git2, 'monkeys', content)
    # Make patch1's commit the tracking branch's tip.
    git.RunGit(
        git2, ['update-ref', self.DEFAULT_TRACKING, patch1.sha1])
    patch2 = self.CommitFile(git2, 'monkeys', '%sblah' % content)
    patch3 = self.CommitFile(git2, 'monkeys', '%sblahblah' % content)
    # Get us a back to the basic, then derive from there; this is used to
    # verify that even if content merging works, trivial is flagged.
    self.CommitFile(git2, 'monkeys', 'foon')
    patch4 = self.CommitFile(git2, 'monkeys', content)
    patch5 = self.CommitFile(git2, 'monkeys', '%sfoon' % content)
    # Reset so we derive the next changes from patch1.
    git.RunGit(git2, ['reset', '--hard', patch1.sha1])
    patch6 = self.CommitFile(git2, 'blah', 'some-other-file')
    self.CommitFile(git2, 'monkeys',
                    '%sblah' % content.replace('not', 'bot'))
    # patch1 is already at ToT -> empty, not inflight.
    self.assertRaises2(cros_patch.PatchIsEmpty,
                       patch1.Apply, git2, self.DEFAULT_TRACKING, trivial=True,
                       check_attrs={'inflight': False, 'trivial': False})
    # Now test conflicts since we're still at ToT; note that this is an actual
    # conflict because the fuzz anchors have changed.
    self.assertRaises2(cros_patch.ApplyPatchException,
                       patch3.Apply, git2, self.DEFAULT_TRACKING, trivial=True,
                       check_attrs={'inflight': False, 'trivial': False},
                       exact_kls=True)
    # Now test trivial conflict; this would've merged fine were it not for
    # trivial.
    self.assertRaises2(cros_patch.PatchIsEmpty,
                       patch4.Apply, git2, self.DEFAULT_TRACKING, trivial=True,
                       check_attrs={'inflight': False, 'trivial': False},
                       exact_kls=True)
    # Move us into inflight testing.
    patch2.Apply(git2, self.DEFAULT_TRACKING, trivial=True)
    # Repeat the tests from above; should still be the same.
    self.assertRaises2(cros_patch.PatchIsEmpty,
                       patch4.Apply, git2, self.DEFAULT_TRACKING, trivial=True,
                       check_attrs={'inflight': False, 'trivial': False})
    # Actual conflict merge conflict due to inflight; non trivial induced.
    self.assertRaises2(cros_patch.ApplyPatchException,
                       patch5.Apply, git2, self.DEFAULT_TRACKING, trivial=True,
                       check_attrs={'inflight': True, 'trivial': False},
                       exact_kls=True)
    self.assertRaises2(cros_patch.PatchIsEmpty,
                       patch1.Apply, git2, self.DEFAULT_TRACKING, trivial=True,
                       check_attrs={'inflight': False})
    self.assertRaises2(cros_patch.ApplyPatchException,
                       patch5.Apply, git2, self.DEFAULT_TRACKING, trivial=True,
                       check_attrs={'inflight': True, 'trivial': False},
                       exact_kls=True)
    # And this should apply without issue, despite the differing history.
    patch6.Apply(git2, self.DEFAULT_TRACKING, trivial=True)
def _assertLookupAliases(self, remote):
    """LookupAliases must cover sha1, gerrit number and full Change-Id."""
    git1 = self._MakeRepo('git1', self.source)
    patch = self.CommitChangeIdFile(git1, remote=remote)
    # Internal patches' aliases are prefixed with '*'.
    prefix = '*' if patch.internal else ''
    vals = [patch.sha1, getattr(patch, 'gerrit_number', None),
            getattr(patch, 'original_sha1', None)]
    # Append full Change-ID if it exists.
    if patch.project and patch.tracking_branch and patch.change_id:
        vals.append('%s~%s~%s' % (
            patch.project, patch.tracking_branch, patch.change_id))
    vals = [x for x in vals if x is not None]
    self.assertEqual(set(prefix + x for x in vals), set(patch.LookupAliases()))
def testExternalLookupAliases(self):
    """Alias lookup for a patch on the external remote."""
    self._assertLookupAliases(constants.EXTERNAL_REMOTE)

def testInternalLookupAliases(self):
    """Alias lookup for a patch on the internal remote."""
    self._assertLookupAliases(constants.INTERNAL_REMOTE)
def _CheckPaladin(self, repo, master_id, ids, extra):
    """Commit with a CQ-DEPEND footer *extra*; assert its deps match *ids*.

    Returns the committed patch so callers can inspect it further.
    """
    patch = self.CommitChangeIdFile(
        repo, master_id, extra=extra,
        filename='paladincheck', content=str(_GetNumber()))
    deps = patch.PaladinDependencies(repo)
    # Assert that our parsing unique'ifies the results.
    self.assertEqual(len(deps), len(set(deps)))
    # Verify that we have the correct dependencies.
    dep_ids = []
    dep_ids += [(dep.remote, dep.change_id) for dep in deps
                if dep.change_id is not None]
    dep_ids += [(dep.remote, dep.gerrit_number) for dep in deps
                if dep.gerrit_number is not None]
    dep_ids += [(dep.remote, dep.sha1) for dep in deps
                if dep.sha1 is not None]
    for input_id in ids:
        change_tuple = cros_patch.StripPrefix(input_id)
        self.assertIn(change_tuple, dep_ids)
    return patch
  def testPaladinDependencies(self):
    """Exercise CQ-DEPEND parsing across the supported syntaxes."""
    git1 = self._MakeRepo('git1', self.source)
    cid1, cid2, cid3, cid4 = self.MakeChangeId(4)
    # Verify it handles nonexistent CQ-DEPEND.
    self._CheckPaladin(git1, cid1, [], '')
    # Single key, single value.
    self._CheckPaladin(git1, cid1, [cid2],
                       'CQ-DEPEND=%s' % cid2)
    # Single key, gerrit number.
    self._CheckPaladin(git1, cid1, ['123'],
                       'CQ-DEPEND=%s' % 123)
    # Single key, gerrit number.
    self._CheckPaladin(git1, cid1, ['123456'],
                       'CQ-DEPEND=%s' % 123456)
    # Single key, gerrit number; ensure it
    # cuts off before a million changes (this
    # is done to avoid collisions w/ sha1 when
    # we're using shortened versions).
    self.assertRaises(cros_patch.BrokenCQDepends,
                      self._CheckPaladin, git1, cid1,
                      ['1234567'], 'CQ-DEPEND=%s' % '1234567')
    # Single key, gerrit number, internal.
    self._CheckPaladin(git1, cid1, ['*123'],
                       'CQ-DEPEND=%s' % '*123')
    # Ensure SHA1's aren't allowed.
    sha1 = '0' * 40
    self.assertRaises(cros_patch.BrokenCQDepends,
                      self._CheckPaladin, git1, cid1,
                      [sha1], 'CQ-DEPEND=%s' % sha1)
    # Single key, multiple values
    self._CheckPaladin(git1, cid1, [cid2, '1223'],
                       'CQ-DEPEND=%s %s' % (cid2, '1223'))
    # Dumb comma behaviour
    self._CheckPaladin(git1, cid1, [cid2, cid3],
                       'CQ-DEPEND=%s, %s,' % (cid2, cid3))
    # Multiple keys.
    self._CheckPaladin(git1, cid1, [cid2, '*245', cid4],
                       'CQ-DEPEND=%s, %s\nCQ-DEPEND=%s' % (cid2, '*245', cid4))
    # Ensure it goes boom on invalid data.
    self.assertRaises(cros_patch.BrokenCQDepends, self._CheckPaladin,
                      git1, cid1, [], 'CQ-DEPEND=monkeys')
    self.assertRaises(cros_patch.BrokenCQDepends, self._CheckPaladin,
                      git1, cid1, [], 'CQ-DEPEND=%s monkeys' % (cid2,))
    # Validate numeric is allowed.
    self._CheckPaladin(git1, cid1, [cid2, '1'], 'CQ-DEPEND=1 %s' % cid2)
    # Validate that it unique'ifies the results.
    self._CheckPaladin(git1, cid1, ['1'], 'CQ-DEPEND=1 1')
    # Invalid syntax: wrong keyword, wrong separator, leading whitespace.
    self.assertRaises(cros_patch.BrokenCQDepends, self._CheckPaladin,
                      git1, cid1, [], 'CQ-DEPENDS=1')
    self.assertRaises(cros_patch.BrokenCQDepends, self._CheckPaladin,
                      git1, cid1, [], 'CQ_DEPEND=1')
    self.assertRaises(cros_patch.BrokenCQDepends, self._CheckPaladin,
                      git1, cid1, [], ' CQ-DEPEND=1')
  def testChangeIdMetadata(self):
    """Verify Change-Id is set in git metadata."""
    git1, git2, _ = self._CommonGitSetup()
    changeid = 'I%s' % ('1'.rjust(40, '0'))
    # Commit created with no Change-Id text in the message body
    # (raw_changeid_text=''); after Fetch the commit message must
    # nonetheless carry the Change-Id line.
    patch = self.CommitChangeIdFile(git1, changeid=changeid, change_id=changeid,
                                    raw_changeid_text='')
    patch.change_id = changeid
    patch.Fetch(git1)
    self.assertIn('Change-Id: %s\n' % changeid, patch.commit_message)
    # Commit with the Change-Id present; Fetch must parse it back out.
    patch = self.CommitChangeIdFile(git2, changeid=changeid, change_id=changeid)
    patch.Fetch(git2)
    self.assertEqual(patch.change_id, changeid)
    self.assertIn('Change-Id: %s\n' % changeid, patch.commit_message)
class TestGetOptionLinesFromCommitMessage(cros_test_lib.TestCase):
  """Tests of GetOptionFromCommitMessage."""

  # Option tag only on the subject (first) line; per testNoOption this
  # does not count as an option occurrence.
  _M1 = """jabberwocky: by Lewis Carroll
'Twas brillig, and the slithy toves
did gyre and gimble in the wabe.
"""

  # Option tag on the subject line plus once in the body with a value.
  _M2 = """jabberwocky: by Lewis Carroll
All mimsy were the borogroves,
And the mome wraths outgrabe.
jabberwocky: Charles Lutwidge Dodgson
"""

  # Option tag in the body with no value after it.
  _M3 = """jabberwocky: by Lewis Carroll
He took his vorpal sword in hand:
Long time the manxome foe he sought
jabberwocky:
"""

  # Option tag appearing twice in the body.
  _M4 = """the poem continues...
jabberwocky: O frabjuous day!
jabberwocky: Calloh! Callay!
"""

  def testNoMessage(self):
    # An empty message yields None (option absent).
    o = cros_patch.GetOptionLinesFromCommitMessage('', 'jabberwocky:')
    self.assertEqual(None, o)

  def testNoOption(self):
    # Tag only on the subject line -> treated as absent.
    o = cros_patch.GetOptionLinesFromCommitMessage(self._M1, 'jabberwocky:')
    self.assertEqual(None, o)

  def testYesOption(self):
    o = cros_patch.GetOptionLinesFromCommitMessage(self._M2, 'jabberwocky:')
    self.assertEqual(['Charles Lutwidge Dodgson'], o)

  def testEmptyOption(self):
    # Tag present but valueless yields an empty list (not None).
    o = cros_patch.GetOptionLinesFromCommitMessage(self._M3, 'jabberwocky:')
    self.assertEqual([], o)

  def testMultiOption(self):
    # Multiple occurrences are all returned, in order.
    o = cros_patch.GetOptionLinesFromCommitMessage(self._M4, 'jabberwocky:')
    self.assertEqual(['O frabjuous day!', 'Calloh! Callay!'], o)
class TestApplyAgainstManifest(GitRepoPatchTestCase,
                               cros_test_lib.MockTestCase):
  """Test applying a patch against a manifest"""

  # Skeleton manifest; %(projects)s is filled with <project .../> elements.
  MANIFEST_TEMPLATE = """\
<?xml version="1.0" encoding="UTF-8"?>
<manifest>
<remote name="cros" />
<default revision="refs/heads/master" remote="cros" />
%(projects)s
</manifest>
"""

  def _CommonRepoSetup(self, *projects):
    """Write a .repo/manifest.xml describing `projects`; return its basedir.

    Each project is a dict of <project> element attributes; 'path'
    values are rewritten relative to the temp dir so the manifest
    stays portable.
    """
    basedir = self.tempdir
    repodir = os.path.join(basedir, '.repo')
    manifest_file = os.path.join(repodir, 'manifest.xml')
    proj_pieces = []
    for project in projects:
      proj_pieces.append('<project')
      for key, val in project.items():
        if key == 'path':
          val = os.path.relpath(os.path.realpath(val),
                                os.path.realpath(self.tempdir))
        proj_pieces.append(' %s="%s"' % (key, val))
      proj_pieces.append(' />\n ')
    proj_str = ''.join(proj_pieces)
    content = self.MANIFEST_TEMPLATE % {'projects': proj_str}
    os.mkdir(repodir)
    osutils.WriteFile(manifest_file, content)
    return basedir

  def testApplyAgainstManifest(self):
    """Apply successive patches through a manifest checkout."""
    git1, git2, _ = self._CommonGitSetup()
    readme_text = 'Dummy README text.'
    readme1 = self.CommitFile(git1, 'README', readme_text)
    readme_text += ' Even more dummy README text.'
    readme2 = self.CommitFile(git1, 'README', readme_text)
    readme_text += ' Even more README text.'
    readme3 = self.CommitFile(git1, 'README', readme_text)
    # Pin git1 at the first commit so readme2/readme3 must be applied.
    git1_proj = {
        'path': git1,
        'name': 'chromiumos/chromite',
        'revision': str(readme1.sha1),
        'upstream': 'refs/heads/master',
    }
    git2_proj = {
        'path': git2,
        'name': 'git2',
    }
    basedir = self._CommonRepoSetup(git1_proj, git2_proj)
    # Avoid hitting git for the manifest's own branch.
    self.PatchObject(git.ManifestCheckout, '_GetManifestsBranch',
                     return_value=None)
    manifest = git.ManifestCheckout(basedir)
    readme2.ApplyAgainstManifest(manifest)
    readme3.ApplyAgainstManifest(manifest)
    # Verify that both readme2 and readme3 are on the patch branch.
    cmd = ['git', 'log', '--format=%T',
           '%s..%s' % (readme1.sha1, constants.PATCH_BRANCH)]
    trees = self._run(cmd, git1).splitlines()
    self.assertEqual(trees, [str(readme3.tree_hash), str(readme2.tree_hash)])
class TestLocalPatchGit(GitRepoPatchTestCase):
  """Test Local patch handling."""

  patch_kls = cros_patch.LocalPatch

  def setUp(self):
    self.sourceroot = os.path.join(self.tempdir, 'sourceroot')

  def _MkPatch(self, source, sha1, ref='refs/heads/master', **kwargs):
    """Construct a LocalPatch rooted at `source` for the given sha1."""
    remote = kwargs.pop('remote', constants.EXTERNAL_REMOTE)
    return self.patch_kls(source, 'chromiumos/chromite', ref,
                          '%s/master' % remote, remote, sha1, **kwargs)

  def testUpload(self):
    """Verify Upload() pushes refs and the carbon-copy logic is sound."""
    def ProjectDirMock(_sourceroot):
      return git1
    git1, git2, patch = self._CommonGitSetup()
    git2_sha1 = self._GetSha1(git2, 'HEAD')
    patch.ProjectDir = ProjectDirMock
    # First suppress carbon copy behaviour so we verify pushing plain works.
    sha1 = patch.sha1
    patch._GetCarbonCopy = lambda: sha1 # pylint: disable=protected-access
    patch.Upload(git2, 'refs/testing/test1')
    self.assertEqual(self._GetSha1(git2, 'refs/testing/test1'),
                     patch.sha1)
    # Enable CarbonCopy behaviour; verify it lands a different
    # sha1. Additionally verify it didn't corrupt the patch's sha1 locally.
    del patch._GetCarbonCopy
    patch.Upload(git2, 'refs/testing/test2')
    self.assertNotEqual(self._GetSha1(git2, 'refs/testing/test2'),
                        patch.sha1)
    self.assertEqual(patch.sha1, sha1)
    # Ensure the carbon creation didn't damage the target repo.
    self.assertEqual(self._GetSha1(git1, 'HEAD'), sha1)
    # Ensure we didn't damage the target repo's state at all.
    self.assertEqual(git2_sha1, self._GetSha1(git2, 'HEAD'))
    # Ensure the content is the same.
    base = ['git', 'show']
    self.assertEqual(
        self._run(base + ['refs/testing/test1:monkeys'], git2),
        self._run(base + ['refs/testing/test2:monkeys'], git2))
    base = ['git', 'log', '--format=%B', '-n1']
    self.assertEqual(
        self._run(base + ['refs/testing/test1'], git2),
        self._run(base + ['refs/testing/test2'], git2))
class UploadedLocalPatchTestCase(GitRepoPatchTestCase):
  """Test uploading of local git patches."""

  PROJECT = 'chromiumos/chromite'
  ORIGINAL_BRANCH = 'original_branch'
  # 'ffffffff' padded out to a full 40-char sha1.
  ORIGINAL_SHA1 = 'ffffffff'.ljust(40, '0')

  patch_kls = cros_patch.UploadedLocalPatch

  def _MkPatch(self, source, sha1, ref='refs/heads/master', **kwargs):
    """Construct an UploadedLocalPatch with canned project/branch values."""
    return self.patch_kls(source, self.PROJECT, ref,
                          '%s/master' % constants.EXTERNAL_REMOTE,
                          self.ORIGINAL_BRANCH,
                          kwargs.pop('original_sha1', self.ORIGINAL_SHA1),
                          kwargs.pop('remote', constants.EXTERNAL_REMOTE),
                          carbon_copy_sha1=sha1, **kwargs)
class TestUploadedLocalPatch(UploadedLocalPatchTestCase):
  """Test uploading of local git patches."""

  def testStringRepresentation(self):
    """str() of a patch must include project, branch and short sha1."""
    _, _, patch = self._CommonGitSetup()
    str_rep = str(patch).split(':')
    for element in [self.PROJECT, self.ORIGINAL_BRANCH, self.ORIGINAL_SHA1[:8]]:
      self.assertTrue(element in str_rep,
                      msg="Couldn't find %s in %s" % (element, str_rep))
# pylint: disable=protected-access
class TestGerritPatch(TestGitRepoPatch):
  """Test Gerrit patch handling."""

  has_native_change_id = True

  class patch_kls(cros_patch.GerritPatch):
    """Test helper class to suppress pointing to actual gerrit."""
    # Suppress the behaviour pointing the project url at actual gerrit,
    # instead slaving it back to a local repo for tests.
    def __init__(self, *args, **kwargs):
      cros_patch.GerritPatch.__init__(self, *args, **kwargs)
      assert hasattr(self, 'patch_dict')
      self.project_url = self.patch_dict['_unittest_url_bypass']

  @property
  def test_json(self):
    """A fresh deep copy of the canned gerrit query JSON."""
    return copy.deepcopy(FAKE_PATCH_JSON)

  def _MkPatch(self, source, sha1, ref='refs/heads/master', **kwargs):
    """Build a GerritPatch from canned JSON, optionally pushing its ref.

    The local was renamed from `json` to `patch_json` to stop shadowing
    the stdlib json module.
    """
    patch_json = self.test_json
    remote = kwargs.pop('remote', constants.EXTERNAL_REMOTE)
    url_prefix = kwargs.pop('url_prefix', constants.EXTERNAL_GERRIT_URL)
    suppress_branch = kwargs.pop('suppress_branch', False)
    change_id = kwargs.pop('ChangeId', None)
    if change_id is None:
      change_id = self.MakeChangeId()
    patch_json.update(kwargs)
    change_num, patch_num = _GetNumber(), _GetNumber()
    # Note we intentionally use a gerrit like refspec here; we want to
    # ensure that none of our common code pathways puke on a non head/tag.
    refspec = gerrit.GetChangeRef(change_num + 1000, patch_num)
    patch_json['currentPatchSet'].update(
        dict(number=patch_num, ref=refspec, revision=sha1))
    patch_json['branch'] = os.path.basename(ref)
    patch_json['_unittest_url_bypass'] = source
    patch_json['id'] = change_id
    obj = self.patch_kls(patch_json.copy(), remote, url_prefix)
    # Sanity-check every attribute the constructor derives from the JSON.
    self.assertEqual(obj.patch_dict, patch_json)
    self.assertEqual(obj.remote, remote)
    self.assertEqual(obj.url_prefix, url_prefix)
    self.assertEqual(obj.project, patch_json['project'])
    self.assertEqual(obj.ref, refspec)
    self.assertEqual(obj.change_id, change_id)
    self.assertEqual(obj.id, '%s%s~%s~%s' % (
        constants.CHANGE_PREFIX[remote], patch_json['project'],
        patch_json['branch'], change_id))
    # Now make the fetching actually work, if desired.
    if not suppress_branch:
      # Note that a push is needed here, rather than a branch; branch
      # will just make it under refs/heads, we want it literally in
      # refs/changes/
      self._run(['git', 'push', source, '%s:%s' % (sha1, refspec)], source)
    return obj

  def testApprovalTimestamp(self):
    """Test that the approval timestamp is correctly extracted from JSON."""
    repo = self._MakeRepo('git', self.source)
    # approval_timestamp must be the max grantedOn value, 0 when absent.
    for approvals, expected in [(None, 0), ([], 0), ([1], 1), ([1, 3, 2], 3)]:
      currentPatchSet = copy.deepcopy(FAKE_PATCH_JSON['currentPatchSet'])
      if approvals is not None:
        currentPatchSet['approvals'] = [{'grantedOn': x} for x in approvals]
      patch = self._MkPatch(repo, self._GetSha1(repo, self.DEFAULT_TRACKING),
                            currentPatchSet=currentPatchSet)
      msg = 'Expected %r, but got %r (approvals=%r)' % (
          expected, patch.approval_timestamp, approvals)
      self.assertEqual(patch.approval_timestamp, expected, msg)

  def _assertGerritDependencies(self, remote=constants.EXTERNAL_REMOTE):
    """Verify GerritDependencies() for 0, 1 and 2 dependsOn entries."""
    convert = str
    if remote == constants.INTERNAL_REMOTE:
      # Internal dependencies are prefixed with '*'.
      convert = lambda val: '*%s' % (val,)
    git1 = self._MakeRepo('git1', self.source, remote=remote)
    patch = self._MkPatch(git1, self._GetSha1(git1, 'HEAD'), remote=remote)
    cid1, cid2 = '1', '2'
    # Test cases with no dependencies, 1 dependency, and 2 dependencies.
    self.assertEqual(patch.GerritDependencies(), [])
    patch.patch_dict['dependsOn'] = [{'number': cid1}]
    self.assertEqual(
        [cros_patch.AddPrefix(x, x.gerrit_number)
         for x in patch.GerritDependencies()],
        [convert(cid1)])
    patch.patch_dict['dependsOn'].append({'number': cid2})
    self.assertEqual(
        [cros_patch.AddPrefix(x, x.gerrit_number)
         for x in patch.GerritDependencies()],
        [convert(cid1), convert(cid2)])

  def testExternalGerritDependencies(self):
    self._assertGerritDependencies()

  def testInternalGerritDependencies(self):
    self._assertGerritDependencies(constants.INTERNAL_REMOTE)

  def testReviewedOnMetadata(self):
    """Verify Change-Id and Reviewed-On are set in git metadata."""
    git1, _, patch = self._CommonGitSetup()
    patch.Apply(git1, self.DEFAULT_TRACKING)
    reviewed_on = '/'.join([constants.EXTERNAL_GERRIT_URL, patch.gerrit_number])
    self.assertIn('Reviewed-on: %s\n' % reviewed_on, patch.commit_message)

  def _MakeFooters(self):
    """Return tuples of (tag, value) footer combinations to test with."""
    return (
        (),
        (('Footer-1', 'foo'),),
        (('Change-id', '42'),),
        (('Footer-1', 'foo'), ('Change-id', '42')),)

  def _MakeCommitMessages(self):
    """Yield commit messages built from header/body/footer combinations."""
    headers = (
        'A standard commit message header',
        '',
        'Footer-1: foo',
        'Change-id: 42')
    bodies = (
        '',
        '\n',
        'Lots of comments\n about the commit\n' * 100)
    for header, body, preexisting in itertools.product(headers,
                                                       bodies,
                                                       self._MakeFooters()):
      # BUG FIX: this previously joined the literal string '%s: %s' for
      # every footer (missing the % interpolation), so pre-existing
      # footers never actually appeared in the generated messages.
      yield '\n'.join((header,
                       body,
                       '\n'.join('%s: %s' % (tag, ident)
                                 for tag, ident in preexisting)))

  def testAddFooters(self):
    """_AddFooters must be idempotent and preserve pre-existing footers."""
    repo = self._MakeRepo('git', self.source)
    patch = self._MkPatch(repo, self._GetSha1(repo, 'HEAD'))
    approval = {'type': 'VRIF', 'value': '1', 'grantedOn': 1391733002}
    for msg in self._MakeCommitMessages():
      for footers in self._MakeFooters():
        ctx = contextlib.nested(
            mock.patch('chromite.lib.patch.FooterForApproval',
                       new=mock.Mock(side_effect=itertools.cycle(footers))),
            mock.patch.object(patch, '_approvals',
                              new=[approval] * len(footers)))
        with ctx:
          patch._commit_message = msg
          # Idempotence
          self.assertEqual(patch._AddFooters(msg),
                           patch._AddFooters(patch._AddFooters(msg)))
          # there may be pre-existing footers. This asserts that we
          # can Get all of the footers after we Set them.
          self.assertFalse(bool(
              set(footers) -
              set(patch._GetFooters(patch._AddFooters(msg)))))
          if set(footers) - set(patch._GetFooters(msg)):
            self.assertNotEqual(msg, patch._AddFooters(msg))
class PrepareRemotePatchesTest(cros_test_lib.TestCase):
  """Test preparing remote patches."""

  def MkRemote(self,
               project='my/project', original_branch='my-local',
               ref='refs/tryjobs/elmer/patches', tracking_branch='master',
               internal=False):
    """Return a colon-delimited remote patch spec string."""
    l = [project, original_branch, ref, tracking_branch,
         getattr(constants, ('%s_PATCH_TAG' %
                             ('INTERNAL' if internal else 'EXTERNAL')))]
    return ':'.join(l)

  def assertRemote(self, patch, project='my/project',
                   original_branch='my-local',
                   ref='refs/tryjobs/elmer/patches', tracking_branch='master',
                   internal=False):
    """Assert every attribute of `patch` matches the expected values."""
    self.assertEqual(patch.project, project)
    self.assertEqual(patch.original_branch, original_branch)
    self.assertEqual(patch.ref, ref)
    self.assertEqual(patch.tracking_branch, tracking_branch)
    self.assertEqual(patch.internal, internal)

  def test(self):
    # Check handling of a single patch...
    patches = cros_patch.PrepareRemotePatches([self.MkRemote()])
    self.assertEqual(len(patches), 1)
    self.assertRemote(patches[0])
    # Check handling of a multiple...
    patches = cros_patch.PrepareRemotePatches(
        [self.MkRemote(), self.MkRemote(project='foon')])
    self.assertEqual(len(patches), 2)
    self.assertRemote(patches[0])
    self.assertRemote(patches[1], project='foon')
    # Ensure basic validation occurs: too few fields, a bad tag field,
    # and too many fields must each raise ValueError.
    chunks = self.MkRemote().split(':')
    self.assertRaises(ValueError, cros_patch.PrepareRemotePatches,
                      ':'.join(chunks[:-1]))
    self.assertRaises(ValueError, cros_patch.PrepareRemotePatches,
                      ':'.join(chunks[:-1] + ['monkeys']))
    self.assertRaises(ValueError, cros_patch.PrepareRemotePatches,
                      ':'.join(chunks + [':']))
class PrepareLocalPatchesTests(cros_build_lib_unittest.RunCommandTestCase):
  """Test preparing local patches."""

  def setUp(self):
    self.path, self.project, self.branch = 'mydir', 'my/project', 'mybranch'
    self.tracking_branch = 'kernel'
    self.patches = ['%s:%s' % (self.project, self.branch)]
    # Fake manifest that resolves our project to a known checkout.
    self.manifest = mock.MagicMock()
    attrs = dict(tracking_branch=self.tracking_branch,
                 local_path=self.path,
                 remote='cros')
    checkout = git.ProjectCheckout(attrs)
    self.PatchObject(
        self.manifest, 'FindCheckouts', return_value=[checkout]
    )

  def PrepareLocalPatches(self, output):
    """Check the returned GitRepoPatchInfo against golden values.

    `output` is the stdout the stubbed git calls return (e.g. a sha1
    for a branch with changes, '' for none).
    """
    output_obj = mock.MagicMock()
    output_obj.output = output
    self.PatchObject(cros_patch.LocalPatch, 'Fetch', return_value=output_obj)
    self.PatchObject(git, 'RunGit', return_value=output_obj)
    patch_info = cros_patch.PrepareLocalPatches(self.manifest, self.patches)[0]
    self.assertEquals(patch_info.project, self.project)
    self.assertEquals(patch_info.ref, self.branch)
    self.assertEquals(patch_info.tracking_branch, self.tracking_branch)

  def testBranchSpecifiedSuccessRun(self):
    """Test success with branch specified by user."""
    self.PrepareLocalPatches('12345'.rjust(40, '0'))

  def testBranchSpecifiedNoChanges(self):
    """Test when no changes on the branch specified by user."""
    self.assertRaises(SystemExit, self.PrepareLocalPatches, '')
class TestFormatting(cros_test_lib.TestCase):
  """Test formatting of output."""

  VALID_CHANGE_ID = 'I47ea30385af60ae4cc2acc5d1a283a46423bc6e1'

  def _assertResult(self, functor, value, expected=None, raises=False,
                    **kwargs):
    """Assert functor(value) returns `expected`, or raises ValueError."""
    if raises:
      self.assertRaises2(ValueError, functor, value,
                         msg='%s(%r) did not throw a ValueError'
                         % (functor.__name__, value), **kwargs)
    else:
      self.assertEqual(functor(value, **kwargs), expected,
                       msg='failed: %s(%r) != %r'
                       % (functor.__name__, value, expected))

  def _assertBad(self, functor, values, **kwargs):
    """Each value must make `functor` raise ValueError."""
    for value in values:
      self._assertResult(functor, value, raises=True, **kwargs)

  def _assertGood(self, functor, values, **kwargs):
    """Each (value, expected) pair must satisfy functor(value) == expected."""
    for value, expected in values:
      self._assertResult(functor, value, expected, **kwargs)

  def testGerritNumber(self):
    """Tests that we can parse a Gerrit number."""
    self._assertGood(cros_patch.ParseGerritNumber,
                     [('12345',) * 2, ('12',) * 2, ('123',) * 2])
    self._assertBad(
        cros_patch.ParseGerritNumber,
        ['is', 'i1325', '01234567', '012345a', '**12345', '+123', '/0123'],
        error_ok=False)

  def testChangeID(self):
    """Tests that we can parse a change-ID."""
    self._assertGood(cros_patch.ParseChangeID, [(self.VALID_CHANGE_ID,) * 2])
    # Change-IDs too short/long, with unexpected characters in it.
    self._assertBad(
        cros_patch.ParseChangeID,
        ['is', '**i1325', 'i134'.ljust(41, '0'), 'I1234+'.ljust(41, '0'),
         'I123'.ljust(42, '0')],
        error_ok=False)

  def testSHA1(self):
    """Tests that we can parse a SHA1 hash."""
    self._assertGood(cros_patch.ParseSHA1,
                     [('1' * 40,) * 2,
                      ('a' * 40,) * 2,
                      ('1a7e034'.ljust(40, '0'),) * 2])
    self._assertBad(
        cros_patch.ParseSHA1,
        ['0abcg', 'Z', '**a', '+123', '1234ab' * 10],
        error_ok=False)

  def testFullChangeID(self):
    """Tests that we can parse a full change-ID."""
    change_id = self.VALID_CHANGE_ID
    self._assertGood(
        cros_patch.ParseFullChangeID,
        (('foo~bar~%s' % change_id,
          cros_patch.FullChangeId('foo', 'bar', change_id)),
         ('foo/bar/baz~refs/heads/_my-branch_~%s' % change_id,
          cros_patch.FullChangeId('foo/bar/baz', 'refs/heads/_my-branch_',
                                  change_id))))

  def testInvalidFullChangeID(self):
    """Should throw an error on bad inputs."""
    change_id = self.VALID_CHANGE_ID
    self._assertBad(
        cros_patch.ParseFullChangeID,
        ['foo', 'foo~bar', 'foo~bar~baz', 'foo~refs/bar~%s' % change_id],
        error_ok=False)

  def testParsePatchDeps(self):
    """Tests that we can parse the dependency specified by the user."""
    change_id = self.VALID_CHANGE_ID
    vals = ['CL:12345', 'project~branch~%s' % change_id, change_id,
            change_id[1:]]
    for val in vals:
      self.assertTrue(cros_patch.ParsePatchDep(val) is not None)
    self._assertBad(cros_patch.ParsePatchDep,
                    ['1454623', 'I47ea3', 'i47ea3'.ljust(41, '0')])
class MockPatchBase(cros_test_lib.MockTestCase):
  """Base test case with helper methods to generate mock patches."""

  def setUp(self):
    self.patch_mock = None
    # Monotonic source of unique change numbers (Python 2 .next).
    self._patch_counter = (itertools.count(1)).next

  def MockPatch(self, change_id=None, patch_number=None, is_merged=False,
                project='chromiumos/chromite', remote=constants.EXTERNAL_REMOTE,
                tracking_branch='refs/heads/master', is_draft=False,
                approvals=()):
    """Helper function to create mock GerritPatch objects."""
    if change_id is None:
      change_id = self._patch_counter()
    gerrit_number = str(change_id)
    # Encode the change number into a well-formed 41-char Change-Id.
    change_id = hex(change_id)[2:].rstrip('L').lower()
    change_id = 'I%s' % change_id.rjust(40, '0')
    sha1 = hex(_GetNumber())[2:].rstrip('L').lower().rjust(40, '0')
    patch_number = (patch_number if patch_number is not None else _GetNumber())
    fake_url = 'http://foo/bar'
    # Default to a fully-approved patch (verified, CR+2, CQ+1).
    if not approvals:
      approvals = [{'type': 'VRIF', 'value': '1', 'grantedOn': 1391733002},
                   {'type': 'CRVW', 'value': '2', 'grantedOn': 1391733002},
                   {'type': 'COMR', 'value': '1', 'grantedOn': 1391733002}]
    current_patch_set = {
        'number': patch_number,
        'revision': sha1,
        'draft': is_draft,
        'approvals': approvals,
    }
    patch_dict = {
        'currentPatchSet': current_patch_set,
        'id': change_id,
        'number': gerrit_number,
        'project': project,
        'branch': tracking_branch,
        'owner': {'email': 'elmer.fudd@chromium.org'},
        'remote': remote,
        'status': 'MERGED' if is_merged else 'NEW',
        'url': '%s/%s' % (fake_url, change_id),
    }
    patch = cros_patch.GerritPatch(patch_dict, remote, fake_url)
    # Canned CQ history counters consumed by users of these mocks.
    patch.pass_count = 0
    patch.fail_count = 1
    patch.total_fail_count = 3
    return patch

  def GetPatches(self, how_many=1, always_use_list=False, **kwargs):
    """Get a sequential list of patches.

    Args:
      how_many: How many patches to return.
      always_use_list: Whether to use a list for a single item list.
      **kwargs: Keyword arguments for self.MockPatch.
    """
    patches = [self.MockPatch(**kwargs) for _ in xrange(how_many)]
    if self.patch_mock:
      for i, patch in enumerate(patches):
        self.patch_mock.SetGerritDependencies(patch, patches[:i + 1])
    if how_many == 1 and not always_use_list:
      return patches[0]
    return patches
| |
#
# DrawingMixin.py -- enable drawing capabilities.
#
# Eric Jeschke (eric@naoj.org)
#
# Copyright (c) Eric R. Jeschke. All rights reserved.
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
import time
import math
from .CanvasMixin import CanvasMixin
class DrawingMixin(object):
"""The DrawingMixin is a mixin class that adds drawing capability for
some of the basic CanvasObject-derived types. The setSurface method is
used to associate a ImageViewCanvas object for layering on.
"""
    def __init__(self):
        """Initialize drawing/editing/selection state.

        Must be mixed in alongside CanvasMixin and Callback.Callbacks.
        """
        assert isinstance(self, CanvasMixin), "Missing CanvasMixin class"
        from .CanvasObject import drawCatalog
        # For interactive drawing
        self.candraw = False
        # Maps draw type name -> CanvasObject class.
        self.drawDict = drawCatalog
        drawtypes = self.drawDict.keys()
        self.drawtypes = []
        # Keep a canonical ordering of the supported draw types.
        for key in ['point', 'line', 'circle', 'ellipse', 'square',
                    'rectangle', 'box', 'polygon', 'freepolygon',
                    'path', 'freepath', 'beziercurve',
                    'triangle', 'righttriangle', 'equilateraltriangle',
                    'ruler', 'compass', 'text']:
            if key in drawtypes:
                self.drawtypes.append(key)
        self.t_drawtype = 'point'
        self.t_drawparams = {}
        # Anchor point and accumulated vertices of the draw in progress.
        self._start_x = 0
        self._start_y = 0
        self._points = []
        # For interactive editing
        self.canedit = False
        self._cp_index = None
        self._edit_obj = None
        self._edit_status = False
        # For selection
        self._selected = []
        # this controls whether an object is automatically selected for
        # editing immediately after being drawn
        self.edit_follows_draw = False
        self._processTime = 0.0
        # time delta threshold for deciding whether to update the image
        self._deltaTime = 0.020
        self._draw_obj = None
        self._draw_crdmap = None
        # NOTE: must be mixed in with a Callback.Callbacks
        for name in ('draw-event', 'draw-down', 'draw-move', 'draw-up',
                     'draw-scroll', 'keydown-poly_add', 'keydown-poly_del',
                     'keydown-edit_del', 'edit-event', 'edit-down',
                     'edit-move', 'edit-up',
                     'edit-select', 'edit-scroll', 'drag-drop'):
            self.enable_callback(name)
    def setSurface(self, viewer):
        """Associate `viewer` with this canvas and wire up draw/edit callbacks."""
        self.viewer = viewer
        # register this canvas for events of interest
        self.add_callback('draw-down', self.draw_start, viewer)
        self.add_callback('draw-move', self.draw_motion, viewer)
        self.add_callback('draw-up', self.draw_stop, viewer)
        self.add_callback('keydown-poly_add', self.draw_poly_add, viewer)
        self.add_callback('keydown-poly_del', self.draw_poly_delete, viewer)
        self.add_callback('keydown-edit_del', self._edit_delete_cb, viewer)
        self.add_callback('edit-down', self.edit_start, viewer)
        self.add_callback('edit-move', self.edit_motion, viewer)
        self.add_callback('edit-up', self.edit_stop, viewer)
        ## self.add_callback('edit-up', self.select_stop, viewer)
        #self.add_callback('edit-scroll', self._edit_scale_cb, viewer)
        self.add_callback('edit-scroll', self._edit_rotate_cb, viewer)
    def getSurface(self):
        """Return the viewer registered via setSurface()."""
        return self.viewer
    def draw(self, viewer):
        """Draw the canvas, plus any in-progress (rubber-banded) object."""
        super(DrawingMixin, self).draw(viewer)
        if self._draw_obj:
            self._draw_obj.draw(viewer)
##### DRAWING LOGIC #####
    def _draw_update(self, data_x, data_y, viewer):
        """Rebuild the rubber-banded object for the current cursor position.

        Creates a fresh instance of the active draw type spanning the
        anchor point and (data_x, data_y), installs it as self._draw_obj
        and, rate-limited by _deltaTime, refreshes the canvas.
        """
        klass = self.drawDict[self.t_drawtype]
        obj = None
        # Convert data coords into the chosen drawing coordinate system.
        x, y = self._draw_crdmap.data_to(data_x, data_y)
        if self.t_drawtype == 'point':
            radius = max(abs(self._start_x - x),
                         abs(self._start_y - y))
            obj = klass(self._start_x, self._start_y, radius,
                        **self.t_drawparams)
        elif self.t_drawtype == 'compass':
            radius = max(abs(self._start_x - x),
                         abs(self._start_y - y))
            obj = klass(self._start_x, self._start_y,
                        radius, **self.t_drawparams)
        elif self.t_drawtype == 'rectangle':
            obj = klass(self._start_x, self._start_y,
                        x, y, **self.t_drawparams)
        elif self.t_drawtype == 'square':
            # Clamp both sides to the longer drag axis, preserving the
            # drag direction.  NOTE: cmp() is Python 2 only.
            len_x = self._start_x - x
            len_y = self._start_y - y
            length = max(abs(len_x), abs(len_y))
            len_x = cmp(len_x, 0) * length
            len_y = cmp(len_y, 0) * length
            obj = klass(self._start_x, self._start_y,
                        self._start_x-len_x, self._start_y-len_y,
                        **self.t_drawparams)
        elif self.t_drawtype == 'equilateraltriangle':
            len_x = self._start_x - x
            len_y = self._start_y - y
            length = max(abs(len_x), abs(len_y))
            obj = klass(self._start_x, self._start_y,
                        length, length, **self.t_drawparams)
        elif self.t_drawtype in ('box', 'ellipse', 'triangle'):
            xradius = abs(self._start_x - x)
            yradius = abs(self._start_y - y)
            obj = klass(self._start_x, self._start_y, xradius, yradius,
                        **self.t_drawparams)
        elif self.t_drawtype == 'circle':
            # Euclidean distance from the anchor point.
            radius = math.sqrt(abs(self._start_x - x)**2 +
                               abs(self._start_y - y)**2 )
            obj = klass(self._start_x, self._start_y, radius,
                        **self.t_drawparams)
        elif self.t_drawtype == 'line':
            obj = klass(self._start_x, self._start_y, x, y,
                        **self.t_drawparams)
        elif self.t_drawtype == 'righttriangle':
            obj = klass(self._start_x, self._start_y, x, y,
                        **self.t_drawparams)
        elif self.t_drawtype in ('polygon', 'path', 'beziercurve'):
            # Stored vertices plus the current cursor position.
            points = list(self._points)
            points.append((x, y))
            obj = klass(points, **self.t_drawparams)
        elif self.t_drawtype in ('freepolygon', 'freepath'):
            # Freehand types accumulate every motion point.
            self._points.append((x, y))
            points = list(self._points)
            obj = klass(points, **self.t_drawparams)
        elif self.t_drawtype == 'text':
            obj = klass(self._start_x, self._start_y, **self.t_drawparams)
        elif self.t_drawtype == 'ruler':
            obj = klass(self._start_x, self._start_y, x, y,
                        **self.t_drawparams)
        if obj is not None:
            obj.initialize(None, viewer, self.logger)
            #obj.initialize(None, viewer, viewer.logger)
            self._draw_obj = obj
            if time.time() - self._processTime > self._deltaTime:
                self.process_drawing(viewer)
        return True
    def draw_start(self, canvas, event, data_x, data_y, viewer):
        """Button-press callback: begin drawing at (data_x, data_y)."""
        if not self.candraw or (viewer != event.viewer):
            return False
        self._draw_obj = None
        # get the drawing coordinate type (default 'data')
        crdtype = self.t_drawparams.get('coord', 'data')
        self._draw_crdmap = viewer.get_coordmap(crdtype)
        # record the start point
        x, y = self._draw_crdmap.data_to(data_x, data_y)
        self._points = [(x, y)]
        self._start_x, self._start_y = x, y
        # NOTE(review): x, y are already mapped coordinates here, yet
        # _draw_update() maps its arguments again via data_to() --
        # harmless for the default 'data' map; verify for other maps.
        self._draw_update(x, y, viewer)
        self.process_drawing(viewer)
        return True
    def draw_stop(self, canvas, event, data_x, data_y, viewer):
        """Button-release callback: finalize and commit the drawn object."""
        if not self.candraw or (viewer != event.viewer):
            return False
        self._draw_update(data_x, data_y, viewer)
        obj, self._draw_obj = self._draw_obj, None
        self._points = []
        if obj:
            objtag = self.add(obj, redraw=True)
            self.make_callback('draw-event', objtag)
            if self.edit_follows_draw:
                # Immediately select the freshly drawn object for editing.
                self.clear_selected()
                self.edit_select(obj)
                self.make_callback('edit-select', self._edit_obj)
            return True
        else:
            # NOTE(review): this branch implicitly returns None (falsy),
            # unlike the True above -- confirm that is intentional.
            self.process_drawing(viewer)
def draw_motion(self, canvas, event, data_x, data_y, viewer):
if not self.candraw or (viewer != event.viewer):
return False
self._draw_update(data_x, data_y, viewer)
return True
    def draw_poly_add(self, canvas, event, data_x, data_y, viewer):
        """Key callback: append a vertex to the polygon/path being drawn.

        For bezier curves, points are only added while fewer than 3 are
        stored.  When nothing is being drawn, delegates to the editing
        variant.
        """
        if not self.candraw or (viewer != event.viewer):
            return False
        if self._draw_obj is None:
            return self.edit_poly_add(canvas, event, data_x, data_y, viewer)
        if self.t_drawtype in ('polygon', 'path'):
            x, y = self._draw_crdmap.data_to(data_x, data_y)
            self._points.append((x, y))
        elif self.t_drawtype == 'beziercurve' and len(self._points) < 3:
            x, y = self._draw_crdmap.data_to(data_x, data_y)
            self._points.append((x, y))
        return True
def draw_poly_delete(self, canvas, event, data_x, data_y, viewer):
if not self.candraw or (viewer != event.viewer):
return False
if self._draw_obj is None:
return self.edit_poly_del(canvas, event, data_x, data_y, viewer)
if self.t_drawtype in ('polygon', 'path', 'beziercurve'):
if len(self._points) > 0:
self._points.pop()
return True
    def is_drawing(self):
        """Return True if an interactive draw is currently in progress."""
        return self._draw_obj is not None
    def enable_draw(self, tf):
        """Enable (True) or disable (False) interactive drawing."""
        self.candraw = tf
    def set_drawcolor(self, colorname):
        """Set the default color used for newly drawn objects."""
        self.t_drawparams['color'] = colorname
def set_drawtype(self, drawtype, **drawparams):
drawtype = drawtype.lower()
assert drawtype in self.drawtypes, \
ValueError("Bad drawing type '%s': must be one of %s" % (
drawtype, self.drawtypes))
self.t_drawtype = drawtype
self.t_drawparams = drawparams.copy()
    def get_drawtypes(self):
        """Return the list of supported draw type names.

        NOTE: returns the internal list itself; callers must not mutate it.
        """
        return self.drawtypes
    def get_drawtype(self):
        """Return the name of the currently selected draw type."""
        return self.t_drawtype
def getDrawClass(self, drawtype):
drawtype = drawtype.lower()
klass = self.drawDict[drawtype]
return klass
def get_drawparams(self):
return self.t_drawparams.copy()
    def process_drawing(self, viewer):
        """Refresh the canvas and reset the rate-limit timestamp."""
        self._processTime = time.time()
        #viewer.redraw(whence=3)
        #self.redraw(whence=3)
        self.update_canvas()
##### EDITING LOGIC #####
    def get_edit_object(self):
        """Return the object currently selected for editing, or None."""
        return self._edit_obj
def is_editing(self):
return self.get_edit_obj() is not None
    def enable_edit(self, tf):
        """Enable (True) or disable (False) interactive editing."""
        self.canedit = tf
    def _edit_update(self, data_x, data_y, viewer):
        """Apply a drag at (data_x, data_y) to the object being edited.

        A control-point index < 0 means the whole object is being moved;
        otherwise the indexed control point is repositioned.
        """
        if (not self.canedit) or (self._cp_index is None):
            return False
        x, y = self._edit_obj.crdmap.data_to(data_x, data_y)
        if self._cp_index < 0:
            # Whole-object move; _start_x/_start_y hold the grab offset.
            self._edit_obj.move_to(x - self._start_x,
                                   y - self._start_y)
        else:
            # special hack for objects that have rot_deg attribute
            if hasattr(self._edit_obj, 'rot_deg') and (self._cp_index > 0):
                # Undo the object's rotation so the point lands correctly.
                rot_deg = - self._edit_obj.rot_deg
                xoff, yoff = self._edit_obj.get_center_pt()
                x, y = self._edit_obj.crdmap.rotate_pt(x, y, rot_deg,
                                                       xoff=xoff, yoff=yoff)
            self._edit_obj.set_edit_point(self._cp_index, (x, y))
        # Rate-limit canvas refreshes by _deltaTime.
        if time.time() - self._processTime > self._deltaTime:
            self.process_drawing(viewer)
        return True
    def _is_editable(self, obj, x, y, is_inside):
        """Selection predicate: point is inside `obj` and it allows editing."""
        return is_inside and obj.editable
    def _prepare_to_move(self, obj, data_x, data_y):
        """Select `obj` and record the grab offset for a whole-object move."""
        #print("moving an object")
        self.edit_select(obj)
        # -1 signals "move the whole object" to _edit_update().
        self._cp_index = -1
        ref_x, ref_y = self._edit_obj.get_reference_pt()
        x, y = obj.crdmap.data_to(data_x, data_y)
        self._start_x, self._start_y = x - ref_x, y - ref_y
    def edit_start(self, canvas, event, data_x, data_y, viewer):
        """Button-press callback: begin an edit (move/reshape/select) action.

        Resolution order: with no current selection, grab the top
        editable object under the cursor for a move.  Otherwise try each
        selected object's control points first, then containment, then
        fall back to selecting a fresh object (or clearing the selection).
        """
        if not self.canedit or (viewer != event.viewer):
            return False
        self._edit_tmp = self._edit_obj
        self._edit_status = False
        self._cp_index = None
        #shift_held = 'shift' in event.modifiers
        shift_held = False
        selects = self.get_selected()
        if len(selects) == 0:
            # <-- no objects already selected
            # check for objects at this location
            #print("getting items")
            objs = canvas.select_items_at(viewer, data_x, data_y,
                                          test=self._is_editable)
            #print("items: %s" % (str(objs)))
            if len(objs) == 0:
                # <-- no objects under cursor
                return False
            # pick top object
            obj = objs[-1]
            self._prepare_to_move(obj, data_x, data_y)
        else:
            self._edit_status = True
            # Ugh. Check each selected object's control points
            # for a match
            contains = []
            for obj in selects:
                #print("editing: checking for cp")
                #edit_pts = self._edit_obj.get_edit_points()
                edit_pts = list(map(lambda pt: obj.crdmap.to_data(*pt),
                                    obj.get_edit_points()))
                #print((self._edit_obj, dir(self._edit_obj)))
                #print(edit_pts)
                i = obj.get_pt(viewer, edit_pts, data_x, data_y,
                               obj.cap_radius)
                if i is not None:
                    #print("editing cp #%d" % (i))
                    # editing a control point from an existing object
                    self._edit_obj = obj
                    self._cp_index = i
                    self._edit_update(data_x, data_y, viewer)
                    return True
                if obj.contains(data_x, data_y):
                    contains.append(obj)
            # <-- no control points match, is there an object that contains
            # this point?
            if len(contains) > 0:
                # TODO?: make a compound object of contains and move it?
                obj = contains[-1]
                if self.is_selected(obj) and shift_held:
                    # deselecting object
                    self.select_clear(obj)
                else:
                    self._prepare_to_move(obj, data_x, data_y)
                ## Compound = self.getDrawClass('compoundobject')
                ## c_obj = Compound(*self.get_selected())
                ## c_obj.inherit_from(obj)
                ## self._prepare_to_move(c_obj, data_x, data_y)
            else:
                # <-- user clicked outside any selected item's control pt
                # and outside any selected item
                if not shift_held:
                    self.clear_selected()
                # see now if there is an unselected item at this location
                objs = canvas.select_items_at(viewer, data_x, data_y,
                                              test=self._is_editable)
                #print("items: %s" % (str(objs)))
                if len(objs) > 0:
                    # pick top object
                    obj = objs[-1]
                    if self.num_selected() > 0:
                        # if there are already some selected items, then
                        # add this object to the selection, make a compound
                        # object
                        self.edit_select(obj)
                        Compound = self.getDrawClass('compoundobject')
                        c_obj = Compound(*self.get_selected())
                        c_obj.inherit_from(obj)
                        self._prepare_to_move(c_obj, data_x, data_y)
                    else:
                        # otherwise just start over with this new object
                        self._prepare_to_move(obj, data_x, data_y)
        self.process_drawing(viewer)
        return True
def edit_stop(self, canvas, event, data_x, data_y, viewer):
    """Button-release handler that finishes an edit interaction.

    Fires 'edit-select' if which-object-is-being-edited changed during
    the drag, applies the final position, and fires 'edit-event' for a
    completed edit.  Returns True if the event was consumed.
    """
    if not self.canedit or (viewer != event.viewer):
        return False

    if (self._edit_tmp != self._edit_obj) or (
            (self._edit_obj is not None) and
            (self._edit_status != self._edit_obj.is_editing())):
        # <-- editing status has changed
        self.make_callback('edit-select', self._edit_obj)

    if (self._edit_obj is not None) and (self._cp_index is not None):
        # <-- an object has been edited
        self._edit_update(data_x, data_y, viewer)
        self._cp_index = None
        self.make_callback('edit-event', self._edit_obj)

    return True
def edit_motion(self, canvas, event, data_x, data_y, viewer):
    """Pointer-motion handler: forward the drag to _edit_update()."""
    if not self.canedit or (viewer != event.viewer):
        return False
    if (self._edit_obj is None) or (self._cp_index is None):
        return False
    self._edit_update(data_x, data_y, viewer)
    return True
def edit_poly_add(self, canvas, event, data_x, data_y, viewer):
    """Insert a vertex into the polygon/path currently being edited.

    If the cursor lies within 8 px of one of the object's line segments,
    a new control point is inserted on that segment.  Returns False if
    editing is disabled or the event belongs to another viewer, else True.
    """
    if not self.canedit or (viewer != event.viewer):
        return False
    obj = self._edit_obj
    if (obj is not None) and obj.is_editing() and \
            (obj.kind in ('polygon', 'path')):
        self.logger.debug("checking points")
        # determine which line we are adding a point to
        points = list(obj.get_points())
        if obj.kind == 'polygon':
            # close the figure so the segment last->first is checked too
            # NOTE(review): the temporary closing point ends up in
            # obj.points on insert, as in the original -- verify intent.
            points = points + [points[0]]
        x0, y0 = obj.crdmap.to_data(*points[0])
        insert = None
        # BUG FIX: was range(1, len(points[1:])), which both copied the
        # list needlessly and stopped one segment early, so a click near
        # the last segment was never detected.
        for i in range(1, len(points)):
            x1, y1 = obj.crdmap.to_data(*points[i])
            self.logger.debug("checking line %d" % (i))
            if obj.within_line(viewer, data_x, data_y, x0, y0, x1, y1,
                               8):
                insert = i
                break
            x0, y0 = x1, y1
        if insert is not None:
            self.logger.debug("inserting point")
            # Point near a line
            x, y = obj.crdmap.data_to(data_x, data_y)
            points.insert(insert, (x, y))
            obj.points = points
            self.process_drawing(viewer)
        else:
            self.logger.debug("cursor not near a line")
    return True
def edit_poly_del(self, canvas, event, data_x, data_y, viewer):
    """Delete the polygon/path vertex under the cursor, if any."""
    if not self.canedit or (viewer != event.viewer):
        return False
    obj = self._edit_obj
    if (obj is not None) and obj.is_editing() and \
            obj.kind in ('polygon', 'path'):
        self.logger.debug("checking points")
        # locate the first vertex within hit distance of the cursor
        victim = None
        pts = list(obj.get_points())
        for idx, pt in enumerate(pts):
            vx, vy = obj.crdmap.to_data(*pt)
            self.logger.debug("checking line %d" % (idx))
            if obj.within_radius(viewer, data_x, data_y, vx, vy,
                                 8):
                victim = idx
                break
        if victim is not None:
            self.logger.debug("deleting point")
            pts.pop(victim)
            obj.points = pts
            self.process_drawing(viewer)
        else:
            self.logger.debug("cursor not near a point")
    return True
def edit_rotate(self, delta_deg, viewer):
    """Rotate the object being edited by delta_deg degrees and redraw."""
    obj = self._edit_obj
    if obj is None:
        return False
    obj.rotate_by(delta_deg)
    self.process_drawing(viewer)
    self.make_callback('edit-event', obj)
    return True
def _edit_rotate_cb(self, canvas, event, viewer, msg=True):
if not self.canedit or (viewer != event.viewer):
return False
bd = viewer.get_bindings()
amount = event.amount
if bd.get_direction(event.direction) == 'down':
amount = - amount
return self.edit_rotate(amount)
def edit_scale(self, delta_x, delta_y, viewer):
    """Scale the object being edited by (delta_x, delta_y) and redraw."""
    obj = self._edit_obj
    if obj is None:
        return False
    obj.scale_by(delta_x, delta_y)
    self.process_drawing(viewer)
    self.make_callback('edit-event', obj)
    return True
def _edit_scale_cb(self, canvas, event, viewer, msg=True):
if not self.canedit or (viewer != event.viewer):
return False
bd = viewer.get_bindings()
if bd.get_direction(event.direction) == 'down':
amount = 0.9
else:
amount = 1.1
return self.edit_scale(amount, amount)
def edit_delete(self):
    """Delete the object currently being edited, if any, and notify."""
    if (self._edit_obj is not None) and self._edit_obj.is_editing():
        victim = self._edit_obj
        self._edit_obj = None
        self.deleteObject(victim)
    # NOTE(review): indentation reconstructed from mangled source; the
    # callback is fired unconditionally as in upstream -- verify.
    self.make_callback('edit-event', self._edit_obj)
    return True
def _edit_delete_cb(self, canvas, event, data_x, data_y, viewer):
if not self.canedit or (viewer != event.viewer):
return False
return self.edit_delete()
def edit_select(self, newobj):
    """Make newobj the active edit object, adding it to the selection."""
    if not self.canedit:
        return False
    # add new object to selection
    self.select_add(newobj)
    self._edit_obj = newobj
    return True
##### SELECTION LOGIC #####
def _is_selectable(self, obj, x, y, is_inside):
return is_inside and obj.editable
#return is_inside
def is_selected(self, obj):
    """Return True if obj is currently in the selection list."""
    return obj in self._selected
def get_selected(self):
    """Return the (live, not copied) list of selected objects."""
    return self._selected
def num_selected(self):
    """Return how many objects are currently selected."""
    return len(self._selected)
def clear_selected(self):
    """Deselect every currently selected object."""
    # iterate over a copy: select_clear() mutates self._selected
    for obj in tuple(self._selected):
        self.select_clear(obj)
def select_clear(self, obj):
    """Drop obj from the selection (if present) and clear its edit flag."""
    if obj in self._selected:
        self._selected.remove(obj)
    # NOTE(review): indentation reconstructed from mangled source; the
    # edit flag is cleared unconditionally as in upstream -- verify.
    obj.set_edit(False)
def select_add(self, obj):
    """Append obj to the selection (if absent) and set its edit flag."""
    if obj not in self._selected:
        self._selected.append(obj)
    # NOTE(review): indentation reconstructed from mangled source; the
    # edit flag is set unconditionally as in upstream -- verify.
    obj.set_edit(True)
def select_stop(self, canvas, button, data_x, data_y, viewer):
    """Toggle selection of the topmost selectable object under the cursor.

    Returns False if nothing selectable is under the cursor, else True.
    """
    hits = canvas.select_items_at(viewer, data_x, data_y,
                                  test=self._is_selectable)
    if not hits:
        # no objects
        return False

    # pick top (last-drawn) object and toggle its selection state
    obj = hits[-1]
    if obj in self._selected:
        self._selected.remove(obj)
        obj.set_edit(False)
        obj = None
    else:
        self._selected.append(obj)
        obj.set_edit(True)

    self.logger.debug("selected: %s" % (str(self._selected)))
    self.process_drawing(viewer)
    return True
def group_selection(self):
    """Replace the current selection with one compound object.

    Wraps every currently selected object in a single 'compoundobject'
    so the group can be moved/edited as a unit.
    """
    Compound = self.getDrawClass('compoundobject')
    # BUG FIX: the compound constructor takes the member objects as
    # *args (see the Compound(*self.get_selected()) usage in edit_start),
    # and the result variable was misspelled 'comp_obj', which raised
    # NameError at runtime.
    c_obj = Compound(*self._selected)
    self._selected = [c_obj]
#END
| |
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import re
import shutil
import sys
import StringIO
import tempfile
import unittest
from telemetry import benchmark
from telemetry import story
from telemetry.core import exceptions
from telemetry.core import util
from telemetry import decorators
from telemetry.internal.browser import browser_finder
from telemetry.internal.browser import user_agent
from telemetry.internal.results import results_options
from telemetry.internal import story_runner
from telemetry.internal.testing.page_sets import example_domain
from telemetry.internal.util import exception_formatter
from telemetry.page import page as page_module
from telemetry.page import page_test
from telemetry.page import shared_page_state
from telemetry.util import image_util
from telemetry.testing import fakes
from telemetry.testing import options_for_unittests
from telemetry.testing import system_stub
SIMPLE_CREDENTIALS_STRING = """
{
"test": {
"username": "example",
"password": "asdf"
}
}
"""
class DummyTest(page_test.PageTest):
  """A page test whose measurement step is a no-op."""

  def ValidateAndMeasurePage(self, *_):
    pass
def SetUpStoryRunnerArguments(options):
  """Register story_runner's command-line args and process *options* in place."""
  arg_parser = options.CreateParser()
  story_runner.AddCommandLineArgs(arg_parser)
  options.MergeDefaultValues(arg_parser.get_default_values())
  story_runner.ProcessCommandLineArgs(arg_parser, options)
class EmptyMetadataForTest(benchmark.BenchmarkMetadata):
  """Benchmark metadata with an empty name, for tests that need metadata."""

  def __init__(self):
    super(EmptyMetadataForTest, self).__init__('')
class StubCredentialsBackend(object):
  """Fake credentials backend that records whether login hooks fired."""

  def __init__(self, login_return_value):
    # value LoginNeeded() reports back to the caller
    self.login_return_value = login_return_value
    self.did_get_login = False
    self.did_get_login_no_longer_needed = False

  @property
  def credentials_type(self):
    return 'test'

  def LoginNeeded(self, *_):
    self.did_get_login = True
    return self.login_return_value

  def LoginNoLongerNeeded(self, _):
    self.did_get_login_no_longer_needed = True
def GetSuccessfulPageRuns(results):
  """Return the runs in *results* that either passed or were skipped."""
  successful = []
  for run in results.all_page_runs:
    if run.ok or run.skipped:
      successful.append(run)
  return successful
def CaptureStderr(func, output_buffer):
  """Wrap *func* so sys.stderr is redirected to *output_buffer* while it runs."""
  def wrapper(*args, **kwargs):
    saved_stderr = sys.stderr
    sys.stderr = output_buffer
    try:
      return func(*args, **kwargs)
    finally:
      # always restore, even if func raises
      sys.stderr = saved_stderr
  return wrapper
# TODO: remove test cases that use real browsers and replace with a
# story_runner or shared_page_state unittest that tests the same logic.
class ActualPageRunEndToEndTests(unittest.TestCase):
  """End-to-end tests that drive story_runner against a real browser.

  Each test builds a small StorySet, runs it via story_runner.Run(), and
  asserts on the results.  setUp/tearDown patch the shared exception
  formatter so tests can assert on captured formatted tracebacks.
  """
  # TODO(nduca): Move the basic "test failed, test succeeded" tests from
  # page_test_unittest to here.

  def setUp(self):
    self._story_runner_logging_stub = None
    # receives whatever the patched exception formatter prints
    self._formatted_exception_buffer = StringIO.StringIO()
    self._original_formatter = exception_formatter.PrintFormattedException

  def tearDown(self):
    self.RestoreExceptionFormatter()

  def CaptureFormattedException(self):
    """Redirect formatted exceptions to our buffer and stub out logging."""
    exception_formatter.PrintFormattedException = CaptureStderr(
        exception_formatter.PrintFormattedException,
        self._formatted_exception_buffer)
    self._story_runner_logging_stub = system_stub.Override(
        story_runner, ['logging'])

  @property
  def formatted_exception(self):
    return self._formatted_exception_buffer.getvalue()

  def RestoreExceptionFormatter(self):
    exception_formatter.PrintFormattedException = self._original_formatter
    if self._story_runner_logging_stub:
      self._story_runner_logging_stub.Restore()
      self._story_runner_logging_stub = None

  def assertFormattedExceptionIsEmpty(self):
    self.longMessage = False
    self.assertEquals(
        '', self.formatted_exception,
        msg='Expected empty formatted exception: actual=%s' % '\n > '.join(
            self.formatted_exception.split('\n')))

  def assertFormattedExceptionOnlyHas(self, expected_exception_name):
    self.longMessage = True
    actual_exception_names = re.findall(r'^Traceback.*?^(\w+)',
                                        self.formatted_exception,
                                        re.DOTALL | re.MULTILINE)
    self.assertEquals([expected_exception_name], actual_exception_names,
                      msg='Full formatted exception: %s' % '\n > '.join(
                          self.formatted_exception.split('\n')))

  def testRaiseBrowserGoneExceptionFromRestartBrowserBeforeEachPage(self):
    self.CaptureFormattedException()
    story_set = story.StorySet()
    story_set.AddStory(page_module.Page(
        'file://blank.html', story_set, base_dir=util.GetUnittestDataDir()))
    story_set.AddStory(page_module.Page(
        'file://blank.html', story_set, base_dir=util.GetUnittestDataDir()))

    class Test(page_test.PageTest):
      def __init__(self, *args):
        super(Test, self).__init__(
            *args, needs_browser_restart_after_each_page=True)
        self.run_count = 0

      def RestartBrowserBeforeEachPage(self):
        # the first restart raises; subsequent restarts succeed
        old_run_count = self.run_count
        self.run_count += 1
        if old_run_count == 0:
          raise exceptions.BrowserGoneException(None)
        return self._needs_browser_restart_after_each_page

      def ValidateAndMeasurePage(self, page, tab, results):
        pass

    options = options_for_unittests.GetCopy()
    options.output_formats = ['none']
    options.suppress_gtest_report = True
    test = Test()
    SetUpStoryRunnerArguments(options)
    results = results_options.CreateResults(EmptyMetadataForTest(), options)
    story_runner.Run(test, story_set, options, results)
    self.assertEquals(2, test.run_count)
    self.assertEquals(1, len(GetSuccessfulPageRuns(results)))
    self.assertEquals(1, len(results.failures))
    self.assertFormattedExceptionIsEmpty()

  def testNeedsBrowserRestartAfterEachPage(self):
    self.CaptureFormattedException()
    story_set = story.StorySet()
    story_set.AddStory(page_module.Page(
        'file://blank.html', story_set, base_dir=util.GetUnittestDataDir()))
    story_set.AddStory(page_module.Page(
        'file://blank.html', story_set, base_dir=util.GetUnittestDataDir()))

    class Test(page_test.PageTest):
      def __init__(self, *args, **kwargs):
        super(Test, self).__init__(*args, **kwargs)
        self.browser_starts = 0

      def DidStartBrowser(self, *args):
        super(Test, self).DidStartBrowser(*args)
        self.browser_starts += 1

      def ValidateAndMeasurePage(self, page, tab, results):
        pass

    options = options_for_unittests.GetCopy()
    options.output_formats = ['none']
    options.suppress_gtest_report = True
    test = Test(needs_browser_restart_after_each_page=True)
    SetUpStoryRunnerArguments(options)
    results = results_options.CreateResults(EmptyMetadataForTest(), options)
    story_runner.Run(test, story_set, options, results)
    self.assertEquals(2, len(GetSuccessfulPageRuns(results)))
    # one browser start per page since a restart is forced between pages
    self.assertEquals(2, test.browser_starts)
    self.assertFormattedExceptionIsEmpty()

  def testCredentialsWhenLoginFails(self):
    self.CaptureFormattedException()
    credentials_backend = StubCredentialsBackend(login_return_value=False)
    did_run = self.runCredentialsTest(credentials_backend)
    assert credentials_backend.did_get_login
    assert not credentials_backend.did_get_login_no_longer_needed
    assert not did_run
    self.assertFormattedExceptionIsEmpty()

  def testCredentialsWhenLoginSucceeds(self):
    credentials_backend = StubCredentialsBackend(login_return_value=True)
    did_run = self.runCredentialsTest(credentials_backend)
    assert credentials_backend.did_get_login
    assert credentials_backend.did_get_login_no_longer_needed
    assert did_run

  def runCredentialsTest(self, credentials_backend):
    """Run one page that requires 'test' credentials; return whether it ran."""
    story_set = story.StorySet()
    did_run = [False]
    try:
      with tempfile.NamedTemporaryFile(delete=False) as f:
        page = page_module.Page(
            'file://blank.html', story_set, base_dir=util.GetUnittestDataDir(),
            credentials_path=f.name)
        page.credentials = "test"
        story_set.AddStory(page)
        f.write(SIMPLE_CREDENTIALS_STRING)

      class TestThatInstallsCredentialsBackend(page_test.PageTest):
        def __init__(self, credentials_backend):
          super(TestThatInstallsCredentialsBackend, self).__init__()
          self._credentials_backend = credentials_backend

        def DidStartBrowser(self, browser):
          browser.credentials.AddBackend(self._credentials_backend)

        def ValidateAndMeasurePage(self, *_):
          did_run[0] = True

      test = TestThatInstallsCredentialsBackend(credentials_backend)
      options = options_for_unittests.GetCopy()
      options.output_formats = ['none']
      options.suppress_gtest_report = True
      SetUpStoryRunnerArguments(options)
      results = results_options.CreateResults(EmptyMetadataForTest(), options)
      story_runner.Run(test, story_set, options, results)
    finally:
      os.remove(f.name)
    return did_run[0]

  @decorators.Disabled('chromeos')  # crbug.com/483212
  def testUserAgent(self):
    story_set = story.StorySet()
    page = page_module.Page(
        'file://blank.html', story_set, base_dir=util.GetUnittestDataDir(),
        shared_page_state_class=shared_page_state.SharedTabletPageState)
    story_set.AddStory(page)

    class TestUserAgent(page_test.PageTest):
      def ValidateAndMeasurePage(self, page, tab, results):
        del page, results  # unused
        actual_user_agent = tab.EvaluateJavaScript('window.navigator.userAgent')
        expected_user_agent = user_agent.UA_TYPE_MAPPING['tablet']
        assert actual_user_agent.strip() == expected_user_agent

        # This is so we can check later that the test actually made it into this
        # function. Previously it was timing out before even getting here, which
        # should fail, but since it skipped all the asserts, it slipped by.
        self.hasRun = True  # pylint: disable=attribute-defined-outside-init

    test = TestUserAgent()
    options = options_for_unittests.GetCopy()
    options.output_formats = ['none']
    options.suppress_gtest_report = True
    SetUpStoryRunnerArguments(options)
    results = results_options.CreateResults(EmptyMetadataForTest(), options)
    story_runner.Run(test, story_set, options, results)
    self.assertTrue(hasattr(test, 'hasRun') and test.hasRun)

  # Ensure that story_runner forces exactly 1 tab before running a page.
  @decorators.Enabled('has tabs')
  def testOneTab(self):
    story_set = story.StorySet()
    page = page_module.Page(
        'file://blank.html', story_set, base_dir=util.GetUnittestDataDir())
    story_set.AddStory(page)

    class TestOneTab(page_test.PageTest):
      def DidStartBrowser(self, browser):
        browser.tabs.New()

      def ValidateAndMeasurePage(self, page, tab, results):
        del page, results  # unused
        assert len(tab.browser.tabs) == 1

    test = TestOneTab()
    options = options_for_unittests.GetCopy()
    options.output_formats = ['none']
    options.suppress_gtest_report = True
    SetUpStoryRunnerArguments(options)
    results = results_options.CreateResults(EmptyMetadataForTest(), options)
    story_runner.Run(test, story_set, options, results)

  # Ensure that story_runner allows >1 tab for multi-tab test.
  @decorators.Enabled('has tabs')
  def testMultipleTabsOkayForMultiTabTest(self):
    story_set = story.StorySet()
    page = page_module.Page(
        'file://blank.html', story_set, base_dir=util.GetUnittestDataDir())
    story_set.AddStory(page)

    class TestMultiTabs(page_test.PageTest):
      def TabForPage(self, page, browser):
        del page  # unused
        return browser.tabs.New()

      def ValidateAndMeasurePage(self, page, tab, results):
        del page, results  # unused
        assert len(tab.browser.tabs) == 2

    test = TestMultiTabs()
    options = options_for_unittests.GetCopy()
    options.output_formats = ['none']
    options.suppress_gtest_report = True
    SetUpStoryRunnerArguments(options)
    results = results_options.CreateResults(EmptyMetadataForTest(), options)
    story_runner.Run(test, story_set, options, results)

  # Ensure that story_runner allows the test to customize the browser
  # before it launches.
  def testBrowserBeforeLaunch(self):
    story_set = story.StorySet()
    page = page_module.Page(
        'file://blank.html', story_set, base_dir=util.GetUnittestDataDir())
    story_set.AddStory(page)

    class TestBeforeLaunch(page_test.PageTest):
      def __init__(self):
        super(TestBeforeLaunch, self).__init__()
        self._did_call_will_start = False
        self._did_call_did_start = False

      def WillStartBrowser(self, platform):
        self._did_call_will_start = True
        # TODO(simonjam): Test that the profile is available.

      def DidStartBrowser(self, browser):
        assert self._did_call_will_start
        self._did_call_did_start = True

      def ValidateAndMeasurePage(self, *_):
        assert self._did_call_did_start

    test = TestBeforeLaunch()
    options = options_for_unittests.GetCopy()
    options.output_formats = ['none']
    options.suppress_gtest_report = True
    SetUpStoryRunnerArguments(options)
    results = results_options.CreateResults(EmptyMetadataForTest(), options)
    story_runner.Run(test, story_set, options, results)

  def testRunPageWithStartupUrl(self):
    num_times_browser_closed = [0]

    class TestSharedState(shared_page_state.SharedPageState):
      def _StopBrowser(self):
        super(TestSharedState, self)._StopBrowser()
        num_times_browser_closed[0] += 1

    story_set = story.StorySet()
    page = page_module.Page(
        'file://blank.html', story_set, base_dir=util.GetUnittestDataDir(),
        startup_url='about:blank', shared_page_state_class=TestSharedState)
    story_set.AddStory(page)

    class Measurement(page_test.PageTest):
      def __init__(self):
        super(Measurement, self).__init__()

      def ValidateAndMeasurePage(self, page, tab, results):
        del page, tab, results  # not used

    options = options_for_unittests.GetCopy()
    options.page_repeat = 2
    options.output_formats = ['none']
    options.suppress_gtest_report = True
    if not browser_finder.FindBrowser(options):
      return
    test = Measurement()
    SetUpStoryRunnerArguments(options)
    results = results_options.CreateResults(EmptyMetadataForTest(), options)
    story_runner.Run(test, story_set, options, results)
    self.assertEquals('about:blank', options.browser_options.startup_url)
    # _StopBrowser should be called 3 times: after browser restarts, after page
    # 2 has run and in the TearDownState after all the pages have run.
    self.assertEquals(num_times_browser_closed[0], 3)

  # Ensure that story_runner calls cleanUp when a page run fails.
  def testCleanUpPage(self):
    story_set = story.StorySet()
    page = page_module.Page(
        'file://blank.html', story_set, base_dir=util.GetUnittestDataDir())
    story_set.AddStory(page)

    class Test(page_test.PageTest):
      def __init__(self):
        super(Test, self).__init__()
        self.did_call_clean_up = False

      def ValidateAndMeasurePage(self, *_):
        raise page_test.Failure

      def DidRunPage(self, platform):
        del platform  # unused
        self.did_call_clean_up = True

    test = Test()
    options = options_for_unittests.GetCopy()
    options.output_formats = ['none']
    options.suppress_gtest_report = True
    SetUpStoryRunnerArguments(options)
    results = results_options.CreateResults(EmptyMetadataForTest(), options)
    story_runner.Run(test, story_set, options, results)
    assert test.did_call_clean_up

  # Ensure skipping the test if shared state cannot be run on the browser.
  def testSharedPageStateCannotRunOnBrowser(self):
    story_set = story.StorySet()

    class UnrunnableSharedState(shared_page_state.SharedPageState):
      def CanRunOnBrowser(self, browser_info, page):
        del browser_info, page  # unused
        return False

      def ValidateAndMeasurePage(self, _):
        pass

    story_set.AddStory(page_module.Page(
        url='file://blank.html', page_set=story_set,
        base_dir=util.GetUnittestDataDir(),
        shared_page_state_class=UnrunnableSharedState))

    class Test(page_test.PageTest):
      def __init__(self, *args, **kwargs):
        super(Test, self).__init__(*args, **kwargs)
        self.will_navigate_to_page_called = False

      def ValidateAndMeasurePage(self, *args):
        del args  # unused
        raise Exception('Exception should not be thrown')

      def WillNavigateToPage(self, page, tab):
        del page, tab  # unused
        self.will_navigate_to_page_called = True

    test = Test()
    options = options_for_unittests.GetCopy()
    options.output_formats = ['none']
    options.suppress_gtest_report = True
    SetUpStoryRunnerArguments(options)
    results = results_options.CreateResults(EmptyMetadataForTest(), options)
    story_runner.Run(test, story_set, options, results)
    self.assertFalse(test.will_navigate_to_page_called)
    # the page is counted as a successful (skipped) run, not a failure
    self.assertEquals(1, len(GetSuccessfulPageRuns(results)))
    self.assertEquals(1, len(results.skipped_values))
    self.assertEquals(0, len(results.failures))

  def testRunPageWithProfilingFlag(self):
    story_set = story.StorySet()
    story_set.AddStory(page_module.Page(
        'file://blank.html', story_set, base_dir=util.GetUnittestDataDir()))

    class Measurement(page_test.PageTest):
      def ValidateAndMeasurePage(self, page, tab, results):
        pass

    options = options_for_unittests.GetCopy()
    options.output_formats = ['none']
    options.suppress_gtest_report = True
    options.reset_results = None
    options.upload_results = None
    options.results_label = None
    options.output_dir = tempfile.mkdtemp()
    options.profiler = 'trace'
    try:
      SetUpStoryRunnerArguments(options)
      results = results_options.CreateResults(EmptyMetadataForTest(), options)
      story_runner.Run(Measurement(), story_set, options, results)
      self.assertEquals(1, len(GetSuccessfulPageRuns(results)))
      self.assertEquals(0, len(results.failures))
      self.assertEquals(0, len(results.all_page_specific_values))
      # the 'trace' profiler should have written a zip next to the results
      self.assertTrue(os.path.isfile(
          os.path.join(options.output_dir, 'blank_html.zip')))
    finally:
      shutil.rmtree(options.output_dir)

  def _RunPageTestThatRaisesAppCrashException(self, test, max_failures):
    """Helper: run *test* over 5 always-crashing pages; return the results."""
    class TestPage(page_module.Page):
      def RunNavigateSteps(self, _):
        raise exceptions.AppCrashException

    story_set = story.StorySet()
    for _ in range(5):
      story_set.AddStory(
          TestPage('file://blank.html', story_set,
                   base_dir=util.GetUnittestDataDir()))
    options = options_for_unittests.GetCopy()
    options.output_formats = ['none']
    options.suppress_gtest_report = True
    SetUpStoryRunnerArguments(options)
    results = results_options.CreateResults(EmptyMetadataForTest(), options)
    story_runner.Run(test, story_set, options, results,
                     max_failures=max_failures)
    return results

  def testSingleTabMeansCrashWillCauseFailureValue(self):
    self.CaptureFormattedException()

    class SingleTabTest(page_test.PageTest):
      # Test is not multi-tab because it does not override TabForPage.
      def ValidateAndMeasurePage(self, *_):
        pass

    test = SingleTabTest()
    results = self._RunPageTestThatRaisesAppCrashException(
        test, max_failures=1)
    self.assertEquals([], GetSuccessfulPageRuns(results))
    self.assertEquals(2, len(results.failures))  # max_failures + 1
    self.assertFormattedExceptionIsEmpty()

  @decorators.Enabled('has tabs')
  def testMultipleTabsMeansCrashRaises(self):
    self.CaptureFormattedException()

    class MultipleTabsTest(page_test.PageTest):
      # Test *is* multi-tab because it overrides TabForPage.
      def TabForPage(self, page, browser):
        return browser.tabs.New()

      def ValidateAndMeasurePage(self, *_):
        pass

    test = MultipleTabsTest()
    with self.assertRaises(page_test.MultiTabTestAppCrashError):
      self._RunPageTestThatRaisesAppCrashException(test, max_failures=1)
    self.assertFormattedExceptionOnlyHas('AppCrashException')

  def testWebPageReplay(self):
    story_set = example_domain.ExampleDomainPageSet()
    body = []

    class TestWpr(page_test.PageTest):
      def ValidateAndMeasurePage(self, page, tab, results):
        del page, results  # unused
        body.append(tab.EvaluateJavaScript('document.body.innerText'))

      def DidRunPage(self, platform):
        # Force the replay server to restart between pages; this verifies that
        # the restart mechanism works.
        platform.network_controller.StopReplay()

    test = TestWpr()
    options = options_for_unittests.GetCopy()
    options.output_formats = ['none']
    options.suppress_gtest_report = True
    SetUpStoryRunnerArguments(options)
    results = results_options.CreateResults(EmptyMetadataForTest(), options)
    story_runner.Run(test, story_set, options, results)

    self.longMessage = True
    self.assertIn('Example Domain', body[0],
                  msg='URL: %s' % story_set.stories[0].url)
    self.assertIn('Example Domain', body[1],
                  msg='URL: %s' % story_set.stories[1].url)
    self.assertEquals(2, len(GetSuccessfulPageRuns(results)))
    self.assertEquals(0, len(results.failures))

  def testScreenShotTakenForFailedPage(self):
    self.CaptureFormattedException()
    platform_screenshot_supported = [False]
    tab_screenshot_supported = [False]
    chrome_version_screen_shot = [None]

    class FailingTestPage(page_module.Page):
      def RunNavigateSteps(self, action_runner):
        action_runner.Navigate(self._url)
        platform_screenshot_supported[0] = (
            action_runner.tab.browser.platform.CanTakeScreenshot)
        tab_screenshot_supported[0] = action_runner.tab.screenshot_supported
        # capture a reference screenshot before failing so the test can
        # compare it against the one the runner saves
        if not platform_screenshot_supported[0] and tab_screenshot_supported[0]:
          chrome_version_screen_shot[0] = action_runner.tab.Screenshot()
        raise exceptions.AppCrashException

    story_set = story.StorySet()
    story_set.AddStory(page_module.Page('file://blank.html', story_set))
    failing_page = FailingTestPage('chrome://version', story_set)
    story_set.AddStory(failing_page)
    options = options_for_unittests.GetCopy()
    options.output_formats = ['none']
    options.browser_options.take_screenshot_for_failed_page = True
    options.suppress_gtest_report = True
    SetUpStoryRunnerArguments(options)
    results = results_options.CreateResults(EmptyMetadataForTest(), options)
    story_runner.Run(DummyTest(), story_set, options, results,
                     max_failures=2)
    self.assertEquals(1, len(results.failures))
    if not platform_screenshot_supported[0] and tab_screenshot_supported[0]:
      self.assertEquals(1, len(results.pages_to_profiling_files))
      self.assertIn(failing_page,
                    results.pages_to_profiling_files)
      screenshot_file_path = (
          results.pages_to_profiling_files[failing_page][0].GetAbsPath())
      try:
        actual_screenshot = image_util.FromPngFile(screenshot_file_path)
        self.assertEquals(image_util.Pixels(chrome_version_screen_shot[0]),
                          image_util.Pixels(actual_screenshot))
      finally:  # Must clean up screenshot file if exists.
        os.remove(screenshot_file_path)

  def testNoProfilingFilesCreatedForPageByDefault(self):
    self.CaptureFormattedException()

    class FailingTestPage(page_module.Page):
      def RunNavigateSteps(self, action_runner):
        action_runner.Navigate(self._url)
        raise exceptions.AppCrashException

    story_set = story.StorySet()
    story_set.AddStory(page_module.Page('file://blank.html', story_set))
    failing_page = FailingTestPage('chrome://version', story_set)
    story_set.AddStory(failing_page)
    options = options_for_unittests.GetCopy()
    options.output_formats = ['none']
    options.suppress_gtest_report = True
    SetUpStoryRunnerArguments(options)
    results = results_options.CreateResults(EmptyMetadataForTest(), options)
    story_runner.Run(DummyTest(), story_set, options, results,
                     max_failures=2)
    self.assertEquals(1, len(results.failures))
    # screenshot-on-failure is opt-in, so nothing should be saved
    self.assertEquals(0, len(results.pages_to_profiling_files))
class FakePageRunEndToEndTests(unittest.TestCase):
  """Like ActualPageRunEndToEndTests but driven by a fake browser (no device)."""

  def setUp(self):
    self.options = fakes.CreateBrowserFinderOptions()
    self.options.output_formats = ['none']
    self.options.suppress_gtest_report = True
    SetUpStoryRunnerArguments(self.options)

  def testNoScreenShotTakenForFailedPageDueToNoSupport(self):
    self.options.browser_options.take_screenshot_for_failed_page = True

    class FailingTestPage(page_module.Page):
      def RunNavigateSteps(self, action_runner):
        raise exceptions.AppCrashException

    story_set = story.StorySet()
    story_set.AddStory(page_module.Page('file://blank.html', story_set))
    failing_page = FailingTestPage('chrome://version', story_set)
    story_set.AddStory(failing_page)
    results = results_options.CreateResults(
        EmptyMetadataForTest(), self.options)
    story_runner.Run(DummyTest(), story_set, self.options, results,
                     max_failures=2)
    self.assertEquals(1, len(results.failures))
    # the fake platform does not advertise screenshot support here
    self.assertEquals(0, len(results.pages_to_profiling_files))

  def testScreenShotTakenForFailedPageOnSupportedPlatform(self):
    fake_platform = self.options.fake_possible_browser.returned_browser.platform
    # 2x2 PNG the fake platform will hand back as the "screenshot"
    expected_png_base64 = """
iVBORw0KGgoAAAANSUhEUgAAAAIAAAACCAIAAAD91
JpzAAAAFklEQVR4Xg3EAQ0AAABAMP1LY3YI7l8l6A
T8tgwbJAAAAABJRU5ErkJggg==
"""
    fake_platform.screenshot_png_data = expected_png_base64
    self.options.browser_options.take_screenshot_for_failed_page = True

    class FailingTestPage(page_module.Page):
      def RunNavigateSteps(self, action_runner):
        raise exceptions.AppCrashException

    story_set = story.StorySet()
    story_set.AddStory(page_module.Page('file://blank.html', story_set))
    failing_page = FailingTestPage('chrome://version', story_set)
    story_set.AddStory(failing_page)
    results = results_options.CreateResults(
        EmptyMetadataForTest(), self.options)
    story_runner.Run(DummyTest(), story_set, self.options, results,
                     max_failures=2)
    self.assertEquals(1, len(results.failures))
    self.assertEquals(1, len(results.pages_to_profiling_files))
    self.assertIn(failing_page,
                  results.pages_to_profiling_files)
    screenshot_file_path = (
        results.pages_to_profiling_files[failing_page][0].GetAbsPath())
    try:
      actual_screenshot_img = image_util.FromPngFile(screenshot_file_path)
      self.assertTrue(image_util.AreEqual(
          image_util.FromBase64Png(expected_png_base64),
          actual_screenshot_img))
    finally:  # Must clean up screenshot file if exists.
      os.remove(screenshot_file_path)
| |
# Copyright 2016 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_utils import units
# Option group under which every XenServer driver option below is registered.
xenserver_group = cfg.OptGroup('xenserver', title='Xenserver Options')

# Options controlling communication with the in-guest XenAPI agent.
xenapi_agent_opts = [
    cfg.IntOpt('agent_timeout',
               default=30,
               help="""
Number of seconds to wait for agent's reply to a request.

Nova configures/performs certain administrative actions on a
server with the help of an agent that's installed on the server.
The communication between Nova and the agent is achieved via
sharing messages, called records, over xenstore, a shared
storage across all the domains on a Xenserver host.
Operations performed by the agent on behalf of nova are:
'version',' key_init', 'password','resetnetwork','inject_file',
and 'agentupdate'.

To perform one of the above operations, the xapi 'agent' plugin
writes the command and its associated parameters to a certain
location known to the domain and awaits response. On being
notified of the message, the agent performs appropriate actions
on the server and writes the result back to xenstore. This
result is then read by the xapi 'agent' plugin to
determine the success/failure of the operation.

This config option determines how long the xapi 'agent' plugin
shall wait to read the response off of xenstore for a given
request/command. If the agent on the instance fails to write
the result in this time period, the operation is considered to
have timed out.

Services which consume this:

* ``nova-compute``

Possible values:

* Any positive integer

Related options:

* ``agent_version_timeout``
* ``agent_resetnetwork_timeout``
"""),
    cfg.IntOpt('agent_version_timeout',
               default=300,
               # BUG FIX: help text previously read "agent't reply".
               help="""
Number of seconds to wait for agent's reply to version request.

This indicates the amount of time xapi 'agent' plugin waits
for the agent to respond to the 'version' request specifically.
The generic timeout for agent communication ``agent_timeout``
is ignored in this case.

During the build process the 'version' request is used to
determine if the agent is available/operational to perform
other requests such as 'resetnetwork', 'password', 'key_init'
and 'inject_file'. If the 'version' call fails, the other
configuration is skipped. So, this configuration option can
also be interpreted as time in which agent is expected to be
fully operational.

Services which consume this:

* ``nova-compute``

Possible values:

* Any positive integer

Related options:

* None
"""),
    cfg.IntOpt('agent_resetnetwork_timeout',
               default=60,
               help="""
Number of seconds to wait for agent's reply to resetnetwork
request.

This indicates the amount of time xapi 'agent' plugin waits
for the agent to respond to the 'resetnetwork' request
specifically. The generic timeout for agent communication
``agent_timeout`` is ignored in this case.

Services which consume this:

* ``nova-compute``

Possible values:

* Any positive integer

Related options:

* None
"""),
    cfg.StrOpt('agent_path',
               default='usr/sbin/xe-update-networking',
               help="""
Path to locate guest agent on the server.

Specifies the path in which the XenAPI guest agent should be
located. If the agent is present, network configuration is not
injected into the image.

Used if compute_driver=xenapi.XenAPIDriver and
flat_injected=True.

Services which consume this:

* ``nova-compute``

Possible values:

* A valid path

Related options:

* ``flat_injected``
* ``compute_driver``
"""),
    cfg.BoolOpt('disable_agent',
                default=False,
                help="""
Disables the use of XenAPI agent.

This configuration option suggests whether the use of agent
should be enabled or not regardless of what image properties
are present. Image properties have an effect only when this
is set to ``True``. Read description of config option
``use_agent_default`` for more information.

Services which consume this:

* ``nova-compute``

Possible values:

* True
* False

Related options:

* ``use_agent_default``
"""),
    cfg.BoolOpt('use_agent_default',
                default=False,
                help="""
Whether or not to use the agent by default when its usage is
enabled but not indicated by the image.

The use of XenAPI agent can be disabled altogether using the
configuration option ``disable_agent``. However, if it is not
disabled, the use of an agent can still be controlled by the
image in use through one of its properties,
``xenapi_use_agent``. If this property is either not present
or specified incorrectly on the image, the use of agent is
determined by this configuration option.

Note that if this configuration is set to ``True`` when the
agent is not present, the boot times will increase
significantly.

Services which consume this:

* ``nova-compute``

Possible values:

* True
* False

Related options:

* ``disable_agent``
"""),
]
# Options governing XenAPI session establishment and concurrency.
xenapi_session_opts = [
    cfg.IntOpt('login_timeout',
               default=10,
               help='Timeout in seconds for XenAPI login.'),
    cfg.IntOpt('connection_concurrent',
               default=5,
               help='Maximum number of concurrent XenAPI connections. '
                    'Used only if compute_driver=xenapi.XenAPIDriver'),
]
# Options for BitTorrent-based image distribution between hosts.
xenapi_torrent_opts = [
    cfg.StrOpt('torrent_base_url',
               help='Base URL for torrent files; must contain a slash'
                    ' character (see RFC 1808, step 6)'),
    cfg.FloatOpt('torrent_seed_chance',
                 default=1.0,
                 help='Probability that peer will become a seeder.'
                      ' (1.0 = 100%)'),
    cfg.IntOpt('torrent_seed_duration',
               default=3600,
               help='Number of seconds after downloading an image via'
                    ' BitTorrent that it should be seeded for other peers.'),
    cfg.IntOpt('torrent_max_last_accessed',
               default=86400,
               help='Cached torrent files not accessed within this number of'
                    ' seconds can be reaped'),
    cfg.IntOpt('torrent_listen_port_start',
               default=6881,
               min=1,
               max=65535,
               help='Beginning of port range to listen on'),
    cfg.IntOpt('torrent_listen_port_end',
               default=6891,
               min=1,
               max=65535,
               help='End of port range to listen on'),
    cfg.IntOpt('torrent_download_stall_cutoff',
               default=600,
               help='Number of seconds a download can remain at the same'
                    ' progress percentage w/o being considered a stall'),
    cfg.IntOpt('torrent_max_seeder_processes_per_host',
               default=1,
               help='Maximum number of seeder processes to run concurrently'
                    ' within a given dom0. (-1 = no limit)')
]
# Options consumed by the vm_utils helper module (image caching, SR
# selection, resize behavior, iPXE boot support, ...).
xenapi_vm_utils_opts = [
    cfg.StrOpt('cache_images',
               default='all',
               choices=('all', 'some', 'none'),
               help='Cache glance images locally. `all` will cache all'
                    ' images, `some` will only cache images that have the'
                    ' image_property `cache_in_nova=True`, and `none` turns'
                    ' off caching entirely'),
    cfg.IntOpt('image_compression_level',
               min=1,
               max=9,
               help='Compression level for images, e.g., 9 for gzip -9.'
                    ' Range is 1-9, 9 being most compressed but most CPU'
                    ' intensive on dom0.'),
    cfg.StrOpt('default_os_type',
               default='linux',
               help='Default OS type'),
    cfg.IntOpt('block_device_creation_timeout',
               default=10,
               help='Time to wait for a block device to be created'),
    cfg.IntOpt('max_kernel_ramdisk_size',
               default=16 * units.Mi,
               help='Maximum size in bytes of kernel or ramdisk images'),
    cfg.StrOpt('sr_matching_filter',
               default='default-sr:true',
               help='Filter for finding the SR to be used to install guest '
                    'instances on. To use the Local Storage in default '
                    'XenServer/XCP installations set this flag to '
                    'other-config:i18n-key=local-storage. To select an SR '
                    'with a different matching criteria, you could set it to '
                    'other-config:my_favorite_sr=true. On the other hand, to '
                    'fall back on the Default SR, as displayed by XenCenter, '
                    'set this flag to: default-sr:true'),
    cfg.BoolOpt('sparse_copy',
                default=True,
                help='Whether to use sparse_copy for copying data on a '
                     'resize down (False will use standard dd). This speeds '
                     'up resizes down considerably since large runs of zeros '
                     'won\'t have to be rsynced'),
    cfg.IntOpt('num_vbd_unplug_retries',
               default=10,
               help='Maximum number of retries to unplug VBD. if <=0, '
                    'should try once and no retry'),
    cfg.StrOpt('torrent_images',
               default='none',
               choices=('all', 'some', 'none'),
               help='Whether or not to download images via Bit Torrent.'),
    cfg.StrOpt('ipxe_network_name',
               help='Name of network to use for booting iPXE ISOs'),
    cfg.StrOpt('ipxe_boot_menu_url',
               help='URL to the iPXE boot menu'),
    cfg.StrOpt('ipxe_mkisofs_cmd',
               default='mkisofs',
               help='Name and optionally path of the tool used for '
                    'ISO image creation'),
]
# Core XenAPI connection options (URL, credentials, iSCSI target, VHD
# coalescing behavior).
xenapi_opts = [
    cfg.StrOpt('connection_url',
               help='URL for connection to XenServer/Xen Cloud Platform. '
                    'A special value of unix://local can be used to connect '
                    'to the local unix socket. '
                    'Required if compute_driver=xenapi.XenAPIDriver'),
    cfg.StrOpt('connection_username',
               default='root',
               help='Username for connection to XenServer/Xen Cloud Platform. '
                    'Used only if compute_driver=xenapi.XenAPIDriver'),
    cfg.StrOpt('connection_password',
               help='Password for connection to XenServer/Xen Cloud Platform. '
                    'Used only if compute_driver=xenapi.XenAPIDriver',
               secret=True),
    cfg.FloatOpt('vhd_coalesce_poll_interval',
                 default=5.0,
                 help='The interval used for polling of coalescing vhds. '
                      'Used only if compute_driver=xenapi.XenAPIDriver'),
    cfg.BoolOpt('check_host',
                default=True,
                help='Ensure compute service is running on host XenAPI '
                     'connects to.'),
    cfg.IntOpt('vhd_coalesce_max_attempts',
               default=20,
               help='Max number of times to poll for VHD to coalesce. '
                    'Used only if compute_driver=xenapi.XenAPIDriver'),
    cfg.StrOpt('sr_base_path',
               default='/var/run/sr-mount',
               help='Base path to the storage repository'),
    cfg.StrOpt('target_host',
               help='The iSCSI Target Host'),
    cfg.StrOpt('target_port',
               default='3260',
               help='The iSCSI Target Port, default is port 3260'),
    cfg.StrOpt('iqn_prefix',
               default='iqn.2010-10.org.openstack',
               help='IQN Prefix'),
    # NOTE(sirp): This is a work-around for a bug in Ubuntu Maverick,
    # when we pull support for it, we should remove this
    cfg.BoolOpt('remap_vbd_dev',
                default=False,
                help='Used to enable the remapping of VBD dev '
                     '(Works around an issue in Ubuntu Maverick)'),
    cfg.StrOpt('remap_vbd_dev_prefix',
               default='sd',
               help='Specify prefix to remap VBD dev to '
                    '(ex. /dev/xvdb -> /dev/sdb)'),
]
# Options used by the vmops module (instance lifecycle operations).
xenapi_vmops_opts = [
    cfg.IntOpt('running_timeout',
               default=60,
               help='Number of seconds to wait for instance '
                    'to go to running state'),
    cfg.StrOpt('vif_driver',
               default='nova.virt.xenapi.vif.XenAPIBridgeDriver',
               help='The XenAPI VIF driver using XenServer Network APIs.'),
    cfg.StrOpt('image_upload_handler',
               default='nova.virt.xenapi.image.glance.GlanceStore',
               help='Dom0 plugin driver used to handle image uploads.'),
]
# Options used by the volume_utils module.
xenapi_volume_utils_opts = [
    cfg.IntOpt('introduce_vdi_retry_wait',
               default=20,
               help='Number of seconds to wait for an SR to settle '
                    'if the VDI does not exist when first introduced'),
]
# Open vSwitch integration bridge name.
xenapi_ovs_integration_bridge_opts = [
    cfg.StrOpt('ovs_integration_bridge',
               default='xapi1',
               help='Name of Integration Bridge used by Open vSwitch'),
]
# Host-pool join behavior.
xenapi_pool_opts = [
    cfg.BoolOpt('use_join_force',
                default=True,
                help='To use for hosts with different CPUs'),
]
# Aggregate of every option list above; this is what gets registered on the
# 'xenserver' group and reported by list_opts().
ALL_XENSERVER_OPTS = (xenapi_agent_opts +
                      xenapi_session_opts +
                      xenapi_torrent_opts +
                      xenapi_vm_utils_opts +
                      xenapi_opts +
                      xenapi_vmops_opts +
                      xenapi_volume_utils_opts +
                      xenapi_ovs_integration_bridge_opts +
                      xenapi_pool_opts)
def register_opts(conf):
    """Register the 'xenserver' group and all of its options on *conf*.

    :param conf: an oslo.config ConfigOpts instance to register against.
    """
    conf.register_group(xenserver_group)
    conf.register_opts(ALL_XENSERVER_OPTS, group=xenserver_group)
def list_opts():
    """Return {group: option list} for oslo.config option discovery."""
    return {xenserver_group: ALL_XENSERVER_OPTS}
| |
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tools to work with checkpoints."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
from tensorflow.python.ops import io_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.ops import variables
from tensorflow.python.platform import gfile
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import saver
from tensorflow.python.training import training as train
__all__ = [
"load_checkpoint",
"load_variable",
"list_variables",
"init_from_checkpoint"]
def _get_checkpoint_filename(filepattern):
  """Returns checkpoint filename given directory or specific filepattern."""
  # A concrete file path is returned untouched; a directory is resolved to
  # its most recent checkpoint.
  if not gfile.IsDirectory(filepattern):
    return filepattern
  return saver.latest_checkpoint(filepattern)
def load_checkpoint(filepattern):
  """Returns CheckpointReader for latest checkpoint.

  Args:
    filepattern: Directory with checkpoints file or path to checkpoint.

  Returns:
    `CheckpointReader` object.

  Raises:
    ValueError: if checkpoint_dir doesn't have 'checkpoint' file or checkpoints.
  """
  ckpt_file = _get_checkpoint_filename(filepattern)
  if ckpt_file is not None:
    return train.NewCheckpointReader(ckpt_file)
  raise ValueError("Couldn't find 'checkpoint' file or checkpoints in "
                   "given directory %s" % filepattern)
def load_variable(checkpoint_dir, name):
  """Returns a Tensor with the contents of the given variable in the checkpoint.

  Args:
    checkpoint_dir: Directory with checkpoints file or path to checkpoint.
    name: Name of the tensor to return.

  Returns:
    `Tensor` object.
  """
  # TODO(b/29227106): Fix this in the right place and remove this.
  # Checkpoint tensors are keyed without the ':0' output suffix.
  suffix = ":0"
  if name.endswith(suffix):
    name = name[:-len(suffix)]
  return load_checkpoint(checkpoint_dir).get_tensor(name)
def list_variables(checkpoint_dir):
  """Returns list of all variables in the latest checkpoint.

  Args:
    checkpoint_dir: Directory with checkpoints file or path to checkpoint.

  Returns:
    List of tuples `(name, shape)`.
  """
  shape_map = load_checkpoint(checkpoint_dir).get_variable_to_shape_map()
  # Emit entries in deterministic (sorted-by-name) order.
  return [(var_name, shape_map[var_name]) for var_name in sorted(shape_map)]
# pylint: disable=protected-access
# Currently variable_scope doesn't provide very good APIs to access
# all variables under scope and retrieve and check existing scopes.
# TODO(ipolosukhin): Refactor variable_scope module to provide nicer APIs.
def _set_checkpoint_initializer(variable, file_pattern, tensor_name, slice_spec,
                                name="checkpoint_initializer"):
  """Sets variable initializer to assign op form value in checkpoint's tensor.

  Args:
    variable: `Variable` object.
    file_pattern: string, where to load checkpoints from.
    tensor_name: Name of the `Tensor` to load from checkpoint reader.
    slice_spec: Slice specification for loading partitioned variables.
    name: Name of the operation.
  """
  dtype = variable.dtype.base_dtype
  # restore_v2 takes parallel lists; we restore exactly one tensor here.
  restored_tensors = io_ops.restore_v2(
      file_pattern, [tensor_name], [slice_spec], [dtype], name=name)
  variable._initializer_op = state_ops.assign(variable, restored_tensors[0])
def _set_variable_or_list_initializer(variable_or_list, file_pattern,
                                      tensor_name):
  """Sets checkpoint initializer on a variable or a list of partition slices.

  Args:
    variable_or_list: a `Variable`, or a list/tuple of partition-slice
      `Variable`s that together make up one logical tensor.
    file_pattern: string, where to load checkpoints from.
    tensor_name: name of the tensor in the checkpoint to restore from.

  Raises:
    ValueError: if the slices in the list do not all belong to the same
      full tensor.
  """
  if isinstance(variable_or_list, (list, tuple)):
    # A set of slices.
    slice_name = None
    for v in variable_or_list:
      # All slices must share one full_name; remember the first and compare
      # the rest against it.
      if slice_name is None:
        slice_name = v._save_slice_info.full_name
      elif slice_name != v._save_slice_info.full_name:
        raise ValueError("Slices must all be from the same tensor: %s != %s" %
                         (slice_name, v._save_slice_info.full_name))
      # Each slice restores its own region via its slice spec.
      _set_checkpoint_initializer(v, file_pattern, tensor_name,
                                  v._save_slice_info.spec)
  else:
    # Unpartitioned variable: empty slice spec restores the whole tensor.
    _set_checkpoint_initializer(variable_or_list, file_pattern, tensor_name, "")
def init_from_checkpoint(checkpoint_dir, assignment_map):
  """Using assignment map initializes current variables with loaded tensors.

  Note: This overrides default initialization ops of specified variables and
  redefines dtype.

  Assignment map supports following syntax:
    `'checkpoint_scope_name/': 'scope_name/'` - will load all variables in
      current `scope_name` from `checkpoint_scope_name` with matching variable
      names.
    `'checkpoint_scope_name/some_other_variable': 'scope_name/variable_name'` -
      will initialize `scope_name/variable_name` variable
      from `checkpoint_scope_name/some_other_variable`.
    `'scope_variable_name': variable` - will initialize given `tf.Variable`
      object with variable from the checkpoint.
    `'scope_variable_name': list(variable)` - will initialize list of
      partitioned variables with variable from the checkpoint.
    `'scope_name/': '/'` - will load all variables in current `scope_name` from
      checkpoint's root (e.g. no scope).

  Supports loading into partitioned variables, which are represented as
  '<variable>/part_<part #>'.

  Example:
  ```python
    # Create variables.
    with tf.variable_scope('test'):
      m = tf.get_variable('my_var')
    with tf.variable_scope('test2'):
      var2 = tf.get_variable('my_var')
    var3 = tf.get_variable(name="my1", shape=[100, 100],
                           partitioner=lambda shape, dtype: [5, 1])
    ...
    # Specify which variables to initialize from checkpoint.
    init_from_checkpoint(checkpoint_dir, {
      'some_var': 'test/my_var',
      'some_scope/': 'test2/'})
    ...
    # Or use `Variable` objects to identify what to initialize.
    init_from_checkpoint(checkpoint_dir, {
      'some_scope/var2': var2,
    })
    # Initialize partitioned variables
    init_from_checkpoint(checkpoint_dir, {
      'some_var_from_ckpt': 'part_var',
    })
    # Or specifying the list of `Variable` objects.
    init_from_checkpoint(checkpoint_dir, {
      'some_var_from_ckpt': var3._get_variable_list(),
    })
    ...
    # Initialize variables as usual.
    session.run(tf.get_all_variables())
  ```

  Args:
    checkpoint_dir: Directory with checkpoints file or path to checkpoint.
    assignment_map: Dict, where keys are names of the variables in the
      checkpoint and values are current variables or names of current variables
      (in default graph).

  Raises:
    tf.errors.OpError: If missing checkpoints or tensors in checkpoints.
    ValueError: If missing variables in current graph.
  """
  filepattern = _get_checkpoint_filename(checkpoint_dir)
  reader = load_checkpoint(checkpoint_dir)
  variable_map = reader.get_variable_to_shape_map()
  for tensor_name_in_ckpt, current_var_or_name in six.iteritems(assignment_map):
    var = None
    # Check if this is Variable object or list of Variable objects (in case of
    # partitioned variables).
    is_var = lambda x: isinstance(x, variables.Variable)
    if is_var(current_var_or_name) or (
        isinstance(current_var_or_name, list)
        and all(is_var(v) for v in current_var_or_name)):
      var = current_var_or_name
    else:
      var_scope = vs._get_default_variable_store()
      # Check if this variable is in var_store.
      var = var_scope._vars.get(current_var_or_name, None)
      # Also check if variable is partitioned as list.
      if var is None:
        if current_var_or_name + "/part_0" in var_scope._vars:
          # Collect consecutive partition slices until the first gap.
          var = []
          i = 0
          while current_var_or_name + "/part_%d" % i in var_scope._vars:
            var.append(var_scope._vars[current_var_or_name + "/part_%d" % i])
            i += 1
    if var is not None:
      # If 1 to 1 mapping was provided, find variable in the checkpoint.
      if tensor_name_in_ckpt not in variable_map:
        raise ValueError("Tensor %s is not found in %s checkpoint %s" % (
            tensor_name_in_ckpt, checkpoint_dir, variable_map
        ))
      if is_var(var):
        # Additional at-call-time checks.
        if not var.get_shape().is_compatible_with(
            variable_map[tensor_name_in_ckpt]):
          raise ValueError(
              "Shape of variable %s (%s) doesn't match with shape of "
              "tensor %s (%s) from checkpoint reader." % (
                  var.name, str(var.get_shape()),
                  tensor_name_in_ckpt, str(variable_map[tensor_name_in_ckpt])
              ))
        var_name = var.name
      else:
        # var is a list of partition slices; log them all by name.
        var_name = ",".join([v.name for v in var])
      _set_variable_or_list_initializer(var, filepattern, tensor_name_in_ckpt)
      logging.info("Initialize variable %s from checkpoint %s with %s" % (
          var_name, checkpoint_dir, tensor_name_in_ckpt
      ))
    else:
      # Scope-to-scope mapping: current_var_or_name names a scope prefix.
      scopes = ""
      # TODO(vihanjain): Support list of 'current_var_or_name' here.
      if "/" in current_var_or_name:
        scopes = current_var_or_name[:current_var_or_name.rindex("/")]
      if not tensor_name_in_ckpt.endswith("/"):
        raise ValueError(
            "Assignment map with scope only name {} should map to scope only "
            "{}. Should be 'scope/': 'other_scope/'.".format(
                scopes, tensor_name_in_ckpt))
      # If scope to scope mapping was provided, find all variables in the scope.
      for var_name in var_scope._vars:
        if var_name.startswith(scopes):
          # Lookup name with specified prefix and suffix from current variable.
          # If tensor_name given is '/' (root), don't use it for full name.
          if tensor_name_in_ckpt != "/":
            full_tensor_name = tensor_name_in_ckpt + var_name[len(scopes) + 1:]
          else:
            full_tensor_name = var_name[len(scopes) + 1:]
          if full_tensor_name not in variable_map:
            raise ValueError(
                "Tensor %s (%s in %s) is not found in %s checkpoint" % (
                    full_tensor_name, var_name[len(scopes) + 1:],
                    tensor_name_in_ckpt, checkpoint_dir
                ))
          var = var_scope._vars[var_name]
          _set_variable_or_list_initializer(var, filepattern, full_tensor_name)
          logging.info("Initialize variable %s from checkpoint %s with %s" % (
              var_name, checkpoint_dir, full_tensor_name
          ))
# pylint: enable=protected-access
| |
"""Support for Xiaomi Mi Air Purifier and Xiaomi Mi Air Humidifier."""
import asyncio
from enum import Enum
from functools import partial
import logging
from miio import ( # pylint: disable=import-error
AirFresh,
AirHumidifier,
AirPurifier,
Device,
DeviceException,
)
from miio.airfresh import ( # pylint: disable=import-error, import-error
LedBrightness as AirfreshLedBrightness,
OperationMode as AirfreshOperationMode,
)
from miio.airhumidifier import ( # pylint: disable=import-error, import-error
LedBrightness as AirhumidifierLedBrightness,
OperationMode as AirhumidifierOperationMode,
)
from miio.airpurifier import ( # pylint: disable=import-error, import-error
LedBrightness as AirpurifierLedBrightness,
OperationMode as AirpurifierOperationMode,
)
import voluptuous as vol
from homeassistant.components.fan import PLATFORM_SCHEMA, SUPPORT_SET_SPEED, FanEntity
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_MODE,
CONF_HOST,
CONF_NAME,
CONF_TOKEN,
)
from homeassistant.exceptions import PlatformNotReady
import homeassistant.helpers.config_validation as cv
from .const import (
DOMAIN,
SERVICE_RESET_FILTER,
SERVICE_SET_AUTO_DETECT_OFF,
SERVICE_SET_AUTO_DETECT_ON,
SERVICE_SET_BUZZER_OFF,
SERVICE_SET_BUZZER_ON,
SERVICE_SET_CHILD_LOCK_OFF,
SERVICE_SET_CHILD_LOCK_ON,
SERVICE_SET_DRY_OFF,
SERVICE_SET_DRY_ON,
SERVICE_SET_EXTRA_FEATURES,
SERVICE_SET_FAVORITE_LEVEL,
SERVICE_SET_LEARN_MODE_OFF,
SERVICE_SET_LEARN_MODE_ON,
SERVICE_SET_LED_BRIGHTNESS,
SERVICE_SET_LED_OFF,
SERVICE_SET_LED_ON,
SERVICE_SET_TARGET_HUMIDITY,
SERVICE_SET_VOLUME,
)
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "Xiaomi Miio Device"
# Key under hass.data where configured devices are stored, indexed by host.
DATA_KEY = "fan.xiaomi_miio"
CONF_MODEL = "model"
# Known miio model identifiers, used both for schema validation and for
# selecting the right entity class in async_setup_platform.
MODEL_AIRPURIFIER_V1 = "zhimi.airpurifier.v1"
MODEL_AIRPURIFIER_V2 = "zhimi.airpurifier.v2"
MODEL_AIRPURIFIER_V3 = "zhimi.airpurifier.v3"
MODEL_AIRPURIFIER_V5 = "zhimi.airpurifier.v5"
MODEL_AIRPURIFIER_PRO = "zhimi.airpurifier.v6"
MODEL_AIRPURIFIER_PRO_V7 = "zhimi.airpurifier.v7"
MODEL_AIRPURIFIER_M1 = "zhimi.airpurifier.m1"
MODEL_AIRPURIFIER_M2 = "zhimi.airpurifier.m2"
MODEL_AIRPURIFIER_MA1 = "zhimi.airpurifier.ma1"
MODEL_AIRPURIFIER_MA2 = "zhimi.airpurifier.ma2"
MODEL_AIRPURIFIER_SA1 = "zhimi.airpurifier.sa1"
MODEL_AIRPURIFIER_SA2 = "zhimi.airpurifier.sa2"
MODEL_AIRPURIFIER_2S = "zhimi.airpurifier.mc1"
MODEL_AIRHUMIDIFIER_V1 = "zhimi.humidifier.v1"
MODEL_AIRHUMIDIFIER_CA1 = "zhimi.humidifier.ca1"
MODEL_AIRHUMIDIFIER_CB1 = "zhimi.humidifier.cb1"
MODEL_AIRFRESH_VA2 = "zhimi.airfresh.va2"
# Platform configuration: host and 32-char token are required; model is
# optional and auto-detected when omitted.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_HOST): cv.string,
        vol.Required(CONF_TOKEN): vol.All(cv.string, vol.Length(min=32, max=32)),
        vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
        vol.Optional(CONF_MODEL): vol.In(
            [
                MODEL_AIRPURIFIER_V1,
                MODEL_AIRPURIFIER_V2,
                MODEL_AIRPURIFIER_V3,
                MODEL_AIRPURIFIER_V5,
                MODEL_AIRPURIFIER_PRO,
                MODEL_AIRPURIFIER_PRO_V7,
                MODEL_AIRPURIFIER_M1,
                MODEL_AIRPURIFIER_M2,
                MODEL_AIRPURIFIER_MA1,
                MODEL_AIRPURIFIER_MA2,
                MODEL_AIRPURIFIER_SA1,
                MODEL_AIRPURIFIER_SA2,
                MODEL_AIRPURIFIER_2S,
                MODEL_AIRHUMIDIFIER_V1,
                MODEL_AIRHUMIDIFIER_CA1,
                MODEL_AIRHUMIDIFIER_CB1,
                MODEL_AIRFRESH_VA2,
            ]
        ),
    }
)
ATTR_MODEL = "model"
# Air Purifier
ATTR_TEMPERATURE = "temperature"
ATTR_HUMIDITY = "humidity"
ATTR_AIR_QUALITY_INDEX = "aqi"
ATTR_FILTER_HOURS_USED = "filter_hours_used"
ATTR_FILTER_LIFE = "filter_life_remaining"
ATTR_FAVORITE_LEVEL = "favorite_level"
ATTR_BUZZER = "buzzer"
ATTR_CHILD_LOCK = "child_lock"
ATTR_LED = "led"
ATTR_LED_BRIGHTNESS = "led_brightness"
ATTR_MOTOR_SPEED = "motor_speed"
ATTR_AVERAGE_AIR_QUALITY_INDEX = "average_aqi"
ATTR_PURIFY_VOLUME = "purify_volume"
ATTR_BRIGHTNESS = "brightness"
ATTR_LEVEL = "level"
ATTR_MOTOR2_SPEED = "motor2_speed"
ATTR_ILLUMINANCE = "illuminance"
ATTR_FILTER_RFID_PRODUCT_ID = "filter_rfid_product_id"
ATTR_FILTER_RFID_TAG = "filter_rfid_tag"
ATTR_FILTER_TYPE = "filter_type"
ATTR_LEARN_MODE = "learn_mode"
ATTR_SLEEP_TIME = "sleep_time"
ATTR_SLEEP_LEARN_COUNT = "sleep_mode_learn_count"
ATTR_EXTRA_FEATURES = "extra_features"
ATTR_FEATURES = "features"
ATTR_TURBO_MODE_SUPPORTED = "turbo_mode_supported"
ATTR_AUTO_DETECT = "auto_detect"
ATTR_SLEEP_MODE = "sleep_mode"
ATTR_VOLUME = "volume"
ATTR_USE_TIME = "use_time"
ATTR_BUTTON_PRESSED = "button_pressed"
# Air Humidifier
ATTR_TARGET_HUMIDITY = "target_humidity"
ATTR_TRANS_LEVEL = "trans_level"
ATTR_HARDWARE_VERSION = "hardware_version"
# Air Humidifier CA
# NOTE(review): ATTR_MOTOR_SPEED duplicates the identical definition above;
# harmless (same value), but one of the two could be removed.
ATTR_MOTOR_SPEED = "motor_speed"
ATTR_DEPTH = "depth"
ATTR_DRY = "dry"
# Air Fresh
ATTR_CO2 = "co2"
# Map attributes to properties of the state object
AVAILABLE_ATTRIBUTES_AIRPURIFIER_COMMON = {
    ATTR_TEMPERATURE: "temperature",
    ATTR_HUMIDITY: "humidity",
    ATTR_AIR_QUALITY_INDEX: "aqi",
    ATTR_MODE: "mode",
    ATTR_FILTER_HOURS_USED: "filter_hours_used",
    ATTR_FILTER_LIFE: "filter_life_remaining",
    ATTR_FAVORITE_LEVEL: "favorite_level",
    ATTR_CHILD_LOCK: "child_lock",
    ATTR_LED: "led",
    ATTR_MOTOR_SPEED: "motor_speed",
    ATTR_AVERAGE_AIR_QUALITY_INDEX: "average_aqi",
    ATTR_LEARN_MODE: "learn_mode",
    ATTR_EXTRA_FEATURES: "extra_features",
    ATTR_TURBO_MODE_SUPPORTED: "turbo_mode_supported",
    ATTR_BUTTON_PRESSED: "button_pressed",
}
# Per-model attribute maps below extend the common purifier set with the
# fields each hardware revision actually reports.
AVAILABLE_ATTRIBUTES_AIRPURIFIER = {
    **AVAILABLE_ATTRIBUTES_AIRPURIFIER_COMMON,
    ATTR_PURIFY_VOLUME: "purify_volume",
    ATTR_SLEEP_TIME: "sleep_time",
    ATTR_SLEEP_LEARN_COUNT: "sleep_mode_learn_count",
    ATTR_AUTO_DETECT: "auto_detect",
    ATTR_USE_TIME: "use_time",
    ATTR_BUZZER: "buzzer",
    ATTR_LED_BRIGHTNESS: "led_brightness",
    ATTR_SLEEP_MODE: "sleep_mode",
}
AVAILABLE_ATTRIBUTES_AIRPURIFIER_PRO = {
    **AVAILABLE_ATTRIBUTES_AIRPURIFIER_COMMON,
    ATTR_PURIFY_VOLUME: "purify_volume",
    ATTR_USE_TIME: "use_time",
    ATTR_FILTER_RFID_PRODUCT_ID: "filter_rfid_product_id",
    ATTR_FILTER_RFID_TAG: "filter_rfid_tag",
    ATTR_FILTER_TYPE: "filter_type",
    ATTR_ILLUMINANCE: "illuminance",
    ATTR_MOTOR2_SPEED: "motor2_speed",
    ATTR_VOLUME: "volume",
    # perhaps supported but unconfirmed
    ATTR_AUTO_DETECT: "auto_detect",
    ATTR_SLEEP_TIME: "sleep_time",
    ATTR_SLEEP_LEARN_COUNT: "sleep_mode_learn_count",
}
AVAILABLE_ATTRIBUTES_AIRPURIFIER_PRO_V7 = {
    **AVAILABLE_ATTRIBUTES_AIRPURIFIER_COMMON,
    ATTR_FILTER_RFID_PRODUCT_ID: "filter_rfid_product_id",
    ATTR_FILTER_RFID_TAG: "filter_rfid_tag",
    ATTR_FILTER_TYPE: "filter_type",
    ATTR_ILLUMINANCE: "illuminance",
    ATTR_MOTOR2_SPEED: "motor2_speed",
    ATTR_VOLUME: "volume",
}
AVAILABLE_ATTRIBUTES_AIRPURIFIER_2S = {
    **AVAILABLE_ATTRIBUTES_AIRPURIFIER_COMMON,
    ATTR_BUZZER: "buzzer",
    ATTR_FILTER_RFID_PRODUCT_ID: "filter_rfid_product_id",
    ATTR_FILTER_RFID_TAG: "filter_rfid_tag",
    ATTR_FILTER_TYPE: "filter_type",
    ATTR_ILLUMINANCE: "illuminance",
}
AVAILABLE_ATTRIBUTES_AIRPURIFIER_V3 = {
    # Common set isn't used here. It's a very basic version of the device.
    ATTR_AIR_QUALITY_INDEX: "aqi",
    ATTR_MODE: "mode",
    ATTR_LED: "led",
    ATTR_BUZZER: "buzzer",
    ATTR_CHILD_LOCK: "child_lock",
    ATTR_ILLUMINANCE: "illuminance",
    ATTR_FILTER_HOURS_USED: "filter_hours_used",
    ATTR_FILTER_LIFE: "filter_life_remaining",
    ATTR_MOTOR_SPEED: "motor_speed",
    # perhaps supported but unconfirmed
    ATTR_AVERAGE_AIR_QUALITY_INDEX: "average_aqi",
    ATTR_VOLUME: "volume",
    ATTR_MOTOR2_SPEED: "motor2_speed",
    ATTR_FILTER_RFID_PRODUCT_ID: "filter_rfid_product_id",
    ATTR_FILTER_RFID_TAG: "filter_rfid_tag",
    ATTR_FILTER_TYPE: "filter_type",
    ATTR_PURIFY_VOLUME: "purify_volume",
    ATTR_LEARN_MODE: "learn_mode",
    ATTR_SLEEP_TIME: "sleep_time",
    ATTR_SLEEP_LEARN_COUNT: "sleep_mode_learn_count",
    ATTR_EXTRA_FEATURES: "extra_features",
    ATTR_AUTO_DETECT: "auto_detect",
    ATTR_USE_TIME: "use_time",
    ATTR_BUTTON_PRESSED: "button_pressed",
}
AVAILABLE_ATTRIBUTES_AIRHUMIDIFIER_COMMON = {
    ATTR_TEMPERATURE: "temperature",
    ATTR_HUMIDITY: "humidity",
    ATTR_MODE: "mode",
    ATTR_BUZZER: "buzzer",
    ATTR_CHILD_LOCK: "child_lock",
    ATTR_TARGET_HUMIDITY: "target_humidity",
    ATTR_LED_BRIGHTNESS: "led_brightness",
    ATTR_USE_TIME: "use_time",
    ATTR_HARDWARE_VERSION: "hardware_version",
}
AVAILABLE_ATTRIBUTES_AIRHUMIDIFIER = {
    **AVAILABLE_ATTRIBUTES_AIRHUMIDIFIER_COMMON,
    ATTR_TRANS_LEVEL: "trans_level",
    ATTR_BUTTON_PRESSED: "button_pressed",
}
AVAILABLE_ATTRIBUTES_AIRHUMIDIFIER_CA_AND_CB = {
    **AVAILABLE_ATTRIBUTES_AIRHUMIDIFIER_COMMON,
    ATTR_MOTOR_SPEED: "motor_speed",
    ATTR_DEPTH: "depth",
    ATTR_DRY: "dry",
}
AVAILABLE_ATTRIBUTES_AIRFRESH = {
    ATTR_TEMPERATURE: "temperature",
    ATTR_AIR_QUALITY_INDEX: "aqi",
    ATTR_AVERAGE_AIR_QUALITY_INDEX: "average_aqi",
    ATTR_CO2: "co2",
    ATTR_HUMIDITY: "humidity",
    ATTR_MODE: "mode",
    ATTR_LED: "led",
    ATTR_LED_BRIGHTNESS: "led_brightness",
    ATTR_BUZZER: "buzzer",
    ATTR_CHILD_LOCK: "child_lock",
    ATTR_FILTER_LIFE: "filter_life_remaining",
    ATTR_FILTER_HOURS_USED: "filter_hours_used",
    ATTR_USE_TIME: "use_time",
    ATTR_MOTOR_SPEED: "motor_speed",
    ATTR_EXTRA_FEATURES: "extra_features",
}
# Speed lists exposed to Home Assistant per device family; names match the
# miio OperationMode enum members used by the entity classes.
OPERATION_MODES_AIRPURIFIER = ["Auto", "Silent", "Favorite", "Idle"]
OPERATION_MODES_AIRPURIFIER_PRO = ["Auto", "Silent", "Favorite"]
OPERATION_MODES_AIRPURIFIER_PRO_V7 = OPERATION_MODES_AIRPURIFIER_PRO
OPERATION_MODES_AIRPURIFIER_2S = ["Auto", "Silent", "Favorite"]
OPERATION_MODES_AIRPURIFIER_V3 = [
    "Auto",
    "Silent",
    "Favorite",
    "Idle",
    "Medium",
    "High",
    "Strong",
]
OPERATION_MODES_AIRFRESH = ["Auto", "Silent", "Interval", "Low", "Middle", "Strong"]
# Reply payload indicating a successful miio command.
SUCCESS = ["ok"]
# Capability bit flags; combined per model below to gate optional services.
FEATURE_SET_BUZZER = 1
FEATURE_SET_LED = 2
FEATURE_SET_CHILD_LOCK = 4
FEATURE_SET_LED_BRIGHTNESS = 8
FEATURE_SET_FAVORITE_LEVEL = 16
FEATURE_SET_AUTO_DETECT = 32
FEATURE_SET_LEARN_MODE = 64
FEATURE_SET_VOLUME = 128
FEATURE_RESET_FILTER = 256
FEATURE_SET_EXTRA_FEATURES = 512
FEATURE_SET_TARGET_HUMIDITY = 1024
FEATURE_SET_DRY = 2048
FEATURE_FLAGS_AIRPURIFIER = (
    FEATURE_SET_BUZZER
    | FEATURE_SET_CHILD_LOCK
    | FEATURE_SET_LED
    | FEATURE_SET_LED_BRIGHTNESS
    | FEATURE_SET_FAVORITE_LEVEL
    | FEATURE_SET_LEARN_MODE
    | FEATURE_RESET_FILTER
    | FEATURE_SET_EXTRA_FEATURES
)
FEATURE_FLAGS_AIRPURIFIER_PRO = (
    FEATURE_SET_CHILD_LOCK
    | FEATURE_SET_LED
    | FEATURE_SET_FAVORITE_LEVEL
    | FEATURE_SET_AUTO_DETECT
    | FEATURE_SET_VOLUME
)
FEATURE_FLAGS_AIRPURIFIER_PRO_V7 = (
    FEATURE_SET_CHILD_LOCK
    | FEATURE_SET_LED
    | FEATURE_SET_FAVORITE_LEVEL
    | FEATURE_SET_VOLUME
)
FEATURE_FLAGS_AIRPURIFIER_2S = (
    FEATURE_SET_BUZZER
    | FEATURE_SET_CHILD_LOCK
    | FEATURE_SET_LED
    | FEATURE_SET_FAVORITE_LEVEL
)
FEATURE_FLAGS_AIRPURIFIER_V3 = (
    FEATURE_SET_BUZZER | FEATURE_SET_CHILD_LOCK | FEATURE_SET_LED
)
FEATURE_FLAGS_AIRHUMIDIFIER = (
    FEATURE_SET_BUZZER
    | FEATURE_SET_CHILD_LOCK
    | FEATURE_SET_LED
    | FEATURE_SET_LED_BRIGHTNESS
    | FEATURE_SET_TARGET_HUMIDITY
)
# CA/CB humidifiers additionally support the dry mode toggle.
FEATURE_FLAGS_AIRHUMIDIFIER_CA_AND_CB = FEATURE_FLAGS_AIRHUMIDIFIER | FEATURE_SET_DRY
FEATURE_FLAGS_AIRFRESH = (
    FEATURE_SET_BUZZER
    | FEATURE_SET_CHILD_LOCK
    | FEATURE_SET_LED
    | FEATURE_SET_LED_BRIGHTNESS
    | FEATURE_RESET_FILTER
    | FEATURE_SET_EXTRA_FEATURES
)
# Base schema: every service optionally targets specific entity ids.
AIRPURIFIER_SERVICE_SCHEMA = vol.Schema({vol.Optional(ATTR_ENTITY_ID): cv.entity_ids})
SERVICE_SCHEMA_LED_BRIGHTNESS = AIRPURIFIER_SERVICE_SCHEMA.extend(
    {vol.Required(ATTR_BRIGHTNESS): vol.All(vol.Coerce(int), vol.Clamp(min=0, max=2))}
)
SERVICE_SCHEMA_FAVORITE_LEVEL = AIRPURIFIER_SERVICE_SCHEMA.extend(
    {vol.Required(ATTR_LEVEL): vol.All(vol.Coerce(int), vol.Clamp(min=0, max=17))}
)
SERVICE_SCHEMA_VOLUME = AIRPURIFIER_SERVICE_SCHEMA.extend(
    {vol.Required(ATTR_VOLUME): vol.All(vol.Coerce(int), vol.Clamp(min=0, max=100))}
)
SERVICE_SCHEMA_EXTRA_FEATURES = AIRPURIFIER_SERVICE_SCHEMA.extend(
    {vol.Required(ATTR_FEATURES): vol.All(vol.Coerce(int), vol.Range(min=0))}
)
SERVICE_SCHEMA_TARGET_HUMIDITY = AIRPURIFIER_SERVICE_SCHEMA.extend(
    {
        vol.Required(ATTR_HUMIDITY): vol.All(
            vol.Coerce(int), vol.In([30, 40, 50, 60, 70, 80])
        )
    }
)
# Maps each registered service name to the entity coroutine that implements
# it; entries without a "schema" key fall back to the base schema.
SERVICE_TO_METHOD = {
    SERVICE_SET_BUZZER_ON: {"method": "async_set_buzzer_on"},
    SERVICE_SET_BUZZER_OFF: {"method": "async_set_buzzer_off"},
    SERVICE_SET_LED_ON: {"method": "async_set_led_on"},
    SERVICE_SET_LED_OFF: {"method": "async_set_led_off"},
    SERVICE_SET_CHILD_LOCK_ON: {"method": "async_set_child_lock_on"},
    SERVICE_SET_CHILD_LOCK_OFF: {"method": "async_set_child_lock_off"},
    SERVICE_SET_AUTO_DETECT_ON: {"method": "async_set_auto_detect_on"},
    SERVICE_SET_AUTO_DETECT_OFF: {"method": "async_set_auto_detect_off"},
    SERVICE_SET_LEARN_MODE_ON: {"method": "async_set_learn_mode_on"},
    SERVICE_SET_LEARN_MODE_OFF: {"method": "async_set_learn_mode_off"},
    SERVICE_RESET_FILTER: {"method": "async_reset_filter"},
    SERVICE_SET_LED_BRIGHTNESS: {
        "method": "async_set_led_brightness",
        "schema": SERVICE_SCHEMA_LED_BRIGHTNESS,
    },
    SERVICE_SET_FAVORITE_LEVEL: {
        "method": "async_set_favorite_level",
        "schema": SERVICE_SCHEMA_FAVORITE_LEVEL,
    },
    SERVICE_SET_VOLUME: {"method": "async_set_volume", "schema": SERVICE_SCHEMA_VOLUME},
    SERVICE_SET_EXTRA_FEATURES: {
        "method": "async_set_extra_features",
        "schema": SERVICE_SCHEMA_EXTRA_FEATURES,
    },
    SERVICE_SET_TARGET_HUMIDITY: {
        "method": "async_set_target_humidity",
        "schema": SERVICE_SCHEMA_TARGET_HUMIDITY,
    },
    SERVICE_SET_DRY_ON: {"method": "async_set_dry_on"},
    SERVICE_SET_DRY_OFF: {"method": "async_set_dry_off"},
}
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up the miio fan device from config.

    Detects the device model (querying the device when not configured),
    instantiates the matching entity class and registers the custom
    miio services on the fan domain.
    """
    if DATA_KEY not in hass.data:
        hass.data[DATA_KEY] = {}
    host = config[CONF_HOST]
    token = config[CONF_TOKEN]
    name = config[CONF_NAME]
    model = config.get(CONF_MODEL)
    # Only a truncated token prefix is logged, deliberately.
    _LOGGER.info("Initializing with host %s (token %s...)", host, token[:5])
    unique_id = None
    if model is None:
        # Model not configured: ask the device itself.
        try:
            miio_device = Device(host, token)
            device_info = await hass.async_add_executor_job(miio_device.info)
            model = device_info.model
            unique_id = f"{model}-{device_info.mac_address}"
            _LOGGER.info(
                "%s %s %s detected",
                model,
                device_info.firmware_version,
                device_info.hardware_version,
            )
        except DeviceException as ex:
            # Chain the original miio error so the retry log shows the cause.
            raise PlatformNotReady from ex
    if model.startswith("zhimi.airpurifier."):
        air_purifier = AirPurifier(host, token)
        device = XiaomiAirPurifier(name, air_purifier, model, unique_id)
    elif model.startswith("zhimi.humidifier."):
        air_humidifier = AirHumidifier(host, token, model=model)
        device = XiaomiAirHumidifier(name, air_humidifier, model, unique_id)
    elif model.startswith("zhimi.airfresh."):
        air_fresh = AirFresh(host, token)
        device = XiaomiAirFresh(name, air_fresh, model, unique_id)
    else:
        _LOGGER.error(
            "Unsupported device found! Please create an issue at "
            "https://github.com/syssi/xiaomi_airpurifier/issues "
            "and provide the following data: %s",
            model,
        )
        return False
    hass.data[DATA_KEY][host] = device
    async_add_entities([device], update_before_add=True)

    async def async_service_handler(service):
        """Map services to methods on XiaomiAirPurifier."""
        method = SERVICE_TO_METHOD.get(service.service)
        # Forward every service argument except the entity selector.
        params = {
            key: value for key, value in service.data.items() if key != ATTR_ENTITY_ID
        }
        entity_ids = service.data.get(ATTR_ENTITY_ID)
        if entity_ids:
            devices = [
                device
                for device in hass.data[DATA_KEY].values()
                if device.entity_id in entity_ids
            ]
        else:
            devices = hass.data[DATA_KEY].values()
        update_tasks = []
        for device in devices:
            # Skip entities that don't implement this service's method.
            if not hasattr(device, method["method"]):
                continue
            await getattr(device, method["method"])(**params)
            update_tasks.append(device.async_update_ha_state(True))
        if update_tasks:
            await asyncio.wait(update_tasks)

    for air_purifier_service in SERVICE_TO_METHOD:
        schema = SERVICE_TO_METHOD[air_purifier_service].get(
            "schema", AIRPURIFIER_SERVICE_SCHEMA
        )
        hass.services.async_register(
            DOMAIN, air_purifier_service, async_service_handler, schema=schema
        )
class XiaomiGenericDevice(FanEntity):
    """Representation of a generic Xiaomi device."""
    def __init__(self, name, device, model, unique_id):
        """Initialize the generic Xiaomi device."""
        self._name = name
        # Wrapped miio device object; all commands go through it
        # (self._device.on/off/set_buzzer/...).
        self._device = device
        self._model = model
        self._unique_id = unique_id
        # True once a poll succeeded; cleared on any command/poll failure.
        self._available = False
        # On/off state; None until the first successful update.
        self._state = None
        self._state_attrs = {ATTR_MODEL: self._model}
        # Bitmask of FEATURE_* flags; subclasses override per model.
        self._device_features = FEATURE_SET_CHILD_LOCK
        # Set after a successful command so subclasses skip the next poll
        # (the device does not report the new state immediately).
        self._skip_update = False
    @property
    def supported_features(self):
        """Flag supported features."""
        return SUPPORT_SET_SPEED
    @property
    def should_poll(self):
        """Poll the device."""
        return True
    @property
    def unique_id(self):
        """Return an unique ID."""
        return self._unique_id
    @property
    def name(self):
        """Return the name of the device if any."""
        return self._name
    @property
    def available(self):
        """Return true when state is known."""
        return self._available
    @property
    def device_state_attributes(self):
        """Return the state attributes of the device."""
        return self._state_attrs
    @property
    def is_on(self):
        """Return true if device is on."""
        return self._state
    @staticmethod
    def _extract_value_from_attribute(state, attribute):
        # Enum members are flattened to their raw value for the attribute dict.
        value = getattr(state, attribute)
        if isinstance(value, Enum):
            return value.value
        return value
    async def _try_command(self, mask_error, func, *args, **kwargs) -> bool:
        """Call a miio device command handling error messages."""
        try:
            # miio calls are blocking; run them in the executor.
            result = await self.hass.async_add_executor_job(
                partial(func, *args, **kwargs)
            )
            _LOGGER.debug("Response received from miio device: %s", result)
            return result == SUCCESS
        except DeviceException as exc:
            # mask_error is the human-readable message logged with the cause.
            _LOGGER.error(mask_error, exc)
            self._available = False
            return False
    async def async_turn_on(self, speed: str = None, **kwargs) -> None:
        """Turn the device on."""
        if speed:
            # If operation mode was set the device must not be turned on.
            result = await self.async_set_speed(speed)
        else:
            result = await self._try_command(
                "Turning the miio device on failed.", self._device.on
            )
        if result:
            self._state = True
            self._skip_update = True
    async def async_turn_off(self, **kwargs) -> None:
        """Turn the device off."""
        result = await self._try_command(
            "Turning the miio device off failed.", self._device.off
        )
        if result:
            self._state = False
            self._skip_update = True
    async def async_set_buzzer_on(self):
        """Turn the buzzer on."""
        # No-op when the model's feature bitmask lacks this capability.
        if self._device_features & FEATURE_SET_BUZZER == 0:
            return
        await self._try_command(
            "Turning the buzzer of the miio device on failed.",
            self._device.set_buzzer,
            True,
        )
    async def async_set_buzzer_off(self):
        """Turn the buzzer off."""
        if self._device_features & FEATURE_SET_BUZZER == 0:
            return
        await self._try_command(
            "Turning the buzzer of the miio device off failed.",
            self._device.set_buzzer,
            False,
        )
    async def async_set_child_lock_on(self):
        """Turn the child lock on."""
        if self._device_features & FEATURE_SET_CHILD_LOCK == 0:
            return
        await self._try_command(
            "Turning the child lock of the miio device on failed.",
            self._device.set_child_lock,
            True,
        )
    async def async_set_child_lock_off(self):
        """Turn the child lock off."""
        if self._device_features & FEATURE_SET_CHILD_LOCK == 0:
            return
        await self._try_command(
            "Turning the child lock of the miio device off failed.",
            self._device.set_child_lock,
            False,
        )
class XiaomiAirPurifier(XiaomiGenericDevice):
    """Representation of a Xiaomi Air Purifier."""
    def __init__(self, name, device, model, unique_id):
        """Initialize the plug switch."""
        super().__init__(name, device, model, unique_id)
        # Select feature flags, exposed attributes and speed list per model.
        if self._model == MODEL_AIRPURIFIER_PRO:
            self._device_features = FEATURE_FLAGS_AIRPURIFIER_PRO
            self._available_attributes = AVAILABLE_ATTRIBUTES_AIRPURIFIER_PRO
            self._speed_list = OPERATION_MODES_AIRPURIFIER_PRO
        elif self._model == MODEL_AIRPURIFIER_PRO_V7:
            self._device_features = FEATURE_FLAGS_AIRPURIFIER_PRO_V7
            self._available_attributes = AVAILABLE_ATTRIBUTES_AIRPURIFIER_PRO_V7
            self._speed_list = OPERATION_MODES_AIRPURIFIER_PRO_V7
        elif self._model == MODEL_AIRPURIFIER_2S:
            self._device_features = FEATURE_FLAGS_AIRPURIFIER_2S
            self._available_attributes = AVAILABLE_ATTRIBUTES_AIRPURIFIER_2S
            self._speed_list = OPERATION_MODES_AIRPURIFIER_2S
        elif self._model == MODEL_AIRPURIFIER_V3:
            self._device_features = FEATURE_FLAGS_AIRPURIFIER_V3
            self._available_attributes = AVAILABLE_ATTRIBUTES_AIRPURIFIER_V3
            self._speed_list = OPERATION_MODES_AIRPURIFIER_V3
        else:
            self._device_features = FEATURE_FLAGS_AIRPURIFIER
            self._available_attributes = AVAILABLE_ATTRIBUTES_AIRPURIFIER
            self._speed_list = OPERATION_MODES_AIRPURIFIER
        self._state_attrs.update(
            {attribute: None for attribute in self._available_attributes}
        )
    async def async_update(self):
        """Fetch state from the device."""
        # On state change the device doesn't provide the new state immediately.
        if self._skip_update:
            self._skip_update = False
            return
        try:
            state = await self.hass.async_add_executor_job(self._device.status)
            _LOGGER.debug("Got new state: %s", state)
            self._available = True
            self._state = state.is_on
            self._state_attrs.update(
                {
                    key: self._extract_value_from_attribute(state, value)
                    for key, value in self._available_attributes.items()
                }
            )
        except DeviceException as ex:
            self._available = False
            _LOGGER.error("Got exception while fetching the state: %s", ex)
    @property
    def speed_list(self) -> list:
        """Get the list of available speeds."""
        return self._speed_list
    @property
    def speed(self):
        """Return the current speed."""
        if self._state:
            return AirpurifierOperationMode(self._state_attrs[ATTR_MODE]).name
        return None
    async def async_set_speed(self, speed: str) -> None:
        """Set the speed of the fan."""
        if self.supported_features & SUPPORT_SET_SPEED == 0:
            return
        _LOGGER.debug("Setting the operation mode to: %s", speed)
        await self._try_command(
            "Setting operation mode of the miio device failed.",
            self._device.set_mode,
            AirpurifierOperationMode[speed.title()],
        )
    async def async_set_led_on(self):
        """Turn the led on."""
        if self._device_features & FEATURE_SET_LED == 0:
            return
        # Fixed copy/paste error: the message previously said "off failed".
        await self._try_command(
            "Turning the led of the miio device on failed.", self._device.set_led, True
        )
    async def async_set_led_off(self):
        """Turn the led off."""
        if self._device_features & FEATURE_SET_LED == 0:
            return
        await self._try_command(
            "Turning the led of the miio device off failed.",
            self._device.set_led,
            False,
        )
    async def async_set_led_brightness(self, brightness: int = 2):
        """Set the led brightness."""
        if self._device_features & FEATURE_SET_LED_BRIGHTNESS == 0:
            return
        await self._try_command(
            "Setting the led brightness of the miio device failed.",
            self._device.set_led_brightness,
            AirpurifierLedBrightness(brightness),
        )
    async def async_set_favorite_level(self, level: int = 1):
        """Set the favorite level."""
        if self._device_features & FEATURE_SET_FAVORITE_LEVEL == 0:
            return
        await self._try_command(
            "Setting the favorite level of the miio device failed.",
            self._device.set_favorite_level,
            level,
        )
    async def async_set_auto_detect_on(self):
        """Turn the auto detect on."""
        if self._device_features & FEATURE_SET_AUTO_DETECT == 0:
            return
        await self._try_command(
            "Turning the auto detect of the miio device on failed.",
            self._device.set_auto_detect,
            True,
        )
    async def async_set_auto_detect_off(self):
        """Turn the auto detect off."""
        if self._device_features & FEATURE_SET_AUTO_DETECT == 0:
            return
        await self._try_command(
            "Turning the auto detect of the miio device off failed.",
            self._device.set_auto_detect,
            False,
        )
    async def async_set_learn_mode_on(self):
        """Turn the learn mode on."""
        if self._device_features & FEATURE_SET_LEARN_MODE == 0:
            return
        await self._try_command(
            "Turning the learn mode of the miio device on failed.",
            self._device.set_learn_mode,
            True,
        )
    async def async_set_learn_mode_off(self):
        """Turn the learn mode off."""
        if self._device_features & FEATURE_SET_LEARN_MODE == 0:
            return
        await self._try_command(
            "Turning the learn mode of the miio device off failed.",
            self._device.set_learn_mode,
            False,
        )
    async def async_set_volume(self, volume: int = 50):
        """Set the sound volume."""
        if self._device_features & FEATURE_SET_VOLUME == 0:
            return
        await self._try_command(
            "Setting the sound volume of the miio device failed.",
            self._device.set_volume,
            volume,
        )
    async def async_set_extra_features(self, features: int = 1):
        """Set the extra features."""
        if self._device_features & FEATURE_SET_EXTRA_FEATURES == 0:
            return
        await self._try_command(
            "Setting the extra features of the miio device failed.",
            self._device.set_extra_features,
            features,
        )
    async def async_reset_filter(self):
        """Reset the filter lifetime and usage."""
        if self._device_features & FEATURE_RESET_FILTER == 0:
            return
        await self._try_command(
            "Resetting the filter lifetime of the miio device failed.",
            self._device.reset_filter,
        )
class XiaomiAirHumidifier(XiaomiGenericDevice):
    """Representation of a Xiaomi Air Humidifier."""
    def __init__(self, name, device, model, unique_id):
        """Initialize the plug switch."""
        super().__init__(name, device, model, unique_id)
        # CA1/CB1 models lack the "Strong" mode; others lack "Auto".
        if self._model in [MODEL_AIRHUMIDIFIER_CA1, MODEL_AIRHUMIDIFIER_CB1]:
            self._device_features = FEATURE_FLAGS_AIRHUMIDIFIER_CA_AND_CB
            self._available_attributes = AVAILABLE_ATTRIBUTES_AIRHUMIDIFIER_CA_AND_CB
            self._speed_list = [
                mode.name
                for mode in AirhumidifierOperationMode
                if mode is not AirhumidifierOperationMode.Strong
            ]
        else:
            self._device_features = FEATURE_FLAGS_AIRHUMIDIFIER
            self._available_attributes = AVAILABLE_ATTRIBUTES_AIRHUMIDIFIER
            self._speed_list = [
                mode.name
                for mode in AirhumidifierOperationMode
                if mode is not AirhumidifierOperationMode.Auto
            ]
        self._state_attrs.update(
            {attribute: None for attribute in self._available_attributes}
        )
    async def async_update(self):
        """Fetch state from the device."""
        # On state change the device doesn't provide the new state immediately.
        if self._skip_update:
            self._skip_update = False
            return
        try:
            state = await self.hass.async_add_executor_job(self._device.status)
            _LOGGER.debug("Got new state: %s", state)
            self._available = True
            self._state = state.is_on
            self._state_attrs.update(
                {
                    key: self._extract_value_from_attribute(state, value)
                    for key, value in self._available_attributes.items()
                }
            )
        except DeviceException as ex:
            self._available = False
            _LOGGER.error("Got exception while fetching the state: %s", ex)
    @property
    def speed_list(self) -> list:
        """Get the list of available speeds."""
        return self._speed_list
    @property
    def speed(self):
        """Return the current speed."""
        if self._state:
            return AirhumidifierOperationMode(self._state_attrs[ATTR_MODE]).name
        return None
    async def async_set_speed(self, speed: str) -> None:
        """Set the speed of the fan."""
        if self.supported_features & SUPPORT_SET_SPEED == 0:
            return
        _LOGGER.debug("Setting the operation mode to: %s", speed)
        await self._try_command(
            "Setting operation mode of the miio device failed.",
            self._device.set_mode,
            AirhumidifierOperationMode[speed.title()],
        )
    async def async_set_led_brightness(self, brightness: int = 2):
        """Set the led brightness."""
        if self._device_features & FEATURE_SET_LED_BRIGHTNESS == 0:
            return
        await self._try_command(
            "Setting the led brightness of the miio device failed.",
            self._device.set_led_brightness,
            AirhumidifierLedBrightness(brightness),
        )
    async def async_set_target_humidity(self, humidity: int = 40):
        """Set the target humidity."""
        if self._device_features & FEATURE_SET_TARGET_HUMIDITY == 0:
            return
        await self._try_command(
            "Setting the target humidity of the miio device failed.",
            self._device.set_target_humidity,
            humidity,
        )
    async def async_set_dry_on(self):
        """Turn the dry mode on."""
        if self._device_features & FEATURE_SET_DRY == 0:
            return
        # Fixed copy/paste error: the message previously said "off failed".
        await self._try_command(
            "Turning the dry mode of the miio device on failed.",
            self._device.set_dry,
            True,
        )
    async def async_set_dry_off(self):
        """Turn the dry mode off."""
        if self._device_features & FEATURE_SET_DRY == 0:
            return
        await self._try_command(
            "Turning the dry mode of the miio device off failed.",
            self._device.set_dry,
            False,
        )
class XiaomiAirFresh(XiaomiGenericDevice):
    """Representation of a Xiaomi Air Fresh."""
    def __init__(self, name, device, model, unique_id):
        """Initialize the miio device."""
        super().__init__(name, device, model, unique_id)
        self._device_features = FEATURE_FLAGS_AIRFRESH
        self._available_attributes = AVAILABLE_ATTRIBUTES_AIRFRESH
        self._speed_list = OPERATION_MODES_AIRFRESH
        self._state_attrs.update(
            {attribute: None for attribute in self._available_attributes}
        )
    async def async_update(self):
        """Fetch state from the device."""
        # On state change the device doesn't provide the new state immediately.
        if self._skip_update:
            self._skip_update = False
            return
        try:
            state = await self.hass.async_add_executor_job(self._device.status)
            _LOGGER.debug("Got new state: %s", state)
            self._available = True
            self._state = state.is_on
            self._state_attrs.update(
                {
                    key: self._extract_value_from_attribute(state, value)
                    for key, value in self._available_attributes.items()
                }
            )
        except DeviceException as ex:
            self._available = False
            _LOGGER.error("Got exception while fetching the state: %s", ex)
    @property
    def speed_list(self) -> list:
        """Get the list of available speeds."""
        return self._speed_list
    @property
    def speed(self):
        """Return the current speed."""
        if self._state:
            return AirfreshOperationMode(self._state_attrs[ATTR_MODE]).name
        return None
    async def async_set_speed(self, speed: str) -> None:
        """Set the speed of the fan."""
        if self.supported_features & SUPPORT_SET_SPEED == 0:
            return
        _LOGGER.debug("Setting the operation mode to: %s", speed)
        await self._try_command(
            "Setting operation mode of the miio device failed.",
            self._device.set_mode,
            AirfreshOperationMode[speed.title()],
        )
    async def async_set_led_on(self):
        """Turn the led on."""
        if self._device_features & FEATURE_SET_LED == 0:
            return
        # Fixed copy/paste error: the message previously said "off failed".
        await self._try_command(
            "Turning the led of the miio device on failed.", self._device.set_led, True
        )
    async def async_set_led_off(self):
        """Turn the led off."""
        if self._device_features & FEATURE_SET_LED == 0:
            return
        await self._try_command(
            "Turning the led of the miio device off failed.",
            self._device.set_led,
            False,
        )
    async def async_set_led_brightness(self, brightness: int = 2):
        """Set the led brightness."""
        if self._device_features & FEATURE_SET_LED_BRIGHTNESS == 0:
            return
        await self._try_command(
            "Setting the led brightness of the miio device failed.",
            self._device.set_led_brightness,
            AirfreshLedBrightness(brightness),
        )
    async def async_set_extra_features(self, features: int = 1):
        """Set the extra features."""
        if self._device_features & FEATURE_SET_EXTRA_FEATURES == 0:
            return
        await self._try_command(
            "Setting the extra features of the miio device failed.",
            self._device.set_extra_features,
            features,
        )
    async def async_reset_filter(self):
        """Reset the filter lifetime and usage."""
        if self._device_features & FEATURE_RESET_FILTER == 0:
            return
        await self._try_command(
            "Resetting the filter lifetime of the miio device failed.",
            self._device.reset_filter,
        )
| |
from datetime import datetime
from django.contrib.contenttypes.models import ContentType
import os
import json
from django.db import IntegrityError
from django.core.urlresolvers import reverse
from django.core.files.storage import default_storage, get_storage_class
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.contrib import messages
from django.http import HttpResponse, HttpResponseBadRequest, \
HttpResponseRedirect, HttpResponseForbidden, HttpResponseNotFound,\
HttpResponseServerError
from django.shortcuts import render_to_response, get_object_or_404
from django.template import loader, RequestContext
from django.utils.translation import ugettext as _
from django.views.decorators.http import require_GET, require_POST,\
require_http_methods
from google_doc import GoogleDoc
from guardian.shortcuts import assign_perm, remove_perm, get_users_with_perms
from main.forms import UserProfileForm, FormLicenseForm, DataLicenseForm,\
SupportDocForm, QuickConverterFile, QuickConverterURL, QuickConverter,\
SourceForm, PermissionForm, MediaForm, MapboxLayerForm, \
ActivateSMSSupportFom
from main.models import UserProfile, MetaData
from odk_logger.models import Instance, XForm
from odk_logger.views import enter_data
from odk_viewer.models import DataDictionary, ParsedInstance
from odk_viewer.models.data_dictionary import upload_to
from odk_viewer.models.parsed_instance import GLOBAL_SUBMISSION_STATS,\
DATETIME_FORMAT
from odk_viewer.views import survey_responses, attachment_url
from stats.models import StatsCount
from stats.tasks import stat_log
from utils.decorators import is_owner
from utils.logger_tools import response_with_mimetype_and_name, publish_form
from utils.user_auth import check_and_set_user, set_profile_data,\
has_permission, helper_auth_helper, get_xform_and_perms,\
check_and_set_user_and_form, add_cors_headers
from utils.log import audit_log, Actions
from main.models import AuditLog
from django.conf import settings
from utils.viewer_tools import enketo_url
from utils.qrcode import generate_qrcode
from sms_support.tools import check_form_sms_compatibility, is_sms_related
from sms_support.autodoc import get_autodoc_for
from sms_support.providers import providers_doc
from registration.signals import user_registered
from django.dispatch import receiver
from rest_framework.authtoken.models import Token
@receiver(user_registered, dispatch_uid='auto_add_crowdform')
def auto_add_crowd_form_to_registered_user(sender, **kwargs):
    """Subscribe a freshly registered user to the configured default crowdform."""
    new_user = kwargs.get('user')
    # Both settings must be present (and AUTO_ADD_CROWDFORM truthy) to proceed.
    if not getattr(settings, 'AUTO_ADD_CROWDFORM', False):
        return
    default_crowdform = getattr(settings, 'DEFAULT_CROWDFORM', None)
    if not isinstance(default_crowdform, dict):
        return
    if 'xform_username' not in default_crowdform \
            or 'xform_id_string' not in default_crowdform:
        return
    try:
        xform = XForm.objects.get(
            id_string=default_crowdform['xform_id_string'],
            user__username=default_crowdform['xform_username'])
    except XForm.DoesNotExist:
        # Misconfigured default crowdform: silently skip, as before.
        return
    MetaData.crowdform_users(xform, new_user.username)
def home(request):
    """Landing page; authenticated users are sent to their own profile."""
    username = request.user.username
    if username:
        target = reverse(profile, kwargs={'username': username})
        return HttpResponseRedirect(target)
    return render_to_response('home.html',
                              context_instance=RequestContext(request))
@login_required
def login_redirect(request):
    """After login, land the user on their own profile page."""
    target = reverse(profile,
                     kwargs={'username': request.user.username})
    return HttpResponseRedirect(target)
@require_POST
@login_required
def clone_xlsform(request, username):
    """
    Copy a public/Shared form to a users list of forms.
    Eliminates the need to download Excel File and upload again.
    """
    to_username = request.user.username
    context = RequestContext(request)
    # Placeholder message; overwritten by the result of publish_form below.
    context.message = {'type': None, 'text': '....'}

    # Closure handed to publish_form: performs the actual copy and returns a
    # success-message dict.  publish_form catches publishing errors and turns
    # them into an error-message dict.
    def set_form():
        form_owner = request.POST.get('username')
        id_string = request.POST.get('id_string')
        xform = XForm.objects.get(user__username=form_owner,
                                  id_string=id_string)
        # id_strings may not start with a digit; prefix with an underscore.
        if len(id_string) > 0 and id_string[0].isdigit():
            id_string = '_' + id_string
        path = xform.xls.name
        if default_storage.exists(path):
            # Copy the source XLS into the cloning user's storage area.
            xls_file = upload_to(None, '%s%s.xls' % (
                id_string, XForm.CLONED_SUFFIX), to_username)
            xls_data = default_storage.open(path)
            xls_file = default_storage.save(xls_file, xls_data)
            context.message = u'%s-%s' % (form_owner, xls_file)
            # Publishing the copied XLS creates the cloned DataDictionary.
            survey = DataDictionary.objects.create(
                user=request.user,
                xls=xls_file
            ).survey
            # log to cloner's account
            audit = {}
            audit_log(
                Actions.FORM_CLONED, request.user, request.user,
                _("Cloned form '%(id_string)s'.") %
                {
                    'id_string': survey.id_string,
                }, audit, request)
            clone_form_url = reverse(
                show, kwargs={
                    'username': to_username,
                    'id_string': xform.id_string + XForm.CLONED_SUFFIX})
            return {
                'type': 'alert-success',
                'text': _(u'Successfully cloned to %(form_url)s into your '
                          u'%(profile_url)s') %
                {'form_url': u'<a href="%(url)s">%(id_string)s</a> ' % {
                    'id_string': survey.id_string,
                    'url': clone_form_url
                },
                    'profile_url': u'<a href="%s">profile</a>.' %
                    reverse(profile, kwargs={'username': to_username})}
            }
    form_result = publish_form(set_form)
    if form_result['type'] == 'alert-success':
        # comment the following condition (and else)
        # when we want to enable sms check for all.
        # until then, it checks if form barely related to sms
        if is_sms_related(form_result.get('form_o')):
            form_result_sms = check_form_sms_compatibility(form_result)
            context.message_list = [form_result, form_result_sms]
        else:
            context.message = form_result
    else:
        context.message = form_result
    if request.is_ajax():
        # AJAX response: a jQuery snippet that injects the rendered message.
        res = loader.render_to_string(
            'message.html',
            context_instance=context).replace("'", r"\'").replace('\n', '')
        return HttpResponse(
            "$('#mfeedback').html('%s').show();" % res)
    else:
        return HttpResponse(context.message['text'])
def profile(request, username):
    """Public profile page; doubles as the owner's dashboard and handles
    XLSForm publishing on POST."""
    context = RequestContext(request)
    content_user = get_object_or_404(User, username=username)
    context.form = QuickConverter()
    # xlsform submission...
    if request.method == 'POST' and request.user.is_authenticated():
        # Closure handed to publish_form: publishes the uploaded XLSForm and
        # returns a success-message dict; publish_form converts exceptions
        # into error-message dicts.
        def set_form():
            form = QuickConverter(request.POST, request.FILES)
            survey = form.publish(request.user).survey
            audit = {}
            audit_log(
                Actions.FORM_PUBLISHED, request.user, content_user,
                _("Published form '%(id_string)s'.") %
                {
                    'id_string': survey.id_string,
                }, audit, request)
            enketo_webform_url = reverse(
                enter_data,
                kwargs={'username': username, 'id_string': survey.id_string}
            )
            return {
                'type': 'alert-success',
                'preview_url': reverse(enketo_preview, kwargs={
                    'username': username,
                    'id_string': survey.id_string
                }),
                'text': _(u'Successfully published %(form_id)s.'
                          u' <a href="%(form_url)s">Enter Web Form</a>'
                          u' or <a href="#preview-modal" data-toggle="modal">'
                          u'Preview Web Form</a>')
                % {'form_id': survey.id_string,
                    'form_url': enketo_webform_url},
                'form_o': survey
            }
        form_result = publish_form(set_form)
        if form_result['type'] == 'alert-success':
            # comment the following condition (and else)
            # when we want to enable sms check for all.
            # until then, it checks if form barely related to sms
            if is_sms_related(form_result.get('form_o')):
                form_result_sms = check_form_sms_compatibility(form_result)
                context.message_list = [form_result, form_result_sms]
            else:
                context.message = form_result
        else:
            context.message = form_result
    # profile view...
    # for the same user -> dashboard
    if content_user == request.user:
        context.show_dashboard = True
        context.all_forms = content_user.xforms.count()
        context.form = QuickConverterFile()
        context.form_url = QuickConverterURL()
        context.odk_url = request.build_absolute_uri(
            "/%s" % request.user.username)
        xforms = XForm.objects.filter(user=content_user)\
            .select_related('user', 'surveys')
        context.user_xforms = xforms
        # Crowdforms this user has joined (tracked through MetaData rows).
        crowdforms = XForm.objects.filter(
            metadata__data_type=MetaData.CROWDFORM_USERS,
            metadata__data_value=username,)\
            .select_related('user')
        context.crowdforms = crowdforms
        # forms shared with user
        xfct = ContentType.objects.get(app_label='odk_logger', model='xform')
        xfs = content_user.userobjectpermission_set.filter(content_type=xfct)
        shared_forms_pks = list(set([xf.object_pk for xf in xfs]))
        context.forms_shared_with = XForm.objects.filter(
            pk__in=shared_forms_pks).exclude(user=content_user)\
            .select_related('user')
    # for any other user -> profile
    set_profile_data(context, content_user)
    return render_to_response("profile.html", context_instance=context)
def members_list(request):
    """Staff/superuser-only listing of every registered user."""
    is_privileged = request.user.is_staff or request.user.is_superuser
    if not is_privileged:
        return HttpResponseForbidden(_(u'Forbidden.'))
    context = RequestContext(request)
    context.template = 'people.html'
    context.users = User.objects.all()
    return render_to_response("people.html", context_instance=context)
@login_required
def profile_settings(request, username):
    """Display and process the user-profile settings form."""
    context = RequestContext(request)
    content_user = check_and_set_user(request, username)
    context.content_user = content_user
    user_profile, _created = UserProfile.objects.get_or_create(
        user=content_user)
    if request.method != 'POST':
        # Plain GET: show an unbound form pre-filled with the current email.
        form = UserProfileForm(
            instance=user_profile, initial={"email": content_user.email})
        return render_to_response("settings.html", {'form': form},
                                  context_instance=context)
    form = UserProfileForm(request.POST, instance=user_profile)
    if form.is_valid():
        # Propagate the edited email onto the auth User before saving.
        form.instance.user.email = form.cleaned_data['email']
        form.instance.user.save()
        form.save()
        # todo: add string rep. of settings to see what changed
        audit = {}
        audit_log(
            Actions.PROFILE_SETTINGS_UPDATED, request.user, content_user,
            _("Profile settings updated."), audit, request)
        return HttpResponseRedirect(reverse(
            public_profile, kwargs={'username': request.user.username}
        ))
    # Invalid POST: re-render the bound form so its errors are shown.
    return render_to_response("settings.html", {'form': form},
                              context_instance=context)
@require_GET
def public_profile(request, username):
    """Render the read-only public profile for *username*."""
    content_user = check_and_set_user(request, username)
    # check_and_set_user may return a redirect instead of a User.
    if isinstance(content_user, HttpResponseRedirect):
        return content_user
    context = RequestContext(request)
    set_profile_data(context, content_user)
    context.is_owner = request.user == content_user
    audit_log(
        Actions.PUBLIC_PROFILE_ACCESSED, request.user, content_user,
        _("Public profile accessed."), {}, request)
    return render_to_response("profile.html", context_instance=context)
@login_required
def dashboard(request):
    """Render the logged-in user's dashboard."""
    context = RequestContext(request)
    context.form = QuickConverter()
    set_profile_data(context, request.user)
    context.odk_url = request.build_absolute_uri(
        "/%s" % request.user.username)
    return render_to_response("dashboard.html", context_instance=context)
@require_GET
def show(request, username=None, id_string=None, uuid=None):
    """Render the form detail page, with owner-only management widgets.

    A uuid-only request resolves the form and redirects to the canonical
    username/id_string URL, recording public-link access in the session.
    """
    if uuid:
        xform = get_object_or_404(XForm, uuid=uuid)
        request.session['public_link'] = \
            xform.uuid if MetaData.public_link(xform) else False
        return HttpResponseRedirect(reverse(show, kwargs={
            'username': xform.user.username,
            'id_string': xform.id_string
        }))
    xform, is_owner, can_edit, can_view = get_xform_and_perms(
        username, id_string, request)
    # no access
    if not (xform.shared or can_view or request.session.get('public_link')):
        return HttpResponseRedirect(reverse(home))
    context = RequestContext(request)
    # .exists() asks the database for a boolean instead of fetching and
    # counting every matching row (the old len(queryset) > 0 did).
    context.cloned = XForm.objects.filter(
        user__username=request.user.username,
        id_string=id_string + XForm.CLONED_SUFFIX).exists()
    context.public_link = MetaData.public_link(xform)
    context.is_owner = is_owner
    context.can_edit = can_edit
    context.can_view = can_view or request.session.get('public_link')
    context.xform = xform
    context.content_user = xform.user
    context.base_url = "https://%s" % request.get_host()
    context.source = MetaData.source(xform)
    context.form_license = MetaData.form_license(xform).data_value
    context.data_license = MetaData.data_license(xform).data_value
    context.supporting_docs = MetaData.supporting_docs(xform)
    context.media_upload = MetaData.media_upload(xform)
    context.mapbox_layer = MetaData.mapbox_layer_upload(xform)
    if is_owner:
        # Owner-only management forms and SMS-support widgets.
        context.sms_support_form = ActivateSMSSupportFom(
            initial={'enable_sms_support': xform.allows_sms,
                     'sms_id_string': xform.sms_id_string})
        if not xform.allows_sms:
            context.sms_compatible = check_form_sms_compatibility(
                None, json_survey=json.loads(xform.json))
        else:
            url_root = request.build_absolute_uri('/')[:-1]
            context.sms_providers_doc = providers_doc(
                url_root=url_root,
                username=username,
                id_string=id_string)
            context.url_root = url_root
        context.form_license_form = FormLicenseForm(
            initial={'value': context.form_license})
        context.data_license_form = DataLicenseForm(
            initial={'value': context.data_license})
        context.doc_form = SupportDocForm()
        context.source_form = SourceForm()
        context.media_form = MediaForm()
        context.mapbox_layer_form = MapboxLayerForm()
        # Summarize per-user edit/view permissions for the sharing widget.
        users_with_perms = []
        for perm in get_users_with_perms(xform, attach_perms=True).items():
            has_perm = []
            if 'change_xform' in perm[1]:
                has_perm.append(_(u"Can Edit"))
            if 'view_xform' in perm[1]:
                has_perm.append(_(u"Can View"))
            users_with_perms.append((perm[0], u" | ".join(has_perm)))
        context.users_with_perms = users_with_perms
        context.permission_form = PermissionForm(username)
    if xform.allows_sms:
        context.sms_support_doc = get_autodoc_for(xform)
    return render_to_response("show.html", context_instance=context)
@require_GET
def api_token(request, username=None):
    """Display the REST API token page for *username*."""
    user = get_object_or_404(User, username=username)
    context = RequestContext(request)
    # NOTE(review): get_or_create returns (Token, created), so token_key
    # holds the Token *object*, not the key string — presumably the
    # template renders token_key.key; confirm against api_token.html.
    context.token_key, created = Token.objects.get_or_create(user=user)
    return render_to_response("api_token.html", context_instance=context)
@require_http_methods(["GET", "OPTIONS"])
def api(request, username=None, id_string=None):
    """
    Returns all results as JSON. If a parameter string is passed,
    it takes the 'query' parameter, converts this string to a dictionary, and
    that is then used as a MongoDB query string.
    NOTE: only a specific set of operators are allowed, currently $or and $and.
    Please send a request if you'd like another operator to be enabled.
    NOTE: Your query must be valid JSON, double check it here,
    http://json.parser.online.fr/
    E.g. api?query='{"last_name": "Smith"}'
    """
    if request.method == "OPTIONS":
        # CORS preflight: reply with the CORS headers only.
        response = HttpResponse()
        add_cors_headers(response)
        return response
    # Authenticate once (the previous second, duplicated call was redundant).
    helper_auth_helper(request)
    xform, owner = check_and_set_user_and_form(username, id_string, request)
    if not xform:
        return HttpResponseForbidden(_(u'Not shared.'))
    try:
        args = {
            'username': username,
            'id_string': id_string,
            'query': request.GET.get('query'),
            'fields': request.GET.get('fields'),
            'sort': request.GET.get('sort')
        }
        # Optional numeric parameters; int() may raise ValueError below.
        if 'start' in request.GET:
            args["start"] = int(request.GET.get('start'))
        if 'limit' in request.GET:
            args["limit"] = int(request.GET.get('limit'))
        if 'count' in request.GET:
            args["count"] = int(request.GET.get('count')) > 0
        cursor = ParsedInstance.query_mongo(**args)
    except ValueError as e:
        return HttpResponseBadRequest(str(e))
    records = list(cursor)
    response_text = json.dumps(records)
    callback = request.GET.get('callback')
    if callback:
        # JSONP: wrap the payload in the requested callback function.
        response_text = "%s(%s)" % (callback, response_text)
    response = HttpResponse(response_text, mimetype='application/json')
    add_cors_headers(response)
    return response
@require_GET
def public_api(request, username, id_string):
    """
    Returns public information about the form as JSON
    """
    xform = get_object_or_404(
        XForm, user__username=username, id_string=id_string)
    fmt = '%Y-%m-%d %H:%M:%S'
    # Only non-sensitive metadata is exposed here.
    exports = {
        'username': xform.user.username,
        'id_string': xform.id_string,
        'bamboo_dataset': xform.bamboo_dataset,
        'shared': xform.shared,
        'shared_data': xform.shared_data,
        'downloadable': xform.downloadable,
        'is_crowd_form': xform.is_crowd_form,
        'title': xform.title,
        'date_created': xform.date_created.strftime(fmt),
        'date_modified': xform.date_modified.strftime(fmt),
        'uuid': xform.uuid,
    }
    return HttpResponse(json.dumps(exports), mimetype='application/json')
@login_required
def edit(request, username, id_string):
    """Apply one in-place edit to a form's settings.

    Exactly one of several mutually-exclusive POST fields is handled per
    request (description, title, toggle_shared, licenses, source, SMS
    support, media/map/doc uploads). Crowdform join/leave is handled via
    GET before the permission check. Every successful change is written
    to the audit log. Returns a redirect (or plain text for AJAX) on
    success, 403 on a failed permission check.
    """
    # NOTE(review): XForm.objects.get raises DoesNotExist (500) rather than
    # returning 404 for an unknown form — other views here use
    # get_object_or_404; confirm whether that difference is intentional.
    xform = XForm.objects.get(user__username=username, id_string=id_string)
    owner = xform.user
    # Crowdform membership changes arrive as GET and bypass the ownership
    # check below: any logged-in user may join/leave a crowdform.
    if request.GET.get('crowdform'):
        crowdform_action = request.GET['crowdform']
        request_username = request.user.username
        # ensure is crowdform
        if xform.is_crowd_form:
            if crowdform_action == 'delete':
                MetaData.objects.get(
                    xform__id_string=id_string,
                    data_value=request_username,
                    data_type=MetaData.CROWDFORM_USERS
                ).delete()
            elif crowdform_action == 'add':
                MetaData.crowdform_users(xform, request_username)
            return HttpResponseRedirect(reverse(profile, kwargs={
                'username': request_username
            }))
    # All remaining edits require ownership or the change_xform permission.
    if username == request.user.username or\
            request.user.has_perm('odk_logger.change_xform', xform):
        if request.POST.get('description'):
            audit = {
                'xform': xform.id_string
            }
            audit_log(
                Actions.FORM_UPDATED, request.user, owner,
                _("Description for '%(id_string)s' updated from "
                    "'%(old_description)s' to '%(new_description)s'.") %
                {
                    'id_string': xform.id_string,
                    'old_description': xform.description,
                    'new_description': request.POST['description']
                }, audit, request)
            xform.description = request.POST['description']
        elif request.POST.get('title'):
            audit = {
                'xform': xform.id_string
            }
            audit_log(
                Actions.FORM_UPDATED, request.user, owner,
                _("Title for '%(id_string)s' updated from "
                    "'%(old_title)s' to '%(new_title)s'.") %
                {
                    'id_string': xform.id_string,
                    'old_title': xform.title,
                    'new_title': request.POST.get('title')
                }, audit, request)
            xform.title = request.POST['title']
        elif request.POST.get('toggle_shared'):
            # 'toggle_shared' multiplexes four boolean flips: data sharing,
            # form sharing, active (downloadable) and crowdform status.
            if request.POST['toggle_shared'] == 'data':
                audit = {
                    'xform': xform.id_string
                }
                audit_log(
                    Actions.FORM_UPDATED, request.user, owner,
                    _("Data sharing updated for '%(id_string)s' from "
                        "'%(old_shared)s' to '%(new_shared)s'.") %
                    {
                        'id_string': xform.id_string,
                        'old_shared': _("shared")
                        if xform.shared_data else _("not shared"),
                        'new_shared': _("shared")
                        if not xform.shared_data else _("not shared")
                    }, audit, request)
                xform.shared_data = not xform.shared_data
            elif request.POST['toggle_shared'] == 'form':
                audit = {
                    'xform': xform.id_string
                }
                audit_log(
                    Actions.FORM_UPDATED, request.user, owner,
                    _("Form sharing for '%(id_string)s' updated "
                        "from '%(old_shared)s' to '%(new_shared)s'.") %
                    {
                        'id_string': xform.id_string,
                        'old_shared': _("shared")
                        if xform.shared else _("not shared"),
                        'new_shared': _("shared")
                        if not xform.shared else _("not shared")
                    }, audit, request)
                xform.shared = not xform.shared
            elif request.POST['toggle_shared'] == 'active':
                audit = {
                    'xform': xform.id_string
                }
                audit_log(
                    Actions.FORM_UPDATED, request.user, owner,
                    _("Active status for '%(id_string)s' updated from "
                        "'%(old_shared)s' to '%(new_shared)s'.") %
                    {
                        'id_string': xform.id_string,
                        'old_shared': _("shared")
                        if xform.downloadable else _("not shared"),
                        'new_shared': _("shared")
                        if not xform.downloadable else _("not shared")
                    }, audit, request)
                xform.downloadable = not xform.downloadable
            elif request.POST['toggle_shared'] == 'crowd':
                audit = {
                    'xform': xform.id_string
                }
                audit_log(
                    Actions.FORM_UPDATED, request.user, owner,
                    _("Crowdform status for '%(id_string)s' updated from "
                        "'%(old_status)s' to '%(new_status)s'.") %
                    {
                        'id_string': xform.id_string,
                        'old_status': _("crowdform")
                        if not xform.is_crowd_form else _("not crowdform"),
                        'new_status': _("crowdform")
                        if xform.is_crowd_form else _("not crowdform"),
                    }, audit, request)
                # Enabling crowdform implies sharing the form and its data.
                if xform.is_crowd_form:
                    xform.is_crowd_form = False
                else:
                    xform.is_crowd_form = True
                    xform.shared = True
                    xform.shared_data = True
        elif request.POST.get('form-license'):
            audit = {
                'xform': xform.id_string
            }
            audit_log(
                Actions.FORM_UPDATED, request.user, owner,
                _("Form License for '%(id_string)s' updated to "
                    "'%(form_license)s'.") %
                {
                    'id_string': xform.id_string,
                    'form_license': request.POST['form-license'],
                }, audit, request)
            MetaData.form_license(xform, request.POST['form-license'])
        elif request.POST.get('data-license'):
            audit = {
                'xform': xform.id_string
            }
            audit_log(
                Actions.FORM_UPDATED, request.user, owner,
                _("Data license for '%(id_string)s' updated to "
                    "'%(data_license)s'.") %
                {
                    'id_string': xform.id_string,
                    'data_license': request.POST['data-license'],
                }, audit, request)
            MetaData.data_license(xform, request.POST['data-license'])
        elif request.POST.get('source') or request.FILES.get('source'):
            audit = {
                'xform': xform.id_string
            }
            audit_log(
                Actions.FORM_UPDATED, request.user, owner,
                _("Source for '%(id_string)s' updated to '%(source)s'.") %
                {
                    'id_string': xform.id_string,
                    'source': request.POST.get('source'),
                }, audit, request)
            MetaData.source(xform, request.POST.get('source'),
                            request.FILES.get('source'))
        elif request.POST.get('enable_sms_support_trigger') is not None:
            sms_support_form = ActivateSMSSupportFom(request.POST)
            if sms_support_form.is_valid():
                audit = {
                    'xform': xform.id_string
                }
                enabled = \
                    sms_support_form.cleaned_data.get('enable_sms_support')
                if enabled:
                    audit_action = Actions.SMS_SUPPORT_ACTIVATED
                    audit_message = _(u"SMS Support Activated on")
                else:
                    audit_action = Actions.SMS_SUPPORT_DEACTIVATED
                    audit_message = _(u"SMS Support Deactivated on")
                audit_log(
                    audit_action, request.user, owner,
                    audit_message
                    % {'id_string': xform.id_string}, audit, request)
                # stored previous states to be able to rollback form status
                # in case we can't save.
                pe = xform.allows_sms
                pid = xform.sms_id_string
                xform.allows_sms = enabled
                xform.sms_id_string = \
                    sms_support_form.cleaned_data.get('sms_id_string')
                compat = check_form_sms_compatibility(None,
                                                      json.loads(xform.json))
                # NOTE(review): on incompatibility this forces allows_sms to
                # False instead of restoring the previous value `pe` — the
                # IntegrityError path below does restore `pe`; confirm which
                # is intended.
                if compat['type'] == 'alert-error':
                    xform.allows_sms = False
                    xform.sms_id_string = pid
                try:
                    xform.save()
                except IntegrityError:
                    # unfortunately, there's no feedback mechanism here
                    xform.allows_sms = pe
                    xform.sms_id_string = pid
        elif request.FILES.get('media'):
            audit = {
                'xform': xform.id_string
            }
            audit_log(
                Actions.FORM_UPDATED, request.user, owner,
                _("Media added to '%(id_string)s'.") %
                {
                    'id_string': xform.id_string
                }, audit, request)
            for aFile in request.FILES.getlist("media"):
                MetaData.media_upload(xform, aFile)
        elif request.POST.get('map_name'):
            mapbox_layer = MapboxLayerForm(request.POST)
            if mapbox_layer.is_valid():
                audit = {
                    'xform': xform.id_string
                }
                audit_log(
                    Actions.FORM_UPDATED, request.user, owner,
                    _("Map layer added to '%(id_string)s'.") %
                    {
                        'id_string': xform.id_string
                    }, audit, request)
                MetaData.mapbox_layer_upload(xform, mapbox_layer.cleaned_data)
        elif request.FILES:
            # Fallback: any other uploaded file is a supporting document.
            audit = {
                'xform': xform.id_string
            }
            audit_log(
                Actions.FORM_UPDATED, request.user, owner,
                _("Supporting document added to '%(id_string)s'.") %
                {
                    'id_string': xform.id_string
                }, audit, request)
            MetaData.supporting_docs(xform, request.FILES['doc'])
        xform.update()
        if request.is_ajax():
            return HttpResponse(_(u'Updated succeeded.'))
        else:
            return HttpResponseRedirect(reverse(show, kwargs={
                'username': username,
                'id_string': id_string
            }))
    return HttpResponseForbidden(_(u'Update failed.'))
def getting_started(request):
    """Render the getting-started guide inside the base template."""
    ctx = RequestContext(request)
    ctx.template = 'getting_started.html'
    return render_to_response('base.html', context_instance=ctx)
def support(request):
    """Render the support page inside the base template."""
    ctx = RequestContext(request)
    ctx.template = 'support.html'
    return render_to_response('base.html', context_instance=ctx)
def faq(request):
    """Render the FAQ page inside the base template."""
    ctx = RequestContext(request)
    ctx.template = 'faq.html'
    return render_to_response('base.html', context_instance=ctx)
def xls2xform(request):
    """Render the xls2xform converter page inside the base template."""
    ctx = RequestContext(request)
    ctx.template = 'xls2xform.html'
    return render_to_response('base.html', context_instance=ctx)
def tutorial(request):
    """Render the tutorial page, embedding the visitor's ODK URL."""
    ctx = RequestContext(request)
    ctx.template = 'tutorial.html'
    # Anonymous visitors get a placeholder in the example URL.
    who = request.user.username or 'your-user-name'
    ctx.odk_url = request.build_absolute_uri("/%s" % who)
    return render_to_response('base.html', context_instance=ctx)
def resources(request):
    """Render the resources page with a locale-appropriate slide deck id."""
    context = RequestContext(request)
    # French locales get the translated deck.
    context.deck_id = ('a351f6b0a3730130c98b12e3c5740641'
                       if 'fr' in request.LANGUAGE_CODE.lower()
                       else '1a33a070416b01307b8022000a1de118')
    return render_to_response('resources.html', context_instance=context)
def about_us(request):
    """Render the about-us flatpage, embedding the visitor's ODK URL."""
    ctx = RequestContext(request)
    ctx.a_flatpage = '/about-us/'
    # Anonymous visitors get a placeholder in the example URL.
    who = request.user.username or 'your-user-name'
    ctx.odk_url = request.build_absolute_uri("/%s" % who)
    return render_to_response('base.html', context_instance=ctx)
def syntax(request):
    """Render the XLSForm syntax guide fetched from a published Google Doc."""
    # French locales get the translated document.
    doc_id = ('1EhJTsqX3noztyW-UdKRBABhIln6R3TAvXv58DTZWCU4'
              if 'fr' in request.LANGUAGE_CODE.lower()
              else '1xD5gSjeyjGjw-V9g5hXx7FWeasRvn-L6zeQJsNeAGBI')
    doc = GoogleDoc('https://docs.google.com/document/pub?id=%s' % doc_id)
    context = RequestContext(request)
    context.content = doc.to_html()
    return render_to_response('base.html', context_instance=context)
def form_gallery(request):
    """
    Return a list of urls for all the shared xls files. This could be
    made a lot prettier.
    """
    context = RequestContext(request)
    if request.user.is_authenticated():
        context.loggedin_user = request.user
    context.shared_forms = XForm.objects.filter(shared=True)
    # id_strings a clone of each shared form would carry
    cloned_id_strings = [
        form.id_string + XForm.CLONED_SUFFIX
        for form in context.shared_forms
    ]
    clones = XForm.objects.filter(
        user__username=request.user.username,
        id_string__in=cloned_id_strings
    )
    # strip the suffix back off to recover the original id_strings
    context.cloned = [
        form.id_string.split(XForm.CLONED_SUFFIX)[0] for form in clones
    ]
    return render_to_response('form_gallery.html', context_instance=context)
def download_metadata(request, username, id_string, data_id):
    """Stream a form's supporting document to an authorised requester.

    Access requires ownership or a shared form; the download is audited.
    """
    xform = get_object_or_404(
        XForm, user__username=username, id_string=id_string)
    owner = xform.user
    if username != request.user.username and not xform.shared:
        return HttpResponseForbidden(_(u'Permission denied.'))
    data = get_object_or_404(MetaData, pk=data_id)
    file_path = data.data_file.name
    filename, extension = os.path.splitext(file_path.split('/')[-1])
    extension = extension.strip('.')
    dfs = get_storage_class()()
    if not dfs.exists(file_path):
        return HttpResponseNotFound()
    audit = {'xform': xform.id_string}
    audit_log(
        Actions.FORM_UPDATED, request.user, owner,
        _("Document '%(filename)s' for '%(id_string)s' downloaded.") %
        {
            'id_string': xform.id_string,
            'filename': "%s.%s" % (filename, extension)
        }, audit, request)
    return response_with_mimetype_and_name(
        data.data_file_type,
        filename, extension=extension, show_date=False,
        file_path=file_path)
@login_required()
def delete_metadata(request, username, id_string, data_id):
    """Delete a metadata row for a form the requester owns.

    ?del= removes a stored document (file + row); ?map_name_del= removes a
    map-layer row. Both are audited; anything else is forbidden.
    """
    xform = get_object_or_404(
        XForm, user__username=username, id_string=id_string)
    owner = xform.user
    data = get_object_or_404(MetaData, pk=data_id)
    dfs = get_storage_class()()
    requester_owns_form = username == request.user.username
    if request.GET.get('del', False) and requester_owns_form:
        try:
            dfs.delete(data.data_file.name)
            data.delete()
            audit = {'xform': xform.id_string}
            audit_log(
                Actions.FORM_UPDATED, request.user, owner,
                _("Document '%(filename)s' deleted from '%(id_string)s'.") %
                {
                    'id_string': xform.id_string,
                    'filename': os.path.basename(data.data_file.name)
                }, audit, request)
            return HttpResponseRedirect(reverse(show, kwargs={
                'username': username,
                'id_string': id_string
            }))
        except Exception:
            return HttpResponseServerError()
    elif request.GET.get('map_name_del', False) and requester_owns_form:
        data.delete()
        audit = {'xform': xform.id_string}
        audit_log(
            Actions.FORM_UPDATED, request.user, owner,
            _("Map layer deleted from '%(id_string)s'.") %
            {
                'id_string': xform.id_string,
            }, audit, request)
        return HttpResponseRedirect(reverse(show, kwargs={
            'username': username,
            'id_string': id_string
        }))
    return HttpResponseForbidden(_(u'Permission denied.'))
def download_media_data(request, username, id_string, data_id):
    """Download (or, with ?del=, delete) a media file attached to a form.

    Deletion requires the requester to own the form; downloads are served
    to any request with a username in the URL (see NOTE below). Both
    actions are written to the audit log.
    """
    xform = get_object_or_404(
        XForm, user__username=username, id_string=id_string)
    owner = xform.user
    data = get_object_or_404(MetaData, id=data_id)
    dfs = get_storage_class()()
    if request.GET.get('del', False):
        if username == request.user.username:
            try:
                # Remove the stored file first, then the database row.
                dfs.delete(data.data_file.name)
                data.delete()
                audit = {
                    'xform': xform.id_string
                }
                audit_log(
                    Actions.FORM_UPDATED, request.user, owner,
                    _("Media download '%(filename)s' deleted from "
                        "'%(id_string)s'.") %
                    {
                        'id_string': xform.id_string,
                        'filename': os.path.basename(data.data_file.name)
                    }, audit, request)
                return HttpResponseRedirect(reverse(show, kwargs={
                    'username': username,
                    'id_string': id_string
                }))
            except Exception:
                return HttpResponseServerError()
    else:
        # NOTE(review): the ownership/shared check is commented out below —
        # any request whose URL carries a username can download the media.
        # Confirm whether this relaxation is intentional.
        if username:  # == request.user.username or xform.shared:
            file_path = data.data_file.name
            filename, extension = os.path.splitext(file_path.split('/')[-1])
            extension = extension.strip('.')
            if dfs.exists(file_path):
                audit = {
                    'xform': xform.id_string
                }
                audit_log(
                    Actions.FORM_UPDATED, request.user, owner,
                    _("Media '%(filename)s' downloaded from "
                        "'%(id_string)s'.") %
                    {
                        'id_string': xform.id_string,
                        'filename': os.path.basename(file_path)
                    }, audit, request)
                response = response_with_mimetype_and_name(
                    data.data_file_type,
                    filename, extension=extension, show_date=False,
                    file_path=file_path)
                return response
            else:
                return HttpResponseNotFound()
    return HttpResponseForbidden(_(u'Permission denied.'))
def form_photos(request, username, id_string):
    """List every image attachment of a form, with URLs per thumbnail size."""
    xform, owner = check_and_set_user_and_form(username, id_string, request)
    if not xform:
        return HttpResponseForbidden(_(u'Not shared.'))
    context = RequestContext(request)
    context.form_view = True
    context.content_user = owner
    context.xform = xform
    image_urls = []
    for instance in xform.surveys.all():
        for attachment in instance.attachments.all():
            # skip if not image e.g video or file
            if not attachment.mimetype.startswith('image'):
                continue
            urls = {}
            for size in ['small', 'medium', 'large', 'original']:
                base = reverse(attachment_url, kwargs={'size': size})
                urls[size] = '%s?media_file=%s' % (
                    base, attachment.media_file.name)
            image_urls.append(urls)
    context.images = image_urls
    context.profile, created = UserProfile.objects.get_or_create(user=owner)
    return render_to_response('form_photos.html', context_instance=context)
@require_POST
def set_perm(request, username, id_string):
    """Grant/revoke a user's permissions on a form, or manage its public link.

    POST fields: perm_type in {'edit', 'view', 'remove', 'link'} and
    for_user (a username, or 'all'/'none'/'toggle' for 'link'). All
    changes are audited. Responds with a JSON status for AJAX callers,
    otherwise redirects back to the form page.
    """
    xform = get_object_or_404(XForm,
                              user__username=username, id_string=id_string)
    owner = xform.user
    # Only the owner or a user who already holds permission may change perms.
    if username != request.user.username\
            and not has_permission(xform, username, request):
        return HttpResponseForbidden(_(u'Permission denied.'))
    try:
        perm_type = request.POST['perm_type']
        for_user = request.POST['for_user']
    except KeyError:
        return HttpResponseBadRequest()
    if perm_type in ['edit', 'view', 'remove']:
        try:
            user = User.objects.get(username=for_user)
        except User.DoesNotExist:
            # Unknown target user: show a flash message instead of failing.
            messages.add_message(
                request, messages.INFO,
                _(u"Wrong username <b>%s</b>." % for_user),
                extra_tags='alert-error')
        else:
            # Grants are skipped if the user already holds the permission.
            if perm_type == 'edit' and\
                    not user.has_perm('change_xform', xform):
                audit = {
                    'xform': xform.id_string
                }
                audit_log(
                    Actions.FORM_PERMISSIONS_UPDATED, request.user, owner,
                    _("Edit permissions on '%(id_string)s' assigned to "
                        "'%(for_user)s'.") %
                    {
                        'id_string': xform.id_string,
                        'for_user': for_user
                    }, audit, request)
                assign_perm('change_xform', user, xform)
            elif perm_type == 'view' and\
                    not user.has_perm('view_xform', xform):
                audit = {
                    'xform': xform.id_string
                }
                audit_log(
                    Actions.FORM_PERMISSIONS_UPDATED, request.user, owner,
                    _("View permissions on '%(id_string)s' "
                        "assigned to '%(for_user)s'.") %
                    {
                        'id_string': xform.id_string,
                        'for_user': for_user
                    }, audit, request)
                assign_perm('view_xform', user, xform)
            elif perm_type == 'remove':
                # 'remove' strips both edit and view permissions.
                audit = {
                    'xform': xform.id_string
                }
                audit_log(
                    Actions.FORM_PERMISSIONS_UPDATED, request.user, owner,
                    _("All permissions on '%(id_string)s' "
                        "removed from '%(for_user)s'.") %
                    {
                        'id_string': xform.id_string,
                        'for_user': for_user
                    }, audit, request)
                remove_perm('change_xform', user, xform)
                remove_perm('view_xform', user, xform)
    elif perm_type == 'link':
        # Public-link management: for_user is 'all', 'none' or 'toggle'.
        current = MetaData.public_link(xform)
        if for_user == 'all':
            MetaData.public_link(xform, True)
        elif for_user == 'none':
            MetaData.public_link(xform, False)
        elif for_user == 'toggle':
            MetaData.public_link(xform, not current)
        audit = {
            'xform': xform.id_string
        }
        audit_log(
            Actions.FORM_PERMISSIONS_UPDATED, request.user, owner,
            _("Public link on '%(id_string)s' %(action)s.") %
            {
                'id_string': xform.id_string,
                'action': "created"
                if for_user == "all" or
                (for_user == "toggle" and not current) else "removed"
            }, audit, request)
    if request.is_ajax():
        return HttpResponse(
            json.dumps(
                {'status': 'success'}), mimetype='application/json')
    return HttpResponseRedirect(reverse(show, kwargs={
        'username': username,
        'id_string': id_string
    }))
def show_submission(request, username, id_string, uuid):
    """Redirect to the responses view for one submission, logging the access."""
    xform, is_owner, can_edit, can_view = get_xform_and_perms(
        username, id_string, request)
    owner = xform.user
    # Access via shared data, explicit view permission, or a public link.
    has_access = (xform.shared_data or can_view or
                  request.session.get('public_link') == xform.uuid)
    if not has_access:
        return HttpResponseRedirect(reverse(home))
    submission = get_object_or_404(Instance, uuid=uuid)
    audit = {'xform': xform.id_string}
    audit_log(
        Actions.SUBMISSION_ACCESSED, request.user, owner,
        _("Submission '%(uuid)s' on '%(id_string)s' accessed.") %
        {
            'id_string': xform.id_string,
            'uuid': uuid
        }, audit, request)
    return HttpResponseRedirect(reverse(
        survey_responses, kwargs={'instance_id': submission.pk}))
@require_POST
@login_required
def delete_data(request, username=None, id_string=None):
    """Soft-delete one submission (POST 'id'), audit it, return JSON(P).

    Returns 403 if the form is not shared with the requester and 400 if
    no submission id was posted.
    """
    xform, owner = check_and_set_user_and_form(username, id_string, request)
    if not xform:
        return HttpResponseForbidden(_(u'Not shared.'))
    data_id = request.POST.get('id')
    if not data_id:
        return HttpResponseBadRequest(_(u"id must be specified"))
    # Soft delete: marks deleted_at rather than removing the record.
    Instance.set_deleted_at(data_id)
    audit = {
        'xform': xform.id_string
    }
    audit_log(
        Actions.SUBMISSION_DELETED, request.user, owner,
        _("Deleted submission with id '%(record_id)s' "
          "on '%(id_string)s'.") %
        {
            'id_string': xform.id_string,
            'record_id': data_id
        }, audit, request)
    # (removed a dead `response_text = u''` initialiser that was always
    # overwritten below)
    response_text = json.dumps({"success": "Deleted data %s" % data_id})
    # JSONP: wrap the payload in the caller-supplied callback, if any.
    if 'callback' in request.GET and request.GET.get('callback') != '':
        callback = request.GET.get('callback')
        response_text = ("%s(%s)" % (callback, response_text))
    return HttpResponse(response_text, mimetype='application/json')
@require_POST
@is_owner
def link_to_bamboo(request, username, id_string):
    """(Re)create the bamboo dataset linked to this form.

    Deletes any existing dataset, uploads all current data as a new one,
    stores the new dataset id on the XForm, ensures the REST push service
    exists, and audits both steps. Redirects back to the form page.
    """
    xform = get_object_or_404(XForm,
                              user__username=username, id_string=id_string)
    owner = xform.user
    from utils.bamboo import (get_new_bamboo_dataset,
                              delete_bamboo_dataset, ensure_rest_service)
    audit = {
        'xform': xform.id_string
    }
    # try to delete the dataset first (in case it exists)
    if xform.bamboo_dataset and delete_bamboo_dataset(xform):
        xform.bamboo_dataset = u''
        xform.save()
        audit_log(
            Actions.BAMBOO_LINK_DELETED, request.user, owner,
            _("Bamboo link deleted on '%(id_string)s'.")
            % {'id_string': xform.id_string}, audit, request)
    # create a new one from all the data
    dataset_id = get_new_bamboo_dataset(xform)
    # update XForm
    xform.bamboo_dataset = dataset_id
    xform.save()
    # make sure new submissions are pushed to bamboo as they arrive
    ensure_rest_service(xform)
    audit_log(
        Actions.BAMBOO_LINK_CREATED, request.user, owner,
        _("Bamboo link created on '%(id_string)s'.") %
        {
            'id_string': xform.id_string,
        }, audit, request)
    return HttpResponseRedirect(reverse(show, kwargs={
        'username': username,
        'id_string': id_string
    }))
@require_POST
@is_owner
def update_xform(request, username, id_string):
    """Replace a form's XLS definition with a newly uploaded file.

    Publishing happens inside `set_form` so that `publish_form` can turn
    any exception into a user-facing flash message. Always redirects back
    to the form page.
    """
    xform = get_object_or_404(
        XForm, user__username=username, id_string=id_string)
    owner = xform.user

    def set_form():
        # Runs the upload/publish; exceptions are handled by publish_form.
        form = QuickConverter(request.POST, request.FILES)
        survey = form.publish(request.user, id_string).survey
        enketo_webform_url = reverse(
            enter_data,
            kwargs={'username': username, 'id_string': survey.id_string}
        )
        audit = {
            'xform': xform.id_string
        }
        audit_log(
            Actions.FORM_XLS_UPDATED, request.user, owner,
            _("XLS for '%(id_string)s' updated.") %
            {
                'id_string': xform.id_string,
            }, audit, request)
        # Returned dict becomes the flash message shown to the user.
        return {
            'type': 'alert-success',
            'text': _(u'Successfully published %(form_id)s.'
                      u' <a href="%(form_url)s">Enter Web Form</a>'
                      u' or <a href="#preview-modal" data-toggle="modal">'
                      u'Preview Web Form</a>')
            % {'form_id': survey.id_string,
                'form_url': enketo_webform_url}
        }
    message = publish_form(set_form)
    messages.add_message(
        request, messages.INFO, message['text'], extra_tags=message['type'])
    return HttpResponseRedirect(reverse(show, kwargs={
        'username': username,
        'id_string': id_string
    }))
@is_owner
def activity(request, username):
    """Render the audit-activity page for the given account."""
    account = get_object_or_404(User, username=username)
    context = RequestContext(request)
    context.user = account
    return render_to_response('activity.html', context_instance=context)
def activity_fields(request):
    """Describe the queryable audit-log columns as JSON for the activity UI."""
    action_options = sorted([Actions[e] for e in Actions.enums])
    fields = [
        {'id': 'created_on',
         'label': _('Performed On'),
         'type': 'datetime',
         'searchable': False},
        {'id': 'action',
         'label': _('Action'),
         'type': 'string',
         'searchable': True,
         'options': action_options},
        {'id': 'user',
         'label': 'Performed By',
         'type': 'string',
         'searchable': True},
        {'id': 'msg',
         'label': 'Description',
         'type': 'string',
         'searchable': True},
    ]
    return HttpResponse(json.dumps(fields), mimetype='application/json')
@is_owner
def activity_api(request, username):
    """Query the mongo-backed audit log and return JSON (or JSONP)."""
    from bson.objectid import ObjectId

    def stringify_unknowns(obj):
        # json can't serialise ObjectId/datetime; render them as strings,
        # anything else becomes null.
        if isinstance(obj, ObjectId):
            return str(obj)
        if isinstance(obj, datetime):
            return obj.strftime(DATETIME_FORMAT)
        return None

    def _json_param(name, default):
        # Decode an optional JSON-encoded GET parameter.
        raw = request.GET.get(name)
        return json.loads(raw) if raw else default

    try:
        query_args = {
            'username': username,
            'query': _json_param('query', {}),
            'fields': _json_param('fields', []),
            'sort': _json_param('sort', {})
        }
        if 'start' in request.GET:
            query_args["start"] = int(request.GET.get('start'))
        if 'limit' in request.GET:
            query_args["limit"] = int(request.GET.get('limit'))
        if 'count' in request.GET:
            query_args["count"] = int(request.GET.get('count')) > 0
        cursor = AuditLog.query_mongo(**query_args)
    except ValueError as e:
        return HttpResponseBadRequest(e.__str__())
    records = list(cursor)
    response_text = json.dumps(records, default=stringify_unknowns)
    # JSONP: wrap the payload in the caller-supplied callback, if any.
    if 'callback' in request.GET and request.GET.get('callback') != '':
        callback = request.GET.get('callback')
        response_text = ("%s(%s)" % (callback, response_text))
    return HttpResponse(response_text, mimetype='application/json')
def qrcode(request, username, id_string):
    """Return an HTML snippet containing a QR code for the Enketo web form.

    Asks the Enketo service for the form URL, renders the QR image plus a
    link on success, or an alert box (HTTP 400) on failure.
    """
    formhub_url = settings.SERVER_EXTERNAL_URL
    form_url = formhub_url + username
    if settings.TESTING_MODE:
        # fixed host so recorded test fixtures stay stable
        form_url = "https://testserver.com/bob"
    results = _(u"Unexpected Error occured: No QRCODE generated")
    status = 200
    try:
        # enketo_url calls the external Enketo service; it may raise.
        url = enketo_url(form_url, id_string)
    except Exception, e:
        error_msg = _(u"Error Generating QRCODE: %s" % e)
        results = """<div class="alert alert-error">%s</div>""" % error_msg
        status = 400
    else:
        if url:
            image = generate_qrcode(url)
            results = """<img class="qrcode" src="%s" alt="%s" />
                    </br><a href="%s" target="_blank">%s</a>""" \
                % (image, url, url, url)
        else:
            # service answered but returned no URL
            status = 400
    return HttpResponse(results, mimetype='text/html', status=status)
def enketo_preview(request, username, id_string):
    """Redirect to the Enketo preview service for this form."""
    xform = get_object_or_404(
        XForm, user__username=username, id_string=id_string)
    owner = xform.user
    if not has_permission(xform, owner, request, xform.shared):
        return HttpResponseForbidden(_(u'Not shared.'))
    profile_url = request.build_absolute_uri(
        reverse(profile, kwargs={'username': owner.username}))
    preview_url = "%(enketo_url)s?server=%(profile_url)s&id=%(id_string)s" % {
        'enketo_url': settings.ENKETO_PREVIEW_URL,
        'profile_url': profile_url,
        'id_string': xform.id_string
    }
    return HttpResponseRedirect(preview_url)
@require_GET
@login_required
def username_list(request):
    """Autocomplete helper: JSON list of active usernames matching ?query=."""
    matches = []
    prefix = request.GET.get('query', None)
    if prefix:
        rows = User.objects.values('username').filter(
            username__startswith=prefix, is_active=True, pk__gte=0)
        matches = [row['username'] for row in rows]
    return HttpResponse(json.dumps(matches), mimetype='application/json')
| |
__author__ = 'juliusskye'
import os, sys
sys.path.append('..')
from py.Rectangle import Rectangle
from py.CompareRectangles import CompareRectangles
from PIL import Image
import glob
import cv2
from scipy import misc
import numpy as np
import os.path
import time
class SimpleTest(object):
    """Ad-hoc driver for exercising Rectangle/CompareRectangles by hand."""

    def __init__(self, a, b):
        self.a = a
        self.b = b

    def add(self):
        """Return a + b (sanity check that the harness runs)."""
        return self.a + self.b

    def bounds(self):
        """Set and return the canonical boundary-test vector."""
        # global variables for boundary tests
        self.boundary_tests = [0, 1, -1, 1, 0]
        return self.boundary_tests

    def inverse_bounds(self):
        """Return the boundary vector with every entry after index 0 negated.

        Requires bounds() to have been called first.
        """
        # BUG FIX: the original aliased self.boundary_tests and negated it in
        # place, corrupting bounds()'s list and flip-flopping on repeated
        # calls. Work on a copy instead.
        self.inverse_boundary_tests = list(self.boundary_tests)
        for index in range(1, len(self.inverse_boundary_tests)):
            self.inverse_boundary_tests[index] = \
                -1 * self.inverse_boundary_tests[index]
        return self.inverse_boundary_tests

    def simple_contains(self):
        """Demonstrate the 'r1 fully contains r2' boundary-flag convention."""
        boundary_tests = [1, 0, 0, 1]
        print("testing if r1 fully contains r2")
        fullyContains = True
        for boundary in boundary_tests:
            print(boundary)
            # a flag of 0 or 1 means the edge is inside; anything else fails
            if boundary == 0 or boundary == 1:
                print("boundary 1 continue")
                continue
            else:
                print("boundary not 1 break")
                fullyContains = False
                break
        print("r1 fully contains r2:" + str(fullyContains))

    def test(self, r1, r2):
        """Print the comparison metrics for rectangles r1 and r2."""
        print("Rectangle r1: ["+str(r1.getLeftXCoord())+", "+str(r1.getTopYCoord())+", "+ \
            str(r1.getWidth())+", " + str(r1.getHeight())+"]"+" | Area(r1): "+str(r1.area()))
        print("Rectangle r2: ["+str(r2.getLeftXCoord())+", "+str(r2.getTopYCoord())+", "+ \
            str(r2.getWidth())+", " + str(r2.getHeight())+"]"+" | Area(r2): "+str(r2.area()))
        # 0.35 is the jaccard-similarity threshold used throughout.
        compare_rects = CompareRectangles(r1, r2, 0.35)
        print(compare_rects.boundary_tests)
        print("union: " + str(compare_rects.area_union()))
        print("intersection: " + str(compare_rects.area_intersection()))
        print("jaccard index: " + str(compare_rects.jaccard_index()))
        print("rectangles similar: " + str(compare_rects.similar_rectangles()))
        print("\n")

    def printImage(self, r1, r2):
        """Render r1 (cyan) and r2 (green) with labels/centres to a PNG."""
        filepath = "/Users/juliusskye/COMP4120.Car.Detection.Research/car-classifier-research/src/hyp.verification.tools/py/test/testcoord"+str(time.time())+".png"
        if os.path.exists(filepath):
            image = cv2.imread(filepath)
        else:
            # fresh black 250x250 canvas
            height = 250
            width = 250
            image = np.zeros((height, width, 3), np.uint8)
        cv2.rectangle(image, (r1.getLeftXCoord(), r1.getTopYCoord()), (r1.getLeftXCoord()+r1.getWidth(), r1.getTopYCoord()+r1.getHeight()), (255, 255, 0), 1)
        cv2.putText(image, "r1"+str(r1), (int(r1.getCenter().getXCoord()-0.5*r1.getWidth()), int(r1.getCenter().getYCoord())), cv2.FONT_HERSHEY_PLAIN, 0.5, 255)
        cv2.rectangle(image, (r2.getLeftXCoord(), r2.getTopYCoord()), (r2.getLeftXCoord()+r2.getWidth(), r2.getTopYCoord()+r2.getHeight()), (0, 255, 0), 1)
        cv2.putText(image, "r2"+str(r2), (int(r2.getCenter().getXCoord()-0.5*r2.getWidth()), int(r2.getCenter().getYCoord())), cv2.FONT_HERSHEY_PLAIN, 0.5, 255)
        cv2.circle(image, (r1.getCenter().getXCoord(), r1.getCenter().getYCoord()), 2, (255, 255, 0), 1)
        cv2.circle(image, (r2.getCenter().getXCoord(), r2.getCenter().getYCoord()), 2, (255, 255, 0), 1)
        misc.imsave(filepath, image)
# Build the harness instance and print the banner.
test_instance = SimpleTest(1, 2)
_BANNER = "---------------------------------"
print(_BANNER)
print(_BANNER)
print("Testing Compare Rectangles Class")
print(_BANNER)
print(_BANNER)
# print(test_instance.add())
# print("boundaries: "+ str(test_instance.bounds()) + \
# "inverse bounds: " + str(test_instance.inverse_bounds()))
# print("---------------------------------")
# print("case: same rects")
# print("---------------------------------")
# r1 = Rectangle(0,5,5,5)
# r2 = Rectangle(0,5,5,5)
# test_instance.test(r1,r2)
#
# print("---------------------------------")
# print("case: no intersection")
# print("---------------------------------")
# #correct
# r1 = Rectangle(0,5,5,5)
# r2 = Rectangle(0,100,5,5)
# test_instance.test(r1,r2)
#
#
# print("---------------------------------")
# print("case: fully contained rectangle")
# print("---------------------------------")
# #correct
# # r1 should contain r2, should be jaccard similar.
# r1 = Rectangle(0,0,50,50)
# r2 = Rectangle(0,0,40,40)
# test_instance.test(r1,r2)
# test_instance.printImage(r1,r2)
# # r2 should contain r1
# r2 = Rectangle(0,0,50,50)
# r1 = Rectangle(0,0,40,40)
# test_instance.test(r1,r2)
# test_instance.printImage(r1,r2)
#
# print("---------------------------------")
# print("case: x contained rectangle")
# print("---------------------------------")
# # correct
# r1 = Rectangle(0,100,50,50)
# r2 = Rectangle(20,100,20,100)
# test_instance.test(r1,r2)
# test_instance.printImage(r1,r2)
#
#
# print("---------------------------------")
# print("case: y contained rectangle")
# print("---------------------------------")
# # correct
# r1 = Rectangle(10,0,100,100)
# r2 = Rectangle(0,5,50,30)
# test_instance.test(r1,r2)
# test_instance.printImage(r1,r2)
#
# # correct
# r1 = Rectangle(10,0,50,100)
# r2 = Rectangle(0,5,20,30)
# test_instance.test(r1,r2)
# test_instance.printImage(r1,r2)
#
#
# print("---------------------------------")
# print("case: not contained not parra intersect")
# print("---------------------------------")
# # correct
# r1 = Rectangle(0,0,50,50)
# r2 = Rectangle(25,25,50,50)
# test_instance.test(r1,r2)
# test_instance.printImage(r1,r2)
# r1 = Rectangle(16, 25, 89, 89)
# r2 = Rectangle(27, 38, 72, 52)
# test_instance.test(r1,r2)
# test_instance.printImage(r1,r2)
# print("---------------------------------")
# print("case:testing anomaly on images 26 No JI.")
# print("---------------------------------")
# r1 = Rectangle(16, 36, 82, 82)
# r2 = Rectangle(1, 31, 100, 64)
# test_instance.test(r1,r2)
# test_instance.printImage(r1,r2)
#
# print("---------------------------------")
# print("case:testing anomaly on images 42 No JI.")
# print("---------------------------------")
# r1 = Rectangle(24, 32, 81, 81)
# r2 = Rectangle(6, 32, 95, 60)
# test_instance.test(r1,r2)
# test_instance.printImage(r1,r2)
# print("---------------------------------")
# print("case:testing anomaly on images 60 JI>1.")
# print("---------------------------------")
# r1 = Rectangle(38, 75, 22, 22)
# r2 = Rectangle(12, 29, 89, 66)
# test_instance.test(r1,r2)
# test_instance.printImage(r1,r2)
# # is for some reason doing union / intersection instead of int./ union
# print("---------------------------------")
# print("case:testing anomaly on images 60 JI>1.")
# print("---------------------------------")
# r1 = Rectangle(14, 21, 64, 64)
# r2 = Rectangle(4, 36, 95, 56)
# test_instance.test(r1,r2)
# test_instance.printImage(r1,r2)
# intersection: 2671
# union: 6745
# division result: 2
# jaccard index: 2.52527143392
# rectangles similar: True
# print("---------------------------------")
# print("case:JI seems to be correct")
# print("---------------------------------")
# r1 = Rectangle(0, 6, 117, 117)
# r2 = Rectangle(4, 36, 95, 56)
# test_instance.test(r1,r2)
# test_instance.printImage(r1,r2)
#fully contained.
# intersection: 13689
# union: 5320
# division result: 0
# jaccard index: 0.388633209146
# rectangles similar: True
# print("---------------------------------")
# print("case:testing anomaly on images 60 JI==None.")
# print("---------------------------------")
# r1 = Rectangle(19, 26, 82, 82)
# r2 = Rectangle(19, 31, 84, 59)
# test_instance.test(r1,r2)
# test_instance.printImage(r1,r2)
# print("---------------------------------")
# print("case:error. leftest.right is none")
# print("---------------------------------")
# r1 = Rectangle(51, 53, 71, 71)
# r2 = Rectangle(51, 28, 71, 81)
# test_instance.printImage(r1,r2)
# test_instance.test(r1,r2)
# print("---------------------------------")
# print("case:error. ")
# print("---------------------------------")
# r1 = Rectangle(133, 67, 24, 24)
# r2 = Rectangle(137, 32, 30, 19)
# test_instance.printImage(r1,r2)
# test_instance.test(r1,r2)
# print("---------------------------------")
# print("case:error. ")
# print("---------------------------------")
# r1 = Rectangle(114, 52, 57, 23)
# r2 = Rectangle(75, 52, 84, 23)
# test_instance.printImage(r1,r2)
# test_instance.test(r1,r2)
# # they intersect and shouldnt. fixed.
# print("---------------------------------")
# print("case:JI>1. ")
# print("---------------------------------")
# r1 = Rectangle(69, 18, 46, 46)
# r2 = Rectangle(69, 73, 46, 55)
# test_instance.printImage(r1,r2)
# test_instance.test(r1,r2)
# print(test_instance.simple_contains())
| |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""Simple speech recognition to spot a limited number of keywords.
This is a self-contained example script that will train a very basic audio
recognition model in TensorFlow. It downloads the necessary training data and
runs with reasonable defaults to train within a few hours even only using a CPU.
For more information, please see
https://www.tensorflow.org/tutorials/audio_recognition.
It is intended as an introduction to using neural networks for audio
recognition, and is not a full speech recognition system. For more advanced
speech systems, I recommend looking into Kaldi. This network uses a keyword
detection style to spot discrete words from a small vocabulary, consisting of
"yes", "no", "up", "down", "left", "right", "on", "off", "stop", and "go".
To run the training process, use:
bazel run tensorflow/examples/speech_commands:train
This will write out checkpoints to /tmp/speech_commands_train/, and will
download over 1GB of open source training data, so you'll need enough free space
and a good internet connection. The default data is a collection of thousands of
one-second .wav files, each containing one spoken word. This data set is
collected from https://aiyprojects.withgoogle.com/open_speech_recording, please
consider contributing to help improve this and other models!
As training progresses, it will print out its accuracy metrics, which should
rise above 90% by the end. Once it's complete, you can run the freeze script to
get a binary GraphDef that you can easily deploy on mobile applications.
If you want to train on your own data, you'll need to create .wavs with your
recordings, all at a consistent length, and then arrange them into subfolders
organized by label. For example, here's a possible file structure:
my_wavs >
up >
audio_0.wav
audio_1.wav
down >
audio_2.wav
audio_3.wav
other>
audio_4.wav
audio_5.wav
You'll also need to tell the script what labels to look for, using the
`--wanted_words` argument. In this case, 'up,down' might be what you want, and
the audio in the 'other' folder would be used to train an 'unknown' category.
To pull this all together, you'd run:
bazel run tensorflow/examples/speech_commands:train -- \
--data_dir=my_wavs --wanted_words=up,down
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import os.path
import sys
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
import input_data
import models
from tensorflow.python.platform import gfile
FLAGS = None
def main(_):
  """Train a small keyword-spotting model end to end.

  Loads the speech-commands data set, builds the graph for the model named
  by FLAGS.model_architecture, runs the training loop with staged learning
  rates, periodically evaluates on the validation set, writes checkpoints,
  and finally reports accuracy on the held-out test set.

  Args:
    _: Unused; required by the tf.app.run entry-point signature.
  """
  # We want to see all the logging messages for this tutorial.
  tf.logging.set_verbosity(tf.logging.INFO)
  # Start a new TensorFlow session.
  sess = tf.InteractiveSession()
  # Begin by making sure we have the training data we need. If you already have
  # training data of your own, use `--data_url= ` on the command line to avoid
  # downloading.
  model_settings = models.prepare_model_settings(
      len(input_data.prepare_words_list(FLAGS.wanted_words.split(','))),
      FLAGS.sample_rate, FLAGS.clip_duration_ms, FLAGS.window_size_ms,
      FLAGS.window_stride_ms, FLAGS.dct_coefficient_count)
  # AudioProcessor downloads/partitions the data and performs feature
  # extraction (MFCC fingerprints) for us.
  audio_processor = input_data.AudioProcessor(
      FLAGS.data_url, FLAGS.data_dir, FLAGS.silence_percentage,
      FLAGS.unknown_percentage,
      FLAGS.wanted_words.split(','), FLAGS.validation_percentage,
      FLAGS.testing_percentage, model_settings)
  fingerprint_size = model_settings['fingerprint_size']
  label_count = model_settings['label_count']
  # Convert the time-shift range from milliseconds to audio samples.
  time_shift_samples = int((FLAGS.time_shift_ms * FLAGS.sample_rate) / 1000)
  # Figure out the learning rates for each training phase. Since it's often
  # effective to have high learning rates at the start of training, followed by
  # lower levels towards the end, the number of steps and learning rates can be
  # specified as comma-separated lists to define the rate at each stage. For
  # example --how_many_training_steps=10000,3000 --learning_rate=0.001,0.0001
  # will run 13,000 training loops in total, with a rate of 0.001 for the first
  # 10,000, and 0.0001 for the final 3,000.
  training_steps_list = list(map(int, FLAGS.how_many_training_steps.split(',')))
  learning_rates_list = list(map(float, FLAGS.learning_rate.split(',')))
  if len(training_steps_list) != len(learning_rates_list):
    raise Exception(
        '--how_many_training_steps and --learning_rate must be equal length '
        'lists, but are %d and %d long instead' % (len(training_steps_list),
                                                   len(learning_rates_list)))
  fingerprint_input = tf.placeholder(
      tf.float32, [None, fingerprint_size], name='fingerprint_input')
  logits, dropout_prob = models.create_model(
      fingerprint_input,
      model_settings,
      FLAGS.model_architecture,
      is_training=True)
  # Define loss and optimizer
  ground_truth_input = tf.placeholder(
      tf.float32, [None, label_count], name='groundtruth_input')
  # Optionally we can add runtime checks to spot when NaNs or other symptoms of
  # numerical errors start occurring during training.
  control_dependencies = []
  if FLAGS.check_nans:
    checks = tf.add_check_numerics_ops()
    control_dependencies = [checks]
  # Create the back propagation and training evaluation machinery in the graph.
  with tf.name_scope('cross_entropy'):
    cross_entropy_mean = tf.reduce_mean(
        tf.nn.softmax_cross_entropy_with_logits(
            labels=ground_truth_input, logits=logits))
  tf.summary.scalar('cross_entropy', cross_entropy_mean)
  # Training op runs under the (possibly empty) NaN-check dependencies.
  with tf.name_scope('train'), tf.control_dependencies(control_dependencies):
    learning_rate_input = tf.placeholder(
        tf.float32, [], name='learning_rate_input')
    train_step = tf.train.GradientDescentOptimizer(
        learning_rate_input).minimize(cross_entropy_mean)
  predicted_indices = tf.argmax(logits, 1)
  expected_indices = tf.argmax(ground_truth_input, 1)
  correct_prediction = tf.equal(predicted_indices, expected_indices)
  confusion_matrix = tf.confusion_matrix(expected_indices, predicted_indices)
  evaluation_step = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
  tf.summary.scalar('accuracy', evaluation_step)
  # Global step is advanced manually once per training iteration below.
  global_step = tf.contrib.framework.get_or_create_global_step()
  increment_global_step = tf.assign(global_step, global_step + 1)
  saver = tf.train.Saver(tf.global_variables())
  # Merge all the summaries and write them out to /tmp/retrain_logs (by default)
  merged_summaries = tf.summary.merge_all()
  train_writer = tf.summary.FileWriter(FLAGS.summaries_dir + '/train',
                                       sess.graph)
  validation_writer = tf.summary.FileWriter(FLAGS.summaries_dir + '/validation')
  tf.global_variables_initializer().run()
  start_step = 1
  if FLAGS.start_checkpoint:
    # Resume: restore weights and continue from the stored global step.
    models.load_variables_from_checkpoint(sess, FLAGS.start_checkpoint)
    start_step = global_step.eval(session=sess)
  tf.logging.info('Training from step: %d ', start_step)
  # Save graph.pbtxt.
  tf.train.write_graph(sess.graph_def, FLAGS.train_dir,
                       FLAGS.model_architecture + '.pbtxt')
  # Save list of words.
  with gfile.GFile(
      os.path.join(FLAGS.train_dir, FLAGS.model_architecture + '_labels.txt'),
      'w') as f:
    f.write('\n'.join(audio_processor.words_list))
  # Training loop.
  training_steps_max = np.sum(training_steps_list)
  for training_step in xrange(start_step, training_steps_max + 1):
    # Figure out what the current learning rate is.
    training_steps_sum = 0
    for i in range(len(training_steps_list)):
      training_steps_sum += training_steps_list[i]
      if training_step <= training_steps_sum:
        learning_rate_value = learning_rates_list[i]
        break
    # Pull the audio samples we'll use for training.
    train_fingerprints, train_ground_truth = audio_processor.get_data(
        FLAGS.batch_size, 0, model_settings, FLAGS.background_frequency,
        FLAGS.background_volume, time_shift_samples, 'training', sess)
    # Run the graph with this batch of training data.
    train_summary, train_accuracy, cross_entropy_value, _, _ = sess.run(
        [
            merged_summaries, evaluation_step, cross_entropy_mean, train_step,
            increment_global_step
        ],
        feed_dict={
            fingerprint_input: train_fingerprints,
            ground_truth_input: train_ground_truth,
            learning_rate_input: learning_rate_value,
            dropout_prob: 0.5
        })
    train_writer.add_summary(train_summary, training_step)
    tf.logging.info('Step #%d: rate %f, accuracy %.1f%%, cross entropy %f' %
                    (training_step, learning_rate_value, train_accuracy * 100,
                     cross_entropy_value))
    is_last_step = (training_step == training_steps_max)
    if (training_step % FLAGS.eval_step_interval) == 0 or is_last_step:
      set_size = audio_processor.set_size('validation')
      total_accuracy = 0
      total_conf_matrix = None
      # Walk the whole validation set in batches, accumulating a
      # size-weighted accuracy and a summed confusion matrix.
      for i in xrange(0, set_size, FLAGS.batch_size):
        validation_fingerprints, validation_ground_truth = (
            audio_processor.get_data(FLAGS.batch_size, i, model_settings, 0.0,
                                     0.0, 0, 'validation', sess))
        # Run a validation step and capture training summaries for TensorBoard
        # with the `merged` op.
        validation_summary, validation_accuracy, conf_matrix = sess.run(
            [merged_summaries, evaluation_step, confusion_matrix],
            feed_dict={
                fingerprint_input: validation_fingerprints,
                ground_truth_input: validation_ground_truth,
                dropout_prob: 1.0
            })
        validation_writer.add_summary(validation_summary, training_step)
        # Last batch may be smaller than FLAGS.batch_size.
        batch_size = min(FLAGS.batch_size, set_size - i)
        total_accuracy += (validation_accuracy * batch_size) / set_size
        if total_conf_matrix is None:
          total_conf_matrix = conf_matrix
        else:
          total_conf_matrix += conf_matrix
      tf.logging.info('Confusion Matrix:\n %s' % (total_conf_matrix))
      tf.logging.info('Step %d: Validation accuracy = %.1f%% (N=%d)' %
                      (training_step, total_accuracy * 100, set_size))
    # Save the model checkpoint periodically.
    if (training_step % FLAGS.save_step_interval == 0 or
        training_step == training_steps_max):
      checkpoint_path = os.path.join(FLAGS.train_dir,
                                     FLAGS.model_architecture + '.ckpt')
      tf.logging.info('Saving to "%s-%d"', checkpoint_path, training_step)
      saver.save(sess, checkpoint_path, global_step=training_step)
  # Final evaluation on the held-out test set (dropout disabled).
  set_size = audio_processor.set_size('testing')
  tf.logging.info('set_size=%d', set_size)
  total_accuracy = 0
  total_conf_matrix = None
  for i in xrange(0, set_size, FLAGS.batch_size):
    test_fingerprints, test_ground_truth = audio_processor.get_data(
        FLAGS.batch_size, i, model_settings, 0.0, 0.0, 0, 'testing', sess)
    test_accuracy, conf_matrix = sess.run(
        [evaluation_step, confusion_matrix],
        feed_dict={
            fingerprint_input: test_fingerprints,
            ground_truth_input: test_ground_truth,
            dropout_prob: 1.0
        })
    batch_size = min(FLAGS.batch_size, set_size - i)
    total_accuracy += (test_accuracy * batch_size) / set_size
    if total_conf_matrix is None:
      total_conf_matrix = conf_matrix
    else:
      total_conf_matrix += conf_matrix
  tf.logging.info('Confusion Matrix:\n %s' % (total_conf_matrix))
  tf.logging.info('Final test accuracy = %.1f%% (N=%d)' % (total_accuracy * 100,
                                                           set_size))
def str_to_bool(value):
  """Parse a command-line boolean flag value.

  argparse's `type=bool` is a trap: bool('False') is True because any
  non-empty string is truthy, so `--check_nans=False` used to enable the
  check. This converter accepts the usual spellings of true/false and
  rejects anything else with a proper argparse error.

  Args:
    value: String from the command line (or an actual bool).

  Returns:
    The parsed boolean.

  Raises:
    argparse.ArgumentTypeError: If the value is not a recognized boolean.
  """
  if isinstance(value, bool):
    return value
  lowered = value.lower()
  if lowered in ('yes', 'true', 't', 'y', '1'):
    return True
  if lowered in ('no', 'false', 'f', 'n', '0'):
    return False
  raise argparse.ArgumentTypeError('Boolean value expected, got %r.' % value)


if __name__ == '__main__':
  parser = argparse.ArgumentParser()
  parser.add_argument(
      '--data_url',
      type=str,
      # pylint: disable=line-too-long
      default='http://download.tensorflow.org/data/speech_commands_v0.01.tar.gz',
      # pylint: enable=line-too-long
      help='Location of speech training data archive on the web.')
  parser.add_argument(
      '--data_dir',
      type=str,
      default='/tmp/speech_dataset/',
      help="""\
      Where to download the speech training data to.
      """)
  parser.add_argument(
      '--background_volume',
      type=float,
      default=0.1,
      help="""\
      How loud the background noise should be, between 0 and 1.
      """)
  parser.add_argument(
      '--background_frequency',
      type=float,
      default=0.8,
      help="""\
      How many of the training samples have background noise mixed in.
      """)
  parser.add_argument(
      '--silence_percentage',
      type=float,
      default=10.0,
      help="""\
      How much of the training data should be silence.
      """)
  parser.add_argument(
      '--unknown_percentage',
      type=float,
      default=10.0,
      help="""\
      How much of the training data should be unknown words.
      """)
  parser.add_argument(
      '--time_shift_ms',
      type=float,
      default=100.0,
      help="""\
      Range to randomly shift the training audio by in time.
      """)
  parser.add_argument(
      '--testing_percentage',
      type=int,
      default=10,
      help='What percentage of wavs to use as a test set.')
  parser.add_argument(
      '--validation_percentage',
      type=int,
      default=10,
      help='What percentage of wavs to use as a validation set.')
  parser.add_argument(
      '--sample_rate',
      type=int,
      default=16000,
      help='Expected sample rate of the wavs',)
  parser.add_argument(
      '--clip_duration_ms',
      type=int,
      default=1000,
      help='Expected duration in milliseconds of the wavs',)
  parser.add_argument(
      '--window_size_ms',
      type=float,
      default=30.0,
      help='How long each spectrogram timeslice is',)
  parser.add_argument(
      '--window_stride_ms',
      type=float,
      default=10.0,
      help='How long each spectrogram timeslice is',)
  parser.add_argument(
      '--dct_coefficient_count',
      type=int,
      default=40,
      help='How many bins to use for the MFCC fingerprint',)
  parser.add_argument(
      '--how_many_training_steps',
      type=str,
      default='15000,3000',
      help='How many training loops to run',)
  parser.add_argument(
      '--eval_step_interval',
      type=int,
      default=400,
      help='How often to evaluate the training results.')
  parser.add_argument(
      '--learning_rate',
      type=str,
      default='0.001,0.0001',
      help='How large a learning rate to use when training.')
  parser.add_argument(
      '--batch_size',
      type=int,
      default=100,
      help='How many items to train with at once',)
  parser.add_argument(
      '--summaries_dir',
      type=str,
      default='/tmp/retrain_logs',
      help='Where to save summary logs for TensorBoard.')
  parser.add_argument(
      '--wanted_words',
      type=str,
      default='yes,no,up,down,left,right,on,off,stop,go',
      help='Words to use (others will be added to an unknown label)',)
  parser.add_argument(
      '--train_dir',
      type=str,
      default='/tmp/speech_commands_train',
      help='Directory to write event logs and checkpoint.')
  parser.add_argument(
      '--save_step_interval',
      type=int,
      default=100,
      help='Save model checkpoint every save_steps.')
  parser.add_argument(
      '--start_checkpoint',
      type=str,
      default='',
      help='If specified, restore this pretrained model before any training.')
  parser.add_argument(
      '--model_architecture',
      type=str,
      default='conv',
      help='What model architecture to use')
  # Was `type=bool`, which treated every non-empty value (including
  # 'False') as True. Use a real converter; a bare `--check_nans` also
  # works now via nargs='?' with const=True.
  parser.add_argument(
      '--check_nans',
      type=str_to_bool,
      nargs='?',
      const=True,
      default=False,
      help='Whether to check for invalid numbers during processing')
  FLAGS, unparsed = parser.parse_known_args()
  tf.app.run(main=main, argv=[sys.argv[0]] + unparsed)
| |
"""
Configuration items for the shop.
Also contains shopping cart and related classes.
"""
from decimal import Decimal, ROUND_CEILING
from django.contrib.sites.models import Site
from django.conf import settings
from django.core import urlresolvers
from django.db import models
from django.utils.encoding import force_unicode
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext, ugettext_lazy as _
from l10n.models import Country
from l10n.utils import moneyfmt
from livesettings import ConfigurationSettings, config_value
from payment.fields import PaymentChoiceCharField
from product.models import Discount, Product, Price, get_product_quantity_adjustments
from product.prices import PriceAdjustmentCalc, PriceAdjustment
from satchmo_store.contact.models import Contact
from satchmo_utils.fields import CurrencyField
from satchmo_utils.numbers import trunc_decimal
from shipping.fields import ShippingChoiceCharField
from tax.utils import get_tax_processor
import datetime
import keyedcache
import logging
import operator
import signals
log = logging.getLogger('satchmo_store.shop.models')
class NullConfig(object):
    """Placeholder shop configuration used when no real Config row exists.

    Carries the same attributes as ``Config`` so templates and views can
    render without a configured store.
    """

    def __init__(self):
        placeholder = _("Test Store")
        self.store_name = placeholder
        self.store_description = placeholder
        # All address/contact fields default to the empty string.
        for attr in ('store_email', 'street1', 'street2', 'city',
                     'state', 'postal_code', 'phone'):
            setattr(self, attr, "")
        self.site = None
        self.country = None
        self.sales_country = None
        self.in_country_only = True

    def _options(self):
        # Live configuration settings, same as on a real Config.
        return ConfigurationSettings()
    options = property(fget=_options)

    def __str__(self):
        return "Test Store - no configured store exists!"
class ConfigManager(models.Manager):
    # Manager adding cached lookup of the per-site shop configuration.
    def get_current(self, site=None):
        """Convenience method to get the current shop config.

        Accepts a Site instance (defaults to the current site) and returns
        the cached Config for it, falling back to a NullConfig stand-in
        when no Config row exists.
        """
        if not site:
            site = Site.objects.get_current()
        # The cache key uses the site id, not the Site object itself.
        site = site.id
        try:
            shop_config = keyedcache.cache_get("Config", site)
        except keyedcache.NotCachedError, nce:
            try:
                shop_config = self.get(site__id__exact=site)
                # Populate the cache under the exact key that just missed.
                keyedcache.cache_set(nce.key, value=shop_config)
            except Config.DoesNotExist:
                log.warning("No Shop Config found, using test shop config for site=%s.", site)
                # NullConfig is deliberately not cached, so a Config
                # created later will be picked up on the next call.
                shop_config = NullConfig()
        return shop_config
class Config(models.Model):
    """
    Used to store specific information about a store. Also used to
    configure various store behaviors.

    One Config exists per Site; it holds the shop's identity (name,
    contact address) and its sales policy (which countries it sells
    and ships to).
    """
    site = models.OneToOneField(Site, verbose_name=_("Site"), primary_key=True)
    store_name = models.CharField(_("Store Name"),max_length=100, unique=True)
    store_description = models.TextField(_("Description"), blank=True, null=True)
    store_email = models.EmailField(_("Email"), blank=True, null=True, max_length=75)
    street1=models.CharField(_("Street"),max_length=50, blank=True, null=True)
    street2=models.CharField(_("Street"), max_length=50, blank=True, null=True)
    city=models.CharField(_("City"), max_length=50, blank=True, null=True)
    state=models.CharField(_("State"), max_length=30, blank=True, null=True)
    postal_code=models.CharField(_("Zip Code"), blank=True, null=True, max_length=9)
    country=models.ForeignKey(Country, blank=False, null=False, verbose_name=_('Country'))
    phone = models.CharField(_("Phone Number"), blank=True, null=True, max_length=30)
    # When true, only customers in `sales_country` may purchase.
    in_country_only = models.BooleanField(
        _("Only sell to in-country customers?"),
        default=True)
    # Default customer country; save() falls back to `country` when unset.
    sales_country = models.ForeignKey(
        Country, blank=True, null=True,
        related_name='sales_country',
        verbose_name=_("Default country for customers"))
    shipping_countries = models.ManyToManyField(
        Country,
        blank=True,
        verbose_name=_("Shipping Countries"),
        related_name="shop_configs")
    objects = ConfigManager()

    def _options(self):
        # Live configuration settings for this store.
        return ConfigurationSettings()
    options = property(fget=_options)

    def areas(self):
        """Get country areas (states/counties). Used in forms."""
        if self.in_country_only:
            return self.sales_country.adminarea_set.filter(active=True)
        else:
            # No restriction applies; callers must handle None.
            return None

    def countries(self):
        """Get country selections. Used in forms."""
        if self.in_country_only:
            return Country.objects.filter(pk=self.sales_country.pk)
        else:
            return self.shipping_countries.filter(active=True)

    def _base_url(self, secure=False):
        # Absolute root URL of the shop's site, e.g. "http://example.com".
        prefix = "http"
        if secure:
            prefix += "s"
        return prefix + "://" + self.site.domain
    base_url = property(fget=_base_url)

    def save(self, **kwargs):
        """Save the config, keep the cache fresh, default sales_country."""
        keyedcache.cache_delete("Config", self.site.id)
        # ensure the default country is in shipping countries
        mycountry = self.country
        if mycountry:
            if not self.sales_country:
                log.debug("%s: No sales_country set, adding country of store, '%s'", self, mycountry)
                self.sales_country = mycountry
# This code doesn't work when creating a new site. At the time of creation, all of the necessary relationships
# aren't setup. I modified the load_store code so that it would create this relationship manually when running
# with sample data. This is a bit of a django limitation so I'm leaving this in here for now. - CBM
#             salescountry = self.sales_country
#             try:
#                 need = self.shipping_countries.get(pk=salescountry.pk)
#             except Country.DoesNotExist:
#                 log.debug("%s: Adding default country '%s' to shipping countries", self, salescountry.iso2_code)
#                 self.shipping_countries.add(salescountry)
        else:
            log.warn("%s: has no country set", self)
        super(Config, self).save(**kwargs)
        # Re-cache the fresh instance under the per-site key.
        keyedcache.cache_set("Config", self.site.id, value=self)

    def __unicode__(self):
        return self.store_name

    class Meta:
        verbose_name = _("Store Configuration")
        verbose_name_plural = _("Store Configurations")
class NullCart(object):
    """Empty-cart stand-in used before a real Cart exists.

    Lets callers treat "no cart yet" exactly like an empty cart instead of
    testing for None everywhere.
    """
    desc = None
    date_time_created = None
    customer = None
    total = Decimal("0")
    numItems = 0

    def add_item(self, *args, **kwargs):
        """Adding to a null cart is a no-op."""
        pass

    def remove_item(self, *args, **kwargs):
        """Removing from a null cart is a no-op."""
        pass

    def empty(self):
        """Already empty; nothing to do."""
        pass

    def __str__(self):
        return "NullCart (empty)"

    def __iter__(self):
        # Iteration yields nothing.
        return iter(())

    def __len__(self):
        return 0
class OrderCart(NullCart):
    """Read-only cart facade over an existing Order.

    Used when an order is being reloaded: exposes the order's line items
    and balance through the cart interface.
    """

    def __init__(self, order):
        self._order = order

    def _numItems(self):
        # Count of line items on the underlying order.
        return self._order.orderitem_set.count()
    numItems = property(_numItems)

    def _cartitem_set(self):
        # Expose the order's items under the cart attribute name.
        return self._order.orderitem_set
    cartitem_set = property(_cartitem_set)

    def _total(self):
        # Outstanding balance, not the original order total.
        return self._order.balance
    total = property(_total)

    # An order being reloaded never needs shipping recalculation.
    is_shippable = False

    def __str__(self):
        return "OrderCart (%i) = %i" % (self._order.id, len(self))

    def __len__(self):
        return self.numItems
class CartManager(models.Manager):

    def from_request(self, request, create=False, return_nullcart=True):
        """Get the current cart from the request.

        Resolution order: an "order" marker in the session yields an
        OrderCart; a stored cart id yields that Cart (stale ids are
        removed). When nothing is found, `create=True` makes and saves a
        new Cart (bound to the contact when known); otherwise a NullCart
        is returned, or Cart.DoesNotExist is raised when
        `return_nullcart` is False.
        """
        cart = None
        try:
            contact = Contact.objects.from_request(request, create=False)
        except Contact.DoesNotExist:
            contact = None
        if 'cart' in request.session:
            cartid = request.session['cart']
            if cartid == "order":
                # Session explicitly marks that an order is being reloaded.
                log.debug("Getting Order Cart from request")
                try:
                    order = Order.objects.from_request(request)
                    cart = OrderCart(order)
                except Order.DoesNotExist:
                    pass
            else:
                try:
                    cart = Cart.objects.get(id=cartid)
                except Cart.DoesNotExist:
                    log.debug('Removing invalid cart from session')
                    del request.session['cart']
        # NOTE(review): `cart` can only be None, a Cart, or an OrderCart
        # at this point, so this NullCart branch looks unreachable --
        # presumably defensive or a leftover; confirm before relying on it.
        if isinstance(cart, NullCart) and not isinstance(cart, OrderCart) and contact is not None:
            # Fall back to the customer's most recent existing cart.
            carts = Cart.objects.filter(customer=contact)
            if carts.count() > 0:
                cart = carts[0]
                request.session['cart'] = cart.id
        if not cart:
            if create:
                site = Site.objects.get_current()
                if contact is None:
                    cart = Cart(site=site)
                else:
                    cart = Cart(site=site, customer=contact)
                cart.save()
                request.session['cart'] = cart.id
            elif return_nullcart:
                cart = NullCart()
            else:
                raise Cart.DoesNotExist()
        #log.debug("Cart: %s", cart)
        return cart
class Cart(models.Model):
    """
    Store items currently in a cart.

    The desc isn't used but it is needed to make the admin interface work
    appropriately. Could be used for debugging.
    """
    site = models.ForeignKey(Site, verbose_name=_('Site'))
    desc = models.CharField(_("Description"), blank=True, null=True, max_length=10)
    date_time_created = models.DateTimeField(_("Creation Date"))
    customer = models.ForeignKey(Contact, blank=True, null=True, verbose_name=_('Customer'))
    objects = CartManager()

    def _get_count(self):
        # Total quantity across all line items (not the number of lines).
        itemCount = 0
        for item in self.cartitem_set.all():
            itemCount += item.quantity
        return (itemCount)
    numItems = property(_get_count)

    def _get_discount(self):
        # Amount saved by discounts: full price minus discounted total.
        return self.undiscounted_total - self.total
    discount = property(_get_discount)

    def _get_total(self, include_discount=True):
        """Sum the line totals, with or without discounts applied."""
        total = Decimal("0")
        for item in self.cartitem_set.all():
            if include_discount:
                total += item.line_total
            else:
                total += item.undiscounted_line_total
        return(total)
    total = property(_get_total)

    def _get_undiscounted_total(self):
        return self._get_total(False)
    undiscounted_total = property(_get_undiscounted_total)

    def __iter__(self):
        return iter(self.cartitem_set.all())

    def __len__(self):
        # Number of distinct line items; see numItems for total quantity.
        return self.cartitem_set.count()

    def __nonzero__(self):
        """
        This is used by django to evaluate whether or not to
        include an object when dumping data. Therefore, we will dump
        carts even if they have no items. This is ok because the most likely
        scenario is moving data from one db to the next. See ticket #1015 for
        discussion.
        Use len(cart) if you want to know if there are items in the cart.
        """
        return True

    def _is_empty(self):
        return self.cartitem_set.count() == 0
    is_empty = property(_is_empty)

    def __unicode__(self):
        return u"Shopping Cart (%s)" % self.date_time_created

    def add_item(self, chosen_item, number_added, details=[]):
        """Add a quantity of a product to the cart.

        Merges into an existing line item only when the product and every
        detail match exactly; otherwise a new line item is created.
        Returns the CartItem that was created or updated.

        NOTE(review): `details` is a mutable default argument; it appears
        to be only read here, but confirm before refactoring.
        """
        alreadyInCart = False
        # Custom Products will not be added, they will each get their own line item
        if 'CustomProduct' in chosen_item.get_subtypes():
            item_to_modify = CartItem(cart=self, product=chosen_item, quantity=Decimal('0'))
        else:
            item_to_modify = CartItem(cart=self, product=chosen_item, quantity=Decimal('0'))
            for similarItem in self.cartitem_set.filter(product__id = chosen_item.id):
                # A candidate only matches when it has the same number of
                # details and each detail matches by name/value/price.
                looksTheSame = len(details) == similarItem.details.count()
                if looksTheSame:
                    for detail in details:
                        try:
                            similarItem.details.get(
                                name=detail['name'],
                                value=str(detail['value']), # typecasting for Postgresql
                                price_change=detail['price_change']
                            )
                        except CartItemDetails.DoesNotExist:
                            looksTheSame = False
                if looksTheSame:
                    item_to_modify = similarItem
                    alreadyInCart = True
                    break
        # Verify that the 'item_to_modify' can be added to the cart regardless
        # of whether or not it is already in the cart
        signals.satchmo_cart_add_verify.send(
            self,
            cart=self,
            cartitem=item_to_modify,
            added_quantity=number_added,
            details=details)
        if not alreadyInCart:
            self.cartitem_set.add(item_to_modify)
        item_to_modify.quantity += number_added
        item_to_modify.save()
        # Details only need creating for a brand-new line item.
        if not alreadyInCart:
            for data in details:
                item_to_modify.add_detail(data)
        return item_to_modify

    def remove_item(self, chosen_item_id, number_removed):
        """Decrease a line item's quantity, deleting it when it hits zero."""
        item_to_modify = self.cartitem_set.get(id = chosen_item_id)
        item_to_modify.quantity -= number_removed
        if item_to_modify.quantity <= 0:
            item_to_modify.delete()
        else:
            item_to_modify.save()

    def merge_carts(self, src_cart):
        """
        Merge the items from the src_cart into
        the destination. Source cart will be emptied.
        """
        for item in src_cart.cartitem_set.all():
            self.add_item(item.product, item.quantity, item.details.all())
            item.delete()
        self.save()

    def empty(self):
        """Delete every line item from this cart."""
        for item in self.cartitem_set.all():
            item.delete()
        self.save()

    def save(self, **kwargs):
        """Ensure we have a date_time_created before saving the first time."""
        if not self.pk:
            self.date_time_created = datetime.datetime.now()
        try:
            site = self.site
        except Site.DoesNotExist:
            # Default the cart to the current site if none was assigned.
            self.site = Site.objects.get_current()
        super(Cart, self).save(**kwargs)

    def _get_shippable(self):
        """Return whether the cart contains shippable items."""
        for cartitem in self.cartitem_set.all():
            if cartitem.is_shippable:
                return True
        return False
    is_shippable = property(_get_shippable)

    def get_shipment_list(self):
        """Return a list of shippable products, where each item is split into
        multiple elements, one for each quantity."""
        items = []
        for cartitem in self.cartitem_set.all():
            if cartitem.is_shippable:
                p = cartitem.product
                # Fractional quantities round up to whole shipped units.
                q = int(cartitem.quantity.quantize(Decimal('0'), ROUND_CEILING))
                for single in range(0, q):
                    items.append(p)
        return items

    class Meta:
        verbose_name = _("Shopping Cart")
        verbose_name_plural = _("Shopping Carts")
class NullCartItem(object):
    """Stand-in line item: carries only an id, a zero quantity, and a
    zero line total."""

    def __init__(self, itemid):
        self.id = itemid
        self.line_total = 0
        self.quantity = Decimal('0')
class CartItem(models.Model):
    """
    An individual item in the cart.
    """
    cart = models.ForeignKey(Cart, verbose_name=_('Cart'))
    product = models.ForeignKey(Product, verbose_name=_('Product'))
    quantity = models.DecimalField(_("Quantity"), max_digits=18, decimal_places=6)

    def _get_line_unitprice(self, include_discount=True):
        """Compute the per-unit price for this line.

        Combines the product's quantity-discounted price with the summed
        price changes from this line's details, and lets listeners of the
        satchmo_cartitem_price_query signal adjust it.
        """
        # Get the qty discount price as the unit price for the line.
        if config_value('SHOP','CART_QTY'):
            # CART_QTY mode: quantity discounts consider the whole cart's
            # item count, not just this line's quantity.
            qty = self.cart.numItems
        else:
            qty = self.quantity
        # Stored on self so signal listeners can inspect/adjust them.
        self.qty_price = self.get_qty_price(qty, include_discount=include_discount)
        self.detail_price = self.get_detail_price()
        #send signal to possibly adjust the unitprice
        if include_discount:
            signals.satchmo_cartitem_price_query.send(self, cartitem=self)
        price = self.qty_price + self.detail_price
        #clean up temp vars
        del self.qty_price
        del self.detail_price
        return price
    unit_price = property(_get_line_unitprice)

    def _get_undiscounted_unitprice(self):
        return self._get_line_unitprice(include_discount=False)
    undiscounted_unit_price = property(_get_undiscounted_unitprice)

    def get_detail_price(self):
        """Get the delta price based on detail modifications"""
        delta = Decimal("0")
        if self.has_details:
            for detail in self.details.all():
                # Only details with both a price change and a value count.
                if detail.price_change and detail.value:
                    delta += detail.price_change
        return delta

    def get_qty_price(self, qty, include_discount=True):
        """Get the price for for each unit before any detail modifications"""
        return self.product.get_qty_price(qty, include_discount=include_discount)

    def _get_line_total(self):
        return self.unit_price * self.quantity
    line_total = property(_get_line_total)

    def _get_undiscounted_line_total(self):
        return self.undiscounted_unit_price * self.quantity
    undiscounted_line_total = property(_get_undiscounted_line_total)

    def _get_description(self):
        # Localized product name for display.
        return self.product.translated_name()
    description = property(_get_description)

    def _is_shippable(self):
        return self.product.is_shippable
    is_shippable = property(fget=_is_shippable)

    def add_detail(self, data):
        """Attach one detail record (name/value/sort_order/price_change)."""
        detl = CartItemDetails(
            cartitem=self,
            name=data['name'],
            value=data['value'],
            sort_order=data['sort_order'],
            price_change=data['price_change'])
        detl.save()
        #self.details.add(detl)

    def _has_details(self):
        """
        Determine if this specific item has more detail.
        """
        return (self.details.count() > 0)
    has_details = property(_has_details)

    def __unicode__(self):
        money_format = force_unicode(moneyfmt(self.line_total))
        return u'%s - %s %s' % (self.quantity, self.product.name,
            money_format)

    class Meta:
        verbose_name = _("Cart Item")
        verbose_name_plural = _("Cart Items")
        ordering = ('id',)
class CartItemDetails(models.Model):
    """
    An arbitrary detail about a cart item.

    Each detail is a name/value pair that may carry a price adjustment,
    summed into the line's unit price by CartItem.get_detail_price().
    """
    cartitem = models.ForeignKey(CartItem, related_name='details', )
    value = models.TextField(_('detail'))
    name = models.CharField(_('name'), max_length=100)
    # Optional price delta this detail contributes to the line's unit price.
    price_change = CurrencyField(_("Item Detail Price Change"), max_digits=6,
        decimal_places=2, blank=True, null=True)
    sort_order = models.IntegerField(_("Sort Order"),
        help_text=_("The display order for this group."))

    class Meta:
        ordering = ('sort_order',)
        verbose_name = _("Cart Item Detail")
        verbose_name_plural = _("Cart Item Details")
# How an order was placed; used as field choices on Order.
ORDER_CHOICES = (
    ('Online', _('Online')),
    ('In Person', _('In Person')),
    ('Show', _('Show')),
)

# Lifecycle states an order can be in; used as status field choices.
ORDER_STATUS = (
    ('Temp', _('Temp')),
    ('New', _('New')),
    ('Blocked', _('Blocked')),
    ('In Process', _('In Process')),
    ('Billed', _('Billed')),
    ('Shipped', _('Shipped')),
    ('Complete', _('Complete')),
    ('Cancelled', _('Cancelled')),
)
class OrderManager(models.Manager):

    def from_request(self, request):
        """Get the order from the session.

        Returns:
        - Order object

        Raises Order.DoesNotExist when the session has no valid order id;
        a stale id (pointing at a deleted order) is also removed from the
        session before raising.
        """
        order = None
        if 'orderID' in request.session:
            try:
                order = Order.objects.get(id=request.session['orderID'])
                # TODO: Validate against logged-in user.
            except Order.DoesNotExist:
                pass
            if not order:
                # Stale id: drop it so we don't retry on every request.
                del request.session['orderID']
        if not order:
            raise Order.DoesNotExist()
        return order

    def remove_partial_order(self, request):
        """Delete cart from request if it exists and is incomplete (has no status).

        Returns True when an incomplete order was deleted, False otherwise.
        """
        try:
            order = Order.objects.from_request(request)
            if not order.status:
                del request.session['orderID']
                log.info("Deleting incomplete order #%i from database", order.id)
                order.delete()
                return True
        except Order.DoesNotExist:
            pass
        return False
class Order(models.Model):
"""
Orders contain a copy of all the information at the time the order was
placed.
"""
site = models.ForeignKey(Site, verbose_name=_('Site'))
contact = models.ForeignKey(Contact, verbose_name=_('Contact'))
ship_addressee = models.CharField(_("Addressee"), max_length=61, blank=True)
ship_street1 = models.CharField(_("Street"), max_length=80, blank=True)
ship_street2 = models.CharField(_("Street"), max_length=80, blank=True)
ship_city = models.CharField(_("City"), max_length=50, blank=True)
ship_state = models.CharField(_("State"), max_length=50, blank=True)
ship_postal_code = models.CharField(_("Zip Code"), max_length=30, blank=True)
ship_country = models.CharField(_("Country"), max_length=2, blank=True)
bill_addressee = models.CharField(_("Addressee"), max_length=61, blank=True)
bill_street1 = models.CharField(_("Street"), max_length=80, blank=True)
bill_street2 = models.CharField(_("Street"), max_length=80, blank=True)
bill_city = models.CharField(_("City"), max_length=50, blank=True)
bill_state = models.CharField(_("State"), max_length=50, blank=True)
bill_postal_code = models.CharField(_("Zip Code"), max_length=30, blank=True)
bill_country = models.CharField(_("Country"), max_length=2, blank=True)
notes = models.TextField(_("Notes"), blank=True, null=True)
sub_total = CurrencyField(_("Subtotal"),
max_digits=18, decimal_places=10, blank=True, null=True, display_decimal=4)
total = CurrencyField(_("Total"),
max_digits=18, decimal_places=10, blank=True, null=True, display_decimal=4)
discount_code = models.CharField(
_("Discount Code"), max_length=20, blank=True, null=True,
help_text=_("Coupon Code"))
discount = CurrencyField(_("Discount amount"),
max_digits=18, decimal_places=10, blank=True, null=True)
method = models.CharField(_("Order method"),
choices=ORDER_CHOICES, max_length=50, blank=True)
shipping_description = models.CharField(_("Shipping Description"),
max_length=50, blank=True, null=True)
shipping_method = models.CharField(_("Shipping Method"),
max_length=50, blank=True, null=True)
shipping_model = ShippingChoiceCharField(_("Shipping Models"),
max_length=30, blank=True, null=True)
shipping_cost = CurrencyField(_("Shipping Cost"),
max_digits=18, decimal_places=10, blank=True, null=True)
shipping_discount = CurrencyField(_("Shipping Discount"),
max_digits=18, decimal_places=10, blank=True, null=True)
tax = CurrencyField(_("Tax"),
max_digits=18, decimal_places=10, blank=True, null=True)
time_stamp = models.DateTimeField(_("Timestamp"), blank=True, null=True)
status = models.CharField(_("Status"), max_length=20, choices=ORDER_STATUS,
blank=True, help_text=_("This is set automatically."))
objects = OrderManager()
    def __unicode__(self):
        """Render as 'Order #id: contact full name'."""
        return "Order #%s: %s" % (self.id, self.contact.full_name)
    def add_status(self, status=None, notes=""):
        """Append an OrderStatus row; defaults to the latest status, or 'New'."""
        orderstatus = OrderStatus()
        if not status:
            try:
                curr_status = self.orderstatus_set.latest()
                status = curr_status.status
            except OrderStatus.DoesNotExist:
                # First status entry for this order.
                status = 'New'
        orderstatus.status = status
        orderstatus.notes = notes
        orderstatus.order = self
        # OrderStatus.save() mirrors the status back onto this order.
        orderstatus.save()
    def add_variable(self, key, value):
        """Add an OrderVariable, used for misc stuff that is just too small to get its own field"""
        try:
            # Update in place when the key already exists.
            v = self.variables.get(key__exact=key)
            v.value = value
        except OrderVariable.DoesNotExist:
            v = OrderVariable(order=self, key=key, value=value)
        v.save()
    def _authorized_remaining(self):
        """Sum of authorization amounts not yet captured (complete=False)."""
        auths = [p.amount for p in self.authorizations.filter(complete=False)]
        if auths:
            amount = reduce(operator.add, auths)
        else:
            amount = Decimal('0.00')
        return amount
    authorized_remaining = property(fget=_authorized_remaining)
    def _get_count(self):
        """Total quantity summed across all line items."""
        itemCount = 0
        for item in self.orderitem_set.all():
            itemCount += item.quantity
        return (itemCount)
    numItems = property(_get_count)
    def get_variable(self, key, default=None):
        """Return the OrderVariable matching `key`, or `default` when absent."""
        qry = self.variables.filter(key__exact=key)
        ct = qry.count()
        if ct == 0:
            return default
        else:
            return qry[0]
def copy_addresses(self):
"""
Copy the addresses so we know what the information was at time of order.
"""
shipaddress = self.contact.shipping_address
billaddress = self.contact.billing_address
self.ship_addressee = shipaddress.addressee
self.ship_street1 = shipaddress.street1
self.ship_street2 = shipaddress.street2
self.ship_city = shipaddress.city
self.ship_state = shipaddress.state
self.ship_postal_code = shipaddress.postal_code
self.ship_country = shipaddress.country.iso2_code
self.bill_addressee = billaddress.addressee
self.bill_street1 = billaddress.street1
self.bill_street2 = billaddress.street2
self.bill_city = billaddress.city
self.bill_state = billaddress.state
self.bill_postal_code = billaddress.postal_code
self.bill_country = billaddress.country.iso2_code
def remove_all_items(self):
"""Delete all items belonging to this order."""
for item in self.orderitem_set.all():
item.delete()
self.save()
def _balance(self):
if self.total is None:
self.force_recalculate_total(save=True)
return trunc_decimal(self.total-self.balance_paid, 2)
balance = property(fget=_balance)
def balance_forward(self):
return moneyfmt(self.balance)
balance_forward = property(fget=balance_forward)
def _balance_paid(self):
payments = [p.amount for p in self.payments.all()]
if payments:
paid = reduce(operator.add, payments)
else:
paid = Decimal("0.0000000000")
return paid + self.authorized_remaining
balance_paid = property(_balance_paid)
def _credit_card(self):
"""Return the credit card associated with this payment."""
from payment.models import CreditCardDetail
for payment in self.payments.order_by('-time_stamp'):
try:
if payment.creditcards.count() > 0:
return payment.creditcards.get()
except CreditCardDetail.DoesNotExist:
pass
return None
credit_card = property(_credit_card)
    def _full_bill_street(self, delim="\n"):
        """
        Return both billing street entries separated by delim.
        Note - Use linebreaksbr filter to convert to html in templates.
        """
        if self.bill_street2:
            address = self.bill_street1 + delim + self.bill_street2
        else:
            address = self.bill_street1
        return mark_safe(address)
    full_bill_street = property(_full_bill_street)
    def _full_ship_street(self, delim="\n"):
        """
        Return both shipping street entries separated by delim.
        Note - Use linebreaksbr filter to convert to html in templates.
        """
        if self.ship_street2:
            address = self.ship_street1 + delim + self.ship_street2
        else:
            address = self.ship_street1
        return mark_safe(address)
    full_ship_street = property(_full_ship_street)
    def _ship_country_name(self):
        """Full country name for the shipping address, from its ISO-2 code."""
        return Country.objects.get(iso2_code=self.ship_country).name
    ship_country_name = property(_ship_country_name)
    def _bill_country_name(self):
        """Full country name for the billing address, from its ISO-2 code."""
        return Country.objects.get(iso2_code=self.bill_country).name
    bill_country_name = property(_bill_country_name)
    def _ship_first_name(self):
        """Given the addressee name, try to return a first name"""
        # Everything except the last word; '' for empty/single-word addressees.
        return ' '.join(self.ship_addressee.split()[0:-1]) or ''
    ship_first_name = property(_ship_first_name)
    def _ship_last_name(self):
        """Given the addressee name, try to return a last name"""
        # The last word of the addressee, or ''.
        return ' '.join(self.ship_addressee.split()[-1:]) or ''
    ship_last_name = property(_ship_last_name)
def _discounted_sub_total(self):
return self.sub_total - self.item_discount
discounted_sub_total = property(_discounted_sub_total)
def _get_balance_remaining_url(self):
return ('satchmo_balance_remaining_order', None, {'order_id' : self.id})
get_balance_remaining_url = models.permalink(_get_balance_remaining_url)
def _partially_paid(self):
return self.balance_paid > Decimal("0.0000000000")
partially_paid = property(_partially_paid)
def _is_partially_paid(self):
if self.total:
return (
float(self.balance) > 0.0
and float(self.balance_paid) > 0.0
and self.balance != self.balance_paid
)
else:
return False
is_partially_paid = property(fget=_is_partially_paid)
    def payments_completed(self):
        """Payments with a real amount, excluding non-null 'PENDING' transactions."""
        q = self.payments.exclude(transaction_id__isnull = False, transaction_id = "PENDING")
        result = [p for p in q if p.amount]
        return result
    def save(self, **kwargs):
        """
        Copy addresses from contact. If the order has just been created, set
        the create_date.
        """
        if not self.pk:
            self.time_stamp = datetime.datetime.now()
            self.copy_addresses()
        super(Order, self).save(**kwargs) # Call the "real" save() method.
def invoice(self):
url = urlresolvers.reverse('satchmo_print_shipping', None, None, {'doc' : 'invoice', 'id' : self.id})
return mark_safe(u'<a href="%s">%s</a>' % (url, ugettext('View')))
invoice.allow_tags = True
def _item_discount(self):
"""Get the discount of just the items, less the shipping discount."""
return self.discount-self.shipping_discount
item_discount = property(_item_discount)
def packingslip(self):
url = urlresolvers.reverse('satchmo_print_shipping', None, None, {'doc' : 'packingslip', 'id' : self.id})
return mark_safe(u'<a href="%s">%s</a>' % (url, ugettext('View')))
packingslip.allow_tags = True
def recalculate_total(self, save=True):
"""Calculates sub_total, taxes and total if the order is not already partially paid."""
if self.is_partially_paid:
log.debug("Order %i - skipping recalculate_total since product is partially paid.", self.id)
else:
self.force_recalculate_total(save=save)
def force_recalculate_total(self, save=True):
"""Calculates sub_total, taxes and total."""
zero = Decimal("0.0000000000")
total_discount = Decimal("0.0000000000")
discount = Discount.objects.by_code(self.discount_code)
discount.calc(self)
discounts = discount.item_discounts
itemprices = []
fullprices = []
qty_override = config_value('SHOP','CART_QTY')
if qty_override:
itemct = self.numItems
for lineitem in self.orderitem_set.all():
lid = lineitem.id
if lid in discounts:
lineitem.discount = discounts[lid]
total_discount += lineitem.discount
#log.debug('total_discount (calc): %s', total_discount)
else:
lineitem.discount = zero
# now double check against other discounts, such as tiered discounts
if qty_override:
qty = itemct
else:
qty = lineitem.quantity
adjustment = get_product_quantity_adjustments(lineitem.product, qty=qty)
if adjustment and adjustment.price:
baseprice = adjustment.price.price
finalprice = adjustment.final_price()
#We need to add in any OrderItemDetail price adjustments before we do anything else
baseprice += lineitem.get_detail_price()
finalprice += lineitem.get_detail_price()
if baseprice > finalprice or baseprice != lineitem.unit_price:
unitdiscount = (lineitem.discount/lineitem.quantity) + baseprice-finalprice
unitdiscount = trunc_decimal(unitdiscount, 2)
linediscount = unitdiscount * lineitem.quantity
total_discount += linediscount
#log.debug('total_discount (line): %s', total_discount)
fullydiscounted = (baseprice - unitdiscount) * lineitem.quantity
lineitem.unit_price = baseprice
lineitem.discount = linediscount
lineitem.line_item_price = baseprice * lineitem.quantity
log.debug('Adjusting lineitem unit price for %s. Full price=%s, discount=%s. Final price for qty %d is %s',
lineitem.product.slug, baseprice, unitdiscount, lineitem.quantity, fullydiscounted)
if save:
lineitem.save()
itemprices.append(lineitem.sub_total)
fullprices.append(lineitem.line_item_price)
shipprice = Price()
shipprice.price = self.shipping_cost
shipadjust = PriceAdjustmentCalc(shipprice)
if 'Shipping' in discounts:
shipadjust += PriceAdjustment('discount', _('Discount'), discounts['Shipping'])
signals.satchmo_shipping_price_query.send(self, adjustment=shipadjust)
shipdiscount = shipadjust.total_adjustment()
self.shipping_discount = shipdiscount
total_discount += shipdiscount
#log.debug('total_discount (+ship): %s', total_discount)
self.discount = total_discount
if itemprices:
item_sub_total = reduce(operator.add, itemprices)
else:
item_sub_total = zero
if fullprices:
full_sub_total = reduce(operator.add, fullprices)
else:
full_sub_total = zero
self.sub_total = full_sub_total
taxProcessor = get_tax_processor(self)
totaltax, taxrates = taxProcessor.process()
self.tax = totaltax
# clear old taxes
for taxdetl in self.taxes.all():
taxdetl.delete()
for taxdesc, taxamt in taxrates.items():
taxdetl = OrderTaxDetail(order=self, tax=taxamt, description=taxdesc, method=taxProcessor.method)
taxdetl.save()
log.debug("Order #%i, recalc: sub_total=%s, shipping=%s, discount=%s, tax=%s",
self.id,
moneyfmt(item_sub_total),
moneyfmt(self.shipping_sub_total),
moneyfmt(self.discount),
moneyfmt(self.tax))
self.total = Decimal(item_sub_total + self.shipping_sub_total + self.tax)
if save:
self.save()
def shippinglabel(self):
url = urlresolvers.reverse('satchmo_print_shipping', None, None, {'doc' : 'shippinglabel', 'id' : self.id})
return mark_safe(u'<a href="%s">%s</a>' % (url, ugettext('View')))
shippinglabel.allow_tags = True
def _order_total(self):
#Needed for the admin list display
return moneyfmt(self.total)
order_total = property(_order_total)
def order_success(self):
"""Run each item's order_success method."""
log.info("Order success: %s", self)
for orderitem in self.orderitem_set.all():
subtype = orderitem.product.get_subtype_with_attr('order_success')
if subtype:
subtype.order_success(self, orderitem)
signals.order_success.send(self, order=self)
def order_cancel(self):
"""Ask if the order can be cancelled. By default, do not cancel shipped, completed and
already cancelled orders."""
self.is_cancellable = self.status not in ('Shipped', 'Completed', 'Cancelled')
# listeners can override default flag setting and (dis)allow cancellation
signals.order_cancel_query.send(self, order=self)
if self.is_cancellable:
self.add_status('Cancelled')
signals.order_cancelled.send(self, order=self)
return self.is_cancellable
def _paid_in_full(self):
"""True if total has been paid"""
return self.balance == Decimal('0.00')
paid_in_full = property(fget=_paid_in_full)
def _has_downloads(self):
"""Determine if there are any downloadable products on this order"""
if 'product.modules.downloadable' in settings.INSTALLED_APPS \
and self.downloadlink_set.count() > 0:
return True
return False
has_downloads = property(_has_downloads)
def _is_downloadable(self):
"""Determine if all products on this order are downloadable"""
for orderitem in self.orderitem_set.all():
if not orderitem.product.is_downloadable:
return False
return True
is_downloadable = property(_is_downloadable)
def _is_shippable(self):
"""Determine if we will be shipping any items on this order """
for orderitem in self.orderitem_set.all():
if orderitem.is_shippable:
return True
return False
is_shippable = property(_is_shippable)
    def _shipping_sub_total(self):
        """Shipping cost less shipping discount.

        NOTE(review): as a side effect this getter replaces a None cost or
        discount with Decimal('0.00') on the instance (not saved).
        """
        if self.shipping_cost is None:
            self.shipping_cost = Decimal('0.00')
        if self.shipping_discount is None:
            self.shipping_discount = Decimal('0.00')
        return self.shipping_cost-self.shipping_discount
    shipping_sub_total = property(_shipping_sub_total)
def _shipping_tax(self):
rates = self.taxes.filter(description__iexact = 'shipping')
if rates.count()>0:
tax = reduce(operator.add, [t.tax for t in rates])
else:
tax = Decimal("0.0000000000")
return tax
shipping_tax = property(_shipping_tax)
def _shipping_with_tax(self):
return self.shipping_sub_total + self.shipping_tax
shipping_with_tax = property(_shipping_with_tax)
def sub_total_with_tax(self):
return reduce(operator.add, [o.total_with_tax for o in self.orderitem_set.all()])
def update_status(self, status):
"""WARNING: To just change order status, use Order.add_status().
This method is called back when OrderStatus is saved and does not create required object."""
oldstatus = self.status
self.status = status
self.save()
if (oldstatus != self.status):
signals.satchmo_order_status_changed.send(self, oldstatus=oldstatus, newstatus=status, order=self)
def validate(self, request):
"""
Return whether the order is valid.
Not guaranteed to be side-effect free.
"""
valid = True
for orderitem in self.orderitem_set.all():
for subtype_name in orderitem.product.get_subtypes():
subtype = getattr(orderitem.product, subtype_name.lower())
validate_method = getattr(subtype, 'validate_order', None)
if validate_method:
valid = valid and validate_method(request, self, orderitem)
return valid
class Meta:
verbose_name = _("Product Order")
verbose_name_plural = _("Product Orders")
class OrderItem(models.Model):
"""
A line item on an order.
"""
order = models.ForeignKey(Order, verbose_name=_("Order"))
product = models.ForeignKey(Product, verbose_name=_("Product"))
quantity = models.DecimalField(_("Quantity"), max_digits=18, decimal_places=6)
unit_price = CurrencyField(_("Unit price"),
max_digits=18, decimal_places=10)
unit_tax = CurrencyField(_("Unit tax"), default=Decimal('0.00'),
max_digits=18, decimal_places=10)
line_item_price = CurrencyField(_("Line item price"),
max_digits=18, decimal_places=10)
tax = CurrencyField(_("Line item tax"), default=Decimal('0.00'),
max_digits=18, decimal_places=10)
expire_date = models.DateField(_("Subscription End"), help_text=_("Subscription expiration date."), blank=True, null=True)
completed = models.BooleanField(_("Completed"), default=False)
discount = CurrencyField(_("Line item discount"),
max_digits=18, decimal_places=10, blank=True, null=True)
def __unicode__(self):
return self.product.translated_name()
def _get_category(self):
return(self.product.get_category.translated_name())
category = property(_get_category)
def _is_shippable(self):
return self.product.is_shippable
is_shippable = property(fget=_is_shippable)
def _has_details(self):
"""Determine if this specific item has more detail"""
return (self.orderitemdetail_set.count() > 0)
has_details = property(_has_details)
def get_detail_price(self):
"""Get the delta price based on detail modifications"""
delta = Decimal("0.000000")
if self.has_details:
for detail in self.orderitemdetail_set.all():
if detail.price_change and detail.value:
delta += detail.price_change
return delta
def _sub_total(self):
if self.discount:
return self.line_item_price-self.discount
else:
return self.line_item_price
sub_total = property(_sub_total)
def _total_with_tax(self):
return self.sub_total + self.tax
total_with_tax = property(_total_with_tax)
def _unit_price_with_tax(self):
return self.unit_price + self.unit_tax
unit_price_with_tax = property(_unit_price_with_tax)
def _get_description(self):
return self.product.translated_name()
description = property(_get_description)
def _get_line_total(self):
return self.unit_price * self.quantity
line_total = property(_get_line_total)
def save(self, **kwargs):
self.update_tax()
super(OrderItem, self).save(**kwargs)
def update_tax(self):
taxclass = self.product.taxClass
processor = get_tax_processor(order=self.order)
if self.product.taxable:
self.unit_tax = processor.by_price(taxclass, self.unit_price)
self.tax = processor.by_orderitem(self)
class Meta:
verbose_name = _("Order Line Item")
verbose_name_plural = _("Order Line Items")
ordering = ('id',)
class OrderItemDetail(models.Model):
"""
Name, value pair and price delta associated with a specific item in an order
"""
item = models.ForeignKey(OrderItem, verbose_name=_("Order Item"), )
name = models.CharField(_('Name'), max_length=100)
value = models.CharField(_('Value'), max_length=255)
price_change = CurrencyField(_("Price Change"), max_digits=18, decimal_places=10, blank=True, null=True)
sort_order = models.IntegerField(_("Sort Order"),
help_text=_("The display order for this group."))
def __unicode__(self):
return u"%s - %s,%s" % (self.item, self.name, self.value)
class Meta:
verbose_name = _("Order Item Detail")
verbose_name_plural = _("Order Item Details")
ordering = ('sort_order',)
class OrderStatus(models.Model):
    """
    An order will have multiple statuses as it moves its way through processing.
    """
    order = models.ForeignKey(Order, verbose_name=_("Order"))
    status = models.CharField(_("Status"),
        max_length=20, choices=ORDER_STATUS, blank=True)
    notes = models.CharField(_("Notes"), max_length=100, blank=True)
    time_stamp = models.DateTimeField(_("Timestamp"))
    def __unicode__(self):
        return self.status
    def save(self, **kwargs):
        # Stamp new rows that don't carry an explicit timestamp.
        if not self.pk and not self.time_stamp:
            self.time_stamp = datetime.datetime.now()
        super(OrderStatus, self).save(**kwargs)
        # Mirror the new status back onto the parent order (fires the
        # satchmo_order_status_changed signal when it actually changes).
        self.order.update_status(self.status)
    class Meta:
        verbose_name = _("Order Status")
        verbose_name_plural = _("Order Statuses")
        ordering = ('time_stamp',)
        get_latest_by = 'time_stamp'
class OrderPaymentBase(models.Model):
    """Abstract base for payment-like rows (payments, authorizations, ...)."""
    payment = PaymentChoiceCharField(_("Payment Method"),
        max_length=25, blank=True)
    amount = CurrencyField(_("amount"),
        max_digits=18, decimal_places=10, blank=True, null=True)
    time_stamp = models.DateTimeField(_("timestamp"), blank=True, null=True)
    transaction_id = models.CharField(_("Transaction ID"), max_length=45, blank=True, null=True)
    details = models.CharField(_("Details"), max_length=255, blank=True, null=True)
    reason_code = models.CharField(_('Reason Code'), max_length=255, blank=True, null=True)
    def _credit_card(self):
        """Return the credit card associated with this payment."""
        try:
            return self.creditcards.get()
        except self.creditcards.model.DoesNotExist:
            return None
    credit_card = property(_credit_card)
    def _amount_total(self):
        """Formatted money string for the amount."""
        return moneyfmt(self.amount)
    amount_total = property(fget=_amount_total)
    def save(self, **kwargs):
        # Stamp the first save with the current time.
        if not self.pk:
            self.time_stamp = datetime.datetime.now()
        super(OrderPaymentBase, self).save(**kwargs)
    class Meta:
        abstract = True
class OrderAuthorization(OrderPaymentBase):
    """A pre-authorized amount, linked to the OrderPayment that captures it."""
    order = models.ForeignKey(Order, related_name="authorizations")
    capture = models.ForeignKey('OrderPayment', related_name="authorizations")
    complete = models.BooleanField(_('Complete'), default=False)
    def __unicode__(self):
        if self.id is not None:
            return u"Order Authorization #%i" % self.id
        else:
            return u"Order Authorization (unsaved)"
    def remaining(self):
        """Uncaptured balance of the order, capped at this auth's amount (2 dp)."""
        payments = [p.amount for p in self.order.payments.all()]
        if payments:
            amount = reduce(operator.add, payments)
        else:
            amount = Decimal('0.00')
        remaining = self.order.total - amount
        if remaining > self.amount:
            remaining = self.amount
        return trunc_decimal(remaining, 2)
    def save(self, **kwargs):
        # create linked payment
        try:
            capture = self.capture  # EAFP probe for an existing linked payment
        except OrderPayment.DoesNotExist:
            log.debug('Payment Authorization - creating linked payment')
            log.debug('order is: %s', self.order)
            self.capture = OrderPayment.objects.create_linked(self)
        # NOTE(review): super(OrderPaymentBase, ...) skips OrderPaymentBase.save,
        # so time_stamp is NOT auto-set here — confirm this is intentional.
        super(OrderPaymentBase, self).save(**kwargs)
    class Meta:
        verbose_name = _("Order Payment Authorization")
        verbose_name_plural = _("Order Payment Authorizations")
class OrderPaymentManager(models.Manager):
def create_linked(self, other, **kwargs):
linked = OrderPayment(
order = other.order,
payment = other.payment,
amount=Decimal('0.00'),
transaction_id="LINKED",
details=other.details,
reason_code="")
linked.save(**kwargs)
return linked
class OrderPayment(OrderPaymentBase):
order = models.ForeignKey(Order, related_name="payments")
objects = OrderPaymentManager()
def __unicode__(self):
if self.id is not None:
return u"Order Payment #%i" % self.id
else:
return u"Order Payment (unsaved)"
class Meta:
verbose_name = _("Order Payment")
verbose_name_plural = _("Order Payments")
class OrderPendingPayment(OrderPaymentBase):
    """A payment that has been initiated but not yet captured."""
    order = models.ForeignKey(Order, related_name="pendingpayments")
    capture = models.ForeignKey('OrderPayment', related_name="pendingpayments")
    def __unicode__(self):
        if self.id is not None:
            return u"Order Pending Payment #%i" % self.id
        else:
            return u"Order Pending Payment (unsaved)"
    def save(self, **kwargs):
        # create linked payment
        try:
            capture = self.capture  # EAFP probe for an existing linked payment
        except OrderPayment.DoesNotExist:
            log.debug('Pending Payment - creating linked payment')
            self.capture = OrderPayment.objects.create_linked(self, **kwargs)
        # NOTE(review): super(OrderPaymentBase, ...) skips OrderPaymentBase.save,
        # so time_stamp is NOT auto-set here — confirm this is intentional.
        super(OrderPaymentBase, self).save(**kwargs)
    class Meta:
        verbose_name = _("Order Pending Payment")
        verbose_name_plural = _("Order Pending Payments")
class OrderPaymentFailure(OrderPaymentBase):
order = models.ForeignKey(Order, null=True, blank=True, related_name='paymentfailures')
class OrderVariable(models.Model):
order = models.ForeignKey(Order, related_name="variables")
key = models.SlugField(_('key'), )
value = models.CharField(_('value'), max_length=100)
class Meta:
ordering=('key',)
verbose_name = _("Order variable")
verbose_name_plural = _("Order variables")
def __unicode__(self):
if len(self.value)>10:
v = self.value[:10] + '...'
else:
v = self.value
return u"OrderVariable: %s=%s" % (self.key, v)
class OrderTaxDetail(models.Model):
"""A tax line item"""
order = models.ForeignKey(Order, related_name="taxes")
method = models.CharField(_("Model"), max_length=50, )
description = models.CharField(_("Description"), max_length=50, blank=True)
tax = CurrencyField(_("Tax"),
max_digits=18, decimal_places=10, blank=True, null=True)
def __unicode__(self):
if self.description:
return u"Tax: %s %s" % (self.description, self.tax)
else:
return u"Tax: %s" % self.tax
class Meta:
verbose_name = _('Order tax detail')
verbose_name_plural = _('Order tax details')
ordering = ('id',)
import config
import listeners
listeners.start_default_listening()
| |
# mininode.py - YxomCoin P2P network half-a-node
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# This python code was modified from ArtForz' public domain half-a-node, as
# found in the mini-node branch of http://github.com/jgarzik/pynode.
#
# NodeConn: an object which manages p2p connectivity to a yxomcoin node
# NodeConnCB: a base class that describes the interface for receiving
# callbacks with network messages from a NodeConn
# CBlock, CTransaction, CBlockHeader, CTxIn, CTxOut, etc....:
# data structures that should map to corresponding structures in
# yxomcoin/primitives
# msg_block, msg_tx, msg_headers, etc.:
# data structures that represent network messages
# ser_*, deser_*: functions that handle serialization/deserialization
import struct
import socket
import asyncore
import time
import sys
import random
from binascii import hexlify, unhexlify
from io import BytesIO
from codecs import encode
import hashlib
from threading import RLock
from threading import Thread
import logging
import copy
import yxomcoin_hash
# Protocol-level constants for the half-a-node.
BIP0031_VERSION = 60000  # first version with BIP 31 pong support
MY_VERSION = 70206 # current MIN_PEER_PROTO_VERSION
MY_SUBVERSION = b"/python-mininode-tester:0.0.2/"
MAX_INV_SZ = 50000  # cap on entries per inv message
MAX_BLOCK_SIZE = 1000000
COIN = 100000000L # 1 btc in satoshis
# Keep our own socket map for asyncore, so that we can track disconnects
# ourselves (to workaround an issue with closing an asyncore socket when
# using select)
mininode_socket_map = dict()
# One lock for synchronizing all data access between the networking thread (see
# NetworkThread below) and the thread running the test logic. For simplicity,
# NodeConn acquires this lock whenever delivering a message to a NodeConnCB,
# and whenever adding anything to the send buffer (in send_message()). This
# lock should be acquired in the thread running the test logic to synchronize
# access to any data shared with the NodeConnCB or NodeConn.
mininode_lock = RLock()
# Serialization/deserialization tools
def sha256(s):
    """Single SHA-256 digest of s (bytes in, 32 raw bytes out)."""
    return hashlib.sha256(s).digest()
def hash256(s):
    """Double SHA-256, the standard bitcoin-style block/tx hash."""
    first_round = hashlib.sha256(s).digest()
    return hashlib.sha256(first_round).digest()
def yxomcoinhash(s):
    """Proof-of-work hash, delegated to the external yxomcoin_hash extension."""
    return yxomcoin_hash.getPoWHash(s)
def deser_string(f):
    """Read a var_str from f: CompactSize length prefix, then the payload."""
    marker = struct.unpack("<B", f.read(1))[0]
    # 253/254/255 escape to 2-, 4- and 8-byte little-endian lengths.
    widths = {253: "<H", 254: "<I", 255: "<Q"}
    if marker in widths:
        fmt = widths[marker]
        length = struct.unpack(fmt, f.read(struct.calcsize(fmt)))[0]
    else:
        length = marker
    return f.read(length)
def ser_string(s):
    """Serialize bytes as a var_str: CompactSize length prefix + payload."""
    length = len(s)
    if length < 253:
        prefix = struct.pack("B", length)
    elif length < 0x10000:
        prefix = struct.pack("<BH", 253, length)
    elif length < 0x100000000:
        prefix = struct.pack("<BI", 254, length)
    else:
        prefix = struct.pack("<BQ", 255, length)
    return prefix + s
def deser_uint256(f):
    """Read a 256-bit integer stored as eight little-endian 32-bit words."""
    value = 0
    for shift in range(0, 256, 32):
        value |= struct.unpack("<I", f.read(4))[0] << shift
    return value
def ser_uint256(u):
    """Serialize an integer into 32 bytes as eight little-endian 32-bit words."""
    words = []
    for _ in range(8):
        words.append(struct.pack("<I", u & 0xFFFFFFFF))
        u >>= 32
    return b"".join(words)
def uint256_from_str(s):
    """Interpret the first 32 bytes of s as a little-endian 256-bit integer."""
    value = 0
    for shift, word in enumerate(struct.unpack("<IIIIIIII", s[:32])):
        value += word << (32 * shift)
    return value
def uint256_from_compact(c):
    """Expand a compact-bits target: low 3 bytes are the mantissa, the top
    byte is the size in bytes of the full target."""
    nbytes = (c >> 24) & 0xFF
    mantissa = c & 0xFFFFFF
    return mantissa << (8 * (nbytes - 3))
def deser_vector(f, c):
    """Read a CompactSize count, then that many c() objects via .deserialize(f)."""
    count = struct.unpack("<B", f.read(1))[0]
    if count == 253:
        count = struct.unpack("<H", f.read(2))[0]
    elif count == 254:
        count = struct.unpack("<I", f.read(4))[0]
    elif count == 255:
        count = struct.unpack("<Q", f.read(8))[0]
    items = []
    while len(items) < count:
        obj = c()
        obj.deserialize(f)
        items.append(obj)
    return items
def ser_vector(l):
    """Serialize a list of objects: CompactSize count, then each .serialize()."""
    count = len(l)
    if count < 253:
        out = struct.pack("B", count)
    elif count < 0x10000:
        out = struct.pack("<BH", 253, count)
    elif count < 0x100000000:
        out = struct.pack("<BI", 254, count)
    else:
        out = struct.pack("<BQ", 255, count)
    for item in l:
        out += item.serialize()
    return out
def deser_uint256_vector(f):
    """Read a CompactSize count, then that many 256-bit little-endian ints."""
    count = struct.unpack("<B", f.read(1))[0]
    if count == 253:
        count = struct.unpack("<H", f.read(2))[0]
    elif count == 254:
        count = struct.unpack("<I", f.read(4))[0]
    elif count == 255:
        count = struct.unpack("<Q", f.read(8))[0]
    return [deser_uint256(f) for _ in range(count)]
def ser_uint256_vector(l):
    """Serialize a list of 256-bit ints: CompactSize count + 32 bytes each."""
    count = len(l)
    if count < 253:
        out = struct.pack("B", count)
    elif count < 0x10000:
        out = struct.pack("<BH", 253, count)
    elif count < 0x100000000:
        out = struct.pack("<BI", 254, count)
    else:
        out = struct.pack("<BQ", 255, count)
    for value in l:
        out += ser_uint256(value)
    return out
def deser_string_vector(f):
    """Read a CompactSize count, then that many var_str entries."""
    count = struct.unpack("<B", f.read(1))[0]
    if count == 253:
        count = struct.unpack("<H", f.read(2))[0]
    elif count == 254:
        count = struct.unpack("<I", f.read(4))[0]
    elif count == 255:
        count = struct.unpack("<Q", f.read(8))[0]
    return [deser_string(f) for _ in range(count)]
def ser_string_vector(l):
    """Serialize a list of byte strings: CompactSize count + each as a var_str."""
    count = len(l)
    if count < 253:
        out = struct.pack("B", count)
    elif count < 0x10000:
        out = struct.pack("<BH", 253, count)
    elif count < 0x100000000:
        out = struct.pack("<BI", 254, count)
    else:
        out = struct.pack("<BQ", 255, count)
    for entry in l:
        out += ser_string(entry)
    return out
def deser_int_vector(f):
    """Read a CompactSize count, then that many little-endian int32 values."""
    count = struct.unpack("<B", f.read(1))[0]
    if count == 253:
        count = struct.unpack("<H", f.read(2))[0]
    elif count == 254:
        count = struct.unpack("<I", f.read(4))[0]
    elif count == 255:
        count = struct.unpack("<Q", f.read(8))[0]
    return [struct.unpack("<i", f.read(4))[0] for _ in range(count)]
def ser_int_vector(l):
    """Serialize a list of ints: CompactSize count, then each as int32 LE."""
    count = len(l)
    if count < 253:
        out = struct.pack("B", count)
    elif count < 0x10000:
        out = struct.pack("<BH", 253, count)
    elif count < 0x100000000:
        out = struct.pack("<BI", 254, count)
    else:
        out = struct.pack("<BQ", 255, count)
    for i in l:
        out += struct.pack("<i", i)
    return out
# Deserialize from a hex string representation (eg from RPC)
def FromHex(obj, hex_string):
    """Populate obj by deserializing hex-encoded wire bytes; returns obj."""
    raw = unhexlify(hex_string.encode('ascii'))
    obj.deserialize(BytesIO(raw))
    return obj
# Convert a binary-serializable object to hex (eg for submission via RPC)
def ToHex(obj):
    """Hex-encode obj.serialize() as an ASCII str (eg for submission via RPC)."""
    wire_bytes = obj.serialize()
    return hexlify(wire_bytes).decode('ascii')
# Objects that map to yxomcoind objects, which can be serialized/deserialized
class CAddress(object):
    """Network address record: services, IPv4-in-IPv6 prefix, ip, port."""
    def __init__(self):
        self.nServices = 1
        self.pchReserved = b"\x00" * 10 + b"\xff" * 2
        self.ip = "0.0.0.0"
        self.port = 0
    def deserialize(self, f):
        """Read the 26-byte wire form (services, reserved, IPv4, big-endian port)."""
        self.nServices = struct.unpack("<Q", f.read(8))[0]
        self.pchReserved = f.read(12)
        self.ip = socket.inet_ntoa(f.read(4))
        self.port = struct.unpack(">H", f.read(2))[0]
    def serialize(self):
        """Return the 26-byte wire form."""
        parts = [
            struct.pack("<Q", self.nServices),
            self.pchReserved,
            socket.inet_aton(self.ip),
            struct.pack(">H", self.port),
        ]
        return b"".join(parts)
    def __repr__(self):
        return "CAddress(nServices=%i ip=%s port=%i)" % (self.nServices,
                                                         self.ip, self.port)
class CInv(object):
    # Inventory item: announces an object (transaction or block) by type
    # code and uint256 hash.
    typemap = {
        0: "Error",
        1: "TX",
        2: "Block"}
    def __init__(self, t=0, h=0L):
        self.type = t
        self.hash = h
    def deserialize(self, f):
        # int32 type followed by a 32-byte little-endian uint256 hash.
        self.type = struct.unpack("<i", f.read(4))[0]
        self.hash = deser_uint256(f)
    def serialize(self):
        r = b""
        r += struct.pack("<i", self.type)
        r += ser_uint256(self.hash)
        return r
    def __repr__(self):
        return "CInv(type=%s hash=%064x)" \
            % (self.typemap[self.type], self.hash)
class CBlockLocator(object):
    # Block locator: protocol version plus a list of block hashes, used by
    # getblocks/getheaders to describe the sender's view of the chain.
    def __init__(self):
        self.nVersion = MY_VERSION
        self.vHave = []
    def deserialize(self, f):
        self.nVersion = struct.unpack("<i", f.read(4))[0]
        self.vHave = deser_uint256_vector(f)
    def serialize(self):
        r = b""
        r += struct.pack("<i", self.nVersion)
        r += ser_uint256_vector(self.vHave)
        return r
    def __repr__(self):
        return "CBlockLocator(nVersion=%i vHave=%s)" \
            % (self.nVersion, repr(self.vHave))
class COutPoint(object):
    """Reference to a transaction output: (txid as uint256, output index n)."""
    def __init__(self, hash=0, n=0):
        self.hash = hash
        self.n = n
    def deserialize(self, f):
        """Read a uint256 txid followed by a u32 LE output index."""
        self.hash = deser_uint256(f)
        (self.n,) = struct.unpack("<I", f.read(4))
    def serialize(self):
        """Return the serialized outpoint (inverse of deserialize)."""
        return ser_uint256(self.hash) + struct.pack("<I", self.n)
    def __repr__(self):
        return "COutPoint(hash=%064x n=%i)" % (self.hash, self.n)
class CTxIn(object):
    # Transaction input: previous outpoint, scriptSig bytes, and sequence.
    def __init__(self, outpoint=None, scriptSig=b"", nSequence=0):
        if outpoint is None:
            self.prevout = COutPoint()
        else:
            self.prevout = outpoint
        self.scriptSig = scriptSig
        self.nSequence = nSequence
    def deserialize(self, f):
        self.prevout = COutPoint()
        self.prevout.deserialize(f)
        self.scriptSig = deser_string(f)
        self.nSequence = struct.unpack("<I", f.read(4))[0]
    def serialize(self):
        r = b""
        r += self.prevout.serialize()
        r += ser_string(self.scriptSig)
        r += struct.pack("<I", self.nSequence)
        return r
    def __repr__(self):
        return "CTxIn(prevout=%s scriptSig=%s nSequence=%i)" \
            % (repr(self.prevout), hexlify(self.scriptSig),
               self.nSequence)
class CTxOut(object):
    # Transaction output: value (in base units, i64) and scriptPubKey bytes.
    def __init__(self, nValue=0, scriptPubKey=b""):
        self.nValue = nValue
        self.scriptPubKey = scriptPubKey
    def deserialize(self, f):
        self.nValue = struct.unpack("<q", f.read(8))[0]
        self.scriptPubKey = deser_string(f)
    def serialize(self):
        r = b""
        r += struct.pack("<q", self.nValue)
        r += ser_string(self.scriptPubKey)
        return r
    def __repr__(self):
        # Renders nValue as whole coins + fractional part using COIN.
        return "CTxOut(nValue=%i.%08i scriptPubKey=%s)" \
            % (self.nValue // COIN, self.nValue % COIN,
               hexlify(self.scriptPubKey))
class CTransaction(object):
    # Transaction: version, inputs, outputs, lock time.
    # sha256/hash cache the txid (double-SHA256 of the serialization); they
    # are reset to None whenever the contents may have changed.
    def __init__(self, tx=None):
        if tx is None:
            self.nVersion = 1
            self.vin = []
            self.vout = []
            self.nLockTime = 0
            self.sha256 = None
            self.hash = None
        else:
            # Copy constructor: deep-copies the input/output lists so the
            # copy can be mutated independently.
            self.nVersion = tx.nVersion
            self.vin = copy.deepcopy(tx.vin)
            self.vout = copy.deepcopy(tx.vout)
            self.nLockTime = tx.nLockTime
            self.sha256 = None
            self.hash = None
    def deserialize(self, f):
        self.nVersion = struct.unpack("<i", f.read(4))[0]
        self.vin = deser_vector(f, CTxIn)
        self.vout = deser_vector(f, CTxOut)
        self.nLockTime = struct.unpack("<I", f.read(4))[0]
        self.sha256 = None
        self.hash = None
    def serialize(self):
        r = b""
        r += struct.pack("<i", self.nVersion)
        r += ser_vector(self.vin)
        r += ser_vector(self.vout)
        r += struct.pack("<I", self.nLockTime)
        return r
    def rehash(self):
        # Force recomputation of the cached txid.
        self.sha256 = None
        self.calc_sha256()
    def calc_sha256(self):
        # Populate the txid cache if stale; hash is the big-endian hex form.
        if self.sha256 is None:
            self.sha256 = uint256_from_str(hash256(self.serialize()))
        self.hash = encode(hash256(self.serialize())[::-1], 'hex_codec').decode('ascii')
    def is_valid(self):
        # Sanity check on output values only (not scripts or signatures).
        # 21000000 * COIN is Bitcoin's MAX_MONEY — presumably inherited
        # unchanged for this coin; TODO confirm.
        self.calc_sha256()
        for tout in self.vout:
            if tout.nValue < 0 or tout.nValue > 21000000 * COIN:
                return False
        return True
    def __repr__(self):
        return "CTransaction(nVersion=%i vin=%s vout=%s nLockTime=%i)" \
            % (self.nVersion, repr(self.vin), repr(self.vout), self.nLockTime)
class CBlockHeader(object):
    # Block header: version, previous-block hash, merkle root, time, bits,
    # nonce. sha256/hash lazily cache the proof-of-work hash of the header
    # and are invalidated (set to None) whenever the fields change.
    def __init__(self, header=None):
        if header is None:
            self.set_null()
        else:
            # Copy constructor: duplicate every field, then make sure the
            # copy's hash cache is populated.
            self.nVersion = header.nVersion
            self.hashPrevBlock = header.hashPrevBlock
            self.hashMerkleRoot = header.hashMerkleRoot
            self.nTime = header.nTime
            self.nBits = header.nBits
            self.nNonce = header.nNonce
            self.sha256 = header.sha256
            self.hash = header.hash
            self.calc_sha256()
    def set_null(self):
        # Reset all fields to their defaults and clear the hash cache.
        self.nVersion = 1
        self.hashPrevBlock = 0
        self.hashMerkleRoot = 0
        self.nTime = 0
        self.nBits = 0
        self.nNonce = 0
        self.sha256 = None
        self.hash = None
    def deserialize(self, f):
        # Parse the 80-byte wire-format header and invalidate the cache.
        self.nVersion = struct.unpack("<i", f.read(4))[0]
        self.hashPrevBlock = deser_uint256(f)
        self.hashMerkleRoot = deser_uint256(f)
        self.nTime = struct.unpack("<I", f.read(4))[0]
        self.nBits = struct.unpack("<I", f.read(4))[0]
        self.nNonce = struct.unpack("<I", f.read(4))[0]
        self.sha256 = None
        self.hash = None
    def serialize(self):
        # Return the 80-byte wire-format header.
        r = b""
        r += struct.pack("<i", self.nVersion)
        r += ser_uint256(self.hashPrevBlock)
        r += ser_uint256(self.hashMerkleRoot)
        r += struct.pack("<I", self.nTime)
        r += struct.pack("<I", self.nBits)
        r += struct.pack("<I", self.nNonce)
        return r
    def calc_sha256(self):
        # Populate the hash cache if stale. The original duplicated the six
        # pack/ser calls from serialize() inline; reuse them via an explicit
        # base-class call. It must be CBlockHeader.serialize(self) — NOT
        # self.serialize() — because CBlock overrides serialize() to append
        # transactions, and the block hash covers the 80-byte header only.
        if self.sha256 is None:
            r = CBlockHeader.serialize(self)
            self.sha256 = uint256_from_str(yxomcoinhash(r))
            self.hash = encode(yxomcoinhash(r)[::-1], 'hex_codec').decode('ascii')
    def rehash(self):
        # Invalidate and recompute the cached hash; return the new sha256.
        self.sha256 = None
        self.calc_sha256()
        return self.sha256
    def __repr__(self):
        return "CBlockHeader(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x nTime=%s nBits=%08x nNonce=%08x)" \
            % (self.nVersion, self.hashPrevBlock, self.hashMerkleRoot,
               time.ctime(self.nTime), self.nBits, self.nNonce)
class CBlock(CBlockHeader):
    # Full block: header fields (inherited) plus the transaction list.
    def __init__(self, header=None):
        super(CBlock, self).__init__(header)
        self.vtx = []
    def deserialize(self, f):
        super(CBlock, self).deserialize(f)
        self.vtx = deser_vector(f, CTransaction)
    def serialize(self):
        r = b""
        r += super(CBlock, self).serialize()
        r += ser_vector(self.vtx)
        return r
    def calc_merkle_root(self):
        # Standard Bitcoin merkle tree: pair up txids level by level; when a
        # level has an odd count, i2 == len-1 duplicates the last hash.
        hashes = []
        for tx in self.vtx:
            tx.calc_sha256()
            hashes.append(ser_uint256(tx.sha256))
        while len(hashes) > 1:
            newhashes = []
            for i in xrange(0, len(hashes), 2):
                i2 = min(i+1, len(hashes)-1)
                newhashes.append(hash256(hashes[i] + hashes[i2]))
            hashes = newhashes
        return uint256_from_str(hashes[0])
    def is_valid(self):
        # Checks proof of work, every transaction's sanity, and that the
        # header's merkle root matches the transactions.
        self.calc_sha256()
        target = uint256_from_compact(self.nBits)
        if self.sha256 > target:
            return False
        for tx in self.vtx:
            if not tx.is_valid():
                return False
        if self.calc_merkle_root() != self.hashMerkleRoot:
            return False
        return True
    def solve(self):
        # Brute-force the nonce until the header hash meets the target.
        self.rehash()
        target = uint256_from_compact(self.nBits)
        while self.sha256 > target:
            self.nNonce += 1
            self.rehash()
    def __repr__(self):
        return "CBlock(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x nTime=%s nBits=%08x nNonce=%08x vtx=%s)" \
            % (self.nVersion, self.hashPrevBlock, self.hashMerkleRoot,
               time.ctime(self.nTime), self.nBits, self.nNonce, repr(self.vtx))
class CUnsignedAlert(object):
    # Payload of a network alert message (before signing): validity window,
    # cancellation sets, targeted version range, and display strings.
    def __init__(self):
        self.nVersion = 1
        self.nRelayUntil = 0
        self.nExpiration = 0
        self.nID = 0
        self.nCancel = 0
        self.setCancel = []
        self.nMinVer = 0
        self.nMaxVer = 0
        self.setSubVer = []
        self.nPriority = 0
        self.strComment = b""
        self.strStatusBar = b""
        self.strReserved = b""
    def deserialize(self, f):
        self.nVersion = struct.unpack("<i", f.read(4))[0]
        self.nRelayUntil = struct.unpack("<q", f.read(8))[0]
        self.nExpiration = struct.unpack("<q", f.read(8))[0]
        self.nID = struct.unpack("<i", f.read(4))[0]
        self.nCancel = struct.unpack("<i", f.read(4))[0]
        self.setCancel = deser_int_vector(f)
        self.nMinVer = struct.unpack("<i", f.read(4))[0]
        self.nMaxVer = struct.unpack("<i", f.read(4))[0]
        self.setSubVer = deser_string_vector(f)
        self.nPriority = struct.unpack("<i", f.read(4))[0]
        self.strComment = deser_string(f)
        self.strStatusBar = deser_string(f)
        self.strReserved = deser_string(f)
    def serialize(self):
        # Field order must exactly mirror deserialize().
        r = b""
        r += struct.pack("<i", self.nVersion)
        r += struct.pack("<q", self.nRelayUntil)
        r += struct.pack("<q", self.nExpiration)
        r += struct.pack("<i", self.nID)
        r += struct.pack("<i", self.nCancel)
        r += ser_int_vector(self.setCancel)
        r += struct.pack("<i", self.nMinVer)
        r += struct.pack("<i", self.nMaxVer)
        r += ser_string_vector(self.setSubVer)
        r += struct.pack("<i", self.nPriority)
        r += ser_string(self.strComment)
        r += ser_string(self.strStatusBar)
        r += ser_string(self.strReserved)
        return r
    def __repr__(self):
        return "CUnsignedAlert(nVersion %d, nRelayUntil %d, nExpiration %d, nID %d, nCancel %d, nMinVer %d, nMaxVer %d, nPriority %d, strComment %s, strStatusBar %s, strReserved %s)" \
            % (self.nVersion, self.nRelayUntil, self.nExpiration, self.nID,
               self.nCancel, self.nMinVer, self.nMaxVer, self.nPriority,
               self.strComment, self.strStatusBar, self.strReserved)
class CAlert(object):
    # Signed alert: the serialized CUnsignedAlert payload plus its signature,
    # both as opaque length-prefixed byte strings.
    def __init__(self):
        self.vchMsg = b""
        self.vchSig = b""
    def deserialize(self, f):
        self.vchMsg = deser_string(f)
        self.vchSig = deser_string(f)
    def serialize(self):
        r = b""
        r += ser_string(self.vchMsg)
        r += ser_string(self.vchSig)
        return r
    def __repr__(self):
        return "CAlert(vchMsg.sz %d, vchSig.sz %d)" \
            % (len(self.vchMsg), len(self.vchSig))
# Objects that correspond to messages on the wire
class msg_version(object):
    # version handshake message. Field presence depends on the advertised
    # protocol version (gates at 106 and 209, mirroring legacy Bitcoin).
    command = b"version"
    def __init__(self):
        self.nVersion = MY_VERSION
        self.nServices = 1
        self.nTime = int(time.time())
        self.addrTo = CAddress()
        self.addrFrom = CAddress()
        self.nNonce = random.getrandbits(64)
        self.strSubVer = MY_SUBVERSION
        self.nStartingHeight = -1
    def deserialize(self, f):
        self.nVersion = struct.unpack("<i", f.read(4))[0]
        # Historical quirk: clients advertising 10300 actually speak 300.
        if self.nVersion == 10300:
            self.nVersion = 300
        self.nServices = struct.unpack("<Q", f.read(8))[0]
        self.nTime = struct.unpack("<q", f.read(8))[0]
        self.addrTo = CAddress()
        self.addrTo.deserialize(f)
        # addrFrom/nNonce/strSubVer only exist from protocol version 106 on;
        # nStartingHeight only from 209 on. Missing fields are set to None.
        if self.nVersion >= 106:
            self.addrFrom = CAddress()
            self.addrFrom.deserialize(f)
            self.nNonce = struct.unpack("<Q", f.read(8))[0]
            self.strSubVer = deser_string(f)
            if self.nVersion >= 209:
                self.nStartingHeight = struct.unpack("<i", f.read(4))[0]
            else:
                self.nStartingHeight = None
        else:
            self.addrFrom = None
            self.nNonce = None
            self.strSubVer = None
            self.nStartingHeight = None
    def serialize(self):
        # Always serializes the full (modern) field set.
        r = b""
        r += struct.pack("<i", self.nVersion)
        r += struct.pack("<Q", self.nServices)
        r += struct.pack("<q", self.nTime)
        r += self.addrTo.serialize()
        r += self.addrFrom.serialize()
        r += struct.pack("<Q", self.nNonce)
        r += ser_string(self.strSubVer)
        r += struct.pack("<i", self.nStartingHeight)
        return r
    def __repr__(self):
        return 'msg_version(nVersion=%i nServices=%i nTime=%s addrTo=%s addrFrom=%s nNonce=0x%016X strSubVer=%s nStartingHeight=%i)' \
            % (self.nVersion, self.nServices, time.ctime(self.nTime),
               repr(self.addrTo), repr(self.addrFrom), self.nNonce,
               self.strSubVer, self.nStartingHeight)
class msg_verack(object):
    """verack message: empty-payload acknowledgement of a version message."""
    command = b"verack"
    def __init__(self):
        pass
    def deserialize(self, f):
        # No payload to parse.
        pass
    def serialize(self):
        # No payload to emit.
        return b""
    def __repr__(self):
        return "msg_verack()"
class msg_addr(object):
    # addr message: a vector of known peer addresses (CAddress).
    command = b"addr"
    def __init__(self):
        self.addrs = []
    def deserialize(self, f):
        self.addrs = deser_vector(f, CAddress)
    def serialize(self):
        return ser_vector(self.addrs)
    def __repr__(self):
        return "msg_addr(addrs=%s)" % (repr(self.addrs))
class msg_alert(object):
    # alert message: wraps a single signed CAlert.
    command = b"alert"
    def __init__(self):
        self.alert = CAlert()
    def deserialize(self, f):
        self.alert = CAlert()
        self.alert.deserialize(f)
    def serialize(self):
        r = b""
        r += self.alert.serialize()
        return r
    def __repr__(self):
        return "msg_alert(alert=%s)" % (repr(self.alert), )
class msg_inv(object):
    # inv message: announces objects (tx/block) as a vector of CInv.
    command = b"inv"
    def __init__(self, inv=None):
        # None default avoids sharing one mutable list across instances.
        if inv is None:
            self.inv = []
        else:
            self.inv = inv
    def deserialize(self, f):
        self.inv = deser_vector(f, CInv)
    def serialize(self):
        return ser_vector(self.inv)
    def __repr__(self):
        return "msg_inv(inv=%s)" % (repr(self.inv))
class msg_getdata(object):
    # getdata message: requests the objects named in a vector of CInv.
    command = b"getdata"
    def __init__(self, inv=None):
        # Idiom fix: compare to None with `is not`, not `!=` (the original
        # used `inv != None`, which invokes __ne__ on arbitrary objects).
        self.inv = inv if inv is not None else []
    def deserialize(self, f):
        self.inv = deser_vector(f, CInv)
    def serialize(self):
        return ser_vector(self.inv)
    def __repr__(self):
        return "msg_getdata(inv=%s)" % (repr(self.inv))
class msg_getblocks(object):
    # getblocks message: block locator plus a stop hash (0 = no limit).
    command = b"getblocks"
    def __init__(self):
        self.locator = CBlockLocator()
        self.hashstop = 0L
    def deserialize(self, f):
        self.locator = CBlockLocator()
        self.locator.deserialize(f)
        self.hashstop = deser_uint256(f)
    def serialize(self):
        r = b""
        r += self.locator.serialize()
        r += ser_uint256(self.hashstop)
        return r
    def __repr__(self):
        return "msg_getblocks(locator=%s hashstop=%064x)" \
            % (repr(self.locator), self.hashstop)
class msg_tx(object):
    # tx message: relays a single transaction.
    command = b"tx"
    def __init__(self, tx=None):
        # Bug fix: the original signature was `tx=CTransaction()`, a mutable
        # default evaluated once at class-definition time, so every msg_tx()
        # created without an argument shared (and mutated, via deserialize)
        # the same CTransaction instance.
        self.tx = tx if tx is not None else CTransaction()
    def deserialize(self, f):
        self.tx.deserialize(f)
    def serialize(self):
        return self.tx.serialize()
    def __repr__(self):
        return "msg_tx(tx=%s)" % (repr(self.tx))
class msg_block(object):
    """block message: relays a full block (header + transactions)."""
    command = b"block"
    def __init__(self, block=None):
        # A fresh CBlock is built only when no block is supplied.
        self.block = CBlock() if block is None else block
    def deserialize(self, f):
        self.block.deserialize(f)
    def serialize(self):
        return self.block.serialize()
    def __repr__(self):
        return "msg_block(block=%s)" % (repr(self.block))
class msg_getaddr(object):
    """getaddr message: empty payload requesting known peer addresses."""
    command = b"getaddr"
    def __init__(self):
        pass
    def deserialize(self, f):
        # No payload to parse.
        pass
    def serialize(self):
        # No payload to emit.
        return b""
    def __repr__(self):
        return "msg_getaddr()"
class msg_ping_prebip31(object):
    """Nonce-less ping used by peers older than BIP 31."""
    command = b"ping"
    def __init__(self):
        pass
    def deserialize(self, f):
        # Pre-BIP31 pings carry no payload.
        pass
    def serialize(self):
        return b""
    def __repr__(self):
        return "msg_ping() (pre-bip31)"
class msg_ping(object):
    # ping message carrying a u64 nonce (BIP 31); answered by msg_pong
    # echoing the same nonce.
    command = b"ping"
    def __init__(self, nonce=0L):
        self.nonce = nonce
    def deserialize(self, f):
        self.nonce = struct.unpack("<Q", f.read(8))[0]
    def serialize(self):
        r = b""
        r += struct.pack("<Q", self.nonce)
        return r
    def __repr__(self):
        return "msg_ping(nonce=%08x)" % self.nonce
class msg_pong(object):
    """pong message: echoes the u64 nonce of the ping being answered."""
    command = b"pong"
    def __init__(self, nonce=0):
        self.nonce = nonce
    def deserialize(self, f):
        (self.nonce,) = struct.unpack("<Q", f.read(8))
    def serialize(self):
        return struct.pack("<Q", self.nonce)
    def __repr__(self):
        return "msg_pong(nonce=%08x)" % self.nonce
class msg_mempool(object):
    """mempool message: empty payload requesting the node's mempool contents."""
    command = b"mempool"
    def __init__(self):
        pass
    def deserialize(self, f):
        # No payload to parse.
        pass
    def serialize(self):
        return b""
    def __repr__(self):
        return "msg_mempool()"
class msg_sendheaders(object):
    """sendheaders message: empty payload asking for headers announcements."""
    command = b"sendheaders"
    def __init__(self):
        pass
    def deserialize(self, f):
        # No payload to parse.
        pass
    def serialize(self):
        return b""
    def __repr__(self):
        return "msg_sendheaders()"
# getheaders message has
# number of entries
# vector of hashes
# hash_stop (hash of last desired block header, 0 to get as many as possible)
class msg_getheaders(object):
    # getheaders message: block locator plus a stop hash (0 = as many as
    # possible); same shape as msg_getblocks.
    command = b"getheaders"
    def __init__(self):
        self.locator = CBlockLocator()
        self.hashstop = 0L
    def deserialize(self, f):
        self.locator = CBlockLocator()
        self.locator.deserialize(f)
        self.hashstop = deser_uint256(f)
    def serialize(self):
        r = b""
        r += self.locator.serialize()
        r += ser_uint256(self.hashstop)
        return r
    def __repr__(self):
        return "msg_getheaders(locator=%s, stop=%064x)" \
            % (repr(self.locator), self.hashstop)
# headers message has
# <count> <vector of block headers>
class msg_headers(object):
    # headers message: <count> <vector of block headers>.
    command = b"headers"
    def __init__(self):
        self.headers = []
    def deserialize(self, f):
        # comment in yxomcoind indicates these should be deserialized as blocks
        blocks = deser_vector(f, CBlock)
        # Bug fix: rebuild self.headers instead of appending to it — the
        # original accumulated headers across repeated deserialize() calls
        # on the same msg_headers object.
        self.headers = [CBlockHeader(b) for b in blocks]
    def serialize(self):
        blocks = [CBlock(x) for x in self.headers]
        return ser_vector(blocks)
    def __repr__(self):
        return "msg_headers(headers=%s)" % repr(self.headers)
class msg_reject(object):
    # reject message (BIP 61): rejected command name, reason code, human
    # readable reason, and — for block/tx rejects — the offending hash.
    command = b"reject"
    # Reason code for unparsable messages; such rejects carry no data hash.
    REJECT_MALFORMED = 1
    def __init__(self):
        self.message = b""
        self.code = 0
        self.reason = b""
        self.data = 0L
    def deserialize(self, f):
        self.message = deser_string(f)
        self.code = struct.unpack("<B", f.read(1))[0]
        self.reason = deser_string(f)
        # The trailing uint256 is only present for non-malformed block/tx
        # rejects.
        if (self.code != self.REJECT_MALFORMED and
            (self.message == b"block" or self.message == b"tx")):
            self.data = deser_uint256(f)
    def serialize(self):
        r = ser_string(self.message)
        r += struct.pack("<B", self.code)
        r += ser_string(self.reason)
        if (self.code != self.REJECT_MALFORMED and
            (self.message == b"block" or self.message == b"tx")):
            r += ser_uint256(self.data)
        return r
    def __repr__(self):
        return "msg_reject: %s %d %s [%064x]" \
            % (self.message, self.code, self.reason, self.data)
# Helper function
def wait_until(predicate, attempts=float('inf'), timeout=float('inf')):
    """Poll *predicate* (under mininode_lock) every 0.05s until it is True.

    Returns True as soon as the predicate holds; returns False once either
    *attempts* polls have been made or *timeout* seconds have elapsed.
    """
    attempt = 0
    start = time.time()
    # Bug fix: measure the timeout against the wall clock. The original
    # added a fixed 0.05 per iteration, which under-counts real elapsed
    # time whenever predicate() itself takes non-trivial time.
    while attempt < attempts and time.time() - start < timeout:
        with mininode_lock:
            if predicate():
                return True
        attempt += 1
        time.sleep(0.05)
    return False
# This is what a callback should look like for NodeConn
# Reimplement the on_* functions to provide handling for events
class NodeConnCB(object):
    # Base callback object for NodeConn. deliver() dispatches each received
    # message to the matching on_<command> method; subclasses override the
    # on_* handlers they care about.
    def __init__(self):
        self.verack_received = False
        # deliver_sleep_time is helpful for debugging race conditions in p2p
        # tests; it causes message delivery to sleep for the specified time
        # before acquiring the global lock and delivering the next message.
        self.deliver_sleep_time = None
    def set_deliver_sleep_time(self, value):
        with mininode_lock:
            self.deliver_sleep_time = value
    def get_deliver_sleep_time(self):
        with mininode_lock:
            return self.deliver_sleep_time
    # Spin until verack message is received from the node.
    # Tests may want to use this as a signal that the test can begin.
    # This can be called from the testing thread, so it needs to acquire the
    # global lock.
    def wait_for_verack(self):
        while True:
            with mininode_lock:
                if self.verack_received:
                    return
            time.sleep(0.05)
    def deliver(self, conn, message):
        # Dispatch to on_<command>; exceptions from handlers are reported
        # but swallowed so one bad message does not kill the network thread.
        deliver_sleep = self.get_deliver_sleep_time()
        if deliver_sleep is not None:
            time.sleep(deliver_sleep)
        with mininode_lock:
            try:
                getattr(self, 'on_' + message.command)(conn, message)
            except:
                print "ERROR delivering %s (%s)" % (repr(message),
                                                    sys.exc_info()[0])
    def on_version(self, conn, message):
        # Acknowledge modern peers and record the negotiated send version;
        # pre-209 peers skip the verack handshake entirely.
        if message.nVersion >= 209:
            conn.send_message(msg_verack())
        conn.ver_send = min(MY_VERSION, message.nVersion)
        if message.nVersion < 209:
            conn.ver_recv = conn.ver_send
    def on_verack(self, conn, message):
        conn.ver_recv = conn.ver_send
        self.verack_received = True
    def on_inv(self, conn, message):
        # Default behavior: request every announced object of known type.
        want = msg_getdata()
        for i in message.inv:
            if i.type != 0:
                want.inv.append(i)
        if len(want.inv):
            conn.send_message(want)
    def on_addr(self, conn, message): pass
    def on_alert(self, conn, message): pass
    def on_getdata(self, conn, message): pass
    def on_getblocks(self, conn, message): pass
    def on_tx(self, conn, message): pass
    def on_block(self, conn, message): pass
    def on_getaddr(self, conn, message): pass
    def on_headers(self, conn, message): pass
    def on_getheaders(self, conn, message): pass
    def on_ping(self, conn, message):
        # Only BIP31-capable peers expect a pong with the echoed nonce.
        if conn.ver_send > BIP0031_VERSION:
            conn.send_message(msg_pong(message.nonce))
    def on_reject(self, conn, message): pass
    def on_close(self, conn): pass
    def on_mempool(self, conn): pass
    def on_pong(self, conn, message): pass
# More useful callbacks and functions for NodeConnCB's which have a single NodeConn
class SingleNodeConnCB(NodeConnCB):
    # Convenience callbacks for tests that drive exactly one NodeConn.
    def __init__(self):
        NodeConnCB.__init__(self)
        self.connection = None
        self.ping_counter = 1
        self.last_pong = msg_pong()
    def add_connection(self, conn):
        self.connection = conn
    # Wrapper for the NodeConn's send_message function
    def send_message(self, message):
        self.connection.send_message(message)
    def on_pong(self, conn, message):
        self.last_pong = message
    # Sync up with the node: send a ping and wait for the matching pong.
    def sync_with_ping(self, timeout=30):
        def received_pong():
            return (self.last_pong.nonce == self.ping_counter)
        self.send_message(msg_ping(nonce=self.ping_counter))
        # Bug fix: pass timeout by keyword. wait_until's second positional
        # parameter is *attempts*, so the original wait_until(received_pong,
        # timeout) capped the number of 0.05s polls at `timeout` (i.e. ~1.5s
        # for the default 30) instead of waiting `timeout` seconds.
        success = wait_until(received_pong, timeout=timeout)
        self.ping_counter += 1
        return success
# The actual NodeConn class
# This class provides an interface for a p2p connection to a specified node
class NodeConn(asyncore.dispatcher):
    # One p2p connection to a node, driven by the asyncore event loop.
    # Handles wire framing (magic + command + length [+ checksum]) and
    # dispatches parsed messages to the callback object (a NodeConnCB).
    messagemap = {
        b"version": msg_version,
        b"verack": msg_verack,
        b"addr": msg_addr,
        b"alert": msg_alert,
        b"inv": msg_inv,
        b"getdata": msg_getdata,
        b"getblocks": msg_getblocks,
        b"tx": msg_tx,
        b"block": msg_block,
        b"getaddr": msg_getaddr,
        b"ping": msg_ping,
        b"pong": msg_pong,
        b"headers": msg_headers,
        b"getheaders": msg_getheaders,
        b"reject": msg_reject,
        b"mempool": msg_mempool,
    }
    # Per-network message-start bytes prepended to every message.
    MAGIC_BYTES = {
        "mainnet": b"\xbf\x0c\x6b\xbd",   # mainnet
        "testnet3": b"\xce\xe2\xca\xff",  # testnet3
        "regtest": b"\xfc\xc1\xb7\xdc"    # regtest
    }
    def __init__(self, dstaddr, dstport, rpc, callback, net="regtest", services=1):
        asyncore.dispatcher.__init__(self, map=mininode_socket_map)
        self.log = logging.getLogger("NodeConn(%s:%d)" % (dstaddr, dstport))
        self.dstaddr = dstaddr
        self.dstport = dstport
        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
        self.sendbuf = b""
        self.recvbuf = b""
        # ver_send/ver_recv track the negotiated protocol version for each
        # direction; 209 is the first checksum-bearing version.
        self.ver_send = 209
        self.ver_recv = 209
        self.last_sent = 0
        self.state = "connecting"
        self.network = net
        self.cb = callback
        self.disconnect = False
        # stuff version msg into sendbuf
        vt = msg_version()
        vt.nServices = services
        vt.addrTo.ip = self.dstaddr
        vt.addrTo.port = self.dstport
        vt.addrFrom.ip = "0.0.0.0"
        vt.addrFrom.port = 0
        self.send_message(vt, True)
        print 'MiniNode: Connecting to YxomCoin Node IP # ' + dstaddr + ':' \
            + str(dstport)
        try:
            self.connect((dstaddr, dstport))
        except:
            self.handle_close()
        self.rpc = rpc
    def show_debug_msg(self, msg):
        self.log.debug(msg)
    def handle_connect(self):
        self.show_debug_msg("MiniNode: Connected & Listening: \n")
        self.state = "connected"
    def handle_close(self):
        self.show_debug_msg("MiniNode: Closing Connection to %s:%d... "
                            % (self.dstaddr, self.dstport))
        self.state = "closed"
        self.recvbuf = b""
        self.sendbuf = b""
        try:
            self.close()
        except:
            pass
        self.cb.on_close(self)
    def handle_read(self):
        # Append whatever arrived to recvbuf and try to parse frames.
        try:
            t = self.recv(8192)
            if len(t) > 0:
                self.recvbuf += t
                self.got_data()
        except:
            pass
    def readable(self):
        return True
    def writable(self):
        # Only ask asyncore for write events while there is data queued.
        with mininode_lock:
            length = len(self.sendbuf)
        return (length > 0)
    def handle_write(self):
        with mininode_lock:
            try:
                sent = self.send(self.sendbuf)
            except:
                self.handle_close()
                return
            self.sendbuf = self.sendbuf[sent:]
    def got_data(self):
        # Parse as many complete frames as recvbuf holds. Frame layout:
        # 4 magic | 12 NUL-padded command | 4 length | [4 checksum] | payload
        # (the checksum field exists only for protocol versions >= 209).
        try:
            while True:
                if len(self.recvbuf) < 4:
                    return
                if self.recvbuf[:4] != self.MAGIC_BYTES[self.network]:
                    raise ValueError("got garbage %s" % repr(self.recvbuf))
                if self.ver_recv < 209:
                    if len(self.recvbuf) < 4 + 12 + 4:
                        return
                    command = self.recvbuf[4:4+12].split(b"\x00", 1)[0]
                    msglen = struct.unpack("<i", self.recvbuf[4+12:4+12+4])[0]
                    checksum = None
                    if len(self.recvbuf) < 4 + 12 + 4 + msglen:
                        return
                    msg = self.recvbuf[4+12+4:4+12+4+msglen]
                    self.recvbuf = self.recvbuf[4+12+4+msglen:]
                else:
                    if len(self.recvbuf) < 4 + 12 + 4 + 4:
                        return
                    command = self.recvbuf[4:4+12].split(b"\x00", 1)[0]
                    msglen = struct.unpack("<i", self.recvbuf[4+12:4+12+4])[0]
                    checksum = self.recvbuf[4+12+4:4+12+4+4]
                    if len(self.recvbuf) < 4 + 12 + 4 + 4 + msglen:
                        return
                    msg = self.recvbuf[4+12+4+4:4+12+4+4+msglen]
                    # Checksum is the first 4 bytes of double-SHA256(payload).
                    th = sha256(msg)
                    h = sha256(th)
                    if checksum != h[:4]:
                        raise ValueError("got bad checksum " + repr(self.recvbuf))
                    self.recvbuf = self.recvbuf[4+12+4+4+msglen:]
                if command in self.messagemap:
                    f = BytesIO(msg)
                    t = self.messagemap[command]()
                    t.deserialize(f)
                    self.got_message(t)
                else:
                    self.show_debug_msg("Unknown command: '" + command + "' " +
                                        repr(msg))
        except Exception as e:
            print 'got_data:', repr(e)
    def send_message(self, message, pushbuf=False):
        # pushbuf=True queues even before the connection is established
        # (used to pre-load the initial version message).
        if self.state != "connected" and not pushbuf:
            return
        self.show_debug_msg("Send %s" % repr(message))
        command = message.command
        data = message.serialize()
        tmsg = self.MAGIC_BYTES[self.network]
        tmsg += command
        tmsg += b"\x00" * (12 - len(command))
        tmsg += struct.pack("<I", len(data))
        if self.ver_send >= 209:
            th = sha256(data)
            h = sha256(th)
            tmsg += h[:4]
        tmsg += data
        with mininode_lock:
            self.sendbuf += tmsg
            self.last_sent = time.time()
    def got_message(self, message):
        # Downgrade to nonce-less pings for pre-BIP31 peers, send a
        # keepalive ping if idle for 30 minutes, then hand off to the
        # callback object.
        if message.command == b"version":
            if message.nVersion <= BIP0031_VERSION:
                self.messagemap[b'ping'] = msg_ping_prebip31
        if self.last_sent + 30 * 60 < time.time():
            self.send_message(self.messagemap[b'ping']())
        self.show_debug_msg("Recv %s" % repr(message))
        self.cb.deliver(self, message)
    def disconnect_node(self):
        # Flag for the NetworkThread to close this connection.
        self.disconnect = True
class NetworkThread(Thread):
    """Background thread that drives all NodeConn sockets via asyncore."""
    def run(self):
        while mininode_socket_map:
            # We check for whether to disconnect outside of the asyncore
            # loop to workaround the behavior of asyncore when using
            # select
            pending = [conn for conn in mininode_socket_map.values()
                       if conn.disconnect]
            for conn in pending:
                conn.handle_close()
            asyncore.loop(0.1, use_poll=True, map=mininode_socket_map, count=1)
# An exception we can raise if we detect a potential disconnect
# (p2p or rpc) before the test is complete
class EarlyDisconnectError(Exception):
    """Raised when a p2p or RPC disconnect is detected before a test ends."""
    def __init__(self, value):
        self.value = value
    def __str__(self):
        return repr(self.value)
| |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
r"""
# .---. .-----------
# / \ __ / ------
# / / \( )/ ----- (`-') _ _(`-') <-. (`-')_
# ////// '\/ ` --- ( OO).-/( (OO ).-> .-> \( OO) ) .->
# //// / // : : --- (,------. \ .'_ (`-')----. ,--./ ,--/ ,--.' ,-.
# // / / / `\/ '-- | .---' '`'-..__)( OO).-. ' | \ | | (`-')'.' /
# // //..\\ (| '--. | | ' |( _) | | | | . '| |)(OO \ /
# ============UU====UU==== | .--' | | / : \| |)| | | |\ | | / /)
# '//||\\` | `---. | '-' / ' '-' ' | | \ | `-/ /`
# ''`` `------' `------' `-----' `--' `--' `--'
# ######################################################################################
#
# Author: edony - edonyzpc@gmail.com
#
# twitter : @edonyzpc
#
# Last modified: 2015-06-05 15:48
#
# Filename: getdirections.py
#
# Description: All Rights Are Reserved
#
"""
#import scipy as sp
#import math as m
#import matplotlib as mpl
#import matplotlib.pyplot as plt
#from mpl_toolkits.mplot3d import Axes3D as Ax3
#from scipy import stats as st
#from matplotlib import cm
#import numpy as np
import sys
if sys.version.startswith("3."):
from functools import reduce
import os
#import platform
class PyColor(object):
    """ This class is for colored print in the python interpreter!
    "F3" call Addpy() function to add this class which is defined
    in the .vimrc for vim Editor."""
    def __init__(self):
        # self_doc is a runtime reference string describing ANSI escape
        # sequences; it is data, not a docstring.
        self.self_doc = r"""
        STYLE: \033['display model';'foreground';'background'm
        DETAILS:
        FOREGROUND        BACKGOUND       COLOR
        ---------------------------------------
        30                40              black
        31                41              red
        32                42              green
        33                43              yellow
        34                44              blue
        35                45              purple
        36                46              cyan
        37                47              white
        DISPLAY MODEL    DETAILS
        -------------------------
        0                default
        1                highlight
        4                underline
        5                flicker
        7                reverse
        8                non-visiable
        e.g:
        \033[1;31;40m   <!--1-highlight;31-foreground red;40-background black-->
        \033[0m         <!--set all into default-->
        """
        # White-on-red / red-on-green ANSI sequences, and the reset code.
        self.warningcolor = '\033[0;37;41m'
        self.tipcolor = '\033[0;31;42m'
        self.endcolor = '\033[0m'
        self._newcolor = ''
    @property
    def new(self):
        """
        Customized Python Print Color.
        """
        return self._newcolor
    @new.setter
    def new(self, color_str):
        """
        New Color.
        """
        self._newcolor = color_str
    def disable(self):
        """
        Disable Color Print.
        """
        # NOTE(review): only warningcolor/endcolor are cleared here;
        # tipcolor stays colored — possibly intentional, verify.
        self.warningcolor = ''
        self.endcolor = ''
class GetDirections(object):
    """
    Class GetDirections:
    Get all the contents in the given path recursively and record the contents
    into directions and files.
    """
    def __init__(self, path):
        """
        path: given path
        directions: all directions in path (include sub-directions)
        files: all files in the path (include the files in sub-directions)
        """
        self.path = path
        self.directions = []
        self.files = {}
        self.color = PyColor()
    @staticmethod
    def normal_path(path):
        """
        Normalize the path string, replacing every backslash with "/".
        """
        # str.replace also normalizes a leading backslash, which the
        # original reduce() version skipped (the first character was never
        # examined as a "tail").
        return path.replace("\\", "/")
    @staticmethod
    def _find_subdir(path):
        """
        List the immediate sub-directories of *path* as "path/name" strings.
        """
        path = GetDirections.normal_path(path)
        # Bug fix: join the listdir() name with its parent before testing.
        # The original called os.path.isdir(item) on the bare name, which is
        # resolved against the *current working directory* and only worked
        # because _all_dir() chdir'd into path first.
        return [path + '/' + item for item in os.listdir(path)
                if os.path.isdir(os.path.join(path, item))]
    @staticmethod
    def _all_dir(path, total_dirs):
        """
        Recursively record *path* and every sub-direction into total_dirs.
        """
        path = GetDirections.normal_path(path)
        total_dirs.append(path)
        # Bug fix: recurse only. The original appended each sub-direction
        # both here and inside the recursive call, recording every non-root
        # direction twice. os.chdir() is no longer needed (see _find_subdir).
        for item in GetDirections._find_subdir(path):
            GetDirections._all_dir(item, total_dirs)
    def structed_dir(self):
        """
        Sort the collected directions by name.

        Raises:
            ValueError: if no directions have been collected yet.
        """
        if self.directions:
            self.directions = sorted(self.directions)
        else:
            raise ValueError("Directions is empty!")
    def get_dir(self):
        """
        Main to get all the structured directions.
        """
        self._all_dir(self.path, self.directions)
        self.structed_dir()
    def all_files(self):
        """
        Record, for every collected direction, the plain files it contains.
        """
        if self.directions:
            for direction in self.directions:
                # Absolute join instead of os.chdir(): no CWD side effect.
                self.files[direction] = \
                    [file_name for file_name in os.listdir(direction)
                     if os.path.isfile(os.path.join(direction, file_name))]
    def file_tree(self):
        """
        Get visualization of the path structure.
        """
        self.get_dir()
        self.all_files()
        if self.files:
            print(self.color.tipcolor, self.path, self.color.endcolor, '\n')
            for key in self.files.keys():
                print(self.color.warningcolor, "|-- .%s\\" % key.split(self.path)[1],
                      self.color.endcolor)
                for file_name in self.files[key]:
                    print(" | %s" % file_name)
                print(" |")
if __name__ == '__main__':
    # Demo: print the directory/file tree of a hard-coded local path
    # (only meaningful on the author's machine).
    TEST = GetDirections('/home/edony/code/github/pyexer')
    # TEST.get_dir()
    # TEST.all_files()
    # for testdir in TEST.directions:
    # print testdir
    # for key in TEST.files.keys():
    # print key
    # print TEST.files[key]
    TEST.file_tree()
| |
import argparse
import numpy as np
import keras.backend as K
from . import defines
def parse_args():
    """Parse command line arguments.
    The args namespace is used promiscuously in this module.
    Its fields control the tensor definition, dataset generation, training, file I/O and evaluation.
    Some of the fields are typically dicts or lists that are not actually set on the command line,
    but via a companion argument also in the namespace.
    For example, input_symbols is set via the input_symbol_set string
    and, annotations is set via the annotation_set string.
    Here we also seed the random number generator.
    The keras image data format is set here as well via the channels_last or channels_first arguments.
    Returns:
        namespace: The args namespace that is used throughout this module.
    """
    parser = argparse.ArgumentParser()
    # Argument registration is decomposed by topic; each helper mirrors one
    # of the comment-marked sections of the original monolithic function.
    _add_tensor_args(parser)
    _add_annotation_args(parser)
    _add_dataset_args(parser)
    _add_io_args(parser)
    _add_training_args(parser)
    _add_architecture_args(parser)
    _add_evaluation_args(parser)
    _add_run_args(parser)
    # Parse, print, set annotations and seed
    args = parser.parse_args()
    args.annotations = annotations_from_args(args)
    args.input_symbols = input_symbols_from_args(args)
    np.random.seed(args.random_seed)
    if args.channels_last:
        K.set_image_data_format('channels_last')
    else:
        K.set_image_data_format('channels_first')
    print('Arguments are', args)
    return args

def _add_tensor_args(parser):
    """Tensor defining arguments: shape, symbols and channel layout."""
    parser.add_argument('--tensor_name', default='read_tensor', choices=defines.TENSOR_MAPS_1D+defines.TENSOR_MAPS_2D,
                        help='String key which identifies the map from tensor channels to their meaning.')
    parser.add_argument('--labels', default=defines.SNP_INDEL_LABELS,
                        help='Dict mapping label names to their index within label tensors.')
    parser.add_argument('--input_symbol_set', default='dna_indel', choices=defines.INPUT_SYMBOLS.keys(),
                        help='Key which maps to an input symbol to index mapping.')
    parser.add_argument('--input_symbols', help='Dict mapping input symbols to their index within input tensors, '
                                               + 'initialised via input_symbols_set argument')
    parser.add_argument('--batch_size', default=32, type=int,
                        help='Mini batch size for stochastic gradient descent algorithms.')
    parser.add_argument('--read_limit', default=128, type=int,
                        help='Maximum number of reads to load.')
    parser.add_argument('--window_size', default=128, type=int,
                        help='Size of sequence window to use as input, typically centered at a variant.')
    parser.add_argument('--base_quality_mode', default='phot', choices=['phot', 'phred', '1hot'],
                        help='How to treat base qualities, must be in [phot, phred, 1hot]')
    # channels_last / channels_first share one dest; last one given wins.
    parser.add_argument('--channels_last', default=True, dest='channels_last', action='store_true',
                        help='Store the channels in the last axis of tensors, tensorflow->true, theano->false')
    parser.add_argument('--channels_first', dest='channels_last', action='store_false',
                        help='Store the channels in the first axis of tensors, tensorflow->false, theano->true')

def _add_annotation_args(parser):
    """Annotation arguments; the concrete list comes from annotation_set."""
    parser.add_argument('--annotations', help='Array of annotation names, initialised via annotation_set argument')
    parser.add_argument('--annotation_set', default='best_practices', choices=defines.ANNOTATIONS_SETS.keys(),
                        help='Key which maps to an annotations list (or _ to ignore annotations).')

def _add_dataset_args(parser):
    """Dataset generation related arguments (sampling and downsampling rates)."""
    parser.add_argument('--samples', default=500, type=int,
                        help='Maximum number of data samples to write or load.')
    parser.add_argument('--downsample_snps', default=1.0, type=float,
                        help='Rate of SNP examples that are kept must be in [0.0, 1.0].')
    parser.add_argument('--downsample_indels', default=1.0, type=float,
                        help='Rate of INDEL examples that are kept must be in [0.0, 1.0].')
    parser.add_argument('--downsample_not_snps', default=1.0, type=float,
                        help='Rate of NOT_SNP examples that are kept must be in [0.0, 1.0].')
    parser.add_argument('--downsample_not_indels', default=1.0, type=float,
                        help='Rate of NOT_INDEL examples that are kept must be in [0.0, 1.0].')
    parser.add_argument('--downsample_reference', default=0.001, type=float,
                        help='Rate of reference genotype examples that are kept must be in [0.0, 1.0].')
    parser.add_argument('--downsample_homozygous', default=0.001, type=float,
                        help='Rate of homozygous genotypes that are kept must be in [0.0, 1.0].')
    parser.add_argument('--start_pos', default=0, type=int,
                        help='Genomic position start for parallel tensor writing.')
    parser.add_argument('--end_pos', default=0, type=int,
                        help='Genomic position end for parallel tensor writing.')
    parser.add_argument('--skip_positive_class', default=False, action='store_true',
                        help='Whether to skip positive examples when writing tensors.')
    parser.add_argument('--chrom', help='Chromosome to load for parallel tensor writing.')

def _add_io_args(parser):
    """I/O files and directories: vcfs, bams, beds, hd5, fasta."""
    parser.add_argument('--output_dir', default='./', help='Directory to write models or other data out.')
    parser.add_argument('--image_dir', default=None, help='Directory to write images and plots to.')
    parser.add_argument('--reference_fasta', help='The reference FASTA file (e.g. HG19 or HG38).')
    parser.add_argument('--weights_hd5', default='',
                        help='A hd5 file of weights to initialize a model, will use all layers with names that match.')
    parser.add_argument('--architecture', default='',
                        help='A json file specifying semantics and architecture of a neural net.')
    parser.add_argument('--bam_file',
                        help='Path to a BAM file to train from or generate tensors with.')
    parser.add_argument('--train_vcf',
                        help='Path to a VCF that has verified true calls from NIST, platinum genomes, etc.')
    parser.add_argument('--input_vcf',
                        help='Haplotype Caller or VQSR generated VCF with raw annotation values [and quality scores].')
    parser.add_argument('--output_vcf', default=None,
                        help='Optional VCF to write to.')
    parser.add_argument('--bed_file',
                        help='Bed file specifying high confidence intervals associated with args.train_vcf.')
    parser.add_argument('--data_dir',
                        help='Directory of tensors, must be split into test/valid/train directories'
                             +'with subdirectories for each label.')

def _add_training_args(parser):
    """Training and optimization related arguments."""
    parser.add_argument('--epochs', default=25, type=int,
                        help='Number of epochs, typically passes through the entire dataset, not always well-defined.')
    parser.add_argument('--batch_normalization', default=False, action='store_true',
                        help='Mini batch normalization layers after convolutions.')
    parser.add_argument('--patience', default=4, type=int,
                        help='Maximum number of epochs to run without validation loss improvements (Early Stopping).')
    parser.add_argument('--training_steps', default=80, type=int,
                        help='Number of training batches to examine in an epoch.')
    parser.add_argument('--validation_steps', default=40, type=int,
                        help='Number of validation batches to examine in an epoch validation.')
    parser.add_argument('--iterations', default=5, type=int,
                        help='Generic iteration limit for hyperparameter optimization, animation, and other counts.')
    parser.add_argument('--tensor_board', default=False, action='store_true',
                        help='Add the tensor board callback.')

def _add_architecture_args(parser):
    """Architecture defining arguments (convolutional and dense layers)."""
    parser.add_argument('--conv_width', default=5, type=int, help='Width of convolutional kernels.')
    parser.add_argument('--conv_height', default=5, type=int, help='Height of convolutional kernels.')
    parser.add_argument('--conv_dropout', default=0.0, type=float,
                        help='Dropout rate in convolutional layers.')
    parser.add_argument('--conv_batch_normalize', default=False, action='store_true',
                        help='Batch normalize convolutional layers.')
    parser.add_argument('--conv_layers', nargs='+', default=[128, 96, 64, 48], type=int,
                        help='List of sizes for each convolutional filter layer')
    parser.add_argument('--padding', default='valid', choices=['valid', 'same'],
                        help='Valid or same border padding for convolutional layers.')
    parser.add_argument('--spatial_dropout', default=False, action='store_true',
                        help='Spatial dropout on the convolutional layers.')
    parser.add_argument('--max_pools', nargs='+', default=[], type=int,
                        help='List of max-pooling layers.')
    parser.add_argument('--fc_layers', nargs='+', default=[32], type=int,
                        help='List of sizes for each fully connected layer')
    parser.add_argument('--fc_dropout', default=0.0, type=float,
                        help='Dropout rate in fully connected layers.')
    parser.add_argument('--fc_batch_normalize', default=False, action='store_true',
                        help='Batch normalize fully connected layers.')
    parser.add_argument('--annotation_units', default=16, type=int,
                        help='Number of units connected to the annotation input layer.')
    parser.add_argument('--annotation_shortcut', default=False, action='store_true',
                        help='Shortcut connections on the annotations.')

def _add_evaluation_args(parser):
    """Evaluation related arguments."""
    parser.add_argument('--score_keys', nargs='+', default=['VQSLOD'],
                        help='List of variant score keys for performance comparisons.')
    parser.add_argument('--tranches', nargs='+', default=[100, 99.9, 99, 95, 90], type=float,
                        help='List of variant score keys for performance comparisons.')

def _add_run_args(parser):
    """Run specific arguments: mode, versions and random seed."""
    parser.add_argument('--mode', help='High level recipe: write tensors, train, test or evaluate models.')
    parser.add_argument('--id', default='no_id',
                        help='Identifier for this run, user-defined string to keep experiments organized.')
    parser.add_argument('--gatk_version', default='4.1.0.0',
                        help='GATK version used to run this code.')
    parser.add_argument('--model_version', default='1.0',
                        help='Model version for this run.')
    parser.add_argument('--random_seed', default=12878, type=int,
                        help='Random seed to use throughout run. Always use np.random.')
def annotations_from_args(args):
    """Resolve ``args.annotation_set`` to its annotation list.

    Returns the matching list from ``defines.ANNOTATIONS_SETS``, or ``None``
    when the set name is empty or unknown.
    """
    key = args.annotation_set
    if not key:
        return None
    if key not in defines.ANNOTATIONS_SETS:
        return None
    return defines.ANNOTATIONS_SETS[key]
def input_symbols_from_args(args):
    """Resolve ``args.input_symbol_set`` to its symbol-to-index dict.

    Returns the matching dict from ``defines.INPUT_SYMBOLS``, or ``None``
    when the set name is empty or unknown.
    """
    key = args.input_symbol_set
    if not key:
        return None
    if key not in defines.INPUT_SYMBOLS:
        return None
    return defines.INPUT_SYMBOLS[key]
def weight_path_from_args(args):
    """Create a weight file name from the command line arguments.

    The name is ``<output_dir><id><TENSOR_SUFFIX>`` built by plain string
    concatenation, so ``args.output_dir`` is expected to end with a slash.
    """
    return ''.join((args.output_dir, args.id, defines.TENSOR_SUFFIX))
| |
# MIB modules loader
import os, sys, imp, struct, marshal, time, traceback
from pysnmp.smi import error
from pysnmp import debug
# Python 2 distinguishes old-style (types.ClassType) from new-style (type)
# classes; Python 3 only has the latter. classTypes is used by isinstance()
# checks when exporting MIB symbols.
if sys.version_info[0] <= 2:
    import types
    classTypes = (types.ClassType, type)
else:
    classTypes = (type,)
class __AbstractMibSource:
    """Abstract base for MIB module sources (plain directories, ZIPs/eggs).

    Subclasses implement _init/_listdir/_getTimestamp/_getData; this class
    keeps the suffix bookkeeping and the compiled-vs-source file selection.
    """
    def __init__(self, srcName):
        self._srcName = srcName
        self.__magic = imp.get_magic()
        # Map import type (imp.PY_SOURCE / imp.PY_COMPILED) ->
        # list of (suffix, suffix length, open mode).
        self.__sfx = {}
        self.__inited = None
        for sfx, mode, typ in imp.get_suffixes():
            if typ not in self.__sfx:
                self.__sfx[typ] = []
            self.__sfx[typ].append((sfx, len(sfx), mode))
        debug.logger & debug.flagBld and debug.logger('trying %s' % self)

    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, self._srcName)

    def _uniqNames(self, files):
        """Strip known source/compiled suffixes and de-duplicate module names."""
        u = {}
        for f in files:
            if f[:9] == '__init__.':
                continue
            for typ in (imp.PY_SOURCE, imp.PY_COMPILED):
                for sfx, sfxLen, mode in self.__sfx[typ]:
                    if f[-sfxLen:] == sfx:
                        u[f[:-sfxLen]] = None
        return tuple(u.keys())

    # MibSource API follows

    def fullPath(self, f='', sfx=''):
        """Return the source path, optionally extended with a member name."""
        return self._srcName + (f and (os.sep + f + sfx) or '')

    def init(self):
        """Initialize lazily; _init() may return a replacement source object."""
        if self.__inited is None:
            self.__inited = self._init()
            if self.__inited is self:
                self.__inited = True
        if self.__inited is True:
            return self
        else:
            return self.__inited

    def listdir(self):
        return self._listdir()

    def read(self, f):
        """Return (module data, suffix), preferring an up-to-date .pyc/.pyo.

        A compiled file is used only when its magic matches this interpreter
        and its embedded timestamp is not older than the source file.
        """
        for pycSfx, pycSfxLen, pycMode in self.__sfx[imp.PY_COMPILED]:
            try:
                pycData = self._getData(f + pycSfx, pycMode)
            except IOError:
                pycTime = -1
            else:
                if self.__magic == pycData[:4]:
                    # Skip magic, then read the 4-byte source mtime.
                    pycData = pycData[4:]
                    pycTime = struct.unpack('<L', pycData[:4])[0]
                    pycData = pycData[4:]
                    break
                else:
                    debug.logger & debug.flagBld and debug.logger(
                        'bad magic in %s' % (f+pycSfx,)
                        )
                    pycTime = -1
        debug.logger & debug.flagBld and debug.logger(
            'file %s mtime %d' % (f+pycSfx, pycTime)
            )
        for pySfx, pySfxLen, pyMode in self.__sfx[imp.PY_SOURCE]:
            try:
                pyTime = self._getTimestamp(f+pySfx)
            except (IOError, OSError):
                pyTime = -1
            else:
                break
        debug.logger & debug.flagBld and debug.logger(
            'file %s mtime %d' % (f+pySfx, pyTime)
            )
        if pycTime != -1 and pycTime >= pyTime:
            return marshal.loads(pycData), pycSfx
        if pyTime != -1:
            return self._getData(f+pySfx, pyMode), pySfx
        raise IOError('No suitable module found')

    # Interfaces for subclasses
    def _init(self): raise NotImplementedError()
    def _listdir(self): raise NotImplementedError()
    def _getTimestamp(self, f): raise NotImplementedError()
    # Bug fix: the original instantiated NotImplementedError() without
    # raising it, so an unimplemented _getData silently returned None.
    def _getData(self, f, mode=None): raise NotImplementedError()
class ZipMibSource(__AbstractMibSource):
    # MIB source backed by an importable package, possibly living inside a
    # ZIP/egg archive (zipimport loader).  Relies on the zipimporter's
    # private ``_files`` table -- presumably its TOC; confirm per Python
    # version before upgrading.
    def _init(self):
        """Import the package; fall back to a DirMibSource when not zipped."""
        try:
            p = __import__(self._srcName, globals(), locals(), ['__init__'])
            if hasattr(p, '__loader__') and hasattr(p.__loader__, '_files'):
                # zipimport case: keep the loader for direct member access.
                self.__loader = p.__loader__
                self._srcName = self._srcName.replace('.', os.sep)
                return self
            else:
                # Dir relative to PYTHONPATH
                return DirMibSource(os.path.split(p.__file__)[0]).init()
        except ImportError:
            # Dir relative to CWD
            return DirMibSource(self._srcName).init()

    def _parseDosTime(self, dosdate, dostime):
        """Convert a DOS (ZIP) packed date/time pair to a Unix timestamp."""
        t = ( ((dosdate >> 9) & 0x7f) + 1980, # year
              ((dosdate >> 5) & 0x0f), # month
              dosdate & 0x1f, # mday
              (dostime >> 11) & 0x1f, # hour
              (dostime >> 5) & 0x3f, # min
              (dostime & 0x1f) * 2, # sec
              -1, # wday
              -1, # yday
              -1 ) # dst
        return time.mktime(t)

    def _listdir(self):
        """List unique module names directly under this archive directory."""
        l = []
        for f in self.__loader._files.keys():
            d, f = os.path.split(f)
            if d == self._srcName:
                l.append(f)
        return tuple(self._uniqNames(l))

    def _getTimestamp(self, f):
        """Return the archived member's mtime, decoded from its DOS fields."""
        p = os.path.join(self._srcName, f)
        if p in self.__loader._files:
            return self._parseDosTime(
                self.__loader._files[p][6],
                self.__loader._files[p][5]
                )
        else:
            raise IOError('No file in ZIP: %s' % p)

    def _getData(self, f, mode=None):
        # mode is ignored; zipimport always hands back bytes.
        return self.__loader.get_data(os.path.join(self._srcName, f))
class DirMibSource(__AbstractMibSource):
    """MIB source backed by a plain filesystem directory."""
    def _init(self):
        self._srcName = os.path.normpath(self._srcName)
        return self

    def _listdir(self):
        """List unique module names in the directory; () if unreadable."""
        try:
            return self._uniqNames(os.listdir(self._srcName))
        except OSError:
            return ()

    def _getTimestamp(self, f):
        # Index 8 of the os.stat() tuple is st_mtime.
        return os.stat(os.path.join(self._srcName, f))[8]

    def _getData(self, f, mode):
        """Return the file's contents, or raise IOError if absent."""
        try:
            if f in os.listdir(self._srcName): # make FS case-sensitive
                # Bug fix: close the handle instead of leaking it
                # (the original did open(...).read() and never closed).
                fp = open(os.path.join(self._srcName, f), mode)
                try:
                    return fp.read()
                finally:
                    fp.close()
        except OSError:
            pass
        raise IOError # pretend there's no such file
class MibBuilder:
    """Loads MIB modules from configured sources and tracks their symbols."""
    loadTexts = 0
    defaultCoreMibs = os.pathsep.join(
        ('pysnmp.smi.mibs.instances', 'pysnmp.smi.mibs')
        )
    defaultMiscMibs = 'pysnmp_mibs'
    moduleID = 'PYSNMP_MODULE_ID'

    def __init__(self):
        self.lastBuildId = self._autoName = 0
        sources = []
        # Core MIB packages; the PYSNMP_MIB_PKGS env var overrides defaults.
        for m in os.environ.get('PYSNMP_MIB_PKGS', self.defaultCoreMibs).split(os.pathsep):
            sources.append(ZipMibSource(m))
        # Compatibility variable
        if 'PYSNMP_MIB_DIR' in os.environ:
            os.environ['PYSNMP_MIB_DIRS'] = os.environ['PYSNMP_MIB_DIR']
        if 'PYSNMP_MIB_DIRS' in os.environ:
            for m in os.environ['PYSNMP_MIB_DIRS'].split(os.pathsep):
                sources.append(DirMibSource(m))
        if self.defaultMiscMibs:
            for m in self.defaultMiscMibs.split(os.pathsep):
                sources.append(ZipMibSource(m))
        self.mibSymbols = {}
        self.__modSeen = {}
        self.__modPathsSeen = {}
        self.setMibSources(*sources)

    # MIB modules management

    def setMibSources(self, *mibSources):
        """Replace the list of MIB sources; each source is init()-ed."""
        self.__mibSources = [ s.init() for s in mibSources ]
        debug.logger & debug.flagBld and debug.logger('setMibPath: new MIB sources %s' % (self.__mibSources,))

    def getMibSources(self): return tuple(self.__mibSources)

    # Legacy/compatibility methods (won't work for .eggs)
    def setMibPath(self, *mibPaths):
        self.setMibSources(*[ DirMibSource(x) for x in mibPaths ])

    def getMibPath(self):
        """Return directory paths; raises SmiError for non-directory sources."""
        paths = ()
        for mibSource in self.getMibSources():
            if isinstance(mibSource, DirMibSource):
                paths += ( mibSource.fullPath(), )
            else:
                raise error.SmiError(
                    'MIB source is not a plain directory: %s' % (mibSource,)
                    )
        return paths

    def loadModules(self, *modNames, **userCtx):
        """Load the named MIB modules (or all available ones) by exec-ing them."""
        # Build a list of available modules
        if not modNames:
            modNames = {}
            for mibSource in self.__mibSources:
                for modName in mibSource.listdir():
                    modNames[modName] = None
            modNames = list(modNames.keys())
        if not modNames:
            raise error.SmiError(
                'No MIB module to load at %s' % (self,)
                )
        for modName in modNames:
            for mibSource in self.__mibSources:
                debug.logger & debug.flagBld and debug.logger('loadModules: trying %s at %s' % (modName, mibSource))
                try:
                    modData, sfx = mibSource.read(modName)
                except IOError:
                    debug.logger & debug.flagBld and debug.logger('loadModules: read %s from %s failed: %s' % (modName, mibSource, sys.exc_info()[1]))
                    continue
                modPath = mibSource.fullPath(modName, sfx)
                if modPath in self.__modPathsSeen:
                    debug.logger & debug.flagBld and debug.logger('loadModules: seen %s' % modPath)
                    break
                else:
                    self.__modPathsSeen[modPath] = 1
                debug.logger & debug.flagBld and debug.logger('loadModules: evaluating %s' % modPath)
                g = { 'mibBuilder': self,
                      'userCtx': userCtx }
                # SECURITY: exec() of on-disk/zip MIB code -- MIB sources
                # must be trusted; do not point PYSNMP_MIB_DIRS at
                # untrusted locations.
                try:
                    exec(modData, g)
                except Exception:
                    # Forget the path so a corrected module can be retried.
                    del self.__modPathsSeen[modPath]
                    raise error.SmiError(
                        'MIB module \"%s\" load error: %s' % (modPath, traceback.format_exception(*sys.exc_info()))
                        )
                self.__modSeen[modName] = modPath
                debug.logger & debug.flagBld and debug.logger('loadModules: loaded %s' % modPath)
                break
            if modName not in self.__modSeen:
                raise error.SmiError(
                    'MIB file \"%s\" not found in search path' % (modName and modName + ".py[co]")
                    )
        return self

    def unloadModules(self, *modNames):
        """Forget the named modules (or all loaded ones) and their symbols."""
        if not modNames:
            modNames = list(self.mibSymbols.keys())
        for modName in modNames:
            if modName not in self.mibSymbols:
                raise error.SmiError(
                    'No module %s at %s' % (modName, self)
                    )
            self.unexportSymbols(modName)
            del self.__modPathsSeen[self.__modSeen[modName]]
            del self.__modSeen[modName]
            # Bug fix: the original used '%' on a format string with no
            # placeholder ('unloadModules: ' % (modName)), which raises
            # TypeError whenever debug logging is enabled.
            debug.logger & debug.flagBld and debug.logger('unloadModules: %s' % (modName,))
        return self

    def importSymbols(self, modName, *symNames, **userCtx):
        """Return a tuple of symbol objects, loading their module on demand."""
        if not modName:
            raise error.SmiError(
                'importSymbols: empty MIB module name'
                )
        r = ()
        for symName in symNames:
            if modName not in self.mibSymbols:
                self.loadModules(modName, **userCtx)
            if modName not in self.mibSymbols:
                raise error.SmiError(
                    'No module %s loaded at %s' % (modName, self)
                    )
            if symName not in self.mibSymbols[modName]:
                raise error.SmiError(
                    'No symbol %s::%s at %s' % (modName, symName, self)
                    )
            r = r + (self.mibSymbols[modName][symName],)
        return r

    def exportSymbols(self, modName, *anonymousSyms, **namedSyms):
        """Register symbols under *modName*; anonymous ones get synthetic names."""
        if modName not in self.mibSymbols:
            self.mibSymbols[modName] = {}
        mibSymbols = self.mibSymbols[modName]

        for symObj in anonymousSyms:
            debug.logger & debug.flagBld and debug.logger('exportSymbols: anonymous symbol %s::__pysnmp_%ld' % (modName, self._autoName))
            mibSymbols['__pysnmp_%ld' % self._autoName] = symObj
            self._autoName += 1
        for symName, symObj in namedSyms.items():
            if symName in mibSymbols:
                raise error.SmiError(
                    'Symbol %s already exported at %s' % (symName, modName)
                    )
            # Non-class MIB objects may carry their own label which
            # overrides the keyword name used at export time.
            if symName != self.moduleID and \
                   not isinstance(symObj, classTypes):
                label = symObj.getLabel()
                if label:
                    symName = label
                else:
                    symObj.setLabel(symName)
            mibSymbols[symName] = symObj
            debug.logger & debug.flagBld and debug.logger('exportSymbols: symbol %s::%s' % (modName, symName))
        self.lastBuildId += 1

    def unexportSymbols(self, modName, *symNames):
        """Remove the named symbols (or all) from *modName*'s symbol table."""
        if modName not in self.mibSymbols:
            raise error.SmiError(
                'No module %s at %s' % (modName, self)
                )
        mibSymbols = self.mibSymbols[modName]
        if not symNames:
            symNames = list(mibSymbols.keys())
        for symName in symNames:
            if symName not in mibSymbols:
                raise error.SmiError(
                    'No symbol %s::%s at %s' % (modName, symName, self)
                    )
            del mibSymbols[symName]
            debug.logger & debug.flagBld and debug.logger('unexportSymbols: symbol %s::%s' % (modName, symName))
        if not self.mibSymbols[modName]:
            del self.mibSymbols[modName]
        self.lastBuildId += 1
| |
import unittest, random, sys, time
sys.path.extend(['.','..','py'])
import h2o, h2o_browse as h2b, h2o_exec as h2e, h2o_hosts, h2o_import as h2i
# (result key, Exec2 expression) pairs executed once before the randomized
# trials, seeding the KV store with copies of the parsed dataset i.hex.
initList = [
    # ('r1.hex', 'r1.hex=c(1.3,0,1,2,3,4,5)'),
    # ('r2.hex', 'r2.hex=c(2.3,0,1,2,3,4,5)'),
    # ('r3.hex', 'r3.hex=c(4.3,0,1,2,3,4,5)'),
    ('r.hex', 'r.hex=i.hex'),
    ('r1.hex', 'r1.hex=i.hex'),
    ('r2.hex', 'r2.hex=i.hex'),
    ('r3.hex', 'r3.hex=i.hex'),
    # ('x', 'x=r.hex[,1]; rcnt=nrow(x)-sum(is.na(x))'),
    # ('x', 'x=r.hex[,1]; total=sum(ifelse(is.na(x),0,x)); rcnt=nrow(x)-sum(is.na(x))'),
    # ('x', 'x=r.hex[,1]; total=sum(ifelse(is.na(x),0,x)); rcnt=nrow(x)-sum(is.na(x)); mean=total / rcnt'),
    # ('x', 'x=r.hex[,1]; total=sum(ifelse(is.na(x),0,x)); rcnt=nrow(x)-sum(is.na(x)); mean=total / rcnt; x=ifelse(is.na(x),mean,x)'),
]
# Exec2 expression corpus for the randomized trials.  Commented-out entries
# record constructs known not to work at the time; flip the 1==0 guard to
# run with an empty corpus.
if 1==0:
    exprListFull = [
    ]
else:
    exprListFull = [
        'r1.hex=apply(r.hex,2,function(x){ifelse(is.na(x),0,x)})',
        'cct.hex=runif(r.hex, -1);rTrain=r.hex[cct.hex<=0.9,];rTest=r.hex[cct.hex>0.9,]',
        # 'r<n>[,0] = r0[,0] * r<n-1>[,0]',
        # 'r<n>[0,] = r1[0,] + r<n-1>[0,]',
        # 'r<n> = r1 + r<n-1>',
        # doesn't work
        # ';;',
        'r1.hex[,1]=r1.hex[,1]==1.0',
        # unsupported
        # 'r1.hex[1,]=r1.hex[1,]==1.0',
        'b.hex=runif(r3.hex[,1], -1)',
        'b.hex=runif(r3.hex[1,], -1)',
        # 'r1.hex[,1]=r1.hex[,1] + 1.3',
        # 'r<n>.hex=min(r1.hex,1+2)',
        # 'r<n>.hex=r2.hex + 1',
        # 'r<n>.hex=r3.hex + 1',
        # 'r<n>[,0] = r2[,0] / r<n-1>[,0]',
        # 'r<n>[,0] = r3[,0] - r<n-1>[,0]',
        # from h2o/src/test/java/water/exec/Expr2Test.java
        # FIX! update to template form?
        "1.23", # 1.23
        # doesn't work
        # ",1.23 + 2.34" # 3.57
        # doesn't work
        # ",1.23 + 2.34 * 3" # 10.71, L2R eval order
        ## ",1.23 2.34" # Syntax error
        "1.23e0<2.34e1", # 1
        "+1.23e0<+2.34e1", # 1
        "-1.23e0<+-2.34e1", # 1
        "-1.23e000<+-2.34e100", # 1
        "-1.23e-001<+-2.34e-100", # 1
        "1.23<2.34", # 1
        "1.23<=2.34", # 1
        "1.23>2.34", # 0
        "1.23>=2.34", # 0
        "1.23==2.34", # 0
        "1.23!=2.34", # 1
        "1.23 <2.34", # 1
        "1.23 <=2.34", # 1
        "1.23 >2.34", # 0
        "1.23 >=2.34", # 0
        "1.23 ==2.34", # 0
        "1.23 !=2.34", # 1
        "1.23< 2.34", # 1
        "1.23<= 2.34", # 1
        "1.23> 2.34", # 0
        "1.23>= 2.34", # 0
        "1.23== 2.34", # 0
        "1.23!= 2.34", # 1
        "r.hex", # Simple ref
        # no longer legal
        ## "+(1.23,2.34)",# prefix 3.57
        ## "+(1.23)", # Syntax error, not enuf args
        ## "+(1.23,2,3)", # Syntax error, too many args
        "r.hex[2,3]", # Scalar selection
        ## "r.hex[2,+]", # Function not allowed
        "r.hex[2+4,-4]", # Select row 6, all-cols but 4
        # "r.hex[1,-1]; r.hex[2,-2]; r.hex[3,-3]", # Partial results are freed
        "r.hex[1,-1]; r.hex[1,-1]; r.hex[1,-1]", # Partial results are freed
        ## "r.hex[2+3,r.hex]",# Error: col selector has too many columns
        # "r.hex[2,]", # Row 2 all cols
        "r.hex[1,]", # Row 2 all cols
        # "r.hex[,3]", # Col 3 all rows
        "r.hex[,1]", # Col 3 all rows
        "r.hex+1", # Broadcast scalar over ary
        "r.hex-r.hex",
        "1.23+(r.hex-r.hex)",
        "(1.23+r.hex)-r.hex",
        "min(r.hex,1+2)",
        "is.na(r.hex)",
        "nrow(r.hex)*3",
        "r.hex[nrow(r.hex)-1,ncol(r.hex)-1]",
        # doesn't work
        # "1=2",
        # doesn't work
        # "x",
        "x=0; x+2",
        # doesn't work
        # "2+x",
        "x=1",
        "x<-1", # Alternative R assignment syntax
        # doesn't work
        ## "x=1;x=r.hex", # Allowed to change types via shadowing at REPL level
        "a=r.hex", # Top-level assignment back to H2O.STORE
        ## "x<-+",
        # ?
        ## "(r.hex+1)<-2",
        "r.hex[nrow(r.hex),]",
        "r.hex[,ncol(r.hex)]",
        # double semi doesn't work
        # "r.hex[2,3]<-4;",
        "c(1,3,5)",
        # what is this?
        ### "function(=){x+1}(2)",
        # doesn't work?
        # "function(x,=){x+1}(2)",
        # doesn't work
        # "function(x,<-){x+1}(2)",
        # doesn't work
        # "function(x,x){x+1}(2)",
        "function(x,y,z){x[]}(r.hex,1,2)",
        # doesn't work?
        # "function(x){x[]}(2)",
        "function(x){x+1}(2)",
        # doesn't work
        ## "function(x){y=x+y}(2)",
        # doesn't work
        ## "function(x,y){y=x+y}(2)",
        # doesn't work
        # "function(x){}(2)",
        "function(x){y=x*2; y+1}(2)",
        "function(x){y=1+2}(2)",
        # doesn't work
        # "function(x){y=1+2;y=c(1,2)}",# Not allowed to change types in inner scopes
        "sum(1,2,3)",
        "sum(c(1,3,5))",
        "sum(4,c(1,3,5),2,6)",
        "sum(1,r.hex,3)",
        # unimplemented?
        "r.hex[,c(1)]",
        "r.hex[c(1),]",
        "r.hex[c(1,3,5),]",
        "r.hex[,c(1,3,5)]",
        "a=c(11,22,33,44,55,66); a[c(2,6,1),]",
        # doesn't work
        # "function(a){a[];a=1}",
        "a=1;a=2;function(x){x=a;a=3}",
        "a=r.hex;function(x){x=a;a=3;nrow(x)*a}(a)",
        # Higher-order function typing: fun is typed in the body of function(x)
        "function(funy){function(x){funy(x)*funy(x)}}(sgn)(-2)",
        # Filter/selection
        "r.hex[r.hex[,1]>4,]",
        "a=c(1,2,3); a[a[,1]>10,1]",
        "apply(r.hex,2,sum)",
        # doesn't work
        # "y=5;apply(r.hex,1,function(x){x[]+y})",
        # doesn't work
        # "apply(r.hex,1,function(x){x=1;r.hex})",
        # doesn't work
        # "apply(r.hex,1,function(x){r.hex})",
        "mean=function(x){apply(x,2,sum)/nrow(x)};mean(r.hex)",
        # "mean=function(x){apply(x,1,sum)/nrow(x)};mean(r.hex)",
        # Conditional selection;
        "ifelse(0,1,2)",
        "ifelse(0,r.hex+1,r.hex+2)",
        "ifelse(r.hex>3,99,r.hex)",# Broadcast selection
        "ifelse(0,+,*)(1,2)", # Select functions
        "(0 ? + : *)(1,2)", # Trinary select
        "(1? r.hex : (r.hex+1))[1,2]",# True (vs false) test
        # Impute the mean
        # doesn't work
        # "apply(r.hex,2,function(x){total=sum(ifelse(is.na(x),0,x)); rcnt=nrow(x)-sum(is.na(x)); mean=total / rcnt; ifelse(is.na(x),mean,x)})",
        "factor(r.hex[,5])",
        # Slice assignment & map
        "r.hex[,1]",
        "r.hex[1,]",
        "r.hex[,1]+1",
        # unimplemented
        # "r.hex[1,]+1",
        "r.hex[,1]=3.3;r.hex", # Replace a col with a constant
        # "r.hex[1,]=3.3;r.hex",
        "r.hex[,1]=r.hex[,1]+1",# Replace a col
        # "r.hex[1,]=r.hex[1,]+1",
        "r.hex[,ncol(r.hex)+1]=4",# Extend a col
        # can't do arith on the row
        # "r.hex[nrow(r.hex)+1,]=4",
        "a=ncol(r.hex); r.hex[,c(a+1,a+2)]=5",# Extend two cols
        # doesn't work
        # "table(r.hex)",
        # doesn't work. wants integer
        # "table(r.hex[,1])",
        # "r.hex[r.hex[,2]>4,]=-99",
        # "r.hex[2,]=r.hex[7,]",
        # "r.hex[c(1,3,5),1] = r.hex[c(2,4,6),2]",
        # "r.hex[c(1,3,5),1] = r.hex[c(2,4),2]",
        # "map()",
        # "map(1)",
        # "map(+,r.hex,1)",
        # "map(+,1,2)",
        # "map(function(x){x[];1},r.hex)",
        # "map(function(a,b,d){a+b+d},r.hex,r.hex,1)",
        # "map(function(a,b){a+ncol(b)},r.hex,r.hex)",
        "a=0;x=0", # Delete keys from global scope
    ]
# concatenate a lot of random choices to make life harder
# Build 10 trial expressions; each assigns a random corpus entry to z.hex.
exprList = []
for _trial in range(10):
    pieces = []
    for _rep in range(1):
        pieces.append("z.hex=" + random.choice(exprListFull) + ";")
    # expr += random.choice(exprListFull) + ";"
    exprList.append("".join(pieces))
class Basic(unittest.TestCase):
    # Integration test driving random Exec2 expressions against an H2O
    # cloud (Python 2 codebase; note the print statement below).
    def tearDown(self):
        # Fail the test if H2O logged errors during the run.
        h2o.check_sandbox_for_errors()

    @classmethod
    def setUpClass(cls):
        global SEED, localhost
        SEED = h2o.setup_random_seed()
        localhost = h2o.decide_if_localhost()
        if (localhost):
            h2o.build_cloud(1, java_heap_GB=14)
        else:
            h2o_hosts.build_cloud_with_hosts(1)

    @classmethod
    def tearDownClass(cls):
        h2o.tear_down_cloud()

    def test_exec2_unary(self):
        h2o.beta_features = True
        bucket = 'home-0xdiag-datasets'
        csvPathname = 'airlines/year2013.csv'
        hexKey = 'i.hex'
        # Parse the dataset into i.hex, then seed the copies from initList.
        parseResult = h2i.import_parse(bucket=bucket, path=csvPathname, schema='put', hex_key=hexKey)
        for resultKey, execExpr in initList:
            h2e.exec_expr(h2o.nodes[0], execExpr, resultKey=None, timeoutSecs=10)

        start = time.time()
        # h2e.exec_expr_list_rand(len(h2o.nodes), exprList, 'r1.hex', maxTrials=200, timeoutSecs=10)
        h2e.exec_expr_list_rand(len(h2o.nodes), exprList, None, maxTrials=200, timeoutSecs=30, allowEmptyResult=True, nanOkay=True)

        h2o.check_sandbox_for_errors()
        print "exec end on ", "operators" , 'took', time.time() - start, 'seconds'
if __name__ == '__main__':
    # h2o's unittest wrapper (handles cloud arguments etc.).
    h2o.unit_main()
| |
import docker
from .base import BaseIntegrationTest, TEST_API_VERSION
class ContainerCollectionTest(BaseIntegrationTest):
    # Integration tests for the high-level client.containers collection API.
    # Every test talks to a live Docker daemon via docker.from_env().
    def test_run(self):
        client = docker.from_env(version=TEST_API_VERSION)
        self.assertEqual(
            client.containers.run("alpine", "echo hello world", remove=True),
            b'hello world\n'
        )

    def test_run_detach(self):
        client = docker.from_env(version=TEST_API_VERSION)
        container = client.containers.run("alpine", "sleep 300", detach=True)
        # Register for cleanup by BaseIntegrationTest.
        self.tmp_containers.append(container.id)
        assert container.attrs['Config']['Image'] == "alpine"
        assert container.attrs['Config']['Cmd'] == ['sleep', '300']

    def test_run_with_error(self):
        # A non-zero exit with remove=True must surface as ContainerError.
        client = docker.from_env(version=TEST_API_VERSION)
        with self.assertRaises(docker.errors.ContainerError) as cm:
            client.containers.run("alpine", "cat /test", remove=True)
        assert cm.exception.exit_status == 1
        assert "cat /test" in str(cm.exception)
        assert "alpine" in str(cm.exception)
        assert "No such file or directory" in str(cm.exception)

    def test_run_with_image_that_does_not_exist(self):
        client = docker.from_env(version=TEST_API_VERSION)
        with self.assertRaises(docker.errors.ImageNotFound):
            client.containers.run("dockerpytest_does_not_exist")

    def test_get(self):
        client = docker.from_env(version=TEST_API_VERSION)
        container = client.containers.run("alpine", "sleep 300", detach=True)
        self.tmp_containers.append(container.id)
        assert client.containers.get(container.id).attrs[
            'Config']['Image'] == "alpine"

    def test_list(self):
        client = docker.from_env(version=TEST_API_VERSION)
        container_id = client.containers.run(
            "alpine", "sleep 300", detach=True).id
        self.tmp_containers.append(container_id)
        containers = [c for c in client.containers.list() if c.id ==
                      container_id]
        assert len(containers) == 1

        container = containers[0]
        assert container.attrs['Config']['Image'] == 'alpine'

        # After kill+remove the container must no longer appear in list().
        container.kill()
        container.remove()
        assert container_id not in [c.id for c in client.containers.list()]
class ContainerTest(BaseIntegrationTest):
    # Integration tests for the Container model object (one container per
    # test, registered in tmp_containers for cleanup).
    def test_commit(self):
        client = docker.from_env(version=TEST_API_VERSION)
        container = client.containers.run(
            "alpine", "sh -c 'echo \"hello\" > /test'",
            detach=True
        )
        self.tmp_containers.append(container.id)
        container.wait()
        # Committing must preserve the container's filesystem changes.
        image = container.commit()
        self.assertEqual(
            client.containers.run(image.id, "cat /test", remove=True),
            b"hello\n"
        )

    def test_diff(self):
        client = docker.from_env(version=TEST_API_VERSION)
        container = client.containers.run("alpine", "touch /test", detach=True)
        self.tmp_containers.append(container.id)
        container.wait()
        # Kind 1 marks an added path in the diff output.
        assert container.diff() == [{'Path': '/test', 'Kind': 1}]

    def test_exec_run(self):
        client = docker.from_env(version=TEST_API_VERSION)
        container = client.containers.run(
            "alpine", "sh -c 'echo \"hello\" > /test; sleep 60'", detach=True
        )
        self.tmp_containers.append(container.id)
        assert container.exec_run("cat /test") == b"hello\n"

    def test_kill(self):
        client = docker.from_env(version=TEST_API_VERSION)
        container = client.containers.run("alpine", "sleep 300", detach=True)
        self.tmp_containers.append(container.id)
        # NOTE(review): unbounded busy-wait -- hangs forever if the
        # container exits before reaching 'running'; consider a timeout.
        while container.status != 'running':
            container.reload()
        assert container.status == 'running'
        container.kill()
        container.reload()
        assert container.status == 'exited'

    def test_logs(self):
        client = docker.from_env(version=TEST_API_VERSION)
        container = client.containers.run("alpine", "echo hello world",
                                          detach=True)
        self.tmp_containers.append(container.id)
        container.wait()
        assert container.logs() == b"hello world\n"

    def test_pause(self):
        client = docker.from_env(version=TEST_API_VERSION)
        container = client.containers.run("alpine", "sleep 300", detach=True)
        self.tmp_containers.append(container.id)
        container.pause()
        container.reload()
        assert container.status == "paused"
        container.unpause()
        container.reload()
        assert container.status == "running"

    def test_remove(self):
        client = docker.from_env(version=TEST_API_VERSION)
        container = client.containers.run("alpine", "echo hello", detach=True)
        self.tmp_containers.append(container.id)
        assert container.id in [c.id for c in client.containers.list(all=True)]
        container.wait()
        container.remove()
        containers = client.containers.list(all=True)
        assert container.id not in [c.id for c in containers]

    def test_rename(self):
        client = docker.from_env(version=TEST_API_VERSION)
        container = client.containers.run("alpine", "echo hello", name="test1",
                                          detach=True)
        self.tmp_containers.append(container.id)
        assert container.name == "test1"
        container.rename("test2")
        container.reload()
        assert container.name == "test2"

    def test_restart(self):
        client = docker.from_env(version=TEST_API_VERSION)
        container = client.containers.run("alpine", "sleep 100", detach=True)
        self.tmp_containers.append(container.id)
        first_started_at = container.attrs['State']['StartedAt']
        container.restart()
        container.reload()
        second_started_at = container.attrs['State']['StartedAt']
        # A restart must produce a fresh start timestamp.
        assert first_started_at != second_started_at

    def test_start(self):
        client = docker.from_env(version=TEST_API_VERSION)
        container = client.containers.create("alpine", "sleep 50", detach=True)
        self.tmp_containers.append(container.id)
        assert container.status == "created"
        container.start()
        container.reload()
        assert container.status == "running"

    def test_stats(self):
        client = docker.from_env(version=TEST_API_VERSION)
        container = client.containers.run("alpine", "sleep 100", detach=True)
        self.tmp_containers.append(container.id)
        stats = container.stats(stream=False)
        for key in ['read', 'networks', 'precpu_stats', 'cpu_stats',
                    'memory_stats', 'blkio_stats']:
            assert key in stats

    def test_stop(self):
        client = docker.from_env(version=TEST_API_VERSION)
        container = client.containers.run("alpine", "top", detach=True)
        self.tmp_containers.append(container.id)
        assert container.status in ("running", "created")
        container.stop(timeout=2)
        container.reload()
        assert container.status == "exited"

    def test_top(self):
        client = docker.from_env(version=TEST_API_VERSION)
        container = client.containers.run("alpine", "sleep 60", detach=True)
        self.tmp_containers.append(container.id)
        top = container.top()
        assert len(top['Processes']) == 1
        assert 'sleep 60' in top['Processes'][0]

    def test_update(self):
        client = docker.from_env(version=TEST_API_VERSION)
        container = client.containers.run("alpine", "sleep 60", detach=True,
                                          cpu_shares=2)
        self.tmp_containers.append(container.id)
        assert container.attrs['HostConfig']['CpuShares'] == 2
        container.update(cpu_shares=3)
        container.reload()
        assert container.attrs['HostConfig']['CpuShares'] == 3

    def test_wait(self):
        client = docker.from_env(version=TEST_API_VERSION)
        container = client.containers.run("alpine", "sh -c 'exit 0'",
                                          detach=True)
        self.tmp_containers.append(container.id)
        # wait() returns the exit status in this SDK version.
        assert container.wait() == 0
        container = client.containers.run("alpine", "sh -c 'exit 1'",
                                          detach=True)
        self.tmp_containers.append(container.id)
        assert container.wait() == 1
| |
# -*- coding: utf-8 -*-
"""
pygments.lexers.archetype
~~~~~~~~~~~~~~~~~~~~~~~~~
Lexer for Archetype-related syntaxes, including:
- ODIN syntax <https://github.com/openEHR/odin>
- ADL syntax <http://www.openehr.org/releases/trunk/architecture/am/adl2.pdf>
- cADL sub-syntax of ADL
For uses of this syntax, see the openEHR archetypes <http://www.openEHR.org/ckm>
Contributed by Thomas Beale <https://github.com/wolandscat>,
<https://bitbucket.org/thomas_beale>.
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, include, bygroups, using, default
from pygments.token import Text, Comment, Name, Literal, Number, String, \
Punctuation, Keyword, Operator, Generic
__all__ = ['OdinLexer', 'CadlLexer', 'AdlLexer']
class AtomsLexer(RegexLexer):
    """
    Lexer for Values used in ADL and ODIN.

    Defines shared pseudo-states (meant only for ``include()`` by
    subclasses) plus a few real states for strings, URIs, intervals
    and term codes.

    .. versionadded:: 2.1
    """

    tokens = {
        # ----- pseudo-states for inclusion -----
        'whitespace': [
            (r'\n', Text),
            (r'\s+', Text),
            # '--' starts a line comment in ADL/ODIN
            (r'[ \t]*--.*$', Comment),
        ],
        'archetype_id': [
            # e.g. openEHR-EHR-OBSERVATION.blood_pressure.v1.0.0
            (r'[ \t]*([a-zA-Z]\w+(\.[a-zA-Z]\w+)*::)?[a-zA-Z]\w+(-[a-zA-Z]\w+){2}'
             r'\.\w+[\w-]*\.v\d+(\.\d+){,2}((-[a-z]+)(\.\d+)?)?', Name.Decorator),
        ],
        'date_constraints': [
            # ISO 8601-based date/time constraints
            (r'[Xx?YyMmDdHhSs\d]{2,4}([:-][Xx?YyMmDdHhSs\d]{2}){2}', Literal.Date),
            # ISO 8601-based duration constraints + optional trailing slash
            (r'(P[YyMmWwDd]+(T[HhMmSs]+)?|PT[HhMmSs]+)/?', Literal.Date),
        ],
        'ordered_values': [
            # ISO 8601 date with optional 'T' ligature
            (r'\d{4}-\d{2}-\d{2}T?', Literal.Date),
            # ISO 8601 time
            (r'\d{2}:\d{2}:\d{2}(\.\d+)?([+-]\d{4}|Z)?', Literal.Date),
            # ISO 8601 duration
            (r'P((\d*(\.\d+)?[YyMmWwDd]){1,3}(T(\d*(\.\d+)?[HhMmSs]){,3})?|'
             r'T(\d*(\.\d+)?[HhMmSs]){,3})', Literal.Date),
            # numbers: scientific, decimal (optionally %), hex, integer
            (r'[+-]?(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float),
            (r'[+-]?(\d+)*\.\d+%?', Number.Float),
            (r'0x[0-9a-fA-F]+', Number.Hex),
            (r'[+-]?\d+%?', Number.Integer),
        ],
        'values': [
            include('ordered_values'),
            (r'([Tt]rue|[Ff]alse)', Literal),
            (r'"', String, 'string'),
            (r"'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
            # URI: scheme prefix hands off to the 'uri' state
            (r'[a-z][a-z0-9+.-]*:', Literal, 'uri'),
            # term code
            (r'(\[)(\w[\w-]*(?:\([^)\n]+\))?)(::)(\w[\w-]*)(\])',
             bygroups(Punctuation, Name.Decorator, Punctuation, Name.Decorator,
                      Punctuation)),
            (r'\|', Punctuation, 'interval'),
            # list continuation
            (r'\.\.\.', Punctuation),
        ],
        'constraint_values': [
            (r'(\[)(\w[\w-]*(?:\([^)\n]+\))?)(::)',
             bygroups(Punctuation, Name.Decorator, Punctuation), 'adl14_code_constraint'),
            # ADL 1.4 ordinal constraint
            (r'(\d*)(\|)(\[\w[\w-]*::\w[\w-]*\])((?:[,;])?)',
             bygroups(Number, Punctuation, Name.Decorator, Punctuation)),
            include('date_constraints'),
            include('values'),
        ],

        # ----- real states -----
        'string': [
            ('"', String, '#pop'),
            (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|'
             r'u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape),
            # all other characters
            (r'[^\\"]+', String),
            # stray backslash
            (r'\\', String),
        ],
        'uri': [
            # effective URI terminators
            (r'[,>\s]', Punctuation, '#pop'),
            (r'[^>\s,]+', Literal),
        ],
        'interval': [
            (r'\|', Punctuation, '#pop'),
            include('ordered_values'),
            (r'\.\.', Punctuation),
            (r'[<>=] *', Punctuation),
            # handle +/-
            (r'\+/-', Punctuation),
            (r'\s+', Text),
        ],
        'any_code': [
            include('archetype_id'),
            # if it is a code
            (r'[a-z_]\w*[0-9.]+(@[^\]]+)?', Name.Decorator),
            # if it is tuple with attribute names
            (r'[a-z_]\w*', Name.Class),
            # if it is an integer, i.e. Xpath child index
            (r'[0-9]+', Text),
            (r'\|', Punctuation, 'code_rubric'),
            (r'\]', Punctuation, '#pop'),
            # handle use_archetype statement
            (r'\s*,\s*', Punctuation),
        ],
        'code_rubric': [
            (r'\|', Punctuation, '#pop'),
            (r'[^|]+', String),
        ],
        'adl14_code_constraint': [
            (r'\]', Punctuation, '#pop'),
            (r'\|', Punctuation, 'code_rubric'),
            (r'(\w[\w-]*)([;,]?)', bygroups(Name.Decorator, Punctuation)),
            include('whitespace'),
        ],
    }
class OdinLexer(AtomsLexer):
    """
    Lexer for ODIN syntax.

    .. versionadded:: 2.1
    """
    name = 'ODIN'
    aliases = ['odin']
    filenames = ['*.odin']
    mimetypes = ['text/odin']

    tokens = {
        'path': [
            (r'>', Punctuation, '#pop'),
            # attribute name
            (r'[a-z_]\w*', Name.Class),
            (r'/', Punctuation),
            (r'\[', Punctuation, 'key'),
            # a comma or whitespace ends the path expression
            (r'\s*,\s*', Punctuation, '#pop'),
            (r'\s+', Text, '#pop'),
        ],
        'key': [
            include('values'),
            (r'\]', Punctuation, '#pop'),
        ],
        'type_cast': [
            (r'\)', Punctuation, '#pop'),
            (r'[^)]+', Name.Class),
        ],
        'root': [
            include('whitespace'),
            (r'([Tt]rue|[Ff]alse)', Literal),
            include('values'),
            # x-ref path
            (r'/', Punctuation, 'path'),
            # x-ref path starting with key
            (r'\[', Punctuation, 'key'),
            # attribute name
            (r'[a-z_]\w*', Name.Class),
            (r'=', Operator),
            (r'\(', Punctuation, 'type_cast'),
            (r',', Punctuation),
            (r'<', Punctuation),
            (r'>', Punctuation),
            (r';', Punctuation),
        ],
    }
class CadlLexer(AtomsLexer):
    """
    Lexer for cADL syntax.

    .. versionadded:: 2.1
    """
    name = 'cADL'
    aliases = ['cadl']
    filenames = ['*.cadl']

    tokens = {
        'path': [
            # attribute name
            (r'[a-z_]\w*', Name.Class),
            (r'/', Punctuation),
            (r'\[', Punctuation, 'any_code'),
            (r'\s+', Punctuation, '#pop'),
        ],
        'root': [
            include('whitespace'),
            (r'(cardinality|existence|occurrences|group|include|exclude|'
             r'allow_archetype|use_archetype|use_node)\W', Keyword.Type),
            (r'(and|or|not|there_exists|xor|implies|for_all)\W', Keyword.Type),
            (r'(after|before|closed)\W', Keyword.Type),
            (r'(not)\W', Operator),
            (r'(matches|is_in)\W', Operator),
            # is_in / not is_in char
            (u'(\u2208|\u2209)', Operator),
            # there_exists / not there_exists / for_all / and / or / tilde
            # BUG FIX: the last alternative was written '\223C', which Python
            # parses as the octal escape \223 (U+0093) followed by a literal
            # 'C'; the intended character is U+223C TILDE OPERATOR.
            (u'(\u2203|\u2204|\u2200|\u2227|\u2228|\u22BB|\u223C)',
             Operator),
            # regex in slot or as string constraint
            (r'(\{)(\s*/[^}]+/\s*)(\})',
             bygroups(Punctuation, String.Regex, Punctuation)),
            # regex in slot or as string constraint
            (r'(\{)(\s*\^[^}]+\^\s*)(\})',
             bygroups(Punctuation, String.Regex, Punctuation)),
            (r'/', Punctuation, 'path'),
            # for cardinality etc
            (r'(\{)((?:\d+\.\.)?(?:\d+|\*))'
             r'((?:\s*;\s*(?:ordered|unordered|unique)){,2})(\})',
             bygroups(Punctuation, Number, Number, Punctuation)),
            # [{ is start of a tuple value
            (r'\[\{', Punctuation),
            (r'\}\]', Punctuation),
            (r'\{', Punctuation),
            (r'\}', Punctuation),
            include('constraint_values'),
            # type name
            (r'[A-Z]\w+(<[A-Z]\w+([A-Za-z_<>]*)>)?', Name.Class),
            # attribute name
            (r'[a-z_]\w*', Name.Class),
            (r'\[', Punctuation, 'any_code'),
            (r'(~|//|\\\\|\+|-|/|\*|\^|!=|=|<=|>=|<|>]?)', Operator),
            (r'\(', Punctuation),
            (r'\)', Punctuation),
            # for lists of values
            (r',', Punctuation),
            (r'"', String, 'string'),
            # for assumed value
            (r';', Punctuation),
        ],
    }
class AdlLexer(AtomsLexer):
    """
    Lexer for ADL syntax.

    Delegates the body of each section to :class:`OdinLexer` or
    :class:`CadlLexer` as appropriate.

    .. versionadded:: 2.1
    """

    name = 'ADL'
    aliases = ['adl']
    filenames = ['*.adl', '*.adls', '*.adlf', '*.adlx']

    tokens = {
        'whitespace': [
            # blank line ends
            (r'\s*\n', Text),
            # comment-only line
            (r'^[ \t]*--.*$', Comment),
        ],
        'odin_section': [
            # repeating the following two rules from the root state enables
            # multi-line strings that start in the first column to be handled
            (r'^(language|description|ontology|terminology|annotations|'
             r'component_terminologies|revision_history)[ \t]*\n', Generic.Heading),
            (r'^(definition)[ \t]*\n', Generic.Heading, 'cadl_section'),
            (r'^([ \t]*|[ \t]+.*)\n', using(OdinLexer)),
            (r'^([^"]*")(>[ \t]*\n)', bygroups(String, Punctuation)),
            # template overlay delimiter
            (r'^----------*\n', Text, '#pop'),
            (r'^.*\n', String),
            default('#pop'),
        ],
        'cadl_section': [
            (r'^([ \t]*|[ \t]+.*)\n', using(CadlLexer)),
            default('#pop'),
        ],
        'rules_section': [
            (r'^[ \t]+.*\n', using(CadlLexer)),
            default('#pop'),
        ],
        'metadata': [
            (r'\)', Punctuation, '#pop'),
            (r';', Punctuation),
            (r'([Tt]rue|[Ff]alse)', Literal),
            # numbers and version ids
            (r'\d+(\.\d+)*', Literal),
            # Guids
            (r'(\d|[a-fA-F])+(-(\d|[a-fA-F])+){3,}', Literal),
            (r'\w+', Name.Class),
            (r'"', String, 'string'),
            (r'=', Operator),
            (r'[ \t]+', Text),
            default('#pop'),
        ],
        'root': [
            (r'^(archetype|template_overlay|operational_template|template|'
             r'speciali[sz]e)', Generic.Heading),
            (r'^(language|description|ontology|terminology|annotations|'
             r'component_terminologies|revision_history)[ \t]*\n',
             Generic.Heading, 'odin_section'),
            (r'^(definition)[ \t]*\n', Generic.Heading, 'cadl_section'),
            (r'^(rules)[ \t]*\n', Generic.Heading, 'rules_section'),
            include('archetype_id'),
            (r'[ \t]*\(', Punctuation, 'metadata'),
            include('whitespace'),
        ],
    }
| |
import csv
import json
import math
import pytz
import six
import time
from collections import defaultdict
from datetime import datetime
from io import StringIO, BytesIO
from flask import Flask
from structlog import get_logger
from werkzeug.http import http_date
from .config import configure
from .encoders import JSONEncoder
from .render.attime import parseATTime
from .render.datalib import fetchData
from .render.glyph import GraphTypes
from .utils import RequestParams, hash_request
from .events.views import fetchEvents
logger = get_logger()
def jsonify(data, status=200, headers=None):
    """Serialize *data* as a JSON (or JSONP) Flask response tuple.

    When a 'jsonp' request parameter is present the body is wrapped in
    that callback and served as text/javascript. The *headers* dict, if
    given, is updated in place with the Content-Type.
    """
    if headers is None:
        headers = {}
    callback = RequestParams.get('jsonp', False)
    payload = json.dumps(data, cls=JSONEncoder)
    if callback:
        headers['Content-Type'] = 'text/javascript'
        payload = '{0}({1})'.format(callback, payload)
    else:
        headers['Content-Type'] = 'application/json'
    return payload, status, headers
class Graphite(Flask):
    """Flask app exposing the objects installed by config.configure()."""
    @property
    def store(self):
        # Metric finder/store built by configure(); used by the find views.
        return self.config['GRAPHITE']['store']
    @property
    def functions(self):
        # Registry of render functions (e.g. pie-mode aggregators).
        return self.config['GRAPHITE']['functions']
    @property
    def logger(self):
        # Flask has its own logger that doesn't get any handler if we use
        # dictconfig(). Replace it with our structlog logger.
        return logger
app = Graphite(__name__)
# Configure eagerly at import time so misconfiguration fails fast; print the
# traceback explicitly because some WSGI servers swallow import errors.
try:
    configure(app)
except Exception:
    import traceback
    print(traceback.format_exc())
    raise
# Every endpoint below accepts both GET and POST.
methods = ('GET', 'POST')
# No-op routes, non-essential for creating dashboards
@app.route('/dashboard/find', methods=methods)
def dashboard_find():
    """Stub endpoint: report that no stored dashboards exist."""
    return jsonify({'dashboards': []})
@app.route('/dashboard/load/<name>', methods=methods)
def dashboard_load(name):
    """Stub endpoint: dashboards are not persisted, so loading always 404s."""
    return jsonify({'error': "Dashboard '{0}' does not exist.".format(name)},
                   status=404)
@app.route('/events/get_data', methods=methods)
def events():
    """Return events in the requested time range as a JSON list.

    Honors the 'tz', 'from', 'until' and 'tags' request parameters.
    Responds with 400 and an error payload on invalid input.
    """
    errors = {}
    tzinfo = pytz.timezone(app.config['TIME_ZONE'])
    tz = RequestParams.get('tz')
    if tz:
        try:
            tzinfo = pytz.timezone(tz)
        except pytz.UnknownTimeZoneError:
            errors['tz'] = "Unknown timezone: '{0}'.".format(tz)
    until_time = parseATTime(RequestParams.get('until', 'now'), tzinfo)
    from_time = parseATTime(RequestParams.get('from', '-1d'), tzinfo)
    # Normalize so start <= end regardless of parameter order.
    start_time = min(from_time, until_time)
    end_time = max(from_time, until_time)
    if start_time == end_time:
        errors['from'] = errors['until'] = 'Invalid empty time range'
    # BUG FIX: errors were collected but never reported, so bad 'tz' values
    # and empty time ranges were silently ignored. Reject them with a 400
    # payload, consistent with the other API views in this module.
    if errors:
        return jsonify({'errors': errors}, status=400)
    tags = RequestParams.get('tags')
    return json.dumps(fetchEvents(start_time, end_time, tags)), 200, {'Content-Type': 'application/json'}
# API calls that actually do something
@app.route('/metrics/search', methods=methods)
def metrics_search():
    """Full-text metric search; results are sorted by metric path."""
    errors = {}
    try:
        max_results = int(RequestParams.get('max_results', 25))
    except ValueError:
        errors['max_results'] = 'must be an integer.'
    if 'query' not in RequestParams:
        errors['query'] = 'this parameter is required.'
    if errors:
        return jsonify({'errors': errors}, status=400)
    # NOTE(review): app.searcher is presumably installed by configuration —
    # it is not defined on the Graphite class itself.
    hits = app.searcher.search(query=RequestParams['query'],
                               max_results=max_results)
    ordered = sorted(hits, key=lambda hit: hit['path'] or '')
    return jsonify({'metrics': ordered})
@app.route('/metrics', methods=methods)
@app.route('/metrics/find', methods=methods)
def metrics_find():
    """Find metrics matching a glob query.

    Returns either the 'treejson' structure used by the Graphite web UI
    or the flat 'completer' list, depending on the 'format' parameter.
    """
    errors = {}
    from_time = None
    until_time = None
    wildcards = False
    try:
        wildcards = bool(int(RequestParams.get('wildcards', 0)))
    except ValueError:
        errors['wildcards'] = 'must be 0 or 1.'
    try:
        from_time = int(RequestParams.get('from', -1))
    except ValueError:
        errors['from'] = 'must be an epoch timestamp.'
    try:
        until_time = int(RequestParams.get('until', -1))
    except ValueError:
        errors['until'] = 'must be an epoch timestamp.'
    # -1 is the sentinel for "no time bound".
    if from_time == -1:
        from_time = None
    if until_time == -1:
        until_time = None
    format = RequestParams.get('format', 'treejson')
    if format not in ['treejson', 'completer']:
        errors['format'] = 'unrecognized format: "{0}".'.format(format)
    if 'query' not in RequestParams:
        errors['query'] = 'this parameter is required.'
    if errors:
        return jsonify({'errors': errors}, status=400)
    query = RequestParams['query']
    matches = sorted(
        app.store.find(query, from_time, until_time),
        key=lambda node: node.name
    )
    # Prefix for child ids: everything up to (and including) the last dot.
    base_path = query.rsplit('.', 1)[0] + '.' if '.' in query else ''
    if format == 'treejson':
        data = tree_json(matches, base_path, wildcards=wildcards)
        return (
            json.dumps(data),
            200,
            {'Content-Type': 'application/json'}
        )
    # 'completer' format: flat list; branch paths get a trailing dot.
    results = []
    for node in matches:
        node_info = {
            'path': node.path,
            'name': node.name,
            'is_leaf': int(node.is_leaf),  # XXX Y was this cast to str
        }
        if not node.is_leaf:
            node_info['path'] += '.'
        results.append(node_info)
    if len(results) > 1 and wildcards:
        results.append({'name': '*'})
    return jsonify({'metrics': results})
@app.route('/metrics/expand', methods=methods)
def metrics_expand():
    """Expand glob queries into matching metric paths.

    With groupByExpr=1 the response groups matches per query expression;
    otherwise all matches are merged into one sorted list.
    """
    errors = {}
    try:
        group_by_expr = bool(int(RequestParams.get('groupByExpr', 0)))
    except ValueError:
        errors['groupByExpr'] = 'must be 0 or 1.'
    try:
        leaves_only = bool(int(RequestParams.get('leavesOnly', 0)))
    except ValueError:
        errors['leavesOnly'] = 'must be 0 or 1.'
    if 'query' not in RequestParams:
        errors['query'] = 'this parameter is required.'
    if errors:
        return jsonify({'errors': errors}, status=400)
    results = defaultdict(set)
    for query in RequestParams.getlist('query'):
        for node in app.store.find(query):
            if node.is_leaf or not leaves_only:
                results[query].add(node.path)
    if group_by_expr:
        # Replace each value set with a sorted list; assigning to existing
        # keys while iterating items() is safe (the key set is unchanged).
        for query, matches in results.items():
            results[query] = sorted(matches)
    else:
        new_results = set()
        for value in results.values():
            new_results = new_results.union(value)
        results = sorted(new_results)
    return jsonify({'results': results})
def recurse(query, index):
    """
    Recursively walk across paths, adding leaves to the index as they're found.
    """
    for match in app.store.find(query):
        if match.is_leaf:
            index.add(match.path)
        else:
            # Branch node: descend into all of its children.
            recurse('{0}.*'.format(match.path), index)
@app.route('/metrics/index.json', methods=methods)
def metrics_index():
    """Return a sorted JSON list of every leaf metric path in the store."""
    index = set()
    recurse('*', index)
    return jsonify(sorted(index))
def prune_datapoints(series, max_datapoints, start, end):
    """Consolidate *series* in place so it has at most roughly
    *max_datapoints* points, then return it in render-API dict form.

    Mutates series.start and drops leading values when consolidating.
    """
    available_points = (end - start) // series.step
    if available_points <= max_datapoints:
        # Already coarse enough; keep the native resolution.
        step = series.step
    else:
        values_per_point = int(
            math.ceil(float(available_points) / float(max_datapoints))
        )
        seconds_per_point = values_per_point * series.step
        # Align the new start to a seconds_per_point boundary.
        nudge = (
            seconds_per_point +
            (series.start % series.step) -
            (series.start % seconds_per_point)
        )
        series.start += nudge
        values_to_lose = nudge // series.step
        del series[:values_to_lose - 1]
        series.consolidate(values_per_point)
        step = seconds_per_point
    timestamps = range(series.start, series.end + series.step, step)
    return {'target': series.name, 'datapoints': zip(series, timestamps)}
@app.route('/render', methods=methods)
def render():
    """Main rendering endpoint: evaluate targets and return an image or data.

    The 'format' request parameter selects csv, json, dygraph, rickshaw or
    raw output for line graphs; otherwise an image (png, svg or pdf) is
    rendered. Responses may be cached in app.cache.
    """
    # Start with some defaults
    errors = {}
    graph_options = {
        'width': 600,
        'height': 300,
    }
    request_options = {}
    # Fill in the request_options
    graph_type = RequestParams.get('graphType', 'line')
    try:
        graph_class = GraphTypes[graph_type]
        request_options['graphType'] = graph_type
        request_options['graphClass'] = graph_class
    except KeyError:
        errors['graphType'] = (
            "Invalid graphType '{0}', must be one of '{1}'.".format(
                graph_type, "', '".join(sorted(GraphTypes.keys()))))
    request_options['pieMode'] = RequestParams.get('pieMode', 'average')
    targets = RequestParams.getlist('target')
    if not len(targets):
        errors['target'] = 'This parameter is required.'
    request_options['targets'] = targets
    # 'format' wins over the legacy 'rawData' flag when both are present.
    if 'rawData' in RequestParams:
        request_options['format'] = 'raw'
    if 'format' in RequestParams:
        request_options['format'] = RequestParams['format']
    if 'jsonp' in RequestParams:
        request_options['jsonp'] = RequestParams['jsonp']
    if 'maxDataPoints' in RequestParams:
        try:
            request_options['maxDataPoints'] = int(
                float(RequestParams['maxDataPoints']))
        except ValueError:
            errors['maxDataPoints'] = 'Must be an integer.'
    if errors:
        return jsonify({'errors': errors}, status=400)
    # Fill in the graph_options: coerce each customizable option to int,
    # then float, then boolean; 'default' or empty keeps the class default.
    for opt in graph_class.customizable:
        if opt in RequestParams:
            value = RequestParams[opt]
            try:
                intvalue = int(value)
                if str(intvalue) == str(value):
                    value = intvalue
            except ValueError:
                try:
                    value = float(value)
                except ValueError:
                    if value.lower() in ('true', 'false'):
                        value = value.lower() == 'true'
                    elif value.lower() == 'default' or not value:
                        continue
            graph_options[opt] = value
    tzinfo = pytz.timezone(app.config['TIME_ZONE'])
    tz = RequestParams.get('tz')
    if tz:
        try:
            tzinfo = pytz.timezone(tz)
        except pytz.UnknownTimeZoneError:
            errors['tz'] = "Unknown timezone: '{0}'.".format(tz)
    request_options['tzinfo'] = tzinfo
    # Get the time interval for time-oriented graph types
    until_time = parseATTime(RequestParams.get('until', 'now'), tzinfo)
    from_time = parseATTime(RequestParams.get('from', '-1d'), tzinfo)
    start_time = min(from_time, until_time)
    end_time = max(from_time, until_time)
    if start_time == end_time:
        errors['from'] = errors['until'] = 'Invalid empty time range'
    request_options['startTime'] = start_time
    request_options['endTime'] = end_time
    # Collect template[name] parameters for variable substitution in targets.
    template = dict()
    for key in RequestParams.keys():
        if key.startswith('template['):
            template[key[9:-1]] = RequestParams.get(key)
    request_options['template'] = template
    use_cache = app.cache is not None and 'noCache' not in RequestParams
    cache_timeout = RequestParams.get('cacheTimeout')
    if cache_timeout is not None:
        cache_timeout = int(cache_timeout)
    if errors:
        return jsonify({'errors': errors}, status=400)
    # Done with options.
    if use_cache:
        request_key = hash_request()
        response = app.cache.get(request_key)
        if response is not None:
            return response
    headers = {
        'Last-Modified': http_date(time.time()),
        'Expires': http_date(time.time() + (cache_timeout or 60)),
        'Cache-Control': 'max-age={0}'.format(cache_timeout or 60)
    } if use_cache else {
        'Pragma': 'no-cache',
        'Cache-Control': 'no-cache',
    }
    context = {
        'startTime': request_options['startTime'],
        'endTime': request_options['endTime'],
        'tzinfo': request_options['tzinfo'],
        'template': request_options['template'],
        'data': [],
    }
    # Gather all data to take advantage of backends with fetch_multi
    paths = []
    for target in request_options['targets']:
        if request_options['graphType'] == 'pie':
            # 'name:value' pie slices are literal, nothing to fetch.
            if ':' in target:
                continue
        if target.strip():
            paths += pathsFromTarget(context, target)
    data_store = fetchData(context, paths)
    if request_options['graphType'] == 'pie':
        for target in request_options['targets']:
            if ':' in target:
                name, value = target.split(':', 1)
                try:
                    value = float(value)
                except ValueError:
                    errors['target'] = "Invalid target: '{0}'.".format(target)
                context['data'].append((name, value))
            else:
                series_list = evaluateTarget(context, target, data_store)
                for series in series_list:
                    # Reduce each series to a single pie slice value.
                    func = app.functions[request_options['pieMode']]
                    context['data'].append((series.name,
                                            func(context, series) or 0))
        if errors:
            return jsonify({'errors': errors}, status=400)
    else:  # graphType == 'line'
        for target in request_options['targets']:
            if not target.strip():
                continue
            series_list = evaluateTarget(context, target, data_store)
            context['data'].extend(series_list)
        # Normalize so the key always exists (None when not requested).
        request_options['format'] = request_options.get('format')
        if request_options['format'] == 'csv':
            response = BytesIO() if six.PY2 else StringIO()
            writer = csv.writer(response, dialect='excel')
            for series in context['data']:
                for index, value in enumerate(series):
                    ts = datetime.fromtimestamp(
                        series.start + index * series.step,
                        request_options['tzinfo']
                    )
                    writer.writerow((series.name,
                                     ts.strftime("%Y-%m-%d %H:%M:%S"), value))
            response.seek(0)
            headers['Content-Type'] = 'text/csv'
            return response.read(), 200, headers
        if request_options['format'] == 'json':
            series_data = []
            if 'maxDataPoints' in request_options and any(context['data']):
                start_time = min([s.start for s in context['data']])
                end_time = max([s.end for s in context['data']])
                for series in context['data']:
                    series_data.append(prune_datapoints(
                        series, request_options['maxDataPoints'],
                        start_time, end_time))
            else:
                for series in context['data']:
                    timestamps = range(series.start, series.end + series.step,
                                       series.step)
                    datapoints = zip(series, timestamps)
                    series_data.append({'target': series.name,
                                        'datapoints': datapoints})
            return jsonify(series_data, headers=headers)
        if request_options['format'] == 'dygraph':
            series_data = {}
            labels = ['Time']
            if any(context['data']):
                # Dygraph wants one row per timestamp (in milliseconds),
                # with one column appended per series.
                datapoints = [[ts * 1000]
                              for ts in range(context['data'][0].start,
                                              context['data'][0].end,
                                              context['data'][0].step)]
                for series in context['data']:
                    labels.append(series.name)
                    for i, point in enumerate(series):
                        datapoints[i].append(point)
                series_data = {'labels': labels, 'data': datapoints}
            return jsonify(series_data, headers=headers)
        if request_options['format'] == 'rickshaw':
            series_data = []
            for series in context['data']:
                timestamps = range(series.start, series.end, series.step)
                datapoints = [{'x': x, 'y': y}
                              for x, y in zip(timestamps, series)]
                series_data.append(dict(target=series.name,
                                        datapoints=datapoints))
            return jsonify(series_data, headers=headers)
        if request_options['format'] == 'raw':
            response = StringIO()
            for series in context['data']:
                response.write(u"%s,%d,%d,%d|" % (
                    series.name, series.start, series.end, series.step))
                response.write(u','.join(map(repr, series)))
                response.write(u'\n')
            response.seek(0)
            headers['Content-Type'] = 'text/plain'
            return response.read(), 200, headers
        if request_options['format'] == 'svg':
            graph_options['outputFormat'] = 'svg'
        elif request_options['format'] == 'pdf':
            graph_options['outputFormat'] = 'pdf'
    # No data format matched (or pie graph): render an actual image.
    graph_options['data'] = context['data']
    image = doImageRender(request_options['graphClass'], graph_options)
    use_svg = graph_options.get('outputFormat') == 'svg'
    if use_svg and 'jsonp' in request_options:
        headers['Content-Type'] = 'text/javascript'
        response = ('{0}({1})'.format(request_options['jsonp'],
                                      json.dumps(image.decode('utf-8'))),
                    200, headers)
    else:
        if use_svg:
            ctype = 'image/svg+xml'
        elif graph_options.get('outputFormat') == 'pdf':
            ctype = 'application/x-pdf'
        else:
            ctype = 'image/png'
        headers['Content-Type'] = ctype
        response = image, 200, headers
    if use_cache:
        app.cache.add(request_key, response, cache_timeout)
    return response
def tree_json(nodes, base_path, wildcards=False):
    """Build the 'treejson' structure consumed by the Graphite web UI.

    Emits an optional wildcard entry first, then branch nodes, then leaf
    nodes; duplicate names are skipped.
    """
    branch_attrs = {
        'allowChildren': 1,
        'expandable': 1,
        'leaf': 0,
    }
    leaf_attrs = {
        'allowChildren': 0,
        'expandable': 0,
        'leaf': 1,
    }
    output = []
    # Add a wildcard node if appropriate
    if wildcards and len(nodes) > 1:
        wildcard = {'text': '*', 'id': base_path + '*'}
        has_branch = any(not n.is_leaf for n in nodes)
        wildcard.update(branch_attrs if has_branch else leaf_attrs)
        output.append(wildcard)
    seen = set()
    branches = []
    leaves = []
    # Now let's add the matching children
    for node in nodes:
        if node.name in seen:
            continue
        seen.add(node.name)
        entry = {
            'text': str(node.name),
            'id': base_path + str(node.name),
        }
        if node.is_leaf:
            entry.update(leaf_attrs)
            leaves.append(entry)
        else:
            entry.update(branch_attrs)
            branches.append(entry)
    output.extend(branches)
    output.extend(leaves)
    return output
def doImageRender(graphClass, graphOptions):
    """Instantiate *graphClass* with *graphOptions* and return the raw
    rendered image bytes."""
    buf = BytesIO()
    graphClass(**graphOptions).output(buf)
    image_bytes = buf.getvalue()
    buf.close()
    return image_bytes
from .evaluator import evaluateTarget, pathsFromTarget # noqa
| |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import inspect
import re
import six
import webob
from oslo_config import cfg
from oslo_log import log
from oslo_messaging._drivers import common as rpc_common
import senlin.api.middleware.fault as fault
from senlin.common import exception as senlin_exc
from senlin.tests.unit.common import base
class ClusterNotFoundChild(senlin_exc.ClusterNotFound):
    # Subclass used to verify FaultWrapper maps exceptions whose *parent*
    # class (not the class itself) is known to the fault map.
    pass
class ErrorWithNewline(webob.exc.HTTPBadRequest):
    # HTTP exception whose message contains a newline, to check that the
    # wrapper does not mangle multi-line messages.
    pass
class FaultMiddlewareTest(base.SenlinTestCase):
    """Tests for the error-dict mapping done by fault.FaultWrapper._error."""
    def setUp(self):
        super(FaultMiddlewareTest, self).setUp()
        # Needed so cfg.CONF accepts log-related overrides such as 'debug'.
        log.register_options(cfg.CONF)
    def test_disguised_http_exception_with_newline(self):
        """Newlines in a disguised HTTP exception message are preserved."""
        wrapper = fault.FaultWrapper(None)
        newline_error = ErrorWithNewline('Error with \n newline')
        msg = wrapper._error(senlin_exc.HTTPExceptionDisguise(newline_error))
        expected = {
            'code': 400,
            'error': {
                'code': 400,
                'message': 'Error with \n newline',
                'traceback': None,
                'type': 'ErrorWithNewline'
            },
            'explanation': 'The server could not comply with the request '
                           'since it is either malformed or otherwise '
                           'incorrect.',
            'title': 'Bad Request'
        }
        self.assertEqual(expected, msg)
    def test_openstack_exception_with_kwargs(self):
        """A Senlin exception with kwargs maps to its HTTP error dict."""
        wrapper = fault.FaultWrapper(None)
        msg = wrapper._error(senlin_exc.ClusterNotFound(cluster='a'))
        expected = {
            'code': 404,
            'error': {
                'code': 404,
                'message': 'The cluster (a) could not be found.',
                'traceback': None,
                'type': 'ClusterNotFound'
            },
            'explanation': 'The resource could not be found.',
            'title': 'Not Found'
        }
        self.assertEqual(expected, msg)
    def test_openstack_exception_without_kwargs(self):
        """A Senlin exception without kwargs maps to a 500 error dict."""
        wrapper = fault.FaultWrapper(None)
        msg = wrapper._error(senlin_exc.PolicyNotSpecified())
        expected = {
            'code': 500,
            'error': {
                'code': 500,
                'message': 'Policy not specified.',
                'traceback': None,
                'type': 'PolicyNotSpecified'
            },
            'explanation': 'The server has either erred or is incapable of '
                           'performing the requested operation.',
            'title': 'Internal Server Error'
        }
        self.assertEqual(expected, msg)
    def test_exception_with_non_ascii_chars(self):
        """Non-ASCII characters in the message survive serialization."""
        # We set debug to true to test the code path for serializing traces too
        cfg.CONF.set_override('debug', True)
        msg = u'Error with non-ascii chars \x80'
        class TestException(senlin_exc.SenlinException):
            msg_fmt = msg
        wrapper = fault.FaultWrapper(None)
        msg = wrapper._error(TestException())
        self.assertEqual(500, msg['code'])
        self.assertEqual(500, msg['error']['code'])
        self.assertEqual(u'Error with non-ascii chars \x80',
                         msg['error']['message'])
        self.assertEqual('TestException', msg['error']['type'])
        self.assertEqual('The server has either erred or is incapable of '
                         'performing the requested operation.',
                         msg['explanation'])
        self.assertEqual('Internal Server Error', msg['title'])
    def test_remote_exception(self):
        """A deserialized remote Senlin exception keeps code and traceback."""
        # We want tracebacks
        cfg.CONF.set_override('debug', True)
        error = senlin_exc.ClusterNotFound(cluster='a')
        exc_info = (type(error), error, None)
        serialized = rpc_common.serialize_remote_exception(exc_info)
        remote_error = rpc_common.deserialize_remote_exception(
            serialized, ["senlin.common.exception"])
        wrapper = fault.FaultWrapper(None)
        msg = wrapper._error(remote_error)
        expected_message, expected_traceback = six.text_type(remote_error).\
            split('\n', 1)
        expected = {
            'code': 404,
            'error': {
                'code': 404,
                'message': expected_message,
                'traceback': expected_traceback,
                'type': 'ClusterNotFound'
            },
            'explanation': 'The resource could not be found.',
            'title': 'Not Found'
        }
        self.assertEqual(expected, msg)
    def remote_exception_helper(self, name, error):
        # Helper: serialize/deserialize *error* over RPC without allowing
        # its module, so it degrades to a generic RemoteError (500).
        exc_info = (type(error), error, None)
        serialized = rpc_common.serialize_remote_exception(exc_info)
        remote_error = rpc_common.deserialize_remote_exception(
            serialized, name)
        wrapper = fault.FaultWrapper(None)
        msg = wrapper._error(remote_error)
        expected = {
            'code': 500,
            'error': {
                'code': 500,
                'message': msg['error']['message'],
                'traceback': None,
                'type': 'RemoteError'
            },
            'explanation': msg['explanation'],
            'title': 'Internal Server Error'
        }
        self.assertEqual(expected, msg)
    def test_all_remote_exceptions(self):
        """Every SenlinException subclass survives the remote round-trip."""
        # Walk every exception class; build constructor kwargs from the
        # %(name)s specifiers found in each msg_fmt.
        for name, obj in inspect.getmembers(
                senlin_exc, lambda x: inspect.isclass(x) and issubclass(
                    x, senlin_exc.SenlinException)):
            if '__init__' in obj.__dict__:
                if obj == senlin_exc.SenlinException:
                    continue
                elif obj == senlin_exc.Error:
                    error = obj('Error')
                elif obj == senlin_exc.NodeNotFound:
                    error = obj()
                else:
                    continue
                self.remote_exception_helper(name, error)
                continue
            if hasattr(obj, 'msg_fmt'):
                kwargs = {}
                spec_names = re.findall('%\((\w+)\)([cdeEfFgGinorsxX])',
                                        obj.msg_fmt)
                for key, convtype in spec_names:
                    if convtype == 'r' or convtype == 's':
                        kwargs[key] = '"' + key + '"'
                    else:
                        # this is highly unlikely
                        raise Exception("test needs additional conversion"
                                        " type added due to %s exception"
                                        " using '%c' specifier" % (
                                            obj, convtype))
                error = obj(**kwargs)
                self.remote_exception_helper(name, error)
    def test_should_not_ignore_parent_classes(self):
        """Mapping falls back to the parent class for unknown subclasses."""
        wrapper = fault.FaultWrapper(None)
        msg = wrapper._error(ClusterNotFoundChild(cluster='a'))
        expected = {
            'code': 404,
            'error': {
                'code': 404,
                'message': 'The cluster (a) could not be found.',
                'traceback': None,
                'type': 'ClusterNotFoundChild'
            },
            'explanation': 'The resource could not be found.',
            'title': 'Not Found'
        }
        self.assertEqual(expected, msg)
    def test_internal_server_error_when_exeption_and_parents_not_mapped(self):
        """Completely unmapped exceptions become a generic 500."""
        wrapper = fault.FaultWrapper(None)
        class NotMappedException(Exception):
            pass
        msg = wrapper._error(NotMappedException('A message'))
        expected = {
            'code': 500,
            'error': {
                'code': 500,
                'message': u'A message',
                'traceback': None,
                'type': 'NotMappedException'
            },
            'explanation': ('The server has either erred or is incapable '
                            'of performing the requested operation.'),
            'title': 'Internal Server Error'
        }
        self.assertEqual(expected, msg)
    def test_should_not_ignore_parent_classes_even_for_remote_ones(self):
        """Parent-class fallback also applies to remote exceptions."""
        # We want tracebacks
        cfg.CONF.set_override('debug', True)
        error = ClusterNotFoundChild(cluster='a')
        exc_info = (type(error), error, None)
        serialized = rpc_common.serialize_remote_exception(exc_info)
        remote_error = rpc_common.deserialize_remote_exception(
            serialized, ["senlin.tests.unit.middleware.test_fault_middleware"])
        wrapper = fault.FaultWrapper(None)
        msg = wrapper._error(remote_error)
        expected_message, expected_traceback = six.text_type(remote_error).\
            split('\n', 1)
        expected = {
            'code': 404,
            'error': {
                'code': 404,
                'message': expected_message,
                'traceback': expected_traceback,
                'type': 'ClusterNotFoundChild'
            },
            'explanation': 'The resource could not be found.',
            'title': 'Not Found'
        }
        self.assertEqual(expected, msg)
| |
###############################################################################
##
## Copyright (C) 2014-2016, New York University.
## Copyright (C) 2011-2014, NYU-Poly.
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: contact@vistrails.org
##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the New York University nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
""" The file describes the parameter tree view
QParameterView
"""
from __future__ import division
from PyQt4 import QtCore, QtGui
from vistrails.core.inspector import PipelineInspector
from vistrails.core.modules.module_registry import get_module_registry
from vistrails.gui.common_widgets import QSearchTreeWindow, QSearchTreeWidget
from vistrails.gui.paramexplore.pe_pipeline import QAnnotatedPipelineView
from vistrails.gui.vistrails_palette import QVistrailsPaletteInterface
from vistrails.core.utils import InstanceObject
from vistrails.core.debug import debug
################################################################################
class ParameterInfo(InstanceObject):
    """Plain record describing one module parameter.

    InstanceObject simply stores whatever keyword arguments it receives
    as attributes, so this class only documents the expected keys.
    """
    # Historical (pre-refactor) attribute set:
    # ParameterInfo(type=, identifier=, namespace=, value=, id=, dbtype=,
    #               parent_dbtype=, parent_id=, is_alias=)
    # Current attribute set:
    # ParameterInfo(module_id=,  # id of the owning module in the pipeline
    #               name=,       # function (input port) name
    #               pos=,        # position of the parameter in the function
    #               value=,      # string value of the parameter
    #               spec=,       # port-spec item describing this parameter
    #               is_alias=,   # True when the parameter is a pipeline alias
    #               union=)      # union group name, if part of a union port
    pass
################################################################################
class QParameterView(QtGui.QWidget, QVistrailsPaletteInterface):
    """Palette combining the parameter tree with a pipeline thumbnail.

    Shows the methods/parameters of the current pipeline, a read-only
    annotated pipeline view, and a checkbox that toggles visibility of
    parameters that have no value set yet.
    """

    def __init__(self, controller=None, parent=None):
        """Build the widget hierarchy and hook up the controller."""
        QtGui.QWidget.__init__(self, parent)
        self.set_title('Pipeline Methods')
        self.controller = None

        layout = QtGui.QVBoxLayout()
        layout.setMargin(0)
        layout.setSpacing(5)
        self.setLayout(layout)

        self.toggleUnsetParameters = QtGui.QCheckBox('Show Unset Parameters')
        layout.addWidget(self.toggleUnsetParameters, 0, QtCore.Qt.AlignRight)

        self.parameterWidget = QParameterWidget()
        layout.addWidget(self.parameterWidget)
        self.treeWidget = self.parameterWidget.treeWidget

        self.pipeline_view = QAnnotatedPipelineView()
        self.pipeline_view.setReadOnlyMode(True)
        layout.addWidget(self.pipeline_view)

        # Only the parameter tree (index 1) absorbs extra vertical space.
        layout.setStretch(0, 0)
        layout.setStretch(1, 1)
        layout.setStretch(2, 0)

        self.connect(self.toggleUnsetParameters,
                     QtCore.SIGNAL("toggled(bool)"),
                     self.parameterWidget.treeWidget.toggleUnsetParameters)
        self.set_controller(controller)

    def set_controller(self, controller):
        """Switch to *controller*; a no-op when it is unchanged."""
        if self.controller == controller:
            return
        self.controller = controller
        self.pipeline_view.set_controller(controller)
        pipeline = (controller.current_pipeline
                    if self.controller is not None else None)
        self.set_pipeline(pipeline)

    def set_pipeline(self, pipeline):
        """Display *pipeline* in both the tree and the thumbnail view."""
        if self.controller is None:
            return
        self.pipeline = pipeline
        self.parameterWidget.set_pipeline(pipeline, self.controller)
        scene = self.pipeline_view.scene()
        if pipeline:
            scene.setupScene(pipeline)
        else:
            scene.clear()
        self.pipeline_view.updateAnnotatedIds(pipeline)
        # this ensures the correct pipeline is set when updating exploration
        self.get_palette().set_pipeline(pipeline)

    def get_palette(self):
        """Return the singleton parameter-exploration inspector palette."""
        from vistrails.gui.paramexplore.pe_inspector import \
            QParamExploreInspector
        return QParamExploreInspector.instance()
class QParameterWidget(QSearchTreeWindow):
    """Searchable window listing the aliases and parameters of a pipeline."""

    def createTreeWidget(self):
        """ createTreeWidget() -> QModuleTreeWidget
        Build and return the parameter tree used as this window's view.
        """
        return QParameterTreeWidget(self)

    def set_pipeline(self, pipeline, controller):
        """Remember *pipeline* and repopulate the tree from it."""
        self.pipeline = pipeline
        self.treeWidget.updateFromPipeline(pipeline, controller)
class QParameterTreeWidget(QSearchTreeWidget):
    """
    QParameterTreeWidget is a subclass of QSearchTreeWidget to display all
    Vistrails Module

    The tree has up to three kinds of top-level entries: the pipeline's
    aliases, its vistrail variables, and one entry per module (containing
    one child row per set or unset function).
    """
    def __init__(self, parent=None):
        """ QParameterTreeWidget(parent: QWidget) -> QParameterTreeWidget
        Set up size policy and header
        """
        QSearchTreeWidget.__init__(self, parent)
        self.header().hide()
        self.setRootIsDecorated(False)
        # Custom delegate draws top-level rows in a flat "button" style.
        self.delegate = QParameterTreeWidgetItemDelegate(self, self)
        self.setItemDelegate(self.delegate)
        # Whether rows for parameters without values are currently shown.
        self.showUnsetParameters = False

    def updateFromPipeline(self, pipeline, controller):
        """ updateFromPipeline(pipeline: Pipeline) -> None
        Read the list of aliases and parameters from the pipeline
        and rebuild the whole tree from scratch.
        """
        self.clear()
        if not pipeline:
            return

        # Update the aliases
        if len(pipeline.aliases)>0:
            aliasRoot = QParameterTreeWidgetItem(None, self, ['Aliases'])
            aliasRoot.setFlags(QtCore.Qt.ItemIsEnabled)
            for (alias, info) in pipeline.aliases.iteritems():
                # info = (param type, param id, parent type, parent id,
                #         module id)
                ptype, pId, parentType, parentId, mId = info
                parameter = pipeline.db_get_object(ptype, pId)
                function = pipeline.db_get_object(parentType, parentId)
                v = parameter.strValue
                port_spec = function.get_spec('input')
                port_spec_item = port_spec.port_spec_items[parameter.pos]
                label = ['%s = %s' % (alias, v)]
                pInfo = ParameterInfo(module_id=mId,
                                      name=function.name,
                                      pos=parameter.pos,
                                      value=v,
                                      spec=port_spec_item,
                                      is_alias=True,
                                      union=port_spec.union)
                QParameterTreeWidgetItem((alias, [pInfo]),
                                         aliasRoot, label)
            aliasRoot.setExpanded(True)

        # Hidden until a vistrail-variable module is actually found below.
        vistrailVarsRoot = QParameterTreeWidgetItem(None, self,
                                                    ['Vistrail Variables'])
        vistrailVarsRoot.setHidden(True)

        # Now go through all modules and functions
        inspector = PipelineInspector()
        inspector.inspect_ambiguous_modules(pipeline)
        # Sort modules alphabetically by name for a stable display order.
        sortedModules = sorted(pipeline.modules.iteritems(),
                               key=lambda item: item[1].name)
        reg = get_module_registry()
        for mId, module in sortedModules:
            if module.is_vistrail_var():
                vistrailVarsRoot.setHidden(False)
                vistrailVarsRoot.setExpanded(True)
                port_spec = module.get_port_spec('value', 'input')
                if not port_spec:
                    debug.critical("Not port_spec for value in module %s" % module)
                    continue
                # Skip stale modules whose variable no longer exists.
                if not controller.has_vistrail_variable_with_uuid(
                        module.get_vistrail_var()):
                    continue
                vv = controller.get_vistrail_variable_by_uuid(
                    module.get_vistrail_var())
                label = ['%s = %s' % (vv.name, vv.value)]
                pList = [ParameterInfo(module_id=mId,
                                       name=port_spec.name,
                                       pos=port_spec.port_spec_items[pId].pos,
                                       value="",
                                       spec=port_spec.port_spec_items[pId],
                                       is_alias=False,
                                       union=port_spec.union)
                         for pId in xrange(len(port_spec.port_spec_items))]
                QParameterTreeWidgetItem((vv.name, pList),
                                        vistrailVarsRoot,
                                        label)
                continue
            if module.is_valid:
                port_dict = dict((p.name, p) for p in module.destinationPorts())
            # Maps function name -> function, used below to tell which
            # input ports already have values set.
            function_names = {}

            # Add existing parameters
            mLabel = [module.name]
            moduleItem = None
            if len(module.functions)>0:
                for fId in xrange(len(module.functions)):
                    function = module.functions[fId]
                    function_names[function.name] = function
                    if len(function.params)==0: continue
                    # Create the module's top-level item lazily, tagging it
                    # with the annotated id when the module is ambiguous.
                    if moduleItem==None:
                        if inspector.annotated_modules.has_key(mId):
                            annotatedId = inspector.annotated_modules[mId]
                            moduleItem = QParameterTreeWidgetItem(annotatedId,
                                                                  self, mLabel)
                        else:
                            moduleItem = QParameterTreeWidgetItem(None,
                                                                  self, mLabel)
                    try:
                        if module.is_valid:
                            port_spec = port_dict[function.name]
                        else:
                            # Invalid module: fall back to the function's
                            # own recorded signature.
                            port_spec = function.get_spec('input')
                    except Exception:
                        debug.critical("get_spec failed: %s %s %s" % \
                            (module, function, function.sigstring))
                        continue
                    v = ', '.join([p.strValue for p in function.params])
                    label = ['%s(%s)' % (port_spec.union or function.name, v)]
                    port_spec_items = port_spec.port_spec_items
                    pList = [ParameterInfo(module_id=mId,
                                           name=function.name,
                                           pos=function.params[pId].pos,
                                           value=function.params[pId].strValue,
                                           spec=port_spec_items[pId],
                                           is_alias=False,
                                           union=port_spec.union)
                             for pId in xrange(len(function.params))]
                    mName = module.name
                    if moduleItem.parameter is not None:
                        mName += '(%d)' % moduleItem.parameter
                    fName = '%s :: %s' % (mName, port_spec.union or function.name)
                    QParameterTreeWidgetItem((fName, pList),
                                             moduleItem,
                                             label)

            # Add available parameters
            if module.is_valid:
                for port_spec in port_dict.itervalues():
                    if (port_spec.name in function_names or
                            not port_spec.is_valid or
                            not len(port_spec.port_spec_items) or
                            not reg.is_constant(port_spec)):
                        # The function already exists or is empty
                        # or contains non-constant modules
                        continue
                    if moduleItem==None:
                        if inspector.annotated_modules.has_key(mId):
                            annotatedId = inspector.annotated_modules[mId]
                            moduleItem = QParameterTreeWidgetItem(annotatedId,
                                                                  self,
                                                                  mLabel,
                                                                  False)
                        else:
                            moduleItem = QParameterTreeWidgetItem(None, self,
                                                                  mLabel, False)
                    # Unset ports are labelled with their item types rather
                    # than values.
                    v = ', '.join([p.module for p in port_spec.port_spec_items])
                    label = ['%s(%s)' % (port_spec.union or port_spec.name, v)]
                    pList = [ParameterInfo(module_id=mId,
                                           name=port_spec.name,
                                           pos=port_spec.port_spec_items[pId].pos,
                                           value="",
                                           spec=port_spec.port_spec_items[pId],
                                           is_alias=False,
                                           union=port_spec.union)
                             for pId in xrange(len(port_spec.port_spec_items))]
                    mName = module.name
                    if moduleItem.parameter is not None:
                        mName += '(%d)' % moduleItem.parameter
                    fName = '%s :: %s' % (mName, port_spec.union or port_spec.name)
                    QParameterTreeWidgetItem((fName, pList),
                                             moduleItem,
                                             label, False)
            if moduleItem:
                moduleItem.setExpanded(True)

        # Re-apply the current show/hide state to the freshly built rows.
        self.toggleUnsetParameters(self.showUnsetParameters)

    def toggleUnsetParameters(self, state):
        """Show (state=True) or hide rows representing unset parameters."""
        self.showUnsetParameters = state
        for item in self.findItems("*", QtCore.Qt.MatchWildcard | QtCore.Qt.MatchRecursive):
            if not item.isSet:
                item.setHidden(not state)
class QParameterTreeWidgetItemDelegate(QtGui.QItemDelegate):
    """
    QParameterTreeWidgetItemDelegate will override the original
    QTreeWidget paint function to draw buttons for top-level item
    similar to QtDesigner. This mimics
    Qt/tools/designer/src/lib/shared/sheet_delegate, which is only a
    private class from QtDesigned.
    """
    def __init__(self, view, parent):
        """ QParameterTreeWidgetItemDelegate(view: QTreeView,
                                             parent: QWidget)
                                             -> QParameterTreeWidgetItemDelegate
        Create the item delegate given the tree view
        """
        QtGui.QItemDelegate.__init__(self, parent)
        # Kept so paint() can query the view's style and enabled state.
        self.treeView = view

    def paint(self, painter, option, index):
        """ painter(painter: QPainter, option QStyleOptionViewItem,
                    index: QModelIndex) -> None
        Repaint the top-level item to have a button-look style
        """
        model = index.model()
        # Only top-level items (those with an invalid parent index) get
        # the custom rendering; children use the default delegate paint.
        if not model.parent(index).isValid():
            style = self.treeView.style()
            r = option.rect
            # Text is inset 10px from the left edge of the row.
            textrect = QtCore.QRect(r.left() + 10,
                                    r.top(),
                                    r.width() - 10,
                                    r.height())
            font = painter.font()
            font.setBold(True)
            painter.setFont(font)
            text = option.fontMetrics.elidedText(
                model.data(index, QtCore.Qt.DisplayRole),
                QtCore.Qt.ElideMiddle,
                textrect.width()-10)
            style.drawItemText(painter,
                               textrect,
                               QtCore.Qt.AlignLeft,
                               option.palette,
                               self.treeView.isEnabled(),
                               text)
            # Underline the label to give a flat "button" separator look.
            painter.setPen(QtGui.QPen(QtCore.Qt.black))
            fm = QtGui.QFontMetrics(font)
            size = fm.size(QtCore.Qt.TextSingleLine, text)
            painter.drawLine(textrect.left()-5,
                             textrect.bottom()-1,
                             textrect.left()+size.width()+5,
                             textrect.bottom()-1)

            # Draw the module's annotated id (stored by
            # QParameterTreeWidgetItem at UserRole+1), if present.
            annotatedId = model.data(index, QtCore.Qt.UserRole+1)
            if annotatedId:
                idRect = QtCore.QRect(
                    QtCore.QPoint(textrect.left()+size.width()+5,
                                  textrect.top()),
                    textrect.bottomRight())
                QAnnotatedPipelineView.drawId(painter, idRect,
                                              annotatedId,
                                              QtCore.Qt.AlignLeft |
                                              QtCore.Qt.AlignVCenter)
        else:
            QtGui.QItemDelegate.paint(self, painter, option, index)

    def sizeHint(self, option, index):
        """ sizeHint(option: QStyleOptionViewItem, index: QModelIndex) -> None
        Take into account the size of the top-level button
        """
        # Add a small margin so the underline/button is not clipped.
        return (QtGui.QItemDelegate.sizeHint(self, option, index) +
                QtCore.QSize(2, 2))
class QParameterTreeWidgetItem(QtGui.QTreeWidgetItem):
    """Tree item carrying the parameter information for one row.

    ``info`` is either ``(name, [ParameterInfo])`` describing a set of
    parameters (an alias or a function), or — for top-level module
    items — ``None`` or an int giving the module's annotated id.
    ``isSet`` tells whether the row represents parameters that
    currently have values.
    """

    def __init__(self, info, parent, labelList, isSet=True):
        """Create the item under *parent* with the given column labels."""
        self.parameter = info
        QtGui.QTreeWidgetItem.__init__(self, parent, labelList)
        # Annotated module ids are exposed to the item delegate through
        # the UserRole+1 data slot.
        if isinstance(self.parameter, int):
            self.setData(0, QtCore.Qt.UserRole + 1, self.parameter)
        self.isSet = isSet
| |
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from heat.common import exception
from heat.common import template_format
from heat.engine import parser
from heat.engine import resource
from heat.engine import resources
from heat.engine.resources import instance
from heat.engine import rsrc_defn
from heat.engine import scheduler
from heat.tests.common import HeatTestCase
from heat.tests import utils
ig_template = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "Template to create multiple instances.",
"Parameters" : {},
"Resources" : {
"JobServerGroup" : {
"Type" : "OS::Heat::InstanceGroup",
"Properties" : {
"LaunchConfigurationName" : { "Ref" : "JobServerConfig" },
"Size" : "1",
"AvailabilityZones" : ["nova"]
}
},
"JobServerConfig" : {
"Type" : "AWS::AutoScaling::LaunchConfiguration",
"Metadata": {"foo": "bar"},
"Properties": {
"ImageId" : "foo",
"InstanceType" : "m1.large",
"KeyName" : "test",
"SecurityGroups" : [ "sg-1" ],
"UserData" : "jsconfig data",
"BlockDeviceMappings": [
{
"DeviceName": "vdb",
"Ebs": {"SnapshotId": "9ef5496e-7426-446a-bbc8-01f84d9c9972",
"DeleteOnTermination": "True"}
}]
}
}
}
}
'''
class InstanceGroupTest(HeatTestCase):
    """Tests for the OS::Heat::InstanceGroup resource.

    NOTE(review): these tests are mox-based; the stub/expectation set-up
    order mirrors the exact call order of the code under test, so the
    statements must not be reordered.
    """

    def setUp(self):
        super(InstanceGroupTest, self).setUp()

    def _stub_create(self, num, instance_class=instance.Instance):
        """
        Expect creation of C{num} number of Instances.

        :param instance_class: The resource class to expect to be created
                               instead of instance.Instance.
        """
        self.m.StubOutWithMock(parser.Stack, 'validate')
        parser.Stack.validate()
        self.stub_KeypairConstraint_validate()
        self.stub_ImageConstraint_validate()

        self.m.StubOutWithMock(instance_class, 'handle_create')
        self.m.StubOutWithMock(instance_class, 'check_create_complete')
        cookie = object()
        for x in range(num):
            instance_class.handle_create().AndReturn(cookie)
        # First completion poll reports "not done", all later ones "done".
        instance_class.check_create_complete(cookie).AndReturn(False)
        instance_class.check_create_complete(
            cookie).MultipleTimes().AndReturn(True)

    def create_resource(self, t, stack, resource_name):
        """Validate and create *resource_name*, asserting it completes."""
        # subsequent resources may need to reference previous created resources
        # use the stack's resource objects instead of instantiating new ones
        rsrc = stack[resource_name]
        self.assertIsNone(rsrc.validate())
        scheduler.TaskRunner(rsrc.create)()
        self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
        return rsrc

    def test_instance_group(self):
        """Basic create/delete, attribute and bdm pass-through checks."""
        t = template_format.parse(ig_template)
        stack = utils.parse_stack(t)

        # start with min then delete
        self._stub_create(1)
        self.m.StubOutWithMock(instance.Instance, 'FnGetAtt')
        instance.Instance.FnGetAtt('PublicIp').AndReturn('1.2.3.4')

        self.m.ReplayAll()
        lc_rsrc = self.create_resource(t, stack, 'JobServerConfig')
        # check bdm in configuration
        self.assertIsNotNone(lc_rsrc.properties['BlockDeviceMappings'])
        rsrc = self.create_resource(t, stack, 'JobServerGroup')
        self.assertEqual(utils.PhysName(stack.name, rsrc.name),
                         rsrc.FnGetRefId())
        self.assertEqual('1.2.3.4', rsrc.FnGetAtt('InstanceList'))
        # check bdm in instance_definition
        instance_definition = rsrc._get_instance_definition()
        self.assertIn('BlockDeviceMappings',
                      instance_definition['Properties'])

        nested = rsrc.nested()
        self.assertEqual(nested.id, rsrc.resource_id)

        rsrc.delete()
        self.m.VerifyAll()

    def test_instance_group_custom_resource(self):
        """
        If AWS::EC2::Instance is overridden, InstanceGroup will automatically
        use that overridden resource type.
        """
        # resources may need to be initialised if this is the first test run.
        resources.initialise()

        class MyInstance(instance.Instance):
            """A customized Instance resource."""

        original_instance = resource.get_class("AWS::EC2::Instance")
        resource._register_class("AWS::EC2::Instance", MyInstance)
        self.addCleanup(resource._register_class, "AWS::EC2::Instance",
                        original_instance)

        t = template_format.parse(ig_template)
        stack = utils.parse_stack(t)
        self._stub_create(1, instance_class=MyInstance)
        self.m.ReplayAll()

        self.create_resource(t, stack, 'JobServerConfig')
        rsrc = self.create_resource(t, stack, 'JobServerGroup')
        self.assertEqual(utils.PhysName(stack.name, rsrc.name),
                         rsrc.FnGetRefId())
        rsrc.delete()
        self.m.VerifyAll()

    def test_missing_image(self):
        """A missing image during instance create fails the group create."""
        t = template_format.parse(ig_template)
        stack = utils.parse_stack(t)

        self.stub_ImageConstraint_validate()
        self.stub_KeypairConstraint_validate()
        self.m.ReplayAll()
        self.create_resource(t, stack, 'JobServerConfig')
        rsrc = stack['JobServerGroup']

        self.m.VerifyAll()
        self.m.UnsetStubs()

        # Re-stub so the inner instance create raises ImageNotFound.
        self.m.StubOutWithMock(instance.Instance, 'handle_create')
        not_found = exception.ImageNotFound(image_name='bla')
        instance.Instance.handle_create().AndRaise(not_found)
        self.m.StubOutWithMock(parser.Stack, 'validate')
        parser.Stack.validate()
        self.stub_KeypairConstraint_validate()
        self.stub_ImageConstraint_validate()

        self.m.ReplayAll()

        create = scheduler.TaskRunner(rsrc.create)
        self.assertRaises(exception.ResourceFailure, create)
        self.assertEqual((rsrc.CREATE, rsrc.FAILED), rsrc.state)
        self.m.VerifyAll()

    def test_handle_update_size(self):
        """Growing Size creates the extra instances in place (no replace)."""
        t = template_format.parse(ig_template)
        properties = t['Resources']['JobServerGroup']['Properties']
        properties['Size'] = '2'
        stack = utils.parse_stack(t)

        self._stub_create(2)
        self.m.ReplayAll()
        self.create_resource(t, stack, 'JobServerConfig')
        rsrc = self.create_resource(t, stack, 'JobServerGroup')

        self.m.VerifyAll()
        self.m.UnsetStubs()

        # Increase min size to 5
        self._stub_create(3)
        self.m.StubOutWithMock(instance.Instance, 'FnGetAtt')
        instance.Instance.FnGetAtt('PublicIp').AndReturn('10.0.0.2')
        instance.Instance.FnGetAtt('PublicIp').AndReturn('10.0.0.3')
        instance.Instance.FnGetAtt('PublicIp').AndReturn('10.0.0.4')
        instance.Instance.FnGetAtt('PublicIp').AndReturn('10.0.0.5')
        instance.Instance.FnGetAtt('PublicIp').AndReturn('10.0.0.6')
        self.m.ReplayAll()

        props = copy.copy(rsrc.properties.data)
        props['Size'] = 5
        update_snippet = rsrc_defn.ResourceDefinition(rsrc.name,
                                                      rsrc.type(),
                                                      props)
        tmpl_diff = {'Properties': {'Size': '5'}}
        prop_diff = {'Size': '5'}
        self.assertIsNone(rsrc.handle_update(update_snippet, tmpl_diff,
                                             prop_diff))
        self.assertEqual('10.0.0.2,10.0.0.3,10.0.0.4,10.0.0.5,10.0.0.6',
                         rsrc.FnGetAtt('InstanceList'))

        rsrc.delete()
        self.m.VerifyAll()

    def test_create_error(self):
        """
        If a resource in an instance group fails to be created, the instance
        group itself will fail and the broken inner resource will remain.
        """
        t = template_format.parse(ig_template)
        stack = utils.parse_stack(t)

        self.m.StubOutWithMock(parser.Stack, 'validate')
        parser.Stack.validate()
        self.stub_ImageConstraint_validate()
        self.stub_KeypairConstraint_validate()
        self.m.StubOutWithMock(instance.Instance, 'handle_create')
        instance.Instance.handle_create().AndRaise(Exception)

        self.m.ReplayAll()
        self.create_resource(t, stack, 'JobServerConfig')
        self.assertRaises(
            exception.ResourceFailure,
            self.create_resource, t, stack, 'JobServerGroup')

        rsrc = stack['JobServerGroup']
        self.assertEqual((rsrc.CREATE, rsrc.FAILED), rsrc.state)

        # The failed inner resource remains
        self.assertEqual(1, len(rsrc.nested().resources))
        child_resource = rsrc.nested().resources.values()[0]
        self.assertEqual((child_resource.CREATE, child_resource.FAILED),
                         child_resource.state)

        self.m.VerifyAll()

    def test_update_error(self):
        """
        If a resource in an instance group fails to be created during an
        update, the instance group itself will fail and the broken inner
        resource will remain.
        """
        t = template_format.parse(ig_template)
        stack = utils.parse_stack(t)

        self._stub_create(1)
        self.m.ReplayAll()
        self.create_resource(t, stack, 'JobServerConfig')
        rsrc = self.create_resource(t, stack, 'JobServerGroup')
        self.assertEqual(1, len(rsrc.nested().resources))
        succeeded_instance = rsrc.nested().resources.values()[0]

        self.m.VerifyAll()
        self.m.UnsetStubs()

        # The new (second) instance fails to create during the update.
        self.m.StubOutWithMock(parser.Stack, 'validate')
        parser.Stack.validate()
        self.stub_ImageConstraint_validate()
        self.stub_KeypairConstraint_validate()
        self.m.StubOutWithMock(instance.Instance, 'handle_create')
        instance.Instance.handle_create().AndRaise(Exception)

        self.m.ReplayAll()

        props = copy.copy(rsrc.properties.data)
        props['Size'] = '2'
        update_snippet = rsrc_defn.ResourceDefinition(rsrc.name,
                                                      rsrc.type(),
                                                      props)
        updater = scheduler.TaskRunner(rsrc.update, update_snippet)
        self.assertRaises(exception.ResourceFailure, updater)

        self.assertEqual((rsrc.UPDATE, rsrc.FAILED), rsrc.state)

        # The failed inner resource remains
        self.assertEqual(2, len(rsrc.nested().resources))
        child_resource = [r for r in rsrc.nested().resources.values()
                          if r.name != succeeded_instance.name][0]
        self.assertEqual((child_resource.CREATE, child_resource.FAILED),
                         child_resource.state)

        self.m.VerifyAll()

    def test_update_fail_badprop(self):
        """Changing a non-updatable property triggers UpdateReplace."""
        t = template_format.parse(ig_template)
        properties = t['Resources']['JobServerGroup']['Properties']
        properties['Size'] = '2'
        stack = utils.parse_stack(t)

        self._stub_create(2)
        self.m.ReplayAll()
        self.create_resource(t, stack, 'JobServerConfig')
        rsrc = self.create_resource(t, stack, 'JobServerGroup')

        self.m.ReplayAll()

        props = copy.copy(rsrc.properties.data)
        props['AvailabilityZones'] = ['wibble']
        update_snippet = rsrc_defn.ResourceDefinition(rsrc.name,
                                                      rsrc.type(),
                                                      props)
        updater = scheduler.TaskRunner(rsrc.update, update_snippet)
        self.assertRaises(resource.UpdateReplace, updater)

        rsrc.delete()
        self.m.VerifyAll()

    def test_update_config_metadata(self):
        """Changing launch-config metadata triggers UpdateReplace."""
        t = template_format.parse(ig_template)
        properties = t['Resources']['JobServerGroup']['Properties']
        properties['Size'] = '2'
        stack = utils.parse_stack(t)

        self._stub_create(2)
        self.m.ReplayAll()
        rsrc = self.create_resource(t, stack, 'JobServerConfig')
        self.create_resource(t, stack, 'JobServerGroup')

        props = copy.copy(rsrc.properties.data)
        metadata = copy.copy(rsrc.metadata_get())
        update_snippet = rsrc_defn.ResourceDefinition(rsrc.name,
                                                      rsrc.type(),
                                                      props,
                                                      metadata)
        # Change nothing in the first update
        scheduler.TaskRunner(rsrc.update, update_snippet)()
        self.assertEqual('bar', metadata['foo'])

        metadata['foo'] = 'wibble'
        update_snippet = rsrc_defn.ResourceDefinition(rsrc.name,
                                                      rsrc.type(),
                                                      props,
                                                      metadata)
        # Changing metadata in the second update triggers UpdateReplace
        updater = scheduler.TaskRunner(rsrc.update, update_snippet)
        self.assertRaises(resource.UpdateReplace, updater)

        self.m.VerifyAll()
| |
"""The tests the for Locative device tracker platform."""
import pytest
from homeassistant import data_entry_flow
from homeassistant.components import locative
from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER_DOMAIN
from homeassistant.components.locative import DOMAIN, TRACKER_UPDATE
from homeassistant.config import async_process_ha_core_config
from homeassistant.const import HTTP_OK, HTTP_UNPROCESSABLE_ENTITY
from homeassistant.helpers.dispatcher import DATA_DISPATCHER
from homeassistant.setup import async_setup_component
from tests.async_mock import patch
# pylint: disable=redefined-outer-name
@pytest.fixture(autouse=True)
def mock_dev_track(mock_device_tracker_conf):
    """Mock device tracker config loading for every test in this module."""
    # Requesting the mock_device_tracker_conf fixture is the whole point;
    # no body is needed (the redundant `pass` after the docstring removed).
@pytest.fixture
async def locative_client(loop, hass, hass_client):
    """Locative mock client."""
    assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}})
    await hass.async_block_till_done()

    # Keep legacy device-tracker config writes from touching the disk.
    with patch("homeassistant.components.device_tracker.legacy.update_config"):
        client = await hass_client()
    return client
@pytest.fixture
async def webhook_id(hass, locative_client):
    """Initialize the Geofency component and get the webhook_id."""
    await async_process_ha_core_config(
        hass,
        {"internal_url": "http://example.local:8123"},
    )

    flow = await hass.config_entries.flow.async_init(
        "locative", context={"source": "user"}
    )
    assert flow["type"] == data_entry_flow.RESULT_TYPE_FORM, flow

    entry = await hass.config_entries.flow.async_configure(flow["flow_id"], {})
    assert entry["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    await hass.async_block_till_done()

    return entry["result"].data["webhook_id"]
async def test_missing_data(locative_client, webhook_id):
    """Test missing data."""
    url = f"/api/webhook/{webhook_id}"
    base = {
        "latitude": 1.0,
        "longitude": 1.1,
        "device": "123",
        "id": "Home",
        "trigger": "enter",
    }

    async def post(payload):
        return await locative_client.post(url, data=payload)

    # No data at all
    req = await locative_client.post(url)
    assert req.status == HTTP_UNPROCESSABLE_ENTITY

    # Dropping any required field must be rejected
    for field in ("latitude", "device", "id", "trigger"):
        partial = {key: val for key, val in base.items() if key != field}
        req = await post(partial)
        assert req.status == HTTP_UNPROCESSABLE_ENTITY

    # A "test" trigger message is accepted ...
    req = await post({**base, "trigger": "test"})
    assert req.status == HTTP_OK

    # ... even without a location id
    without_id = {key: val for key, val in base.items() if key != "id"}
    req = await post({**without_id, "trigger": "test"})
    assert req.status == HTTP_OK

    # Unknown trigger values are rejected
    req = await post({**base, "trigger": "foobar"})
    assert req.status == HTTP_UNPROCESSABLE_ENTITY
async def test_enter_and_exit(hass, locative_client, webhook_id):
    """Test when there is a known zone."""
    url = f"/api/webhook/{webhook_id}"
    data = {
        "latitude": 40.7855,
        "longitude": -111.7367,
        "device": "123",
        "id": "Home",
        "trigger": "enter",
    }

    async def post_and_get_state():
        """Send the current payload and return the tracker's state."""
        req = await locative_client.post(url, data=data)
        await hass.async_block_till_done()
        assert req.status == HTTP_OK
        entity_id = "{}.{}".format(DEVICE_TRACKER_DOMAIN, data["device"])
        return hass.states.get(entity_id).state

    # Enter the Home
    assert await post_and_get_state() == "home"

    # Exit Home (zone ids are matched case-insensitively)
    data["id"] = "HOME"
    data["trigger"] = "exit"
    assert await post_and_get_state() == "not_home"

    # Enter Home again
    data["id"] = "hOmE"
    data["trigger"] = "enter"
    assert await post_and_get_state() == "home"

    # Exit Home
    data["trigger"] = "exit"
    assert await post_and_get_state() == "not_home"

    # Enter Work
    data["id"] = "work"
    data["trigger"] = "enter"
    assert await post_and_get_state() == "work"
async def test_exit_after_enter(hass, locative_client, webhook_id):
    """Test when an exit message comes after an enter message."""
    url = f"/api/webhook/{webhook_id}"
    payload = {
        "latitude": 40.7855,
        "longitude": -111.7367,
        "device": "123",
        "id": "Home",
        "trigger": "enter",
    }
    tracker = "{}.{}".format(DEVICE_TRACKER_DOMAIN, payload["device"])

    # Enter Home
    req = await locative_client.post(url, data=payload)
    await hass.async_block_till_done()
    assert req.status == HTTP_OK
    assert hass.states.get(tracker).state == "home"

    # Enter Work
    payload["id"] = "Work"
    req = await locative_client.post(url, data=payload)
    await hass.async_block_till_done()
    assert req.status == HTTP_OK
    assert hass.states.get(tracker).state == "work"

    # A stale exit from Home must not override the newer Work location.
    payload["id"] = "Home"
    payload["trigger"] = "exit"
    req = await locative_client.post(url, data=payload)
    await hass.async_block_till_done()
    assert req.status == HTTP_OK
    assert hass.states.get(tracker).state == "work"
async def test_exit_first(hass, locative_client, webhook_id):
    """Test when an exit message is sent first on a new device."""
    url = f"/api/webhook/{webhook_id}"
    payload = {
        "latitude": 40.7855,
        "longitude": -111.7367,
        "device": "new_device",
        "id": "Home",
        "trigger": "exit",
    }

    # An exit for a never-seen device simply registers it as away.
    req = await locative_client.post(url, data=payload)
    await hass.async_block_till_done()
    assert req.status == HTTP_OK

    entity_id = "{}.{}".format(DEVICE_TRACKER_DOMAIN, payload["device"])
    assert hass.states.get(entity_id).state == "not_home"
async def test_two_devices(hass, locative_client, webhook_id):
    """Test updating two different devices."""
    url = f"/api/webhook/{webhook_id}"

    def tracker_state(device):
        """Return the current state of *device*'s tracker entity."""
        return hass.states.get(
            "{}.{}".format(DEVICE_TRACKER_DOMAIN, device)
        ).state

    first = {
        "latitude": 40.7855,
        "longitude": -111.7367,
        "device": "device_1",
        "id": "Home",
        "trigger": "exit",
    }

    # Device 1 exits Home
    req = await locative_client.post(url, data=first)
    await hass.async_block_till_done()
    assert req.status == HTTP_OK
    assert tracker_state(first["device"]) == "not_home"

    # Device 2 enters Home; device 1 must be unaffected
    second = dict(first, device="device_2", trigger="enter")
    req = await locative_client.post(url, data=second)
    await hass.async_block_till_done()
    assert req.status == HTTP_OK
    assert tracker_state(second["device"]) == "home"
    assert tracker_state(first["device"]) == "not_home"
@pytest.mark.xfail(
    reason="The device_tracker component does not support unloading yet."
)
async def test_load_unload_entry(hass, locative_client, webhook_id):
    """Test that the appropriate dispatch signals are added and removed."""
    webhook_url = f"/api/webhook/{webhook_id}"
    payload = {
        "latitude": 40.7855,
        "longitude": -111.7367,
        "device": "new_device",
        "id": "Home",
        "trigger": "exit",
    }

    # Post an exit so the tracker entity and dispatcher listener are set up.
    response = await locative_client.post(webhook_url, data=payload)
    await hass.async_block_till_done()
    assert response.status == HTTP_OK
    entity_id = "{}.{}".format(DEVICE_TRACKER_DOMAIN, payload["device"])
    assert hass.states.get(entity_id).state == "not_home"
    assert len(hass.data[DATA_DISPATCHER][TRACKER_UPDATE]) == 1

    # Unloading the config entry must remove the dispatcher listener again.
    config_entry = hass.config_entries.async_entries(DOMAIN)[0]
    await locative.async_unload_entry(hass, config_entry)
    await hass.async_block_till_done()
    assert not hass.data[DATA_DISPATCHER][TRACKER_UPDATE]
| |
from gusto import *
from firedrake import (FunctionSpace, as_vector,
VectorFunctionSpace,
PeriodicIntervalMesh,
ExtrudedMesh, Constant,
SpatialCoordinate, exp, pi, cos,
Function, conditional, Mesh, sin,
op2, sqrt)
from firedrake.petsc import PETSc
from argparse import ArgumentParser
import sys
def minimum(f):
    """Global (MPI-wide) reduction over the DOFs of *f*.

    NOTE: the kernel compares against fabs(b[0]), so this returns the
    minimum of |f| over all DOFs, not the signed minimum.  The accumulator
    starts at the sentinel value 1000, which assumes min|f| < 1000.
    """
    fmin = op2.Global(1, [1000], dtype=float)
    op2.par_loop(op2.Kernel("""
void minify(double *a, double *b) {
    a[0] = a[0] > fabs(b[0]) ? fabs(b[0]) : a[0];
}
""", "minify"), f.dof_dset.set, fmin(op2.MIN), f.dat(op2.READ))
    return fmin.data[0]
PETSc.Log.begin()

# Command-line interface.  --help is declared manually (add_help=False) so
# the help text can be routed through PETSc.Sys.Print.
parser = ArgumentParser(description="""Flow over an isolated mountain (hydrostatic).""",
                        add_help=False)
parser.add_argument("--test",
                    action="store_true",
                    help="Enable a quick test run.")
parser.add_argument("--dt",
                    default=5.,
                    type=float,
                    action="store",
                    help="Time step size (seconds)")
parser.add_argument("--res",
                    default=1,
                    type=int,
                    action="store",
                    help="Resolution scaling parameter.")
parser.add_argument("--debug",
                    action="store_true",
                    help="Turn on KSP monitors")
parser.add_argument("--help",
                    action="store_true",
                    help="Show help.")

args, _ = parser.parse_known_args()

if args.help:
    help = parser.format_help()
    PETSc.Sys.Print("%s\n" % help)
    sys.exit(1)

dt = args.dt
if args.test:
    # A single time step is enough for a smoke test.
    tmax = dt
else:
    tmax = 15000.

# Resolution parameters: nlayers is fed to ExtrudedMesh(layers=...), i.e. the
# number of vertical layers; columns is the number of horizontal cells.
res = args.res
nlayers = res*200  # number of vertical layers
columns = res*120  # number of columns
H = 50000.  # Height position of the model top
L = 240000.  # horizontal extent of the periodic domain
deltax = L / columns
deltaz = H / nlayers

PETSc.Sys.Print("""
Problem parameters:\n
Test case: Hydrostatic gravity wave over an isolated mountain.\n
Time-step size: %s,\n
Dx (m): %s,\n
Dz (m): %s,\n
Test run: %s.\n
""" % (dt, deltax, deltaz,
       bool(args.test)))

PETSc.Sys.Print("Initializing problem with dt: %s and tmax: %s.\n" % (dt,
                                                                      tmax))
PETSc.Sys.Print("Creating mesh with %s columns and %s layers...\n" % (columns,
                                                                      nlayers))
m = PeriodicIntervalMesh(columns, L)

# Build the volume mesh by extruding the periodic interval in the vertical.
ext_mesh = ExtrudedMesh(m, layers=nlayers, layer_height=H/nlayers)
# DG2 vector space holding the terrain-following coordinate field.
Vc = VectorFunctionSpace(ext_mesh, "DG", 2)
# NOTE: the original script also interpolated the flat coordinates into Vc
# here; that value was immediately overwritten and never used, so the dead
# assignment has been removed.

# Mountain profile: Agnesi-type hill of height hm, half-width a, centred at xc.
a = 10000.
xc = L/2.
x, z = SpatialCoordinate(ext_mesh)
hm = 1.
zs = hm*a**2/((x-xc)**2 + a**2)

smooth_z = True
dirname = 'h_mountain'
if smooth_z:
    dirname += '_smootherz'
    # Terrain displacement decays to zero above zh with a smooth cos^6 taper.
    zh = 5000.
    xexpr = as_vector([x, conditional(z < zh, z + cos(0.5*pi*z/zh)**6*zs, z)])
else:
    # Linear decay of the displacement from the surface up to the model top.
    xexpr = as_vector([x, z + ((H-z)/H)*zs])

# Remap the mesh coordinates to follow the terrain.
new_coords = Function(Vc).interpolate(xexpr)
mesh = Mesh(new_coords)

# Sponge (damping) profile: zero below zc, ramping up to mubar at the top.
W_DG = FunctionSpace(mesh, "DG", 2)
x, z = SpatialCoordinate(mesh)
zc = H-20000.
mubar = 0.3/dt
mu_top = conditional(z <= zc, 0.0, mubar*sin((pi/2.)*(z-zc)/(H-zc))**2)
mu = Function(W_DG).interpolate(mu_top)
fieldlist = ['u', 'rho', 'theta']
# alpha=0.51: slightly off-centred implicit weighting (0.5 would be a
# centred Crank-Nicolson step).
timestepping = TimesteppingParameters(dt=dt, alpha=0.51)
dirname += '_hybridization'

dumptime = 1000  # dump every 1000s
dumpfreq = int(dumptime / dt)
output = OutputParameters(dirname=dirname,
                          dumpfreq=dumpfreq,
                          dumplist=['u'],
                          perturbation_fields=['theta', 'rho'],
                          log_level='INFO')
parameters = CompressibleParameters(g=9.80665, cp=1004.)
diagnostics = Diagnostics(*fieldlist)
diagnostic_fields = [CourantNumber(),
                     VelocityZ(),
                     HydrostaticImbalance()]

# Model state: degree-1 spaces in both directions, hydrostatic switch on,
# sponge applied through mu.
state = State(mesh,
              vertical_degree=1,
              horizontal_degree=1,
              family="CG",
              sponge_function=mu,
              hydrostatic=True,
              timestepping=timestepping,
              output=output,
              parameters=parameters,
              diagnostics=diagnostics,
              fieldlist=fieldlist,
              diagnostic_fields=diagnostic_fields)

# Initial conditions
u0 = state.fields("u")
rho0 = state.fields("rho")
theta0 = state.fields("theta")

# spaces
Vu = u0.function_space()
Vt = theta0.function_space()
Vr = rho0.function_space()

# Thermodynamic constants required for setting initial conditions
# and reference profiles
g = parameters.g
p_0 = parameters.p_0
c_p = parameters.cp
R_d = parameters.R_d
kappa = parameters.kappa
# Hydrostatic case: Isothermal with T = 250
Tsurf = 250.
N = g/sqrt(c_p*Tsurf)

# N^2 = (g/theta)dtheta/dz => dtheta/dz = theta N^2g => theta=theta_0exp(N^2gz)
thetab = Tsurf*exp(N**2*z/g)
theta_b = Function(Vt).interpolate(thetab)

# Calculate hydrostatic Pi.
# (typo fix: log messages previously read "varaibles")
PETSc.Sys.Print("Computing hydrostatic variables...\n")

# Use vertical hybridization preconditioner for the balance initialization
piparams = {'ksp_type': 'gmres',
            'ksp_monitor_true_residual': None,
            'pc_type': 'python',
            'mat_type': 'matfree',
            'pc_python_type': 'gusto.VerticalHybridizationPC',
            # Vertical trace system is only coupled vertically in columns
            # block ILU is a direct solver!
            'vert_hybridization': {'ksp_type': 'preonly',
                                   'pc_type': 'bjacobi',
                                   'sub_pc_type': 'ilu'}}

Pi = Function(Vr)
rho_b = Function(Vr)

# First balance solve with a trial top-boundary Exner value of 0.5.
compressible_hydrostatic_balance(state,
                                 theta_b,
                                 rho_b,
                                 Pi,
                                 top=True,
                                 pi_boundary=0.5,
                                 params=piparams)
p0 = Constant(minimum(Pi))

# Second balance solve with the default top-boundary Exner value.
compressible_hydrostatic_balance(state,
                                 theta_b,
                                 rho_b,
                                 Pi,
                                 top=True,
                                 params=piparams)
p1 = Constant(minimum(Pi))

# Use the two solves as a linear fit to pick the top boundary value pi_top
# (presumably targeting Exner ~ 1 at the bottom -- see gusto mountain examples).
alpha = Constant(2.*(p1-p0))
beta = p1-alpha
pi_top = (1.-beta)/alpha

# Final solve with the calibrated boundary value, also recovering density.
compressible_hydrostatic_balance(state,
                                 theta_b,
                                 rho_b,
                                 Pi,
                                 top=True,
                                 pi_boundary=pi_top,
                                 solve_for_rho=True,
                                 params=piparams)

theta0.assign(theta_b)
rho0.assign(rho_b)
# Uniform 20 m/s horizontal wind; remove any spurious vertical component.
u0.project(as_vector([20.0, 0.0]))
remove_initial_w(u0, state.Vv)

PETSc.Sys.Print("Finished computing hydrostatic variables...\n")

state.initialise([('u', u0),
                  ('rho', rho0),
                  ('theta', theta0)])
state.set_reference_profiles([('rho', rho_b),
                              ('theta', theta_b)])
# Set up advection schemes
ueqn = EulerPoincare(state, Vu)
rhoeqn = AdvectionEquation(state, Vr, equation_form="continuity")

# theta transport: SUPG-stabilised advective form; the EmbeddedDG variant is
# kept as a switchable alternative.
supg = True
if supg:
    thetaeqn = SUPGAdvection(state, Vt, equation_form="advective")
else:
    thetaeqn = EmbeddedDGAdvection(state, Vt, equation_form="advective",
                                   options=EmbeddedDGOptions())

advected_fields = []
advected_fields.append(("u", ThetaMethod(state, u0, ueqn)))
advected_fields.append(("rho", SSPRK3(state, rho0, rhoeqn)))
advected_fields.append(("theta", SSPRK3(state, theta0, thetaeqn)))

# Set up linear solver (static condensation via firedrake.SCPC).
solver_parameters = {'mat_type': 'matfree',
                     'ksp_type': 'preonly',
                     'pc_type': 'python',
                     'pc_python_type': 'firedrake.SCPC',
                     # Velocity mass operator is singular in the hydrostatic case.
                     # So for reconstruction, we eliminate rho into u
                     'pc_sc_eliminate_fields': '1, 0',
                     'condensed_field': {'ksp_type': 'fgmres',
                                         'ksp_rtol': 1.0e-8,
                                         'ksp_atol': 1.0e-8,
                                         'ksp_max_it': 100,
                                         'pc_type': 'gamg',
                                         'pc_gamg_sym_graph': True,
                                         'mg_levels': {'ksp_type': 'gmres',
                                                       'ksp_max_it': 5,
                                                       'pc_type': 'bjacobi',
                                                       'sub_pc_type': 'ilu'}}}
if args.debug:
    # Show convergence of the condensed system when --debug is given.
    solver_parameters['condensed_field']['ksp_monitor_true_residual'] = None

linear_solver = CompressibleSolver(state,
                                   solver_parameters=solver_parameters,
                                   overwrite_solver_parameters=True)

# Set up forcing
compressible_forcing = CompressibleForcing(state)

# build time stepper
stepper = CrankNicolson(state,
                        advected_fields,
                        linear_solver,
                        compressible_forcing)

PETSc.Sys.Print("Starting simulation...\n")
stepper.run(t=0, tmax=tmax)
| |
#---------------------------------------------------------------------------
# Copyright 2012 The Open Source Electronic Health Record Agent
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#---------------------------------------------------------------------------
import sys
import os
import re
import tempfile
import shutil
import argparse
import glob
from PatchInfoParser import installNameToDirName
from VistATestClient import VistATestClientFactory, createTestClientArgParser
from LoggerManager import logger, initConsoleLogging
from VistAPackageInfoFetcher import VistAPackageInfoFetcher
from VistAGlobalImport import VistAGlobalImport, DEFAULT_GLOBAL_IMPORT_TIMEOUT
from ExternalDownloader import obtainKIDSBuildFileBySha1
from ConvertToExternalData import readSha1SumFromSha1File
from ConvertToExternalData import isValidExternalDataFileName
from ConvertToExternalData import isValidGlobalFileSuffix, isValidGlobalSha1Suffix
from ConvertToExternalData import getSha1HashFromExternalDataFileName
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
# Cache directory for externally downloaded KIDS/global files: the parent of
# this script's directory.
DEFAULT_CACHE_DIR = os.path.normpath(os.path.join(SCRIPT_DIR, "../"))
from VistAMenuUtil import VistAMenuUtil

DEFAULT_INSTALL_DUZ = 17 # VistA user, "USER,SEVENTEEN"
CHECK_INSTALLATION_PROGRESS_TIMEOUT = 1200 # 1200 seconds or 20 minutes
GLOBAL_IMPORT_BYTE_PER_SEC = 0.5*1024*1024 # import speed is 0.5 MiB per sec

""" Default Installer for KIDS Build """
class DefaultKIDSBuildInstaller(object):
  # Drives a VistA terminal session (expect/send dialogue) to load and
  # install a KIDS build through the Kernel Installation & Distribution
  # System menus.
  #---------------------------------------------------------------------------#
  # Class Constants
  #---------------------------------------------------------------------------#
  """ A list of tuple, defined the action list corresponding to KIDS Build
      questions that might need to act.
      each tuple should have three items.
      first item: KIDS Menu option text
      second item: default answer, use \"\" for default
      third item: bool flag to indicate whether to break out of the menu loop
      If more menu options is needed, please either add extra option
      in the subclass if just specific to that KIDS Build, or add it here if
      it is a general question
  """
  # First tuple items are regular expressions passed to connection.expect().
  KIDS_MENU_OPTION_ACTION_LIST = [
      ("Want to continue installing this build\?","YES", False),
      ("Enter the Coordinator for Mail Group", "", False),
      ("Want KIDS to Rebuild Menu Trees Upon Completion of Install\?",
       "", False),
      ("Want KIDS to INHIBIT LOGONs during the install?",
       "NO", False),
      ("Want to DISABLE Scheduled Options, Menu Options, and Protocols\?",
       "NO", False),
      ("Delay Install \(Minutes\): \(0\-60\):", "0", False),
      ("do you want to include disabled components\?", "NO", False),
      # DEVICE prompt ends the question loop; answered later by setupDevice().
      ("DEVICE:", None, True)
  ]
  """ A list of tuple, defined the action list corresponding to KIDS Build
      questions that might need to act.
      each tuple should have three items.
      first item: KIDS Menu option text
      second item: default answer, use \"\" for default
      third item: bool flag to indicate whether to break out of the menu loop
      If more menu options is needed, please either add extra option
      in the subclass if just specific to that KIDS Build, or add it here if
      it is a general question
  """
  KIDS_LOAD_QUESTION_ACTION_LIST = [
      ("OK to continue with Load","YES", False),
      ("Want to Continue with Load\?","YES", False),
      ("Select Installation ","?", True),
      ("Want to continue installing this build\?","YES", False),
      ("Want to RUN the Environment Check Routine\? YES//","YES",False)
  ]
  """ option action list for Exit KIDS menu, similar struct as above """
  EXIT_KIDS_MENU_ACTION_LIST = [
      ("Select Installation ", "", False),
      ("Select Kernel Installation & Distribution System ", "", False),
      ("Select Programmer Options ", "", False),
      ("Select Systems Manager Menu ", "", False),
      ("Do you really want to halt\?", "YES", True)
  ]
  KIDS_FILE_PATH_MAX_LEN = 75 # this might need to be fixed in VistA XPD
  #---------------------------------------------------------------------------#
  # Class Methods
  #---------------------------------------------------------------------------#
  """ Constructor
      @kidsFile: the absolute path to KIDS Build file
      @kidsInstallName: the install name for the KIDS Build
      @seqNo: seqNo of the KIDS Build, default is None
      @logFile: logFile to store the log information for VistA interaction
      @multiBuildList: a python list of install names, only applies to
        a multibuilds KIDS Build
      @duz: the applier's VistA DUZ, default is set to 17, in VistA FOIA
        it is USER SEVENTEEN
      @**kargs: any extra information that might be needed
  """
  def __init__(self, kidsFile, kidsInstallName, seqNo=None, logFile=None,
               multiBuildList = None, duz = DEFAULT_INSTALL_DUZ, **kargs):
    assert os.path.exists(kidsFile), ("kids file does not exist %s" % kidsFile)
    self._origKidsFile = kidsFile
    # VistA XPD cannot accept host-file paths of KIDS_FILE_PATH_MAX_LEN or
    # longer, so copy the build to a shorter temp path when needed.
    if len(kidsFile) >= self.KIDS_FILE_PATH_MAX_LEN:
      destFilename = os.path.basename(kidsFile)
      tempDir = tempfile.gettempdir()
      if isValidExternalDataFileName(kidsFile):
        # if read directly from inplace, need to replace the name with hash
        destFilename = getSha1HashFromExternalDataFileName(kidsFile)
      # walk up the temp dir until the combined path is short enough
      while (len(tempDir)+len(destFilename)+1) >= self.KIDS_FILE_PATH_MAX_LEN:
        tempDir = os.path.split(tempDir)[0]
      dest = os.path.join(tempDir, destFilename)
      shutil.copy(kidsFile, dest)
      self._kidsFile = os.path.normpath(dest)
      logger.info("new kids file is %s" % self._kidsFile)
    else:
      self._kidsFile = os.path.normpath(kidsFile)
    self._kidsInstallName = kidsInstallName
    self._logFile = logFile
    self._duz = duz
    self._updatePackageLink = False
    self._multiBuildList = multiBuildList
    # store all the globals files associated with KIDS
    self._globalFiles = None
    if "globals" in kargs:
      self._globalFiles = kargs['globals']
    # optional output directory for transport-global printouts
    self._tgOutputDir = None
    if "printTG" in kargs:
      self._tgOutputDir = kargs['printTG']
  """ set up the log for VistA connection
      @connection: a connection from a VistATestClient
  """
  def __setupLogFile__(self, connection):
    if self._logFile:
      connection.logfile = open(self._logFile, "ab")
    else:
      connection.logfile = sys.stdout
  """ Go to KIDS Main Menu
      Always start with ready state (wait for promp)
  """
  def __gotoKIDSMainMenu__(self, vistATestClient):
    menuUtil = VistAMenuUtil(self._duz)
    menuUtil.gotoKidsMainMenu(vistATestClient)
  """ load the KIDS build distribution file via menu
      must be called while in KIDS Main Menu
  """
  def __loadKIDSBuild__(self, connection):
    connection.send("Installation\r")
    connection.expect("Select Installation ")
    connection.send("1\r") # load the distribution
    connection.expect("Enter a Host File:")
    connection.send(self._kidsFile+"\r")
  """ Answer all the KIDS install questions
  """
  def __handleKIDSInstallQuestions__(self, connection):
    connection.send("Install\r")
    connection.expect("Select INSTALL NAME:")
    connection.send(self._kidsInstallName+"\r")
    """ handle any questions before general KIDS installation questions"""
    result = self.handleKIDSInstallQuestions(connection)
    if not result:
      return False
    kidsMenuActionLst = self.KIDS_MENU_OPTION_ACTION_LIST
    # answer each menu prompt until the DEVICE prompt (break flag) is hit
    while True:
      index = connection.expect([x[0] for x in kidsMenuActionLst])
      sendCmd = kidsMenuActionLst[index][1]
      if sendCmd != None:
        connection.send("%s\r" % sendCmd)
      if kidsMenuActionLst[index][2]:
        break
    return True
  """ restart the previous installation
  """
  def restartInstallation(self, vistATestClient):
    logger.warn("restart the previous installation for %s" %
                self._kidsInstallName)
    connection = vistATestClient.getConnection()
    self.__gotoKIDSMainMenu__(vistATestClient)
    self.__selectRestartInstallOption__(connection)
    index = connection.expect(["DEVICE: ", "Select INSTALL NAME: "])
    if index == 0:
      # restart accepted; continue with the common install workflow
      self.__installationCommon__(vistATestClient)
      return True
    else:
      logger.error("Restart install %s failed" % self._kidsInstallName)
      """ go back to KIDS main menu first """
      connection.send('\r')
      connection.expect("Select Installation ")
      connection.send('\r')
      """ try to unload a distribution first """
      result = self.unloadDistribution(vistATestClient, False)
      if not result:
        logger.error("Unload Distribution %s failed" % self._kidsInstallName)
      # fall back to a fresh load + install
      return self.normalInstallation(vistATestClient)
  """ go to the restart KIDS build option """
  def __selectRestartInstallOption__(self, connection):
    connection.send("Installation\r")
    connection.expect("Select Installation ")
    connection.send("Restart Install of\r") # restart install of package(s)
    connection.expect("Select INSTALL NAME: ")
    connection.send(self._kidsInstallName+"\r")
  """ go to the unload a distribution option """
  def __selectUnloadDistributionOption__(self, connection):
    #connection.expect("Select Kernel Installation & Distribution System ")
    connection.send("installation\r")
    connection.expect("Select Installation ")
    connection.send("Unload a Distribution\r")
    connection.expect("Select INSTALL NAME: ")
    connection.send(self._kidsInstallName+"\r")
  """ unload a previous loaded distribution """
  def unloadDistribution(self, vistATestClient, waitForPrompt=True):
    connection = vistATestClient.getConnection()
    logger.info("Unload distribution for %s" % self._kidsInstallName)
    if waitForPrompt:
      self.__gotoKIDSMainMenu__(vistATestClient)
    self.__selectUnloadDistributionOption__(connection)
    index = connection.expect([
        "Want to continue with the Unload of this Distribution\? NO// ",
        "Select INSTALL NAME: "])
    if index == 1:
      # nothing was loaded under that name; back out
      connection.send('\r')
      self.__exitKIDSMenu__(vistATestClient)
      return False
    connection.send('YES\r')
    self.__exitKIDSMenu__(vistATestClient)
    return True
  """ Do a fresh load and installation """
  def normalInstallation(self, vistATestClient, reinst=True):
    logger.info("Start installing %s" % self._kidsInstallName)
    connection = vistATestClient.getConnection()
    self.__gotoKIDSMainMenu__(vistATestClient)
    self.__loadKIDSBuild__(connection)
    result = self.__handleKIDSLoadOptions__(connection, reinst)
    if not result:
      logger.error("Error handling KIDS Load Options %s, %s" %
                   (self._kidsInstallName, self._kidsFile))
      return False
    # optionally print transport-global reports before installing
    if self._tgOutputDir:
      if self._multiBuildList is None:
        self.__printTransportGlobal__(vistATestClient,[self._kidsInstallName],self._tgOutputDir)
      else:
        self.__printTransportGlobal__(vistATestClient,self._multiBuildList,self._tgOutputDir)
    result = self.__handleKIDSInstallQuestions__(connection)
    if not result:
      # install questions failed; unload and retry once from scratch
      result = self.unloadDistribution(vistATestClient, False)
      if not result:
        logger.error("Unload %s failed" % self._kidsInstallName)
        return False
      return self.normalInstallation(vistATestClient, reinst)
    self.__installationCommon__(vistATestClient)
    return True
  """ common shared workflow in KIDS installation process """
  def __installationCommon__(self, vistATestClient):
    connection = vistATestClient.getConnection()
    self.setupDevice(connection)
    self.__checkInstallationProgress__(connection)
    self.__exitKIDSMenu__(vistATestClient)
    self.extraFixWork(vistATestClient)
  """ Handle options during load KIDS distribution section """
  def __handleKIDSLoadOptions__(self, connection, reinst):
    loadOptionActionList = self.KIDS_LOAD_QUESTION_ACTION_LIST[:]
    """ make sure install completed is the last one """
    # NOTE: this entry is a 2-tuple; it is only ever handled by the
    # index == last branch below, which never reads item [2].
    loadOptionActionList.append(
        (self._kidsInstallName + " Install Completed", None))
    while True:
      index = connection.expect([x[0] for x in loadOptionActionList], 120)
      if index == len(loadOptionActionList) - 1:
        # "<name> Install Completed" seen: already installed
        if not reinst:
          return False
      else:
        connection.send("%s\r" % (loadOptionActionList[index][1]))
        if loadOptionActionList[index][2]:
          break
    return True
  """ Exit the KIDS Menu option.
      Make sure the VistA connection is in the ready state (wait for prompt)
  """
  def __exitKIDSMenu__(self, vistATestClient):
    exitMenuActionList = self.EXIT_KIDS_MENU_ACTION_LIST[:]
    connection = vistATestClient.getConnection()
    """ add wait for prompt """
    exitMenuActionList.append((vistATestClient.getPrompt(), "\r", True))
    expectList = [x[0] for x in exitMenuActionList]
    while True:
      idx = connection.expect(expectList)
      connection.send("%s\r" % exitMenuActionList[idx][1])
      if exitMenuActionList[idx][2]:
        break
  """ Checking the current status of the KIDS build
  """
  def __checkInstallationProgress__(self, connection):
    # (pattern, optional callback, break-out-of-loop flag)
    KIDS_BUILD_STATUS_ACTION_LIST = [
        ("Running Pre-Install Routine:",self.runPreInstallationRoutine,False),
        ("Running Post-Install Routine:",self.runPostInstallationRoutine,False),
        ("Starting Menu Rebuild:", None , False),
        ("Installing Routines:", None , False),
        ("Installing Data:", None , False),
        ("Menu Rebuild Complete:", None , False),
        ("Installing PACKAGE COMPONENTS:", None ,False),
        ("Send mail to: ", self.handleSendMailToOptions, False),
        ("Select Installation ", self.handleInstallError, True),
        ("Install Completed", self.installCompleted, True)
    ]
    """ Bulid the status update action list """
    statusActionList = []
    installName = self._kidsInstallName
    # multi-build KIDS files report start/finish once per contained build
    if self._multiBuildList:
      for item in self._multiBuildList:
        statusActionList.append(
            (re.escape("Install Started for %s :" %item), None, False))
        statusActionList.append(
            (re.escape("%s Installed." % item), None, False))
    else:
      statusActionList.append(
          (re.escape("Install Started for %s :" % installName),
           None, False))
      statusActionList.append(
          (re.escape("%s Installed." % installName), None, False))
    statusActionList.extend(KIDS_BUILD_STATUS_ACTION_LIST)
    expectList = [x[0] for x in statusActionList]
    while True:
      index = connection.expect(expectList, CHECK_INSTALLATION_PROGRESS_TIMEOUT)
      # strip regex escapes before logging the matched status line
      status = expectList[index].replace("\\","")
      logger.info(status)
      callback = statusActionList[index][1]
      if callback:
        callback(connection, status=status)
      if statusActionList[index][2]:
        break
      else:
        continue
  """ This is the entry point of KIDS installer
      It defines the workflow of KIDS installation process
      @reinst: wether re-install the KIDS build, default is False
      @return, True if no error, otherwise False
  """
  def runInstallation(self, vistATestClient, reinst=False):
    connection = vistATestClient.getConnection()
    self.__setupLogFile__(connection)
    infoFetcher = VistAPackageInfoFetcher(vistATestClient)
    installStatus = infoFetcher.getInstallationStatus(self._kidsInstallName)
    """ select KIDS installation workflow based on install status """
    if infoFetcher.isInstallCompleted(installStatus):
      logger.warn("install %s is already completed!" %
                  self._kidsInstallName)
      if not reinst:
        return True
    # run pre-installation preparation
    self.preInstallationWork(vistATestClient)
    if infoFetcher.isInstallStarted(installStatus):
      return self.restartInstallation(vistATestClient)
    return self.normalInstallation(vistATestClient, reinst)
  # Print routine checksums of the transport global to a host file.
  def __printTGlobalChecksums__(self,testClient,installname,outputDir):
    connection = testClient.getConnection()
    connection.expect("Select Installation")
    connection.send("Verify Checksums\r")
    connection.expect("Select INSTALL NAME")
    connection.send(installname +"\r")
    connection.expect("Want each Routine Listed with Checksums")
    connection.send("YES\r")
    connection.expect("DEVICE")
    connection.send("HFS\r")
    connection.expect("HOST FILE NAME")
    logfile=os.path.join(outputDir,installNameToDirName(installname)+"Checksums.log")
    if testClient.isCache():
      logfile=os.path.normpath(logfile)
    connection.send(logfile+"\r")
    connection.expect("PARAMETERS")
    if testClient.isCache():
      connection.send("\r")
    else:
      connection.send("NEWVERSION:NOREADONLY:VARIABLE\r")
    # "overwrite it" appears when the host file already exists
    index = connection.expect(["Select Installation","overwrite it"],600)
    if index == 0:
      connection.send("?\r")
    else:
      connection.send('\r')
  # Print the transport-global summary report to a host file.
  def __printTGlobalSummary__(self,testClient,installname,outputDir):
    connection = testClient.getConnection()
    connection.expect("Select Installation")
    connection.send("Print Transport Global\r")
    connection.expect("Select INSTALL NAME")
    connection.send(installname +"\r")
    connection.expect("What to Print")
    connection.send('2\r')
    connection.expect("DEVICE")
    connection.send("HFS\r")
    connection.expect("HOST FILE NAME")
    logfile=os.path.join(outputDir,installNameToDirName(installname)+"Print.log")
    if testClient.isCache():
      logfile=os.path.normpath(logfile)
    connection.send(logfile+"\r")
    connection.expect("PARAMETERS")
    if testClient.isCache():
      connection.send("\r")
    else:
      connection.send("NEWVERSION:NOREADONLY:VARIABLE\r")
    index = connection.expect(["Select Installation","overwrite it"],600)
    if index == 0:
      connection.send("?\r")
    else:
      connection.send('\r')
  # Print the transport-global/current-system comparison to a host file.
  def __printTGlobalCompare__(self,testClient,installname,outputDir):
    connection = testClient.getConnection()
    connection.expect("Select Installation")
    connection.send("Compare Transport Global\r")
    connection.expect("Select INSTALL NAME")
    connection.send(installname +"\r")
    connection.expect("Type of Compare")
    connection.send("1\r")
    connection.expect("DEVICE")
    connection.send("HFS\r")
    connection.expect("HOST FILE NAME")
    logfile=os.path.join(outputDir,installNameToDirName(installname)+"Compare.log")
    if testClient.isCache():
      logfile=os.path.normpath(logfile)
    connection.send(logfile+"\r")
    connection.expect("PARAMETERS")
    if testClient.isCache():
      connection.send("\r")
    else:
      connection.send("NEWVERSION:NOREADONLY:VARIABLE\r")
    index = connection.expect(["Select Installation","overwrite it"],600)
    if index == 0:
      connection.send("?\r")
    else:
      connection.send('\r')
  ''' Print out the checksums and the summary of the transport global '''
  def __printTransportGlobal__(self,testClient,installNameList,outputDir):
    for installName in installNameList:
      self.__printTGlobalChecksums__(testClient,installName,outputDir)
      self.__printTGlobalSummary__(testClient,installName,outputDir)
      self.__printTGlobalCompare__(testClient,installName,outputDir)
  #---------------------------------------------------------------------------#
  # Public override methods sections
  #---------------------------------------------------------------------------#
  """ Set up the KIDS installation result output device
      default is to use HOME device
      if you want to use a difference device, please override this method
  """
  def setupDevice(self, connection):
    connection.send("HOME;82;999\r")
  """ intended to be implemented by subclass
      this is to handle any build related questions that
      comes up before the general KIDS questions
      default implementation is to check the error condition
  """
  def handleKIDSInstallQuestions(self, connection, **kargs):
    errorCheckTimeout = 5 # 5 seconds
    try:
      connection.expect("\*\*INSTALL FILE IS CORRUPTED\*\*",errorCheckTimeout)
      logger.error("%s:INSTALL FILE IS CORRUPTED" % self._kidsInstallName)
      connection.expect("Select Installation ", errorCheckTimeout)
      connection.send('\r')
      return False
    except Exception as ex:
      # timeout here means the corruption banner did not appear: all good
      return True
  """ intended to be implemented by subclass
      answer question related to pre install routine
  """
  def runPreInstallationRoutine(self, connection, **kargs):
    pass
  """ intended to be implemented by subclass
      answer question related to post install routine
  """
  def runPostInstallationRoutine(self, connection, **kargs):
    pass
  """ intended to be implemented by subclass """
  def extraFixWork(self, vistATestClient):
    pass
  """ default action for Send Mail To option
      please override or enhance it if more action is needed
  """
  def handleSendMailToOptions(self, connection, **kargs):
    connection.send("\r")
    connection.expect("Select basket to send to: ")
    connection.send("\r")
    connection.expect("Send ")
    connection.send("\r")
  """ default action for install completed
      please override or enhance it if more action is needed
  """
  def installCompleted(self, connection, **kargs):
    extraInfo = connection.before
    logger.debug(extraInfo)
    if re.search("No link to PACKAGE file", extraInfo):
      self._updatePackageLink = True
      logger.warn("You might have to update KIDS build %s to link"
                  " to Package file" %
                  (self._kidsInstallName))
  """ default action for installation error
      please override or enhance it if more action is needed
  """
  def handleInstallError(self, connection, **kargs):
    logger.error("Installation failed for %s" % self._kidsInstallName)
    connection.send("\r")
  """ default action for pre-installation preperation.
      right now it is just to import the globals file under
      the same directory as the KIDs directory
      please override or enhance it if more action is needed
  """
  def preInstallationWork(self, vistATestClient, **kargs):
    """ ignore the multi-build patch for now """
    if self._multiBuildList is not None:
      return
    globalFiles = self.__getGlobalFileList__()
    if globalFiles is None or len(globalFiles) == 0:
      return
    globalImport = VistAGlobalImport()
    for glbFile in globalFiles:
      logger.info("Import global file %s" % (glbFile))
      # scale the timeout with the file size (see GLOBAL_IMPORT_BYTE_PER_SEC)
      fileSize = os.path.getsize(glbFile)
      importTimeout = DEFAULT_GLOBAL_IMPORT_TIMEOUT
      importTimeout += int(fileSize/GLOBAL_IMPORT_BYTE_PER_SEC)
      globalImport.importGlobal(vistATestClient, glbFile, timeout=importTimeout)
  #---------------------------------------------------------------------------#
  # Utilities Functions
  #---------------------------------------------------------------------------#
  """ utility function to find the all global files ends with GLB/s """
  def __getGlobalFileList__(self):
    globalFiles = []
    if self._globalFiles is None or len(self._globalFiles) == 0:
      return globalFiles
    for gFile in self._globalFiles:
      if isValidGlobalFileSuffix(gFile):
        globalFiles.append(gFile)
        continue
      if isValidGlobalSha1Suffix(gFile): # external file
        # resolve the sha1 stub to the real file via the external downloader
        sha1Sum = readSha1SumFromSha1File(gFile)
        (result, path) = obtainKIDSBuildFileBySha1(gFile,
                                                   sha1Sum,
                                                   DEFAULT_CACHE_DIR)
        if not result:
          logger.error("Could not obtain global file for %s" % gFile)
          raise Exception("Error getting global file for %s" % gFile)
        globalFiles.append(path)
    if len(globalFiles) > 0:
      logger.info("global file lists %s" % globalFiles)
    return globalFiles
""" utility function to find the name associated the DUZ """
def getPersonNameByDuz(inputDuz, vistAClient):
  """Return the NEW PERSON name for *inputDuz* via the Kernel $$NAME^XUSER API.

  @inputDuz: the user's DUZ (internal entry number)
  @vistAClient: a connected VistATestClient
  @return: the user's name as a string
  """
  logger.info ("inputDuz is %s" % inputDuz)
  """ user Kernel User API """
  connection = vistAClient.getConnection()
  menuUtil = VistAMenuUtil(duz=1)
  # Enter Programmer Options and back out ('^') to land at a programmer
  # prompt where direct M code can be executed.
  menuUtil.gotoSystemMenu(vistAClient)
  connection.send('Prog\r')
  connection.expect('Select Programmer Options')
  connection.send('^\r')
  menuUtil.exitSystemMenu(vistAClient)
  vistAClient.waitForPrompt()
  connection.send('W $$NAME^XUSER(%s)\r' % inputDuz)
  connection.expect('\)') # get rid of the echo
  vistAClient.waitForPrompt()
  # everything before the prompt (minus whitespace/newlines) is the name
  result = connection.before.strip(' \r\n')
  connection.send('\r')
  return result
""" function to add an entry to PACAKGE HISTORY """
def addPackagePatchHistory(packageName, version, seqNo,
                           patchNo, vistAClient, inputDuz):
  """Append an entry to the PATCH APPLICATION HISTORY multiple of the
  PACKAGE file (#9.4) via a FileMan enter/edit dialogue.

  @packageName: name of the package in the PACKAGE file
  @version: package version whose history is edited
  @seqNo: sequence number of the patch
  @patchNo: patch number to record
  @vistAClient: a connected VistATestClient
  @inputDuz: DUZ used to resolve the "APPLIED BY" user name
  """
  logger.info("Adding %s, %s, %s, %s to Package Patch history" %
              (packageName, version, seqNo, patchNo))
  appliedUser = getPersonNameByDuz(inputDuz, vistAClient)
  connection = vistAClient.getConnection()
  menuUtil = VistAMenuUtil(duz=1)
  menuUtil.gotoFileManEditEnterEntryMenu(vistAClient)
  connection.send("9.4\r") # package file
  connection.expect("EDIT WHICH FIELD: ")
  connection.send("VERSION\r")
  connection.expect("EDIT WHICH VERSION SUB-FIELD: ")
  connection.send("PATCH APPLICATION HISTORY\r")
  connection.expect("EDIT WHICH PATCH APPLICATION HISTORY SUB-FIELD: ")
  connection.send("ALL\r")
  connection.expect("THEN EDIT VERSION SUB-FIELD: ")
  connection.send("\r")
  connection.expect("THEN EDIT FIELD: ")
  connection.send("\r")
  connection.expect("Select PACKAGE NAME: ")
  connection.send("%s\r" % packageName)
  connection.expect("Select VERSION: %s//" % version)
  connection.send("\r")
  connection.expect("Select PATCH APPLICATION HISTORY: ")
  connection.send("%s SEQ #%s\r" % (patchNo, seqNo))
  connection.expect("Are you adding .*\? No//")
  connection.send("YES\r")
  connection.expect("DATE APPLIED: ")
  connection.send("T\r") # T = today in FileMan date syntax
  connection.expect("APPLIED BY: ")
  connection.send("%s\r" % appliedUser)
  connection.expect("DESCRIPTION:")
  connection.send("\r")
  connection.expect("Select PATCH APPLICATION HISTORY: ")
  connection.send("\r")
  connection.expect("Select PACKAGE NAME: ")
  connection.send("\r")
  menuUtil.exitFileManMenu(vistAClient)
""" class KIDSInstallerFactory
create KIDS installer via Factory methods
"""
class KIDSInstallerFactory(object):
  """Factory mapping a KIDS install name to a custom installer class,
  falling back to DefaultKIDSBuildInstaller when none is registered."""

  # Registry of install-name -> installer class overrides.
  installerDict = {}

  @staticmethod
  def createKIDSInstaller(kidsFile, kidsInstallName,
                          seqNo=None, logFile=None,
                          multiBuildList=None, duz=DEFAULT_INSTALL_DUZ,
                          **kargs):
    """Instantiate the installer registered for kidsInstallName (or the
    default one) with the given construction arguments."""
    installerClass = KIDSInstallerFactory.installerDict.get(
        kidsInstallName, DefaultKIDSBuildInstaller)
    return installerClass(kidsFile, kidsInstallName,
                          seqNo, logFile,
                          multiBuildList, duz,
                          **kargs)

  @staticmethod
  def registerKidsInstaller(kidsInstallName, kidsInstaller):
    """Register a custom installer class for the given install name."""
    KIDSInstallerFactory.installerDict[kidsInstallName] = kidsInstaller
""" Test code """
def createTestClient():
testClientParser = createTestClientArgParser()
parser = argparse.ArgumentParser(description='Default KIDS Installer',
parents=[testClientParser])
result = parser.parse_args();
print (result)
testClient = VistATestClientFactory.createVistATestClientWithArgs(result)
return testClient
def testAddPackagePatchHistory():
    """Smoke test: record patch 334 SEQ #288 for LAB SERVICE 5.2 as DUZ 17."""
    client = createTestClient()
    with client:
        addPackagePatchHistory("LAB SERVICE", "5.2", "288", "334",
                               client, 17)
""" Test Function getPersonNameByDuz """
def testGetPersonNameByDuz():
    """Smoke test: look up the name of the user with DUZ 1."""
    client = createTestClient()
    initConsoleLogging()
    with client:
        name = getPersonNameByDuz(1, client)
        print("Name is [%s]" % name)
""" main entry """
def main():
testClientParser = createTestClientArgParser()
parser = argparse.ArgumentParser(description='Default KIDS Installer',
parents=[testClientParser])
parser.add_argument('kidsFile', help='path to KIDS Build file')
parser.add_argument('-l', '--logFile', default=None, help='path to logFile')
parser.add_argument('-r', '--reinstall', default=False, action='store_true',
help='whether re-install the KIDS even it is already installed')
parser.add_argument('-t', '--tglobalprint', default=None,
help='folder to hold a printout of Transport global information')
parser.add_argument('-g', '--globalFiles', default=None, nargs='*',
help='list of global files that need to import')
parser.add_argument('-d', '--duz', default=DEFAULT_INSTALL_DUZ, type=int,
help='installer\'s VistA instance\'s DUZ')
result = parser.parse_args();
print (result)
testClient = VistATestClientFactory.createVistATestClientWithArgs(result)
assert testClient
initConsoleLogging()
with testClient:
kidsFile = os.path.abspath(result.kidsFile)
from KIDSBuildParser import KIDSBuildParser
kidsParser = KIDSBuildParser(None)
kidsParser.unregisterSectionHandler(KIDSBuildParser.ROUTINE_SECTION)
kidsParser.parseKIDSBuild(kidsFile)
installNameList = kidsParser.installNameList
installName = installNameList[0]
multiBuildList = installNameList
if len(installNameList) == 1:
multiBuildList = None
defaultKidsInstall = DefaultKIDSBuildInstaller(kidsFile,
installName,
logFile=result.logFile,
multiBuildList=multiBuildList,
duz = result.duz,
globals=result.globalFiles,
printTG=result.tglobalprint)
defaultKidsInstall.runInstallation(testClient, result.reinstall)
if __name__ == "__main__":
main()
| |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A utility function for importing TensorFlow graphs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import contextlib
import copy
from tensorflow.core.framework import attr_value_pb2
from tensorflow.core.framework import graph_pb2
from tensorflow.core.framework import types_pb2
from tensorflow.python import pywrap_tensorflow as c_api
from tensorflow.python.framework import c_api_util
from tensorflow.python.framework import device as pydev
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import function
from tensorflow.python.framework import op_def_registry
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.util import compat
from tensorflow.python.util.deprecation import deprecated_args
from tensorflow.python.util.tf_export import tf_export
# TODO(josh11b): SWIG the code from node_def_util instead of duplicating
# the logic here.
def _GetNodeAttr(node_def, attr_name):
if attr_name not in node_def.attr:
raise ValueError('Expected one attr with name %r in %s.' % (attr_name,
str(node_def)))
return node_def.attr[attr_name]
def _ArgToTypesNoRef(node_def, arg_def):
  """Return the list of dtype enums that `arg_def` yields for `node_def`."""
  if arg_def.number_attr:
    # N tensors of one dtype; N comes from the node's number attr.
    repeat_count = _GetNodeAttr(node_def, arg_def.number_attr).i
    if arg_def.type_attr:
      one_dtype = _GetNodeAttr(node_def, arg_def.type_attr).type
    else:
      assert arg_def.type != types_pb2.DT_INVALID
      one_dtype = arg_def.type
    return [one_dtype] * repeat_count
  if arg_def.type_attr:
    # Single tensor whose dtype is carried by a node attr.
    return [_GetNodeAttr(node_def, arg_def.type_attr).type]
  if arg_def.type_list_attr:
    # Heterogeneous list of dtypes carried by a node attr.
    return _GetNodeAttr(node_def, arg_def.type_list_attr).list.type
  # Fixed dtype declared directly on the op definition.
  assert arg_def.type != types_pb2.DT_INVALID
  return [arg_def.type]
def _SingleArgToTypes(node_def, arg_def):
  """Like _ArgToTypesNoRef, but maps dtypes to ref variants if `is_ref`."""
  base_types = _ArgToTypesNoRef(node_def, arg_def)
  if not arg_def.is_ref:
    return base_types
  # pylint: disable=protected-access
  return [dtypes.as_dtype(dt)._as_ref.as_datatype_enum for dt in base_types]
  # pylint: enable=protected-access
def _ArgsToTypes(node_def, arg_list):
  """Concatenate the dtype lists of every arg_def in `arg_list`."""
  all_types = []
  for one_arg in arg_list:
    all_types += _SingleArgToTypes(node_def, one_arg)
  return all_types
def _InputTypes(node_def, op_dict):
  """Flat list of input dtype enums for `node_def` per its OpDef."""
  return _ArgsToTypes(node_def, op_dict[node_def.op].input_arg)
def _OutputTypes(node_def, op_dict):
  """Flat list of output dtype enums for `node_def` per its OpDef."""
  return _ArgsToTypes(node_def, op_dict[node_def.op].output_arg)
def _IsControlInput(input_name):
# Expected format: '^operation_name' (control input).
return input_name.startswith('^')
def _ParseTensorName(tensor_name):
"""Parses a tensor name into an operation name and output index.
This function will canonicalize tensor names as follows:
* "foo:0" -> ("foo", 0)
* "foo:7" -> ("foo", 7)
* "foo" -> ("foo", 0)
* "foo:bar:baz" -> ValueError
Args:
tensor_name: The name of a tensor.
Returns:
A tuple containing the operation name, and the output index.
Raises:
ValueError: If `tensor_name' cannot be interpreted as the name of a tensor.
"""
components = tensor_name.split(':')
if len(components) == 2:
# Expected format: 'operation_name:output_index'.
try:
output_index = int(components[1])
except ValueError:
raise ValueError('Cannot convert %r to a tensor name.' % (tensor_name,))
return components[0], output_index
elif len(components) == 1:
# Expected format: 'operation_name' (implicit 0th output).
return components[0], 0
else:
raise ValueError('Cannot convert %r to a tensor name.' % (tensor_name,))
def _CanonicalInputName(input_name):
  """Return `input_name` as 'op_name:index'; control inputs pass through."""
  name_str = compat.as_str(input_name)
  if _IsControlInput(name_str):
    return name_str
  op_name, out_index = _ParseTensorName(name_str)
  return '%s:%d' % (op_name, out_index)
def _InvalidNodeMessage(node, message):
return 'graph_def is invalid at node %r: %s.' % (node.name, message)
@contextlib.contextmanager
def _MaybeDevice(device):
"""Applies the given device only if device is not None or empty."""
if device:
with ops.device(device):
yield
else:
yield
def _ProcessGraphDefParam(graph_def):
  """Type-checks and possibly canonicalizes `graph_def`."""
  if isinstance(graph_def, graph_pb2.GraphDef):
    return graph_def
  # `graph_def` could be a dynamically-created message, so try a duck-typed
  # approach: copy it into a real GraphDef.
  try:
    canonical = graph_pb2.GraphDef()
    canonical.MergeFrom(graph_def)
  except TypeError:
    raise TypeError('graph_def must be a GraphDef proto.')
  return canonical
def _ProcessInputMapParam(input_map):
"""Type-checks and possibly canonicalizes `input_map`."""
if input_map is None:
input_map = {}
else:
if not (isinstance(input_map, dict) and all(
isinstance(k, compat.bytes_or_text_types) for k in input_map.keys())):
raise TypeError('input_map must be a dictionary mapping strings to '
'Tensor objects.')
return input_map
def _ProcessReturnElementsParam(return_elements):
"""Type-checks and possibly canonicalizes `return_elements`."""
if return_elements is None:
return None
if not all(
isinstance(x, compat.bytes_or_text_types) for x in return_elements):
raise TypeError('return_elements must be a list of strings.')
return tuple(compat.as_str(x) for x in return_elements)
def _FindAttrInOpDef(attr_name, op_def):
for attr_def in op_def.attr:
if attr_name == attr_def.name:
return attr_def
return None
def _RemoveDefaultAttrs(op_dict, producer_op_list, graph_def):
  """Removes unknown default attrs according to `producer_op_list`.

  Any attr in `graph_def` that does not appear in the consumer's OpDefs
  (`op_dict`) is deleted, provided the producer declared it with a default
  value and the node still carries exactly that default.

  Args:
    op_dict: dict mapping operation name to OpDef.
    producer_op_list: OpList proto.
    graph_def: GraphDef proto
  """
  producer_ops = {op.name: op for op in producer_op_list.op}
  for node in graph_def.node:
    producer_op_def = producer_ops.get(node.op)
    if producer_op_def is None:
      continue
    op_def = op_dict[node.op]
    # Snapshot the keys first: entries may be deleted while we scan.
    for key in list(node.attr):
      if _FindAttrInOpDef(key, op_def) is not None:
        continue
      # Unknown to the consumer -- look the attr up in the producer.
      attr_def = _FindAttrInOpDef(key, producer_op_def)
      if (attr_def and attr_def.HasField('default_value') and
          node.attr[key] == attr_def.default_value):
        # Unknown attr had default value in producer, delete it so it can be
        # understood by consumer.
        del node.attr[key]
def _ConvertInputMapValues(name, input_map):
  """Ensures all input map values are tensors.

  This should be called from inside the import name scope.

  Args:
    name: the `name` argument passed to import_graph_def
    input_map: the `input_map` argument passed to import_graph_def.

  Returns:
    An possibly-updated version of `input_map`.

  Raises:
    ValueError: if input map values cannot be converted due to empty name scope.
  """
  if all(isinstance(v, ops.Tensor) for v in input_map.values()):
    return input_map
  if name == '':  # pylint: disable=g-explicit-bool-comparison
    raise ValueError(
        'tf.import_graph_def() requires a non-empty `name` if `input_map` '
        'contains non-Tensor values. Try calling tf.convert_to_tensor() on '
        '`input_map` values before calling tf.import_graph_def().')
  with ops.name_scope('_inputs'):
    return {k: ops.convert_to_tensor(v) for k, v in input_map.items()}
def _PopulateTFImportGraphDefOptions(options, prefix, input_map,
                                     return_elements):
  """Populates the TF_ImportGraphDefOptions `options`."""
  c_api.TF_ImportGraphDefOptionsSetPrefix(options, prefix)
  c_api.TF_ImportGraphDefOptionsSetUniquifyNames(options, True)
  c_api.TF_ImportGraphDefOptionsSetUniquifyPrefix(options, True)

  # pylint: disable=protected-access
  for source, dest in input_map.items():
    source = compat.as_str(source)
    if source.startswith('^'):
      # Control dependency: remap by operation, not by tensor output.
      c_api.TF_ImportGraphDefOptionsRemapControlDependency(
          options, compat.as_bytes(source[1:]), dest._as_tf_output().oper)
    else:
      op_name, output_idx = _ParseTensorName(source)
      c_api.TF_ImportGraphDefOptionsAddInputMapping(
          options, compat.as_str(op_name), output_idx, dest._as_tf_output())
  # pylint: enable=protected-access

  for element in return_elements or []:
    if ':' in element:
      # Tensor name: request the corresponding output.
      op_name, output_idx = _ParseTensorName(element)
      c_api.TF_ImportGraphDefOptionsAddReturnOutput(
          options, compat.as_str(op_name), output_idx)
    else:
      # Operation name.
      c_api.TF_ImportGraphDefOptionsAddReturnOperation(options,
                                                       compat.as_str(element))
def _ProcessNewOps(graph):
  """Processes the newly-added TF_Operations in `graph`."""
  # pylint: disable=protected-access
  # Map each new op to the names of the ops it is colocated with (when the
  # colocation attribute is present).
  colocation_pairs = {}

  for new_op in graph._add_new_tf_operations(compute_devices=False):
    colocation_names = _GetColocationNames(new_op)
    if colocation_names:
      colocation_pairs[new_op] = colocation_names
      # Colocation constraints override device functions, so don't apply
      # them here.  The loop below may still set this op's device.
    else:
      with _MaybeDevice(new_op.device):
        graph._apply_device_functions(new_op)

  # Propagate the device field onto colocated ops for completeness (it is
  # already implied by the colocation attribute).
  for op, coloc_op_list in colocation_pairs.items():
    chosen_device = None
    # Take the first colocated op that has a device.  If several have
    # devices we assume they agree; otherwise a runtime error will occur
    # since the colocation property cannot be guaranteed.
    #
    # One possible improvement is to check compatibility of all devices in
    # this list at import time here, which would require implementing a
    # compatibility function for device specs in python.
    for coloc_op_name in coloc_op_list:
      try:
        coloc_op = graph._get_operation_by_name_unsafe(coloc_op_name)
      except KeyError:
        raise ValueError('Specified colocation to an op that '
                         'does not exist during import: %s in %s' %
                         (coloc_op_name, op.name))
      if coloc_op.device:
        chosen_device = pydev.DeviceSpec.from_string(coloc_op.device)
        break
    if chosen_device:
      op._set_device(chosen_device)
  # pylint: enable=protected-access
def _GetColocationNames(op):
"""Returns names of the ops that `op` should be colocated with."""
colocation_names = []
try:
class_values = op.get_attr('_class')
except ValueError:
# No _class attr
return
for val in class_values:
val = compat.as_str(val)
if val.startswith('loc:@'):
colocation_node_name = val[len('loc:@'):]
if colocation_node_name != op.name:
colocation_names.append(colocation_node_name)
return colocation_names
def _GatherReturnElements(requested_return_elements, graph, results):
  """Returns the requested return elements from results.

  Args:
    requested_return_elements: list of strings of operation and tensor names
    graph: Graph
    results: wrapped TF_ImportGraphDefResults

  Returns:
    list of `Operation` and/or `Tensor` objects
  """
  tf_outputs = c_api.TF_ImportGraphDefResultsReturnOutputs(results)
  tf_opers = c_api.TF_ImportGraphDefResultsReturnOperations(results)

  # pylint: disable=protected-access
  gathered = []
  next_output = 0
  next_oper = 0
  for name in requested_return_elements:
    if ':' in name:
      # Tensor name: consume the next returned output.
      gathered.append(graph._get_tensor_by_tf_output(tf_outputs[next_output]))
      next_output += 1
    else:
      # Operation name: consume the next returned operation.
      gathered.append(graph._get_operation_by_tf_operation(tf_opers[next_oper]))
      next_oper += 1
  # pylint: enable=protected-access
  return gathered
@tf_export('import_graph_def')
@deprecated_args(None, 'Please file an issue at '
                 'https://github.com/tensorflow/tensorflow/issues if you depend'
                 ' on this feature.', 'op_dict')
def import_graph_def(graph_def,
                     input_map=None,
                     return_elements=None,
                     name=None,
                     op_dict=None,
                     producer_op_list=None):
  """Imports the graph from `graph_def` into the current default `Graph`.

  This function provides a way to import a serialized TensorFlow
  [`GraphDef`](https://www.tensorflow.org/code/tensorflow/core/framework/graph.proto)
  protocol buffer, and extract individual objects in the `GraphDef` as
  @{tf.Tensor} and @{tf.Operation} objects. Once extracted,
  these objects are placed into the current default `Graph`. See
  @{tf.Graph.as_graph_def} for a way to create a `GraphDef`
  proto.

  Args:
    graph_def: A `GraphDef` proto containing operations to be imported into
      the default graph.
    input_map: A dictionary mapping input names (as strings) in `graph_def`
      to `Tensor` objects. The values of the named input tensors in the
      imported graph will be re-mapped to the respective `Tensor` values.
    return_elements: A list of strings containing operation names in
      `graph_def` that will be returned as `Operation` objects; and/or
      tensor names in `graph_def` that will be returned as `Tensor` objects.
    name: (Optional.) A prefix that will be prepended to the names in
      `graph_def`. Note that this does not apply to imported function names.
      Defaults to `"import"`.
    op_dict: (Optional.) Deprecated, do not use.
    producer_op_list: (Optional.) An `OpList` proto with the (possibly stripped)
      list of `OpDef`s used by the producer of the graph. If provided,
      unrecognized attrs for ops in `graph_def` that have their default value
      according to `producer_op_list` will be removed. This will allow some more
      `GraphDef`s produced by later binaries to be accepted by earlier binaries.

  Returns:
    A list of `Operation` and/or `Tensor` objects from the imported graph,
    corresponding to the names in `return_elements`.

  Raises:
    TypeError: If `graph_def` is not a `GraphDef` proto,
      `input_map` is not a dictionary mapping strings to `Tensor` objects,
      or `return_elements` is not a list of strings.
    ValueError: If `input_map`, or `return_elements` contains names that
      do not appear in `graph_def`, or `graph_def` is not well-formed (e.g.
      it refers to an unknown tensor).
  """
  graph_def = _ProcessGraphDefParam(graph_def)
  input_map = _ProcessInputMapParam(input_map)
  return_elements = _ProcessReturnElementsParam(return_elements)

  # The deprecated `op_dict` argument is ignored: the registry is always used.
  op_dict = op_def_registry.get_registered_ops()

  if producer_op_list is not None:
    # TODO(skyewm): make a copy of graph_def so we're not mutating the argument?
    _RemoveDefaultAttrs(op_dict, producer_op_list, graph_def)

  graph = ops.get_default_graph()

  if graph._c_graph:  # pylint: disable=protected-access
    # C-API-backed graph: delegate the import to TF_GraphImportGraphDef.
    with ops.name_scope(name, 'import', input_map.values()) as scope:
      # Save unique prefix generated by name_scope
      if scope:
        assert scope.endswith('/')
        prefix = scope[:-1]
      else:
        prefix = ''

      # Generate any input map tensors inside name scope
      input_map = _ConvertInputMapValues(name, input_map)

    scoped_options = c_api_util.ScopedTFImportGraphDefOptions()
    options = scoped_options.options
    _PopulateTFImportGraphDefOptions(options, prefix, input_map,
                                     return_elements)

    with c_api_util.tf_buffer(graph_def.SerializeToString()) as serialized:
      try:
        with errors.raise_exception_on_not_ok_status() as status:
          results = c_api.TF_GraphImportGraphDefWithResults(
              graph._c_graph, serialized, options, status)  # pylint: disable=protected-access
      except errors.InvalidArgumentError as e:
        # Convert to ValueError for backwards compatibility.
        raise ValueError(str(e))

    _ProcessNewOps(graph)

    # Create _DefinedFunctions for any imported functions.
    #
    # We do this by creating _DefinedFunctions directly from `graph_def`, and
    # adding them to `graph`. Adding an existing function to a TF_Graph is a
    # no-op, so this only has the effect of updating the Python state (usually
    # _DefinedFunction.add_to_graph also adds the function to the TF_Graph).
    #
    # TODO(skyewm): fetch the TF_Functions directly from the TF_Graph
    # TODO(skyewm): avoid sending serialized FunctionDefs back to the TF_Graph
    if graph_def.library and graph_def.library.function:
      # pylint: disable=protected-access
      functions = function._from_library(graph_def.library)
      for f in functions:
        f.add_to_graph(graph)
      # pylint: enable=protected-access

    # Treat input mappings that don't appear in the graph as an error, because
    # they are likely to be due to a typo.
    missing_unused_input_keys = (
        c_api.TF_ImportGraphDefResultsMissingUnusedInputMappings_wrapper(
            results))
    if missing_unused_input_keys:
      missing_unused_input_keys = [
          compat.as_str(s) for s in missing_unused_input_keys
      ]
      raise ValueError(
          'Attempted to map inputs that were not found in graph_def: [%s]' %
          ', '.join(missing_unused_input_keys))

    if return_elements is None:
      return None
    else:
      return _GatherReturnElements(return_elements, graph, results)

  else:
    # Legacy pure-Python import path: build each op and wire inputs by hand.
    g = graph

    # Use a canonical representation for all tensor names.
    input_map = {_CanonicalInputName(k): v for k, v in input_map.items()}
    used_input_keys = set()
    name_to_op = {}

    # Add any functions defined in `graph_def` to `g`
    if graph_def.library and graph_def.library.function:
      # Copy op_dict so we don't clobber the original
      op_dict = copy.copy(op_dict)
      # pylint: disable=protected-access
      # Note that we do not prepend `name` to the function name. The reasoning
      # is that function names are similar to op definition names, which
      # currently do not have a scoped name or namespace scheme.
      functions = function._from_library(graph_def.library)
      for f in functions:
        f.add_to_graph(g)
        op_dict[f.name] = f.definition.signature
      # pylint: enable=protected-access

    # LINT.IfChange
    with ops.name_scope(name, 'import', input_map.values()) as scope:
      # TODO(ashankar): Should this just copy over or should it do some
      # more nuanced merging? For example, the graph may already have some
      # marked "bad versions" and we don't want to lose those because of
      # what's in graph_def.versions? The C++ ImporGraphDef does something
      # more nuanced.
      g.graph_def_versions.CopyFrom(graph_def.versions)

      input_map = _ConvertInputMapValues(name, input_map)

      # NOTE(mrry): We do this in two passes, because there may be a cycle in
      # `graph_def`.

      # 1. Add operations without their inputs.
      for node in graph_def.node:
        # Check to see if this op's name matches a previously seen op
        if node.name in name_to_op:
          raise ValueError('Duplicate name \'%s\' in GraphDef.' % node.name)
        # Set any default attr values that aren't present.
        if node.op not in op_dict:
          raise ValueError('No op named %s in defined operations.' % node.op)
        op_def = op_dict[node.op]
        for attr_def in op_def.attr:
          key = attr_def.name
          if attr_def.HasField('default_value'):
            value = node.attr[key]
            if value is None or value.WhichOneof('value') is None:
              node.attr[key].CopyFrom(attr_def.default_value)

        output_types = _OutputTypes(node, op_dict)
        # Inputs are attached in pass 2; create the op with none for now.
        name_to_op[node.name] = g.create_op(
            node.op, [], output_types, name=node.name, attrs=node.attr,
            compute_shapes=False, compute_device=False,
            op_def=op_def)

      # Maps from a node to the ops it is colocated with, if colocation
      # is specified in the attributes.
      colocation_pairs = collections.defaultdict(list)

      # 2. Add inputs to the operations.
      for node in graph_def.node:
        op = name_to_op[node.name]
        input_types = _InputTypes(node, op_dict)
        apply_device_function = True

        # Rewrite the colocation attributes in the graph, since the
        # names of new ops may have changed.
        for key, value in op.node_def.attr.items():
          if key == '_class':
            class_values = value.list
            new_class_values = []
            for class_value in class_values.s:
              if class_value.startswith(b'loc:@'):
                op_to_bind_to = class_value[5:].decode()
                # Find the op by its original name.
                if op_to_bind_to not in name_to_op:
                  raise ValueError('Specified colocation to an op that '
                                   'does not exist during import: %s in %s' % (
                                       op_to_bind_to, node.name))
                original_op = name_to_op[op_to_bind_to]
                new_class_values.append(compat.as_bytes(
                    'loc:@' + original_op.name))
                if op_to_bind_to != node.name:
                  # Keep track of this mapping for a later phase.
                  colocation_pairs[op].append(original_op)

                  # Don't apply this op's device function,
                  # the colocation constraint will ensure
                  # the proper device gets assigned at runtime.
                  apply_device_function = False
              else:
                new_class_values.append(class_value)
            value.list.CopyFrom(attr_value_pb2.AttrValue.ListValue(
                s=new_class_values))

        # NOTE(mrry): We cannot use zip here because control inputs do not
        # appear in the list of input_types.
        for i, input_name in enumerate(
            [_CanonicalInputName(x) for x in node.input]):

          if _IsControlInput(input_name):
            # (a) Input is a control input that should be taken from an op
            # in "graph_def".
            try:
              source_op = name_to_op[input_name[1:]]
            except KeyError:
              raise ValueError(
                  _InvalidNodeMessage(
                      node,
                      'Control input %r not found in graph_def.'
                      % (input_name,)))
            # pylint: disable=protected-access
            op._add_control_input(source_op)
            # pylint: enable=protected-access

          else:
            try:
              input_type = input_types[i]
            except IndexError:
              raise ValueError(_InvalidNodeMessage(
                  node, 'More inputs specified (%r) than the op expects.'
                  % (input_name,)))

            if input_name in input_map:
              # (b) Input should be replaced by a tensor from the caller.
              source_tensor = input_map[input_name]
              used_input_keys.add(input_name)

            else:
              # (c) Input should be taken from an op in `graph_def`.
              operation_name, output_index = _ParseTensorName(input_name)
              try:
                source_op = name_to_op[operation_name]
                source_tensor = list(source_op.values())[output_index]
              except (KeyError, IndexError):
                raise ValueError(
                    _InvalidNodeMessage(
                        node,
                        'Input tensor %r not found in graph_def.'
                        % (input_name,)))

            try:
              # pylint: disable=protected-access
              op._add_input(source_tensor, dtype=input_type)
              # pylint: enable=protected-access
            except TypeError as te:
              raise ValueError(_InvalidNodeMessage(
                  node, 'Input tensor %r %s' % (input_name, te)))

        # pylint: disable=protected-access
        if op._input_types != input_types:
          raise ValueError(
              _InvalidNodeMessage(
                  node,
                  'Input types mismatch (expected %r but got %r)'
                  % (', '.join(dtypes.as_dtype(x).name for x in input_types),
                     ', '.join(x.name for x in op._input_types))))
        # pylint: enable=protected-access

        if not g._is_function(op.type):  # pylint: disable=protected-access
          # Execute shape inference for this op.
          # NOTE(mrry): If the graph contains a cycle, the full shape
          # information may not be available for this op's inputs.
          ops.set_shapes_for_outputs(op)
        # For nodes with _output_shapes set, set the output shapes.
        if '_output_shapes' in op.node_def.attr:
          for i, output in enumerate(op.outputs):
            dims = op.node_def.attr['_output_shapes'].list.shape[i]
            output_shape = tensor_shape.TensorShape(
                None if dims.unknown_rank else
                [dim.size if dim.size >= 0 else None for dim in dims.dim])

            try:
              output.set_shape(output_shape)
            except ValueError as e:
              # If the output shape is incompatible with what is inferred
              # by the graph for a very specific whitelist of ops, then we
              # ignore this output shape. This can happen if there is a
              # bug in the shape function for some operation, and the
              # serialized graph def has the incorrect shape set when
              # running on a newer binary with the fixed shape function.
              # This is an escape hatch that allows us to correct shape
              # functions that are not critical to correct execution but
              # would cause graphs to fail if imported after correcting.
              #
              # This can be removed after 2017/03/08.
              if op.type in ['RandomShuffleQueue', 'PaddingFIFOQueue',
                             'FIFOQueue', 'PriorityQueue', 'QueueSize',
                             'Stack', 'Barrier', 'BarrierReadySize',
                             'BarrierIncompleteSize', 'HashTable',
                             'MutableHashTable',
                             'MutableHashTableOfTensors', 'Mutex',
                             'CuckooTable', 'IndexTable',
                             'WholeFileReader', 'TextLineReader',
                             'FixedLengthRecordReader',
                             'TFRecordReader', 'IdentityReader',
                             'LMDBReader',
                             'RefSwitch', 'RefEnter', 'RefNextIteration',
                             'RefMerge', 'RefIdentity']:
                pass
              elif op.type in [
                  'ConditionalAccumulator', 'SparseConditionalAccumulator',
                  'Table'
              ]:
                # This can be removed after 2017/04/24.
                pass
              else:
                raise e

          del op.node_def.attr['_output_shapes']

        # NOTE(mrry): We do this after configuring the inputs, because
        # the result of the device functions may depend on the inputs.
        if apply_device_function:
          with _MaybeDevice(node.device):
            g._apply_device_functions(op)  # pylint: disable=protected-access

      # The following loop populates the device field of ops that are
      # colocated with another op. This is implied by the colocation
      # attribute, but we propagate the device field for completeness.
      for op, coloc_op_list in colocation_pairs.items():
        coloc_device = None
        # Find any device in the list of colocated ops that have a
        # device, if it exists. We assume that if multiple ops
        # have devices, they refer to the same device. Otherwise, a
        # runtime error will occur since the colocation property
        # cannot be guaranteed.
        #
        # One possible improvement is to try to check for compatibility
        # of all devices in this list at import time here, which would
        # require implementing a compatibility function for device specs
        # in python.
        for coloc_op in coloc_op_list:
          if coloc_op.device:
            coloc_device = pydev.DeviceSpec.from_string(coloc_op.device)
            break
        if coloc_device:
          op._set_device(coloc_device)  # pylint: disable=protected-access

      # Treat input mappings that don't appear in the graph as an error,
      # because they are likely to be due to a typo.
      def _IsImportedNodeOutput(tensor_name):
        operation_name, output_index = _ParseTensorName(tensor_name)
        try:
          return output_index < len(name_to_op[operation_name].outputs)
        except KeyError:
          return False

      absent_input_keys = [
          k for k in frozenset(input_map.keys()).difference(used_input_keys)
          if not _IsImportedNodeOutput(k)]
      if absent_input_keys:
        raise ValueError(
            'Attempted to map inputs that were not found in graph_def: [%s]'
            % ', '.join(absent_input_keys))

      if return_elements is None:
        return None
      else:
        ret = []
        for name in return_elements:
          name = compat.as_str(name)
          if ':' in name:
            try:
              operation_name, output_index = _ParseTensorName(name)
              ret.append(name_to_op[operation_name].outputs[output_index])
            except (ValueError, KeyError, IndexError):
              raise ValueError(
                  'Requested return_element %r not found in graph_def.' % name)
          else:
            try:
              ret.append(name_to_op[name])
            except KeyError:
              raise ValueError(
                  'Requested return_element %r not found in graph_def.' % name)
        return ret
# LINT.ThenChange(//tensorflow/core/graph/graph_constructor.cc)
| |
import sys
sys.path.insert(0, '../')
import unittest
import lib.base as sinon
from lib.spy import SinonSpy
from lib.sandbox import sinontest
from lib.matcher import SinonMatcher, Matcher
"""
======================================================
FOR TEST ONLY START
======================================================
"""
# built-in module
import os
# customized fixture class
class A_object(object):
    """Fixture class whose single method returns a fixed marker string."""

    def A_func(self):
        # customized function
        return "test_global_A_func"
# global fixture function
def B_func(x=None):
    """Return a marker string; the string of `x` is appended when truthy."""
    if not x:
        return "test_local_B_func"
    return "test_local_B_func" + str(x)
def C_func(a="a", b="b", c="c"):
    """Fixture taking three keyword args; the args are ignored."""
    return "test_local_C_func"
def D_func(err=False):
    """Raise `err` when truthy (callers pass an exception), else return a marker."""
    if not err:
        return "test_local_D_func"
    raise err
def E_func(*args, **kwargs):
    """Return the reprs of positional and keyword args separated by a space."""
    return "%s %s" % (args, kwargs)
"""
======================================================
FOR TEST ONLY END
======================================================
"""
class TestSinonSpy(unittest.TestCase):
    def setUp(self):
        # Re-scan module globals before each test so sinon can wrap them.
        sinon.g = sinon.init(globals())
    @sinontest
    def test040_called_method(self):
        # `called` is true once the wrapped global has been invoked at all.
        spy = SinonSpy(B_func)
        sinon.g.B_func()
        self.assertTrue(spy.called)
    @sinontest
    def test041_calledOnce_method(self):
        # One call through the wrapped global sets `calledOnce`.
        spy = SinonSpy(B_func)
        sinon.g.B_func()
        self.assertTrue(spy.calledOnce)
    @sinontest
    def test042_calledTwice_method(self):
        # Two calls through the wrapped global set `calledTwice`.
        spy = SinonSpy(B_func)
        sinon.g.B_func()
        sinon.g.B_func()
        self.assertTrue(spy.calledTwice)
    @sinontest
    def test043_calledThrice_method(self):
        # Three calls through the wrapped global set `calledThrice`.
        spy = SinonSpy(B_func)
        sinon.g.B_func()
        sinon.g.B_func()
        sinon.g.B_func()
        self.assertTrue(spy.calledThrice)
    @sinontest
    def test044_calledOnce_module_method(self):
        # Spying a module attribute (os.system) rather than a global function.
        spy = SinonSpy(os, "system")
        os.system("cd")
        self.assertTrue(spy.calledOnce)
    @sinontest
    def test045_calledTwice_module_method(self):
        # Two os.system calls through the module spy set `calledTwice`.
        spy = SinonSpy(os, "system")
        os.system("cd")
        os.system("cd")
        self.assertTrue(spy.calledTwice)
    @sinontest
    def test046_calledThrice_module_method(self):
        # Three os.system calls through the module spy set `calledThrice`.
        spy = SinonSpy(os, "system")
        os.system("cd")
        os.system("cd")
        os.system("cd")
        self.assertTrue(spy.calledThrice)
    @sinontest
    def test047_calledOnce_empty(self):
        # An anonymous spy (no target) records direct invocations of itself.
        spy = SinonSpy()
        spy()
        self.assertTrue(spy.calledOnce)
    @sinontest
    def test048_calledTwice_empty(self):
        # Two direct invocations of an anonymous spy set `calledTwice`.
        spy = SinonSpy()
        spy()
        spy()
        self.assertTrue(spy.calledTwice)
    @sinontest
    def test049_calledThrice_empty(self):
        # Three direct invocations of an anonymous spy set `calledThrice`.
        spy = SinonSpy()
        spy()
        spy()
        spy()
        self.assertTrue(spy.calledThrice)
    @sinontest
    def test050_firstCall_callId(self):
        # callId appears to be assigned globally in call order across all
        # spies: ids 0-3 here match the call sequence below.
        spy1 = SinonSpy(os, "system")
        spy2 = SinonSpy()
        spy3 = SinonSpy(B_func)
        spy4 = SinonSpy()
        os.system("cd")
        spy2()
        sinon.g.B_func()
        spy4()
        self.assertEqual(spy1.firstCall.callId, 0)
        self.assertEqual(spy2.firstCall.callId, 1)
        self.assertEqual(spy3.firstCall.callId, 2)
        self.assertEqual(spy4.firstCall.callId, 3)
    @sinontest
    def test051_calledBefore_calledAfter_normal(self):
        # Full ordering matrix for three spies called in the order
        # spy1 -> spy2 -> spy3; a spy is never before/after itself.
        spy1 = SinonSpy(os, "system")
        spy2 = SinonSpy()
        spy3 = SinonSpy(B_func)
        os.system("cd")
        spy2()
        sinon.g.B_func()
        self.assertFalse(spy1.calledBefore(spy1))
        self.assertFalse(spy2.calledBefore(spy2))
        self.assertFalse(spy3.calledBefore(spy3))
        self.assertFalse(spy1.calledAfter(spy1))
        self.assertFalse(spy2.calledAfter(spy2))
        self.assertFalse(spy3.calledAfter(spy3))
        self.assertTrue(spy1.calledBefore(spy2))
        self.assertTrue(spy1.calledBefore(spy3))
        self.assertTrue(spy2.calledBefore(spy3))
        self.assertFalse(spy1.calledAfter(spy2))
        self.assertFalse(spy1.calledAfter(spy3))
        self.assertFalse(spy2.calledAfter(spy3))
        self.assertTrue(spy2.calledAfter(spy1))
        self.assertTrue(spy3.calledAfter(spy1))
        self.assertTrue(spy3.calledAfter(spy2))
        self.assertFalse(spy2.calledBefore(spy1))
        self.assertFalse(spy3.calledBefore(spy1))
        self.assertFalse(spy3.calledBefore(spy2))
@sinontest
def test052_calledBefore_nothing_called(self):
spy1 = SinonSpy(os, "system")
spy2 = SinonSpy()
self.assertFalse(spy1.calledBefore(spy2))
self.assertFalse(spy2.calledBefore(spy1))
self.assertFalse(spy2.calledAfter(spy1))
self.assertFalse(spy1.calledAfter(spy2))
@sinontest
def test053_calledBefore_calledAfter_recalled_method(self):
spy1 = SinonSpy(os, "system")
spy2 = SinonSpy()
os.system("cd")
self.assertTrue(spy1.calledBefore(spy2))
self.assertFalse(spy1.calledAfter(spy2))
self.assertFalse(spy2.calledBefore(spy1))
self.assertFalse(spy2.calledAfter(spy1))
spy2()
self.assertTrue(spy1.calledBefore(spy2))
self.assertFalse(spy1.calledAfter(spy2))
self.assertFalse(spy2.calledBefore(spy1))
self.assertTrue(spy2.calledAfter(spy1))
os.system("cd")
self.assertTrue(spy1.calledBefore(spy2))
self.assertTrue(spy1.calledAfter(spy2))
self.assertTrue(spy2.calledBefore(spy1))
self.assertTrue(spy2.calledAfter(spy1))
spy1.restore()
@sinontest
def test054_calledBefore_calledAfter_called_restore_recalled(self):
spy1 = SinonSpy(os, "system")
spy2 = SinonSpy()
os.system("cd")
spy1.restore()
spy1 = SinonSpy(os, "system")
spy2()
os.system("cd")
self.assertTrue(spy1.calledAfter(spy2))
self.assertFalse(spy2.calledAfter(spy1))
self.assertTrue(spy2.calledBefore(spy1))
self.assertFalse(spy1.calledBefore(spy2))
@sinontest
def test070_calledWith_method_fullmatch(self):
spy = SinonSpy(C_func)
#pure kwargs
sinon.g.C_func(a="a", b="b", c="c")
self.assertTrue(spy.calledWith(a="a", b="b", c="c"))
self.assertFalse(spy.calledWith(a="wrong", b="b", c="c"))
#pure args
sinon.g.C_func("a", "b", "c")
self.assertTrue(spy.calledWith("a", "b", "c"))
self.assertFalse(spy.calledWith("a", "wrong", "c"))
#combine kwargs and args
sinon.g.C_func("a", b="b", c="c")
self.assertTrue(spy.calledWith("a", b="b", c="c"))
self.assertTrue(spy.calledWith("a", "b", c="c"))
self.assertTrue(spy.calledWith("a", "b", "c"))
self.assertFalse(spy.calledWith("a", "b", "d"))
self.assertFalse(spy.calledWith("a", "b", c="d"))
@sinontest
def test071_calledWith_method_partialmatch(self):
spy = SinonSpy(C_func)
#pure kwargs
sinon.g.C_func(a="a", b="b", c="c")
self.assertFalse(spy.calledWith(a="wrong"))
self.assertTrue(spy.calledWith(a="a"))
self.assertTrue(spy.calledWith(b="b"))
self.assertTrue(spy.calledWith(c="c"))
self.assertFalse(spy.calledWith(a="wrong", b="b"))
self.assertTrue(spy.calledWith(a="a", b="b"))
self.assertTrue(spy.calledWith(b="b", c="c"))
self.assertTrue(spy.calledWith(a="a", c="c"))
#pure args
sinon.g.C_func("a", "b", "c")
self.assertFalse(spy.calledWith("d"))
self.assertTrue(spy.calledWith("a"))
self.assertFalse(spy.calledWith("b"))
self.assertFalse(spy.calledWith("c"))
self.assertFalse(spy.calledWith("wrong", "b"))
self.assertTrue(spy.calledWith("a", "b"))
self.assertFalse(spy.calledWith("b", "c"))
self.assertFalse(spy.calledWith("a", "c"))
self.assertTrue(spy.calledWith("a", "b", "c"))
#combine kwargs and args
sinon.g.C_func("a", b="b", c="c")
self.assertTrue(spy.calledWith("a", b="b"))
self.assertTrue(spy.calledWith("a", c="c"))
self.assertTrue(spy.calledWith(b="b", c="c"))
self.assertTrue(spy.calledWith("a"))
self.assertTrue(spy.calledWith(c="c"))
self.assertFalse(spy.calledWith("wrong", b="b"))
self.assertFalse(spy.calledWith("a", b="wrong"))
self.assertFalse(spy.calledWith("a", c="wrong"))
@sinontest
def test072_alwaysCalledWith_method_fullmatch(self):
spy = SinonSpy(C_func)
#pure kwargs
sinon.g.C_func(a="a", b="b", c="c")
sinon.g.C_func(a="a", b="b", c="c")
self.assertTrue(spy.alwaysCalledWith(a="a", b="b", c="c"))
sinon.g.C_func(a="d", b="e", c="f")
self.assertFalse(spy.alwaysCalledWith(a="a", b="b", c="c"))
spy.restore()
spy = SinonSpy(C_func)
#pure args
sinon.g.C_func("a", "b", "c")
sinon.g.C_func("a", "b", "c")
self.assertTrue(spy.alwaysCalledWith("a", "b", "c"))
sinon.g.C_func("d", "e", "f")
self.assertFalse(spy.alwaysCalledWith("a", "b", "c"))
spy.restore()
spy = SinonSpy(C_func)
#combine kwargs and args
sinon.g.C_func("a", b="b", c="c")
sinon.g.C_func("a", b="b", c="c")
self.assertTrue(spy.alwaysCalledWith("a", b="b", c="c"))
sinon.g.C_func("b", b="b", c="c")
self.assertFalse(spy.alwaysCalledWith("a", b="b", c="c"))
spy.restore()
@sinontest
def test073_alwaysCalledWith_method_partialmatch(self):
spy = SinonSpy(C_func)
#pure kwargs
sinon.g.C_func(a="a", b="b", c="c")
sinon.g.C_func(a="xxxx", b="b", c="c")
self.assertTrue(spy.alwaysCalledWith(b="b", c="c"))
sinon.g.C_func(a="d", b="e", c="f")
self.assertFalse(spy.alwaysCalledWith(b="b", c="c"))
spy.restore()
spy = SinonSpy(C_func)
#pure args
sinon.g.C_func("a", "b", "c")
sinon.g.C_func("a", "b", "xxxx")
self.assertTrue(spy.alwaysCalledWith("a", "b"))
sinon.g.C_func("d", "e", "f")
self.assertFalse(spy.alwaysCalledWith("a", "b"))
spy.restore()
spy = SinonSpy(C_func)
#combine kwargs and args
sinon.g.C_func("a", b="b", c="c")
sinon.g.C_func("a", b="b")
self.assertTrue(spy.alwaysCalledWith("a", b="b"))
sinon.g.C_func("b", b="b", c="c")
self.assertFalse(spy.alwaysCalledWith("a", b="b"))
spy.restore()
@sinontest
def test074_calledWithExactly_method_fullmatch(self):
spy = SinonSpy(C_func)
#pure kwargs
sinon.g.C_func(a="a", b="b", c="c")
self.assertTrue(spy.calledWithExactly(a="a", b="b", c="c"))
self.assertFalse(spy.calledWithExactly(a="d", b="e", c="f"))
#pure args
sinon.g.C_func("a", "b", "c")
self.assertTrue(spy.calledWithExactly("a", "b", "c"))
self.assertFalse(spy.calledWithExactly("d", "e", "f"))
#combine kwargs and args
sinon.g.C_func("a", b="b", c="c")
self.assertTrue(spy.calledWithExactly("a", b="b", c="c"))
self.assertFalse(spy.calledWithExactly("wrong", b="b", c="c"))
#Exception
with self.assertRaises(Exception) as context:
spy.calledWithExactly()
@sinontest
def test075_calledWithExactly_method_partialmatch(self):
spy = SinonSpy(C_func)
#pure kwargs
sinon.g.C_func(a="a", b="b", c="c")
self.assertFalse(spy.calledWithExactly(a="a", b="b"))
#pure args
sinon.g.C_func("a", "b", "c")
self.assertFalse(spy.calledWithExactly("a", "b"))
#combine kwargs and args
sinon.g.C_func("a", b="b", c="c")
self.assertFalse(spy.calledWithExactly("a", b="b"))
@sinontest
def test076_alwaysCalledWithExactly_method_fullmatch(self):
spy = SinonSpy(C_func)
#pure kwargs
sinon.g.C_func(a="a", b="b", c="c")
sinon.g.C_func(a="a", b="b", c="c")
self.assertTrue(spy.alwaysCalledWithExactly(a="a", b="b", c="c"))
sinon.g.C_func(a="d", b="e", c="f")
self.assertFalse(spy.alwaysCalledWithExactly(a="a", b="b", c="c"))
spy.restore()
spy = SinonSpy(C_func)
#pure args
sinon.g.C_func("a", "b", "c")
sinon.g.C_func("a", "b", "c")
self.assertTrue(spy.alwaysCalledWithExactly("a", "b", "c"))
sinon.g.C_func("d", "e", "f")
self.assertFalse(spy.alwaysCalledWithExactly("a", "b", "c"))
spy.restore()
spy = SinonSpy(C_func)
#combine kwargs and args
sinon.g.C_func("a", b="b", c="c")
sinon.g.C_func("a", b="b", c="c")
self.assertTrue(spy.alwaysCalledWithExactly("a", b="b", c="c"))
sinon.g.C_func("b", b="b", c="c")
self.assertFalse(spy.alwaysCalledWithExactly("a", b="b", c="c"))
spy.restore()
spy = SinonSpy(C_func)
#Exception
with self.assertRaises(Exception) as context:
spy.alwaysCalledWithExactly()
@sinontest
def test077_alwaysCalledWithExactly_method_partialmatch(self):
spy = SinonSpy(C_func)
#pure kwargs
sinon.g.C_func(a="a", b="b", c="c")
sinon.g.C_func(a="a", b="b", c="c")
self.assertTrue(spy.alwaysCalledWithExactly(a="a", b="b", c="c"))
sinon.g.C_func(a="xxxx", b="b", c="c")
self.assertFalse(spy.alwaysCalledWithExactly(b="b", c="c"))
spy.restore()
spy = SinonSpy(C_func)
#pure args
sinon.g.C_func("a", "b", "c")
sinon.g.C_func("a", "b", "c")
self.assertTrue(spy.alwaysCalledWithExactly("a", "b", "c"))
sinon.g.C_func("a", "b", "xxxx")
self.assertFalse(spy.alwaysCalledWithExactly("a", "b"))
spy.restore()
spy = SinonSpy(C_func)
#combine kwargs and args
sinon.g.C_func("a", b="b", c="c")
sinon.g.C_func("a", b="b", c="c")
self.assertTrue(spy.alwaysCalledWithExactly("a", b="b", c="c"))
sinon.g.C_func("a", b="b", c="xxx")
self.assertFalse(spy.alwaysCalledWithExactly("a", b="b"))
spy.restore()
@sinontest
def test078_neverCalledWith_method_fullmatch(self):
spy = SinonSpy(C_func)
#pure kwargs
sinon.g.C_func(a="a", b="b", c="c")
self.assertFalse(spy.neverCalledWith(a="a", b="b", c="c"))
self.assertTrue(spy.neverCalledWith(a="wrong", b="b", c="c"))
#pure args
sinon.g.C_func("a", "b", "c")
self.assertFalse(spy.neverCalledWith("a", "b", "c"))
self.assertTrue(spy.neverCalledWith("a", "wrong", "c"))
#combine kwargs and args
sinon.g.C_func("a", b="b", c="c")
self.assertFalse(spy.neverCalledWith("a", b="b", c="c"))
self.assertFalse(spy.neverCalledWith("a", "b", c="c"))
self.assertFalse(spy.neverCalledWith("a", "b", "c"))
self.assertTrue(spy.neverCalledWith("a", "b", "d"))
self.assertTrue(spy.neverCalledWith("a", "b", c="d"))
@sinontest
def test079_neverCalledWith_method_partialmatch(self):
spy = SinonSpy(C_func)
#pure kwargs
sinon.g.C_func(a="a", b="b", c="c")
self.assertTrue(spy.neverCalledWith(a="wrong"))
self.assertFalse(spy.neverCalledWith(a="a"))
self.assertFalse(spy.neverCalledWith(b="b"))
self.assertFalse(spy.neverCalledWith(c="c"))
self.assertTrue(spy.neverCalledWith(a="wrong", b="b"))
self.assertFalse(spy.neverCalledWith(a="a", b="b"))
self.assertFalse(spy.neverCalledWith(b="b", c="c"))
self.assertFalse(spy.neverCalledWith(a="a", c="c"))
#pure args
sinon.g.C_func("a", "b", "c")
self.assertTrue(spy.neverCalledWith("d"))
self.assertFalse(spy.neverCalledWith("a"))
self.assertTrue(spy.neverCalledWith("b"))
self.assertTrue(spy.neverCalledWith("c"))
self.assertTrue(spy.neverCalledWith("wrong", "b"))
self.assertFalse(spy.neverCalledWith("a", "b"))
self.assertTrue(spy.neverCalledWith("b", "c"))
self.assertTrue(spy.neverCalledWith("a", "c"))
self.assertFalse(spy.neverCalledWith("a", "b", "c"))
#combine kwargs and args
sinon.g.C_func("a", b="b", c="c")
self.assertFalse(spy.neverCalledWith("a", b="b"))
self.assertFalse(spy.neverCalledWith("a", c="c"))
self.assertFalse(spy.neverCalledWith(b="b", c="c"))
self.assertFalse(spy.neverCalledWith("a"))
self.assertFalse(spy.neverCalledWith(c="c"))
self.assertTrue(spy.neverCalledWith("wrong", b="b"))
self.assertTrue(spy.neverCalledWith("a", b="wrong"))
self.assertTrue(spy.neverCalledWith("a", c="wrong"))
@sinontest
def test090_threw_without_err(self):
spy = SinonSpy(D_func)
sinon.g.D_func(err=False)
self.assertFalse(spy.threw())
@sinontest
def test091_threw_with_err(self):
class MyException(Exception):
pass
spy = SinonSpy(D_func)
try:
sinon.g.D_func(err=MyException)
except:
pass
self.assertTrue(spy.threw())
self.assertTrue(spy.threw(MyException))
self.assertFalse(spy.threw(ValueError))
try:
sinon.g.D_func(err=ValueError)
except:
pass
self.assertTrue(spy.threw(ValueError))
@sinontest
def test092_alwaysThrew_without_err(self):
spy = SinonSpy(D_func)
sinon.g.D_func(err=False)
sinon.g.D_func(err=False)
self.assertFalse(spy.alwaysThrew())
@sinontest
def test093_alwaysThrew_with_same_err(self):
class MyException(Exception):
pass
spy = SinonSpy(D_func)
try:
sinon.g.D_func(err=MyException)
sinon.g.D_func(err=MyException)
except:
pass
self.assertTrue(spy.alwaysThrew())
self.assertTrue(spy.alwaysThrew(MyException))
try:
sinon.g.D_func(err=ValueError)
except:
pass
self.assertFalse(spy.alwaysThrew(MyException))
@sinontest
def test100_returned(self):
spy = SinonSpy(B_func)
sinon.g.B_func()
self.assertTrue(spy.returned("test_local_B_func"))
sinon.g.B_func(2)
self.assertTrue(spy.returned("test_local_B_func2"))
@sinontest
def test101_returned_exception(self):
# exception will return a empty function with no return
spy = SinonSpy(D_func)
try:
sinon.g.D_func(err=ValueError)
except:
pass
self.assertFalse(spy.returned("test_local_D_func"))
sinon.g.D_func()
self.assertTrue(spy.returned("test_local_D_func"))
@sinontest
def test102_alwaysReturned(self):
spy = SinonSpy(B_func)
sinon.g.B_func()
sinon.g.B_func()
self.assertTrue(spy.alwaysReturned("test_local_B_func"))
sinon.g.B_func(123)
self.assertFalse(spy.alwaysReturned("test_local_B_func"))
@sinontest
def test110_called(self):
spy1 = SinonSpy(B_func)
spy2 = SinonSpy(C_func)
sinon.g.B_func()
self.assertTrue(spy1.called) #B_func is called
self.assertFalse(spy2.called) #C_func is never called
@sinontest
def test111_getCall_wrongIndex(self):
spy = SinonSpy(C_func)
sinon.g.C_func()
self.assertEqual(spy.getCall(-100), None)
self.assertEqual(spy.getCall(0).callId, 0)
self.assertEqual(spy.getCall(100), None)
@sinontest
def test120_kwargs(self):
spy = SinonSpy(C_func)
self.assertEqual(spy.kwargs, [])
sinon.g.C_func(a="a", b="b", c="c")
self.assertEqual(spy.kwargs, [{"a":"a", "b":"b", "c":"c"}])
sinon.g.C_func(a="a", b="b", c="c")
self.assertEqual(spy.kwargs, [{"a":"a", "b":"b", "c":"c"}, {"a":"a", "b":"b", "c":"c"}])
sinon.g.C_func("a", b="b", c="c")
self.assertEqual(spy.kwargs, [{"a":"a", "b":"b", "c":"c"}, {"a":"a", "b":"b", "c":"c"}, {"b": "b", "c": "c"}])
@sinontest
def test121_args(self):
spy = SinonSpy(C_func)
self.assertEqual(spy.args, [])
sinon.g.C_func("a", "b", "c")
self.assertEqual(spy.args, [("a", "b", "c")])
sinon.g.C_func("a", "b", "c")
self.assertEqual(spy.args, [("a", "b", "c"), ("a", "b", "c")])
sinon.g.C_func("a", b="b", c="c")
self.assertEqual(spy.args, [("a", "b", "c"), ("a", "b", "c"), ("a",)])
@sinontest
def test122_exceptions(self):
spy = SinonSpy(D_func)
self.assertEqual(spy.exceptions, [])
try:
sinon.g.D_func(ValueError)
except:
pass
self.assertEqual(spy.exceptions, [ValueError])
try:
sinon.g.D_func(TypeError)
except:
pass
self.assertEqual(spy.exceptions, [ValueError, TypeError])
@sinontest
def test123_returnValues(self):
spy = SinonSpy(B_func)
self.assertEqual(spy.returnValues, [])
sinon.g.B_func()
self.assertEqual(spy.returnValues, ["test_local_B_func"])
sinon.g.B_func(2)
self.assertEqual(spy.returnValues, ["test_local_B_func", "test_local_B_func2"])
@sinontest
def test124_args_module_function(self):
spy = SinonSpy(os, "system")
self.assertEqual(spy.args, [])
os.system("cd")
self.assertEqual(spy.args, [("cd", )])
@sinontest
def test125_kwargs_module_function(self):
spy = SinonSpy(os, "walk")
self.assertEqual(spy.kwargs, [])
os.walk(".", topdown=False)
self.assertEqual(spy.kwargs, [{"topdown": False}])
@sinontest
def test126_kwargs_pure(self):
spy = SinonSpy()
self.assertEqual(spy.kwargs, [])
spy(a="a")
self.assertEqual(spy.kwargs, [{"a": "a"}])
@sinontest
def test127_returnValues_module_function(self):
spy = SinonSpy(os, "system")
self.assertEqual(spy.returnValues, [])
os.system("cd")
self.assertEqual(spy.returnValues, [0])
@sinontest
def test130_reset(self):
spy = SinonSpy(B_func)
sinon.g.B_func(2)
self.assertTrue(spy.called)
self.assertTrue(spy.args)
spy.reset()
self.assertFalse(spy.called)
self.assertFalse(spy.args)
@sinontest
def test140_spy_as_callback(self):
def func(f):
f()
spy = SinonSpy()
func(spy)
self.assertTrue(spy.called)
self.assertTrue(spy.calledOnce)
@sinontest
def test141_spy_as_callback_withargs(self):
def func(f):
f(1)
spy = SinonSpy()
func(spy)
self.assertTrue(spy.calledWith(1))
@sinontest
def test200_calledWithMatch_args(self):
spy = SinonSpy(C_func)
sinon.g.C_func("a", "b", "c")
self.assertTrue(spy.calledWithMatch("a"))
self.assertTrue(spy.calledWithMatch("a", "b"))
self.assertTrue(spy.calledWithMatch("a", "b", "c"))
self.assertFalse(spy.calledWithMatch("a", "b", "c", "d"))
self.assertFalse(spy.calledWithMatch("a", "c"))
self.assertTrue(spy.calledWithMatch(str))
self.assertFalse(spy.calledWithMatch(str, int))
self.assertTrue(spy.calledWithMatch(str, str))
self.assertTrue(spy.calledWithMatch(str, str, str))
sinon.g.C_func("d", "e")
self.assertTrue(spy.calledWithMatch("a", "b"))
self.assertTrue(spy.calledWithMatch("d", "e"))
self.assertFalse(spy.calledWithMatch("a", "e"))
self.assertFalse(spy.calledWithMatch("d", "e", "c")) #it's a combination
@sinontest
def test201_calledWith_matcher(self):
spy = SinonSpy(C_func)
sinon.g.C_func("a", "b", "c")
self.assertFalse(spy.calledWith(str))
self.assertTrue(spy.calledWith(SinonMatcher(str)))
@sinontest
def test203_calledWithMatch_kwargs(self):
spy = SinonSpy(C_func)
sinon.g.C_func(a="a", b="b", c="c")
self.assertTrue(spy.calledWithMatch(a="a"))
self.assertTrue(spy.calledWithMatch(a="a", b="b"))
self.assertFalse(spy.calledWithMatch(a="d", b="e"))
self.assertTrue(spy.calledWithMatch(a="a", b="b", c="c"))
self.assertTrue(spy.calledWithMatch(a="a", c="c")) # dict is not rely on order of arguments
self.assertTrue(spy.calledWithMatch(a=str))
self.assertFalse(spy.calledWithMatch(a=str, b=int))
self.assertTrue(spy.calledWithMatch(a=str, b=str))
self.assertTrue(spy.calledWithMatch(a=str, b=str, c=str))
sinon.g.C_func(a="d", b="e")
self.assertTrue(spy.calledWithMatch(a="a", b="b"))
self.assertTrue(spy.calledWithMatch(a="d", b="e"))
self.assertFalse(spy.calledWithMatch(a="a", b="e"))
self.assertFalse(spy.calledWithMatch(a="d", b="e", c="c")) #it's a combination
@sinontest
def test206_calledWithMatch_combination(self):
spy = SinonSpy(C_func)
sinon.g.C_func("a", "b", c="c")
self.assertTrue(spy.calledWithMatch("a"))
self.assertTrue(spy.calledWithMatch("a", "b"))
self.assertFalse(spy.calledWithMatch("d", "e"))
self.assertTrue(spy.calledWithMatch("a", "b", c="c"))
self.assertTrue(spy.calledWithMatch("a", c="c")) # dict is not rely on order of arguments
self.assertTrue(spy.calledWithMatch(str))
self.assertFalse(spy.calledWithMatch(str, int))
self.assertTrue(spy.calledWithMatch(str, str))
self.assertTrue(spy.calledWithMatch(str, str, c=str))
sinon.g.C_func("d", b="e")
self.assertFalse(spy.calledWithMatch("a", b="b"))
self.assertTrue(spy.calledWithMatch("d", b="e"))
self.assertTrue(spy.calledWithMatch("a", b="e")) #it's a combination
self.assertFalse(spy.calledWithMatch("d", "e", c="c")) #it's a combination
sinon.g.C_func(c="f")
self.assertTrue(spy.calledWithMatch("a", "b", c="f")) #it's a combination but called
@sinontest
def test210_calledWith_Match_args(self):
spy = SinonSpy(C_func)
sinon.g.C_func("a", "b", "c")
self.assertTrue(spy.calledWith(SinonMatcher(str)))
self.assertFalse(spy.calledWith(SinonMatcher(str), SinonMatcher(int)))
self.assertTrue(spy.calledWith(SinonMatcher(str), SinonMatcher(str)))
self.assertTrue(spy.calledWith(SinonMatcher(str), SinonMatcher(str), "c"))
@sinontest
def test213_calledWith_Match_kwargs(self):
spy = SinonSpy(C_func)
sinon.g.C_func(a="a", b="b", c="c")
self.assertTrue(spy.calledWith(a=SinonMatcher(str)))
self.assertFalse(spy.calledWith(a=SinonMatcher(str), b=SinonMatcher(int)))
self.assertTrue(spy.calledWith(a=SinonMatcher(str), b=SinonMatcher(str)))
self.assertTrue(spy.calledWith(a=SinonMatcher(str), b=SinonMatcher(str), c="c"))
@sinontest
def test216_calledWith_Match_combination(self):
spy = SinonSpy(C_func)
sinon.g.C_func("a", "b", c="c")
self.assertTrue(spy.calledWith(SinonMatcher(str)))
self.assertFalse(spy.calledWith(SinonMatcher(str), SinonMatcher(int)))
self.assertFalse(spy.calledWith(SinonMatcher(str), b=SinonMatcher(str)))
self.assertTrue(spy.calledWith(SinonMatcher(str), SinonMatcher(str), c="c"))
@sinontest
def test219_calledWithMatch_exception(self):
spy = SinonSpy(C_func)
with self.assertRaises(Exception) as context:
spy.calledWithMatch()
@sinontest
def test220_alwaysCalledWithMatch_args(self):
spy = SinonSpy(C_func)
sinon.g.C_func("a", "b", "c")
self.assertTrue(spy.alwaysCalledWithMatch("a"))
self.assertTrue(spy.alwaysCalledWithMatch("a", "b"))
self.assertTrue(spy.alwaysCalledWithMatch("a", "b", "c"))
self.assertFalse(spy.alwaysCalledWithMatch("a", "b", "c", "d"))
self.assertFalse(spy.alwaysCalledWithMatch("a", "c"))
self.assertTrue(spy.alwaysCalledWithMatch(str))
self.assertFalse(spy.alwaysCalledWithMatch(str, int))
self.assertTrue(spy.alwaysCalledWithMatch(str, str))
self.assertTrue(spy.alwaysCalledWithMatch(str, str, str))
sinon.g.C_func("d", "e")
self.assertFalse(spy.alwaysCalledWithMatch("a", "b"))
self.assertFalse(spy.alwaysCalledWithMatch("d", "e"))
self.assertFalse(spy.alwaysCalledWithMatch("a", "e"))
self.assertFalse(spy.alwaysCalledWithMatch("d", "e", "c")) #it's a combination
@sinontest
def test221_alwaysCalledWith_matcher(self):
spy = SinonSpy(C_func)
sinon.g.C_func("a", "b", "c")
self.assertFalse(spy.alwaysCalledWith(str))
self.assertTrue(spy.alwaysCalledWith(SinonMatcher(str)))
@sinontest
def test223_alwaysCalledWithMatch_kwargs(self):
spy = SinonSpy(C_func)
sinon.g.C_func(a="a", b="b", c="c")
self.assertTrue(spy.alwaysCalledWithMatch(a="a"))
self.assertTrue(spy.alwaysCalledWithMatch(a="a", b="b"))
self.assertFalse(spy.alwaysCalledWithMatch(a="d", b="e"))
self.assertTrue(spy.alwaysCalledWithMatch(a="a", b="b", c="c"))
self.assertTrue(spy.alwaysCalledWithMatch(a="a", c="c")) # dict is not rely on order of arguments
self.assertTrue(spy.alwaysCalledWithMatch(a=str))
self.assertFalse(spy.alwaysCalledWithMatch(a=str, b=int))
self.assertTrue(spy.alwaysCalledWithMatch(a=str, b=str))
self.assertTrue(spy.alwaysCalledWithMatch(a=str, b=str, c=str))
sinon.g.C_func(a="d", b="e")
self.assertFalse(spy.alwaysCalledWithMatch(a="a", b="b"))
self.assertFalse(spy.alwaysCalledWithMatch(a="d", b="e"))
self.assertFalse(spy.alwaysCalledWithMatch(a="a", b="e"))
self.assertFalse(spy.alwaysCalledWithMatch(a="d", b="e", c="c")) #it's a combination
@sinontest
def test226_alwaysCalledWithMatch_combination(self):
spy = SinonSpy(C_func)
sinon.g.C_func("a", "b", c="c")
self.assertTrue(spy.alwaysCalledWithMatch("a"))
self.assertTrue(spy.alwaysCalledWithMatch("a", "b"))
self.assertFalse(spy.alwaysCalledWithMatch("d", "e"))
self.assertTrue(spy.alwaysCalledWithMatch("a", "b", c="c"))
self.assertTrue(spy.alwaysCalledWithMatch("a", c="c")) # dict is not rely on order of arguments
self.assertTrue(spy.alwaysCalledWithMatch(str))
self.assertFalse(spy.alwaysCalledWithMatch(str, int))
self.assertTrue(spy.alwaysCalledWithMatch(str, str))
self.assertTrue(spy.alwaysCalledWithMatch(str, str, c=str))
sinon.g.C_func("d", b="e")
self.assertFalse(spy.alwaysCalledWithMatch("a", b="b"))
self.assertFalse(spy.alwaysCalledWithMatch("d", b="e"))
self.assertFalse(spy.alwaysCalledWithMatch("a", b="e")) #it's a combination
self.assertFalse(spy.alwaysCalledWithMatch("d", "e", c="c")) #it's a combination
sinon.g.C_func(c="f")
self.assertFalse(spy.alwaysCalledWithMatch("a", "b", c="f")) #it's a combination but called
@sinontest
def test229_alwaysCalledWithMatch_exception(self):
spy = SinonSpy(C_func)
with self.assertRaises(Exception) as context:
spy.alwaysCalledWithMatch()
@sinontest
def test230_alwaysCalledWith_Match_args(self):
spy = SinonSpy(C_func)
sinon.g.C_func("a", "b", "c")
self.assertTrue(spy.alwaysCalledWith(SinonMatcher(str)))
self.assertFalse(spy.alwaysCalledWith(SinonMatcher(str), SinonMatcher(int)))
self.assertTrue(spy.alwaysCalledWith(SinonMatcher(str), SinonMatcher(str)))
self.assertTrue(spy.alwaysCalledWith(SinonMatcher(str), SinonMatcher(str), "c"))
@sinontest
def test233_alwaysCalledWith_Match_kwargs(self):
spy = SinonSpy(C_func)
sinon.g.C_func(a="a", b="b", c="c")
self.assertTrue(spy.alwaysCalledWith(a=SinonMatcher(str)))
self.assertFalse(spy.alwaysCalledWith(a=SinonMatcher(str), b=SinonMatcher(int)))
self.assertTrue(spy.alwaysCalledWith(a=SinonMatcher(str), b=SinonMatcher(str)))
self.assertTrue(spy.alwaysCalledWith(a=SinonMatcher(str), b=SinonMatcher(str), c="c"))
@sinontest
def test236_alwaysCalledWith_Match_combination(self):
spy = SinonSpy(C_func)
sinon.g.C_func("a", "b", c="c")
self.assertTrue(spy.alwaysCalledWith(SinonMatcher(str)))
self.assertFalse(spy.alwaysCalledWith(SinonMatcher(str), SinonMatcher(int)))
self.assertFalse(spy.alwaysCalledWith(SinonMatcher(str), b=SinonMatcher(str)))
self.assertTrue(spy.alwaysCalledWith(SinonMatcher(str), SinonMatcher(str), c="c"))
@sinontest
def test240_neverCalledWithMatch_args(self):
spy = SinonSpy(C_func)
sinon.g.C_func("a", "b", "c")
self.assertFalse(spy.neverCalledWithMatch("a"))
self.assertFalse(spy.neverCalledWithMatch("a", "b"))
self.assertFalse(spy.neverCalledWithMatch("a", "b", "c"))
self.assertTrue(spy.neverCalledWithMatch("a", "b", "c", "d"))
self.assertTrue(spy.neverCalledWithMatch("a", "c"))
self.assertFalse(spy.neverCalledWithMatch(str))
self.assertTrue(spy.neverCalledWithMatch(str, int))
self.assertFalse(spy.neverCalledWithMatch(str, str))
self.assertFalse(spy.neverCalledWithMatch(str, str, str))
sinon.g.C_func("d", "e")
self.assertFalse(spy.neverCalledWithMatch("a", "b"))
self.assertFalse(spy.neverCalledWithMatch("d", "e"))
self.assertTrue(spy.neverCalledWithMatch("a", "e"))
self.assertTrue(spy.neverCalledWithMatch("d", "e", "c")) #it's a combination
@sinontest
def test250_firstCall_to_lastCall_with_call(self):
spy = SinonSpy(E_func)
sinon.g.E_func(1, 2)
sinon.g.E_func(3, 4)
sinon.g.E_func(5, 6)
sinon.g.E_func(7, 8)
self.assertEqual(type(spy.firstCall).__name__, "SpyCall")
self.assertEqual(spy.firstCall.args, (1, 2))
self.assertEqual(type(spy.secondCall).__name__, "SpyCall")
self.assertEqual(spy.secondCall.args, (3, 4))
self.assertEqual(type(spy.thirdCall).__name__, "SpyCall")
self.assertEqual(spy.thirdCall.args, (5, 6))
self.assertEqual(type(spy.lastCall).__name__, "SpyCall")
self.assertEqual(spy.lastCall.args, (7, 8))
@sinontest
def test251_firstCall_to_lastCall_without_call(self):
spy = SinonSpy(C_func)
self.assertEqual(spy.firstCall, None)
self.assertEqual(spy.secondCall, None)
self.assertEqual(spy.thirdCall, None)
self.assertEqual(spy.lastCall, None)
@sinontest
def test260_getCall(self):
spy = SinonSpy(E_func)
sinon.g.E_func(1, 2, a=1)
sinon.g.E_func(3, 4, b=2)
sinon.g.E_func(5, 6, c=3)
call0 = spy.getCall(0)
self.assertTupleEqual(call0.args, (1,2))
self.assertDictEqual(call0.kwargs, {'a':1})
self.assertEqual(call0.callId, 0)
self.assertEqual(call0.exception, None)
self.assertEqual(call0.proxy, spy)
self.assertEqual(call0.returnValue, "(1, 2) {'a': 1}")
self.assertEqual(type(call0.stack), type([]))
call1 = spy.getCall(1)
self.assertTupleEqual(call1.args, (3,4))
self.assertDictEqual(call1.kwargs, {'b':2})
self.assertEqual(call1.callId, 1)
self.assertEqual(call1.exception, None)
self.assertEqual(call1.proxy, spy)
self.assertEqual(call1.returnValue, "(3, 4) {'b': 2}")
self.assertEqual(type(call1.stack), type([]))
call2 = spy.getCall(2)
self.assertTupleEqual(call2.args, (5,6))
self.assertDictEqual(call2.kwargs, {'c':3})
self.assertEqual(call2.callId, 2)
self.assertEqual(call2.exception, None)
self.assertEqual(call2.proxy, spy)
self.assertEqual(call2.returnValue, "(5, 6) {'c': 3}")
self.assertEqual(type(call2.stack), type([]))
@sinontest
def test261_getCall_exception(self):
spy = SinonSpy(D_func)
exception = BaseException()
try:
sinon.g.D_func(exception)
except BaseException as e:
call = spy.getCall(0)
self.assertTupleEqual(call.args, (exception,))
self.assertDictEqual(call.kwargs, {})
self.assertEqual(call.callId, 0)
self.assertEqual(exception, e)
self.assertEqual(call.exception, exception)
self.assertEqual(call.proxy, spy)
self.assertEqual(call.returnValue, None)
self.assertEqual(type(call.stack), type([]))
else:
self.assertTrue(False, "Failed to catch exception")
@sinontest
def test261_getCall_multipleSpies(self):
spy1 = SinonSpy(C_func)
spy2 = SinonSpy(E_func)
sinon.g.C_func(1, 2, 3)
sinon.g.E_func(4, 5, 6)
self.assertTupleEqual(spy1.getCall(0).args, (1, 2, 3))
self.assertListEqual(spy1.args, [(1, 2, 3)])
self.assertTupleEqual(spy2.getCall(0).args, (4, 5, 6))
self.assertListEqual(spy2.args, [(4, 5, 6)])
self.assertEqual(spy1.getCall(1), None)
self.assertEqual(spy2.getCall(1), None)
@sinontest
def test270_args_and_kwargs(self):
spy = SinonSpy(E_func)
sinon.g.E_func()
self.assertListEqual(spy.args, [()])
self.assertListEqual(spy.kwargs, [{}])
sinon.g.E_func(1, a=1)
self.assertListEqual(spy.args, [(), (1,)])
self.assertListEqual(spy.kwargs, [{}, {'a':1}])
sinon.g.E_func(1, 2, a=1, b=2)
self.assertListEqual(spy.args, [(), (1,), (1,2)])
self.assertListEqual(spy.kwargs, [{}, {'a':1}, {'a':1,'b':2}])
| |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Image warping using sparse flow defined at control points."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.image.python.ops import dense_image_warp
from tensorflow.contrib.image.python.ops import interpolate_spline
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
def _get_grid_locations(image_height, image_width):
"""Wrapper for np.meshgrid."""
y_range = np.linspace(0, image_height - 1, image_height)
x_range = np.linspace(0, image_width - 1, image_width)
y_grid, x_grid = np.meshgrid(y_range, x_range, indexing='ij')
return np.stack((y_grid, x_grid), -1)
def _expand_to_minibatch(np_array, batch_size):
"""Tile arbitrarily-sized np_array to include new batch dimension."""
tiles = [batch_size] + [1] * np_array.ndim
return np.tile(np.expand_dims(np_array, 0), tiles)
def _get_boundary_locations(image_height, image_width, num_points_per_edge):
"""Compute evenly-spaced indices along edge of image."""
y_range = np.linspace(0, image_height - 1, num_points_per_edge + 2)
x_range = np.linspace(0, image_width - 1, num_points_per_edge + 2)
ys, xs = np.meshgrid(y_range, x_range, indexing='ij')
is_boundary = np.logical_or(
np.logical_or(xs == 0, xs == image_width - 1),
np.logical_or(ys == 0, ys == image_height - 1))
return np.stack([ys[is_boundary], xs[is_boundary]], axis=-1)
def _add_zero_flow_controls_at_boundary(control_point_locations,
                                        control_point_flows, image_height,
                                        image_width, boundary_points_per_edge):
  """Augment control points with zero-flow points on the image boundary.

  Adds 4 + 4*boundary_points_per_edge extra control points (the corners plus
  boundary_points_per_edge points in the middle of each edge), each with zero
  flow, so the interpolated warp is pinned at the image border.

  Args:
    control_point_locations: `[batch, num_points, 2]` tensor of control points.
    control_point_flows: their flows, same shape.
    image_height: image height.
    image_width: image width.
    boundary_points_per_edge: extra points per edge, excluding the corners.

  Returns:
    A pair (locations, flows), each the input concatenated with the boundary
    points along the points axis.
  """
  # NOTE(review): relies on a statically-known batch dimension (TF1 API).
  batch_size = control_point_locations.get_shape()[0].value
  dtype = control_point_locations.dtype
  # Static (numpy) boundary coordinates and their all-zero flows.
  boundary_locations_np = _get_boundary_locations(image_height, image_width,
                                                  boundary_points_per_edge)
  boundary_flows_np = np.zeros([boundary_locations_np.shape[0], 2])
  # Lift to constant tensors with a leading batch dimension.
  boundary_locations = constant_op.constant(
      _expand_to_minibatch(boundary_locations_np, batch_size), dtype=dtype)
  boundary_flows = constant_op.constant(
      _expand_to_minibatch(boundary_flows_np, batch_size), dtype=dtype)
  # Append the boundary points after the caller-provided control points.
  merged_locations = array_ops.concat(
      [control_point_locations, boundary_locations], 1)
  merged_flows = array_ops.concat([control_point_flows, boundary_flows], 1)
  return merged_locations, merged_flows
def sparse_image_warp(image,
                      source_control_point_locations,
                      dest_control_point_locations,
                      interpolation_order=2,
                      regularization_weight=0.0,
                      num_boundary_points=0,
                      name='sparse_image_warp'):
  """Image warping using correspondences between sparse control points.

  Apply a non-linear warp to the image, where the warp is specified by
  the source and destination locations of a (potentially small) number of
  control points. First, we use a polyharmonic spline
  (`tf.contrib.image.interpolate_spline`) to interpolate the displacements
  between the corresponding control points to a dense flow field.
  Then, we warp the image using this dense flow field
  (`tf.contrib.image.dense_image_warp`).

  Let t index our control points. For regularization_weight=0, we have:
  warped_image[b, dest_control_point_locations[b, t, 0],
               dest_control_point_locations[b, t, 1], :] =
  image[b, source_control_point_locations[b, t, 0],
        source_control_point_locations[b, t, 1], :].

  For regularization_weight > 0, this condition is met approximately, since
  regularized interpolation trades off smoothness of the interpolant vs.
  reconstruction of the interpolant at the control points.

  See `tf.contrib.image.interpolate_spline` for further documentation of the
  interpolation_order and regularization_weight arguments.

  Args:
    image: `[batch, height, width, channels]` float `Tensor`
    source_control_point_locations: `[batch, num_control_points, 2]` float
      `Tensor`
    dest_control_point_locations: `[batch, num_control_points, 2]` float
      `Tensor`
    interpolation_order: polynomial order used by the spline interpolation
    regularization_weight: weight on smoothness regularizer in interpolation
    num_boundary_points: How many zero-flow boundary points to include at
      each image edge. Usage:
        num_boundary_points=0: don't add zero-flow points
        num_boundary_points=1: 4 corners of the image
        num_boundary_points=2: 4 corners and one in the middle of each edge
          (8 points total)
        num_boundary_points=n: 4 corners and n-1 along each edge
    name: A name for the operation (optional).

    Note that image and offsets can be of type tf.half, tf.float32, or
    tf.float64, and do not necessarily have to be the same type.

  Returns:
    warped_image: `[batch, height, width, channels]` float `Tensor` with same
      type as input image.
    flow_field: `[batch, height, width, 2]` float `Tensor` containing the dense
      flow field produced by the interpolation.
  """
  image = ops.convert_to_tensor(image)
  source_control_point_locations = ops.convert_to_tensor(
      source_control_point_locations)
  dest_control_point_locations = ops.convert_to_tensor(
      dest_control_point_locations)
  # The flow at each control point is simply its displacement.
  control_point_flows = (
      dest_control_point_locations - source_control_point_locations)
  clamp_boundaries = num_boundary_points > 0
  boundary_points_per_edge = num_boundary_points - 1
  with ops.name_scope(name):
    # NOTE(review): image dimensions must be statically known -- they feed
    # numpy reshapes and graph-constant construction below.
    batch_size, image_height, image_width, _ = image.get_shape().as_list()
    # This generates the dense locations where the interpolant
    # will be evaluated.
    grid_locations = _get_grid_locations(image_height, image_width)
    flattened_grid_locations = np.reshape(grid_locations,
                                          [image_height * image_width, 2])
    flattened_grid_locations = constant_op.constant(
        _expand_to_minibatch(flattened_grid_locations, batch_size), image.dtype)
    if clamp_boundaries:
      # Pin the image edges by adding zero-flow control points there.
      (dest_control_point_locations,
       control_point_flows) = _add_zero_flow_controls_at_boundary(
           dest_control_point_locations, control_point_flows, image_height,
           image_width, boundary_points_per_edge)
    # Spline-interpolate the sparse flows onto every pixel location.
    flattened_flows = interpolate_spline.interpolate_spline(
        dest_control_point_locations, control_point_flows,
        flattened_grid_locations, interpolation_order, regularization_weight)
    dense_flows = array_ops.reshape(flattened_flows,
                                    [batch_size, image_height, image_width, 2])
    warped_image = dense_image_warp.dense_image_warp(image, dense_flows)
    return warped_image, dense_flows
| |
from __future__ import absolute_import
import pytest
import mock
from datetime import datetime, timedelta
from django.utils import timezone
from sentry.models import EventUser, GroupStatus, Team, User
from sentry.testutils import TestCase
from sentry.search.base import ANY
from sentry.search.utils import parse_query, get_numeric_field_value
def test_get_numeric_field_value():
    """Comparator-prefixed values map to the expected filter dicts."""
    int_cases = [
        ('10', {'foo': 10}),
        ('>10', {'foo_lower': 10, 'foo_lower_inclusive': False}),
        ('>=10', {'foo_lower': 10, 'foo_lower_inclusive': True}),
        ('<10', {'foo_upper': 10, 'foo_upper_inclusive': False}),
        ('<=10', {'foo_upper': 10, 'foo_upper_inclusive': True}),
    ]
    for raw, expected in int_cases:
        assert get_numeric_field_value('foo', raw) == expected

    float_cases = [
        ('>3.5', {'foo_lower': 3.5, 'foo_lower_inclusive': False}),
        ('<=-3.5', {'foo_upper': -3.5, 'foo_upper_inclusive': True}),
    ]
    for raw, expected in float_cases:
        assert get_numeric_field_value('foo', raw, type=float) == expected
class ParseQueryTest(TestCase):
    """End-to-end tests for `sentry.search.utils.parse_query`.

    Each test feeds a raw search string through `parse_query` bound to the
    fixture project/user and asserts on the resulting filter dict: `tags`,
    the residual free-text `query`, and any derived keys such as
    `age_from`/`age_to`, `date_from`/`date_to`, `assigned_to`, etc.
    """
    def parse_query(self, query):
        # Thin wrapper binding the fixture project and user.
        return parse_query(self.project, query, self.user)
    def test_simple(self):
        result = self.parse_query('foo bar')
        assert result == {'tags': {}, 'query': 'foo bar'}
    # "key:" with no value is treated as plain text, not a tag filter.
    def test_useless_prefix(self):
        result = self.parse_query('foo: bar')
        assert result == {'tags': {}, 'query': 'foo: bar'}
    def test_useless_prefix_with_symbol(self):
        result = self.parse_query('foo: @ba$r')
        assert result == {'tags': {}, 'query': 'foo: @ba$r'}
    def test_useless_prefix_with_colon(self):
        result = self.parse_query('foo: :ba:r::foo:')
        assert result == {'tags': {}, 'query': 'foo: :ba:r::foo:'}
    def test_handles_space_seperation_after_useless_prefix_exception(self):
        result = self.parse_query('foo: bar foo:bar')
        assert result == {'tags': {'foo': 'bar'}, 'query': 'foo: bar'}
    def test_handles_period_in_tag_key(self):
        result = self.parse_query('foo.bar:foobar')
        assert result == {'tags': {'foo.bar': 'foobar'}, 'query': ''}
    def test_handles_dash_in_tag_key(self):
        result = self.parse_query('foo-bar:foobar')
        assert result == {'tags': {'foo-bar': 'foobar'}, 'query': ''}
    # TODO: update docs to include minutes, days, and weeks suffixes
    # `age:-Nh` means "first seen within the last N hours" (lower bound);
    # `age:+Nh` means "first seen at least N hours ago" (upper bound).
    @mock.patch('django.utils.timezone.now')
    def test_age_tag_negative_value(self, now):
        start = datetime(2016, 1, 1, tzinfo=timezone.utc)
        now.return_value = start
        expected = start - timedelta(hours=12)
        result = self.parse_query('age:-12h')
        assert result == {'tags': {}, 'query': '', 'age_from': expected, 'age_from_inclusive': True}
    @mock.patch('django.utils.timezone.now')
    def test_age_tag_positive_value(self, now):
        start = datetime(2016, 1, 1, tzinfo=timezone.utc)
        now.return_value = start
        expected = start - timedelta(hours=12)
        result = self.parse_query('age:+12h')
        assert result == {'tags': {}, 'query': '', 'age_to': expected, 'age_to_inclusive': True}
    @mock.patch('django.utils.timezone.now')
    def test_age_tag_weeks(self, now):
        start = datetime(2016, 1, 1, tzinfo=timezone.utc)
        now.return_value = start
        expected = start - timedelta(days=35)
        result = self.parse_query('age:+5w')
        assert result == {'tags': {}, 'query': '', 'age_to': expected, 'age_to_inclusive': True}
    @mock.patch('django.utils.timezone.now')
    def test_age_tag_days(self, now):
        start = datetime(2016, 1, 1, tzinfo=timezone.utc)
        now.return_value = start
        expected = start - timedelta(days=10)
        result = self.parse_query('age:+10d')
        assert result == {'tags': {}, 'query': '', 'age_to': expected, 'age_to_inclusive': True}
    @mock.patch('django.utils.timezone.now')
    def test_age_tag_hours(self, now):
        start = datetime(2016, 1, 1, tzinfo=timezone.utc)
        now.return_value = start
        expected = start - timedelta(hours=10)
        result = self.parse_query('age:+10h')
        assert result == {'tags': {}, 'query': '', 'age_to': expected, 'age_to_inclusive': True}
    @mock.patch('django.utils.timezone.now')
    def test_age_tag_minutes(self, now):
        start = datetime(2016, 1, 1, tzinfo=timezone.utc)
        now.return_value = start
        expected = start - timedelta(minutes=30)
        result = self.parse_query('age:+30m')
        assert result == {'tags': {}, 'query': '', 'age_to': expected, 'age_to_inclusive': True}
    @mock.patch('django.utils.timezone.now')
    def test_two_age_tags(self, now):
        start = datetime(2016, 1, 1, tzinfo=timezone.utc)
        now.return_value = start
        expected_to = start - timedelta(hours=12)
        expected_from = start - timedelta(hours=24)
        result = self.parse_query('age:+12h age:-24h')
        assert result == {
            'tags': {},
            'query': '',
            'age_to': expected_to,
            'age_from': expected_from,
            'age_to_inclusive': True,
            'age_from_inclusive': True,
        }
    # A bare date expands to a [day, day+1) half-open interval.
    def test_event_timestamp_syntax(self):
        result = self.parse_query('event.timestamp:2016-01-02')
        assert result == {
            'query': '',
            'date_from': datetime(2016, 1, 2, tzinfo=timezone.utc),
            'date_from_inclusive': True,
            'date_to': datetime(2016, 1, 3, tzinfo=timezone.utc),
            'date_to_inclusive': False,
            'tags': {}
        }
    def test_times_seen_syntax(self):
        result = self.parse_query('timesSeen:10')
        assert result == {'tags': {}, 'times_seen': 10, 'query': ''}
    # TODO: query parser for '>' timestamp should set inclusive to False.
    @pytest.mark.xfail
    def test_greater_than_comparator(self):
        result = self.parse_query('timesSeen:>10 event.timestamp:>2016-01-02')
        assert result == {
            'tags': {},
            'query': '',
            'times_seen_lower': 10,
            'times_seen_lower_inclusive': False,
            'date_from': datetime(2016, 1, 2, tzinfo=timezone.utc),
            'date_from_inclusive': False
        }
    def test_greater_than_equal_comparator(self):
        result = self.parse_query('timesSeen:>=10 event.timestamp:>=2016-01-02')
        assert result == {
            'tags': {},
            'query': '',
            'times_seen_lower': 10,
            'times_seen_lower_inclusive': True,
            'date_from': datetime(2016, 1, 2, tzinfo=timezone.utc),
            'date_from_inclusive': True
        }
    def test_less_than_comparator(self):
        result = self.parse_query('event.timestamp:<2016-01-02 timesSeen:<10')
        assert result == {
            'tags': {},
            'query': '',
            'times_seen_upper': 10,
            'times_seen_upper_inclusive': False,
            'date_to': datetime(2016, 1, 2, tzinfo=timezone.utc),
            'date_to_inclusive': False
        }
    # TODO: query parser for '<=' timestamp should set inclusive to True.
    @pytest.mark.xfail
    def test_less_than_equal_comparator(self):
        result = self.parse_query('event.timestamp:<=2016-01-02 timesSeen:<=10')
        assert result == {
            'tags': {},
            'query': '',
            'times_seen_upper': 10,
            'times_seen_upper_inclusive': True,
            'date_to': datetime(2016, 1, 2, tzinfo=timezone.utc),
            'date_to_inclusive': True
        }
    def test_handles_underscore_in_tag_key(self):
        result = self.parse_query('foo_bar:foobar')
        assert result == {'tags': {'foo_bar': 'foobar'}, 'query': ''}
    def test_mix_tag_and_query(self):
        result = self.parse_query('foo bar key:value')
        assert result == {'tags': {'key': 'value'}, 'query': 'foo bar'}
    def test_single_tag(self):
        result = self.parse_query('key:value')
        assert result == {'tags': {'key': 'value'}, 'query': ''}
    def test_tag_with_colon_in_value(self):
        result = self.parse_query('url:http://example.com')
        assert result == {'tags': {'url': 'http://example.com'}, 'query': ''}
    def test_single_space_in_value(self):
        result = self.parse_query('key:"value1 value2"')
        assert result == {'tags': {'key': 'value1 value2'}, 'query': ''}
    def test_multiple_spaces_in_value(self):
        result = self.parse_query('key:"value1 value2"')
        assert result == {'tags': {'key': 'value1 value2'}, 'query': ''}
    def test_invalid_tag_as_query(self):
        result = self.parse_query('Resque::DirtyExit')
        assert result == {'tags': {}, 'query': 'Resque::DirtyExit'}
    def test_colons_in_tag_value(self):
        result = self.parse_query('key:Resque::DirtyExit')
        assert result == {'tags': {'key': 'Resque::DirtyExit'}, 'query': ''}
    def test_multiple_tags(self):
        result = self.parse_query('foo:bar key:value')
        assert result == {'tags': {'key': 'value', 'foo': 'bar'}, 'query': ''}
    def test_single_tag_with_quotes(self):
        result = self.parse_query('foo:"bar"')
        assert result == {'tags': {'foo': 'bar'}, 'query': ''}
    def test_tag_with_quotes_and_query(self):
        result = self.parse_query('key:"a value" hello')
        assert result == {'tags': {'key': 'a value'}, 'query': 'hello'}
    def test_is_resolved(self):
        result = self.parse_query('is:resolved')
        assert result == {'status': GroupStatus.RESOLVED, 'tags': {}, 'query': ''}
    def test_assigned_me(self):
        result = self.parse_query('assigned:me')
        assert result == {'assigned_to': self.user, 'tags': {}, 'query': ''}
    def test_assigned_email(self):
        result = self.parse_query('assigned:%s' % (self.user.email, ))
        assert result == {'assigned_to': self.user, 'tags': {}, 'query': ''}
    # Unknown users/teams resolve to a sentinel instance with id == 0.
    def test_assigned_unknown_user(self):
        result = self.parse_query('assigned:fake@example.com')
        assert isinstance(result['assigned_to'], User)
        assert result['assigned_to'].id == 0
    def test_assigned_valid_team(self):
        result = self.parse_query('assigned:#{}'.format(self.team.slug))
        assert result['assigned_to'] == self.team
    def test_assigned_unassociated_team(self):
        team2 = self.create_team(organization=self.organization)
        result = self.parse_query('assigned:#{}'.format(team2.slug))
        assert isinstance(result['assigned_to'], Team)
        assert result['assigned_to'].id == 0
    def test_assigned_invalid_team(self):
        result = self.parse_query('assigned:#invalid')
        assert isinstance(result['assigned_to'], Team)
        assert result['assigned_to'].id == 0
    def test_bookmarks_me(self):
        result = self.parse_query('bookmarks:me')
        assert result == {'bookmarked_by': self.user, 'tags': {}, 'query': ''}
    def test_bookmarks_email(self):
        result = self.parse_query('bookmarks:%s' % (self.user.email, ))
        assert result == {'bookmarked_by': self.user, 'tags': {}, 'query': ''}
    def test_bookmarks_unknown_user(self):
        result = self.parse_query('bookmarks:fake@example.com')
        assert result['bookmarked_by'].id == 0
    def test_first_release(self):
        result = self.parse_query('first-release:bar')
        assert result == {'first_release': 'bar', 'tags': {}, 'query': ''}
    # release/dist/user shorthand expand to reserved "sentry:" tag keys.
    def test_release(self):
        result = self.parse_query('release:bar')
        assert result == {'tags': {'sentry:release': 'bar'}, 'query': ''}
    def test_dist(self):
        result = self.parse_query('dist:123')
        assert result == {'tags': {'sentry:dist': '123'}, 'query': ''}
    def test_padded_spacing(self):
        result = self.parse_query('release:bar foo bar')
        assert result == {'tags': {'sentry:release': 'bar'}, 'query': 'foo bar'}
    def test_unknown_user_with_dot_query(self):
        result = self.parse_query('user.email:fake@example.com')
        assert result['tags']['sentry:user'] == 'email:fake@example.com'
    def test_unknown_user_value(self):
        result = self.parse_query('user.xxxxxx:example')
        assert result['tags']['sentry:user'] == 'xxxxxx:example'
    def test_user_lookup_with_dot_query(self):
        euser = EventUser.objects.create(
            project_id=self.project.id,
            ident='1',
            username='foobar',
        )
        result = self.parse_query('user.username:foobar')
        assert result['tags']['sentry:user'] == euser.tag_value
    def test_unknown_user_legacy_syntax(self):
        result = self.parse_query('user:email:fake@example.com')
        assert result['tags']['sentry:user'] == 'email:fake@example.com'
    def test_user_lookup_legacy_syntax(self):
        euser = EventUser.objects.create(
            project_id=self.project.id,
            ident='1',
            username='foobar',
        )
        result = self.parse_query('user:username:foobar')
        assert result['tags']['sentry:user'] == euser.tag_value
    def test_is_unassigned(self):
        result = self.parse_query('is:unassigned')
        assert result == {'unassigned': True, 'tags': {}, 'query': ''}
    def test_is_assigned(self):
        result = self.parse_query('is:assigned')
        assert result == {'unassigned': False, 'tags': {}, 'query': ''}
    # The range tests below use loose one-hour windows around the expected
    # offset instead of mocking `now`, to avoid flakiness.
    def test_age_from(self):
        result = self.parse_query('age:-24h')
        assert result['age_from'] > timezone.now() - timedelta(hours=25)
        assert result['age_from'] < timezone.now() - timedelta(hours=23)
        assert not result.get('age_to')
    def test_age_to(self):
        result = self.parse_query('age:+24h')
        assert result['age_to'] > timezone.now() - timedelta(hours=25)
        assert result['age_to'] < timezone.now() - timedelta(hours=23)
        assert not result.get('age_from')
    def test_age_range(self):
        result = self.parse_query('age:-24h age:+12h')
        assert result['age_from'] > timezone.now() - timedelta(hours=25)
        assert result['age_from'] < timezone.now() - timedelta(hours=23)
        assert result['age_to'] > timezone.now() - timedelta(hours=13)
        assert result['age_to'] < timezone.now() - timedelta(hours=11)
    def test_first_seen_range(self):
        result = self.parse_query('firstSeen:-24h firstSeen:+12h')
        assert result['age_from'] > timezone.now() - timedelta(hours=25)
        assert result['age_from'] < timezone.now() - timedelta(hours=23)
        assert result['age_to'] > timezone.now() - timedelta(hours=13)
        assert result['age_to'] < timezone.now() - timedelta(hours=11)
    def test_date_range(self):
        result = self.parse_query('event.timestamp:>2016-01-01 event.timestamp:<2016-01-02')
        assert result['date_from'] == datetime(2016, 1, 1, tzinfo=timezone.utc)
        assert result['date_from_inclusive'] is False
        assert result['date_to'] == datetime(2016, 1, 2, tzinfo=timezone.utc)
        assert result['date_to_inclusive'] is False
    def test_date_range_inclusive(self):
        result = self.parse_query('event.timestamp:>=2016-01-01 event.timestamp:<=2016-01-02')
        assert result['date_from'] == datetime(2016, 1, 1, tzinfo=timezone.utc)
        assert result['date_from_inclusive'] is True
        assert result['date_to'] == datetime(2016, 1, 2, tzinfo=timezone.utc)
        assert result['date_to_inclusive'] is True
    def test_date_approx_day(self):
        date_value = datetime(2016, 1, 1, tzinfo=timezone.utc)
        result = self.parse_query('event.timestamp:2016-01-01')
        assert result['date_from'] == date_value
        assert result['date_from_inclusive']
        assert result['date_to'] == date_value + timedelta(days=1)
        assert not result['date_to_inclusive']
    # An exact timestamp expands to a small window around the given moment.
    def test_date_approx_precise(self):
        date_value = datetime(2016, 1, 1, tzinfo=timezone.utc)
        result = self.parse_query('event.timestamp:2016-01-01T00:00:00')
        assert result['date_from'] == date_value - timedelta(minutes=5)
        assert result['date_from_inclusive']
        assert result['date_to'] == date_value + timedelta(minutes=6)
        assert not result['date_to_inclusive']
    def test_active_range(self):
        result = self.parse_query('activeSince:-24h activeSince:+12h')
        assert result['active_at_from'] > timezone.now() - timedelta(hours=25)
        assert result['active_at_from'] < timezone.now() - timedelta(hours=23)
        assert result['active_at_to'] > timezone.now() - timedelta(hours=13)
        assert result['active_at_to'] < timezone.now() - timedelta(hours=11)
    def test_last_seen_range(self):
        result = self.parse_query('lastSeen:-24h lastSeen:+12h')
        assert result['last_seen_from'] > timezone.now() - timedelta(hours=25)
        assert result['last_seen_from'] < timezone.now() - timedelta(hours=23)
        assert result['last_seen_to'] > timezone.now() - timedelta(hours=13)
        assert result['last_seen_to'] < timezone.now() - timedelta(hours=11)
    # `has:key` matches any value (ANY sentinel) unless a concrete value
    # for the same key appears later in the query.
    def test_has_tag(self):
        result = self.parse_query('has:foo')
        assert result['tags']['foo'] == ANY
        result = self.parse_query('has:foo foo:value')
        assert result['tags']['foo'] == 'value'
    def test_has_user(self):
        result = self.parse_query('has:user')
        assert result['tags']['sentry:user'] == ANY
    def test_has_release(self):
        result = self.parse_query('has:release')
        assert result['tags']['sentry:release'] == ANY
    def test_quoted_string(self):
        result = self.parse_query('"release:foo"')
        assert result == {'tags': {}, 'query': 'release:foo'}
| |
"""Functions for smooshing images. Given a background mask (a foreground
selection), these functions move pixels in an image around to create
various "crash" effects. Images can have their foregrounds crashed into
the center of the image, the side of the image, and so on."""
from __future__ import print_function, division
from collections import namedtuple
from itertools import repeat
import os
import sys
import time
import numpy as np
from six.moves import zip, range
from crash_kiss import util, foreground
from crash_kiss.config import WHITE
#from crash_kiss import smoosh
from crash_kiss.omp_smoosh import smoosh
# _MID_FG is a placeholder value for foreground data that is at the center
# of the image. It's used to distinguish the fg-at-center case from
# the no-fg-at-all case. This is because `np.argmax` returns 0 if the max
# value has index 0 (whether that max value is 0 or 1 or anything else!).
_MID_FG = 0xFFFF  # placeholder index for foreground data at the center

# Record describing a crash operation: the foreground window
# (start/stop/fg_mid), the crash depth, and the derived column landmarks
# used when shifting rows toward the center.
_crash_data = namedtuple(
    "sdata", "start stop fg_mid max_depth fg_l fg_r "
             "mid_left center mid_right side_len")
# Per-row record: the image row, its left/right foreground start offsets,
# and the matching foreground-mask row.
_row_data = namedtuple("row", "irow ls rs frow")
def center_crash(img, fg, bounds, background_value):
    """Crash the image's foreground toward the horizontal center, in place.

    Args:
        img: image array to mutate
        fg: 2D foreground mask covering columns ``start:stop`` of `img`
        bounds: ``(start, stop, fg_mid, depth)`` crash-window description
        background_value: pixel value used to fill vacated space

    Returns:
        the (mutated) input image
    """
    start, stop, fg_mid, depth = bounds
    # Expand the windowed mask to a full-image mask. Named `fg_mask` (not
    # `foreground`) so it does not shadow the `crash_kiss.foreground` module
    # imported at the top of the file.
    fg_mask = np.zeros(img.shape[:2], dtype=np.uint8)
    fg_mask[:, start: stop] = fg
    smoosh(img, fg_mask, depth, background_value)
    return img
def _old_center_crash(img, fg, bounds):
    """Move the rows of each subject together until they touch.

    Write over the vacated space with whatever the row's negative space
    is (probably white or transparent pixels).

    NOTE(review): legacy implementation superseded by `center_crash`. It
    calls `smoosh.smoosh_overlap` / `smoosh.smoosh`, but `smoosh` is now
    imported as a *function* from `crash_kiss.omp_smoosh` (the module import
    above is commented out), so these attribute accesses would raise
    AttributeError if this path were executed -- confirm before reviving.
    """
    start, stop, fg_mid, depth = bounds
    # Column landmarks of the crash window (some are currently unused:
    # fg_l, fg_r, side_len are computed but never read).
    fg_l = fg_mid - depth
    fg_r = fg_mid + depth
    mid_left = start + depth
    center = start + 2 * depth
    mid_right = center + depth
    side_len = fg.shape[1] // 2
    # Split the mask at its midline; mirror the left half so both sides can
    # be scanned with argmax from the center outward.
    lfg = fg[:, :bounds.fg_mid]
    lfg = util.invert_horizontal(lfg)
    rfg = fg[:, bounds.fg_mid:]
    lstart = np.argmax(lfg, axis=1)
    rstart = np.argmax(rfg, axis=1)
    # Rows where the foreground already touches the center column.
    overlap = np.logical_or(rfg[:, 0], lfg[:, 0])
    foreground = np.zeros(img.shape[:2], dtype=np.uint8)
    foreground[:, start: stop] = fg
    # argmax == 0 with no overlap means "no foreground on that side"
    # (see the _MID_FG note at the top of the module).
    lnil = np.logical_and(lstart == 0, ~overlap)
    rnil = np.logical_and(rstart == 0, ~overlap)
    rows_empty = np.logical_and(lnil, rnil)
    # Rows with no subject at all: just shift both halves inward by `depth`.
    for chunk, _ in _contiguous_chunks(rows_empty, img):
        cpy = chunk.copy()
        chunk[:, mid_left: -depth] = chunk[:, center:]
        chunk[:, depth: mid_right] = cpy[:, :center]
        chunk[:, :depth] = WHITE
        chunk[:, -depth:] = WHITE
    # Move rows with subject only on left side OR no subject at all
    rows_left = np.logical_and(~lnil, rnil)
    for chunk, _ in _contiguous_chunks(rows_left, img):
        cpy = chunk.copy()
        chunk[:, mid_left: -depth] = chunk[:, center:]
        chunk[:, depth: mid_right] = cpy[:, :center]
        chunk[:, :depth] = WHITE
        chunk[:, -depth:] = WHITE
    # Move rows with subject only on right side
    rows_right = np.logical_and(~rnil, lnil)
    for chunk, _ in _contiguous_chunks(rows_right, img):
        cpy = chunk.copy()
        chunk[:, depth: mid_right] = chunk[:, :center]
        chunk[:, mid_left: -depth] = cpy[:, center:]
        chunk[:, :depth] = WHITE
        chunk[:, -depth:] = WHITE
    # Move rows with foreground overlapping the center
    chunks = _contiguous_chunks(overlap, img, foreground, lfg, rfg)
    for chunk, (f, _lfg, _rfg) in chunks:
        l = np.argmin(_lfg, axis=1)
        r = np.argmin(_rfg, axis=1)
        left_overlaps = (l > r).astype(np.uint8)
        smoosh.smoosh_overlap(chunk, f, left_overlaps, depth)
    # Move rows with subjects that are close together
    rows_close = np.logical_and(~rnil, ~lnil)
    rows_close[overlap] = 0
    chunks = _contiguous_chunks(
        rows_close, img, lstart, rstart, foreground)
    for chunk, (l, r, f) in chunks:
        smoosh.smoosh(chunk, l, r, f, depth)
    return img
def _contiguous_chunks(mask, img, *masks):
idx = 0
while idx <= mask.size - 1:
start = np.argmax(mask[idx:]) + idx
if start == idx and not mask[idx]:
break
stop = np.argmin(mask[start:]) + start
if start == mask.size - 1:
yield img[start: start + 1], (m[start: start + 1] for m in masks)
break
elif stop == start:
if mask[start + 1]:
yield img[start:], (m[start:] for m in masks)
else:
yield img[start: start + 1], (m[start: start + 1] for m in masks)
break
else:
yield img[start: stop], (m[start: stop] for m in masks)
idx = stop + 1
class CrashParams(object):
    """A **picklable** container of values that can be sent to
    a `multiprocessing.Process` object. Usually a `namedtuple` or some
    other simple container would be better, but `namedtuple` is not
    picklable!"""
    _params = "max_depth threshold bg_value rgb_select".split()

    def __init__(self, *args, **kwargs):
        # Start every known parameter at None, then overlay positional
        # values (matched to _params order) and keyword values.
        values = dict.fromkeys(self._params)
        values.update(zip(self._params, args))
        values.update(kwargs)
        self.__dict__.update(values)

    def __iter__(self):
        # Yield the known parameters in declaration order.
        return (self.__dict__[name] for name in self._params)
class SequenceParams(CrashParams):
    """A picklable record to pass to `parallel_crash` for running a
    crash over multiple processes"""
    # Inherits CrashParams' positional/keyword __init__ and __iter__;
    # only the parameter names differ.
    _params = ("target working_dir output_suffix crash_params "
               "counter depths".split())
def sequence_crash(params):
    """Given an input filename `target`, a `CrashParams` instance,
    and an iterable of `depths`, write a crashed version of the target
    image to the disk for each depth.

    Args:
        params: a `SequenceParams` record carrying `target`, `working_dir`,
            `output_suffix`, `crash_params`, `counter`, and `depths`.
    """
    # The images are *written* and not returned or yielded because it's not
    # efficient to pass huge n-dimensional arrays between processes. Each
    # process reads its own copy of the target image from the disk and writes
    # its own crashed output files to the disk. In my tests, the program is
    # mostly limited by disk IO (even on SSDs). While each additional process
    # below cpu_count() improves performance, it's usually not by huge amounts.
    start = time.time()  # keep track of duration to show how cool we are
    loc, name, suffix, ext = util.get_filename_hints(
        params.target, params.working_dir, params.output_suffix)
    tail = "{0}_{1}_{2}.{3}".format(name, suffix, "{0:04d}", ext)
    template = os.path.join(loc, tail)
    img = util.read_img(params.target)
    fg, bounds = foreground.find_foreground(img, params.crash_params)
    max_depth = params.depths[0]
    # BUG FIX: `center_crash` requires a `background_value` argument; the
    # previous three-argument call raised a TypeError.
    first_img = center_crash(
        img.copy(), fg, bounds, params.crash_params.bg_value)
    util.save_img(template.format(max_depth), first_img)
    _print_count(params.counter)
    # We'll create a background mask (i.e. the foreground selection) with
    # the same shape as the image. This lets us calculate the entire
    # foreground just once and slice it down to size for each iteration
    # of the crash. Not having to recalculate the foreground each time
    # saves lots of CPU cycles.
    total_fg = np.zeros(shape=img.shape[:2], dtype=bool)  # a 2D mask
    total_fg[:, bounds.start: bounds.stop] = fg
    for depth in params.depths[1:]:
        if depth == 0:
            # Depth zero is a no-op: write the original image unchanged.
            crashed = img
        else:
            crashed = _crash_at_depth(img, total_fg, depth)
        util.save_img(template.format(depth), crashed)
        _print_count(params.counter)
def _print_count(counter):
counter.value -= 1
print("Remaining: {0:04d}\r".format(counter.value), end="")
sys.stdout.flush()
def _crash_at_depth(img, total_fg, depth, background_value=WHITE):
    """Select a subset of the complete background mask (the foreground)
    and crash that subset of pixels by `depth`.

    Args:
        img: image to crash (a copy is made; the input is not mutated)
        total_fg: full-image 2D foreground mask
        depth: crash depth in pixels
        background_value: fill value for vacated pixels. Defaults to WHITE
            so existing three-argument callers keep working.

    Returns:
        a new, crashed copy of `img`
    """
    fg, bounds = foreground.get_foreground_area(total_fg, depth)
    # BUG FIX: `center_crash` takes four arguments; the previous call
    # omitted `background_value` and raised a TypeError.
    crashed_img = center_crash(img.copy(), fg, bounds, background_value)
    return crashed_img
| |
import logging
import torch
import torch.autograd
import os
import six
from eight_mile.utils import listify, Offsets
from eight_mile.pytorch.optz import OptimizerManager
from eight_mile.metrics import UCM, LCM, UAS, LAS
from eight_mile.progress import create_progress_bar
from baseline.utils import verbose_output, get_model_file, get_metric_cmp
from baseline.train import EpochReportingTrainer, create_trainer, register_trainer, register_training_func
from baseline.model import create_model_for
from torch.utils.data import DataLoader
logger = logging.getLogger('baseline')
@register_trainer(task='deps', name='default')
class DependencyParserTrainerPyTorch(EpochReportingTrainer):
    """Epoch-based trainer for PyTorch dependency parsers.

    Handles optional model construction from a hyperparameter dict,
    single/multi-GPU placement, gradient clipping, step-level reporting,
    and evaluation with LAS/UAS/LCM/UCM metrics.
    """

    def __init__(self, model, **kwargs):
        """Create the trainer.

        :param model: a constructed model, or a dict of hyperparameters to
            pass to `create_model_for('deps', ...)`
        :param kwargs: `punct_eval`, `clip`, `gpus`, `nsteps`, plus options
            forwarded to `OptimizerManager`
        """
        super().__init__()
        # A dict means we were handed hyperparameters rather than a model;
        # construct it exactly once (this check was previously duplicated
        # both before and after the super() call).
        if isinstance(model, dict):
            model = create_model_for('deps', **model)
        self.punct_eval = kwargs.get('punct_eval', False)
        self.clip = float(kwargs.get('clip', 5))
        self.labels = model.labels
        self.gpus = int(kwargs.get('gpus', 1))
        if self.gpus == -1:
            # -1 means "use every device visible in the environment"
            self.gpus = len(os.getenv('CUDA_VISIBLE_DEVICES', os.getenv('NV_GPU', '0')).split(','))
        self.optimizer = OptimizerManager(model, **kwargs)
        self.model = model
        if self.gpus > 0 and self.model.gpu:
            self.crit = model.create_loss().cuda()
            if self.gpus > 1:
                self.model = torch.nn.DataParallel(model).cuda()
            else:
                self.model.cuda()
        else:
            logger.warning("Requested training on CPU. This will be slow.")
            self.crit = model.create_loss()
            self.model = model
        self.nsteps = kwargs.get('nsteps', six.MAXSIZE)

    def _get_pytorch_model(self):
        """Return the underlying model, unwrapping DataParallel if present."""
        return self.model.module if self.gpus > 1 else self.model

    def save(self, model_file):
        """Persist the (unwrapped) model to `model_file`."""
        self._get_pytorch_model().save(model_file)

    def _make_input(self, batch_dict, **kwargs):
        """Convert a raw batch dict into model-ready inputs."""
        return self._get_pytorch_model().make_input(batch_dict, **kwargs)

    @staticmethod
    def _get_batchsz(batch_dict):
        """Number of examples in the batch."""
        return len(batch_dict['labels'])

    def _test(self, loader, **kwargs):
        """Evaluate with greedy decoding, scoring LAS/UAS/LCM/UCM.

        When `punct_eval` is False, gold labels equal to the model's punct
        index are masked to PAD so punctuation is excluded from scoring.

        :param loader: evaluation `DataLoader`
        :return: dict of metric name -> score
        """
        self.model.eval()
        steps = len(loader)
        pg = create_progress_bar(steps)
        metrics = [LAS(), UAS(), LCM(), UCM()]
        with torch.no_grad():
            for batch_dict in pg(loader):
                example = self._make_input(batch_dict)
                labels_gold = example.pop('labels')
                heads_gold = example.pop('heads')
                batchsz = self._get_batchsz(batch_dict)
                greedy_heads_pred, greedy_labels_pred = self.model.decode(example)
                # Gold sequences may be padded longer than the predictions;
                # trim them to the predicted length before scoring.
                T = greedy_labels_pred.shape[1]
                labels_gold_trimmed = labels_gold[:, :T]
                heads_gold_trimmed = heads_gold[:, :T]
                for i in range(batchsz):
                    if self.punct_eval is False:
                        # Masking is idempotent, so do it once per row
                        # rather than once per metric as before.
                        labels_gold_trimmed[i].masked_fill_(labels_gold_trimmed[i] == self.model.punct, Offsets.PAD)
                    for m in metrics:
                        m.add(greedy_heads_pred[i], heads_gold_trimmed[i], greedy_labels_pred[i], labels_gold_trimmed[i])
        metrics = {m.name: m.score for m in metrics}
        return metrics

    def _train(self, loader, **kwargs):
        """Run one training epoch.

        Reports step-level metrics every `nsteps` optimizer steps via the
        supplied `reporting_fns`.

        :param loader: training `DataLoader`
        :return: dict with `lr` and `avg_loss`
        """
        self.model.train()
        reporting_fns = kwargs.get('reporting_fns', [])
        steps = len(loader)
        pg = create_progress_bar(steps)
        epoch_loss = 0
        epoch_div = 0
        for batch_dict in pg(loader):
            self.optimizer.zero_grad()
            example = self._make_input(batch_dict)
            heads_gold = example.pop('heads')
            labels_gold = example.pop('labels')
            heads_pred, labels_pred = self.model(example)
            loss = self.crit(heads_pred, heads_gold, labels_pred, labels_gold)
            batchsz = self._get_batchsz(batch_dict)
            # Weight the reported loss by batch size so the epoch average
            # is per-example, not per-batch.
            report_loss = loss.item() * batchsz
            epoch_loss += report_loss
            epoch_div += batchsz
            self.nstep_agg += report_loss
            self.nstep_div += batchsz
            loss.backward()
            torch.nn.utils.clip_grad_norm_(self.model.parameters(), self.clip)
            self.optimizer.step()
            if (self.optimizer.global_step + 1) % self.nsteps == 0:
                metrics = self.calc_metrics(self.nstep_agg, self.nstep_div)
                metrics['lr'] = self.optimizer.current_lr
                self.report(
                    self.optimizer.global_step + 1, metrics, self.nstep_start,
                    'Train', 'STEP', reporting_fns, self.nsteps
                )
                self.reset_nstep()
        metrics = {}
        metrics['lr'] = self.optimizer.current_lr
        metrics['avg_loss'] = epoch_loss / float(epoch_div)
        return metrics
@register_training_func('deps')
def fit(model_params, ts, vs, es, **kwargs):
    """
    Train a dependency parser using PyTorch

    :param model_params: The model to train
    :param ts: A training data set
    :param vs: A validation data set
    :param es: A test data set, can be None

    :param kwargs: See below

    :Keyword Arguments:
        * *do_early_stopping* (``bool``) -- Stop after eval data is not improving. Default to True
        * *epochs* (``int``) -- how many epochs.  Default to 20
        * *basedir* -- Directory used to derive the model checkpoint path (via `get_model_file`)
        * *patience* --
           How many epochs where evaluation is no longer improving before we give up
        * *reporting* --
           Callbacks which may be used on reporting updates
        * *optim* --
           Optimizer to use, defaults to `sgd`
        * *eta, lr* (``float``) --
           Learning rate, defaults to 0.01
        * *mom* (``float``) --
           Momentum (SGD only), defaults to 0.9 if optim is `sgd`

    :return: Test metrics if `es` is provided, otherwise None
    """
    do_early_stopping = bool(kwargs.get('do_early_stopping', True))
    verbose = kwargs.get('verbose', {'console': kwargs.get('verbose_console', False), 'file': kwargs.get('verbose_file', None)})
    epochs = int(kwargs.get('epochs', 20))
    model_file = get_model_file('deps', 'pytorch', kwargs.get('basedir'))
    output = kwargs.get('output')
    txts = kwargs.get('txts')

    num_loader_workers = int(kwargs.get('num_loader_workers', 0))
    pin_memory = bool(kwargs.get('pin_memory', True))
    # Wrap raw datasets in DataLoaders.  batch_size=None because batching is
    # assumed to be done by the dataset itself.
    if not isinstance(ts, DataLoader):
        ts = DataLoader(ts, num_workers=num_loader_workers, batch_size=None, pin_memory=pin_memory)
    if not isinstance(vs, DataLoader):
        vs = DataLoader(vs, batch_size=None, pin_memory=pin_memory)
    if es and not isinstance(es, DataLoader):
        es = DataLoader(es, batch_size=None, pin_memory=pin_memory)

    best_metric = 0
    if do_early_stopping:
        early_stopping_metric = kwargs.get('early_stopping_metric', 'acc')
        early_stopping_cmp, best_metric = get_metric_cmp(early_stopping_metric, kwargs.get('early_stopping_cmp'))
        patience = kwargs.get('patience', epochs)
        logger.info('Doing early stopping on [%s] with patience [%d]', early_stopping_metric, patience)

    reporting_fns = listify(kwargs.get('reporting', []))
    logger.info('reporting %s', reporting_fns)

    trainer = create_trainer(model_params, **kwargs)
    last_improved = 0

    for epoch in range(epochs):
        trainer.train(ts, reporting_fns)
        test_metrics = trainer.test(vs, reporting_fns)

        if do_early_stopping is False:
            # No early stopping: checkpoint unconditionally after every epoch.
            trainer.save(model_file)
        elif early_stopping_cmp(test_metrics[early_stopping_metric], best_metric):
            # New best on the validation metric: remember it and checkpoint.
            last_improved = epoch
            best_metric = test_metrics[early_stopping_metric]
            logger.info('New best %.3f', best_metric)
            trainer.save(model_file)
        elif (epoch - last_improved) > patience:
            logger.info('Stopping due to persistent failures to improve')
            break

    if do_early_stopping is True:
        logger.info('Best performance on %s: %.3f at epoch %d', early_stopping_metric, best_metric, last_improved)

    if es is not None:
        # Evaluate the best checkpoint on the held-out test set.
        logger.info('Reloading best checkpoint')
        model = torch.load(model_file)
        trainer = create_trainer(model, **kwargs)
        test_metrics = trainer.test(es, reporting_fns, phase='Test', verbose=verbose, output=output, txts=txts)
        return test_metrics
| |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""TF Lattice premade models implement typical monotonic model architectures.
You can use TFL premade models to easily construct commonly used monotonic model
architectures. To construct a TFL premade model, construct a model configuration
from `tfl.configs` and pass it to the premade model constructor. No fields in
the model config will be automatically filled in, so the config must be fully
specified. Note that the inputs to the model should match the order in which
they are defined in the feature configs.
```python
model_config = tfl.configs.CalibratedLatticeConfig(...)
calibrated_lattice_model = tfl.premade.CalibratedLattice(
model_config=model_config)
calibrated_lattice_model.compile(...)
calibrated_lattice_model.fit(...)
```
Supported models are defined in `tfl.configs`. Each model architecture can be
used the same as any other `tf.keras.Model`.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from . import aggregation_layer
from . import categorical_calibration_layer
from . import configs
from . import kronecker_factored_lattice_layer as kfll
from . import lattice_layer
from . import linear_layer
from . import parallel_combination_layer
from . import premade_lib
from . import pwl_calibration_layer
from . import rtl_layer
from absl import logging
import tensorflow as tf
# TODO: add support for serialization and object scoping or annotations.
class CalibratedLatticeEnsemble(tf.keras.Model):
  """Premade model for Tensorflow calibrated lattice ensemble models.

  Creates a `tf.keras.Model` for the model architecture specified by the
  `model_config`, which should be a
  `tfl.configs.CalibratedLatticeEnsembleConfig`. No fields in the model config
  will be automatically filled in, so the config must be fully specified. Note
  that the inputs to the model should match the order in which they are defined
  in the feature configs.

  Example:

  ```python
  model_config = tfl.configs.CalibratedLatticeEnsembleConfig(...)
  calibrated_lattice_ensemble_model = tfl.premade.CalibratedLatticeEnsemble(
      model_config=model_config)
  calibrated_lattice_ensemble_model.compile(...)
  calibrated_lattice_ensemble_model.fit(...)
  ```

  Attributes:
    model_config: Model configuration object describing model architecture.
      Should be a `tfl.configs.CalibratedLatticeEnsembleConfig` instance.
  """

  def __init__(self, model_config=None, dtype=tf.float32, **kwargs):
    """Initializes a `CalibratedLatticeEnsemble` instance.

    Args:
      model_config: Model configuration object describing model architecture.
        Should be one of the model configs in `tfl.configs`.
      dtype: dtype of layers used in the model.
      **kwargs: Any additional `tf.keras.Model` arguments

    Raises:
      ValueError: If `model_config` is missing, is not a
        `CalibratedLatticeEnsembleConfig`, or is not fully specified.
    """
    # Set our model_config
    self.model_config = model_config
    # Check if we are constructing with already provided inputs/outputs, e.g.
    # when we are loading a model.
    if 'inputs' in kwargs and 'outputs' in kwargs:
      super(CalibratedLatticeEnsemble, self).__init__(**kwargs)
      return
    if model_config is None:
      raise ValueError('Must provide a model_config.')
    # Check that proper config has been given.
    if not isinstance(model_config, configs.CalibratedLatticeEnsembleConfig):
      raise ValueError('Invalid config type: {}'.format(type(model_config)))
    # Verify that the config is fully specified.
    premade_lib.verify_config(model_config)
    # Get feature configs and construct model.
    input_layer = premade_lib.build_input_layer(
        feature_configs=model_config.feature_configs, dtype=dtype)
    # The ensemble layer averages its member lattices itself only when no
    # trained linear combination follows it.
    lattice_outputs = premade_lib.build_calibrated_lattice_ensemble_layer(
        calibration_input_layer=input_layer,
        model_config=model_config,
        average_outputs=(not model_config.use_linear_combination),
        dtype=dtype)
    if model_config.use_linear_combination:
      averaged_lattice_output = premade_lib.build_linear_combination_layer(
          ensemble_outputs=lattice_outputs,
          model_config=model_config,
          dtype=dtype)
    else:
      averaged_lattice_output = lattice_outputs
    # Optional final piecewise-linear output calibration.
    if model_config.output_calibration:
      model_output = premade_lib.build_output_calibration_layer(
          output_calibration_input=averaged_lattice_output,
          model_config=model_config,
          dtype=dtype)
    else:
      model_output = averaged_lattice_output
    # Define inputs and initialize model.
    inputs = [
        input_layer[feature_config.name]
        for feature_config in model_config.feature_configs
    ]
    kwargs['inputs'] = inputs
    kwargs['outputs'] = model_output
    super(CalibratedLatticeEnsemble, self).__init__(**kwargs)

  def get_config(self):
    """Returns a configuration dictionary."""
    config = super(CalibratedLatticeEnsemble, self).get_config()
    config['model_config'] = tf.keras.utils.serialize_keras_object(
        self.model_config)
    return config

  @classmethod
  def from_config(cls, config, custom_objects=None):
    """Recreates the model from a config produced by `get_config`."""
    model = super(CalibratedLatticeEnsemble, cls).from_config(
        config, custom_objects=custom_objects)
    try:
      # Restore and re-verify model_config; fall back to a model without it
      # if deserialization or verification fails.
      model_config = tf.keras.utils.deserialize_keras_object(
          config.get('model_config'), custom_objects=custom_objects)
      premade_lib.verify_config(model_config)
      model.model_config = model_config
    except ValueError:
      logging.warning(
          'Could not load model_config. Constructing model without it: %s',
          str(config.get('model_config')))
    return model
class CalibratedLattice(tf.keras.Model):
  """Premade model for Tensorflow calibrated lattice models.

  Creates a `tf.keras.Model` for the model architecture specified by the
  `model_config`, which should be a `tfl.configs.CalibratedLatticeConfig`. No
  fields in the model config will be automatically filled in, so the config
  must be fully specified. Note that the inputs to the model should match the
  order in which they are defined in the feature configs.

  Example:

  ```python
  model_config = tfl.configs.CalibratedLatticeConfig(...)
  calibrated_lattice_model = tfl.premade.CalibratedLattice(
      model_config=model_config)
  calibrated_lattice_model.compile(...)
  calibrated_lattice_model.fit(...)
  ```

  Attributes:
    model_config: Model configuration object describing model architecture.
      Should be a `tfl.configs.CalibratedLatticeConfig` instance.
  """

  def __init__(self, model_config=None, dtype=tf.float32, **kwargs):
    """Initializes a `CalibratedLattice` instance.

    Args:
      model_config: Model configuration object describing model architecture.
        Should be one of the model configs in `tfl.configs`.
      dtype: dtype of layers used in the model.
      **kwargs: Any additional `tf.keras.Model` arguments.

    Raises:
      ValueError: If `model_config` is missing, is not a
        `CalibratedLatticeConfig`, or is not fully specified.
    """
    # Set our model_config
    self.model_config = model_config
    # Check if we are constructing with already provided inputs/outputs, e.g.
    # when we are loading a model.
    if 'inputs' in kwargs and 'outputs' in kwargs:
      super(CalibratedLattice, self).__init__(**kwargs)
      return
    if model_config is None:
      raise ValueError('Must provide a model_config.')
    # Check that proper config has been given.
    if not isinstance(model_config, configs.CalibratedLatticeConfig):
      raise ValueError('Invalid config type: {}'.format(type(model_config)))
    # Verify that the config is fully specified.
    premade_lib.verify_config(model_config)
    # Get feature configs and construct model.
    input_layer = premade_lib.build_input_layer(
        feature_configs=model_config.feature_configs, dtype=dtype)
    # A single submodel containing every feature; calibrators feed the lattice.
    submodels_inputs = premade_lib.build_calibration_layers(
        calibration_input_layer=input_layer,
        model_config=model_config,
        layer_output_range=premade_lib.LayerOutputRange.INPUT_TO_LATTICE,
        submodels=[[
            feature_config.name
            for feature_config in model_config.feature_configs
        ]],
        separate_calibrators=False,
        dtype=dtype)
    # If output calibration follows, the lattice feeds the final calibrator;
    # otherwise the lattice output is the model output.
    lattice_layer_output_range = (
        premade_lib.LayerOutputRange.INPUT_TO_FINAL_CALIBRATION
        if model_config.output_calibration else
        premade_lib.LayerOutputRange.MODEL_OUTPUT)
    lattice_output = premade_lib.build_lattice_layer(
        lattice_input=submodels_inputs[0],
        feature_configs=model_config.feature_configs,
        model_config=model_config,
        layer_output_range=lattice_layer_output_range,
        submodel_index=0,
        is_inside_ensemble=False,
        dtype=dtype)
    if model_config.output_calibration:
      model_output = premade_lib.build_output_calibration_layer(
          output_calibration_input=lattice_output,
          model_config=model_config,
          dtype=dtype)
    else:
      model_output = lattice_output
    # Define inputs and initialize model.
    inputs = [
        input_layer[feature_config.name]
        for feature_config in model_config.feature_configs
    ]
    kwargs['inputs'] = inputs
    kwargs['outputs'] = model_output
    super(CalibratedLattice, self).__init__(**kwargs)

  def get_config(self):
    """Returns a configuration dictionary."""
    config = super(CalibratedLattice, self).get_config()
    config['model_config'] = tf.keras.utils.serialize_keras_object(
        self.model_config)
    return config

  @classmethod
  def from_config(cls, config, custom_objects=None):
    """Recreates the model from a config produced by `get_config`."""
    model = super(CalibratedLattice, cls).from_config(
        config, custom_objects=custom_objects)
    try:
      # Restore and re-verify model_config; fall back to a model without it
      # if deserialization or verification fails.
      model_config = tf.keras.utils.deserialize_keras_object(
          config.get('model_config'), custom_objects=custom_objects)
      premade_lib.verify_config(model_config)
      model.model_config = model_config
    except ValueError:
      logging.warning(
          'Could not load model_config. Constructing model without it: %s',
          str(config.get('model_config')))
    return model
class CalibratedLinear(tf.keras.Model):
  """Premade model for Tensorflow calibrated linear models.

  Creates a `tf.keras.Model` for the model architecture specified by the
  `model_config`, which should be a `tfl.configs.CalibratedLinearConfig`. No
  fields in the model config will be automatically filled in, so the config
  must be fully specified. Note that the inputs to the model should match the
  order in which they are defined in the feature configs.

  Example:

  ```python
  model_config = tfl.configs.CalibratedLinearConfig(...)
  calibrated_linear_model = tfl.premade.CalibratedLinear(
      model_config=model_config)
  calibrated_linear_model.compile(...)
  calibrated_linear_model.fit(...)
  ```

  Attributes:
    model_config: Model configuration object describing model architecture.
      Should be a `tfl.configs.CalibratedLinearConfig` instance.
  """

  def __init__(self, model_config=None, dtype=tf.float32, **kwargs):
    """Initializes a `CalibratedLinear` instance.

    Args:
      model_config: Model configuration object describing model architecture.
        Should be one of the model configs in `tfl.configs`.
      dtype: dtype of layers used in the model.
      **kwargs: Any additional `tf.keras.Model` arguments.

    Raises:
      ValueError: If `model_config` is missing, is not a
        `CalibratedLinearConfig`, or is not fully specified.
    """
    # Set our model_config
    self.model_config = model_config
    # Check if we are constructing with already provided inputs/outputs, e.g.
    # when we are loading a model.
    if 'inputs' in kwargs and 'outputs' in kwargs:
      super(CalibratedLinear, self).__init__(**kwargs)
      return
    if model_config is None:
      raise ValueError('Must provide a model_config.')
    # Check that proper config has been given.
    if not isinstance(model_config, configs.CalibratedLinearConfig):
      raise ValueError('Invalid config type: {}'.format(type(model_config)))
    # Verify that the config is fully specified.
    premade_lib.verify_config(model_config)
    # Get feature configs and construct model.
    input_layer = premade_lib.build_input_layer(
        feature_configs=model_config.feature_configs, dtype=dtype)
    # If output calibration follows, the calibrators feed the final
    # calibrator; otherwise they feed the model output directly.
    calibration_layer_output_range = (
        premade_lib.LayerOutputRange.INPUT_TO_FINAL_CALIBRATION
        if model_config.output_calibration else
        premade_lib.LayerOutputRange.MODEL_OUTPUT)
    # A single submodel containing every feature.
    submodels_inputs = premade_lib.build_calibration_layers(
        calibration_input_layer=input_layer,
        model_config=model_config,
        layer_output_range=calibration_layer_output_range,
        submodels=[[
            feature_config.name
            for feature_config in model_config.feature_configs
        ]],
        separate_calibrators=False,
        dtype=dtype)
    # Use a weighted average when the output is bounded or calibrated.
    weighted_average = (
        model_config.output_min is not None or
        model_config.output_max is not None or model_config.output_calibration)
    linear_output = premade_lib.build_linear_layer(
        linear_input=submodels_inputs[0],
        feature_configs=model_config.feature_configs,
        model_config=model_config,
        weighted_average=weighted_average,
        submodel_index=0,
        dtype=dtype)
    if model_config.output_calibration:
      model_output = premade_lib.build_output_calibration_layer(
          output_calibration_input=linear_output,
          model_config=model_config,
          dtype=dtype)
    else:
      model_output = linear_output
    # Define inputs and initialize model.
    inputs = [
        input_layer[feature_config.name]
        for feature_config in model_config.feature_configs
    ]
    kwargs['inputs'] = inputs
    kwargs['outputs'] = model_output
    super(CalibratedLinear, self).__init__(**kwargs)

  def get_config(self):
    """Returns a configuration dictionary."""
    config = super(CalibratedLinear, self).get_config()
    config['model_config'] = tf.keras.utils.serialize_keras_object(
        self.model_config)
    return config

  @classmethod
  def from_config(cls, config, custom_objects=None):
    """Recreates the model from a config produced by `get_config`."""
    model = super(CalibratedLinear, cls).from_config(
        config, custom_objects=custom_objects)
    try:
      # Restore and re-verify model_config; fall back to a model without it
      # if deserialization or verification fails.
      model_config = tf.keras.utils.deserialize_keras_object(
          config.get('model_config'), custom_objects=custom_objects)
      premade_lib.verify_config(model_config)
      model.model_config = model_config
    except ValueError:
      logging.warning(
          'Could not load model_config. Constructing model without it: %s',
          str(config.get('model_config')))
    return model
# TODO: add support for tf.map_fn and inputs of shape (B, ?, input_dim)
# as well as non-ragged inputs using padding/mask.
class AggregateFunction(tf.keras.Model):
  """Premade model for Tensorflow aggregate function learning models.

  Creates a `tf.keras.Model` for the model architecture specified by the
  `model_config`, which should be a
  `tfl.configs.AggregateFunctionConfig`. No
  fields in the model config will be automatically filled in, so the config
  must be fully specified. Note that the inputs to the model should match the
  order in which they are defined in the feature configs. Features will be
  considered ragged, so inputs to this model must be `tf.ragged` instances.

  Example:

  ```python
  model_config = tfl.configs.AggregateFunctionConfig(...)
  agg_model = tfl.premade.AggregateFunction(
      model_config=model_config)
  agg_model.compile(...)
  agg_model.fit(...)
  ```
  """

  def __init__(self, model_config=None, dtype=tf.float32, **kwargs):
    """Initializes an `AggregateFunction` instance.

    Args:
      model_config: Model configuration object describing model architecture.
        Should be a `tfl.configs.AggregateFunctionConfig` instance.
      dtype: dtype of layers used in the model.
      **kwargs: Any additional `tf.keras.Model` arguments.

    Raises:
      ValueError: If `model_config` is missing, is not an
        `AggregateFunctionConfig`, or is not fully specified.
    """
    # Set our model_config
    self.model_config = model_config
    # Check if we are constructing with already provided inputs/outputs, e.g.
    # when we are loading a model.
    if 'inputs' in kwargs and 'outputs' in kwargs:
      super(AggregateFunction, self).__init__(**kwargs)
      return
    if model_config is None:
      raise ValueError('Must provide a model_config.')
    # Check that proper config has been given.
    if not isinstance(model_config, configs.AggregateFunctionConfig):
      raise ValueError('Invalid config type: {}'.format(type(model_config)))
    # Verify that the config is fully specified.
    premade_lib.verify_config(model_config)
    # Get feature configs and construct model.  Inputs are ragged tensors.
    input_layer = premade_lib.build_input_layer(
        feature_configs=model_config.feature_configs, dtype=dtype, ragged=True)
    # We need to construct middle_dimension calibrated_lattices for the
    # aggregation layer. Note that we cannot do this in premade_lib because
    # importing premade in premade_lib would cause a dependency cycle. Also
    # note that we only need to set the output initialization to the min and
    # max since we are not using output calibration at this step of the
    # aggregation.
    calibrated_lattice_config = configs.CalibratedLatticeConfig(
        feature_configs=model_config.feature_configs,
        interpolation=model_config.aggregation_lattice_interpolation,
        regularizer_configs=model_config.regularizer_configs,
        output_min=-1.0,
        output_max=1.0,
        output_initialization=[-1.0, 1.0])
    calibrated_lattice_models = [
        CalibratedLattice(calibrated_lattice_config)
        for _ in range(model_config.middle_dimension)
    ]
    # If output calibration follows, aggregation feeds the final calibrator;
    # otherwise the aggregation output is the model output.
    aggregation_layer_output_range = (
        premade_lib.LayerOutputRange.INPUT_TO_FINAL_CALIBRATION
        if model_config.output_calibration else
        premade_lib.LayerOutputRange.MODEL_OUTPUT)
    # Change input layer into a list based on model_config.feature_configs.
    # This is the order of inputs expected by calibrated_lattice_models.
    inputs = [
        input_layer[feature_config.name]
        for feature_config in model_config.feature_configs
    ]
    aggregation_output = premade_lib.build_aggregation_layer(
        aggregation_input_layer=inputs,
        model_config=model_config,
        calibrated_lattice_models=calibrated_lattice_models,
        layer_output_range=aggregation_layer_output_range,
        submodel_index=0,
        dtype=dtype)
    if model_config.output_calibration:
      model_output = premade_lib.build_output_calibration_layer(
          output_calibration_input=aggregation_output,
          model_config=model_config,
          dtype=dtype)
    else:
      model_output = aggregation_output
    # Define inputs and initialize model.
    kwargs['inputs'] = inputs
    kwargs['outputs'] = model_output
    super(AggregateFunction, self).__init__(**kwargs)

  def get_config(self):
    """Returns a configuration dictionary."""
    config = super(AggregateFunction, self).get_config()
    config['model_config'] = tf.keras.utils.serialize_keras_object(
        self.model_config)
    return config

  @classmethod
  def from_config(cls, config, custom_objects=None):
    """Recreates the model from a config produced by `get_config`."""
    model = super(AggregateFunction, cls).from_config(
        config, custom_objects=custom_objects)
    try:
      # Restore and re-verify model_config; fall back to a model without it
      # if deserialization or verification fails.
      model_config = tf.keras.utils.deserialize_keras_object(
          config.get('model_config'), custom_objects=custom_objects)
      premade_lib.verify_config(model_config)
      model.model_config = model_config
    except ValueError:
      logging.warning(
          'Could not load model_config. Constructing model without it: %s',
          str(config.get('model_config')))
    return model
def get_custom_objects(custom_objects=None):
  """Creates and returns a dictionary mapping names to custom objects.

  Args:
    custom_objects: Optional dictionary mapping names (strings) to custom
      classes or functions to be considered during deserialization. If
      provided, the returned mapping will be extended to contain this one.

  Returns:
    A dictionary mapping names (strings) to tensorflow lattice custom objects.
  """
  # All TFL premade models, configs, layers, initializers, constraints and
  # regularizers that Keras deserialization may need to resolve by name.
  objects = {
      'AggregateFunction': AggregateFunction,
      'AggregateFunctionConfig': configs.AggregateFunctionConfig,
      'Aggregation': aggregation_layer.Aggregation,
      'BiasInitializer': kfll.BiasInitializer,
      'CalibratedLatticeEnsemble': CalibratedLatticeEnsemble,
      'CalibratedLattice': CalibratedLattice,
      'CalibratedLatticeConfig': configs.CalibratedLatticeConfig,
      'CalibratedLatticeEnsembleConfig': configs.CalibratedLatticeEnsembleConfig,
      'CalibratedLinear': CalibratedLinear,
      'CalibratedLinearConfig': configs.CalibratedLinearConfig,
      'CategoricalCalibration': categorical_calibration_layer.CategoricalCalibration,
      'CategoricalCalibrationConstraints': categorical_calibration_layer.CategoricalCalibrationConstraints,
      'DominanceConfig': configs.DominanceConfig,
      'FeatureConfig': configs.FeatureConfig,
      'KFLRandomMonotonicInitializer': kfll.KFLRandomMonotonicInitializer,
      'KroneckerFactoredLattice': kfll.KroneckerFactoredLattice,
      'KroneckerFactoredLatticeConstraints': kfll.KroneckerFactoredLatticeConstraints,
      'LaplacianRegularizer': lattice_layer.LaplacianRegularizer,
      'Lattice': lattice_layer.Lattice,
      'LatticeConstraints': lattice_layer.LatticeConstraints,
      'Linear': linear_layer.Linear,
      'LinearConstraints': linear_layer.LinearConstraints,
      'LinearInitializer': lattice_layer.LinearInitializer,
      'NaiveBoundsConstraints': pwl_calibration_layer.NaiveBoundsConstraints,
      'ParallelCombination': parallel_combination_layer.ParallelCombination,
      'PWLCalibration': pwl_calibration_layer.PWLCalibration,
      'PWLCalibrationConstraints': pwl_calibration_layer.PWLCalibrationConstraints,
      'RandomMonotonicInitializer': lattice_layer.RandomMonotonicInitializer,
      'RegularizerConfig': configs.RegularizerConfig,
      'RTL': rtl_layer.RTL,
      'ScaleConstraints': kfll.ScaleConstraints,
      'ScaleInitializer': kfll.ScaleInitializer,
      'TorsionRegularizer': lattice_layer.TorsionRegularizer,
      'TrustConfig': configs.TrustConfig,
  }
  # Caller-supplied entries extend (and may override) the defaults.
  if custom_objects is not None:
    objects.update(custom_objects)
  return objects
| |
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import copy
import functools
import string
from mock import Mock
from testtools import TestCase
from twisted.internet import defer
from spreadflow_core.scheduler import Scheduler
from spreadflow_delta.test.matchers import MatchesSendDeltaItemInvocation
from spreadflow_delta.proc import MapReduce
class MapReduceTestCase(TestCase):
    """Unit tests for the MapReduce delta processor.

    Fixes relative to the original version:
    - Uses ``assertEqual``: the ``assertEquals`` alias was deprecated and is
      removed in Python 3.12.
    - Materializes ``dict.keys()`` views into lists when building expected
      deltas, so comparisons are stable under Python 3 (matching the
      ``list(...)`` usage the file already had in one place).
    - Local mapper/reducer helpers no longer shadow the ``map``/``reduce``
      builtins.
    """

    @defer.inlineCallbacks
    def test_default_map_identity(self):
        """
        Perform identity transformation if no mapper is specified.
        """
        sut = MapReduce()

        insert = {
            'inserts': ['a'],
            'deletes': [],
            'data': {
                'a': {
                    'b': 'c'
                }
            }
        }
        matches = MatchesSendDeltaItemInvocation(copy.deepcopy(insert), sut)
        send = Mock(spec=Scheduler.send)
        yield sut(insert, send)
        self.assertEqual(send.call_count, 1)
        self.assertThat(send.call_args, matches)

        delete = {
            'inserts': [],
            'deletes': ['a'],
            'data': {}
        }
        matches = MatchesSendDeltaItemInvocation(copy.deepcopy(delete), sut)
        send = Mock(spec=Scheduler.send)
        yield sut(delete, send)
        self.assertEqual(send.call_count, 1)
        self.assertThat(send.call_args, matches)

    @defer.inlineCallbacks
    def test_default_reduce_merge(self):
        """
        Merge updates to the same document if no reducer is specified.
        """
        def mapper(key, doc):
            # Unchanged document.
            yield key, doc
            # Subsequent addition to document.
            yield key, {'f': 'g'}
            # Subsequent modification of an existing key.
            yield key, {'b': 'x'}

        sut = MapReduce(map=mapper)

        msg = {
            'inserts': ['a'],
            'deletes': [],
            'data': {
                'a': {
                    'b': 'c',
                    'd': 'e'
                }
            }
        }
        expected = {
            'inserts': ['a'],
            'deletes': [],
            'data': {
                'a': {
                    'b': 'x',
                    'd': 'e',
                    'f': 'g'
                }
            }
        }
        matches = MatchesSendDeltaItemInvocation(expected, sut)
        send = Mock(spec=Scheduler.send)
        yield sut(msg, send)
        self.assertEqual(send.call_count, 1)
        self.assertThat(send.call_args, matches)

    @defer.inlineCallbacks
    def test_filter(self):
        """
        Test filtering of documents by a mapper function.
        """
        def mapper(key, doc):
            if doc.get('allowed'):
                yield key, doc

        sut = MapReduce(map=mapper)

        insert = {
            'inserts': ['a', 'd'],
            'deletes': [],
            'data': {
                'a': {
                    'b': 'c',
                    'allowed': True
                },
                'd': {
                    'e': 'f',
                }
            }
        }
        expected = {
            'inserts': ['a'],
            'deletes': [],
            'data': {
                'a': {
                    'b': 'c',
                    'allowed': True
                },
            }
        }
        matches = MatchesSendDeltaItemInvocation(expected, sut)
        send = Mock(spec=Scheduler.send)
        yield sut(insert, send)
        self.assertEqual(send.call_count, 1)
        self.assertThat(send.call_args, matches)

        insert = {
            'inserts': ['g'],
            'deletes': [],
            'data': {
                'g': {
                    'h': 'i',
                    'allowed': True
                }
            }
        }
        matches = MatchesSendDeltaItemInvocation(copy.deepcopy(insert), sut)
        send = Mock(spec=Scheduler.send)
        yield sut(insert, send)
        self.assertEqual(send.call_count, 1)
        self.assertThat(send.call_args, matches)

        # Ensure that the filter is also effective when existing documents are
        # deleted.
        delete = {
            'inserts': [],
            'deletes': ['a', 'd', 'g'],
            'data': {}
        }
        expected = {
            'inserts': [],
            'deletes': ['a', 'g'],
            'data': {}
        }
        matches = MatchesSendDeltaItemInvocation(expected, sut)
        send = Mock(spec=Scheduler.send)
        yield sut(delete, send)
        self.assertEqual(send.call_count, 1)
        self.assertThat(send.call_args, matches)

    @defer.inlineCallbacks
    def test_term_frequency_with_update(self):
        """
        Test example implementation of term frequency analysis.
        """
        strip_punctuation = {ord(c): None for c in string.punctuation}

        def mapper(key, doc):
            for word in doc.translate(strip_punctuation).split():
                yield word, 1

        def reducer(key, values):
            return sum(values)

        sut = MapReduce(map=mapper, reduce=reducer)

        insert = {
            'inserts': ['line-1', 'line-2', 'line-3', 'line-4', 'line-5'],
            'deletes': [],
            'data': {
                'line-1': 'There was a fisherman named Fisher',
                'line-2': 'who fished for some fish in a fissure.',
                'line-3': 'Till a fish with a grin,',
                'line-4': 'pulled the firefighter in.',
                'line-5': 'Now they\'re fishing the fissure for Fisher.'
            }
        }
        expected_freq = {
            'Fisher': 2, 'Now': 1, 'There': 1, 'Till': 1, 'a': 4, 'fish': 2,
            'fished': 1, 'fisherman': 1, 'firefighter': 1, 'fishing': 1,
            'fissure': 2, 'for': 2, 'grin': 1, 'in': 2, 'named': 1,
            'pulled': 1, 'some': 1, 'the': 2, 'theyre': 1, 'was': 1, 'who': 1,
            'with': 1
        }
        expected = {
            'inserts': list(expected_freq.keys()),
            'deletes': [],
            'data': expected_freq
        }
        matches = MatchesSendDeltaItemInvocation(expected, sut)
        send = Mock(spec=Scheduler.send)
        yield sut(insert, send)
        self.assertEqual(send.call_count, 1)
        self.assertThat(send.call_args, matches)

        update = {
            'inserts': ['line-4'],
            'deletes': ['line-4'],
            'data': {
                'line-4': 'pulled the fisherman in.',
            }
        }
        expected_freq = {
            'pulled': 1, 'the': 2, 'fisherman': 2, 'in': 2
        }
        expected = {
            'inserts': list(expected_freq.keys()),
            'deletes': ['firefighter'] + list(expected_freq.keys()),
            'data': expected_freq
        }
        matches = MatchesSendDeltaItemInvocation(expected, sut)
        send = Mock(spec=Scheduler.send)
        yield sut(update, send)
        self.assertEqual(send.call_count, 1)
        self.assertThat(send.call_args, matches)

    @defer.inlineCallbacks
    def test_reverse_dependency_with_update(self):
        """
        Test example implementation of reverse dependency analysis.
        """
        def mapper(subj, deplist):
            for dep in deplist:
                yield dep, [subj]

        def concat_list(key, values):
            return functools.reduce(lambda a, b: a + b, values, [])

        sut = MapReduce(map=mapper, reduce=concat_list)

        insert = {
            'inserts': ['stuff.c', 'util.c', 'other.c'],
            'deletes': [],
            'data': {
                'stuff.c': ['common.h', 'stuff.h'],
                'util.c': ['common.h', 'util.h'],
                'other.c': ['util.h']
            }
        }
        expected_rdep = {
            'common.h': ['stuff.c', 'util.c'],
            'stuff.h': ['stuff.c'],
            'util.h': ['util.c', 'other.c']
        }
        expected = {
            'inserts': list(expected_rdep.keys()),
            'deletes': [],
            'data': expected_rdep
        }
        matches = MatchesSendDeltaItemInvocation(expected, sut)
        send = Mock(spec=Scheduler.send)
        yield sut(insert, send)
        self.assertEqual(send.call_count, 1)
        self.assertThat(send.call_args, matches)

        # Remove common.h from util.c
        update = {
            'inserts': ['util.c'],
            'deletes': ['util.c'],
            'data': {
                'util.c': ['util.h'],
            }
        }
        expected_rdep = {
            'common.h': ['stuff.c'],
            'util.h': ['other.c', 'util.c']
        }
        expected = {
            'inserts': list(expected_rdep.keys()),
            'deletes': list(expected_rdep.keys()),
            'data': expected_rdep
        }
        matches = MatchesSendDeltaItemInvocation(expected, sut)
        send = Mock(spec=Scheduler.send)
        yield sut(update, send)
        self.assertEqual(send.call_count, 1)
        self.assertThat(send.call_args, matches)
| |
import sys
from django.contrib.contenttypes.generic import GenericForeignKey, GenericRelation
from django.core.management.color import color_style
from django.utils.itercompat import is_iterable
try:
any
except NameError:
from django.utils.itercompat import any
class ModelErrorCollection:
    """Accumulates model-validation errors and echoes each one, styled,
    to an output stream as it is reported."""

    def __init__(self, outfile=sys.stdout):
        # (context, error) pairs in the order they were reported.
        self.errors = []
        self.outfile = outfile
        self.style = color_style()

    def add(self, context, error):
        """Record *error* under *context* and write a styled line to outfile."""
        self.errors.append((context, error))
        message = "%s: %s\n" % (context, error)
        self.outfile.write(self.style.ERROR(message))
def get_validation_errors(outfile, app=None):
"""
Validates all models that are part of the specified app. If no app name is provided,
validates all models of all installed apps. Writes errors, if any, to outfile.
Returns number of errors.
"""
from django.conf import settings
from django.db import models, connection
from django.db.models.loading import get_app_errors
from django.db.models.fields.related import RelatedObject
from django.db.models.deletion import SET_NULL, SET_DEFAULT
e = ModelErrorCollection(outfile)
for (app_name, error) in get_app_errors().items():
e.add(app_name, error)
for cls in models.get_models(app):
opts = cls._meta
# Do field-specific validation.
for f in opts.local_fields:
if f.name == 'id' and not f.primary_key and opts.pk.name == 'id':
e.add(opts, '"%s": You can\'t use "id" as a field name, because each model automatically gets an "id" field if none of the fields have primary_key=True. You need to either remove/rename your "id" field or add primary_key=True to a field.' % f.name)
if f.name.endswith('_'):
e.add(opts, '"%s": Field names cannot end with underscores, because this would lead to ambiguous queryset filters.' % f.name)
if isinstance(f, models.CharField):
try:
max_length = int(f.max_length)
if max_length <= 0:
e.add(opts, '"%s": CharFields require a "max_length" attribute that is a positive integer.' % f.name)
except (ValueError, TypeError):
e.add(opts, '"%s": CharFields require a "max_length" attribute that is a positive integer.' % f.name)
if isinstance(f, models.DecimalField):
decimalp_ok, mdigits_ok = False, False
decimalp_msg ='"%s": DecimalFields require a "decimal_places" attribute that is a non-negative integer.'
try:
decimal_places = int(f.decimal_places)
if decimal_places < 0:
e.add(opts, decimalp_msg % f.name)
else:
decimalp_ok = True
except (ValueError, TypeError):
e.add(opts, decimalp_msg % f.name)
mdigits_msg = '"%s": DecimalFields require a "max_digits" attribute that is a positive integer.'
try:
max_digits = int(f.max_digits)
if max_digits <= 0:
e.add(opts, mdigits_msg % f.name)
else:
mdigits_ok = True
except (ValueError, TypeError):
e.add(opts, mdigits_msg % f.name)
invalid_values_msg = '"%s": DecimalFields require a "max_digits" attribute value that is greater than the value of the "decimal_places" attribute.'
if decimalp_ok and mdigits_ok:
if decimal_places >= max_digits:
e.add(opts, invalid_values_msg % f.name)
if isinstance(f, models.FileField) and not f.upload_to:
e.add(opts, '"%s": FileFields require an "upload_to" attribute.' % f.name)
if isinstance(f, models.ImageField):
# Try to import PIL in either of the two ways it can end up installed.
try:
from PIL import Image
except ImportError:
try:
import Image
except ImportError:
e.add(opts, '"%s": To use ImageFields, you need to install the Python Imaging Library. Get it at http://www.pythonware.com/products/pil/ .' % f.name)
if isinstance(f, models.BooleanField) and getattr(f, 'null', False):
e.add(opts, '"%s": BooleanFields do not accept null values. Use a NullBooleanField instead.' % f.name)
if f.choices:
if isinstance(f.choices, basestring) or not is_iterable(f.choices):
e.add(opts, '"%s": "choices" should be iterable (e.g., a tuple or list).' % f.name)
else:
for c in f.choices:
if not isinstance(c, (list, tuple)) or len(c) != 2:
e.add(opts, '"%s": "choices" should be a sequence of two-tuples.' % f.name)
if f.db_index not in (None, True, False):
e.add(opts, '"%s": "db_index" should be either None, True or False.' % f.name)
# Perform any backend-specific field validation.
connection.validation.validate_field(e, opts, f)
# Check if the on_delete behavior is sane
if f.rel and hasattr(f.rel, 'on_delete'):
if f.rel.on_delete == SET_NULL and not f.null:
e.add(opts, "'%s' specifies on_delete=SET_NULL, but cannot be null." % f.name)
elif f.rel.on_delete == SET_DEFAULT and not f.has_default():
e.add(opts, "'%s' specifies on_delete=SET_DEFAULT, but has no default value." % f.name)
# Check to see if the related field will clash with any existing
# fields, m2m fields, m2m related objects or related objects
if f.rel:
if f.rel.to not in models.get_models():
e.add(opts, "'%s' has a relation with model %s, which has either not been installed or is abstract." % (f.name, f.rel.to))
# it is a string and we could not find the model it refers to
# so skip the next section
if isinstance(f.rel.to, (str, unicode)):
continue
# Make sure the related field specified by a ForeignKey is unique
if not f.rel.to._meta.get_field(f.rel.field_name).unique:
e.add(opts, "Field '%s' under model '%s' must have a unique=True constraint." % (f.rel.field_name, f.rel.to.__name__))
rel_opts = f.rel.to._meta
rel_name = RelatedObject(f.rel.to, cls, f).get_accessor_name()
rel_query_name = f.related_query_name()
if not f.rel.is_hidden():
for r in rel_opts.fields:
if r.name == rel_name:
e.add(opts, "Accessor for field '%s' clashes with field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
if r.name == rel_query_name:
e.add(opts, "Reverse query name for field '%s' clashes with field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
for r in rel_opts.local_many_to_many:
if r.name == rel_name:
e.add(opts, "Accessor for field '%s' clashes with m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
if r.name == rel_query_name:
e.add(opts, "Reverse query name for field '%s' clashes with m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
for r in rel_opts.get_all_related_many_to_many_objects():
if r.get_accessor_name() == rel_name:
e.add(opts, "Accessor for field '%s' clashes with related m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
if r.get_accessor_name() == rel_query_name:
e.add(opts, "Reverse query name for field '%s' clashes with related m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
for r in rel_opts.get_all_related_objects():
if r.field is not f:
if r.get_accessor_name() == rel_name:
e.add(opts, "Accessor for field '%s' clashes with related field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
if r.get_accessor_name() == rel_query_name:
e.add(opts, "Reverse query name for field '%s' clashes with related field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
seen_intermediary_signatures = []
for i, f in enumerate(opts.local_many_to_many):
# Check to see if the related m2m field will clash with any
# existing fields, m2m fields, m2m related objects or related
# objects
if f.rel.to not in models.get_models():
e.add(opts, "'%s' has an m2m relation with model %s, which has either not been installed or is abstract." % (f.name, f.rel.to))
# it is a string and we could not find the model it refers to
# so skip the next section
if isinstance(f.rel.to, (str, unicode)):
continue
# Check that the field is not set to unique. ManyToManyFields do not support unique.
if f.unique:
e.add(opts, "ManyToManyFields cannot be unique. Remove the unique argument on '%s'." % f.name)
if f.rel.through is not None and not isinstance(f.rel.through, basestring):
from_model, to_model = cls, f.rel.to
if from_model == to_model and f.rel.symmetrical and not f.rel.through._meta.auto_created:
e.add(opts, "Many-to-many fields with intermediate tables cannot be symmetrical.")
seen_from, seen_to, seen_self = False, False, 0
for inter_field in f.rel.through._meta.fields:
rel_to = getattr(inter_field.rel, 'to', None)
if from_model == to_model: # relation to self
if rel_to == from_model:
seen_self += 1
if seen_self > 2:
e.add(opts, "Intermediary model %s has more than "
"two foreign keys to %s, which is ambiguous "
"and is not permitted." % (
f.rel.through._meta.object_name,
from_model._meta.object_name
)
)
else:
if rel_to == from_model:
if seen_from:
e.add(opts, "Intermediary model %s has more "
"than one foreign key to %s, which is "
"ambiguous and is not permitted." % (
f.rel.through._meta.object_name,
from_model._meta.object_name
)
)
else:
seen_from = True
elif rel_to == to_model:
if seen_to:
e.add(opts, "Intermediary model %s has more "
"than one foreign key to %s, which is "
"ambiguous and is not permitted." % (
f.rel.through._meta.object_name,
rel_to._meta.object_name
)
)
else:
seen_to = True
if f.rel.through not in models.get_models(include_auto_created=True):
e.add(opts, "'%s' specifies an m2m relation through model "
"%s, which has not been installed." % (f.name, f.rel.through)
)
signature = (f.rel.to, cls, f.rel.through)
if signature in seen_intermediary_signatures:
e.add(opts, "The model %s has two manually-defined m2m "
"relations through the model %s, which is not "
"permitted. Please consider using an extra field on "
"your intermediary model instead." % (
cls._meta.object_name,
f.rel.through._meta.object_name
)
)
else:
seen_intermediary_signatures.append(signature)
if not f.rel.through._meta.auto_created:
seen_related_fk, seen_this_fk = False, False
for field in f.rel.through._meta.fields:
if field.rel:
if not seen_related_fk and field.rel.to == f.rel.to:
seen_related_fk = True
elif field.rel.to == cls:
seen_this_fk = True
if not seen_related_fk or not seen_this_fk:
e.add(opts, "'%s' is a manually-defined m2m relation "
"through model %s, which does not have foreign keys "
"to %s and %s" % (f.name, f.rel.through._meta.object_name,
f.rel.to._meta.object_name, cls._meta.object_name)
)
elif isinstance(f.rel.through, basestring):
e.add(opts, "'%s' specifies an m2m relation through model %s, "
"which has not been installed" % (f.name, f.rel.through)
)
elif isinstance(f, GenericRelation):
if not any([isinstance(vfield, GenericForeignKey) for vfield in f.rel.to._meta.virtual_fields]):
e.add(opts, "Model '%s' must have a GenericForeignKey in "
"order to create a GenericRelation that points to it."
% f.rel.to.__name__
)
rel_opts = f.rel.to._meta
rel_name = RelatedObject(f.rel.to, cls, f).get_accessor_name()
rel_query_name = f.related_query_name()
# If rel_name is none, there is no reverse accessor (this only
# occurs for symmetrical m2m relations to self). If this is the
# case, there are no clashes to check for this field, as there are
# no reverse descriptors for this field.
if rel_name is not None:
for r in rel_opts.fields:
if r.name == rel_name:
e.add(opts, "Accessor for m2m field '%s' clashes with field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
if r.name == rel_query_name:
e.add(opts, "Reverse query name for m2m field '%s' clashes with field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
for r in rel_opts.local_many_to_many:
if r.name == rel_name:
e.add(opts, "Accessor for m2m field '%s' clashes with m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
if r.name == rel_query_name:
e.add(opts, "Reverse query name for m2m field '%s' clashes with m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
for r in rel_opts.get_all_related_many_to_many_objects():
if r.field is not f:
if r.get_accessor_name() == rel_name:
e.add(opts, "Accessor for m2m field '%s' clashes with related m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
if r.get_accessor_name() == rel_query_name:
e.add(opts, "Reverse query name for m2m field '%s' clashes with related m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
for r in rel_opts.get_all_related_objects():
if r.get_accessor_name() == rel_name:
e.add(opts, "Accessor for m2m field '%s' clashes with related field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
if r.get_accessor_name() == rel_query_name:
e.add(opts, "Reverse query name for m2m field '%s' clashes with related field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
# Check ordering attribute.
if opts.ordering:
for field_name in opts.ordering:
if field_name == '?': continue
if field_name.startswith('-'):
field_name = field_name[1:]
if opts.order_with_respect_to and field_name == '_order':
continue
# Skip ordering in the format field1__field2 (FIXME: checking
# this format would be nice, but it's a little fiddly).
if '__' in field_name:
continue
try:
opts.get_field(field_name, many_to_many=False)
except models.FieldDoesNotExist:
e.add(opts, '"ordering" refers to "%s", a field that doesn\'t exist.' % field_name)
# Check unique_together.
for ut in opts.unique_together:
for field_name in ut:
try:
f = opts.get_field(field_name, many_to_many=True)
except models.FieldDoesNotExist:
e.add(opts, '"unique_together" refers to %s, a field that doesn\'t exist. Check your syntax.' % field_name)
else:
if isinstance(f.rel, models.ManyToManyRel):
e.add(opts, '"unique_together" refers to %s. ManyToManyFields are not supported in unique_together.' % f.name)
if f not in opts.local_fields:
e.add(opts, '"unique_together" refers to %s. This is not in the same model as the unique_together statement.' % f.name)
return len(e.errors)
| |
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import contextlib
import functools
import logging
import sys
import time
import uuid
import six
from kazoo import exceptions as k_exceptions
from kazoo.handlers import threading as k_threading
from kazoo.protocol import states as k_states
from kazoo import retry as k_retry
from kazoo.recipe.barrier import Barrier
from kazoo.recipe.barrier import DoubleBarrier
from kazoo.recipe.counter import Counter
from kazoo.recipe.election import Election
from kazoo.recipe.lock import Lock
from kazoo.recipe.lock import Semaphore
from kazoo.recipe.partitioner import SetPartitioner
from kazoo.recipe.party import Party
from kazoo.recipe.party import ShallowParty
from kazoo.recipe.queue import Queue
from kazoo.recipe.queue import LockingQueue
from kazoo.recipe.watchers import ChildrenWatch
from kazoo.recipe.watchers import DataWatch
from zake import fake_storage as fs
from zake import utils
from zake import version
LOG = logging.getLogger(__name__)
# We provide a basic txn support (not as functional as zookeeper) and this
# was added in 3.4.0 so we will say we are 3.4.0 compat (until proven
# differently).
SERVER_VERSION = (3, 4, 0)
_NO_ACL_MSG = "ACLs not currently supported"
class FakeClient(object):
    """A fake mostly functional/good enough kazoo compat. client
    It can have its underlying storage mocked out (as well as exposes the
    listeners that are currently active and the watches that are currently
    active) so that said functionality can be examined & introspected by
    testing frameworks (while in use and after the fact).
    """
    def __init__(self, handler=None, storage=None, server_version=None):
        # Connection-state listeners and per-path watch callback registries.
        self._listeners = set()
        self._child_watchers = collections.defaultdict(list)
        self._data_watchers = collections.defaultdict(list)
        # Create a private handler/storage only when the caller did not
        # supply one; the ownership flags control who tears them down later.
        if handler is None:
            self._handler = k_threading.SequentialThreadingHandler()
            self._own_handler = True
        else:
            self._handler = handler
            self._own_handler = False
        if storage is not None:
            self._storage = storage
            self._own_storage = False
        else:
            self._storage = fs.FakeStorage(self._handler)
            self._own_storage = True
        self._partial_client = _PartialClient(self._storage)
        self._open_close_lock = self._handler.rlock_object()
        self._watches_lock = self._handler.rlock_object()
        self._listeners_lock = self._handler.rlock_object()
        self._connected = False
        self._retry = k_retry.KazooRetry()
        if server_version is None:
            self._server_version = SERVER_VERSION
        else:
            self._server_version = tuple(server_version)
            if not len(self._server_version):
                raise ValueError("Non-empty server version expected")
        self.expired = False
        self.logger = LOG
        # Helper objects that makes these easier to create.
        self.Barrier = functools.partial(Barrier, self)
        self.Counter = functools.partial(Counter, self)
        self.DoubleBarrier = functools.partial(DoubleBarrier, self)
        self.ChildrenWatch = functools.partial(ChildrenWatch, self)
        self.DataWatch = functools.partial(DataWatch, self)
        self.Election = functools.partial(Election, self)
        self.Lock = functools.partial(Lock, self)
        self.Party = functools.partial(Party, self)
        self.Queue = functools.partial(Queue, self)
        self.LockingQueue = functools.partial(LockingQueue, self)
        self.SetPartitioner = functools.partial(SetPartitioner, self)
        self.Semaphore = functools.partial(Semaphore, self)
        self.ShallowParty = functools.partial(ShallowParty, self)
    @property
    def handler(self):
        # Handler that runs the dispatched callbacks/watches for this client.
        return self._handler
    @property
    def storage(self):
        # Underlying (possibly shared) fake storage backend.
        return self._storage
    def command(self, cmd=b'ruok'):
        """Emulate a subset of zookeeper's four-letter admin commands."""
        self.verify()
        if cmd == b'ruok':
            return 'imok'
        if cmd == b'stat':
            server_version = ".".join([str(s) for s in self._server_version])
            return "\n".join(['Zake the fake version: %s' % (version.VERSION),
                              'Mimicked version: %s' % (server_version),
                              'Mode: standalone'])
        if cmd == b"kill":
            self.stop()
        if cmd == b'envi':
            server_version = ".".join([str(s) for s in self._server_version])
            lines = [
                "Environment:",
                "zookeeper.version=%s" % server_version,
            ]
            return "\n".join(lines)
        # Unknown commands (and b"kill") yield an empty response.
        return ''
    def verify(self):
        """Raise if the client is not connected or its session expired."""
        if not self._connected:
            raise k_exceptions.ConnectionClosedError("Connection has been"
                                                     " closed")
        if self.expired:
            raise k_exceptions.SessionExpiredError("Expired")
    @property
    def session_id(self):
        # Session id is owned by the partial client (set on start()).
        return self._partial_client.session_id
    @property
    def timeout_exception(self):
        # Kazoo clients expose the exception type raised on timeouts.
        return IOError
    @property
    def child_watches(self):
        # Exposed for test introspection of registered child watches.
        return self._child_watchers
    @property
    def data_watches(self):
        # Exposed for test introspection of registered data watches.
        return self._data_watchers
    @property
    def listeners(self):
        # Exposed for test introspection of state-change listeners.
        return self._listeners
    @property
    def connected(self):
        return self._connected
    def sync(self, path):
        """No-op sync; the fake's data is always current."""
        self.verify()
        if not isinstance(path, six.string_types):
            raise TypeError("path must be a string")
    def server_version(self):
        """Return the mimicked server version tuple."""
        self.verify()
        return self._server_version
    def flush(self):
        """Block until all callbacks queued before this call have run."""
        self.verify()
        # This puts an item into the callback queue, and waits until it gets
        # called, this is a cheap way of knowing that the queue has been
        # cycled over (as this item goes in on the bottom) and only when the
        # items ahead of this callback are finished will this get called.
        wait_for = self.handler.event_object()
        fired = False
        def flip():
            wait_for.set()
        while not wait_for.is_set():
            if not fired:
                self.handler.dispatch_callback(utils.make_cb(flip))
                fired = True
            time.sleep(0.001)
    def create(self, path, value=b"", acl=None,
               ephemeral=False, sequence=False, makepath=False):
        """Create a node, firing any watches the creation triggered."""
        self.verify()
        result, data_watches, child_watches = self._partial_client.create(
            path, value=value, acl=acl, ephemeral=ephemeral, sequence=sequence,
            makepath=makepath)
        self.storage.inform(self, child_watches, data_watches)
        return result
    def create_async(self, path, value=b"", acl=None,
                     ephemeral=False, sequence=False, makepath=False):
        """Async variant of create(); returns a handler async result."""
        return utils.dispatch_async(self.handler, self.create, path,
                                    value=value, acl=acl, ephemeral=ephemeral,
                                    sequence=sequence, makepath=makepath)
    def get(self, path, watch=None):
        """Return (data, znode) for *path*, optionally registering a watch."""
        self.verify()
        if not isinstance(path, six.string_types):
            raise TypeError("path must be a string")
        path = utils.normpath(path)
        try:
            (data, znode) = self.storage.get(path)
        except KeyError:
            raise k_exceptions.NoNodeError("Node %s does not exist" % (path))
        if watch:
            with self._watches_lock:
                self._data_watchers[path].append(watch)
        return (data, znode)
    def set_acls(self, path, acls, version=-1):
        raise NotImplementedError(_NO_ACL_MSG)
    def set_acls_async(self, path, acls, version=-1):
        raise NotImplementedError(_NO_ACL_MSG)
    def get_acls_async(self, path):
        raise NotImplementedError(_NO_ACL_MSG)
    def get_acls(self, path):
        raise NotImplementedError(_NO_ACL_MSG)
    def get_async(self, path, watch=None):
        """Async variant of get()."""
        return utils.dispatch_async(self.handler, self.get, path, watch=watch)
    def start(self, timeout=None):
        """Connect the client.

        ``timeout`` is accepted for kazoo API compatibility but is not
        used by the fake (connection is immediate).
        """
        if not self._connected:
            with self._open_close_lock:
                # Re-check under the lock (double-checked open).
                if not self._connected:
                    self._connected = True
                    with self._watches_lock:
                        self._child_watchers.clear()
                        self._data_watchers.clear()
                    self.storage.attach(self)
                    self.handler.start()
                    self._partial_client.session_id = int(uuid.uuid4())
                    self._fire_state_change(k_states.KazooState.CONNECTED)
    def restart(self):
        """Stop then start the client; returns the previous session id."""
        with self._open_close_lock:
            before = self.session_id
            self.stop()
            self.start()
            return before
    def _fire_state_change(self, state):
        # Snapshot the listeners under the lock, then dispatch outside it.
        with self._listeners_lock:
            listeners = list(self._listeners)
        for func in listeners:
            self.handler.dispatch_callback(utils.make_cb(func, [state]))
    def exists(self, path, watch=None):
        """Return the node stat if *path* exists, else None."""
        self.verify()
        if not isinstance(path, six.string_types):
            raise TypeError("path must be a string")
        path = utils.normpath(path)
        try:
            (data, exists) = self.storage.get(path)
        except KeyError:
            exists = None
        if watch:
            with self._watches_lock:
                self._data_watchers[path].append(watch)
        return exists
    def exists_async(self, path, watch=None):
        """Async variant of exists()."""
        return utils.dispatch_async(self.handler,
                                    self.exists, path, watch=watch)
    def set(self, path, value, version=-1):
        """Set a node's data, firing any watches the change triggered."""
        self.verify()
        result, data_watches, child_watches = self._partial_client.set(
            path, value, version=version)
        self.storage.inform(self, child_watches, data_watches)
        return result
    def set_async(self, path, value, version=-1):
        """Async variant of set()."""
        return utils.dispatch_async(self.handler,
                                    self.set, path, value, version=version)
    def get_children(self, path, watch=None, include_data=False):
        """Return child names of *path* (or (name, data) pairs)."""
        self.verify()
        if not isinstance(path, six.string_types):
            raise TypeError("path must be a string")
        def clean_path(p):
            return p.strip("/")
        path = utils.normpath(path)
        with self.storage.lock:
            if path not in self.storage:
                raise k_exceptions.NoNodeError("Node %s does not exist"
                                               % (path))
            paths = self.storage.get_children(path)
        if watch:
            with self._watches_lock:
                self._child_watchers[path].append(watch)
        if include_data:
            children_with_data = []
            for (child_path, data) in six.iteritems(paths):
                # Strip the parent prefix to leave only the child name.
                child_path = clean_path(child_path[len(path):])
                children_with_data.append((child_path, data))
            return children_with_data
        else:
            children = []
            for child_path in six.iterkeys(paths):
                child_path = clean_path(child_path[len(path):])
                children.append(child_path)
            return children
    def get_children_async(self, path, watch=None, include_data=False):
        """Async variant of get_children()."""
        return utils.dispatch_async(self.handler, self.get_children, path,
                                    watch=watch, include_data=include_data)
    def stop(self):
        """Alias of close()."""
        self.close()
    def delete(self, path, version=-1, recursive=False):
        """Delete a node, firing any watches the deletion triggered."""
        self.verify()
        result, data_watches, child_watches = self._partial_client.delete(
            path, version=version, recursive=recursive)
        self.storage.inform(self, child_watches, data_watches)
        return result
    def delete_async(self, path, recursive=False):
        """Async variant of delete().

        NOTE(review): unlike delete(), no ``version`` parameter is
        accepted here — confirm against kazoo's API whether intended.
        """
        return utils.dispatch_async(self.handler,
                                    self.delete, path, recursive=recursive)
    def add_listener(self, listener):
        """Register a connection state-change listener."""
        with self._listeners_lock:
            self._listeners.add(listener)
    def retry(self, func, *args, **kwargs):
        """Invoke ``func`` under a copy of the kazoo retry policy."""
        self.verify()
        r = self._retry.copy()
        return r(func, *args, **kwargs)
    def remove_listener(self, listener):
        """Unregister a previously added state-change listener."""
        with self._listeners_lock:
            self._listeners.discard(listener)
    def fire_child_watches(self, child_watches):
        """Fire child watches; each item is a (paths, event) pair."""
        for (paths, event) in child_watches:
            self._fire_watches(paths, event, self._child_watchers)
    def fire_data_watches(self, data_watches):
        """Fire data watches; each item is a (paths, event) pair."""
        for (paths, event) in data_watches:
            self._fire_watches(paths, event, self._data_watchers)
    def _fire_watches(self, paths, event, watch_source):
        # Deepest paths first; watches are one-shot (popped when fired).
        for path in reversed(sorted(paths)):
            with self._open_close_lock:
                if self._connected:
                    with self._watches_lock:
                        watches = list(watch_source.pop(path, []))
                    for w in watches:
                        cb = utils.make_cb(w, [event])
                        self.handler.dispatch_callback(cb)
    def transaction(self):
        """Return a new fake transaction request bound to this client."""
        return FakeTransactionRequest(self)
    def ensure_path(self, path):
        """Create *path* and all missing ancestors (existing ones are ok)."""
        self.verify()
        if not isinstance(path, six.string_types):
            raise TypeError("path must be a string")
        path = utils.normpath(path)
        for piece in utils.partition_path(path):
            try:
                self.create(piece)
            except k_exceptions.NodeExistsError:
                pass
    def ensure_path_async(self, path):
        """Async variant of ensure_path()."""
        return utils.dispatch_async(self.handler, self.ensure_path, path)
    def close(self, close_handler=True):
        """Disconnect, purge session state and fire the LOST transition."""
        if self._connected:
            with self._open_close_lock:
                # Re-check under the lock (double-checked close).
                if self._connected:
                    self._connected = False
                    with self._watches_lock:
                        self._child_watchers.clear()
                        self._data_watchers.clear()
                    self.storage.purge(self)
                    self._fire_state_change(k_states.KazooState.LOST)
                    if self._own_handler and close_handler:
                        self.handler.stop()
                    self._partial_client.session_id = None
class _PartialClient(object):
    """An internal *only* client that returns the watches to be triggered."""
    def __init__(self, storage):
        # Shared storage backend; session_id is assigned by the owning
        # FakeClient on start() and cleared again on close().
        self.storage = storage
        self.session_id = None
    def delete(self, path, version=-1, recursive=False):
        """Delete *path* (optionally with all descendants).

        Returns (True, data_watches, child_watches); the caller is
        responsible for actually firing the returned watch events.
        """
        if not isinstance(path, six.string_types):
            raise TypeError("path must be a string")
        data_watches = []
        child_watches = []
        path = utils.normpath(path)
        with self.storage.lock:
            if path not in self.storage:
                raise k_exceptions.NoNodeError("Node %s does not exist"
                                               % (path))
            path_version = self.storage[path]['version']
            if version != -1 and path_version != version:
                raise k_exceptions.BadVersionError("Version mismatch"
                                                   " (%s != %s)"
                                                   % (version, path_version))
            if recursive:
                # Delete the node plus its whole subtree.
                paths = [path]
                children = self.storage.get_children(path, only_direct=False)
                for child_path in six.iterkeys(children):
                    paths.append(child_path)
            else:
                children = self.storage.get_children(path, only_direct=False)
                if children:
                    raise k_exceptions.NotEmptyError("Path %s is not-empty"
                                                     " (%s children exist)"
                                                     % (path, len(children)))
                paths = [path]
            # Remove deepest paths first so children go before parents.
            paths = list(reversed(sorted(set(paths))))
            with self.storage.transaction():
                for path in paths:
                    self.storage.pop(path)
            parents = []
            for path in paths:
                parents.extend(self.storage.get_parents(path))
            parents = list(reversed(sorted(set(parents))))
            # Parents get child watches; the removed nodes get data watches.
            for path in parents:
                event = k_states.WatchedEvent(
                    type=k_states.EventType.DELETED,
                    state=k_states.KeeperState.CONNECTED,
                    path=path)
                child_watches.append(([path], event))
            for path in paths:
                event = k_states.WatchedEvent(
                    type=k_states.EventType.DELETED,
                    state=k_states.KeeperState.CONNECTED,
                    path=path)
                data_watches.append(([path], event))
        return (True, data_watches, child_watches)
    def set(self, path, value, version=-1):
        """Update *path*'s data.

        Returns (stat, data_watches, child_watches) for the caller to fire.
        """
        if not isinstance(path, six.string_types):
            raise TypeError("path must be a string")
        if not isinstance(value, six.binary_type):
            raise TypeError("value must be a byte string")
        if not isinstance(version, int):
            raise TypeError("version must be an int")
        path = utils.normpath(path)
        try:
            stat = self.storage.set(path, value, version=version)
        except KeyError:
            raise k_exceptions.NoNodeError("Node %s does not exist" % (path))
        data_watches = []
        child_watches = []
        event = k_states.WatchedEvent(type=k_states.EventType.CHANGED,
                                      state=k_states.KeeperState.CONNECTED,
                                      path=path)
        data_watches.append(([path], event))
        return (stat, data_watches, child_watches)
    def create(self, path, value=b"", acl=None,
               ephemeral=False, sequence=False, makepath=False):
        """Create a node at *path*.

        Returns (path, data_watches, child_watches); ``path`` may differ
        from the input when ``sequence`` is used.
        """
        if not isinstance(path, six.string_types):
            raise TypeError("path must be a string")
        if not isinstance(value, six.binary_type):
            raise TypeError("value must be a byte string")
        if acl:
            raise NotImplementedError(_NO_ACL_MSG)
        data_watches = []
        child_watches = []
        with self.storage.lock:
            if sequence:
                # Keep the trailing slash so storage can append the counter.
                path = utils.normpath(path, keep_trailing=True)
            else:
                path = utils.normpath(path, keep_trailing=False)
            if makepath:
                # Recursively create missing ancestors, collecting their
                # watch events as well.
                for parent_path in utils.partition_path(path)[0:-1]:
                    if parent_path not in self.storage:
                        result = self.create(parent_path)
                        data_watches.extend(result[1])
                        child_watches.extend(result[2])
            created, parents, path = self.storage.create(
                path, value=value, sequence=sequence,
                ephemeral=ephemeral, session_id=self.session_id)
        if parents:
            event = k_states.WatchedEvent(type=k_states.EventType.CHILD,
                                          state=k_states.KeeperState.CONNECTED,
                                          path=path)
            child_watches.append((parents, event))
        if created:
            event = k_states.WatchedEvent(type=k_states.EventType.CREATED,
                                          state=k_states.KeeperState.CONNECTED,
                                          path=path)
            data_watches.append(([path], event))
        return (path, data_watches, child_watches)
class StopTransaction(Exception):
    """Internal signal used to abort a fake transaction commit."""
class StopTransactionNoExists(StopTransaction):
    """Abort signal: a checked node does not exist."""
class StopTransactionBadVersion(StopTransaction):
    """Abort signal: a checked node's version did not match."""
@contextlib.contextmanager
def try_txn_lock(lock):
    """Acquire *lock* without blocking for the duration of the block.

    Raises RuntimeError immediately if the lock is already held; once
    acquired, the lock is always released on exit.
    """
    if not lock.acquire(blocking=False):
        raise RuntimeError("Transaction can not be concurrently modified")
    try:
        yield
    finally:
        lock.release()
class DelayedOperation(object):
    """Named wrapper around a deferred transaction operation.

    Records the operation's name, target path and expected version so a
    pending transaction can be inspected; calling the instance runs the
    wrapped callable.
    """
    def __init__(self, name, operation, path=None, version=-1):
        self.name = name
        self.path = path
        self.version = version
        self._operation = operation
    def __call__(self):
        return self._operation()
class FakeTransactionRequest(object):
    """Accumulates operations and applies them atomically on commit()."""
    def __init__(self, client):
        # Per-transaction lock guarding against concurrent modification.
        self._lock = client.handler.rlock_object()
        self._client = client
        self._partial_client = client._partial_client
        self._storage = client.storage
        self.operations = []
        self.committed = False
    @property
    def storage(self):
        return self._storage
    def delete(self, path, version=-1):
        """Queue a delete of *path* at the given *version*."""
        delayed_op = functools.partial(self._partial_client.delete,
                                       path, version)
        self._add(DelayedOperation('delete', delayed_op,
                                   path=path, version=version))
    def check(self, path, version):
        """Queue a version check of *path*; mismatch aborts the commit."""
        def delayed_check(path, version):
            if not isinstance(path, six.string_types):
                raise TypeError("path must be a string")
            if not isinstance(version, int):
                raise TypeError("version must be an int")
            try:
                data = self._storage[path]
                if data['version'] != version:
                    raise StopTransactionBadVersion()
                else:
                    # Same (result, data_watches, child_watches) shape as
                    # the other delayed operations, with no watches.
                    return (True, [], [])
            except KeyError:
                raise StopTransactionNoExists()
        delayed_op = functools.partial(delayed_check, path, version)
        self._add(DelayedOperation('check', delayed_op,
                                   path=path, version=version))
    def set_data(self, path, value, version=-1):
        """Queue a data update of *path* at the given *version*."""
        delayed_op = functools.partial(self._partial_client.set,
                                       path, value, version)
        self._add(DelayedOperation('set_data', delayed_op,
                                   path=path, version=version))
    def create(self, path, value=b"", acl=None, ephemeral=False,
               sequence=False):
        """Queue creation of a node at *path*."""
        delayed_op = functools.partial(self._partial_client.create,
                                       path, value, acl, ephemeral, sequence)
        self._add(DelayedOperation('create', delayed_op, path=path))
    def commit(self):
        """Run all queued operations atomically.

        Returns a per-operation result list; on failure every entry is an
        exception instance (RolledBackError for operations that had already
        run) and no watches are fired.
        """
        self._check_tx_state()
        self._client.verify()
        with try_txn_lock(self._lock):
            self._check_tx_state()
            # Delay all watch firing until we are sure that it succeeded.
            results = []
            child_watches = []
            data_watches = []
            try:
                with self._storage.transaction():
                    for op in self.operations:
                        result = op()
                        results.append(result[0])
                        data_watches.extend(result[1])
                        child_watches.extend(result[2])
            except StopTransaction as e:
                # A check() failed: mark completed ops rolled back and
                # report the specific failure for the faulting op.
                for i in range(0, len(results)):
                    results[i] = k_exceptions.RolledBackError()
                if isinstance(e, StopTransactionBadVersion):
                    results.append(k_exceptions.BadVersionError())
                if isinstance(e, StopTransactionNoExists):
                    results.append(k_exceptions.NoNodeError())
                while len(results) != len(self.operations):
                    results.append(k_exceptions.RuntimeInconsistency())
            except (NotImplementedError, AttributeError,
                    RuntimeError, ValueError, TypeError,
                    k_exceptions.ConnectionClosedError,
                    k_exceptions.SessionExpiredError):
                # Allow all these errors to bubble up.
                six.reraise(*sys.exc_info())
            except Exception as e:
                for i in range(0, len(results)):
                    results[i] = k_exceptions.RolledBackError()
                results.append(e)
                while len(results) != len(self.operations):
                    results.append(k_exceptions.RuntimeInconsistency())
            else:
                # Success: now it is safe to fire the collected watches.
                self._storage.inform(self._client, child_watches, data_watches)
                self.committed = True
            return results
    def __enter__(self):
        return self
    def _check_tx_state(self):
        # A transaction may only be committed once.
        if self.committed:
            raise ValueError('Transaction already committed')
    def _add(self, request):
        with try_txn_lock(self._lock):
            self._check_tx_state()
            self.operations.append(request)
    def __exit__(self, type, value, tb):
        # Auto-commit on clean context exit only (no exception in flight).
        if not any((type, value, tb)):
            if not self.committed:
                self.commit()
| |
# -*- coding: utf-8 -*-
"""
Created on Thu May 4 16:48:43 2017
@author: S.Y. Agustsson
"""
from PyQt5 import QtGui as QG
from PyQt5 import QtWidgets as QW
from PyQt5 import QtCore as QC
import sys
import pyqtgraph as pg
import numpy as np
from lib import transient as tr
import qdarkstyle
import os
class MainWindow(QW.QMainWindow):
    """Main application window.

    Hosts a single TransientAnalysisWidget as the central widget and
    applies the dark qdarkstyle/pyqtgraph theme.
    """
    def __init__(self):
        """Initialize by setting window title, size and graphic options."""
        super().__init__()
        self.title = 'Transient Analyser'
        # Initial window geometry, in pixels.
        self.left = 300
        self.top = 100
        self.width = 1400
        self.height = 900
        self.initUI()
        # set the cool dark theme
        self.setStyleSheet(qdarkstyle.load_stylesheet_pyqt5())
        # Match pyqtgraph's plot colors to the dark stylesheet.
        pg.setConfigOption('background', 0.1)
        pg.setConfigOption('foreground', 0.7)

    def initUI(self):
        """Create the layout, adding central widget, layout style and status
        bar. """
        self.setWindowTitle(self.title)
        self.setGeometry(self.left, self.top, self.width, self.height)
        # Fix: in PyQt5 QGridLayout lives in QtWidgets (QW), not QtGui (QG);
        # QG.QGridLayout() raises AttributeError at runtime.
        layout = QW.QGridLayout()  # create a grid for subWidgets
        layout.setSpacing(10)
        self.setLayout(layout)
        self.centralWidget = TransientAnalysisWidget()
        layout.addWidget(self.centralWidget, 0, 0)
        self.setCentralWidget(self.centralWidget)
        self.statusBar().showMessage('Message in statusbar.')
        self.show()
class TransientAnalysisWidget(QW.QWidget):
    """Central widget: import/export controls, transient list, plot area and
    the data-analysis option box.

    NOTE(review): plotScanData, loadScanCSV, loadScanRaw and saveasCSV are
    each defined twice in this class body; Python keeps only the later
    definition, silently shadowing the earlier one.
    """

    def __init__(self):
        super().__init__()
        self.title = 'the GUI test'
        # self.left = 300
        # self.top = 100
        # self.width = 1400
        # self.height = 900
        self.initUI()
        # Imported transient series plus a scratch cache for derived data.
        self.data = tr.MultiTransients()
        self.data_memory = {}

    def initUI(self):
        """ Generate GUI layout """
        self.setWindowTitle(self.title)
        # self.setGeometry(self.left, self.top, self.width, self.height)
        self.make_layout()
        self.show()

    def make_layout(self):
        """ Generate the GUI layout """
        # NOTE(review): QGridLayout lives in PyQt5.QtWidgets -- QG.QGridLayout
        # is expected to raise AttributeError; should be QW.QGridLayout.
        layout = QG.QGridLayout()  # create a grid for subWidgets
        layout.setSpacing(10)
        self.setLayout(layout)
        self.setup_font_styles()
        # -------- Define items ----------
        # import button
        self.importFileBtn = QW.QPushButton('Import File(s)', self)
        self.importFileBtn.clicked.connect(self.import_multiple_files)
        layout.addWidget(self.importFileBtn, 18, 1, 1, 7)
        # Save block
        self.saveasCSVBtn = QW.QPushButton('Save as CSV', self)
        self.saveasCSVBtn.clicked.connect(self.saveasCSV)
        layout.addWidget(self.saveasCSVBtn, 16, 24, 1, 5)
        self.saveFigure = QW.QPushButton('Save Figure', self)
        # self.saveFigure.resize(self.clrButton.sizeHint())
        self.saveFigure.clicked.connect(self.no_function_yet)
        layout.addWidget(self.saveFigure, 18, 24, 1, 5)
        # File name block
        self.nameTxtbox = QW.QLabel('Series Name:', self)
        self.nameTxtbox.setFont(self.title_font)
        layout.addWidget(self.nameTxtbox, 1, 1, 1, 7)
        # NOTE(review): nameTxtbox is rebound here from the QLabel above to a
        # QLineEdit, so the label is no longer reachable by attribute name.
        self.nameTxtbox = QW.QLineEdit(self)
        self.nameTxtbox.setPlaceholderText('file name')
        self.nameTxtbox.editingFinished.connect(self.no_function_yet)
        self.nameTxtbox.returnPressed.connect(self.no_function_yet)
        layout.addWidget(self.nameTxtbox, 2, 1, 1, 7)
        # Metadata tree
        # self.metadataTree_name = qw.QLabel('Metadata:', self)
        # self.metadataTree_name.setFont(font)
        # layout.addWidget(self.metadataTree_name, 3, 1)
        # self.metadataTree = pg.DataTreeWidget()
        # self.metadataTree.setHeaderItem()
        # layout.addWidget(self.metadataTree, 4, 1, 13, 7)
        # Scan list tree
        self.scanListTree_label = QW.QLabel('Scans:', self)
        self.scanListTree_label.setFont(self.title_font)
        layout.addWidget(self.scanListTree_label, 3, 1)
        self.transientData_list = QW.QListWidget()
        # self.scanListTree = pg.parametertree.ParameterTree()
        # self.metadataTree.setHeaderItem()
        layout.addWidget(self.transientData_list, 4, 1, 13, 7)
        # Plot widget
        self.plotWidget_name = QW.QLabel('Plot', self)
        self.plotWidget_name.setFont(self.title_font)
        layout.addWidget(self.plotWidget_name, 1, 9)
        self.setup_plot_widget()
        layout.addWidget(self.plotWidget, 2, 9, 13, 20)
        # plot modification buttons
        # self.DataAnalysisBox_label = qw.QLabel('Modify', self)
        # self.DataAnalysisBox_label.setFont(font)
        # layout.addWidget(self.DataAnalysisBox_label, 15, 10)
        self.DataAnalysisBox = QW.QGroupBox()
        self.setup_data_analysis_box()
        layout.addWidget(self.DataAnalysisBox, 16, 10)

    #########################################################
    #########################################################
    #########################################################
    #########################################################
    # %% slots
    @QC.pyqtSlot()
    def no_function_yet(self):
        # Placeholder slot wired to controls that are not implemented yet.
        self.msg = QW.QMessageBox()
        self.msg.setIcon(QW.QMessageBox.Warning)
        self.msg.setText("No, this does nothing yet")
        # self.statusBar().showMessage('Message in statusbar.')
        # MainWindow.statusBar().showMessage('pushed the wrong button')
        self.msg.show()

    def setup_plot_widget(self):
        """ Create the widget for plotting scans and define it's
        properties"""
        pg.setConfigOptions(antialias=True)
        self.plotWidget = pg.PlotWidget()
        # pg.setConfigOption('background', 'w')
        # pg.setConfigOption('foreground', 'k')

    def setup_data_analysis_box(self):
        '''Build the data-modification controls inside DataAnalysisBox.'''
        self.DataAnalysisBox_layout = QW.QGridLayout()
        self.DataAnalysisBox.setLayout(self.DataAnalysisBox_layout)
        self.DataModification_label = QW.QLabel('Modifications:', self)
        self.DataModification_label.setFont(self.subtitle_font)
        self.DataAnalysisBox_layout.addWidget(self.DataModification_label,
                                              0, 0)
        self.flipX_cb = QW.QCheckBox(self.DataAnalysisBox)
        self.flipX_cb.setText('Flip x')
        self.flipX_cb.setChecked(True)
        # self.flipX_checkBox.clicked.connect(self.flip_time_scale)
        self.DataAnalysisBox_layout.addWidget(self.flipX_cb, 1, 0)
        self.removeDC_cb = QW.QCheckBox(self.DataAnalysisBox)
        self.removeDC_cb.setText('Remove DC Offset')
        # self.removeDC_cb.clicked.connect(self.remove_DC_offset)
        self.removeDC_cb.setChecked(True)
        self.DataAnalysisBox_layout.addWidget(self.removeDC_cb, 2, 0)
        self.setTimeZero_cb = QW.QCheckBox(self.DataAnalysisBox)
        self.setTimeZero_cb.setText('Set time Zero')
        # self.setTimeZero_cb.clicked.connect(self.set_time_zero)
        self.setTimeZero_cb.setChecked(True)
        self.DataAnalysisBox_layout.addWidget(self.setTimeZero_cb, 3, 0)
        self.setTimeZero_sb = pg.SpinBox(self.DataAnalysisBox)
        self.setTimeZero_sb.setMinimumSize(1, 25)
        # self.shiftTimeZero_input.valueChanged.connect(self.set_time_zero)
        self.DataAnalysisBox_layout.addWidget(self.setTimeZero_sb, 2, 1)
        # self.shiftTimeZero_input.setValidator(QtGui.QDoubleValidator())
        # self.shiftTimeZero_input.textChanged.connect(self.setShiftTimeZero)
        # Filter
        # self.timeZero = 0
        self.filter_label = QW.QLabel('Filter [THz]', self.DataAnalysisBox)
        self.filter_label.setFont(self.subtitle_font)
        # self.filterBox_name.setFont(font)
        self.DataAnalysisBox_layout.addWidget(self.filter_label, 0, 2)
        # self.filterLowPass_label = qw.QLabel('Low Pass', self.DataAnalysisBox)
        self.filterLowPass_cb = QW.QCheckBox(self.DataAnalysisBox)
        self.filterLowPass_cb.setText('Low Pass Frequency')
        # self.setTimeZero_cb.clicked.connect(self.set_time_zero)
        self.filterLowPass_cb.setChecked(True)
        self.DataAnalysisBox_layout.addWidget(self.filterLowPass_cb, 1, 2)
        self.filterLowPass_sb = pg.SpinBox(self, dec=True)
        self.filterLowPass_sb.setMinimumSize(1, 25)
        # self.filterLowPassFreq.setPlaceholderText('freq')
        # NOTE(review): filter_data_lowpass reads self.filterLowPassFreq,
        # which is never created -- this connection raises AttributeError
        # as soon as the spin box value changes.
        self.filterLowPass_sb.valueChanged.connect(self.filter_data_lowpass)
        self.DataAnalysisBox_layout.addWidget(self.filterLowPass_sb, 1, 3)

    def setup_font_styles(self):
        """ Give settings for fonts to use in widget"""
        self.title_font = QG.QFont()
        self.subtitle_font = QG.QFont()
        self.text_font = QG.QFont()
        self.title_font.setBold(True)
        self.title_font.setPixelSize(15)
        self.subtitle_font.setPixelSize(12)
        self.subtitle_font.setBold(True)
        self.text_font.setPixelSize(10)

    # %% import export
    @QC.pyqtSlot()
    def import_single_file(self):  # todo: translate for new Transient() class
        """ Import a single file from either .mat or .txt (csv) file."""
        self.scanData = tr.Transient()
        filename = self.openFileNameDialog()  # choose the file to import
        self.scanData.import_file(filename)
        self.scanData.initParameters()
        self.plotScanData()
        # ext = os.path.splitext(filename)[-1].lower()
        # if ext == ".mat":
        #     self.loadScanRaw(filename)
        # elif ext == ".txt":
        #     self.loadScanCSV(filename)
        # else:
        #     print("wrong file type, please try again")

    def import_multiple_files(self):  # todo: Improve! Rethink importing method
        """
        Import multiple files to analysis program.
        for now only overwrites, adding append function soon.
        """
        filename = self.openFileNameDialog()
        append = False
        self.data.import_files(filename, append)
        print(self.data)
        self.refresh_transient_list()
        self.plot_all_transients()

    def refresh_transient_list(self):
        """ refresh list of transients reported in list_widget"""
        # for n, transient in enumerate(self.data):
        # self.transientData_list.clear()
        self.transientData_list.addItem('test')  # placeholder entry

    def plot_all_transients(self):
        """Plot every imported transient on the shared plot widget."""
        for scan in self.data:
            x = scan.time
            y = scan.trace
            self.plot = self.plotWidget.plot(x, y, pen=(255, 0, 0))

    def plot_selected_transients(self):  # todo: write method
        """Plot only the transients selected in the list (not implemented)."""
        pass

    def plotScanData(self):  # todo: translate for TransientsSet()
        """ clears the graph and plots a fresh graph from scanData"""
        # NOTE(review): shadowed by the identical plotScanData defined later
        # in this class.
        self.plotWidget.clear()
        x = self.scanData.time
        y = self.scanData.trace
        self.plot = self.plotWidget.plot(x, y, pen=(255, 0, 0))

    def loadScanCSV(self, filename):  # todo: translate for TransientsSet()
        # NOTE(review): `rr` is not imported in this module -- NameError when
        # called.  Also shadowed by the duplicate loadScanCSV further down.
        # filename = self.openFileNameDialog()
        self.scanData = rr.rrScan()
        self.scanData.import_file_csv(filename)
        self.scanData.initParameters()
        self.plotScanData()

    def loadScanRaw(self, filename):  # todo: translate for TransientsSet()
        # NOTE(review): `rr` undefined here as well; duplicate defined later.
        # filename = self.openFileNameDialog()
        self.scanData = rr.rrScan()
        self.scanData.importRawFile(filename)
        self.scanData.initParameters()
        print(self.scanData.parameters)
        # self.tree.addChild(self.scanData.parameters)
        # self.plotData()
        self.plotScanData()

    def saveasCSV(self):  # todo: translate for TransientsSet()
        """save object rrScan() to csv"""
        # NOTE(review): shadowed by the duplicate saveasCSV further down;
        # `rr` is not imported.
        savedir = rr.getFolder()
        print(savedir)
        self.scanData.export_file_csv(savedir)

    # %% Scan Modifcations - old
    @QC.pyqtSlot()
    def flip_time_scale(self):  # todo: translate for TransientsSet(), is it really useful?
        """Mirror the time axis of the current scan and replot."""
        self.scanData.flip_time()
        self.plotScanData()

    @QC.pyqtSlot()
    def flip_trace(self):  # todo: translate for TransientsSet(), is it really useful?
        """ Flip data on y scale: f(x) = -f(x).
        Replot data"""
        self.scanData.flip_trace()
        self.plotScanData()

    @QC.pyqtSlot()
    def shift_time_scale(self):  # todo: translate for TransientsSet(), is it really useful?
        """shift time of scan by timeZero, value given in the QLineEdit shift_time_scale"""
        # NOTE(review): self.shiftTimeZero_input is never created in this
        # class (only referenced in commented-out code) -- confirm.
        txt = self.shiftTimeZero_input.text()
        num = float(txt)
        self.timeZero = num
        self.scanData.shift_time(self.timeZero)
        self.plotScanData()

    @QC.pyqtSlot()
    def set_time_zero(self):  # todo: translate for TransientsSet(), is it really useful?
        """set value given in shiftTimeZero_input() as new time zero"""
        txt = self.shiftTimeZero_input.text()
        num = float(txt)
        # Undo the previous shift before applying the new zero point.
        self.scanData.shift_time(-self.timeZero)
        self.timeZero = num
        self.scanData.shift_time(self.timeZero)
        self.plotScanData()

    @QC.pyqtSlot()
    def filter_data_lowpass(self):  # todo: translate for TransientsSet(), is it really useful?
        """get filter frequency from textbox and apply the filter to a single scan"""
        # NOTE(review): self.filterLowPassFreq does not exist (the control is
        # filterLowPass_sb, a pg.SpinBox) -- this slot raises AttributeError.
        freq = float(self.filterLowPassFreq.text())
        self.scanData.trace = self.scanData.rawtrace
        nyqfreq = self.scanData.nyqistFreq()
        # Only apply the filter for a meaningful cutoff below Nyquist.
        if freq != 0 and freq < nyqfreq:
            cutfactor = freq / nyqfreq
            self.scanData.filter_low_pass(cutHigh=cutfactor)
        self.plotScanData()

    @QC.pyqtSlot()
    def remove_DC_offset(self):  # todo: translate for TransientsSet(), is it really useful?
        """Subtract the DC offset from the current scan and replot."""
        self.scanData.remove_DC_offset()
        self.plotScanData()

    # %% Plots
    def plotScanData(self):  # todo: translate for TransientsSet(), is it really useful?
        """ clears the graph and plots a fresh graph from scanData"""
        self.plotWidget.clear()
        x = self.scanData.time
        y = self.scanData.trace
        self.plot = self.plotWidget.plot(x, y, pen=(255, 0, 0))

    def clearPlot(self):  # todo: translate for TransientsSet(), is it really useful?
        """clears all graphs from plot, after asking confermation"""
        reply = QW.QMessageBox.question(
            self, 'Message',
            "Are you sure you want to clear the graph completely?",
            QW.QMessageBox.Yes | QW.QMessageBox.No, QW.QMessageBox.No)
        if reply == QW.QMessageBox.Yes:
            self.plotWidget.clear()

    def plotDataTest(self):  # todo: translate for TransientsSet(), is it really useful?
        """ plot a test curve in the plot widget"""
        x = np.arange(0, 1000, 1)
        noise = np.random.normal(0, 1, 1000) / 1
        y = np.sin(x / 10) + noise
        self.plot = self.plotWidget.plot(x, y, color='g')

    # %% import export
    def loadScanCSV(self, filename):
        """Load a scan from CSV into self.scanData and replot."""
        # NOTE(review): duplicate definition (shadows the earlier one);
        # `rr` is not imported in this module.
        # filename = self.openFileNameDialog()
        self.scanData = rr.rrScan()
        self.scanData.import_file_csv(filename)
        self.scanData.initParameters()
        self.plotScanData()

    def loadScanRaw(self, filename):
        """Load a raw scan file into self.scanData and replot."""
        # NOTE(review): duplicate definition; `rr` is not imported.
        # filename = self.openFileNameDialog()
        self.scanData = rr.rrScan()
        self.scanData.importRawFile(filename)
        self.scanData.initParameters()
        print(self.scanData.parameters)
        # self.tree.addChild(self.scanData.parameters)
        # self.plotData()
        self.plotScanData()

    def saveasCSV(self):
        """save object rrScan() to csv"""
        # NOTE(review): duplicate definition; `rr` is not imported.
        savedir = rr.getFolder()
        print(savedir)
        self.scanData.export_file_csv(savedir)

    # %% other
    def openFileNameDialog(self):
        """Show a file picker; return the chosen path, or None if cancelled."""
        options = QW.QFileDialog.Options()
        options |= QW.QFileDialog.DontUseNativeDialog
        fileName, _ = QW.QFileDialog.getOpenFileName(
            self, "QFileDialog.getOpenFileName()", "",
            "All Files (*);;Python Files (*.py)", options=options)
        if fileName:
            return (fileName)

    def set_statusbar(self):
        """Demo yes/no dialog that writes the answer to the status bar."""
        # NOTE(review): QWidget has no statusBar(); that method belongs to
        # QMainWindow -- confirm this is ever called on the right object.
        buttonReply = QW.QMessageBox.question(
            self, 'PyQt5 message', "Do you like PyQt5?",
            QW.QMessageBox.Yes | QW.QMessageBox.No,
            QW.QMessageBox.No)
        if buttonReply == QW.QMessageBox.Yes:
            string = 'yes'
        else:
            string = 'no'
        self.statusBar().showMessage(string)
if __name__ == '__main__':
    # Reuse a pre-existing Qt application instance if one is already running
    # (e.g. inside an IDE/IPython session); otherwise create a fresh one.
    app = QC.QCoreApplication.instance()
    if app is None:
        # Fix: QApplication lives in PyQt5.QtWidgets; QtGui only provides
        # QGuiApplication, so QG.QApplication raised AttributeError.
        app = QW.QApplication(sys.argv)
    prg = MainWindow()
    # Apply the dark stylesheet and pyqtgraph colours globally.
    prg.setStyleSheet(qdarkstyle.load_stylesheet_pyqt5())
    pg.setConfigOption('background', 0.1)
    pg.setConfigOption('foreground', 0.7)
    prg.show()
    app.exec_()
| |
# Copyright 2015 Tesora Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from proboscis import test
from trove.tests.scenario import groups
from trove.tests.scenario.groups.test_group import TestGroup
from trove.tests.scenario.runners import test_runners
# Proboscis group name shared by every backup/restore test class below.
GROUP = "scenario.backup_restore_group"
class BackupRunnerFactory(test_runners.RunnerFactory):
    """Factory yielding the shared BackupRunner used by all groups below."""

    # Runner namespace and class name consumed by test_runners.RunnerFactory.
    _runner_ns = 'backup_runners'
    _runner_cls = 'BackupRunner'
@test(depends_on_groups=[groups.INST_CREATE],
      groups=[GROUP, groups.BACKUP, groups.BACKUP_CREATE])
class BackupCreateGroup(TestGroup):
    """Test Backup Create functionality."""

    def __init__(self):
        # Every group in this module drives the same shared BackupRunner.
        super(BackupCreateGroup, self).__init__(
            BackupRunnerFactory.instance())

    @test
    def add_data_for_backup(self):
        """Add data to instance for restore verification."""
        self.test_runner.run_add_data_for_backup()

    @test(runs_after=[add_data_for_backup])
    def verify_data_for_backup(self):
        """Verify data in instance."""
        self.test_runner.run_verify_data_for_backup()

    @test(runs_after=[verify_data_for_backup])
    def save_backup_counts(self):
        """Store the existing backup counts."""
        self.test_runner.run_save_backup_counts()

    @test(runs_after=[save_backup_counts])
    def backup_create(self):
        """Check that create backup is started successfully."""
        self.test_runner.run_backup_create()
@test(depends_on_classes=[BackupCreateGroup],
      groups=[GROUP, groups.BACKUP_CREATE_NEGATIVE])
class BackupCreateNegativeGroup(TestGroup):
    """Test Backup Create Negative functionality."""

    # These negative cases run while the backup started in BackupCreateGroup
    # is still in progress.
    def __init__(self):
        super(BackupCreateNegativeGroup, self).__init__(
            BackupRunnerFactory.instance())

    @test
    def backup_delete_while_backup_running(self):
        """Ensure delete backup fails while it is running."""
        self.test_runner.run_backup_delete_while_backup_running()

    @test(runs_after=[backup_delete_while_backup_running])
    def restore_instance_from_not_completed_backup(self):
        """Ensure a restore fails while the backup is running."""
        self.test_runner.run_restore_instance_from_not_completed_backup()

    @test(runs_after=[restore_instance_from_not_completed_backup])
    def backup_create_another_backup_running(self):
        """Ensure create backup fails when another backup is running."""
        self.test_runner.run_backup_create_another_backup_running()

    @test(runs_after=[backup_create_another_backup_running])
    def instance_action_right_after_backup_create(self):
        """Ensure any instance action fails while backup is running."""
        self.test_runner.run_instance_action_right_after_backup_create()

    @test(runs_after=[instance_action_right_after_backup_create])
    def delete_unknown_backup(self):
        """Ensure deleting an unknown backup fails."""
        self.test_runner.run_delete_unknown_backup()

    @test(runs_after=[instance_action_right_after_backup_create])
    def backup_create_instance_invalid(self):
        """Ensure create backup fails with invalid instance id."""
        self.test_runner.run_backup_create_instance_invalid()

    @test(runs_after=[instance_action_right_after_backup_create])
    def backup_create_instance_not_found(self):
        """Ensure create backup fails with unknown instance id."""
        self.test_runner.run_backup_create_instance_not_found()
@test(depends_on_classes=[BackupCreateNegativeGroup],
      groups=[GROUP, groups.BACKUP, groups.BACKUP_CREATE_WAIT])
class BackupCreateWaitGroup(TestGroup):
    """Wait for Backup Create to Complete."""

    def __init__(self):
        super(BackupCreateWaitGroup, self).__init__(
            BackupRunnerFactory.instance())

    @test
    def backup_create_completed(self):
        """Check that the backup completes successfully."""
        self.test_runner.run_backup_create_completed()

    @test(depends_on=[backup_create_completed])
    def instance_goes_active(self):
        """Check that the instance goes active after the backup."""
        self.test_runner.run_instance_goes_active()

    # The listing/show checks below only make sense once the backup exists,
    # hence the depends_on to backup_create_completed.
    @test(depends_on=[backup_create_completed])
    def backup_list(self):
        """Test list backups."""
        self.test_runner.run_backup_list()

    @test(depends_on=[backup_create_completed])
    def backup_list_filter_datastore(self):
        """Test list backups and filter by datastore."""
        self.test_runner.run_backup_list_filter_datastore()

    @test(depends_on=[backup_create_completed])
    def backup_list_filter_datastore_not_found(self):
        """Test list backups and filter by unknown datastore."""
        self.test_runner.run_backup_list_filter_datastore_not_found()

    @test(depends_on=[backup_create_completed])
    def backup_list_for_instance(self):
        """Test backup list for instance."""
        self.test_runner.run_backup_list_for_instance()

    @test(depends_on=[backup_create_completed])
    def backup_get(self):
        """Test backup show."""
        self.test_runner.run_backup_get()

    @test(depends_on=[backup_create_completed])
    def backup_get_unauthorized_user(self):
        """Ensure backup show fails for an unauthorized user."""
        self.test_runner.run_backup_get_unauthorized_user()
@test(depends_on_classes=[BackupCreateWaitGroup],
      groups=[GROUP, groups.BACKUP_INC, groups.BACKUP_INC_CREATE])
class BackupIncCreateGroup(TestGroup):
    """Test Backup Incremental Create functionality."""

    # Creates two incremental backups (inc 1, inc 2) on top of the full
    # backup produced by the earlier groups, adding fresh data before each.
    def __init__(self):
        super(BackupIncCreateGroup, self).__init__(
            BackupRunnerFactory.instance())

    @test
    def add_data_for_inc_backup_1(self):
        """Add data to instance for inc backup 1."""
        self.test_runner.run_add_data_for_inc_backup_1()

    @test(depends_on=[add_data_for_inc_backup_1])
    def verify_data_for_inc_backup_1(self):
        """Verify data in instance for inc backup 1."""
        self.test_runner.run_verify_data_for_inc_backup_1()

    @test(depends_on=[verify_data_for_inc_backup_1])
    def inc_backup_1(self):
        """Run incremental backup 1."""
        self.test_runner.run_inc_backup_1()

    @test(depends_on=[inc_backup_1])
    def wait_for_inc_backup_1(self):
        """Check that inc backup 1 completes successfully."""
        self.test_runner.run_wait_for_inc_backup_1()

    @test(depends_on=[wait_for_inc_backup_1])
    def add_data_for_inc_backup_2(self):
        """Add data to instance for inc backup 2."""
        self.test_runner.run_add_data_for_inc_backup_2()

    @test(depends_on=[add_data_for_inc_backup_2])
    def verify_data_for_inc_backup_2(self):
        """Verify data in instance for inc backup 2."""
        self.test_runner.run_verify_data_for_inc_backup_2()

    @test(depends_on=[wait_for_inc_backup_1],
          runs_after=[verify_data_for_inc_backup_2])
    def instance_goes_active_inc_1(self):
        """Check that the instance goes active after the inc 1 backup."""
        self.test_runner.run_instance_goes_active()

    @test(depends_on=[verify_data_for_inc_backup_2],
          runs_after=[instance_goes_active_inc_1])
    def inc_backup_2(self):
        """Run incremental backup 2."""
        self.test_runner.run_inc_backup_2()

    @test(depends_on=[inc_backup_2])
    def wait_for_inc_backup_2(self):
        """Check that inc backup 2 completes successfully."""
        self.test_runner.run_wait_for_inc_backup_2()

    @test(depends_on=[wait_for_inc_backup_2])
    def instance_goes_active_inc_2(self):
        """Check that the instance goes active after the inc 2 backup."""
        self.test_runner.run_instance_goes_active()
@test(depends_on_classes=[BackupIncCreateGroup],
      groups=[GROUP, groups.BACKUP_INST, groups.BACKUP_INST_CREATE])
class BackupInstCreateGroup(TestGroup):
    """Test Backup Instance Create functionality."""

    def __init__(self):
        super(BackupInstCreateGroup, self).__init__(
            BackupRunnerFactory.instance())

    @test
    def restore_from_backup(self):
        """Check that restoring an instance from a backup starts."""
        self.test_runner.run_restore_from_backup()
@test(depends_on_classes=[BackupInstCreateGroup],
      groups=[GROUP, groups.BACKUP_INST, groups.BACKUP_INST_CREATE_WAIT])
class BackupInstCreateWaitGroup(TestGroup):
    """Test Backup Instance Create completes."""

    def __init__(self):
        super(BackupInstCreateWaitGroup, self).__init__(
            BackupRunnerFactory.instance())

    @test
    def restore_from_backup_completed(self):
        """Wait until restoring an instance from a backup completes."""
        self.test_runner.run_restore_from_backup_completed()

    @test(depends_on=[restore_from_backup_completed])
    def verify_data_in_restored_instance(self):
        """Verify data in restored instance."""
        self.test_runner.run_verify_data_in_restored_instance()

    @test(depends_on=[restore_from_backup_completed])
    def verify_databases_in_restored_instance(self):
        """Verify databases in restored instance."""
        self.test_runner.run_verify_databases_in_restored_instance()
@test(depends_on_classes=[BackupInstCreateWaitGroup],
      groups=[GROUP, groups.BACKUP_INST, groups.BACKUP_INST_DELETE])
class BackupInstDeleteGroup(TestGroup):
    """Test Backup Instance Delete functionality."""

    def __init__(self):
        super(BackupInstDeleteGroup, self).__init__(
            BackupRunnerFactory.instance())

    @test
    def delete_restored_instance(self):
        """Test deleting the restored instance."""
        self.test_runner.run_delete_restored_instance()
@test(depends_on_classes=[BackupInstDeleteGroup],
      groups=[GROUP, groups.BACKUP_INST, groups.BACKUP_INST_DELETE_WAIT])
class BackupInstDeleteWaitGroup(TestGroup):
    """Test Backup Instance Delete completes."""

    def __init__(self):
        super(BackupInstDeleteWaitGroup, self).__init__(
            BackupRunnerFactory.instance())

    @test
    def wait_for_restored_instance_delete(self):
        """Wait until deleting the restored instance completes."""
        self.test_runner.run_wait_for_restored_instance_delete()
@test(depends_on_classes=[BackupInstDeleteWaitGroup],
      groups=[GROUP, groups.BACKUP_INC_INST,
              groups.BACKUP_INC_INST_CREATE])
class BackupIncInstCreateGroup(TestGroup):
    """Test Backup Incremental Instance Create functionality."""

    def __init__(self):
        super(BackupIncInstCreateGroup, self).__init__(
            BackupRunnerFactory.instance())

    @test
    def restore_from_inc_1_backup(self):
        """Check that restoring an instance from inc 1 backup starts."""
        self.test_runner.run_restore_from_inc_1_backup()
@test(depends_on_classes=[BackupIncInstCreateGroup],
      groups=[GROUP, groups.BACKUP_INC_INST,
              groups.BACKUP_INC_INST_CREATE_WAIT])
class BackupIncInstCreateWaitGroup(TestGroup):
    """Test Backup Incremental Instance Create completes."""

    def __init__(self):
        super(BackupIncInstCreateWaitGroup, self).__init__(
            BackupRunnerFactory.instance())

    @test
    def restore_from_inc_1_backup_completed(self):
        """Wait until restoring an inst from inc 1 backup completes."""
        self.test_runner.run_restore_from_inc_1_backup_completed()

    @test(depends_on=[restore_from_inc_1_backup_completed])
    def verify_data_in_restored_inc_1_instance(self):
        """Verify data in restored inc 1 instance."""
        self.test_runner.run_verify_data_in_restored_inc_1_instance()

    @test(depends_on=[restore_from_inc_1_backup_completed])
    def verify_databases_in_restored_inc_1_instance(self):
        """Verify databases in restored inc 1 instance."""
        self.test_runner.run_verify_databases_in_restored_inc_1_instance()
@test(depends_on_classes=[BackupIncInstCreateWaitGroup],
      groups=[GROUP, groups.BACKUP_INC_INST,
              groups.BACKUP_INC_INST_DELETE])
class BackupIncInstDeleteGroup(TestGroup):
    """Test Backup Incremental Instance Delete functionality."""

    def __init__(self):
        super(BackupIncInstDeleteGroup, self).__init__(
            BackupRunnerFactory.instance())

    @test
    def delete_restored_inc_1_instance(self):
        """Test deleting the restored inc 1 instance."""
        self.test_runner.run_delete_restored_inc_1_instance()
@test(depends_on_classes=[BackupIncInstDeleteGroup],
      groups=[GROUP, groups.BACKUP_INC_INST,
              groups.BACKUP_INC_INST_DELETE_WAIT])
class BackupIncInstDeleteWaitGroup(TestGroup):
    """Test Backup Incremental Instance Delete completes."""

    def __init__(self):
        super(BackupIncInstDeleteWaitGroup, self).__init__(
            BackupRunnerFactory.instance())

    @test
    def wait_for_restored_inc_1_instance_delete(self):
        """Wait until deleting the restored inc 1 instance completes."""
        self.test_runner.run_wait_for_restored_inc_1_instance_delete()
@test(depends_on_classes=[BackupIncInstDeleteWaitGroup],
      groups=[GROUP, groups.BACKUP_INC, groups.BACKUP_INC_DELETE])
class BackupIncDeleteGroup(TestGroup):
    """Test Backup Incremental Delete functionality."""

    def __init__(self):
        super(BackupIncDeleteGroup, self).__init__(
            BackupRunnerFactory.instance())

    @test
    def delete_inc_2_backup(self):
        """Test deleting the inc 2 backup."""
        # We only delete the inc 2 backup, as the inc 1 should be deleted
        # by the full backup delete that runs after.
        self.test_runner.run_delete_inc_2_backup()
@test(depends_on_classes=[BackupIncDeleteGroup],
      groups=[GROUP, groups.BACKUP, groups.BACKUP_DELETE])
class BackupDeleteGroup(TestGroup):
    """Test Backup Delete functionality."""

    def __init__(self):
        super(BackupDeleteGroup, self).__init__(
            BackupRunnerFactory.instance())

    @test
    def delete_backup_unauthorized_user(self):
        """Ensure deleting backup by an unauthorized user fails."""
        self.test_runner.run_delete_backup_unauthorized_user()

    @test(runs_after=[delete_backup_unauthorized_user])
    def delete_backup(self):
        """Test deleting the backup."""
        self.test_runner.run_delete_backup()

    @test(depends_on=[delete_backup])
    def check_for_incremental_backup(self):
        """Test that backup children are deleted."""
        self.test_runner.run_check_for_incremental_backup()

    @test
    def remove_backup_data_from_instance(self):
        """Remove the backup data from the original instance."""
        self.test_runner.run_remove_backup_data_from_instance()
| |
#coding=utf-8
#-*- coding: utf-8 -*-
import os
import re
import sys
import time
import math
import pytz
import numpy
import talib
import datetime
import urllib2
sys.path.append("../frame/")
import fetch_data
from loggingex import LOG_INFO
from loggingex import LOG_ERROR
from loggingex import LOG_WARNING
from job_base import job_base
from prepare_table import prepare_table
from mysql_manager import mysql_manager
from stock_conn_manager import stock_conn_manager
class second_buy_point(job_base):
def __init__(self):
    # Stateless job: all work happens per-invocation in run().
    pass
def run(self):
    """Job entry point: screen every candidate share for a second buy point."""
    for row in self._get_all_share_ids():
        # Each row is a single-column result set: (share_id, ...)
        self._calc(row[0])
    LOG_INFO("run second_buy_point")
def _get_all_share_ids(self):
    """Return distinct share ids from today's trade_info_<date> table.

    Currently restricted to a hard-coded sample of share ids; the
    commented-out queries below select all shares / a single share instead.
    """
    date_info = time.strftime('%Y_%m_%d')
    trade_table_name = "trade_info_%s" % (date_info)
    share_ids = fetch_data.get_data(fetch_data.select_db("daily_temp", trade_table_name, ["share_id"],{"share_id":[["000001","000010","000301","000601","000901","002101","002401","002701","300001","300301","600301","600601","601801","603001","603601","603901"],"in"]}, pre = "distinct"))
    #share_ids = fetch_data.get_data(fetch_data.select_db("daily_temp", trade_table_name, ["share_id"],{}, pre = "distinct"))
    #share_ids = fetch_data.get_data(fetch_data.select_db("daily_temp", trade_table_name, ["share_id"],{"share_id":["000001","="]}, pre = "distinct"))
    return share_ids
def _calc(self, share_id):
    """Screen one share: compute candidate indexes, filter them, print buys."""
    table_name = "daily_info_ex_dividend_%s" % (share_id)
    # NOTE(review): _get_average_info, _get_cmp_indexs and _filter_indexs are
    # not visible in this chunk -- presumably defined elsewhere; confirm.
    all_data = self._get_average_info(share_id, table_name, 360)
    indexs_info = self._get_cmp_indexs(all_data)
    match_indexs = indexs_info[0]
    mismatch_indexs_info = indexs_info[1]
    #print "**********************************************************"
    #print share_id
    #print "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$"
    #print "match indexs:"
    #self._print_result(all_data, match_indexs)
    #print "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$"
    #print "mismatch_indexs:"
    #self._print_result(all_data, mismatch_indexs_info)
    #print "#####################################################"
    filter_results = self._filter_indexs(all_data, match_indexs)
    buy_info = filter_results[0]
    # Keep only signals from the last ten days.
    buy_info = self._filter_time(all_data, buy_info)
    #not_buy_info = filter_results[1]
    if len(buy_info) > 0:
        print share_id
        print "buy info:"
        self._print_result(all_data, buy_info)
        print "#####################################################"
    #print "not buy info"
    #self._print_result(all_data, not_buy_info)
    #print "**********************************************************"
def _filter_time(self, data, indexs_info):
    """Keep only (index, info) pairs whose row timestamp is within 10 days.

    An entry survives when its ``data[index]["time"]`` is strictly newer
    than the epoch timestamp of "now minus ten days".
    """
    ten_days_back = datetime.datetime.now() - datetime.timedelta(days=10)
    cutoff = int(time.mktime(ten_days_back.timetuple()))
    return [pair for pair in indexs_info
            if data[pair[0]]["time"] > cutoff]
def _print_result(self, data, indexs_info):
    """Print one "YYYY-MM-DD index info" line per (index, info) pair."""
    for index_info in indexs_info:
        index = index_info[0]
        info = index_info[1]
        # Row timestamp is epoch seconds; format as a calendar date.
        a = data[index]["time"]
        x = time.localtime(int(a))
        time_str = time.strftime('%Y-%m-%d', x)
        print "%s %d %s" % (time_str, index, info)
def _get_history_data(self, data, index, period, type, pre = True):
    """Return (row_index, row[type]) for the row `period` steps away.

    With ``pre`` truthy the offset looks backwards (index - period),
    otherwise forwards (index + period).  Returns None when the target
    row falls outside the data range.
    """
    offset = -period if pre else period
    target = index + offset
    if 0 <= target < len(data):
        return (target, data[target][type])
    return None
def _get_history_max_min_info(self, data, index, period, type, include_today = True, pre = True, max=True):
    """Return the (row_index, value) extreme over `period` rows near `index`.

    Steps 0..period-1 away from `index` (direction set by ``pre``); step 0
    (today) is skipped when ``include_today`` is False.  ``max`` selects the
    maximum, otherwise the minimum.  Returns None when no row is reachable.
    """
    best = None
    for step in range(period):
        # Original guard kept verbatim (False == include_today) to preserve
        # its exact truthiness semantics.
        if False == include_today and step == 0:
            continue
        candidate = self._get_history_data(data, index, step, type, pre)
        if candidate is None:
            continue
        if best is None:
            best = candidate
        elif max and candidate[1] > best[1]:
            best = candidate
        elif not max and candidate[1] < best[1]:
            best = candidate
    return best
def _check_cmp_cond(self, data, index):
    """Check whether day ``index`` qualifies as a comparison (candidate) day.

    Returns a ``(matched, reason)`` tuple: ``matched`` is True when every
    volume/price condition below holds, otherwise False with ``reason``
    describing the first check that failed.
    """
    period = 5
    # NOTE(review): close_offset_max is only used by the disabled
    # high/close offset check kept in comments below.
    close_offset_max = 0.4
    index_volume_times_than_max = 1.2
    after_period = 5
    if period > index:
        return (False, "period(%d) bigger than index(%d)" % (period, index))
    # Today must be the lookback-window maximum for volume and also for
    # either today_high or pchg.
    pchg_max_info = self._get_history_max_min_info(data, index, period, "pchg", include_today = True, pre = True, max=True)
    volume_max_info = self._get_history_max_min_info(data, index, period, "volume", include_today = True, pre = True, max=True)
    high_max_info = self._get_history_max_min_info(data, index, period, "today_high", include_today = True, pre = True, max=True)
    if None == pchg_max_info or None == volume_max_info or None == high_max_info:
        return (False, "get %d pre %d pchg volume today_high error" % (index, period))
    cmp_index = volume_max_info[0]
    if cmp_index != high_max_info[0] and cmp_index != pchg_max_info[0]:
        return (False, "vol max index(%d) not equal high max index(%d) or pchg max index(%d)" % (volume_max_info[0], high_max_info[0], pchg_max_info[0]))
    if cmp_index != index:
        return (False, "today(%d) is not compare day(%d)" % (index, cmp_index))
    today_high_info = self._get_history_data(data, index, 0, "today_high")
    today_close_info = self._get_history_data(data, index, 0, "today_close")
    today_open_info = self._get_history_data(data, index, 0, "today_open")
    if None == today_high_info or None == today_close_info or None == today_open_info:
        return (False, "get today(%d) today_high today_close today_open error" % index)
    today_close = float(today_close_info[1])
    today_high = float(today_high_info[1])
    today_high_sub_close = today_high - today_close
    today_close_sub_open = today_close - float(today_open_info[1])
    #if 0 == today_close_sub_open:
    #    return False
    #
    #if today_high_sub_close / today_close_sub_open > close_offset_max:
    #    return False
    # Today's volume must exceed both the 5- and 10-day volume moving
    # averages by the configured multiplier.
    today_volume_info = self._get_history_data(data, index, 0, "volume")
    today_volume_ma5_info = self._get_history_data(data, index, 0, "volume_ma5")
    today_volume_ma10_info = self._get_history_data(data, index, 0, "volume_ma10")
    if None == today_volume_info or None == today_volume_ma5_info or None == today_volume_ma10_info:
        return (False, "get today(%d) volume volume_ma5 volume_ma10 error" % index)
    today_volume = float(today_volume_info[1])
    today_volume_ma5 = float(today_volume_ma5_info[1])
    today_volume_ma10 = float(today_volume_ma10_info[1])
    if today_volume < index_volume_times_than_max * today_volume_ma5:
        return (False, "today(%d) vol(%f) is not %f times vol_ma5(%f)" % (index, today_volume, index_volume_times_than_max, today_volume_ma5))
    if today_volume < index_volume_times_than_max * today_volume_ma10:
        return (False, "today(%d) vol(%f) is not %f times vol_ma10(%f)" % (index, today_volume, index_volume_times_than_max, today_volume_ma10))
    today_close_ma5_info = self._get_history_data(data, index, 0, "close_ma5")
    today_close_ma10_info = self._get_history_data(data, index, 0, "close_ma10")
    today_close_ma20_info = self._get_history_data(data, index, 0, "close_ma20")
    today_close_ma30_info = self._get_history_data(data, index, 0, "close_ma30")
    today_close_ma60_info = self._get_history_data(data, index, 0, "close_ma60")
    if None == today_close_ma5_info or None == today_close_ma10_info or None == today_close_ma20_info or None == today_close_ma30_info or None == today_close_ma60_info:
        return (False, "get today(%d) close ma5 ma10 ma20 ma30 ma60 error" % index)
    today_close_ma5 = today_close_ma5_info[1]
    today_close_ma10 = today_close_ma10_info[1]
    today_close_ma20 = today_close_ma20_info[1]
    today_close_ma30 = today_close_ma30_info[1]
    today_close_ma60 = today_close_ma60_info[1]
    # Reject a configuration where ma30 sits above all shorter averages, and
    # require today's high to clear every tracked average.
    if today_close_ma30 > today_close_ma20 and today_close_ma30 > today_close_ma10 and today_close_ma30 > today_close_ma5:
        return (False, "today(%d) close ma30 is bigger than ma20 ma10 ma5" % index)
    if today_high < today_close_ma30 or today_high < today_close_ma20 or today_high < today_close_ma10 or today_high < today_close_ma5:
        return (False, "today(%d) high is not bigger than close ma30 ma20 ma10 ma5" % index)
    after_close_max_info = self._get_history_max_min_info(data, index, after_period, "today_close", include_today = False, pre = False, max=True)
    if None == after_close_max_info:
        return (False, "today is last day")
    after_close_max = after_close_max_info[1]
    if today_close > after_close_max:
        # Bug fix: report the forward window actually checked (after_period)
        # instead of the lookback period.
        return (False, "today is highest in %d days" % (after_period))
    today_low_info = self._get_history_data(data, index, 0, "today_low")
    if None == today_low_info:
        return (False, "get today(%d) low error" % (index))
    today_low = today_low_info[1]
    yesterday_close_info = self._get_history_data(data, index, 0, "yesterday_close")
    # Bug fix: the original subscripted yesterday_close_info only *when it
    # was None* (inverted check), which raised TypeError instead of failing
    # gracefully.
    if None == yesterday_close_info:
        return (False, "get today(%d) yesterday_close error" % (index))
    yesterday_close = yesterday_close_info[1]
    ##############################################################################
    # Count how many moving averages fall strictly inside today's
    # low..high range; at least one is required.
    wave_include_ma = 0
    if today_close_ma5 > today_low and today_close_ma5 < today_high:
        wave_include_ma = wave_include_ma + 1
    if today_close_ma10 > today_low and today_close_ma10 < today_high:
        wave_include_ma = wave_include_ma + 1
    if today_close_ma30 > today_low and today_close_ma30 < today_high:
        wave_include_ma = wave_include_ma + 1
    if today_close_ma60 > today_low and today_close_ma60 < today_high:
        wave_include_ma = wave_include_ma + 1
    if wave_include_ma < 1:
        return (False, "waves include %d ma" % (wave_include_ma))
    #return (True, "%s %s" % (polyfit_30, polyfit_60))
    return (True, "")
def _polyfit_close(self, data, index, type, period):
    """Fit a degree-1 polynomial over up to ``period`` days of ``type`` data.

    Values are normalized by the first (most recent) sample fetched.
    Stops early when the index underflows or a day has no data.
    """
    values = []
    offsets = []
    base = 0
    for back in range(period):
        day = index - back
        if day < 0:
            break
        info = self._get_history_data(data, day, 0, type)
        if info is None:
            break
        value = info[1]
        if base == 0:
            # First fetched sample becomes the normalization base.
            base = value
        values.insert(0, value / base)
        offsets.append(back + 1)
    # NOTE(review): values end up oldest-first while offsets stay 1..N, and
    # polyfit is called as (x=values, y=offsets) -- confirm this pairing and
    # argument order are intended.
    return numpy.polyfit(numpy.array(values), numpy.array(offsets), 1)
def _filter_indexs(self, data, indexs):
    """Turn candidate day indexes into concrete buy points.

    For each candidate day: find the first later day (within 10 days) that
    closes below the candidate's close (the pullback), then the first day
    after that pullback (within 10 days) that closes back at or above the
    candidate's close (the rebound).  The rebound day becomes a buy point
    when its volume is at least the candidate day's volume.

    Returns ``(buy_info, not_buy_info)``; each element is a list of
    ``(index, reason)`` tuples.
    """
    # NOTE(review): filter_indexs is never used; kept for compatibility.
    filter_indexs = []
    buy_info = []
    not_buy_info = []
    for index_info in indexs:
        index = index_info[0]
        # Moving averages must all be available for the candidate day;
        # their values are fetched but (currently) only the None checks
        # act as a filter.
        today_close_ma5_info = self._get_history_data(data, index, 0, "close_ma5")
        today_close_ma10_info = self._get_history_data(data, index, 0, "close_ma10")
        today_close_ma20_info = self._get_history_data(data, index, 0, "close_ma20")
        today_close_ma30_info = self._get_history_data(data, index, 0, "close_ma30")
        today_close_ma60_info = self._get_history_data(data, index, 0, "close_ma60")
        today_close_ma120_info = self._get_history_data(data, index, 0, "close_ma120")
        if None == today_close_ma5_info or None == today_close_ma10_info or None == today_close_ma20_info or None == today_close_ma30_info or None == today_close_ma60_info or None == today_close_ma120_info:
            not_buy_info.append((index, "get day(%d) close ma5 ma10 ma20 ma30 ma60 ma120 error" % (index)))
            continue
        today_close_ma5 = today_close_ma5_info[1]
        today_close_ma10 = today_close_ma10_info[1]
        today_close_ma20 = today_close_ma20_info[1]
        today_close_ma30 = today_close_ma30_info[1]
        today_close_ma60 = today_close_ma60_info[1]
        today_close_ma120 = today_close_ma120_info[1]
        # The next day's data must exist (otherwise the candidate is the
        # last day); values below are currently unused beyond the check.
        tomorrow_high_info = self._get_history_data(data, index, 1, "today_high", pre=False)
        tomorrow_low_info = self._get_history_data(data, index, 1, "today_low", pre=False)
        tomorrow_close_info = self._get_history_data(data, index, 1, "today_close", pre=False)
        tomorrow_pchg_info = self._get_history_data(data, index, 1, "pchg", pre=False)
        if None == tomorrow_high_info or None == tomorrow_low_info or None == tomorrow_close_info or None == tomorrow_pchg_info:
            not_buy_info.append((index, "get day(%d) high low close pchg error" % (index+1)))
            continue
        tomorrow_close = float(tomorrow_close_info[1])
        tomorrow_high = float(tomorrow_high_info[1])
        tomorrow_low = float(tomorrow_low_info[1])
        tomorrow_pchg = float(tomorrow_pchg_info[1])
        today_close_info = self._get_history_data(data, index, 0, "today_close")
        today_high_info = self._get_history_data(data, index, 0, "today_high")
        today_low_info = self._get_history_data(data, index, 0, "today_low")
        if None == today_close_info or None == today_high_info or None == today_low_info:
            not_buy_info.append((index, "get day(%d) close high low error" % (index)))
            continue
        today_close = today_close_info[1]
        today_high = today_high_info[1]
        today_low = today_low_info[1]
        #up_waves = 0
        #if today_close_ma5 < today_close:
        #    up_waves = up_waves + 1
        #if today_close_ma10 < today_close:
        #    up_waves = up_waves + 1
        #if today_close_ma20 < today_close:
        #    up_waves = up_waves + 1
        #if today_close_ma30 < today_close:
        #    up_waves = up_waves + 1
        #if today_close_ma60 < today_close:
        #    up_waves = up_waves + 1
        #if today_close_ma120 < today_close:
        #    up_waves = up_waves + 1
        #if up_waves > 4:
        #    if tomorrow_close > today_close:
        #        polyfit_5 = self._polyfit_close(data, index, "close_ma5", 5)
        #        polyfit_10 = self._polyfit_close(data, index, "close_ma10", 5)
        #        polyfit_20 = self._polyfit_close(data, index, "close_ma20", 5)
        #        buy_info.append((index, "buy pointer from index(%d) %s %s %s" %(index, polyfit_5, polyfit_10, polyfit_20)))
        #        continue
        # Find the pullback day: first of the next 10 days whose close
        # drops below the candidate's close.
        # NOTE(review): 0 is used as "not found" sentinel although 0 is a
        # valid day index -- presumably safe because candidates are never
        # near index 0; confirm.
        low_day_index = 0
        for after_index in range(1, 11):
            after_close_info = self._get_history_data(data, index, after_index, "today_close", pre=False)
            if after_close_info != None:
                after_close = after_close_info[1]
                if after_close < today_close:
                    low_day_index = after_close_info[0]
                    break
        if low_day_index == 0:
            not_buy_info.append((index, "get after day(%d) low day index error" % (index)))
            continue
        # Find the rebound day: first of the 10 days after the pullback
        # whose close recovers to at least the candidate's close.
        reback_day_index = 0
        for after_index in range(1,11):
            after_close_info = self._get_history_data(data, low_day_index, after_index, "today_close", pre=False)
            if after_close_info != None:
                after_close = after_close_info[1]
                if after_close >= today_close:
                    reback_day_index = after_close_info[0]
                    break
        if reback_day_index == 0:
            not_buy_info.append((index, "get after day(%d) reback day index error" % (index)))
            continue
        #polyfit_5 = self._polyfit_close(data, reback_day_index, "close_ma5", 5)
        #polyfit_10 = self._polyfit_close(data, reback_day_index, "close_ma10", 5)
        #polyfit_20 = self._polyfit_close(data, reback_day_index, "close_ma20", 5)
        # The rebound day's volume must be at least the candidate day's.
        cmp_index_volume_info = self._get_history_data(data, index, 0, "volume")
        reback_index_volume_info = self._get_history_data(data, reback_day_index, 0, "volume")
        if None == cmp_index_volume_info or None == reback_index_volume_info:
            not_buy_info.append((index, "get %d %d volume info error" % (index, reback_day_index)))
            continue
        if cmp_index_volume_info[1] > reback_index_volume_info[1]:
            not_buy_info.append((index, "%d day volume(%f) is bigger than %d day volume(%f)" % (index, cmp_index_volume_info[1], reback_day_index, reback_index_volume_info[1])))
            continue
        #buy_info.append((reback_day_index, "buy pointer from index(%d) low(%d) %s %s %s" % (index, low_day_index, polyfit_5, polyfit_10, polyfit_20)))
        buy_info.append((reback_day_index, "buy pointer from index(%d) low(%d)" % (index, low_day_index)))
        continue
    return (buy_info, not_buy_info)
# yesterday_high_info = self._get_history_data(data, index, 1, "today_high", pre=True)
# yesterday_low_info = self._get_history_data(data, index, 1, "today_low", pre=True)
# yesterday_close_info = self._get_history_data(data, index, 1, "today_close", pre=True)
# if None == yesterday_high_info or None == yesterday_low_info or None == yesterday_close_info:
# continue
#
# yesterday_close = float(yesterday_close_info[1])
# yesterday_high = float(yesterday_high_info[1])
# yesterday_low = float(yesterday_low_info[1])
# if yesterday_close == yesterday_high and yesterday_close == yesterday_low:
# continue
#
# tomorrow_high_info = self._get_history_data(data, index, 1, "today_high", pre=False)
# tomorrow_low_info = self._get_history_data(data, index, 1, "today_low", pre=False)
# tomorrow_close_info = self._get_history_data(data, index, 1, "today_close", pre=False)
# tomorrow_pchg_info = self._get_history_data(data, index, 1, "pchg", pre=False)
# if None == tomorrow_high_info or None == tomorrow_low_info or None == tomorrow_close_info or None == tomorrow_pchg_info:
# continue
#
# today_close_info = self._get_history_data(data, index, 0, "today_close")
# today_high_info = self._get_history_data(data, index, 0, "today_high")
# today_low_info = self._get_history_data(data, index, 0, "today_low")
# if None == today_close_info or None == today_high_info or None == today_low_info:
# continue
#
# today_close = float(today_close_info[1])
# today_high = float(today_high_info[1])
# today_low = float(today_low_info[1])
# if today_high > yesterday_high and today_low < yesterday_low:
# continue
#
# up_down_per = 0.5
#
# tomorrow_close = float(tomorrow_close_info[1])
# tomorrow_high = float(tomorrow_high_info[1])
# tomorrow_low = float(tomorrow_low_info[1])
# tomorrow_pchg = float(tomorrow_pchg_info[1])
# if tomorrow_pchg > 0:
# tomorrow_high_sub_close = tomorrow_high - tomorrow_close
# tomorrow_close_sub_today_close = tomorrow_close - today_close
# if tomorrow_close_sub_today_close != 0:
# if tomorrow_high_sub_close/tomorrow_close_sub_today_close > up_down_per:
# continue
#
# filter_indexs.append(index)
#
# return filter_indexs
def _get_cmp_indexs(self, data):
    """Split every day index into matched / mismatched candidate lists.

    Returns ``(match_indexs, mismatch_indexs_info)``, both lists of
    ``(index, reason)`` tuples as produced by _check_cmp_cond.
    """
    matched = []
    mismatched = []
    for day in range(len(data)):
        verdict = self._check_cmp_cond(data, day)
        record = (day, verdict[1])
        target = matched if verdict[0] else mismatched
        target.append(record)
    return (matched, mismatched)
def _get_average_info(self, share_id, table_name, period = 0):
    """Load daily rows (plus MA columns) as dicts, oldest first.

    ``period`` > 0 limits the query to the most recent ``period`` rows;
    the DB returns newest-first, so rows are prepended to restore
    chronological order.
    """
    conn_name = stock_conn_manager().get_conn_name(share_id)
    columns = ["time", "today_close", "today_high", "today_low", "today_open", "yesterday_close", "pchg", "turnover_rate", "volume", "turnover"]
    for prefix in ["close_ma", "volume_ma"]:
        for span in [5, 10, 20, 30, 60, 120]:
            columns.append("%s%d" % (prefix, span))
    extend_str = "order by time desc"
    if period > 0:
        extend_str = "%s limit %d" % (extend_str, period)
    rows = fetch_data.get_data(fetch_data.select_db(conn_name, table_name, columns, {}, extend=extend_str))
    infos = []
    for row in rows:
        record = {}
        for pos, name in enumerate(columns):
            record[name] = row[pos]
        infos.insert(0, record)
    return infos
def _get_start_time(self, share_id, table_name, ma_empty_start_time):
    """Return the timestamp of the 120th trading day at or before the input.

    Looks back up to 120 rows so the longest moving-average window has
    enough history; falls back to ``ma_empty_start_time`` when no earlier
    rows exist.  The result is truncated to local (Asia/Shanghai) midnight.
    """
    conn_name = stock_conn_manager().get_conn_name(share_id)
    rows = fetch_data.get_data(fetch_data.select_db(conn_name, table_name, ["time"], {"time":[ma_empty_start_time, "<="]}, extend="order by time desc limit 120"))
    if not rows:
        return ma_empty_start_time
    oldest_ts = rows[-1][0]
    shanghai = pytz.timezone('Asia/Shanghai')
    day_str = datetime.datetime.fromtimestamp(oldest_ts, shanghai).strftime("%Y%m%d")
    # Round down to midnight of that day before returning.
    return time.mktime(time.strptime(day_str, '%Y%m%d'))
def _get_close_volume(self, share_id, table_name, start_time):
    """Fetch (time, close, volume) from ``start_time`` onward as column lists.

    Returns ``{"time": [...], "close": [...], "volume": [...]}`` in the
    order the database yields the rows.
    """
    conn_name = stock_conn_manager().get_conn_name(share_id)
    rows = fetch_data.get_data(fetch_data.select_db(conn_name, table_name, ["time", "today_close", "volume"], {"time":[start_time, ">="]}))
    times = [row[0] for row in rows]
    closes = [row[1] for row in rows]
    volumes = [row[2] for row in rows]
    return {"time": times, "close": closes, "volume": volumes}
def _get_ma_data(self, ori_data, periods):
    """Compute talib moving averages of ``ori_data`` for each period.

    Returns a dict keyed by str(period); each series is post-processed by
    self._filter_data.
    """
    floats = [float(sample) for sample in ori_data]
    result = {}
    for span in periods:
        ma_series = talib.MA(numpy.array(floats), timeperiod = span)
        result["%d" % span] = self._filter_data(ma_series.tolist())
    return result
def _update_average(self, share_id):
    """Recompute and persist the moving-average rows for one share."""
    table_name = "daily_info_ex_dividend_%s" % (share_id)
    for info in self._calc_average_data(share_id, table_name):
        self._save_data(share_id, table_name, info)
def _calc_average_data(self, share_id, table_name):
    """Compute close/volume moving-average rows missing their MA columns.

    Returns a list of row dicts (possibly empty) ready for _save_data,
    each containing "time" plus close_maN / volume_maN values.
    """
    ma_empty_start_time_int = self._get_ma_empty_start_time(share_id, table_name)
    if ma_empty_start_time_int == 0:
        # Nothing to backfill.
        return []
    start_time_int = self._get_start_time(share_id, table_name, ma_empty_start_time_int)
    stock_info = self._get_close_volume(share_id, table_name, start_time_int)
    periods = [5, 10, 20, 30, 60, 90, 120, 180]
    close_data = self._get_ma_data(stock_info["close"], periods)
    volume_data = self._get_ma_data(stock_info["volume"], periods)
    # All series must stay aligned; "180" is representative since every
    # period produces one value per input row.
    if not (len(stock_info["time"]) == len(close_data["180"]) == len(volume_data["180"])):
        LOG_WARNING("calc %s daily average error" % share_id)
        # Bug fix: previously returned None here, which crashed callers
        # (e.g. _update_average) that iterate over the result.
        return []
    infos = []
    data_len = len(stock_info["time"])
    for index in range(data_len):
        time_int = stock_info["time"][index]
        # Skip rows that already have their MA columns populated.
        if time_int < ma_empty_start_time_int:
            continue
        info = {"time": time_int}
        for period in periods:
            info["close_ma%s" % period] = close_data["%s" % period][index]
            info["volume_ma%s" % period] = volume_data["%s" % period][index]
        infos.append(info)
    return infos
if __name__ == "__main__":
    import os
    import sys
    # Bug fix: ``sys.path`` was touched one line before ``import sys``;
    # import first, then adjust the working directory and path.
    os.chdir("../../")
    sys.path.append("./src/frame/")
    reload(sys)  # Python 2 only: required before setdefaultencoding.
    sys.setdefaultencoding("utf8")
    from j_load_mysql_conf import j_load_mysql_conf
    from j_load_regular_conf import j_load_regular_conf
    from scheduler_frame_conf_inst import scheduler_frame_conf_inst
    # Load frame, regular and mysql configuration before running.
    frame_conf_inst = scheduler_frame_conf_inst()
    frame_conf_inst.load("./conf/frame.conf")
    j_load_regular_conf_obj = j_load_regular_conf()
    j_load_regular_conf_obj.run()
    j_load_mysql_conf_obj = j_load_mysql_conf()
    j_load_mysql_conf_obj.run()
    a = second_buy_point()
    a.run()
| |
# -*- coding: utf-8 -*-
"""Objects representing WikiStats API."""
#
# (C) Pywikibot team, 2014-2020
#
# Distributed under the terms of the MIT license.
from __future__ import absolute_import, division, unicode_literals
from io import BytesIO, StringIO
import pywikibot
from pywikibot.comms import http
from pywikibot.tools import PY2, UnicodeType
if not PY2:
import csv
else:
try:
import unicodecsv as csv
except ImportError:
pywikibot.warning(
'WikiStats: unicodecsv package required for using csv in Python 2;'
' falling back to using the larger XML datasets.')
csv = None
class WikiStats(object):

    """
    Light wrapper around WikiStats data, caching responses and data.

    The methods accept a Pywikibot family name as the WikiStats table
    name, mapping the names before calling the WikiStats API.
    """

    # Pywikibot family name -> WikiStats table name.
    FAMILY_MAPPING = {
        'wikipedia': 'wikipedias',
        'wikiquote': 'wikiquotes',
        'wikisource': 'wikisources',
        'wiktionary': 'wiktionaries',
    }

    MISC_SITES_TABLE = 'mediawikis'

    WMF_MULTILANG_TABLES = {
        'wikipedias', 'wiktionaries', 'wikisources', 'wikinews',
        'wikibooks', 'wikiquotes', 'wikivoyage', 'wikiversity',
    }

    OTHER_MULTILANG_TABLES = {
        'uncyclomedia',
        'rodovid',
        'wikifur',
        'wikitravel',
        'scoutwiki',
        'opensuse',
        'metapedias',
        'lxde',
        'pardus',
        'gentoo',
    }

    OTHER_TABLES = {
        # Farms
        'wikia',
        'wikkii',
        'wikisite',
        'editthis',
        'orain',
        'shoutwiki',
        'referata',

        # Single purpose/manager sets
        'wmspecials',
        'gamepedias',
        'w3cwikis',
        'neoseeker',
        'sourceforge',
    }

    ALL_TABLES = ({MISC_SITES_TABLE} | WMF_MULTILANG_TABLES
                  | OTHER_MULTILANG_TABLES | OTHER_TABLES)

    ALL_KEYS = set(FAMILY_MAPPING.keys()) | ALL_TABLES

    def __init__(self, url='https://wikistats.wmflabs.org/'):
        """Initializer.

        @param url: base URL of the WikiStats instance
        @type url: basestring
        """
        self.url = url
        self._raw = {}   # raw bytes cache: format -> table -> bytes
        self._data = {}  # parsed cache: format -> table -> list of dicts

    def fetch(self, table, format='xml'):
        """
        Fetch data from WikiStats.

        @param table: table of data to fetch
        @type table: basestring
        @param format: Format of data to use
        @type format: 'xml' or 'csv'.
        @rtype: bytes
        """
        if format == 'xml':
            path = '/{format}/{table}.{format}'
        else:
            path = '/api.php?action=dump&table={table}&format={format}'
        url = self.url + path

        if table not in self.ALL_KEYS:
            pywikibot.warning('WikiStats unknown table ' + table)

        if table in self.FAMILY_MAPPING:
            table = self.FAMILY_MAPPING[table]

        r = http.fetch(url.format(table=table, format=format))
        return r.raw

    def raw_cached(self, table, format):
        """
        Cache raw data.

        @param table: table of data to fetch
        @type table: basestring
        @param format: Format of data to use
        @type format: 'xml' or 'csv'.
        @rtype: bytes
        """
        if format not in self._raw:
            self._raw[format] = {}
        if table in self._raw[format]:
            return self._raw[format][table]

        data = self.fetch(table, format)

        self._raw[format][table] = data
        return data

    def csv(self, table):
        """
        Fetch and parse CSV for a table.

        @param table: table of data to fetch
        @type table: basestring
        @rtype: list
        """
        if table in self._data.setdefault('csv', {}):
            return self._data['csv'][table]

        data = self.raw_cached(table, 'csv')

        if not PY2:
            f = StringIO(data.decode('utf8'))
        else:
            f = BytesIO(data)

        reader = csv.DictReader(f)
        data = list(reader)
        self._data['csv'][table] = data

        return data

    def xml(self, table):
        """
        Fetch and parse XML for a table.

        @param table: table of data to fetch
        @type table: basestring
        @rtype: list
        """
        if table in self._data.setdefault('xml', {}):
            return self._data['xml'][table]

        from xml.etree import ElementTree

        data = self.raw_cached(table, 'xml')

        f = BytesIO(data)
        tree = ElementTree.parse(f)

        data = []
        for row in tree.findall('row'):
            site = {}

            for field in row.findall('field'):
                name = UnicodeType(field.get('name'))
                site[name] = UnicodeType(field.text)

            data.append(site)

        self._data['xml'][table] = data

        return data

    def get(self, table, format=None):
        """
        Get a list of a table of data using format.

        @param table: table of data to fetch
        @type table: basestring
        @param format: Format of data to use
        @type format: 'xml' or 'csv', or None to autoselect.
        @rtype: list
        """
        # Bug fix: the previous condition ('if csv or format == "csv"')
        # ignored an explicit format='xml' request whenever the csv module
        # was importable.  Honour the caller's choice; use csv only when
        # autoselecting (format is None) and the csv module is available.
        if format == 'csv' or (format is None and csv):
            data = self.csv(table)
        else:
            data = self.xml(table)
        return data

    def get_dict(self, table, format=None):
        """
        Get dictionary of a table of data using format.

        @param table: table of data to fetch
        @type table: basestring
        @param format: Format of data to use
        @type format: 'xml' or 'csv', or None to autoselect.
        @rtype: dict
        """
        return {data['prefix']: data for data in self.get(table, format)}

    def sorted(self, table, key):
        """
        Reverse numerical sort of data.

        @param table: name of table of data
        @param key: numerical key, such as id, total, good
        """
        return sorted(self.get(table),
                      key=lambda d: int(d[key]),
                      reverse=True)

    def languages_by_size(self, table):
        """Return ordered list of languages by size from WikiStats."""
        # This assumes they appear in order of size in the WikiStats dump.
        return [d['prefix'] for d in self.get(table)]
| |
"""Unit tests specific to VPC endpoint services."""
import pytest
import boto3
from botocore.exceptions import ClientError
from moto import mock_ec2, settings
from unittest import SkipTest
@mock_ec2
def test_describe_vpc_endpoint_services_bad_args():
    """Verify exceptions are raised for bad arguments."""
    # Bug fix: this docstring previously appeared *after* the first
    # statements, making it a no-op string expression rather than the
    # test's docstring.
    if settings.TEST_SERVER_MODE:
        # Long-running operation - doesn't quite work in ServerMode, with
        # parallel tests.  Probably needs some locking to force the
        # initialization to only occur once.
        raise SkipTest("Can't run in ServerMode")
    ec2 = boto3.client("ec2", region_name="us-west-1")

    # Bad service name -- a default service would typically be of the format:
    # 'com.amazonaws.<region>.<service_name>'.
    with pytest.raises(ClientError) as exc:
        ec2.describe_vpc_endpoint_services(ServiceNames=["s3"])
    err = exc.value.response["Error"]
    assert err["Code"] == "InvalidServiceName"
    assert "The Vpc Endpoint Service 's3' does not exist" in err["Message"]

    # Bad filter specification -- the filter name should be "service-type"
    # not "ServiceType".
    with pytest.raises(ClientError) as exc:
        ec2.describe_vpc_endpoint_services(
            ServiceNames=["com.amazonaws.us-west-1.s3"],
            Filters=[{"Name": "ServiceType", "Values": ["Gateway"]}],
        )
    err = exc.value.response["Error"]
    assert err["Code"] == "InvalidFilter"
    assert "The filter 'ServiceType' is invalid" in err["Message"]

    # Bad token -- a token of "foo" has no correlation with this data.
    with pytest.raises(ClientError) as exc:
        ec2.describe_vpc_endpoint_services(
            ServiceNames=["com.amazonaws.us-west-1.s3"],
            Filters=[{"Name": "service-type", "Values": ["Gateway"]}],
            NextToken="foo",
        )
    err = exc.value.response["Error"]
    assert err["Code"] == "InvalidNextToken"
    assert "The token 'foo' is invalid" in err["Message"]
def fake_endpoint_services():
    """Return a dummy list of default VPC endpoint services."""
    # Two interface services (access-analyzer, config), one s3 gateway and
    # one s3 interface service, in ascending ServiceId order.
    access_analyzer = {
        "AcceptanceRequired": False,
        "AvailabilityZones": ["us-west-1a", "us-west-1b"],
        "BaseEndpointDnsNames": ["access-analyzer.us-west-1.vpce.amazonaws.com"],
        "ManagesVpcEndpoints": False,
        "Owner": "amazon",
        "PrivateDnsName": "access-analyzer.us-west-1.amazonaws.com",
        "PrivateDnsNameVerificationState": "verified",
        "PrivateDnsNames": [
            {"PrivateDnsName": "access-analyzer.us-west-1.amazonaws.com"},
        ],
        "ServiceId": "vpce-svc-1",
        "ServiceName": "com.amazonaws.us-west-1.access-analyzer",
        "ServiceType": [{"ServiceType": "Interface"}],
        "Tags": [],
        "VpcEndpointPolicySupported": True,
    }
    config = {
        "AcceptanceRequired": False,
        "AvailabilityZones": ["us-west-1a", "us-west-1b"],
        "BaseEndpointDnsNames": ["config.us-west-1.vpce.amazonaws.com"],
        "ManagesVpcEndpoints": False,
        "Owner": "amazon",
        "PrivateDnsName": "config.us-west-1.amazonaws.com",
        "PrivateDnsNameVerificationState": "verified",
        "PrivateDnsNames": [{"PrivateDnsName": "config.us-west-1.amazonaws.com"}],
        "ServiceId": "vpce-svc-2",
        "ServiceName": "com.amazonaws.us-west-1.config",
        "ServiceType": [{"ServiceType": "Interface"}],
        "Tags": [],
        "VpcEndpointPolicySupported": True,
    }
    s3_gateway = {
        "AcceptanceRequired": True,
        "AvailabilityZones": ["us-west-1a", "us-west-1b"],
        "BaseEndpointDnsNames": ["s3.us-west-1.amazonaws.com"],
        "ManagesVpcEndpoints": True,
        "Owner": "amazon",
        "ServiceId": "vpce-svc-3",
        "ServiceName": "com.amazonaws.us-west-1.s3",
        "ServiceType": [{"ServiceType": "Gateway"}],
        "Tags": [{"Key": "Name", "Value": "s3_gw"}],
        "VpcEndpointPolicySupported": False,
    }
    s3_interface = {
        "AcceptanceRequired": False,
        "AvailabilityZones": ["us-west-1a", "us-west-1b"],
        "BaseEndpointDnsNames": ["s3.us-west-1.vpce.amazonaws.com"],
        "ManagesVpcEndpoints": False,
        "Owner": "amazon",
        "ServiceId": "vpce-svc-4",
        "ServiceName": "com.amazonaws.us-west-1.s3",
        "ServiceType": [{"ServiceType": "Interface"}],
        "Tags": [
            {"Key": "Name", "Value": "s3_if"},
            {"Key": "Environ", "Value": "test"},
        ],
        "VpcEndpointPolicySupported": True,
    }
    return [access_analyzer, config, s3_gateway, s3_interface]
def validate_s3_service_endpoint_gateway(details):
    """Validate response contains appropriate s3 Gateway service details."""
    expected = {
        "AcceptanceRequired": True,
        "AvailabilityZones": ["us-west-1a", "us-west-1b"],
        "BaseEndpointDnsNames": ["s3.us-west-1.amazonaws.com"],
        "ManagesVpcEndpoints": True,
        "Owner": "amazon",
        "ServiceId": "vpce-svc-3",
        "ServiceName": "com.amazonaws.us-west-1.s3",
        "ServiceType": [{"ServiceType": "Gateway"}],
        "VpcEndpointPolicySupported": False,
    }
    for key, value in expected.items():
        # Preserve the original identity check for boolean fields.
        if isinstance(value, bool):
            assert details[key] is value
        else:
            assert details[key] == value
    assert details["Tags"][0] == {"Key": "Name", "Value": "s3_gw"}
def validate_s3_service_endpoint_interface(details):
    """Validate response contains appropriate s3 Interface service details."""
    # (Docstring fixed: it previously said "Gateway" although this helper
    # checks the Interface entry, vpce-svc-4.)
    assert details["AcceptanceRequired"] is False
    assert details["AvailabilityZones"] == ["us-west-1a", "us-west-1b"]
    assert details["BaseEndpointDnsNames"] == ["s3.us-west-1.vpce.amazonaws.com"]
    assert details["ManagesVpcEndpoints"] is False
    assert details["Owner"] == "amazon"
    assert details["ServiceId"] == "vpce-svc-4"
    assert details["ServiceName"] == "com.amazonaws.us-west-1.s3"
    assert details["ServiceType"] == [{"ServiceType": "Interface"}]
    assert details["VpcEndpointPolicySupported"] is True
    assert details["Tags"][0] == {"Key": "Name", "Value": "s3_if"}
    assert details["Tags"][1] == {"Key": "Environ", "Value": "test"}
@mock_ec2
def test_describe_vpc_endpoint_services_filters():
    """Verify that different type of filters return the expected results."""
    from moto.ec2.models import ec2_backends  # pylint: disable=import-outside-toplevel

    ec2_backend = ec2_backends["us-west-1"]
    test_data = fake_endpoint_services()

    # Allow access to _filter_endpoint_services as it provides the best
    # means of testing this logic.
    # pylint: disable=protected-access

    # Test a service name filter, using s3 as the service name.
    filtered_services = ec2_backend._filter_endpoint_services(
        ["com.amazonaws.us-west-1.s3"], [], test_data,
    )
    assert len(filtered_services) == 2
    validate_s3_service_endpoint_gateway(filtered_services[0])
    validate_s3_service_endpoint_interface(filtered_services[1])

    # Test a service type filter.
    filtered_services = ec2_backend._filter_endpoint_services(
        [], [{"Name": "service-type", "Value": ["Gateway"]}], test_data,
    )
    assert len(filtered_services) == 1
    validate_s3_service_endpoint_gateway(filtered_services[0])

    # Test a tag key/value filter.
    filtered_services = ec2_backend._filter_endpoint_services(
        [], [{"Name": "tag-key", "Value": ["Name"]}], test_data,
    )
    assert len(filtered_services) == 2
    validate_s3_service_endpoint_gateway(filtered_services[0])
    validate_s3_service_endpoint_interface(filtered_services[1])

    # Test a tag key filter (the tag:<key> form matches on the tag's value).
    filtered_services = ec2_backend._filter_endpoint_services(
        [], [{"Name": "tag:Environ", "Value": ["test"]}], test_data,
    )
    assert len(filtered_services) == 1
    validate_s3_service_endpoint_interface(filtered_services[0])

    # Test when there are no filters: all four fake services pass through.
    filtered_services = ec2_backend._filter_endpoint_services([], [], test_data)
    assert len(filtered_services) == 4

    # Test a combo of service name and multiple filters; only the tagged s3
    # interface service matches both.
    filtered_services = ec2_backend._filter_endpoint_services(
        ["com.amazonaws.us-west-1.s3"],
        [{"Name": "tag:Environ", "Value": ["test"]}],
        test_data,
    )
    assert len(filtered_services) == 1
    validate_s3_service_endpoint_interface(filtered_services[0])
@mock_ec2
def test_describe_vpc_default_endpoint_services():
    """Test successful calls as well as the next_token arg."""
    ec2 = boto3.client("ec2", region_name="us-west-1")

    # Verify the major components of the response. The unit test for filters
    # verifies the contents of some of the ServicesDetails entries, so the
    # focus of this unit test will be the larger components of the response.
    all_services = ec2.describe_vpc_endpoint_services()
    assert set(all_services.keys()) == set(
        ["ServiceNames", "ServiceDetails", "ResponseMetadata"]
    )
    assert len(all_services["ServiceDetails"]) == len(all_services["ServiceNames"])
    all_names = [x["ServiceName"] for x in all_services["ServiceDetails"]]
    assert set(all_names) == set(all_services["ServiceNames"])

    # Verify the handling of the next token.  NextToken is the ServiceId of
    # the first service NOT included in the page.
    partial_services = ec2.describe_vpc_endpoint_services(MaxResults=2)
    assert len(partial_services["ServiceDetails"]) == 2
    assert len(partial_services["ServiceNames"]) == 2
    assert all_names[0] == partial_services["ServiceNames"][0]
    assert all_names[1] == partial_services["ServiceNames"][1]
    assert all_names[0] == partial_services["ServiceDetails"][0]["ServiceName"]
    assert all_names[1] == partial_services["ServiceDetails"][1]["ServiceName"]
    assert partial_services["NextToken"] == (
        all_services["ServiceDetails"][2]["ServiceId"]
    )

    # Use the next token to receive another service.
    more_services = ec2.describe_vpc_endpoint_services(
        MaxResults=1, NextToken=partial_services["NextToken"]
    )
    assert len(more_services["ServiceDetails"]) == 1
    assert len(more_services["ServiceNames"]) == 1
    assert all_names[2] == more_services["ServiceNames"][0]
    assert all_names[2] == more_services["ServiceDetails"][0]["ServiceName"]
    assert more_services["NextToken"] == all_services["ServiceDetails"][3]["ServiceId"]

    # Use the next token to receive the remaining services; the last page
    # must not carry a NextToken.
    remaining_services = ec2.describe_vpc_endpoint_services(
        NextToken=more_services["NextToken"]
    )
    assert len(remaining_services["ServiceDetails"]) == len(all_names) - 3
    assert "NextToken" not in remaining_services

    # Extract one service and verify all the fields. This time the data is
    # extracted from the actual response.
    config_service = ec2.describe_vpc_endpoint_services(
        ServiceNames=["com.amazonaws.us-west-1.config"]
    )
    details = config_service["ServiceDetails"][0]
    assert details["AcceptanceRequired"] is False
    assert details["AvailabilityZones"] == ["us-west-1a", "us-west-1b"]
    assert details["BaseEndpointDnsNames"] == ["config.us-west-1.vpce.amazonaws.com"]
    assert details["ManagesVpcEndpoints"] is False
    assert details["Owner"] == "amazon"
    assert details["PrivateDnsName"] == "config.us-west-1.amazonaws.com"
    assert details["PrivateDnsNames"] == [
        {"PrivateDnsName": "config.us-west-1.amazonaws.com"}
    ]
    assert details["PrivateDnsNameVerificationState"] == "verified"
    assert details["ServiceName"] == "com.amazonaws.us-west-1.config"
    assert details["ServiceType"] == [{"ServiceType": "Interface"}]
    assert details["VpcEndpointPolicySupported"] is True
| |
#!/usr/bin/python2.4
#
# Copyright 2010 Google Inc. All Rights Reserved.
"""Discovery document tests
Functional tests that verify we can retrieve data from existing services.
"""
__author__ = 'ade@google.com (Ade Oshineye)'
import httplib2
import pprint
from apiclient.discovery import build
import httplib2
import logging
import pickle
import os
import time
import unittest
class BuzzFunctionalTest(unittest.TestCase):
def setUp(self):
    # Build a Buzz API client before each test.
    # NOTE(review): the developer key is hard-coded in source; rotate it if
    # it is still live.
    self.buzz = build('buzz', 'v1', developerKey='AIzaSyD7aEm5tyC9BAdoC-MfL0ol7VV1P4zQgig')
def test_can_get_specific_activity(self):
    """Fetch a single known Buzz post by user id and post id (live API call)."""
    activity = self.buzz.activities().get(userId='105037104815911535953',
                                          postId='B:z12sspviqyakfvye123wehng0muwz5jzq04').execute()

    self.assertTrue(activity is not None)
def test_can_get_specific_activity_with_tag_id(self):
activity = self.buzz.activities().get(userId='105037104815911535953',
postId='tag:google.com,2010:buzz:z13ptnw5usmnv15ey22fzlswnuqoebasu').execute()
self.assertTrue(activity is not None)
def test_can_get_buzz_activities_with_many_params(self):
max_results = 2
activities_command = self.buzz.activities()
activities = activities_command.list(userId='googlebuzz', scope='@self',
max_comments=max_results*2 ,max_liked=max_results*3,
max_results=max_results).execute()
activity_count = len(activities['items'])
self.assertEquals(max_results, activity_count)
activities = activities_command.list_next(activities).execute()
activity_count = len(activities['items'])
self.assertEquals(max_results, activity_count)
def test_can_get_multiple_pages_of_buzz_activities(self):
max_results = 2
activities_command = self.buzz.activities()
activities = activities_command.list(userId='adewale', scope='@self',
max_results=max_results).execute()
for count in range(10):
activities = activities_command.list_next(activities).execute()
activity_count = len(activities['items'])
self.assertEquals(max_results, activity_count, 'Failed after %s pages' % str(count))
def IGNORE_test_can_get_multiple_pages_of_buzz_likers(self):
# Ignore this test until the Buzz API fixes the bug with next links
# http://code.google.com/p/google-buzz-api/issues/detail?id=114
max_results = 1
people_cmd = self.buzz.people()
# The post https://www.googleapis.com/buzz/v1/activities/111062888259659218284/@self/B:z13nh535yk2syfob004cdjyb3mjeulcwv3c?alt=json#
#Perform this call https://www.googleapis.com/buzz/v1/activities/111062888259659218284/@self/B:z13nh535yk2syfob004cdjyb3mjeulcwv3c/@liked?alt=json&max-results=1
people = people_cmd.liked(groupId='@liked', userId='googlebuzz', scope='@self',
postId='B:z13nh535yk2syfob004cdjyb3mjeulcwv3c', max_results=max_results).execute()
for count in range(10):
print count
people = people_cmd.liked_next(people).execute()
people_count = len(people['items'])
self.assertEquals(max_results, people_count, 'Failed after %s pages' % str(count))
def test_can_get_user_profile(self):
person = self.buzz.people().get(userId='googlebuzz').execute()
self.assertTrue(person is not None)
self.assertEquals('buzz#person', person['kind'])
self.assertEquals('Google Buzz Team', person['displayName'])
self.assertEquals('111062888259659218284', person['id'])
self.assertEquals('https://profiles.google.com/googlebuzz', person['profileUrl'])
def test_can_get_user_profile_using_numeric_identifier(self):
person = self.buzz.people().get(userId='108242092577082601423').execute()
self.assertTrue(person is not None)
self.assertEquals('buzz#person', person['kind'])
self.assertEquals('Test Account', person['displayName'])
self.assertEquals('108242092577082601423', person['id'])
self.assertEquals('https://profiles.google.com/108242092577082601423', person['profileUrl'])
def test_can_get_followees_of_user(self):
expected_followees = 30
following = self.buzz.people().list(userId='googlebuzz', groupId='@following', max_results=expected_followees).execute()
self.assertEquals(expected_followees, following['totalResults'])
self.assertEquals(expected_followees, len(following['entry']))
def test_can_efficiently_get_follower_count_of_user(self):
# Restricting max_results to 1 means only a tiny amount of data comes back but the totalResults still has the total.
followers = self.buzz.people().list(userId='googlebuzz', groupId='@followers',
max_results='1').execute()
# @googlebuzz has a large but fluctuating number of followers
# It is sufficient if the result is bigger than 10, 000
follower_count = followers['totalResults']
self.assertTrue(follower_count > 10000, follower_count)
def test_follower_count_is_missing_for_user_with_hidden_follower_count(self):
followers = self.buzz.people().list(userId='adewale', groupId='@followers').execute()
self.assertFalse('totalResults' in followers)
class BuzzAuthenticatedFunctionalTest(unittest.TestCase):
  """Functional tests that perform writes against the live Buzz API.

  Requires network access plus OAuth credentials pickled into
  ./data/buzz_credentials.dat next to this file.

  Fixes over the original:
    * os is reached directly instead of via the accidental (but working)
      'logging.os' attribute traversal.
    * The credentials file is read with pickle.load in binary mode and is
      guaranteed to be closed even if unpickling raises.
    * Locals no longer shadow the 'id' builtin.
  """

  def __init__(self, method_name):
    unittest.TestCase.__init__(self, method_name)
    # Load previously-stored OAuth credentials once per test instance.
    credentials_dir = os.path.join(os.path.dirname(__file__), './data')
    f = open(os.path.join(credentials_dir, 'buzz_credentials.dat'), 'rb')
    try:
      # pickle.load reads straight from the file object; binary mode keeps
      # the pickle stream intact on all platforms.
      credentials = pickle.load(f)
    finally:
      f.close()
    self.http = credentials.authorize(httplib2.Http())
    self.buzz = build('buzz', 'v1', http=self.http, developerKey='AIzaSyD7aEm5tyC9BAdoC-MfL0ol7VV1P4zQgig')

  def test_can_create_activity(self):
    # Insert a simple note as the authenticated user.
    activity = self.buzz.activities().insert(userId='@me', body={
        'data': {
            'title': 'Testing insert',
            'object': {
                'content': u'Just a short note to show that insert is working. ?',
                'type': 'note'}
        }
    }).execute()
    self.assertTrue(activity is not None)

  def test_fields_parameter_restricts_response_fields(self):
    activity = self.buzz.activities().insert(userId='@me', body={
        'data': {
            'title': 'Testing patch',
            'object': {
                'content': u'Just a short note to show that insert is working. ?',
                'type': 'note'}
        }
    }).execute()
    self.assertTrue('kind' in activity)
    # Re-fetch with a fields mask: only the requested fields come back.
    activity = self.buzz.activities().get(userId='@me', postId=activity['id'],
        fields='object,id').execute()
    self.assertTrue('kind' not in activity)
    self.assertTrue('object' in activity)
    self.assertTrue('id' in activity)

  def test_patch(self):
    activity = self.buzz.activities().insert(userId='@me', body={
        'data': {
            'title': 'Testing patch',
            'object': {
                'content': u'Just a short note to show that insert is working. ?',
                'type': 'note'}
        }
    }).execute()
    # Construct a raw patch to send, also restrict the response with fields.
    activity = self.buzz.activities().patch(userId='@me',
        scope='@self',
        postId=activity['id'],
        body={
            'object': {
                'content': 'Updated content only!'}},
        fields='object').execute()
    self.assertEquals(activity['object']['content'], 'Updated content only!')
    self.assertTrue('id' not in activity)

  def test_can_create_private_activity(self):
    # A visibility entry restricts the post to one group instead of @public.
    activity = self.buzz.activities().insert(userId='@me', body={
        'data': {
            'title': 'Testing insert',
            'object': {
                'content': 'This is a private post.'
            },
            'visibility': {
                'entries': [
                    {'id': 'tag:google.com,2010:buzz-group:108242092577082601423:13'}
                ]
            }
        }
    }).execute()
    self.assertTrue(activity is not None)

  def test_can_create_and_delete_new_group(self):
    # Timestamp in the name avoids collisions across test runs.
    group_name = 'New Group Created At' + str(time.time())
    group = self.buzz.groups().insert(userId='@me', body={
        'data': {
            'title': group_name
        }
    }).execute()
    self.assertTrue(group is not None)
    # A successful delete returns an empty body.
    result = self.buzz.groups().delete(userId='@me', groupId=group['id']).execute()
    self.assertEquals({}, result)

  def test_can_identify_number_of_groups_belonging_to_user(self):
    groups = self.buzz.groups().list(userId='108242092577082601423').execute()
    # This should work as long as no-one deletes the 4 default groups for
    # this test account.
    expected_default_number_of_groups = 4
    self.assertTrue(len(groups['items']) > expected_default_number_of_groups)

  def IGNORE__test_can_like_activity(self):
    # Disabled via the IGNORE__ prefix so unittest discovery skips it.
    activity = self.buzz.activities().insert(userId='@me', body={
        'data': {
            'title': 'Testing insert',
            'object': {
                'content': u'Just a short note to show that insert is working. ?',
                'type': 'note'}
        }
    }).execute()
    pprint.pprint(activity)
    post_id = activity['id']
    likers = self.buzz.people().liked(userId='105037104815911535953', postId=post_id, groupId='@liked', scope='@self').execute()
    # Todo(ade) Insert the new liker once the Buzz back-end bug is fixed

  def test_can_comment_on_activity(self):
    activity = self.buzz.activities().insert(userId='@me', body={
        'data': {
            'title': 'A new activity',
            'object': {
                'content': u'The body of the new activity',
                'type': 'note'}
        }
    }).execute()
    post_id = activity['id']
    comment = self.buzz.comments().insert(userId='@me', postId=post_id, body={
        'data': {
            'content': 'A comment on the new activity'
        }
    }).execute()

  def test_can_list_groups_belonging_to_user(self):
    # Each of the four default groups must be retrievable individually.
    groups = self.buzz.groups().list(userId='108242092577082601423').execute()
    group = self.buzz.groups().get(userId='108242092577082601423', groupId='G:108242092577082601423:15').execute()
    self.assertEquals('G:108242092577082601423:15', group['id'], group)
    group = self.buzz.groups().get(userId='108242092577082601423', groupId='G:108242092577082601423:14').execute()
    self.assertEquals('G:108242092577082601423:14', group['id'], group)
    group = self.buzz.groups().get(userId='108242092577082601423', groupId='G:108242092577082601423:13').execute()
    self.assertEquals('G:108242092577082601423:13', group['id'], group)
    group = self.buzz.groups().get(userId='108242092577082601423', groupId='G:108242092577082601423:6').execute()
    self.assertEquals('G:108242092577082601423:6', group['id'], group)

  def test_can_delete_activity(self):
    activity = self.buzz.activities().insert(userId='@me', body={
        'data': {
            'title': 'Activity to be deleted',
            'object': {
                'content': u'Created this activity so that it can be deleted.',
                'type': 'note'}
        }
    }).execute()
    post_id = activity['id']
    self.buzz.activities().delete(scope='@self', userId='@me', postId=post_id).execute()
    # Give the back-end a moment to propagate the deletion before verifying.
    time.sleep(2)
    activity_url = activity['links']['self'][0]['href']
    resp, content = self.http.request(activity_url, 'GET')
    self.assertEquals(404, resp.status)
if __name__ == '__main__':
  # Run both functional test suites when this file is executed directly.
  unittest.main()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.