repo_name stringlengths 5 100 | path stringlengths 4 294 | copies stringclasses 990 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
mskala/noxcf-gimp | plug-ins/pygimp/plug-ins/gradients-save-as-css.py | 16 | 3784 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# Allows saving (TODO: and loading) CSS gradient files
# Copyright (C) 2011 João S. O. Bueno <gwidion@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Currently this exports all color segments as RGB linear centered segments.
# TODO: Respect gradient alpha, off-center segments, different blending
# functions and HSV colors
from gimpfu import *
gettext.install("gimp20-python", gimp.locale_directory, unicode=True)
w3c_template = """background-image: linear-gradient(top, %s);\n"""
moz_template = """background-image: -moz-linear-gradient(center top, %s);\n"""
webkit_template = """background-image: -webkit-gradient(linear, """ \
"""left top, left bottom, %s);\n"""
color_to_html = lambda c: "rgb(%d,%d,%d)" % tuple(c)[:3]
def format_text(text):
    """Soft-wrap a comma-separated CSS declaration at roughly 77 columns.

    The text is split on commas; whenever appending the next token would
    push the current physical line past 77 characters, the token is moved
    to a new line indented by four spaces.  The commas removed by the
    split are re-inserted by the final join.
    """
    counter = 0
    new_text = []
    for token in text.split(","):
        if counter + len(token) > 77:
            # Start a new physical line, indented four spaces.
            # (The original also set counter = 4 here, but that value was
            # always overwritten by the branch below, so it is dropped.)
            token = "\n    " + token
        new_text.append(token)
        if "\n" in token:
            # Width consumed so far: length of the last physical line of
            # this token, plus 1 for the comma join() will re-insert.
            counter = len(token.rsplit("\n")[-1]) + 1
        else:
            counter += len(token) + 1
    return ",".join(new_text)
def gradient_css_save(gradient, file_name):
    """Export a GIMP gradient to *file_name* as CSS linear-gradient rules.

    Writes three declarations: the W3C syntax, the -moz- prefixed syntax
    and the old -webkit-gradient() syntax.  Alpha is ignored (see the
    TODO at the top of the file).
    """
    stops = []
    # -webkit-gradient() uses a different stop syntax, so build it in parallel.
    wk_stops = []
    n_segments = pdb.gimp_gradient_get_number_of_segments(gradient)
    # Track the previous segment's right stop so we can skip a duplicate
    # left stop when adjacent segments share a boundary color/position.
    last_stop = None
    for index in xrange(n_segments):
        lcolor, lopacity = pdb.gimp_gradient_segment_get_left_color(
            gradient,
            index)
        rcolor, ropacity = pdb.gimp_gradient_segment_get_right_color(
            gradient,
            index)
        lpos = pdb.gimp_gradient_segment_get_left_pos(gradient, index)
        rpos = pdb.gimp_gradient_segment_get_right_pos(gradient, index)
        # W3C/moz stop: "rgb(r,g,b) NN%"; positions are fractions in [0, 1].
        lstop = color_to_html(lcolor) + " %d%%" % int(100 * lpos)
        wk_lstop = "color-stop(%.03f, %s)" % (lpos, color_to_html(lcolor))
        if lstop != last_stop:
            stops.append(lstop)
            wk_stops.append(wk_lstop)
        rstop = color_to_html(rcolor) + " %d%%" % int(100 * rpos)
        wk_rstop = "color-stop(%.03f, %s)" % (rpos, color_to_html(rcolor))
        stops.append(rstop)
        wk_stops.append(wk_rstop)
        last_stop = rstop
    final_text = w3c_template % ", ".join(stops)
    final_text += moz_template % ",".join(stops)
    final_text += webkit_template % ",".join(wk_stops)
    with open(file_name, "wt") as file_:
        file_.write(format_text(final_text))
# Register the plug-in with GIMP's procedural database (PDB).
# NOTE(review): the blurb/help strings below look copied from the
# "palette from gradient" plug-in -- confirm they should describe CSS export.
register(
    "gradient-save-as-css",
    "Creates a new palette from a given gradient",
    "palette_from_gradient (gradient, number, segment_colors) -> None",
    "Joao S. O. Bueno",
    "(c) GPL V3.0 or later",
    "2011",
    "Save as CSS...",
    "",
    [
        (PF_GRADIENT, "gradient", N_("Gradient to use"), ""),
        (PF_FILE, "file_name", N_("File Name"), ""),
    ],
    [],
    gradient_css_save,
    menu="<Gradients>",
    domain=("gimp20-python", gimp.locale_directory)
)
main() | gpl-3.0 |
sergei-maertens/django | django/contrib/staticfiles/management/commands/findstatic.py | 106 | 1745 | from __future__ import unicode_literals
import os
from django.contrib.staticfiles import finders
from django.core.management.base import LabelCommand
from django.utils.encoding import force_text
class Command(LabelCommand):
    """Management command: print where the static-file finders locate files."""

    help = "Finds the absolute paths for the given static file(s)."
    label = 'staticfile'

    def add_arguments(self, parser):
        """Add the --first flag on top of LabelCommand's arguments."""
        super(Command, self).add_arguments(parser)
        parser.add_argument(
            '--first', action='store_false', dest='all',
            default=True,
            help="Only return the first match for each static file.",
        )

    def handle_label(self, path, **options):
        """Resolve one static-file path and return a human-readable report.

        With verbosity >= 2 the searched locations are appended; with
        verbosity 0 only the raw resolved paths are returned.
        """
        verbosity = options['verbosity']
        result = finders.find(path, all=options['all'])
        path = force_text(path)
        if verbosity >= 2:
            searched_locations = (
                "\nLooking in the following locations:\n %s" %
                "\n ".join(force_text(location) for location in finders.searched_locations)
            )
        else:
            searched_locations = ''
        if result:
            # find() returns a single path unless all=True; normalize to a list.
            if not isinstance(result, (list, tuple)):
                result = [result]
            result = (force_text(os.path.realpath(path)) for path in result)
            if verbosity >= 1:
                file_list = '\n '.join(result)
                return ("Found '%s' here:\n %s%s" %
                        (path, file_list, searched_locations))
            else:
                return '\n'.join(result)
        else:
            message = ["No matching file found for '%s'." % path]
            if verbosity >= 2:
                message.append(searched_locations)
            # Report failure on stderr rather than raising; implicit None return.
            if verbosity >= 1:
                self.stderr.write('\n'.join(message))
| bsd-3-clause |
farseerfc/jgments | lib/pygments-1.2.2-patched/pygments/lexers/__init__.py | 3 | 7343 | # -*- coding: utf-8 -*-
"""
pygments.lexers
~~~~~~~~~~~~~~~
Pygments lexers.
:copyright: Copyright 2006-2010 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import sys
import fnmatch
import types
from os.path import basename
try:
set
except NameError:
from sets import Set as set
from pygments.lexers._mapping import LEXERS
from pygments.plugin import find_plugin_lexers
from pygments.util import ClassNotFound, bytes
__all__ = ['get_lexer_by_name', 'get_lexer_for_filename', 'find_lexer_class',
'guess_lexer'] + LEXERS.keys()
_lexer_cache = {}
def _load_lexers(module_name):
"""
Load a lexer (and all others in the module too).
"""
mod = __import__(module_name, None, None, ['__all__'])
for lexer_name in mod.__all__:
cls = getattr(mod, lexer_name)
_lexer_cache[cls.name] = cls
def get_all_lexers():
"""
Return a generator of tuples in the form ``(name, aliases,
filenames, mimetypes)`` of all know lexers.
"""
for item in LEXERS.itervalues():
yield item[1:]
for lexer in find_plugin_lexers():
yield lexer.name, lexer.aliases, lexer.filenames, lexer.mimetypes
def find_lexer_class(name):
    """
    Lookup a lexer class by name. Return None if not found.
    """
    # Fast path: class already imported and cached.
    if name in _lexer_cache:
        return _lexer_cache[name]
    # lookup builtin lexers
    for module_name, lname, aliases, _, _ in LEXERS.itervalues():
        if name == lname:
            # Import the defining module lazily, only when first needed.
            _load_lexers(module_name)
            return _lexer_cache[name]
    # continue with lexers from setuptools entrypoints
    for cls in find_plugin_lexers():
        if cls.name == name:
            return cls
    # Falls through with an implicit None when nothing matched.
def get_lexer_by_name(_alias, **options):
    """
    Get a lexer by an alias.

    ``options`` are passed through to the lexer constructor.
    Raises ``ClassNotFound`` if no builtin or plugin lexer matches.
    """
    # lookup builtin lexers
    for module_name, name, aliases, _, _ in LEXERS.itervalues():
        if _alias in aliases:
            # Import the defining module lazily, only on first use.
            if name not in _lexer_cache:
                _load_lexers(module_name)
            return _lexer_cache[name](**options)
    # continue with lexers from setuptools entrypoints
    for cls in find_plugin_lexers():
        if _alias in cls.aliases:
            return cls(**options)
    raise ClassNotFound('no lexer for alias %r found' % _alias)
def get_lexer_for_filename(_fn, code=None, **options):
"""
Get a lexer for a filename. If multiple lexers match the filename
pattern, use ``analyze_text()`` to figure out which one is more
appropriate.
"""
matches = []
fn = basename(_fn)
for modname, name, _, filenames, _ in LEXERS.itervalues():
for filename in filenames:
if fnmatch.fnmatch(fn, filename):
if name not in _lexer_cache:
_load_lexers(modname)
matches.append(_lexer_cache[name])
for cls in find_plugin_lexers():
for filename in cls.filenames:
if fnmatch.fnmatch(fn, filename):
matches.append(cls)
if sys.version_info > (3,) and isinstance(code, bytes):
# decode it, since all analyse_text functions expect unicode
code = code.decode('latin1')
def get_rating(cls):
# The class _always_ defines analyse_text because it's included in
# the Lexer class. The default implementation returns None which
# gets turned into 0.0. Run scripts/detect_missing_analyse_text.py
# to find lexers which need it overridden.
d = cls.analyse_text(code)
#print "Got %r from %r" % (d, cls)
return d
if code:
matches.sort(key=get_rating)
if matches:
#print "Possible lexers, after sort:", matches
return matches[-1](**options)
raise ClassNotFound('no lexer for filename %r found' % _fn)
def get_lexer_for_mimetype(_mime, **options):
"""
Get a lexer for a mimetype.
"""
for modname, name, _, _, mimetypes in LEXERS.itervalues():
if _mime in mimetypes:
if name not in _lexer_cache:
_load_lexers(modname)
return _lexer_cache[name](**options)
for cls in find_plugin_lexers():
if _mime in cls.mimetypes:
return cls(**options)
raise ClassNotFound('no lexer for mimetype %r found' % _mime)
def _iter_lexerclasses():
"""
Return an iterator over all lexer classes.
"""
for module_name, name, _, _, _ in LEXERS.itervalues():
if name not in _lexer_cache:
_load_lexers(module_name)
yield _lexer_cache[name]
for lexer in find_plugin_lexers():
yield lexer
def guess_lexer_for_filename(_fn, _text, **options):
"""
Lookup all lexers that handle those filenames primary (``filenames``)
or secondary (``alias_filenames``). Then run a text analysis for those
lexers and choose the best result.
usage::
>>> from pygments.lexers import guess_lexer_for_filename
>>> guess_lexer_for_filename('hello.html', '<%= @foo %>')
<pygments.lexers.templates.RhtmlLexer object at 0xb7d2f32c>
>>> guess_lexer_for_filename('hello.html', '<h1>{{ title|e }}</h1>')
<pygments.lexers.templates.HtmlDjangoLexer object at 0xb7d2f2ac>
>>> guess_lexer_for_filename('style.css', 'a { color: <?= $link ?> }')
<pygments.lexers.templates.CssPhpLexer object at 0xb7ba518c>
"""
fn = basename(_fn)
primary = None
matching_lexers = set()
for lexer in _iter_lexerclasses():
for filename in lexer.filenames:
if fnmatch.fnmatch(fn, filename):
matching_lexers.add(lexer)
primary = lexer
for filename in lexer.alias_filenames:
if fnmatch.fnmatch(fn, filename):
matching_lexers.add(lexer)
if not matching_lexers:
raise ClassNotFound('no lexer for filename %r found' % fn)
if len(matching_lexers) == 1:
return matching_lexers.pop()(**options)
result = []
for lexer in matching_lexers:
rv = lexer.analyse_text(_text)
if rv == 1.0:
return lexer(**options)
result.append((rv, lexer))
result.sort()
if not result[-1][0] and primary is not None:
return primary(**options)
return result[-1][1](**options)
def guess_lexer(_text, **options):
"""
Guess a lexer by strong distinctions in the text (eg, shebang).
"""
best_lexer = [0.0, None]
for lexer in _iter_lexerclasses():
rv = lexer.analyse_text(_text)
if rv == 1.0:
return lexer(**options)
if rv > best_lexer[0]:
best_lexer[:] = (rv, lexer)
if not best_lexer[0] or best_lexer[1] is None:
raise ClassNotFound('no lexer matching the text found')
return best_lexer[1](**options)
class _automodule(types.ModuleType):
"""Automatically import lexers."""
def __getattr__(self, name):
info = LEXERS.get(name)
if info:
_load_lexers(info[0])
cls = _lexer_cache[info[1]]
setattr(self, name, cls)
return cls
raise AttributeError(name)
import sys
oldmod = sys.modules['pygments.lexers']
newmod = _automodule('pygments.lexers')
newmod.__dict__.update(oldmod.__dict__)
sys.modules['pygments.lexers'] = newmod
del newmod.newmod, newmod.oldmod, newmod.sys, newmod.types
| bsd-2-clause |
soldag/home-assistant | tests/components/nightscout/test_init.py | 7 | 1416 | """Test the Nightscout config flow."""
from aiohttp import ClientError
from homeassistant.components.nightscout.const import DOMAIN
from homeassistant.config_entries import (
ENTRY_STATE_LOADED,
ENTRY_STATE_NOT_LOADED,
ENTRY_STATE_SETUP_RETRY,
)
from homeassistant.const import CONF_URL
from tests.async_mock import patch
from tests.common import MockConfigEntry
from tests.components.nightscout import init_integration
async def test_unload_entry(hass):
    """Test successful unload of entry."""
    entry = await init_integration(hass)

    # Setup succeeded: exactly one entry, in the loaded state.
    assert len(hass.config_entries.async_entries(DOMAIN)) == 1
    assert entry.state == ENTRY_STATE_LOADED

    assert await hass.config_entries.async_unload(entry.entry_id)
    await hass.async_block_till_done()

    # After unload the entry state flips and the integration's data is gone.
    assert entry.state == ENTRY_STATE_NOT_LOADED
    assert not hass.data.get(DOMAIN)
async def test_async_setup_raises_entry_not_ready(hass):
    """Test that it throws ConfigEntryNotReady when exception occurs during setup."""
    config_entry = MockConfigEntry(
        domain=DOMAIN,
        data={CONF_URL: "https://some.url:1234"},
    )
    config_entry.add_to_hass(hass)

    # Simulate the Nightscout server being unreachable during setup.
    with patch(
        "homeassistant.components.nightscout.NightscoutAPI.get_server_status",
        side_effect=ClientError(),
    ):
        await hass.config_entries.async_setup(config_entry.entry_id)
    # ConfigEntryNotReady is absorbed by the config-entry machinery and
    # surfaces as the SETUP_RETRY state.
    assert config_entry.state == ENTRY_STATE_SETUP_RETRY
| apache-2.0 |
UniversalMasterEgg8679/ansible | lib/ansible/modules/system/aix_lvol.py | 10 | 10849 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016, Alain Dejoux <adejoux@djouxtech.net>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.0'}
DOCUMENTATION = '''
---
author:
- "Alain Dejoux (@adejoux)"
module: aix_lvol
short_description: Configure AIX LVM logical volumes
description:
- This module creates, removes or resizes AIX logical volumes. Inspired by lvol module.
version_added: "2.4"
options:
vg:
description:
- The volume group this logical volume is part of.
required: true
lv:
description:
- The name of the logical volume.
required: true
lv_type:
description:
- The type of the logical volume. Default to jfs2.
size:
description:
- The size of the logical volume with one of the [MGT] units.
copies:
description:
- the number of copies of the logical volume. By default, 1 copy. Maximum copies are 3.
policy:
choices: [ "maximum", "minimum" ]
default: maximum
description:
- Sets the interphysical volume allocation policy. "maximum" allocates logical partitions across the maximum number of physical volumes.
"minimum" allocates logical partitions across the minimum number of physical volumes.
state:
choices: [ "present", "absent" ]
default: present
description:
- Control if the logical volume exists. If C(present) and the
volume does not already exist then the C(size) option is required.
opts:
description:
- Free-form options to be passed to the mklv command
pvs:
description:
- Comma separated list of physical volumes e.g. hdisk1,hdisk2
'''
EXAMPLES = '''
# Create a logical volume of 512M.
- aix_lvol:
vg: testvg
lv: testlv
size: 512M
# Create a logical volume of 512M with disks hdisk1 and hdisk2
- aix_lvol:
vg: testvg
lv: test2lv
size: 512M
pvs: hdisk1,hdisk2
# Create a logical volume of 512M mirrored.
- aix_lvol:
vg: testvg
lv: test3lv
size: 512M
copies: 2
# Create a logical volume of 1G with a minimum placement policy .
- aix_lvol:
vg: rootvg
lv: test4lv
size: 1G
policy: minimum
# Create a logical volume with special options like mirror pool
- aix_lvol:
vg: testvg
lv: testlv
size: 512M
opts: -p copy1=poolA -p copy2=poolB
# Extend the logical volume to 1200M.
- aix_lvol:
vg: testvg
lv: test4lv
size: 1200M
# Remove the logical volume.
- aix_lvol:
vg: testvg
lv: testlv
state: absent
'''
RETURN = '''
msg:
type: string
description: A friendly message describing the task result.
returned: always
sample: Logical volume testlv created.
'''
from ansible.module_utils.basic import AnsibleModule
import re
def convert_size(module, size):
    """Convert a size string with an M/G/T suffix into megabytes.

    ``size`` is e.g. "512M", "2G" or "1T" (suffix is case-insensitive).
    On an unknown suffix the module fails via ``fail_json``.
    """
    suffix = size[-1].upper()
    scale = {'M': 1024 ** 0, 'G': 1024 ** 1, 'T': 1024 ** 2}
    if suffix not in scale:
        module.fail_json(msg="No valid size unit specified.")
    return int(size[:-1]) * scale[suffix]
def round_ppsize(x, base=16):
    """Round *x* up to the nearest multiple of *base* (the PP size)."""
    rounded = int(base * round(float(x) / base))
    # round() may have picked the lower multiple; bump up so the result
    # is never smaller than the requested size.
    return rounded + base if rounded < x else rounded
def parse_lv(data):
    """Parse ``lslv`` output into a dict describing the logical volume.

    Returns ``{'name', 'vg', 'size', 'policy'}`` where ``size`` is in MB
    (logical partitions * PP size), or None when the output contains no
    "LOGICAL VOLUME:" header.  Raises NameError if the header is present
    but the LPs/PP SIZE/INTER-POLICY fields are missing -- real lslv
    output always includes them.
    """
    # Regexes are raw strings: "\s"/"\w"/"\d" in plain strings are invalid
    # escape sequences (DeprecationWarning in 3.6+, an error in the future).
    name = None
    for line in data.splitlines():
        match = re.search(r"LOGICAL VOLUME:\s+(\w+)\s+VOLUME GROUP:\s+(\w+)", line)
        if match is not None:
            name = match.group(1)
            vg = match.group(2)
            continue
        match = re.search(r"LPs:\s+(\d+).*PPs", line)
        if match is not None:
            lps = int(match.group(1))
            continue
        match = re.search(r"PP SIZE:\s+(\d+)", line)
        if match is not None:
            pp_size = int(match.group(1))
            continue
        match = re.search(r"INTER-POLICY:\s+(\w+)", line)
        if match is not None:
            policy = match.group(1)
            continue
    if not name:
        return None
    # Size in megabytes: logical partitions times physical-partition size.
    size = lps * pp_size
    return {'name': name, 'vg': vg, 'size': size, 'policy': policy}
def parse_vg(data):
    """Parse ``lsvg`` output into a dict describing the volume group.

    Returns ``{'name', 'size', 'free', 'pp_size'}``.  ``size`` and
    ``free`` are the megabyte figures captured from inside the
    parentheses of the "TOTAL PPs"/"FREE PPs" lines; ``pp_size`` is in
    MB.  Raises NameError if a field is missing -- real lsvg output
    always includes all four.
    """
    # Raw strings for the regexes: "\s"/"\w"/"\d"/"\(" in plain strings are
    # invalid escape sequences (DeprecationWarning in 3.6+, error later).
    for line in data.splitlines():
        match = re.search(r"VOLUME GROUP:\s+(\w+)", line)
        if match is not None:
            name = match.group(1)
            continue
        match = re.search(r"TOTAL PP.*\((\d+)", line)
        if match is not None:
            size = int(match.group(1))
            continue
        match = re.search(r"PP SIZE:\s+(\d+)", line)
        if match is not None:
            pp_size = int(match.group(1))
            continue
        match = re.search(r"FREE PP.*\((\d+)", line)
        if match is not None:
            free = int(match.group(1))
            continue
    return {'name': name, 'size': size, 'free': free, 'pp_size': pp_size}
def main():
module = AnsibleModule(
argument_spec=dict(
vg=dict(required=True, type='str'),
lv=dict(required=True, type='str'),
lv_type=dict(default='jfs2', type='str'),
size=dict(type='str'),
opts=dict(default='', type='str'),
copies=dict(default='1', type='str'),
state=dict(choices=["absent", "present"], default='present'),
policy=dict(choices=["maximum", "minimum"], default='maximum'),
pvs=dict(type='list', default=list())
),
supports_check_mode=True,
)
vg = module.params['vg']
lv = module.params['lv']
lv_type = module.params['lv_type']
size = module.params['size']
opts = module.params['opts']
copies = module.params['copies']
policy = module.params['policy']
state = module.params['state']
pvs = module.params['pvs']
pv_list = ' '.join(pvs)
if policy == 'maximum':
lv_policy = 'x'
else:
lv_policy = 'm'
# Add echo command when running in check-mode
if module.check_mode:
test_opt = 'echo '
else:
test_opt = ''
# check if system commands are available
lsvg_cmd = module.get_bin_path("lsvg", required=True)
lslv_cmd = module.get_bin_path("lslv", required=True)
# Get information on volume group requested
rc, vg_info, err = module.run_command("%s %s" % (lsvg_cmd, vg))
if rc != 0:
if state == 'absent':
module.exit_json(changed=False, msg="Volume group %s does not exist." % vg)
else:
module.fail_json(msg="Volume group %s does not exist." % vg, rc=rc, out=vg_info, err=err)
this_vg = parse_vg(vg_info)
if size is not None:
# Calculate pp size and round it up based on pp size.
lv_size = round_ppsize(convert_size(module, size), base=this_vg['pp_size'])
# Get information on logical volume requested
rc, lv_info, err = module.run_command(
"%s %s" % (lslv_cmd, lv))
if rc != 0:
if state == 'absent':
module.exit_json(changed=False, msg="Logical Volume %s does not exist." % lv)
changed = False
this_lv = parse_lv(lv_info)
if state == 'present' and not size:
if this_lv is None:
module.fail_json(msg="No size given.")
if this_lv is None:
if state == 'present':
if lv_size > this_vg['free']:
module.fail_json(msg="Not enough free space in volume group %s: %s MB free." % (this_vg['name'], this_vg['free']))
# create LV
mklv_cmd = module.get_bin_path("mklv", required=True)
cmd = "%s %s -t %s -y %s -c %s -e %s %s %s %sM %s" % (test_opt, mklv_cmd, lv_type, lv, copies, lv_policy, opts, vg, lv_size, pv_list)
rc, out, err = module.run_command(cmd)
if rc == 0:
module.exit_json(changed=True, msg="Logical volume %s created." % lv)
else:
module.fail_json(msg="Creating logical volume %s failed." % lv, rc=rc, out=out, err=err)
else:
if state == 'absent':
# remove LV
rmlv_cmd = module.get_bin_path("rmlv", required=True)
rc, out, err = module.run_command("%s %s -f %s" % (test_opt, rmlv_cmd, this_lv['name']))
if rc == 0:
module.exit_json(changed=True, msg="Logical volume %s deleted." % lv)
else:
module.fail_json(msg="Failed to remove logical volume %s." % lv, rc=rc, out=out, err=err)
else:
if this_lv['policy'] != policy:
# change lv allocation policy
chlv_cmd = module.get_bin_path("chlv", required=True)
rc, out, err = module.run_command("%s %s -e %s %s" % (test_opt, chlv_cmd, lv_policy, this_lv['name']))
if rc == 0:
module.exit_json(changed=True, msg="Logical volume %s policy changed: %s." % (lv, policy))
else:
module.fail_json(msg="Failed to change logical volume %s policy." % lv, rc=rc, out=out, err=err)
if vg != this_lv['vg']:
module.fail_json(msg="Logical volume %s already exist in volume group %s" % (lv, this_lv['vg']))
# from here the last remaining action is to resize it, if no size parameter is passed we do nothing.
if not size:
module.exit_json(changed=False, msg="Logical volume %s already exist." % (lv))
# resize LV based on absolute values
if int(lv_size) > this_lv['size']:
extendlv_cmd = module.get_bin_path("extendlv", required=True)
cmd = "%s %s %s %sM" % (test_opt, extendlv_cmd, lv, lv_size - this_lv['size'])
rc, out, err = module.run_command(cmd)
if rc == 0:
module.exit_json(changed=True, msg="Logical volume %s size extended to %sMB." % (lv, lv_size))
else:
module.fail_json(msg="Unable to resize %s to %sMB." % (lv, lv_size), rc=rc, out=out, err=err)
elif lv_size < this_lv['size']:
module.fail_json(msg="No shrinking of Logical Volume %s permitted. Current size: %s MB" % (lv, this_lv['size']))
else:
module.exit_json(changed=False, msg="Logical volume %s size is already %sMB." % (lv, lv_size))
if __name__ == '__main__':
main()
| gpl-3.0 |
yosuke/rtshell | rtshell/post_install.py | 2 | 9957 | # -*- Python -*-
# -*- coding: utf-8 -*-
from __future__ import print_function
'''rtshell
Copyright (C) 2009-2015
Geoffrey Biggs
RT-Synthesis Research Group
Intelligent Systems Research Institute,
National Institute of Advanced Industrial Science and Technology (AIST),
Japan
All rights reserved.
Licensed under the GNU Lesser General Public License version 3.
http://www.gnu.org/licenses/lgpl-3.0.en.html
Post-install actions for running after wheel-based installs
'''
import argparse
import logging
import os
import os.path
import pkg_resources
import pkgutil
import platform
import shutil
import sys
def create_and_link_dir(source, dest, dir_type='', remove=False):
# Confirm the source is correct
if not os.path.exists(source) or not os.path.isdir(source):
sys.exit('Source {} directory does not exist or is not a directory: '
'{}'.format(dir_type, source))
if os.path.exists(dest):
if remove:
if os.path.islink(dest):
os.remove(dest)
else:
shutil.rmtree(dest)
else:
logging.info('Destination {} directory already exists; skipping: '
'{}'.format(dir_type, dest))
return
# Make the destination parent directory if necessary
parent_dir = os.path.dirname(dest)
if not os.path.exists(parent_dir):
logging.info('Making {} parent directory: {}'.format(dir_type,
parent_dir))
os.makedirs(parent_dir)
elif not os.path.isdir(parent_dir):
sys.exit('Destination {} parent directory exists but is not a '
'directory: {}'.format(dir_type, parent))
# Create the link
logging.info('Creating symbolic link from {} to {}'.format(source, dest))
os.symlink(source, dest)
def create_and_link_dir_content(source, dest, dir_type='', remove=False):
logging.info('Creating symbolic links from {} to {}'.format(source, dest))
# Confirm the source is correct
if not os.path.exists(source) or not os.path.isdir(source):
sys.exit('Source {} directory does not exist or is not a directory: '
'{}'.format(dir_type, source))
# Make the destination path if necessary
if os.path.exists(dest):
if remove:
if os.path.islink(dest):
os.remove(dest)
else:
shutil.rmtree(dest)
else:
logging.info('Destination {} directory already exists; skipping: '
'{}'.format(dir_type, dest))
os.makedirs(dest)
# Link all files in source
for f in [f for f in os.listdir(source) if
os.path.isfile(os.path.join(source, f))]:
s = os.path.join(source, f)
d = os.path.join(dest, f)
logging.info('Creating symbolic link from {} to {}'.format(s, d))
try:
os.symlink(s, d)
except OSError as e:
if e.errno == 17:
logging.info('Skipping existing symbolic link {}'.format(d))
else:
raise
def doc_source_dir(t, l, postfix=''):
    """Return the packaged doc directory for doc type *t* and language *l*."""
    path = pkg_resources.resource_filename(
        'rtshell.data', os.path.join('doc', t, l))
    if not postfix:
        # Joining an empty postfix would leave a trailing slash, which
        # messes with os.path.basename, etc., so return the bare path.
        return path
    return os.path.join(path, postfix)
# Symlink the man pages from share/doc/rtshell/man/man1/* into # <prefix>/share/man/man1
# If there is a Japanese manpage dir, symlink share/doc/rtshell/man/ja/man1 to there
def link_man_pages(prefix, remove=False):
man_source_en = doc_source_dir('man', 'en', 'man1')
man_source_ja = doc_source_dir('man', 'ja', 'man1')
man_path_en = os.path.join(prefix, 'share', 'man', 'man1')
man_path_ja = os.path.join(prefix, 'share', 'man', 'ja', 'man1')
if os.path.exists(man_source_en):
create_and_link_dir_content(man_source_en, man_path_en,
'default manpage', remove)
if os.path.exists(man_source_ja):
create_and_link_dir_content(man_source_ja, man_path_ja,
'Japanese manpage', remove)
man_path = os.path.join(prefix, 'share', 'man')
print('Linked manpage documentation to', man_path)
print('***** IMPORTANT *****')
print('You may need to add the following line or similar to your shell '
'setup scripts ' '(e.g. $HOME/.bashrc) to enable the manpage '
'documentation:')
print('\texport MANPATH={}:$MANPATH'.format(man_path))
print('*********************')
# Symlink the html/pdf documentation to prefix/share/doc/rtshell
def link_documentation(prefix, remove=False):
doc_dir = os.path.join(prefix, 'share', 'doc', 'rtshell')
html_source_en = doc_source_dir('html', 'en')
html_source_ja = doc_source_dir('html', 'ja')
if os.path.exists(html_source_en):
create_and_link_dir(html_source_en, os.path.join(doc_dir, 'html',
'en'), 'English HTML documentation', remove)
if os.path.exists(html_source_ja):
create_and_link_dir(html_source_ja, os.path.join(doc_dir, 'html',
'ja'), 'Japanese HTML documentation', remove)
pdf_source_en = doc_source_dir('pdf', 'en')
pdf_source_ja = doc_source_dir('pdf', 'ja')
if os.path.exists(pdf_source_en):
create_and_link_dir(pdf_source_en, os.path.join(doc_dir, 'pdf', 'en'),
'English PDF documentation', remove)
if os.path.exists(pdf_source_ja):
create_and_link_dir(pdf_source_ja, os.path.join(doc_dir, 'pdf', 'ja'),
'Japanese PDF documentation', remove)
print('Linked documentation to', doc_dir)
def add_shell_support(prefix, bashrc_path=None):
script_path = pkg_resources.resource_filename('rtshell.data',
'shell_support')
source_line = 'source {}'.format(os.path.abspath(script_path))
if not bashrc_path:
bashrc_path = os.path.expanduser('~/.bashrc')
else:
bashrc_path = os.path.expanduser(bashrc_path)
if os.path.exists(bashrc_path) and os.path.isfile(bashrc_path):
# Check if the source line already exists
with open(bashrc_path, 'r') as f:
content = f.read()
if source_line in content:
print('Shell support already installed in', bashrc_path)
return
with open(bashrc_path, 'a') as bashrc_f:
bashrc_f.write('\n' + source_line)
print('Added "{}" to {}'.format(source_line, bashrc_path))
def post_install_unix(prefix, bashrc, interactive, remove=False):
# Link the manpages to the manpage directory under the prefix
ans = raw_input('Link man pages? ') if interactive else 'y'
if ans.lower() == 'y' or ans.lower() == 'yes':
link_man_pages(prefix, remove)
# Link documentation to <prefix>/share/doc/rtshell
ans = raw_input('Link documentation? ') if interactive else 'y'
if ans.lower() == 'y' or ans.lower() == 'yes':
link_documentation(prefix, remove)
# Add sourcing of share/rtshell/shell_support to .bashrc or .bash_profile
ans = raw_input('Add shell support to .bashrc? ') if interactive else 'y'
if ans.lower() == 'y' or ans.lower() == 'yes':
add_shell_support(prefix, bashrc_path=bashrc)
def copy_batch_files(prefix, remove=False):
# Copy the rtcwd batch file to the Python scripts directory
bf_src = pkg_resources.resource_filename('rtshell.data', 'rtcwd.bat')
if not os.path.exists(bf_src) or not os.path.isfile(bf_src):
sys.exit('Source batch file does not exist or is not a file:'.format(
bf_src))
bf_dest = os.path.join(prefix, 'Scripts', 'rtcwd.bat')
if os.path.exists(bf_dest):
if remove:
os.remove(bf_dest)
else:
logging.info('Destination file already exists; skipping: '
'{}'.format(bf_dest))
return
shutil.copy(bf_src, bf_dest)
print('Copied {} to {}'.format(bf_src, bf_dest))
def post_install_windows(prefix, interactive, remove=False):
ans = raw_input('Copy batch files? ') if interactive else 'y'
if ans.lower() == 'y' or ans.lower() == 'yes':
copy_batch_files(prefix, remove)
def main():
p = argparse.ArgumentParser(description='Post-install actions for RTShell')
if platform.system() != 'Windows':
p.add_argument('-b', '--bashrc-path', type=str, default='~/.bashrc',
help='Path to an alternative file to install shell support in')
p.add_argument('-p', '--prefix', type=str, default='/usr/local',
help='Prefix to install to [Default: %(default)s]')
else:
p.add_argument('-p', '--prefix', type=str, default=sys.exec_prefix,
help='Prefix to install to [Default: %(default)s]')
p.add_argument('-n', '--non-interactive', action='store_true',
default=False, help='Do not ask before performing each action')
p.add_argument('-r', '--remove', action='store_true', default=False,
help='Remove existing files [Default: %(default)s]')
p.add_argument('-v', '--verbose', action='store_true', default=False,
help='Enable verbose output')
args = p.parse_args()
if args.verbose:
logging.basicConfig(level=logging.DEBUG,
format='%(levelname)s: %(message)s)')
print('Running post-install actions for', platform.system())
if platform.system() == 'Linux':
post_install_unix(args.prefix, args.bashrc_path, not
args.non_interactive, args.remove)
elif platform.system() == 'Darwin':
post_install_unix(args.prefix, args.bashrc_path, not
args.non_interactive, args.remove)
elif platform.system() == 'Windows':
post_install_windows(args.prefix, not args.non_interactive, args.remove)
else:
print('No post-install actions for', platform.system())
# vim: set expandtab tabstop=8 shiftwidth=4 softtabstop=4 textwidth=79
| lgpl-3.0 |
CarlosLannister/OwadeReborn | owade/fileAnalyze/outlook.py | 1 | 3487 | from DPAPI.Core import masterkey
from DPAPI.Core import registry
from DPAPI.Probes import dropbox
from DPAPI.Core import blob
import sqlite3
import re
import os
import binascii
# Version 0.1.
class GetOutlookPassword:
def getOutlookPassword(self, mkpDir, sid, credHist, ntUser, userPassword):
dic = {}
'''
OutlokkMasterkey = "/home/hackaton/Escritorio/dropbox/Archivos necesarios/Protect/S-1-5-21-3173276068-3308429807-3105269238-1000"
OutlookSID = "S-1-5-21-3173276068-3308429807-3105269238-1000"
OutlookCredhist = "/home/hackaton/Escritorio/dropbox/Archivos necesarios/Protect/CREDHIST"
Ntuser = "/home/hackaton/Escritorio/dropbox/Archivos necesarios/NTUSER.DAT"
Userpassword = "lazarus2015"'''
mkp = masterkey.MasterKeyPool()
mkp.loadDirectory(mkpDir)
mkp.addCredhistFile(sid, credHist)
mkp.try_credential(sid, userPassword) # Credential of the USER
email = []
password = []
# Open the registry
with open(ntUser, 'rb') as f:
r = registry.Registry.Registry(f)
# Path of the Outlook file in Registry
directory = r.open(
'Software\\Microsoft\\Office\\15.0\\Outlook\\Profiles\\Outlook\\9375CFF0413111d3B88A00104B2A6676')
for reg in directory.subkeys():
auxreg = []
for regnumber in reg.values(): # 000001 000002 000003.....
auxreg.append(regnumber.name())
# For IMAP
if "IMAP Password" in auxreg:
username = reg.value('Email').value()
password = reg.value('IMAP Password').value()
break
# For IMAP
if "POP3 Password" in auxreg:
username = reg.value('Email').value()
password = reg.value('POP3 Password').value()
break
# Function de hacer cosas
for char in username:
if char.encode("hex") != "00":
email.append(char)
finalusername = ''.join(email)
dic['user'] = finalusername
# File to create the blob
fi = open("blob", 'w')
notruncate = password # This password is not truncated, need to delete the first byte
passwordhex = password.encode("hex") # Convert the hex to hexadecimal
binstr = binascii.unhexlify(passwordhex[2:]) # The blop does not need the first byte.
fi.write(binstr) # Write the blop in a file
fi.close()
blob1 = blob.DPAPIBlob(open('blob', 'rb').read()) # Load the blop from the file
finalpass = []
mks = mkp.getMasterKeys(blob1.mkguid)
for mk in mks:
if mk.decrypted:
blob1.decrypt(mk.get_key())
if blob1.decrypted:
password = blob1.cleartext
for char in password:
if char.encode("hex") != "00":
finalpass.append(char)
finalpassword = ''.join(finalpass)
dic['password'] = finalpassword
try:
os.remove("blob")
except:
pass
return { self.__class__.__name__: dic }
def main(self, mkpDir, sid, credHist, ntUser, userPassword):
    """Entry point: delegate to getOutlookPassword and hand back its result."""
    return self.getOutlookPassword(mkpDir, sid, credHist, ntUser, userPassword)
lDisciple/MetaZombies | Mobile/networkHandler.py | 1 | 5894 | import threading
import urllib
import urllib.parse
import urllib.error
import urllib.request
import urllib.response
import json
import time
import os
"""
Method to add a new TODO file with command in it
"""
def addCommandToQueue(command):
fileName = os.getcwd() + "/TODO/task_queue_" + str(time.time())
f = open(fileName,"w")
f.write(command)
f.close()
def addCommandsToQueue(commands):
    """Queue several commands in a single new TODO task file.

    NOTE(review): commands are concatenated with no separator, matching the
    original behaviour -- confirm the consumer expects that framing.
    """
    fileName = os.getcwd() + "/TODO/task_queue_" + str(time.time())
    # 'with' guarantees the handle is closed even if a write raises;
    # writelines() reproduces the original write-each-item loop exactly.
    with open(fileName, "w") as f:
        f.writelines(commands)
class NetworkHandler(threading.Thread):
    """Background thread that polls the PHP backend for commands and
    translates between user IDs, cell numbers and student details.

    Query results are turned into command strings and pushed onto the TODO
    queue via the module-level addCommandToQueue().
    """

    def __init__(self, url, password):
        threading.Thread.__init__(self)
        self._commands = []        # commands fetched from the server; drained by getCommands()
        self._url = url            # backend endpoint
        self._password = password  # shared secret sent with every request
        self._running = False
        self._userdata = {}        # userID -> {'username', 'studentnum', 'cellphone'}
        self._splitter = "/%%:"    # field separator used in userdata.txt

    def run(self):
        """Poll the server for new commands every 3 seconds until stop()."""
        self._running = True
        # Long poll for updates
        while self._running:
            try:
                updates = self.post({'command': 'getUpdates'})
                jdata = json.loads(updates)
                for update in jdata:
                    self._commands.append(update['command'].strip())
            except Exception:
                # Best effort: network/JSON errors are ignored and retried.
                pass
            time.sleep(3)

    def post(self, rawData):
        """POST rawData (plus the password) as JSON; return the decoded reply.

        Note: mutates rawData by adding the 'password' key (original behaviour).
        """
        rawData['password'] = self._password
        payload = json.dumps(rawData)
        request = urllib.request.Request(self._url, payload.encode())
        request.add_header("Content-Type", "application/json")
        return urllib.request.urlopen(request, timeout=10).read().decode('utf8', 'ignore')

    def loadUserData(self):
        """Load userdata.txt into self._userdata, downloading it first if absent."""
        if not os.path.exists(os.getcwd() + "/userdata.txt"):  # Download if absent
            self.downloadUserData()
        # 'with' ensures the file handle is closed on every path.
        with open(os.getcwd() + "/userdata.txt", "r") as f:
            for line in f:
                details = line.split(self._splitter)
                self._userdata[details[0]] = {'username': details[1],
                                              'studentnum': details[2],
                                              'cellphone': details[3]}

    def downloadUserData(self):
        """Fetch the full user table from the server and cache it in userdata.txt."""
        userDataJson = self.getUserData()
        userMap = json.loads(userDataJson)
        toWrite = ""
        for user in userMap:
            if user != 'success':  # 'success' is a status flag, not a user record
                details = userMap[user]
                toWrite += "{1}{0}{2}{0}{3}{0}{4}\n".format(self._splitter, details['USERID'], details['USERNAME'], details['STUDENTNUMBER'], details['CELLNUMBER'])
        with open(os.getcwd() + "/userdata.txt", "w") as newFile:
            newFile.write(toWrite)

    def getUserDetail(self, userID, detail):
        """Return one detail field for userID, refreshing the cache on a miss.

        Falls back to returning userID itself when the user is unknown.
        """
        if len(self._userdata) == 0:  # lazy-load the cache on first use
            self.loadUserData()
        if userID not in self._userdata:  # stale cache? re-download
            self.downloadUserData()
            self.loadUserData()  # BUGFIX: reload so the fresh download is actually visible
        if userID not in self._userdata:
            return userID
        return self._userdata[userID][detail]

    def findValue(self, m, val, detail):
        """Return True when val is a substring of m[key][detail] for any key."""
        return any(val in m[key][detail] for key in m)

    def findUserDetails(self, value, detail):
        """Reverse lookup: return the userID whose `detail` field contains value."""
        if len(self._userdata) == 0:  # lazy-load the cache on first use
            self.loadUserData()
        if not self.findValue(self._userdata, value, detail):  # stale cache? re-download
            self.downloadUserData()
            self.loadUserData()  # BUGFIX: reload so the fresh download is actually visible
        if not self.findValue(self._userdata, value, detail):
            # BUGFIX: the original referenced the undefined name 'userID' here
            # (NameError). NOTE(review): callers such as sanitiseUserID compare
            # against the literal 'unknown', not this format -- confirm which
            # return value is actually intended for the not-found case.
            return "Unknown(" + value + ")"
        for key in self._userdata:
            if value in self._userdata[key][detail]:
                return key
        return 'unknown'

    def getUsername(self, userID):
        return self.getUserDetail(userID, 'username')

    def getCellphone(self, userID):
        return self.getUserDetail(userID, 'cellphone')

    def getStudentNumber(self, userID):
        return self.getUserDetail(userID, 'studentnum')

    def getUserIDByCell(self, cell):
        return self.findUserDetails(cell, 'cellphone')

    def getLeaderboard(self, userID):
        """Fetch the leaderboard and queue it for delivery to userID."""
        data = self.post({'command': 'getLeaderboard'})
        jsondata = json.loads(data)
        if not jsondata['success']:
            addCommandToQueue("send~~" + self.sanitiseUserID(userID) + "~~Could not load the leaderboard."
                              + " Try the website if the problem persists.")
        else:
            addCommandToQueue("sendleaderboard~~" + userID + "~~" + data)

    def sanitiseUserID(self, userID):
        """Map a cell number in international (+27) format to a user ID."""
        if '+27' in userID:
            userID = "0" + userID[3:]
            userID = self.getUserIDByCell(userID)
        if userID == 'unknown':
            # BUGFIX: addCommandToQueue is a module-level function, not a
            # method; the original 'self.addCommandToQueue' raised AttributeError.
            addCommandToQueue("send~~" + userID + "~~Could not find your user code. Please use the website.")
            userID = 'a385cac'
        return userID

    def getStatus(self, userID):
        """Fetch the user's game status and queue it for delivery."""
        data = self.post({'command': 'getUserStatus', 'userID': self.sanitiseUserID(userID)})
        jsondata = json.loads(data)
        if jsondata['success'] != True:
            addCommandToQueue("send~~" + userID + "~~Error: Could not get your details."
                              + " Try the website if the problem persists.")
        else:
            addCommandToQueue("sendstatus~~" + userID + "~~" + data)

    def getUserData(self):
        """Return the raw JSON user table from the server."""
        return self.post({'command': 'getUserData'})

    def kill(self, userID, victimID):
        """Register a kill of victimID by userID; queue an error on failure."""
        data = self.post({'command': 'kill', 'killerID': self.sanitiseUserID(userID), 'victimID': self.sanitiseUserID(victimID)})
        jsondata = json.loads(data)
        if not jsondata['success']:
            addCommandToQueue("send~~" + userID + "~~Could not kill that user."
                              + jsondata['reason']
                              + ". Try the website if the problem persists.")

    def claim(self, userID, claimID):
        """Claim a code for userID; queue an error message on failure."""
        data = self.post({'command': 'claim', 'userID': self.sanitiseUserID(userID), 'code': claimID})
        jsondata = json.loads(data)
        if not jsondata['success']:
            addCommandToQueue("send~~" + userID + "~~Your claim failed, " + jsondata['reason']
                              + ". Try claim it on the website if the problem persists.")

    def getLiving(self, userID):
        """Queue the list of living players for delivery to userID."""
        data = self.post({'command': 'getLiving'})
        addCommandToQueue("sendliving~~" + userID + "~~" + data)

    def stop(self):
        """Ask the polling loop in run() to exit after its current sleep."""
        self._running = False

    def getCommands(self):
        """Drain and return the pending command list."""
        ret = self._commands
        self._commands = []
        return ret
if __name__ == '__main__':
    # Manual smoke test: read backend credentials from Config.cfg
    # (simple key=value lines), start the poller and do one reverse lookup.
    cfgFile = open(os.getcwd() + "/Config.cfg", "r")
    config = {}
    for line in cfgFile:
        parts = line.strip().split("=")
        config[parts[0]] = parts[1]
    net = NetworkHandler(config['phpUrl'], config['phpPassword'])
    net.start()
    print(net.getUserIDByCell('845810628'))
| mit |
pablooliveira/cere | src/cere/cere_sanity_check.py | 3 | 5075 | #!/usr/bin/env python
# This file is part of CERE.
#
# Copyright (c) 2013-2016, Universite de Versailles St-Quentin-en-Yvelines
#
# CERE is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License,
# or (at your option) any later version.
#
# CERE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with CERE. If not, see <http://www.gnu.org/licenses/>.
import os
import subprocess
import cere_configure
import logging
import csv
import argparse
# Module-wide logger; the name appears in CERE's log output.
logger = logging.getLogger('ASM checker')
def init_module(subparsers, cere_plugins):
    """Register the 'check' sub-command and its CLI arguments.

    Args:
        subparsers: argparse sub-parser collection to extend.
        cere_plugins: plugin registry mapping command name -> entry point.
    """
    cere_plugins["check"] = run
    check_parser = subparsers.add_parser("check", help="Compare for a given region, the assembly between original region and replay region")
    # BUGFIX: argparse %-formats help strings, so a literal '%' must be
    # escaped as '%%' or rendering --help raises ValueError. type=float keeps
    # the later numeric comparison valid for user-supplied values (otherwise
    # args.max_error is a string).
    check_parser.add_argument("--max-error", type=float, default=15.0, help="Maximum tolerated error between original and replay regions (Default: 15%%)")
    check_parser.add_argument('--region', required=True, help="Region to check")
    check_parser.add_argument('--path', help="Path of the object file")
    check_parser.add_argument("--diff-asm", nargs='?', const=True, default=False, help="Run vimdiff between original and replay file")
def compute_error(a, b):
    """Return the relative difference between a and b as a percentage.

    Inputs are line counts (non-negative). Guard: when both values are zero
    the difference is zero (the original raised ZeroDivisionError).
    """
    largest = max(a, b)
    if largest == 0:
        return 0.0
    return (abs(a - b) / float(largest)) * 100
def run_shell_command(command):
    """Run `command` through the shell; log its output and return success."""
    try:
        output = subprocess.check_output(command, stderr=subprocess.STDOUT, shell=True)
    except subprocess.CalledProcessError as err:
        logger.error(str(err))
        logger.error(err.output)
        logger.error("Fail: {0}".format(command))
        return False
    logger.debug(output)
    return True
def get_nlines(filename, functionname):
    """Disassemble `functionname` from `filename` via gdb and return the
    number of disassembly lines, or False on failure.

    NOTE(review): a 0-line result is falsy, so callers using `if not ...`
    cannot distinguish it from failure -- confirm that is acceptable.
    """
    # Objdump the function into the scratch file cere_tmp
    if not run_shell_command("gdb -batch -ex 'file {0}' -ex 'disassemble {1}' > cere_tmp".format(filename, functionname)):
        return False
    # count the number of lines
    try:
        # NOTE(review): under Python 3 communicate() returns bytes and
        # int(bytes) raises TypeError -- this path assumes Python 2. Confirm
        # the interpreter before porting.
        n_lines = subprocess.Popen("wc -l cere_tmp", shell=True, stdout=subprocess.PIPE).communicate()[0].split()[0]
    except subprocess.CalledProcessError as err:
        logger.error(str(err))
        logger.error(err.output)
        logger.error("Fail: wc -l original")
        return False
    return int(n_lines)
def run(args):
    """Entry point for `cere check`: compile the application in original and
    replay modes, disassemble the region's function in both, and report
    whether the assembly line counts match within args.max_error percent.

    Returns True on completion, False on any setup/compile failure.
    """
    if not cere_configure.init():
        return False
    if not os.path.isfile("regions.csv"):
        logger.critical("Regions.csv file missing. Please run cere regions")
        return False
    region = args.region
    filename = ""
    functionname = ""
    # Find the file where the region is
    with open("regions.csv") as regions_list:
        reader = csv.DictReader(regions_list)
        for row in reader:
            if region in row["Region Name"]:
                filename = row["File Name"]
                functionname = row["Function Name"]
                break
    if args.path:
        # Relocate the object file under the user-supplied path.
        filename = filename.rsplit('/', 1)
        filename = args.path + "/" + filename[1]
    filename = filename.replace(os.path.splitext(filename)[1], ".o")
    logger.debug("The file is {0} and the function is {1}".format(filename, functionname))
    # Now let's compile it in the original application
    if not run_shell_command("{0} && {1} CERE_MODE=original".format(cere_configure.cere_config["clean_cmd"], cere_configure.cere_config["build_cmd"])):
        return False
    original_lines = get_nlines(filename, functionname)
    if not original_lines:
        return False
    # backup the original assembly file
    if not run_shell_command("cp cere_tmp cere_original"):
        return False
    # Compile replay mode.
    # We accept that compilation fails because the dump does not have to be present.
    try:
        logger.debug(subprocess.check_output("{0} && {1} CERE_MODE=\"replay --region={2}\"".format(cere_configure.cere_config["clean_cmd"], cere_configure.cere_config["build_cmd"], args.region), stderr=subprocess.STDOUT, shell=True))
    except subprocess.CalledProcessError as err:
        logger.error(str(err))
        logger.error(err.output)
        logger.warning("If the dump is not present, skip these warnings")
    # The extracted region is wrapped in a run__cere__<region> function.
    replay_lines = get_nlines(filename, "run__cere__"+region)
    if not replay_lines:
        return False
    # backup the replay assembly file
    if not run_shell_command("mv cere_tmp cere_replay"):
        return False
    err = compute_error(original_lines, replay_lines)
    # NOTE(review): args.max_error is a string unless the parser sets
    # type=float -- confirm the argument registration before relying on this
    # comparison under Python 3.
    if err <= args.max_error:
        logger.info("Assembly matching: Original lines = {0} && replay lines = {1} (error = {2})".format(original_lines, replay_lines, err))
    else:
        logger.info("Assembly not matching: Original lines = {0} && replay lines = {1} (error = {2})".format(original_lines, replay_lines, err))
        if args.diff_asm:
            subprocess.call("vimdiff cere_original cere_replay", shell=True)
    return True
| lgpl-3.0 |
richpolis/siveinpy | env/lib/python2.7/site-packages/django/core/management/commands/testserver.py | 307 | 2006 | from django.core.management.base import BaseCommand
from optparse import make_option
class Command(BaseCommand):
    """Run a development server backed by a freshly created test database
    that has been seeded with the given fixtures."""

    option_list = BaseCommand.option_list + (
        make_option('--noinput', action='store_false', dest='interactive', default=True,
                    help='Tells Django to NOT prompt the user for input of any kind.'),
        make_option('--addrport', action='store', dest='addrport',
                    type='string', default='',
                    help='port number or ipaddr:port to run the server on'),
        make_option('--ipv6', '-6', action='store_true', dest='use_ipv6', default=False,
                    help='Tells Django to use a IPv6 address.'),
    )
    help = 'Runs a development server with data from the given fixture(s).'
    args = '[fixture ...]'
    requires_model_validation = False

    def handle(self, *fixture_labels, **options):
        from django.core.management import call_command
        from django.db import connection

        level = int(options.get('verbosity'))
        prompt_user = options.get('interactive')
        bind_addr = options.get('addrport')

        # Build a throw-away test database, then import the fixtures into it.
        test_db_name = connection.creation.create_test_db(verbosity=level, autoclobber=not prompt_user)
        call_command('loaddata', *fixture_labels, **{'verbosity': level})

        # Auto-reloading stays off: the reloader would invoke handle() again
        # and attempt to recreate the test database.
        stop_message = '\nServer stopped.\nNote that the test database, %r, has not been deleted. You can explore it on your own.' % test_db_name
        threaded = connection.features.test_db_allows_multiple_connections
        call_command('runserver',
                     addrport=bind_addr,
                     shutdown_message=stop_message,
                     use_reloader=False,
                     use_ipv6=options['use_ipv6'],
                     use_threading=threaded
                     )
asmaps/kvmate | kvmate/vnc/views.py | 2 | 2256 | from django.shortcuts import redirect
from django.views.generic import View
from django.views.generic.base import TemplateView
from braces.views import LoginRequiredMixin
import json
from django.template import RequestContext
from django.template.loader import render_to_string
from django.http import HttpResponse
from django.conf import settings
# django message framework
from django.contrib import messages
from host.models import Host
from .models import Vnc
from backends.mylibvirt import LibvirtBackend
class VncView(LoginRequiredMixin, TemplateView):
    """Render the VNC console page for a running host."""

    template_name = 'vnc/vnc.html'

    def get(self, request, name):
        # Unknown host: flash an error and bounce back to the host list.
        try:
            host = Host.objects.get(name=name)
        except Host.DoesNotExist:
            messages.add_message(self.request, messages.ERROR, 'The host "%s" is not in the database' % name, 'danger')
            return redirect('hosts')
        # A powered-off host cannot serve a console either.
        if not host.is_on:
            messages.add_message(self.request, messages.ERROR, 'This host is not running at the moment', 'warning')
            return redirect('hosts')
        return super(VncView, self).get(request, name)

    def get_context_data(self, **kwargs):
        context = super(VncView, self).get_context_data(**kwargs)
        backend = LibvirtBackend()
        host = Host.objects.get(name=self.kwargs['name'])
        status = backend.attach_or_create_websock(self.request.user, host)
        context['name'] = host.name
        if status == 0:
            # Websocket proxy attached: expose connection details to the template.
            context['host'] = settings.VNC_HOST
            context['port'] = host.vnc.port
        elif status == 1:
            messages.add_message(self.request, messages.ERROR, 'This host is not running at the moment', 'warning')
        elif status == -1:
            messages.add_message(self.request, messages.ERROR, 'An error occured retrieving the VNC parameters', 'danger')
        elif status == 2:
            messages.add_message(self.request, messages.ERROR, 'The host "%s" does not (yet) exist, but is in the database. This could mean that the host has been deleted without using kvmate, or that the host has not yet been created. If the latter is the case, allow a few seconds to start the boot process and reload this page' % self.kwargs['name'], 'danger')
        return context
anryko/ansible | lib/ansible/modules/network/fortios/fortios_report_dataset.py | 7 | 13721 | #!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_report_dataset
short_description: Report dataset configuration in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
user to set and modify report feature and dataset category.
Examples include all parameters and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.5
version_added: "2.8"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate IP address.
type: str
required: false
username:
description:
- FortiOS or FortiGate username.
type: str
required: false
password:
description:
- FortiOS or FortiGate password.
type: str
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS protocol.
type: bool
default: true
ssl_verify:
description:
- Ensures FortiGate certificate must be verified by a proper CA.
type: bool
default: true
version_added: 2.9
state:
description:
- Indicates whether to create or remove the object.
This attribute was present already in previous version in a deeper level.
It has been moved out to this outer level.
type: str
required: false
choices:
- present
- absent
version_added: 2.9
report_dataset:
description:
- Report dataset configuration.
default: null
type: dict
suboptions:
state:
description:
- B(Deprecated)
- Starting with Ansible 2.9 we recommend using the top-level 'state' parameter.
- HORIZONTALLINE
- Indicates whether to create or remove the object.
type: str
required: false
choices:
- present
- absent
field:
description:
- Fields.
type: list
suboptions:
displayname:
description:
- Display name.
type: str
id:
description:
- Field ID (1 to number of columns in SQL result).
required: true
type: int
name:
description:
- Name.
type: str
type:
description:
- Field type.
type: str
choices:
- text
- integer
- double
name:
description:
- Name.
required: true
type: str
parameters:
description:
- Parameters.
type: list
suboptions:
data_type:
description:
- Data type.
type: str
choices:
- text
- integer
- double
- long-integer
- date-time
display_name:
description:
- Display name.
type: str
field:
description:
- SQL field name.
type: str
id:
description:
- Parameter ID (1 to number of columns in SQL result).
required: true
type: int
policy:
description:
- Used by monitor policy.
type: int
query:
description:
- SQL query statement.
type: str
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
ssl_verify: "False"
tasks:
- name: Report dataset configuration.
fortios_report_dataset:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
state: "present"
report_dataset:
field:
-
displayname: "<your_own_value>"
id: "5"
name: "default_name_6"
type: "text"
name: "default_name_8"
parameters:
-
data_type: "text"
display_name: "<your_own_value>"
field: "<your_own_value>"
id: "13"
policy: "14"
query: "<your_own_value>"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
def login(data, fos):
    """Authenticate the `fos` handle against the device described in `data`."""
    host = data['host']
    username = data['username']
    password = data['password']
    ssl_verify = data['ssl_verify']

    fos.debug('on')
    # HTTPS defaults to on; only an explicitly falsy value disables it.
    fos.https('off' if 'https' in data and not data['https'] else 'on')

    fos.login(host, username, password, verify=ssl_verify)
def filter_report_dataset_data(json):
    """Return only the report-dataset options that are present and not None.

    Note: the parameter is named `json` upstream; it shadows the json module
    inside this function only.
    """
    option_list = ['field', 'name', 'parameters',
                   'policy', 'query']
    return {option: json[option]
            for option in option_list
            if option in json and json[option] is not None}
def underscore_to_hyphen(data):
    """Recursively rewrite dict keys, replacing '_' with '-'.

    Lists are mutated in place (and returned); dicts are rebuilt; all other
    values pass through untouched.
    """
    if isinstance(data, list):
        for idx in range(len(data)):
            data[idx] = underscore_to_hyphen(data[idx])
        return data
    if isinstance(data, dict):
        return {key.replace('_', '-'): underscore_to_hyphen(value)
                for key, value in data.items()}
    return data
def report_dataset(data, fos):
    """Create or delete a report dataset on the device and return the raw
    FortiOS API response.

    NOTE(review): when no state is supplied at either level, `state` becomes
    True, which matches neither branch below, so the function falls through
    and returns None -- callers then fail in is_successful_status. Kept
    as-is; confirm against upstream before changing.
    """
    vdom = data['vdom']
    # Outer 'state' (2.9+) takes precedence over the legacy nested one.
    if 'state' in data and data['state']:
        state = data['state']
    elif 'state' in data['report_dataset'] and data['report_dataset']:
        state = data['report_dataset']['state']
    else:
        state = True
    report_dataset_data = data['report_dataset']
    filtered_data = underscore_to_hyphen(filter_report_dataset_data(report_dataset_data))

    if state == "present":
        return fos.set('report',
                       'dataset',
                       data=filtered_data,
                       vdom=vdom)

    elif state == "absent":
        return fos.delete('report',
                          'dataset',
                          mkey=filtered_data['name'],
                          vdom=vdom)
def is_successful_status(status):
    """A call succeeded if it reports 'success', or if it was a DELETE that
    hit a missing object (404), which is treated as already-absent."""
    if status['status'] == "success":
        return True
    return status['http_method'] == "DELETE" and status['http_status'] == 404
def fortios_report(data, fos):
    """Dispatch the report configuration and normalise the device response.

    Returns the tuple (is_error, has_changed, raw_response).

    NOTE(review): if data['report_dataset'] is falsy, `resp` is never bound
    and the return line raises NameError. In practice the module always
    supplies the key; kept as-is pending an upstream fix.
    """
    if data['report_dataset']:
        resp = report_dataset(data, fos)

    return not is_successful_status(resp), \
        resp['status'] == "success", \
        resp
def main():
    """Module entry point: declare the argument spec, connect to the device
    (HTTPAPI connection or legacy fortiosapi credentials) and apply the
    report_dataset configuration."""
    # Argument spec mirrors the DOCUMENTATION block above.
    fields = {
        "host": {"required": False, "type": "str"},
        "username": {"required": False, "type": "str"},
        "password": {"required": False, "type": "str", "default": "", "no_log": True},
        "vdom": {"required": False, "type": "str", "default": "root"},
        "https": {"required": False, "type": "bool", "default": True},
        "ssl_verify": {"required": False, "type": "bool", "default": True},
        "state": {"required": False, "type": "str",
                  "choices": ["present", "absent"]},
        "report_dataset": {
            "required": False, "type": "dict", "default": None,
            "options": {
                "state": {"required": False, "type": "str",
                          "choices": ["present", "absent"]},
                "field": {"required": False, "type": "list",
                          "options": {
                              "displayname": {"required": False, "type": "str"},
                              "id": {"required": True, "type": "int"},
                              "name": {"required": False, "type": "str"},
                              "type": {"required": False, "type": "str",
                                       "choices": ["text", "integer", "double"]}
                          }},
                "name": {"required": True, "type": "str"},
                "parameters": {"required": False, "type": "list",
                               "options": {
                                   "data_type": {"required": False, "type": "str",
                                                 "choices": ["text", "integer", "double",
                                                             "long-integer", "date-time"]},
                                   "display_name": {"required": False, "type": "str"},
                                   "field": {"required": False, "type": "str"},
                                   "id": {"required": True, "type": "int"}
                               }},
                "policy": {"required": False, "type": "int"},
                "query": {"required": False, "type": "str"}
            }
        }
    }

    module = AnsibleModule(argument_spec=fields,
                           supports_check_mode=False)

    # legacy_mode refers to using fortiosapi instead of HTTPAPI
    legacy_mode = 'host' in module.params and module.params['host'] is not None and \
        'username' in module.params and module.params['username'] is not None and \
        'password' in module.params and module.params['password'] is not None

    if not legacy_mode:
        if module._socket_path:
            connection = Connection(module._socket_path)
            fos = FortiOSHandler(connection)

            is_error, has_changed, result = fortios_report(module.params, fos)
        else:
            module.fail_json(**FAIL_SOCKET_MSG)
    else:
        # Legacy path: direct fortiosapi login with the supplied credentials.
        try:
            from fortiosapi import FortiOSAPI
        except ImportError:
            module.fail_json(msg="fortiosapi module is required")

        fos = FortiOSAPI()

        login(module.params, fos)
        is_error, has_changed, result = fortios_report(module.params, fos)
        fos.logout()

    if not is_error:
        module.exit_json(changed=has_changed, meta=result)
    else:
        # NOTE(review): "Error in repo" is the upstream wording (likely a typo).
        module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
    # Standard Ansible module entry point.
    main()
| gpl-3.0 |
altaf-ali/luigi | examples/ftp_experiment_outputs.py | 85 | 3278 | # -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
import luigi
from luigi.contrib.ftp import RemoteTarget
#: the FTP server
HOST = "some_host"
#: the username
USER = "user"
#: the password
PWD = "some_password"
class ExperimentTask(luigi.ExternalTask):
    """
    This class represents something that was created elsewhere by an external process,
    so all we want to do is to implement the output method.
    """

    def output(self):
        """
        Returns the target output for this task: a file on the FTP server.

        :return: the target output for this task.
        :rtype: object (:py:class:`~luigi.target.Target`)
        """
        return RemoteTarget('/experiment/output1.txt', HOST, username=USER, password=PWD)

    def run(self):
        """
        Write four lines of sample data to this task's target output.
        """
        rows = (
            "data 0 200 10 50 60",
            "data 1 190 9 52 60",
            "data 2 200 10 52 60",
            "data 3 195 1 52 60",
        )
        with self.output().open('w') as outfile:
            for row in rows:
                print(row, file=outfile)
class ProcessingTask(luigi.Task):
    """
    Consume ExperimentTask's output and write the average of column 3 and
    the sum of column 4 to a local file.

    (The original docstring was copy-pasted from ExperimentTask.)
    """

    def requires(self):
        """
        This task's dependencies:

        * :py:class:`~.ExperimentTask`

        :return: object (:py:class:`luigi.task.Task`)
        """
        return ExperimentTask()

    def output(self):
        """
        Returns the target output for this task.
        In this case, a successful execution of this task will create a file on the local filesystem.

        :return: the target output for this task.
        :rtype: object (:py:class:`~luigi.target.Target`)
        """
        return luigi.LocalTarget('/tmp/processeddata.txt')

    def run(self):
        """
        Average the third column and sum the fourth, then persist both.
        """
        avg = 0.0
        elements = 0
        sumval = 0.0

        # Target objects are a file system/format abstraction and this will return a file stream object
        # NOTE: self.input() actually returns the ExperimentTask.output() target
        for line in self.input().open('r'):
            values = line.split(" ")
            avg += float(values[2])
            sumval += float(values[3])
            elements = elements + 1

        # average
        # NOTE(review): raises ZeroDivisionError when the input file is empty
        # -- confirm upstream always writes at least one row.
        avg = avg / elements

        # save calculated values
        with self.output().open('w') as outfile:
            print(avg, sumval, file=outfile)
if __name__ == '__main__':
    # Delegate to luigi's CLI entry point (task name/params come from argv).
    luigi.run()
| apache-2.0 |
quinot/ansible | lib/ansible/modules/network/nxos/_nxos_ip_interface.py | 8 | 22287 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['deprecated'],
'supported_by': 'network'}
DOCUMENTATION = '''
---
module: nxos_ip_interface
version_added: "2.1"
deprecated: Deprecated in 2.5. Use M(nxos_l3_interface) instead.
short_description: Manages L3 attributes for IPv4 and IPv6 interfaces.
description:
- Manages Layer 3 attributes for IPv4 and IPv6 interfaces.
extends_documentation_fragment: nxos
author:
- Jason Edelman (@jedelman8)
- Gabriele Gerbino (@GGabriele)
notes:
- Tested against NXOSv 7.3.(0)D1(1) on VIRL
- Interface must already be a L3 port when using this module.
- Logical interfaces (po, loop, svi) must be created first.
- C(mask) must be inserted in decimal format (i.e. 24) for
both IPv6 and IPv4.
- A single interface can have multiple IPv6 configured.
- C(tag) is not idempotent for IPv6 addresses and I2 system image.
options:
interface:
description:
- Full name of interface, i.e. Ethernet1/1, vlan10.
required: true
addr:
description:
- IPv4 or IPv6 Address.
required: false
default: null
version:
description:
- Version of IP address. If the IP address is IPV4 version should be v4.
If the IP address is IPV6 version should be v6.
default: v4
choices: ['v4', 'v6']
mask:
description:
- Subnet mask for IPv4 or IPv6 Address in decimal format.
required: false
default: null
dot1q:
description:
- Configures IEEE 802.1Q VLAN encapsulation on the subinterface. The range is from 2 to 4093.
required: false
default: null
version_added: "2.5"
tag:
description:
- Route tag for IPv4 or IPv6 Address in integer format.
required: false
default: 0
version_added: "2.4"
allow_secondary:
description:
- Allow to configure IPv4 secondary addresses on interface.
required: false
default: false
version_added: "2.4"
state:
description:
- Specify desired state of the resource.
required: false
default: present
choices: ['present','absent']
requirements:
- "ipaddress"
'''
EXAMPLES = '''
- name: Ensure ipv4 address is configured on Ethernet1/32
nxos_ip_interface:
interface: Ethernet1/32
transport: nxapi
version: v4
state: present
addr: 20.20.20.20
mask: 24
- name: Ensure ipv6 address is configured on Ethernet1/31
nxos_ip_interface:
interface: Ethernet1/31
transport: cli
version: v6
state: present
addr: '2001::db8:800:200c:cccb'
mask: 64
- name: Ensure ipv4 address is configured with tag
nxos_ip_interface:
interface: Ethernet1/32
transport: nxapi
version: v4
state: present
tag: 100
addr: 20.20.20.20
mask: 24
- name: Ensure ipv4 address is configured on sub-intf with dot1q encapsulation
nxos_ip_interface:
interface: Ethernet1/32.10
transport: nxapi
version: v4
state: present
dot1q: 10
addr: 20.20.20.20
mask: 24
- name: Configure ipv4 address as secondary if needed
nxos_ip_interface:
interface: Ethernet1/32
transport: nxapi
version: v4
state: present
allow_secondary: true
addr: 21.21.21.21
mask: 24
'''
RETURN = '''
proposed:
description: k/v pairs of parameters passed into module
returned: always
type: dict
sample: {"addr": "20.20.20.20", "allow_secondary": true,
"interface": "Ethernet1/32", "mask": "24", "tag": 100}
existing:
description: k/v pairs of existing IP attributes on the interface
returned: always
type: dict
sample: {"addresses": [{"addr": "11.11.11.11", "mask": 17, "tag": 101, "secondary": false}],
"interface": "ethernet1/32", "prefixes": ["11.11.0.0/17"],
"type": "ethernet", "vrf": "default"}
end_state:
description: k/v pairs of IP attributes after module execution
returned: always
type: dict
sample: {"addresses": [{"addr": "11.11.11.11", "mask": 17, "tag": 101, "secondary": false},
{"addr": "20.20.20.20", "mask": 24, "tag": 100, "secondary": true}],
"interface": "ethernet1/32", "prefixes": ["11.11.0.0/17", "20.20.20.0/24"],
"type": "ethernet", "vrf": "default"}
commands:
description: commands sent to the device
returned: always
type: list
sample: ["interface ethernet1/32", "ip address 20.20.20.20/24 secondary tag 100"]
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
'''
import re
try:
import ipaddress
HAS_IPADDRESS = True
except ImportError:
HAS_IPADDRESS = False
from ansible.module_utils.network.nxos.nxos import load_config, run_commands
from ansible.module_utils.network.nxos.nxos import get_capabilities, nxos_argument_spec
from ansible.module_utils.basic import AnsibleModule
def find_same_addr(existing, addr, mask, full=False, **kwargs):
    """Return the matching address dict from *existing*, or False.

    With ``full=True`` the caller must also pass ``version`` and ``tag``
    keyword arguments and the tag must match as well.  IPv6 address tags
    are not reported by the device, so for v6 only the default tag 0 is
    treated as a full match (keeps idempotence for the common case).
    """
    for candidate in existing['addresses']:
        if candidate['addr'] != addr or candidate['mask'] != mask:
            continue
        if not full:
            return candidate
        if kwargs['version'] == 'v4' and int(candidate['tag']) == kwargs['tag']:
            return candidate
        if kwargs['version'] == 'v6' and kwargs['tag'] == 0:
            # IPv6 tags are invisible in the parsed output; only accept the
            # default (untagged) case as already-configured.
            return candidate
    return False
def execute_show_command(command, module):
    """Run a single show *command* on the device and return the raw body.

    run_commands expects a list of command dicts; plain-text output is
    requested so the parsers in this module can work on raw strings.
    """
    cmd = {
        'command': command,
        'output': 'text',
        'prompt': None,
        'answer': None,
    }
    return run_commands(module, [cmd])
def get_interface_type(interface):
    """Classify an interface by the leading characters of its name.

    Returns one of 'ethernet', 'svi', 'loopback', 'management',
    'portchannel' or 'unknown'.
    """
    # First matching prefix wins; both 'MG' (mgmt0) and 'MA' (management)
    # name styles map to the management type.
    prefix_to_type = (
        ('ET', 'ethernet'),
        ('VL', 'svi'),
        ('LO', 'loopback'),
        ('MG', 'management'),
        ('MA', 'management'),
        ('PO', 'portchannel'),
    )
    name = interface.upper()
    for prefix, intf_type in prefix_to_type:
        if name.startswith(prefix):
            return intf_type
    return 'unknown'
def is_default(interface, module):
    # Determine whether *interface* carries only its default configuration.
    # Returns True/False, or the string 'DNE' when the interface does not
    # exist on the device (invalid name or missing key in the response).
    command = 'show run interface {0}'.format(interface)
    try:
        body = execute_show_command(command, module)[0]
        if 'invalid' in body.lower():
            # Device rejected the interface name.
            return 'DNE'
        else:
            raw_list = body.split('\n')
            # A default interface's running config ends with the bare
            # "interface ..." header line and nothing beneath it.
            if raw_list[-1].startswith('interface'):
                return True
            else:
                return False
    except KeyError:
        return 'DNE'
def get_interface_mode(interface, intf_type, module):
    # Report whether the port operates as 'layer2' or 'layer3'.
    # SVIs are layer3 by definition; only ethernet/port-channel ports have
    # a switchport state worth querying.  Anything else stays 'unknown'.
    command = 'show interface {0} switchport'.format(interface)
    mode = 'unknown'
    if intf_type in ['ethernet', 'portchannel']:
        body = execute_show_command(command, module)[0]
        if len(body) > 0:
            # Switchport disabled means the port is routed (layer3).
            if 'Switchport: Disabled' in body:
                mode = 'layer3'
            elif 'Switchport: Enabled' in body:
                mode = "layer2"
    elif intf_type == 'svi':
        mode = 'layer3'
    return mode
def send_show_command(interface_name, version, module):
    # Run the version-appropriate "show ip[v6] interface" command and
    # return the raw output.  *version* is validated upstream to be one of
    # 'v4'/'v6' (argument_spec choices), so 'command' is always bound.
    if version == 'v4':
        command = 'show ip interface {0}'.format(interface_name)
    elif version == 'v6':
        command = 'show ipv6 interface {0}'.format(interface_name)
    body = execute_show_command(command, module)
    return body
def parse_unstructured_data(body, interface_name, version, module):
    # Parse the raw "show ip interface" / "show ipv6 interface" text into
    # the structured dict used throughout the module, with keys:
    # 'addresses' (list of dicts), 'prefixes', 'interface', 'type', 'vrf'.
    interface = {}
    interface['addresses'] = []
    interface['prefixes'] = []
    vrf = None
    body = body[0]
    splitted_body = body.split('\n')
    if version == "v6":
        if "ipv6 is disabled" not in body.lower():
            address_list = []
            # We can have multiple IPv6 on the same interface.
            # We need to parse them manually from raw output.
            for index in range(0, len(splitted_body) - 1):
                if "IPv6 address:" in splitted_body[index]:
                    # Addresses are listed on the lines after this header...
                    first_reference_point = index + 1
                elif "IPv6 subnet:" in splitted_body[index]:
                    # ...and end just before the subnet line.
                    last_reference_point = index
                    break
            interface_list_table = splitted_body[first_reference_point:last_reference_point]
            for each_line in interface_list_table:
                # First whitespace-separated token is the addr/prefixlen.
                address = each_line.strip().split(' ')[0]
                if address not in address_list:
                    address_list.append(address)
                    interface['prefixes'].append(str(ipaddress.ip_interface(u"%s" % address).network))
            if address_list:
                for ipv6 in address_list:
                    address = {}
                    splitted_address = ipv6.split('/')
                    address['addr'] = splitted_address[0]
                    address['mask'] = splitted_address[1]
                    interface['addresses'].append(address)
    else:
        for index in range(0, len(splitted_body) - 1):
            if "IP address" in splitted_body[index]:
                # One line per v4 address: capture addr, mask, the optional
                # 'secondary' marker and the optional numeric tag.
                regex = r'.*IP\saddress:\s(?P<addr>\d{1,3}(?:\.\d{1,3}){3}),\sIP\ssubnet:' + \
                        r'\s\d{1,3}(?:\.\d{1,3}){3}\/(?P<mask>\d+)(?:\s(?P<secondary>secondary)\s)?' + \
                        r'(.+?tag:\s(?P<tag>\d+).*)?'
                match = re.match(regex, splitted_body[index])
                if match:
                    match_dict = match.groupdict()
                    # Normalise the optional captures: secondary -> bool,
                    # tag -> int (0 when absent).
                    if match_dict['secondary'] is None:
                        match_dict['secondary'] = False
                    else:
                        match_dict['secondary'] = True
                    if match_dict['tag'] is None:
                        match_dict['tag'] = 0
                    else:
                        match_dict['tag'] = int(match_dict['tag'])
                    interface['addresses'].append(match_dict)
                    prefix = str(ipaddress.ip_interface(u"%(addr)s/%(mask)s" % match_dict).network)
                    interface['prefixes'].append(prefix)
    try:
        vrf_regex = r'.+?VRF\s+(?P<vrf>\S+?)\s'
        match_vrf = re.match(vrf_regex, body, re.DOTALL)
        vrf = match_vrf.groupdict()['vrf']
    except AttributeError:
        # re.match returned None -> no VRF line in the output.
        vrf = None
    interface['interface'] = interface_name
    interface['type'] = get_interface_type(interface_name)
    interface['vrf'] = vrf
    return interface
def parse_interface_data(body):
    """Extract the dot1q VLAN id from raw 'show interface' output.

    Returns the id from the first "Encapsulation 802.1Q ... ID <n>" line
    found, or 0 when no encapsulation is configured.
    """
    lines = body[0].split('\n')
    id_pattern = re.compile(r'(.+?ID\s(?P<dot1q>\d+).*)?')
    # The final element from the split is deliberately not inspected
    # (it is the trailing fragment after the last newline).
    for line in lines[:-1]:
        if "Encapsulation 802.1Q" not in line:
            continue
        captured = id_pattern.match(line).groupdict()
        if captured['dot1q'] is not None:
            return int(captured['dot1q'])
    return 0
def get_dot1q_id(interface_name, module):
    """Return the dot1q encapsulation id configured on a sub-interface.

    Physical interfaces (no '.' in the name) cannot carry an
    encapsulation, so 0 is returned without querying the device.
    """
    if '.' not in interface_name:
        return 0
    try:
        output = execute_show_command('show interface {0}'.format(interface_name), module)
        return parse_interface_data(output)
    except KeyError:
        # Malformed device response -> treat as "no encapsulation".
        return 0
def get_ip_interface(interface_name, version, module):
    # Convenience wrapper: fetch the raw "show ip[v6] interface" output and
    # parse it into the structured 'existing' dict used by the module.
    body = send_show_command(interface_name, version, module)
    interface = parse_unstructured_data(body, interface_name, version, module)
    return interface
def get_remove_ip_config_commands(interface, addr, mask, existing, version):
    # Build the CLI commands that remove addr/mask from the interface,
    # given the parsed 'existing' state.  *interface* itself is unused here
    # (the caller prepends the "interface ..." line).
    commands = []
    if version == 'v4':
        # We can't just remove primary address if secondary address exists
        for address in existing['addresses']:
            if address['addr'] == addr:
                if address['secondary']:
                    # Secondary addresses can be removed directly.
                    commands.append('no ip address {0}/{1} secondary'.format(addr, mask))
                elif len(existing['addresses']) > 1:
                    # Removing the primary while secondaries exist: drop each
                    # secondary, then re-add the first one as the new primary
                    # and the rest as secondaries, preserving tags.
                    new_primary = False
                    for address in existing['addresses']:
                        if address['addr'] != addr:
                            commands.append('no ip address {0}/{1} secondary'.format(address['addr'], address['mask']))
                            if not new_primary:
                                command = 'ip address {0}/{1}'.format(address['addr'], address['mask'])
                                new_primary = True
                            else:
                                command = 'ip address {0}/{1} secondary'.format(address['addr'], address['mask'])
                            if 'tag' in address and address['tag'] != 0:
                                command += " tag " + str(address['tag'])
                            commands.append(command)
                else:
                    # Sole primary address: plain removal.
                    commands.append('no ip address {0}/{1}'.format(addr, mask))
                break
    else:
        for address in existing['addresses']:
            if address['addr'] == addr:
                commands.append('no ipv6 address {0}/{1}'.format(addr, mask))
    return commands
def get_config_ip_commands(delta, interface, existing, version):
    # Build the CLI commands that configure the proposed (delta) address,
    # removing or demoting existing addresses as required.
    commands = []
    delta = dict(delta)
    if version == 'v4':
        command = 'ip address {addr}/{mask}'.format(**delta)
        if len(existing['addresses']) > 0:
            if delta['allow_secondary']:
                # for/else: append ' secondary' unless the new address
                # replaces the current primary slot with a tag change.
                for address in existing['addresses']:
                    if delta['addr'] == address['addr'] and address['secondary'] is False and delta['tag'] != 0:
                        break
                else:
                    command += ' secondary'
            else:
                # Remove all existed addresses if 'allow_secondary' isn't specified
                # (secondaries must be removed before the primary, hence insert(0)).
                for address in existing['addresses']:
                    if address['secondary']:
                        commands.insert(0, 'no ip address {addr}/{mask} secondary'.format(**address))
                    else:
                        commands.append('no ip address {addr}/{mask}'.format(**address))
    else:
        if not delta['allow_secondary']:
            # Remove all existed addresses if 'allow_secondary' isn't specified
            for address in existing['addresses']:
                commands.insert(0, 'no ipv6 address {addr}/{mask}'.format(**address))
        command = 'ipv6 address {addr}/{mask}'.format(**delta)
    if int(delta['tag']) > 0:
        command += ' tag {tag}'.format(**delta)
    elif int(delta['tag']) == 0:
        # Case when we need to remove tag from an address. Just enter command like
        # 'ip address ...' (without 'tag') not enough
        commands += get_remove_ip_config_commands(interface, delta['addr'], delta['mask'], existing, version)
    commands.append(command)
    return commands
def flatten_list(command_lists):
    """Flatten one level of nesting: sublists are expanded in place."""
    flattened = []
    for item in command_lists:
        # Only genuine lists are expanded; strings and any other scalar
        # entries are appended untouched.
        flattened.extend(item if isinstance(item, list) else [item])
    return flattened
def validate_params(addr, interface, mask, dot1q, tag, allow_secondary, version, state, intf_type, module):
    """Sanity-check the module parameters, failing the module on violation.

    Verifies addr/mask presence for the requested state, interface
    existence (CLI transport only) and the numeric ranges of mask, dot1q
    and tag.  All failures are reported via module.fail_json().
    """
    device_info = get_capabilities(module)
    network_api = device_info.get('network_api', 'nxapi')
    if state == "present":
        if addr is None or mask is None:
            module.fail_json(msg="An IP address AND a mask must be provided "
                                 "when state=present.")
    elif state == "absent" and version == "v6":
        if addr is None or mask is None:
            module.fail_json(msg="IPv6 address and mask must be provided when "
                                 "state=absent.")
    if intf_type != "ethernet" and network_api == 'cliconf':
        # Non-ethernet interfaces (SVI, loopback, ...) must already exist.
        if is_default(interface, module) == "DNE":
            module.fail_json(msg="That interface does not exist yet. Create "
                                 "it first.", interface=interface)
    if mask is not None:
        try:
            if (int(mask) < 1 or int(mask) > 32) and version == "v4":
                raise ValueError
            elif int(mask) < 1 or int(mask) > 128:
                raise ValueError
        except ValueError:
            module.fail_json(msg="Warning! 'mask' must be an integer between"
                                 " 1 and 32 when version v4 and up to 128 "
                                 "when version v6.", version=version,
                             mask=mask)
    if addr is not None and mask is not None:
        try:
            ipaddress.ip_interface(u'%s/%s' % (addr, mask))
        except ValueError:
            module.fail_json(msg="Warning! Invalid ip address or mask set.", addr=addr, mask=mask)
    if dot1q is not None:
        # Bug fix: the original check used the chained comparison
        # ``2 > dot1q > 4093`` which can never be true, so out-of-range
        # values were silently accepted.  The argument_spec default is 0
        # (meaning "not set"), which must remain valid.
        if dot1q != 0 and (dot1q < 2 or dot1q > 4093):
            module.fail_json(msg="Warning! 'dot1q' must be an integer between"
                                 " 2 and 4093", dot1q=dot1q)
    if tag is not None:
        # Bug fix: ``0 > tag > 4294967295`` was likewise always false.
        # 0 is the documented default and stays accepted.
        if tag < 0 or tag > 4294967295:
            module.fail_json(msg="Warning! 'tag' must be an integer between"
                                 " 0 (default) and 4294967295."
                                 "To use tag you must set 'addr' and 'mask' params.", tag=tag)
    if allow_secondary is not None:
        # Secondary addresses only make sense alongside an explicit addr/mask.
        if addr is None or mask is None:
            module.fail_json(msg="Warning! 'secondary' can be used only when 'addr' and 'mask' set.",
                             allow_secondary=allow_secondary)
def main():
    """Module entry point: read params, compute the command delta, apply it."""
    argument_spec = dict(
        interface=dict(required=True),
        addr=dict(required=False),
        version=dict(required=False, choices=['v4', 'v6'],
                     default='v4'),
        mask=dict(type='str', required=False),
        dot1q=dict(required=False, default=0, type='int'),
        tag=dict(required=False, default=0, type='int'),
        state=dict(required=False, default='present',
                   choices=['present', 'absent']),
        allow_secondary=dict(required=False, default=False,
                             type='bool')
    )
    argument_spec.update(nxos_argument_spec)
    module = AnsibleModule(argument_spec=argument_spec,
                           supports_check_mode=True)
    if not HAS_IPADDRESS:
        module.fail_json(msg="ipaddress is required for this module. Run 'pip install ipaddress' for install.")
    warnings = list()
    addr = module.params['addr']
    version = module.params['version']
    mask = module.params['mask']
    dot1q = module.params['dot1q']
    tag = module.params['tag']
    allow_secondary = module.params['allow_secondary']
    interface = module.params['interface'].lower()
    state = module.params['state']
    intf_type = get_interface_type(interface)
    validate_params(addr, interface, mask, dot1q, tag, allow_secondary, version, state, intf_type, module)
    # Layer2 switchports cannot carry an IP address.
    mode = get_interface_mode(interface, intf_type, module)
    if mode == 'layer2':
        module.fail_json(msg='That interface is a layer2 port.\nMake it '
                             'a layer 3 port first.', interface=interface)
    # Snapshot the device's current addressing state.
    existing = get_ip_interface(interface, version, module)
    dot1q_tag = get_dot1q_id(interface, module)
    if dot1q_tag > 1:
        existing['dot1q'] = dot1q_tag
    args = dict(addr=addr, mask=mask, dot1q=dot1q, tag=tag, interface=interface, allow_secondary=allow_secondary)
    proposed = dict((k, v) for k, v in args.items() if v is not None)
    commands = []
    changed = False
    end_state = existing
    commands = ['interface {0}'.format(interface)]
    if state == 'absent':
        if existing['addresses']:
            if find_same_addr(existing, addr, mask):
                command = get_remove_ip_config_commands(interface, addr,
                                                        mask, existing, version)
                commands.append(command)
        if 'dot1q' in existing and existing['dot1q'] > 1:
            command = 'no encapsulation dot1Q {0}'.format(existing['dot1q'])
            commands.append(command)
    elif state == 'present':
        # Only push config when the exact addr/mask/tag combination is
        # not already present (idempotence).
        if not find_same_addr(existing, addr, mask, full=True, tag=tag, version=version):
            command = get_config_ip_commands(proposed, interface, existing, version)
            commands.append(command)
        if 'dot1q' not in existing and (intf_type in ['ethernet', 'portchannel'] and "." in interface):
            command = 'encapsulation dot1Q {0}'.format(proposed['dot1q'])
            commands.append(command)
    if len(commands) < 2:
        # Only the bare "interface ..." line was queued -> nothing to do.
        del commands[0]
    cmds = flatten_list(commands)
    if cmds:
        if module.check_mode:
            module.exit_json(changed=True, commands=cmds)
        else:
            load_config(module, cmds)
            changed = True
            end_state = get_ip_interface(interface, version, module)
            if 'configure' in cmds:
                cmds.pop(0)
    results = {}
    results['proposed'] = proposed
    results['existing'] = existing
    results['end_state'] = end_state
    results['commands'] = cmds
    results['changed'] = changed
    results['warnings'] = warnings
    module.exit_json(**results)
# Standard Ansible module entry point.
if __name__ == '__main__':
    main()
| gpl-3.0 |
Hazelwire/Hazelwire | src/run_tests.py | 1 | 1457 | # Copyright (c) 2011 The Hazelwire Team.
#
# This file is part of Hazelwire.
#
# Hazelwire is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Hazelwire is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Hazelwire. If not, see <http://www.gnu.org/licenses/>.
import unittest
from tests.test_db import DatabaseHandlerTestCase
from tests.test_flagdistrib import FlagDistributionTestCase
from tests.test_manifestparser import ManifestParserTestCase
from tests.test_sanitycheck import SanityCheckTestCase
# Build one TestSuite per test module via the standard loader.
db_suite = unittest.TestLoader().loadTestsFromTestCase(DatabaseHandlerTestCase)
flagdistrib_suite = unittest.TestLoader().loadTestsFromTestCase(FlagDistributionTestCase)
manifestparser_suite = unittest.TestLoader().loadTestsFromTestCase(ManifestParserTestCase)
sanitycheck_suite = unittest.TestLoader().loadTestsFromTestCase(SanityCheckTestCase)
# Aggregate the suites and run everything with per-test (verbose) output.
all_tests = unittest.TestSuite([db_suite, flagdistrib_suite, manifestparser_suite, sanitycheck_suite])
unittest.TextTestRunner(verbosity=2).run(all_tests)
| gpl-3.0 |
tildebyte/processing.py | examples.py/3D/Form/Vertices.py | 7 | 1764 | """
Vertices
by Simon Greenwold.
(Rewritten in Python by Jonathan Feinberg.)
Draw a cylinder centered on the y-axis, going down
from y=0 to y=height. The radius at the top can be
different from the radius at the bottom, and the
number of sides drawn is variable.
"""
def setup():
    # Processing entry point: open a 640x360 window using the 3D renderer.
    size(640, 360, P3D)
def draw():
    # Called every frame: clear, light the scene, rotate with the mouse
    # and render one shape.
    background(0)
    lights()
    translate(width / 2, height / 2)
    # Map mouse position onto rotation angles about the Y and Z axes.
    rotateY(map(mouseX, 0, width, 0, PI))
    rotateZ(map(mouseY, 0, height, 0, -PI))
    noStroke()
    fill(255, 255, 255)
    translate(0, -40, 0)
    drawCylinder(10, 180, 200, 16) # Draw a mix between a cylinder and a cone
    #drawCylinder(70, 70, 120, 64) # Draw a cylinder
    #drawCylinder(0, 180, 200, 4) # Draw a pyramid
def drawCylinder(topRadius, bottomRadius, tall, sides):
    """Draw a cylinder/cone along the y-axis from y=0 down to y=tall.

    topRadius and bottomRadius may differ (0 turns that end into a point);
    sides controls the tessellation.  The wall is a quad strip; each
    non-pointed end gets a triangle-fan cap.
    """
    angle = 0
    angleIncrement = TWO_PI / sides
    beginShape(QUAD_STRIP)
    for i in range(sides + 1):
        # One vertex on the top rim, one on the bottom rim per step.
        vertex(topRadius * cos(angle), 0, topRadius * sin(angle))
        vertex(bottomRadius * cos(angle), tall, bottomRadius * sin(angle))
        angle += angleIncrement
    endShape()
    # If it is not a cone, draw the circular top cap
    if topRadius:
        angle = 0
        beginShape(TRIANGLE_FAN)
        # Center point
        vertex(0, 0, 0)
        for i in range(sides + 1):
            vertex(topRadius * cos(angle), 0, topRadius * sin(angle))
            angle += angleIncrement
        endShape()
    # If it is not a cone, draw the circular bottom cap
    if bottomRadius:
        angle = 0
        beginShape(TRIANGLE_FAN)
        # Center point
        vertex(0, tall, 0)
        for i in range(sides + 1):
            vertex(bottomRadius * cos(angle), tall, bottomRadius * sin(angle))
            angle += angleIncrement
        endShape()
| apache-2.0 |
normanmaurer/autobahntestsuite-maven-plugin | src/main/resources/twisted/internet/_posixstdio.py | 19 | 5015 | # -*- test-case-name: twisted.test.test_stdio -*-
"""Standard input/out/err support.
Future Plans::
support for stderr, perhaps
Rewrite to use the reactor instead of an ad-hoc mechanism for connecting
protocols to transport.
Maintainer: James Y Knight
"""
import warnings
from zope.interface import implements
from twisted.internet import process, error, interfaces
from twisted.python import log, failure
class PipeAddress(object):
    # Marker address object for stdio pipes; there is no host/port to
    # report, so the class carries no data.
    implements(interfaces.IAddress)
class StandardIO(object):
    """Transport connecting a protocol to this process's stdin/stdout.

    Wraps a ProcessReader (stdin side) and a ProcessWriter (stdout side)
    and presents the combined pair as a single transport implementing the
    producer/consumer and half-closeable interfaces.
    """
    implements(interfaces.ITransport, interfaces.IProducer,
               interfaces.IConsumer, interfaces.IHalfCloseableDescriptor)
    _reader = None
    _writer = None
    disconnected = False
    disconnecting = False

    def __init__(self, proto, stdin=0, stdout=1, reactor=None):
        # Late import picks up the default reactor when none was supplied.
        if reactor is None:
            from twisted.internet import reactor
        self.protocol = proto
        self._writer = process.ProcessWriter(reactor, self, 'write', stdout)
        self._reader = process.ProcessReader(reactor, self, 'read', stdin)
        self._reader.startReading()
        self.protocol.makeConnection(self)

    # ITransport
    # XXX Actually, see #3597.
    def loseWriteConnection(self):
        if self._writer is not None:
            self._writer.loseConnection()

    def write(self, data):
        if self._writer is not None:
            self._writer.write(data)

    def writeSequence(self, data):
        if self._writer is not None:
            self._writer.writeSequence(data)

    def loseConnection(self):
        self.disconnecting = True
        if self._writer is not None:
            self._writer.loseConnection()
        if self._reader is not None:
            # Don't loseConnection, because we don't want to SIGPIPE it.
            self._reader.stopReading()

    def getPeer(self):
        return PipeAddress()

    def getHost(self):
        return PipeAddress()

    # Callbacks from process.ProcessReader/ProcessWriter
    def childDataReceived(self, fd, data):
        self.protocol.dataReceived(data)

    def childConnectionLost(self, fd, reason):
        if self.disconnected:
            return
        if reason.value.__class__ == error.ConnectionDone:
            # Normal close: route to the half-close handler for the side
            # ('read' or 'write') that actually closed.
            if fd == 'read':
                self._readConnectionLost(reason)
            else:
                self._writeConnectionLost(reason)
        else:
            self.connectionLost(reason)

    def connectionLost(self, reason):
        self.disconnected = True
        # Make sure to cleanup the other half
        _reader = self._reader
        _writer = self._writer
        protocol = self.protocol
        self._reader = self._writer = None
        self.protocol = None
        if _writer is not None and not _writer.disconnected:
            _writer.connectionLost(reason)
        if _reader is not None and not _reader.disconnected:
            _reader.connectionLost(reason)
        try:
            protocol.connectionLost(reason)
        except:
            log.err()

    def _writeConnectionLost(self, reason):
        self._writer = None
        if self.disconnecting:
            self.connectionLost(reason)
            return
        # Give half-close-aware protocols a chance to keep the read side.
        p = interfaces.IHalfCloseableProtocol(self.protocol, None)
        if p:
            try:
                p.writeConnectionLost()
            except:
                log.err()
                self.connectionLost(failure.Failure())

    def _readConnectionLost(self, reason):
        self._reader = None
        p = interfaces.IHalfCloseableProtocol(self.protocol, None)
        if p:
            try:
                p.readConnectionLost()
            except:
                log.err()
                self.connectionLost(failure.Failure())
        else:
            # Protocol can't handle half-close: tear down everything.
            self.connectionLost(reason)

    # IConsumer
    def registerProducer(self, producer, streaming):
        if self._writer is None:
            producer.stopProducing()
        else:
            self._writer.registerProducer(producer, streaming)

    def unregisterProducer(self):
        if self._writer is not None:
            self._writer.unregisterProducer()

    # IProducer
    def stopProducing(self):
        self.loseConnection()

    def pauseProducing(self):
        if self._reader is not None:
            self._reader.pauseProducing()

    def resumeProducing(self):
        if self._reader is not None:
            self._reader.resumeProducing()

    # Stupid compatibility:
    def closeStdin(self):
        """Compatibility only, don't use. Same as loseWriteConnection."""
        warnings.warn("This function is deprecated, use loseWriteConnection instead.",
                      category=DeprecationWarning, stacklevel=2)
        self.loseWriteConnection()

    def stopReading(self):
        """Compatibility only, don't use. Call pauseProducing."""
        self.pauseProducing()

    def startReading(self):
        """Compatibility only, don't use. Call resumeProducing."""
        self.resumeProducing()
| apache-2.0 |
dav1x/ansible | docs/api/conf.py | 66 | 10034 | # -*- coding: utf-8 -*-
#
# Ansible documentation build configuration file, created by
# sphinx-quickstart on Fri Jun 3 17:34:17 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('../bin'))
sys.path.insert(0, os.path.abspath('../lib/ansible'))
import sphinx_rtd_theme
import alabaster
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
# Sphinx extensions enabled for the API build: autodoc pulls docstrings
# from the source tree, napoleon parses Google/NumPy docstring styles,
# and the graphviz/inheritance extensions render class diagrams.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.napoleon',
    'sphinx.ext.todo',
    'sphinx.ext.viewcode',
    'sphinx.ext.graphviz',
    'sphinx.ext.inheritance_diagram',
    'alabaster',
]
#autodoc_default_flags = ['members', 'show-inheritance', 'inherited-members', 'undoc-members',]
autodoc_default_flags = ['members', 'show-inheritance', 'undoc-members',]
autoclass_content = 'both'
autodoc_member_order = 'bysource'
# Modules mocked out so autodoc can import code without these deps installed.
# NOTE(review): 'StricRedis' looks like a typo for 'StrictRedis', and mock
# imports are normally module names, not class names -- confirm against the
# autodoc targets before changing.
autodoc_mock_imports = ['xmltodict', 'winrm', 'redis', 'StricRedis']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Ansible'
copyright = u'2016, Red Hat'
author = u'Red Hat'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'2.3'
# The full version, including alpha/beta/rc tags.
release = u'1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#html_theme = 'alabaster'
#html_theme_path = ['../docsite/_themes']
#html_theme = 'srtd'
html_short_title = 'Ansible Documentation'
#html_theme = "sphinx_rtd_theme"
#html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
html_theme_path = [alabaster.get_path()]
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Ansibledoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'Ansible.tex', u'Ansible Documentation',
u'Red Hat', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'ansible', u'Ansible Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Ansible', u'Ansible Documentation',
author, 'Ansible', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| gpl-3.0 |
MotorolaMobilityLLC/external-chromium_org | tools/omahaproxy.py | 164 | 2466 | #!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Chrome Version Tool
Scrapes Chrome channel information and prints out the requested nugget of
information.
"""
import json
import optparse
import os
import string
import sys
import urllib
URL = 'https://omahaproxy.appspot.com/json'
def main():
    """Fetch the omahaproxy channel JSON and print the requested field.

    Returns a process exit code: 0 on success, 1 on fetch failure or when
    the requested os/channel/field combination is not present.
    """
    try:
        data = json.load(urllib.urlopen(URL))
    except Exception as e:
        print 'Error: could not load %s\n\n%s' % (URL, str(e))
        return 1
    # Iterate to find out valid values for OS, channel, and field options.
    oses = set()
    channels = set()
    fields = set()
    for os_versions in data:
        oses.add(os_versions['os'])
        for version in os_versions['versions']:
            for field in version:
                if field == 'channel':
                    channels.add(version['channel'])
                else:
                    fields.add(field)
    # Sort so the generated --help text is deterministic.
    oses = sorted(oses)
    channels = sorted(channels)
    fields = sorted(fields)
    # Command line parsing fun begins!
    usage = ('%prog [options]\n'
             'Print out information about a particular Chrome channel.')
    parser = optparse.OptionParser(usage=usage)
    parser.add_option('-o', '--os',
                      choices=oses,
                      default='win',
                      help='The operating system of interest: %s '
                           '[default: %%default]' % ', '.join(oses))
    parser.add_option('-c', '--channel',
                      choices=channels,
                      default='stable',
                      help='The channel of interest: %s '
                           '[default: %%default]' % ', '.join(channels))
    parser.add_option('-f', '--field',
                      choices=fields,
                      default='version',
                      help='The field of interest: %s '
                           '[default: %%default] ' % ', '.join(fields))
    (opts, args) = parser.parse_args()
    # Print out requested data if available.
    for os_versions in data:
        if os_versions['os'] != opts.os:
            continue
        for version in os_versions['versions']:
            if version['channel'] != opts.channel:
                continue
            if opts.field not in version:
                continue
            print version[opts.field]
            return 0
    print 'Error: unable to find %s for Chrome %s %s.' % (
        opts.field, opts.os, opts.channel)
    return 1
if __name__ == '__main__':
sys.exit(main())
| bsd-3-clause |
rosswhitfield/mantid | scripts/test/Muon/utilities/muon_file_utils_test.py | 3 | 4193 | # Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source,
# Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS
# SPDX - License - Identifier: GPL - 3.0 +
import os
from io import StringIO
import unittest
from unittest import mock
import Muon.GUI.Common.utilities.muon_file_utils as utils
class MuonFileUtilsTest(unittest.TestCase):
    """Tests for Muon.GUI.Common.utilities.muon_file_utils."""

    @staticmethod
    def _path(*parts):
        # Join path components with the platform separator (no leading sep).
        return os.sep.join(parts)

    def test_parse_user_input_to_files_returns_single_file_as_list(self):
        name = "EMU0001234.nxs"
        self.assertEqual(utils.parse_user_input_to_files(name), [name])

    def test_parse_user_input_to_files_returns_full_filepath(self):
        candidates = [self._path("C:", "dir1", "dir2", "EMU0001234.nxs"),
                      self._path("dir1", "dir2", "EMU0001234.nxs")]
        for candidate in candidates:
            self.assertEqual(utils.parse_user_input_to_files(candidate),
                             [candidate])

    def test_parse_user_input_to_files_returns_list_correctly(self):
        expected = [self._path("C:", "dir1", "dir2", "EMU0001234.nxs"),
                    self._path("C:", "dir1", "dir2", "EMU0001235.nxs"),
                    self._path("C:", "dir1", "dir2", "EMU0001236.nxs")]
        user_input = ";".join(expected)
        self.assertEqual(utils.parse_user_input_to_files(user_input), expected)

    def test_parse_user_input_to_files_filters_files_with_incorrect_extension(self):
        user_input = ";".join([self._path("C:", "dir1", "dir2", "EMU0001234.nxs"),
                               self._path("C:", "dir1", "dir2", "EMU0001235.txt"),
                               self._path("C:", "dir1", "dir2", "EMU0001236.png")])
        expected = [self._path("C:", "dir1", "dir2", "EMU0001234.nxs")]
        self.assertEqual(utils.parse_user_input_to_files(user_input, ['nxs']),
                         expected)

    def test_duplicates_removed_from_list_of_filenames_and_ordering_maintained(self):
        file_list = [os.sep + self._path("dir1", "dir2", "file1.nxs"),
                     os.sep + self._path("dir1", "dir4", "file2.nxs"),
                     os.sep + self._path("dir4", "dir2", "file1.nxs"),
                     os.sep + self._path("dir1", "dir4", "file1.nxs")]
        # Duplicate basenames are dropped; first occurrences keep their order.
        self.assertEqual(utils.remove_duplicated_files_from_list(file_list),
                         file_list[:2])

    def test_that_get_current_run_filename_throws_if_autosave_file_not_found(self):
        utils.check_file_exists = mock.Mock(return_value=False)
        with self.assertRaises(ValueError):
            utils.get_current_run_filename("EMU")

    def test_that_get_current_run_returns_correct_run(self):
        utils.check_file_exists = mock.Mock(return_value=True)
        expected = os.sep * 2 + self._path("EMU", "data", "autoA")
        utils.open = mock.Mock(return_value=StringIO(u"autoA"))
        self.assertEqual(utils.get_current_run_filename("EMU"), expected)

    def test_that_get_current_run_throws_if_no_valid_run_in_autosave_run(self):
        utils.check_file_exists = mock.Mock(side_effect=[True, False])
        with self.assertRaises(ValueError):
            utils.get_current_run_filename("EMU")
if __name__ == '__main__':
unittest.main(buffer=False, verbosity=2)
| gpl-3.0 |
tomhenderson/ns-3-dev-testing | utils.py | 179 | 4188 | #! /usr/bin/env python
# These methods are used by test.py and waf to look for and read the
# .ns3rc configuration file, which is used to specify the modules that
# should be enabled
import os
import sys
def get_list_from_file(file_path, list_name):
    '''Looks for a Python list called list_name in the file specified
    by file_path and returns it.

    If the file or list name aren't found, this function will return
    an empty list.

    '''
    # Renamed from `list` to avoid shadowing the builtin.
    result = []
    # Read in the file if it exists.
    if os.path.exists(file_path):
        # Look for the list.
        list_string = ""
        parsing_multiline_list = False
        # `with` guarantees the file is closed even if eval() below raises.
        with open(file_path, "r") as file_in:
            for line in file_in:
                # Remove any comments.
                if '#' in line:
                    (line, comment) = line.split('#', 1)
                # Accumulate the line if it names the list or continues a
                # multiline list started on an earlier line.
                if list_name in line or parsing_multiline_list:
                    list_string += line
                    # Handle multiline lists.
                    if ']' not in list_string:
                        parsing_multiline_list = True
                    else:
                        # Evaluate the list once its end is reached.
                        # Make the split function only split it once.
                        # NOTE: eval() runs arbitrary code from the config
                        # file; acceptable because .ns3rc is a local,
                        # user-authored file -- never point this at
                        # untrusted input.
                        result = eval(list_string.split('=', 1)[1].strip())
                        break
    return result
def get_bool_from_file(file_path, bool_name, value_if_missing):
    '''Looks for a Python boolean variable called bool_name in the
    file specified by file_path and returns its value.

    If the file or boolean variable aren't found, this function will
    return value_if_missing.

    '''
    # Renamed from `bool` to avoid shadowing the builtin; start from the
    # fallback so a missing file or variable needs no special-case return.
    result = value_if_missing
    # Read in the file if it exists.
    if os.path.exists(file_path):
        # `with` guarantees the file is closed even if eval() below raises.
        with open(file_path, "r") as file_in:
            # Look for the boolean variable.
            for line in file_in:
                # Remove any comments.
                if '#' in line:
                    (line, comment) = line.split('#', 1)
                if bool_name in line:
                    # Evaluate the variable's line once it is found. Make
                    # the split function only split it once.
                    # NOTE: eval() runs code from the (local, user-authored)
                    # config file; never point this at untrusted input.
                    result = eval(line.split('=', 1)[1].strip())
                    break
    return result
# Reads the NS-3 configuration file and returns a list of enabled modules.
#
# This function first looks for the ns3 configuration file (.ns3rc) in
# the current working directory and then looks in the ~ directory.
def read_config_file():
    """Locate and parse the .ns3rc configuration file.

    The current working directory is searched first, then the user's home
    directory.  Returns a tuple
    (config_file_exists, modules_enabled, examples_enabled, tests_enabled);
    when no configuration file is found, all modules are enabled and
    examples and tests are disabled.
    """
    dot_ns3rc_name = '.ns3rc'
    # Candidate locations, in search order: cwd first, then home.
    dot_ns3rc_path = None
    for candidate in (dot_ns3rc_name,
                      os.path.expanduser('~/') + dot_ns3rc_name):
        if os.path.exists(candidate):
            dot_ns3rc_path = candidate
            break
    if dot_ns3rc_path is None:
        # Return all of the default values if the .ns3rc file can't be found.
        return (False, ['all_modules'], False, False)
    # Enable all modules when the modules_enabled entry is absent or empty.
    modules_enabled = (get_list_from_file(dot_ns3rc_path, 'modules_enabled')
                       or ['all_modules'])
    # Examples and tests default to disabled when their flags are missing.
    examples_enabled = get_bool_from_file(dot_ns3rc_path, 'examples_enabled',
                                          False)
    tests_enabled = get_bool_from_file(dot_ns3rc_path, 'tests_enabled', False)
    return (True, modules_enabled, examples_enabled, tests_enabled)
| gpl-2.0 |
fredreichbier/genie | genie/slp/pyglet_adapter.py | 1 | 4118 | from pyglet.image import ImageData, AnimationFrame, Animation
from .raw_adapter import RawAdapter
class PygletAdapter(RawAdapter):
    """
    An extension to the `RawAdapter`: return an `pyglet.image.ImageData` object.
    """
    def __init__(self, frame):
        RawAdapter.__init__(self, frame)
        # Since genie *seems* to specify the hotspot relative to
        # the top left corner and pyglet wants it relative to the
        # bottom left corner, we need to do some calculations here.
        self.anchor_x, self.anchor_y = frame.hotspot_x, frame.height - frame.hotspot_y
    def get_image(self):
        # self.width/height/stride/array are presumably populated by
        # RawAdapter during frame decoding -- confirm in raw_adapter.py.
        # We need to pass a negative stride here since the image
        # will be upside-down otherwise.
        img = ImageData(self.width, self.height,
                        'RGBA', str(self.array), -self.stride)
        # Carry the hotspot over to pyglet's anchor so sprites line up.
        img.anchor_x = self.anchor_x
        img.anchor_y = self.anchor_y
        return img
class MirroredPygletAdapter(PygletAdapter):
    """
    Exactly like the above, it's just mirrored. For simplicity, since
    Age Of Empires doesn't store all the animation directions, you have
    to mirror the existing frames to get the missing images.
    Also, flip the X anchor if needed.
    """
    def __init__(self, frame):
        PygletAdapter.__init__(self, frame)
        # Mirror the anchor so the hotspot stays on the same image feature.
        self.anchor_x = frame.width - self.anchor_x
    def _get_byte_pos(self, x, y):
        # mirror dat. ehehehehehAHAHAHAHAH
        # Flip the x coordinate: column x is written at column (width - x).
        # NOTE(review): for 0-based x this maps x=0 to column `width`, one
        # past an exact mirror ((width - 1 - x) * pixel_size); confirm
        # against RawAdapter's indexing convention before changing.
        return y * self.stride + (self.width - x) * self.pixel_size
def load_animation(slp_file, frame_ids, duration=0.1, mirrored=False, player=1):
    """
    Build a `pyglet.image.Animation` from a range of SLP frames.

    *frame_ids* is an inclusive ``(first frame, last frame)`` tuple and
    *duration* the number of seconds each frame is displayed.  Pass True
    for *mirrored* to flip the frames horizontally, and a player number
    as *player* to select the player colour.
    """
    adapter_cls = MirroredPygletAdapter if mirrored else PygletAdapter
    first, last = frame_ids
    anim_frames = [
        AnimationFrame(
            slp_file.frames[frame_id].parse_stream(
                image_adapter_cls=adapter_cls, player=player),
            duration)
        for frame_id in xrange(first, last + 1)
    ]
    return Animation(anim_frames)
# SLP files store only these 5 facing directions; the rest are produced by
# mirroring (see MIRRORED_ANIMATIONS).  Direction numbers follow the
# numeric-keypad convention (2 = south, 8 = north, ...).
ORIGINAL_ANIMATIONS = [
    2, # south
    1, # southwest
    4, # ...
    7,
    8,
]
# Maps a stored direction to the direction obtained by mirroring it.
MIRRORED_ANIMATIONS = {
    7: 9, # 9 (northeast) is 7 (northwest) mirrored
    4: 6, # ...
    1: 3,
}
# Number of facing directions stored in one SLP file.
DIRECTIONS_IN_SLP = 5
class AnimationError(Exception):
    """Raised when an SLP file cannot be split evenly into animation directions."""
def load_aoe_animations(slp_file, duration=0.1, player=1):
    """
    Load AOE animations from *slp_file*.

    Every SLP file stores exactly 5 directions with an equal number of
    frames each, so the frames-per-direction count is derived from the
    total; an `AnimationError` is raised if the total is not divisible.
    Returns a dictionary ``{ direction: Animation instance }`` where
    *direction* is a numpad-style number (0-9); directions not stored in
    the file are filled in by mirroring the stored ones.

    :todo: Just use `pyglet.image.Animation.get_transform` for the flips.
    """
    frame_count = len(slp_file.frames)
    if frame_count % DIRECTIONS_IN_SLP:
        raise AnimationError('incompatible frame count: %d' % frame_count)
    per_direction = frame_count // DIRECTIONS_IN_SLP
    anims = {}
    for idx, direction in enumerate(ORIGINAL_ANIMATIONS):
        # Inclusive frame range for this direction within the file.
        span = (idx * per_direction, (idx + 1) * per_direction - 1)
        # Animation exactly as stored in the file.
        anims[direction] = load_animation(slp_file, span, duration, False,
                                          player)
        mirrored_direction = MIRRORED_ANIMATIONS.get(direction)
        if mirrored_direction is not None:
            # Derive the missing direction by mirroring the stored frames.
            anims[mirrored_direction] = load_animation(slp_file, span,
                                                       duration, True, player)
    return anims
| bsd-2-clause |
yukeyi/meeting | wechat/views.py | 1 | 4093 | from django.utils import timezone
from wechat.wrapper import WeChatView, WeChatLib
from wechat.handlers import *
from wechat.models import Activity
from meeting.settings import WECHAT_TOKEN, WECHAT_APPID, WECHAT_SECRET
class CustomWeChatView(WeChatView):
    """Concrete WeChat endpoint for the meeting assistant: wires up the
    message handlers, the menu click-event keys and the custom menu."""
    # Shared WeChat API client, configured from Django settings.
    lib = WeChatLib(WECHAT_TOKEN, WECHAT_APPID, WECHAT_SECRET)
    # Handlers are presumably tried in order by WeChatView dispatch --
    # confirm in wechat.wrapper.
    handlers = [
        Quicklook, ExitMeetingHandler, BindAccountHandler, AllMeetingsHandler, RecentMeetingHandler, MyMeetingHandler, FakeSearchHandler, SearchHandler
    ]
    error_message_handler = ErrorHandler
    default_handler = DefaultHandler
    # Symbolic names for the menu click-event keys WeChat sends back.
    event_keys = {
        'book_what': 'SERVICE_BOOK_WHAT',
        'get_ticket': 'SERVICE_GET_TICKET',
        'account_bind': 'SERVICE_BIND',
        'my_meeting': 'MY_MEETING',
        'exit_meeting': 'EXIT_MEETING',
        'quick_look': 'QUICK_LOOK',
        'book_empty': 'BOOKING_EMPTY',
        'book_header': 'BOOKING_ACTIVITY_',
        'all_meeting' : 'ALL_MEETING',
        'recent' : 'RECENT_MEETING',
        'search' : 'SEARCH_MEETING'
    }
    # Static two-column custom menu pushed to WeChat (labels are
    # user-facing Chinese strings -- do not alter).
    menu = {
        'button': [
            {
                "name": "我的会议",
                "sub_button": [
                    {
                        "type": "click",
                        "name": "会佳账户绑定",
                        "key": event_keys['account_bind'],
                    },
                    {
                        "type": "click",
                        "name": "我收藏的会议",
                        "key": event_keys['my_meeting'],
                    },
                    {
                        "type": "click",
                        "name": "退出会议",
                        "key": event_keys['exit_meeting'],
                    },
                    {
                        "type": "click",
                        "name": "我的提醒",
                        "key": event_keys['quick_look'],
                    }
                ]
            },
            {
                "name": "服务",
                "sub_button": [
                    {
                        "type": "click",
                        "name": "所有会议",
                        "key": event_keys['all_meeting'],
                    },
                    {
                        "type": "click",
                        "name": "近期会议",
                        "key": event_keys['recent'],
                    },
                    {
                        "type": "click",
                        "name": "查询会议",
                        "key": event_keys['search']
                    }
                ]
            }
        ]
    }
    @classmethod
    def get_book_btn(cls):
        # The last top-level button is the service column defined above.
        return cls.menu['button'][-1]
    @classmethod
    def update_book_button(cls, activities):
        # NOTE(review): debug stub -- rebuilding the booking sub-menu from
        # `activities` appears unimplemented (only prints "dfdf"); confirm
        # before relying on update_menu(activities).
        print("dfdf")
    @classmethod
    def update_menu(cls, activities=None):
        # With activities: keep at most 5, refresh the booking buttons and
        # fall through to push the menu.  Without: recover the activity ids
        # from the currently deployed remote menu, resolve them to published
        # Activity rows, and recurse with that list (the recursive call does
        # the actual push).
        if activities is not None:
            if len(activities) > 5:
                cls.logger.warn('Custom menu with %d activities, keep only 5', len(activities))
            cls.update_book_button([{'id': act.id, 'name': act.name} for act in activities[:5]])
        else:
            current_menu = cls.lib.get_wechat_menu()
            existed_buttons = list()
            for btn in current_menu:
                if btn['name'] == '抢票':
                    existed_buttons += btn.get('sub_button', list())
            activity_ids = list()
            for btn in existed_buttons:
                if 'key' in btn:
                    activity_id = btn['key']
                    # Keys look like BOOKING_ACTIVITY_<id>; strip the prefix.
                    if activity_id.startswith(cls.event_keys['book_header']):
                        activity_id = activity_id[len(cls.event_keys['book_header']):]
                    if activity_id and activity_id.isdigit():
                        activity_ids.append(int(activity_id))
            return cls.update_menu(Activity.objects.filter(
                id__in=activity_ids, status=Activity.STATUS_PUBLISHED, book_end__gt=timezone.now()
            ).order_by('book_end')[: 5])
        cls.lib.set_wechat_menu(cls.menu)
| gpl-3.0 |
lordkman/burnman | burnman/output_seismo.py | 3 | 11627 | # This file is part of BurnMan - a thermoelastic and thermodynamic toolkit for the Earth and Planetary Sciences
# Copyright (C) 2012 - 2017 by the BurnMan team, released under the GNU
# GPL v2 or later.
from __future__ import absolute_import
import numpy as np
import warnings
import scipy.integrate
import matplotlib.pyplot as plt
import pkgutil
from . import tools
from . import constants
from . import seismic
from . import geotherm
def write_axisem_input(rock, min_depth=670.e3, max_depth=2890.e3, T0=1900, filename='axisem_burnmantestrock.txt',
                       axisem_ref='axisem_prem_ani_noocean.txt', plotting=False):
    """
    Writing velocities and densities to AXISEM (www.axisem.info) input file
    Default is set to replacing the lower mantle with the BurnMan rock

    Note:
        - This implementation uses PREM to convert from depths to pressures to compute at
        - This implementation assumes an adiabatic temperature profile, only T0 at min_depth can be set
        - Currently, it only honors the discontinuities already in the synthetic input file, so it is best
          to only replace certain layers with burnman values (this should be improved in the future).

    Parameters
    ----------
    rock : burnman.Composite()
        Composition to implement in the model
    min_depth : float
        minimum depth to replace model (m) (default = 670 km)
    max_depth : float
        maximum depth to replace model (m) (default = 2890 km)
    T0 : float
        Anchor temperature at min_depth for adiabatic profile (K) (default=1900)
    filename: string
        Output filename (default ='axisem_burnmantestrock.txt')
    axisem_ref: string
        Input filename (in burnman/data/input_seismic/) (default = 'axisem_prem_ani_noocean.txt')
    plotting: Boolean
        True means plot of the old model and replaced model will be shown (default = False)
    """
    # Load reference input
    datastream = pkgutil.get_data('burnman', 'data/input_seismic/' + axisem_ref)
    lines = [line.strip()
             for line in datastream.decode('ascii').split('\n') if line.strip()]
    table = []
    for line in lines[18:]:
        # NOTE(review): np.fromstring(text, sep=' ') is deprecated in recent
        # NumPy; np.array(line.split(), dtype=float) is the modern form.
        numbers = np.fromstring(line, sep=' ')
        if len(numbers) > 0:
            if line[0] != "#" and line[0] != "%":
                table.append(numbers)
    table = np.array(table)
    # Reference model columns (AXISEM anisotropic format).
    ref_radius = table[:, 0]
    ref_depth = 6371.e3 - ref_radius
    ref_density = table[:, 1]
    ref_vpv = table[:, 2]
    ref_vsv = table[:, 3]
    ref_Qk = table[:, 4]
    ref_Qmu = table[:, 5]
    ref_vph = table[:, 6]
    ref_vsh = table[:, 7]
    ref_eta = table[:, 8]
    # Cutting out range to input in Axisem reference file (currently the lower mantle)
    indrange = [x for x in range(len(ref_depth)) if ref_depth[
        x] > min_depth and ref_depth[x] < max_depth]
    # pad both ends to include up to discontinuity, bit of a hack...
    indrange.insert(0, indrange[0] - 1)
    indrange.append(indrange[-1] + 1)
    # NOTE(review): unlike write_mineos_input, depthrange is *not* reversed
    # here although the comment suggests it should be -- confirm that the
    # adiabat is anchored at the intended end of the profile.
    # Invert depthrange so adiabatic computations work!
    depthrange = ref_depth[indrange]
    # convert depths to pressures
    pressures = seismic.PREM().pressure(depthrange)
    # Computing adiabatic temperatures. T0 is an input parameter!
    temperatures = geotherm.adiabatic(pressures, T0, rock)
    print("Calculations are done for:")
    rock.debug_print()
    rock_vp, rock_vs, rock_rho = rock.evaluate(
        ['v_p', 'v_s', 'density'], pressures, temperatures)
    # Counts the discontinuities encountered while writing (repeated radius).
    discontinuity = 0
    # WRITE OUT FILE -- a context manager guarantees the file is closed even
    # if one of the writes below raises.
    print('Writing ' + filename + ' ...')
    with open(filename, 'w') as f:
        f.write('# Input file '+ filename +' for AXISEM created using BurnMan, replacing ' + axisem_ref+ ' between ' +str(np.round(min_depth/1.e3)) + ' and ' + str(np.round(max_depth /1.e3)) +' km \n')
        f.write('NAME ' + filename + '\n')
        # NOTE(review): `lines` entries are already stripped, so line[:-1]
        # drops the last *character* of each header line, not a newline --
        # confirm this is intentional.
        for line in lines[2:18]:
            f.write(line[:-1] + '\n')
        # Reference model above the replaced range.
        for i in range(indrange[0]):
            if i > 0 and ref_radius[i] == ref_radius[i-1]:
                discontinuity = discontinuity + 1
                f.write('# Discontinuity ' + str(discontinuity) + ', depth: ' + str(np.round(ref_depth[i]/1.e3, decimals=2)) + ' km \n')
            f.write(
                '%8.0f %9.2f %9.2f %9.2f %9.1f %9.1f %9.2f %9.2f %9.5f \n' %
                (ref_radius[i], ref_density[i], ref_vpv[i], ref_vsv[i], ref_Qk[i],
                 ref_Qmu[i], ref_vph[i], ref_vsh[i], ref_eta[i]))
        # BurnMan rock inside the replaced range (isotropic: vp/vs reused
        # for both vertical and horizontal velocities).
        for i in range(indrange[0], indrange[-1]):
            ind2 = -1 + i - indrange[0]
            if ref_radius[i] == ref_radius[i-1]:
                discontinuity = discontinuity + 1
                f.write('# Discontinuity ' + str(discontinuity) + ', depth: ' + str(np.round(ref_depth[i]/1.e3, decimals=2)) + ' km \n')
            f.write(
                '%8.0f %9.2f %9.2f %9.2f %9.1f %9.1f %9.2f %9.2f %9.5f \n' %
                (ref_radius[i], rock_rho[ind2], rock_vp[ind2], rock_vs[ind2], ref_Qk[i],
                 ref_Qmu[i], rock_vp[ind2], rock_vs[ind2], ref_eta[i]))
        # Reference model below the replaced range.
        for i in range(indrange[-1], len(ref_radius)):
            if ref_radius[i] == ref_radius[i-1]:
                discontinuity = discontinuity + 1
                f.write('# Discontinuity ' + str(discontinuity) + ', depth: ' + str(np.round(ref_depth[i]/1.e3, decimals=2)) + ' km \n')
            f.write(
                '%8.0f %9.2f %9.2f %9.2f %9.1f %9.1f %9.2f %9.2f %9.5f \n' %
                (ref_radius[i], ref_density[i], ref_vpv[i], ref_vsv[i], ref_Qk[i],
                 ref_Qmu[i], ref_vph[i], ref_vsh[i], ref_eta[i]))
    if plotting:
        # plot vp
        plt.plot(ref_depth / 1.e3, ref_vph / 1.e3, color='g', linestyle='-', label='vp')
        plt.plot(depthrange / 1.e3, rock_vp / 1.e3, color='g', linestyle='-',
                 marker='o', markerfacecolor='g', markersize=1)
        # plot Vs
        plt.plot(ref_depth / 1.e3, ref_vsh / 1.e3, color='b', linestyle='-', label='vs')
        plt.plot(depthrange / 1.e3, rock_vs / 1.e3, color='b', linestyle='-',
                 marker='o', markerfacecolor='b', markersize=1)
        # plot density
        plt.plot(ref_depth / 1.e3, ref_density / 1.e3, color='r', linestyle='-', label='density')
        plt.plot(depthrange / 1.e3, rock_rho / 1.e3, color='r', linestyle='-',
                 marker='o', markerfacecolor='r', markersize=1)
        plt.title(filename + ' = ' + axisem_ref + ' replaced between ' +
                  str(min_depth / 1.e3) + ' and ' + str(max_depth / 1.e3) + ' km')
        plt.legend(loc='lower right')
        plt.show()
def write_mineos_input(rock, min_depth=670.e3, max_depth=2890.e3, T0=1900, filename='mineos_burnmantestrock.txt',
                       mineos_ref='mineos_prem_noocean.txt', plotting=False):
    """
    Writing velocities and densities to Mineos (https://geodynamics.org/cig/software/mineos/) input file
    Default is set to replacing the lower mantle with the BurnMan rock

    Note:
        - This implementation uses PREM to convert from depths to pressures to compute at
        - This implementation assumes an adiabatic temperature profile, only T0 at min_depth can be set
        - Currently, it only honors the discontinuities already in the synthetic input file, so it is best
          to only replace certain layers with burnman values (this should be improved in the future).

    Parameters
    ----------
    rock : burnman.Composite()
        Composition to implement in the model
    min_depth : float
        minimum depth to replace model (m) (default = 670 km)
    max_depth : float
        maximum depth to replace model (m) (default = 2890 km)
    T0 : float
        Anchor temperature at min_depth for adiabatic profile (K) (default=1900)
    filename: string
        Output filename (default ='mineos_burnmantestrock.txt')
    mineos_ref: string
        Input filename (in burnman/data/input_seismic/) (default = 'mineos_prem_noocean.txt')
    plotting: Boolean
        True means plot of the old model and replaced model will be shown (default = False)
    """
    # Load reference input
    datastream = pkgutil.get_data('burnman', 'data/input_seismic/' + mineos_ref)
    lines = [line.strip()
             for line in datastream.decode('ascii').split('\n') if line.strip()]
    table = []
    for line in lines[3:]:
        # NOTE(review): np.fromstring(text, sep=' ') is deprecated in recent
        # NumPy; np.array(line.split(), dtype=float) is the modern form.
        numbers = np.fromstring(line, sep=' ')
        table.append(numbers)
    table = np.array(table)
    # Reference model columns (Mineos tabular format).
    ref_radius = table[:, 0]
    ref_depth = 6371.e3 - ref_radius
    ref_density = table[:, 1]
    ref_vpv = table[:, 2]
    ref_vsv = table[:, 3]
    ref_Qk = table[:, 4]
    ref_Qmu = table[:, 5]
    ref_vph = table[:, 6]
    ref_vsh = table[:, 7]
    ref_eta = table[:, 8]
    # Cutting out range to input in Mineos (currently the lower mantle)
    indrange = [x for x in range(len(ref_depth)) if ref_depth[
        x] > min_depth and ref_depth[x] < max_depth]
    # pad both ends to include up to discontinuity, bit of a hack...
    indrange.insert(0, indrange[0] - 1)
    indrange.append(indrange[-1] + 1)
    # Invert depthrange so adiabatic computations work!
    depthrange = ref_depth[indrange][::-1]
    # convert depths to pressures
    pressures = seismic.PREM().pressure(depthrange)
    # Computing adiabatic temperatures. T0 is an input parameter!
    temperatures = geotherm.adiabatic(pressures, T0, rock)
    print("Calculations are done for:")
    rock.debug_print()
    rock_vp, rock_vs, rock_rho = rock.evaluate(
        ['v_p', 'v_s', 'density'], pressures, temperatures)
    # WRITE OUT FILE -- a context manager guarantees the file is closed even
    # if one of the writes below raises.
    print('Writing ' + filename + ' ...')
    with open(filename, 'w') as f:
        # NOTE(review): `lines` entries are already stripped, so [:-2] drops
        # the final two characters of the header text itself -- confirm this
        # is intentional.
        f.write(lines[0][:-2] + ' + ' + filename + '\n')
        for line in lines[1:3]:
            f.write(line[:-2] + '\n')
        # Reference model above the replaced range.
        for i in range(indrange[0]):
            f.write(
                '%8.0f %9.2f %9.2f %9.2f %9.1f %9.1f %9.2f %9.2f %9.5f \n' %
                (ref_radius[i], ref_density[i], ref_vpv[i], ref_vsv[i], ref_Qk[i],
                 ref_Qmu[i], ref_vph[i], ref_vsh[i], ref_eta[i]))
        # BurnMan rock inside the replaced range; ind2 walks the (reversed)
        # rock arrays from the end to match the reversed depthrange above.
        for i in range(indrange[0], indrange[-1]):
            ind2 = -1 - i + indrange[0]
            f.write(
                '%8.0f %9.2f %9.2f %9.2f %9.1f %9.1f %9.2f %9.2f %9.5f \n' %
                (ref_radius[i], rock_rho[ind2], rock_vp[ind2], rock_vs[ind2], ref_Qk[i],
                 ref_Qmu[i], rock_vp[ind2], rock_vs[ind2], ref_eta[i]))
        # Reference model below the replaced range.
        for i in range(indrange[-1], len(ref_radius)):
            f.write(
                '%8.0f %9.2f %9.2f %9.2f %9.1f %9.1f %9.2f %9.2f %9.5f \n' %
                (ref_radius[i], ref_density[i], ref_vpv[i], ref_vsv[i], ref_Qk[i],
                 ref_Qmu[i], ref_vph[i], ref_vsh[i], ref_eta[i]))
    if plotting:
        # plot vp
        plt.plot(ref_depth / 1.e3, ref_vph / 1.e3, color='g', linestyle='-', label='vp')
        plt.plot(depthrange / 1.e3, rock_vp / 1.e3, color='g', linestyle='-',
                 marker='o', markerfacecolor='g', markersize=1)
        # plot Vs
        plt.plot(ref_depth / 1.e3, ref_vsh / 1.e3, color='b', linestyle='-', label='vs')
        plt.plot(depthrange / 1.e3, rock_vs / 1.e3, color='b', linestyle='-',
                 marker='o', markerfacecolor='b', markersize=1)
        # plot density
        plt.plot(ref_depth / 1.e3, ref_density / 1.e3, color='r', linestyle='-', label='density')
        plt.plot(depthrange / 1.e3, rock_rho / 1.e3, color='r', linestyle='-',
                 marker='o', markerfacecolor='r', markersize=1)
        plt.title(filename + ' = ' + mineos_ref + ' replaced between ' +
                  str(min_depth / 1.e3) + ' and ' + str(max_depth / 1.e3) + ' km')
        plt.legend(loc='lower right')
        plt.show()
| gpl-2.0 |
noironetworks/neutron | neutron/tests/unit/objects/plugins/ml2/test_flatallocation.py | 5 | 1063 | # Copyright (c) 2016 Intel Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.objects.plugins.ml2 import flatallocation
from neutron.tests.unit.objects import test_base
from neutron.tests.unit import testlib_api
class FlatAllocationIfaceObjTestCase(test_base.BaseObjectIfaceTestCase):
    """Interface-level (no database) contract tests for FlatAllocation."""
    _test_class = flatallocation.FlatAllocation
class FlatAllocationDbObjTestCase(test_base.BaseDbObjectTestCase,
                                  testlib_api.SqlTestCase):
    """Database-backed persistence tests for the FlatAllocation object."""
    _test_class = flatallocation.FlatAllocation
| apache-2.0 |
tomaslu/map_editor | authentication/views.py | 1 | 2770 | from django.shortcuts import render
from django.http.response import HttpResponse, HttpResponseBadRequest,\
HttpResponseRedirect
from django.views.generic.base import View
from authentication.forms import RegisterForm, LoginForm
from django.contrib.auth.models import User
from lib.utils import RequestUtils
from django.contrib.auth import authenticate, login, logout
from authentication.exceptions import AuthenticationException
from django.contrib.auth.decorators import login_required
# Create your views here.
class RegisterView(View):
    """Creates a new Django auth user from POSTed registration data."""
    def post(self, request, *args, **kwargs):
        # RequestUtils.get_parameters presumably normalises the request
        # parameters into a plain dict -- confirm in lib.utils.
        data = RequestUtils.get_parameters(request)
        form = RegisterForm(data)
        if form.is_valid():
            # create_user hashes the password before storing it.
            user = User.objects.create_user(
                form.cleaned_data['username'],
                form.cleaned_data['email'],
                form.cleaned_data['password'],
            )
            # Optional profile fields are only set when supplied.
            if form.cleaned_data['first_name']:
                user.first_name = form.cleaned_data['first_name']
            if form.cleaned_data['last_name']:
                user.last_name = form.cleaned_data['last_name']
            user.save()
            return HttpResponse('user is created')
        else:
            # NOTE(review): form.errors is returned verbatim to the client.
            return HttpResponseBadRequest(str(form.errors))
    def get(self, request, *args, **kwargs):
        # Placeholder response; no registration template is rendered yet.
        return HttpResponse('get for registering')
class LoginView(View):
    """Authenticates a user and opens a session via django.contrib.auth."""
    def get(self, request, *args, **kwargs):
        # Placeholder response; no login template is rendered yet.
        return HttpResponse('get for login')
    def post(self, request, *args, **kwargs):
        data = RequestUtils.get_parameters(request)
        form = LoginForm(data)
        try:
            if form.is_valid():
                user = authenticate(username=form.cleaned_data['username'], password=form.cleaned_data['password'])
                if user is not None:
                    if user.is_active:
                        login(request, user)
                        return HttpResponse('login valid')
                    else:
                        # NOTE(review): AuthenticationException is raised here
                        # and below, but the except clause only catches
                        # User.DoesNotExist, so these propagate as a server
                        # error -- confirm whether middleware handles them.
                        raise AuthenticationException('User is not active')
                else:
                    raise AuthenticationException('Can not authenticate user')
            else:
                # NOTE(review): debug print left in; consider logging instead.
                print(form.errors)
                return HttpResponse('can not login')
        except User.DoesNotExist:
            # authenticate() does not normally raise this; the guard is
            # presumably defensive -- confirm.
            return HttpResponse('can not login')
def logout_user(request, *args, **kwargs):
    """Ends the current session and redirects to the site root."""
    logout(request)
    return HttpResponseRedirect('/')
@login_required(login_url='/#/login')
def authenticated_page(request):
    """Renders a page visible only to logged-in users (others are redirected)."""
    context = {'user': request.user}
    return render(request, 'authenticated_page.html', context)
jonasjberg/autonameow | autonameow/vendor/chardet/sjisprober.py | 1777 | 3764 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
import sys
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import SJISDistributionAnalysis
from .jpcntx import SJISContextAnalysis
from .mbcssm import SJISSMModel
from . import constants
class SJISProber(MultiByteCharSetProber):
    """Shift_JIS charset prober: combines a byte-level coding state machine
    with character-distribution and context analyses."""
    def __init__(self):
        MultiByteCharSetProber.__init__(self)
        self._mCodingSM = CodingStateMachine(SJISSMModel)
        self._mDistributionAnalyzer = SJISDistributionAnalysis()
        self._mContextAnalyzer = SJISContextAnalysis()
        self.reset()
    def reset(self):
        MultiByteCharSetProber.reset(self)
        self._mContextAnalyzer.reset()
    def get_charset_name(self):
        # Delegated: the context analyzer distinguishes SHIFT_JIS variants.
        return self._mContextAnalyzer.get_charset_name()
    def feed(self, aBuf):
        # Feed one chunk of bytes through the state machine; the analyzers
        # are updated each time a complete character is seen (eStart).
        aLen = len(aBuf)
        for i in range(0, aLen):
            codingState = self._mCodingSM.next_state(aBuf[i])
            if codingState == constants.eError:
                if constants._debug:
                    sys.stderr.write(self.get_charset_name()
                                     + ' prober hit error at byte ' + str(i)
                                     + '\n')
                # Illegal byte sequence: this buffer is not Shift_JIS.
                self._mState = constants.eNotMe
                break
            elif codingState == constants.eItsMe:
                self._mState = constants.eFoundIt
                break
            elif codingState == constants.eStart:
                charLen = self._mCodingSM.get_current_charlen()
                if i == 0:
                    # The character may have started in the previous chunk;
                    # _mLastChar carries its bytes across feed() calls.
                    self._mLastChar[1] = aBuf[0]
                    self._mContextAnalyzer.feed(self._mLastChar[2 - charLen:],
                                                charLen)
                    self._mDistributionAnalyzer.feed(self._mLastChar, charLen)
                else:
                    self._mContextAnalyzer.feed(aBuf[i + 1 - charLen:i + 3
                                                - charLen], charLen)
                    self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1],
                                                     charLen)
        # Remember the chunk's final byte for the next feed() call.
        self._mLastChar[0] = aBuf[aLen - 1]
        if self.get_state() == constants.eDetecting:
            # Shortcut: stop probing once confidence is decisively high.
            if (self._mContextAnalyzer.got_enough_data() and
                    (self.get_confidence() > constants.SHORTCUT_THRESHOLD)):
                self._mState = constants.eFoundIt
        return self.get_state()
    def get_confidence(self):
        # Report the more confident of the two independent analyses.
        contxtCf = self._mContextAnalyzer.get_confidence()
        distribCf = self._mDistributionAnalyzer.get_confidence()
        return max(contxtCf, distribCf)
| gpl-2.0 |
bdarnell/codegenloader | codegenloader/protobuf.py | 1 | 1724 | from __future__ import with_statement
import os
import shutil
import subprocess
import tempfile
from codegenloader.base import CodeGenLoader
class ProtobufLoader(CodeGenLoader):
    """Import hook that generates ``*_pb2`` modules by running ``protoc``
    on the matching ``.proto`` file at import time."""
    def protoname(self, relname):
        # Map a generated module name (foo_pb2) to its .proto source path.
        assert relname.endswith("_pb2")
        relname = relname[:-len("_pb2")]
        return os.path.abspath(os.path.join(self.basedir, relname + '.proto'))
    def can_generate(self, relname):
        # Only *_pb2 modules whose .proto source actually exists on disk
        # can be generated.
        if relname.endswith("_pb2"):
            return os.path.exists(self.protoname(relname))
        else:
            return False
    def generate(self, relname):
        """Run protoc in a scratch directory and store the generated module."""
        tempdir = tempfile.mkdtemp(prefix='codegenloader')
        try:
            protodir, protofile = os.path.split(self.protoname(relname))
            # protoc writes <relname>.py relative to the cwd (the tempdir).
            subprocess.check_call(
                ["protoc",
                 "--python_out=.",
                 "--proto_path=%s" % protodir,
                 self.protoname(relname)],
                cwd=tempdir)
            relpath = relname + ".py"
            with open(os.path.join(tempdir, relpath)) as f:
                # store_contents presumably hands the generated source to the
                # CodeGenLoader machinery for import -- confirm in base.py.
                self.store_contents(relpath, f.read())
        finally:
            shutil.rmtree(tempdir)
def make_path(modname, basedir):
    """Returns an object to be set as ``__path__``.

    This is the visible entry point to this module. To use it,
    assign the result of this function to ``__path__``::

        import codegenloader.protobuf
        __path__ = codegenloader.protobuf.make_path(__name__, "proto")

    The first argument should always be ``__name__``; the second is a
    directory name that contains the ``.proto`` files (relative to the
    file where `make_path` is called).
    """
    return ProtobufLoader.register(modname, basedir)
| mit |
mtp401/airflow | airflow/operators/mysql_operator.py | 5 | 1301 | import logging
from airflow.hooks import MySqlHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class MySqlOperator(BaseOperator):
    """
    Executes sql code in a specific MySQL database

    :param mysql_conn_id: reference to a specific mysql database
    :type mysql_conn_id: string
    :param sql: the sql code to be executed
    :type sql: Can receive a str representing a sql statement,
        a list of str (sql statements), or reference to a template file.
        Template reference are recognized by str ending in '.sql'
    :param parameters: the parameters to render the SQL query with
    :type parameters: mapping or iterable
    :param autocommit: passed through to the hook; when True each statement
        is committed immediately
    :type autocommit: bool
    """
    # 'sql' is templatable; '.sql' files are read and rendered by Airflow.
    template_fields = ('sql',)
    template_ext = ('.sql',)
    ui_color = '#ededed'
    @apply_defaults
    def __init__(
            self, sql, mysql_conn_id='mysql_default', parameters=None,
            autocommit=False, *args, **kwargs):
        super(MySqlOperator, self).__init__(*args, **kwargs)
        self.mysql_conn_id = mysql_conn_id
        self.sql = sql
        self.autocommit = autocommit
        self.parameters = parameters
    def execute(self, context):
        """Run self.sql against the configured MySQL connection via the hook."""
        logging.info('Executing: ' + str(self.sql))
        hook = MySqlHook(mysql_conn_id=self.mysql_conn_id)
        hook.run(
            self.sql,
            autocommit=self.autocommit,
            parameters=self.parameters)
| apache-2.0 |
ProfessionalIT/professionalit-webiste | sdk/google_appengine/lib/django-1.3/django/utils/formats.py | 159 | 6835 | import decimal
import datetime
from django.conf import settings
from django.utils.translation import get_language, to_locale, check_for_language
from django.utils.importlib import import_module
from django.utils.encoding import smart_str
from django.utils import dateformat, numberformat, datetime_safe
from django.utils.safestring import mark_safe
# format_cache is a mapping from (format_type, lang) to the format string.
# By using the cache, it is possible to avoid running get_format_modules
# repeatedly.
_format_cache = {}
_format_modules_cache = {}
def reset_format_cache():
    """Empty both format caches.

    Provided primarily so tests can undo the effect of previously
    cached format lookups.
    """
    global _format_cache, _format_modules_cache
    _format_cache, _format_modules_cache = {}, {}
def iter_format_modules(lang):
    """Yield every format module applicable to ``lang``.

    Modules named by ``settings.FORMAT_MODULE_PATH`` are yielded before
    Django's bundled ``django.conf.locale`` ones, and a full locale
    (e.g. ``pt_BR``) is tried before its bare language (``pt``).
    Unknown languages yield nothing.
    """
    if not check_for_language(lang):
        return
    locations = ['django.conf.locale.%s']
    if settings.FORMAT_MODULE_PATH:
        locations.append(settings.FORMAT_MODULE_PATH + '.%s')
    # Custom format modules take precedence over the bundled ones.
    locations.reverse()
    locale = to_locale(lang)
    candidates = [locale]
    if '_' in locale:
        candidates.append(locale.split('_')[0])
    for pattern in locations:
        for candidate in candidates:
            try:
                yield import_module('.formats', pattern % candidate)
            except ImportError:
                pass
def get_format_modules(reverse=False):
    """Return the (cached) list of format modules for the active language."""
    lang = get_language()
    # setdefault keeps a single cached list per language; note its second
    # argument is evaluated eagerly, matching the historical behavior.
    modules = _format_modules_cache.setdefault(
        lang, list(iter_format_modules(lang)))
    if reverse:
        modules = list(reversed(modules))
    return modules
def get_format(format_type, lang=None, use_l10n=None):
    """Return the format named ``format_type`` (e.g. 'DATE_FORMAT') for
    the current language, defaulting to the value in the settings.

    ``use_l10n``, when not None, forces localization on or off,
    overriding ``settings.USE_L10N``.
    """
    format_type = smart_str(format_type)
    if use_l10n or (use_l10n is None and settings.USE_L10N):
        if lang is None:
            lang = get_language()
        cache_key = (format_type, lang)
        if cache_key in _format_cache:
            # A cached None means "no locale-specific override exists":
            # fall through to the settings value.
            return _format_cache[cache_key] or getattr(settings, format_type)
        for module in get_format_modules():
            try:
                val = getattr(module, format_type)
            except AttributeError:
                continue
            _format_cache[cache_key] = val
            return val
        # Negative-cache the miss so the module scan runs only once.
        _format_cache[cache_key] = None
    return getattr(settings, format_type)
def date_format(value, format=None, use_l10n=None):
    """Format a ``datetime.date``/``datetime.datetime`` with a localizable
    format.

    ``use_l10n``, when not None, forces localization on or off,
    overriding ``settings.USE_L10N``.
    """
    fmt = get_format(format or 'DATE_FORMAT', use_l10n=use_l10n)
    return dateformat.format(value, fmt)
def time_format(value, format=None, use_l10n=None):
    """Format a ``datetime.time`` with a localizable format.

    ``use_l10n``, when not None, forces localization on or off,
    overriding ``settings.USE_L10N``.
    """
    fmt = get_format(format or 'TIME_FORMAT', use_l10n=use_l10n)
    return dateformat.time_format(value, fmt)
def number_format(value, decimal_pos=None, use_l10n=None):
    """Format a numeric value using the locale's separators and grouping.

    ``use_l10n``, when not None, forces localization on or off,
    overriding ``settings.USE_L10N``.
    """
    localized = use_l10n or (use_l10n is None and settings.USE_L10N)
    lang = get_language() if localized else None
    return numberformat.format(
        value,
        get_format('DECIMAL_SEPARATOR', lang, use_l10n=use_l10n),
        decimal_pos,
        get_format('NUMBER_GROUPING', lang, use_l10n=use_l10n),
        get_format('THOUSAND_SEPARATOR', lang, use_l10n=use_l10n),
    )
def localize(value, use_l10n=None):
    """Render a localizable value (bool, number, date/time) as a string
    in the current locale format; other values pass through unchanged.

    ``use_l10n``, when not None, forces localization on or off,
    overriding ``settings.USE_L10N``.
    """
    if isinstance(value, bool):
        # bool is tested before the numeric types because bool is an
        # int subclass and would otherwise be formatted as a number.
        return mark_safe(unicode(value))
    if isinstance(value, (decimal.Decimal, float, int, long)):
        return number_format(value, use_l10n=use_l10n)
    if isinstance(value, datetime.datetime):
        # datetime before date: datetime is a date subclass.
        return date_format(value, 'DATETIME_FORMAT', use_l10n=use_l10n)
    if isinstance(value, datetime.date):
        return date_format(value, use_l10n=use_l10n)
    if isinstance(value, datetime.time):
        return time_format(value, 'TIME_FORMAT', use_l10n=use_l10n)
    return value
def localize_input(value, default=None):
    """Format an input-widget value with the locale's *input* format
    (the first entry of the relevant ``*_INPUT_FORMATS`` setting unless
    ``default`` is given); non-localizable values pass through unchanged.
    """
    if isinstance(value, (decimal.Decimal, float, int, long)):
        return number_format(value)
    if isinstance(value, datetime.datetime):
        fmt = smart_str(default or get_format('DATETIME_INPUT_FORMATS')[0])
        return datetime_safe.new_datetime(value).strftime(fmt)
    if isinstance(value, datetime.date):
        fmt = smart_str(default or get_format('DATE_INPUT_FORMATS')[0])
        return datetime_safe.new_date(value).strftime(fmt)
    if isinstance(value, datetime.time):
        fmt = smart_str(default or get_format('TIME_INPUT_FORMATS')[0])
        return value.strftime(fmt)
    return value
def sanitize_separators(value):
    """Normalize a user-typed number string to a plain '.'-decimal form,
    stripping locale thousand separators.  Used with form field input;
    only acts when ``settings.USE_L10N`` is enabled.
    """
    if settings.USE_L10N:
        decimal_separator = get_format('DECIMAL_SEPARATOR')
        if isinstance(value, basestring):
            decimals = None
            if decimal_separator in value:
                value, decimals = value.split(decimal_separator, 1)
            if settings.USE_THOUSAND_SEPARATOR:
                value = value.replace(get_format('THOUSAND_SEPARATOR'), '')
            if decimals is not None:
                value = '.'.join((value, decimals))
    return value
| lgpl-3.0 |
igutekunst/lol-pandora | pandora/blowfish.py | 6 | 21268 | # Copyright (C) 2011 Versile AS
#
# This file is part of Versile Python Open Source Edition.
#
# Versile Python Open Source Edition is free software: you can
# redistribute it and/or modify it under the terms of the GNU Affero
# General Public License as published by the Free Software Foundation,
# either version 3 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/>.
#
# Versile Python Open Source Edition implements Versile Platform which
# is a copyrighted specification that is not part of this software.
# Modification of the software is subject to Versile Platform licensing,
# see https://versile.com/ for details. Distribution of unmodified versions
# released by Versile AS is not subject to Versile Platform licensing.
#
"""Implementation of the Blowfish cipher.
This is an implementation of the `Blowfish
<http://www.schneier.com/blowfish.html>`__ cipher, which is in the
public domain. Stated on the web site:
\"Everyone is welcome to download Blowfish and use it in their
application. There are no rules about use, although I would
appreciate being notified of any commercial applications using
the product so that I can list them on this website.\"
The implementation in this module is a python conversion and
adaptation of Bruce Schneier's `C implementation
<http://www.schneier.com/blowfish-download.html>`__\ .
"""
import copy
class VCryptoException(Exception):
    """Exception for crypto operations.

    Behaves exactly like ``Exception``; the subclass exists only so
    cryptographic failures can be caught distinctly.  (The previous
    ``__init__`` override merely delegated to the base class and has
    been removed as redundant.)
    """
class Blowfish(object):
    """Blowfish cipher.

    When initialized the object can encrypt and decrypt 8-byte aligned
    blocks of data with the :meth:`encrypt` and :meth:`decrypt` methods.

    :param key: cipher key
    :type key: bytes

    Key length must be between 1 byte and 56 bytes (448 bits); an empty
    or over-long key raises :exc:`VCryptoException`.
    """

    def __init__(self, key):
        if not isinstance(key, bytes):
            raise VCryptoException('Key must be a bytes object')
        elif not key:
            # Bug fix: an empty key previously crashed with an obscure
            # IndexError inside the key schedule; the documented minimum
            # key length is 1 byte, so fail fast with a clear error.
            raise VCryptoException('Key must be at least 1 byte')
        elif len(key) > 56:
            raise VCryptoException('Max key length is 448 bits (56 bytes)')

        # Per-instance copies of the standard subkey / S-box tables.
        P, S = copy.deepcopy(_P_INIT), copy.deepcopy(_S_INIT)
        # Bind to the instance *before* running the key schedule below:
        # encrypt() reads self.__P / self.__S while P and S are still
        # being rewritten in place.
        self.__P, self.__S = P, S

        # Key schedule phase 1: XOR the key (repeated cyclically) into
        # the P-array, 32 bits at a time, most significant byte first.
        keylen = len(key)
        j = 0
        for i in range(len(P)):
            data = 0
            for _ in range(4):
                data = ((data << 8) & 0xffffffff) | key[j]
                j = (j + 1) % keylen
            P[i] ^= data

        # Key schedule phase 2: repeatedly encrypt an all-zero block,
        # replacing the P-array and then all four S-boxes with the
        # successive cipher outputs.
        data = bytes(8)
        for i in range(0, len(P), 2):
            data = self.encrypt(data)
            P[i], P[i + 1] = self._split_block(data)
        for box in range(4):
            for j in range(0, 256, 2):
                data = self.encrypt(data)
                S[box][j], S[box][j + 1] = self._split_block(data)

    @staticmethod
    def _split_block(block):
        """Unpack an 8-byte block into two big-endian 32-bit halves."""
        if not isinstance(block, bytes) or len(block) != 8:
            raise VCryptoException('Data block must be bytes of len 8')
        left = (block[0] << 24) | (block[1] << 16) | (block[2] << 8) | block[3]
        right = (block[4] << 24) | (block[5] << 16) | (block[6] << 8) | block[7]
        return left, right

    @staticmethod
    def _join_block(left, right):
        """Pack two 32-bit halves back into an 8-byte big-endian block."""
        vals = (left >> 24, left >> 16, left >> 8, left,
                right >> 24, right >> 16, right >> 8, right)
        return bytes([v & 0xff for v in vals])

    def __feistel(self, x):
        """Blowfish F-function: mix the four bytes of the 32-bit
        half-block *x* through the S-boxes."""
        S = self.__S
        d = x & 0xff
        c = (x >> 8) & 0xff
        b = (x >> 16) & 0xff
        a = (x >> 24) & 0xff
        y = (S[0][a] + S[1][b]) & 0xffffffff
        y ^= S[2][c]
        return (y + S[3][d]) & 0xffffffff

    def encrypt(self, data):
        """Encipher plaintext and return result.

        :param data: plaintext to encrypt, length a multiple of 8 bytes
        :type data: bytes
        :returns: encrypted data of the same length
        :rtype: bytes

        .. note::

            Blocks are enciphered independently (no chaining), so equal
            plaintext blocks always produce equal ciphertext.  For secure
            use, combine the cipher with a chaining technique.
        """
        len_data = len(data)
        if len_data % 8:
            raise VCryptoException('Data not aligned with 8-byte blocksize')
        if len_data == 8:
            return self._encrypt_block(data)
        return b''.join(self._encrypt_block(data[off:off + 8])
                        for off in range(0, len_data, 8))

    def _encrypt_block(self, block):
        """Encipher a single 8-byte block (16-round Feistel network)."""
        b_l, b_r = self._split_block(block)
        P = self.__P
        for i in range(16):
            b_l ^= P[i]
            b_r ^= self.__feistel(b_l)
            b_l, b_r = b_r, b_l
        b_l, b_r = b_r, b_l  # undo the extra swap from the final round
        b_r ^= P[16]
        b_l ^= P[17]
        return self._join_block(b_l, b_r)

    def decrypt(self, data):
        """Decipher encrypted data and return decrypted plaintext.

        :param data: encrypted data, length a multiple of 8 bytes
        :type data: bytes
        :returns: decrypted plaintext of the same length
        :rtype: bytes
        """
        len_data = len(data)
        if len_data % 8:
            raise VCryptoException('Data not aligned with 8-byte blocksize')
        if len_data == 8:
            return self._decrypt_block(data)
        return b''.join(self._decrypt_block(data[off:off + 8])
                        for off in range(0, len_data, 8))

    def _decrypt_block(self, block):
        """Decipher a single 8-byte block (rounds applied in reverse)."""
        b_l, b_r = self._split_block(block)
        P = self.__P
        for i in range(17, 1, -1):
            b_l ^= P[i]
            b_r ^= self.__feistel(b_l)
            b_l, b_r = b_r, b_l
        b_l, b_r = b_r, b_l  # undo the extra swap from the final round
        b_r ^= P[1]
        b_l ^= P[0]
        return self._join_block(b_l, b_r)
# These are the standard initialization valies of P and S blocks for the
# cipher. The constants are internal to this module and should not be accessed
# directly or modified by outside code.
_P_INIT = [0x243f6a88,0x85a308d3,0x13198a2e,0x03707344,0xa4093822,0x299f31d0,
0x082efa98,0xec4e6c89,0x452821e6,0x38d01377,0xbe5466cf,0x34e90c6c,
0xc0ac29b7,0xc97c50dd,0x3f84d5b5,0xb5470917,0x9216d5d9,0x8979fb1b]
_S_INIT = [[0xd1310ba6,0x98dfb5ac,0x2ffd72db,0xd01adfb7,0xb8e1afed,0x6a267e96,
0xba7c9045,0xf12c7f99,0x24a19947,0xb3916cf7,0x0801f2e2,0x858efc16,
0x636920d8,0x71574e69,0xa458fea3,0xf4933d7e,0x0d95748f,0x728eb658,
0x718bcd58,0x82154aee,0x7b54a41d,0xc25a59b5,0x9c30d539,0x2af26013,
0xc5d1b023,0x286085f0,0xca417918,0xb8db38ef,0x8e79dcb0,0x603a180e,
0x6c9e0e8b,0xb01e8a3e,0xd71577c1,0xbd314b27,0x78af2fda,0x55605c60,
0xe65525f3,0xaa55ab94,0x57489862,0x63e81440,0x55ca396a,0x2aab10b6,
0xb4cc5c34,0x1141e8ce,0xa15486af,0x7c72e993,0xb3ee1411,0x636fbc2a,
0x2ba9c55d,0x741831f6,0xce5c3e16,0x9b87931e,0xafd6ba33,0x6c24cf5c,
0x7a325381,0x28958677,0x3b8f4898,0x6b4bb9af,0xc4bfe81b,0x66282193,
0x61d809cc,0xfb21a991,0x487cac60,0x5dec8032,0xef845d5d,0xe98575b1,
0xdc262302,0xeb651b88,0x23893e81,0xd396acc5,0x0f6d6ff3,0x83f44239,
0x2e0b4482,0xa4842004,0x69c8f04a,0x9e1f9b5e,0x21c66842,0xf6e96c9a,
0x670c9c61,0xabd388f0,0x6a51a0d2,0xd8542f68,0x960fa728,0xab5133a3,
0x6eef0b6c,0x137a3be4,0xba3bf050,0x7efb2a98,0xa1f1651d,0x39af0176,
0x66ca593e,0x82430e88,0x8cee8619,0x456f9fb4,0x7d84a5c3,0x3b8b5ebe,
0xe06f75d8,0x85c12073,0x401a449f,0x56c16aa6,0x4ed3aa62,0x363f7706,
0x1bfedf72,0x429b023d,0x37d0d724,0xd00a1248,0xdb0fead3,0x49f1c09b,
0x075372c9,0x80991b7b,0x25d479d8,0xf6e8def7,0xe3fe501a,0xb6794c3b,
0x976ce0bd,0x04c006ba,0xc1a94fb6,0x409f60c4,0x5e5c9ec2,0x196a2463,
0x68fb6faf,0x3e6c53b5,0x1339b2eb,0x3b52ec6f,0x6dfc511f,0x9b30952c,
0xcc814544,0xaf5ebd09,0xbee3d004,0xde334afd,0x660f2807,0x192e4bb3,
0xc0cba857,0x45c8740f,0xd20b5f39,0xb9d3fbdb,0x5579c0bd,0x1a60320a,
0xd6a100c6,0x402c7279,0x679f25fe,0xfb1fa3cc,0x8ea5e9f8,0xdb3222f8,
0x3c7516df,0xfd616b15,0x2f501ec8,0xad0552ab,0x323db5fa,0xfd238760,
0x53317b48,0x3e00df82,0x9e5c57bb,0xca6f8ca0,0x1a87562e,0xdf1769db,
0xd542a8f6,0x287effc3,0xac6732c6,0x8c4f5573,0x695b27b0,0xbbca58c8,
0xe1ffa35d,0xb8f011a0,0x10fa3d98,0xfd2183b8,0x4afcb56c,0x2dd1d35b,
0x9a53e479,0xb6f84565,0xd28e49bc,0x4bfb9790,0xe1ddf2da,0xa4cb7e33,
0x62fb1341,0xcee4c6e8,0xef20cada,0x36774c01,0xd07e9efe,0x2bf11fb4,
0x95dbda4d,0xae909198,0xeaad8e71,0x6b93d5a0,0xd08ed1d0,0xafc725e0,
0x8e3c5b2f,0x8e7594b7,0x8ff6e2fb,0xf2122b64,0x8888b812,0x900df01c,
0x4fad5ea0,0x688fc31c,0xd1cff191,0xb3a8c1ad,0x2f2f2218,0xbe0e1777,
0xea752dfe,0x8b021fa1,0xe5a0cc0f,0xb56f74e8,0x18acf3d6,0xce89e299,
0xb4a84fe0,0xfd13e0b7,0x7cc43b81,0xd2ada8d9,0x165fa266,0x80957705,
0x93cc7314,0x211a1477,0xe6ad2065,0x77b5fa86,0xc75442f5,0xfb9d35cf,
0xebcdaf0c,0x7b3e89a0,0xd6411bd3,0xae1e7e49,0x00250e2d,0x2071b35e,
0x226800bb,0x57b8e0af,0x2464369b,0xf009b91e,0x5563911d,0x59dfa6aa,
0x78c14389,0xd95a537f,0x207d5ba2,0x02e5b9c5,0x83260376,0x6295cfa9,
0x11c81968,0x4e734a41,0xb3472dca,0x7b14a94a,0x1b510052,0x9a532915,
0xd60f573f,0xbc9bc6e4,0x2b60a476,0x81e67400,0x08ba6fb5,0x571be91f,
0xf296ec6b,0x2a0dd915,0xb6636521,0xe7b9f9b6,0xff34052e,0xc5855664,
0x53b02d5d,0xa99f8fa1,0x08ba4799,0x6e85076a],
[0x4b7a70e9,0xb5b32944,0xdb75092e,0xc4192623,0xad6ea6b0,0x49a7df7d,
0x9cee60b8,0x8fedb266,0xecaa8c71,0x699a17ff,0x5664526c,0xc2b19ee1,
0x193602a5,0x75094c29,0xa0591340,0xe4183a3e,0x3f54989a,0x5b429d65,
0x6b8fe4d6,0x99f73fd6,0xa1d29c07,0xefe830f5,0x4d2d38e6,0xf0255dc1,
0x4cdd2086,0x8470eb26,0x6382e9c6,0x021ecc5e,0x09686b3f,0x3ebaefc9,
0x3c971814,0x6b6a70a1,0x687f3584,0x52a0e286,0xb79c5305,0xaa500737,
0x3e07841c,0x7fdeae5c,0x8e7d44ec,0x5716f2b8,0xb03ada37,0xf0500c0d,
0xf01c1f04,0x0200b3ff,0xae0cf51a,0x3cb574b2,0x25837a58,0xdc0921bd,
0xd19113f9,0x7ca92ff6,0x94324773,0x22f54701,0x3ae5e581,0x37c2dadc,
0xc8b57634,0x9af3dda7,0xa9446146,0x0fd0030e,0xecc8c73e,0xa4751e41,
0xe238cd99,0x3bea0e2f,0x3280bba1,0x183eb331,0x4e548b38,0x4f6db908,
0x6f420d03,0xf60a04bf,0x2cb81290,0x24977c79,0x5679b072,0xbcaf89af,
0xde9a771f,0xd9930810,0xb38bae12,0xdccf3f2e,0x5512721f,0x2e6b7124,
0x501adde6,0x9f84cd87,0x7a584718,0x7408da17,0xbc9f9abc,0xe94b7d8c,
0xec7aec3a,0xdb851dfa,0x63094366,0xc464c3d2,0xef1c1847,0x3215d908,
0xdd433b37,0x24c2ba16,0x12a14d43,0x2a65c451,0x50940002,0x133ae4dd,
0x71dff89e,0x10314e55,0x81ac77d6,0x5f11199b,0x043556f1,0xd7a3c76b,
0x3c11183b,0x5924a509,0xf28fe6ed,0x97f1fbfa,0x9ebabf2c,0x1e153c6e,
0x86e34570,0xeae96fb1,0x860e5e0a,0x5a3e2ab3,0x771fe71c,0x4e3d06fa,
0x2965dcb9,0x99e71d0f,0x803e89d6,0x5266c825,0x2e4cc978,0x9c10b36a,
0xc6150eba,0x94e2ea78,0xa5fc3c53,0x1e0a2df4,0xf2f74ea7,0x361d2b3d,
0x1939260f,0x19c27960,0x5223a708,0xf71312b6,0xebadfe6e,0xeac31f66,
0xe3bc4595,0xa67bc883,0xb17f37d1,0x018cff28,0xc332ddef,0xbe6c5aa5,
0x65582185,0x68ab9802,0xeecea50f,0xdb2f953b,0x2aef7dad,0x5b6e2f84,
0x1521b628,0x29076170,0xecdd4775,0x619f1510,0x13cca830,0xeb61bd96,
0x0334fe1e,0xaa0363cf,0xb5735c90,0x4c70a239,0xd59e9e0b,0xcbaade14,
0xeecc86bc,0x60622ca7,0x9cab5cab,0xb2f3846e,0x648b1eaf,0x19bdf0ca,
0xa02369b9,0x655abb50,0x40685a32,0x3c2ab4b3,0x319ee9d5,0xc021b8f7,
0x9b540b19,0x875fa099,0x95f7997e,0x623d7da8,0xf837889a,0x97e32d77,
0x11ed935f,0x16681281,0x0e358829,0xc7e61fd6,0x96dedfa1,0x7858ba99,
0x57f584a5,0x1b227263,0x9b83c3ff,0x1ac24696,0xcdb30aeb,0x532e3054,
0x8fd948e4,0x6dbc3128,0x58ebf2ef,0x34c6ffea,0xfe28ed61,0xee7c3c73,
0x5d4a14d9,0xe864b7e3,0x42105d14,0x203e13e0,0x45eee2b6,0xa3aaabea,
0xdb6c4f15,0xfacb4fd0,0xc742f442,0xef6abbb5,0x654f3b1d,0x41cd2105,
0xd81e799e,0x86854dc7,0xe44b476a,0x3d816250,0xcf62a1f2,0x5b8d2646,
0xfc8883a0,0xc1c7b6a3,0x7f1524c3,0x69cb7492,0x47848a0b,0x5692b285,
0x095bbf00,0xad19489d,0x1462b174,0x23820e00,0x58428d2a,0x0c55f5ea,
0x1dadf43e,0x233f7061,0x3372f092,0x8d937e41,0xd65fecf1,0x6c223bdb,
0x7cde3759,0xcbee7460,0x4085f2a7,0xce77326e,0xa6078084,0x19f8509e,
0xe8efd855,0x61d99735,0xa969a7aa,0xc50c06c2,0x5a04abfc,0x800bcadc,
0x9e447a2e,0xc3453484,0xfdd56705,0x0e1e9ec9,0xdb73dbd3,0x105588cd,
0x675fda79,0xe3674340,0xc5c43465,0x713e38d8,0x3d28f89e,0xf16dff20,
0x153e21e7,0x8fb03d4a,0xe6e39f2b,0xdb83adf7],
[0xe93d5a68,0x948140f7,0xf64c261c,0x94692934,0x411520f7,0x7602d4f7,
0xbcf46b2e,0xd4a20068,0xd4082471,0x3320f46a,0x43b7d4b7,0x500061af,
0x1e39f62e,0x97244546,0x14214f74,0xbf8b8840,0x4d95fc1d,0x96b591af,
0x70f4ddd3,0x66a02f45,0xbfbc09ec,0x03bd9785,0x7fac6dd0,0x31cb8504,
0x96eb27b3,0x55fd3941,0xda2547e6,0xabca0a9a,0x28507825,0x530429f4,
0x0a2c86da,0xe9b66dfb,0x68dc1462,0xd7486900,0x680ec0a4,0x27a18dee,
0x4f3ffea2,0xe887ad8c,0xb58ce006,0x7af4d6b6,0xaace1e7c,0xd3375fec,
0xce78a399,0x406b2a42,0x20fe9e35,0xd9f385b9,0xee39d7ab,0x3b124e8b,
0x1dc9faf7,0x4b6d1856,0x26a36631,0xeae397b2,0x3a6efa74,0xdd5b4332,
0x6841e7f7,0xca7820fb,0xfb0af54e,0xd8feb397,0x454056ac,0xba489527,
0x55533a3a,0x20838d87,0xfe6ba9b7,0xd096954b,0x55a867bc,0xa1159a58,
0xcca92963,0x99e1db33,0xa62a4a56,0x3f3125f9,0x5ef47e1c,0x9029317c,
0xfdf8e802,0x04272f70,0x80bb155c,0x05282ce3,0x95c11548,0xe4c66d22,
0x48c1133f,0xc70f86dc,0x07f9c9ee,0x41041f0f,0x404779a4,0x5d886e17,
0x325f51eb,0xd59bc0d1,0xf2bcc18f,0x41113564,0x257b7834,0x602a9c60,
0xdff8e8a3,0x1f636c1b,0x0e12b4c2,0x02e1329e,0xaf664fd1,0xcad18115,
0x6b2395e0,0x333e92e1,0x3b240b62,0xeebeb922,0x85b2a20e,0xe6ba0d99,
0xde720c8c,0x2da2f728,0xd0127845,0x95b794fd,0x647d0862,0xe7ccf5f0,
0x5449a36f,0x877d48fa,0xc39dfd27,0xf33e8d1e,0x0a476341,0x992eff74,
0x3a6f6eab,0xf4f8fd37,0xa812dc60,0xa1ebddf8,0x991be14c,0xdb6e6b0d,
0xc67b5510,0x6d672c37,0x2765d43b,0xdcd0e804,0xf1290dc7,0xcc00ffa3,
0xb5390f92,0x690fed0b,0x667b9ffb,0xcedb7d9c,0xa091cf0b,0xd9155ea3,
0xbb132f88,0x515bad24,0x7b9479bf,0x763bd6eb,0x37392eb3,0xcc115979,
0x8026e297,0xf42e312d,0x6842ada7,0xc66a2b3b,0x12754ccc,0x782ef11c,
0x6a124237,0xb79251e7,0x06a1bbe6,0x4bfb6350,0x1a6b1018,0x11caedfa,
0x3d25bdd8,0xe2e1c3c9,0x44421659,0x0a121386,0xd90cec6e,0xd5abea2a,
0x64af674e,0xda86a85f,0xbebfe988,0x64e4c3fe,0x9dbc8057,0xf0f7c086,
0x60787bf8,0x6003604d,0xd1fd8346,0xf6381fb0,0x7745ae04,0xd736fccc,
0x83426b33,0xf01eab71,0xb0804187,0x3c005e5f,0x77a057be,0xbde8ae24,
0x55464299,0xbf582e61,0x4e58f48f,0xf2ddfda2,0xf474ef38,0x8789bdc2,
0x5366f9c3,0xc8b38e74,0xb475f255,0x46fcd9b9,0x7aeb2661,0x8b1ddf84,
0x846a0e79,0x915f95e2,0x466e598e,0x20b45770,0x8cd55591,0xc902de4c,
0xb90bace1,0xbb8205d0,0x11a86248,0x7574a99e,0xb77f19b6,0xe0a9dc09,
0x662d09a1,0xc4324633,0xe85a1f02,0x09f0be8c,0x4a99a025,0x1d6efe10,
0x1ab93d1d,0x0ba5a4df,0xa186f20f,0x2868f169,0xdcb7da83,0x573906fe,
0xa1e2ce9b,0x4fcd7f52,0x50115e01,0xa70683fa,0xa002b5c4,0x0de6d027,
0x9af88c27,0x773f8641,0xc3604c06,0x61a806b5,0xf0177a28,0xc0f586e0,
0x006058aa,0x30dc7d62,0x11e69ed7,0x2338ea63,0x53c2dd94,0xc2c21634,
0xbbcbee56,0x90bcb6de,0xebfc7da1,0xce591d76,0x6f05e409,0x4b7c0188,
0x39720a3d,0x7c927c24,0x86e3725f,0x724d9db9,0x1ac15bb4,0xd39eb8fc,
0xed545578,0x08fca5b5,0xd83d7cd3,0x4dad0fc4,0x1e50ef5e,0xb161e6f8,
0xa28514d9,0x6c51133c,0x6fd5c7e7,0x56e14ec4,0x362abfce,0xddc6c837,
0xd79a3234,0x92638212,0x670efa8e,0x406000e0],
[0x3a39ce37,0xd3faf5cf,0xabc27737,0x5ac52d1b,0x5cb0679e,0x4fa33742,
0xd3822740,0x99bc9bbe,0xd5118e9d,0xbf0f7315,0xd62d1c7e,0xc700c47b,
0xb78c1b6b,0x21a19045,0xb26eb1be,0x6a366eb4,0x5748ab2f,0xbc946e79,
0xc6a376d2,0x6549c2c8,0x530ff8ee,0x468dde7d,0xd5730a1d,0x4cd04dc6,
0x2939bbdb,0xa9ba4650,0xac9526e8,0xbe5ee304,0xa1fad5f0,0x6a2d519a,
0x63ef8ce2,0x9a86ee22,0xc089c2b8,0x43242ef6,0xa51e03aa,0x9cf2d0a4,
0x83c061ba,0x9be96a4d,0x8fe51550,0xba645bd6,0x2826a2f9,0xa73a3ae1,
0x4ba99586,0xef5562e9,0xc72fefd3,0xf752f7da,0x3f046f69,0x77fa0a59,
0x80e4a915,0x87b08601,0x9b09e6ad,0x3b3ee593,0xe990fd5a,0x9e34d797,
0x2cf0b7d9,0x022b8b51,0x96d5ac3a,0x017da67d,0xd1cf3ed6,0x7c7d2d28,
0x1f9f25cf,0xadf2b89b,0x5ad6b472,0x5a88f54c,0xe029ac71,0xe019a5e6,
0x47b0acfd,0xed93fa9b,0xe8d3c48d,0x283b57cc,0xf8d56629,0x79132e28,
0x785f0191,0xed756055,0xf7960e44,0xe3d35e8c,0x15056dd4,0x88f46dba,
0x03a16125,0x0564f0bd,0xc3eb9e15,0x3c9057a2,0x97271aec,0xa93a072a,
0x1b3f6d9b,0x1e6321f5,0xf59c66fb,0x26dcf319,0x7533d928,0xb155fdf5,
0x03563482,0x8aba3cbb,0x28517711,0xc20ad9f8,0xabcc5167,0xccad925f,
0x4de81751,0x3830dc8e,0x379d5862,0x9320f991,0xea7a90c2,0xfb3e7bce,
0x5121ce64,0x774fbe32,0xa8b6e37e,0xc3293d46,0x48de5369,0x6413e680,
0xa2ae0810,0xdd6db224,0x69852dfd,0x09072166,0xb39a460a,0x6445c0dd,
0x586cdecf,0x1c20c8ae,0x5bbef7dd,0x1b588d40,0xccd2017f,0x6bb4e3bb,
0xdda26a7e,0x3a59ff45,0x3e350a44,0xbcb4cdd5,0x72eacea8,0xfa6484bb,
0x8d6612ae,0xbf3c6f47,0xd29be463,0x542f5d9e,0xaec2771b,0xf64e6370,
0x740e0d8d,0xe75b1357,0xf8721671,0xaf537d5d,0x4040cb08,0x4eb4e2cc,
0x34d2466a,0x0115af84,0xe1b00428,0x95983a1d,0x06b89fb4,0xce6ea048,
0x6f3f3b82,0x3520ab82,0x011a1d4b,0x277227f8,0x611560b1,0xe7933fdc,
0xbb3a792b,0x344525bd,0xa08839e1,0x51ce794b,0x2f32c9b7,0xa01fbac9,
0xe01cc87e,0xbcc7d1f6,0xcf0111c3,0xa1e8aac7,0x1a908749,0xd44fbd9a,
0xd0dadecb,0xd50ada38,0x0339c32a,0xc6913667,0x8df9317c,0xe0b12b4f,
0xf79e59b7,0x43f5bb3a,0xf2d519ff,0x27d9459c,0xbf97222c,0x15e6fc2a,
0x0f91fc71,0x9b941525,0xfae59361,0xceb69ceb,0xc2a86459,0x12baa8d1,
0xb6c1075e,0xe3056a0c,0x10d25065,0xcb03a442,0xe0ec6e0e,0x1698db3b,
0x4c98a0be,0x3278e964,0x9f1f9532,0xe0d392df,0xd3a0342b,0x8971f21e,
0x1b0a7441,0x4ba3348c,0xc5be7120,0xc37632d8,0xdf359f8d,0x9b992f2e,
0xe60b6f47,0x0fe3f11d,0xe54cda54,0x1edad891,0xce6279cf,0xcd3e7e6f,
0x1618b166,0xfd2c1d05,0x848fd2c5,0xf6fb2299,0xf523f357,0xa6327623,
0x93a83531,0x56cccd02,0xacf08162,0x5a75ebb5,0x6e163697,0x88d273cc,
0xde966292,0x81b949d0,0x4c50901b,0x71c65614,0xe6c6c7bd,0x327a140a,
0x45e1d006,0xc3f27b9a,0xc9aa53fd,0x62a80f00,0xbb25bfe2,0x35bdd2f6,
0x71126905,0xb2040222,0xb6cbcf7c,0xcd769c2b,0x53113ec0,0x1640e3d3,
0x38abbd60,0x2547adf0,0xba38209c,0xf746ce76,0x77afa1c5,0x20756060,
0x85cbfe4e,0x8ae88dd8,0x7aaaf9b0,0x4cf9aa7e,0x1948c25c,0x02fb8a8c,
0x01c36ae4,0xd6ebe1f9,0x90d4f869,0xa65cdea0,0x3f09252d,0xc208e69f,
0xb74e6132,0xce77e25b,0x578fdfe3,0x3ac372e6]
] | mit |
alexandreleroux/mayavi | integrationtests/mayavi/test_vtk_xml_reader.py | 2 | 1044 | """Simple test to check the VTK XML reader -- this is basically a copy
of test_contour.py with just the reader changed.
"""
# Author: Prabhu Ramachandran <prabhu_r@users.sf.net>
# Copyright (c) 2005-2008, Enthought, Inc.
# License: BSD Style.
# Standard library imports.
from os.path import abspath
from StringIO import StringIO
import copy
# Local imports.
from common import TestCase, get_example_data
from test_vtk_data_source import TestVTKDataSource
class TestVTKXMLReader(TestVTKDataSource):
    """Exercise the VTK XML file reader; reuses the checks of
    TestVTKDataSource with only the data source swapped out."""

    def make_data(self):
        from mayavi.sources.vtk_xml_file_reader import VTKXMLFileReader

        script = self.script

        ############################################################
        # Create a new scene and set up the visualization.
        self.new_scene()

        # Read a VTK XML data file.
        reader = VTKXMLFileReader()
        reader.initialize(get_example_data('heart.vti'))
        script.add_source(reader)

    def test(self):
        self.main()
if __name__ == "__main__":
    # Allow running this integration test directly as a script.
    runner = TestVTKXMLReader()
    runner.test()
| bsd-3-clause |
bdoner/SickRage | lib/sqlalchemy/event/api.py | 75 | 3844 | # event/api.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Public API functions for the event system.
"""
from __future__ import absolute_import
from .. import util, exc
from .base import _registrars
from .registry import _EventKey
# Sentinel symbols (unique objects with a readable repr) that listener
# functions may return; their exact semantics are interpreted by the
# event machinery that inspects the listener's return value.
CANCEL = util.symbol('CANCEL')
NO_RETVAL = util.symbol('NO_RETVAL')
def _event_key(target, identifier, fn):
    """Resolve (target, identifier, fn) to an :class:`._EventKey`.

    Scans the event classes registered under *identifier* for the first
    one that accepts *target*; raises ``InvalidRequestError`` when none
    does.
    """
    for evt_cls in _registrars[identifier]:
        tgt = evt_cls._accept_with(target)
        if tgt is None:
            continue
        return _EventKey(target, identifier, fn, tgt)
    raise exc.InvalidRequestError("No such event '%s' for target '%s'" %
                                  (identifier, target))
def listen(target, identifier, fn, *args, **kw):
    """Register the listener function *fn* for event *identifier* on
    *target*.

    e.g.::

        from sqlalchemy import event
        from sqlalchemy.schema import UniqueConstraint

        def unique_constraint_name(const, table):
            const.name = "uq_%s_%s" % (
                table.name,
                list(const.columns)[0].name
            )

        event.listen(
            UniqueConstraint,
            "after_parent_attach",
            unique_constraint_name)

    Passing ``once=True`` invokes the function only for the first
    occurrence of the event.

    .. versionadded:: 0.9.3 Added ``once=True`` to :func:`.event.listen`
       and :func:`.event.listens_for`.
    """
    _event_key(target, identifier, fn).listen(*args, **kw)
def listens_for(target, identifier, *args, **kw):
    """Decorator form of :func:`.listen`: register the decorated function
    as a listener for the given target + identifier.

    e.g.::

        from sqlalchemy import event
        from sqlalchemy.schema import UniqueConstraint

        @event.listens_for(UniqueConstraint, "after_parent_attach")
        def unique_constraint_name(const, table):
            const.name = "uq_%s_%s" % (
                table.name,
                list(const.columns)[0].name
            )

    Passing ``once=True`` invokes the function only for the first
    occurrence of the event.

    .. versionadded:: 0.9.3 Added ``once=True`` to :func:`.event.listen`
       and :func:`.event.listens_for`.
    """
    def decorate(fn):
        listen(target, identifier, fn, *args, **kw)
        return fn

    return decorate
def remove(target, identifier, fn):
    """Revert a prior :func:`.listen` registration.

    The arguments must match exactly those that were sent to
    :func:`.listen`; every registration that proceeded from that call —
    including propagation to subclasses when ``propagate=True`` was
    used — is reverted.

    e.g.::

        # if a function was registered like this...
        @event.listens_for(SomeMappedClass, "before_insert", propagate=True)
        def my_listener_function(*arg):
            pass

        # ... it's removed like this
        event.remove(SomeMappedClass, "before_insert", my_listener_function)

    .. versionadded:: 0.9.0
    """
    _event_key(target, identifier, fn).remove()
def contains(target, identifier, fn):
    """Return True when the given target/identifier/fn combination is
    currently set up to listen.

    .. versionadded:: 0.9.0
    """
    return _event_key(target, identifier, fn).contains()
| gpl-3.0 |
hufsm/tu_gen2_libsigrokdecode | decoders/eeprom93xx/pd.py | 1 | 5740 | ##
## This file is part of the libsigrokdecode project.
##
## Copyright (C) 2017 Kevin Redon <kingkevin@cuvoodoo.info>
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, see <http://www.gnu.org/licenses/>.
##
import sigrokdecode as srd
class Decoder(srd.Decoder):
    """Protocol decoder for 93xx Microwire EEPROM packets delivered by
    the 'microwire' input decoder."""
    api_version = 3
    id = 'eeprom93xx'
    name = '93xx EEPROM'
    longname = '93xx Microwire EEPROM'
    desc = '93xx series Microwire EEPROM protocol.'
    license = 'gplv2+'
    inputs = ['microwire']
    outputs = ['eeprom93xx']
    options = (
        {'id': 'addresssize', 'desc': 'Address size', 'default': 8},
        {'id': 'wordsize', 'desc': 'Word size', 'default': 16},
    )
    annotations = (
        ('si-data', 'SI data'),
        ('so-data', 'SO data'),
        ('warning', 'Warning'),
    )
    annotation_rows = (
        ('data', 'Data', (0, 1)),
        ('warnings', 'Warnings', (2,)),
    )

    def __init__(self):
        self.reset()

    def reset(self):
        self.frame = []

    def start(self):
        self.out_ann = self.register(srd.OUTPUT_ANN)
        self.addresssize = self.options['addresssize']
        self.wordsize = self.options['wordsize']

    def put_address(self, data):
        # Assemble the address from the SI bits (MSb first) and annotate
        # the span covered by those bits.
        a = 0
        for b in range(len(data)):
            a += (data[b].si << (len(data) - b - 1))
        self.put(data[0].ss, data[-1].es, self.out_ann,
                 [0, ['Address: 0x%x' % a, 'Addr: 0x%x' % a, '0x%x' % a]])

    def put_word(self, si, data):
        # Assemble a data word (MSb first) from SI bits (writes) or SO
        # bits (reads) and annotate it on the matching row.
        word = 0
        for b in range(len(data)):
            d = data[b].si if si else data[b].so
            word += (d << (len(data) - b - 1))
        idx = 0 if si else 1
        self.put(data[0].ss, data[-1].es,
                 self.out_ann, [idx, ['Data: 0x%x' % word, '0x%x' % word]])

    def _put_data_word(self, data):
        # Annotate the single SI data word that follows the address bits
        # (WRITE and WRAL instructions), or warn when the packet is too
        # short to contain a complete word.
        word_start = 2 + self.addresssize
        word_end = word_start + self.wordsize
        if len(data) < word_end:
            # Bug fix: clamp the start index — the packet may end exactly
            # after the address bits, in which case data[word_start] does
            # not exist (the original code raised IndexError here).
            start_idx = min(word_start, len(data) - 1)
            # Bug fix: end the warning at .es of the last bit, not .ss,
            # consistent with the READ branch below.
            self.put(data[start_idx].ss, data[-1].es,
                     self.out_ann, [2, ['Not enough word bits']])
        else:
            self.put_word(True, data[word_start:word_end])

    def decode(self, ss, es, data):
        # A packet needs at least the 2 opcode bits plus the address bits.
        if len(data) < (2 + self.addresssize):
            self.put(ss, es, self.out_ann, [2, ['Not enough packet bits']])
            return
        opcode = (data[0].si << 1) + (data[1].si << 0)
        if opcode == 2:
            # READ instruction.
            self.put(data[0].ss, data[1].es,
                     self.out_ann, [0, ['Read word', 'READ']])
            self.put_address(data[2:2 + self.addresssize])
            # Sequential reads: annotate every complete word that follows.
            word_start = 2 + self.addresssize
            while len(data) - word_start > 0:
                if len(data) - word_start < self.wordsize:
                    self.put(data[word_start].ss, data[-1].es,
                             self.out_ann, [2, ['Not enough word bits']])
                    break
                self.put_word(False,
                              data[word_start:word_start + self.wordsize])
                word_start += self.wordsize
        elif opcode == 1:
            # WRITE instruction: address followed by one data word.
            self.put(data[0].ss, data[1].es,
                     self.out_ann, [0, ['Write word', 'WRITE']])
            self.put_address(data[2:2 + self.addresssize])
            self._put_data_word(data)
        elif opcode == 3:
            # ERASE instruction.
            self.put(data[0].ss, data[1].es,
                     self.out_ann, [0, ['Erase word', 'ERASE']])
            self.put_address(data[2:2 + self.addresssize])
        elif opcode == 0:
            # Opcode 00: the next two SI bits select the sub-command.
            sub = (data[2].si, data[3].si)
            cmd_end = data[2 + self.addresssize - 1].es
            if sub == (1, 1):
                # WEN instruction.
                self.put(data[0].ss, cmd_end,
                         self.out_ann, [0, ['Write enable', 'WEN']])
            elif sub == (0, 0):
                # WDS instruction.
                self.put(data[0].ss, cmd_end,
                         self.out_ann, [0, ['Write disable', 'WDS']])
            elif sub == (1, 0):
                # ERAL instruction.
                self.put(data[0].ss, cmd_end,
                         self.out_ann, [0, ['Erase all memory',
                                            'Erase all', 'ERAL']])
            elif sub == (0, 1):
                # WRAL instruction: carries one data word for all cells.
                self.put(data[0].ss, cmd_end,
                         self.out_ann, [0, ['Write all memory',
                                            'Write all', 'WRAL']])
                self._put_data_word(data)
eeshangarg/oh-mainline | mysite/search/migrations/0047_project_icons_mark_as_wrong.py | 17 | 10858 | # This file is part of OpenHatch.
# Copyright (C) 2010 Parker Phinney
# Copyright (C) 2010 OpenHatch, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from south.db import db
from django.db import models
from mysite.search.models import *
class Migration:
    """South schema migration: add the boolean `icon_is_wrong` column to
    Project, letting users flag a project icon as incorrect so it can be
    re-fetched or replaced.
    """
    def forwards(self, orm):
        """Apply the migration: create the new column on search_project."""
        # Adding field 'Project.icon_is_wrong'
        db.add_column('search_project', 'icon_is_wrong', orm['search.project:icon_is_wrong'])
    def backwards(self, orm):
        """Revert the migration: drop the column added by forwards()."""
        # Deleting field 'Project.icon_is_wrong'
        db.delete_column('search_project', 'icon_is_wrong')
    # Frozen snapshot of the app's models at the time this migration was
    # written; South uses it to construct the `orm` object passed to
    # forwards()/backwards(). Auto-generated — do not edit by hand.
    models = {
        'auth.group': {
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '80', 'unique': 'True'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'unique_together': "(('content_type', 'codename'),)"},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'max_length': '30', 'unique': 'True'})
        },
        'contenttypes.contenttype': {
            'Meta': {'unique_together': "(('app_label', 'model'),)", 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'search.answer': {
            'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
            'author_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
            'created_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['search.Project']"}),
            'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': "orm['search.ProjectInvolvementQuestion']"}),
            'text': ('django.db.models.fields.TextField', [], {}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'})
        },
        'search.bug': {
            'as_appears_in_distribution': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200'}),
            'bize_size_tag_name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'canonical_bug_link': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
            'concerns_just_documentation': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'created_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
            'date_reported': ('django.db.models.fields.DateTimeField', [], {}),
            'description': ('django.db.models.fields.TextField', [], {}),
            'good_for_newcomers': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'importance': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'last_polled': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(1970, 1, 1, 0, 0)'}),
            'last_touched': ('django.db.models.fields.DateTimeField', [], {}),
            'looks_closed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'modified_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'people_involved': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['search.Project']"}),
            'status': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'submitter_realname': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
            'submitter_username': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '200'})
        },
        'search.bugalert': {
            'created_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '255'}),
            'how_many_bugs_at_time_of_request': ('django.db.models.fields.IntegerField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'query_string': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'})
        },
        'search.hitcountcache': {
            'created_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
            'hashed_query': ('django.db.models.fields.CharField', [], {'max_length': '40', 'primary_key': 'True'}),
            'hit_count': ('django.db.models.fields.IntegerField', [], {}),
            'modified_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        'search.project': {
            'cached_contributor_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True'}),
            'created_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
            'date_icon_was_fetched_from_ohloh': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
            'icon_for_profile': ('django.db.models.fields.files.ImageField', [], {'default': 'None', 'max_length': '100', 'null': 'True'}),
            'icon_for_search_result': ('django.db.models.fields.files.ImageField', [], {'default': 'None', 'max_length': '100', 'null': 'True'}),
            'icon_is_wrong': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'icon_raw': ('django.db.models.fields.files.ImageField', [], {'default': 'None', 'max_length': '100', 'null': 'True'}),
            'icon_smaller_for_badge': ('django.db.models.fields.files.ImageField', [], {'default': 'None', 'max_length': '100', 'null': 'True'}),
            'icon_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'language': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'logo_contains_name': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'modified_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'unique': 'True'})
        },
        'search.projectinvolvementquestion': {
            'created_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_bug_style': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'key_string': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'modified_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'text': ('django.db.models.fields.TextField', [], {})
        }
    }
    complete_apps = ['search']
| agpl-3.0 |
mavenlin/tensorflow | tensorflow/contrib/learn/python/learn/experiment_test.py | 18 | 34939 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for TaskRunner and Experiment class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import os
import tempfile
import time
from tensorflow.contrib.layers.python.layers import feature_column
from tensorflow.contrib.learn.python.learn import estimator as estimator_lib
from tensorflow.contrib.learn.python.learn import evaluable
from tensorflow.contrib.learn.python.learn import experiment
from tensorflow.contrib.learn.python.learn import run_config
from tensorflow.contrib.learn.python.learn import trainable
from tensorflow.contrib.learn.python.learn.estimators import dnn
from tensorflow.contrib.learn.python.learn.estimators import run_config as run_config_lib
from tensorflow.contrib.learn.python.learn.estimators import test_data
from tensorflow.contrib.learn.python.learn.utils import saved_model_export_utils
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.client import session
from tensorflow.python.estimator import estimator as core_estimator
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging
from tensorflow.python.training import saver
from tensorflow.python.training import server_lib
from tensorflow.python.training import session_run_hook
from tensorflow.python.util import compat
from tensorflow.python.util import tf_inspect
class SheepCounter(object):
  """Stand-in for the time module's sleep() and time() in delay tests.

  Instead of blocking, each sleep() records the requested duration, so
  the simulated clock returned by time() advances by exactly the total
  amount "slept". Tests can inspect the individual sleep durations and
  how often time() was consulted.
  """

  def __init__(self):
    self._sleeptimes = []
    self._time_calls = 0

  def sleep(self, t):
    # Advance the fake clock instead of actually waiting.
    self._sleeptimes.append(t)

  def time(self):
    self._time_calls += 1
    # Simulated "now" is the sum of everything slept so far.
    return sum(self._sleeptimes)

  @property
  def sleep_times(self):
    return self._sleeptimes

  @property
  def time_calls(self):
    return self._time_calls
class TestBaseEstimator(object):
  """Fake estimator that counts fit/evaluate/export calls.

  Shared base for the contrib (v1) and core Estimator test doubles below.
  The counters and captured hooks/monitors are inspected by the tests.
  """
  def __init__(self, config, max_evals, eval_dict):
    # Call counters and captured arguments, asserted on by the tests.
    self.eval_count = 0
    self.fit_count = 0
    self._max_evals = max_evals
    self.export_count = 0
    self.monitors = []
    self.eval_hooks = []
    self._config = config or run_config.RunConfig()
    self._model_dir = tempfile.mkdtemp()
    self._eval_dict = eval_dict
  @property
  def model_dir(self):
    return self._model_dir
  @property
  def config(self):
    return self._config
  def evaluate(self, **kwargs):
    """Counts the call; raises StopIteration once _max_evals is exceeded.

    The StopIteration is what terminates Experiment's continuous-eval
    loops in the tests above/below.
    """
    tf_logging.info('evaluate called with args: %s' % kwargs)
    if 'hooks' in kwargs:
      self.eval_hooks = kwargs['hooks']
    self.eval_count += 1
    if self.eval_count > self._max_evals:
      tf_logging.info('Ran %d evals. Done.' % self.eval_count)
      raise StopIteration()
    return self._eval_dict
  def fake_checkpoint(self):
    """Writes a minimal real checkpoint so evaluation finds one in model_dir."""
    save_path = os.path.join(self.model_dir, 'model.ckpt')
    with session.Session() as sess:
      var = variables.Variable(1.0, name='var0')
      save = saver.Saver({var.op.name: var})
      var.initializer.run()
      save.save(sess, save_path, global_step=0)
  def train(self, **kwargs):
    """Counts the call and returns the kwargs as sorted (key, value) pairs."""
    self.fake_checkpoint()
    tf_logging.info('fit called with args: %s' % kwargs)
    self.fit_count += 1
    return [(key, kwargs[key]) for key in sorted(kwargs.keys())]
  def export_savedmodel(self, export_dir_base, serving_input_fn, **kwargs):
    """Counts the call and returns a fake export path; no real export happens."""
    tf_logging.info('export_savedmodel called with args: %s, %s, %s' %
                    (export_dir_base, serving_input_fn, kwargs))
    self.export_count += 1
    return os.path.join(
        compat.as_bytes(export_dir_base), compat.as_bytes('bogus_timestamp'))
def _check_method_supports_args(method, kwargs):
  """Raises ValueError if any key in `kwargs` is not an argument of `method`."""
  accepted = frozenset(tf_inspect.getargspec(method).args)
  for name in kwargs:
    if name not in accepted:
      raise ValueError(
          'Argument `{}` is not supported in method {}.'.format(name, method))
class TestEstimator(
    TestBaseEstimator, evaluable.Evaluable, trainable.Trainable):
  """Test double implementing the contrib (v1) Estimator interface."""
  def __init__(self, config=None, max_evals=5, eval_dict=None):
    super(TestEstimator, self).__init__(config, max_evals, eval_dict)
    tf_logging.info('Create Estimator')
  def evaluate(self, **kwargs):
    """Validates kwargs against Evaluable.evaluate, then counts the call."""
    _check_method_supports_args(evaluable.Evaluable.evaluate, kwargs)
    return super(TestEstimator, self).evaluate(**kwargs)
  def fit(self, **kwargs):
    """Contrib-style training entry point; delegates to the shared train()."""
    _check_method_supports_args(trainable.Trainable.fit, kwargs)
    if 'monitors' in kwargs:
      # Capture monitors so tests can assert on what Experiment passed in.
      self.monitors = kwargs['monitors']
    return super(TestEstimator, self).train(**kwargs)
  def train(self, **kwargs):
    # The contrib interface trains via fit(); a train() call is a bug here.
    raise ValueError('`train` is not defined in Estimator.')
  def export_savedmodel(
      self, export_dir_base, serving_input_fn, **kwargs):
    """Validates kwargs against the contrib export signature, then counts."""
    _check_method_supports_args(
        estimator_lib.Estimator.export_savedmodel, kwargs)
    return super(TestEstimator, self).export_savedmodel(
        export_dir_base, serving_input_fn, **kwargs)
class TestCoreEstimator(TestBaseEstimator, core_estimator.Estimator):
  """Test double implementing the core (tf.estimator) Estimator interface."""
  def __init__(self, config=None, max_evals=5, eval_dict=None):
    super(TestCoreEstimator, self).__init__(config, max_evals, eval_dict)
    tf_logging.info('Create Core Estimator')
  def evaluate(self, **kwargs):
    """Validates kwargs against the core evaluate signature, then counts."""
    _check_method_supports_args(core_estimator.Estimator.evaluate, kwargs)
    return super(TestCoreEstimator, self).evaluate(**kwargs)
  def train(self, **kwargs):
    """Core-style training entry point; hooks stand in for contrib monitors."""
    _check_method_supports_args(core_estimator.Estimator.train, kwargs)
    if 'hooks' in kwargs:
      # Stored under .monitors so tests can assert uniformly on both doubles.
      self.monitors = kwargs['hooks']
    return super(TestCoreEstimator, self).train(**kwargs)
  def export_savedmodel(
      self, export_dir_base, serving_input_receiver_fn, **kwargs):
    """Validates kwargs against the core export signature, then counts."""
    _check_method_supports_args(
        core_estimator.Estimator.export_savedmodel, kwargs)
    return super(TestCoreEstimator, self).export_savedmodel(
        export_dir_base, serving_input_receiver_fn, **kwargs)
class _NoopHook(session_run_hook.SessionRunHook):
  """Hook with no behavior of its own; used only to verify hook plumbing."""
  pass
class ExperimentTest(test.TestCase):
def _cluster_spec(self):
return {
run_config_lib.TaskType.PS: ['host1:2222', 'host2:2222'],
run_config_lib.TaskType.WORKER:
['host3:2222', 'host4:2222', 'host5:2222']
}
def _estimators_for_tests(self, config=None, eval_dict=None):
return [TestEstimator(config=config, eval_dict=eval_dict),
TestCoreEstimator(config=config, eval_dict=eval_dict)]
def test_eval_metrcis_for_core_estimator(self):
est = TestCoreEstimator()
with self.assertRaisesRegexp(
ValueError, '`eval_metrics` must be `None`'):
experiment.Experiment(
est,
train_input_fn='train_input',
train_steps='train_steps',
eval_input_fn='eval_input',
eval_metrics='eval_metrics')
def test_default_output_alternative_key_core_estimator(self):
est = TestCoreEstimator()
export_strategy = saved_model_export_utils.make_export_strategy(
est,
default_output_alternative_key='export_key',
exports_to_keep=None)
ex = experiment.Experiment(
est,
train_input_fn='train_input',
eval_input_fn='eval_input',
train_steps=100,
eval_steps=100,
export_strategies=export_strategy)
with self.assertRaisesRegexp(
ValueError, 'default_output_alternative_key is not supported'):
ex.train_and_evaluate()
def test_train(self):
for est in self._estimators_for_tests():
eval_metrics = 'eval_metrics' if not isinstance(
est, core_estimator.Estimator) else None
ex = experiment.Experiment(
est,
train_input_fn='train_input',
train_steps='train_steps',
eval_input_fn='eval_input',
eval_metrics=eval_metrics)
fit_args = ex.train(delay_secs=0)
self.assertEqual(1, est.fit_count)
self.assertIn(('max_steps', 'train_steps'), fit_args)
self.assertEqual(0, est.eval_count)
def test_train_delay(self):
for est in self._estimators_for_tests():
ex = experiment.Experiment(
est, train_input_fn='train_input', eval_input_fn='eval_input')
for delay in [0, 1, 3]:
sheep = SheepCounter()
with test.mock.patch.object(time, 'time', sheep.time):
with test.mock.patch.object(time, 'sleep', sheep.sleep):
ex.train(delay_secs=delay)
self.assertAlmostEqual(delay, sheep.time(), delta=1e-4)
def test_train_default_delay(self):
for task_id in [0, 1, 3]:
tf_config = {'task': {'index': task_id}}
with test.mock.patch.dict('os.environ',
{'TF_CONFIG': json.dumps(tf_config)}):
config = run_config.RunConfig()
for est in self._estimators_for_tests(config):
ex = experiment.Experiment(
est, train_input_fn='train_input', eval_input_fn='eval_input')
sheep = SheepCounter()
with test.mock.patch.object(time, 'time', sheep.time):
with test.mock.patch.object(time, 'sleep', sheep.sleep):
ex.train()
self.assertAlmostEqual(task_id * 5, sheep.time(), delta=1e-4)
@test.mock.patch.object(server_lib, 'Server')
def test_train_starts_server(self, mock_server):
# Arrange.
tf_config = {
'cluster': self._cluster_spec(),
'environment': run_config_lib.Environment.CLOUD,
'task': {
'type': run_config_lib.TaskType.WORKER,
'index': 1
}
}
with test.mock.patch.dict('os.environ',
{'TF_CONFIG': json.dumps(tf_config)}):
config = run_config_lib.RunConfig(
master='host4:2222', num_cores=15, gpu_memory_fraction=0.314)
for est in self._estimators_for_tests(config):
ex = experiment.Experiment(
est, train_input_fn='train_input', eval_input_fn='eval_input')
# Act.
# We want to make sure we discount the time it takes to start the server
# in our accounting of the delay, so we set a small delay here.
sheep = SheepCounter()
with test.mock.patch.object(time, 'time', sheep.time):
with test.mock.patch.object(time, 'sleep', sheep.sleep):
ex.train(delay_secs=1)
# Ensure that the delay takes into account the time to start server.
self.assertAlmostEqual(1, sheep.time(), delta=1e-4)
# Assert.
expected_config_proto = config_pb2.ConfigProto()
expected_config_proto.inter_op_parallelism_threads = 15
expected_config_proto.intra_op_parallelism_threads = 15
expected_config_proto.gpu_options.per_process_gpu_memory_fraction = 0.314
mock_server.assert_called_with(
config.cluster_spec,
job_name=run_config_lib.TaskType.WORKER,
task_index=1,
config=expected_config_proto,
start=False)
mock_server.assert_has_calls([test.mock.call().start()])
@test.mock.patch.object(server_lib, 'Server')
def test_train_server_does_not_start_without_cluster_spec(self, mock_server):
config = run_config_lib.RunConfig(master='host4:2222')
for est in self._estimators_for_tests(config):
ex = experiment.Experiment(
est,
train_input_fn='train_input',
eval_input_fn='eval_input')
ex.train()
# The server should not have started because there was no ClusterSpec.
self.assertFalse(mock_server.called)
@test.mock.patch.object(server_lib, 'Server')
def test_train_server_does_not_start_with_empty_master(self, mock_server):
tf_config = {'cluster': self._cluster_spec()}
with test.mock.patch.dict('os.environ',
{'TF_CONFIG': json.dumps(tf_config)}):
config = run_config_lib.RunConfig(master='')
for est in self._estimators_for_tests(config):
ex = experiment.Experiment(
est,
train_input_fn='train_input',
eval_input_fn='eval_input')
ex.train()
# The server should not have started because master was the empty string.
self.assertFalse(mock_server.called)
def test_train_raises_if_job_name_is_missing(self):
tf_config = {
'cluster': self._cluster_spec(),
'environment': run_config_lib.Environment.CLOUD,
'task': {
'index': 1
}
}
with test.mock.patch.dict(
'os.environ',
{'TF_CONFIG': json.dumps(tf_config)}), self.assertRaises(ValueError):
config = run_config_lib.RunConfig(
master='host3:2222' # Normally selected by task type.
)
for est in self._estimators_for_tests(config):
ex = experiment.Experiment(
est,
train_input_fn='train_input',
eval_input_fn='eval_input')
ex.train()
def test_evaluate(self):
for est in self._estimators_for_tests():
eval_metrics = 'eval_metrics' if not isinstance(
est, core_estimator.Estimator) else None
est.fake_checkpoint()
noop_hook = _NoopHook()
ex = experiment.Experiment(
est,
train_input_fn='train_input',
eval_input_fn='eval_input',
eval_metrics=eval_metrics,
eval_hooks=[noop_hook],
eval_steps='steps',
eval_delay_secs=0)
ex.evaluate()
self.assertEqual(0, est.fit_count)
self.assertEqual(1, est.eval_count)
self.assertEqual([noop_hook], est.eval_hooks)
def test_evaluate_delay(self):
for est in self._estimators_for_tests():
est.fake_checkpoint()
noop_hook = _NoopHook()
ex = experiment.Experiment(
est, train_input_fn='train_input', eval_input_fn='eval_input',
eval_hooks=[noop_hook])
for delay in [0, 1, 3]:
sheep = SheepCounter()
with test.mock.patch.object(time, 'time', sheep.time):
with test.mock.patch.object(time, 'sleep', sheep.sleep):
ex.evaluate(delay_secs=delay)
self.assertAlmostEqual(delay, sheep.time(), delta=1e-4)
self.assertEqual([noop_hook], est.eval_hooks)
def test_continuous_eval(self):
for est in self._estimators_for_tests(eval_dict={'global_step': 100}):
eval_metrics = 'eval_metrics' if not isinstance(
est, core_estimator.Estimator) else None
est.fake_checkpoint()
noop_hook = _NoopHook()
ex = experiment.Experiment(
est,
train_input_fn='train_input',
eval_input_fn='eval_input',
eval_metrics=eval_metrics,
eval_hooks=[noop_hook],
eval_delay_secs=0,
continuous_eval_throttle_secs=0)
self.assertRaises(StopIteration, ex.continuous_eval,
evaluate_checkpoint_only_once=False)
self.assertEqual(0, est.fit_count)
self.assertEqual(6, est.eval_count)
self.assertEqual([noop_hook], est.eval_hooks)
def test_continuous_eval_ends_after_train_step(self):
for est in self._estimators_for_tests(eval_dict={'global_step': 100}):
eval_metrics = 'eval_metrics' if not isinstance(
est, core_estimator.Estimator) else None
est.fake_checkpoint()
noop_hook = _NoopHook()
ex = experiment.Experiment(
est,
train_input_fn='train_input',
eval_input_fn='eval_input',
eval_metrics=eval_metrics,
eval_hooks=[noop_hook],
eval_delay_secs=0,
continuous_eval_throttle_secs=0,
train_steps=100)
ex.continuous_eval()
self.assertEqual(0, est.fit_count)
self.assertEqual(1, est.eval_count)
self.assertEqual([noop_hook], est.eval_hooks)
def test_continuous_eval_throttle_delay(self):
for delay in [0, 1, 2]:
for est in self._estimators_for_tests():
eval_metrics = 'eval_metrics' if not isinstance(
est, core_estimator.Estimator) else None
est.fake_checkpoint()
noop_hook = _NoopHook()
ex = experiment.Experiment(
est,
train_input_fn='train_input',
eval_input_fn='eval_input',
eval_metrics=eval_metrics,
eval_hooks=[noop_hook],
continuous_eval_throttle_secs=delay,
eval_delay_secs=0)
sheep = SheepCounter()
with test.mock.patch.object(time, 'time', sheep.time):
with test.mock.patch.object(time, 'sleep', sheep.sleep):
self.assertRaises(
StopIteration,
ex.continuous_eval,
evaluate_checkpoint_only_once=False)
self.assertAlmostEqual(5 * delay, sheep.time(), delta=1e-4)
def test_continuous_eval_predicate_fn(self):
for est in self._estimators_for_tests():
eval_metrics = 'eval_metrics' if not isinstance(
est, core_estimator.Estimator) else None
est.fake_checkpoint()
noop_hook = _NoopHook()
def _predicate_fn(unused_eval_result):
return est.eval_count < 3 # pylint: disable=cell-var-from-loop
ex = experiment.Experiment(
est,
train_input_fn='train_input',
eval_input_fn='eval_input',
eval_metrics=eval_metrics,
eval_hooks=[noop_hook],
eval_delay_secs=0,
continuous_eval_throttle_secs=0)
ex.continuous_eval(evaluate_checkpoint_only_once=False,
continuous_eval_predicate_fn=_predicate_fn)
self.assertEqual(0, est.fit_count)
self.assertEqual(3, est.eval_count)
self.assertEqual([noop_hook], est.eval_hooks)
def test_run_local(self):
for est in self._estimators_for_tests():
eval_metrics = 'eval_metrics' if not isinstance(
est, core_estimator.Estimator) else None
noop_hook = _NoopHook()
ex = experiment.Experiment(
est,
train_input_fn='train_input',
eval_input_fn='eval_input',
eval_metrics=eval_metrics,
eval_hooks=[noop_hook],
train_steps=100,
eval_steps=100,
local_eval_frequency=10)
ex.local_run()
self.assertEqual(1, est.fit_count)
self.assertEqual(1, est.eval_count)
self.assertEqual(1, len(est.monitors))
self.assertEqual([noop_hook], est.eval_hooks)
self.assertTrue(isinstance(est.monitors[0],
session_run_hook.SessionRunHook))
def test_train_hooks_extend_does_not_mutate_input_hooks(self):
for est in self._estimators_for_tests():
eval_metrics = 'eval_metrics' if not isinstance(
est, core_estimator.Estimator) else None
noop_hook = _NoopHook()
input_hooks = [noop_hook]
ex = experiment.Experiment(
est,
train_input_fn='train_input',
eval_input_fn='eval_input',
eval_metrics=eval_metrics,
train_monitors=input_hooks)
self.assertAllEqual([noop_hook], ex._train_monitors)
another_noop_hook = _NoopHook()
# Assert that the extend API mutates the hooks, but not the input hooks
ex.extend_train_hooks([another_noop_hook])
self.assertAllEqual([noop_hook, another_noop_hook], ex._train_monitors)
self.assertAllEqual([noop_hook], input_hooks)
def test_invalid_export_strategies(self):
for est in self._estimators_for_tests():
with self.assertRaisesRegexp(ValueError, 'ExportStrategy'):
experiment.Experiment(
est,
train_input_fn='train_input',
eval_input_fn='eval_input',
train_steps=100,
eval_steps=100,
export_strategies='not_an_export_strategy')
with self.assertRaisesRegexp(ValueError, 'ExportStrategy'):
experiment.Experiment(
est,
train_input_fn='train_input',
eval_input_fn='eval_input',
train_steps=100,
eval_steps=100,
export_strategies=['not_an_export_srategy'])
def test_export_strategies_reset(self):
for est in self._estimators_for_tests():
eval_metrics = 'eval_metrics' if not isinstance(
est, core_estimator.Estimator) else None
export_strategy_1 = saved_model_export_utils.make_export_strategy(
est,
None if isinstance(est, core_estimator.Estimator) else 'export_1',
exports_to_keep=None)
ex = experiment.Experiment(
est,
train_input_fn='train_input',
eval_input_fn='eval_input',
eval_metrics=eval_metrics,
train_steps=100,
eval_steps=100,
export_strategies=(export_strategy_1,))
ex.train_and_evaluate()
self.assertEqual(1, est.export_count)
# After reset with empty list (None), the count does not change and the
# user provided export strategy list should remain intact.
old_es = ex.reset_export_strategies()
ex.train_and_evaluate()
self.assertAllEqual([export_strategy_1], old_es)
self.assertEqual(1, est.export_count)
# After reset with list, the count should increase with the number of
# items.
export_strategy_2 = saved_model_export_utils.make_export_strategy(
est,
None if isinstance(est, core_estimator.Estimator) else 'export_2',
exports_to_keep=None)
export_strategy_3 = saved_model_export_utils.make_export_strategy(
est,
None if isinstance(est, core_estimator.Estimator) else 'export_3',
exports_to_keep=None)
old_es = ex.reset_export_strategies(
[export_strategy_2, export_strategy_3])
ex.train_and_evaluate()
self.assertAllEqual([], old_es)
self.assertEqual(3, est.export_count)
def test_train_and_evaluate(self):
for est in self._estimators_for_tests():
eval_metrics = 'eval_metrics' if not isinstance(
est, core_estimator.Estimator) else None
noop_hook = _NoopHook()
export_strategy = saved_model_export_utils.make_export_strategy(
est,
None if isinstance(est, core_estimator.Estimator) else 'export_input',
exports_to_keep=None)
ex = experiment.Experiment(
est,
train_input_fn='train_input',
eval_input_fn='eval_input',
eval_metrics=eval_metrics,
eval_hooks=[noop_hook],
train_steps=100,
eval_steps=100,
export_strategies=export_strategy)
ex.train_and_evaluate()
self.assertEqual(1, est.fit_count)
self.assertEqual(1, est.eval_count)
self.assertEqual(1, est.export_count)
self.assertEqual(1, len(est.monitors))
self.assertEqual([noop_hook], est.eval_hooks)
self.assertTrue(isinstance(est.monitors[0],
session_run_hook.SessionRunHook))
def test_train_and_evaluate_with_no_eval_during_training(self):
for est in self._estimators_for_tests():
eval_metrics = 'eval_metrics' if not isinstance(
est, core_estimator.Estimator) else None
noop_hook = _NoopHook()
ex = experiment.Experiment(
est,
train_input_fn='train_input',
eval_input_fn='eval_input',
eval_metrics=eval_metrics,
eval_hooks=[noop_hook],
train_steps=100,
eval_steps=100,
min_eval_frequency=0)
ex.train_and_evaluate()
self.assertEqual(1, est.fit_count)
self.assertEqual(1, est.eval_count)
self.assertEqual(0, len(est.monitors))
def test_min_eval_frequency_defaults(self):
def dummy_model_fn(features, labels): # pylint: disable=unused-argument
pass
# The default value when model_dir is on GCS is 1000
estimator = core_estimator.Estimator(dummy_model_fn, 'gs://dummy_bucket')
ex = experiment.Experiment(
estimator, train_input_fn=None, eval_input_fn=None)
self.assertEquals(ex._min_eval_frequency, 1000)
# The default value when model_dir is not on GCS is 1
estimator = core_estimator.Estimator(dummy_model_fn, '/tmp/dummy')
ex = experiment.Experiment(
estimator, train_input_fn=None, eval_input_fn=None)
self.assertEquals(ex._min_eval_frequency, 1)
# Make sure default not used when explicitly set
estimator = core_estimator.Estimator(dummy_model_fn, 'gs://dummy_bucket')
ex = experiment.Experiment(
estimator,
min_eval_frequency=123,
train_input_fn=None,
eval_input_fn=None)
self.assertEquals(ex._min_eval_frequency, 123)
# Make sure default not used when explicitly set as 0
estimator = core_estimator.Estimator(dummy_model_fn, 'gs://dummy_bucket')
ex = experiment.Experiment(
estimator,
min_eval_frequency=0,
train_input_fn=None,
eval_input_fn=None)
self.assertEquals(ex._min_eval_frequency, 0)
def test_continuous_train_and_eval(self):
for est in self._estimators_for_tests(eval_dict={'global_step': 100}):
eval_metrics = 'eval_metrics' if not isinstance(
est, core_estimator.Estimator) else None
noop_hook = _NoopHook()
export_strategy = saved_model_export_utils.make_export_strategy(
est,
None if isinstance(est, core_estimator.Estimator) else 'export_input',
exports_to_keep=None)
ex = experiment.Experiment(
est,
train_input_fn='train_input',
eval_input_fn='eval_input',
eval_metrics=eval_metrics,
eval_hooks=[noop_hook],
train_steps=100,
eval_steps=100,
export_strategies=export_strategy)
ex.continuous_train_and_eval()
self.assertEqual(1, est.fit_count)
self.assertEqual(1, est.eval_count)
self.assertEqual(1, est.export_count)
self.assertEqual([noop_hook], est.eval_hooks)
def test_continuous_train_and_eval_with_predicate_fn(self):
for est in self._estimators_for_tests(eval_dict={'global_step': 100}):
eval_metrics = 'eval_metrics' if not isinstance(
est, core_estimator.Estimator) else None
export_strategy = saved_model_export_utils.make_export_strategy(
est,
None if isinstance(est, core_estimator.Estimator) else 'export_input',
exports_to_keep=None)
ex = experiment.Experiment(
est,
train_input_fn='train_input',
eval_input_fn='eval_input',
eval_metrics=eval_metrics,
train_steps=100000000000, # a value will make `ex` never stops.
eval_steps=100,
export_strategies=export_strategy)
def predicate_fn(eval_result):
del eval_result # unused. for fn signature.
return False
ex.continuous_train_and_eval(continuous_eval_predicate_fn=predicate_fn)
self.assertEqual(0, est.fit_count)
self.assertEqual(0, est.eval_count)
self.assertEqual(0, est.export_count)
  def test_continuous_train_and_eval_with_adapted_steps_per_iteration(self):
    """With no explicit steps-per-iteration, 1/10 of train_steps is used."""
    mock_estimator = test.mock.Mock(core_estimator.Estimator)
    type(mock_estimator).model_dir = test.mock.PropertyMock(
        return_value='test_dir')
    total_steps = 100000000000000
    ex = experiment.Experiment(
        mock_estimator,
        train_input_fn='train_input',
        eval_input_fn='eval_input',
        train_steps=total_steps)
    def predicate_fn(eval_result):
      # Allows the first invoke only.
      return eval_result is None
    ex.continuous_train_and_eval(continuous_eval_predicate_fn=predicate_fn)
    # The single train call should have been adapted to total_steps / 10.
    mock_estimator.train.assert_called_once_with(
        input_fn='train_input',
        steps=int(total_steps/10),
        max_steps=test.mock.ANY,
        hooks=test.mock.ANY)
  def test_continuous_train_and_eval_with_steps_per_iteration_from_user(self):
    """An explicit train_steps_per_iteration overrides the adaptive default."""
    mock_estimator = test.mock.Mock(core_estimator.Estimator)
    type(mock_estimator).model_dir = test.mock.PropertyMock(
        return_value='test_dir')
    total_steps = 100000000000000
    ex = experiment.Experiment(
        mock_estimator,
        train_input_fn='train_input',
        eval_input_fn='eval_input',
        train_steps_per_iteration=1234,
        train_steps=total_steps)
    def predicate_fn(eval_result):
      # Allows the first invoke only.
      return eval_result is None
    ex.continuous_train_and_eval(continuous_eval_predicate_fn=predicate_fn)
    # The user-supplied per-iteration step count must be used verbatim.
    mock_estimator.train.assert_called_once_with(
        input_fn='train_input',
        steps=1234,
        max_steps=test.mock.ANY,
        hooks=test.mock.ANY)
  def test_continuous_train_and_eval_with_default_steps_per_iteration(self):
    """With neither train_steps nor per-iteration steps, 1000 steps are used."""
    mock_estimator = test.mock.Mock(core_estimator.Estimator)
    type(mock_estimator).model_dir = test.mock.PropertyMock(
        return_value='test_dir')
    ex = experiment.Experiment(
        mock_estimator,
        train_input_fn='train_input',
        eval_input_fn='eval_input',
        train_steps_per_iteration=None,
        train_steps=None)
    def predicate_fn(eval_result):
      # Allows the first invoke only.
      return eval_result is None
    ex.continuous_train_and_eval(continuous_eval_predicate_fn=predicate_fn)
    # 1000 is the documented fallback when nothing else constrains the steps.
    mock_estimator.train.assert_called_once_with(
        input_fn='train_input',
        steps=1000,
        max_steps=test.mock.ANY,
        hooks=test.mock.ANY)
  def test_continuous_train_and_eval_with_invalid_predicate_fn(self):
    """A non-callable predicate_fn is rejected with a ValueError."""
    for est in self._estimators_for_tests():
      ex = experiment.Experiment(
          est,
          train_input_fn='train_input',
          eval_input_fn='eval_input')
      with self.assertRaisesRegexp(
          ValueError, '`continuous_eval_predicate_fn` must be a callable'):
        ex.continuous_train_and_eval(continuous_eval_predicate_fn='fn')
  def test_continuous_train_and_eval_with_invalid_train_steps_iterations(self):
    """A non-integer train_steps_per_iteration fails at construction time."""
    for est in self._estimators_for_tests():
      with self.assertRaisesRegexp(
          ValueError, '`train_steps_per_iteration` must be an integer.'):
        experiment.Experiment(
            est,
            train_input_fn='train_input',
            eval_input_fn='eval_input',
            train_steps_per_iteration='123')
  @test.mock.patch.object(server_lib, 'Server')
  def test_run_std_server(self, mock_server):
    """run_std_server starts and joins a tf.train.Server for a PS task."""
    # Arrange: fake a parameter-server task via the TF_CONFIG env variable.
    tf_config = {
        'cluster': self._cluster_spec(),
        'task': {
            'type': run_config_lib.TaskType.PS,
            'index': 1
        }
    }
    with test.mock.patch.dict('os.environ',
                              {'TF_CONFIG': json.dumps(tf_config)}):
      config = run_config_lib.RunConfig(
          master='host2:2222',
          num_cores=15,
          gpu_memory_fraction=0.314,)
      for est in self._estimators_for_tests(config):
        ex = experiment.Experiment(
            est, train_input_fn='train_input', eval_input_fn='eval_input')
        # Act.
        ex.run_std_server()
        # Assert: the mocked Server instance was started and then joined.
        mock_server.assert_has_calls(
            [test.mock.call().start(), test.mock.call().join()])
  @test.mock.patch.object(server_lib, 'Server')
  def test_run_std_server_raises_without_cluster_spec(self, mock_server):
    """run_std_server requires a cluster spec; without one it raises."""
    config = run_config_lib.RunConfig(master='host4:2222')
    for est in self._estimators_for_tests(config):
      with self.assertRaises(ValueError):
        ex = experiment.Experiment(
            est,
            train_input_fn='train_input',
            eval_input_fn='eval_input')
        ex.run_std_server()
  def test_test(self):
    """Experiment.test() performs one fit, one eval, and one export."""
    for est in self._estimators_for_tests():
      exp_strategy = saved_model_export_utils.make_export_strategy(
          est,
          None if isinstance(est, core_estimator.Estimator) else 'export_input',
          exports_to_keep=None)
      ex = experiment.Experiment(
          est,
          train_input_fn='train_input',
          eval_input_fn='eval_input',
          export_strategies=(exp_strategy,))
      ex.test()
      self.assertEqual(1, est.fit_count)
      self.assertEqual(1, est.eval_count)
      self.assertEqual(1, est.export_count)
  def test_continuous_eval_evaluates_checkpoint_once(self):
    """With evaluate_checkpoint_only_once, a checkpoint is evaluated at most once."""
    for est in self._estimators_for_tests(eval_dict={'global_step': 100}):
      eval_metrics = 'eval_metrics' if not isinstance(
          est, core_estimator.Estimator) else None
      est.fake_checkpoint()
      # Counters shared with the predicate closure below.
      result = {
          'called': 0,
          'called_with_eval_result': 0,
      }
      # pylint: disable=cell-var-from-loop
      def _predicate_fn(eval_result):
        result['called'] += 1
        if eval_result:
          # If eval_result is not empty nor None, the checkpoint has been
          # evaluated.
          result['called_with_eval_result'] += 1
        # With 300 times of evaluation, this should prove something.
        return result['called'] < 300
      # pylint: enable=cell-var-from-loop
      ex = experiment.Experiment(
          est,
          train_input_fn='train_input',
          eval_input_fn='eval_input',
          eval_metrics=eval_metrics,
          eval_delay_secs=0,
          continuous_eval_throttle_secs=0)
      ex.continuous_eval(evaluate_checkpoint_only_once=True,
                         continuous_eval_predicate_fn=_predicate_fn)
      # 300 predicate calls but only a single real evaluation of the checkpoint.
      self.assertEqual(0, est.fit_count)
      self.assertEqual(1, est.eval_count)
      self.assertEqual(300, result['called'])
      self.assertEqual(1, result['called_with_eval_result'])
  def test_checkpoint_and_export(self):
    """checkpoint_and_export triggers eval/export on every checkpoint save."""
    model_dir = tempfile.mkdtemp()
    # Checkpoints every 3 steps; with train_steps=8 that saves at 1, 4, 7, 8.
    config = run_config_lib.RunConfig(save_checkpoints_steps=3)
    est = dnn.DNNClassifier(
        n_classes=3,
        feature_columns=[
            feature_column.real_valued_column('feature', dimension=4)
        ],
        hidden_units=[3, 3],
        model_dir=model_dir,
        config=config)
    exp_strategy = saved_model_export_utils.make_export_strategy(
        est, 'export_input', exports_to_keep=None)
    ex = experiment.Experiment(
        est,
        train_input_fn=test_data.iris_input_multiclass_fn,
        eval_input_fn=test_data.iris_input_multiclass_fn,
        export_strategies=(exp_strategy,),
        train_steps=8,
        checkpoint_and_export=True,
        eval_delay_secs=0)
    with test.mock.patch.object(ex, '_maybe_export'):
      with test.mock.patch.object(ex, '_call_evaluate'):
        ex.train_and_evaluate()
        # Eval and export are called after steps 1, 4, 7, and 8 (after training
        # is completed).
        self.assertEqual(ex._maybe_export.call_count, 4)
        self.assertEqual(ex._call_evaluate.call_count, 4)
# Run the full test suite when executed as a script.
if __name__ == '__main__':
  test.main()
| apache-2.0 |
Jimdo/ansible-modules-core | cloud/amazon/rds_subnet_group.py | 17 | 5033 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: rds_subnet_group
version_added: "1.5"
short_description: manage RDS database subnet groups
description:
- Creates, modifies, and deletes RDS database subnet groups. This module has a dependency on python-boto >= 2.5.
options:
state:
description:
- Specifies whether the subnet should be present or absent.
required: true
default: present
aliases: []
choices: [ 'present' , 'absent' ]
name:
description:
- Database subnet group identifier.
required: true
default: null
aliases: []
description:
description:
- Database subnet group description. Only set when a new group is added.
required: false
default: null
aliases: []
subnets:
description:
- List of subnet IDs that make up the database subnet group.
required: false
default: null
aliases: []
region:
description:
- The AWS region to use. If not specified then the value of the AWS_REGION or EC2_REGION environment variable, if any, is used.
required: true
default: null
aliases: ['aws_region', 'ec2_region']
author: Scott Anderson
extends_documentation_fragment: aws
'''
EXAMPLES = '''
# Add or change a subnet group
- rds_subnet_group
state: present
name: norwegian-blue
description: My Fancy Ex Parrot Subnet Group
subnets:
- subnet-aaaaaaaa
- subnet-bbbbbbbb
# Remove a subnet group
- rds_subnet_group:
state: absent
name: norwegian-blue
'''
try:
import boto.rds
from boto.exception import BotoServerError
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
def main():
    """Ansible entry point: create, modify, or delete an RDS DB subnet group.

    Reads ``state``, ``name``, ``description`` and ``subnets`` from the
    module parameters and reconciles the named subnet group against AWS,
    exiting via ``module.exit_json`` / ``module.fail_json``.
    """
    argument_spec = ec2_argument_spec()
    argument_spec.update(dict(
        state=dict(required=True, choices=['present', 'absent']),
        name=dict(required=True),
        description=dict(required=False),
        subnets=dict(required=False, type='list'),
    ))
    module = AnsibleModule(argument_spec=argument_spec)

    if not HAS_BOTO:
        module.fail_json(msg='boto required for this module')

    state = module.params.get('state')
    # AWS stores subnet group identifiers lowercased.
    group_name = module.params.get('name').lower()
    group_description = module.params.get('description')
    # Default to an empty *list* (the original used {}) so boto always
    # receives a sequence of subnet ids.
    group_subnets = module.params.get('subnets') or []

    if state == 'present':
        # Creating or updating needs the complete group definition.
        for required in ('name', 'description', 'subnets'):
            if not module.params.get(required):
                module.fail_json(msg="Parameter %s required for state='present'" % required)
    else:
        # Deletion is keyed on the name alone; extra params indicate a mistake.
        for not_allowed in ('description', 'subnets'):
            if module.params.get(not_allowed):
                module.fail_json(msg="Parameter %s not allowed for state='absent'" % not_allowed)

    # Retrieve any AWS settings from the environment.
    region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module)
    if not region:
        module.fail_json(msg="Either region or AWS_REGION or EC2_REGION environment variable or boto config aws_region or ec2_region must be set.")

    try:
        conn = boto.rds.connect_to_region(region, **aws_connect_kwargs)
    except BotoServerError as e:
        module.fail_json(msg=e.error_message)

    try:
        changed = False
        exists = False
        try:
            matching_groups = conn.get_all_db_subnet_groups(group_name, max_records=100)
            exists = len(matching_groups) > 0
        except BotoServerError as e:
            # "Not found" simply means we may need to create the group;
            # any other error is fatal.
            if e.error_code != 'DBSubnetGroupNotFoundFault':
                module.fail_json(msg=e.error_message)
        if state == 'absent':
            if exists:
                conn.delete_db_subnet_group(group_name)
                changed = True
        else:
            if not exists:
                conn.create_db_subnet_group(group_name, desc=group_description, subnet_ids=group_subnets)
            else:
                conn.modify_db_subnet_group(group_name, description=group_description, subnet_ids=group_subnets)
            # BUG FIX: `changed` was previously never set in this branch, so
            # create/modify operations always reported "changed: false".
            changed = True
    except BotoServerError as e:
        module.fail_json(msg=e.error_message)

    module.exit_json(changed=changed)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
# Invoke the entry point at import time -- the standard Ansible module pattern.
main()
| gpl-3.0 |
ychen820/microblog | y/google-cloud-sdk/.install/.backup/platform/gsutil/third_party/boto/boto/s3/bucketlistresultset.py | 19 | 6565 | # Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
def bucket_lister(bucket, prefix='', delimiter='', marker='', headers=None,
                  encoding_type=None):
    """
    Generator yielding every key in *bucket*, transparently following the
    S3 result paging (via the listing's marker) until the response is no
    longer truncated.
    """
    last_key = None
    while True:
        page = bucket.get_all_keys(prefix=prefix, marker=marker,
                                   delimiter=delimiter, headers=headers,
                                   encoding_type=encoding_type)
        for last_key in page:
            yield last_key
        if last_key:
            # Prefer the server-provided marker; fall back to the last key.
            marker = page.next_marker or last_key.name
        if not page.is_truncated:
            break
class BucketListResultSet(object):
    """
    An iterable over all keys within a bucket.  Each iteration delegates to
    the ``bucket_lister`` generator, which pages through the S3 results on
    demand, so even buckets holding many thousands of keys can be traversed
    without materializing the full listing.
    """

    def __init__(self, bucket=None, prefix='', delimiter='', marker='',
                 headers=None, encoding_type=None):
        # Remember the listing parameters; they are replayed on every
        # fresh iteration of this result set.
        self.bucket = bucket
        self.prefix = prefix
        self.delimiter = delimiter
        self.marker = marker
        self.headers = headers
        self.encoding_type = encoding_type

    def __iter__(self):
        # A new generator per call keeps this object re-iterable.
        return bucket_lister(self.bucket, prefix=self.prefix,
                             delimiter=self.delimiter, marker=self.marker,
                             headers=self.headers,
                             encoding_type=self.encoding_type)
def versioned_bucket_lister(bucket, prefix='', delimiter='',
                            key_marker='', version_id_marker='', headers=None,
                            encoding_type=None):
    """
    Generator yielding every object version in *bucket*, transparently
    following the S3 result paging (key marker plus version-id marker)
    until the response is no longer truncated.
    """
    while True:
        batch = bucket.get_all_versions(prefix=prefix, key_marker=key_marker,
                                        version_id_marker=version_id_marker,
                                        delimiter=delimiter, headers=headers,
                                        max_keys=999, encoding_type=encoding_type)
        for version in batch:
            yield version
        # Versioned listings page on a (key, version-id) pair of markers.
        key_marker = batch.next_key_marker
        version_id_marker = batch.next_version_id_marker
        if not batch.is_truncated:
            break
class VersionedBucketListResultSet(object):
    """
    An iterable over all object versions within a bucket.  Each iteration
    delegates to the ``versioned_bucket_lister`` generator, which pages
    through the S3 results on demand, so arbitrarily large version listings
    can be traversed efficiently.
    """

    def __init__(self, bucket=None, prefix='', delimiter='', key_marker='',
                 version_id_marker='', headers=None, encoding_type=None):
        # Remember the listing parameters; they are replayed on every
        # fresh iteration of this result set.
        self.bucket = bucket
        self.prefix = prefix
        self.delimiter = delimiter
        self.key_marker = key_marker
        self.version_id_marker = version_id_marker
        self.headers = headers
        self.encoding_type = encoding_type

    def __iter__(self):
        # A new generator per call keeps this object re-iterable.
        return versioned_bucket_lister(self.bucket, prefix=self.prefix,
                                       delimiter=self.delimiter,
                                       key_marker=self.key_marker,
                                       version_id_marker=self.version_id_marker,
                                       headers=self.headers,
                                       encoding_type=self.encoding_type)
def multipart_upload_lister(bucket, key_marker='',
                            upload_id_marker='',
                            headers=None, encoding_type=None):
    """
    Generator yielding every in-progress multipart upload in *bucket*,
    transparently following the S3 result paging (key marker plus
    upload-id marker) until the response is no longer truncated.
    """
    while True:
        batch = bucket.get_all_multipart_uploads(
            key_marker=key_marker,
            upload_id_marker=upload_id_marker,
            headers=headers,
            encoding_type=encoding_type)
        for upload in batch:
            yield upload
        # Multipart listings page on a (key, upload-id) pair of markers.
        key_marker = batch.next_key_marker
        upload_id_marker = batch.next_upload_id_marker
        if not batch.is_truncated:
            break
class MultiPartUploadListResultSet(object):
    """
    An iterable over all in-progress multipart uploads within a bucket.
    Each iteration delegates to the ``multipart_upload_lister`` generator,
    which pages through the S3 results on demand, so arbitrarily many
    uploads can be traversed efficiently.
    """

    def __init__(self, bucket=None, key_marker='',
                 upload_id_marker='', headers=None, encoding_type=None):
        # Remember the listing parameters; they are replayed on every
        # fresh iteration of this result set.
        self.bucket = bucket
        self.key_marker = key_marker
        self.upload_id_marker = upload_id_marker
        self.headers = headers
        self.encoding_type = encoding_type

    def __iter__(self):
        # A new generator per call keeps this object re-iterable.
        return multipart_upload_lister(self.bucket,
                                       key_marker=self.key_marker,
                                       upload_id_marker=self.upload_id_marker,
                                       headers=self.headers,
                                       encoding_type=self.encoding_type)
| bsd-3-clause |
foursquare/pants | contrib/python/src/python/pants/contrib/python/checks/tasks/checkstyle/file_excluder.py | 1 | 1407 | # coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import re
from builtins import object
from pants.base.exceptions import TaskError
class FileExcluder(object):
  """Filters source files against regex-based exclusion rules.

  The excludes file contains lines of the form ``<regex>::<plugin> [plugin ...]``,
  mapping a filename pattern to the check plugins that should skip matching
  files.  A plugin list of ``.*`` excludes matching files from every plugin.
  Comment lines (starting with ``#``) and lines without ``::`` are ignored.
  """

  def __init__(self, excludes_path, log):
    """
    :param excludes_path: Path to the excludes file, or a falsy value to
      disable exclusion entirely.
    :param log: Logger used for debug output.
    :raises TaskError: If ``excludes_path`` is given but does not exist.
    """
    self.excludes = {}
    if excludes_path:
      if not os.path.exists(excludes_path):
        raise TaskError('Excludes file does not exist: {0}'.format(excludes_path))
      with open(excludes_path) as fh:
        for line in fh.readlines():
          if line and not line.startswith('#') and '::' in line:
            # Split only on the first '::' so the plugin list is always a
            # single trailing field.  BUG FIX: the previous maxsplit of 2
            # produced three fields (and an unpacking ValueError) for any
            # line containing two '::' separators.
            pattern, plugins = line.strip().split('::', 1)
            style_plugins = plugins.split()
            self.excludes[pattern] = {
              'regex': re.compile(pattern),
              'plugins': style_plugins
            }
            log.debug('Exclude pattern: {pattern}'.format(pattern=pattern))
    else:
      log.debug('No excludes file specified. All python sources will be checked.')

  def should_include(self, source_filename, plugin):
    """Return True unless some rule matches both the filename and the plugin."""
    for exclude_rule in self.excludes.values():
      if exclude_rule['regex'].match(source_filename) and (
          (exclude_rule['plugins'] == ['.*']) or (plugin in exclude_rule['plugins'])
      ):
        return False
    return True
| apache-2.0 |
openiitbombayx/edx-platform | lms/djangoapps/courseware/tests/test_about.py | 38 | 23595 | """
Test the about xblock
"""
import datetime
import pytz
from django.conf import settings
from django.core.urlresolvers import reverse
from django.test.utils import override_settings
from mock import patch
from nose.plugins.attrib import attr
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from course_modes.models import CourseMode
from track.tests import EventTrackingTestCase
from xmodule.modulestore.tests.django_utils import TEST_DATA_MIXED_CLOSED_MODULESTORE
from student.models import CourseEnrollment
from student.tests.factories import UserFactory, CourseEnrollmentAllowedFactory
from shoppingcart.models import Order, PaidCourseRegistration
from xmodule.course_module import CATALOG_VISIBILITY_ABOUT, CATALOG_VISIBILITY_NONE
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from util.milestones_helpers import (
set_prerequisite_courses,
seed_milestone_relationship_types,
get_prerequisite_courses_display,
)
from .helpers import LoginEnrollmentTestCase
# HTML for registration button
REG_STR = "<form id=\"class_enroll_form\" method=\"post\" data-remote=\"true\" action=\"/change_enrollment\">"
# Error message shown when a Shibboleth-gated course rejects the current user
SHIB_ERROR_STR = "The currently logged-in user account does not have permission to enroll in this course."
@attr('shard_1')
class AboutTestCase(LoginEnrollmentTestCase, ModuleStoreTestCase, EventTrackingTestCase):
    """
    Tests about xblock.
    """
    def setUp(self):
        super(AboutTestCase, self).setUp()
        # Three course fixtures with different catalog visibilities, plus a
        # purchasable course.  NOTE(review): self.about is reassigned for each
        # course, so only the last "about" item remains referenced.
        self.course = CourseFactory.create()
        self.about = ItemFactory.create(
            category="about", parent_location=self.course.location,
            data="OOGIE BLOOGIE", display_name="overview"
        )
        self.course_without_about = CourseFactory.create(catalog_visibility=CATALOG_VISIBILITY_NONE)
        self.about = ItemFactory.create(
            category="about", parent_location=self.course_without_about.location,
            data="WITHOUT ABOUT", display_name="overview"
        )
        self.course_with_about = CourseFactory.create(catalog_visibility=CATALOG_VISIBILITY_ABOUT)
        self.about = ItemFactory.create(
            category="about", parent_location=self.course_with_about.location,
            data="WITH ABOUT", display_name="overview"
        )
        # A paid course ("honor" mode with a non-zero price).
        self.purchase_course = CourseFactory.create(org='MITx', number='buyme', display_name='Course To Buy')
        self.course_mode = CourseMode(course_id=self.purchase_course.id,
                                      mode_slug="honor",
                                      mode_display_name="honor cert",
                                      min_price=10)
        self.course_mode.save()
    def test_anonymous_user(self):
        """
        This test asserts that a non-logged in user can visit the course about page
        """
        url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn("OOGIE BLOOGIE", resp.content)
        # Check that registration button is present
        self.assertIn(REG_STR, resp.content)
    def test_logged_in(self):
        """
        This test asserts that a logged-in user can visit the course about page
        """
        self.setup_user()
        url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn("OOGIE BLOOGIE", resp.content)
    def test_already_enrolled(self):
        """
        Asserts that the end user sees the appropriate messaging
        when he/she visits the course about page, but is already enrolled
        """
        self.setup_user()
        self.enroll(self.course, True)
        url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn("You are registered for this course", resp.content)
        self.assertIn("View Courseware", resp.content)
    @override_settings(COURSE_ABOUT_VISIBILITY_PERMISSION="see_about_page")
    def test_visible_about_page_settings(self):
        """
        Verify that the About Page honors the permission settings in the course module
        """
        url = reverse('about_course', args=[self.course_with_about.id.to_deprecated_string()])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn("WITH ABOUT", resp.content)
        # A catalog-invisible course should 404 on its about page.
        url = reverse('about_course', args=[self.course_without_about.id.to_deprecated_string()])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 404)
    @patch.dict(settings.FEATURES, {'ENABLE_MKTG_SITE': True})
    def test_logged_in_marketing(self):
        """With the marketing site enabled, the about page redirects to course info."""
        self.setup_user()
        url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
        resp = self.client.get(url)
        # should be redirected
        self.assertEqual(resp.status_code, 302)
        # follow this time, and check we're redirected to the course info page
        resp = self.client.get(url, follow=True)
        target_url = resp.redirect_chain[-1][0]
        info_url = reverse('info', args=[self.course.id.to_deprecated_string()])
        self.assertTrue(target_url.endswith(info_url))
    @patch.dict(settings.FEATURES, {'ENABLE_PREREQUISITE_COURSES': True, 'MILESTONES_APP': True})
    def test_pre_requisite_course(self):
        """The about page links to a configured prerequisite course."""
        seed_milestone_relationship_types()
        pre_requisite_course = CourseFactory.create(org='edX', course='900', display_name='pre requisite course')
        course = CourseFactory.create(pre_requisite_courses=[unicode(pre_requisite_course.id)])
        self.setup_user()
        url = reverse('about_course', args=[unicode(course.id)])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        pre_requisite_courses = get_prerequisite_courses_display(course)
        pre_requisite_course_about_url = reverse('about_course', args=[unicode(pre_requisite_courses[0]['key'])])
        self.assertIn("<span class=\"important-dates-item-text pre-requisite\"><a href=\"{}\">{}</a></span>"
                      .format(pre_requisite_course_about_url, pre_requisite_courses[0]['display']),
                      resp.content.strip('\n'))
    @patch.dict(settings.FEATURES, {'ENABLE_PREREQUISITE_COURSES': True, 'MILESTONES_APP': True})
    def test_about_page_unfulfilled_prereqs(self):
        """Prerequisite info still renders when enrollment is already closed."""
        seed_milestone_relationship_types()
        pre_requisite_course = CourseFactory.create(
            org='edX',
            course='900',
            display_name='pre requisite course',
        )
        pre_requisite_courses = [unicode(pre_requisite_course.id)]
        # for this failure to occur, the enrollment window needs to be in the past
        course = CourseFactory.create(
            org='edX',
            course='1000',
            # closed enrollment
            enrollment_start=datetime.datetime(2013, 1, 1),
            enrollment_end=datetime.datetime(2014, 1, 1),
            start=datetime.datetime(2013, 1, 1),
            end=datetime.datetime(2030, 1, 1),
            pre_requisite_courses=pre_requisite_courses,
        )
        set_prerequisite_courses(course.id, pre_requisite_courses)
        self.setup_user()
        self.enroll(self.course, True)
        self.enroll(pre_requisite_course, True)
        url = reverse('about_course', args=[unicode(course.id)])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        pre_requisite_courses = get_prerequisite_courses_display(course)
        pre_requisite_course_about_url = reverse('about_course', args=[unicode(pre_requisite_courses[0]['key'])])
        self.assertIn("<span class=\"important-dates-item-text pre-requisite\"><a href=\"{}\">{}</a></span>"
                      .format(pre_requisite_course_about_url, pre_requisite_courses[0]['display']),
                      resp.content.strip('\n'))
        url = reverse('about_course', args=[unicode(pre_requisite_course.id)])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
@attr('shard_1')
class AboutTestCaseXML(LoginEnrollmentTestCase, ModuleStoreTestCase):
    """
    Tests for the course about page of an XML (non-Mongo) backed course.
    """
    MODULESTORE = TEST_DATA_MIXED_CLOSED_MODULESTORE
    # The following XML test course (which lives at common/test/data/2014)
    # is closed; we're testing that an about page still appears when
    # the course is already closed
    xml_course_id = SlashSeparatedCourseKey('edX', 'detached_pages', '2014')
    # this text appears in that course's about page
    # common/test/data/2014/about/overview.html
    xml_data = "about page 463139"
    @patch.dict('django.conf.settings.FEATURES', {'DISABLE_START_DATES': False})
    def test_logged_in_xml(self):
        """A logged-in user sees the XML course's about content."""
        self.setup_user()
        url = reverse('about_course', args=[self.xml_course_id.to_deprecated_string()])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn(self.xml_data, resp.content)
    @patch.dict('django.conf.settings.FEATURES', {'DISABLE_START_DATES': False})
    def test_anonymous_user_xml(self):
        """An anonymous user sees the XML course's about content."""
        url = reverse('about_course', args=[self.xml_course_id.to_deprecated_string()])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn(self.xml_data, resp.content)
@attr('shard_1')
class AboutWithCappedEnrollmentsTestCase(LoginEnrollmentTestCase, ModuleStoreTestCase):
    """
    This test case will check the About page when a course has a capped enrollment
    """
    def setUp(self):
        """
        Set up the tests
        """
        super(AboutWithCappedEnrollmentsTestCase, self).setUp()
        # Cap the course at a single enrollment so the second user is refused.
        self.course = CourseFactory.create(metadata={"max_student_enrollments_allowed": 1})
        self.about = ItemFactory.create(
            category="about", parent_location=self.course.location,
            data="OOGIE BLOOGIE", display_name="overview"
        )
    def test_enrollment_cap(self):
        """
        This test will make sure that enrollment caps are enforced
        """
        self.setup_user()
        url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn('<a href="#" class="register">', resp.content)
        self.enroll(self.course, verify=True)
        # create a new account since the first account is already registered for the course
        self.email = 'foo_second@test.com'
        self.password = 'bar'
        self.username = 'test_second'
        self.create_account(self.username,
                            self.email, self.password)
        self.activate_user(self.email)
        self.login(self.email, self.password)
        # Get the about page again and make sure that the page says that the course is full
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn("Course is full", resp.content)
        # Try to enroll as well
        result = self.enroll(self.course)
        self.assertFalse(result)
        # Check that registration button is not present
        self.assertNotIn(REG_STR, resp.content)
@attr('shard_1')
class AboutWithInvitationOnly(ModuleStoreTestCase):
    """
    This test case will check the About page when a course is invitation only.
    """
    def setUp(self):
        super(AboutWithInvitationOnly, self).setUp()
        self.course = CourseFactory.create(metadata={"invitation_only": True})
        self.about = ItemFactory.create(
            category="about", parent_location=self.course.location,
            display_name="overview"
        )
    def test_invitation_only(self):
        """
        Test for user not logged in, invitation only course.
        """
        url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn("Enrollment in this course is by invitation only", resp.content)
        # Check that registration button is not present
        self.assertNotIn(REG_STR, resp.content)
    def test_invitation_only_but_allowed(self):
        """
        Test for user logged in and allowed to enroll in invitation only course.
        """
        # Course is invitation only, student is allowed to enroll and logged in
        user = UserFactory.create(username='allowed_student', password='test', email='allowed_student@test.com')
        CourseEnrollmentAllowedFactory(email=user.email, course_id=self.course.id)
        self.client.login(username=user.username, password='test')
        url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn(u"Register for {}".format(self.course.id.course), resp.content.decode('utf-8'))
        # Check that registration button is present
        self.assertIn(REG_STR, resp.content)
@attr('shard_1')
@patch.dict(settings.FEATURES, {'RESTRICT_ENROLL_BY_REG_METHOD': True})
class AboutTestCaseShibCourse(LoginEnrollmentTestCase, ModuleStoreTestCase):
    """
    Test cases covering about page behavior for courses that use shib enrollment domain ("shib courses")
    """
    def setUp(self):
        super(AboutTestCaseShibCourse, self).setUp()
        self.course = CourseFactory.create(enrollment_domain="shib:https://idp.stanford.edu/")
        self.about = ItemFactory.create(
            category="about", parent_location=self.course.location,
            data="OOGIE BLOOGIE", display_name="overview"
        )
    def test_logged_in_shib_course(self):
        """
        For shib courses, logged in users will see the register button, but get rejected once they click there
        """
        self.setup_user()
        url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn("OOGIE BLOOGIE", resp.content)
        self.assertIn(u"Register for {}".format(self.course.id.course), resp.content.decode('utf-8'))
        # The page shows both the register button and the shib rejection text.
        self.assertIn(SHIB_ERROR_STR, resp.content)
        self.assertIn(REG_STR, resp.content)
    def test_anonymous_user_shib_course(self):
        """
        For shib courses, anonymous users will also see the register button
        """
        url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn("OOGIE BLOOGIE", resp.content)
        self.assertIn(u"Register for {}".format(self.course.id.course), resp.content.decode('utf-8'))
        self.assertIn(SHIB_ERROR_STR, resp.content)
        self.assertIn(REG_STR, resp.content)
@attr('shard_1')
class AboutWithClosedEnrollment(ModuleStoreTestCase):
    """
    This test case will check the About page for a course that has enrollment start/end
    set but it is currently outside of that period.
    """
    def setUp(self):
        super(AboutWithClosedEnrollment, self).setUp()
        self.course = CourseFactory.create(metadata={"invitation_only": False})
        # Setup enrollment period to be in future
        now = datetime.datetime.now(pytz.UTC)
        tomorrow = now + datetime.timedelta(days=1)
        nextday = tomorrow + datetime.timedelta(days=1)
        self.course.enrollment_start = tomorrow
        self.course.enrollment_end = nextday
        self.course = self.update_course(self.course, self.user.id)
        self.about = ItemFactory.create(
            category="about", parent_location=self.course.location,
            display_name="overview"
        )
    def test_closed_enrollmement(self):
        """Outside the enrollment window the page says enrollment is closed."""
        url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn("Enrollment is Closed", resp.content)
        # Check that registration button is not present
        self.assertNotIn(REG_STR, resp.content)
    def test_course_price_is_not_visble_in_sidebar(self):
        """No price appears in the sidebar for a course without a paid mode."""
        url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        # course price is not visible in the course_about page when the course
        # mode is not set to honor
        self.assertNotIn('<span class="important-dates-item-text">$10</span>', resp.content)
@attr('shard_1')
@patch.dict(settings.FEATURES, {'ENABLE_SHOPPING_CART': True})
@patch.dict(settings.FEATURES, {'ENABLE_PAID_COURSE_REGISTRATION': True})
class AboutPurchaseCourseTestCase(LoginEnrollmentTestCase, ModuleStoreTestCase):
    """
    This test class runs through a suite of verifications regarding
    purchaseable courses.
    """
    def setUp(self):
        super(AboutPurchaseCourseTestCase, self).setUp()
        self.course = CourseFactory.create(org='MITx', number='buyme', display_name='Course To Buy')
        self._set_ecomm(self.course)

    def _set_ecomm(self, course):
        """
        Helper method to turn on ecommerce on the course by attaching a
        paid "honor" CourseMode to it.
        """
        course_mode = CourseMode(
            course_id=course.id,
            mode_slug="honor",
            mode_display_name="honor cert",
            min_price=10,
        )
        course_mode.save()

    def test_anonymous_user(self):
        """
        Make sure an anonymous user sees the purchase button
        """
        url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn("Add buyme to Cart ($10)", resp.content)

    def test_logged_in(self):
        """
        Make sure a logged in user sees the purchase button
        """
        self.setup_user()
        url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn("Add buyme to Cart ($10)", resp.content)

    def test_already_in_cart(self):
        """
        This makes sure if a user has this course in the cart, that the expected message
        appears
        """
        self.setup_user()
        cart = Order.get_cart_for_user(self.user)
        PaidCourseRegistration.add_to_order(cart, self.course.id)

        url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn("This course is in your", resp.content)
        self.assertNotIn("Add buyme to Cart ($10)", resp.content)

    def test_already_enrolled(self):
        """
        This makes sure that the already enrolled message appears for paywalled courses
        """
        self.setup_user()
        # note that we can't call self.enroll here since that goes through
        # the Django student views, which doesn't allow for enrollments
        # for paywalled courses
        CourseEnrollment.enroll(self.user, self.course.id)

        url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn("You are registered for this course", resp.content)
        self.assertIn("View Courseware", resp.content)
        self.assertNotIn("Add buyme to Cart ($10)", resp.content)

    def test_closed_enrollment(self):
        """
        This makes sure that paywalled courses also honor the registration
        window
        """
        self.setup_user()
        # Push the enrollment window entirely into the future.
        now = datetime.datetime.now(pytz.UTC)
        tomorrow = now + datetime.timedelta(days=1)
        nextday = tomorrow + datetime.timedelta(days=1)
        self.course.enrollment_start = tomorrow
        self.course.enrollment_end = nextday
        self.course = self.update_course(self.course, self.user.id)

        url = reverse('about_course', args=[self.course.id.to_deprecated_string()])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn("Enrollment is Closed", resp.content)
        self.assertNotIn("Add buyme to Cart ($10)", resp.content)
        # The course price is visible in the course_about page when the
        # course mode is set to honor and its price is set.
        self.assertIn('<span class="important-dates-item-text">$10</span>', resp.content)

    def test_invitation_only(self):
        """
        This makes sure that the invitation-only restriction takes precedence
        over any purchase enablements
        """
        course = CourseFactory.create(metadata={"invitation_only": True})
        self._set_ecomm(course)
        self.setup_user()

        url = reverse('about_course', args=[course.id.to_deprecated_string()])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn("Enrollment in this course is by invitation only", resp.content)

    def test_enrollment_cap(self):
        """
        Make sure that capped enrollments work even with
        paywalled courses
        """
        course = CourseFactory.create(
            metadata={
                "max_student_enrollments_allowed": 1,
                "display_coursenumber": "buyme",
            }
        )
        self._set_ecomm(course)
        self.setup_user()
        url = reverse('about_course', args=[course.id.to_deprecated_string()])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn("Add buyme to Cart ($10)", resp.content)

        # note that we can't call self.enroll here since that goes through
        # the Django student views, which doesn't allow for enrollments
        # for paywalled courses
        CourseEnrollment.enroll(self.user, course.id)

        # create a new account since the first account is already registered for the course
        email = 'foo_second@test.com'
        password = 'bar'
        username = 'test_second'
        self.create_account(username,
                            email, password)
        self.activate_user(email)
        self.login(email, password)

        # Get the about page again and make sure that the page says that the course is full
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn("Course is full", resp.content)
        self.assertNotIn("Add buyme to Cart ($10)", resp.content)

    def test_free_course_display(self):
        """
        Make sure other courses that don't have shopping cart enabled don't display the add-to-cart button
        and don't display the course_price field if Cosmetic Price is disabled.
        """
        course = CourseFactory.create(org='MITx', number='free', display_name='Course For Free')
        self.setup_user()
        url = reverse('about_course', args=[course.id.to_deprecated_string()])

        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertNotIn("Add free to Cart (Free)", resp.content)
        self.assertNotIn('<p class="important-dates-item-title">Price</p>', resp.content)
| agpl-3.0 |
2014c2g2/teamwork | w2/static/Brython2.0.0-20140209-164925/Lib/subprocess.py | 728 | 67282 | # subprocess - Subprocesses with accessible I/O streams
#
# For more information about this module, see PEP 324.
#
# Copyright (c) 2003-2005 by Peter Astrand <astrand@lysator.liu.se>
#
# Licensed to PSF under a Contributor Agreement.
# See http://www.python.org/2.4/license for licensing details.
r"""subprocess - Subprocesses with accessible I/O streams
This module allows you to spawn processes, connect to their
input/output/error pipes, and obtain their return codes. This module
intends to replace several other, older modules and functions, like:
os.system
os.spawn*
Information about how the subprocess module can be used to replace these
modules and functions can be found below.
Using the subprocess module
===========================
This module defines one class called Popen:
class Popen(args, bufsize=-1, executable=None,
stdin=None, stdout=None, stderr=None,
preexec_fn=None, close_fds=True, shell=False,
cwd=None, env=None, universal_newlines=False,
startupinfo=None, creationflags=0,
restore_signals=True, start_new_session=False, pass_fds=()):
Arguments are:
args should be a string, or a sequence of program arguments. The
program to execute is normally the first item in the args sequence or
string, but can be explicitly set by using the executable argument.
On POSIX, with shell=False (default): In this case, the Popen class
uses os.execvp() to execute the child program. args should normally
be a sequence. A string will be treated as a sequence with the string
as the only item (the program to execute).
On POSIX, with shell=True: If args is a string, it specifies the
command string to execute through the shell. If args is a sequence,
the first item specifies the command string, and any additional items
will be treated as additional shell arguments.
On Windows: the Popen class uses CreateProcess() to execute the child
program, which operates on strings. If args is a sequence, it will be
converted to a string using the list2cmdline method. Please note that
not all MS Windows applications interpret the command line the same
way: The list2cmdline is designed for applications using the same
rules as the MS C runtime.
bufsize will be supplied as the corresponding argument to the io.open()
function when creating the stdin/stdout/stderr pipe file objects:
0 means unbuffered (read & write are one system call and can return short),
1 means line buffered, any other positive value means use a buffer of
approximately that size. A negative bufsize, the default, means the system
default of io.DEFAULT_BUFFER_SIZE will be used.
stdin, stdout and stderr specify the executed programs' standard
input, standard output and standard error file handles, respectively.
Valid values are PIPE, an existing file descriptor (a positive
integer), an existing file object, and None. PIPE indicates that a
new pipe to the child should be created. With None, no redirection
will occur; the child's file handles will be inherited from the
parent. Additionally, stderr can be STDOUT, which indicates that the
stderr data from the applications should be captured into the same
file handle as for stdout.
On POSIX, if preexec_fn is set to a callable object, this object will be
called in the child process just before the child is executed. The use
of preexec_fn is not thread safe, using it in the presence of threads
could lead to a deadlock in the child process before the new executable
is executed.
If close_fds is true, all file descriptors except 0, 1 and 2 will be
closed before the child process is executed. The default for close_fds
varies by platform: Always true on POSIX. True when stdin/stdout/stderr
are None on Windows, false otherwise.
pass_fds is an optional sequence of file descriptors to keep open between the
parent and child. Providing any pass_fds implicitly sets close_fds to true.
if shell is true, the specified command will be executed through the
shell.
If cwd is not None, the current directory will be changed to cwd
before the child is executed.
On POSIX, if restore_signals is True all signals that Python sets to
SIG_IGN are restored to SIG_DFL in the child process before the exec.
Currently this includes the SIGPIPE, SIGXFZ and SIGXFSZ signals. This
parameter does nothing on Windows.
On POSIX, if start_new_session is True, the setsid() system call will be made
in the child process prior to executing the command.
If env is not None, it defines the environment variables for the new
process.
If universal_newlines is false, the file objects stdin, stdout and stderr
are opened as binary files, and no line ending conversion is done.
If universal_newlines is true, the file objects stdout and stderr are
opened as a text files, but lines may be terminated by any of '\n',
the Unix end-of-line convention, '\r', the old Macintosh convention or
'\r\n', the Windows convention. All of these external representations
are seen as '\n' by the Python program. Also, the newlines attribute
of the file objects stdout, stdin and stderr are not updated by the
communicate() method.
The startupinfo and creationflags, if given, will be passed to the
underlying CreateProcess() function. They can specify things such as
appearance of the main window and priority for the new process.
(Windows only)
This module also defines some shortcut functions:
call(*popenargs, **kwargs):
Run command with arguments. Wait for command to complete, then
return the returncode attribute.
The arguments are the same as for the Popen constructor. Example:
>>> retcode = subprocess.call(["ls", "-l"])
check_call(*popenargs, **kwargs):
Run command with arguments. Wait for command to complete. If the
exit code was zero then return, otherwise raise
CalledProcessError. The CalledProcessError object will have the
return code in the returncode attribute.
The arguments are the same as for the Popen constructor. Example:
>>> subprocess.check_call(["ls", "-l"])
0
getstatusoutput(cmd):
Return (status, output) of executing cmd in a shell.
Execute the string 'cmd' in a shell with os.popen() and return a 2-tuple
(status, output). cmd is actually run as '{ cmd ; } 2>&1', so that the
returned output will contain output or error messages. A trailing newline
is stripped from the output. The exit status for the command can be
interpreted according to the rules for the C function wait(). Example:
>>> subprocess.getstatusoutput('ls /bin/ls')
(0, '/bin/ls')
>>> subprocess.getstatusoutput('cat /bin/junk')
(256, 'cat: /bin/junk: No such file or directory')
>>> subprocess.getstatusoutput('/bin/junk')
(256, 'sh: /bin/junk: not found')
getoutput(cmd):
Return output (stdout or stderr) of executing cmd in a shell.
Like getstatusoutput(), except the exit status is ignored and the return
value is a string containing the command's output. Example:
>>> subprocess.getoutput('ls /bin/ls')
'/bin/ls'
check_output(*popenargs, **kwargs):
Run command with arguments and return its output.
If the exit code was non-zero it raises a CalledProcessError. The
CalledProcessError object will have the return code in the returncode
attribute and output in the output attribute.
The arguments are the same as for the Popen constructor. Example:
>>> output = subprocess.check_output(["ls", "-l", "/dev/null"])
Exceptions
----------
Exceptions raised in the child process, before the new program has
started to execute, will be re-raised in the parent. Additionally,
the exception object will have one extra attribute called
'child_traceback', which is a string containing traceback information
from the child's point of view.
The most common exception raised is OSError. This occurs, for
example, when trying to execute a non-existent file. Applications
should prepare for OSErrors.
A ValueError will be raised if Popen is called with invalid arguments.
Exceptions defined within this module inherit from SubprocessError.
check_call() and check_output() will raise CalledProcessError if the
called process returns a non-zero return code. TimeoutExpired
be raised if a timeout was specified and expired.
Security
--------
Unlike some other popen functions, this implementation will never call
/bin/sh implicitly. This means that all characters, including shell
metacharacters, can safely be passed to child processes.
Popen objects
=============
Instances of the Popen class have the following methods:
poll()
Check if child process has terminated. Returns returncode
attribute.
wait()
Wait for child process to terminate. Returns returncode attribute.
communicate(input=None)
Interact with process: Send data to stdin. Read data from stdout
and stderr, until end-of-file is reached. Wait for process to
terminate. The optional input argument should be a string to be
sent to the child process, or None, if no data should be sent to
the child.
communicate() returns a tuple (stdout, stderr).
Note: The data read is buffered in memory, so do not use this
method if the data size is large or unlimited.
The following attributes are also available:
stdin
If the stdin argument is PIPE, this attribute is a file object
that provides input to the child process. Otherwise, it is None.
stdout
If the stdout argument is PIPE, this attribute is a file object
that provides output from the child process. Otherwise, it is
None.
stderr
If the stderr argument is PIPE, this attribute is file object that
provides error output from the child process. Otherwise, it is
None.
pid
The process ID of the child process.
returncode
The child return code. A None value indicates that the process
hasn't terminated yet. A negative value -N indicates that the
child was terminated by signal N (POSIX only).
Replacing older functions with the subprocess module
====================================================
In this section, "a ==> b" means that b can be used as a replacement
for a.
Note: All functions in this section fail (more or less) silently if
the executed program cannot be found; this module raises an OSError
exception.
In the following examples, we assume that the subprocess module is
imported with "from subprocess import *".
Replacing /bin/sh shell backquote
---------------------------------
output=`mycmd myarg`
==>
output = Popen(["mycmd", "myarg"], stdout=PIPE).communicate()[0]
Replacing shell pipe line
-------------------------
output=`dmesg | grep hda`
==>
p1 = Popen(["dmesg"], stdout=PIPE)
p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE)
output = p2.communicate()[0]
Replacing os.system()
---------------------
sts = os.system("mycmd" + " myarg")
==>
p = Popen("mycmd" + " myarg", shell=True)
pid, sts = os.waitpid(p.pid, 0)
Note:
* Calling the program through the shell is usually not required.
* It's easier to look at the returncode attribute than the
exitstatus.
A more real-world example would look like this:
try:
retcode = call("mycmd" + " myarg", shell=True)
if retcode < 0:
print("Child was terminated by signal", -retcode, file=sys.stderr)
else:
print("Child returned", retcode, file=sys.stderr)
except OSError as e:
print("Execution failed:", e, file=sys.stderr)
Replacing os.spawn*
-------------------
P_NOWAIT example:
pid = os.spawnlp(os.P_NOWAIT, "/bin/mycmd", "mycmd", "myarg")
==>
pid = Popen(["/bin/mycmd", "myarg"]).pid
P_WAIT example:
retcode = os.spawnlp(os.P_WAIT, "/bin/mycmd", "mycmd", "myarg")
==>
retcode = call(["/bin/mycmd", "myarg"])
Vector example:
os.spawnvp(os.P_NOWAIT, path, args)
==>
Popen([path] + args[1:])
Environment example:
os.spawnlpe(os.P_NOWAIT, "/bin/mycmd", "mycmd", "myarg", env)
==>
Popen(["/bin/mycmd", "myarg"], env={"PATH": "/usr/bin"})
"""
import sys
mswindows = (sys.platform == "win32")
import io
import os
import time
import traceback
import gc
import signal
import builtins
import warnings
import errno
try:
from time import monotonic as _time
except ImportError:
from time import time as _time
# Exception classes used by this module.
class SubprocessError(Exception):
    """Base class for all exceptions raised by the subprocess module."""
class CalledProcessError(SubprocessError):
    """Raised when a process run by check_call() or check_output()
    exits with a non-zero status.

    The exit status is stored in the ``returncode`` attribute;
    check_output() additionally stores the captured output in the
    ``output`` attribute.
    """
    def __init__(self, returncode, cmd, output=None):
        self.returncode = returncode
        self.cmd = cmd
        self.output = output

    def __str__(self):
        template = "Command '%s' returned non-zero exit status %d"
        return template % (self.cmd, self.returncode)
class TimeoutExpired(SubprocessError):
    """Raised when waiting for a child process exceeds the given timeout.

    Carries the command (``cmd``), the ``timeout`` that elapsed, and
    optionally whatever ``output`` was captured before expiry.
    """
    def __init__(self, cmd, timeout, output=None):
        self.cmd = cmd
        self.timeout = timeout
        self.output = output

    def __str__(self):
        template = "Command '%s' timed out after %s seconds"
        return template % (self.cmd, self.timeout)
if mswindows:
    # Windows implementation: threads drive communicate(), while msvcrt and
    # _winapi supply the CRT and Win32 primitives for pipes and processes.
    import threading
    import msvcrt
    import _winapi

    class STARTUPINFO:
        """Minimal stand-in for the Win32 STARTUPINFO structure; Popen
        fills in these fields before passing them to CreateProcess()."""
        dwFlags = 0
        hStdInput = None
        hStdOutput = None
        hStdError = None
        wShowWindow = 0

    class pywintypes:
        # Backwards-compatibility shim: old callers caught pywintypes.error.
        error = IOError
else:
    # POSIX implementation: select/poll multiplex the pipes and the
    # _posixsubprocess C helper performs the fork/exec safely.
    import select
    _has_poll = hasattr(select, 'poll')
    import _posixsubprocess
    _create_pipe = _posixsubprocess.cloexec_pipe

    # When select or poll has indicated that the file is writable,
    # we can write up to _PIPE_BUF bytes without risk of blocking.
    # POSIX defines PIPE_BUF as >= 512.
    _PIPE_BUF = getattr(select, 'PIPE_BUF', 512)
# Names re-exported by "from subprocess import *".
__all__ = ["Popen", "PIPE", "STDOUT", "call", "check_call", "getstatusoutput",
           "getoutput", "check_output", "CalledProcessError", "DEVNULL"]

if mswindows:
    # Re-export the CreateProcess()-related constants so callers can build
    # startupinfo/creationflags values without importing _winapi directly.
    from _winapi import (CREATE_NEW_CONSOLE, CREATE_NEW_PROCESS_GROUP,
                         STD_INPUT_HANDLE, STD_OUTPUT_HANDLE,
                         STD_ERROR_HANDLE, SW_HIDE,
                         STARTF_USESTDHANDLES, STARTF_USESHOWWINDOW)

    __all__.extend(["CREATE_NEW_CONSOLE", "CREATE_NEW_PROCESS_GROUP",
                    "STD_INPUT_HANDLE", "STD_OUTPUT_HANDLE",
                    "STD_ERROR_HANDLE", "SW_HIDE",
                    "STARTF_USESTDHANDLES", "STARTF_USESHOWWINDOW"])

    class Handle(int):
        """An int subclass wrapping a Win32 handle that closes itself at
        most once (via Close(), Detach(), or garbage collection)."""
        closed = False  # set True once the handle is closed or detached

        def Close(self, CloseHandle=_winapi.CloseHandle):
            # CloseHandle is bound as a default argument so it remains
            # reachable even during interpreter shutdown (__del__ = Close).
            if not self.closed:
                self.closed = True
                CloseHandle(self)

        def Detach(self):
            # Give up ownership: mark closed without closing and return the
            # raw handle value; raises if ownership was already released.
            if not self.closed:
                self.closed = True
                return int(self)
            raise ValueError("already closed")

        def __repr__(self):
            return "Handle(%d)" % int(self)

        __del__ = Close
        __str__ = __repr__
try:
MAXFD = os.sysconf("SC_OPEN_MAX")
except:
MAXFD = 256
# This lists holds Popen instances for which the underlying process had not
# exited at the time its __del__ method got called: those processes are wait()ed
# for synchronously from _cleanup() when a new Popen object is created, to avoid
# zombie processes.
_active = []
def _cleanup():
    """Reap leaked child processes recorded in the module-level _active list.

    Called whenever a new Popen is constructed: each leftover instance is
    polled, and those whose process has exited are dropped from the list.
    """
    for leftover in list(_active):
        status = leftover._internal_poll(_deadstate=sys.maxsize)
        if status is None:
            continue  # still running; keep it for a later sweep
        try:
            _active.remove(leftover)
        except ValueError:
            # Another thread's _cleanup() already removed it; harmless.
            pass
# Sentinel values accepted for the stdin/stdout/stderr arguments of Popen:
PIPE = -1     # create a new pipe to the child
STDOUT = -2   # redirect the child's stderr into its stdout
DEVNULL = -3  # attach the stream to os.devnull
def _eintr_retry_call(func, *args):
while True:
try:
return func(*args)
except InterruptedError:
continue
# XXX This function is only used by multiprocessing and the test suite,
# but it's here so that it can be imported when Python is compiled without
# threads.
def _args_from_interpreter_flags():
"""Return a list of command-line arguments reproducing the current
settings in sys.flags and sys.warnoptions."""
flag_opt_map = {
'debug': 'd',
# 'inspect': 'i',
# 'interactive': 'i',
'optimize': 'O',
'dont_write_bytecode': 'B',
'no_user_site': 's',
'no_site': 'S',
'ignore_environment': 'E',
'verbose': 'v',
'bytes_warning': 'b',
'quiet': 'q',
'hash_randomization': 'R',
}
args = []
for flag, opt in flag_opt_map.items():
v = getattr(sys.flags, flag)
if v > 0:
args.append('-' + opt * v)
for opt in sys.warnoptions:
args.append('-W' + opt)
return args
def call(*popenargs, timeout=None, **kwargs):
    """Run a command, wait for it to complete (or for *timeout* seconds),
    and return its returncode attribute.

    The arguments are the same as for the Popen constructor.  Example:

    retcode = call(["ls", "-l"])
    """
    with Popen(*popenargs, **kwargs) as process:
        try:
            return process.wait(timeout=timeout)
        except:
            # On timeout (or any other failure) kill the child first so the
            # context manager's final wait() cannot hang, then re-raise.
            process.kill()
            process.wait()
            raise
def check_call(*popenargs, **kwargs):
    """Run a command and wait for it to complete; return 0 on success,
    otherwise raise CalledProcessError carrying the exit code in its
    returncode attribute.

    The arguments are the same as for the call function.  Example:

    check_call(["ls", "-l"])
    """
    returncode = call(*popenargs, **kwargs)
    if not returncode:
        return 0
    # Report the command as the caller spelled it: the "args" keyword if
    # given, otherwise the first positional argument.
    cmd = kwargs.get("args")
    if cmd is None:
        cmd = popenargs[0]
    raise CalledProcessError(returncode, cmd)
def check_output(*popenargs, timeout=None, **kwargs):
    r"""Run a command and return its standard output.

    Raises CalledProcessError if the command exits non-zero; the exception
    carries the exit code in .returncode and the captured output in
    .output.  The arguments are the same as for the Popen constructor:

    >>> check_output(["ls", "-l", "/dev/null"])
    b'crw-rw-rw- 1 root root 1, 3 Oct 18  2007 /dev/null\n'

    The stdout argument is not allowed as it is used internally.
    To capture standard error in the result, use stderr=STDOUT.
    If universal_newlines=True is passed, the return value will be a
    string rather than bytes.
    """
    if 'stdout' in kwargs:
        raise ValueError('stdout argument not allowed, it will be overridden.')
    with Popen(*popenargs, stdout=PIPE, **kwargs) as process:
        try:
            output, _unused_err = process.communicate(timeout=timeout)
        except TimeoutExpired:
            # Kill the child, drain the pipe, and re-raise with whatever
            # output had been collected before the timeout.
            process.kill()
            output, _unused_err = process.communicate()
            raise TimeoutExpired(process.args, timeout, output=output)
        except:
            process.kill()
            process.wait()
            raise
        exit_code = process.poll()
        if exit_code:
            raise CalledProcessError(exit_code, process.args, output=output)
    return output
def list2cmdline(seq):
    """
    Translate a sequence of arguments into one command-line string using
    the quoting rules of the MS C runtime:

    1) Arguments are separated by white space (space or tab).
    2) An argument containing white space (or an empty argument) is
       wrapped in double quotation marks.
    3) A double quotation mark preceded by a backslash is a literal
       double quotation mark.
    4) Backslashes are literal unless they immediately precede a double
       quotation mark, in which case each pair collapses to one literal
       backslash and an odd trailing backslash escapes the quote.

    See "Parsing C++ Command-Line Arguments" on MSDN
    (http://msdn.microsoft.com/en-us/library/17w5ykft.aspx).
    """
    pieces = []
    quote_needed = False
    for argument in seq:
        pending_backslashes = []

        # Separate this argument from the previous one.
        if pieces:
            pieces.append(' ')

        quote_needed = (" " in argument) or ("\t" in argument) or not argument
        if quote_needed:
            pieces.append('"')

        for ch in argument:
            if ch == '\\':
                # Meaning depends on what follows; defer the decision.
                pending_backslashes.append(ch)
            elif ch == '"':
                # Double every deferred backslash, then escape the quote.
                pieces.append('\\' * len(pending_backslashes) * 2)
                pending_backslashes = []
                pieces.append('\\"')
            else:
                # Ordinary character: flush deferred backslashes literally.
                if pending_backslashes:
                    pieces.extend(pending_backslashes)
                    pending_backslashes = []
                pieces.append(ch)

        # Trailing backslashes are literal...
        if pending_backslashes:
            pieces.extend(pending_backslashes)
        if quote_needed:
            # ...but must be doubled when followed by the closing quote.
            pieces.extend(pending_backslashes)
            pieces.append('"')
    return ''.join(pieces)
# Various tools for executing commands and looking at their output and status.
#
# NB This only works (and is only relevant) for POSIX.
def getstatusoutput(cmd):
    """Return (status, output) of executing cmd in a shell.

    The command is run as '{ cmd ; } 2>&1' via os.popen(), so the returned
    output also contains anything written to stderr.  A single trailing
    newline is stripped.  The status is the raw wait()-style exit code
    (interpret per the C wait() rules), or 0 on success.  Example:

    >>> import subprocess
    >>> subprocess.getstatusoutput('ls /bin/ls')
    (0, '/bin/ls')
    >>> subprocess.getstatusoutput('cat /bin/junk')
    (256, 'cat: /bin/junk: No such file or directory')
    >>> subprocess.getstatusoutput('/bin/junk')
    (256, 'sh: /bin/junk: not found')
    """
    with os.popen('{ ' + cmd + '; } 2>&1', 'r') as pipe:
        try:
            text = pipe.read()
            sts = pipe.close()
        except:
            # Make sure the shell child does not linger if reading failed.
            process = pipe._proc
            process.kill()
            process.wait()
            raise
        if sts is None:
            sts = 0  # close() returns None on a clean exit
        if text.endswith('\n'):
            text = text[:-1]
        return sts, text
def getoutput(cmd):
    """Return the output (stdout and stderr) of executing cmd in a shell,
    discarding the exit status.

    >>> import subprocess
    >>> subprocess.getoutput('ls /bin/ls')
    '/bin/ls'
    """
    _status, text = getstatusoutput(cmd)
    return text
# Sentinel letting Popen distinguish "close_fds was not passed" from an
# explicit True/False, since the default differs between Windows and POSIX.
_PLATFORM_DEFAULT_CLOSE_FDS = object()
class Popen(object):
    def __init__(self, args, bufsize=-1, executable=None,
                 stdin=None, stdout=None, stderr=None,
                 preexec_fn=None, close_fds=_PLATFORM_DEFAULT_CLOSE_FDS,
                 shell=False, cwd=None, env=None, universal_newlines=False,
                 startupinfo=None, creationflags=0,
                 restore_signals=True, start_new_session=False,
                 pass_fds=()):
        """Create new Popen instance.

        See the module docstring for the meaning of each argument.  The
        child process is launched before __init__ returns; if launching
        fails, every pipe fd created here is closed again before the
        exception propagates.
        """
        # Reap any zombie children left behind by earlier Popen objects.
        _cleanup()

        self._child_created = False
        self._input = None
        self._communication_started = False
        if bufsize is None:
            bufsize = -1  # Restore default
        if not isinstance(bufsize, int):
            raise TypeError("bufsize must be an integer")

        # Validate platform-specific arguments and resolve the close_fds
        # sentinel to a real boolean for this platform.
        if mswindows:
            if preexec_fn is not None:
                raise ValueError("preexec_fn is not supported on Windows "
                                 "platforms")
            any_stdio_set = (stdin is not None or stdout is not None or
                             stderr is not None)
            if close_fds is _PLATFORM_DEFAULT_CLOSE_FDS:
                if any_stdio_set:
                    close_fds = False
                else:
                    close_fds = True
            elif close_fds and any_stdio_set:
                raise ValueError(
                        "close_fds is not supported on Windows platforms"
                        " if you redirect stdin/stdout/stderr")
        else:
            # POSIX
            if close_fds is _PLATFORM_DEFAULT_CLOSE_FDS:
                close_fds = True
            if pass_fds and not close_fds:
                warnings.warn("pass_fds overriding close_fds.", RuntimeWarning)
                close_fds = True
            if startupinfo is not None:
                raise ValueError("startupinfo is only supported on Windows "
                                 "platforms")
            if creationflags != 0:
                raise ValueError("creationflags is only supported on Windows "
                                 "platforms")

        self.args = args
        self.stdin = None
        self.stdout = None
        self.stderr = None
        self.pid = None
        self.returncode = None
        self.universal_newlines = universal_newlines

        # Input and output objects. The general principle is like
        # this:
        #
        # Parent                   Child
        # ------                   -----
        # p2cwrite   ---stdin--->  p2cread
        # c2pread   <--stdout---   c2pwrite
        # errread   <--stderr---   errwrite
        #
        # On POSIX, the child objects are file descriptors.  On
        # Windows, these are Windows file handles.  The parent objects
        # are file descriptors on both platforms.  The parent objects
        # are -1 when not using PIPEs. The child objects are -1
        # when not redirecting.

        (p2cread, p2cwrite,
         c2pread, c2pwrite,
         errread, errwrite) = self._get_handles(stdin, stdout, stderr)

        # We wrap OS handles *before* launching the child, otherwise a
        # quickly terminating child could make our fds unwrappable
        # (see #8458).
        #fix me brython syntax error
        #if mswindows:
        #    if p2cwrite != -1:
        #        p2cwrite = msvcrt.open_osfhandle(p2cwrite.Detach(), 0)
        #    if c2pread != -1:
        #        c2pread = msvcrt.open_osfhandle(c2pread.Detach(), 0)
        #    if errread != -1:
        #        errread = msvcrt.open_osfhandle(errread.Detach(), 0)

        # Wrap the parent ends of the pipes in Python file objects; text
        # wrappers are layered on top in universal-newlines mode.
        if p2cwrite != -1:
            self.stdin = io.open(p2cwrite, 'wb', bufsize)
            if universal_newlines:
                self.stdin = io.TextIOWrapper(self.stdin, write_through=True)
        if c2pread != -1:
            self.stdout = io.open(c2pread, 'rb', bufsize)
            if universal_newlines:
                self.stdout = io.TextIOWrapper(self.stdout)
        if errread != -1:
            self.stderr = io.open(errread, 'rb', bufsize)
            if universal_newlines:
                self.stderr = io.TextIOWrapper(self.stderr)

        self._closed_child_pipe_fds = False
        try:
            self._execute_child(args, executable, preexec_fn, close_fds,
                                pass_fds, cwd, env,
                                startupinfo, creationflags, shell,
                                p2cread, p2cwrite,
                                c2pread, c2pwrite,
                                errread, errwrite,
                                restore_signals, start_new_session)
        except:
            # Cleanup if the child failed starting.
            for f in filter(None, (self.stdin, self.stdout, self.stderr)):
                try:
                    f.close()
                except EnvironmentError:
                    pass  # Ignore EBADF or other errors.

            # Only close the child's ends ourselves if _execute_child did
            # not already do so on its own cleanup path.
            if not self._closed_child_pipe_fds:
                to_close = []
                if stdin == PIPE:
                    to_close.append(p2cread)
                if stdout == PIPE:
                    to_close.append(c2pwrite)
                if stderr == PIPE:
                    to_close.append(errwrite)
                if hasattr(self, '_devnull'):
                    to_close.append(self._devnull)
                for fd in to_close:
                    try:
                        os.close(fd)
                    except EnvironmentError:
                        pass

            raise
def _translate_newlines(self, data, encoding):
data = data.decode(encoding)
return data.replace("\r\n", "\n").replace("\r", "\n")
    def __enter__(self):
        """Context-manager entry: return the Popen object itself."""
        return self
def __exit__(self, type, value, traceback):
if self.stdout:
self.stdout.close()
if self.stderr:
self.stderr.close()
if self.stdin:
self.stdin.close()
# Wait for the process to terminate, to avoid zombies.
self.wait()
    def __del__(self, _maxsize=sys.maxsize, _active=_active):
        # The module globals are bound as default arguments so they remain
        # reachable even if the module is being torn down at interpreter
        # shutdown when this finalizer runs.
        #
        # If __init__ hasn't had a chance to execute (e.g. if it
        # was passed an undeclared keyword argument), we don't
        # have a _child_created attribute at all.
        if not getattr(self, '_child_created', False):
            # We didn't get to successfully create a child process.
            return
        # In case the child hasn't been waited on, check if it's done.
        self._internal_poll(_deadstate=_maxsize)
        if self.returncode is None and _active is not None:
            # Child is still running, keep us alive until we can wait on it.
            _active.append(self)
def _get_devnull(self):
if not hasattr(self, '_devnull'):
self._devnull = os.open(os.devnull, os.O_RDWR)
return self._devnull
    def communicate(self, input=None, timeout=None):
        """Interact with process: send *input* to stdin (bytes, or str in
        universal-newlines mode), read data from stdout and stderr until
        end-of-file is reached, and wait for the process to terminate.

        Returns a (stdout, stderr) tuple; an element is None when the
        corresponding stream was not opened as a PIPE.  Raises
        TimeoutExpired if the child has not finished within *timeout*
        seconds; communication may then be retried, but no new *input*
        may be supplied.
        """
        if self._communication_started and input:
            raise ValueError("Cannot send input after starting communication")

        # Optimization: If we are not worried about timeouts, we haven't
        # started communicating, and we have one or zero pipes, using select()
        # or threads is unnecessary.
        if (timeout is None and not self._communication_started and
            [self.stdin, self.stdout, self.stderr].count(None) >= 2):
            stdout = None
            stderr = None
            if self.stdin:
                if input:
                    try:
                        self.stdin.write(input)
                    except IOError as e:
                        # EPIPE/EINVAL mean the child already exited; treat
                        # that like normal EOF instead of failing.
                        if e.errno != errno.EPIPE and e.errno != errno.EINVAL:
                            raise
                self.stdin.close()
            elif self.stdout:
                stdout = _eintr_retry_call(self.stdout.read)
                self.stdout.close()
            elif self.stderr:
                stderr = _eintr_retry_call(self.stderr.read)
                self.stderr.close()
            self.wait()
        else:
            if timeout is not None:
                endtime = _time() + timeout
            else:
                endtime = None  # no deadline

            try:
                stdout, stderr = self._communicate(input, endtime, timeout)
            finally:
                # Mark started even on timeout, so a retry refuses new input.
                self._communication_started = True

            sts = self.wait(timeout=self._remaining_time(endtime))

        return (stdout, stderr)
def poll(self):
return self._internal_poll()
def _remaining_time(self, endtime):
"""Convenience for _communicate when computing timeouts."""
if endtime is None:
return None
else:
return endtime - _time()
def _check_timeout(self, endtime, orig_timeout):
"""Convenience for checking if a timeout has expired."""
if endtime is None:
return
if _time() > endtime:
raise TimeoutExpired(self.args, orig_timeout)
if mswindows:
#
# Windows methods
#
        def _get_handles(self, stdin, stdout, stderr):
            """Construct and return tuple with IO objects:
            p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite

            Each of stdin/stdout/stderr may be None (inherit the
            parent's standard handle), PIPE, DEVNULL, an integer file
            descriptor, or a file-like object with a fileno().  -1
            marks an unused position.
            """
            if stdin is None and stdout is None and stderr is None:
                # Nothing redirected: no handle work needed at all.
                return (-1, -1, -1, -1, -1, -1)
            p2cread, p2cwrite = -1, -1
            c2pread, c2pwrite = -1, -1
            errread, errwrite = -1, -1
            if stdin is None:
                p2cread = _winapi.GetStdHandle(_winapi.STD_INPUT_HANDLE)
                if p2cread is None:
                    # The parent has no stdin handle: give the child the
                    # read end of a fresh pipe and close the write end.
                    p2cread, _ = _winapi.CreatePipe(None, 0)
                    p2cread = Handle(p2cread)
                    _winapi.CloseHandle(_)
            elif stdin == PIPE:
                p2cread, p2cwrite = _winapi.CreatePipe(None, 0)
                p2cread, p2cwrite = Handle(p2cread), Handle(p2cwrite)
            elif stdin == DEVNULL:
                p2cread = msvcrt.get_osfhandle(self._get_devnull())
            elif isinstance(stdin, int):
                p2cread = msvcrt.get_osfhandle(stdin)
            else:
                # Assuming file-like object
                p2cread = msvcrt.get_osfhandle(stdin.fileno())
            # Handles passed to the child must be inheritable duplicates.
            p2cread = self._make_inheritable(p2cread)
            if stdout is None:
                c2pwrite = _winapi.GetStdHandle(_winapi.STD_OUTPUT_HANDLE)
                if c2pwrite is None:
                    # No parent stdout handle: hand the child the write
                    # end of a dummy pipe and drop the read end.
                    _, c2pwrite = _winapi.CreatePipe(None, 0)
                    c2pwrite = Handle(c2pwrite)
                    _winapi.CloseHandle(_)
            elif stdout == PIPE:
                c2pread, c2pwrite = _winapi.CreatePipe(None, 0)
                c2pread, c2pwrite = Handle(c2pread), Handle(c2pwrite)
            elif stdout == DEVNULL:
                c2pwrite = msvcrt.get_osfhandle(self._get_devnull())
            elif isinstance(stdout, int):
                c2pwrite = msvcrt.get_osfhandle(stdout)
            else:
                # Assuming file-like object
                c2pwrite = msvcrt.get_osfhandle(stdout.fileno())
            c2pwrite = self._make_inheritable(c2pwrite)
            if stderr is None:
                errwrite = _winapi.GetStdHandle(_winapi.STD_ERROR_HANDLE)
                if errwrite is None:
                    _, errwrite = _winapi.CreatePipe(None, 0)
                    errwrite = Handle(errwrite)
                    _winapi.CloseHandle(_)
            elif stderr == PIPE:
                errread, errwrite = _winapi.CreatePipe(None, 0)
                errread, errwrite = Handle(errread), Handle(errwrite)
            elif stderr == STDOUT:
                # Merge stderr into whatever stdout resolved to above.
                errwrite = c2pwrite
            elif stderr == DEVNULL:
                errwrite = msvcrt.get_osfhandle(self._get_devnull())
            elif isinstance(stderr, int):
                errwrite = msvcrt.get_osfhandle(stderr)
            else:
                # Assuming file-like object
                errwrite = msvcrt.get_osfhandle(stderr.fileno())
            errwrite = self._make_inheritable(errwrite)
            return (p2cread, p2cwrite,
                    c2pread, c2pwrite,
                    errread, errwrite)
        def _make_inheritable(self, handle):
            """Return a duplicate of handle, which is inheritable"""
            # The 1 argument (bInheritHandle) makes the duplicate
            # inheritable by child processes; DUPLICATE_SAME_ACCESS
            # keeps the original access rights.
            h = _winapi.DuplicateHandle(
                _winapi.GetCurrentProcess(), handle,
                _winapi.GetCurrentProcess(), 0, 1,
                _winapi.DUPLICATE_SAME_ACCESS)
            return Handle(h)
        def _find_w9xpopen(self):
            """Find and return absolute path to w9xpopen.exe

            Looks first next to the current executable, then under
            sys.base_exec_prefix; raises RuntimeError if not found.
            """
            w9xpopen = os.path.join(
                            os.path.dirname(_winapi.GetModuleFileName(0)),
                                    "w9xpopen.exe")
            if not os.path.exists(w9xpopen):
                # Eeek - file-not-found - possibly an embedding
                # situation - see if we can locate it in sys.exec_prefix
                w9xpopen = os.path.join(os.path.dirname(sys.base_exec_prefix),
                                        "w9xpopen.exe")
                if not os.path.exists(w9xpopen):
                    raise RuntimeError("Cannot locate w9xpopen.exe, which is "
                                       "needed for Popen to work with your "
                                       "shell or platform.")
            return w9xpopen
        def _execute_child(self, args, executable, preexec_fn, close_fds,
                           pass_fds, cwd, env,
                           startupinfo, creationflags, shell,
                           p2cread, p2cwrite,
                           c2pread, c2pwrite,
                           errread, errwrite,
                           unused_restore_signals, unused_start_new_session):
            """Execute program (MS Windows version)

            The p2c*/c2p*/err* handles come from _get_handles(); -1
            marks an unused position.  Sets self.pid and self._handle
            on success.
            """
            assert not pass_fds, "pass_fds not supported on Windows."
            if not isinstance(args, str):
                # CreateProcess wants a single command-line string.
                args = list2cmdline(args)
            # Process startup details
            if startupinfo is None:
                startupinfo = STARTUPINFO()
            if -1 not in (p2cread, c2pwrite, errwrite):
                # All three std streams were redirected: tell
                # CreateProcess to use the handles we prepared.
                startupinfo.dwFlags |= _winapi.STARTF_USESTDHANDLES
                startupinfo.hStdInput = p2cread
                startupinfo.hStdOutput = c2pwrite
                startupinfo.hStdError = errwrite
            if shell:
                startupinfo.dwFlags |= _winapi.STARTF_USESHOWWINDOW
                startupinfo.wShowWindow = _winapi.SW_HIDE
                comspec = os.environ.get("COMSPEC", "cmd.exe")
                args = '{} /c "{}"'.format (comspec, args)
                if (_winapi.GetVersion() >= 0x80000000 or
                        os.path.basename(comspec).lower() == "command.com"):
                    # Win9x, or using command.com on NT. We need to
                    # use the w9xpopen intermediate program. For more
                    # information, see KB Q150956
                    # (http://web.archive.org/web/20011105084002/http://support.microsoft.com/support/kb/articles/Q150/9/56.asp)
                    w9xpopen = self._find_w9xpopen()
                    args = '"%s" %s' % (w9xpopen, args)
                    # Not passing CREATE_NEW_CONSOLE has been known to
                    # cause random failures on win9x. Specifically a
                    # dialog: "Your program accessed mem currently in
                    # use at xxx" and a hopeful warning about the
                    # stability of your system. Cost is Ctrl+C won't
                    # kill children.
                    creationflags |= _winapi.CREATE_NEW_CONSOLE
            # Start the process
            try:
                hp, ht, pid, tid = _winapi.CreateProcess(executable, args,
                                         # no special security
                                         None, None,
                                         int(not close_fds),
                                         creationflags,
                                         env,
                                         cwd,
                                         startupinfo)
            except pywintypes.error as e:
                # Translate pywintypes.error to WindowsError, which is
                # a subclass of OSError. FIXME: We should really
                # translate errno using _sys_errlist (or similar), but
                # how can this be done from Python?
                raise WindowsError(*e.args)
            finally:
                # Child is launched. Close the parent's copy of those pipe
                # handles that only the child should have open. You need
                # to make sure that no handles to the write end of the
                # output pipe are maintained in this process or else the
                # pipe will not close when the child process exits and the
                # ReadFile will hang.
                if p2cread != -1:
                    p2cread.Close()
                if c2pwrite != -1:
                    c2pwrite.Close()
                if errwrite != -1:
                    errwrite.Close()
                if hasattr(self, '_devnull'):
                    os.close(self._devnull)
            # Retain the process handle, but close the thread handle
            self._child_created = True
            self._handle = Handle(hp)
            self.pid = pid
            _winapi.CloseHandle(ht)
        def _internal_poll(self, _deadstate=None,
                _WaitForSingleObject=_winapi.WaitForSingleObject,
                _WAIT_OBJECT_0=_winapi.WAIT_OBJECT_0,
                _GetExitCodeProcess=_winapi.GetExitCodeProcess):
            """Check if child process has terminated. Returns returncode
            attribute.
            This method is called by __del__, so it can only refer to objects
            in its local scope.
            """
            # The _winapi callables are bound as default arguments so
            # they stay reachable even during interpreter shutdown,
            # when module globals may already have been cleared.
            if self.returncode is None:
                # Zero timeout: poll the process handle without blocking.
                if _WaitForSingleObject(self._handle, 0) == _WAIT_OBJECT_0:
                    self.returncode = _GetExitCodeProcess(self._handle)
            return self.returncode
        def wait(self, timeout=None, endtime=None):
            """Wait for child process to terminate. Returns returncode
            attribute.

            Raises TimeoutExpired if the process is still running when
            the timeout (or absolute *endtime* deadline) elapses.
            """
            # An absolute endtime deadline overrides a relative timeout.
            if endtime is not None:
                timeout = self._remaining_time(endtime)
            if timeout is None:
                timeout_millis = _winapi.INFINITE
            else:
                # WaitForSingleObject takes milliseconds.
                timeout_millis = int(timeout * 1000)
            if self.returncode is None:
                result = _winapi.WaitForSingleObject(self._handle,
                                                    timeout_millis)
                if result == _winapi.WAIT_TIMEOUT:
                    raise TimeoutExpired(self.args, timeout)
                self.returncode = _winapi.GetExitCodeProcess(self._handle)
            return self.returncode
def _readerthread(self, fh, buffer):
buffer.append(fh.read())
fh.close()
        def _communicate(self, input, endtime, orig_timeout):
            # Windows implementation of communicate(): one daemon reader
            # thread per output pipe, while this thread feeds stdin.
            # Start reader threads feeding into a list hanging off of this
            # object, unless they've already been started.
            if self.stdout and not hasattr(self, "_stdout_buff"):
                self._stdout_buff = []
                self.stdout_thread = \
                        threading.Thread(target=self._readerthread,
                                         args=(self.stdout, self._stdout_buff))
                self.stdout_thread.daemon = True
                self.stdout_thread.start()
            if self.stderr and not hasattr(self, "_stderr_buff"):
                self._stderr_buff = []
                self.stderr_thread = \
                        threading.Thread(target=self._readerthread,
                                         args=(self.stderr, self._stderr_buff))
                self.stderr_thread.daemon = True
                self.stderr_thread.start()
            if self.stdin:
                if input is not None:
                    try:
                        self.stdin.write(input)
                    except IOError as e:
                        # EPIPE: the child stopped reading its stdin;
                        # not an error for communicate().
                        if e.errno != errno.EPIPE:
                            raise
                self.stdin.close()
            # Wait for the reader threads, or time out. If we time out, the
            # threads remain reading and the fds left open in case the user
            # calls communicate again.
            if self.stdout is not None:
                self.stdout_thread.join(self._remaining_time(endtime))
                if self.stdout_thread.is_alive():
                    raise TimeoutExpired(self.args, orig_timeout)
            if self.stderr is not None:
                self.stderr_thread.join(self._remaining_time(endtime))
                if self.stderr_thread.is_alive():
                    raise TimeoutExpired(self.args, orig_timeout)
            # Collect the output from and close both pipes, now that we know
            # both have been read successfully.
            stdout = None
            stderr = None
            if self.stdout:
                stdout = self._stdout_buff
                self.stdout.close()
            if self.stderr:
                stderr = self._stderr_buff
                self.stderr.close()
            # All data exchanged. Translate lists into strings.
            # (Each reader thread appended exactly one element.)
            if stdout is not None:
                stdout = stdout[0]
            if stderr is not None:
                stderr = stderr[0]
            return (stdout, stderr)
def send_signal(self, sig):
"""Send a signal to the process
"""
if sig == signal.SIGTERM:
self.terminate()
elif sig == signal.CTRL_C_EVENT:
os.kill(self.pid, signal.CTRL_C_EVENT)
elif sig == signal.CTRL_BREAK_EVENT:
os.kill(self.pid, signal.CTRL_BREAK_EVENT)
else:
raise ValueError("Unsupported signal: {}".format(sig))
        def terminate(self):
            """Terminates the process
            """
            try:
                _winapi.TerminateProcess(self._handle, 1)
            except PermissionError:
                # ERROR_ACCESS_DENIED (winerror 5) is received when the
                # process already died.
                rc = _winapi.GetExitCodeProcess(self._handle)
                if rc == _winapi.STILL_ACTIVE:
                    # The process is genuinely still running, so the
                    # access error was real: propagate it.
                    raise
                self.returncode = rc
        # On Windows kill() is simply an alias for terminate().
        kill = terminate
else:
#
# POSIX methods
#
        def _get_handles(self, stdin, stdout, stderr):
            """Construct and return tuple with IO objects:
            p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite

            Each of stdin/stdout/stderr may be None (inherit the
            parent's fd), PIPE, DEVNULL, an integer fd, or a file-like
            object with a fileno().  Unused positions stay -1.  Note
            the PIPE/DEVNULL comparisons must precede the isinstance
            int check, since those markers are themselves ints.
            """
            p2cread, p2cwrite = -1, -1
            c2pread, c2pwrite = -1, -1
            errread, errwrite = -1, -1
            if stdin is None:
                pass
            elif stdin == PIPE:
                p2cread, p2cwrite = _create_pipe()
            elif stdin == DEVNULL:
                p2cread = self._get_devnull()
            elif isinstance(stdin, int):
                p2cread = stdin
            else:
                # Assuming file-like object
                p2cread = stdin.fileno()
            if stdout is None:
                pass
            elif stdout == PIPE:
                c2pread, c2pwrite = _create_pipe()
            elif stdout == DEVNULL:
                c2pwrite = self._get_devnull()
            elif isinstance(stdout, int):
                c2pwrite = stdout
            else:
                # Assuming file-like object
                c2pwrite = stdout.fileno()
            if stderr is None:
                pass
            elif stderr == PIPE:
                errread, errwrite = _create_pipe()
            elif stderr == STDOUT:
                # Merge stderr into whatever stdout resolved to above.
                errwrite = c2pwrite
            elif stderr == DEVNULL:
                errwrite = self._get_devnull()
            elif isinstance(stderr, int):
                errwrite = stderr
            else:
                # Assuming file-like object
                errwrite = stderr.fileno()
            return (p2cread, p2cwrite,
                    c2pread, c2pwrite,
                    errread, errwrite)
def _close_fds(self, fds_to_keep):
start_fd = 3
for fd in sorted(fds_to_keep):
if fd >= start_fd:
os.closerange(start_fd, fd)
start_fd = fd + 1
if start_fd <= MAXFD:
os.closerange(start_fd, MAXFD)
        def _execute_child(self, args, executable, preexec_fn, close_fds,
                           pass_fds, cwd, env,
                           startupinfo, creationflags, shell,
                           p2cread, p2cwrite,
                           c2pread, c2pwrite,
                           errread, errwrite,
                           restore_signals, start_new_session):
            """Execute program (POSIX version)

            Forks and execs via _posixsubprocess.fork_exec().  Any exec
            failure in the child is reported back to the parent over a
            dedicated error pipe and re-raised here.
            """
            if isinstance(args, (str, bytes)):
                args = [args]
            else:
                args = list(args)
            if shell:
                args = ["/bin/sh", "-c"] + args
                if executable:
                    # An explicit executable replaces /bin/sh itself.
                    args[0] = executable
            if executable is None:
                executable = args[0]
            # Remember the user-visible name for error messages below.
            orig_executable = executable
            # For transferring possible exec failure from child to parent.
            # Data format: "exception name:hex errno:description"
            # Pickle is not used; it is complex and involves memory allocation.
            errpipe_read, errpipe_write = _create_pipe()
            try:
                try:
                    # We must avoid complex work that could involve
                    # malloc or free in the child process to avoid
                    # potential deadlocks, thus we do all this here.
                    # and pass it to fork_exec()
                    if env is not None:
                        env_list = [os.fsencode(k) + b'=' + os.fsencode(v)
                                    for k, v in env.items()]
                    else:
                        env_list = None  # Use execv instead of execve.
                    executable = os.fsencode(executable)
                    if os.path.dirname(executable):
                        executable_list = (executable,)
                    else:
                        # This matches the behavior of os._execvpe().
                        executable_list = tuple(
                            os.path.join(os.fsencode(dir), executable)
                            for dir in os.get_exec_path(env))
                    fds_to_keep = set(pass_fds)
                    fds_to_keep.add(errpipe_write)
                    self.pid = _posixsubprocess.fork_exec(
                            args, executable_list,
                            close_fds, sorted(fds_to_keep), cwd, env_list,
                            p2cread, p2cwrite, c2pread, c2pwrite,
                            errread, errwrite,
                            errpipe_read, errpipe_write,
                            restore_signals, start_new_session, preexec_fn)
                    self._child_created = True
                finally:
                    # be sure the FD is closed no matter what
                    os.close(errpipe_write)
                # self._devnull is not always defined.
                devnull_fd = getattr(self, '_devnull', None)
                # Close the parent's copies of the child-side pipe ends
                # (but never the shared devnull fd, closed separately).
                if p2cread != -1 and p2cwrite != -1 and p2cread != devnull_fd:
                    os.close(p2cread)
                if c2pwrite != -1 and c2pread != -1 and c2pwrite != devnull_fd:
                    os.close(c2pwrite)
                if errwrite != -1 and errread != -1 and errwrite != devnull_fd:
                    os.close(errwrite)
                if devnull_fd is not None:
                    os.close(devnull_fd)
                # Prevent a double close of these fds from __init__ on error.
                self._closed_child_pipe_fds = True
                # Wait for exec to fail or succeed; possibly raising an
                # exception (limited in size).  EOF on the pipe (empty
                # part) means exec succeeded: the CLOEXEC write end was
                # closed by the exec.
                errpipe_data = bytearray()
                while True:
                    part = _eintr_retry_call(os.read, errpipe_read, 50000)
                    errpipe_data += part
                    if not part or len(errpipe_data) > 50000:
                        break
            finally:
                # be sure the FD is closed no matter what
                os.close(errpipe_read)
            if errpipe_data:
                # The child reported a failure: reap it, then decode and
                # re-raise the child's exception in the parent.
                try:
                    _eintr_retry_call(os.waitpid, self.pid, 0)
                except OSError as e:
                    if e.errno != errno.ECHILD:
                        raise
                try:
                    exception_name, hex_errno, err_msg = (
                            errpipe_data.split(b':', 2))
                except ValueError:
                    exception_name = b'RuntimeError'
                    hex_errno = b'0'
                    err_msg = (b'Bad exception data from child: ' +
                               repr(errpipe_data))
                child_exception_type = getattr(
                        builtins, exception_name.decode('ascii'),
                        RuntimeError)
                err_msg = err_msg.decode(errors="surrogatepass")
                if issubclass(child_exception_type, OSError) and hex_errno:
                    errno_num = int(hex_errno, 16)
                    # "noexec" marks a failure before exec was attempted
                    # (e.g. from chdir(cwd)).
                    child_exec_never_called = (err_msg == "noexec")
                    if child_exec_never_called:
                        err_msg = ""
                    if errno_num != 0:
                        err_msg = os.strerror(errno_num)
                        if errno_num == errno.ENOENT:
                            if child_exec_never_called:
                                # The error must be from chdir(cwd).
                                err_msg += ': ' + repr(cwd)
                            else:
                                err_msg += ': ' + repr(orig_executable)
                    raise child_exception_type(errno_num, err_msg)
                raise child_exception_type(err_msg)
def _handle_exitstatus(self, sts, _WIFSIGNALED=os.WIFSIGNALED,
_WTERMSIG=os.WTERMSIG, _WIFEXITED=os.WIFEXITED,
_WEXITSTATUS=os.WEXITSTATUS):
# This method is called (indirectly) by __del__, so it cannot
# refer to anything outside of its local scope."""
if _WIFSIGNALED(sts):
self.returncode = -_WTERMSIG(sts)
elif _WIFEXITED(sts):
self.returncode = _WEXITSTATUS(sts)
else:
# Should never happen
raise RuntimeError("Unknown child exit status!")
        def _internal_poll(self, _deadstate=None, _waitpid=os.waitpid,
                _WNOHANG=os.WNOHANG, _os_error=os.error, _ECHILD=errno.ECHILD):
            """Check if child process has terminated. Returns returncode
            attribute.
            This method is called by __del__, so it cannot reference anything
            outside of the local scope (nor can any methods it calls).
            """
            # The os/errno values are bound as default arguments so they
            # stay reachable during interpreter shutdown, when module
            # globals may already have been cleared.
            if self.returncode is None:
                try:
                    # WNOHANG: poll without blocking; pid == 0 means the
                    # child is still running.
                    pid, sts = _waitpid(self.pid, _WNOHANG)
                    if pid == self.pid:
                        self._handle_exitstatus(sts)
                except _os_error as e:
                    if _deadstate is not None:
                        # Called from __del__: record the supplied
                        # placeholder instead of raising.
                        self.returncode = _deadstate
                    elif e.errno == _ECHILD:
                        # This happens if SIGCLD is set to be ignored or
                        # waiting for child processes has otherwise been
                        # disabled for our process. This child is dead, we
                        # can't get the status.
                        # http://bugs.python.org/issue15756
                        self.returncode = 0
            return self.returncode
def _try_wait(self, wait_flags):
try:
(pid, sts) = _eintr_retry_call(os.waitpid, self.pid, wait_flags)
except OSError as e:
if e.errno != errno.ECHILD:
raise
# This happens if SIGCLD is set to be ignored or waiting
# for child processes has otherwise been disabled for our
# process. This child is dead, we can't get the status.
pid = self.pid
sts = 0
return (pid, sts)
        def wait(self, timeout=None, endtime=None):
            """Wait for child process to terminate. Returns returncode
            attribute.

            Raises TimeoutExpired if the deadline passes first; *timeout*
            is relative seconds, *endtime* an absolute deadline.
            """
            if self.returncode is not None:
                return self.returncode
            # endtime is preferred to timeout. timeout is only used for
            # printing.
            if endtime is not None or timeout is not None:
                if endtime is None:
                    endtime = _time() + timeout
                elif timeout is None:
                    timeout = self._remaining_time(endtime)
            if endtime is not None:
                # Enter a busy loop if we have a timeout. This busy loop was
                # cribbed from Lib/threading.py in Thread.wait() at r71065.
                delay = 0.0005 # 500 us -> initial delay of 1 ms
                while True:
                    (pid, sts) = self._try_wait(os.WNOHANG)
                    assert pid == self.pid or pid == 0
                    if pid == self.pid:
                        self._handle_exitstatus(sts)
                        break
                    remaining = self._remaining_time(endtime)
                    if remaining <= 0:
                        raise TimeoutExpired(self.args, timeout)
                    # Exponential backoff, capped at 50ms and at the
                    # time actually remaining.
                    delay = min(delay * 2, remaining, .05)
                    time.sleep(delay)
            else:
                while self.returncode is None:
                    (pid, sts) = self._try_wait(0)
                    # Check the pid and loop as waitpid has been known to return
                    # 0 even without WNOHANG in odd situations. issue14396.
                    if pid == self.pid:
                        self._handle_exitstatus(sts)
            return self.returncode
        def _communicate(self, input, endtime, orig_timeout):
            # POSIX implementation of communicate(): multiplex the three
            # pipes with poll() when available, otherwise select().
            if self.stdin and not self._communication_started:
                # Flush stdio buffer. This might block, if the user has
                # been writing to .stdin in an uncontrolled fashion.
                self.stdin.flush()
                if not input:
                    self.stdin.close()
            if _has_poll:
                stdout, stderr = self._communicate_with_poll(input, endtime,
                                                             orig_timeout)
            else:
                stdout, stderr = self._communicate_with_select(input, endtime,
                                                               orig_timeout)
            self.wait(timeout=self._remaining_time(endtime))
            # All data exchanged. Translate lists into strings.
            if stdout is not None:
                stdout = b''.join(stdout)
            if stderr is not None:
                stderr = b''.join(stderr)
            # Translate newlines, if requested.
            # This also turns bytes into strings.
            if self.universal_newlines:
                if stdout is not None:
                    stdout = self._translate_newlines(stdout,
                                                      self.stdout.encoding)
                if stderr is not None:
                    stderr = self._translate_newlines(stderr,
                                                      self.stderr.encoding)
            return (stdout, stderr)
def _save_input(self, input):
# This method is called from the _communicate_with_*() methods
# so that if we time out while communicating, we can continue
# sending input if we retry.
if self.stdin and self._input is None:
self._input_offset = 0
self._input = input
if self.universal_newlines and input is not None:
self._input = self._input.encode(self.stdin.encoding)
        def _communicate_with_poll(self, input, endtime, orig_timeout):
            # poll()-based pipe multiplexing.  All per-call state lives
            # on self so a retry after TimeoutExpired can resume.
            stdout = None # Return
            stderr = None # Return
            if not self._communication_started:
                self._fd2file = {}
            poller = select.poll()
            def register_and_append(file_obj, eventmask):
                # Track the fd->file mapping alongside registration.
                poller.register(file_obj.fileno(), eventmask)
                self._fd2file[file_obj.fileno()] = file_obj
            def close_unregister_and_remove(fd):
                # Undo register_and_append for a finished fd.
                poller.unregister(fd)
                self._fd2file[fd].close()
                self._fd2file.pop(fd)
            if self.stdin and input:
                register_and_append(self.stdin, select.POLLOUT)
            # Only create this mapping if we haven't already.
            if not self._communication_started:
                self._fd2output = {}
                if self.stdout:
                    self._fd2output[self.stdout.fileno()] = []
                if self.stderr:
                    self._fd2output[self.stderr.fileno()] = []
            select_POLLIN_POLLPRI = select.POLLIN | select.POLLPRI
            if self.stdout:
                register_and_append(self.stdout, select_POLLIN_POLLPRI)
                stdout = self._fd2output[self.stdout.fileno()]
            if self.stderr:
                register_and_append(self.stderr, select_POLLIN_POLLPRI)
                stderr = self._fd2output[self.stderr.fileno()]
            self._save_input(input)
            while self._fd2file:
                timeout = self._remaining_time(endtime)
                if timeout is not None and timeout < 0:
                    raise TimeoutExpired(self.args, orig_timeout)
                try:
                    ready = poller.poll(timeout)
                except select.error as e:
                    # Interrupted by a signal: just poll again.
                    if e.args[0] == errno.EINTR:
                        continue
                    raise
                self._check_timeout(endtime, orig_timeout)
                # XXX Rewrite these to use non-blocking I/O on the
                # file objects; they are no longer using C stdio!
                for fd, mode in ready:
                    if mode & select.POLLOUT:
                        # Feed the next slice of input to the child,
                        # never more than _PIPE_BUF to avoid blocking.
                        chunk = self._input[self._input_offset :
                                            self._input_offset + _PIPE_BUF]
                        try:
                            self._input_offset += os.write(fd, chunk)
                        except OSError as e:
                            if e.errno == errno.EPIPE:
                                # Child stopped reading: stop writing.
                                close_unregister_and_remove(fd)
                            else:
                                raise
                        else:
                            if self._input_offset >= len(self._input):
                                close_unregister_and_remove(fd)
                    elif mode & select_POLLIN_POLLPRI:
                        data = os.read(fd, 4096)
                        if not data:
                            close_unregister_and_remove(fd)
                        # (An empty final chunk is still appended; it is
                        # harmless since _communicate b''.join()s these.)
                        self._fd2output[fd].append(data)
                    else:
                        # Ignore hang up or errors.
                        close_unregister_and_remove(fd)
            return (stdout, stderr)
        def _communicate_with_select(self, input, endtime, orig_timeout):
            # select()-based fallback for platforms without poll().  All
            # per-call state lives on self so a retry after
            # TimeoutExpired can resume where it left off.
            if not self._communication_started:
                self._read_set = []
                self._write_set = []
                if self.stdin and input:
                    self._write_set.append(self.stdin)
                if self.stdout:
                    self._read_set.append(self.stdout)
                if self.stderr:
                    self._read_set.append(self.stderr)
            self._save_input(input)
            stdout = None # Return
            stderr = None # Return
            if self.stdout:
                if not self._communication_started:
                    self._stdout_buff = []
                stdout = self._stdout_buff
            if self.stderr:
                if not self._communication_started:
                    self._stderr_buff = []
                stderr = self._stderr_buff
            while self._read_set or self._write_set:
                timeout = self._remaining_time(endtime)
                if timeout is not None and timeout < 0:
                    raise TimeoutExpired(self.args, orig_timeout)
                try:
                    (rlist, wlist, xlist) = \
                        select.select(self._read_set, self._write_set, [],
                                      timeout)
                except select.error as e:
                    # Interrupted by a signal: just select again.
                    if e.args[0] == errno.EINTR:
                        continue
                    raise
                # According to the docs, returning three empty lists indicates
                # that the timeout expired.
                if not (rlist or wlist or xlist):
                    raise TimeoutExpired(self.args, orig_timeout)
                # We also check what time it is ourselves for good measure.
                self._check_timeout(endtime, orig_timeout)
                # XXX Rewrite these to use non-blocking I/O on the
                # file objects; they are no longer using C stdio!
                if self.stdin in wlist:
                    # Write no more than _PIPE_BUF at once to avoid
                    # blocking on a full pipe.
                    chunk = self._input[self._input_offset :
                                        self._input_offset + _PIPE_BUF]
                    try:
                        bytes_written = os.write(self.stdin.fileno(), chunk)
                    except OSError as e:
                        if e.errno == errno.EPIPE:
                            # Child stopped reading: stop writing.
                            self.stdin.close()
                            self._write_set.remove(self.stdin)
                        else:
                            raise
                    else:
                        self._input_offset += bytes_written
                        if self._input_offset >= len(self._input):
                            self.stdin.close()
                            self._write_set.remove(self.stdin)
                if self.stdout in rlist:
                    data = os.read(self.stdout.fileno(), 1024)
                    if not data:
                        # EOF on stdout.
                        self.stdout.close()
                        self._read_set.remove(self.stdout)
                    stdout.append(data)
                if self.stderr in rlist:
                    data = os.read(self.stderr.fileno(), 1024)
                    if not data:
                        # EOF on stderr.
                        self.stderr.close()
                        self._read_set.remove(self.stderr)
                    stderr.append(data)
            return (stdout, stderr)
def send_signal(self, sig):
"""Send a signal to the process
"""
os.kill(self.pid, sig)
def terminate(self):
"""Terminate the process with SIGTERM
"""
self.send_signal(signal.SIGTERM)
def kill(self):
"""Kill the process with SIGKILL
"""
self.send_signal(signal.SIGKILL)
| gpl-2.0 |
umitproject/site-status | django/core/handlers/wsgi.py | 102 | 10193 | from pprint import pformat
import sys
from threading import Lock
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
import socket
from django import http
from django.core import signals
from django.core.handlers import base
from django.core.urlresolvers import set_script_prefix
from django.utils import datastructures
from django.utils.encoding import force_unicode, iri_to_uri
from django.utils.log import getLogger
logger = getLogger('django.request')
# See http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html
# Maps HTTP status code -> canonical reason phrase, used by
# WSGIHandler.__call__ to build the WSGI status line (e.g. "200 OK").
# Codes missing from this table fall back to 'UNKNOWN STATUS CODE'.
STATUS_CODE_TEXT = {
    100: 'CONTINUE',
    101: 'SWITCHING PROTOCOLS',
    200: 'OK',
    201: 'CREATED',
    202: 'ACCEPTED',
    203: 'NON-AUTHORITATIVE INFORMATION',
    204: 'NO CONTENT',
    205: 'RESET CONTENT',
    206: 'PARTIAL CONTENT',
    300: 'MULTIPLE CHOICES',
    301: 'MOVED PERMANENTLY',
    302: 'FOUND',
    303: 'SEE OTHER',
    304: 'NOT MODIFIED',
    305: 'USE PROXY',
    306: 'RESERVED',
    307: 'TEMPORARY REDIRECT',
    400: 'BAD REQUEST',
    401: 'UNAUTHORIZED',
    402: 'PAYMENT REQUIRED',
    403: 'FORBIDDEN',
    404: 'NOT FOUND',
    405: 'METHOD NOT ALLOWED',
    406: 'NOT ACCEPTABLE',
    407: 'PROXY AUTHENTICATION REQUIRED',
    408: 'REQUEST TIMEOUT',
    409: 'CONFLICT',
    410: 'GONE',
    411: 'LENGTH REQUIRED',
    412: 'PRECONDITION FAILED',
    413: 'REQUEST ENTITY TOO LARGE',
    414: 'REQUEST-URI TOO LONG',
    415: 'UNSUPPORTED MEDIA TYPE',
    416: 'REQUESTED RANGE NOT SATISFIABLE',
    417: 'EXPECTATION FAILED',
    500: 'INTERNAL SERVER ERROR',
    501: 'NOT IMPLEMENTED',
    502: 'BAD GATEWAY',
    503: 'SERVICE UNAVAILABLE',
    504: 'GATEWAY TIMEOUT',
    505: 'HTTP VERSION NOT SUPPORTED',
}
class LimitedStream(object):
    '''
    Wraps another stream so that no more than *limit* bytes can ever be
    read from it, guarding against reads past the declared length that
    would otherwise block (e.g. on the dev server's socket input).
    '''
    def __init__(self, stream, limit, buf_size=64 * 1024 * 1024):
        self.stream = stream
        self.remaining = limit
        self.buffer = ''
        self.buf_size = buf_size
    def _read_limited(self, size=None):
        # Read at most *size* bytes (everything remaining when None),
        # never exceeding the remaining budget.
        allowed = self.remaining
        if size is not None and size <= allowed:
            allowed = size
        if not allowed:
            return ''
        chunk = self.stream.read(allowed)
        self.remaining -= len(chunk)
        return chunk
    def read(self, size=None):
        # Consume anything readline() buffered first, then the stream.
        buffered = self.buffer
        if size is None:
            data = buffered + self._read_limited()
            self.buffer = ''
        elif size < len(buffered):
            data = buffered[:size]
            self.buffer = buffered[size:]
        else:  # size >= len(buffered)
            data = buffered + self._read_limited(size - len(buffered))
            self.buffer = ''
        return data
    def readline(self, size=None):
        # Keep filling the buffer until it holds a newline (or, when
        # *size* is given, at least that many characters).
        while '\n' not in self.buffer and \
              (size is None or len(self.buffer) < size):
            wanted = size - len(self.buffer) if size else None
            chunk = self._read_limited(wanted)
            if not chunk:
                break
            self.buffer += chunk
        sio = StringIO(self.buffer)
        line = sio.readline(size) if size else sio.readline()
        # Whatever follows the line stays buffered for the next call.
        self.buffer = sio.read()
        return line
class WSGIRequest(http.HttpRequest):
    """HttpRequest implementation backed directly by a WSGI environ dict.

    GET/POST/COOKIES/FILES/REQUEST are parsed lazily via properties on
    first access.
    """
    def __init__(self, environ):
        script_name = base.get_script_name(environ)
        path_info = force_unicode(environ.get('PATH_INFO', u'/'))
        if not path_info or path_info == script_name:
            # Sometimes PATH_INFO exists, but is empty (e.g. accessing
            # the SCRIPT_NAME URL without a trailing slash). We really need to
            # operate as if they'd requested '/'. Not amazingly nice to force
            # the path like this, but should be harmless.
            #
            # (The comparison of path_info to script_name is to work around an
            # apparent bug in flup 1.0.1. See Django ticket #8490).
            path_info = u'/'
        self.environ = environ
        self.path_info = path_info
        self.path = '%s%s' % (script_name, path_info)
        # META is the environ itself (not a copy), normalized in place.
        self.META = environ
        self.META['PATH_INFO'] = path_info
        self.META['SCRIPT_NAME'] = script_name
        self.method = environ['REQUEST_METHOD'].upper()
        self._post_parse_error = False
        if type(socket._fileobject) is type and isinstance(self.environ['wsgi.input'], socket._fileobject):
            # Under development server 'wsgi.input' is an instance of
            # socket._fileobject which hangs indefinitely on reading bytes past
            # available count. To prevent this it's wrapped in LimitedStream
            # that doesn't read past Content-Length bytes.
            #
            # This is not done for other kinds of inputs (like flup's FastCGI
            # streams) because they don't suffer from this problem and we can
            # avoid using another wrapper with its own .read and .readline
            # implementation.
            #
            # The type check is done because for some reason, AppEngine
            # implements _fileobject as a function, not a class.
            try:
                content_length = int(self.environ.get('CONTENT_LENGTH', 0))
            except (ValueError, TypeError):
                content_length = 0
            self._stream = LimitedStream(self.environ['wsgi.input'], content_length)
        else:
            self._stream = self.environ['wsgi.input']
        self._read_started = False
    def __repr__(self):
        # Since this is called as part of error handling, we need to be very
        # robust against potentially malformed input.
        try:
            get = pformat(self.GET)
        except:
            get = '<could not parse>'
        if self._post_parse_error:
            post = '<could not parse>'
        else:
            try:
                post = pformat(self.POST)
            except:
                post = '<could not parse>'
        try:
            cookies = pformat(self.COOKIES)
        except:
            cookies = '<could not parse>'
        try:
            meta = pformat(self.META)
        except:
            meta = '<could not parse>'
        return '<WSGIRequest\nGET:%s,\nPOST:%s,\nCOOKIES:%s,\nMETA:%s>' % \
            (get, post, cookies, meta)
    def get_full_path(self):
        # RFC 3986 requires query string arguments to be in the ASCII range.
        # Rather than crash if this doesn't happen, we encode defensively.
        return '%s%s' % (self.path, self.environ.get('QUERY_STRING', '') and ('?' + iri_to_uri(self.environ.get('QUERY_STRING', ''))) or '')
    def is_secure(self):
        # True only when the WSGI server reports the https scheme.
        return 'wsgi.url_scheme' in self.environ \
            and self.environ['wsgi.url_scheme'] == 'https'
    def _get_request(self):
        # Lazy merged view of POST and GET (POST takes precedence).
        if not hasattr(self, '_request'):
            self._request = datastructures.MergeDict(self.POST, self.GET)
        return self._request
    def _get_get(self):
        # Lazily parse the query string into a QueryDict.
        if not hasattr(self, '_get'):
            # The WSGI spec says 'QUERY_STRING' may be absent.
            self._get = http.QueryDict(self.environ.get('QUERY_STRING', ''), encoding=self._encoding)
        return self._get
    def _set_get(self, get):
        self._get = get
    def _get_post(self):
        # Lazily parse the request body (also populates _files).
        if not hasattr(self, '_post'):
            self._load_post_and_files()
        return self._post
    def _set_post(self, post):
        self._post = post
    def _get_cookies(self):
        # Lazily parse the Cookie header.
        if not hasattr(self, '_cookies'):
            self._cookies = http.parse_cookie(self.environ.get('HTTP_COOKIE', ''))
        return self._cookies
    def _set_cookies(self, cookies):
        self._cookies = cookies
    def _get_files(self):
        # Lazily parse uploaded files (also populates _post).
        if not hasattr(self, '_files'):
            self._load_post_and_files()
        return self._files
    GET = property(_get_get, _set_get)
    POST = property(_get_post, _set_post)
    COOKIES = property(_get_cookies, _set_cookies)
    FILES = property(_get_files)
    REQUEST = property(_get_request)
class WSGIHandler(base.BaseHandler):
    """WSGI application entry point.

    Builds a WSGIRequest from the environ, runs it through the
    middleware/view machinery and renders the resulting HttpResponse
    into a WSGI (status, headers) + body response.
    """
    initLock = Lock()
    request_class = WSGIRequest
    def __call__(self, environ, start_response):
        from django.conf import settings
        # Set up middleware if needed. We couldn't do this earlier, because
        # settings weren't available.
        if self._request_middleware is None:
            self.initLock.acquire()
            try:
                try:
                    # Check that middleware is still uninitialised.
                    if self._request_middleware is None:
                        self.load_middleware()
                except:
                    # Unload whatever middleware we got
                    self._request_middleware = None
                    raise
            finally:
                self.initLock.release()
        set_script_prefix(base.get_script_name(environ))
        signals.request_started.send(sender=self.__class__)
        try:
            try:
                request = self.request_class(environ)
            except UnicodeDecodeError:
                # Bug fix: if the constructor raised, `request` was never
                # bound, so the old log call (which referenced
                # request.path and passed 'request' in extra) died with a
                # NameError, masking the real 400. Log without it.
                logger.warning('Bad Request (UnicodeDecodeError)',
                    exc_info=sys.exc_info(),
                    extra={
                        'status_code': 400,
                    }
                )
                response = http.HttpResponseBadRequest()
            else:
                response = self.get_response(request)
        finally:
            signals.request_finished.send(sender=self.__class__)
        try:
            status_text = STATUS_CODE_TEXT[response.status_code]
        except KeyError:
            status_text = 'UNKNOWN STATUS CODE'
        status = '%s %s' % (response.status_code, status_text)
        # Header names/values must be native strings per the WSGI spec.
        response_headers = [(str(k), str(v)) for k, v in response.items()]
        for c in response.cookies.values():
            response_headers.append(('Set-Cookie', str(c.output(header=''))))
        start_response(status, response_headers)
        return response
| agpl-3.0 |
live-clones/dolfin-adjoint | dolfin_adjoint/assignment.py | 1 | 3768 | import backend
import ufl
from .solving import solve, annotate as solving_annotate, do_checkpoint, register_initial_conditions
import libadjoint
from . import adjlinalg
from . import adjglobals
from . import utils
def register_assign(new, old, op=None):
    """Record the assignment new <- old on the libadjoint tape.

    *new* is the assigned Function; *old* is its source (a Function, or
    any object when *op* is supplied, in which case the value is
    produced by op(old, fn_space)).  Registers an identity equation
    whose right-hand side is an IdentityRHS so the adjoint and tangent
    linear models can propagate through the assignment.
    """
    if not isinstance(old, backend.Function):
        # A non-Function source can only be turned into a Function
        # through the user-supplied operator.
        assert op is not None
    fn_space = new.function_space()
    identity_block = utils.get_identity_block(fn_space)
    # Advance the timestep counter for the assigned variable.
    dep = adjglobals.adj_variables.next(new)
    if backend.parameters["adjoint"]["record_all"] and isinstance(old, backend.Function):
        adjglobals.adjointer.record_variable(dep, libadjoint.MemoryStorage(adjlinalg.Vector(old)))
    rhs = IdentityRHS(old, fn_space, op)
    register_initial_conditions(zip(rhs.coefficients(),rhs.dependencies()), linear=True)
    initial_eq = libadjoint.Equation(dep, blocks=[identity_block], targets=[dep], rhs=rhs)
    cs = adjglobals.adjointer.register_equation(initial_eq)
    # Checkpoint if the checkpointing scheme requires it here.
    do_checkpoint(cs, dep, rhs)
class IdentityRHS(libadjoint.RHS):
    """libadjoint RHS representing the source of an assignment.

    Evaluates to the assigned value itself (or op(var, fn_space) when
    an operator was supplied), with the identity as its derivative.
    """
    def __init__(self, var, fn_space, op):
        # var: the source object; fn_space: target function space;
        # op: optional callable mapping (var, fn_space) -> value.
        self.var = var
        self.fn_space = fn_space
        self.op = op
        if isinstance(var, backend.Function):
            self.dep = adjglobals.adj_variables[var]
    def __call__(self, dependencies, values):
        # With a Function dependency, replay uses the recorded value;
        # otherwise recompute through the operator.
        if len(values) > 0:
            return adjlinalg.Vector(values[0].data)
        else:
            return adjlinalg.Vector(self.op(self.var, self.fn_space))
    def derivative_action(self, dependencies, values, variable, contraction_vector, hermitian):
        # If you want to apply boundary conditions symmetrically in the adjoint
        # -- and you often do --
        # then we need to have a UFL representation of all the terms in the adjoint equation.
        # However!
        # Since UFL cannot represent the identity map,
        # we need to find an f such that when
        # assemble(inner(f, v)*dx)
        # we get the contraction_vector.data back.
        # This involves inverting a mass matrix.
        if backend.parameters["adjoint"]["symmetric_bcs"] and backend.__version__ <= '1.2.0':
            backend.info_red("Warning: symmetric BC application requested but unavailable in dolfin <= 1.2.0.")
        if backend.parameters["adjoint"]["symmetric_bcs"] and backend.__version__ > '1.2.0':
            V = contraction_vector.data.function_space()
            v = backend.TestFunction(V)
            if str(V) not in adjglobals.fsp_lu:
                # First time for this space: factorize the mass matrix
                # once and cache the solver for reuse.
                u = backend.TrialFunction(V)
                A = backend.assemble(backend.inner(u, v)*backend.dx)
                solver_method = "mumps" if "mumps" in backend.lu_solver_methods().keys() else "default"
                lusolver = backend.LUSolver(A, solver_method)
                lusolver.parameters["symmetric"] = True
                lusolver.parameters["reuse_factorization"] = True
                adjglobals.fsp_lu[str(V)] = lusolver
            else:
                lusolver = adjglobals.fsp_lu[str(V)]
            # riesz is the Riesz representer f described above.
            riesz = backend.Function(V)
            lusolver.solve(riesz.vector(), contraction_vector.data.vector())
            return adjlinalg.Vector(backend.inner(riesz, v)*backend.dx)
        else:
            # Identity derivative: pass the contraction straight through.
            return adjlinalg.Vector(contraction_vector.data)
    def second_derivative_action(self, dependencies, values, inner_variable, inner_contraction_vector, outer_variable, hermitian, action):
        # The identity map is linear, so its second derivative vanishes.
        return None
    def dependencies(self):
        # Only a Function source contributes an adjoint dependency.
        if isinstance(self.var, backend.Function):
            return [self.dep]
        else:
            return []
    def coefficients(self):
        if isinstance(self.var, backend.Function):
            return [self.var]
        else:
            return []
    def __str__(self):
        return "AssignIdentity(%s)" % str(self.dep)
| lgpl-3.0 |
nin042/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/test/finder_unittest.py | 124 | 5471 | # Copyright (C) 2012 Google, Inc.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
import unittest2 as unittest
from webkitpy.common.system.filesystem_mock import MockFileSystem
from webkitpy.common.system.outputcapture import OutputCapture
from webkitpy.test.finder import Finder
class FinderTest(unittest.TestCase):
    """Tests for webkitpy's test Finder, run against a MockFileSystem."""

    def setUp(self):
        # Fake layout: tree '/foo' (top-level package dir 'bar'), tree
        # '/foo2' (auto-detected top dir), plus a file outside any tree
        # and a stale .pyc without a matching .py.
        files = {
            '/foo/bar/baz.py': '',
            '/foo/bar/baz_unittest.py': '',
            '/foo2/bar2/baz2.py': '',
            '/foo2/bar2/baz2.pyc': '',
            '/foo2/bar2/baz2_integrationtest.py': '',
            '/foo2/bar2/missing.pyc': '',
            '/tmp/another_unittest.py': '',
        }
        self.fs = MockFileSystem(files)
        self.finder = Finder(self.fs)
        self.finder.add_tree('/foo', 'bar')
        self.finder.add_tree('/foo2')

        # Here we have to jump through a hoop to make sure test-webkitpy doesn't log
        # any messages from these tests :(.
        self.root_logger = logging.getLogger()
        self.log_levels = []
        self.log_handlers = self.root_logger.handlers[:]
        for handler in self.log_handlers:
            self.log_levels.append(handler.level)
            handler.level = logging.CRITICAL

    def tearDown(self):
        # Restore the handler levels saved in setUp (same order).
        for handler in self.log_handlers:
            handler.level = self.log_levels.pop(0)

    def test_additional_system_paths(self):
        self.assertEqual(self.finder.additional_paths(['/usr']),
                         ['/foo', '/foo2'])

    def test_is_module(self):
        self.assertTrue(self.finder.is_module('bar.baz'))
        self.assertTrue(self.finder.is_module('bar2.baz2'))
        self.assertTrue(self.finder.is_module('bar2.baz2_integrationtest'))

        # Missing the proper namespace.
        self.assertFalse(self.finder.is_module('baz'))

    def test_to_module(self):
        self.assertEqual(self.finder.to_module('/foo/test.py'), 'test')
        self.assertEqual(self.finder.to_module('/foo/bar/test.py'), 'bar.test')
        self.assertEqual(self.finder.to_module('/foo/bar/pytest.py'), 'bar.pytest')

    def test_clean(self):
        # A .pyc with no matching .py source must be deleted by clean_trees().
        self.assertTrue(self.fs.exists('/foo2/bar2/missing.pyc'))
        self.finder.clean_trees()
        self.assertFalse(self.fs.exists('/foo2/bar2/missing.pyc'))

    def check_names(self, names, expected_names, find_all=True):
        # Helper: assert find_names() resolves 'names' to 'expected_names'.
        self.assertEqual(self.finder.find_names(names, find_all), expected_names)

    def test_default_names(self):
        # With no names given, all *_unittest/*_integrationtest modules are found.
        self.check_names([], ['bar.baz_unittest', 'bar2.baz2_integrationtest'], find_all=True)
        self.check_names([], ['bar.baz_unittest', 'bar2.baz2_integrationtest'], find_all=False)

        # Should return the names given it, even if they don't exist.
        self.check_names(['foobar'], ['foobar'], find_all=False)

    def test_paths(self):
        self.fs.chdir('/foo/bar')
        self.check_names(['baz_unittest.py'], ['bar.baz_unittest'])
        self.check_names(['./baz_unittest.py'], ['bar.baz_unittest'])
        self.check_names(['/foo/bar/baz_unittest.py'], ['bar.baz_unittest'])
        self.check_names(['.'], ['bar.baz_unittest'])
        self.check_names(['../../foo2/bar2'], ['bar2.baz2_integrationtest'])

        self.fs.chdir('/')
        self.check_names(['bar'], ['bar.baz_unittest'])
        self.check_names(['/foo/bar/'], ['bar.baz_unittest'])

        # This works 'by accident' since it maps onto a package.
        self.check_names(['bar/'], ['bar.baz_unittest'])

        # This should log an error, since it's outside the trees.
        oc = OutputCapture()
        oc.set_log_level(logging.ERROR)
        oc.capture_output()
        try:
            self.check_names(['/tmp/another_unittest.py'], [])
        finally:
            _, _, logs = oc.restore_output()
            self.assertIn('another_unittest.py', logs)

        # Paths that don't exist are errors.
        oc.capture_output()
        try:
            self.check_names(['/foo/bar/notexist_unittest.py'], [])
        finally:
            _, _, logs = oc.restore_output()
            self.assertIn('notexist_unittest.py', logs)

        # Names that don't exist are caught later, at load time.
        self.check_names(['bar.notexist_unittest'], ['bar.notexist_unittest'])
| bsd-3-clause |
darith27/wagtail | wagtail/contrib/wagtailstyleguide/views.py | 11 | 2636 | from django import forms
from django.shortcuts import render
from django.utils.translation import ugettext as _
from wagtail.wagtailadmin import messages
from django.contrib.auth.decorators import permission_required
from wagtail.wagtailadmin.forms import SearchForm
from wagtail.wagtailadmin.widgets import AdminPageChooser, AdminDateInput, AdminTimeInput, AdminDateTimeInput
from wagtail.wagtailimages.widgets import AdminImageChooser
from wagtail.wagtaildocs.widgets import AdminDocumentChooser
class ExampleForm(forms.Form):
    """Form exercising each field type and admin chooser widget so the
    style guide page can render one example of every control."""

    def __init__(self, *args, **kwargs):
        super(ExampleForm, self).__init__(*args, **kwargs)
        # The chooser/date widgets are swapped in here rather than declared
        # on the fields below, replacing the default BooleanField widgets.
        self.fields['page_chooser'].widget = AdminPageChooser()
        self.fields['image_chooser'].widget = AdminImageChooser()
        self.fields['document_chooser'].widget = AdminDocumentChooser()
        self.fields['date'].widget = AdminDateInput()
        self.fields['time'].widget = AdminTimeInput()
        self.fields['datetime'].widget = AdminDateTimeInput()

    CHOICES = (
        ('choice1', 'choice 1'),
        ('choice2', 'choice 2'),
    )

    text = forms.CharField(required=True, help_text="help text")
    url = forms.URLField(required=True)
    email = forms.EmailField(max_length=254)
    date = forms.DateField()
    time = forms.TimeField()
    datetime = forms.DateTimeField()
    select = forms.ChoiceField(choices=CHOICES)
    boolean = forms.BooleanField(required=False)
    # Placeholder fields; their widgets are replaced in __init__ above.
    page_chooser = forms.BooleanField(required=True)
    image_chooser = forms.BooleanField(required=True)
    document_chooser = forms.BooleanField(required=True)
@permission_required('wagtailadmin.access_admin')
def index(request):
    """Render the style guide page with sample forms, one message of each
    level, and fake pagination data for the template to display."""
    form = SearchForm(placeholder=_("Search something"))
    example_form = ExampleForm()

    # Queue one message per level so the template shows all message styles.
    messages.success(request, _("Success message"), buttons=[
        messages.button('', _('View live')),
        messages.button('', _('Edit'))
    ])
    messages.warning(request, _("Warning message"), buttons=[
        messages.button('', _('View live')),
        messages.button('', _('Edit'))
    ])
    messages.error(request, _("Error message"), buttons=[
        messages.button('', _('View live')),
        messages.button('', _('Edit'))
    ])

    # Hand-built stand-in for a Django Page/Paginator pair, just enough
    # for the pagination include to render.
    fake_pagination = {
        'number': 1,
        'previous_page_number': 1,
        'next_page_number': 2,
        'has_previous': True,
        'has_next': True,
        'paginator': {
            'num_pages': 10,
        },
    }

    return render(request, 'wagtailstyleguide/base.html', {
        'search_form': form,
        'example_form': example_form,
        'fake_pagination': fake_pagination,
    })
| bsd-3-clause |
ToolsOnAir/gl-frame-bender | scripts/run_analysis.py | 1 | 25995 | import os
import shutil
import subprocess
import re
import tempfile
import time
import csv
from optparse import OptionParser
def parse_args():
    """Build the command-line parser and return optparse's (options, args).

    Returns the 2-tuple produced by OptionParser.parse_args(): the parsed
    option values object and the remaining positional arguments.
    """
    parser = OptionParser()
    # (flag strings, add_option keyword arguments) for every supported option.
    option_table = [
        (("-o", "--output-directory"),
         dict(dest="out_dir", default=".",
              help="Defines the output directory.")),
        (("-t", "--trace"),
         dict(action="store_true", dest="do_trace",
              help="If set, new traces will be created, otherwise only trace muncher runs.")),
        (("-f", "--force"),
         dict(action="store_true", dest="force_overwrite",
              help="If set, existing directories and possibly old traces will be overwritten without asking.")),
        (("-a", "--append"),
         dict(action="store_true", dest="append",
              help="If set, existing directories won't be deleted, and only runs will run, whose directory doesn't exist yet.")),
        (("-e", "--executable"),
         dict(dest="executable", default="",
              help="Sets the framebender executable to use")),
        (("-r", "--resourcedir"),
         dict(dest="resourcedir", default="",
              help="Sets the resource-dir to use")),
        (("-s", "--sequencedir"),
         dict(dest="sequencedir", default="",
              help="Sets the sequence dir to use")),
    ]
    for flags, kwargs in option_table:
        parser.add_option(*flags, **kwargs)
    return parser.parse_args()
folder_id = 0
num_runs = 0
num_failed_runs = 0
folder_id_to_run_map = []
def run_frame_bender(name_prefix, list_of_parameters):
    """Execute one benchmark configuration twice: a debug run (GL debug
    context, DEBUG logging) followed by a trace run that writes trace.fbt.

    name_prefix        -- human-readable name of this benchmark family.
    list_of_parameters -- list of (option_name, [command line options]) pairs
                          that are concatenated into the run's options.

    Side effects: increments the module-level folder_id / num_runs /
    num_failed_runs counters, appends to folder_id_to_run_map, and writes
    all outputs into <opts.out_dir>/<folder_id>/.
    """
    global num_runs
    global num_failed_runs
    global folder_id
    global folder_id_to_run_map
    global opts

    folder_id += 1
    folder_name_of_run = name_prefix
    human_read_name_of_run = name_prefix
    command_line_options = []
    # NOTE(review): this flag is never read afterwards -- presumably vestigial.
    enable_upload_download_gl_traces = True

    # Resource locations shared by every run.
    command_line_options.append("--program.glsl_sources_location=%s/glsl" % opts.resourcedir)
    command_line_options.append("--program.sequences_location=%s" % opts.sequencedir)
    command_line_options.append("--program.textures_location=%s/textures" % opts.resourcedir)

    # Fold each named parameter set into the display name, the folder name
    # (non-alphanumerics replaced with '_') and the option list.
    for option_name, option_parameters in list_of_parameters:
        human_read_name_of_run += " :: "
        folder_name_of_run += "_"
        folder_name_of_run += "".join([x if x.isalnum() else "_" for x in option_name])
        human_read_name_of_run += option_name
        for option_line in option_parameters:
            command_line_options.append(option_line)

    folder_id_to_run_map.append([folder_id, human_read_name_of_run])

    # Output folders are numbered; the id -> name mapping is written to CSV later.
    output_directory = os.path.join(opts.out_dir, `folder_id`)
    if not os.path.exists(output_directory):
        os.makedirs(output_directory)
    elif opts.append:
        # In append mode an existing folder means this run was already done.
        print "Run " + `folder_id` + " already exists, skipping."
        return

    print "Running Debug run of '" + human_read_name_of_run + "'"

    trace_output_file = os.path.join(output_directory, "trace.fbt")

    frame_bender_dbg_run_args = [
        framebender_executable,
        "-c",
        master_config_file,
        "--logging.output_file="+os.path.abspath(os.path.join(output_directory, "debug_run.log")),
        "--program.config_output_file="+os.path.abspath(os.path.join(output_directory, "debug_run.cfg")),
        "--logging.min_severity=DEBUG",
        "--opengl.context.debug=yes",
        "--profiling.trace_name=" + human_read_name_of_run
    ]

    for opt in command_line_options:
        frame_bender_dbg_run_args.append(opt)

    try:
        subprocess.check_call(frame_bender_dbg_run_args)
    except subprocess.CalledProcessError:
        # A failed debug run aborts this configuration entirely.
        print "Execution of debug run '" + human_read_name_of_run + "' did not succeed."
        num_failed_runs += 1
        return

    print "Running Trace run of '" + human_read_name_of_run + "'"

    # Trace run: no GL debug context, INFO logging, trace written to file.
    frame_bender_trace_run_args = [
        framebender_executable,
        "-c",
        master_config_file,
        "--logging.output_file="+os.path.abspath(os.path.join(output_directory, "trace_run.log")),
        "--program.config_output_file="+os.path.abspath(os.path.join(output_directory, "trace_run.cfg")),
        "--logging.min_severity=INFO",
        "--opengl.context.debug=no",
        "--profiling.trace_output_file="+os.path.abspath(trace_output_file),
        "--profiling.trace_name=" + human_read_name_of_run
    ]

    for opt in command_line_options:
        frame_bender_trace_run_args.append(opt)

    try:
        subprocess.check_call(frame_bender_trace_run_args)
    except subprocess.CalledProcessError:
        print "Execution of trace run '" + human_read_name_of_run + "' did not succeed."
        num_failed_runs += 1
        return

    num_runs += 1
####
# Man script execution
####

(opts, args) = parse_args()

# NOTE(review): the original indentation of this script was lost in this dump.
# The nesting below (everything up to the CSV summary sitting under
# "if opts.do_trace:", with only the trace-rendering section at top level)
# is reconstructed from the option semantics -- verify against upstream.
if opts.do_trace:

    if opts.force_overwrite and opts.append:
        print "Can't have both options: force_overwrite and append!"
        exit()

    if not os.path.exists(opts.out_dir):
        os.makedirs(opts.out_dir)
    else:
        if opts.force_overwrite:
            print "Overwriting old directory."
            shutil.rmtree(opts.out_dir)
        elif opts.append:
            print "Output directory already exists, appending to previous runs."
        else:
            print "Output directory already exists, either delete manually or add '-f' as program option."
            exit()

    # For each quality_* config file
    script_dir = os.path.dirname(os.path.realpath(__file__))
    resources_dir = opts.resourcedir
    framebender_executable = opts.executable

    if not os.path.exists(resources_dir):
        raise RuntimeError("Path " + resources_dir + " does not exist.")

    master_config_file = os.path.join(resources_dir, "resources/benchmark_master.cfg")

    if not os.path.exists(master_config_file):
        raise RuntimeError("Master benchmark config file '" + master_config_file + "' does not exist.")

    # ---- Benchmark parameter tables: (name, [command line options]) pairs ----

    input_sequences = [
        ("HD 1080p", ["--input.sequence.width=1920",
                      "--input.sequence.height=1080",
                      "--input.sequence.id=horse_v210_1920_1080p",
                      "--input.sequence.loop_count=10"]),
        ("UHD-1", ["--input.sequence.width=3840",
                   "--input.sequence.height=2160",
                   "--input.sequence.id=rain_fruits_v210_3820_2160p",
                   "--input.sequence.loop_count=40"])]

    optimization_flags = [
        ("single GL context & single threaded", []),
        ("single GL context & async host copies", ["--pipeline.optimization_flags=ASYNC_INPUT",
                                                   "--pipeline.optimization_flags=ASYNC_OUTPUT"]),
        ("multiple GL contexts & async host copies", ["--pipeline.optimization_flags=ASYNC_INPUT",
                                                      "--pipeline.optimization_flags=ASYNC_OUTPUT",
                                                      "--pipeline.optimization_flags=MULTIPLE_GL_CONTEXTS"]),
    ]

    optimization_flags.append(("single GL context & ARB persistent memory & async host copies",
                               ["--pipeline.optimization_flags=ASYNC_INPUT",
                                "--pipeline.optimization_flags=ASYNC_OUTPUT",
                                "--pipeline.optimization_flags=ARB_PERSISTENT_MAPPING"]))

    optimization_flags.append(("multiple GL contexts & ARB persistent memory & async host copies",
                               ["--pipeline.optimization_flags=ASYNC_INPUT",
                                "--pipeline.optimization_flags=ASYNC_OUTPUT",
                                "--pipeline.optimization_flags=MULTIPLE_GL_CONTEXTS",
                                "--pipeline.optimization_flags=ARB_PERSISTENT_MAPPING"]))

    optimization_flags_multi_gl_and_both_async_idx = 1

    optimization_flags_throughput_runs = [
        ("async host copies", ["--pipeline.optimization_flags=ASYNC_INPUT",
                               "--pipeline.optimization_flags=ASYNC_OUTPUT"]),
        ("multiple GL contexts & async host copies", ["--pipeline.optimization_flags=ASYNC_INPUT",
                                                      "--pipeline.optimization_flags=ASYNC_OUTPUT",
                                                      "--pipeline.optimization_flags=MULTIPLE_GL_CONTEXTS"]),
    ]

    optimization_flags_best_key, optimization_flags_best_value = optimization_flags_throughput_runs[optimization_flags_multi_gl_and_both_async_idx]

    optimization_flags_throughput_runs.append(("single GL Context & ARB persistent memory & async host copies",
                                               ["--pipeline.optimization_flags=ASYNC_INPUT",
                                                "--pipeline.optimization_flags=ASYNC_OUTPUT",
                                                "--pipeline.optimization_flags=ARB_PERSISTENT_MAPPING"]))

    optimization_flags_throughput_runs.append(("multiple GL Contexts & ARB persistent memory & async host copies",
                                               ["--pipeline.optimization_flags=ASYNC_INPUT",
                                                "--pipeline.optimization_flags=ASYNC_OUTPUT",
                                                "--pipeline.optimization_flags=MULTIPLE_GL_CONTEXTS",
                                                "--pipeline.optimization_flags=ARB_PERSISTENT_MAPPING"]))

    # Debug flag sets enabling/disabling pipeline stages to isolate bottlenecks.
    debug_flags_throughput_runs = [
        ("GPU upload-only", ["--debug.output_is_enabled=false",
                             "--debug.input_is_enabled=true",
                             "--debug.rendering_is_enabled=false",
                             "--debug.host_input_copy_is_enabled=false",
                             "--debug.host_output_copy_is_enabled=false"]),
        ("GPU download-only", ["--debug.output_is_enabled=true",
                               "--debug.input_is_enabled=false",
                               "--debug.rendering_is_enabled=false",
                               "--debug.host_input_copy_is_enabled=false",
                               "--debug.host_output_copy_is_enabled=false"]),
        ("GPU upload & download", ["--debug.output_is_enabled=true",
                                   "--debug.input_is_enabled=true",
                                   "--debug.rendering_is_enabled=false",
                                   "--debug.host_input_copy_is_enabled=false",
                                   "--debug.host_output_copy_is_enabled=false"]),
        ("GPU upload & download & CPU host copying", ["--debug.output_is_enabled=true",
                                                      "--debug.input_is_enabled=true",
                                                      "--debug.rendering_is_enabled=false",
                                                      "--debug.host_input_copy_is_enabled=true",
                                                      "--debug.host_output_copy_is_enabled=true"]),
        ("GPU upload & download & render ", ["--debug.output_is_enabled=true",
                                             "--debug.input_is_enabled=true",
                                             "--debug.rendering_is_enabled=true",
                                             "--debug.host_input_copy_is_enabled=false",
                                             "--debug.host_output_copy_is_enabled=false"]),
        ("GPU upload & download & render & CPU host copying ", ["--debug.output_is_enabled=true",
                                                                "--debug.input_is_enabled=true",
                                                                "--debug.rendering_is_enabled=true",
                                                                "--debug.host_input_copy_is_enabled=true",
                                                                "--debug.host_output_copy_is_enabled=true"]),
    ]

    pipeline_size_configuration_high_throughput_idx = 1

    pipeline_size_configurations = [
        ("low latency pipeline",
         ["--pipeline.upload.gl_texture_count=1",
          "--pipeline.upload.gl_pbo_count=3",
          "--pipeline.upload.copy_to_unmap_queue_token_count=2",
          "--pipeline.upload.unmap_to_unpack_queue_token_count=1",
          "--pipeline.download.gl_texture_count=1",
          "--pipeline.download.gl_pbo_count=3",
          "--pipeline.download.pack_to_map_queue_token_count=1",
          "--pipeline.download.map_to_copy_queue_token_count=2",
          "--pipeline.input.token_count=3",
          "--pipeline.output.token_count=3"]),
        ("high throughput pipeline",
         ["--pipeline.upload.gl_texture_count=3",
          "--pipeline.upload.gl_pbo_count=5",
          "--pipeline.upload.copy_to_unmap_queue_token_count=2",
          "--pipeline.upload.unmap_to_unpack_queue_token_count=3",
          "--pipeline.download.gl_texture_count=3",
          "--pipeline.download.gl_pbo_count=4",
          "--pipeline.download.pack_to_map_queue_token_count=2",
          "--pipeline.download.map_to_copy_queue_token_count=2",
          "--pipeline.input.token_count=8",
          "--pipeline.output.token_count=8"])
    ]

    input_load_constraint_non_interleaved_idx = 0
    input_load_constraint_all_interleaved_idx = 3

    input_load_constraints = [
        ("non-interleaved", ["--pipeline.download.pack_to_map_load_constraint_count=0",
                             "--pipeline.download.format_converter_to_pack_load_constraint_count=0",
                             "--pipeline.upload.unmap_to_unpack_load_constraint_count=0",
                             "--pipeline.upload.unpack_to_format_converter_load_constraint_count=0"]),
        ("(un)map <-> (un)pack interleaved", ["--pipeline.download.pack_to_map_load_constraint_count=2",
                                              "--pipeline.download.format_converter_to_pack_load_constraint_count=0",
                                              "--pipeline.upload.unmap_to_unpack_load_constraint_count=2",
                                              "--pipeline.upload.unpack_to_format_converter_load_constraint_count=0"]),
        ("transfer <-> render interleaved", ["--pipeline.download.pack_to_map_load_constraint_count=0",
                                             "--pipeline.download.format_converter_to_pack_load_constraint_count=2",
                                             "--pipeline.upload.unmap_to_unpack_load_constraint_count=0",
                                             "--pipeline.upload.unpack_to_format_converter_load_constraint_count=2"]),
        ("(un)map <-> (un)pack & transfer <-> render interleaved", ["--pipeline.download.pack_to_map_load_constraint_count=2",
                                                                    "--pipeline.download.format_converter_to_pack_load_constraint_count=2",
                                                                    "--pipeline.upload.unmap_to_unpack_load_constraint_count=2",
                                                                    "--pipeline.upload.unpack_to_format_converter_load_constraint_count=2"])
    ]

    pipeline_size_configuration_high_throughput_key, pipeline_size_configuration_high_throughput_value = pipeline_size_configurations[pipeline_size_configuration_high_throughput_idx]
    input_load_constraint_non_interleaved_key, input_load_constraint_non_interleaved_value = input_load_constraints[input_load_constraint_non_interleaved_idx]
    input_load_constraint_all_interleaved_key, input_load_constraint_all_interleaved_value = input_load_constraints[input_load_constraint_all_interleaved_idx]

    render_pixel_format_render_runs = [
        ("RGBA 8-bit Integer", ["--render.pixel_format=RGBA_8BIT"]),
        ("RGBA 16-bit Float", ["--render.pixel_format=RGBA_FLOAT_16BIT"]),
        ("RGBA 32-bit Float", ["--render.pixel_format=RGBA_FLOAT_32BIT"])
    ]

    chroma_filter_type_render_runs = [
        ("drop/replicate", ["--render.format_conversion.v210.decode.chroma_filter_type=none",
                            "--render.format_conversion.v210.encode.chroma_filter_type=none"]),
        ("basic", ["--render.format_conversion.v210.decode.chroma_filter_type=basic",
                   "--render.format_conversion.v210.encode.chroma_filter_type=basic"]),
        ("high", ["--render.format_conversion.v210.decode.chroma_filter_type=high",
                  "--render.format_conversion.v210.encode.chroma_filter_type=high"])
    ]

    glsl_mode_render_runs = [
        ("GLSL 3.3 (FS per fragment)", ["--render.format_conversion_mode=glsl_330"]),
        ("GLSL 4.2 (imageStore via FS per V210 group)", ["--render.format_conversion_mode=glsl_420"]),
        ("GLSL 4.2 & ARB_framebuffer_no_attachments (imageStore via FS per V210 group)", ["--render.format_conversion_mode=glsl_420_no_buffer_attachment_ext"]),
        ("GLSL 4.3 (imageStore via CS per V210 group caching reads in shared memory)", ["--render.format_conversion_mode=glsl_430_compute"]),
        ("GLSL 4.3 (imageStore via CS per V210 group no caching via shared memory)", ["--render.format_conversion_mode=glsl_430_compute_no_shared"]),
    ]

    glsl_mode_best_key, glsl_mode_best_value = glsl_mode_render_runs[2]

    glsl_mode_render_runs_compute_only = [
        ("GLSL 4.3 (imageStore via CS per V210 group caching reads in shared memory)", ["--render.format_conversion_mode=glsl_430_compute"]),
        ("GLSL 4.3 (imageStore via CS per V210 group no caching via shared memory)", ["--render.format_conversion_mode=glsl_430_compute_no_shared"]),
    ]

    v210_unpack_mode_render_runs = [
        ("RGB10_A2UI internal tex format", ["--render.format_conversion.v210.bitextraction_in_shader_is_enabled=false"]),
        ("GL_R32UI internal tex format + shader bit ops", ["--render.format_conversion.v210.bitextraction_in_shader_is_enabled=true"])
    ]

    compute_shader_working_group_sizes_render_runs = [
        ("workgroup size 16", ["--render.format_conversion.v210.decode.glsl_430_work_group_size=16",
                               "--render.format_conversion.v210.encode.glsl_430_work_group_size=16"]),
        ("workgroup size 32", ["--render.format_conversion.v210.decode.glsl_430_work_group_size=32",
                               "--render.format_conversion.v210.encode.glsl_430_work_group_size=32"]),
        ("workgroup size 64", ["--render.format_conversion.v210.decode.glsl_430_work_group_size=64",
                               "--render.format_conversion.v210.encode.glsl_430_work_group_size=64"]),
        ("workgroup size 96", ["--render.format_conversion.v210.decode.glsl_430_work_group_size=96",
                               "--render.format_conversion.v210.encode.glsl_430_work_group_size=96"]),
        ("workgroup size 128", ["--render.format_conversion.v210.decode.glsl_430_work_group_size=128",
                                "--render.format_conversion.v210.encode.glsl_430_work_group_size=128"]),
    ]

    start_time = time.time()

    ####
    ## Show
    ####

    # Iterating over resolutions and optimizations flags, wide pipeline mode is fixed
    for input_sequence_key, input_sequence_value in input_sequences:
        for optimization_flag_key, optimization_flag_value in optimization_flags:
            batched_options = [(input_sequence_key, input_sequence_value),
                               (optimization_flag_key, optimization_flag_value),
                               (glsl_mode_best_key, glsl_mode_best_value)]
            run_frame_bender("Threading optimizations", batched_options)

    # Iterating over pipeline size configs, and input load constraints.
    for input_sequence_key, input_sequence_value in input_sequences:
        # Now try different input_load_constraints, and see how that changes the image
        for pipeline_size_configuration_key, pipeline_size_configuration_value in pipeline_size_configurations:
            for input_load_constraint_key, input_load_constraint_value in input_load_constraints:
                # Only run through different interleaved modes, if we are in high-throughput mode
                if pipeline_size_configuration_key != pipeline_size_configuration_high_throughput_key and input_load_constraint_key != input_load_constraint_non_interleaved_key:
                    continue
                batched_options = [
                    (input_sequence_key, input_sequence_value),
                    (pipeline_size_configuration_key, pipeline_size_configuration_value),
                    (input_load_constraint_key, input_load_constraint_value),
                    (optimization_flags_best_key, optimization_flags_best_value),
                    (glsl_mode_best_key, glsl_mode_best_value)
                ]
                run_frame_bender("Pipeline configuration", batched_options)

    # Iterating over switching on/off different stages of the pipeline for througput bottleneck determination (debug)
    # Always using througput-optimized pipeline
    for input_sequence_key, input_sequence_value in input_sequences:
        for optimization_flag_key, optimization_flag_value in optimization_flags_throughput_runs:
            for debug_flags_key, debug_flags_value in debug_flags_throughput_runs:
                batched_options = [(input_sequence_key, input_sequence_value),
                                   (debug_flags_key, debug_flags_value),
                                   (optimization_flag_key, optimization_flag_value),
                                   (glsl_mode_best_key, glsl_mode_best_value)]
                run_frame_bender("Isolated stages", batched_options)

    for input_sequence_key, input_sequence_value in input_sequences:
        for render_pixel_format_key, render_pixel_format_value in render_pixel_format_render_runs:
            batched_options = [
                (input_sequence_key, input_sequence_value),
                (render_pixel_format_key, render_pixel_format_value),
                (optimization_flags_best_key, optimization_flags_best_value),
                (glsl_mode_best_key, glsl_mode_best_value)
            ]
            run_frame_bender("Render formats", batched_options)

    for input_sequence_key, input_sequence_value in input_sequences:
        for glsl_mode_render_key, glsl_mode_render_value in glsl_mode_render_runs:
            for chroma_filter_type_key, chroma_filter_type_value in chroma_filter_type_render_runs:
                batched_options = [
                    (input_sequence_key, input_sequence_value),
                    (glsl_mode_render_key, glsl_mode_render_value),
                    (chroma_filter_type_key, chroma_filter_type_value),
                    (optimization_flags_best_key, optimization_flags_best_value)
                ]
                run_frame_bender("GLSL mode & chroma filters", batched_options)

    for input_sequence_key, input_sequence_value in input_sequences:
        for v210_unpack_mode_key, v210_unpack_mode_value in v210_unpack_mode_render_runs:
            batched_options = [
                (input_sequence_key, input_sequence_value),
                (v210_unpack_mode_key, v210_unpack_mode_value),
                (optimization_flags_best_key, optimization_flags_best_value),
                (glsl_mode_best_key, glsl_mode_best_value)
            ]
            run_frame_bender("GLSL V210 10-bit handling", batched_options)

    for input_sequence_key, input_sequence_value in input_sequences:
        for glsl_mode_render_key, glsl_mode_render_value in glsl_mode_render_runs_compute_only:
            for compute_shader_working_group_sizes_key, compute_shader_working_group_sizes_value in compute_shader_working_group_sizes_render_runs:
                batched_options = [
                    (input_sequence_key, input_sequence_value),
                    (compute_shader_working_group_sizes_key, compute_shader_working_group_sizes_value),
                    (glsl_mode_render_key, glsl_mode_render_value),
                    (optimization_flags_best_key, optimization_flags_best_value)
                ]
                run_frame_bender("GLSL compute workgroup sizes", batched_options)

    end_time = time.time()

    # Write the folder-id -> run-name mapping for later analysis.
    csv_file = open(os.path.join(opts.out_dir, '_run_table.csv'), 'wb')
    csv_writer = csv.writer(csv_file)
    for row in folder_id_to_run_map:
        csv_writer.writerow(row)
    csv_file.close()

    print "Ran " + `num_runs` + " number of executions in " + `end_time - start_time` + " seconds."

    if num_failed_runs != 0:
        raise RuntimeError("One or more runs did not run succesfully, please check your logs (" + `num_failed_runs` + " runs failed.)")

# Now render the traces
if not os.path.exists(opts.out_dir):
    print "Output folder '" + opts.out_dir + "' doesn't exist, can't render traces."
    exit()

print "Rendering traces."

# Every numbered run folder containing a trace.fbt gets host + GPU PDFs.
for file_folder in os.listdir(opts.out_dir):

    folder_output = os.path.join(opts.out_dir, file_folder)

    if not os.path.isdir(folder_output):
        continue

    trace_file_path = os.path.join(folder_output, "trace.fbt")

    if not os.path.exists(trace_file_path):
        continue

    # Visualize the host trace
    subprocess.check_call([
        "/usr/bin/python2",
        "trace_muncher.py",
        "--input-file",
        trace_file_path,
        "--output-file",
        os.path.join(folder_output, "host_trace.pdf"),
        "--in-point",
        "1000",
        "--frames-count",
        "12",
        "--width",
        "1000"
    ])

    # Visualize the GPU trace
    subprocess.check_call([
        "/usr/bin/python2",
        "trace_muncher.py",
        "--input-file",
        trace_file_path,
        "--output-file",
        os.path.join(folder_output, "gpu_trace.pdf"),
        "--gl-times",
        "--in-point",
        "1000",
        "--frames-count",
        "10",
        "--width",
        "1000"
    ])
| mit |
rnirmal/cinder | setup.py | 3 | 1782 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import setuptools
from cinder.openstack.common import setup as common_setup
from cinder import version
# Read the dependency list once (from the pip requirements file) so it can
# be handed to setuptools below.
requires = common_setup.parse_requirements()

setuptools.setup(
    name='cinder',
    version=version.canonical_version_string(),
    description='block storage service',
    author='OpenStack',
    author_email='cinder@lists.launchpad.net',
    url='http://www.openstack.org/',
    cmdclass=common_setup.get_cmdclass(),
    packages=setuptools.find_packages(exclude=['bin', 'smoketests']),
    install_requires=requires,
    include_package_data=True,
    test_suite='nose.collector',
    setup_requires=['setuptools_git>=0.4'],
    # Console entry points installed as executables.
    scripts=['bin/cinder-all',
             'bin/cinder-api',
             'bin/cinder-clear-rabbit-queues',
             'bin/cinder-manage',
             'bin/cinder-rootwrap',
             'bin/cinder-scheduler',
             'bin/cinder-volume',
             'bin/cinder-volume-usage-audit',
             ],
    py_modules=[])
| apache-2.0 |
h4r5h1t/django-hauthy | tests/utils_tests/test_html.py | 160 | 10711 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from datetime import datetime
from django.test import SimpleTestCase, ignore_warnings
from django.utils import html, safestring, six
from django.utils._os import upath
from django.utils.deprecation import RemovedInDjango110Warning
from django.utils.encoding import force_text
class TestUtilsHtml(SimpleTestCase):
    def check_output(self, function, value, output=None):
        """
        Check that function(value) equals output. If output is None,
        check that function(value) equals value.
        """
        # None means "the function must leave the value unchanged".
        if output is None:
            output = value
        self.assertEqual(function(value), output)
def test_escape(self):
f = html.escape
items = (
('&', '&'),
('<', '<'),
('>', '>'),
('"', '"'),
("'", '''),
)
# Substitution patterns for testing the above items.
patterns = ("%s", "asdf%sfdsa", "%s1", "1%sb")
for value, output in items:
for pattern in patterns:
self.check_output(f, pattern % value, pattern % output)
# Check repeated values.
self.check_output(f, value * 2, output * 2)
# Verify it doesn't double replace &.
self.check_output(f, '<&', '<&')
def test_format_html(self):
self.assertEqual(
html.format_html("{} {} {third} {fourth}",
"< Dangerous >",
html.mark_safe("<b>safe</b>"),
third="< dangerous again",
fourth=html.mark_safe("<i>safe again</i>")
),
"< Dangerous > <b>safe</b> < dangerous again <i>safe again</i>"
)
def test_linebreaks(self):
f = html.linebreaks
items = (
("para1\n\npara2\r\rpara3", "<p>para1</p>\n\n<p>para2</p>\n\n<p>para3</p>"),
("para1\nsub1\rsub2\n\npara2", "<p>para1<br />sub1<br />sub2</p>\n\n<p>para2</p>"),
("para1\r\n\r\npara2\rsub1\r\rpara4", "<p>para1</p>\n\n<p>para2<br />sub1</p>\n\n<p>para4</p>"),
("para1\tmore\n\npara2", "<p>para1\tmore</p>\n\n<p>para2</p>"),
)
for value, output in items:
self.check_output(f, value, output)
def test_strip_tags(self):
f = html.strip_tags
items = (
('<p>See: 'é is an apostrophe followed by e acute</p>',
'See: 'é is an apostrophe followed by e acute'),
('<adf>a', 'a'),
('</adf>a', 'a'),
('<asdf><asdf>e', 'e'),
('hi, <f x', 'hi, <f x'),
('234<235, right?', '234<235, right?'),
('a4<a5 right?', 'a4<a5 right?'),
('b7>b2!', 'b7>b2!'),
('</fe', '</fe'),
('<x>b<y>', 'b'),
('a<p onclick="alert(\'<test>\')">b</p>c', 'abc'),
('a<p a >b</p>c', 'abc'),
('d<a:b c:d>e</p>f', 'def'),
('<strong>foo</strong><a href="http://example.com">bar</a>', 'foobar'),
# caused infinite loop on Pythons not patched with
# http://bugs.python.org/issue20288
('&gotcha&#;<>', '&gotcha&#;<>'),
)
for value, output in items:
self.check_output(f, value, output)
# Some convoluted syntax for which parsing may differ between python versions
output = html.strip_tags('<sc<!-- -->ript>test<<!-- -->/script>')
self.assertNotIn('<script>', output)
self.assertIn('test', output)
output = html.strip_tags('<script>alert()</script>&h')
self.assertNotIn('<script>', output)
self.assertIn('alert()', output)
# Test with more lengthy content (also catching performance regressions)
for filename in ('strip_tags1.html', 'strip_tags2.txt'):
path = os.path.join(os.path.dirname(upath(__file__)), 'files', filename)
with open(path, 'r') as fp:
content = force_text(fp.read())
start = datetime.now()
stripped = html.strip_tags(content)
elapsed = datetime.now() - start
self.assertEqual(elapsed.seconds, 0)
self.assertIn("Please try again.", stripped)
self.assertNotIn('<', stripped)
def test_strip_spaces_between_tags(self):
f = html.strip_spaces_between_tags
# Strings that should come out untouched.
items = (' <adf>', '<adf> ', ' </adf> ', ' <f> x</f>')
for value in items:
self.check_output(f, value)
# Strings that have spaces to strip.
items = (
('<d> </d>', '<d></d>'),
('<p>hello </p>\n<p> world</p>', '<p>hello </p><p> world</p>'),
('\n<p>\t</p>\n<p> </p>\n', '\n<p></p><p></p>\n'),
)
for value, output in items:
self.check_output(f, value, output)
@ignore_warnings(category=RemovedInDjango110Warning)
def test_strip_entities(self):
f = html.strip_entities
# Strings that should come out untouched.
values = ("&", "&a", "&a", "a&#a")
for value in values:
self.check_output(f, value)
# Valid entities that should be stripped from the patterns.
entities = ("", "", "&a;", "&fdasdfasdfasdf;")
patterns = (
("asdf %(entity)s ", "asdf "),
("%(entity)s%(entity)s", ""),
("&%(entity)s%(entity)s", "&"),
("%(entity)s3", "3"),
)
for entity in entities:
for in_pattern, output in patterns:
self.check_output(f, in_pattern % {'entity': entity}, output)
def test_escapejs(self):
f = html.escapejs
items = (
('"double quotes" and \'single quotes\'', '\\u0022double quotes\\u0022 and \\u0027single quotes\\u0027'),
(r'\ : backslashes, too', '\\u005C : backslashes, too'),
('and lots of whitespace: \r\n\t\v\f\b', 'and lots of whitespace: \\u000D\\u000A\\u0009\\u000B\\u000C\\u0008'),
(r'<script>and this</script>', '\\u003Cscript\\u003Eand this\\u003C/script\\u003E'),
('paragraph separator:\u2029and line separator:\u2028', 'paragraph separator:\\u2029and line separator:\\u2028'),
)
for value, output in items:
self.check_output(f, value, output)
@ignore_warnings(category=RemovedInDjango110Warning)
def test_remove_tags(self):
f = html.remove_tags
items = (
("<b><i>Yes</i></b>", "b i", "Yes"),
("<a>x</a> <p><b>y</b></p>", "a b", "x <p>y</p>"),
)
for value, tags, output in items:
self.assertEqual(f(value, tags), output)
def test_smart_urlquote(self):
quote = html.smart_urlquote
# Ensure that IDNs are properly quoted
self.assertEqual(quote('http://öäü.com/'), 'http://xn--4ca9at.com/')
self.assertEqual(quote('http://öäü.com/öäü/'), 'http://xn--4ca9at.com/%C3%B6%C3%A4%C3%BC/')
# Ensure that everything unsafe is quoted, !*'();:@&=+$,/?#[]~ is considered safe as per RFC
self.assertEqual(quote('http://example.com/path/öäü/'), 'http://example.com/path/%C3%B6%C3%A4%C3%BC/')
self.assertEqual(quote('http://example.com/%C3%B6/ä/'), 'http://example.com/%C3%B6/%C3%A4/')
self.assertEqual(quote('http://example.com/?x=1&y=2+3&z='), 'http://example.com/?x=1&y=2+3&z=')
self.assertEqual(quote('http://example.com/?x=<>"\''), 'http://example.com/?x=%3C%3E%22%27')
self.assertEqual(quote('http://example.com/?q=http://example.com/?x=1%26q=django'),
'http://example.com/?q=http%3A%2F%2Fexample.com%2F%3Fx%3D1%26q%3Ddjango')
self.assertEqual(quote('http://example.com/?q=http%3A%2F%2Fexample.com%2F%3Fx%3D1%26q%3Ddjango'),
'http://example.com/?q=http%3A%2F%2Fexample.com%2F%3Fx%3D1%26q%3Ddjango')
def test_conditional_escape(self):
s = '<h1>interop</h1>'
self.assertEqual(html.conditional_escape(s),
'<h1>interop</h1>')
self.assertEqual(html.conditional_escape(safestring.mark_safe(s)), s)
def test_html_safe(self):
@html.html_safe
class HtmlClass(object):
if six.PY2:
def __unicode__(self):
return "<h1>I'm a html class!</h1>"
else:
def __str__(self):
return "<h1>I'm a html class!</h1>"
html_obj = HtmlClass()
self.assertTrue(hasattr(HtmlClass, '__html__'))
self.assertTrue(hasattr(html_obj, '__html__'))
self.assertEqual(force_text(html_obj), html_obj.__html__())
def test_html_safe_subclass(self):
if six.PY2:
class BaseClass(object):
def __html__(self):
# defines __html__ on its own
return 'some html content'
def __unicode__(self):
return 'some non html content'
@html.html_safe
class Subclass(BaseClass):
def __unicode__(self):
# overrides __unicode__ and is marked as html_safe
return 'some html safe content'
else:
class BaseClass(object):
def __html__(self):
# defines __html__ on its own
return 'some html content'
def __str__(self):
return 'some non html content'
@html.html_safe
class Subclass(BaseClass):
def __str__(self):
# overrides __str__ and is marked as html_safe
return 'some html safe content'
subclass_obj = Subclass()
self.assertEqual(force_text(subclass_obj), subclass_obj.__html__())
def test_html_safe_defines_html_error(self):
msg = "can't apply @html_safe to HtmlClass because it defines __html__()."
with self.assertRaisesMessage(ValueError, msg):
@html.html_safe
class HtmlClass(object):
def __html__(self):
return "<h1>I'm a html class!</h1>"
def test_html_safe_doesnt_define_str(self):
method_name = '__unicode__()' if six.PY2 else '__str__()'
msg = "can't apply @html_safe to HtmlClass because it doesn't define %s." % method_name
with self.assertRaisesMessage(ValueError, msg):
@html.html_safe
class HtmlClass(object):
pass
| bsd-3-clause |
mhotwagner/abackend | abackend-env/lib/python3.5/site-packages/django/core/mail/backends/console.py | 696 | 1477 | """
Email backend that writes messages to console instead of sending them.
"""
import sys
import threading
from django.core.mail.backends.base import BaseEmailBackend
from django.utils import six
class EmailBackend(BaseEmailBackend):
    """Console backend: dump each outgoing message to a stream.

    The destination defaults to ``sys.stdout`` and can be replaced via the
    ``stream`` keyword argument (used, e.g., by the file-based backend).
    """

    def __init__(self, *args, **kwargs):
        # Pop our private option before delegating the rest upward.
        self.stream = kwargs.pop('stream', sys.stdout)
        self._lock = threading.RLock()
        super(EmailBackend, self).__init__(*args, **kwargs)

    def write_message(self, message):
        # Render the message; on Python 3 decode the raw bytes with the
        # message's own charset (falling back to UTF-8) so we can write text.
        msg = message.message()
        payload = msg.as_bytes()
        if six.PY3:
            msg_charset = msg.get_charset()
            if msg_charset:
                encoding = msg_charset.get_output_charset()
            else:
                encoding = 'utf-8'
            payload = payload.decode(encoding)
        self.stream.write('%s\n' % payload)
        self.stream.write('-' * 79)
        self.stream.write('\n')

    def send_messages(self, email_messages):
        """Write all messages to the stream in a thread-safe way."""
        if not email_messages:
            return
        sent = 0
        with self._lock:
            try:
                opened_here = self.open()
                for email_message in email_messages:
                    self.write_message(email_message)
                    self.stream.flush()  # flush after each message
                    sent += 1
                if opened_here:
                    self.close()
            except Exception:
                # Swallow errors only when the caller asked for silence.
                if not self.fail_silently:
                    raise
        return sent
| mit |
qizenguf/MLC-STT | src/arch/x86/isa/insts/simd64/integer/data_reordering/shuffle_and_swap.py | 91 | 2486 | # Copyright (c) 2007 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
# gem5 x86 microcode for the MMX PSHUFW (packed shuffle words) instruction.
# Three macroop forms are defined: register/register, memory operand, and
# RIP-relative (P) memory operand; each shuffles 16-bit words per the
# immediate byte (size=2, ext=imm).
microcode = '''
def macroop PSHUFW_MMX_MMX_I {
shuffle mmx, mmxm, mmxm, size=2, ext=imm
};
def macroop PSHUFW_MMX_M_I {
ldfp ufp1, seg, sib, disp, dataSize=8
shuffle mmx, ufp1, ufp1, size=2, ext=imm
};
def macroop PSHUFW_MMX_P_I {
rdip t7
ldfp ufp1, seg, riprel, disp, dataSize=8
shuffle mmx, ufp1, ufp1, size=2, ext=imm
};
'''
# PSWAPD
| bsd-3-clause |
itsthejoker/Pokemon-Homage | lemonyellow/data/dialogue.py | 1 | 163756 | #!/usr/local/bin/python
# -*- coding: utf-8 -*-
from addict import Dict
# Public API: the per-area dialogue Dicts exported by this module.
# NOTE(review): the "fuschia" spellings presumably match the variable
# names defined later in the file -- confirm before "fixing" them.
__all__ = [
    "introduction",
    "pallet_town",
    "viridian_city",
    "viridian_city_2",
    "viridian_forest",
    "pewter_city",
    "route_1",
    "route_3",
    "route_5",
    "route_10",
    "route_11",
    "route_12",
    "route_13",
    "route_14",
    "route_18",
    "route_20",
    "route_23",
    "mt_moon",
    "cerulean_city",
    "sea_cottage",
    "vermilion_city",
    "ss_anne",
    "digletts_cave",
    "lavender_town",
    "lavender_town_2",
    "underground_path",
    "celadon_city",
    "celadon_gym",
    "rocket_game_corner",
    "saffron_city",
    "saffron_city_2",
    "cycling_road",
    "fuschia_city",
    "fuschia_gym",
    "safari_zone",
    "cinnabar_island",
    "cinnabar_gym",
    "pokemon_mansion",
    "pokemon_league",
    "victory_road",
    "indigo_plateau",
    "pokemon_center",
    "world_interactions",
    "trainers",
    "special_dialogue",
]
# Replaceable items: $PLAYER, $RIVAL, $GENDER, $GENDERS (for plurals) and
# $SLOT1 (which pulls the name of the pokemon currently in slot1)
# Might not use this section, but it's nice to have
# Intro sequence text: button explanations shown before the game starts,
# then Professor Oak's opening monologue.
introduction = Dict()
# introduction is available to import, but it helps to save some space here
intro = introduction
intro.preintro001 = (
    # fixed missing space: "order oftheir importance"
    "The various buttons will be explained in the order of their importance."
)
intro.preintro002 = (
    "Moves the main character. Also used to choose various data headings."
)  # arrow keys
intro.preintro003 = (
    "Used to confirm a choice, check things, chat, and scroll text."
)  # A button
# B button
intro.preintro004 = "Used to exit, cancel a choice, and cancel a mode."
intro.preintro005 = "Press this button to open the MENU."  # Start / ENTER
# Select
intro.preintro006 = "Used to shift items and to use a registered item."
intro.preintro007 = (
    "In the world which you are about to enter, you will "
    "embark on a grand adventure with you as the hero."
)
intro.preintro008 = (
    "Speak to people and check things wherever you go, be it "
    "towns, roads, or caves. Gather information and hints "
    "from every source."
)
intro.preintro009 = (
    "New paths will open to you by helping people in need, "
    "overcoming challenges, and solving mysteries."
)
intro.preintro010 = (
    "At times, you will be challenged by others and attacked "
    "by wild creatures. Be brave and keep pushing on."
)
intro.preintro011 = (
    "Through your adventure, we hope that you will interact "
    "with all sorts of people and achieve personal growth. "
    "That is our biggest objective."
)
intro.preintro012 = "Press the A button, and let your adventure begin!"
intro.intro001 = (
    "Hello there! Welcome to the world of POKEMON! My name is "
    "OAK! People call me the POKEMON PROF!"
)
intro.intro002 = (
    "This world is inhabited by creatures called POKEMON! For "
    "some people, POKEMON are pets. Others use them for fights. "
    "Myself... I study POKEMON as a profession."
)
intro.intro003 = "First, what is your name?"
intro.intro004 = "Right! So your name is $PLAYER!"
intro.intro005 = "This is my grandson. He's been your rival since you were a baby."
intro.intro006 = "...Erm, what is his name again?"
intro.intro007 = "That's right! I remember now! His name is $RIVAL!"
intro.intro008 = (
    "$PLAYER! Your very own POKEMON legend is about to unfold! "
    "A world of dreams and adventures with POKEMON awaits! "
    "Let's go!"
)
# Pallet Town: signs, townsfolk, Red's and Blue's houses, and the
# starter-selection scene in Oak's lab.
pallet_town = Dict()
pal = pallet_town
pal.sign001 = "PALLET TOWN - A Pure White Beginning"
pal.npc001 = "I'm raising POKEMON too! When they get strong, they can protect me!"
pal.npc002 = (
    "Technology is incredible! You can now store and recall items "
    "and POKEMON as data via PC!"
)
# Red's house
pal.sign002 = "$PLAYER's House"
pal.mom001 = "...Right. All $GENDERS leave home someday. It said so on TV."
pal.mom002 = "Oh, yes. PROF. OAK, next door, was looking for you."
# Blue's house
pal.sign003 = "$RIVAL's House"
pal.daisy001 = "Hi $PLAYER! $RIVAL is out at Grandpa's lab."
# Oak's lab
pal.sign004 = "Professor Oak's Research Lab"
pal.npc005 = "I study POKEMON as PROF. OAK'S AIDE."
pal.npc006 = "I study POKEMON as PROF. OAK'S AIDE."
pal.npc007 = (
    "PROF. OAK is the authority on POKEMON! Many POKEMON trainers "
    "hold him in high regard!"
)
pal.rival001 = "Yo $PLAYER! Gramps isn't around!"
# try to leave pallet town!
pal.oak001 = "HEY! Wait! Don't go out!"
pal.oak002 = (
    "It's unsafe! Wild POKEMON live in tall grass! You need your own"
    " POKEMON for your protection. I know! Here, come with me!"
)
# back in Oak's lab
pal.rival002 = "Gramps! I'm fed up with waiting!"
pal.oak003 = (
    "$RIVAL? Let me think... Oh, that's right, I told you to come! Just wait!"
)
pal.oak003A = "$RIVAL, stop being such a little shit. Just wait!"
pal.oak004 = (
    "Here, $PLAYER! There are 3 POKEMON here! Haha! They are inside "
    "the POKE BALLS. When I was young, I was a serious POKEMON "
    "Trainer. In my old age, I have only 3 left, but you can have "
    "one! Choose!"
)
pal.rival003 = "Hey! Gramps! What about me?"
pal.oak005 = "Be patient! $RIVAL, you can have one too!"
pal.oak005A = (
    # fixed missing space at the concatenation boundary ("yourlong")
    "Be patient, toad. You get one too, but only as a favor to your "
    "long deceased mother."
)
pal.oak006 = "Now, $PLAYER, which POKEMON do you want?"
pal.rival004 = (
    "Heh, I don't need to be greedy like you! Go ahead and choose, $PLAYER!"
)
# the name is replaced dependent on the startertype variable
pal.oak007 = "Hey, don't go away yet!"  # on trying to leave before choosing
# frameworks.starter_poke.slot1
pal.oak008 = "So! You want the $STARTERTYPE1 POKEMON, $STARTERNAME?"
# frameworks.starter_poke.slot2
pal.oak009 = "So! You want the $STARTERTYPE2 POKEMON, $STARTERNAME?"
# frameworks.starter_poke.slot3
pal.oak010 = "So! You want the $STARTERTYPE3 POKEMON, $STARTERNAME?"
pal.oak011 = "This POKEMON is really energetic!"
pal.rival005 = "I'll take this one, then!"
pal.oak012 = "If a wild POKEMON appears, your POKEMON can fight against it!"
pal.rival006 = "My POKEMON looks a lot stronger."
pal.rival007 = (
    "Wait $PLAYER! Let's check out our POKEMON! Come on, I'll take you on!"
)
pal.rival008 = "Yeah! Am I great or what?"
pal.rival009 = "WHAT?! Unbelievable! I picked the wrong POKEMON!"
pal.rival010 = (
    "Okay! I'll make my POKEMON fight to toughen it up! $PLAYER! "
    "Gramps! Smell ya later!"
)
pal.oak013 = "$PLAYER, raise your young POKEMON by making it fight!"
pal.mom003 = "$PLAYER! You should take a quick rest."
pal.mom004 = "Oh good, you and your POKEMON are looking great! Take care now!"
# Route 1: the Poke Mart shopkeeper's free sample and a ledge tip.
route_1 = Dict()
rt1 = route_1
# shopkeeper
rt1.npc007 = (
    "Hi! I work at a POKEMON MART. It's a convenient shop, so please"
    " visit us in VIRIDIAN CITY. I know, I'll give you a sample! "
    "Here you go!"
)
rt1.npc008 = "We also carry POKE BALLS for catching POKEMON!"
# random npc
rt1.npc009 = (
    "See those ledges along the road? It's a bit scary, but you can "
    "jump from them. You can get back to PALLET TOWN quicker this "
    "way."
)
# Viridian City: signs, townsfolk, Poke Center/Mart, the trainer school,
# and the Oak's Parcel delivery sequence back at the lab.
viridian_city = Dict()
vrd = viridian_city
vrd.sign005 = "Viridian City - The Eternally Green Paradise"
vrd.sign006 = (
    "TRAINER TIPS! The battle moves of POKEMON are limited by their "
    "POWER POINTS, PP. To replenish PP, rest your tired POKEMON at "
    "a POKEMON CENTER!"
)
vrd.npc010 = (
    "Those POKE BALLS at your waist! You have POKEMON! It's great "
    "that you can carry and use POKEMON any time, anywhere!"
)
vrd.npc011 = "You want to know about the 2 kinds of caterpillar POKEMON?"
vrd.npc011A = (
    "CATERPIE has no poison, but WEEDLE does. Watch out for its POISON STING!"
)
vrd.npc011B = "Oh, OK then!"
# grumpy asshole's granddaughter
vrd.npc012 = "Oh Grandpa! Don't be so mean! ...He hasn't had his coffee yet."
# grumpy asshole who blocks the way
vrd.npc013 = "You can't go through here! This is private property!"
vrd.npc014 = (
    "Yawn! I must have dozed off in the sun. I had this dream about "
    "a DROWZEE eating my dream. What's this? Where did this TM come "
    "from? This is spooky! Here, you can have this TM."
)
vrd.npc014A = "TM42 contains DREAM EATER... ...Snore..."
# Poke Center
vrd.npc015 = (
    "There's a POKE CENTER in every town ahead. They don't charge any money, either!"
)
# fixed typo: "tied" -> "tired"
vrd.npc016 = "POKEMON CENTERS heal your tired, hurt, or fainted POKEMON!"
vrd.npc017 = "You can use that PC in the corner. The receptionist told me. So kind!"
# Poke Mart
vrd.npc018 = "This shop sells many ANTIDOTES."
vrd.npc019 = "No! POTIONs are all sold out!"
# Poke School
# fixed typo: "fll" -> "all"
vrd.npc020 = "Whew! I'm trying to memorize all my notes."
vrd.npc020A = "Hey! Don't look at my notes!"
vrd.npc021 = "Okay! Be sure to read the blackboard carefully!"
# house
vrd.npc022 = (
    "Coming up with nicknames is fun, but hard. Simple names are the "
    "easiest to remember."
)
vrd.npc023 = "My Daddy loves POKEMON too!"
vrd.npc024 = "Tetweet!"  # Spearow
# back at the poke mart...
vrd.npc025 = (
    "Hey! You came from PALLET TOWN? You know PROF. OAK, right? His "
    "order came in. Will you take it to him?"
)
vrd.notification001 = "$PLAYER received OAK'S PARCEL."
vrd.npc025A = "Okay! Say hi to PROF. OAK for me!"
# heading back to Oak's lab.
vrd.oak014 = (
    "Oh, $PLAYER! How is my old POKEMON? Well, it seems to like you "
    "a lot. You must be talented as a POKEMON Trainer! What? You "
    "have something for me?"
)
vrd.notification002 = "$PLAYER delivered OAK'S PARCEL."
vrd.oak015 = "Ah! This is the custom POKE BALL I ordered! Thank you!"
vrd.rival011 = "Gramps! What did you call me for?"
vrd.oak016 = (
    "Oh right! I have a request of you two. On the desk there is my "
    "invention, the POKEDEX! It automatically records data on "
    "POKEMON you've seen or caught! It's a hi-tech encyclopedia!"
)
vrd.oak017 = "$PLAYER and $RIVAL! Take these with you!"
vrd.notification003 = "$PLAYER received a POKEDEX!"
vrd.oak018 = (
    "To make a complete guide on all the POKEMON in the world... "
    "that was my dream! But, I'm too old! I can't do it! So, I want "
    "you two to fulfill my dream for me! Get moving, you two! This "
    "is a great undertaking in POKEMON history!"
)
vrd.rival012 = (
    "Alright, Gramps! Leave it all to me. $PLAYER, I hate to say "
    "it, but I don't need you! I know! I'll borrow a TOWN MAP from"
    " my sis! I'll tell her not to lend you one, $PLAYER! Hahaha!"
)
vrd.oak019 = "POKEMON around the world wait for you, $PLAYER!"
# Blue's house
vrd.daisy002 = (
    "Grandpa asked you to run an errand? Lazy... Here, this will help you!"
)  # get town map
vrd.daisy003 = "POKEMON are living things! If they get tired, give them a rest!"
# Back in Viridian...
vrd.npc026 = (
    "When I go shop in PEWTER CITY, I have to take the winding trail"
    " in VIRIDIAN FOREST."
)  # grumpy asshole's grandaughter
vrd.npc027 = (
    "Ahh, I've had my coffee now and I feel great! Sure, you can go "
    "through! Are you in a hurry?"
)
vrd.npc027A = "Time is money... go along, then."  # if you say yes
vrd.npc027B = (
    "I see you're using a POKEDEX. When you catch a POKEMON, your "
    "POKEDEX is automatically updated. What? Don't you know how to "
    "catch POKEMON? I'll show you, then."
)
# MOST BORING FIGHT SCENE EVER GOSH
vrd.npc027C = "First, you have to weaken the target POKEMON."
# guy outside viridian gym
vrd.npc028 = "This POKEMON GYM is always closed. I wonder who the LEADER is?"
vrd.sign007 = (
    "TRAINER TIPS: Catch POKEMON and expand your collection! The "
    "more you have, the easier it is to fight!"
)
# Route 2 and Viridian Forest: bug catchers and trainer-tips signs.
viridian_forest = Dict()
vrf = viridian_forest
# route 2, before forest
vrf.npc029 = "Are you going to VIRIDIAN FOREST? Be careful, it's a natural maze!"
vrf.npc030 = "RATTATA may be small, but its bite is wicked! Did you get one?"
# ENTER THE FOREST
vrf.npc031 = "I came here with some friends! They're out for POKEMON fights!"
vrf.npc032 = (
    "I ran out of POKE BALLS to catch POKEMON with! You should carry extras!"
)
vrf.sign008 = (
    "TRAINER TIPS: Weaken POKEMON before attempting to capture! "
    "When healthy, they may escape!"
)
vrf.sign009 = "For poison, use ANTIDOTE! Get it at POKEMON MARTs!"
vrf.sign010 = (
    "TRAINER TIPS: If you want to avoid battles, stay away from grassy areas!"
)
vrf.sign011 = "TRAINER TIPS: Contact PROF. OAK via PC to get your POKEDEX evaluated!"
vrf.sign012 = (
    "TRAINER TIPS: No stealing of POKEMON from other trainers! "
    "Catch only wild POKEMON!"
)
# Post forest
vrf.npc033 = (
    "Have you noticed the bushes on the roadside? They can be cut "
    "down by a special POKEMON move."
)
vrf.npc034 = (
    "Many POKEMON live only in forests and caves. You need to look "
    "everywhere to get different kinds!"
)
# Pewter City: signs, townsfolk, Poke Center/Mart, houses, the science
# museum (front desk, exhibits, side entrance), and Brock's gym.
pewter_city = Dict()
pwt = pewter_city
# Outside!
pwt.sign013 = "PEWTER CITY - A Stone Gray City"
pwt.sign014 = (
    "TRAINER TIPS: Any POKEMON that takes part in battle, however short, earns EXP!"
)
pwt.sign015 = (
    "NOTICE! Thieves have been stealing POKEMON fossils at MT. "
    "MOON! Please call PEWTER POLICE with any info!"
)
pwt.npc035 = "There aren't any serious POKEMON trainers here!"
pwt.npc036 = (
    "They're all like BUG CATCHERs, but PEWTER GYM's BROCK is totally into it!"
)
pwt.npc037 = "Did you check out the museum?"
pwt.npc037A = "Weren't those fossils from MT. MOON amazing?"
# do doo do do doo doo doo, doo doo doo, do do do
pwt.npc037B = "Really? You absolutely have to go!"
pwt.npc037C = (
    "It's right here! You have to pay to get in, but it's worth it! See you around!"
)
pwt.npc038 = (
    "You're a Trainer, right? BROCK's looking for new challengers! Follow me!"
) # grabs Player by scruff of neck, drags around
pwt.npc038A = "If you have the right stuff, go take on BROCK!"
pwt.npc039 = (
    "It's rumored that CLEFAIRYs came from the moon! They appeared "
    "after MOON STONE fell on MT. MOON."
)
pwt.npc040 = "Psssst! Do you know what I'm doing?"
pwt.npc040A = "That's right! It's hard work!"
pwt.npc040B = "I'm spraying REPEL to keep POKEMON out of my garden!"
# Poke Center
pwt.npc041 = "Puu pupuu!" # Jigglypuff
pwt.npc042 = (
    "Yawn! When JIGGLYPUFF sings, POKEMON get drowsy... ...Me too... Snore..."
)
pwt.npc043 = "What?! TEAM ROCKET is at MT. MOON? Huh? I'm on the phone! Scram!"
# poke mart
pwt.npc044 = (
    "Good things can happen if you raise POKEMON diligently, even the weak ones!"
)
pwt.npc045 = (
    "A shady, old man got me to buy this really weird fish POKEMON! "
    "It's totally weak and it cost P500!"
)
# house 1
pwt.npc046 = (
    "POKEMON learn new techniques as they grow! But, some moves must "
    "be taught by the Trainer!"
)
pwt.npc047 = (
    "POKEMON become easier to catch when they are hurt or asleep! "
    "But, it's not a sure thing!"
)
# house 2
pwt.npc048 = "Bowbow!" # Nidoran
pwt.npc049 = "NIDORAN, sit!"
pwt.npc050 = (
    "Our POKEMON's an outsider, so it's hard to handle. An outsider "
    "is a POKEMON that you get in a trade. It grows fast, but it may "
    "ignore an unskilled Trainer in battle! If only we had some "
    "BADGEs..."
)
# MUSEUM OF SCIENCE AND COOL SHIT
pwt.npc051 = "It's P50 for a child's ticket. Would you like to come in?"
pwt.npc051A = "Right, P50! Thank you! Take plenty of time to look!" # yes
pwt.npc051B = "Come again!" # no
pwt.npc052 = "That is one magnificent fossil!"
pwt.npc053 = "We have a space exhibit now."
pwt.npc054 = "MOON STONE? What's so special about it?"
pwt.npc055 = (
    "July 20, 1969! The first lunar landing! I bought a color TV just to watch it!"
)
pwt.npc056 = "I want a PIKACHU! It's so cute! I asked my Daddy to catch me one!"
pwt.npc057 = "Yeah, a PIKACHU soon, I promise!"
# museum's side entrance
pwt.npc058 = (
    "You can't sneak in the back way! Oh, whatever! Do you know what AMBER is?"
)
pwt.npc058A = (
    "There's a lab somewhere trying to resurrect ancient POKEMON from AMBER."
) # yes
pwt.npc058B = "AMBER is fossilized tree sap." # no
pwt.npc059 = "We are proud of two fossils of very rare, prehistoric POKEMON!"
pwt.npc060 = (
    "Ssh! I think that this chunk of AMBER contains POKEMON DNA! It "
    "would be great if POKEMON could be resurrected from it! But, my "
    "colleagues just ignore me! So I have a favor to ask! Take this "
    "to a POKEMON LAB and get it examined!"
)
pwt.npc060A = "Ssh! Get the OLD AMBER checked!"
# PEWTER GYM, BROCK'S DOMAIN
pwt.sign016 = (
    "PEWTER CITY POKEMON GYM - LEADER: BROCK - The Rock Solid POKEMON Trainer!"
)
pwt.npc061 = (
    "Hiya! I can tell you what it takes to become a POKEMON champ! "
    "I'm no Trainer, but I can tell you how to win! Let me take you "
    "to the top!"
)
pwt.npc061A = "All right! Let's get happening!" # yes
pwt.npc061B = "It's a free service! Let's get happening!" # no
pwt.npc061C = (
    "The first POKEMON out in a match is at the top of the POKEMON "
    "LIST! By changing the order of POKEMON, matches could be made "
    "easier!"
)
pwt.npc061D = "Just as I thought! You're POKEMON champ material!"
# Route 3: tired travelers' remarks and the Magikarp salesman.
route_3 = Dict()
rt3 = route_3
rt3.npc062 = (
    "Whew... I better take a rest... Groan... That tunnel from "
    "CERULEAN takes a lot out of you!"
)
rt3.npc063 = "Ouch! I tripped over a rocky POKEMON, GEODUDE!"
rt3.npc064 = "If you have too many POKEMON, you should store them via PC!"
rt3.npc065 = "I've got 6 POKE BALLS in my belt. At most, you can carry 6 POKEMON."
rt3.npc066 = (
    "TEAM ROCKET attacks CERULEAN citizens... TEAM ROCKET is always in the news!"
)
# magikarp scumbag
rt3.npc067 = (
    "Hello, there! Have I got just the deal for you! I'll let you "
    "have a swell MAGIKARP for just P500! What do you say?"
)
rt3.npc067A = "" # if yes, he just provides the pokemon.
rt3.npc067B = "No? I'm only doing this as a favor to you!" # no
# after the sale if you talk to him again
rt3.npc067C = "Well, I don't give refunds!"
# Mt. Moon: only the entrance warning sign so far.
mt_moon = Dict()
mt_moon.sign017 = "Beware! ZUBAT is a blood sucker!"
cerulean_city = Dict()
crl = cerulean_city
# outside
crl.sign018 = "CERULEAN CITY - A Mysterious, Blue Aura Surrounds It"
crl.sign019 = (
"TRAINER TIPS: Pressing B Button during evolution cancels the whole process."
)
crl.sign020 = "Grass and caves handled easily! BIKE SHOP"
crl.npc068 = "You're making an encyclopedia on POKEMON? That sounds amusing."
crl.npc069 = (
"I want a bright red BICYCLE! I'll keep it at home, so it won't get dirty!"
)
crl.npc070 = "SLOWBRO took a snooze..." # slowbro
crl.npc071 = (
"SLOWBRO, WITHDRAW! No! That's wrong! ...It's so hard to control "
"POKEMON! Your POKEMON's obedience depends on your abilities as "
"a Trainer!"
)
crl.npc072 = "You're a Trainer too? Collecting, fighting, it's a tough life."
crl.npc073 = (
"That bush in front of the shop is in the way. There might be a way around."
)
# police officer
crl.npc074 = (
"The people here were robbed. It’s obvious that TEAM ROCKET is "
"behind this most heinous crime! Even our POLICE force has "
"trouble with the ROCKETs!"
)
# standing in front of Cerulean Cave
crl.npc075 = (
"This is CERULEAN CAVE! Horribly strong POKEMON live in there! "
"The POKEMON LEAGUE champion is the only person who is allowed "
"in!"
)
# deep in Cerulean Cave
crl.npc076 = "Mew!" # mewtwo
# Pokemon Center
crl.npc077 = "BILL has lots of POKEMON! He collects rare ones, too!"
crl.npc078 = (
"Have you heard about BILL? Everyone calls him a POKEMANIAC! I "
"think people are just jealous of BILL, though. Who wouldn’t "
"want to boast about their POKEMON?"
)
# poke mart
crl.npc079 = (
"Use REPEL to keep bugs and weak POKEMON away. Put your "
"strongest POKEMON at the top of the list for best results!"
)
crl.npc080 = (
"Have you seen any RARE CANDY? It’s supposed to make POKEMON go up one level!"
)
# bike shop
crl.npc081 = (
"Hi! Welcome to our BIKE SHOP. Have we got just the BIKE for "
"you! It’s a cool BIKE! Do you want it?"
)
crl.npc081A = "It costs P1,000,000."
crl.npc081B = "Sorry, you can't afford it. Come back again sometime!"
crl.npc081C = "Oh, that's... a BIKE VOUCHER! OK! Here you go!"
crl.npc081D = (
"How do you like your new BICYCLE? You can take it on CYCLING ROAD and in caves!"
)
crl.npc082 = (
"A plain city BIKE is good enough for me! You can’t put a "
"shopping basket on an MTB!"
)
crl.npc083 = "These BIKEs are cool, but they’re way expensive!"
# house
crl.npc084 = "Hello there! Do you want to trade your POLIWHIRL for JYNX?"
# if wrong pokemon
crl.npc084A = "Hmm? This isn’t POLIWHIRL. Think of me when you get one."
crl.npc084B = "Okay, connect the cables like so!" # if yes
crl.npc084C = "The POLIWHIRL you traded to me went and evolved!" # after trade
crl.npc084D = "Well, if you don’t want to..." # no
crl.npc085 = (
"My husband likes trading POKEMON. If you are a collector, would "
"you please trade with him?"
)
# poke school 2
crl.npc086 = (
"POKEMON BADGES are owned only by skilled trainers. I see you "
"have at least one. Those BADGEs have amazing secrets! Now "
"then... Which of the 8 BADGEs should I describe?"
)
crl.npc086A = (
"The BOULDERBADGE! The ATTACK of all POKEMON increases a little "
"bit. It also lets you use FLASH any time you desire."
)
crl.npc086B = (
"The CASCADEBADGE! POKEMON up to L30 will obey you. Any higher, "
"they become unruly! It also lets you use CUT outside of battle."
)
crl.npc086C = (
"The THUNDERBADGE! The SPEED of all POKEMON increases a little "
"bit. It also lets you use FLY outside of battle."
)
crl.npc086D = (
"The RAINBOWBADGE! POKEMON up to L50 will obey you. Any higher, "
"they become unruly! It also lets you use STRENGTH out-side of "
"battle."
)
# --- Cerulean City: remaining badge lectures, Gym, and robbed house ---
crl.npc086E = (
    "The SOULBADGE! The DEFENSE of all POKEMON increases a little "
    "bit. It also lets you use SURF outside of battle."
)
crl.npc086F = (
    "The MARSHBADGE! POKEMON up to L70 will obey you. Any higher, "
    "they become unruly!"
)
crl.npc086G = "The VOLCANOBADGE! Your POKEMON's SPECIAL abilities increase a bit."
crl.npc086H = "The EARTHBADGE! All POKEMON will obey you!"
crl.npc086I = "Come visit me any time you wish." # cancel
# Cerulean Gym
crl.sign021 = "CERULEAN CITY POKEMON GYM - LEADER: MISTY - The Tomboyish Mermaid!"
crl.npc087 = (
    "Yo! Champ in the making! Here’s my advice! The LEADER, MISTY, "
    "is a pro who uses water POKEMON! You can drain all their water "
    "with plant POKEMON! Or, zap them with electricity!"
)
crl.npc087A = (
    "You beat MISTY! What’d I tell ya? You and me kid, we make a "
    "pretty darn good team!"
)
# robbed house - after you beat misty, that annoying copper moves
crl.npc088 = "TEAM ROCKET must be trying to DIG their way into no good!"
crl.npc089 = (
    "Those miserable ROCKETs! Look what they did here! They stole "
    "a TM for teaching POKEMON how to DIG holes! That cost me a "
    "bundle, it did!"
)
crl.npc089A = (
    "I figure what’s lost is lost! I decided to teach DIGLETT how "
    "to DIG without a TM!"
) # after you beat the rocket outside
# --- Sea Cottage (Route 25): Bill's rescue sequence ---
# the door going north - nugget bridge and route 24!
sea_cottage = Dict()
sea = sea_cottage
sea.sign022 = "SEA COTTAGE - Bill lives here!"
sea.bill001 = (
    "Hiya! I'm a POKEMON... ...No I'm not! Call me BILL! I'm a true "
    "blue POKEMANIAC! Hey! What's with the skeptical look? I'm not "
    "joshing you, I screwed up an experiment and got combined with "
    "a POKEMON! So, how about it? Help me out of here!"
)
sea.bill001A = (
    "When I’m in the TELEPORTER, go to my PC and run the Cell Separation System!"
) # yes
sea.bill001B = (
    "No!? Come on, you gotta help a guy in deep trouble! What do "
    "you say, chief? Please? OK? All right! ...When I’m in the "
    "TELEPORTER, go to my PC and run the Cell Separation System!"
)
sea.notification004 = "$PLAYER initiated the TELEPORTER's Cell Separator!"
# bill emerges fine for some reason
sea.bill002 = (
    "Yeehah! Thanks, bud! I owe you one! So, did you come to see my "
    "POKEMON collection? You didn't? That's a bummer. I've got to "
    "thank you... Oh here, maybe this'll do."
)
sea.notification005 = "$PLAYER received an S. S. TICKET!"
# Bill's follow-up after handing over the S.S. TICKET.
# Fixed grammar: "It's passengers" -> "Its passengers" (possessive, not
# the contraction of "it is").
sea.bill003 = (
    "That cruise ship, S.S.ANNE, is in VERMILION CITY. Its "
    "passengers are all trainers! They invited me to their party, "
    "but I can't stand fancy do's. Why don't you go instead of me?"
)
# --- Sea Cottage: post-Elite-Four dialogue ---
# after leaving and returning:
# appears after beating elite4
sea.bill004 = (
    "The storage system you're using is one that I co-developed "
    "with LANETTE! She's great to work with!"
)
sea.lanette001 = (
    "I'm in town visiting BILL! Do you like our storage system? "
    "It took a long, long time to build!"
)
# --- Route 5: Daycare, gate, and Underground Path entrance ---
route_5 = Dict()
rt5 = route_5
# building 1, Daycare!
rt5.npc090 = "I run a DAYCARE. Would you like me to raise one of your POKEMON?"
rt5.npc090A = "Which POKEMON should I raise?" # yes
rt5.npc090B = "All right then, come again." # cancel
rt5.npc090C = "Fine, I’ll look after $SLOT1 for a while. Come see me in a while."
rt5.npc090D = "Come again." # no
# after coming back
rt5.npc090E = "Back already? Your POKEMON needs some more time with me."
rt5.npc090F = (
    "Your POKEMON has grown a lot! by level, it’s grown by $LEVEL! Aren’t I great?"
)
rt5.npc090G = "You owe me P$AMOUNT for the return of this POKEMON."
rt5.npc090H = "Thank you! Here’s your POKEMON!" # accept to pay
rt5.npc090I = "All right, then." # refuse to pay
# building 2, gate
rt5.guard001 = (
    "I'm on guard duty. Gee, I'm thirsty, though! Oh wait there, the road's closed."
)
# building 3, entrance to the Underground Path
rt5.sign023 = "UNDERGROUND PATH: CERULEAN CITY - VERMILION CITY"
rt5.npc091 = "Hi! Do you have NIDORAN(Male)? Want to trade it for NIDORAN(Female)?"
# wrong pokemon
rt5.npc091A = "...This is no NIDORAN(Male). If you get one, trade it with me!"
rt5.npc091B = "Okay, connect the cable like so! Thanks, pal!"
# after trading
rt5.npc091C = "How is my old NIDORAN(Female)? My NIDORAN(Male) is doing great!"
rt5.npc091D = "That’s too bad." # no
rt5.npc092 = "People often lost things in that UNDERGROUND PATH."
# --- Vermilion City: outdoor signs, Center, Mart, Fan Club, houses ---
vermilion_city = Dict()
vrm = vermilion_city
vrm.sign024 = "VERMILION CITY - The Port of Exquisite Sunsets"
# outside
vrm.sign025 = (
    "NOTICE! ROUTE 12 may be blocked off by a sleeping POKEMON. "
    "Detour through ROCK TUNNEL to LAVENDER TOWN. -POLICE"
)
vrm.sign026 = "POKEMON FAN CLUB - All POKEMON fans welcome!"
vrm.npc093 = "Did you see the S.S.ANNE moored in the harbor?"
vrm.npc094 = (
    "We’re careful about pollution! We’ve heard GRIMER multiplies in toxic sludge!"
)
vrm.npc095 = (
    "I’m putting up a building on this plot of land. My POKEMON is tamping the land."
)
vrm.npc096 = "Guoh! Gogogoh! ...A MACHOP is stomping the land flat." # machop
# poke center
vrm.npc097 = (
    "It is true that a higher level POKEMON will be more powerful... "
    "But, all POKEMON will have weak points against specific types. "
    "So, there is no universally strong POKEMON."
)
vrm.npc098 = "My POKEMON was poisoned! It fainted while we were walking!"
vrm.npc099 = (
    "Even if they are the same level, POKEMON can have very "
    "different abilities. A POKEMON raised by a Trainer is stronger "
    "than one in the wild."
)
# poke mart
# TEAM ROCKET CARES NOT FOR PEP-8
vrm.npc100 = (
    "There are evil people who will use POKEMON for criminal acts. TEAM ROCKET "
    "traffics in rare POKEMON. They also abandon POKEMON that they "
    " consider not to be popular or useful."
)
vrm.npc101 = "I think POKEMON can be good or evil. It depends on the Trainer."
# house
vrm.npc102 = "I’m the FISHING GURU! I simply loooove fishing! Do you like to fish?"
vrm.npc102A = "Grand! I like your style! Take this and fish, young one!" # yes
vrm.npc102B = (
    "Fishing is a way of life! From the seas to rivers, go out and "
    "land the big one, young one!"
)
# later conversation
vrm.npc102C = "Hello there, $PLAYER! How are the fish biting?"
vrm.npc102D = "Oh... That’s so disappointing..." # no
# house 2
vrm.npc103 = "Hi! Do you have a SPEAROW? Want to trade it for FARFETCH’D?"
# wrong poke
vrm.npc103A = "...This is no SPEAROW. If you get one, trade it with me!"
vrm.npc103B = "Okay, connect the cable like so!" # right poke
vrm.npc103C = "Thanks, pal!" # immediately after trade
# later conversation
vrm.npc103D = "How is my old FARFETCH’D? My SPEAROW is doing great!"
vrm.npc103E = "That’s too bad." # no
# pokemon fan club
vrm.npc104 = (
    "I chair the POKEMON Fan Club! I have collected over 100 POKEMON!"
    " I’m very fussy when it comes to POKEMON! So... Did you come "
    "visit to hear about my POKEMON?"
)
vrm.npc104A = (
    "Good! Then listen up! My favorite RAPIDASH... it... cute... "
    "lovely... smart... plus... amazing... you think so? ...oh "
    "yes... it... stunning... kindly... love it! Hug it... when... "
    "sleeping... warm and cuddly... spectacular... ravishing... "
    "...Oops! Look at the time! I kept you too long! Thanks for "
    "hearing me out! I want you to have this!"
)
vrm.npc104B = (
    "Exchange that for a BICYCLE! Don’t worry, my FEAROW will FLY "
    "me anywhere! So, I don’t need a BICYCLE! I hope you like "
    "cycling!"
)
vrm.npc104C = (
    "Hello, $PLAYER! Did you come see me about my POKEMON again? No? Too bad!"
) # after
vrm.npc104D = "Oh. Come back when you want to hear my story!" # no
vrm.npc105 = "Our Chairman is very vocal about POKEMON."
vrm.npc106 = "Kyuoo!" # seel
vrm.npc107 = "I just love my SEEL! It squeals when I hug it!"
vrm.npc108 = "Humph! My PIKACHU is twice as cute as that one!"
vrm.npc109 = "Chu! Pikachu!" # really, do I need to explain this one?
# house 3
vrm.npc110 = "I’m getting my PIDGEY to fly a letter to SAFFRON in the north!"
vrm.npc111 = "Kurukkoo!" # pidgey
# Vermilion Gym
# Fixed typo in the gym sign: "Lighning" -> "Lightning", matching npc144's
# "the Lightning American" below.
vrm.sign027 = (
    "VERMILION CITY POKEMON GYM - LEADER: LT.SURGE - The Lightning American!"
)
vrm.npc144 = (
    "Yo! Champ in the making! LT.SURGE has a nickname. People refer "
    "to him as the Lightning American! He’s an expert on electric "
    "POKEMON! Birds and water POKEMON are at risk! Beware of "
    "paralysis too! LT.SURGE is very cautious! You’ll have to break "
    "a code to get to him!"
)
vrm.npc144A = "Whew! That match was electric!"
# --- S.S. Anne: dock, decks, kitchen, cabins, and Captain's quarters ---
ss_anne = Dict()
ssa = ss_anne
ssa.npc112 = (
    "S.S.ANNE is a famous luxury cruise ship. We visit VERMILION once a year."
)
ssa.npc113 = "Welcome to S.S.ANNE! Excuse me, do you have a ticket?"
ssa.npc113A = "Sorry! You need a ticket to get aboard." # do not have ticket
ssa.npc113B = "Great! Welcome to S.S.ANNE!" # have ticket
# floor 1, hallway
ssa.npc114 = (
    "The passengers are restless! You might be challenged by the more bored ones!"
)
ssa.npc115 = (
    "Bonjour! I am le waiter on this ship! I will be happy to serve "
    "you anything you please! ...Ah, le strong silent type!"
)
# kitchen
ssa.npc116 = "You, mon petit! We’re busy here! Out of the way!"
ssa.npc117 = "I saw an odd ball in the trash."
ssa.npc118 = "I’m so busy I’m getting dizzy!"
ssa.npc119 = (
    "Er-hem! Indeed, I am le CHEF! Le main course is Eels au "
    "Barbecue! Les guests will mutiny, I fear."
)
ssa.npc120 = "Snivel... Sniff... I only get to peel onions... Snivel..."
ssa.npc121 = "Did you hear about SNORLAX? All it does is eat and sleep!"
ssa.npc122 = "Hum-de-hum-de-ho... I peel spuds every day! Hum-hum..."
# floor 1 passenger quarters
ssa.npc123 = "Waiter, I would like a cherry pie, please!"
ssa.npc124 = "A cruise is so elegant, yet cozy!"
ssa.npc125 = "Puup pupuu!" # wigglytuff
ssa.npc126 = "I always travel with WIGGLYTUFF!"
ssa.npc127 = "We are cruising around the world."
ssa.npc128 = "Ssh! I’m a GLOBAL POLICE agent! I’m on the trail of TEAM ROCKET!"
# floor 2
ssa.npc129 = (
    "My buddy, MACHOKE, is super strong! He has enough STRENGTH to move big rocks!"
)
ssa.npc130 = "Gwo! Goggoh!" # machoke
# floor 3
ssa.npc131 = (
    "This ship, she is a luxury liner for trainers! At every port, "
    "we hold parties with invited trainers!"
)
ssa.npc132 = (
    "In all my travels, I’ve never seen any POKEMON sleep like this "
    "one! I think it was called... Snorlay?"
)
ssa.npc133 = "Ah yes, I have seen some POKEMON ferry people across the water!"
ssa.npc134 = "POKEMON can CUT down small bushes."
ssa.npc135 = (
    "Have you gone to the SAFARI ZONE in FUCHSIA CITY? It had many rare kinds of POKEMON!"
)
ssa.npc136 = "Me and my daddy think the SAFARI ZONE is awesome!"
ssa.npc137 = "The CAPTAIN looked really sick and pale!"
ssa.npc138 = "I hear many people get seasick!"
# deck
ssa.npc139 = "Our CAPTAIN is a sword master! He even teaches CUT to POKEMON!"
ssa.npc140 = "Urf. I feel ill. I stepped out to get some air."
ssa.npc141 = "Scrubbing decks is hard work!"
ssa.npc142 = "The party’s over. The ship will be departing soon."
# Captain's quarters
ssa.npc143 = "Ooargh... I feel hideous... Urrp! Seasick..."
ssa.notification006 = "$PLAYER rubbed the CAPTAIN's back! Rub-rub... Rub-rub..."
ssa.npc143A = (
    "Whew! Thank you! I feel much better! You want to see my CUT "
    "technique? I could show you if I wasn’t ill... I know! You can "
    "have this! Teach it to your POKEMON and you can see it CUT any "
    "time!"
)
ssa.npc143B = "Whew! Now that I’m not sick any more, I guess it’s time."
ssa.notification007 = "The ship set sail."
# --- Diglett's Cave and Route 2 (trade house, Oak's aide gate) ---
digletts_cave = Dict()
dig = digletts_cave
dig.npc145 = (
    "What a surprise! DIGLETTs dug this long tunnel! It goes right to VIRIDIAN CITY!"
)
dig.npc146 = (
    "I went to ROCK TUNNEL, but it’s dark and scary. If a POKEMON’s "
    "FLASH could light it up..."
)
# ROUTE 2
dig.npc147 = "A fainted POKEMON can’t fight. But, it can still use moves like CUT!"
dig.npc148 = "I’m looking for an ABRA! Wanna trade one for MR.MIME?"
dig.npc148A = "What? That’s not an ABRA! If you get one, come back here!"
dig.npc148B = "Okay, connect the cable like so!"
dig.npc148C = "Hey, thanks!"
dig.npc148D = "Isn’t my old MR.MIME great?"
dig.npc148E = "Awww! Oh well..."
# guard house
dig.npc149 = "Once a POKEMON learns FLASH, you can get through ROCK TUNNEL."
dig.npc150 = (
    "Hi! Remember me? I’m PROF.OAK’s AIDE! If you caught 10 kinds of "
    "POKEMON, I’m supposed to give you an HM05! So, $PLAYER! Have "
    "you caught at least 10 kinds of POKEMON?"
)
dig.npc150A = (
    "Great! You have caught $CAUGHTNUMBER kinds of POKEMON! "
    "Congratulations! Here you go!"
)
dig.npc150B = "The HM FLASH lights up even the darkest dungeons."
dig.npc150C = "Oh. I see. When you get 10 kinds, come back for HM05."
# --- Route 10 (outside Rock Tunnel) ---
route_10 = Dict()
route_10.npc151 = "I sold a useless NUGGET for P5000!"
route_10.npc152 = "I heard that GHOSTS haunt LAVENDER TOWN!"
route_10.npc153 = (
    "The element types of POKEMON make them stronger than some "
    "types and weaker than others!"
)
route_10.zapdos001 = "Gyaoo!"
# --- Lavender Town: town, Name Rater, and Pokemon Tower (pre-Silph Scope) ---
lavender_town = Dict()
lav = lavender_town
# outside
lav.sign028 = "New SILPH SCOPE! Make the Invisible Plain to SEE! - SILPH CO."
lav.sign029 = "May the Souls of POKEMON Rest Easy - POKEMON TOWER"
lav.npc154 = (
    "GHOSTs appeared in POKEMON TOWER. I think they’re the spirits "
    "of POKEMON that the ROCKETs killed."
)
lav.npc155 = (
    "This town is known as the grave site of POKEMON. Memorial "
    "services are held in POKEMON TOWER."
)
lav.npc156 = "Do you believe in GHOSTs?"
lav.npc156A = "Really? So there are believers..." # yes
lav.npc156B = "Hahaha, I guess not. That white hand on your shoulder, it’s not real."
# Pokemon Center
lav.npc157 = (
    "I’m searching for items that raise the abilities of POKEMON "
    "during a single battle. X ATTACK, X DEFEND, X SPEED, and X "
    "SPECIAL are what I’m after. Do you know where I can get them?"
)
lav.npc158 = "You know REVIVE? It revives any fainted POKEMON!"
# Lavender Volunteer Pokemon House
lav.npc159 = "That’s odd, MR.FUJI isn’t here. Where’d he go?"
lav.npc160 = (
    "This is really MR.FUJI’s house. He’s really kind! He looks "
    "after abandoned and orphaned POKEMON!"
)
lav.npc161 = "Gwappa!" # psyduck
lav.npc162 = "Gaoo!" # nidorino
# house
lav.npc163 = (
    "I hate those horrible ROCKETs! That poor CUBONE’s mother... It "
    "was killed trying to escape from TEAM ROCKET!"
)
lav.npc164 = "Kyarugoo!" # cubone
# IT'S TIME FOR THE NAME RATER
lav.npc165 = (
    "Hello, hello! I’m the official NAME RATER! Want me to rate the "
    "nicknames of your POKEMON?"
)
lav.npc165A = "Which POKEMON should I look at?"
lav.npc165B = (
    "$NAME, is it? That is a truly impeccable name! Take good care of $NAME!"
)
lav.npc165C = (
    "$NAME, is it? That is a decent nickname! But, would you like "
    "me to give it a nicer name? How about it?"
)
lav.npc165D = "Fine, what should we name it?"
lav.npc165E = (
    "OK! This POKEMON has been renamed $NAME! That’s a better name than before!"
)
lav.npc165F = "Fine! Come any time you like!"
# Pokemon Tower
lav.npc166 = "POKEMON TOWER was erected in the memory of POKEMON that had died."
lav.npc167 = "I came to pray for my CLEFAIRY. Sniff! I can’t stop crying..."
lav.npc168 = "Did you come to pay respects? Bless you!"
lav.npc169 = "My GROWLITHE... Why did you die?"
lav.npc170 = "I am a CHANNELER! There are spirits up to mischief!"
# floor 2
lav.npc171 = (
    "Even we could not identify the wayward GHOSTs! A SILPH SCOPE "
    "might be able to unmask them."
)
# floor 3
# on battle open
lav.notification008 = "GHOST appeared! Darn! The GHOST can't be ID'd!"
# when trying to do anything
lav.notification009 = "$CURRENTPOKEMON is too scared to move!"
lav.notification010 = "Get out... Get out...." # ghost
# floor 5
lav.npc172 = (
    "Come, child! I have sealed this space with white magic! You can rest here!"
)
lav.notification011 = (
    "Entered purified, protected zone! $PLAYER's POKEMON are fully healed!"
)
# floor 6
lav.npc173 = "Begone... Intruders..." # unpassable ghost without silph scope
# *******************************************
# Route 8
# *******************************************
# for the guard, re-use guard001
# other than that, there is no dialogue for this area
# --- Underground Path (Celadon <-> Lavender) ---
underground_path = Dict()
und = underground_path
und.sign030 = "UNDERGROUND PATH - CELADON CITY <-> LAVENDER TOWN"
und.npc174 = "The dept. store in CELADON has a great selection!"
und.npc175 = "I heard a sleepy POKEMON appeared near CELADON CITY."
# --- Celadon City: namespace and outdoor trainer-tip signs ---
celadon_city = Dict()
cel = celadon_city
# outside
cel.sign031 = "CELADON CITY - The City of Rainbow Dreams"
cel.sign032 = (
    "TRAINER TIPS - X ACCURACY boosts the accuracy of techniques! "
    "DIRE HIT jacks up the likelihood of critical hits! Get your "
    "items at CELADON DEPT. STORE!"
)
cel.sign033 = (
    "TRAINER TIPS - GUARD SPEC. protects POKEMON against SPECIAL "
    "attacks such as fire and water! Get your items at CELADON "
    "DEPT. STORE!"
)
cel.sign034 = "ROCKET GAME CORNER - The playground for grown-ups!"
# Fixed typo: "prices" -> "prizes", matching "PRIZE EXCHANGE" in the same
# sign and npc222's "exchange your coins for fabulous prizes".
cel.sign035 = "Coins exchanged for prizes! PRIZE EXCHANGE"
# --- Celadon City: townsfolk, Center, restaurant, hotel, and mansion ---
cel.npc176 = "Keep out of TEAM ROCKET’s way!" # rocket
cel.npc177 = (
    "This is my trusted pal, POLIWRATH! It evolved from POLIWHIRL "
    "when I used WATER STONE!"
)
cel.npc178 = "Ribi ribit!" # poliwrath
cel.npc179 = (
    "I got my KOFFING in CINNABAR! It’s nice, but it breathes poison "
    "when it’s angry!"
)
cel.npc180 = "The GAME CORNER is bad for our city’s image!"
cel.npc181 = (
    "Moan! I blew it all at the slots! I knew I should have cashed "
    "in my coins for prizes!"
)
cel.npc182 = "What are you staring at?" # rocket
cel.npc183 = (
    "Hello, there! I’ve seen you, but I never had a chance to talk! "
    "Here’s a gift for dropping by!"
) # dude behind the pond
cel.npc183A = (
    "TM41 teaches SOFTBOILED! Only one POKEMON can use it! That POKEMON is CHANSEY!"
) # dick
# pokemon center
cel.npc184 = "If I had a BIKE, I would go to CYCLING ROAD!"
cel.npc185 = "POKE FLUTE awakens POKEMON with a sound that only they can hear!"
cel.npc186 = "I rode uphill on CYCLING ROAD from FUCHSIA!"
# restaurant
cel.npc187 = "Hi! We’re taking a break now."
cel.npc188 = "My POKEMON are weak, so I often have to go to the DRUG STORE."
cel.npc189 = "Munch... The man at that table lost it all at the slots."
cel.npc190 = "Psst! There’s a basement under the GAME CORNER."
cel.npc191 = (
    "Go ahead! Laugh! I’m flat out busted! No more slots for me! I’m "
    "going straight! Here! I won’t be needing this anymore!"
)
cel.npc191A = "I always thought I was going to win it back..."
# house
cel.npc192 = (
    "Don’t touch the poster at the GAME CORNER! There’s no secret switch behind it!"
) # dumbass
cel.npc193 = "CHIEF! We just shipped 2000 POKEMON as slot prizes!"
cel.npc194 = "Hehehe! The slots just reel in the dough, big time!"
# hotel
cel.npc195 = "POKEMON? No, this is a hotel for people. We’re full up."
cel.npc196 = (
    "I’m on vacation with my brother and boyfriend. CELADON is such a pretty city!"
)
cel.npc197 = "My sis brought me on this vacation!"
cel.npc198 = "Why did she bring her brother?" # cockblocked
# prize exchange
cel.npc199 = "I had a major haul today!"
cel.npc200 = "I sure do fancy that PORYGON! But, it’s hard to win at slots!"
# celadon mansion
cel.sign036 = "CELADON MANSION - Manager's Suite"
cel.npc201 = "My dear POKEMON keep me company. MEOWTH even brings money home!"
cel.npc202 = "Pi pippippi!" # clefairy
cel.npc203 = "Kya kyaoo!" # nidoran
cel.npc204 = "Meow!" # meowth
# second floor
cel.notification012 = (
    "The plaque is old and faded. You can just make out "
    "'Meeting Room' on the dull gold plate."
)
# third floor! PLAYFUL JABS FOR EVERYBODY
cel.notification013 = (
    "It's an old logo for a game studio. 'GAME FREAK' is "
    "crossed out, and 'JOKER' is sharpie'd in above it."
)
cel.dev001 = "I'm Sam, a coder. If you find any bugs, tell Joe they're his problem."
# Fixed typo: "was a bug-free as possible" -> "was as bug-free as possible".
cel.dev002 = "I'm Jake! I helped make sure everything was as bug-free as possible!"
# --- Celadon Mansion devs (continued) and Dept. Store lower floors ---
# Teddy's dialogue is found in the Special section
# dev003 = "When your Pokemon's health is low in battle, do you heal or "
# press the attack? Gotta risk it for the biscuit!" # teddy
cel.dev004 = "Me? I'm Joe, the Joker himself! I programmed you!"
cel.dev005 = (
    "Hey, is that right? I'm the original designer of this thing! "
    "Who'd have thought it would turn out like it did? Crazy, huh? "
    "Anyway, we both know that filling up your POKEDEX is tough, but "
    "don't quit! When you finish, come tell me!"
)
cel.dev005A = (
    "Wow! Excellent! I... I'm blown away! You completed your "
    "POKEDEX! Congratulations!"
)
# insert diploma here
# fourth floor, accessed via rear entrance
cel.sign037 = "I KNOW EVERYTHING!"
cel.npc205 = (
    "I know everything about the world of POKEMON! Remember, "
    "trading is key to completing your POKEDEX!"
)
# Celadon Dept. Store
# first floor
cel.sign038 = (
    "1F: SERVICE COUNTER - 2F: TRAINER'S MARKET - 3F: TV GAME SHOP "
    "- 4F: WISEMAN GIFTS - 5F: DRUG STORE - ROOFTOP SQUARE: VENDING "
    "MACHINES"
)
cel.npc206 = (
    "Hello! Welcome to the CELADON DEPT. STORE. The board on the "
    "right describes the store layout."
)
# Lottery Corner, open after beating the Elite 4
cel.npc211 = (
    "Hi! This is POKEMON LOTTERY CORNER! I'm not open yet, but the "
    "rest of the store is! Please come back later!"
)
cel.npc211A = (
    "This is the POKEMON LOTTERY CORNER! As part of a special "
    "promotion with LILYCOVE DEPT. STORE, all shoppers to the "
    "CELADON DEPT. STORE get to draw a POKEMON LOTTO TICKET. If the "
    "LOTTO TICKET’s number matches the ID number of any of your "
    "POKEMON, you will receive a fabulous gift. Would you like to "
    "draw a POKEMON LOTTO TICKET?"
)
cel.npc211B = "Please do visit us again." # decide not to draw
cel.npc211C = (
    "Congratulations! The ID number of your PC-boxed "
    "$LOTTO_NICKNAME matches your LOTTO TICKET’s number!"
)
cel.npc211D = (
    "Congratulations! The ID number of your team’s $LOTTO_NICKNAME "
    "matches your LOTTO TICKET’s number!"
)
cel.npc211E = (
    " Two digits matched, so you win the third prize! You’ve won "
    "the PP UP! ...Please do visit again!"
)
cel.npc211F = (
    " Three digits matched, so you win the second prize! You’ve won "
    "the EXP. SHARE! ...Please do visit again!"
)
cel.npc211G = (
    " Four digits matched, so you win the first prize! You’ve won "
    "the MAX REVIVE! ...Please do visit again!"
)
cel.npc211H = (
    " Oh my goodness! All five digits matched! You’ve won the "
    "jackpot prize! You’ve won the MASTER BALL! ...Please do visit "
    "again!"
)
cel.npc211I = (
    "Please come back later - you’ll need to wait a while before "
    "drawing another ticket!"
) # wait 5 min in real time
# putting spaces in this breaks for some reason, I have no idea
cel.npc211J = "n.........\n"
cel.npc211K = (
    "I'm sorry, none of your POKEMON's ID numbers matched. Try again later!"
)
# --- Celadon Dept. Store: floors 2-5 and rooftop drink-girl sequence ---
# second floor
cel.sign039 = "Top Grade Items for Trainers! 2F: TRAINER'S MARKET"
cel.npc207 = "For long outings, you should buy REVIVE."
cel.npc208 = (
    "SUPER REPEL keeps weak POKEMON at bay... Hmm, it’s a more powerful REPEL!"
)
# third floor
cel.npc209 = (
    "Oh, hi! I finally finished POKEMON! Not done yet? This might be useful!"
) # gives TM18
cel.npc209A = "TM18 is COUNTER! Not like the one I’m leaning on, mind you!"
cel.npc210 = (
    "Captured POKEMON are registered with an ID No. and OT, the name "
    "of the Original Trainer that caught it!"
)
cel.npc212 = "You can identify POKEMON you got in trades by their ID numbers!"
cel.npc213 = (
    "All right! My buddy’s going to trade me his KANGASKHAN for my GRAVELER!"
)
cel.npc214 = (
    "Come on GRAVELER! I love GRAVELER! I collect them! ...Huh? "
    "GRAVELER turned into a different POKEMON!"
)
# fourth floor
cel.sign040 = (
    "Express yourself with gifts! 4F: WISEMAN GIFTS - Evolution "
    "Special! Element STONEs on sale now!"
)
cel.npc215 = "I’m getting a POKE DOLL for my girlfriend!"
cel.npc216 = (
    "I heard something useful. You can run from wild POKEMON by "
    "distracting them with a POKE DOLL!"
)
# fifth floor
cel.npc217 = (
    "POKEMON ability enhancers can only be bought here. Use CALCIUM "
    "to increase SPECIAL abilities. Use CARBOS to increase SPEED."
)
cel.npc218 = (
    "I’m here for POKEMON ability enhancers. PROTEIN increases "
    "ATTACK power. IRON increases DEFENSE!"
)
# roof
cel.npc219 = (
    "My sister is a Trainer, believe it or not. But she’s so "
    "immature, she drives me nuts!"
)
# tracks which drinks the girl has been given; one TM reward per drink
cel.npc220_drinks_had = []
cel.npc220 = "I’m thirsty I want something to drink!"
cel.notification014 = "Give her a drink?" # yes / no
cel.notification015 = "Give her which drink?"
cel.npc220A = "Yay! $DRINK! Thank you! You can have this from me!"
# give her water
cel.npc220B = "TM13 contains ICE BEAM! It can freeze the target sometimes!"
# give her soda pop
cel.npc220C = "TM48 contains ROCK SLIDE! It can spook the target sometimes!"
cel.npc220D = "TM49 contains TRI ATTACK!" # give her lemonade
# if you try and give her something she's already had
cel.npc220E = "No thank you! I’m not thirsty after all!"
# if len(npc220_drinks_had+1) == 3
cel.npc220F = "Thanks! I’m feeling much better!"
# --- Rocket Game Corner: coin vendor, patrons, and coin handouts ---
rocket_game_corner = Dict()
rgc = rocket_game_corner
rgc.npc221 = (
    "Welcome to ROCKET GAME CORNER! Do you need some game coins? "
    "It’s P1000 for 50 coins. Would you like some?"
)
rgc.npc221A = "Thanks! Here are your 50 coins!"
# if you have more than 9,999 coins
rgc.npc221B = "Oops! Your COIN CASE is full!"
rgc.npc221C = "No? Please come play sometime!"
rgc.npc222 = "Welcome! You can exchange your coins for fabulous prizes next door."
# gives 20 coins
rgc.npc223 = "Hey, what? You’re throwing me off! Here are some coins, shoo!"
rgc.npc223A = "The trick is to watch the reels closely!"
rgc.npc224 = "What’s up? Want some coins?" # gives 20 coins
rgc.npc224A = "Darn! I need more coins for the POKEMON I want!"
rgc.npc225 = "Games are scary! It’s so easy to get hooked!"
rgc.npc226 = (
    "Hey! You have better things to do, champ in the making! CELADON "
    "GYM’s LEADER is ERIKA! She uses grass-type POKEMON! She might "
    "appear docile, but don’t be fooled!"
)
rgc.npc227 = "I’m having a wonderful time!"
rgc.npc228 = "I think these machines have different odds."
rgc.npc229 = "Kid, do you want to play?" # gives 10 coins
rgc.npc229A = "Wins seem to come and go."
rgc.npc230 = "Keep this quiet. It’s rumored that this place is run by TEAM ROCKET."
rgc.npc231 = (
    "They offer rare POKEMON that can be exchanged for your coins. "
    "But, I just can’t seem to win!"
)
# --- Celadon Gym, and Lavender Town revisited (Silph Scope sequence) ---
celadon_gym = Dict()
clg = celadon_gym
clg.npc232 = "Heheh! This GYM is great! It’s full of women!"
clg.sign041 = "CELADON CITY GYM - LEADER: ERIKA - The Nature Loving Princess!"
lavender_town_2 = Dict()
lt2 = lavender_town_2
# Back to Marowak, the friendly ghost at the top of the stairs
lt2.npc233 = "Be gone... Intruders..."
lt2.notification016 = (
    "SILPH SCOPE unveiled the GHOST's identity! ...Wild MAROWAK appeared!"
)
lt2.notification017 = "It dodged the thrown BALL! This POKEMON can't be caught!"
lt2.notification018 = (
    "The GHOST was the restless soul of CUBONE's mother! "
    "The mother's soul was calmed. It departed to the "
    "afterlife!"
)
# on the top floor
# Mr.Fuji
lt2.npc234 = (
    "Heh? You came to save me? Thank you. But, I came here of my own "
    "free will. I came to calm the soul of CUBONE’s mother. I think "
    "MAROWAK’s spirit has gone to the afterlife. I must thank you "
    "for your kind concern! Follow me to my home, POKEMON HOUSE, "
    "at the foot of this tower."
)
# lavender town pokemon house
# mr.fuji again
lt2.npc235 = (
    "$PLAYER. Your POKEDEX quest may fail without love for your "
    "POKEMON. I think this may help in your quest."
)
lt2.npc235A = (
    "Upon hearing the POKE FLUTE, sleeping POKEMON will spring "
    "awake. It works on all sleeping POKEMON."
)
lt2.npc235B = "Has my FLUTE helped you?"
lt2.npc236 = "MR.FUJI had been praying alone for CUBONE’s mother."
lt2.npc237 = "It’s so warm! POKEMON are nice to hug!"
# other house
lt2.npc238 = (
    "The GHOST of POKEMON TOWER is gone! Someone must have soothed "
    "its restless soul!"
)
# --- Saffron City: gate guards and occupied-city dialogue ---
saffron_city = Dict()
saf = saffron_city
saf.sign042 = "SAFFRON CITY - Shining, Golden Land of Commerce"
# route 7
saf.guard002 = (
    "Whoa, boy! I'm parched! ... Huh? I can have this drink? Gee, "
    "thanks! ... Glug glug... ...Gulp... If you want to go to "
    "SAFFRON CITY... ...you can go on through. I'll share this "
    "with the other guards!"
) # I'm not sure that's hygienic
saf.guard003 = "Hi, thanks for the cool drinks!"
# saffron city
# outside
saf.sign043 = "SILPH's latest product! Release to be determined..."
saf.sign044 = (
    "TRAINER TIPS - New GREAT BALL offers improved capture rates! "
    "Try it on those hard-to-catch POKEMON!"
)
saf.sign045 = (
    "TRAINER TIPS - FULL HEAL cures all ailments like sleep and "
    "burns. It costs a bit more, but it's convenient."
)
# all these guys are ROCKETs
saf.npc239 = "What do you want? Get lost!"
saf.npc240 = "BOSS said he’ll take this town!"
saf.npc241 = "Get out of the way!"
saf.npc242 = "SAFFRON belongs to TEAM ROCKET!"
saf.npc243 = "Ow! Watch where you’re walking!"
saf.npc244 = "Being evil makes me feel so alive!"
saf.npc245 = "With SILPH under control, we can exploit POKEMON around the world!"
# building 1
saf.npc246 = "It would be great if the ELITE FOUR came and stomped TEAM ROCKET!"
saf.npc247 = "POKEMON growth rates differ from species to species."
saf.npc248 = "SILPH CO. is very famous. That’s why it attracted TEAM ROCKET!"
# Mr.Psychic's house
saf.npc249 = "...Wait! Don’t say a word! You wanted this!"
# Fixed TM number: PSYCHIC is TM29; TM49 is TRI ATTACK (see cel.npc220D),
# so the old "TM49 is PSYCHIC" contradicted the Celadon rooftop dialogue.
saf.npc249A = "TM29 is PSYCHIC! It can lower the target’s SPECIAL abilities!"
# --- Silph Co. office building: floor-by-floor hostage dialogue ---
# Silph Co Office Building
# if you haven't met MR.FUJI
saf.npc250 = "I’m a security guard. Suspicious kids I don’t allow in!"
saf.npc250A = "...Snore..."
saf.notification019 = "Hah! He's taking a snooze!"
# floor 2 (the first floor is empty)
saf.npc251 = (
    "Eeek! No! Stop! Help! Oh, you’re not with TEAM ROCKET. I "
    "thought... I’m sorry. Here, please take this!"
)
saf.npc251A = (
    "TM36 is SELFDESTRUCT! It’s powerful, but the POKEMON that "
    "uses it faints! Be careful."
)
# floor 3
saf.npc252 = "I work for SILPH. What should I do?"
# floor 4
saf.npc253 = "Sssh! Can’t you see I’m hiding?"
# floor 5
saf.npc254 = "TEAM ROCKET is in an uproar over some intruder. That’s you, right?"
# floor 6
saf.npc255 = "Oh dear, oh dear. Help me please!"
saf.npc256 = "Look at him! He’s such a coward."
saf.npc257 = "They must have targeted SILPH for our POKEMON products."
saf.npc258 = "The ROCKETs came and took over the building!"
saf.npc259 = "TEAM ROCKET is trying to conquer the world with POKEMON!"
# floor 7
saf.npc260 = "You! It’s really dangerous here! You came to save me? You can’t!"
saf.npc261 = "It would be bad if TEAM ROCKET took over SILPH or our POKEMON!"
saf.npc262 = "TEAM ROCKET was after the MASTER BALL which will catch any POKEMON!"
# floor 8
saf.npc263 = "I wonder if SILPH is finished..."
# floor 9
saf.npc264 = "You look tired! You should take a quick nap!"
# floor 10
saf.npc265 = "Waaaaa! I’m scared!"
# floor 7, via teleportation
saf.npc266 = (
    "Oh! Hi! You’re not a ROCKET! You came to save us? Why, thank "
    "you! I want you to have this POKEMON for saving us."
)
saf.npc266A = (
    "It’s LAPRAS. It’s very intelligent. We kept it in our lab, but "
    "it will be much better off with you! I think you will be a "
    "good Trainer for LAPRAS! It’s a good swimmer. It’ll give you "
    "a lift!"
)
saf.npc266B = "TEAM ROCKET’s BOSS went to the boardroom! Is our PRESIDENT OK?"
# boardroom, after giovanni
# secretary
saf.npc267 = "Thank you for rescuing all of us! We admire your courage."
# el presidente, who I can't help but read all his lines as the Mayor from
# the PowerPuff Girls
saf.npc268 = (
    "Thank you for saving SILPH! I will never forget you saved us "
    "in our moment of peril! I have to thank you in some way! Here, "
    "maybe this will do!"
)
saf.npc268A = (
    "You can’t buy that anywhere! It’s our secret prototype MASTER "
    "BALL! It will catch any POKEMON without fail! You should be "
    "quiet about using it, though."
)
# Saffron City dialogue for after TEAM ROCKET has been driven out of Silph Co.
saffron_city_2 = Dict()
sa2 = saffron_city_2
# outside
sa2.npc269 = "I saw ROCKET BOSS escaping SILPH’s building."
sa2.npc270 = (
    "I flew here on my PIDGEOT when I read about SILPH. It’s already "
    "over? I missed the media action."
)
sa2.npc271 = "Bi bibii!" # pidgeot
sa2.npc272 = "Yeah! TEAM ROCKET is gone! It’s safe to go out again!"
sa2.npc273 = "People should be flocking back to SAFFRON now."
sa2.npc274 = "You beat TEAM ROCKET all alone? That’s amazing!"
# pokemon center
sa2.npc275 = "TEAM ROCKET took off! We can go out safely again! That’s great!"
sa2.npc276 = "POKEMON growth rates differ from species to species."
sa2.npc277 = "SILPH CO. is very famous. That’s why it attracted TEAM ROCKET!"
# pokemon mart
sa2.npc278 = "REVIVE is costly, but it revives fainted POKEMON!"
sa2.npc279 = (
    "MAX REPEL lasts longer than SUPER REPEL for keeping weaker POKEMON away!"
)
# house 1
sa2.npc280 = (
    "Thank you for writing. I hope to see you soon! ...Hey! Don’t look at my letter!"
)
sa2.npc281 = "The COPYCAT is cute! I’m getting her a POKE DOLL!"
sa2.npc282 = "Kurukkoo!" # pidgey
# COPYCAT's house
sa2.npc283 = "Chaan! Sii!" # seriously, guess
sa2.npc284 = (
    "My daughter likes to mimic people. Her mimicry has earned her "
    "the name COPYCAT around here!"
)
sa2.npc285 = "My daughter is so self-centered. She only has a few friends."
# COPYCAT!
sa2.npc286 = (
    "Hi! Do you like POKEMON? ...Uh no, I just asked you. ...Huh? You’re strange!"
)
sa2.npc286A = "Hmm? Quit mimicking? But, that’s my favorite hobby!"
# if you have a poke doll in inventory
sa2.npc286B = "Oh wow! A POKE DOLL! For me? Thank you! You can have this, then!"
sa2.npc286C = "TM31 contains my favorite, MIMIC! Use it on a good POKEMON!"
# after receiving it
sa2.npc286D = (
    "Hi! Thanks for TM31! ...Pardon? ...Is it that fun to mimic my every move?"
)
sa2.npc286E = "You bet! It’s a scream!"
sa2.notification020 = "This is a rare POKEMON! Huh? It's only a doll!"
# doduo
sa2.npc287 = "Giiih! MIRROR MIRROR ON THE WALL, WHO IS THE FAIREST ONE OF ALL?"
# Silph office building, first floor
# Receptionist
sa2.npc288 = "Welcome! The PRESIDENT is in the boardroom on 11F!"
# floor 2
sa2.npc289 = (
    "TM36 is SELFDESTRUCT! It’s powerful, but the POKEMON that uses "
    "it faints! Be careful."
)
# floor 3
sa2.npc290 = "$PLAYER! You and your POKEMON saved us!"
# floor 4
sa2.npc291 = "Huh? TEAM ROCKET is gone?"
# floor 5
sa2.npc292 = "TEAM ROCKET took off! You’re our hero! Thank you!"
# floor 6
sa2.npc293 = "We got engaged! Heheh!"
sa2.npc294 = "I feel so sorry for him, I have to marry him!"
sa2.npc295 = "Come work for SILPH when you get older!"
sa2.npc296 = "Well, better get back to work!"
sa2.npc297 = "TEAM ROCKET ran because of you!"
# floor 7
sa2.npc298 = "Safe at last! Oh thank you!"
sa2.npc299 = "Wow! You chased off TEAM ROCKET all by yourself?"
sa2.npc300 = (
    "We canceled the MASTER BALL PROJECT because of TEAM ROCKET. We "
    "might start it up in the future, though..."
)
# floor 8
sa2.npc301 = "Thanks for saving us!"
# floor 9
sa2.npc302 = "Thank you so much!"
# floor 10
sa2.npc303 = "Please keep quiet about my crying!"
# floor 11
sa2.npc304 = "Saved at last! Thank you!"
# SABRINA'S GYM
sa2.sign046 = (
    "SAFFRON CITY POKEMON GYM - LEADER: SABRINA - The Master of Psychic POKEMON!"
)
# gym guide at the door
sa2.npc305 = (
    "Yo! Champ in the making! SABRINA’s POKEMON use psychic power "
    "instead of force! Fighting POKEMON are weak against psychic "
    "POKEMON! They get creamed before they can even aim a punch!"
)
sa2.npc305A = "Psychic power, huh? If I had that, I’d make a bundle at the slots!"
# Route 11 dialogue (gate east of Vermilion; includes the ITEMFINDER aide
# and an in-game trade NPC).
route_11 = Dict()
r11 = route_11
# east of vermillion
r11.npc306 = (
    "When you catch lots of POKEMON, isn’t it hard to think up "
    "names? In LAVENDER TOWN, there’s a man who rates POKEMON "
    "nicknames. He’ll help you rename them, too!"
)
r11.npc307 = (
    "Hi! Remember me? I’m PROF.OAK’s AIDE! If you caught 30 kinds of "
    "POKEMON, I’m supposed to give you an ITEMFINDER! So, $PLAYER! "
    "Have you caught at least 30 kinds of POKEMON?"
)
# less than 30
r11.npc307A = (
    "Let’s see... Uh-oh! You have only caught $CAUGHTNUMBER kinds "
    "of POKEMON! You need 30 kinds if you want the ITEMFINDER."
)
r11.npc307B = (
    "Great! You’ve caught $CAUGHTNUMBER kinds of POKEMON! "
    "Congratulations! Here you go!"
) # more than 30
r11.notification021 = "$PLAYER received an ITEMFINDER!"
r11.npc307C = (
    "There are items on the ground that can’t be seen. ITEMFINDER "
    "will detect an item close to you. It can’t pinpoint it, so "
    "you have to look yourself!"
) # after receiving
# if you said no
r11.npc307D = "Oh. I see. When you get 30 kinds, come back for the ITEMFINDER."
# in-game trade: NIDORINO for NIDORINA (A: wrong POKEMON, B: confirm,
# C: thanks, D: follow-up, E: declined)
r11.npc308 = "I’m looking for NIDORINO! Wanna trade one for NIDORINA?"
r11.npc308A = "What? That’s not a NIDORINO! If you get one, come back here!"
r11.npc308B = "Okay, connect the cable like so!"
r11.npc308C = "Hey, thanks!"
r11.npc308D = "Isn’t my old NIDORINA great?"
r11.npc308E = "Aww! Oh well..."
# Route 12 dialogue (sleeping SNORLAX roadblock, TM39 mourner, SUPER ROD
# fisherman).
route_12 = Dict()
r12 = route_12
# this is snorlax
r12.npc309 = "A sleeping POKEMON blocks the way!"
r12.npc309A = (
    "...$PLAYER played POKE FLUTE. ...SNORLAX woke up! It attacked in a grumpy rage!"
)
r12.npc309B = "SNORLAX calmed down! With a big yawn, it returned to the mountains!"
# building
r12.npc310 = "There’s a lookout spot upstairs."
r12.npc311 = (
    "My POKEMON’s ashes are stored in POKEMON TOWER. You can have "
    "this TM. I don’t need it anymore..."
)
r12.npc311A = (
    "TM39 is a move called SWIFT. It’s very accurate, so use it "
    "during battles you can’t afford to lose."
)
# building 2
r12.npc312 = (
    "I’m the FISHING GURU’s brother! I simply looove fishing! Do you like to fish?"
)
# get super rod
r12.npc312A = "Grand! I like your style! Take this and fish, young one!"
r12.notification022 = "$PLAYER received a SUPER ROD!"
r12.npc312B = (
    "Fishing is a way of life! From the seas to rivers, go out and land the big one!"
)
r12.npc312C = (
    "Hello there, $PLAYER! Use the SUPER ROD in any water! You can "
    "catch different kinds of POKEMON. Try fishing wherever you can!"
)
# if the player declines the SUPER ROD
r12.npc312D = "Oh... That’s so disappointing..."
# Route 13 dialogue: signs only, everything else on the route is Trainers.
route_13 = Dict()
r13 = route_13
r13.sign047 = "TRAINER TIPS - Use SELECT to switch items in the ITEM window!"
r13.sign048 = "TRAINER TIPS - Look to the left of that post!" # Why?
# the rest is all trainers
# Route 14 dialogue (PROF.OAK's AIDE hands out the EXP.ALL at 50 species
# caught — mirrors the Route 11 ITEMFINDER aide above).
route_14 = Dict()
r14 = route_14
r14.npc313 = "Are you working on a POKEDEX? PROF.OAK’s AIDE came by here."
r14.npc314 = (
    "Hi! Remember me? I’m PROF.OAK’s AIDE! If you caught 50 kinds of "
    "POKEMON, I’m supposed to give you an EXP.ALL! So, $PLAYER! Have "
    "you caught at least 50 kinds of POKEMON?"
)
r14.npc314A = (
    "Let’s see... Uh-oh! You’ve only caught $CAUGHTNUMBER kinds of "
    "POKEMON! You need 50 kinds if you want the EXP.ALL."
)
r14.npc314B = (
    "Great! You’ve caught $CAUGHTNUMBER kinds of POKEMON! "
    "Congratulations! Here you go!"
)
r14.npc314C = (
    "EXP.ALL gives EXP points to all the POKEMON with you, even if "
    "they don’t fight. It does, however, reduce the amount of EXP "
    "for each POKEMON. If you don’t need it, you should store it "
    "via PC."
)
r14.npc314D = "Oh. I see. When you get 50 kinds, come back for EXP.ALL."
# Cycling Road dialogue (bike-only slope; hidden house with HM02 FLY).
cycling_road = Dict()
cyc = cycling_road
cyc.sign049 = "Enjoy the slope! - CYCLING ROAD"
# if you have a bike in your inventory
cyc.npc315 = "CYCLING ROAD is a downhill course by the sea. It’s a great ride."
cyc.npc315A = "Excuse me! Wait up, please! No pedestrians allowed on CYCLING ROAD!"
cyc.npc316 = "We’re going riding together!"
cyc.npc317 = "I’m going for a ride with my girlfriend!"
# outside fly girl's house
cyc.npc318 = "How’d you get in? Good effort!"
# fly girl
cyc.npc319 = (
    "Oh, you found my secret retreat! Please don’t tell anyone I’m "
    "here. I’ll make it up to you with this!"
)
cyc.npc319A = "HM02 is FLY. It will take you back to any town. Put it to good use!"
cyc.npc320 = "Kyueen!" # fearow
cyc.sign050 = "It's a notice! Watch out for discarded items!"
# so not true, unless they're talking stats
cyc.sign051 = (
    "TRAINER TIPS - All POKEMON are unique. Even POKEMON of the "
    "same type and level grow at different rates."
)
cyc.sign052 = (
    "TRAINER TIPS - Press the A or B button to stay in place while on a slope."
)
cyc.sign053 = "It's a notice! Don't throw the game, throw POKE BALLs instead!"
cyc.sign054 = "CYCLING ROAD - Slope ends here!"
# Route 18 dialogue (bottom of Cycling Road; SLOWBRO/LICKITUNG in-game trade).
route_18 = Dict()
r18 = route_18
r18.sign055 = "CYCLING ROAD - No pedestrians permitted!"
r18.npc321 = "CYCLING ROAD is all uphill from here."
r18.npc322 = "I’m looking for SLOWBRO! Wanna trade one for LICKITUNG?"
r18.npc322A = "What? That’s not a SLOWBRO! If you get one, come back here!"
r18.npc322B = "Okay, connect the cable like so!"
r18.npc322C = "Hey, thanks!" # get the lickitung, Marc
r18.npc322D = "Isn’t my old LICKITUNG great?"
r18.npc322E = "Awww! Oh well..."
# Fuchsia City dialogue, part 1: outside, zoo signs, POKEMON CENTER,
# mart, and the FISHING GURU's brother's house.
# (Note: the variable name keeps the file's existing "fuschia" spelling
# because other code may reference it.)
fuschia_city = Dict()
fus = fuschia_city
# outside
fus.sign056 = "FUCHSIA CITY - Behold! It's Passion Pink!" # seriously?
fus.npc323 = (
    "SAFARI ZONE has a zoo in front of the entrance. Out back is the "
    "SAFARI GAME for catching POKEMON."
)
# typo fixed: "meet here here" -> "meet her here" (Erik is looking for Sara)
fus.npc324 = "Where’s SARA? I said I’d meet her here." # Erik
fus.npc325 = "That item ball there is really a POKEMON."
fus.npc326 = "Did you try the SAFARI GAME? Some POKEMON can only be caught there."
fus.sign057 = "SLOWPOKE - Friendly and very slow moving."
fus.sign058 = "CHANSEY - Catching one is all up to chance."
fus.sign059 = "VOLTORB - The very image of a POKE BALL."
fus.sign060 = "LAPRAS - A.K.A. the king of the seas."
fus.sign061 = "OMANYTE - A POKEMON that was resurrected from a fossil."
fus.sign062 = (
    "KANGASKHAN - A maternal POKEMON that raises its young in a pouch on its belly."
)
# pokemon center
fus.npc327 = (
    "If you’re studying POKEMON, visit the SAFARI ZONE. It has all "
    "sorts of rare POKEMON."
)
# typo fixed: "POKMEON" -> "POKEMON"
fus.npc328 = (
    "You can’t win with just one strong POKEMON. It’s tough, but you "
    "have to raise them evenly."
)
fus.npc329 = (
    "There’s a narrow trail west of VIRIDIAN CITY. It goes to the "
    "POKEMON LEAGUE HQ. The HQ governs all trainers."
)
# pokemon mart
fus.npc330 = "Did you try X SPEED? It speeds up a POKEMON in battle!"
# typo fixed: "cards of calendars" -> "cards or calendars"
fus.npc331 = "Do you have a SAFARI ZONE flag? What about cards or calendars?"
# house
fus.npc332 = (
    "I’m the FISHING GURU’s older brother! I simply Loooove fishing! "
    "Do you like to fish?"
)
fus.npc332A = "Grand! I like your style! Take this and fish, young one!" # yes
fus.npc332B = "Hello there, $PLAYER! How are the fish biting?"
fus.npc332C = "Oh... that’s so disappointing..."
# Fuchsia City dialogue, part 2: the toothless SAFARI ZONE WARDEN
# (unintelligible until the GOLD TEETH are returned) and nearby houses.
# warden's home
# put in choice
fus.npc333 = "Hif fuff hefifoo! Ha lof ha feef ee hafahi ho. Heff hee fwee!"
fus.npc333A = "Ah howhee ho hoo! Eef ee hafahi ho!" # yes
fus.npc333B = "Ha? He ohay heh ha hoo ee haheh!" # no
# after finding the Gold Teeth
fus.notification023 = (
    "$PLAYER gave the GOLD TEETH to the WARDEN! The WARDEN popped in his teeth!"
)
fus.npc333C = (
    "Thanks, kid! No one could understand a word I said. I couldn’t "
    "work that way. Let me give you something for your trouble."
)
fus.npc333D = (
    "HM04 teaches STRENGTH! It lets POKEMON move boulders when "
    "you’re outside of battle. Oh yes, did you find the SECRET "
    "HOUSE in SAFARI ZONE? If you do, you win an HM! I hear it’s "
    "the rare SURF HM."
)
# house
fus.npc334 = (
    "SLOWPOKE is very knowledgeable about POKEMON! He even has some "
    "fossils of rare, extinct POKEMON!"
)
# because you are a cruel asshole
fus.npc335 = "We nicknamed the WARDEN SLOWPOKE. He and SLOWPOKE both look vacant!"
fus.npc336 = (
    "SLOWPOKE came in, but I couldn’t understand him. I think he’s "
    "got a speech problem!"
)
# house
fus.npc337 = (
    "SAFARI ZONE’s WARDEN is old, but still active! All his teeth are false, though."
)
fus.npc338 = "BILL files his own POKEMON data on his PC! Did he show you?"
fus.npc339 = (
    "Hmm? You’ve met BILL? He’s my grandson! He always liked "
    "collecting things even as a child!"
)
# Safari Zone dialogue: entrance booth, the four areas, rest houses, and
# the SECRET HOUSE prize (HM03 SURF).
safari_zone = Dict()
sfz = safari_zone
sfz.npc340 = "Hi! Is it your first time here?"
sfz.npc340A = (
    "SAFARI ZONE has 4 zones in it. Each zone has different kinds "
    "of POKEMON. Use SAFARI BALLs to catch them! When you run out "
    "of time or SAFARI BALLs, it's game over for you!"
)
sfz.npc340B = "Sorry, you’re a regular here!"
sfz.npc341 = (
    "Welcome to the SAFARI ZONE! For just P500, you can catch all "
    "the POKEMON you want in the park! Would you like to join the "
    "hunt?"
)
sfz.npc341A = (
    "That’ll be P500, please! We only use a special kind of POKE BALL here."
)
sfz.notification024 = "$PLAYER received 30 SAFARI BALLs!"
sfz.npc341B = "We’ll call you on the PA when you run out of time or SAFARI BALLs!"
sfz.npc341C = "OK! Please come again!" # no
sfz.npc341D = "Leaving early?" # try leaving before your time is up
sfz.npc341E = "Please return any SAFARI BALLs you have left." # yes
sfz.npc341F = "Good luck!" # no
sfz.npc341G = "Did you get a good haul? Come again!" # when your time is up
# Zone 1
sfz.sign063 = "TRAINER TIPS - Press the START button to check remaining time!"
# Rest House
sfz.npc342 = "I’m catching POKEMON to take home as gifts!"
sfz.npc343 = "Where did my boyfriend, ERIK, go?" # Sara
# Zone 2
sfz.sign064 = "CENTER AREA - NORTH: AREA 2"
sfz.sign065 = "The remaining time declines only while you walk!"
# Rest House
sfz.npc344 = "How many did you catch? I’m bushed from the work!"
sfz.npc345 = "I caught a CHANSEY! That makes this all worthwhile!"
sfz.npc346 = "Whew! I’m tired from all the fun!"
# Zone 3
sfz.sign066 = "AREA 2"
sfz.sign067 = "TRAINER TIPS - Win a free HM for finding the SECRET HOUSE!"
sfz.sign068 = (
    "TRAINER TIPS - POKEMON hide in the tall grass! Zigzag through "
    "grassy areas to flush them out!"
)
sfz.sign069 = "TRAINER TIPS - The SECRET HOUSE is still ahead!"
# Rest House
# you know perfectly why, you ditz
sfz.npc347 = (
    "My EEVEE evolved into a FLAREON! But, a friend’s EEVEE turned "
    "into a VAPOREON! I wonder why?"
)
sfz.npc348 = "Go to the deepest part of the SAFARI ZONE. You will win a prize!"
sfz.npc349 = (
    "You can keep any item you find on the ground here. But, you’ll "
    "run out of time if you try for all of them at once!"
)
# Zone 4
sfz.sign070 = "AREA 3 - EAST: CENTER AREA"
sfz.sign071 = (
    "TRAINER TIPS - Zone Exploration Campaign! The Search for the SECRET HOUSE!"
)
sfz.sign072 = (
    "REQUEST NOTICE - Please find the SAFARI WARDEN's lost GOLD "
    "TEETH. They're around here somewhere. Reward offered! "
    "Contact: WARDEN"
)
# Rest House
sfz.npc350 = (
    "Tossing ROCKs at POKEMON might make them run, but they’ll be easier to catch."
)
sfz.npc351 = "Using BAIT will make POKEMON easier to catch."
sfz.npc352 = "I hiked a lot, but I didn’t see any POKEMON I wanted."
# Secret House!
sfz.npc353 = (
    "Ah! Finally! You’re the first person to reach the SECRET HOUSE! "
    "I was getting worried that no one would win our campaign prize. "
    "Congratulations! You have won!"
)
sfz.npc353A = (
    "HM03 is SURF! POKEMON will be able to ferry you across water! "
    "And, this HM isn't disposable! You can use it over and over! "
    "You're super lucky for winning this fabulous prize!"
)
# PA announcement when the SAFARI GAME timer expires
sfz.notification025 = "Ding-dong! Time's up! Your SAFARI GAME is over!"
# Fuchsia Gym dialogue (KOGA's gym; sign plus the gym guide at the door).
fuschia_gym = Dict()
fug = fuschia_gym
fug.sign073 = "FUCHSIA CITY POKEMON GYM - LEADER: KOGA - The Poisonous Ninja Master"
fug.npc354 = (
    "Yo! Champ in the making! FUCHSIA GYM is riddled with invisible "
    "walls! KOGA might appear close, but he’s blocked off! You have "
    "to find gaps in the walls to reach him!"
)
fug.npc354A = "It’s amazing how a ninja can terrify even now!"
# *******************************************
# Sea Route 19
# *******************************************
# There are only trainers here
# Sea Route 20 dialogue (Seafoam Islands; npc355 is ARTICUNO's cry).
route_20 = Dict()
# seafoam islands
route_20.sign074 = "DANGER - FAST CURRENT!"
route_20.sign075 = "Boulders might change the flow of water!"
route_20.npc355 = "Gyaoo!" # articuno
# Cinnabar Island dialogue: outdoor NPCs and signage.
cinnabar_island = Dict()
cii = cinnabar_island
# outside
cii.sign076 = "CINNABAR ISLAND - The Fiery Town of Burning Desire"
# typo fixed: "experiements" -> "experiments"
cii.npc356 = "Scientists conduct experiments in the burned out building."
cii.npc357 = "CINNABAR GYM’s BLAINE is an odd man who has lived here for decades."
# Cinnabar Island interiors: POKEMON CENTER, mart, and the POKEMON LAB
# (three in-game trades plus the fossil-resurrection doctor).
# pokemon center
cii.npc358 = (
    "Do you have any friends? POKEMON you get in trades grow very "
    "quickly! I think it’s worth a try!"
)
cii.npc359 = (
    "POKEMON can still learn techniques after cancelling evolution. "
    "Evolution can wait until new moves have been learned."
)
cii.npc360 = (
    "You can cancel evolution. When a POKEMON is evolving, you can "
    "stop it and leave it the way it is."
)
# pokemon mart
cii.npc361 = "It never hurts to have extra items!"
cii.npc362 = "Don’t they have X ATTACK? It’s good for battles!"
# Pokemon Lab
cii.npc363 = (
    "We study POKEMON extensively here. People often bring us rare "
    "POKEMON for examination."
)
# meeting room
# shitty trade
cii.npc364 = "Hello there! Do you want to trade your RAICHU for ELECTRODE?"
cii.npc364A = "Hmm? This isn’t RAICHU. Think of me when you get one."
cii.npc364B = "Okay, connect the cable like so!"
cii.npc364C = "Thanks!" # get Doris, the Electrode
# original line:
# npc364D = "The RAICHU you traded to me went and evolved!" # what?
# Seriously. That makes no sense. New line:
cii.npc364D = "That RAICHU you traded me shocked me!"
cii.npc364E = "Well, if you don’t want to..."
# other room
cii.npc365 = "Hi! Do you have VENONAT? Want to trade it for TANGELA?"
cii.npc365A = "...That is no VENONAT. If you get one, trade it with me!"
cii.npc365B = "Okay, connect the cable like so!"
cii.npc365C = "Thanks, pal!" # get Crinkles, the Tangela
cii.npc365D = "How is my old TANGELA? My VENONAT is doing great!"
cii.npc365E = "That’s too bad."
# R&D Room
cii.npc366 = "EEVEE can evolve into 1 of 3 kinds of POKEMON."
cii.npc367 = "Tch-tch-tch! I made a cool TM! It can cause all kinds of fun!"
cii.npc367A = (
    "Tch-tch-tch! That’s the sound of a METRONOME! It tweaks your "
    "POKEMON’s brain into using moves it doesn’t know!"
)
# Testing room
cii.npc368 = "I’m looking for PONYTA! Wanna trade one for SEEL?"
cii.npc368A = "What? That’s not PONYTA! If you get one, come back here!"
cii.npc368B = "Okay, connect the cable like so!"
cii.npc368C = "Hey, thanks!" # get Sailor, the Seel
cii.npc368D = "Isn’t my old SEEL great?"
cii.npc368E = "Awww! Oh well..."
# the fossil-resurrection doctor; broken English is in-character
cii.npc369 = (
    "Hiya! I am an important doctor! I study rare POKEMON fossils "
    "here! ...You! Have you a fossil for me?"
)
cii.npc369A = "No! Is too bad!"
cii.npc369B = (
    "Oh! That is $FOSSILTYPE! It is fossil of $FOSSILPOKE, a "
    "POKEMON that is already extinct! My Resurrection Machine will "
    "make that POKEMON live again!"
)
cii.npc369C = (
    "So! You hurry and give me that! I take a little time! You go "
    "for walk a little while!"
) # yes
# after leaving and re-entering building
cii.npc369D = (
    "Where were you? Your fossil is back to life! It was $FOSSILPOKE like I think!"
)
cii.npc369E = "Hiya! I am important doctor!" # after all is said and done
cii.npc369F = "Aiyah! You come again!" # no
# Pokemon Mansion dialogue: the hidden door switches.
pokemon_mansion = Dict()
# this is an 'npc' simply so it can use the yes/no function
pokemon_mansion.npc370 = "A secret switch! Press it?"
pokemon_mansion.npc370A = "Who wouldn’t?" # yes
pokemon_mansion.npc370B = "Not quite yet!" # no
# Cinnabar Gym dialogue (BLAINE's quiz gym).
cinnabar_gym = Dict()
cig = cinnabar_gym
cig.sign077 = (
    "CINNABAR ISLAND POKEMON GYM - LEADER: BLAINE - The Hot-Headed Quiz Master!"
)
cig.npc371 = (
    "Yo! Champ in the making! The hot-headed BLAINE is a fire "
    "POKEMON pro! Douse his spirits with water! You better take "
    "some BURN HEALS!"
)
# all quizzes use quizA and quizB as answers
# NOTE(review): the quiz statements are yes/no trick questions taken from
# the game; several are deliberately false, so don't "correct" them.
cig.quiz1 = (
    "POKEMON Quiz! Get it right and the door opens to the next room! "
    "Get it wrong and face a Trainer! If you want to conserve your "
    "POKEMON for the GYM LEADER... Then get it right! Here we go! "
    "...CATERPIE evolves into BUTTERFREE?"
)
cig.quizA = "You're absolutely correct! Go on through!" # yes
cig.quizB = "Sorry! Bad call!" # no
cig.quiz2 = "There are 9 certified POKEMON LEAGUE BADGES?"
cig.quiz3 = "POLIWAG evolves 3 times?"
cig.quiz4 = "Are thunder moves effective against ground element-type POKEMON?"
cig.quiz5 = "POKEMON of the same kind and level are not identical?"
cig.quiz6 = "TM28 contains TOMBSTONE?"
cig.npc371A = "$PLAYER! You beat that firebrand!"
# Viridian City dialogue for the late-game return (gym now open).
# NOTE(review): npc373 is skipped (372 jumps to 374) — confirm whether
# that id is used elsewhere or was simply never assigned.
viridian_city_2 = Dict()
vi2 = viridian_city_2
vi2.npc372 = "VIRIDIAN GYM’s LEADER has returned!"
# inside the gym
vi2.npc374 = (
    "Yo! Champ in the making! Even I don’t know VIRIDIAN LEADER’s "
    "identity! This will be the toughest of all the GYM LEADERs! I "
    "heard that the trainers here like ground-type POKEMON!"
)
vi2.npc374A = "Blow me away! GIOVANNI was the GYM LEADER here?"
# Pokemon League front-gate dialogue (badge check west of Viridian).
pokemon_league = Dict()
pkl = pokemon_league
pkl.sign078 = "POKEMON LEAGUE - Front Gate"
pkl.npc375 = (
    "Only truly skilled trainers are allowed through. You don’t have "
    "the BOULDERBADGE yet! The rules are rules. I can’t let you pass."
)
pkl.npc375A = "Oh! That is the BOULDERBADGE! Go right ahead!"
# Route 23 badge-check guards ($BADGE is substituted per guard) and
# Victory Road (npc379 is MOLTRES's cry).
route_23 = Dict()
r23 = route_23
# guards 2-8 use this template
r23.npc378 = "You can pass here only if you have the $BADGE!"
r23.npc378A = (
    "You don’t have the $BADGE yet! You have to have it to get to "
    "the POKEMON LEAGUE!"
)
r23.npc378B = "Oh! That is the $BADGE! OK then! Please, go right ahead!"
victory_road = Dict()
victory_road.npc379 = "Gyaoo!" # moltres
# Indigo Plateau dialogue: entrance, ELITE FOUR gate, and PROF. OAK's
# ending speeches through the HALL OF FAME.
indigo_plateau = Dict()
idp = indigo_plateau
idp.sign079 = "INDIGO PLATEAU - The ultimate goal of trainers! POKEMON LEAGUE HQ"
idp.npc380 = (
    "Yo! Champ in the making! At POKEMON LEAGUE, you have to face "
    "the ELITE FOUR in succession. If you lose, you have to start "
    "all over again! This is it! Go for it!"
)
idp.npc381 = (
    "From here on, you face the ELITE FOUR one by one! If you win, "
    "a door opens to the next Trainer! Good luck!"
)
# after beating your rival
idp.oak020 = "$PLAYER!"
idp.oak021 = (
    "So, you won! Congratulations! You're the new POKEMON LEAGUE "
    "champion! You've grown up so much since you first left with "
    "$STARTERPOKE! $PLAYER, you have come of age!"
)
idp.oak022 = (
    "$RIVAL! I'm disappointed! I came when I heard you beat the "
    "ELITE FOUR! I mopped myself up and came straight over, but when "
    "I got here, you had already lost! $RIVAL! Do you understand why "
    "you lost? You have forgotten to treat your POKEMON with trust "
    "and love! Without them, you will never become a champ again!"
)
idp.oak023 = (
    "$PLAYER! You understand that your victory was not just your own "
    "doing! The bond you share with your POKEMON is marvelous!"
)
idp.oak024 = "$PLAYER! Come with me!"
# final room
idp.oak025 = (
    "Er-hem! Congratulations $PLAYER! This floor is the POKEMON "
    "HALL OF FAME! POKEMON LEAGUE champions are honored for their "
    "exploits here!"
)
idp.oak026 = (
    "$PLAYER! You have endeavored hard to become the new LEAGUE "
    "champion! Congratulations, $PLAYER, you and your POKEMON are "
    "HALL OF FAMERs!"
)
# Generic POKEMON CENTER (Nurse Joy) dialogue shared by every center,
# including the PokeRus notice and the upgraded-Trainer-Card greetings.
pokemon_center = Dict()
pkc = pokemon_center
pkc.nursejoy001 = "Welcome to our POKEMON CENTER!"
pkc.nursejoy001A = "Would you like me to heal your POKEMON back to perfect health?"
pkc.nursejoy002 = "Okay, I'll take your POKEMON for a few seconds."
pkc.nursejoy003 = (
    "Thank you for waiting. We've restored your POKEMON to full "
    "health. We hope to see you again!"
)
pkc.nursejoy004 = "Come again!"
# PokeRus
pkc.nursejoy005 = (
    "$PLAYER, it looks like your POKEMON may be infected with "
    "PokeRus. Little is known about the PokeRus except they are "
    "microscopic life forms that attach to POKEMON."
)
pkc.nursejoy005A = (
    "While infected, POKEMON are said to grow exceptionally "
    "well. Don't worry - they'll be fine!"
)
# Only occurs in Emerald and after, but totally cool.
# first time she sees you with the update
pkc.nursejoy006 = "Would you like to..."
pkc.nursejoy006A = (
    "Th-that Trainer Card! That wonderful shade! I've seen "
    "several Trainers with Violet Trainer Cards up to now..."
)
pkc.nursejoy006B = (
    "But you're the first to top them with that impressive Trainer Card."
)
pkc.nursejoy006C = "Please, $PLAYER, may I please heal your POKEMON?"
# every time after
pkc.nursejoy007 = "Great to see you, $PLAYER!"
pkc.nursejoy007A = "You want the usual, right?"
pkc.nursejoy008 = "I will be pleased to take your POKEMON for a few seconds."
pkc.nursejoy009 = "Thank you for waiting! We hope to see you again!"
# /*
# ██ ██ ██████ ██████ ██ ██████
# ██ ██ ██ ██ ██ ██ ██ ██ ██
# ██ █ ██ ██ ██ ██████ ██ ██ ██
# ██ ███ ██ ██ ██ ██ ██ ██ ██ ██
# ███ ███ ██████ ██ ██ ███████ ██████
# */
# Flavor text for inspectable world objects (not NPC conversations).
world_interactions = Dict()
wld = world_interactions
# These are only played when the Player interacts with them in the game
# homes
wld.nes = "$PLAYER played with the NES. ...Okay! It's time to go!"
wld.bookshelf = "It's crammed full of POKEMON books."
wld.dresser = "It's a nicely made dresser. It will hold a lot of stuff."
wld.PC = "$PLAYER booted up the PC."
# presumably a "Stand By Me" nod — the in-game TV Easter egg
wld.TV001 = (
    "There's a movie on TV. Four boys are walking on railroad tracks. "
    "...I'd better go too."
)
wld.cupboard = "Dishes and plates are neatly lined up."
wld.oven = "It smells delicious! Someone's been cooking here."
wld.world_map = "It's a big map! This is useful!"
# Oak's lab flavor text plus the Pokemon Center / Mart signs that repeat
# in every town.
# oak's lab
wld.pokedex = "It's encyclopedia-like, but the pages are blank!"
# typo fixed: the first fragment lacked a trailing space, so the implicit
# string concatenation produced "POKEMONtrainers!".
wld.oaks_computer001 = (
    "There's an email message here! ... Calling all POKEMON "
    "trainers! The elite trainers of POKEMON LEAGUE are "
    "ready to take on all comers! Bring your best POKEMON "
    "and see how you rate as a Trainer!"
)
wld.oaks_computer002 = "POKEMON LEAGUE HQ INDIGO PLATEAU"
wld.oaks_computer003 = "PS: PROF. OAK, please visit us! ..."
wld.oaks_poster001 = "Push START to open the MENU!"
wld.oaks_poster002 = "The SAVE option is on the MENU screen."
wld.oaks_pokeballs001 = "Those are POKE BALLS. They contain POKEMON!"
wld.oaks_pokeballs002 = "That's PROF. OAK'S last POKEMON!"
# repeating signs
wld.poke_center = "Pokemon Center"
wld.poke_mart = "Pokemon Mart"
# typo fixed: "ofproducts" -> "of products"
wld.poke_mart_poster = "It's an advertising poster about all kinds of products."
# Viridian Pokemon School notebook pages (read in order 1-4).
# pokemon school
wld.notebook1 = (
    "Looked at the notebook! First page... POKE BALLS are used to "
    "catch POKEMON. Up to 6 POKEMON can be carried. People who "
    "raise and make POKEMON fight are called POKEMON trainers."
)
wld.notebook2 = (
    "Second page... A healthy POKEMON may be hard to catch, so "
    "weaken it first! Poison, burns, and other damage are "
    "effective!"
)
wld.notebook3 = (
    "Third page... POKEMON trainers seek others to engage in "
    "POKEMON fights. Battles are constantly fought at POKEMON "
    "GYMs."
)
# typo fixed: the middle fragment lacked a trailing space, so the implicit
# concatenation produced "face...the ELITE FOUR".
wld.notebook4 = (
    "Fourth page... The goal for POKEMON trainers is to beat the "
    "top 8 POKEMON GYM LEADERs. Do so to earn the right to face... "
    "the ELITE FOUR of POKEMON LEAGUE!"
)
# Pokemon School blackboard: a menu entry (blackboard001) plus one page
# per status condition (SLP/BRN/PSN/FRZ/PAR).
# After notebook4, call npc020A as the next piece of dialogue
wld.blackboard001 = (
    "The blackboard describes POKEMON STATUS changes during "
    "battles. Which heading do you want to read?"
)
wld.blackboard002 = (
    "SLP: A POKEMON can't attack if it's asleep! POKEMON will "
    "stay asleep even after battles. Use AWAKENING to wake "
    "them up!"
)
wld.blackboard003 = (
    "BRN: A burn reduces power and speed. It also causes "
    "ongoing damage. Burns remain after battles. Use BURN HEAL"
    " to cure a burn!"
)
wld.blackboard004 = (
    "PSN: When poisoned, a POKEMON's health steadily drops. "
    "Poison lingers after battles. Use an ANTIDOTE to cure "
    "poison!"
)
wld.blackboard005 = (
    "FRZ: If frozen, a POKEMON becomes totally immobile! It "
    "stays frozen even after the battle ends. Use ICE HEAL to "
    "thaw out POKEMON!"
)
wld.blackboard006 = (
    "PAR: Paralysis could make POKEMON moves misfire! "
    "Paralysis remains after battles. Use PARALYZ HEAL for "
    "treatment!"
)
# viridian various
wld.viridian_house_poster = "SPEAROW. Name: SPEARY"
wld.viridian_gym_doors_closed = "The GYM's doors are locked..."
# Pewter Museum
wld.exhibit001 = "AERODACTYL Fossil - A primitive and rare POKEMON."
wld.exhibit002 = "KABUTOPS Fossil - A primitive and rare POKEMON."
wld.exhibit003 = "Meteorite that fell on MT. MOON. (MOON STONE?)"
wld.exhibit004 = "SPACE SHUTTLE COLUMBIA"
wld.museum_pedestal = "The AMBER is clear and gold!"
# gym crap
wld.gym_plaque_before_win = "WINNING TRAINERS: $RIVAL"
wld.gym_plaque_after_win = "WINNING TRAINERS: $RIVAL, $PLAYER"
# Cerulean City
wld.bicycle = "A shiny new BICYCLE!"
# bill's PC. Options: Eevee, Flareon, Jolteon, and Vaporeon.
wld.bills_computer = (
"BILL's favorite POKEMON list! Which POKEMON do you want to see?"
)
# Vermilion Pokemon Fan Club
wld.fan_club_poster001 = "Let's all listen politely to other trainers!"
wld.fan_club_poster002 = "If someone brags, brag right back!"
# vermilion letter to pippi
wld.letter_to_pippi = (
"Dear PIPPI, I hope to see you soon. I heard SAFFRON has"
" problems with TEAM ROCKET. VERMILION appears to be"
" safe."
)
# lavender town
wld.lavender_town_booklet = (
"POKEMON Monthly Grand Prize Drawing! ...The "
"application form is... Gone! It's been clipped "
"out!"
)
# celadon blackboard
wld.blackboard007 = "TRAINER TIPS - Using a Game Link Cable"
wld.blackboard008 = (
"HOW TO LINK - When you have linked your GAME BOY with "
"another GAME BOY, talk to the attendant on the right in "
"any POKEMON CENTER. ...Hrm. I don't think that applies "
"here."
)
wld.blackboard009 = "COLOSSEUM - COLOSSEUM lets you play against a friend."
wld.blackboard010 = "TRADE CENTER - The TRADE CENTER is used for trading POKEMON."
wld.blackboard_dusty = "The blackboard is dusty and grey from years of disuse."
wld.silph_pamphlet = (
"It's a pamphlet on TMS. ... There are 50 TMs in all. "
"There are also 5 HMs that can be used repeatedly. SILPH "
"CO."
)
wld.eevee_pokeball = "It's a pokeball. There's an Eevee inside!" # get eevee
# celadon department store
wld.celadon_third_floor_poster = "Blue and Red! Both are POKEMON!"
wld.videogame_1 = "A fighting game! Looks tough!"
wld.videogame_2 = "A sports game! Dad'll like that!"
wld.videogame_3 = "An RPG! There's no time for that!" # lol
wld.videogame_4 = "A puzzle game! Looks addictive!"
# rocket game corner
wld.rocket_game_corner_poster = "Hey! A switch behind the poster!? Let's push it!"
# saffron karate gym
wld.hitmonchan_pokeball = "You want the piston punching HITMONCHAN?"
wld.hitmonlee_pokeball = "You want the hard kicking HITMONLEE?"
# what's left over after one is taken
wld.hitmoneither_pokeball = "Better not get greedy..."
# Silph Co. 5th-floor reports.
# silph co 5th floor
wld.silph_co_paper1 = (
    "It's a POKEMON REPORT! 4 POKEMON evolve only when "
    "traded by link-cable or newer methods."
)
# typo fixed: "havebeen" -> "have been"
wld.silph_co_paper2 = (
    "It's a POKEMON REPORT! Over 160 POKEMON techniques have been confirmed."
)
# typo fixed: the first fragment lacked a trailing space, so the implicit
# concatenation produced "thefirst".
wld.silph_co_paper3 = (
    "It's a POKEMON REPORT! POKEMON LAB created PORYGON, the "
    "first virtual reality POKEMON."
)
# Saffron house (post-Rocket) and the Copycat's console.
# saffron, house after beating TR
wld.saffron_letter = (
    "I was given a PP UP as a gift. It's used for increasing the PP of techniques!"
)
wld.copycats_nes = "A game with MARIO wearing a bucket on his head!"
# Cinnabar Lab flavor text.
# cinnabar island
wld.pokemon_lab_picture = "A photo of the LAB's founder, DR.FUJI!"
# typo fixed: the first fragment lacked a trailing space, so the implicit
# concatenation produced "birdPOKEMON".
wld.pokemon_lab_computer = (
    "There's an email message! ... The 3 legendary bird "
    "POKEMON are ARTICUNO, ZAPDOS, and MOLTRES. Their "
    "whereabouts are unknown. We plan to explore the "
    "cavern close to CERULEAN. From: POKEMON RESEARCH "
    "TEAM"
)
# Pokemon Mansion diaries (the MEW/MEWTWO backstory) and Blaine's locked
# gym door.
# pokemon mansion, second floor
wld.pokemon_mansion_diary1 = (
    "July 5 - Guyana, South America. A new POKEMON "
    "has been discovered deep in the jungle."
)
wld.pokemon_mansion_diary2 = (
    "July 10 - We christened the newly discovered POKEMON, MEW."
)
# third floor
wld.pokemon_mansion_diary3 = "Feb. 6 - MEW gave birth. We named the newborn MEWTWO."
# basement
wld.pokemon_mansion_diary4 = (
    "Sept. 1 - MEWTWO is far too powerful. We have "
    "failed to curb its vicious tendencies..."
)
# gym
# before getting the SECRET KEY
wld.blaines_gym_doors = "The doors are locked!"
class _trainers(object):
"""
So that I don't have to hard-code every freakin' bit of dialogue in the
game for each Trainer, every Trainer type has a set of phrases (before
and after) that will get chosen randomly, with a few token exceptions.
Format: ("greeting","lose","after")
def trainerdialogue(trainertext={},trainername=Trainer.name,
trainercash=Trainer.cash):
# check for Gary
if trainername == "Gary":
print "GARY FOUND"
if trainercash == 175:
print "I'm level 1!"
if trainercash == 280:
print "I'm level 2!"
if trainercash == 595:
print "I'm level 3!"
try:
randphrase = random.randrange(0,len(trainertext))
for i, phrase in trainertext.iteritems():
if randphrase == i:
print("Welcome: {} | Loss: {} | After: {}".format(
phrase[0], phrase[1], phrase[2]))
except ValueError:
print "TEST"
usage:
trainerdialogue(trainertype)
"""
def translate(self, trainertype):
# links the dialog dictionaries to the text Trainer types
dialogue_entry = {
"Rival": self.rival_dialogue,
"Bugcatcher": self.bugcatcher_dialogue,
"JrTrainerM": self.jrtrainer_m_dialogue,
"JrTrainerF": self.jrtrainer_f_dialogue,
"Lass": self.lass_dialogue,
"Rocket": self.rocket_dialogue,
"Leader": self.leader_dialogue,
"Elite": self.elite_dialogue,
"RocketLeader": self.rocket_leader_dialogue,
"Sailor": self.Sailor_Dialogue,
"Hiker": self.Hiker_Dialogue,
"CoolTrainerF": self.CoolTrainerF_Dialogue,
"CoolTrainerM": self.CoolTrainerM_Dialogue,
"Youngster": self.Youngster_Dialogue,
"SuperNerd": self.SuperNerd_Dialogue,
"PokeManiac": self.PokeManiac_Dialogue,
"Rocker": self.Rocker_Dialogue,
"Swimmer": self.Swimmer_Dialogue,
"Gambler": self.Gambler_Dialogue,
"Engineer": self.Engineer_Dialogue,
"Gentleman": self.Gentleman_Dialogue,
"Fisherman": self.Fisherman_Dialogue,
"Beauty": self.Beauty_Dialogue,
"Channeler": self.Channeler_Dialogue,
"Biker": self.Biker_Dialogue,
"CueBall": self.CueBall_Dialogue,
"BirdKeeper": self.BirdKeeper_Dialogue,
"Juggler": self.Juggler_Dialogue,
"Tamer": self.Tamer_Dialogue,
"Blackbelt": self.Blackbelt_Dialogue,
"Scientist": self.Scientist_Dialogue,
"Psychic": self.Psychic_Dialogue,
"Burglar": self.Burglar_Dialogue,
}
for key, value in dialogue_entry.items():
if trainertype is key:
return value
rival_dialogue = "Rival"
# all dialogue here is hard-coded, I have to play the game to get it
bugcatcher_dialogue = {
0: (
"Look what I found!",
"Guess I need to train them more...",
"Wow, that was a good catch!",
),
1: ("Lookee here! A Trainer!", "Augh, you're too strong for me!", "Man...."),
2: (
"Woohoo, caught another one!",
"Guess I should let this one go...",
"That one's a keeper!",
),
3: (
"Can you guess what's in my net?",
"Geez, I thought it would be harder than that...",
"Shh, you'll scare them away...",
),
4: (
"If you're a Trainer, you have to battle me!",
"Dang, I should choose my battles better.",
"Humph.",
),
5: ("How about a battle?", "Oh no, you're strong!", "I thought I could win!"),
6: ("Tag, you're it!", "You got me...", "You can't catch me!"),
7: (
"You can fight my new POKEMON!",
"Done like dinner!",
"Trained POKEMON are stronger than wild ones!",
),
8: (
"Hey! You have POKEMON! Come on! Let's battle 'em!",
"No! CATERPIE can't cut it!",
"Shh! You'll scare all the bugs away!",
),
9: (
"Hey, wait up! What's the hurry?",
"I give! You're good at this!",
"Sometimes, you can find stuff on the ground! I'm looking for the "
"stuff I dropped!",
),
}
jrtrainer_m_dialogue = {
0: (
"I'm the best there is!",
"I guess you're better than me...",
"HAH! Wait, I lost...",
),
1: (
"My dad put me in Trainer School!",
"I think it's time for another lesson...",
"Go away, meanie.",
),
2: (
"You look like you might be good. Let's battle!",
"I underestimated you.",
"Yeah, there's always more to learn!",
),
3: (
"I'm younger than you but I can win!",
"My older brother says I'm annoying, too.",
"I really wanted to win...",
),
4: (
"I learned everything I need to know in the POKESCOUTS.",
"Yeahhh... maybe I missed a few things.",
"Maybe you should check out the POKESCOUTS out sometime.",
),
5: (
"We locked eyes -- that means we have to fight!",
"Someday I'm gonna learn that I probably shouldn't do that.",
"Come back when you're ready to fight me again!",
),
6: (
"I’m a cool guy. I’ve got a girlfriend!",
"Aww, darn...",
"Oh well. My girl will cheer me up.",
),
7: (
"Huh? You want to talk to me?",
"I didn’t start it!",
"I should carry more POKEMON with me for safety.",
),
}
jrtrainer_f_dialogue = {
0: (
"You don't stand a chance.",
"For real? I'm so much better than you, though!",
"Tell me this: next time I see you? Believe me.",
),
1: (
"Older sister tells me you can't be too hard on someone.",
"Older sister is wrong!!",
"Older sister is totally right!!",
),
2: ("As if.", "Whatever, major loser.", "For reals though..."),
3: (
"my name is katy but u can call me t3h PeNgU1N oF d00m!!!!!!!!",
"*holds up spork*",
"Love and waffles!",
),
4: ("Who are you?", "Like, gag me with a spoon.", "Four for you, Glen Coco!"),
5: (
"Loser loser with a twist spells whatever!",
"MOOOOOOOOOOOMMM",
"Whatever, major loser.",
),
6: (
"Sparkles are so last year.",
"*stomps away*",
"Brr! It's cold in here. Must be some toros in the atmosphere.",
),
7: (
"Me? Well, OK. I’ll play!",
"Just didn’t work!",
"I want to get stronger! What’s your secret?",
),
8: (
"Hikers leave twigs as trail markers.",
"Ohhh! I did my best!",
"I want to go home!",
),
9: (
"I found CARBOS in a cave once.",
"Just messed up!",
"CARBOS boosted the SPEED of my POKEMON!",
),
10: (
"I’m told I’m good for a kid!",
"Ohh! I lost!",
"I want to become a good Trainer. I’ll train hard.",
),
11: (
"Wow! Your BADGEs are too cool!",
"Not enough!",
"You got those BADGEs from GYM LEADERS. I know!",
),
12: (
"My cute POKEMON wish to make your acquaintance.",
"Wow! You totally won!",
"You have to make POKEMON fight to toughen them up!",
),
13: (
"What’s cool? Trading POKEMON!",
"I said trade!",
"I trade POKEMON with my friends!",
),
14: (
"You look gentle, so I think I can beat you!",
"No, wrong!",
"I’m afraid of BIKERs, they look so ugly and mean!",
),
15: (
"Let me try out the POKEMON I just got in a trade!",
"Not good enough!",
"You can’t change the nickname of any POKEMON you get in a trade."
"Only the Original Trainer can.",
),
16: (
"Want to play with my POKEMON?",
"I was too impatient!",
"I’ll go train with weaker people.",
),
}
lass_dialogue = {
0: (
"My newest Pokemon is SO CUTE!",
"Clearly, looks aren't everything...",
"Was it just too kawaii?",
),
1: (
"Have you seen my new doll?",
"She was a hand-me-down anyway.",
"Oh, isn't she precious?",
),
2: (
"I want you to meet someone -- they're around here somewhere!",
"All my friends are imaginary...",
"They're here!",
),
3: (
"Daddy said if I win, I get a Ponyta!",
"WAAAAAHHHH!!",
"I can't wait to meet my Ponyta! I'm gonna name it Rainbows!",
),
4: (
"You're not very pretty. I bet I can beat you.",
"Beauty must only be skin deep...",
"I knew I chose right!",
),
5: (
"On Wednesdays, we wear pink!",
"Stop trying to make fetch happen. It's not going to happen.",
"That was so fetch.",
),
6: (
"Can I braid your hair?",
"Fine, I don't know how to braid anyway.",
"You're just jealous that my hair is better than yours.",
),
7: (
"Do you want to have a sleepover?",
"That was the worst dare EVER.",
"Just kidding - only cool people get invited to my parties.",
),
8: (
"You looked at me, didn't you?",
"You're mean!",
"Quit staring if you don't want to fight!",
),
9: (
"That look you gave me, it's so intriguing!",
"Be nice!",
"Avoid fights by not letting people see you!",
),
10: (
"What’s a cute, round, and fluffy POKEMON?",
"Stop! Don’t be so mean!",
"I heard that CLEFAIRY evolves when it’s exposed to a MOON STONE.",
),
}
rocket_dialogue = {
0: (
"Your POKEMON seem to adore you, kid!",
"Ghaaah!",
"If I had started as a Trainer at your age...",
),
1: (
"We, TEAM ROCKET, are POKEMON gangsters!",
"I blew it!",
"Darn it all! My associates won't stand for this!",
),
2: (
"Little kids should leave grown-ups alone!",
"I'm steamed!",
"POKEMON lived here long before people came.",
),
3: (
"What do you want? Why are you here?",
"I give up!",
"I’m not going to forget this!",
),
4: (
"Show TEAM ROCKET a little respect!",
"Cough... Cough...",
"Which reminds me. KOFFING evolves into WEEZING!",
),
5: (
"I heard a kid was wandering around.",
"Boom!",
"It’s not smart to pick a fight with TEAM ROCKET!",
),
6: (
"You dare betray TEAM ROCKET?",
"You traitor!",
"If you stand for justice, you betray evil!",
),
7: (
"That’s as far as you’ll go!",
"Not enough grit!",
"If you don’t turn back, I’ll call for backup!",
),
}
leader_dialogue = "Leader"
elite_dialogue = "Best of the bunch"
rocket_leader_dialogue = "This is basically here as a placeholder."
Sailor_Dialogue = {
0: (
"My sailor’s pride is at stake!",
"Your spirit sank me!",
"Did you see the FISHING GURU in VERMILION CITY?",
),
1: (
"You know what they say about sailors and fighting!",
"Right! Good fight, mate!",
"Haha! Want to be a sailor, mate?",
),
2: (
"I like feisty kids like you!",
"Argh! Lost it!",
"Sea POKEMON live in deep water. You’ll need a ROD!",
),
3: (
"Us sailors have POKEMON too!",
"OK, you’re not bad.",
"We caught all our POKEMON while out at sea!",
),
4: (
"Matey, you’re walking the plank if you lose!",
"Argh! Beaten by a kid!",
"Jellyfish sometimes drift into the ship.",
),
5: (
"Ahoy there! Are you seasick?",
"I was just careless!",
"My Pa said there are 100 kinds of POKEMON. I think there are more.",
),
6: (
"Hey matey! Let’s do a little jig!",
"You’re impressive!",
"How many kinds of POKEMON do you think there are?",
),
}
Hiker_Dialogue = {
0: (
"WHOA! You shocked me! Oh, you're just a kid!",
"Wow! Shocked again!",
"Kids like you shouldn't be around here!",
),
1: (
"I just got down from MT. MOON, but I’m ready!",
"You work hard!",
"Drat! A ZUBAT bit me back in there.",
),
2: (
"I’m going to see a POKEMON collector at the cape!",
"You got me.",
"The collector has many rare kinds of POKEMON.",
),
3: (
"Hahaha! Aren’t you a little toughie!",
"What’s that?",
"Hahaha! Kids should be tough!",
),
4: (
"Hahahaha! Come on, dude!",
"Hahahaha! You beat me fair!",
"Hahahaha! Us hearty guys always laugh!",
),
5: (
"My POKEMON techniques will leave you crying!",
"I give! You’re a better technician!",
"In the mountains, you’ll often find rock-type POKEMON.",
),
6: (
"Hit me with your best shot!",
"Fired away!",
"I’ll raise my POKEMON to beat yours, kid!",
),
7: (
"Some tunnels go a long way, kid!",
"Doh, you won!",
"Watch out for ONIX! it can put the squeeze on you!",
),
8: (
"Outsiders like you need to show me some respect!",
"I give!",
"You’re talented enough to hike!",
),
9: (
"Ha-hahah-ah-ha!",
"Ha-haha! Not laughing! Ha-hay fever! Haha-ha-choo!",
"Haha-ha-choo! Ha-choo! Snort! Snivel!",
),
}
CoolTrainerF_Dialogue = {
0: (
"There's no such thing as being to hard on someone.",
"Apparently there is -- I give!",
"See? And you couldn't take it.",
),
1: (
"I've trained for a long time to get where I am today.",
"I must train harder...",
"This is the result of my training. Come back if you want more.",
),
2: (
"YOU! I need to train more, and you're next!",
"I made a mistake.",
"More training! Woo!",
),
3: (
"If you win, I give you money. That's how this works.",
"I'm a gal of my word.",
"It's vice versa, too. I'll take my winnings now.",
),
4: (
"There's you, and then there's me. I'll be the one still standing.",
"...I apologize. I underestimated you.",
"Here we are; two people. I am the one still standing.",
),
5: (
"You know the best thing about battling? I always win.",
"Until now, apparently. Take the money and go.",
"Bah. You were going to be another notch on my pole.",
),
6: (
"I wonder if you are good enough for me!",
"I lost out!",
"I never wanted to lose to anybody!",
),
}
CoolTrainerM_Dialogue = {
0: (
"Don't bother trying. I always win.",
"I... but... I always win...",
"You see? I told you. Run along, now.",
),
1: (
"You have POKEMON with you! You’re mine!",
"You deceived me!",
"The next time we meet I will crush you.",
),
2: (
"Who’s that walking with those good looking POKEMON?",
"Out like a light!",
"Keep walking!",
),
3: (
"I can see you’re good! Let me see exactly how good!",
"I had a chance...",
"I concede, you’re better than me!",
),
}
Youngster_Dialogue = {
0: (
"Local trainers come here to practice!",
"You’re decent.",
"All POKEMON have weaknesses. It’s best to raise different kinds.",
),
1: (
"Dad took me to a great party on S.S.ANNE at VERMILION CITY!",
"I’m not mad!",
"On S.S.ANNE, I saw trainers from around the world!",
),
2: (
"I knew I had to fight you!",
"I’d knew I’d lose, too!",
"If your POKEMON gets confused or falls asleep, switch it!",
),
3: (
"I’ve been to a POKEMON GYM a few times. But, I lost each time.",
"Ohh! Blew it again!",
"I noticed som POKEMANIACs prowling around.",
),
4: (
"Let’s go, but don’t cheat!",
"Huh? That’s not right!",
"I did my best! I’ve no regrets!",
),
5: (
"I just became a Trainer! But, I think I can win!",
"My POKEMON couldn’t!",
"What do you want? Leave me alone!",
),
6: (
"My POKEMON should be ready by now!",
"Too much, too young!",
"I better go find stronger ones!",
),
7: (
"I’m the best in my class!",
"Darn! I need to make my POKEMON stronger!",
"There’s a fat POKEMON that comes down from the mountains. It’s "
"strong if you can get it.",
),
}
SuperNerd_Dialogue = {
0: (
"What?! Don't sneak up on me!",
"My POKEMON won't do!",
"I have to find stronger POKEMON.",
),
1: (
"You need a strategy to win at this!",
"It’s not logical!",
"Go with GRIMER first... and... ...and...then...",
),
2: (
"School is fun, but so are POKEMON.",
"I’ll stay with school.",
"I wish I was better at battling...",
),
3: (
"You look good at POKEMON, but how’s your chem?",
"Ow! Meltdown!",
"I am better at school than this!",
),
}
PokeManiac_Dialogue = {
0: (
"Wow, are you a POKEMANIAC too? Want to see my collection?",
"Humph. I’m not angry!",
"I have more rare POKEMON at home!",
),
1: (
"POKEMON fight! Ready, go!",
"Game over!",
"Oh well, I’ll get a ZUBAT as I go!",
),
2: (
"I draw POKEMON when I’m home.",
"Whew! I’m exhausted!",
"I’m an artist, not a fighter.",
),
3: (
"Do you know about costume players?",
"Well, that’s that.",
"Costume players dress up as POKEMON for fun.",
),
4: (
"You have a POKEDEX? I want one too!",
"Shoot! I’m so jealous!",
"When you finish your POKEDEX, can I have it?",
),
5: (
"Hi kid, want to see my POKEMON?",
"Oh no! My POKEMON!",
"I don’t like you for beating me!",
),
}
Rocker_Dialogue = {
0: (
"Electricity is my specialty!",
"Unplugged!",
"Water conducts electricity, so you should zap sea POKEMON!",
),
}
Swimmer_Dialogue = {
0: (
"Wait! You'll have a heart attack!",
"Ooh! That's chilly!",
"Watch out for TENTACOOL!",
),
1: (
"Have to warm up before my swim!",
"All warmed up!",
"Thanks, kid! I'm ready for a swim!",
),
2: (
"I look at the sea to forget!",
"Ooh! Traumatic!",
"I’m looking at the sea to forget!",
),
3: (
"I love swimming! What about you?",
"Belly flop!",
"I can beat POKEMON at swimming!",
),
4: ("What’s beyond the horizon?", "Glub!", "I see a couple of islands!"),
5: (
"I tried diving for POKEMON, but it was a no go!",
"Help!",
"You have to fish for sea POKEMON!",
),
6: (
"These waters are treacherous!",
"Oh! Dangerous!",
"I got a cramp! Glub, glub...",
),
7: ("The water is shallow here.", "Splash!", "I wish I could ride my POKEMON."),
8: (
"Why are you riding a POKEMON? Can’t you swim?",
"Ouch! Torpedoed!",
"Riding POKEMON sure looks like fun!",
),
9: (
"Check out my buff physique!",
"Wimpy!",
"I should’ve been buffing up my POKEMON, not me!",
),
10: (
"Right now, I’m in a triathlon meet!",
"Pant...pant...pant...",
"I’m beat! But, I still have the bike race and marathon left!",
),
11: (
"Ahh! Feel the sun and the wind!",
"Yow! I lost!",
"I'm sunburnt to a crisp!",
),
12: (
"I caught all my POKEMON at sea!",
"Diver!! Down!!",
"Where'd you catch your POKEMON?",
),
13: (
"The sea cleanses my body and soul!",
"Ayah!",
"I like the mountains too!",
),
}
Gambler_Dialogue = {
0: (
"I’m a rambling, gambling dude!",
"Missed the big score!",
"Gambling and POKEMON are like eating peanuts! Just can’t stop!",
),
1: (
"All right! Let’s roll the dice!",
"Drat! Came up short!",
"Lady Luck’s not with me today!",
),
2: (
"Win, lose, or draw!",
"Atcha! Didn’t go my way!",
"POKEMON is life! And to live is to gamble!",
),
3: (
"Competition! I can’t get enough!",
"I had a chance!",
"You can’t be a coward in the world of POKEMON!",
),
4: (
"I have never won before...",
"I saw this coming...",
"It’s just luck. Luck of the draw.",
),
5: (
"Fwahaha! I have never lost!",
"My first loss!",
"Luck of the draw! Just luck!",
),
}
Engineer_Dialogue = {
0: (
"Careful! I’m laying down some cables!",
"That was electric!",
"Spread the word to save energy!",
),
1: (
"Watch out for live wires!",
"Whoa! You spark plug!",
"Well, better get back to work.",
),
}
Gentleman_Dialogue = {
0: (
"Competing against the young keeps me youthful!",
"Good fight! Ah, I feel young again!",
"15 years ago, I would have won!",
),
1: (
"Which do you like, a strong or a rare POKEMON?",
"I must salute you!",
"I prefer strong and rare POKEMON.",
),
}
Fisherman_Dialogue = {
0: (
"Hello, stranger! Stop and chat! All my POKEMON are from the sea!",
"Darn! I let that one get away!",
"I was going to make you my assistant, too!",
),
1: (
"Check out what I fished up!",
"I’m all out!",
"Party? The cruise ship’s party should be over by now.",
),
2: (
"Yeah! I got a bite, here!",
"Tch! Just a small fry!",
"Hang on! My line’s snagged!",
),
3: (
"Be patient! Fishing is a waiting game!",
"That one got away!",
"With a better ROD, I could catch better POKEMON!",
),
4: (
"The FISHING FOOL vs. POKEMON KID!",
"Too much!",
"You beat me at POKEMON, but I’m good at fishing!",
),
5: (
"I’d rather be working!",
"It’s not easy...",
"It’s all right. Losing doesn’t bug me anymore.",
),
6: (
"You never know what you could catch!",
"Lost it!",
"I catch MAGIKARP all the time, but they’re so weak!",
),
7: (
"Keep my company 'til I get a hit!",
"That burned some time.",
"Oh wait! I got a bite! Yeah!",
),
8: (
"Hey, don't scare away the fish!",
"Sorry! I didn't mean it!",
"I was just angry that I couldn't catch anything.",
),
9: (
"I got a big haul? Wanna go for it?",
"Darn MAGIKARP!",
"I seem to only catch MAGIKARP!",
),
10: (
"You want to know if the fish are biting?",
"Dang!",
"I can't catch anything good!",
),
}
Beauty_Dialogue = {
0: (
"Pink is the best color!",
"Why has pink forsaken me?!",
"See? Pink goes with everyhing.",
),
1: (
"Don’t you dare condescend me!",
"No! You’re too much!",
"You’re obviously talented! Good luck to you!",
),
2: (
"You have POKEMON! Let’s start!",
"You play hard!",
"Eww, I’m all sweaty now!",
),
3: (
"I came this far for cute POKEMON!",
"I’m out of POKEMON!",
"You looked cute and harmless!",
),
4: (
"Sure, I’ll play with you!",
"Oh! You little brute!",
"I wonder which is stronger, male or female POKEMON?",
),
5: (
"Do you want to POKEMON with me?",
"It’s over already?",
"I didn’t know anything about POKEMON. I just like the cool ones!",
),
6: (
"Oh, you’re a little cutie!",
"You looked so cute, too!",
"I forgive you! I can take it!",
),
7: (
"I raise POKEMON because I live alone!",
"I didn’t ask for this!",
"I just like going home to be with my POKEMON!",
),
8: (
"My boyfriend gave me big pearls!",
"Don’t touch my pearls!",
"Will my pearls grow bigger inside CLOYSTER?",
),
}
Channeler_Dialogue = {
0: (
"Urrg... Awaa... Huhu... graaa...",
"Hwa! I’m saved!",
"The GHOSTs can be identified by the SILPH SCOPE.",
),
1: (
"Be gone! Evil spirit!",
"Whew! The spirit left!",
"My friends were possessed too!",
),
2: ("Kekeke... Kwaaah!", "Hmm? What am I doing?", "Sorry! I was possessed!"),
3: (
"Be cursed with me! Kwaaah!",
"What!",
"We can’t crack the identity of the GHOSTs.",
),
4: (
"Huhuhu... Beat me not!",
"Huh? Who? What?",
"May the departed souls of POKEMON rest in peace...",
),
5: (
"GHOST! No! Kwaaah!",
"Where is the GHOST?",
"I must have been dreaming...",
),
6: ("You... shall... join... us...", "What a nightmare!", "I was possessed!"),
7: ("Give... me... your... soul...", "Gasp!", "I was under possession!"),
8: ("Zombies!", "Ha?", "I regained my senses!"),
9: ("Urgah... Urff...", "Whoo!", "I fell to evil spirits despite my training!"),
10: ("Give... me... blood...", "Groan!", "I feel anemic and weak..."),
11: ("Ke... ke... ke... ke... ke...ke!!", "Keee!", "What’s going on here?"),
12: (
"Urff... Kwaaah!",
"Something fell out!",
"Hair didn’t fall out! It was an evil spirit!",
),
}
Biker_Dialogue = {
0: ("What’re you lookin’ at?", "Dang! Stripped gears!", "Get lost!"),
1: (
"We ride out here because there’s more room!",
"Wipe out!",
"It’s cool you made your POKEMON so strong! Might is right! And "
"you know it!",
),
2: (
"C’mon, c’mon. Let’s go, let’s go, let’s go!",
"Arrg! Lost! Get lost!",
"What, what, what? What do you want?",
),
3: (
"POKEMON fight? Cool! Rumble!",
"Blown away!",
"You know who’d win, you and me one on one!",
),
4: (
"Perfect! I need to burn some time!",
"What? You!?",
"Raising POKEMON is a drag, man.",
),
5: (
"Hey kid! C’mon! I just got these!",
"Why not?",
"You only live once, so I live as an outlaw! TEAM ROCKET RULES!",
),
6: (
"Fork over all your cash when you lose to me, kid!",
"That can’t be true!",
"I was just joking about the money!",
),
7: (
"What do you want?",
"Don’t you dare laugh!",
"We like just hanging here, what’s it to you?",
),
8: (
"Hey, you just bumped me!",
"Kaboom!",
"You can also get to FUCHSIA from VERMILION using a coastal road.",
),
9: (
"Sure, I’ll go!",
"Don’t make me mad!",
"I like harassing people with my vicious POKEMON!",
),
10: (
"We're BIKERs! Highway stars!",
"Smoked!",
"Are you looking for adventure?",
),
11: (
"Let VOLTORB electrify you!",
"Grounded out!",
"I got my VOLTORB at the abandoned POWER PLANT.",
),
12: ("Get lost, kid!", "Are you satisfied?", "I need to catch a few Zs!"),
}
CueBall_Dialogue = {
0: (
"Nice BIKE! Hand it over!",
"Knock out!",
"Forget it, who needs your BIKE!",
),
1: (
"Come out and play, little mouse!",
"You little rat!",
"I hate losing! Get away from me!",
),
2: (
"I’m feeling hungry and mean!",
"Bad, bad, bad!",
"I like my POKEMON ferocious! They tear up enemies!",
),
3: ("What do you want, kiddo?", "Whoo!", "I could belly-bump you outta here!"),
4: (
"There’s no money in fighting kids!",
"Burned out!",
"Good stuff is lying all over!",
),
5: (
"I need a little exercise!",
"Whew! Good workout!",
"I'm sure I lost weight there!",
),
6: (
"My POKEMON won't evolve! Why?",
"Why, you!",
"Maybe some POKEMON need element STONEs to evolve.",
),
7: ("Be a rebel!", "Aaaaargh!", "Be ready to fight for your beliefs!"),
}
BirdKeeper_Dialogue = {
0: (
"My bird POKEMON want to scrap!",
"My bird combo lost?",
"My POKEMON look happy even though they lost.",
),
1: (
"I always go with bird POKEMON!",
"Out of power!",
"I wish I could fly like PIDGEY and PIDGEOTTO...",
),
2: (
"The wind’s blowing my way!",
"The wind turned!",
"I’m beat. I guess I’ll FLY home.",
),
3: (
"You need to use TMs to teach good moves to POKEMON!",
"Not good enough!",
"You have some HMs, right? POKEMON can’t ever forget those moves.",
),
4: (
"My bird POKEMON should be ready for battle.",
"Not ready yet!",
"They need to learn better moves.",
),
5: (
"TMs are on sale in CELADON! But, only a few people have HMs!",
"Aww, bummer!",
"Teach POKEMON moves of the same element type for more power.",
),
6: (
"Have you taught your bird POKEMON how to FLY?",
"Shot down in flames!",
"Bird POKEMON are my true love!",
),
7: (
"Have you heard of the legendary POKEMON?",
"Why? Why’d I lose?",
"The 3 legendary POKEMON are all birds of prey.",
),
8: (
"I’m not into it, but OK! Let’s go!",
"I knew it!",
"Winning, losing, it doesn’t matter in the long run!",
),
9: (
"Hmm? My birds are shivering! You’re good, aren’t you?",
"Just as I thought!",
"Did you know that moves like EARTHQUAKE don’t have any effect on "
"birds?",
),
10: (
"When I whistle, I can summon bird POKEMON!",
"Ow! That’s tragic!",
"Maybe I’m not cut out for battles.",
),
11: (
"This is my turf! Get out of here!",
"Darn!",
"This is my fave POKEMON hunting area!",
),
12: (
"Kurukkoo! How do you like my bird call?",
"I had to bug you!",
"I also collect sea POKEMON on weekends!",
),
13: (
"I always check every grassy area for new POKEMON.",
"Tch!",
"I wish I had a BIKE!",
),
14: ("I rode my bird POKEMON here!", "Oh no!", "My birds can’t fly me back!"),
}
Juggler_Dialogue = "Juggler"
Tamer_Dialogue = {
0: (
"Your POKEMON will cower at the crack of my whip!",
"Yowch! Whiplash!",
"Wait! I was just careless!",
),
}
Blackbelt_Dialogue = {
0: (
"Hahaha! Can you beat my power?",
"Oops! Out-muscled!",
"I go for power because I hate thinking!",
),
1: (
"Karate is the ultimate form of martial arts!",
"Atcho!",
"If my POKEMON were as good at Karate as I...",
),
}
Scientist_Dialogue = {
0: (
"Your POKEMON have weak points! I can nail them!",
"You hammered me!",
"Exploiting weak spots does work! Think about element types!",
),
}
Psychic_Dialogue = "One who Psychs"
Burglar_Dialogue = "Burglar"
# Module-level singleton instance used by the rest of the game code.
trainers = _trainers()
# /*
# ███████ ██████ ███████ ██████ ██ █████ ██
# ██ ██ ██ ██ ██ ██ ██ ██ ██
# ███████ ██████ █████ ██ ██ ███████ ██
# ██ ██ ██ ██ ██ ██ ██ ██
# ███████ ██ ███████ ██████ ██ ██ ██ ███████
# */
"""
Special_Dialogue is the only section that can contain four pieces of dialogue.
In these cases, they are out of order. Numbers 1, 2, and 3 are the standard
intro, defeat, and after parts, but some trainers (like leaders) give items
after the battle. Therefore, there needs to be an action between the two pieces
of dialogue - that's where Trainer.dialogue_extra comes in. Unfortunately,
since it's piece #4, the parts end up going in the following order: 1, 2, 4, 3.
"""
special_dialogue = {
# Trainer in Brock's gym
0: (
"Stop right there, kid! You're still light years from facing BROCK!",
"Darn! Light years isn't time! It measures distance!",
"You're pretty hot, but not as hot as BROCK!",
),
1: (
"I'm BROCK! I'm PEWTER GYM's GYM LEADER! I believe in rock hard "
"defense and determination! That's why my POKEMON are all the "
"rock-type! Do you still want to challenge me? Fine then! Show me your"
" best!",
"I took you for granted. As proof of your victory, here's the "
"BOULDERBADGE! That's an official POKEMON LEAGUE BADGE! It's bearer's "
"POKEMON are more powerful! The technique FLASH can now be used any "
"time! Wait, take this with you!",
"There are all kinds of trainers in the world! You appear to be very "
"gifted as a POKEMON Trainer! Go to the GYM in CERULEAN and test your"
" abilities!",
"A TM contains a technique that can be taught to POKEMON! A TM is good"
" only once! So when you use one to teach a new technique, pick the "
"POKEMON carefully! TM34 contains BIDE! Your POKEMON will absorb "
"damage in battle then pay it back double!",
),
# route 3
2: (
"Hi! I like shorts! They're comfy and easy to wear!",
"I don't believe it!",
"Are you storing your POKEMON on PC? Each BOX can hold 30 POKEMON!",
),
3: (
"Hey, you're not wearing shorts!",
"Lost! Lost! Lost!",
"I always wear shorts, even in winter!",
),
# mt. moon
4: (
"Suspicious men are in the cave. What about you?",
"You got me!",
"I saw them! I'm sure they're from TEAM ROCKET!",
),
5: (
"We're pulling a big job here! Get lost, kid!",
"So, you are good.",
"If you find a fossil, give it to me and scram!",
),
6: (
"TEAM ROCKET will find the fossils, revive and sell them for cash!",
"Urgh! Now I'm mad!",
"You made me mad! TEAM ROCKET will blacklist you!",
),
7: (
"Hey, stop! I found these fossils! They're both mine!",
"OK! I'll share!",
"We'll each take one! No being greedy!",
"All right. Then this one is mine! ...Far away, on CINNABAR ISLAND, "
"there's a POKEMON LAB. They do research on regenerating fossils.",
),
# Cerulean Gym
8: (
"Splash! I’m first up! Let’s do it!",
"That can’t be!",
"MISTY is going to keep improving! She won’t lose to someone like you!",
),
9: (
"I’m more than good enough for you! MISTY can wait!",
"You overwhelmed me!",
"You have to face other trainers to find out how good you really are.",
),
10: (
"Hi, you’re a new face! Trainers who want to to turn pro have to have"
"a policy about POKEMON! What is your approach when you catch "
"POKEMON? My policy is an all-out offensive with water-type POKEMON!",
"Wow! You’re too much! All right! You can have the CASCADEBADGE to "
"show you beat me! ...The CASCADEBADGE makes all POKEMON up to L30 "
"obey! That includes even outsiders! There’s more; you can now use "
"CUT any time! You can CUT down small bushes to open new paths! You "
"can also have my favorite TM!",
"TM11 teaches BUBBLEBEAM! Use it on an aquatic POKEMON!",
),
# rocket grunt who stole DIG
11: (
"Hey! Stay out! It’s not your yard! Huh? Me? I’m an innocent "
"bystander! Don’t you believe me?",
"Stop! I give up! I’ll leave quietly!",
"OK! I’ll return the TM I stole!",
),
# Blue coming from Nugget Bridge
12: (
"Yo! $PLAYER! You’re still struggling along back here? I’m doing "
"great! I caught a bunch of strong and smart POKEMON! Here, let me "
"see what you caught, $PLAYER!",
"Hey! Take it easy! You won already! ...Hey, guess what? I went to "
"BILL’s and got him to show me his rare POKEMON! That added a lot of"
" pages to my POKEDEX! After all, BILL’s world famous as a "
"POKEMANIAC! He invented the POKEMON Storage System on PC! Since "
"you’re using his system, go thank him! Well, I better get rolling! "
"Smell ya later!",
"",
),
# nugget bridge Trainer 1
13: (
"This is NUGGET BRIDGE! Beat us 5 trainers and win a fabulous prize! "
"Think you got what it takes?",
"Woo! Good stuff!",
"I did my best, I’ve no regrets!",
),
# nugget bridge Trainer 2
14: (
"I’m second! Now it’s serious!",
"How could I lose?",
"I did my best, I’ve no regrets!",
),
# nugget bridge Trainer 3
15: (
"Here’s No. 3! I won’t be easy!",
"Ow! Stomped flat!",
"I did my best, I’ve no regrets!",
),
# nugget bridge Trainer 4
16: ("I’m No. 4! Getting tired?", "I lost too!", "I did my best, I’ve no regrets!"),
# nugget bridge Trainer 5
17: (
"OK! I’m No. 5! I’ll stomp you!",
"Whoa! Too much!",
"I did my best, I’ve no regrets!",
),
# rocket asshole
18: (
"Congratulations! You beat our 5 contest trainers! You just earned a "
"fabulous prize!",
"Arrgh! You are good!",
"With your ability, you could become a top leader in TEAM ROCKET!",
"By the way, would you like to join TEAM ROCKET? We’re a group "
"dedicated to evil using POKEMON! Want to join? Are you sure? Come "
"on, join us! I’m telling you to join! OK, you need convincing! I’ll "
"make you an offer you can’t refuse!",
),
# dude in the grass
19: (
"I saw your feat from the grass!",
"I thought not!",
"I hid because the people on the bridge scared me!",
),
# gentleman on s.s.anne
20: (
"I travel alone on my journeys! My POKEMON are my only friends!",
"My, my friends...",
"You should be nice to friends!",
),
21: (
"You pup! How dare you barge in!",
"Humph! You rude child!",
"I wish to be left alone! Get out!",
),
# Blue comes out of the captain's quarters. What was he doing there?
# Hmmm...
22: (
"Bonjour! $PLAYER! Imagine seeing you here! $PLAYER, were you really "
"invited? So how’s your POKEDEX coming? I already caught 40 kinds, "
"pal! Different kinds are everywhere! Crawl around in grassy areas!",
"Humph! At least you’re raising your POKEMON! ...I heard there was a "
"CUT master on board. But he was just a seasick old man! But, CUT "
"itself is really useful! You should go see him! Smell ya!",
"",
),
# lt. surge gym guys
23: (
"This is no place for kids!",
"Wow! Surprised me!",
"LT.SURGE set up double locks! Here’s a hint! When you open the 1st "
"lock, the 2nd lock is right next to it!",
),
24: (
"I’m a lightweight, but I’m good with electricity!",
"Fried!",
"OK, I’ll talk! LT.SURGE said he hid door switches inside something!",
),
25: (
"When I was in the Army, LT.SURGE was my strict CO!",
"Stop! You’re very good!",
"The door won’t open? LT.SURGE always was cautious!",
),
26: (
"Hey, kid! What do you think you’re doing here? You won’t live long "
"in combat! That’s for sure! I tell you kid, electric POKEMON saved "
"me during the war! They zapped my enemies into paralysis! The same "
"as I’ll do to you!",
"Whoa! You’re the real deal, kid! Fine then, take the THUNDERBADGE! "
"...The THUNDERBADGE cranks up your POKEMON’s SPEED! It also lets "
"your POKEMON FLY any time, kid! You’re special, kid! Take this!",
"A little word of advice, kid! Electricity is sure powerful! But, "
"it’s useless against ground-type POKEMON!",
"TM24 contains THUNDERBOLT! Teach it to an electric POKEMON!",
),
# Pokemon Tower, Blue, floor 2
27: (
"Hey, $PLAYER! What brings you here? Your POKEMON don’t look dead! I"
" can at least make them faint! Let’s go, pal!",
"What? You stinker! I took it easy on you, too!",
"",
"How’s your POKEDEX coming, pal? I just caught a CUBONE! I can’t find"
" the grown-up MAROWAK yet! I doubt there are any left! Well, I "
"better get going! I’ve got a lot to accomplish, pal! Smell ya "
"later!",
),
# Celadon Mansion, trainer350. Beta Tester Teddy
28: (
"When your POKEMON’s health is low in battle, do you heal or press "
"the attack? Want to show me?", # yes/no
"Gotta risk it for the biscuit!", # lose
"I’m Teddy - nice to meet you, $PLAYER! I hope you’re having as much"
" fun playing as I did!", # after
"Cool! Let’s do it!",
), # after the yes/no
# Rocket Game Corner
29: (
"I’m guarding this poster! Go away, or else!",
"Dang!",
"Our hideout might be discovered! I better tell BOSS!",
),
30: (
"You broke into our operation?",
"Burnt!",
"You’re not going to get away with this, brat!",
),
31: (
"Who are you? How did you get here?",
"Owww! Beaten!",
"Are you dissing TEAM ROCKET?",
),
# section 2
32: (
"BOSS said you can see GHOSTs with the SILPH SCOPE!",
"I surrender!",
"The TEAM ROCKET HQ has 4 basement floors. Can you reach the BOSS?",
),
# section 3
33: (
"Why did you come here?",
"This won't do!",
"OK, I'll talk! Take the elevator to see my BOSS!",
),
34: ("Intruder alert!", "I can’t do it!", "SILPH SCOPE? I don’t know where it is!"),
# section 4
35: (
"We got word from upstairs that you were coming!",
"What? I lost? No!",
"Go ahead and go! But, you need the LIFT KEY to run the elevator!",
),
36: (
"Stop meddling in TEAM ROCKET’s affairs!",
"Oof! Taken down!",
"SILPH SCOPE? The machine the BOSS stole. It’s here somewhere.",
),
# section 5
37: (
"The elevator doesn’t work? Who has the LIFT KEY?",
"No!",
"Oh no! I dropped the LIFT KEY!",
),
# section 6
38: (
"Are you lost, you little rat?",
"Why...?",
"Uh-oh, that fight opened the door!",
),
# Giovanni's section
39: (
"How can you not see the beauty of our evil?",
"Ayaya!",
"BOSS! I’m sorry I failed you!",
),
40: (
"I know you! You ruined our plans at MT.MOON!",
"Burned again!",
"Do you have something against TEAM ROCKET?",
),
# GIOVANNIIIII
41: (
"So! I must say, I am impressed you got here!",
"WHAT! This cannot be!",
"",
"I see that you raise POKEMON with utmost care. A child like you "
"would never understand what I hope to achieve. I shall step aside "
"this time! I hope we meet again...",
),
# Celadon Gym - Leader Erika
42: (
"Hey! You’re not allowed here!",
"You’re too rough!",
"Bleaaah! I hope ERIKA wipes you out!",
),
43: (
"I was getting bored.",
"My makeup!",
"Grass-type POKEMON are tough against water-type! They also have an "
"edge on rock and ground POKEMON!",
),
44: (
"Look at my grass POKEMON! They’re so easy to raise!",
"No!",
"We only use grass-type POKEMON at our GYM! We also use them for "
"making flower arrangements!",
),
45: (
"Aren’t you the peeping Tom?",
"I’m in shock!",
"Oh, you weren’t peeping? We get a lot of gawkers!",
),
46: (
"Welcome to CELADON GYM! You better not underestimate girl power!",
"Oh! Beaten!",
"I didn’t bring my best POKEMON! Wait ’til next time!",
),
47: (
"Don’t bring any bugs or fire POKEMON in here!",
"Oh! You!",
"Our LEADER, ERIKA, might be quiet, but she’s also very skilled!",
),
# Erika
48: (
"Hello. Lovely weather, isn’t it? It’s so pleasant. ...Oh dear... I "
"must have dozed off. Welcome. My name is ERIKA. I am the LEADER of "
"CELADON GYM. I teach the art of flowing arranging. My POKEMON are "
"of the grass-type. ...Oh, I’m sorry, I had no idea that you wished"
" to challenge me. Very well, but I shall not lose.",
"Oh! I concede defeat. You are remarkably strong. I must confer you "
"the RAINBOWBADGE. ...The RAINBOWBADGE will make POKEMON up to L50 "
"obey. It also allows POKEMON to use STRENGTH in and out of battle. "
"Please also take this with you.",
"You are cataloguing POKEMON? I must say I’m impressed. I would "
"never collect POKEMON if they were unattractive.",
"TM21 contains MEGA DRAIN. Half the damage it inflicts is drained "
"to heal your POKEMON!",
),
# top floor of POKEMON TOWER
49: (
"This old guy came and complained about us harming useless POKEMON! "
"We’re talking it over as adults!",
"Please! No more!",
"POKEMON are only good for making money! Stay out of our business!",
),
50: (
"You’re not saving anyone, kid!",
"Don’t fight us ROCKETs!",
"You’re not getting away with this!",
),
# Fighting Dojo, Saffron City
51: (
"Hoohah! You’re trespassing in our FIGHTING DOJO!",
"Oof! I give up!",
"The prime fighters across the land train here.",
),
52: (
"I hear you’re good! Show me!",
"Judge! 1 point!",
"Our Master is a pro fighter!",
),
53: (
"Nothign tough frightens me! I break boulders for training!",
"Yow! Stubbed fingers!",
"The only thing that frightens us is psychic power!",
),
54: (
"Hoargh! Take your shoes off!",
"I give up!",
"You wait ’til you see our Master! I’m a small fry compared to him!",
),
55: (
"Grunt! I am the KARATE MASTER! I am the LEADER here! You wish to "
"challenge us? Expect no mercy! Fwaaa!",
"Hwa! Arrgh! Beaten!",
"Indeed, I have lost! But, I beseech you, do not take our emblem as"
"your trophy! in return, I will give you a prized fighting POKEMON! "
"Choose whichever you like!",
"Ho! Stay and train at Karate with us!",
),
# Silph Co Rockets
# floor 2
56: (
"Hey kid! What are you doing here?",
"I goofed!",
"SILPH CO. will be merged with TEAM ROCKET!",
),
57: (
"It’s off limits here! Go home!",
"You’re good.",
"Can you solve the maze in here?",
),
58: (
"No kids allowed in here!",
"Tough!",
"Diamond shaped tiles are teleport blocks! They’re hi-tech transporters!",
),
59: (
"Help! I’m a SILPH employee!",
"How did you know I was a ROCKET?",
"I work for both SILPH and TEAM ROCKET!",
),
# floor 3
60: (
"Quit messing with us, kid!",
"I give up!",
"A hint? You can open doors with a CARD KEY!",
),
61: (
"I support TEAM ROCKET more than I support SILPH!",
"You really got me!",
"Humph... TEAM ROCKET said that if I helped them, they’d let me "
"study POKEMON!",
),
# floor 4
62: ("Intruder spotted!", "Who are you?", "I’d better tell the BOSS on 11F!"),
63: (
"TEAM ROCKET has taken command of SILPH CO.!",
"Arrgh!",
"Fwahahahaha! My BOSS has been after this place!",
),
64: (
"My POKEMON are my loyal soldiers!",
"Darn! You weak POKEMON!",
"The doors are electronically locked! A CARD KEY opens them!",
),
# floor 5
65: (
"Whaaat? There shouldn’t be any children here!",
"Oh goodness!",
"You’re only on 5F. It’s a long way to my BOSS!",
),
66: (
"We study POKE BALL technology on this floo!",
"Dang! Blast it!",
"We worked on the ultimate POKE BALL which would catch anything!",
),
# floor 6
67: (
"I am one of the 4 ROCKET BROTHERS!",
"Flame out!",
"No matter! My brothers will avenge me!",
),
68: (
"That rotten PRESIDENT! He shouldn’t have sent me to the TIKSI BRANCH!",
"Shoot!",
"TIKSI BRANCH? It’s in Russian no man’s land!",
),
# floor 7
69: (
"I am one of the 4 ROCKET BROTHERS!",
"Aack! Brothers, I lost!",
"Doesn’t matter. My brothers will repay the favor!",
),
70: (
"A child intruder! That must be you!",
"Lights out!",
"Go on home before my BOSS gets ticked off!",
),
71: (
"Oh ho! I smell a little rat!",
"Lights out!",
"You won’t find my boss by just scurrying around!",
),
72: (
"Heheh! You mistook me for a SILPH worker?",
"I’m done!",
"Despite your age, you are a skilled Trainer!",
),
# floor 8
73: (
"I am one of the 4 ROCKET BROTHERS!",
"Whoo! Oh brothers!",
"I’ll leave you up to my brothers!",
),
74: (
"You’re causing us problems!",
"Huh? I lost?",
"So, what do you think of SILPH BUILDING’s maze?",
),
# floor 9
75: (
"I am one of the 4 ROCKET BROTHERS!",
"Warg! Brothers, I lost!",
"My brothers will avenge me!",
),
# floor 10
76: (
"Enough of your silly games!",
"No continues left!",
"Are you satisfied with beating me? Then go on home!",
),
77: (
"Welcome to the 10F! So good of you to join me!",
"I’m stunned!",
"Nice try, but the boardroom is up one more floor!",
),
# floor 11 - the wrong side
78: (
"Halt! Do you have an appointment with my BOSS?",
"Gaah! Demolished!",
"Watch your step, my BOSS likes his POKEMON though!",
),
# floor 7 - via teleportation. Blue!
79: (
"What kept you RED? Hahaha! I thought you'd turn up if I waited "
"here! I guess TEAM ROCKET slowed you down! Not that I care! I saw "
"you in SAFFRON, so I decided to see if you got better!",
"Oh ho! So, you are ready for BOSS ROCKET!",
"",
"Well, RED! I'm moving on up and ahead! By checking my POKEDEX, I'm "
"starting to see what's strong and how they evolve! I'm going to the"
" POKEMON LEAGUE to boot out the ELITE FOUR! I'll become the world's"
" most powerful Trainer! RED, well good luck to you! Don't sweat it!"
" Smell ya!",
),
# floor 11 - the right side
80: (
"Stop right there! Don’t you move!",
"Don’t... Please!",
"So, you want to see my BOSS?",
),
81: (
"Ah, $PLAYER! So we meet again! The PRESIDENT and I are discussing "
"a vital business proposition. Keep your nose out of grown-up "
"matters... Or, experience a world of pain!",
"Arrgh!! I lost again!?",
"",
"Blast it all! You ruined our plans for SILPH! But, TEAM ROCKET will"
" never fall! $PLAYER! Never forget that all POKEMON exist for "
"TEAM ROCKET! I must go, but I shall return!",
),
# SABRINA's GYM
82: (
"SABRINA is young, but she’s also our LEADER! You won’t reach hereasily!",
"I lost my concentration!",
"There used to be 2 POKEMON GYMs in SAFFRON. The FIGHTING DOJO next "
"door lost its GYM status when we went and creamed them!",
),
83: (
"You know that power alone isn’t enough!",
"I don’t believe this!",
"SABRINA just wiped out the KARATE MASTER next door!",
),
84: (
"Does our unseen power scare you?",
"I never foresaw this!",
"Psychic POKEMON fear only ghosts and bugs!",
),
85: (
"SABRINA is younder than I, but I respect her!",
"Not good enough!",
"In a battle of equals, the one with the stronger will wins! If you "
"wish to beat SABRINA, focus on winning!",
),
86: (
"SAFFRON POKEMON GYM is famous for its psychics! You want to see "
"SABRINA! I can tell!",
"Arrrgh!",
"That’s right! I used telepathy to read your mind!",
),
87: (
"POKEMON take on the appearance of their trainers. Your POKEMON must"
"be tough, then!",
"I knew it!",
"I must teach better techniques to my POKEMON!",
),
88: (
"You and I, our POKEMON shall fight!",
"I lost after all!",
"I knew that this was going to take place.",
),
# Sabrina
89: (
"I had a vision of your arrival! I have had psychic powers since I "
"was a child. I first learned to bend spoons with my mind. I "
"dislike fighting, but if you wish, I will show you my powers!",
"I’m shocked! But, a loss is a loss. I admit I didn’t work hard "
"enough to win! You earned the MARSHBADGE! ...The MARSHBADGE makes"
" POKEMON up to L70 obey you! Stronger POKEMON will become wild, "
"ignoring your orders in battle! Just don’t raise your POKEMON too"
" much! Wait, please take this TM with you!",
"",
"TM46 is PSYWAVE! It uses powerful psychic waves to inflict damage!",
),
# KOGA's GYM
90: (
"Strength isn’t the key for POKEMON! It’s strategy! I’ll show you "
"how strategy can beat brute strength!",
"What? Extraordinary!",
"So, you mix brawn with brains? Good strategy!",
),
91: (
"Let’s see you beat my special techniques!",
"You had me fooled!",
"I like poison and sleep techniques, as they linger after battle!",
),
92: (
"I wanted to become a ninja, so I joined this GYM!",
"I’m done for!",
"I will keep training under KOGA, my ninja master!",
),
93: (
"I also study the way of the ninja with master KOGA! Ninja have a "
"long history of using animals!",
"Awoo!",
"I still have much to learn!",
),
94: (
"Stop right there! Our invisible walls have you frustrated?",
"Whoa! You’ve got it!",
"You’ve impressed me! Here’s a hint! Look very closely for gaps in "
"the invisible walls!",
),
95: (
"Master KOGA comes from a long line of ninjas! What do you descend from?",
"Dropped my balls!",
"Where there is light, there is shadow! Light and shadow! Which do "
"you choose?",
),
# koga
96: (
"Fwahahahaha! A mere child like you dares to challenge me? Very "
"well, I shall show you true terror as a ninja master! You shall "
"feel the despair of poison and sleep techniques!",
"Humph! You have proven your worth! Here! Take the SOULBADGE! ...Now"
" that you have the SOULDBADGE, the DEFENSE of your POKEMON "
"increases! It also lets you SURF outside of battle! Ah! Take this,"
" too!",
"When afflicted by TOXIC, POKEMON suffer more and more as battle "
"progresses! It will surely terrorize foes!",
"TM06 contains TOXIC! It is a secret technique over 400 years old!",
),
# beauty swimmers, route 19-20
97: (
"Swimming’s great! Sunburns aren’t!",
"Shocker!",
"My boyfriend wanted to swim to SEAFOAM ISLANDS.",
),
98: (
"I swam here, but I’m tired.",
"I’m exhausted...",
"LAPRAS is so big, it must keep you dry on water.",
),
99: (
"Oh, I just love your ride! Can I have it if I win?",
"Oh! I lost!",
"It’s still a long way to go to SEAFOAM ISLANDS.",
),
100: (
"SEAFOAM is a quiet getaway!",
"Quit it!",
"There’s a huge cavern underneath this island.",
),
101: ("I love floating with the fishes!", "Yowch!", "Want to float with me?"),
# jr Trainer F
102: (
"I swam here from CINNABAR ISLAND!",
"I’m so disappointed!",
"POKEMON have taken over an abandoned mansion on CINNABAR!",
),
# beauty
103: (
"CINNABAR, in the west, has a LAB for POKEMON.",
"Wait!",
"CINNABAR is a volcanic island!",
),
# pokemon mansion, first floor
104: (
"Who are you? There shouldn’t be anyone here.",
"Ouch!",
"A key? I don’t know what you’re talking about.",
),
# pokemon mansion, second floor
105: (
"I can’t get out! This old place is one big puzzle!",
"Oh no! My bag of loot!",
"Switches open and close alternating sets of doors!",
),
# pokemon mansion, third floor
106: ("This place is like, huge!", "Ayah!", "I wonder where my partner went!"),
107: (
"My mentor once lived here.",
"Whew! Overwhelming!",
"So, you’re stuck? Try jumping off over there!",
),
# pokemon mansion, basement
108: ("Uh-oh. Where am I now?", "Awooh!", "You can find stuff lying around."),
109: (
"This place is ideal for a lab.",
"What was that for?",
"I like it here! It’s conducive to my studies!",
),
# BLAINE's GYM
110: (
"I was a thief, but I became straight as a Trainer!",
"I surrender!",
"I can’t help stealing other people’s POKEMON!",
),
111: (
"Do you know how hot POKEMON fire breath can get?",
"Yow! Hot, hot, hot!",
"Fire, or to be more precise, combustion... Blah, blah, blah, blah...",
),
112: (
"You can’t win! I have studied POKEMON totally!",
"Waah! My studies!",
"My theories are too complicated for you!",
),
113: (
"I just like using fire POKEMON!",
"Too hot to handle!",
"I wish there was a thief POKEMON! I’d use that!",
),
114: (
"I know why BLAINE became a Trainer!",
"Ow!",
"BLAINE was lost in the mountains when a fiery bird POKEMON "
"appeared. Its light enabled BLAINE to find his way down!",
),
115: (
"I’ve been to many GYMs, but this is my favorite!",
"Yowza! Too hot!",
"Us fire POKEMON fans like PONYTA and NINETALES!",
),
116: (
"Fire is weak against H2O!",
"Oh! Snuffed out!",
"Water beats fire! But, fire melts ice POKEMON!",
),
# blaine himself
117: (
"Hah! I am BLAINE! I am the LEADER of CINNABAR GYM! My fiery "
"POKEMON will incinerate all challengers! Hah! You better have "
"BURN HEAL!",
"I have burnt out! You have earned the VOLCANOBADGE! ...Hah! The "
"VOLCANOBADGE heightens the SPECIAL abilities of your POKEMON! "
"Here, you can have this too!",
"FIRE BLAST is the ultimate fire technique! Don’t waste it on water"
" POKEMON!",
"TM38 contains FIRE BLAST! Teach it to fire-type POKEMON! "
"CHARMELEON or PONYTA would be good bets!",
),
# Route 21, swimming CUE BALL
118: (
"What’s wrong with me swimming?",
"Cheap shot!",
"I look like what? A studded inner tube? Get lost!",
),
# viridian gym
119: (
"VIRIDIAN GYM was closed for a long time, but now our LEADER is back!",
"I was beaten?",
"You can go onto POKEMON LEAGUE only by defeating our GYM LEADER!",
),
120: (
"Heh! You must be running out of steam by now!",
"I ran out of gas!",
"You need power to keep up with our GYM LEADER!",
),
121: (
"Rrrrroar! I’m working myself into a rage!",
"Wargh!",
"I’m still not worthy!",
),
122: (
"POKEMON and I, we make wonderful music together!",
"You are in perfect harmony!",
"Do you know the identity of our GYM LEADER?",
),
123: (
"The truly talented win with style!",
"I lost my grip!",
"The LEADER will scold me!",
),
124: (
"I’m the KARATE KING! Your fate rests with me!",
"Ayah!",
"POKEMON LEAGUE? Don’t get cocky!",
),
# GIOVANNIIIIIII
125: (
"Fwahahaha! This is my hideout! I planned to resurrect TEAM ROCKET"
"here! But, you have caught me again! So be it! This time, I'm not"
" holding back! Once more, you shall face GIOVANNI, the greatest"
" Trainer!",
"Ha! That was a truly intense fight! You have won! As proof, here "
"is the EARTHBADGE! ...The EARTHBADGE makes POKEMON of any level "
"obey! It is evidence of your mastery as a POKEMON Trainer! With "
"it, you can enter the POKEMON LEAGUE! It is my gift to you for "
"your POKEMON LEAGUE challenge!",
"Having lost, I cannot face my underlings! TEAM ROCKET is finished "
"forever! I will dedicate my life to the study of POKEMON! ...Let "
"us meet again some day! Farewell!",
"TM27 is FISSURE! It will take out POKEMON with just one hit! I "
"made it when I ran the GYM here, too long ago...",
),
# Route 22, on the way to pokemon league
126: (
"What? $PLAYER! What a surprise to see you here! So you’re going "
"to POKEMON LEAGUE? You collected all the BADGEs too? That’s cool! "
"Then I’ll whip you, $PLAYER, as a warm up for the POKEMON LEAGUE! "
"Come on!",
"What!? I was just careless! ...That loosened me up! I’m ready for "
"the POKEMON LEAGUE! $PLAYER, you need more practice! But hey, you "
"know that! I’m out of here. Smell ya!",
"",
),
# victory road, floor 2
127: (
"VICTORY ROAD is the final test for trainers!",
"Aiyah!",
"If you get stuck, try moving some boulders around!",
),
128: (
"Ah, so you wish to challenge the ELITE FOUR?",
"You got me!",
"$RIVAL also came through here!",
),
129: (
"Come on! I’ll whip you!",
"I got whipped!",
"You earned the right to be on VICTORY ROAD!",
),
130: (
"Is VICTORY ROAD too tough?",
"Well done!",
"Many trainers give up the challenge here.",
),
131: (
"If you can get through here, you can go meet the ELITE FOUR!",
"No! Unbelievable!",
"I can beat you when it comes to knowledge about POKEMON!",
),
# level 3
132: (
"I heard rumors of a prodigy!",
"The rumors were true!",
"You beat GIOVANNI of TEAM ROCKET?",
),
133: (
"Trainers live to seek stronger opponents!",
"Oh! So strong!",
"By fighting tough battles, you get stronger!",
),
134: (
"Only the chosen can pass here!",
"I don’t believe it!",
"All trainers here are headed to the POKEMON LEAGUE! Be careful!",
),
# ahliah
135: (
"I’m the last one. You made it this far - show me you can go further!",
"You may continue.",
"You showed me just how good I was!",
),
# ELITE FOUR
136: (
"Welcome to the POKEMON LEAGUE! I am LORELEI of the ELITE FOUR! No "
"one can best me when it comes to icy POKEMON! Freezing moves are "
"powerful! Your POKEMON will be at my mercy when they are frozen "
"solid! Hahaha! Are you ready?",
"How dare you!",
"You’re better than I thought! Go on ahead! You only got a taste "
"of POKEMON LEAGUE power!",
),
137: (
"I am BRUNO of the ELITE FOUR! Through rigorous training, people "
"and POKEMON can become stronger! I’ve weight trained with my "
"POKEMON! ...$PLAYER! We will grind you down with our superior "
"power! Hoo hah!",
"Why? How could I lose?",
"My job is done! Go face your next challenge!",
),
138: (
"I am AGATHA of the ELITE FOUR! OAK’s taken a lot of interest in "
"you, child! That old duff was once tough and handsome! That was "
"decades ago! Now he just wants to fiddle with his POKEDEX! He’s "
"wrong! POKEMON are for fighting! ...$PLAYER! I’ll show you how a "
"real Trainer fights!", # wow, she's not bitter at all
"Oh ho! You’re something special, child!",
"You win! I see what the old duff sees in you now! I have nothing "
"else to say. Run along now, child!",
),
139: (
"Ah! I heard about you, $PLAYER! I lead the ELITE FOUR! You can "
"call me LANCE the dragon Trainer! You know that dragons are "
"mythical POKEMON! They’re hard to catch and raise, but their "
"powers are superior! They’re virtually indestructable! ...Well, "
"are you ready to lose? Your LEAGUE challenge ends with me, "
"$PLAYER!",
"That’s it! I hate to admit it, but you are a POKEMON master! ...I"
" still can’t believe my dragons lost to you, $PLAYER! You are now "
"the POKEMON LEAGUE champion!",
"...Or, you would have been, but you have one more challenge ahead."
" You have to face another Trainer! His name is... $RIVAL! He beat "
"the ELITE FOUR before you! He is the real POKEMON LEAGUE "
"champion!",
),
140: (
"Hey! I was looking forward to seeing you, $PLAYER! My rival should"
" be strong to keep me sharp! While working on the POKEDEX, I "
"looked all over for powerful POKEMON! Not only that, I assembled "
"teams that would beat any POKEMON type! And now! I’m the POKEMON "
"LEAGUE champion! ...$PLAYER! Do you know what that means? I’ll "
"tell you! I am the most powerful Trainer in the world!",
"NO!! That can’t be! You beat my best! After all that work to "
"become LEAGUE champ? My reign is over already? It’s not fair! "
"...WHY?! Why did I lose? I never made any mistakes raising my "
"POKEMON... Darn it! You’re the new POKEMON LEAGUE champion! "
"Although I don’t like to admit it.",
"",
),
}
| mit |
asrie/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/port/qt.py | 113 | 7883 | # Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the Google name nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""QtWebKit implementation of the Port interface."""
import glob
import logging
import re
import sys
import os
import platform
from webkitpy.common.memoized import memoized
from webkitpy.layout_tests.models.test_configuration import TestConfiguration
from webkitpy.port.base import Port
from webkitpy.port.xvfbdriver import XvfbDriver
_log = logging.getLogger(__name__)
class QtPort(Port):
    """Port implementation for the Qt port of WebKit.

    The full port name encodes the host OS (e.g. "qt-linux", "qt-mac",
    "qt-win"); the helpers below derive baseline search paths, library
    locations and driver environment settings from it.
    """

    ALL_VERSIONS = ['linux', 'win', 'mac']
    port_name = "qt"

    # Matches e.g. "Using Qt version 5.0.2 in /usr/lib" in `qmake -v`
    # output; only the major.minor pair is captured.  Compiled once and
    # written as a raw string so the \s/\d escapes are unambiguous.
    _QT_VERSION_RE = re.compile(r'Qt\sversion\s(?P<version>\d\.\d)')

    def _wk2_port_name(self):
        # Name used for the WebKit2 variant of this port.
        return "qt-wk2"

    def _port_flag_for_scripts(self):
        # Flag passed to helper scripts (e.g. run-launcher) to select Qt.
        return "--qt"

    @classmethod
    def determine_full_port_name(cls, host, options, port_name):
        """Expand the generic "qt" port name with the host OS, e.g. "qt-linux"."""
        if port_name and port_name != cls.port_name:
            return port_name
        return port_name + '-' + host.platform.os_name

    # sys_platform exists only for unit testing.
    def __init__(self, host, port_name, **kwargs):
        super(QtPort, self).__init__(host, port_name, **kwargs)
        # port_name is e.g. "qt-linux"; stripping the prefix leaves the OS.
        self._operating_system = port_name.replace('qt-', '')

        # FIXME: Why is this being set at all?
        self._version = self.operating_system()

    def _generate_all_test_configurations(self):
        """Return every OS/build-type combination this port can test (x86 only)."""
        configurations = []
        for version in self.ALL_VERSIONS:
            for build_type in self.ALL_BUILD_TYPES:
                configurations.append(TestConfiguration(version=version, architecture='x86', build_type=build_type))
        return configurations

    def _build_driver(self):
        # The Qt port builds DRT as part of the main build step.
        return True

    def supports_per_test_timeout(self):
        return True

    def _path_to_driver(self):
        return self._build_path('bin/%s' % self.driver_name())

    def _path_to_image_diff(self):
        return self._build_path('bin/ImageDiff')

    def _path_to_webcore_library(self):
        """Return the platform-specific path to the built WebKit library."""
        if self.operating_system() == 'mac':
            return self._build_path('lib/QtWebKitWidgets.framework/QtWebKitWidgets')
        else:
            return self._build_path('lib/libQt5WebKitWidgets.so')

    def _modules_to_search_for_symbols(self):
        # We search in every library to be reliable in the case of building
        # with CONFIG+=force_static_libs_as_shared.
        if self.operating_system() == 'mac':
            frameworks = glob.glob(os.path.join(self._build_path('lib'), '*.framework'))
            return [os.path.join(framework, os.path.splitext(os.path.basename(framework))[0]) for framework in frameworks]
        else:
            suffix = 'dll' if self.operating_system() == 'win' else 'so'
            return glob.glob(os.path.join(self._build_path('lib'), 'lib*.' + suffix))

    @memoized
    def qt_version(self):
        """Return the Qt major.minor version reported by `qmake -v` (e.g. '5.0').

        Falls back to '5.0' when qmake cannot be run at all; returns ''
        when qmake runs but its output has no recognizable version line.
        """
        version = ''
        try:
            for line in self._executive.run_command(['qmake', '-v']).split('\n'):
                match = self._QT_VERSION_RE.search(line)
                if match:
                    version = match.group('version')
                    break
        except OSError:
            version = '5.0'
        return version

    def _search_paths(self):
        """Return baseline directory names, most specific first.

        The fallback graph looks like this:

            qt-mac-wk2
                /
            qt-wk1  qt-wk2
                 \\/
              qt-5.x
                 \\
             (qt-linux|qt-mac|qt-win)
                  |
                 qt
        """
        search_paths = []
        if self.get_option('webkit_test_runner'):
            if self.operating_system() == 'mac':
                search_paths.append('qt-mac-wk2')
            search_paths.append('qt-wk2')
        else:
            search_paths.append('qt-wk1')
        search_paths.append('qt-' + self.qt_version())
        search_paths.append(self.port_name + '-' + self.operating_system())
        search_paths.append(self.port_name)
        return search_paths

    def default_baseline_search_path(self):
        return map(self._webkit_baseline_path, self._search_paths())

    def _port_specific_expectations_files(self):
        paths = self._search_paths()
        if self.get_option('webkit_test_runner'):
            paths.append('wk2')
        # expectations_files() uses the directories listed in _search_paths reversed.
        # e.g. qt -> qt-linux -> qt-5.x -> qt-wk1
        return list(reversed([self._filesystem.join(self._webkit_baseline_path(p), 'TestExpectations') for p in paths]))

    def setup_environ_for_server(self, server_name=None):
        """Build the environment for the driver/server process.

        Starts from the base port's cleaned environment and forwards the
        Qt-specific variables that influence DRT/WTR behavior.
        """
        clean_env = super(QtPort, self).setup_environ_for_server(server_name)
        clean_env['QTWEBKIT_PLUGIN_PATH'] = self._build_path('lib/plugins')
        for variable in (
                'QT_DRT_WEBVIEW_MODE',
                'DYLD_IMAGE_SUFFIX',
                'QT_WEBKIT_LOG',
                'DISABLE_NI_WARNING',
                'QT_WEBKIT_PAUSE_UI_PROCESS',
                'QT_QPA_PLATFORM_PLUGIN_PATH',
                'QT_WEBKIT_DISABLE_UIPROCESS_DUMPPIXELS'):
            self._copy_value_from_environ_if_set(clean_env, variable)
        return clean_env

    # FIXME: We should find a way to share this implementation with Gtk,
    # or teach run-launcher how to call run-safari and move this down to Port.
    def show_results_html_file(self, results_filename):
        """Open the results page in the Qt launcher ('-2' selects WebKit2)."""
        run_launcher_args = []
        if self.get_option('webkit_test_runner'):
            run_launcher_args.append('-2')
        run_launcher_args.append("file://%s" % results_filename)
        self._run_script("run-launcher", run_launcher_args)

    def operating_system(self):
        return self._operating_system

    def check_sys_deps(self, needs_http):
        """Check base dependencies plus the WEBKIT_TESTFONTS environment variable."""
        result = super(QtPort, self).check_sys_deps(needs_http)
        if 'WEBKIT_TESTFONTS' not in os.environ:
            _log.error('\nThe WEBKIT_TESTFONTS environment variable is not defined or not set properly.')
            _log.error('You must set it before running the tests.')
            _log.error('Use git to grab the actual fonts from http://gitorious.org/qtwebkit/testfonts')
            return False
        return result

    # Qt port is not ready for parallel testing, see
    # https://bugs.webkit.org/show_bug.cgi?id=77730 for details.
    def default_child_processes(self):
        return 1
| bsd-3-clause |
WesleyyC/Novels-Punctuation | pdfminer/pdfminer/pdfpage.py | 6 | 5140 | #!/usr/bin/env python
import logging
from .psparser import LIT
from .pdftypes import PDFObjectNotFound
from .pdftypes import resolve1
from .pdftypes import int_value
from .pdftypes import list_value
from .pdftypes import dict_value
from .pdfparser import PDFParser
from .pdfdocument import PDFDocument
from .pdfdocument import PDFTextExtractionNotAllowed
# some predefined literals and keywords.
LITERAL_PAGE = LIT('Page')
LITERAL_PAGES = LIT('Pages')
## PDFPage
##
class PDFPage(object):

    """An object that holds the information about a page.

    A PDFPage object is merely a convenience class that has a set
    of keys and values, which describe the properties of a page
    and point to its contents.

    Attributes:
      doc: a PDFDocument object.
      pageid: any Python object that can uniquely identify the page.
      attrs: a dictionary of page attributes.
      contents: a list of PDFStream objects that represents the page content.
      lastmod: the last modified time of the page.
      resources: a list of resources used by the page.
      mediabox: the physical size of the page.
      cropbox: the crop rectangle of the page.
      rotate: the page rotation (in degree).
      annots: the page annotations.
      beads: a chain that represents natural reading order.
    """

    debug = False

    def __init__(self, doc, pageid, attrs):
        """Initialize a page object.

        doc: a PDFDocument object.
        pageid: any Python object that can uniquely identify the page.
        attrs: a dictionary of page attributes.
        """
        self.doc = doc
        self.pageid = pageid
        self.attrs = dict_value(attrs)
        self.lastmod = resolve1(self.attrs.get('LastModified'))
        self.resources = resolve1(self.attrs.get('Resources', dict()))
        self.mediabox = resolve1(self.attrs['MediaBox'])
        # CropBox is optional; fall back to MediaBox when it is absent.
        if 'CropBox' in self.attrs:
            self.cropbox = resolve1(self.attrs['CropBox'])
        else:
            self.cropbox = self.mediabox
        # Normalize the rotation into the range [0, 360).
        self.rotate = (int_value(self.attrs.get('Rotate', 0))+360) % 360
        self.annots = self.attrs.get('Annots')
        self.beads = self.attrs.get('B')
        # /Contents may be a single stream or a list of streams; always
        # store it as a list.
        if 'Contents' in self.attrs:
            contents = resolve1(self.attrs['Contents'])
        else:
            contents = []
        if not isinstance(contents, list):
            contents = [contents]
        self.contents = contents
        return

    def __repr__(self):
        return '<PDFPage: Resources=%r, MediaBox=%r>' % (self.resources, self.mediabox)

    # Page-tree attributes that child nodes inherit from their parents
    # when they do not define them locally.
    INHERITABLE_ATTRS = set(['Resources', 'MediaBox', 'CropBox', 'Rotate'])

    @classmethod
    def create_pages(klass, document):
        """Yield a PDFPage for every page object found in *document*.

        Walks the /Pages tree from the document catalog, propagating
        inheritable attributes down to leaf /Page nodes.  When the
        catalog has no usable /Pages entry (broken PDFs), falls back to
        scanning every object in the xref tables for /Type /Page
        dictionaries.
        """
        def search(obj, parent):
            # Depth-first traversal of the page tree, yielding
            # (objid, attribute-dict) pairs for each leaf page.
            # obj may be either a bare object id (int) or an indirect
            # object reference; resolve it to a dictionary either way.
            if isinstance(obj, int):
                objid = obj
                tree = dict_value(document.getobj(objid)).copy()
            else:
                objid = obj.objid
                tree = dict_value(obj).copy()
            # Copy inheritable attributes from the parent node unless
            # the child overrides them.
            for (k, v) in parent.iteritems():
                if k in klass.INHERITABLE_ATTRS and k not in tree:
                    tree[k] = v
            if tree.get('Type') is LITERAL_PAGES and 'Kids' in tree:
                # Interior node: recurse into each kid.
                if klass.debug: logging.info('Pages: Kids=%r' % tree['Kids'])
                for c in list_value(tree['Kids']):
                    for x in search(c, tree):
                        yield x
            elif tree.get('Type') is LITERAL_PAGE:
                # Leaf node: an actual page.
                if klass.debug: logging.info('Page: %r' % tree)
                yield (objid, tree)
        # Track whether the tree walk produced any pages at all.
        pages = False
        if 'Pages' in document.catalog:
            for (objid, tree) in search(document.catalog['Pages'], document.catalog):
                yield klass(document, objid, tree)
                pages = True
        if not pages:
            # fallback when /Pages is missing.
            for xref in document.xrefs:
                for objid in xref.get_objids():
                    try:
                        obj = document.getobj(objid)
                        if isinstance(obj, dict) and obj.get('Type') is LITERAL_PAGE:
                            yield klass(document, objid, obj)
                    except PDFObjectNotFound:
                        pass
        return

    @classmethod
    def get_pages(klass, fp,
                  pagenos=None, maxpages=0, password=b'',
                  caching=True, check_extractable=True):
        """Yield selected PDFPage objects from an open PDF file *fp*.

        pagenos: optional collection of 0-based page indices to include.
        maxpages: stop after yielding this many pages (0 = no limit).
        password: password for decrypting the document, if encrypted.
        caching: whether the document caches resolved objects.
        check_extractable: raise if the PDF forbids text extraction.
        """
        # Create a PDF parser object associated with the file object.
        parser = PDFParser(fp)
        # Create a PDF document object that stores the document structure.
        doc = PDFDocument(parser, password=password, caching=caching)
        # Check if the document allows text extraction. If not, abort.
        if check_extractable and not doc.is_extractable:
            raise PDFTextExtractionNotAllowed('Text extraction is not allowed: %r' % fp)
        # Process each page contained in the document.
        for (pageno, page) in enumerate(klass.create_pages(doc)):
            if pagenos and (pageno not in pagenos):
                continue
            yield page
            if maxpages and maxpages <= pageno+1:
                break
        return
| mit |
ldong/vim_youcompleteme | cpp/ycm/tests/gmock/gtest/xcode/Scripts/versiongenerate.py | 3088 | 4536 | #!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""A script to prepare version informtion for use the gtest Info.plist file.
This script extracts the version information from the configure.ac file and
uses it to generate a header file containing the same information. The
#defines in this header file will be included in during the generation of
the Info.plist of the framework, giving the correct value to the version
shown in the Finder.
This script makes the following assumptions (these are faults of the script,
not problems with the Autoconf):
1. The AC_INIT macro will be contained within the first 1024 characters
of configure.ac
2. The version string will be 3 integers separated by periods and will be
surrounded by square brackets, "[" and "]" (e.g. [1.0.1]). The first
segment represents the major version, the second represents the minor
version and the third represents the fix version.
3. No ")" character exists between the opening "(" and closing ")" of
AC_INIT, including in comments and character strings.
"""
import sys
import re

# Read the command line arguments (the input dir holding configure.ac and
# the output directory for Version.h).
if (len(sys.argv) < 3):
    # Parenthesized print works identically on Python 2 and 3 for one arg.
    print("Usage: versiongenerate.py input_dir output_dir")
    sys.exit(1)
else:
    input_dir = sys.argv[1]
    output_dir = sys.argv[2]

# Read the first 1024 characters of the configure.ac file; per the module
# docstring, AC_INIT is assumed to appear within that prefix.
buffer_size = 1024
# 'with' guarantees the file is closed even if read() raises.
with open("%s/configure.ac" % input_dir, 'r') as config_file:
    opening_string = config_file.read(buffer_size)

# Extract the version string from the AC_INIT macro.
# The following expression means: extract three integers separated by
# periods and surrounded by square brackets (e.g. "[1.0.1]") between
# "AC_INIT(" and ")".  Do not be greedy (*? is the non-greedy flag) since
# that would pull in everything between the first "(" and the last ")"
# in the file.
version_expression = re.compile(r"AC_INIT\(.*?\[(\d+)\.(\d+)\.(\d+)\].*?\)",
                                re.DOTALL)
version_values = version_expression.search(opening_string)
if version_values is None:
    # Fail with a clear message instead of an AttributeError on .group().
    print("versiongenerate.py: no AC_INIT version found in configure.ac")
    sys.exit(1)
major_version = version_values.group(1)
minor_version = version_values.group(2)
fix_version = version_values.group(3)

# Write the version information to a header file to be included in the
# Info.plist file.
file_data = """//
// DO NOT MODIFY THIS FILE (but you can delete it)
//
// This file is autogenerated by the versiongenerate.py script. This script
// is executed in a "Run Script" build phase when creating gtest.framework. This
// header file is not used during compilation of C-source. Rather, it simply
// defines some version strings for substitution in the Info.plist. Because of
// this, we are not not restricted to C-syntax nor are we using include guards.
//
#define GTEST_VERSIONINFO_SHORT %s.%s
#define GTEST_VERSIONINFO_LONG %s.%s.%s
""" % (major_version, minor_version, major_version, minor_version, fix_version)
with open("%s/Version.h" % output_dir, 'w') as version_file:
    version_file.write(file_data)
| gpl-3.0 |
Reiex/Endless-rooms | Client/session.py | 1 | 25844 | # -*- coding:utf_8 -*
from editeur_niveaux import *
from pickle import Pickler, Unpickler
class Session:
    """A saved game: the player's name, progress flags and a picklable
    snapshot of the current floor (etage), player and monsters.

    The session is persisted with pickle under save/<nom>.elr.
    """

    def __init__(self):
        # nom doubles as the save-file base name (save/<nom>.elr).
        self.nom = str()
        self.utiliser_sauvegarde = False
        self.tutoriel_fait = False
        self.etage = dict()
        self.joueur = dict()
        self.monstres = list()
        self.temps = list()

    # Ask the user for a session name, then initialise the session.
    # Returns True when a session was created, False to go back.
    def creer(self, window, resolution, temps_actuel, raccourcis):
        nom_deja_pris = False
        while True:
            # Build the session-creation menu page, then display it.
            if not nom_deja_pris:
                page = Page([Menu(("Pseudo:", "_", 10), 0, 0,
                                  resolution["w"] if resolution["w"] >= 1024 else 1024,
                                  resolution["h"]-100 if resolution["h"] >= 576 else 476, "input"),
                             Menu(("Retour", "Valider"), 0,
                                  resolution["h"]-100 if resolution["h"] >= 576 else 476,
                                  resolution["w"] if resolution["w"] >= 1024 else 1024, 100,
                                  flags=("horizontal",))], FOND)
            else:
                # Same page, plus a "name already taken" notice on top.
                page = Page([Menu(("Le pseudo que vous avez saisi\nest déjà utilisé.",), 0, 0,
                                  resolution["w"] if resolution["w"] >= 1024 else 1024, 150, "textes"),
                             Menu(("Pseudo:", "_", 10), 0, 100,
                                  resolution["w"] if resolution["w"] >= 1024 else 1024,
                                  resolution["h"]-250 if resolution["h"] >= 576 else 326, "input"),
                             Menu(("Retour", "Valider"), 0,
                                  resolution["h"]-100 if resolution["h"] >= 576 else 476,
                                  resolution["w"] if resolution["w"] >= 1024 else 1024, 100,
                                  flags=("horizontal",))], FOND)
            sortie = page.afficher(window, temps_actuel, raccourcis, resolution)
            # Process the menu result.
            if isinstance(sortie, dict):
                # Button: back
                if sortie["choix"] == [1, 0]:
                    return False
                # Button: confirm (menu index shifts when the notice is shown)
                if sortie["choix"] == [2 if nom_deja_pris else 1, 1]:
                    num = 1 if nom_deja_pris else 0
                    if sortie["valeur"][num] != str() and sortie["valeur"][num] not in obtenir_liste_noms_sessions():
                        self.nom = sortie["valeur"][num]
                        return True
                    elif sortie["valeur"][num] in obtenir_liste_noms_sessions():
                        nom_deja_pris = True
            elif sortie == 0:
                continue
            elif sortie == 1:
                exit(0)
            else:
                return False

    # Display the list of available sessions and let the player pick one.
    # Returns True when a session was chosen and loaded, False to go back.
    def choisir(self, window, resolution, temps_actuel, raccourcis):
        while True:
            # Build the session-list page.
            page = Page([Menu(obtenir_liste_noms_sessions(), 50, 50,
                              resolution["w"]-100 if resolution["w"] >= 1024 else 924,
                              resolution["h"]-200 if resolution["h"] >= 576 else 376,
                              flags=("selection", "vertical", "defilant")),
                         Menu(("Retour", "Effacer", "Valider"), 0,
                              resolution["h"]-100 if resolution["h"] >= 576 else 476,
                              resolution["w"] if resolution["w"] >= 1024 else 1024, 100,
                              flags=("horizontal",))], FOND)
            sortie = page.afficher(window, temps_actuel, raccourcis, resolution)
            # Process the menu result.
            if isinstance(sortie, dict):
                # Button: back
                if sortie["choix"] == [1, 0]:
                    return False
                # Button: delete the selected session
                elif sortie["choix"] == [1, 1] and sortie["selection"][0] is not None:
                    self.nom = obtenir_liste_noms_sessions()[sortie["selection"][0]]
                    self.effacer()
                # Button: confirm (load the selected session)
                elif sortie["choix"] == [1, 2] and sortie["selection"][0] is not None:
                    self.nom = obtenir_liste_noms_sessions()[sortie["selection"][0]]
                    self.recuperer()
                    return True
            elif sortie == 0:
                continue
            elif sortie == 1:
                exit(0)
            else:
                return False

    # Write the session to a file named after the session.
    def sauvegarder(self):
        with open("save/"+self.nom+".elr", "wb") as fichier:
            Pickler(fichier).dump(self)

    # Load the whole session back from disk, based on self.nom.
    def recuperer(self):
        with open("save/"+self.nom+".elr", "rb") as fichier:
            session_recuperee = Unpickler(fichier).load()
            # Copy every attribute of the unpickled session onto self.
            for key, item in session_recuperee.__dict__.items():
                self.__setattr__(key, item)

    # Delete the save file {self.nom}.elr.
    def effacer(self):
        try:
            remove(getcwd()+"/save/"+self.nom+".elr")
        except WindowsError:
            raise ValueError("Cette session n'existe pas")

    # Store the floor state inside the session so it can be saved later.
    def placer_etage(self, etage):
        self.utiliser_sauvegarde = True
        # Only keep objects that can be pickled and restored easily;
        # numpy arrays are converted to lists (marked with a ".ndarray"
        # key suffix so recuperer_etage can convert them back).
        self.etage = {}
        for key in etage.__dict__:
            if explorer(etage.__getattribute__(key)) and key != "refresh_list":
                self.etage[key] = etage.__getattribute__(key)
            elif isinstance(etage.__getattribute__(key), np.ndarray):
                self.etage[key+".ndarray"] = etage.__getattribute__(key).tolist()
        self.joueur = {}
        for key in etage.joueur.__dict__:
            if explorer(etage.joueur.__getattribute__(key)):
                self.joueur[key] = etage.joueur.__getattribute__(key)
            elif isinstance(etage.joueur.__getattribute__(key), np.ndarray):
                self.joueur[key+".ndarray"] = etage.joueur.__getattribute__(key).tolist()
        self.monstres = list()
        for i, monstre in enumerate(etage.monstres):
            self.monstres.append({})
            for key in monstre.__dict__:
                if explorer(monstre.__getattribute__(key)):
                    self.monstres[i][key] = monstre.__getattribute__(key)
                elif isinstance(monstre.__getattribute__(key), np.ndarray):
                    self.monstres[i][key+".ndarray"] = monstre.__getattribute__(key).tolist()

    # Rebuild an Etage object from the session's saved state.
    @ecran_de_chargement("Chargement")
    def recuperer_etage(self, **kwargs):
        self.utiliser_sauvegarde = False
        # Restore the floor attributes.
        etage = Etage(self.etage["level"])
        for key, item in self.etage.items():
            if ".ndarray" in key:
                # Lists tagged ".ndarray" were numpy arrays; convert back.
                etage.__setattr__(key.replace(".ndarray", ""), np.array(item))
            else:
                etage.__setattr__(key, item)
        # Restore the player data.
        etage.joueur = Joueur()
        for key, item in self.joueur.items():
            if ".ndarray" in key:
                etage.joueur.__setattr__(key.replace(".ndarray", ""), np.array(item))
            else:
                etage.joueur.__setattr__(key, item)
        # Restore the monsters.
        etage.monstres = list()
        for i, monstre in enumerate(self.monstres):
            etage.monstres.append(Monstre())
            for key, item in monstre.items():
                if ".ndarray" in key:
                    etage.monstres[i].__setattr__(key.replace(".ndarray", ""), np.array(item))
                else:
                    etage.monstres[i].__setattr__(key, item)
        # Rebuild the floor textures, which are far too costly to pickle.
        etage.refresh_list = list()
        etage.fond = list()
        for y in range((etage.taille[1]*64)//TEXTURE_MAX+1):
            etage.fond.append(list())
            for x in range((etage.taille[0]*64//TEXTURE_MAX+1)):
                etage.fond[y].append(sf.RenderTexture(TEXTURE_MAX, TEXTURE_MAX))
        for y in range(etage.taille[1]):
            for x in range(etage.taille[0]):
                # Animated tiles always use the first tileset cell.
                if (x, y) not in etage.blocs_importants["sprites_animes"]:
                    sprite = sf.Sprite(TILESET, ((etage.blocs[x, y] % 10)*64, (etage.blocs[x, y]//10)*64, 64, 64))
                else:
                    sprite = sf.Sprite(TILESET, (0, 0, 64, 64))
                sprite.position = ((x*64) % TEXTURE_MAX, (y*64) % TEXTURE_MAX)
                etage.fond[(y*64)//TEXTURE_MAX][(x*64)//TEXTURE_MAX].draw(sprite)
                # Second pass draws the shadow overlay for this tile.
                etage.parametrer_shader_ombre(x, y)
                sprite.position = ((x*64) % TEXTURE_MAX, (y*64) % TEXTURE_MAX)
                etage.fond[(y*64)//TEXTURE_MAX][(x*64)//TEXTURE_MAX].draw(sprite, sf.RenderStates(shader=SHADER_OMBRE))
        for item in etage.fond:
            for chunk in item:
                chunk.display()
        return etage
class OptionsUtilisateur:
    """User options: keyboard shortcuts and the fullscreen flag, persisted
    with pickle in user_settings.elr in the working directory."""

    def __init__(self):
        self.raccourcis = dict()
        self.fullscreen = False
        # Start from the defaults, then overlay any saved settings.
        self.regler_raccourcis_defaut()
        self.recuperer()

    # Reset the shortcuts to their default bindings.
    def regler_raccourcis_defaut(self):
        # Each entry maps an action to [sfml key code, display label].
        self.raccourcis = {"gauche": [sf.Keyboard.Q, "Q"],
                           "droite": [sf.Keyboard.D, "D"],
                           "saut": [sf.Keyboard.SPACE, "Espace"],
                           "menu": [sf.Keyboard.ESCAPE, "Echap"],
                           "screenshot": [sf.Keyboard.F1, "F1"]}

    # Save the current options into user_settings.elr.
    def sauvegarder(self):
        with open("user_settings.elr", "wb") as fichier:
            pickler = Pickler(fichier)
            pickler.dump(self)

    # Load the options from user_settings.elr, if the file exists.
    def recuperer(self):
        if "user_settings.elr" in listdir(getcwd()):
            with open("user_settings.elr", "rb") as fichier:
                unpickler = Unpickler(fichier)
                options_recuperees = unpickler.load()
                # Merge saved shortcuts over the defaults; copy everything
                # else verbatim.
                for key, item in options_recuperees.__dict__.items():
                    if key == "raccourcis":
                        for raccourcis, donnees in item.items():
                            self.raccourcis[raccourcis] = donnees
                    else:
                        self.__setattr__(key, item)

    # Open a menu where the user can change the options.
    def modifier(self, window, temps_actuel, raccourcis, resolution):
        continuer_menu_options = True
        while continuer_menu_options:
            # Build the options page, then display it.
            boutons = ("Raccourcis", "Mode fenêtré" if self.fullscreen else "Mode plein écran", "Retour")
            page = Page([Menu(boutons, 0, 0, resolution["w"] if resolution["w"] >= 1024 else 1024,
                              resolution["h"] if resolution["h"] >= 576 else 576)], FOND)
            sortie = page.afficher(window, temps_actuel, raccourcis, resolution)
            # Process the menu result.
            if isinstance(sortie, dict):
                # Button: shortcuts
                if sortie["choix"] == [0, 0]:
                    self.modifier_raccourcis(window, temps_actuel, resolution)
                    self.sauvegarder()
                # Button: windowed mode / fullscreen mode
                if sortie["choix"] == [0, 1]:
                    self.fullscreen = not self.fullscreen
                    if self.fullscreen:
                        resolution["w"] = sf.VideoMode.get_desktop_mode().width
                        resolution["h"] = sf.VideoMode.get_desktop_mode().height
                        window.recreate(sf.VideoMode(resolution["w"], resolution["h"], 32),
                                        "Endless-Rooms", sf.Style.FULLSCREEN)
                    else:
                        resolution["w"] = 1024
                        resolution["h"] = 576
                        window.recreate(sf.VideoMode(resolution["w"], resolution["h"], 32),
                                        "Endless-Rooms", sf.Style.DEFAULT)
                    self.sauvegarder()
                # Button: back
                elif sortie["choix"] == [0, 2]:
                    continuer_menu_options = False
            elif sortie == 0:
                continue
            elif sortie == 1:
                exit(0)
            else:
                continuer_menu_options = False

    # Open a menu that lets the user rebind the shortcuts.
    def modifier_raccourcis(self, window, temps_actuel, resolution):
        # Small helper "creer_menu" so the menu layout is not duplicated
        # every time the list has to be rebuilt.
        def creer_menu(liste_raccourcis, resolution):
            return [Menu(liste_raccourcis, 50, 50, resolution["w"]-100 if resolution["w"] >= 1024 else 924,
                         resolution["h"]-200 if resolution["h"] >= 576 else 376,
                         flags=("vertical", "defilant", "selection")),
                    Menu(("Réinitialiser", "Valider"), 0, resolution["h"]-100 if resolution["h"] > 576 else 476,
                         resolution["w"] if resolution["w"] >= 1024 else 924, 100, flags=("horizontal",))]
        # Dictionary mapping each bindable key code to its display name.
        touches = {sf.Keyboard.A: "A", sf.Keyboard.B: "B", sf.Keyboard.C: "C", sf.Keyboard.D: "D", sf.Keyboard.E: "E",
                   sf.Keyboard.F: "F", sf.Keyboard.G: "G", sf.Keyboard.H: "H", sf.Keyboard.I: "I", sf.Keyboard.J: "J",
                   sf.Keyboard.K: "K", sf.Keyboard.L: "L", sf.Keyboard.M: "M", sf.Keyboard.N: "N", sf.Keyboard.O: "O",
                   sf.Keyboard.P: "P", sf.Keyboard.Q: "Q", sf.Keyboard.R: "R", sf.Keyboard.S: "S", sf.Keyboard.T: "T",
                   sf.Keyboard.U: "U", sf.Keyboard.V: "V", sf.Keyboard.W: "W", sf.Keyboard.X: "X", sf.Keyboard.Y: "Y",
                   sf.Keyboard.Z: "Z", sf.Keyboard.UP: "Haut", sf.Keyboard.DOWN: "Bas", sf.Keyboard.LEFT: "Gauche",
                   sf.Keyboard.RIGHT: "Droite", sf.Keyboard.ESCAPE: "Echap", sf.Keyboard.NUMPAD0: "0",
                   sf.Keyboard.NUMPAD1: "1", sf.Keyboard.NUMPAD2: "2", sf.Keyboard.NUMPAD3: "3",
                   sf.Keyboard.NUMPAD4: "4", sf.Keyboard.NUMPAD5: "5", sf.Keyboard.NUMPAD6: "6",
                   sf.Keyboard.NUMPAD7: "7", sf.Keyboard.NUMPAD8: "8", sf.Keyboard.NUMPAD9: "9", sf.Keyboard.NUM0: "à",
                   sf.Keyboard.NUM1: "&", sf.Keyboard.NUM2: "é", sf.Keyboard.NUM3: "\"", sf.Keyboard.NUM4: "'",
                   sf.Keyboard.NUM5: "(", sf.Keyboard.NUM6: "-", sf.Keyboard.NUM7: "è", sf.Keyboard.NUM8: "_",
                   sf.Keyboard.NUM9: "ç", sf.Keyboard.L_CONTROL: "L:Ctrl", sf.Keyboard.L_SHIFT: "L:Maj",
                   sf.Keyboard.F1: "F1", sf.Keyboard.F2: "F2", sf.Keyboard.F3: "F3", sf.Keyboard.F4: "F4",
                   sf.Keyboard.F5: "F5", sf.Keyboard.F6: "F6", sf.Keyboard.F7: "F7", sf.Keyboard.F8: "F8",
                   sf.Keyboard.F9: "F9", sf.Keyboard.F10: "F10", sf.Keyboard.F11: "F11", sf.Keyboard.F12: "F12"}
        # Build the menu list ("action - key" labels).
        liste_raccourcis = [key+" - "+item[1] for key, item in self.raccourcis.items()]
        liste_menus = creer_menu(liste_raccourcis, resolution)
        # State for the hand-rolled event loop below.
        choix = -1
        selection = str()
        position_souris = [0, 0]
        window.view.reset((0, 0, resolution["w"], resolution["h"]))
        fond = sf.Sprite(sf.Texture.from_image(FOND))
        fond.position = (0, 0)
        tempo = 0
        continuer_menu = True
        while continuer_menu:
            tempo = (tempo+1) % 24
            # REFRESH THE FRAME
            window.display()
            window.clear(sf.Color(0, 0, 0))
            window.draw(fond)
            gerer_fps(temps_actuel)
            # EVENT HANDLING
            for event in window.events:
                # Universal events
                if isinstance(event, sf.ResizeEvent):
                    resolution["w"], resolution["h"] = event.size.x, event.size.y
                    window.view.reset((0, 0, resolution["w"], resolution["h"]))
                    liste_menus = creer_menu(liste_raccourcis, resolution)
                if isinstance(event, sf.CloseEvent):
                    exit(0)
                # Mouse click
                if isinstance(event, sf.MouseButtonEvent):
                    if event.button == sf.Mouse.LEFT and event.released:
                        for menu in liste_menus:
                            for b, bouton in enumerate(menu.boutons):
                                if bouton.x < position_souris[0] < bouton.x+bouton.w and \
                                        bouton.y < position_souris[1] < bouton.y+bouton.h:
                                    if "selection" in menu.flags or menu.type == "saisies":
                                        # A shortcut row was clicked: remember
                                        # which action is being rebound.
                                        selection = liste_raccourcis[b].split(" - ")[0]
                                        for autre_bouton in menu.boutons:
                                            if autre_bouton is bouton:
                                                bouton.data["selection"] = not bouton.data["selection"]
                                            else:
                                                autre_bouton.data["selection"] = False
                                    else:
                                        choix = b
                # Keyboard input
                if isinstance(event, sf.KeyEvent):
                    if selection != str():
                        # Only bindable keys (present in `touches`) count.
                        if event.code in touches.keys():
                            self.raccourcis[selection] = [event.code, touches[event.code]]
                            liste_raccourcis = [key+" - "+item[1] for key, item in self.raccourcis.items()]
                            liste_menus = creer_menu(liste_raccourcis, resolution)
                            selection = str()
                # Mouse wheel
                if isinstance(event, sf.MouseWheelEvent):
                    for menu in liste_menus:
                        if "defilant" in menu.flags:
                            if len(menu.boutons) > 0:
                                # Scroll only while the first/last button
                                # stays inside the menu frame.
                                mouvement = ("horizontal" in menu.flags and
                                             ((event.delta < 0 and menu.boutons[len(menu.boutons)-1].x+event.delta*32 > menu.x) or
                                              (event.delta > 0 and menu.boutons[0].x+menu.boutons[0].w+event.delta*32 < menu.x+menu.w))) or \
                                            ("horizontal" not in menu.flags and
                                             ((event.delta < 0 and menu.boutons[len(menu.boutons)-1].y+event.delta*32 > menu.y) or
                                              (event.delta > 0 and menu.boutons[0].y+menu.boutons[0].h+event.delta*32 < menu.y+menu.h)))
                                if "defilant" in menu.flags and mouvement:
                                    for bouton in menu.boutons:
                                        if "horizontal" in menu.flags:
                                            bouton.x += event.delta*32
                                        else:
                                            bouton.y += event.delta*32
            position_souris = sf.Mouse.get_position(window)
            # DRAWING
            for m, menu in enumerate(liste_menus):
                # Draw the menu background.
                if menu.fond is not None:
                    window.draw(menu.fond)
                elif menu.type == "normal":
                    # Build and draw the scrolling menu's frame.
                    if "defilant" in menu.flags:
                        contour = sf.RectangleShape()
                        contour.size = (menu.w, menu.h)
                        contour.outline_color = sf.Color(0, 0, 0)
                        contour.outline_thickness = 2
                        contour.position = (menu.x, menu.y)
                        window.draw(contour)
                # Draw the menu buttons.
                for b, bouton in enumerate(menu.boutons):
                    # If this is the scrolling menu: clip sprites to the frame.
                    if "defilant" in menu.flags:
                        coordonnees_sprite = {"x": bouton.x, "y": bouton.y, "cadre": [0, 0, bouton.w, bouton.h]}
                        if bouton.x < menu.x < bouton.x+bouton.w:
                            coordonnees_sprite["cadre"][0] = menu.x-bouton.x
                            coordonnees_sprite["cadre"][2] = bouton.x+bouton.w-menu.x
                            coordonnees_sprite["x"] = menu.x
                        if bouton.x < menu.x+menu.w < bouton.x+bouton.w:
                            coordonnees_sprite["cadre"][2] = menu.x+menu.w-bouton.x
                        if bouton.y < menu.y < bouton.y+bouton.h:
                            coordonnees_sprite["cadre"][1] = menu.y-bouton.y
                            coordonnees_sprite["cadre"][3] = bouton.y+bouton.h-menu.y
                            coordonnees_sprite["y"] = menu.y
                        if bouton.y < menu.y+menu.h < bouton.y+bouton.h:
                            coordonnees_sprite["cadre"][3] = menu.y+menu.h-bouton.y
                        # Pick the sprite state: hover / selected / normal.
                        if bouton.x < position_souris[0] < bouton.x+bouton.w and \
                                bouton.y < position_souris[1] < bouton.y+bouton.h:
                            if "selection" in menu.flags:
                                if bouton.data["selection"]:
                                    sprite = sf.Sprite(bouton.images["selection"], coordonnees_sprite["cadre"])
                                else:
                                    sprite = sf.Sprite(bouton.images["passage"], coordonnees_sprite["cadre"])
                            else:
                                sprite = sf.Sprite(bouton.images["passage"], coordonnees_sprite["cadre"])
                        else:
                            if "selection" in menu.flags:
                                if bouton.data["selection"]:
                                    sprite = sf.Sprite(bouton.images["selection"], coordonnees_sprite["cadre"])
                                else:
                                    sprite = sf.Sprite(bouton.images["normal"], coordonnees_sprite["cadre"])
                            else:
                                sprite = sf.Sprite(bouton.images["normal"], coordonnees_sprite["cadre"])
                        # Only draw buttons that intersect the menu frame.
                        if bouton.x+bouton.w > menu.x and bouton.x < menu.x+menu.w and \
                                bouton.y+bouton.h > menu.y and bouton.y < menu.y+menu.h:
                            sprite.position = (coordonnees_sprite["x"], coordonnees_sprite["y"])
                            window.draw(sprite)
                    # Otherwise: draw the button sprite as-is.
                    else:
                        if bouton.x < position_souris[0] < bouton.x+bouton.w and \
                                bouton.y < position_souris[1] < bouton.y+bouton.h:
                            if "selection" in menu.flags:
                                if bouton.data["selection"]:
                                    sprite = sf.Sprite(bouton.images["selection"])
                                else:
                                    sprite = sf.Sprite(bouton.images["passage"])
                            else:
                                sprite = sf.Sprite(bouton.images["passage"])
                        else:
                            if "selection" in menu.flags:
                                if bouton.data["selection"]:
                                    sprite = sf.Sprite(bouton.images["selection"])
                                else:
                                    sprite = sf.Sprite(bouton.images["normal"])
                            else:
                                sprite = sf.Sprite(bouton.images["normal"])
                        sprite.position = (bouton.x, bouton.y)
                        window.draw(sprite)
            # Handle the bottom-bar button that was clicked, if any.
            if choix != -1:
                # Button: reset to defaults
                if choix == 0:
                    self.regler_raccourcis_defaut()
                    liste_raccourcis = [key+" - "+item[1] for key, item in self.raccourcis.items()]
                    liste_menus = creer_menu(liste_raccourcis, resolution)
                # Button: confirm
                if choix == 1:
                    continuer_menu = False
                choix = -1
| gpl-2.0 |
ibmibmibm/beets | test/test_files.py | 2 | 22499 | # -*- coding: utf-8 -*-
# This file is part of beets.
# Copyright 2016, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""Test file manipulation functionality of Item.
"""
from __future__ import division, absolute_import, print_function
import shutil
import os
import stat
from os.path import join
import unittest
from test import _common
from test._common import item, touch
import beets.library
from beets import util
from beets.util import MoveOperation
class MoveTest(_common.TestCase):
    """Tests for Item.move(): moving, copying, linking and hardlinking a
    single item into the library directory."""

    def setUp(self):
        super(MoveTest, self).setUp()
        # make a temporary file
        self.path = join(self.temp_dir, b'temp.mp3')
        shutil.copy(join(_common.RSRC, b'full.mp3'), self.path)
        # add it to a temporary library
        self.lib = beets.library.Library(':memory:')
        self.i = beets.library.Item.from_path(self.path)
        self.lib.add(self.i)
        # set up the destination
        self.libdir = join(self.temp_dir, b'testlibdir')
        os.mkdir(self.libdir)
        self.lib.directory = self.libdir
        self.lib.path_formats = [('default',
                                  join('$artist', '$album', '$title'))]
        self.i.artist = 'one'
        self.i.album = 'two'
        self.i.title = 'three'
        # With the path format above the item must land here:
        self.dest = join(self.libdir, b'one', b'two', b'three.mp3')
        self.otherdir = join(self.temp_dir, b'testotherdir')

    def test_move_arrives(self):
        self.i.move()
        self.assertExists(self.dest)

    def test_move_to_custom_dir(self):
        # basedir overrides the library directory as destination root.
        self.i.move(basedir=self.otherdir)
        self.assertExists(join(self.otherdir, b'one', b'two', b'three.mp3'))

    def test_move_departs(self):
        self.i.move()
        self.assertNotExists(self.path)

    def test_move_in_lib_prunes_empty_dir(self):
        self.i.move()
        old_path = self.i.path
        self.assertExists(old_path)
        # Moving again (new artist) must remove the now-empty old directory.
        self.i.artist = u'newArtist'
        self.i.move()
        self.assertNotExists(old_path)
        self.assertNotExists(os.path.dirname(old_path))

    def test_copy_arrives(self):
        self.i.move(operation=MoveOperation.COPY)
        self.assertExists(self.dest)

    def test_copy_does_not_depart(self):
        self.i.move(operation=MoveOperation.COPY)
        self.assertExists(self.path)

    def test_move_changes_path(self):
        self.i.move()
        self.assertEqual(self.i.path, util.normpath(self.dest))

    def test_copy_already_at_destination(self):
        # Copying onto itself must be a no-op for the stored path.
        self.i.move()
        old_path = self.i.path
        self.i.move(operation=MoveOperation.COPY)
        self.assertEqual(self.i.path, old_path)

    def test_move_already_at_destination(self):
        self.i.move()
        old_path = self.i.path
        self.i.move()
        self.assertEqual(self.i.path, old_path)

    def test_move_file_with_colon(self):
        # ':' is replaced according to the `replace`/drive_sep settings.
        self.i.artist = u'C:DOS'
        self.i.move()
        self.assertIn('C_DOS', self.i.path.decode())

    def test_move_file_with_multiple_colons(self):
        print(beets.config['replace'])
        self.i.artist = u'COM:DOS'
        self.i.move()
        self.assertIn('COM_DOS', self.i.path.decode())

    def test_move_file_with_colon_alt_separator(self):
        # Temporarily override the drive-separator replacement character.
        old = beets.config['drive_sep_replace']
        beets.config["drive_sep_replace"] = '0'
        self.i.artist = u'C:DOS'
        self.i.move()
        self.assertIn('C0DOS', self.i.path.decode())
        beets.config["drive_sep_replace"] = old

    def test_read_only_file_copied_writable(self):
        # Make the source file read-only.
        os.chmod(self.path, 0o444)
        try:
            self.i.move(operation=MoveOperation.COPY)
            self.assertTrue(os.access(self.i.path, os.W_OK))
        finally:
            # Make everything writable so it can be cleaned up.
            os.chmod(self.path, 0o777)
            os.chmod(self.i.path, 0o777)

    def test_move_avoids_collision_with_existing_file(self):
        # Make a conflicting file at the destination.
        dest = self.i.destination()
        os.makedirs(os.path.dirname(dest))
        touch(dest)
        self.i.move()
        # A fresh (suffixed) name in the same directory must be chosen.
        self.assertNotEqual(self.i.path, dest)
        self.assertEqual(os.path.dirname(self.i.path),
                         os.path.dirname(dest))

    @unittest.skipUnless(_common.HAVE_SYMLINK, "need symlinks")
    def test_link_arrives(self):
        self.i.move(operation=MoveOperation.LINK)
        self.assertExists(self.dest)
        self.assertTrue(os.path.islink(self.dest))
        self.assertEqual(os.readlink(self.dest), self.path)

    @unittest.skipUnless(_common.HAVE_SYMLINK, "need symlinks")
    def test_link_does_not_depart(self):
        self.i.move(operation=MoveOperation.LINK)
        self.assertExists(self.path)

    @unittest.skipUnless(_common.HAVE_SYMLINK, "need symlinks")
    def test_link_changes_path(self):
        self.i.move(operation=MoveOperation.LINK)
        self.assertEqual(self.i.path, util.normpath(self.dest))

    @unittest.skipUnless(_common.HAVE_HARDLINK, "need hardlinks")
    def test_hardlink_arrives(self):
        self.i.move(operation=MoveOperation.HARDLINK)
        self.assertExists(self.dest)
        # Hardlinked files share the same inode on the same device.
        s1 = os.stat(self.path)
        s2 = os.stat(self.dest)
        self.assertTrue(
            (s1[stat.ST_INO], s1[stat.ST_DEV]) ==
            (s2[stat.ST_INO], s2[stat.ST_DEV])
        )

    @unittest.skipUnless(_common.HAVE_HARDLINK, "need hardlinks")
    def test_hardlink_does_not_depart(self):
        self.i.move(operation=MoveOperation.HARDLINK)
        self.assertExists(self.path)

    @unittest.skipUnless(_common.HAVE_HARDLINK, "need hardlinks")
    def test_hardlink_changes_path(self):
        self.i.move(operation=MoveOperation.HARDLINK)
        self.assertEqual(self.i.path, util.normpath(self.dest))
class HelperTest(_common.TestCase):
    """Sanity checks for the path helpers in beets.util: ancestry(),
    components() and path_as_posix()."""

    def test_ancestry_works_on_file(self):
        self.assertEqual(util.ancestry('/a/b/c'),
                         ['/', '/a', '/a/b'])

    def test_ancestry_works_on_dir(self):
        # A trailing slash makes the directory itself an ancestor.
        self.assertEqual(util.ancestry('/a/b/c/'),
                         ['/', '/a', '/a/b', '/a/b/c'])

    def test_ancestry_works_on_relative(self):
        self.assertEqual(util.ancestry('a/b/c'),
                         ['a', 'a/b'])

    def test_components_works_on_file(self):
        self.assertEqual(util.components('/a/b/c'),
                         ['/', 'a', 'b', 'c'])

    def test_components_works_on_dir(self):
        # A trailing slash must not add an empty component.
        self.assertEqual(util.components('/a/b/c/'),
                         ['/', 'a', 'b', 'c'])

    def test_components_works_on_relative(self):
        self.assertEqual(util.components('a/b/c'),
                         ['a', 'b', 'c'])

    def test_forward_slash(self):
        # Windows-style backslashes are converted to forward slashes.
        self.assertEqual(util.path_as_posix(br'C:\a\b\c'), br'C:/a/b/c')
class AlbumFileTest(_common.TestCase):
    """Tests for Album.move(): moving/copying a whole album keeps the item
    paths in sync."""

    def setUp(self):
        super(AlbumFileTest, self).setUp()
        # Make library and item.
        self.lib = beets.library.Library(':memory:')
        self.lib.path_formats = \
            [('default', join('$albumartist', '$album', '$title'))]
        self.libdir = os.path.join(self.temp_dir, b'testlibdir')
        self.lib.directory = self.libdir
        self.i = item(self.lib)
        # Make a file for the item.
        self.i.path = self.i.destination()
        util.mkdirall(self.i.path)
        touch(self.i.path)
        # Make an album.
        self.ai = self.lib.add_album((self.i,))
        # Alternate destination dir.
        self.otherdir = os.path.join(self.temp_dir, b'testotherdir')

    def test_albuminfo_move_changes_paths(self):
        # Renaming the album and moving must update the item's stored path.
        self.ai.album = u'newAlbumName'
        self.ai.move()
        self.ai.store()
        self.i.load()
        self.assertTrue(b'newAlbumName' in self.i.path)

    def test_albuminfo_move_moves_file(self):
        oldpath = self.i.path
        self.ai.album = u'newAlbumName'
        self.ai.move()
        self.ai.store()
        self.i.load()
        self.assertFalse(os.path.exists(oldpath))
        self.assertTrue(os.path.exists(self.i.path))

    def test_albuminfo_move_copies_file(self):
        # COPY must leave the original file in place.
        oldpath = self.i.path
        self.ai.album = u'newAlbumName'
        self.ai.move(operation=MoveOperation.COPY)
        self.ai.store()
        self.i.load()
        self.assertTrue(os.path.exists(oldpath))
        self.assertTrue(os.path.exists(self.i.path))

    def test_albuminfo_move_to_custom_dir(self):
        # basedir overrides the library directory as destination root.
        self.ai.move(basedir=self.otherdir)
        self.i.load()
        self.ai.store()
        self.assertTrue(b'testotherdir' in self.i.path)
class ArtFileTest(_common.TestCase):
    def setUp(self):
        """Create an in-memory library holding one album with one item and
        an associated album-art file."""
        super(ArtFileTest, self).setUp()
        # Make library and item.
        self.lib = beets.library.Library(':memory:')
        self.libdir = os.path.join(self.temp_dir, b'testlibdir')
        self.lib.directory = self.libdir
        self.i = item(self.lib)
        self.i.path = self.i.destination()
        # Make a music file.
        util.mkdirall(self.i.path)
        touch(self.i.path)
        # Make an album.
        self.ai = self.lib.add_album((self.i,))
        # Make an art file too.
        self.art = self.lib.get_album(self.i).art_destination('something.jpg')
        touch(self.art)
        self.ai.artpath = self.art
        self.ai.store()
        # Alternate destination dir.
        self.otherdir = os.path.join(self.temp_dir, b'testotherdir')
    def test_art_deleted_when_items_deleted(self):
        # remove(True) deletes the files too, including the album art.
        self.assertTrue(os.path.exists(self.art))
        self.ai.remove(True)
        self.assertFalse(os.path.exists(self.art))
    def test_art_moves_with_album(self):
        # Moving the album must relocate the art file alongside the items.
        self.assertTrue(os.path.exists(self.art))
        oldpath = self.i.path
        self.ai.album = u'newAlbum'
        self.ai.move()
        self.i.load()
        self.assertNotEqual(self.i.path, oldpath)
        self.assertFalse(os.path.exists(self.art))
        newart = self.lib.get_album(self.i).art_destination(self.art)
        self.assertTrue(os.path.exists(newart))
    def test_art_moves_with_album_to_custom_dir(self):
        # Move the album to another directory.
        self.ai.move(basedir=self.otherdir)
        self.ai.store()
        self.i.load()
        # Art should be in new directory.
        self.assertNotExists(self.art)
        newart = self.lib.get_album(self.i).artpath
        self.assertExists(newart)
        self.assertTrue(b'testotherdir' in newart)
    def test_setart_copies_image(self):
        # set_art() must copy a fresh image into the album directory.
        os.remove(self.art)
        newart = os.path.join(self.libdir, b'newart.jpg')
        touch(newart)
        i2 = item()
        i2.path = self.i.path
        i2.artist = u'someArtist'
        ai = self.lib.add_album((i2,))
        i2.move(operation=MoveOperation.COPY)
        self.assertEqual(ai.artpath, None)
        ai.set_art(newart)
        self.assertTrue(os.path.exists(ai.artpath))
def test_setart_to_existing_art_works(self):
os.remove(self.art)
# Original art.
newart = os.path.join(self.libdir, b'newart.jpg')
touch(newart)
i2 = item()
i2.path = self.i.path
i2.artist = u'someArtist'
ai = self.lib.add_album((i2,))
i2.move(operation=MoveOperation.COPY)
ai.set_art(newart)
# Set the art again.
ai.set_art(ai.artpath)
self.assertTrue(os.path.exists(ai.artpath))
def test_setart_to_existing_but_unset_art_works(self):
newart = os.path.join(self.libdir, b'newart.jpg')
touch(newart)
i2 = item()
i2.path = self.i.path
i2.artist = u'someArtist'
ai = self.lib.add_album((i2,))
i2.move(operation=MoveOperation.COPY)
# Copy the art to the destination.
artdest = ai.art_destination(newart)
shutil.copy(newart, artdest)
# Set the art again.
ai.set_art(artdest)
self.assertTrue(os.path.exists(ai.artpath))
def test_setart_to_conflicting_file_gets_new_path(self):
newart = os.path.join(self.libdir, b'newart.jpg')
touch(newart)
i2 = item()
i2.path = self.i.path
i2.artist = u'someArtist'
ai = self.lib.add_album((i2,))
i2.move(operation=MoveOperation.COPY)
# Make a file at the destination.
artdest = ai.art_destination(newart)
touch(artdest)
# Set the art.
ai.set_art(newart)
self.assertNotEqual(artdest, ai.artpath)
self.assertEqual(os.path.dirname(artdest),
os.path.dirname(ai.artpath))
def test_setart_sets_permissions(self):
os.remove(self.art)
newart = os.path.join(self.libdir, b'newart.jpg')
touch(newart)
os.chmod(newart, 0o400) # read-only
try:
i2 = item()
i2.path = self.i.path
i2.artist = u'someArtist'
ai = self.lib.add_album((i2,))
i2.move(operation=MoveOperation.COPY)
ai.set_art(newart)
mode = stat.S_IMODE(os.stat(ai.artpath).st_mode)
self.assertTrue(mode & stat.S_IRGRP)
self.assertTrue(os.access(ai.artpath, os.W_OK))
finally:
# Make everything writable so it can be cleaned up.
os.chmod(newart, 0o777)
os.chmod(ai.artpath, 0o777)
def test_move_last_file_moves_albumart(self):
oldartpath = self.lib.albums()[0].artpath
self.assertExists(oldartpath)
self.ai.album = u'different_album'
self.ai.store()
self.ai.items()[0].move()
artpath = self.lib.albums()[0].artpath
self.assertTrue(b'different_album' in artpath)
self.assertExists(artpath)
self.assertNotExists(oldartpath)
def test_move_not_last_file_does_not_move_albumart(self):
i2 = item()
i2.albumid = self.ai.id
self.lib.add(i2)
oldartpath = self.lib.albums()[0].artpath
self.assertExists(oldartpath)
self.i.album = u'different_album'
self.i.album_id = None # detach from album
self.i.move()
artpath = self.lib.albums()[0].artpath
self.assertFalse(b'different_album' in artpath)
self.assertEqual(artpath, oldartpath)
self.assertExists(oldartpath)
class RemoveTest(_common.TestCase):
    """Tests for Item.remove and the pruning of newly empty
    directories inside the library tree.
    """
    def setUp(self):
        super(RemoveTest, self).setUp()
        # Make library and item.
        self.lib = beets.library.Library(':memory:')
        self.libdir = os.path.join(self.temp_dir, b'testlibdir')
        self.lib.directory = self.libdir
        self.i = item(self.lib)
        self.i.path = self.i.destination()
        # Make a music file.
        util.mkdirall(self.i.path)
        touch(self.i.path)
        # Make an album with the item.
        self.ai = self.lib.add_album((self.i,))
    def test_removing_last_item_prunes_empty_dir(self):
        parent = os.path.dirname(self.i.path)
        self.assertExists(parent)
        self.i.remove(True)
        self.assertNotExists(parent)
    def test_removing_last_item_preserves_nonempty_dir(self):
        # A leftover unrelated file keeps the directory alive.
        parent = os.path.dirname(self.i.path)
        touch(os.path.join(parent, b'dummy.txt'))
        self.i.remove(True)
        self.assertExists(parent)
    def test_removing_last_item_prunes_dir_with_blacklisted_file(self):
        # Clutter files like .DS_Store do not block pruning.
        parent = os.path.dirname(self.i.path)
        touch(os.path.join(parent, b'.DS_Store'))
        self.i.remove(True)
        self.assertNotExists(parent)
    def test_removing_without_delete_leaves_file(self):
        # delete=False removes only the database entry.
        path = self.i.path
        self.i.remove(False)
        self.assertExists(path)
    def test_removing_last_item_preserves_library_dir(self):
        # Pruning never climbs above the library root itself.
        self.i.remove(True)
        self.assertExists(self.libdir)
    def test_removing_item_outside_of_library_deletes_nothing(self):
        # With the library rooted elsewhere, no pruning happens here.
        self.lib.directory = os.path.join(self.temp_dir, b'xxx')
        parent = os.path.dirname(self.i.path)
        self.i.remove(True)
        self.assertExists(parent)
    def test_removing_last_item_in_album_with_albumart_prunes_dir(self):
        # Album art alone does not keep the directory alive.
        artfile = os.path.join(self.temp_dir, b'testart.jpg')
        touch(artfile)
        self.ai.set_art(artfile)
        self.ai.store()
        parent = os.path.dirname(self.i.path)
        self.i.remove(True)
        self.assertNotExists(parent)
# Tests that we can "delete" nonexistent files.
class SoftRemoveTest(_common.TestCase):
    """util.remove in "soft" mode must delete existing files and
    silently ignore missing ones.
    """
    def setUp(self):
        super(SoftRemoveTest, self).setUp()
        self.path = os.path.join(self.temp_dir, b'testfile')
        touch(self.path)
    def test_soft_remove_deletes_file(self):
        util.remove(self.path, True)
        self.assertNotExists(self.path)
    def test_soft_remove_silent_on_no_file(self):
        # A nonexistent path must not raise.
        try:
            util.remove(self.path + b'XXX', True)
        except OSError:
            self.fail(u'OSError when removing path')
class SafeMoveCopyTest(_common.TestCase):
    """util.move/util.copy must succeed to fresh destinations, refuse
    to clobber existing files, and tolerate same-path no-ops.
    """
    def setUp(self):
        super(SafeMoveCopyTest, self).setUp()
        self.path = os.path.join(self.temp_dir, b'testfile')
        touch(self.path)
        self.otherpath = os.path.join(self.temp_dir, b'testfile2')
        touch(self.otherpath)
        self.dest = self.path + b'.dest'
    def test_successful_move(self):
        util.move(self.path, self.dest)
        self.assertExists(self.dest)
        self.assertNotExists(self.path)
    def test_successful_copy(self):
        util.copy(self.path, self.dest)
        self.assertExists(self.dest)
        self.assertExists(self.path)
    def test_unsuccessful_move(self):
        # Destination already exists: must raise, not overwrite.
        with self.assertRaises(util.FilesystemError):
            util.move(self.path, self.otherpath)
    def test_unsuccessful_copy(self):
        with self.assertRaises(util.FilesystemError):
            util.copy(self.path, self.otherpath)
    def test_self_move(self):
        # Source == destination is a harmless no-op.
        util.move(self.path, self.path)
        self.assertExists(self.path)
    def test_self_copy(self):
        util.copy(self.path, self.path)
        self.assertExists(self.path)
class PruneTest(_common.TestCase):
    """util.prune_dirs must delete empty directories up to (but not
    including) the given base directory.
    """
    def setUp(self):
        super(PruneTest, self).setUp()
        self.base = os.path.join(self.temp_dir, b'testdir')
        os.mkdir(self.base)
        self.sub = os.path.join(self.base, b'subdir')
        os.mkdir(self.sub)
    def test_prune_existent_directory(self):
        util.prune_dirs(self.sub, self.base)
        self.assertExists(self.base)
        self.assertNotExists(self.sub)
    def test_prune_nonexistent_directory(self):
        # Starting from a path that does not exist still prunes the
        # existing empty ancestors below the base.
        util.prune_dirs(os.path.join(self.sub, b'another'), self.base)
        self.assertExists(self.base)
        self.assertNotExists(self.sub)
class WalkTest(_common.TestCase):
    """util.sorted_walk must yield (dir, subdirs, files) tuples with
    sorted file names and honor ignore glob patterns.
    """
    def setUp(self):
        super(WalkTest, self).setUp()
        # Fixture tree: testdir/{x,y,d/z}; note y is created before x
        # so sortedness is actually exercised.
        self.base = os.path.join(self.temp_dir, b'testdir')
        os.mkdir(self.base)
        touch(os.path.join(self.base, b'y'))
        touch(os.path.join(self.base, b'x'))
        os.mkdir(os.path.join(self.base, b'd'))
        touch(os.path.join(self.base, b'd', b'z'))
    def test_sorted_files(self):
        res = list(util.sorted_walk(self.base))
        self.assertEqual(len(res), 2)
        self.assertEqual(res[0],
                         (self.base, [b'd'], [b'x', b'y']))
        self.assertEqual(res[1],
                         (os.path.join(self.base, b'd'), [], [b'z']))
    def test_ignore_file(self):
        res = list(util.sorted_walk(self.base, (b'x',)))
        self.assertEqual(len(res), 2)
        self.assertEqual(res[0],
                         (self.base, [b'd'], [b'y']))
        self.assertEqual(res[1],
                         (os.path.join(self.base, b'd'), [], [b'z']))
    def test_ignore_directory(self):
        # Ignoring a directory also skips its contents entirely.
        res = list(util.sorted_walk(self.base, (b'd',)))
        self.assertEqual(len(res), 1)
        self.assertEqual(res[0],
                         (self.base, [], [b'x', b'y']))
    def test_ignore_everything(self):
        # '*' is a glob pattern, so it matches all entries.
        res = list(util.sorted_walk(self.base, (b'*',)))
        self.assertEqual(len(res), 1)
        self.assertEqual(res[0],
                         (self.base, [], []))
class UniquePathTest(_common.TestCase):
    """util.unique_path must leave fresh names alone and append a
    counter (before the extension) to avoid collisions.
    """
    def setUp(self):
        super(UniquePathTest, self).setUp()
        self.base = os.path.join(self.temp_dir, b'testdir')
        os.mkdir(self.base)
        touch(os.path.join(self.base, b'x.mp3'))
        touch(os.path.join(self.base, b'x.1.mp3'))
        touch(os.path.join(self.base, b'x.2.mp3'))
        touch(os.path.join(self.base, b'y.mp3'))
    def test_new_file_unchanged(self):
        path = util.unique_path(os.path.join(self.base, b'z.mp3'))
        self.assertEqual(path, os.path.join(self.base, b'z.mp3'))
    def test_conflicting_file_appends_1(self):
        path = util.unique_path(os.path.join(self.base, b'y.mp3'))
        self.assertEqual(path, os.path.join(self.base, b'y.1.mp3'))
    def test_conflicting_file_appends_higher_number(self):
        # x.1 and x.2 are taken, so the counter skips to 3.
        path = util.unique_path(os.path.join(self.base, b'x.mp3'))
        self.assertEqual(path, os.path.join(self.base, b'x.3.mp3'))
    def test_conflicting_file_with_number_increases_number(self):
        # An already-numbered name gets a fresh number, not '.1.1'.
        path = util.unique_path(os.path.join(self.base, b'x.1.mp3'))
        self.assertEqual(path, os.path.join(self.base, b'x.3.mp3'))
class MkDirAllTest(_common.TestCase):
    """util.mkdirall must create all *parent* directories of a path
    without creating the leaf file itself.
    """
    def test_parent_exists(self):
        path = os.path.join(self.temp_dir, b'foo', b'bar', b'baz', b'qux.mp3')
        util.mkdirall(path)
        self.assertTrue(os.path.isdir(
            os.path.join(self.temp_dir, b'foo', b'bar', b'baz')
        ))
    def test_child_does_not_exist(self):
        # The final component is treated as a file and never created.
        path = os.path.join(self.temp_dir, b'foo', b'bar', b'baz', b'qux.mp3')
        util.mkdirall(path)
        self.assertTrue(not os.path.exists(
            os.path.join(self.temp_dir, b'foo', b'bar', b'baz', b'qux.mp3')
        ))
def suite():
    """Collect every test defined in this module into one suite."""
    loader = unittest.TestLoader()
    return loader.loadTestsFromName(__name__)


if __name__ == '__main__':
    unittest.main(defaultTest='suite')
| mit |
wxmeteorologist/pymetars | metarlist.py | 1 | 6959 | from pymetars.metar import MetarSite
from heapq import heappush, heappop
import os
from pymetars.latlonconversions import LatLonConversions
from ftplib import FTP as __FTP__
from datetime import datetime as __datetime__
"""Create a large set of MetarSites based off the id, name, state/providence, country, lat and lon"""
class MetarList(object):
    """Collection of MetarSite objects parsed from the fixed-width
    station table ``metar.tbl.txt`` shipped alongside this module.

    Sites are stored in a dict keyed by their 4-character station ID,
    so single-site lookups run in O(1).
    """
    def __init__(self):
        self.__llconvert__ = LatLonConversions()
        self.__metarSites__ = {}
        direc = os.path.dirname(os.path.realpath(__file__))
        # The table is fixed-width; slice the columns out of each row.
        # Record layout: (Site, Location, State/Providence, Country,
        # Lat, Lon).
        with open(direc + "/" + "metar.tbl.txt", "r") as f:
            data = f.readlines()
        for row in data:
            lat = self.__convertLat__(row[55:60].strip())
            # Bug fix: longitude was parsed with __convertLat__, which
            # mishandles three-digit (>= 100 degree) longitudes.
            lon = self.__convertLon__(row[61:67].strip())
            self.__metarSites__[row[0:4]] = MetarSite(
                [row[0:4], row[16:49].strip(), row[49:51], row[52:54],
                 lat, lon])
    def __convertLat__(self, latlon):
        """Convert a compact latitude string, e.g. '4052' -> 40.52."""
        sign = ""
        if latlon.startswith("-"):
            sign = "-"
            latlon = latlon.lstrip("-")
        if len(latlon) == 4:  # two integer digits plus two decimals
            return(float(sign + latlon[0:2] + "." + latlon[2:]))
        else:  # one integer digit plus decimals
            return (float(sign + latlon[0:1] + "." + latlon[1:]))
    def __convertLon__(self, latlon):
        """Convert a compact longitude string, e.g. '12345' -> 123.45."""
        sign = ""
        if latlon.startswith("-"):
            sign = "-"
            latlon = latlon.lstrip("-")
        if len(latlon) > 3 and latlon.startswith("1"):
            # >= 100 and < 200 degrees: three integer digits plus
            # decimals (there is no 200 longitude).  Bug fixes: the
            # original referenced an undefined name 'latlons' and
            # started the decimal part at index 2 instead of 3.
            return(float(sign + latlon[0:3] + "." + latlon[3:]))
        elif len(latlon) > 3:  # still includes decimals but < 100
            return(float(sign + latlon[0:2] + "." + latlon[2:]))
        else:  # no decimal digits present
            return (float(sign + latlon))
    def __str__(self):
        ret = ""
        for i in self.__metarSites__:
            # Bug fix: the original concatenated print(...), which
            # returns None and raised a TypeError; use str() instead.
            ret = ret + ":" + str(self.__metarSites__.get(i))
        return ret
    def size(self):
        """Return the number of known sites."""
        return len(self.__metarSites__)
    def getSite(self, siteId):
        """Return the MetarSite for a site ID, or None if unknown.

        Runs in O(1) time as dictionary lookup is constant.
        """
        return self.__metarSites__.get(siteId)
    def getLocation(self, location):
        """Return all sites whose name contains the given substring."""
        locations = []
        for i in self.__metarSites__:
            site = self.__metarSites__.get(i)
            if location in site.getName():
                locations.append(site)
        return locations
    def __setHour__(self, hour, metarData):
        """Attach one UTC hour of raw observations to their sites."""
        for obs in metarData:
            metarSite = self.__metarSites__.get(obs[0:4])
            if metarSite is not None and "RMK" in obs:
                # Prefer full reports that include a remarks section.
                metarSite.__setHour__(hour, obs)
            elif metarSite is not None and metarSite.__codedobHistory__[hour] == None:
                metarSite.__setHour__(hour, obs)
    def sitesInBounds(self, llLat, llLon, ulLat, ulLon):
        """Return a dict of sites within the lat/lon bounding box."""
        retSites = {}
        for site in self.__metarSites__:
            siteObj = self.__metarSites__.get(site)  # near-constant lookup
            siteLat = siteObj.getLatitude()
            siteLon = siteObj.getLongitude()
            if siteLat >= llLat and siteLon >= llLon and siteLat <= ulLat and siteLon <= ulLon:
                retSites[site] = siteObj
        return retSites
    def sitesSpacedBy(self, distanceKm, siteDic):
        """Thin siteDic so remaining sites are > distanceKm apart.

        Sites closer than the threshold to an earlier-kept site are
        deleted from the dictionary in place.  Worst case O(n^2).
        """
        count = 0
        keys = list(siteDic.keys())
        while count != len(keys):
            compSite = siteDic.get(keys[count])
            icount = count + 1
            while icount != len(keys):
                otherSite = siteDic.get(keys[icount])
                distance = self.__llconvert__.distance(
                    compSite.getLatitude(), compSite.getLongitude(),
                    otherSite.getLatitude(), otherSite.getLongitude())
                if distance <= distanceKm:
                    del(keys[icount])
                    del(siteDic[otherSite.getID()])
                else:  # only increment if we don't delete
                    icount += 1
            count += 1
        return siteDic
    def removeEmptySites(self, siteDic):
        """Drop sites with no observations in the last 24 hours.

        Useful before displaying data, so an empty site does not knock
        out sites with data in sitesSpacedBy above.
        """
        count = 0
        keys = list(siteDic.keys())
        while count != len(keys):
            site = siteDic.get(keys[count])
            if not site.obsInserted():
                del(keys[count])
                del(siteDic[site.getID()])
            else:  # only increment if we don't delete
                count += 1
        return siteDic
    def findClosestSites(self, metarSite, n):
        """Return a dict of the n sites closest to metarSite.

        Builds a heap of all sites ordered by distance, so this runs
        in O(n) heap pushes plus n pops.
        """
        heap = []
        retDic = {}
        for site in self.__metarSites__:
            otherSite = self.__metarSites__.get(site)
            otherSite.distanceFrom(self.__llconvert__, metarSite)
            heappush(heap, otherSite)
        count = 0
        while count != n:
            site = heappop(heap)
            # Skip the reference site itself; comparing IDs is as fast
            # as comparing object identity here.
            if site.getID() != metarSite.getID():
                retDic[site.getID()] = site
                count += 1
        return retDic
    def downloadCycle(self):
        """Download all 24 hourly cycle files from the NWS FTP server."""
        ftp = __FTP__("tgftp.nws.noaa.gov")
        ftp.login()
        ftp.cwd("/data/observations/metar/cycles/")
        nlst = ftp.nlst()
        for i in nlst:
            hour = []
            ftp.retrlines("RETR " + i, hour.append)
            # File names start with the two-digit UTC hour.
            self.__setHour__(int(i[0:2]), hour)
        ftp.close()
    def downloadCurrentHour(self):
        """Download only the current UTC hour from the FTP server."""
        utcnow = __datetime__.utcnow()
        ftp = __FTP__("tgftp.nws.noaa.gov")
        ftp.login()
        ftp.cwd("/data/observations/metar/cycles/")
        hour = []
        ftp.retrlines("RETR " + "%02d" % (utcnow.hour) + "Z.TXT", hour.append)
        self.__setHour__(utcnow.hour, hour)
        ftp.close()
| mit |
Stane1983/kernel-aml-m3 | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/SchedGui.py | 12980 | 5411 | # SchedGui.py - Python extension for perf script, basic GUI code for
# traces drawing and overview.
#
# Copyright (C) 2010 by Frederic Weisbecker <fweisbec@gmail.com>
#
# This software is distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
try:
import wx
except ImportError:
raise ImportError, "You need to install the wxpython lib for this script"
class RootFrame(wx.Frame):
	"""Top-level wxPython window that draws the scheduler trace as
	horizontal rectangles (one row per CPU/task) on a scrollable,
	zoomable canvas.  All drawing coordinates are derived from trace
	timestamps via us_to_px/px_to_us.
	"""
	Y_OFFSET = 100
	RECT_HEIGHT = 100
	RECT_SPACE = 50
	EVENT_MARKING_WIDTH = 5
	def __init__(self, sched_tracer, title, parent = None, id = -1):
		wx.Frame.__init__(self, parent, id, title)
		(self.screen_width, self.screen_height) = wx.GetDisplaySize()
		self.screen_width -= 10
		self.screen_height -= 10
		self.zoom = 0.5
		self.scroll_scale = 20
		self.sched_tracer = sched_tracer
		self.sched_tracer.set_root_win(self)
		(self.ts_start, self.ts_end) = sched_tracer.interval()
		self.update_width_virtual()
		self.nr_rects = sched_tracer.nr_rectangles() + 1
		self.height_virtual = RootFrame.Y_OFFSET + (self.nr_rects * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE))
		# whole window panel
		self.panel = wx.Panel(self, size=(self.screen_width, self.screen_height))
		# scrollable container
		self.scroll = wx.ScrolledWindow(self.panel)
		self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale)
		self.scroll.EnableScrolling(True, True)
		self.scroll.SetFocus()
		# scrollable drawing area
		self.scroll_panel = wx.Panel(self.scroll, size=(self.screen_width - 15, self.screen_height / 2))
		self.scroll_panel.Bind(wx.EVT_PAINT, self.on_paint)
		self.scroll_panel.Bind(wx.EVT_KEY_DOWN, self.on_key_press)
		self.scroll_panel.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down)
		self.scroll.Bind(wx.EVT_PAINT, self.on_paint)
		self.scroll.Bind(wx.EVT_KEY_DOWN, self.on_key_press)
		self.scroll.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down)
		self.scroll.Fit()
		self.Fit()
		self.scroll_panel.SetDimensions(-1, -1, self.width_virtual, self.height_virtual, wx.SIZE_USE_EXISTING)
		self.txt = None
		self.Show(True)
	def us_to_px(self, val):
		# Convert a microsecond interval to pixels at the current zoom.
		return val / (10 ** 3) * self.zoom
	def px_to_us(self, val):
		# Inverse of us_to_px.
		return (val / self.zoom) * (10 ** 3)
	def scroll_start(self):
		# Current scroll position in pixels (view units * scale).
		(x, y) = self.scroll.GetViewStart()
		return (x * self.scroll_scale, y * self.scroll_scale)
	def scroll_start_us(self):
		# Timestamp offset (us) of the left edge of the view.
		(x, y) = self.scroll_start()
		return self.px_to_us(x)
	def paint_rectangle_zone(self, nr, color, top_color, start, end):
		# Draw one time interval as a filled rectangle in row `nr`,
		# optionally with a thin `top_color` event-marking strip.
		offset_px = self.us_to_px(start - self.ts_start)
		width_px = self.us_to_px(end - self.ts_start)
		offset_py = RootFrame.Y_OFFSET + (nr * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE))
		width_py = RootFrame.RECT_HEIGHT
		dc = self.dc
		if top_color is not None:
			(r, g, b) = top_color
			top_color = wx.Colour(r, g, b)
			brush = wx.Brush(top_color, wx.SOLID)
			dc.SetBrush(brush)
			dc.DrawRectangle(offset_px, offset_py, width_px, RootFrame.EVENT_MARKING_WIDTH)
			width_py -= RootFrame.EVENT_MARKING_WIDTH
			offset_py += RootFrame.EVENT_MARKING_WIDTH
		(r ,g, b) = color
		color = wx.Colour(r, g, b)
		brush = wx.Brush(color, wx.SOLID)
		dc.SetBrush(brush)
		dc.DrawRectangle(offset_px, offset_py, width_px, width_py)
	def update_rectangles(self, dc, start, end):
		# Ask the tracer to repaint the [start, end] interval
		# (offsets are relative to the trace start).
		start += self.ts_start
		end += self.ts_start
		self.sched_tracer.fill_zone(start, end)
	def on_paint(self, event):
		# Repaint only the visible time window.
		dc = wx.PaintDC(self.scroll_panel)
		self.dc = dc
		width = min(self.width_virtual, self.screen_width)
		(x, y) = self.scroll_start()
		start = self.px_to_us(x)
		end = self.px_to_us(x + width)
		self.update_rectangles(dc, start, end)
	def rect_from_ypixel(self, y):
		# Map a y pixel back to a row number, or -1 when the click
		# landed in the spacing between rows or outside all rows.
		y -= RootFrame.Y_OFFSET
		rect = y / (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)
		height = y % (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)
		if rect < 0 or rect > self.nr_rects - 1 or height > RootFrame.RECT_HEIGHT:
			return -1
		return rect
	def update_summary(self, txt):
		# Replace the summary text below the canvas.
		if self.txt:
			self.txt.Destroy()
		self.txt = wx.StaticText(self.panel, -1, txt, (0, (self.screen_height / 2) + 50))
	def on_mouse_down(self, event):
		# Forward clicks inside a row to the tracer as (row, time).
		(x, y) = event.GetPositionTuple()
		rect = self.rect_from_ypixel(y)
		if rect == -1:
			return
		t = self.px_to_us(x) + self.ts_start
		self.sched_tracer.mouse_down(rect, t)
	def update_width_virtual(self):
		self.width_virtual = self.us_to_px(self.ts_end - self.ts_start)
	def __zoom(self, x):
		# Rescale the scrollbars while keeping timestamp x in view.
		self.update_width_virtual()
		(xpos, ypos) = self.scroll.GetViewStart()
		xpos = self.us_to_px(x) / self.scroll_scale
		self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale, xpos, ypos)
		self.Refresh()
	def zoom_in(self):
		x = self.scroll_start_us()
		self.zoom *= 2
		self.__zoom(x)
	def zoom_out(self):
		x = self.scroll_start_us()
		self.zoom /= 2
		self.__zoom(x)
	def on_key_press(self, event):
		# +/- zoom; arrow keys scroll by one scroll unit.
		key = event.GetRawKeyCode()
		if key == ord("+"):
			self.zoom_in()
			return
		if key == ord("-"):
			self.zoom_out()
			return
		key = event.GetKeyCode()
		(x, y) = self.scroll.GetViewStart()
		if key == wx.WXK_RIGHT:
			self.scroll.Scroll(x + 1, y)
		elif key == wx.WXK_LEFT:
			self.scroll.Scroll(x - 1, y)
		elif key == wx.WXK_DOWN:
			self.scroll.Scroll(x, y + 1)
		elif key == wx.WXK_UP:
			self.scroll.Scroll(x, y - 1)
italomandara/mysite | myresume/admin.py | 1 | 1292 | from django.contrib import admin
from django.db import models
from .models import Person, Skill, MyContent, Job, Course, Contact, Post
from django_filepicker.forms import FPFileField
from django_filepicker.widgets import FPFileWidget
from django import forms
class PostAdminForm(forms.ModelForm):
    """ModelForm for Post that renders every image field with a
    Filepicker upload widget instead of the default file input."""
    class Meta:
        model = Post
        fields = '__all__'
        # All three image fields share the same widget configuration.
        widgets = {
            field_name: FPFileWidget(attrs={'type': 'filepicker'})
            for field_name in ('featured_image',
                               'article_image1',
                               'article_image2')
        }
class PostAdmin(admin.ModelAdmin):
    """Admin for Post wired to the Filepicker-enabled form."""
    form = PostAdminForm

    class Media:
        # Load the Filepicker client library on the change page.
        js = ["//api.filepicker.io/v1/filepicker.js"]
class MyContentAdminForm(forms.ModelForm):
    """ModelForm for MyContent that swaps the media fields' widgets
    for Filepicker upload inputs."""
    class Meta:
        model = MyContent
        fields = '__all__'
        # Video and image fields all use the same Filepicker widget.
        widgets = {
            field_name: FPFileWidget(attrs={'type': 'filepicker'})
            for field_name in ('video_primary',
                               'image_primary',
                               'image_secondary')
        }
class MyContentAdmin(admin.ModelAdmin):
    """Admin for MyContent wired to the Filepicker-enabled form."""
    form = MyContentAdminForm

    class Media:
        # Load the Filepicker client library on the change page.
        js = ["//api.filepicker.io/v1/filepicker.js"]
# Simple models use the default ModelAdmin; Post and MyContent get the
# custom admins with Filepicker widgets defined above.
admin.site.register([Person, Skill, Job, Course, Contact])
admin.site.register(Post, PostAdmin)
admin.site.register(MyContent, MyContentAdmin)
| mit |
ktritz/vispy | vispy/util/svg/viewport.py | 23 | 2017 | # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2014, Nicolas P. Rougier. All rights reserved.
# Distributed under the terms of the new BSD License.
# -----------------------------------------------------------------------------
from . length import XLength, YLength
class Viewport(object):
    """An SVG viewport: x/y offset plus width/height of a drawing
    region, parsed from an element's attribute dictionary.

    Every attribute is optional; when one is absent the corresponding
    computed value falls back to a default (0 for x/y, 800 for
    width/height).

    NOTE(review): the ``content=None`` default is unusable as written
    (``None.get`` would raise); it is kept only for signature
    compatibility — callers always pass the parsed element content.
    """
    def __init__(self, content=None, parent=None):
        self._x = None
        self._computed_x = 0
        if content.get('x'):
            self._x = XLength(content.get('x'), parent)
            self._computed_x = float(self._x)
        self._y = None
        self._computed_y = 0
        if content.get('y'):
            # Bug fix: 'y' is a vertical length, so it must be resolved
            # with YLength; the original used XLength, which scales
            # relative units against the wrong (horizontal) axis.
            self._y = YLength(content.get('y'), parent)
            self._computed_y = float(self._y)
        self._width = None
        self._computed_width = 800
        if content.get('width'):
            self._width = XLength(content.get('width'), parent)
            self._computed_width = float(self._width)
        self._height = None
        self._computed_height = 800
        if content.get('height'):
            self._height = YLength(content.get('height'), parent)
            self._computed_height = float(self._height)
    @property
    def x(self):
        """Computed x offset (0 if unspecified)."""
        return self._computed_x
    @property
    def y(self):
        """Computed y offset (0 if unspecified)."""
        return self._computed_y
    @property
    def width(self):
        """Computed width (800 if unspecified)."""
        return self._computed_width
    @property
    def height(self):
        """Computed height (800 if unspecified)."""
        return self._computed_height
    def __repr__(self):
        return repr((self._x, self._y, self._width, self._height))
    @property
    def xml(self):
        """XML attribute string, e.g. 'x="10" width="100" '."""
        return self._xml
    @property
    def _xml(self):
        # Only attributes that were explicitly given are serialized.
        # (Dropped the unusable 'prefix' parameter: property getters
        # only ever receive 'self', so it could never be supplied.)
        s = ""
        if self._x:
            s += 'x="%s" ' % repr(self._x)
        if self._y:
            s += 'y="%s" ' % repr(self._y)
        if self._width:
            s += 'width="%s" ' % repr(self._width)
        if self._height:
            s += 'height="%s" ' % repr(self._height)
        return s
| bsd-3-clause |
ar0551/Wasp | src/ghComp/Wasp_Deconstruct Attribute.py | 1 | 2916 | # Wasp: Discrete Design with Grasshopper plug-in (GPL) initiated by Andrea Rossi
#
# This file is part of Wasp.
#
# Copyright (c) 2017, Andrea Rossi <a.rossi.andrea@gmail.com>
# Wasp is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published
# by the Free Software Foundation; either version 3 of the License,
# or (at your option) any later version.
#
# Wasp is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Wasp; If not, see <http://www.gnu.org/licenses/>.
#
# @license GPL-3.0 <https://www.gnu.org/licenses/gpl.html>
#
# Significant parts of Wasp have been developed by Andrea Rossi
# as part of research on digital materials and discrete design at:
# DDU Digital Design Unit - Prof. Oliver Tessmann
# Technische Universitt Darmstadt
#########################################################################
## COMPONENT INFO ##
#########################################################################
"""
Extract values stored in an atrribute
-
Provided by Wasp 0.5
Args:
ATTR: Attribute to deconstruct
Returns:
ID: Name of the attribute
VAL: Value stored in the attribute
"""
## Grasshopper component registration: display name, version tag, icon
## mode and palette placement shown in the Grasshopper UI.
ghenv.Component.Name = "Wasp_Deconstruct Attribute"
ghenv.Component.NickName = 'DeAttr'
ghenv.Component.Message = 'v0.5.004'
ghenv.Component.IconDisplayMode = ghenv.Component.IconDisplayMode.application
ghenv.Component.Category = "Wasp"
ghenv.Component.SubCategory = "1 | Elements"
## Older Grasshopper builds lack this attribute, hence the try/except.
try: ghenv.Component.AdditionalHelpFromDocStrings = "2"
except: pass
import sys
import Grasshopper as gh
## add Wasp install directory to system path
wasp_loaded = False
ghcompfolder = gh.Folders.DefaultAssemblyFolder
if ghcompfolder not in sys.path:
sys.path.append(ghcompfolder)
try:
from wasp import __version__
wasp_loaded = True
except:
msg = "Cannot import Wasp. Is the wasp folder available in " + ghcompfolder + "?"
ghenv.Component.AddRuntimeMessage(gh.Kernel.GH_RuntimeMessageLevel.Error, msg)
## if Wasp is installed correctly, load the classes required by the component
if wasp_loaded:
pass
def main(attribute):
    """Deconstruct a Wasp attribute into (name, values).

    Returns -1 (the component's error sentinel) and raises a GH
    warning when no attribute was supplied.
    """
    ## guard clause: warn and bail out when the input is missing
    if attribute is None:
        msg = "No attribute provided"
        ghenv.Component.AddRuntimeMessage(gh.Kernel.GH_RuntimeMessageLevel.Warning, msg)
        return -1
    ## unpack the attribute into its identifier and stored values
    return attribute.name, attribute.values
result = main(ATTR)
if result != -1:
ID = result[0]
VAL = result[1] | gpl-3.0 |
0x7678/youtube-dl | youtube_dl/extractor/nuvid.py | 127 | 2665 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import (
compat_urllib_request,
)
from ..utils import (
parse_duration,
unified_strdate,
)
class NuvidIE(InfoExtractor):
    """Extractor for nuvid.com videos.

    Fetches the mobile play page twice (once per download speed) to
    collect the 3gp and mp4 format URLs, then scrapes metadata from
    the mobile video page.
    """
    _VALID_URL = r'https?://(?:www|m)\.nuvid\.com/video/(?P<id>[0-9]+)'
    _TEST = {
        'url': 'http://m.nuvid.com/video/1310741/',
        'md5': 'eab207b7ac4fccfb4e23c86201f11277',
        'info_dict': {
            'id': '1310741',
            'ext': 'mp4',
            'title': 'Horny babes show their awesome bodeis and',
            'duration': 129,
            'upload_date': '20140508',
            'age_limit': 18,
        }
    }
    def _real_extract(self, url):
        video_id = self._match_id(url)
        formats = []
        # The site selects quality via the 'dwnld_speed' cookie:
        # 0 -> 3gp page, 5 -> mp4 page.
        for dwnld_speed, format_id in [(0, '3gp'), (5, 'mp4')]:
            request = compat_urllib_request.Request(
                'http://m.nuvid.com/play/%s' % video_id)
            request.add_header('Cookie', 'skip_download_page=1; dwnld_speed=%d; adv_show=1' % dwnld_speed)
            webpage = self._download_webpage(
                request, video_id, 'Downloading %s page' % format_id)
            # Not every video offers both formats; skip missing ones.
            video_url = self._html_search_regex(
                r'<a\s+href="([^"]+)"\s+class="b_link">', webpage, '%s video URL' % format_id, fatal=False)
            if not video_url:
                continue
            formats.append({
                'url': video_url,
                'format_id': format_id,
            })
        # Metadata (title, thumbnails, duration, date) lives on the
        # regular mobile video page.
        webpage = self._download_webpage(
            'http://m.nuvid.com/video/%s' % video_id, video_id, 'Downloading video page')
        title = self._html_search_regex(
            [r'<span title="([^"]+)">',
             r'<div class="thumb-holder video">\s*<h5[^>]*>([^<]+)</h5>'], webpage, 'title').strip()
        thumbnails = [
            {
                'url': thumb_url,
            } for thumb_url in re.findall(r'<img src="([^"]+)" alt="" />', webpage)
        ]
        thumbnail = thumbnails[0]['url'] if thumbnails else None
        duration = parse_duration(self._html_search_regex(
            r'<i class="fa fa-clock-o"></i>\s*(\d{2}:\d{2})', webpage, 'duration', fatal=False))
        upload_date = unified_strdate(self._html_search_regex(
            r'<i class="fa fa-user"></i>\s*(\d{4}-\d{2}-\d{2})', webpage, 'upload date', fatal=False))
        return {
            'id': video_id,
            'title': title,
            'thumbnails': thumbnails,
            'thumbnail': thumbnail,
            'duration': duration,
            'upload_date': upload_date,
            'age_limit': 18,
            'formats': formats,
        }
| unlicense |
banglakit/spaCy | spacy/tests/spans/test_merge.py | 1 | 4698 | # coding: utf-8
from __future__ import unicode_literals
from ..util import get_doc
import pytest
def test_spans_merge_tokens(en_tokenizer):
    # Merging "Los Angeles" into one token should shrink the doc and
    # re-attach the merged token's head to "start".
    text = "Los Angeles start."
    heads = [1, 1, 0, -1]
    tokens = en_tokenizer(text)
    doc = get_doc(tokens.vocab, [t.text for t in tokens], heads=heads)
    assert len(doc) == 4
    assert doc[0].head.text == 'Angeles'
    assert doc[1].head.text == 'start'
    doc.merge(0, len('Los Angeles'), 'NNP', 'Los Angeles', 'GPE')
    assert len(doc) == 3
    assert doc[0].text == 'Los Angeles'
    assert doc[0].head.text == 'start'
def test_spans_merge_heads(en_tokenizer):
    # After merging "pilates class", the remaining tokens' head
    # indices must be remapped consistently.
    text = "I found a pilates class near work."
    heads = [1, 0, 2, 1, -3, -1, -1, -6]
    tokens = en_tokenizer(text)
    doc = get_doc(tokens.vocab, [t.text for t in tokens], heads=heads)
    assert len(doc) == 8
    doc.merge(doc[3].idx, doc[4].idx + len(doc[4]), doc[4].tag_, 'pilates class', 'O')
    assert len(doc) == 7
    assert doc[0].head.i == 1
    assert doc[1].head.i == 1
    assert doc[2].head.i == 3
    assert doc[3].head.i == 1
    assert doc[4].head.i in [1, 3]
    assert doc[5].head.i == 4
def test_span_np_merges(en_tokenizer):
    # Three scenarios: merging a noun phrase re-heads the new token,
    # merging every entity span succeeds, and merging while iterating
    # doc.ents succeeds.
    text = "displaCy is a parse tool built with Javascript"
    heads = [1, 0, 2, 1, -3, -1, -1, -1]
    tokens = en_tokenizer(text)
    doc = get_doc(tokens.vocab, [t.text for t in tokens], heads=heads)
    assert doc[4].head.i == 1
    doc.merge(doc[2].idx, doc[4].idx + len(doc[4]), 'NP', 'tool', 'O')
    assert doc[2].head.i == 1
    text = "displaCy is a lightweight and modern dependency parse tree visualization tool built with CSS3 and JavaScript."
    heads = [1, 0, 8, 3, -1, -2, 4, 3, 1, 1, -9, -1, -1, -1, -1, -2, -15]
    tokens = en_tokenizer(text)
    doc = get_doc(tokens.vocab, [t.text for t in tokens], heads=heads)
    ents = [(e[0].idx, e[-1].idx + len(e[-1]), e.label_, e.lemma_) for e in doc.ents]
    for start, end, label, lemma in ents:
        merged = doc.merge(start, end, label, lemma, label)
        assert merged != None, (start, end, label, lemma)
    text = "One test with entities like New York City so the ents list is not void"
    heads = [1, 11, -1, -1, -1, 1, 1, -3, 4, 2, 1, 1, 0, -1, -2]
    tokens = en_tokenizer(text)
    doc = get_doc(tokens.vocab, [t.text for t in tokens], heads=heads)
    for span in doc.ents:
        # NOTE(review): doc.merge() is called with no arguments here,
        # unlike the five-argument call above — this looks like it
        # would raise a TypeError.  Presumably the span's boundaries
        # (span.start_char, span.end_char, ...) or span.merge() was
        # intended; confirm against the upstream test.
        merged = doc.merge()
        assert merged != None, (span.start, span.end, span.label_, span.lemma_)
def test_spans_entity_merge(en_tokenizer):
    # Merging every entity span (two PERSONs and a GPE) must shrink
    # the doc by the number of collapsed tokens without crashing.
    text = "Stewart Lee is a stand up comedian who lives in England and loves Joe Pasquale.\n"
    heads = [1, 1, 0, 1, 2, -1, -4, 1, -2, -1, -1, -3, -10, 1, -2, -13, -1]
    tags = ['NNP', 'NNP', 'VBZ', 'DT', 'VB', 'RP', 'NN', 'WP', 'VBZ', 'IN', 'NNP', 'CC', 'VBZ', 'NNP', 'NNP', '.', 'SP']
    ents = [('Stewart Lee', 'PERSON', 0, 2), ('England', 'GPE', 10, 11), ('Joe Pasquale', 'PERSON', 13, 15)]
    tokens = en_tokenizer(text)
    doc = get_doc(tokens.vocab, [t.text for t in tokens], heads=heads, tags=tags, ents=ents)
    assert len(doc) == 17
    for ent in doc.ents:
        label, lemma, type_ = (ent.root.tag_, ent.root.lemma_, max(w.ent_type_ for w in ent))
        ent.merge(label, lemma, type_)
    # check looping is ok
    assert len(doc) == 15
def test_spans_sentence_update_after_merge(en_tokenizer):
    # Sentence spans captured *before* a merge must reflect the new,
    # shorter token counts afterwards.
    text = "Stewart Lee is a stand up comedian. He lives in England and loves Joe Pasquale."
    heads = [1, 1, 0, 1, 2, -1, -4, -5, 1, 0, -1, -1, -3, -4, 1, -2, -7]
    deps = ['compound', 'nsubj', 'ROOT', 'det', 'amod', 'prt', 'attr',
            'punct', 'nsubj', 'ROOT', 'prep', 'pobj', 'cc', 'conj',
            'compound', 'dobj', 'punct']
    tokens = en_tokenizer(text)
    doc = get_doc(tokens.vocab, [t.text for t in tokens], heads=heads, deps=deps)
    sent1, sent2 = list(doc.sents)
    init_len = len(sent1)
    init_len2 = len(sent2)
    # Merge a two-token span in each sentence.
    doc[0:2].merge('none', 'none', 'none')
    doc[-2:].merge('none', 'none', 'none')
    assert len(sent1) == init_len - 1
    assert len(sent2) == init_len2 - 1
def test_spans_subtree_size_check(en_tokenizer):
    # A subtree computed from the sentence root must shrink by one
    # after merging a two-token span inside it.
    text = "Stewart Lee is a stand up comedian who lives in England and loves Joe Pasquale"
    heads = [1, 1, 0, 1, 2, -1, -4, 1, -2, -1, -1, -3, -10, 1, -2]
    deps = ['compound', 'nsubj', 'ROOT', 'det', 'amod', 'prt', 'attr',
            'nsubj', 'relcl', 'prep', 'pobj', 'cc', 'conj', 'compound',
            'dobj']
    tokens = en_tokenizer(text)
    doc = get_doc(tokens.vocab, [t.text for t in tokens], heads=heads, deps=deps)
    sent1 = list(doc.sents)[0]
    init_len = len(list(sent1.root.subtree))
    doc[0:2].merge('none', 'none', 'none')
    assert len(list(sent1.root.subtree)) == init_len - 1
| mit |
DataKind-SG/test-driven-data-cleaning | tddc/tests/test_build.py | 1 | 1106 | import os
import subprocess
import pytest
from tddc import build
@pytest.fixture()
def build_class(fixtures_dir, input_filename, tmpdir):
    """Return a build.Scripts instance writing into a fresh temp directory."""
    kwargs = {
        'summaries_root_dir': fixtures_dir,
        'input_file': input_filename,
        'scripts_root_dir': tmpdir.strpath,
        'output_dir': '',
    }
    return build.Scripts(**kwargs)
def is_same_file(file_a, file_b):
    """Return True if the two files have byte-identical contents.

    Uses the stdlib ``filecmp`` module instead of shelling out to the
    external ``diff`` binary, so the check also works on platforms
    where ``diff`` is not installed and avoids spawning a subprocess.
    """
    # local import so this change is self-contained
    import filecmp
    # shallow=False forces a content comparison, not just an os.stat() match
    return filecmp.cmp(file_a, file_b, shallow=False)
def test_write_cleaning_script(build_class, fixtures_dir):
    """The generated cleaning script must match its checked-in fixture."""
    generated = build_class.write_cleaning_script()
    base_name = os.path.splitext(os.path.basename(generated))[0]
    expected = os.path.join(fixtures_dir, base_name)
    assert is_same_file(generated, expected)
def test_write_test_cleaning_script(build_class, fixtures_dir):
    """The generated test-cleaning script must match its checked-in fixture."""
    generated = build_class.write_test_cleaning_script()
    base_name = os.path.splitext(os.path.basename(generated))[0]
    expected = os.path.join(fixtures_dir, base_name)
    assert is_same_file(generated, expected)
| mit |
archf/ansible | test/units/modules/network/ios/test_ios_ping.py | 20 | 2742 | #
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.compat.tests.mock import patch
from ansible.modules.network.ios import ios_ping
from .ios_module import TestIosModule, load_fixture, set_module_args
class TestIosPingModule(TestIosModule):
    ''' Class used for Unit Tests against ios_ping module '''

    module = ios_ping

    def setUp(self):
        '''Patch run_commands so no real device connection is attempted.'''
        self.mock_run_commands = patch('ansible.modules.network.ios.ios_ping.run_commands')
        self.run_commands = self.mock_run_commands.start()

    def tearDown(self):
        '''Stop the run_commands patcher started in setUp.'''
        self.mock_run_commands.stop()

    def load_fixtures(self, commands=None):
        '''Make the mocked run_commands return canned output from fixture files.'''
        def load_from_file(*args, **kwargs):
            '''Map each requested command to its on-disk fixture file.'''
            commands = kwargs['commands']
            output = list()
            for command in commands:
                # fixture name: first pipe-segment of the command, spaces -> underscores
                filename = str(command).split(' | ')[0].replace(' ', '_')
                output.append(load_fixture('ios_ping_%s' % filename))
            return output

        self.run_commands.side_effect = load_from_file

    def test_ios_ping_expected_success(self):
        ''' Test for successful pings when destination should be reachable '''
        set_module_args(dict(count=2, dest="8.8.8.8"))
        self.execute_module()

    def test_ios_ping_expected_failure(self):
        ''' Test for unsuccessful pings when destination should not be reachable '''
        set_module_args(dict(count=2, dest="10.255.255.250", state="absent", timeout=45))
        self.execute_module()

    def test_ios_ping_unexpected_success(self):
        ''' Test for successful pings when destination should not be reachable - FAIL. '''
        set_module_args(dict(count=2, dest="8.8.8.8", state="absent"))
        self.execute_module(failed=True)

    def test_ios_ping_unexpected_failure(self):
        ''' Test for unsuccessful pings when destination should be reachable - FAIL. '''
        set_module_args(dict(count=2, dest="10.255.255.250", timeout=45))
        self.execute_module(failed=True)
| gpl-3.0 |
tlein/Ancona | Test/ExtLibs/gtest-1.7.0/test/gtest_xml_outfiles_test.py | 2526 | 5340 | #!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for the gtest_xml_output module."""
__author__ = "keith.ray@gmail.com (Keith Ray)"
import os
from xml.dom import minidom, Node
import gtest_test_utils
import gtest_xml_test_utils
GTEST_OUTPUT_SUBDIR = "xml_outfiles"
GTEST_OUTPUT_1_TEST = "gtest_xml_outfile1_test_"
GTEST_OUTPUT_2_TEST = "gtest_xml_outfile2_test_"
EXPECTED_XML_1 = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="1" failures="0" disabled="0" errors="0" time="*" timestamp="*" name="AllTests">
<testsuite name="PropertyOne" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="TestSomeProperties" status="run" time="*" classname="PropertyOne" SetUpProp="1" TestSomeProperty="1" TearDownProp="1" />
</testsuite>
</testsuites>
"""
EXPECTED_XML_2 = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="1" failures="0" disabled="0" errors="0" time="*" timestamp="*" name="AllTests">
<testsuite name="PropertyTwo" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="TestSomeProperties" status="run" time="*" classname="PropertyTwo" SetUpProp="2" TestSomeProperty="2" TearDownProp="2" />
</testsuite>
</testsuites>
"""
class GTestXMLOutFilesTest(gtest_xml_test_utils.GTestXMLTestCase):
  """Unit test for Google Test's XML output functionality."""

  def setUp(self):
    """Computes the XML output directory and starts from a clean slate."""
    # We want the trailing '/' that the last "" provides in os.path.join, for
    # telling Google Test to create an output directory instead of a single file
    # for xml output.
    self.output_dir_ = os.path.join(gtest_test_utils.GetTempDir(),
                                    GTEST_OUTPUT_SUBDIR, "")
    self.DeleteFilesAndDir()

  def tearDown(self):
    """Removes any XML files the test binaries produced."""
    self.DeleteFilesAndDir()

  def DeleteFilesAndDir(self):
    """Best-effort removal of both XML output files and their directory."""
    try:
      os.remove(os.path.join(self.output_dir_, GTEST_OUTPUT_1_TEST + ".xml"))
    except os.error:
      pass
    try:
      os.remove(os.path.join(self.output_dir_, GTEST_OUTPUT_2_TEST + ".xml"))
    except os.error:
      pass
    try:
      os.rmdir(self.output_dir_)
    except os.error:
      pass

  def testOutfile1(self):
    """Checks the XML written by the first test binary."""
    self._TestOutFile(GTEST_OUTPUT_1_TEST, EXPECTED_XML_1)

  def testOutfile2(self):
    """Checks the XML written by the second test binary."""
    self._TestOutFile(GTEST_OUTPUT_2_TEST, EXPECTED_XML_2)

  def _TestOutFile(self, test_name, expected_xml):
    """Runs test_name with --gtest_output and compares its XML to expected_xml."""
    gtest_prog_path = gtest_test_utils.GetTestExecutablePath(test_name)
    command = [gtest_prog_path, "--gtest_output=xml:%s" % self.output_dir_]
    p = gtest_test_utils.Subprocess(command,
                                    working_dir=gtest_test_utils.GetTempDir())
    self.assert_(p.exited)
    self.assertEquals(0, p.exit_code)

    # TODO(wan@google.com): libtool causes the built test binary to be
    # named lt-gtest_xml_outfiles_test_ instead of
    # gtest_xml_outfiles_test_.  To account for this possibillity, we
    # allow both names in the following code.  We should remove this
    # hack when Chandler Carruth's libtool replacement tool is ready.
    output_file_name1 = test_name + ".xml"
    output_file1 = os.path.join(self.output_dir_, output_file_name1)
    output_file_name2 = 'lt-' + output_file_name1
    output_file2 = os.path.join(self.output_dir_, output_file_name2)
    # Either the plain or the libtool-prefixed file name is acceptable.
    self.assert_(os.path.isfile(output_file1) or os.path.isfile(output_file2),
                 output_file1)

    expected = minidom.parseString(expected_xml)
    if os.path.isfile(output_file1):
      actual = minidom.parse(output_file1)
    else:
      actual = minidom.parse(output_file2)
    self.NormalizeXml(actual.documentElement)
    self.AssertEquivalentNodes(expected.documentElement,
                               actual.documentElement)
    expected.unlink()
    actual.unlink()
if __name__ == "__main__":
  # Disable Google Test stack traces (depth 0) before running the suite.
  os.environ["GTEST_STACK_TRACE_DEPTH"] = "0"
  gtest_test_utils.Main()
| mit |
drewandersonnz/openshift-tools | ansible/roles/lib_openshift_3.2/library/oc_serviceaccount.py | 6 | 40250 | #!/usr/bin/env python # pylint: disable=too-many-lines
# ___ ___ _ _ ___ ___ _ _____ ___ ___
# / __| __| \| | __| _ \ /_\_ _| __| \
# | (_ | _|| .` | _|| / / _ \| | | _|| |) |
# \___|___|_|\_|___|_|_\/_/_\_\_|_|___|___/_ _____
# | \ / _ \ | \| |/ _ \_ _| | __| \_ _|_ _|
# | |) | (_) | | .` | (_) || | | _|| |) | | | |
# |___/ \___/ |_|\_|\___/ |_| |___|___/___| |_|
'''
OpenShiftCLI class that wraps the oc commands in a subprocess
'''
# pylint: disable=too-many-lines
import atexit
import json
import os
import re
import shutil
import subprocess
import ruamel.yaml as yaml
#import yaml
#
## This is here because of a bug that causes yaml
## to incorrectly handle timezone info on timestamps
#def timestamp_constructor(_, node):
# '''return timestamps as strings'''
# return str(node.value)
#yaml.add_constructor(u'tag:yaml.org,2002:timestamp', timestamp_constructor)
class OpenShiftCLIError(Exception):
    '''Raised when an openshiftcli operation fails.'''
# pylint: disable=too-few-public-methods
class OpenShiftCLI(object):
    ''' Class to wrap the command line tools.

    Every operation shells out to /usr/bin/oc (or "oc adm") via
    openshift_cmd; the other methods just build argument lists.
    '''
    def __init__(self,
                 namespace,
                 kubeconfig='/etc/origin/master/admin.kubeconfig',
                 verbose=False,
                 all_namespaces=False):
        ''' Constructor for OpenshiftCLI '''
        self.namespace = namespace
        self.verbose = verbose
        self.kubeconfig = kubeconfig
        self.all_namespaces = all_namespaces

    # Pylint allows only 5 arguments to be passed.
    # pylint: disable=too-many-arguments
    def _replace_content(self, resource, rname, content, force=False, sep='.'):
        ''' replace the current object with the content '''
        res = self._get(resource, rname)
        if not res['results']:
            return res

        # Write the fetched resource to a temp file, apply the requested
        # key/value changes with Yedit, and only call replace if something
        # actually changed.
        fname = '/tmp/%s' % rname
        yed = Yedit(fname, res['results'][0], separator=sep)
        changes = []
        for key, value in content.items():
            changes.append(yed.put(key, value))

        if any([change[0] for change in changes]):
            yed.write()

            atexit.register(Utils.cleanup, [fname])

            return self._replace(fname, force)

        return {'returncode': 0, 'updated': False}

    def _replace(self, fname, force=False):
        '''run oc replace -f <fname> in the current namespace'''
        cmd = ['-n', self.namespace, 'replace', '-f', fname]
        if force:
            cmd.append('--force')
        return self.openshift_cmd(cmd)

    def _create_from_content(self, rname, content):
        '''serialize content to a temp file and oc create it'''
        fname = '/tmp/%s' % rname
        yed = Yedit(fname, content=content)
        yed.write()

        atexit.register(Utils.cleanup, [fname])

        return self._create(fname)

    def _create(self, fname):
        '''run oc create -f <fname> in the current namespace'''
        return self.openshift_cmd(['create', '-f', fname, '-n', self.namespace])

    def _delete(self, resource, rname, selector=None):
        '''run oc delete on the named resource, optionally narrowed by selector'''
        cmd = ['delete', resource, rname, '-n', self.namespace]
        if selector:
            cmd.append('--selector=%s' % selector)

        return self.openshift_cmd(cmd)

    def _process(self, template_name, create=False, params=None, template_data=None):
        '''run oc process on a template (by name or inline data);
        optionally create the resulting objects'''
        cmd = ['process', '-n', self.namespace]
        if template_data:
            # inline template is piped through stdin
            cmd.extend(['-f', '-'])
        else:
            cmd.append(template_name)
        if params:
            param_str = ["%s=%s" % (key, value) for key, value in params.items()]
            cmd.append('-v')
            cmd.extend(param_str)

        results = self.openshift_cmd(cmd, output=True, input_data=template_data)

        if results['returncode'] != 0 or not create:
            return results

        fname = '/tmp/%s' % template_name
        yed = Yedit(fname, results['results'])
        yed.write()

        atexit.register(Utils.cleanup, [fname])

        return self.openshift_cmd(['-n', self.namespace, 'create', '-f', fname])

    def _get(self, resource, rname=None, selector=None):
        '''return a resource by name '''
        cmd = ['get', resource]
        if selector:
            cmd.append('--selector=%s' % selector)
        if self.all_namespaces:
            cmd.extend(['--all-namespaces'])
        elif self.namespace:
            cmd.extend(['-n', self.namespace])

        cmd.extend(['-o', 'json'])

        if rname:
            cmd.append(rname)

        rval = self.openshift_cmd(cmd, output=True)

        # Ensure results are returned in an array
        if rval.has_key('items'):
            rval['results'] = rval['items']
        elif not isinstance(rval['results'], list):
            rval['results'] = [rval['results']]

        return rval

    def _schedulable(self, node=None, selector=None, schedulable=True):
        ''' perform oadm manage-node --schedulable '''
        cmd = ['manage-node']
        if node:
            cmd.extend(node)
        else:
            cmd.append('--selector=%s' % selector)
        cmd.append('--schedulable=%s' % schedulable)

        return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')

    def _list_pods(self, node=None, selector=None, pod_selector=None):
        ''' perform oadm manage-node --list-pods '''
        cmd = ['manage-node']
        if node:
            cmd.extend(node)
        else:
            cmd.append('--selector=%s' % selector)

        if pod_selector:
            cmd.append('--pod-selector=%s' % pod_selector)

        cmd.extend(['--list-pods', '-o', 'json'])

        return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')

    # pylint: disable=too-many-arguments
    def _evacuate(self, node=None, selector=None, pod_selector=None, dry_run=False, grace_period=None, force=False):
        ''' perform oadm manage-node --evacuate '''
        cmd = ['manage-node']
        if node:
            cmd.extend(node)
        else:
            cmd.append('--selector=%s' % selector)

        if dry_run:
            cmd.append('--dry-run')

        if pod_selector:
            cmd.append('--pod-selector=%s' % pod_selector)

        if grace_period:
            cmd.append('--grace-period=%s' % int(grace_period))

        if force:
            cmd.append('--force')

        cmd.append('--evacuate')

        return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')

    def _import_image(self, url=None, name=None, tag=None):
        ''' perform oc import-image for name[:tag], optionally from a registry url '''
        cmd = ['import-image']

        image = '{0}'.format(name)
        if tag:
            image += ':{0}'.format(tag)

        cmd.append(image)

        if url:
            cmd.append('--from={0}/{1}'.format(url, image))

        cmd.append('-n{0}'.format(self.namespace))

        cmd.append('--confirm')
        return self.openshift_cmd(cmd)

    # pylint: disable=too-many-arguments
    def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
        '''Base command for oc.

        Runs /usr/bin/oc (or "oc adm" when oadm=True) with KUBECONFIG set,
        and returns a dict with returncode/results/cmd plus stdout/stderr.
        '''
        cmds = []
        if oadm:
            cmds = ['/usr/bin/oc', 'adm']
        else:
            cmds = ['/usr/bin/oc']

        cmds.extend(cmd)

        rval = {}
        results = ''
        err = None

        if self.verbose:
            print ' '.join(cmds)

        proc = subprocess.Popen(cmds,
                                stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                env={'KUBECONFIG': self.kubeconfig})

        stdout, stderr = proc.communicate(input_data)
        rval = {"returncode": proc.returncode,
                "results": results,
                "cmd": ' '.join(cmds),
               }

        if proc.returncode == 0:
            if output:
                if output_type == 'json':
                    try:
                        rval['results'] = json.loads(stdout)
                    except ValueError as err:
                        # Empty/non-JSON stdout is tolerated; the message is
                        # stashed in err and attached to rval below.
                        if "No JSON object could be decoded" in err.message:
                            err = err.message
                elif output_type == 'raw':
                    rval['results'] = stdout

            if self.verbose:
                print stdout
                print stderr

            if err:
                rval.update({"err": err,
                             "stderr": stderr,
                             "stdout": stdout,
                             "cmd": cmds
                            })

        else:
            rval.update({"stderr": stderr,
                         "stdout": stdout,
                         "results": {},
                        })

        return rval
class Utils(object):
    ''' utilities for openshiftcli modules '''
    @staticmethod
    def create_file(rname, data, ftype='yaml'):
        ''' create a file in /tmp with the given name and serialized contents'''
        path = os.path.join('/tmp', rname)
        with open(path, 'w') as fds:
            if ftype == 'yaml':
                fds.write(yaml.dump(data, Dumper=yaml.RoundTripDumper))

            elif ftype == 'json':
                fds.write(json.dumps(data))
            else:
                # any other ftype: write data verbatim
                fds.write(data)

        # Register cleanup when module is done
        atexit.register(Utils.cleanup, [path])
        return path

    @staticmethod
    def create_files_from_contents(content, content_type=None):
        '''Turn an array of dict: filename, content into a files array'''
        if not isinstance(content, list):
            content = [content]
        files = []
        for item in content:
            path = Utils.create_file(item['path'], item['data'], ftype=content_type)
            files.append({'name': os.path.basename(path), 'path': path})
        return files

    @staticmethod
    def cleanup(files):
        '''Clean up on exit: remove each file or directory if it exists'''
        for sfile in files:
            if os.path.exists(sfile):
                if os.path.isdir(sfile):
                    shutil.rmtree(sfile)
                elif os.path.isfile(sfile):
                    os.remove(sfile)

    @staticmethod
    def exists(results, _name):
        ''' Check to see if the results include the name '''
        if not results:
            return False

        if Utils.find_result(results, _name):
            return True

        return False

    @staticmethod
    def find_result(results, _name):
        ''' Find the specified result by name'''
        rval = None
        for result in results:
            # match on metadata.name of each resource
            if result.has_key('metadata') and result['metadata']['name'] == _name:
                rval = result
                break

        return rval

    @staticmethod
    def get_resource_file(sfile, sfile_type='yaml'):
        ''' read and deserialize a resource file (yaml or json) '''
        contents = None
        with open(sfile) as sfd:
            contents = sfd.read()

        if sfile_type == 'yaml':
            contents = yaml.load(contents, yaml.RoundTripLoader)
        elif sfile_type == 'json':
            contents = json.loads(contents)

        return contents

    # Disabling too-many-branches.  This is a yaml dictionary comparison function
    # pylint: disable=too-many-branches,too-many-return-statements,too-many-statements
    @staticmethod
    def check_def_equal(user_def, result_def, skip_keys=None, debug=False):
        ''' Given a user defined definition, compare it with the results given back by our query.

        Returns True when every non-skipped key in result_def matches
        user_def (recursing into nested dicts and lists of dicts).
        '''

        # Currently these values are autogenerated and we do not need to check them
        skip = ['metadata', 'status']
        if skip_keys:
            skip.extend(skip_keys)

        for key, value in result_def.items():
            if key in skip:
                continue

            # Both are lists
            if isinstance(value, list):
                if not user_def.has_key(key):
                    if debug:
                        print 'User data does not have key [%s]' % key
                        print 'User data: %s' % user_def
                    return False

                if not isinstance(user_def[key], list):
                    if debug:
                        print 'user_def[key] is not a list key=[%s] user_def[key]=%s' % (key, user_def[key])
                    return False

                if len(user_def[key]) != len(value):
                    if debug:
                        print "List lengths are not equal."
                        print "key=[%s]: user_def[%s] != value[%s]" % (key, len(user_def[key]), len(value))
                        print "user_def: %s" % user_def[key]
                        print "value: %s" % value
                    return False

                for values in zip(user_def[key], value):
                    if isinstance(values[0], dict) and isinstance(values[1], dict):
                        if debug:
                            print 'sending list - list'
                            print type(values[0])
                            print type(values[1])
                        result = Utils.check_def_equal(values[0], values[1], skip_keys=skip_keys, debug=debug)
                        if not result:
                            print 'list compare returned false'
                            return False

                    elif value != user_def[key]:
                        if debug:
                            print 'value should be identical'
                            print value
                            print user_def[key]
                        return False

            # recurse on a dictionary
            elif isinstance(value, dict):
                if not user_def.has_key(key):
                    if debug:
                        print "user_def does not have key [%s]" % key
                    return False
                if not isinstance(user_def[key], dict):
                    if debug:
                        print "dict returned false: not instance of dict"
                    return False

                # before passing ensure keys match
                api_values = set(value.keys()) - set(skip)
                user_values = set(user_def[key].keys()) - set(skip)
                if api_values != user_values:
                    if debug:
                        print "keys are not equal in dict"
                        print api_values
                        print user_values
                    return False

                result = Utils.check_def_equal(user_def[key], value, skip_keys=skip_keys, debug=debug)
                if not result:
                    if debug:
                        print "dict returned false"
                        print result
                    return False

            # Verify each key, value pair is the same
            else:
                if not user_def.has_key(key) or value != user_def[key]:
                    if debug:
                        print "value not equal; user_def does not have key"
                        print key
                        print value
                        if user_def.has_key(key):
                            print user_def[key]
                    return False

        if debug:
            print 'returning true'
        return True
class OpenShiftCLIConfig(object):
    '''Generic container for a resource's configuration options.'''

    def __init__(self, rname, namespace, kubeconfig, options):
        self.kubeconfig = kubeconfig
        self.name = rname
        self.namespace = namespace
        self._options = options

    @property
    def config_options(self):
        ''' return the raw options dictionary '''
        return self._options

    def to_option_list(self):
        '''return all options rendered as CLI parameter strings'''
        return self.stringify()

    def stringify(self):
        ''' render the options hash as a list of --key=value cli params '''
        params = []
        for opt_name, opt in self.config_options.items():
            if not opt['include']:
                continue
            value = opt['value']
            # a value of 0 (or False) is still emitted; empty strings/None are not
            if value or isinstance(value, int):
                params.append('--%s=%s' % (opt_name.replace('_', '-'), value))
        return params
class YeditException(Exception):
    '''Raised when a Yedit operation cannot be completed.'''
class Yedit(object):
    ''' Class to modify yaml files.

    Keys are addressed with a dotted-path notation ("a.b[0].c"); the
    separator character is configurable so keys containing dots can
    still be addressed (e.g. separator='#').
    '''
    re_valid_key = r"(((\[-?\d+\])|([0-9a-zA-Z%s/_-]+)).?)+$"
    re_key = r"(?:\[(-?\d+)\])|([0-9a-zA-Z%s/_-]+)"
    com_sep = set(['.', '#', '|', ':'])

    # pylint: disable=too-many-arguments
    def __init__(self, filename=None, content=None, content_type='yaml', separator='.', backup=False):
        self.content = content
        self._separator = separator
        self.filename = filename
        self.__yaml_dict = content
        self.content_type = content_type
        self.backup = backup
        self.load(content_type=self.content_type)
        if self.__yaml_dict == None:
            self.__yaml_dict = {}

    @property
    def separator(self):
        ''' getter method for separator '''
        return self._separator

    @separator.setter
    def separator(self):
        ''' setter method for separator '''
        # NOTE(review): this setter takes no value argument and returns the
        # current separator, so assigning to .separator would raise a
        # TypeError -- looks like a copy/paste bug upstream; confirm before use.
        return self._separator

    @property
    def yaml_dict(self):
        ''' getter method for yaml_dict '''
        return self.__yaml_dict

    @yaml_dict.setter
    def yaml_dict(self, value):
        ''' setter method for yaml_dict '''
        self.__yaml_dict = value

    @staticmethod
    def parse_key(key, sep='.'):
        '''parse the key allowing the appropriate separator'''
        common_separators = list(Yedit.com_sep - set([sep]))
        return re.findall(Yedit.re_key % ''.join(common_separators), key)

    @staticmethod
    def valid_key(key, sep='.'):
        '''validate the incoming key'''
        common_separators = list(Yedit.com_sep - set([sep]))
        if not re.match(Yedit.re_valid_key % ''.join(common_separators), key):
            return False

        return True

    @staticmethod
    def remove_entry(data, key, sep='.'):
        ''' remove data at location key '''
        # an empty key clears the whole structure
        if key == '' and isinstance(data, dict):
            data.clear()
            return True
        elif key == '' and isinstance(data, list):
            del data[:]
            return True

        if not (key and Yedit.valid_key(key, sep)) and isinstance(data, (list, dict)):
            return None

        key_indexes = Yedit.parse_key(key, sep)
        # walk down to the parent of the final key component
        for arr_ind, dict_key in key_indexes[:-1]:
            if dict_key and isinstance(data, dict):
                data = data.get(dict_key, None)
            elif arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1:
                data = data[int(arr_ind)]
            else:
                return None

        # process last index for remove
        # expected list entry
        if key_indexes[-1][0]:
            if isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1:
                del data[int(key_indexes[-1][0])]
                return True

        # expected dict entry
        elif key_indexes[-1][1]:
            if isinstance(data, dict):
                del data[key_indexes[-1][1]]
                return True

    @staticmethod
    def add_entry(data, key, item=None, sep='.'):
        ''' Add/replace an item in a dictionary with key notation a.b.c
            d = {'a': {'b': 'c'}}}
            key = a#b
            sets/overwrites the value at that path, creating intermediate
            dicts as needed, and returns the stored item.
        '''
        if key == '':
            pass
        elif not (key and Yedit.valid_key(key, sep)) and isinstance(data, (list, dict)):
            return None

        key_indexes = Yedit.parse_key(key, sep)
        for arr_ind, dict_key in key_indexes[:-1]:
            if dict_key:
                if isinstance(data, dict) and data.has_key(dict_key) and data[dict_key]:
                    data = data[dict_key]
                    continue

                elif data and not isinstance(data, dict):
                    raise YeditException("Unexpected item type found while going through key " +
                                         "path: {} (at key: {})".format(key, dict_key))

                # create missing intermediate dictionaries on the way down
                data[dict_key] = {}
                data = data[dict_key]

            elif arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1:
                data = data[int(arr_ind)]
            else:
                raise YeditException("Unexpected item type found while going through key path: {}".format(key))

        if key == '':
            data = item

        # process last index for add
        # expected list entry
        elif key_indexes[-1][0] and isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1:
            data[int(key_indexes[-1][0])] = item

        # expected dict entry
        elif key_indexes[-1][1] and isinstance(data, dict):
            data[key_indexes[-1][1]] = item

        # didn't add/update to an existing list, nor add/update key to a dict
        # so we must have been provided some syntax like a.b.c[<int>] = "data" for a
        # non-existent array
        else:
            raise YeditException("Error adding data to object at path: {}".format(key))

        return data

    @staticmethod
    def get_entry(data, key, sep='.'):
        ''' Get an item from a dictionary with key notation a.b.c
            d = {'a': {'b': 'c'}}}
            key = a.b
            return c
        '''
        if key == '':
            pass
        elif not (key and Yedit.valid_key(key, sep)) and isinstance(data, (list, dict)):
            return None

        key_indexes = Yedit.parse_key(key, sep)
        for arr_ind, dict_key in key_indexes:
            if dict_key and isinstance(data, dict):
                data = data.get(dict_key, None)
            elif arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1:
                data = data[int(arr_ind)]
            else:
                return None

        return data

    def write(self):
        ''' write the current yaml_dict to self.filename (atomically via a
        temp file), optionally keeping a .orig backup first '''
        if not self.filename:
            raise YeditException('Please specify a filename.')

        if self.backup and self.file_exists():
            shutil.copy(self.filename, self.filename + '.orig')

        tmp_filename = self.filename + '.yedit'
        try:
            with open(tmp_filename, 'w') as yfd:
                # pylint: disable=no-member,maybe-no-member
                if hasattr(self.yaml_dict, 'fa'):
                    self.yaml_dict.fa.set_block_style()
                yfd.write(yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
        except Exception as err:
            raise YeditException(err.message)

        os.rename(tmp_filename, self.filename)

        return (True, self.yaml_dict)

    def read(self):
        ''' read raw contents from self.filename; None if it does not exist '''
        # check if it exists
        if self.filename == None or not self.file_exists():
            return None

        contents = None
        with open(self.filename) as yfd:
            contents = yfd.read()

        return contents

    def file_exists(self):
        ''' return whether file exists '''
        if os.path.exists(self.filename):
            return True

        return False

    def load(self, content_type='yaml'):
        ''' deserialize file/content into yaml_dict and return it '''
        contents = self.read()

        if not contents and not self.content:
            return None

        if self.content:
            # dict content is used as-is; string content is parsed below
            if isinstance(self.content, dict):
                self.yaml_dict = self.content
                return self.yaml_dict
            elif isinstance(self.content, str):
                contents = self.content

        # check if it is yaml
        try:
            if content_type == 'yaml' and contents:
                self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
                # pylint: disable=no-member,maybe-no-member
                if hasattr(self.yaml_dict, 'fa'):
                    self.yaml_dict.fa.set_block_style()
            elif content_type == 'json' and contents:
                self.yaml_dict = json.loads(contents)
        except yaml.YAMLError as err:
            # Error loading yaml or json
            raise YeditException('Problem with loading yaml file. %s' % err)

        return self.yaml_dict

    def get(self, key):
        ''' get a specified key; None when absent '''
        try:
            entry = Yedit.get_entry(self.yaml_dict, key, self.separator)
        except KeyError as _:
            entry = None

        return entry

    def pop(self, path, key_or_item):
        ''' remove a key, value pair from a dict or an item from a list.
        Returns (changed, yaml_dict). '''
        try:
            entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
        except KeyError as _:
            entry = None

        if entry == None:
            return (False, self.yaml_dict)

        if isinstance(entry, dict):
            # pylint: disable=no-member,maybe-no-member
            if entry.has_key(key_or_item):
                entry.pop(key_or_item)
                return (True, self.yaml_dict)
            return (False, self.yaml_dict)

        elif isinstance(entry, list):
            # pylint: disable=no-member,maybe-no-member
            ind = None
            try:
                ind = entry.index(key_or_item)
            except ValueError:
                return (False, self.yaml_dict)

            entry.pop(ind)
            return (True, self.yaml_dict)

        return (False, self.yaml_dict)

    def delete(self, path):
        ''' remove path from the dict; returns (changed, yaml_dict) '''
        try:
            entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
        except KeyError as _:
            entry = None

        if entry == None:
            return (False, self.yaml_dict)

        result = Yedit.remove_entry(self.yaml_dict, path, self.separator)
        if not result:
            return (False, self.yaml_dict)

        return (True, self.yaml_dict)

    def exists(self, path, value):
        ''' check if value exists at path'''
        try:
            entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
        except KeyError as _:
            entry = None

        if isinstance(entry, list):
            if value in entry:
                return True
            return False

        elif isinstance(entry, dict):
            if isinstance(value, dict):
                rval = False
                for key, val in value.items():
                    if entry[key] != val:
                        rval = False
                        break
                # for/else: rval is True only when the loop finished without
                # hitting a mismatch (no break)
                else:
                    rval = True
                return rval

            return value in entry

        return entry == value

    def append(self, path, value):
        '''append value to the list at path, creating the list if needed;
        returns (changed, yaml_dict)'''
        try:
            entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
        except KeyError as _:
            entry = None

        if entry is None:
            self.put(path, [])
            entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
        if not isinstance(entry, list):
            return (False, self.yaml_dict)

        # pylint: disable=no-member,maybe-no-member
        entry.append(value)
        return (True, self.yaml_dict)

    # pylint: disable=too-many-arguments
    def update(self, path, value, index=None, curr_value=None):
        ''' update a dict entry or a list item at path;
        for lists, the item is located by curr_value or index,
        otherwise appended if absent. Returns (changed, yaml_dict). '''
        try:
            entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
        except KeyError as _:
            entry = None

        if isinstance(entry, dict):
            # pylint: disable=no-member,maybe-no-member
            if not isinstance(value, dict):
                raise YeditException('Cannot replace key, value entry in dict with non-dict type.' \
                                     ' value=[%s] [%s]' % (value, type(value)))

            entry.update(value)
            return (True, self.yaml_dict)

        elif isinstance(entry, list):
            # pylint: disable=no-member,maybe-no-member
            ind = None
            if curr_value:
                try:
                    ind = entry.index(curr_value)
                except ValueError:
                    return (False, self.yaml_dict)

            elif index != None:
                ind = index

            if ind != None and entry[ind] != value:
                entry[ind] = value
                return (True, self.yaml_dict)

            # see if it exists in the list
            try:
                ind = entry.index(value)
            except ValueError:
                # doesn't exist, append it
                entry.append(value)
                return (True, self.yaml_dict)

            #already exists, return
            if ind != None:
                return (False, self.yaml_dict)
        return (False, self.yaml_dict)

    def put(self, path, value):
        ''' put value at path, working on a round-trip copy so a failed add
        leaves yaml_dict untouched; returns (changed, yaml_dict) '''
        try:
            entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
        except KeyError as _:
            entry = None

        if entry == value:
            return (False, self.yaml_dict)

        # deepcopy didn't work
        tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), yaml.RoundTripLoader)
        # pylint: disable=no-member
        if hasattr(self.yaml_dict, 'fa'):
            tmp_copy.fa.set_block_style()
        result = Yedit.add_entry(tmp_copy, path, value, self.separator)
        if not result:
            return (False, self.yaml_dict)

        self.yaml_dict = tmp_copy

        return (True, self.yaml_dict)

    def create(self, path, value):
        ''' create the backing yaml file with value at path, only when the
        file does not already exist; returns (changed, yaml_dict) '''
        if not self.file_exists():
            # deepcopy didn't work
            tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), yaml.RoundTripLoader)
            # pylint: disable=no-member
            if hasattr(self.yaml_dict, 'fa'):
                tmp_copy.fa.set_block_style()
            result = Yedit.add_entry(tmp_copy, path, value, self.separator)
            if result:
                self.yaml_dict = tmp_copy
                return (True, self.yaml_dict)

        return (False, self.yaml_dict)
class ServiceAccountConfig(object):
    '''Service account config class

       Stores the options and builds a default v1 ServiceAccount dict.
    '''

    # pylint: disable=too-many-arguments
    def __init__(self, sname, namespace, kubeconfig, secrets=None, image_pull_secrets=None):
        self.name = sname
        self.kubeconfig = kubeconfig
        self.namespace = namespace
        self.secrets = secrets or []
        self.image_pull_secrets = image_pull_secrets or []
        self.data = {}
        self.create_dict()

    def create_dict(self):
        ''' populate self.data with a properly structured ServiceAccount '''
        self.data = {
            'apiVersion': 'v1',
            'kind': 'ServiceAccount',
            'metadata': {
                'name': self.name,
                'namespace': self.namespace,
            },
            # each secret name becomes a {"name": ...} reference
            'secrets': [{"name": sec} for sec in self.secrets],
            'imagePullSecrets': [{"name": sec} for sec in self.image_pull_secrets],
        }
# pylint: disable=too-many-public-methods
class ServiceAccount(Yedit):
    ''' Yedit wrapper for a v1 ServiceAccount resource.

    Provides list-style access to the resource's "secrets" and
    "imagePullSecrets" entries.
    '''
    image_pull_secrets_path = "imagePullSecrets"
    secrets_path = "secrets"

    def __init__(self, content):
        '''ServiceAccount constructor; content is the resource dict/yaml'''
        super(ServiceAccount, self).__init__(content=content)
        self._secrets = None
        self._image_pull_secrets = None

    @property
    def image_pull_secrets(self):
        ''' lazily-loaded list of imagePullSecrets entries '''
        if self._image_pull_secrets is None:
            self._image_pull_secrets = self.get(ServiceAccount.image_pull_secrets_path) or []
        return self._image_pull_secrets

    @image_pull_secrets.setter
    def image_pull_secrets(self, secrets):
        ''' setter for image_pull_secrets '''
        self._image_pull_secrets = secrets

    @property
    def secrets(self):
        ''' lazily-loaded list of secrets entries '''
        # BUGFIX: removed leftover debug print that wrote to stdout on
        # every property access.
        if not self._secrets:
            self._secrets = self.get(ServiceAccount.secrets_path) or []
        return self._secrets

    @secrets.setter
    def secrets(self, secrets):
        ''' setter for secrets '''
        self._secrets = secrets

    def delete_secret(self, inc_secret):
        ''' remove the named secret; return True if one was removed '''
        remove_idx = None
        for idx, sec in enumerate(self.secrets):
            if sec['name'] == inc_secret:
                remove_idx = idx
                break

        # BUGFIX: compare against None so that index 0 (the first secret)
        # is deletable; the previous truthiness test skipped it.
        if remove_idx is not None:
            del self.secrets[remove_idx]
            return True

        return False

    def delete_image_pull_secret(self, inc_secret):
        ''' remove the named image pull secret; return True if removed '''
        remove_idx = None
        for idx, sec in enumerate(self.image_pull_secrets):
            if sec['name'] == inc_secret:
                remove_idx = idx
                break

        # BUGFIX: same index-0 fix as delete_secret.
        if remove_idx is not None:
            del self.image_pull_secrets[remove_idx]
            return True

        return False

    def find_secret(self, inc_secret):
        '''return the secret entry with the given name, or None'''
        for secret in self.secrets:
            if secret['name'] == inc_secret:
                return secret

        return None

    def find_image_pull_secret(self, inc_secret):
        '''return the image pull secret entry with the given name, or None'''
        for secret in self.image_pull_secrets:
            if secret['name'] == inc_secret:
                return secret

        return None

    def add_secret(self, inc_secret):
        '''append a secret reference by name'''
        if self.secrets:
            self.secrets.append({"name": inc_secret})
        else:
            # no list yet: create it on the underlying document
            self.put(ServiceAccount.secrets_path, [{"name": inc_secret}])

    def add_image_pull_secret(self, inc_secret):
        '''append an image pull secret reference by name'''
        if self.image_pull_secrets:
            self.image_pull_secrets.append({"name": inc_secret})
        else:
            # no list yet: create it on the underlying document
            self.put(ServiceAccount.image_pull_secrets_path, [{"name": inc_secret}])
# pylint: disable=too-many-instance-attributes
class OCServiceAccount(OpenShiftCLI):
    '''Wrapper around the oc command line tools for service accounts.'''

    kind = 'sa'

    # pylint allows 5
    # pylint: disable=too-many-arguments
    def __init__(self, config, verbose=False):
        '''Constructor for OCServiceAccount'''
        super(OCServiceAccount, self).__init__(config.namespace, config.kubeconfig)
        self.config = config
        self.namespace = config.namespace
        self._service_account = None

    @property
    def service_account(self):
        '''Lazily fetched ServiceAccount object for the configured name.'''
        if not self._service_account:
            self.get()
        return self._service_account

    @service_account.setter
    def service_account(self, data):
        '''Setter for the cached ServiceAccount object.'''
        self._service_account = data

    def exists(self):
        '''Return True when the service account exists on the cluster.'''
        return bool(self.service_account)

    def get(self):
        '''Fetch the service account; treat "not found" as an empty result.'''
        result = self._get(self.kind, self.config.name)
        if result['returncode'] == 0:
            self.service_account = ServiceAccount(content=result['results'][0])
        elif '\"%s\" not found' % self.config.name in result['stderr']:
            result['returncode'] = 0
            result['results'] = [{}]
        return result

    def delete(self):
        '''Delete the service account.'''
        return self._delete(self.kind, self.config.name)

    def create(self):
        '''Create the service account from the configured definition.'''
        return self._create_from_content(self.config.name, self.config.data)

    def update(self):
        '''Add any missing secrets, then replace the object on the cluster.'''
        for name in self.config.secrets:
            if not self.service_account.find_secret(name):
                self.service_account.add_secret(name)

        for name in self.config.image_pull_secrets:
            if not self.service_account.find_image_pull_secret(name):
                self.service_account.add_image_pull_secret(name)

        return self._replace_content(self.kind, self.config.name, self.config.data)

    def needs_update(self):
        '''Return True when an update is needed.

        Creating a service account generates extra secrets and
        imagePullSecrets, so a full definition comparison would always
        differ; instead verify that every requested entry is present.
        '''
        if any(not self.service_account.find_secret(name)
               for name in self.config.secrets):
            return True

        if any(not self.service_account.find_image_pull_secret(name)
               for name in self.config.image_pull_secrets):
            return True

        return False
def main():
    '''
    ansible oc module for service accounts
    '''
    module = AnsibleModule(
        argument_spec=dict(
            kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'),
            state=dict(default='present', type='str',
                       choices=['present', 'absent', 'list']),
            debug=dict(default=False, type='bool'),
            name=dict(default=None, required=True, type='str'),
            namespace=dict(default=None, required=True, type='str'),
            secrets=dict(default=None, type='list'),
            image_pull_secrets=dict(default=None, type='list'),
        ),
        supports_check_mode=True,
    )

    sa_config = ServiceAccountConfig(module.params['name'],
                                     module.params['namespace'],
                                     module.params['kubeconfig'],
                                     module.params['secrets'],
                                     module.params['image_pull_secrets'])
    oc_sa = OCServiceAccount(sa_config, verbose=module.params['debug'])

    state = module.params['state']
    api_rval = oc_sa.get()

    # list: just report what exists
    if state == 'list':
        module.exit_json(changed=False, results=api_rval['results'], state="list")

    # absent: delete when present
    if state == 'absent':
        if oc_sa.exists():
            if module.check_mode:
                module.exit_json(changed=False, msg='Would have performed a delete.')
            api_rval = oc_sa.delete()
            module.exit_json(changed=True, results=api_rval, state="absent")
        module.exit_json(changed=False, state="absent")

    if state == 'present':
        # create when missing
        if not oc_sa.exists():
            if module.check_mode:
                module.exit_json(changed=False, msg='Would have performed a create.')
            api_rval = oc_sa.create()
            if api_rval['returncode'] != 0:
                module.fail_json(msg=api_rval)
            # return the created object
            api_rval = oc_sa.get()
            if api_rval['returncode'] != 0:
                module.fail_json(msg=api_rval)
            module.exit_json(changed=True, results=api_rval, state="present")

        # update when the requested secrets have drifted
        if oc_sa.needs_update():
            api_rval = oc_sa.update()
            if api_rval['returncode'] != 0:
                module.fail_json(msg=api_rval)
            # return the updated object
            api_rval = oc_sa.get()
            if api_rval['returncode'] != 0:
                module.fail_json(msg=api_rval)
            module.exit_json(changed=True, results=api_rval, state="present")

        module.exit_json(changed=False, results=api_rval, state="present")

    module.exit_json(failed=True,
                     changed=False,
                     results='Unknown state passed. %s' % state,
                     state="unknown")
# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
# import module snippets. This are required
from ansible.module_utils.basic import *
main()
| apache-2.0 |
jonhadfield/ansible-modules-core | cloud/rackspace/rax_queue.py | 25 | 4000 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# This is a DOCUMENTATION stub specific to this module, it extends
# a documentation fragment located in ansible.utils.module_docs_fragments
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: rax_queue
short_description: create / delete a queue in Rackspace Public Cloud
description:
- creates / deletes a Rackspace Public Cloud queue.
version_added: "1.5"
options:
name:
description:
- Name to give the queue
default: null
state:
description:
- Indicate desired state of the resource
choices:
- present
- absent
default: present
author:
- "Christopher H. Laco (@claco)"
- "Matt Martz (@sivel)"
extends_documentation_fragment: rackspace
'''
EXAMPLES = '''
- name: Build a Queue
gather_facts: False
hosts: local
connection: local
tasks:
- name: Queue create request
local_action:
module: rax_queue
credentials: ~/.raxpub
name: my-queue
region: DFW
state: present
register: my_queue
'''
try:
import pyrax
HAS_PYRAX = True
except ImportError:
HAS_PYRAX = False
def cloud_queue(module, state, name):
    """Ensure a Rackspace Cloud queue named `name` matches `state`.

    Exits the Ansible module via exit_json/fail_json; never returns
    normally.
    """
    # BUG FIX: the original interpolated the missing *value* (None or '')
    # into the error message, producing e.g. "None is required"; report the
    # option's name instead.
    for arg_name, arg_value in (('state', state), ('name', name)):
        if not arg_value:
            module.fail_json(msg='%s is required for rax_queue' % arg_name)

    changed = False
    queues = []
    instance = {}

    cq = pyrax.queues
    if not cq:
        module.fail_json(msg='Failed to instantiate client. This '
                             'typically indicates an invalid region or an '
                             'incorrectly capitalized region name.')

    # collect every queue whose name matches exactly
    for queue in cq.list():
        if name != queue.name:
            continue
        queues.append(queue)

    if len(queues) > 1:
        module.fail_json(msg='Multiple Queues were matched by name')

    if state == 'present':
        if not queues:
            try:
                queue = cq.create(name)
                changed = True
            except Exception as e:
                # str(e) works on both Python 2 and 3; e.message is gone in 3
                module.fail_json(msg='%s' % str(e))
        else:
            queue = queues[0]

        instance = dict(name=queue.name)
        result = dict(changed=changed, queue=instance)
        module.exit_json(**result)

    elif state == 'absent':
        if queues:
            queue = queues[0]
            try:
                queue.delete()
                changed = True
            except Exception as e:
                module.fail_json(msg='%s' % str(e))

    module.exit_json(changed=changed, queue=instance)
def main():
    """Ansible entry point for the rax_queue module.

    Builds the argument spec from the shared Rackspace helpers, verifies
    that pyrax is importable, configures the pyrax client, then delegates
    the create/delete work to cloud_queue().
    """
    argument_spec = rax_argument_spec()
    argument_spec.update(
        dict(
            name=dict(),
            state=dict(default='present', choices=['present', 'absent']),
        )
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        required_together=rax_required_together()
    )

    if not HAS_PYRAX:
        module.fail_json(msg='pyrax is required for this module')

    name = module.params.get('name')
    state = module.params.get('state')

    # authenticates against Rackspace and selects the region before any calls
    setup_rax_module(module, pyrax)

    cloud_queue(module, state, name)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.rax import *
### invoke the module
if __name__ == '__main__':
main()
| gpl-3.0 |
gaddman/ansible | lib/ansible/modules/remote_management/cobbler/cobbler_sync.py | 80 | 4403 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Dag Wieers (dagwieers) <dag@wieers.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: cobbler_sync
version_added: '2.7'
short_description: Sync Cobbler
description:
- Sync Cobbler to commit changes.
options:
host:
description:
- The name or IP address of the Cobbler system.
default: 127.0.0.1
port:
description:
- Port number to be used for REST connection.
- The default value depends on parameter C(use_ssl).
username:
description:
- The username to log in to Cobbler.
default: cobbler
password:
description:
- The password to log in to Cobbler.
required: yes
use_ssl:
description:
- If C(no), an HTTP connection will be used instead of the default HTTPS connection.
type: bool
default: 'yes'
validate_certs:
description:
- If C(no), SSL certificates will not be validated.
- This should only set to C(no) when used on personally controlled sites using self-signed certificates.
type: bool
default: 'yes'
author:
- Dag Wieers (@dagwieers)
todo:
notes:
- Concurrently syncing Cobbler is bound to fail with weird errors.
- On python 2.7.8 and older (i.e. on RHEL7) you may need to tweak the python behaviour to disable certificate validation.
More information at L(Certificate verification in Python standard library HTTP clients,https://access.redhat.com/articles/2039753).
'''
EXAMPLES = r'''
- name: Commit Cobbler changes
cobbler_sync:
host: cobbler01
username: cobbler
password: MySuperSecureP4sswOrd
run_once: yes
delegate_to: localhost
'''
RETURN = r'''
# Default return values
'''
import datetime
import ssl
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves import xmlrpc_client
from ansible.module_utils._text import to_text
def main():
    """Ansible entry point for cobbler_sync: log in over XML-RPC and sync."""
    module = AnsibleModule(
        argument_spec=dict(
            host=dict(type='str', default='127.0.0.1'),
            port=dict(type='int'),
            username=dict(type='str', default='cobbler'),
            password=dict(type='str', no_log=True),
            use_ssl=dict(type='bool', default=True),
            validate_certs=dict(type='bool', default=True),
        ),
        supports_check_mode=True,
    )

    username = module.params['username']
    password = module.params['password']
    port = module.params['port']
    use_ssl = module.params['use_ssl']
    validate_certs = module.params['validate_certs']

    module.params['proto'] = 'https' if use_ssl else 'http'
    if not port:
        # default port depends on the chosen scheme
        module.params['port'] = '443' if use_ssl else '80'

    result = dict(
        changed=True,
    )

    start = datetime.datetime.utcnow()

    ssl_context = None
    if not validate_certs:
        try:  # Python 2.7.9 and newer
            # BUG FIX: the helper is ssl._create_unverified_context;
            # ssl.create_unverified_context does not exist, so the original
            # always raised AttributeError here and kept verifying certs.
            ssl_context = ssl._create_unverified_context()
        except AttributeError:
            # Legacy Python (2.7.8 and older) never verified HTTPS
            # certificates by default, so nothing needs to be disabled.
            pass
        else:
            # Belt and braces: also disable verification globally for any
            # connection that does not receive the explicit context.
            ssl._create_default_https_context = ssl._create_unverified_https_context

    url = '{proto}://{host}:{port}/cobbler_api'.format(**module.params)

    if ssl_context:
        conn = xmlrpc_client.ServerProxy(url, context=ssl_context)
    else:
        conn = xmlrpc_client.Server(url)

    try:
        token = conn.login(username, password)
    except xmlrpc_client.Fault as e:
        module.fail_json(msg="Failed to log in to Cobbler '{url}' as '{username}'. {error}".format(url=url, error=to_text(e), **module.params))
    except Exception as e:
        module.fail_json(msg="Connection to '{url}' failed. {error}".format(url=url, error=to_text(e)))

    if not module.check_mode:
        try:
            conn.sync(token)
        except Exception as e:
            module.fail_json(msg="Failed to sync Cobbler. {error}".format(error=to_text(e)))

    elapsed = datetime.datetime.utcnow() - start
    module.exit_json(elapsed=elapsed.seconds, **result)
if __name__ == '__main__':
main()
| gpl-3.0 |
lrq3000/neuro_experiments_tools | utils/asciirename/unidecode/x05c.py | 252 | 4612 | data = (
'Po ', # 0x00
'Feng ', # 0x01
'Zhuan ', # 0x02
'Fu ', # 0x03
'She ', # 0x04
'Ke ', # 0x05
'Jiang ', # 0x06
'Jiang ', # 0x07
'Zhuan ', # 0x08
'Wei ', # 0x09
'Zun ', # 0x0a
'Xun ', # 0x0b
'Shu ', # 0x0c
'Dui ', # 0x0d
'Dao ', # 0x0e
'Xiao ', # 0x0f
'Ji ', # 0x10
'Shao ', # 0x11
'Er ', # 0x12
'Er ', # 0x13
'Er ', # 0x14
'Ga ', # 0x15
'Jian ', # 0x16
'Shu ', # 0x17
'Chen ', # 0x18
'Shang ', # 0x19
'Shang ', # 0x1a
'Mo ', # 0x1b
'Ga ', # 0x1c
'Chang ', # 0x1d
'Liao ', # 0x1e
'Xian ', # 0x1f
'Xian ', # 0x20
'[?] ', # 0x21
'Wang ', # 0x22
'Wang ', # 0x23
'You ', # 0x24
'Liao ', # 0x25
'Liao ', # 0x26
'Yao ', # 0x27
'Mang ', # 0x28
'Wang ', # 0x29
'Wang ', # 0x2a
'Wang ', # 0x2b
'Ga ', # 0x2c
'Yao ', # 0x2d
'Duo ', # 0x2e
'Kui ', # 0x2f
'Zhong ', # 0x30
'Jiu ', # 0x31
'Gan ', # 0x32
'Gu ', # 0x33
'Gan ', # 0x34
'Tui ', # 0x35
'Gan ', # 0x36
'Gan ', # 0x37
'Shi ', # 0x38
'Yin ', # 0x39
'Chi ', # 0x3a
'Kao ', # 0x3b
'Ni ', # 0x3c
'Jin ', # 0x3d
'Wei ', # 0x3e
'Niao ', # 0x3f
'Ju ', # 0x40
'Pi ', # 0x41
'Ceng ', # 0x42
'Xi ', # 0x43
'Bi ', # 0x44
'Ju ', # 0x45
'Jie ', # 0x46
'Tian ', # 0x47
'Qu ', # 0x48
'Ti ', # 0x49
'Jie ', # 0x4a
'Wu ', # 0x4b
'Diao ', # 0x4c
'Shi ', # 0x4d
'Shi ', # 0x4e
'Ping ', # 0x4f
'Ji ', # 0x50
'Xie ', # 0x51
'Chen ', # 0x52
'Xi ', # 0x53
'Ni ', # 0x54
'Zhan ', # 0x55
'Xi ', # 0x56
'[?] ', # 0x57
'Man ', # 0x58
'E ', # 0x59
'Lou ', # 0x5a
'Ping ', # 0x5b
'Ti ', # 0x5c
'Fei ', # 0x5d
'Shu ', # 0x5e
'Xie ', # 0x5f
'Tu ', # 0x60
'Lu ', # 0x61
'Lu ', # 0x62
'Xi ', # 0x63
'Ceng ', # 0x64
'Lu ', # 0x65
'Ju ', # 0x66
'Xie ', # 0x67
'Ju ', # 0x68
'Jue ', # 0x69
'Liao ', # 0x6a
'Jue ', # 0x6b
'Shu ', # 0x6c
'Xi ', # 0x6d
'Che ', # 0x6e
'Tun ', # 0x6f
'Ni ', # 0x70
'Shan ', # 0x71
'[?] ', # 0x72
'Xian ', # 0x73
'Li ', # 0x74
'Xue ', # 0x75
'Nata ', # 0x76
'[?] ', # 0x77
'Long ', # 0x78
'Yi ', # 0x79
'Qi ', # 0x7a
'Ren ', # 0x7b
'Wu ', # 0x7c
'Han ', # 0x7d
'Shen ', # 0x7e
'Yu ', # 0x7f
'Chu ', # 0x80
'Sui ', # 0x81
'Qi ', # 0x82
'[?] ', # 0x83
'Yue ', # 0x84
'Ban ', # 0x85
'Yao ', # 0x86
'Ang ', # 0x87
'Ya ', # 0x88
'Wu ', # 0x89
'Jie ', # 0x8a
'E ', # 0x8b
'Ji ', # 0x8c
'Qian ', # 0x8d
'Fen ', # 0x8e
'Yuan ', # 0x8f
'Qi ', # 0x90
'Cen ', # 0x91
'Qian ', # 0x92
'Qi ', # 0x93
'Cha ', # 0x94
'Jie ', # 0x95
'Qu ', # 0x96
'Gang ', # 0x97
'Xian ', # 0x98
'Ao ', # 0x99
'Lan ', # 0x9a
'Dao ', # 0x9b
'Ba ', # 0x9c
'Zuo ', # 0x9d
'Zuo ', # 0x9e
'Yang ', # 0x9f
'Ju ', # 0xa0
'Gang ', # 0xa1
'Ke ', # 0xa2
'Gou ', # 0xa3
'Xue ', # 0xa4
'Bei ', # 0xa5
'Li ', # 0xa6
'Tiao ', # 0xa7
'Ju ', # 0xa8
'Yan ', # 0xa9
'Fu ', # 0xaa
'Xiu ', # 0xab
'Jia ', # 0xac
'Ling ', # 0xad
'Tuo ', # 0xae
'Pei ', # 0xaf
'You ', # 0xb0
'Dai ', # 0xb1
'Kuang ', # 0xb2
'Yue ', # 0xb3
'Qu ', # 0xb4
'Hu ', # 0xb5
'Po ', # 0xb6
'Min ', # 0xb7
'An ', # 0xb8
'Tiao ', # 0xb9
'Ling ', # 0xba
'Chi ', # 0xbb
'Yuri ', # 0xbc
'Dong ', # 0xbd
'Cem ', # 0xbe
'Kui ', # 0xbf
'Xiu ', # 0xc0
'Mao ', # 0xc1
'Tong ', # 0xc2
'Xue ', # 0xc3
'Yi ', # 0xc4
'Kura ', # 0xc5
'He ', # 0xc6
'Ke ', # 0xc7
'Luo ', # 0xc8
'E ', # 0xc9
'Fu ', # 0xca
'Xun ', # 0xcb
'Die ', # 0xcc
'Lu ', # 0xcd
'An ', # 0xce
'Er ', # 0xcf
'Gai ', # 0xd0
'Quan ', # 0xd1
'Tong ', # 0xd2
'Yi ', # 0xd3
'Mu ', # 0xd4
'Shi ', # 0xd5
'An ', # 0xd6
'Wei ', # 0xd7
'Hu ', # 0xd8
'Zhi ', # 0xd9
'Mi ', # 0xda
'Li ', # 0xdb
'Ji ', # 0xdc
'Tong ', # 0xdd
'Wei ', # 0xde
'You ', # 0xdf
'Sang ', # 0xe0
'Xia ', # 0xe1
'Li ', # 0xe2
'Yao ', # 0xe3
'Jiao ', # 0xe4
'Zheng ', # 0xe5
'Luan ', # 0xe6
'Jiao ', # 0xe7
'E ', # 0xe8
'E ', # 0xe9
'Yu ', # 0xea
'Ye ', # 0xeb
'Bu ', # 0xec
'Qiao ', # 0xed
'Qun ', # 0xee
'Feng ', # 0xef
'Feng ', # 0xf0
'Nao ', # 0xf1
'Li ', # 0xf2
'You ', # 0xf3
'Xian ', # 0xf4
'Hong ', # 0xf5
'Dao ', # 0xf6
'Shen ', # 0xf7
'Cheng ', # 0xf8
'Tu ', # 0xf9
'Geng ', # 0xfa
'Jun ', # 0xfb
'Hao ', # 0xfc
'Xia ', # 0xfd
'Yin ', # 0xfe
'Yu ', # 0xff
)
| mit |
gmt/portage | pym/_emerge/emergelog.py | 11 | 1675 | # Copyright 1999-2013 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
from __future__ import unicode_literals
import io
import sys
import time
import portage
from portage import os
from portage import _encodings
from portage import _unicode_decode
from portage import _unicode_encode
from portage.data import secpass
from portage.output import xtermTitle
# We disable emergelog by default, since it's called from
# dblink.merge() and we don't want that to trigger log writes
# unless it's really called via emerge.
_disable = True
_emerge_log_dir = '/var/log'
def emergelog(xterm_titles, mystr, short_msg=None):
    """Append a timestamped `mystr` entry to emerge.log.

    No-op while the module-level `_disable` flag is set.  When both
    `xterm_titles` and `short_msg` are given, `short_msg` (prefixed with
    the hostname when available) is also pushed to the terminal title.
    I/O and permission errors are reported via writemsg, never raised.
    """
    if _disable:
        return

    mystr = _unicode_decode(mystr)
    if short_msg is not None:
        short_msg = _unicode_decode(short_msg)

    if xterm_titles and short_msg:
        if "HOSTNAME" in os.environ:
            short_msg = os.environ["HOSTNAME"] + ": " + short_msg
        xtermTitle(short_msg)
    try:
        file_path = os.path.join(_emerge_log_dir, 'emerge.log')
        existing_log = os.path.isfile(file_path)
        mylogfile = io.open(_unicode_encode(file_path,
            encoding=_encodings['fs'], errors='strict'),
            mode='a', encoding=_encodings['content'],
            errors='backslashreplace')
        if not existing_log:
            # a freshly created log must be owned by the portage user/group
            portage.util.apply_secpass_permissions(file_path,
                uid=portage.portage_uid, gid=portage.portage_gid,
                mode=0o660)
        mylock = portage.locks.lockfile(file_path)
        try:
            mylogfile.write("%.0f: %s\n" % (time.time(), mystr))
        finally:
            # BUG FIX: close the log file even when the write fails (the
            # original leaked the handle on error), and always drop the lock.
            mylogfile.close()
            portage.locks.unlockfile(mylock)
    except (IOError, OSError, portage.exception.PortageException) as e:
        if secpass >= 1:
            portage.util.writemsg("emergelog(): %s\n" % (e,), noiselevel=-1)
| gpl-2.0 |
shenxudeu/deuNet | deuNet/datasets/data_utils.py | 2 | 1244 | from __future__ import absolute_import
from __future__ import print_function
import tarfile, inspect, os
from six.moves.urllib.request import urlretrieve
from ..utils.generic_utils import Progbar
def get_file(fname, origin, untar=False):
    """Return a local path for dataset `fname`.

    On a cache miss the file is downloaded from `origin` into
    ~/.deuNet/datasets (with a progress bar) and, when `untar` is True,
    extracted there.  Returns the extracted directory path when `untar`
    is True, otherwise the downloaded file path.
    """
    datadir = os.path.expanduser(os.path.join('~', '.deuNet', 'datasets'))
    if not os.path.exists(datadir):
        os.makedirs(datadir)

    if untar:
        untar_fpath = os.path.join(datadir, fname)
        fpath = untar_fpath + '.tar.gz'
    else:
        fpath = os.path.join(datadir, fname)

    # BUG FIX: the original probed the cache by open()ing the file inside a
    # bare `except:` and never closed the handle; test existence explicitly.
    if not os.path.isfile(fpath):
        print('Downloading data from', origin)
        global progbar
        progbar = None

        def dl_progress(count, block_size, total_size):
            # urlretrieve reporthook: lazily create the bar, then advance it
            global progbar
            if progbar is None:
                progbar = Progbar(total_size)
            else:
                progbar.update(count * block_size)

        urlretrieve(origin, fpath, dl_progress)
        progbar = None

    if untar:
        if not os.path.exists(untar_fpath):
            print('Untaring file...')
            # context manager guarantees the archive is closed on error too
            with tarfile.open(fpath, 'r:gz') as tfile:
                tfile.extractall(path=datadir)
        return untar_fpath

    return fpath
| mit |
was4444/chromium.src | ppapi/generators/idl_generator.py | 165 | 8397 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
from idl_log import ErrOut, InfoOut, WarnOut
from idl_option import GetOption, Option, ParseOptions
from idl_parser import ParseFiles
GeneratorList = []
Option('out', 'List of output files', default='')
Option('release', 'Which release to generate.', default='')
Option('range', 'Which ranges in the form of MIN,MAX.', default='start,end')
class Generator(object):
  """Base class for generators.

  This class provides a mechanism for adding new generator objects to the IDL
  driver.  To use this class override the GenerateRelease and GenerateRange
  members, and instantiate one copy of the class in the same module which
  defines it to register the generator.  After the AST is generated, call the
  static Run member which will check every registered generator to see which
  ones have been enabled through command-line options.  To enable a generator
  use the switches:
    --<sname> : To enable with defaults
    --<sname>_opt=<XXX,YYY=y> : To enable with generator specific options.

  NOTE: Generators still have access to global options
  """

  def __init__(self, name, sname, desc):
    self.name = name
    # Command-line switches that enable this generator / pass it options.
    self.run_switch = Option(sname, desc)
    self.opt_switch = Option(sname + '_opt', 'Options for %s.' % sname,
                             default='')
    GeneratorList.append(self)
    self.errors = 0
    self.skip_list = []

  def Error(self, msg):
    """Log a generator error and bump the error count."""
    ErrOut.Log('Error %s : %s' % (self.name, msg))
    self.errors += 1

  def GetRunOptions(self):
    """Return this generator's option dict, or None when not enabled."""
    options = {}
    option_list = self.opt_switch.Get()
    if option_list:
      option_list = option_list.split(',')
      for opt in option_list:
        offs = opt.find('=')
        if offs > 0:
          options[opt[:offs]] = opt[offs+1:]
        else:
          options[opt] = True
      return options
    if self.run_switch.Get():
      return options
    return None

  def Generate(self, ast, options):
    """Run this generator over `ast` for the configured release or range.

    Returns the number of errors encountered.
    """
    self.errors = 0
    rangestr = GetOption('range')
    releasestr = GetOption('release')

    # BUG FIX: this was a bare Python 2 `print` statement; route it through
    # the driver's logger for consistency (and Python 3 compatibility).
    InfoOut.Log('Found releases: %s' % ast.releases)

    # Generate list of files to ignore due to errors
    for filenode in ast.GetListOf('File'):
      # If this file has errors, skip it
      if filenode.GetProperty('ERRORS') > 0:
        self.skip_list.append(filenode)
        continue

    # Check for a range option which over-rides a release option
    if not releasestr and rangestr:
      range_list = rangestr.split(',')
      if len(range_list) != 2:
        self.Error('Failed to generate for %s, incorrect range: "%s"' %
                   (self.name, rangestr))
      else:
        vmin = range_list[0]
        vmax = range_list[1]

        # Generate 'start' and 'end' represent first and last found.
        if vmin == 'start':
          vmin = ast.releases[0]
        if vmax == 'end':
          vmax = ast.releases[-1]

        vmin = ast.releases.index(vmin)
        vmax = ast.releases.index(vmax) + 1
        releases = ast.releases[vmin:vmax]

        InfoOut.Log('Generate range %s of %s.' % (rangestr, self.name))
        ret = self.GenerateRange(ast, releases, options)
        if ret < 0:
          self.Error('Failed to generate range %s : %s.' %(vmin, vmax))
        else:
          InfoOut.Log('%s wrote %d files.' % (self.name, ret))
    # Otherwise this should be a single release generation
    else:
      if releasestr == 'start':
        releasestr = ast.releases[0]
      if releasestr == 'end':
        releasestr = ast.releases[-1]

      if releasestr > ast.releases[-1]:
        InfoOut.Log('There is no unique release for %s, using last release.' %
                    releasestr)
        releasestr = ast.releases[-1]

      if releasestr not in ast.releases:
        self.Error('Release %s not in [%s].' %
                   (releasestr, ', '.join(ast.releases)))

      if releasestr:
        InfoOut.Log('Generate release %s of %s.' % (releasestr, self.name))
        ret = self.GenerateRelease(ast, releasestr, options)
        if ret < 0:
          self.Error('Failed to generate release %s.' % releasestr)
        else:
          InfoOut.Log('%s wrote %d files.' % (self.name, ret))
      else:
        self.Error('No range or release specified for %s.' % releasestr)
    return self.errors

  def GenerateRelease(self, ast, release, options):
    """Override point: generate output for a single release."""
    __pychecker__ = 'unusednames=ast,release,options'
    self.Error("Undefined release generator.")
    return 0

  def GenerateRange(self, ast, releases, options):
    """Override point: generate output for a list of releases."""
    __pychecker__ = 'unusednames=ast,releases,options'
    self.Error("Undefined range generator.")
    return 0

  @staticmethod
  def Run(ast):
    """Run every registered, enabled generator; return the failure count."""
    fail_count = 0

    # Check all registered generators if they should run.
    for gen in GeneratorList:
      options = gen.GetRunOptions()
      if options is not None:
        if gen.Generate(ast, options):
          fail_count += 1

    return fail_count
class GeneratorByFile(Generator):
  """A simplified generator that generates one output file per IDL source file.

  A subclass of Generator for use of generators which have a one to one
  mapping between IDL sources and output files.

  Derived classes should define GenerateFile.
  """

  def GenerateFile(self, filenode, releases, options):
    """Generates an output file from the IDL source.

    Returns true if the generated file is different than the previously
    generated file.
    """
    __pychecker__ = 'unusednames=filenode,releases,options'
    self.Error("Undefined release generator.")
    return 0

  def GenerateRelease(self, ast, release, options):
    # A single release is just a range containing one release.
    return self.GenerateRange(ast, [release], options)

  def GenerateRange(self, ast, releases, options):
    # Get list of out files
    outlist = GetOption('out')
    if outlist:
      outlist = outlist.split(',')

    # NOTE(review): skipList is never appended to, so the reporting loop and
    # the negative return below are effectively dead code -- kept unchanged
    # to preserve the original behavior.
    skipList = []
    cnt = 0
    for filenode in ast.GetListOf('File'):
      # Ignore files with errors
      if filenode in self.skip_list:
        continue

      # Skip this file if not required
      if outlist and filenode.GetName() not in outlist:
        continue

      # Create the output file and increment out count if there was a delta
      if self.GenerateFile(filenode, releases, options):
        cnt = cnt + 1

    for filenode in skipList:
      errcnt = filenode.GetProperty('ERRORS')
      ErrOut.Log('%s : Skipped because of %d errors.' % (
          filenode.GetName(), errcnt))

    if skipList:
      return -len(skipList)
    if GetOption('diff'):
      return -cnt
    return cnt
check_release = 0
check_range = 0
class GeneratorReleaseTest(Generator):
  """Self-test generator used by Test() to verify option plumbing."""

  def GenerateRelease(self, ast, release, options = {}):
    __pychecker__ = 'unusednames=ast,release,options'
    global check_release
    check_map = {
      'so_long': True,
      'MyOpt': 'XYZ',
      'goodbye': True
    }

    # Assume success, then clear the flag on any mismatch.
    check_release = 1
    for item in check_map:
      expected = check_map[item]
      actual = options.get(item, None)
      if expected != actual:
        print('Option %s is %s, expecting %s' % (item, actual, expected))
        check_release = 0

    if release != 'M14':
      check_release = 0

    return check_release == 1

  def GenerateRange(self, ast, releases, options):
    __pychecker__ = 'unusednames=ast,releases,options'
    global check_range
    check_range = 1
    return True
def Test():
  """Self-test: drive the test generator through release and range modes."""
  __pychecker__ = 'unusednames=args'
  global check_release
  global check_range

  ParseOptions(['--testgen_opt=so_long,MyOpt=XYZ,goodbye'])
  if Generator.Run('AST') != 0:
    print('Generate release: Failed.\n')
    return -1

  if check_release != 1 or check_range != 0:
    print('Gererate release: Failed to run.\n')
    return -1

  check_release = 0
  ParseOptions(['--testgen_opt="HELLO"', '--range=M14,M16'])
  if Generator.Run('AST') != 0:
    print('Generate range: Failed.\n')
    return -1

  if check_release != 0 or check_range != 1:
    print('Gererate range: Failed to run.\n')
    return -1

  print('Generator test: Pass')
  return 0
def Main(args):
  """Driver entry point: no args runs the self-test, otherwise parse and run."""
  if not args:
    return Test()
  filenames = ParseOptions(args)
  ast = ParseFiles(filenames)
  return Generator.Run(ast)
if __name__ == '__main__':
GeneratorReleaseTest('Test Gen', 'testgen', 'Generator Class Test.')
sys.exit(Main(sys.argv[1:]))
| bsd-3-clause |
zmap/ztag | ztag/annotations/FtpFritz.py | 1 | 2211 | import re
from ztag.annotation import Annotation
from ztag.annotation import OperatingSystem
from ztag.annotation import Type
from ztag.annotation import Manufacturer
from ztag import protocols
import ztag.test
class FtpFritz(Annotation):
    """Tag AVM FRITZ!Box devices based on their FTP banner."""

    protocol = protocols.FTP
    subprotocol = protocols.FTP.BANNER
    port = None

    # e.g. "220 FRITZ!Box7490 FTP server ready.\r\n"
    manufact_re = re.compile("^220 FRITZ!Box", re.IGNORECASE)
    product_re = re.compile("^220 (.+) FTP server ready", re.IGNORECASE)

    tests = {
        "FtpFritz_1": {
            "global_metadata": {
                "device_type": Type.CABLE_MODEM,
                "manufacturer": Manufacturer.AVM,
                "product": "FRITZ!BoxFonWLAN7390"
            }
        },
        "FtpFritz_2": {
            "global_metadata": {
                "device_type": Type.CABLE_MODEM,
                "manufacturer": Manufacturer.AVM,
                "product": "FRITZ!BoxFonWLAN7360(EWEEdition)"
            }
        }
    }

    def process(self, obj, meta):
        """Populate device metadata when the banner identifies a FRITZ!Box."""
        banner = obj["banner"]
        if self.manufact_re.search(banner):
            meta.global_metadata.device_type = Type.CABLE_MODEM
            meta.global_metadata.manufacturer = Manufacturer.AVM
            # BUG FIX: re.search returns None when the banner lacks the
            # "<product> FTP server ready" suffix; the original called
            # .group(1) unconditionally and could raise AttributeError.
            match = self.product_re.search(banner)
            if match is not None:
                meta.global_metadata.product = match.group(1)
            return meta


""" Tests
"220 FRITZ!Box7272 FTP server ready.\r\n"
"220 FRITZ!BoxFonWLAN7390 FTP server ready.\r\n"
"220 FRITZ!Box7490 FTP server ready.\r\n"
"220 FRITZ!BoxFonWLAN7390 FTP server ready.\r\n"
"220 FRITZ!BoxFonWLAN7390 FTP server ready.\r\n"
"220 FRITZ!BoxFonWLAN7170 FTP server ready.\r\n"
"220 FRITZ!BoxFonWLAN7390(UI) FTP server ready.\r\n"
"220 FRITZ!BoxFonWLAN7270v2 FTP server ready.\r\n"
"220 FRITZ!BoxFonWLAN7390(UI) FTP server ready.\r\n"
"220 FRITZ!BoxFonWLAN7360(EWEEdition) FTP server ready.\r\n"
"220 FRITZ!Box7330SL(UI) FTP server ready.\r\n"
"220 FRITZ!Box7490 FTP server ready.\r\n"
"220 FRITZ!BoxFonWLAN7270v3 FTP server ready.\r\n"
"220 FRITZ!BoxFonWLAN7340 FTP server ready.\r\n"
"220 FRITZ!Box6360Cable(kdg) FTP server ready.\r\n"
"220 FRITZ!Box6360Cable(um) FTP server ready.\r\n"
"""
| apache-2.0 |
nemesiscodex/openfonacide | openfonacide/migrations/0004_construccionaulas_construccionsanitario_reparacionaulas_reparacionsanitario.py | 3 | 6913 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def _priority_fields(specific_columns):
    """Build the field list shared by the four FONACIDE priority models.

    Every model starts with the automatic ``id`` plus an integer
    ``prioridad``, then the same run of school/location columns, then the
    model-specific columns passed in *specific_columns*, and ends with the
    common justification/department columns.  Every column other than
    ``id``/``prioridad`` is ``CharField(max_length=128)``, and the resulting
    order is byte-for-byte the one the original auto-generated migration
    produced, so the emitted schema is unchanged.
    """
    common_head = [
        'cod_local', 'cod_institucion', 'nombre_institucion', 'nro_esc',
        'distrito', 'localidad_barrio', 'zona', 'nombre_asentamiento',
        'region_supervision', 'nro_beneficiados', 'nivel_educativo_beneficiado',
    ]
    common_tail = ['justificacion', 'departamento', 'cod_departamento']
    fields = [
        ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
        ('prioridad', models.IntegerField()),
    ]
    fields.extend(
        (name, models.CharField(max_length=128))
        for name in common_head + list(specific_columns) + common_tail
    )
    return fields


class Migration(migrations.Migration):
    """0004: create the four construction/repair priority tables."""

    dependencies = [
        ('openfonacide', '0003_institucion_nombre'),
    ]

    operations = [
        migrations.CreateModel(
            name='ConstruccionAulas',
            fields=_priority_fields([
                'espacio_destinado', 'cantidad_espacios_nuevos',
                'abastecimiento_agua', 'corriente_electrica',
                'titulo_propiedad', 'cuenta_con_espacio_construccion',
            ]),
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='ConstruccionSanitario',
            fields=_priority_fields([
                'cant_sanitarios_construccion', 'abastecimiento_agua',
                'corriente_electrica', 'titulo_propiedad',
                'cuenta_con_espacio',
            ]),
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='ReparacionAulas',
            fields=_priority_fields([
                'espacio_destinado_a', 'cant_espacios_necesitan_reparacion',
                'cant_espacios_construidos_adecuacion',
            ]),
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='ReparacionSanitario',
            fields=_priority_fields([
                'cantidad_sanitarios_construidos_reparacion',
                'cantidad_sanitarios_construidos_adecuacion',
            ]),
            options={
            },
            bases=(models.Model,),
        ),
    ]
| lgpl-3.0 |
fredericlepied/ansible | lib/ansible/modules/system/capabilities.py | 9 | 6584 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Nate Coraor <nate@bx.psu.edu>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: capabilities
short_description: Manage Linux capabilities
description:
- This module manipulates files privileges using the Linux capabilities(7) system.
version_added: "1.6"
options:
path:
description:
- Specifies the path to the file to be managed.
required: true
default: null
capability:
description:
- Desired capability to set (with operator and flags, if state is C(present)) or remove (if state is C(absent))
required: true
default: null
aliases: [ 'cap' ]
state:
description:
- Whether the entry should be present or absent in the file's capabilities.
choices: [ "present", "absent" ]
default: present
notes:
- The capabilities system will automatically transform operators and flags
into the effective set, so (for example, cap_foo=ep will probably become
cap_foo+ep). This module does not attempt to determine the final operator
and flags to compare, so you will want to ensure that your capabilities
argument matches the final capabilities.
requirements: []
author: "Nate Coraor (@natefoo)"
'''
EXAMPLES = '''
# Set cap_sys_chroot+ep on /foo
- capabilities:
path: /foo
capability: cap_sys_chroot+ep
state: present
# Remove cap_net_bind_service from /bar
- capabilities:
path: /bar
capability: cap_net_bind_service
state: absent
'''
from ansible.module_utils.basic import AnsibleModule
# Operators recognized in a capability spec such as "cap_foo+ep"; tuple
# order doubles as the search priority used by _parse_cap().
OPS = ( '=', '-', '+' )
class CapabilitiesModule(object):
    """Ensure a Linux file capability (capabilities(7)) is present or absent.

    Reads the file's current capabilities via getcap(8), compares them with
    the requested (capability, operator, flags) triple and, when a change is
    required, rewrites the complete capability set via setcap(8).  The
    object exits through the supplied AnsibleModule, so constructing it
    runs the whole module.
    """
    platform = 'Linux'
    distribution = None

    def __init__(self, module):
        self.module = module
        self.path = module.params['path'].strip()
        self.capability = module.params['capability'].strip().lower()
        self.state = module.params['state']
        self.getcap_cmd = module.get_bin_path('getcap', required=True)
        self.setcap_cmd = module.get_bin_path('setcap', required=True)
        # An operator is only mandatory when we are going to *set* the
        # capability; removal needs just the capability name.
        self.capability_tup = self._parse_cap(self.capability, op_required=self.state == 'present')
        self.run()

    def run(self):
        """Compare current and desired state; apply and exit_json."""
        current = self.getcap(self.path)
        caps = [cap[0] for cap in current]
        if self.state == 'present' and self.capability_tup not in current:
            # need to add capability
            if self.module.check_mode:
                self.module.exit_json(changed=True, msg='capabilities changed')
            else:
                # Drop any entry for the same capability whose op/flags
                # differ, then append the requested triple.  A list
                # comprehension (not filter()) keeps `current` appendable on
                # Python 3, where filter() returns an iterator with no
                # append() — the original code raised AttributeError there.
                current = [cap for cap in current if cap[0] != self.capability_tup[0]]
                current.append(self.capability_tup)
                self.module.exit_json(changed=True, state=self.state, msg='capabilities changed',
                                      stdout=self.setcap(self.path, current))
        elif self.state == 'absent' and self.capability_tup[0] in caps:
            # need to remove capability
            if self.module.check_mode:
                self.module.exit_json(changed=True, msg='capabilities changed')
            else:
                # remove from current cap list and then set the reduced list
                current = [cap for cap in current if cap[0] != self.capability_tup[0]]
                self.module.exit_json(changed=True, state=self.state, msg='capabilities changed',
                                      stdout=self.setcap(self.path, current))
        self.module.exit_json(changed=False, state=self.state)

    def getcap(self, path):
        """Return the file's capabilities as a list of (cap, op, flags) tuples."""
        rval = []
        cmd = "%s -v %s" % (self.getcap_cmd, path)
        rc, stdout, stderr = self.module.run_command(cmd)
        # If file xattrs are set but no caps are set the output will be:
        #   '/foo ='
        # If file xattrs are unset the output will be:
        #   '/foo'
        # If the file does not exist the output will be (with rc == 0...):
        #   '/foo (No such file or directory)'
        if rc != 0 or (stdout.strip() != path and stdout.count(' =') != 1):
            self.module.fail_json(msg="Unable to get capabilities of %s" % path, stdout=stdout.strip(), stderr=stderr)
        if stdout.strip() != path:
            caps = stdout.split(' =')[1].strip().split()
            for cap in caps:
                cap = cap.lower()
                # getcap condenses capabilities with the same op/flags into a
                # comma-separated list, so we have to parse that
                if ',' in cap:
                    cap_group = cap.split(',')
                    cap_group[-1], op, flags = self._parse_cap(cap_group[-1])
                    for subcap in cap_group:
                        rval.append((subcap, op, flags))
                else:
                    rval.append(self._parse_cap(cap))
        return rval

    def setcap(self, path, caps):
        """Write the complete capability set to *path*; return setcap's stdout."""
        caps = ' '.join([''.join(cap) for cap in caps])
        cmd = "%s '%s' %s" % (self.setcap_cmd, caps, path)
        rc, stdout, stderr = self.module.run_command(cmd)
        if rc != 0:
            self.module.fail_json(msg="Unable to set capabilities of %s" % path, stdout=stdout, stderr=stderr)
        else:
            return stdout

    def _parse_cap(self, cap, op_required=True):
        """Split 'cap_name<op>flags' into a (name, op, flags) tuple.

        OPS order ('=', '-', '+') is the search priority: the first operator
        found anywhere in the string wins, exactly as before.  When no
        operator is present, return (cap, None, None) — or fail the module
        if *op_required* is true.  This replaces the original unbounded
        while-loop whose bare ``except:`` relied on catching the IndexError
        raised after exhausting OPS.
        """
        opind = -1
        for candidate in OPS:
            opind = cap.find(candidate)
            if opind != -1:
                break
        if opind == -1:
            if op_required:
                self.module.fail_json(msg="Couldn't find operator (one of: %s)" % str(OPS))
            else:
                return (cap, None, None)
        op = cap[opind]
        cap, flags = cap.split(op)
        return (cap, op, flags)
# ==============================================================
# main
def main():
    """Entry point: declare the module arguments and run CapabilitiesModule."""
    argument_spec = dict(
        path=dict(aliases=['key'], required=True),
        capability=dict(aliases=['cap'], required=True),
        state=dict(default='present', choices=['present', 'absent']),
    )
    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)
    # Construction runs the whole module and exits via exit_json/fail_json.
    CapabilitiesModule(module)


if __name__ == '__main__':
    main()
| gpl-3.0 |
imtapps/django-imt-fork | tests/regressiontests/null_fk/tests.py | 118 | 3006 | from __future__ import absolute_import, unicode_literals
from django.db.models import Q
from django.test import TestCase
from .models import (SystemDetails, Item, PropertyValue, SystemInfo, Forum,
Post, Comment)
class NullFkTests(TestCase):
    """Regression tests for traversing nullable foreign keys with
    select_related() and for combining __isnull filters (Django tickets
    #7369, #7530, #7716, #15823)."""

    def test_null_fk(self):
        """select_related() must LEFT JOIN through NULL FKs without losing rows."""
        # Chain: Comment -> Post -> Forum -> SystemInfo -> SystemDetails.
        # c2 deliberately has post=NULL so every join after it is NULL too.
        d = SystemDetails.objects.create(details='First details')
        s = SystemInfo.objects.create(system_name='First forum', system_details=d)
        f = Forum.objects.create(system_info=s, forum_name='First forum')
        p = Post.objects.create(forum=f, title='First Post')
        c1 = Comment.objects.create(post=p, comment_text='My first comment')
        c2 = Comment.objects.create(comment_text='My second comment')
        # Starting from comment, make sure that a .select_related(...) with a specified
        # set of fields will properly LEFT JOIN multiple levels of NULLs (and the things
        # that come after the NULLs, or else data that should exist won't). Regression
        # test for #7369.
        c = Comment.objects.select_related().get(id=c1.id)
        self.assertEqual(c.post, p)
        self.assertEqual(Comment.objects.select_related().get(id=c2.id).post, None)
        self.assertQuerysetEqual(
            Comment.objects.select_related('post__forum__system_info').all(),
            [
                (c1.id, 'My first comment', '<Post: First Post>'),
                (c2.id, 'My second comment', 'None')
            ],
            transform = lambda c: (c.id, c.comment_text, repr(c.post))
        )
        # Regression test for #7530, #7716.
        self.assertTrue(Comment.objects.select_related('post').filter(post__isnull=True)[0].post is None)
        self.assertQuerysetEqual(
            Comment.objects.select_related('post__forum__system_info__system_details'),
            [
                (c1.id, 'My first comment', '<Post: First Post>'),
                (c2.id, 'My second comment', 'None')
            ],
            transform = lambda c: (c.id, c.comment_text, repr(c.post))
        )

    def test_combine_isnull(self):
        """Combining querysets that filter on value vs. __isnull over the same
        nullable relation must be order-independent (regression for #15823)."""
        item = Item.objects.create(title='Some Item')
        pv = PropertyValue.objects.create(label='Some Value')
        item.props.create(key='a', value=pv)
        item.props.create(key='b') # value=NULL
        q1 = Q(props__key='a', props__value=pv)
        q2 = Q(props__key='b', props__value__isnull=True)
        # Each of these individually should return the item.
        self.assertEqual(Item.objects.get(q1), item)
        self.assertEqual(Item.objects.get(q2), item)
        # Logically, qs1 and qs2, and qs3 and qs4 should be the same.
        qs1 = Item.objects.filter(q1) & Item.objects.filter(q2)
        qs2 = Item.objects.filter(q2) & Item.objects.filter(q1)
        qs3 = Item.objects.filter(q1) | Item.objects.filter(q2)
        qs4 = Item.objects.filter(q2) | Item.objects.filter(q1)
        # Regression test for #15823.
        self.assertEqual(list(qs1), list(qs2))
        self.assertEqual(list(qs3), list(qs4))
| bsd-3-clause |
arantebillywilson/python-snippets | microblog/flask/lib/python3.5/site-packages/whoosh/lang/snowball/russian.py | 95 | 20904 | from whoosh.compat import u
class RussianStemmer(object):
    """
    The Russian Snowball stemmer.

    Cyrillic input is transliterated to an ASCII working alphabet (multi-letter
    sequences like ``i^a``, ``shch``, ``e``` stand for single Cyrillic letters),
    stemmed by suffix stripping, and transliterated back.

    :cvar __perfective_gerund_suffixes: Suffixes to be deleted.
    :type __perfective_gerund_suffixes: tuple
    :cvar __adjectival_suffixes: Suffixes to be deleted.
    :type __adjectival_suffixes: tuple
    :cvar __reflexive_suffixes: Suffixes to be deleted.
    :type __reflexive_suffixes: tuple
    :cvar __verb_suffixes: Suffixes to be deleted.
    :type __verb_suffixes: tuple
    :cvar __noun_suffixes: Suffixes to be deleted.
    :type __noun_suffixes: tuple
    :cvar __superlative_suffixes: Suffixes to be deleted.
    :type __superlative_suffixes: tuple
    :cvar __derivational_suffixes: Suffixes to be deleted.
    :type __derivational_suffixes: tuple
    :note: A detailed description of the Russian
           stemming algorithm can be found under
           http://snowball.tartarus.org/algorithms/russian/stemmer.html
    """
    # Suffix tuples are ordered longest-first within each family so the
    # first endswith() match is the longest one.
    __perfective_gerund_suffixes = ("ivshis'", "yvshis'", "vshis'",
                                    "ivshi", "yvshi", "vshi", "iv",
                                    "yv", "v")
    # NOTE(review): 'ui^ushchaia' below breaks the 'ai^a' spelling used by
    # every sibling entry (cf. 'i^ushchai^a'); it likely should be
    # 'ui^ushchai^a' — verify against the Snowball reference before changing.
    __adjectival_suffixes = ('ui^ushchi^ui^u', 'ui^ushchi^ai^a',
                             'ui^ushchimi', 'ui^ushchymi', 'ui^ushchego',
                             'ui^ushchogo', 'ui^ushchemu', 'ui^ushchomu',
                             'ui^ushchikh', 'ui^ushchykh',
                             'ui^ushchui^u', 'ui^ushchaia',
                             'ui^ushchoi^u', 'ui^ushchei^u',
                             'i^ushchi^ui^u', 'i^ushchi^ai^a',
                             'ui^ushchee', 'ui^ushchie',
                             'ui^ushchye', 'ui^ushchoe', 'ui^ushchei`',
                             'ui^ushchii`', 'ui^ushchyi`',
                             'ui^ushchoi`', 'ui^ushchem', 'ui^ushchim',
                             'ui^ushchym', 'ui^ushchom', 'i^ushchimi',
                             'i^ushchymi', 'i^ushchego', 'i^ushchogo',
                             'i^ushchemu', 'i^ushchomu', 'i^ushchikh',
                             'i^ushchykh', 'i^ushchui^u', 'i^ushchai^a',
                             'i^ushchoi^u', 'i^ushchei^u', 'i^ushchee',
                             'i^ushchie', 'i^ushchye', 'i^ushchoe',
                             'i^ushchei`', 'i^ushchii`',
                             'i^ushchyi`', 'i^ushchoi`', 'i^ushchem',
                             'i^ushchim', 'i^ushchym', 'i^ushchom',
                             'shchi^ui^u', 'shchi^ai^a', 'ivshi^ui^u',
                             'ivshi^ai^a', 'yvshi^ui^u', 'yvshi^ai^a',
                             'shchimi', 'shchymi', 'shchego', 'shchogo',
                             'shchemu', 'shchomu', 'shchikh', 'shchykh',
                             'shchui^u', 'shchai^a', 'shchoi^u',
                             'shchei^u', 'ivshimi', 'ivshymi',
                             'ivshego', 'ivshogo', 'ivshemu', 'ivshomu',
                             'ivshikh', 'ivshykh', 'ivshui^u',
                             'ivshai^a', 'ivshoi^u', 'ivshei^u',
                             'yvshimi', 'yvshymi', 'yvshego', 'yvshogo',
                             'yvshemu', 'yvshomu', 'yvshikh', 'yvshykh',
                             'yvshui^u', 'yvshai^a', 'yvshoi^u',
                             'yvshei^u', 'vshi^ui^u', 'vshi^ai^a',
                             'shchee', 'shchie', 'shchye', 'shchoe',
                             'shchei`', 'shchii`', 'shchyi`', 'shchoi`',
                             'shchem', 'shchim', 'shchym', 'shchom',
                             'ivshee', 'ivshie', 'ivshye', 'ivshoe',
                             'ivshei`', 'ivshii`', 'ivshyi`',
                             'ivshoi`', 'ivshem', 'ivshim', 'ivshym',
                             'ivshom', 'yvshee', 'yvshie', 'yvshye',
                             'yvshoe', 'yvshei`', 'yvshii`',
                             'yvshyi`', 'yvshoi`', 'yvshem',
                             'yvshim', 'yvshym', 'yvshom', 'vshimi',
                             'vshymi', 'vshego', 'vshogo', 'vshemu',
                             'vshomu', 'vshikh', 'vshykh', 'vshui^u',
                             'vshai^a', 'vshoi^u', 'vshei^u',
                             'emi^ui^u', 'emi^ai^a', 'nni^ui^u',
                             'nni^ai^a', 'vshee',
                             'vshie', 'vshye', 'vshoe', 'vshei`',
                             'vshii`', 'vshyi`', 'vshoi`',
                             'vshem', 'vshim', 'vshym', 'vshom',
                             'emimi', 'emymi', 'emego', 'emogo',
                             'ememu', 'emomu', 'emikh', 'emykh',
                             'emui^u', 'emai^a', 'emoi^u', 'emei^u',
                             'nnimi', 'nnymi', 'nnego', 'nnogo',
                             'nnemu', 'nnomu', 'nnikh', 'nnykh',
                             'nnui^u', 'nnai^a', 'nnoi^u', 'nnei^u',
                             'emee', 'emie', 'emye', 'emoe',
                             'emei`', 'emii`', 'emyi`',
                             'emoi`', 'emem', 'emim', 'emym',
                             'emom', 'nnee', 'nnie', 'nnye', 'nnoe',
                             'nnei`', 'nnii`', 'nnyi`',
                             'nnoi`', 'nnem', 'nnim', 'nnym',
                             'nnom', 'i^ui^u', 'i^ai^a', 'imi', 'ymi',
                             'ego', 'ogo', 'emu', 'omu', 'ikh',
                             'ykh', 'ui^u', 'ai^a', 'oi^u', 'ei^u',
                             'ee', 'ie', 'ye', 'oe', 'ei`',
                             'ii`', 'yi`', 'oi`', 'em',
                             'im', 'ym', 'om')
    __reflexive_suffixes = ("si^a", "s'")
    __verb_suffixes = ("esh'", 'ei`te', 'ui`te', 'ui^ut',
                       "ish'", 'ete', 'i`te', 'i^ut', 'nno',
                       'ila', 'yla', 'ena', 'ite', 'ili', 'yli',
                       'ilo', 'ylo', 'eno', 'i^at', 'uet', 'eny',
                       "it'", "yt'", 'ui^u', 'la', 'na', 'li',
                       'em', 'lo', 'no', 'et', 'ny', "t'",
                       'ei`', 'ui`', 'il', 'yl', 'im',
                       'ym', 'en', 'it', 'yt', 'i^u', 'i`',
                       'l', 'n')
    __noun_suffixes = ('ii^ami', 'ii^akh', 'i^ami', 'ii^am', 'i^akh',
                       'ami', 'iei`', 'i^am', 'iem', 'akh',
                       'ii^u', "'i^u", 'ii^a', "'i^a", 'ev', 'ov',
                       'ie', "'e", 'ei', 'ii', 'ei`',
                       'oi`', 'ii`', 'em', 'am', 'om',
                       'i^u', 'i^a', 'a', 'e', 'i', 'i`',
                       'o', 'u', 'y', "'")
    __superlative_suffixes = ("ei`she", "ei`sh")
    __derivational_suffixes = ("ost'", "ost")

    def stem(self, word):
        """
        Stem a Russian word and return the stemmed form.

        :param word: The word that is stemmed.
        :type word: str or unicode
        :return: The stemmed form.
        :rtype: unicode
        """
        # Any code point above Latin-1 means the word is (at least partly)
        # Cyrillic: transliterate to the ASCII working alphabet first and
        # transliterate back at the end.
        chr_exceeded = False
        for i in range(len(word)):
            if ord(word[i]) > 255:
                chr_exceeded = True
                break
        if chr_exceeded:
            word = self.__cyrillic_to_roman(word)
        step1_success = False
        adjectival_removed = False
        verb_removed = False
        undouble_success = False
        superlative_removed = False
        rv, r2 = self.__regions_russian(word)
        # Step 1
        # Perfective gerund suffixes: the short forms (v, vshi, vshis')
        # only count when preceded by 'a' or 'i^a'.
        for suffix in self.__perfective_gerund_suffixes:
            if rv.endswith(suffix):
                if suffix in ("v", "vshi", "vshis'"):
                    if (rv[-len(suffix) - 3:-len(suffix)] == "i^a" or
                        rv[-len(suffix) - 1:-len(suffix)] == "a"):
                        word = word[:-len(suffix)]
                        r2 = r2[:-len(suffix)]
                        rv = rv[:-len(suffix)]
                        step1_success = True
                        break
                else:
                    word = word[:-len(suffix)]
                    r2 = r2[:-len(suffix)]
                    rv = rv[:-len(suffix)]
                    step1_success = True
                    break
        # Only when no perfective gerund was removed: strip a reflexive
        # ending, then try adjectival, verb and noun suffixes in that order.
        if not step1_success:
            for suffix in self.__reflexive_suffixes:
                if rv.endswith(suffix):
                    word = word[:-len(suffix)]
                    r2 = r2[:-len(suffix)]
                    rv = rv[:-len(suffix)]
                    break
            for suffix in self.__adjectival_suffixes:
                if rv.endswith(suffix):
                    # Participle-derived adjectives in this list likewise
                    # require a preceding 'a' or 'i^a'.
                    if suffix in ('i^ushchi^ui^u', 'i^ushchi^ai^a',
                                  'i^ushchui^u', 'i^ushchai^a', 'i^ushchoi^u',
                                  'i^ushchei^u', 'i^ushchimi', 'i^ushchymi',
                                  'i^ushchego', 'i^ushchogo', 'i^ushchemu',
                                  'i^ushchomu', 'i^ushchikh', 'i^ushchykh',
                                  'shchi^ui^u', 'shchi^ai^a', 'i^ushchee',
                                  'i^ushchie', 'i^ushchye', 'i^ushchoe',
                                  'i^ushchei`', 'i^ushchii`', 'i^ushchyi`',
                                  'i^ushchoi`', 'i^ushchem', 'i^ushchim',
                                  'i^ushchym', 'i^ushchom', 'vshi^ui^u',
                                  'vshi^ai^a', 'shchui^u', 'shchai^a',
                                  'shchoi^u', 'shchei^u', 'emi^ui^u',
                                  'emi^ai^a', 'nni^ui^u', 'nni^ai^a',
                                  'shchimi', 'shchymi', 'shchego', 'shchogo',
                                  'shchemu', 'shchomu', 'shchikh', 'shchykh',
                                  'vshui^u', 'vshai^a', 'vshoi^u', 'vshei^u',
                                  'shchee', 'shchie', 'shchye', 'shchoe',
                                  'shchei`', 'shchii`', 'shchyi`', 'shchoi`',
                                  'shchem', 'shchim', 'shchym', 'shchom',
                                  'vshimi', 'vshymi', 'vshego', 'vshogo',
                                  'vshemu', 'vshomu', 'vshikh', 'vshykh',
                                  'emui^u', 'emai^a', 'emoi^u', 'emei^u',
                                  'nnui^u', 'nnai^a', 'nnoi^u', 'nnei^u',
                                  'vshee', 'vshie', 'vshye', 'vshoe',
                                  'vshei`', 'vshii`', 'vshyi`', 'vshoi`',
                                  'vshem', 'vshim', 'vshym', 'vshom',
                                  'emimi', 'emymi', 'emego', 'emogo',
                                  'ememu', 'emomu', 'emikh', 'emykh',
                                  'nnimi', 'nnymi', 'nnego', 'nnogo',
                                  'nnemu', 'nnomu', 'nnikh', 'nnykh',
                                  'emee', 'emie', 'emye', 'emoe', 'emei`',
                                  'emii`', 'emyi`', 'emoi`', 'emem', 'emim',
                                  'emym', 'emom', 'nnee', 'nnie', 'nnye',
                                  'nnoe', 'nnei`', 'nnii`', 'nnyi`', 'nnoi`',
                                  'nnem', 'nnim', 'nnym', 'nnom'):
                        if (rv[-len(suffix) - 3:-len(suffix)] == "i^a" or
                            rv[-len(suffix) - 1:-len(suffix)] == "a"):
                            word = word[:-len(suffix)]
                            r2 = r2[:-len(suffix)]
                            rv = rv[:-len(suffix)]
                            adjectival_removed = True
                            break
                    else:
                        word = word[:-len(suffix)]
                        r2 = r2[:-len(suffix)]
                        rv = rv[:-len(suffix)]
                        adjectival_removed = True
                        break
            if not adjectival_removed:
                for suffix in self.__verb_suffixes:
                    if rv.endswith(suffix):
                        # These verb endings also need the preceding 'a'/'i^a'.
                        if suffix in ("la", "na", "ete", "i`te", "li",
                                      "i`", "l", "em", "n", "lo", "no",
                                      "et", "i^ut", "ny", "t'", "esh'",
                                      "nno"):
                            if (rv[-len(suffix) - 3:-len(suffix)] == "i^a" or
                                rv[-len(suffix) - 1:-len(suffix)] == "a"):
                                word = word[:-len(suffix)]
                                r2 = r2[:-len(suffix)]
                                rv = rv[:-len(suffix)]
                                verb_removed = True
                                break
                        else:
                            word = word[:-len(suffix)]
                            r2 = r2[:-len(suffix)]
                            rv = rv[:-len(suffix)]
                            verb_removed = True
                            break
            if not adjectival_removed and not verb_removed:
                for suffix in self.__noun_suffixes:
                    if rv.endswith(suffix):
                        word = word[:-len(suffix)]
                        r2 = r2[:-len(suffix)]
                        rv = rv[:-len(suffix)]
                        break
        # Step 2: drop a trailing 'i' in RV.
        if rv.endswith("i"):
            word = word[:-1]
            r2 = r2[:-1]
        # Step 3: derivational suffixes are only removed when in R2.
        for suffix in self.__derivational_suffixes:
            if r2.endswith(suffix):
                word = word[:-len(suffix)]
                break
        # Step 4: undouble 'nn', or strip a superlative (then undouble),
        # or strip a trailing soft sign.
        if word.endswith("nn"):
            word = word[:-1]
            undouble_success = True
        if not undouble_success:
            for suffix in self.__superlative_suffixes:
                if word.endswith(suffix):
                    word = word[:-len(suffix)]
                    superlative_removed = True
                    break
            if word.endswith("nn"):
                word = word[:-1]
        if not undouble_success and not superlative_removed:
            if word.endswith("'"):
                word = word[:-1]
        if chr_exceeded:
            word = self.__roman_to_cyrillic(word)
        return word

    def __regions_russian(self, word):
        """
        Return the regions RV and R2 which are used by the Russian stemmer.

        In any word, RV is the region after the first vowel,
        or the end of the word if it contains no vowel.

        R2 is the region after the first non-vowel following
        a vowel in R1, or the end of the word if there is no such non-vowel.

        R1 is the region after the first non-vowel following a vowel,
        or the end of the word if there is no such non-vowel.

        :param word: The Russian word whose regions RV and R2 are determined.
        :type word: str or unicode
        :return: the regions RV and R2 for the respective Russian word.
        :rtype: tuple
        :note: This helper method is invoked by the stem method of the subclass
               RussianStemmer. It is not to be invoked directly!
        """
        r1 = ""
        r2 = ""
        rv = ""
        vowels = ("A", "U", "E", "a", "e", "i", "o", "u", "y")
        # A/U/E are single-character placeholders for the two-character
        # vowel transliterations i^a/i^u/e` so the scans below can treat
        # each vowel as one character; they are restored before returning.
        word = (word.replace("i^a", "A")
                    .replace("i^u", "U")
                    .replace("e`", "E"))
        for i in range(1, len(word)):
            if word[i] not in vowels and word[i - 1] in vowels:
                r1 = word[i + 1:]
                break
        for i in range(1, len(r1)):
            if r1[i] not in vowels and r1[i - 1] in vowels:
                r2 = r1[i + 1:]
                break
        for i in range(len(word)):
            if word[i] in vowels:
                rv = word[i + 1:]
                break
        r2 = (r2.replace("A", "i^a")
                .replace("U", "i^u")
                .replace("E", "e`"))
        rv = (rv.replace("A", "i^a")
                .replace("U", "i^u")
                .replace("E", "e`"))
        return (rv, r2)

    def __cyrillic_to_roman(self, word):
        """
        Transliterate a Russian word into the Roman alphabet.

        A Russian word whose letters consist of the Cyrillic
        alphabet are transliterated into the Roman alphabet
        in order to ease the forthcoming stemming process.

        :param word: The word that is transliterated.
        :type word: unicode
        :return: the transliterated word.
        :rtype: unicode
        :note: This helper method is invoked by the stem method of the subclass
               RussianStemmer. It is not to be invoked directly!
        """
        word = (word.replace(u("\u0410"), "a").replace(u("\u0430"), "a")
                    .replace(u("\u0411"), "b").replace(u("\u0431"), "b")
                    .replace(u("\u0412"), "v").replace(u("\u0432"), "v")
                    .replace(u("\u0413"), "g").replace(u("\u0433"), "g")
                    .replace(u("\u0414"), "d").replace(u("\u0434"), "d")
                    .replace(u("\u0415"), "e").replace(u("\u0435"), "e")
                    .replace(u("\u0401"), "e").replace(u("\u0451"), "e")
                    .replace(u("\u0416"), "zh").replace(u("\u0436"), "zh")
                    .replace(u("\u0417"), "z").replace(u("\u0437"), "z")
                    .replace(u("\u0418"), "i").replace(u("\u0438"), "i")
                    .replace(u("\u0419"), "i`").replace(u("\u0439"), "i`")
                    .replace(u("\u041A"), "k").replace(u("\u043A"), "k")
                    .replace(u("\u041B"), "l").replace(u("\u043B"), "l")
                    .replace(u("\u041C"), "m").replace(u("\u043C"), "m")
                    .replace(u("\u041D"), "n").replace(u("\u043D"), "n")
                    .replace(u("\u041E"), "o").replace(u("\u043E"), "o")
                    .replace(u("\u041F"), "p").replace(u("\u043F"), "p")
                    .replace(u("\u0420"), "r").replace(u("\u0440"), "r")
                    .replace(u("\u0421"), "s").replace(u("\u0441"), "s")
                    .replace(u("\u0422"), "t").replace(u("\u0442"), "t")
                    .replace(u("\u0423"), "u").replace(u("\u0443"), "u")
                    .replace(u("\u0424"), "f").replace(u("\u0444"), "f")
                    .replace(u("\u0425"), "kh").replace(u("\u0445"), "kh")
                    .replace(u("\u0426"), "t^s").replace(u("\u0446"), "t^s")
                    .replace(u("\u0427"), "ch").replace(u("\u0447"), "ch")
                    .replace(u("\u0428"), "sh").replace(u("\u0448"), "sh")
                    .replace(u("\u0429"), "shch").replace(u("\u0449"), "shch")
                    .replace(u("\u042A"), "''").replace(u("\u044A"), "''")
                    .replace(u("\u042B"), "y").replace(u("\u044B"), "y")
                    .replace(u("\u042C"), "'").replace(u("\u044C"), "'")
                    .replace(u("\u042D"), "e`").replace(u("\u044D"), "e`")
                    .replace(u("\u042E"), "i^u").replace(u("\u044E"), "i^u")
                    .replace(u("\u042F"), "i^a").replace(u("\u044F"), "i^a"))
        return word

    def __roman_to_cyrillic(self, word):
        """
        Transliterate a Russian word back into the Cyrillic alphabet.

        A Russian word formerly transliterated into the Roman alphabet
        in order to ease the stemming process, is transliterated back
        into the Cyrillic alphabet, its original form.

        :param word: The word that is transliterated.
        :type word: str or unicode
        :return: word, the transliterated word.
        :rtype: unicode
        :note: This helper method is invoked by the stem method of the subclass
               RussianStemmer. It is not to be invoked directly!
        """
        # Multi-character sequences (i^u, shch, kh, ...) must be replaced
        # before the single characters they contain, hence this ordering.
        # NOTE(review): "e" is mapped twice in this chain (6th and 19th
        # replace); the second occurrence is a no-op left from the original.
        word = (word.replace("i^u", u("\u044E")).replace("i^a", u("\u044F"))
                    .replace("shch", u("\u0449")).replace("kh", u("\u0445"))
                    .replace("t^s", u("\u0446")).replace("ch", u("\u0447"))
                    .replace("e`", u("\u044D")).replace("i`", u("\u0439"))
                    .replace("sh", u("\u0448")).replace("k", u("\u043A"))
                    .replace("e", u("\u0435")).replace("zh", u("\u0436"))
                    .replace("a", u("\u0430")).replace("b", u("\u0431"))
                    .replace("v", u("\u0432")).replace("g", u("\u0433"))
                    .replace("d", u("\u0434")).replace("e", u("\u0435"))
                    .replace("z", u("\u0437")).replace("i", u("\u0438"))
                    .replace("l", u("\u043B")).replace("m", u("\u043C"))
                    .replace("n", u("\u043D")).replace("o", u("\u043E"))
                    .replace("p", u("\u043F")).replace("r", u("\u0440"))
                    .replace("s", u("\u0441")).replace("t", u("\u0442"))
                    .replace("u", u("\u0443")).replace("f", u("\u0444"))
                    .replace("''", u("\u044A")).replace("y", u("\u044B"))
                    .replace("'", u("\u044C")))
        return word
| mit |
nagyistoce/netzob | src/netzob/Common/MMSTD/Dictionary/DataTypes/IPv4WordType.py | 1 | 8758 | # -*- coding: utf-8 -*-
#+---------------------------------------------------------------------------+
#| 01001110 01100101 01110100 01111010 01101111 01100010 |
#| |
#| Netzob : Inferring communication protocols |
#+---------------------------------------------------------------------------+
#| Copyright (C) 2011 Georges Bossert and Frédéric Guihéry |
#| This program is free software: you can redistribute it and/or modify |
#| it under the terms of the GNU General Public License as published by |
#| the Free Software Foundation, either version 3 of the License, or |
#| (at your option) any later version. |
#| |
#| This program is distributed in the hope that it will be useful, |
#| but WITHOUT ANY WARRANTY; without even the implied warranty of |
#| MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
#| GNU General Public License for more details. |
#| |
#| You should have received a copy of the GNU General Public License |
#| along with this program. If not, see <http://www.gnu.org/licenses/>. |
#+---------------------------------------------------------------------------+
#| @url : http://www.netzob.org |
#| @contact : contact@netzob.org |
#| @sponsors : Amossys, http://www.amossys.fr |
#| Supélec, http://www.rennes.supelec.fr/ren/rd/cidre/ |
#+---------------------------------------------------------------------------+
#+---------------------------------------------------------------------------+
#| Standard library imports |
#+---------------------------------------------------------------------------+
import logging
import random
import string
#+---------------------------------------------------------------------------+
#| Related third party imports |
#+---------------------------------------------------------------------------+
#+---------------------------------------------------------------------------+
#| Local application imports |
#+---------------------------------------------------------------------------+
from netzob.Common.MMSTD.Dictionary.DataTypes.AbstractWordType import AbstractWordType
class IPv4WordType(AbstractWordType):
"""IPv4WordType:
A type represented by IPv4 formatted 8-bits strings (192.168.10.100 or 0d.0d.0d.0d).
"""
TYPE = "IPv4 Word"
def __init__(self, sized, minChars=0, maxChars=0, delimiter=None):
"""Constructor of IPv4WordType:
"""
AbstractWordType.__init__(self, True, 7, 15, None)
self.log = logging.getLogger('netzob.Common.MMSTD.Dictionary.Types.IPv4WordType.py')
#+---------------------------------------------------------------------------+
#| Functions inherited from AbstractType |
#+---------------------------------------------------------------------------+
def mutateValue(self, generationStrategies, value, mutationRate=10, deletionRate=5, additionRate=5):
"""mutateValue:
We mutate only, we do not delete or add new characters, so deletionRate and additionRate are useless.
"""
mutatedValue = ""
for generationStrategy in generationStrategies:
if generationStrategy == "random":
mutNumbers = value.split('.')
# We mutate by term.
for i in range(len(mutNumbers)):
# We mutate by character.
for j in range(len(mutNumbers[i])):
dice = random.randint(0, 100)
if dice < mutationRate:
# We want to make valid IP address, so between 0 and 255.
if len(mutNumbers[i]) == 3: # The critic size of three characters: 100-255
if j == 0:
mutNumbers[i][j] = str(random.randint(0, 2))
elif j == 1:
if mutNumbers[i][0] == '2': # term = 2.. <= 255
mutNumbers[i][j] = str(random.randint(0, 5))
else:
mutNumbers[i][j] = str(random.randint(0, 9))
elif j == 2:
if mutNumbers[i][0] == '2' and mutNumbers[i][1] == '5': # term = 25. <= 255
mutNumbers[i][j] = str(random.randint(0, 5))
else:
mutNumbers[i][j] = str(random.randint(0, 9))
else:
mutNumbers[i][j] = str(random.randint(0, 9))
mutatedValue = ".".join(mutNumbers) # We do not mutate dots.
break
elif generationStrategy == "random hex":
for i in range(4):
value = value + "." + self.mutateRandomlyAString(string.hexdigits, value, mutationRate, deletionRate, additionRate)
value = value[1:]
return self.str2bin(mutatedValue)
def generateFixedSizeValue(self, generationStrategies, charSize):
"""generateFixedSizeValue:
charSize is not used, IPv4 addresses have always the same format.
"""
value = ""
for generationStrategy in generationStrategies:
if generationStrategy == "random":
for i in range(4):
value = value + "." + str(random.randint(0, 255))
value = value[1:]
break
elif generationStrategy == "random hex":
for i in range(4):
value = value + "." + self.generateRandomString(string.hexdigits, 2)
value = value[1:]
break
return self.str2bin(value)
def getType(self):
return IPv4WordType.TYPE
def suitsBinary(self, bina):
    """suitsBinary:
    Decide whether the given bit container could represent an IPv4 address,
    either in dotted decimal form (e.g. 128.215.0.16) or dotted hexadecimal
    form (e.g. a0.bb.0.8f). Returns True if either interpretation fits.

    NOTE(review): iterating ``bina.tobytes()`` yields one-character ``str``
    objects on Python 2 (which have ``.decode``) but ``int`` objects on
    Python 3, where ``byte.decode`` would raise and every input would be
    rejected -- confirm the targeted interpreter.
    """
    byteset = bina.tobytes()
    stri = ''
    ip = ''
    for byte in byteset:
        # We naively try to decode in ascii the binary.
        try:
            stri = byte.decode('ascii')
            # We search if each character is in string.hexdigits or is a dot.
            if string.hexdigits.find(stri) == -1:
                if stri != '.':
                    return False
            ip += stri
        except:
            # Non-ASCII byte (or an item without .decode): not an IP candidate.
            return False
    spip = ip.split('.')
    # An ipv4 is composed of four parts.
    if len(spip) != 4:
        return False
    # We search if the ip is in decimal format : 128.215.0.16
    decimalIP = True
    for i in range(len(spip)):
        # Each term cannot exceed 3 characters.
        if len(spip[i]) > 3:
            decimalIP = False
            break
        # Each term can contain only decimal characters.
        for char in spip[i]:
            if string.digits.find(char) == -1:
                decimalIP = False
                break
        # NOTE(review): the break above exits only the character loop; the
        # int() conversion below still runs for this term (it fails the same
        # way, so decimalIP stays False).
        # Can be seen as a second check.
        try:
            intspip = int(spip[i])
        except:
            decimalIP = False
            break
        # These terms can not exceed 255.
        if intspip > 255:
            decimalIP = False
            break
    # We search if the ip is in hex format : a0.bb.0.8f
    hexIP = True
    for i in range(len(spip)):
        # Each term cannot exceed 2 characters (all chars are already known
        # to be hexdigits from the decoding loop above).
        if len(spip[i]) > 2:
            hexIP = False
            break
    return hexIP or decimalIP
#+---------------------------------------------------------------------------+
#| Functions inherited from AbstractWordType |
#+---------------------------------------------------------------------------+
def getMaxBitSize(self, nbChars):
    """Return the largest size, in bits, of a textual IPv4 address.
    The widest form is 15 characters (four 3-digit terms plus three dots);
    nbChars is ignored because the format is fixed.
    """
    return 15 * 8
def getMinBitSize(self, nbChars):
    """Return the smallest size, in bits, of a textual IPv4 address.
    The narrowest form is 7 characters (e.g. 1.1.1.1); nbChars is ignored
    because the format is fixed.
    """
    return 7 * 8
def normalizeValue(self, value):
    """Return ``value`` unchanged; IPv4 strings require no normalization."""
    return value
| gpl-3.0 |
spektom/incubator-airflow | airflow/contrib/operators/oracle_to_oracle_transfer.py | 4 | 1213 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module is deprecated. Please use `airflow.providers.oracle.operators.oracle_to_oracle_transfer`."""
import warnings
# pylint: disable=unused-import
from airflow.providers.oracle.operators.oracle_to_oracle_transfer import OracleToOracleTransfer # noqa
warnings.warn(
"This module is deprecated. Please use `airflow.providers.oracle.operators.oracle_to_oracle_transfer`.",
DeprecationWarning, stacklevel=2
)
| apache-2.0 |
CarlosCondor/pelisalacarta-xbmc-plus | servers/cloudzer.py | 42 | 1954 | # -*- coding: utf-8 -*-
#------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Conector para cloudzer
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#------------------------------------------------------------
import urlparse,urllib2,urllib,re
import os
from core import scrapertools
from core import logger
from core import config
def test_video_exists( page_url ):
return True,""
def get_video_url( page_url , premium = False , user="" , password="", video_password="" ):
    """Resolve playable URLs for a cloudzer page.
    The connector is currently a stub: it logs the request and returns an
    empty list of candidate URLs.
    """
    logger.info("[cloudzer.py] get_video_url(page_url='%s')" % page_url)
    return []
# Finds videos from this server in the given text
def find_videos(data):
    """Scan ``data`` for cloudzer links (cloudzer.net/file/... and the
    clz.to short form) and return a list of [title, url, server] triples,
    deduplicated while preserving first-seen order.
    """
    encontrados = set()
    devuelve = []

    def _registrar(titulo, url):
        # Shared dedup/append/log logic for both URL patterns.
        if url not in encontrados:
            logger.info(" url="+url)
            devuelve.append( [ titulo , url , 'cloudzer' ] )
            encontrados.add(url)
        else:
            logger.info(" url duplicada="+url)

    #http://cloudzer.net/file/u71da1tk
    patronvideos = '(cloudzer.net/file/[a-z0-9]+)'
    logger.info("[cloudzer.py] find_videos #"+patronvideos+"#")
    for match in re.compile(patronvideos,re.DOTALL).findall(data):
        _registrar("[cloudzer.py]", "http://"+match)

    #http://clz.to/mjphp9hl
    patronvideos = '(clz.to/[a-zA-Z0-9]+)'
    logger.info("[cloudzer.py] find_videos #"+patronvideos+"#")
    for match in re.compile(patronvideos,re.DOTALL).findall(data):
        url = match.replace("clz.to/","http://cloudzer.net/file/")
        if url != "http://cloudzer.net/file/file":
            _registrar("[cloudzer.net]", url)
        else:
            # Matches the original behavior: the ".../file/file" sentinel
            # fell through to the "duplicada" log branch.
            logger.info(" url duplicada="+url)
    return devuelve
| gpl-3.0 |
birdland/dlkit-doc | dlkit/osid/metadata.py | 1 | 58888 |
class Metadata:
"""The ``Metadata`` interface defines a set of methods describing a the syntax and rules for creating and updating a data element inside an ``OsidForm``.
This interface provides a means to retrieve special restrictions
placed upon data elements such as sizes and ranges that may vary
from provider to provider or from object to object.
"""
def get_element_id(self):
    """Gets a unique ``Id`` for the data element.
    :return: an ``Id``
    :rtype: ``osid.id.Id``
    *compliance: mandatory -- This method must be implemented.*
    """
    # Interface stub: OSID providers must override; as written it returns None.
    return # osid.id.Id

# Read-only property alias for get_element_id().
element_id = property(fget=get_element_id)
def get_element_label(self):
    """Gets a display label for the data element.
    :return: a display label
    :rtype: ``osid.locale.DisplayText``
    *compliance: mandatory -- This method must be implemented.*
    """
    # Interface stub: OSID providers must override; as written it returns None.
    return # osid.locale.DisplayText

# Read-only property alias for get_element_label().
element_label = property(fget=get_element_label)
def get_instructions(self):
    """Gets instructions for updating this element value.
    This is a human readable description of the data element or
    property that may include special instructions or caveats to the
    end-user above and beyond what this interface provides.
    :return: instructions
    :rtype: ``osid.locale.DisplayText``
    *compliance: mandatory -- This method must be implemented.*
    """
    # Interface stub: OSID providers must override; as written it returns None.
    return # osid.locale.DisplayText

# Read-only property alias for get_instructions().
instructions = property(fget=get_instructions)
def get_syntax(self):
    """Gets the syntax of this data.
    :return: an enumeration indicating thetype of value
    :rtype: ``osid.Syntax``
    *compliance: mandatory -- This method must be implemented.*
    """
    # Interface stub: OSID providers must override; as written it returns None.
    return # osid.Syntax

# Read-only property alias for get_syntax().
syntax = property(fget=get_syntax)
def is_array(self):
    """Tests if this data element is an array.
    :return: ``true`` if this data is an array, ``false`` if a single element
    :rtype: ``boolean``
    *compliance: mandatory -- This method must be implemented.*
    """
    # Interface stub: OSID providers must override; as written it returns None.
    return # boolean
def is_required(self):
    """Tests if this data element is required for creating new objects.
    :return: ``true`` if this element value is required, ``false`` otherwise
    :rtype: ``boolean``
    *compliance: mandatory -- This method must be implemented.*
    """
    # Interface stub: OSID providers must override; as written it returns None.
    return # boolean
def is_read_only(self):
    """Tests if this data can be updated.
    This may indicate the result of a pre-authorization but is not a
    guarantee that an authorization failure will not occur when the
    create or update transaction is issued.
    :return: ``true`` if this data is not updatable, ``false`` otherwise
    :rtype: ``boolean``
    *compliance: mandatory -- This method must be implemented.*
    """
    # Interface stub: OSID providers must override; as written it returns None.
    return # boolean
def is_linked(self):
    """Tests if this data element is linked to other data in the object.
    Updating linked data elements should refresh all metadata and
    revalidate object elements.
    :return: true if this element is linked, false if updates have no side effect
    :rtype: ``boolean``
    *compliance: mandatory -- This method must be implemented.*
    """
    # Interface stub: OSID providers must override; as written it returns None.
    return # boolean
def is_value_known(self):
    """Tests if an existing value is known for this data element.
    If it is known that a value does not exist, then this method
    returns ``true``.
    :return: ``true`` if the element value is known, ``false`` if the element value is not known
    :rtype: ``boolean``
    *compliance: mandatory -- This method must be implemented.*
    """
    # Interface stub: OSID providers must override; as written it returns None.
    return # boolean
def has_value(self):
    """Tests if this data element has a set non-default value.
    :return: ``true`` if this element value has been set, ``false`` otherwise
    :rtype: ``boolean``
    :raise: ``IllegalState`` -- ``is_value_known()`` is ``false``
    *compliance: mandatory -- This method must be implemented.*
    """
    # Interface stub: OSID providers must override; as written it returns None.
    return # boolean
def get_units(self):
    """Gets the units of this data for display purposes ('lbs', 'gills', 'furlongs').
    :return: the display units of this data or an empty string if not applicable
    :rtype: ``osid.locale.DisplayText``
    *compliance: mandatory -- This method must be implemented.*
    """
    # Interface stub: OSID providers must override; as written it returns None.
    return # osid.locale.DisplayText

# Read-only property alias for get_units().
units = property(fget=get_units)
def get_minimum_elements(self):
    """In the case where an array or list of elements is specified in an ``OsidForm,`` this specifies the minimum number of elements that must be included.
    :return: the minimum elements or ``1`` if ``is_array()`` is ``false``
    :rtype: ``cardinal``
    *compliance: mandatory -- This method must be implemented.*
    """
    # Interface stub: OSID providers must override; as written it returns None.
    return # cardinal

# Read-only property alias for get_minimum_elements().
minimum_elements = property(fget=get_minimum_elements)
def get_maximum_elements(self):
    """In the case where an array or list of elements is specified in an ``OsidForm,`` this specifies the maximum number of elements that can be specified.
    :return: the maximum elements or ``1`` if ``is_array()`` is ``false``
    :rtype: ``cardinal``
    *compliance: mandatory -- This method must be implemented.*
    """
    # Interface stub: OSID providers must override; as written it returns None.
    return # cardinal

# Read-only property alias for get_maximum_elements().
maximum_elements = property(fget=get_maximum_elements)
def get_minimum_cardinal(self):
"""Gets the minimum cardinal value.
:return: the minimum cardinal
:rtype: ``cardinal``
:raise: ``IllegalState`` -- syntax is not a ``CARDINAL``
*compliance: mandatory -- This method must be implemented.*
"""
return # cardinal
minimum_cardinal = property(fget=get_minimum_cardinal)
def get_maximum_cardinal(self):
"""Gets the maximum cardinal value.
:return: the maximum cardinal
:rtype: ``cardinal``
:raise: ``IllegalState`` -- syntax is not a ``CARDINAL``
*compliance: mandatory -- This method must be implemented.*
"""
return # cardinal
maximum_cardinal = property(fget=get_maximum_cardinal)
def get_cardinal_set(self):
"""Gets the set of acceptable cardinal values.
:return: a set of cardinals or an empty array if not restricted
:rtype: ``cardinal``
:raise: ``IllegalState`` -- syntax is not a ``CARDINAL``
*compliance: mandatory -- This method must be implemented.*
"""
return # cardinal
cardinal_set = property(fget=get_cardinal_set)
def get_default_cardinal_values(self):
"""Gets the default cardinal values.
These are the values used if the element value is not provided
or is cleared. If ``is_array()`` is false, then this method
returns at most a single value.
:return: the default cardinal values
:rtype: ``cardinal``
:raise: ``IllegalState`` -- syntax is not a ``CARDINAL`` or ``is_required()`` is ``true``
*compliance: mandatory -- This method must be implemented.*
"""
return # cardinal
default_cardinal_values = property(fget=get_default_cardinal_values)
def get_existing_cardinal_values(self):
"""Gets the existing cardinal values.
If ``has_value()`` and ``is_required()`` are ``false,`` then
these values are the default values ````. If ``is_array()`` is
false, then this method returns at most a single value.
:return: the existing cardinal values
:rtype: ``cardinal``
:raise: ``IllegalState`` -- syntax is not a ``CARDINAL`` or ``is_value_known()`` is ``false``
*compliance: mandatory -- This method must be implemented.*
"""
return # cardinal
existing_cardinal_values = property(fget=get_existing_cardinal_values)
def get_coordinate_types(self):
"""Gets the set of acceptable coordinate types.
:return: the set of coordinate types
:rtype: ``osid.type.Type``
:raise: ``IllegalState`` -- syntax is not a ``COORDINATE or SPATIALUNIT``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.Type
coordinate_types = property(fget=get_coordinate_types)
def supports_coordinate_type(self, coordinate_type):
    """Tests if the given coordinate type is supported.
    :param coordinate_type: a coordinate Type
    :type coordinate_type: ``osid.type.Type``
    :return: ``true`` if the type is supported, ``false`` otherwise
    :rtype: ``boolean``
    :raise: ``IllegalState`` -- syntax is not a ``COORDINATE``
    :raise: ``NullArgument`` -- ``coordinate_type`` is ``null``
    *compliance: mandatory -- This method must be implemented.*
    """
    # Interface stub: OSID providers must override; as written it returns None.
    return # boolean
def get_axes_for_coordinate_type(self, coordinate_type):
"""Gets the number of axes for a given supported coordinate type.
:param coordinate_type: a coordinate Type
:type coordinate_type: ``osid.type.Type``
:return: the number of axes
:rtype: ``cardinal``
:raise: ``IllegalState`` -- syntax is not a ``COORDINATE``
:raise: ``NullArgument`` -- ``coordinate_type`` is ``null``
:raise: ``Unsupported`` -- ``supports_coordinate_type(coordinate_type)`` is ``false``
*compliance: mandatory -- This method must be implemented.*
"""
return # cardinal
def get_minimum_coordinate_values(self, coordinate_type):
"""Gets the minimum coordinate values given supported coordinate type.
:param coordinate_type: a coordinate Type
:type coordinate_type: ``osid.type.Type``
:return: the minimum coordinate values
:rtype: ``decimal``
:raise: ``IllegalState`` -- syntax is not a ``COORDINATE``
:raise: ``NullArgument`` -- ``coordinate_type`` is ``null``
:raise: ``Unsupported`` -- ``supports_coordinate_type(coordinate_type)`` is ``false``
*compliance: mandatory -- This method must be implemented.*
"""
return # decimal
def get_maximum_coordinate_values(self, coordinate_type):
"""Gets the maximum coordinate values given supported coordinate type.
:param coordinate_type: a coordinate Type
:type coordinate_type: ``osid.type.Type``
:return: the maximum coordinate values
:rtype: ``decimal``
:raise: ``IllegalState`` -- syntax is not a ``COORDINATE``
:raise: ``NullArgument`` -- ``coordinate_type`` is ``null``
:raise: ``Unsupported`` -- ``supports_coordinate_type(coordinate_type)`` is ``false``
*compliance: mandatory -- This method must be implemented.*
"""
return # decimal
def get_coordinate_set(self):
"""Gets the set of acceptable coordinate values.
:return: a set of coordinates or an empty array if not restricted
:rtype: ``osid.mapping.Coordinate``
:raise: ``IllegalState`` -- syntax is not a ``COORDINATE``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.mapping.Coordinate
coordinate_set = property(fget=get_coordinate_set)
def get_default_coordinate_values(self):
"""Gets the default coordinate values.
These are the values used if the element value is not provided
or is cleared. If ``is_array()`` is false, then this method
returns at most a single value.
:return: the default coordinate values
:rtype: ``osid.mapping.Coordinate``
:raise: ``IllegalState`` -- syntax is not a ``COORDINATE`` or ``is_required()`` is ``true``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.mapping.Coordinate
default_coordinate_values = property(fget=get_default_coordinate_values)
def get_existing_coordinate_values(self):
"""Gets the existing coordinate values.
If ``has_value()`` and ``is_required()`` are ``false,`` then
these values are the default values ````. If ``is_array()`` is
false, then this method returns at most a single value.
:return: the existing coordinate values
:rtype: ``osid.mapping.Coordinate``
:raise: ``IllegalState`` -- syntax is not a ``COORDINATE`` or ``is_value_known()`` is ``false``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.mapping.Coordinate
existing_coordinate_values = property(fget=get_existing_coordinate_values)
def get_currency_types(self):
"""Gets the set of acceptable currency types.
:return: the set of currency types
:rtype: ``osid.type.Type``
:raise: ``IllegalState`` -- syntax is not a ``CURRENCY``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.Type
currency_types = property(fget=get_currency_types)
def supports_currency_type(self, currency_type):
"""Tests if the given currency type is supported.
:param currency_type: a currency Type
:type currency_type: ``osid.type.Type``
:return: ``true`` if the type is supported, ``false`` otherwise
:rtype: ``boolean``
:raise: ``IllegalState`` -- syntax is not a ``CURRENCY``
:raise: ``NullArgument`` -- ``currency_type`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def get_minimum_currency(self):
"""Gets the minimum currency value.
:return: the minimum currency
:rtype: ``osid.financials.Currency``
:raise: ``IllegalState`` -- syntax is not a ``CURRENCY``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.financials.Currency
minimum_currency = property(fget=get_minimum_currency)
def get_maximum_currency(self):
"""Gets the maximum currency value.
:return: the maximum currency
:rtype: ``osid.financials.Currency``
:raise: ``IllegalState`` -- syntax is not a ``CURRENCY``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.financials.Currency
maximum_currency = property(fget=get_maximum_currency)
def get_currency_set(self):
"""Gets the set of acceptable currency values.
:return: a set of currencies or an empty array if not restricted
:rtype: ``osid.financials.Currency``
:raise: ``IllegalState`` -- syntax is not a ``CURRENCY``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.financials.Currency
currency_set = property(fget=get_currency_set)
def get_default_currency_values(self):
"""Gets the default currency values.
These are the values used if the element value is not provided
or is cleared. If ``is_array()`` is false, then this method
returns at most a single value.
:return: the default currency values
:rtype: ``osid.financials.Currency``
:raise: ``IllegalState`` -- syntax is not a ``CURRENCY`` or ``is_required()`` is ``true``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.financials.Currency
default_currency_values = property(fget=get_default_currency_values)
def get_existing_currency_values(self):
"""Gets the existing currency values.
If ``has_value()`` and ``is_required()`` are ``false,`` then
these values are the default values ````. If ``is_array()`` is
false, then this method returns at most a single value.
:return: the existing currency values
:rtype: ``osid.financials.Currency``
:raise: ``IllegalState`` -- syntax is not a ``CURRENCY`` or ``is_value_known()`` is ``false``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.financials.Currency
existing_currency_values = property(fget=get_existing_currency_values)
def get_date_time_resolution(self):
"""Gets the smallest resolution of the date time value.
:return: the resolution
:rtype: ``osid.calendaring.DateTimeResolution``
:raise: ``IllegalState`` -- syntax is not a ``DATETIME, DURATION`` , or ``TIME``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.calendaring.DateTimeResolution
date_time_resolution = property(fget=get_date_time_resolution)
def get_calendar_types(self):
"""Gets the set of acceptable calendar types.
:return: the set of calendar types
:rtype: ``osid.type.Type``
:raise: ``IllegalState`` -- syntax is not a ``DATETIME`` or ``DURATION``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.Type
calendar_types = property(fget=get_calendar_types)
def supports_calendar_type(self, calendar_type):
"""Tests if the given calendar type is supported.
:param calendar_type: a calendar Type
:type calendar_type: ``osid.type.Type``
:return: ``true`` if the type is supported, ``false`` otherwise
:rtype: ``boolean``
:raise: ``IllegalState`` -- syntax is not a ``DATETIME`` or ``DURATION``
:raise: ``NullArgument`` -- ``calendar_type`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def get_time_types(self):
"""Gets the set of acceptable time types.
:return: a set of time types or an empty array if not restricted
:rtype: ``osid.type.Type``
:raise: ``IllegalState`` -- syntax is not a ``DATETIME, DURATION,`` or ``TIME``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.Type
time_types = property(fget=get_time_types)
def supports_time_type(self, time_type):
"""Tests if the given time type is supported.
:param time_type: a time Type
:type time_type: ``osid.type.Type``
:return: ``true`` if the type is supported, ``false`` otherwise
:rtype: ``boolean``
:raise: ``IllegalState`` -- syntax is not a ``DATETIME, DURATION,`` or ``TIME``
:raise: ``NullArgument`` -- ``time_type`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
def get_minimum_date_time(self):
"""Gets the minimum date time value.
:return: the minimum value
:rtype: ``osid.calendaring.DateTime``
:raise: ``IllegalState`` -- syntax is not a ``DATETIME``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.calendaring.DateTime
minimum_date_time = property(fget=get_minimum_date_time)
def get_maximum_date_time(self):
"""Gets the maximum date time value.
:return: the maximum value
:rtype: ``osid.calendaring.DateTime``
:raise: ``IllegalState`` -- syntax is not a ``DATETIME``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.calendaring.DateTime
maximum_date_time = property(fget=get_maximum_date_time)
def get_date_time_set(self):
"""Gets the set of acceptable date time values.
:return: a set of values or an empty array if not restricted
:rtype: ``osid.calendaring.DateTime``
:raise: ``IllegalState`` -- syntax is not a ``DATETIME``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.calendaring.DateTime
date_time_set = property(fget=get_date_time_set)
def get_default_date_time_values(self):
"""Gets the default date time values.
These are the values used if the element value is not provided
or is cleared. If ``is_array()`` is false, then this method
returns at most a single value.
:return: the default date time values
:rtype: ``osid.calendaring.DateTime``
:raise: ``IllegalState`` -- syntax is not a ``DATETIME`` or ``is_required()`` is ``true``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.calendaring.DateTime
default_date_time_values = property(fget=get_default_date_time_values)
def get_existing_date_time_values(self):
"""Gets the existing date time values.
If ``has_value()`` and ``is_required()`` are ``false,`` then
these values are the default values ````. If ``is_array()`` is
false, then this method returns at most a single value.
:return: the existing date time values
:rtype: ``osid.calendaring.DateTime``
:raise: ``IllegalState`` -- syntax is not a ``DATETIME`` or ``is_value_known()`` is ``false``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.calendaring.DateTime
existing_date_time_values = property(fget=get_existing_date_time_values)
def get_decimal_scale(self):
"""Gets the number of digits to the right of the decimal point.
:return: the scale
:rtype: ``cardinal``
:raise: ``IllegalState`` -- syntax is not a ``DECIMAL``
*compliance: mandatory -- This method must be implemented.*
"""
return # cardinal
decimal_scale = property(fget=get_decimal_scale)
def get_minimum_decimal(self):
"""Gets the minimum decimal value.
:return: the minimum decimal
:rtype: ``decimal``
:raise: ``IllegalState`` -- syntax is not a ``DECIMAL``
*compliance: mandatory -- This method must be implemented.*
"""
return # decimal
minimum_decimal = property(fget=get_minimum_decimal)
def get_maximum_decimal(self):
"""Gets the maximum decimal value.
:return: the maximum decimal
:rtype: ``decimal``
:raise: ``IllegalState`` -- syntax is not a ``DECIMAL``
*compliance: mandatory -- This method must be implemented.*
"""
return # decimal
maximum_decimal = property(fget=get_maximum_decimal)
def get_decimal_set(self):
"""Gets the set of acceptable decimal values.
:return: a set of decimals or an empty array if not restricted
:rtype: ``decimal``
:raise: ``IllegalState`` -- syntax is not a ``DECIMAL``
*compliance: mandatory -- This method must be implemented.*
"""
return # decimal
decimal_set = property(fget=get_decimal_set)
def get_default_decimal_values(self):
"""Gets the default decimal values.
These are the values used if the element value is not provided
or is cleared. If ``is_array()`` is false, then this method
returns at most a single value.
:return: the default decimal values
:rtype: ``decimal``
:raise: ``IllegalState`` -- syntax is not a ``DECIMAL`` or ``is_required()`` is ``true``
*compliance: mandatory -- This method must be implemented.*
"""
return # decimal
default_decimal_values = property(fget=get_default_decimal_values)
def get_existing_decimal_values(self):
"""Gets the existing decimal values.
If ``has_value()`` and ``is_required()`` are ``false,`` then
these values are the default values ````. If ``is_array()`` is
false, then this method returns at most a single value.
:return: the existing decimal values
:rtype: ``decimal``
:raise: ``IllegalState`` -- syntax is not a ``DECIMAL`` or ``is_value_known()`` is ``false``
*compliance: mandatory -- This method must be implemented.*
"""
return # decimal
existing_decimal_values = property(fget=get_existing_decimal_values)
def get_distance_resolution(self):
"""Gets the smallest resolution of the distance value.
:return: the resolution
:rtype: ``osid.mapping.DistanceResolution``
:raise: ``IllegalState`` -- syntax is not a ``DISTANCE``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.mapping.DistanceResolution
distance_resolution = property(fget=get_distance_resolution)
def get_minimum_distance(self):
"""Gets the minimum distance value.
:return: the minimum value
:rtype: ``osid.mapping.Distance``
:raise: ``IllegalState`` -- syntax is not a ``DISTANCE``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.mapping.Distance
minimum_distance = property(fget=get_minimum_distance)
def get_maximum_distance(self):
"""Gets the maximum distance value.
:return: the maximum value
:rtype: ``osid.mapping.Distance``
:raise: ``IllegalState`` -- syntax is not a ``DISTANCE``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.mapping.Distance
maximum_distance = property(fget=get_maximum_distance)
def get_distance_set(self):
"""Gets the set of acceptable distance values.
:return: a set of values or an empty array if not restricted
:rtype: ``osid.mapping.Distance``
:raise: ``IllegalState`` -- syntax is not a ``DISTANCE``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.mapping.Distance
distance_set = property(fget=get_distance_set)
def get_default_distance_values(self):
"""Gets the default distance values.
These are the values used if the element value is not provided
or is cleared. If ``is_array()`` is false, then this method
returns at most a single value.
:return: the default distance values
:rtype: ``osid.mapping.Distance``
:raise: ``IllegalState`` -- syntax is not a ``DISTANCE`` or ``is_required()`` is ``true``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.mapping.Distance
default_distance_values = property(fget=get_default_distance_values)
def get_existing_distance_values(self):
"""Gets the existing distance values.
If ``has_value()`` and ``is_required()`` are ``false,`` then
these values are the default values ````. If ``is_array()`` is
false, then this method returns at most a single value.
:return: the existing distance values
:rtype: ``osid.mapping.Distance``
:raise: ``IllegalState`` -- syntax is not a ``DISTANCE`` or ``is_value_known()`` is ``false``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.mapping.Distance
existing_distance_values = property(fget=get_existing_distance_values)
def get_minimum_duration(self):
"""Gets the minimum duration.
:return: the minimum duration
:rtype: ``osid.calendaring.Duration``
:raise: ``IllegalState`` -- syntax is not a ``DURATION``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.calendaring.Duration
minimum_duration = property(fget=get_minimum_duration)
def get_maximum_duration(self):
"""Gets the maximum duration.
:return: the maximum duration
:rtype: ``osid.calendaring.Duration``
:raise: ``IllegalState`` -- syntax is not a ``DURATION``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.calendaring.Duration
maximum_duration = property(fget=get_maximum_duration)
def get_duration_set(self):
"""Gets the set of acceptable duration values.
:return: a set of durations or an empty array if not restricted
:rtype: ``osid.calendaring.Duration``
:raise: ``IllegalState`` -- syntax is not a ``DURATION``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.calendaring.Duration
duration_set = property(fget=get_duration_set)
def get_default_duration_values(self):
"""Gets the default duration values.
These are the values used if the element value is not provided
or is cleared. If ``is_array()`` is false, then this method
returns at most at most a single value.
:return: the default duration values
:rtype: ``osid.calendaring.Duration``
:raise: ``IllegalState`` -- syntax is not a DURATION or ``is_required()`` is ``true``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.calendaring.Duration
default_duration_values = property(fget=get_default_duration_values)
def get_existing_duration_values(self):
"""Gets the existing duration values.
If ``has_value()`` and ``is_required()`` are ``false,`` then
these values are the default values ````. If ``is_array()`` is
false, then this method returns at most a single value.
:return: the existing duration values
:rtype: ``osid.calendaring.Duration``
:raise: ``IllegalState`` -- syntax is not a ``DURATION`` or ``is_value_known()`` is ``false``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.calendaring.Duration
existing_duration_values = property(fget=get_existing_duration_values)
def get_heading_types(self):
    """Gets the set of acceptable heading types.

    :return: a set of heading types or an empty array if not restricted
    :rtype: ``osid.type.Type``
    :raise: ``IllegalState`` -- syntax is not a ``HEADING``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # osid.type.Type

heading_types = property(fget=get_heading_types)

def supports_heading_type(self, heading_type):
    """Tests if the given heading type is supported.

    :param heading_type: a heading Type
    :type heading_type: ``osid.type.Type``
    :return: ``true`` if the type is supported, ``false`` otherwise
    :rtype: ``boolean``
    :raise: ``IllegalState`` -- syntax is not a ``HEADING``
    :raise: ``NullArgument`` -- ``heading_type`` is ``null``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # boolean

def get_axes_for_heading_type(self, heading_type):
    """Gets the number of axes for a given supported heading type.

    :param heading_type: a heading Type
    :type heading_type: ``osid.type.Type``
    :return: the number of axes
    :rtype: ``cardinal``
    :raise: ``IllegalState`` -- syntax is not a ``HEADING``
    :raise: ``NullArgument`` -- ``heading_type`` is ``null``
    :raise: ``Unsupported`` -- ``supports_heading_type(heading_type)`` is ``false``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # cardinal

def get_minimum_heading_values(self, heading_type):
    """Gets the minimum heading values given supported heading type.

    :param heading_type: a heading Type
    :type heading_type: ``osid.type.Type``
    :return: the minimum heading values
    :rtype: ``decimal``
    :raise: ``IllegalState`` -- syntax is not a ``HEADING``
    :raise: ``NullArgument`` -- ``heading_type`` is ``null``
    :raise: ``Unsupported`` -- ``supports_heading_type(heading_type)`` is ``false``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # decimal

def get_maximum_heading_values(self, heading_type):
    """Gets the maximum heading values given supported heading type.

    :param heading_type: a heading Type
    :type heading_type: ``osid.type.Type``
    :return: the maximum heading values
    :rtype: ``decimal``
    :raise: ``IllegalState`` -- syntax is not a ``HEADING``
    :raise: ``NullArgument`` -- ``heading_type`` is ``null``
    :raise: ``Unsupported`` -- ``supports_heading_type(heading_type)`` is ``false``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # decimal

def get_heading_set(self):
    """Gets the set of acceptable heading values.

    :return: the set of heading
    :rtype: ``osid.mapping.Heading``
    :raise: ``IllegalState`` -- syntax is not a ``HEADING``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # osid.mapping.Heading

heading_set = property(fget=get_heading_set)

def get_default_heading_values(self):
    """Gets the default heading values.

    These are the values used if the element value is not provided
    or is cleared. If ``is_array()`` is false, then this method
    returns at most a single value.

    :return: the default heading values
    :rtype: ``osid.mapping.Heading``
    :raise: ``IllegalState`` -- syntax is not a ``HEADING`` or ``is_required()`` is ``true``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # osid.mapping.Heading

default_heading_values = property(fget=get_default_heading_values)

def get_existing_heading_values(self):
    """Gets the existing heading values.

    If ``has_value()`` and ``is_required()`` are ``false,`` then
    these values are the default values ````. If ``is_array()`` is
    false, then this method returns at most a single value.

    :return: the existing heading values
    :rtype: ``osid.mapping.Heading``
    :raise: ``IllegalState`` -- syntax is not a ``HEADING`` or ``is_value_known()`` is ``false``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # osid.mapping.Heading

existing_heading_values = property(fget=get_existing_heading_values)
def get_id_set(self):
    """Gets the set of acceptable ``Ids``.

    :return: a set of ``Ids`` or an empty array if not restricted
    :rtype: ``osid.id.Id``
    :raise: ``IllegalState`` -- syntax is not an ``ID``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # osid.id.Id

id_set = property(fget=get_id_set)

def get_default_id_values(self):
    """Gets the default ``Id`` values.

    These are the values used if the element value is not provided
    or is cleared. If ``is_array()`` is false, then this method
    returns at most a single value.

    :return: the default ``Id`` values
    :rtype: ``osid.id.Id``
    :raise: ``IllegalState`` -- syntax is not an ``ID`` or ``is_required()`` is ``true``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # osid.id.Id

default_id_values = property(fget=get_default_id_values)

def get_existing_id_values(self):
    """Gets the existing ``Id`` values.

    If ``has_value()`` and ``is_required()`` are ``false,`` then
    these values are the default values ````. If ``is_array()`` is
    false, then this method returns at most a single value.

    :return: the existing ``Id`` values
    :rtype: ``osid.id.Id``
    :raise: ``IllegalState`` -- syntax is not an ``ID``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # osid.id.Id

existing_id_values = property(fget=get_existing_id_values)
def get_minimum_integer(self):
    """Gets the minimum integer value.

    :return: the minimum value
    :rtype: ``integer``
    :raise: ``IllegalState`` -- syntax is not an ``INTEGER``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # integer

minimum_integer = property(fget=get_minimum_integer)

def get_maximum_integer(self):
    """Gets the maximum integer value.

    :return: the maximum value
    :rtype: ``integer``
    :raise: ``IllegalState`` -- syntax is not an ``INTEGER``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # integer

maximum_integer = property(fget=get_maximum_integer)

def get_integer_set(self):
    """Gets the set of acceptable integer values.

    :return: a set of values or an empty array if not restricted
    :rtype: ``integer``
    :raise: ``IllegalState`` -- syntax is not an ``INTEGER``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # integer

integer_set = property(fget=get_integer_set)

def get_default_integer_values(self):
    """Gets the default integer values.

    These are the values used if the element value is not provided
    or is cleared. If ``is_array()`` is false, then this method
    returns at most a single value.

    :return: the default integer values
    :rtype: ``integer``
    :raise: ``IllegalState`` -- syntax is not an ``INTEGER`` or ``is_required()`` is ``true``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # integer

default_integer_values = property(fget=get_default_integer_values)

def get_existing_integer_values(self):
    """Gets the existing integer values.

    If ``has_value()`` and ``is_required()`` are ``false,`` then
    these values are the default values ````. If ``is_array()`` is
    false, then this method returns at most a single value.

    :return: the existing integer values
    :rtype: ``integer``
    :raise: ``IllegalState`` -- syntax is not an ``INTEGER`` or ``is_value_known()`` is ``false``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # integer

existing_integer_values = property(fget=get_existing_integer_values)
def get_object_types(self):
    """Gets the set of acceptable ``Types`` for an arbitrary object.

    :return: a set of ``Types`` or an empty array if not restricted
    :rtype: ``osid.type.Type``
    :raise: ``IllegalState`` -- syntax is not an ``OBJECT``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # osid.type.Type

object_types = property(fget=get_object_types)

def supports_object_type(self, object_type):
    """Tests if the given object type is supported.

    :param object_type: an object Type
    :type object_type: ``osid.type.Type``
    :return: ``true`` if the type is supported, ``false`` otherwise
    :rtype: ``boolean``
    :raise: ``IllegalState`` -- syntax is not an ``OBJECT``
    :raise: ``NullArgument`` -- ``object_type`` is ``null``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # boolean

def get_object_set(self):
    """Gets the set of acceptable object values.

    :return: a set of values or an empty array if not restricted
    :rtype: ``object``
    :raise: ``IllegalState`` -- syntax is not an ``OBJECT``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # object

object_set = property(fget=get_object_set)

def get_default_object_values(self):
    """Gets the default object values.

    These are the values used if the element value is not provided
    or is cleared. If ``is_array()`` is false, then this method
    returns at most a single value.

    :return: the default object values
    :rtype: ``object``
    :raise: ``IllegalState`` -- syntax is not an ``OBJECT`` or ``is_required()`` is ``true``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # object

default_object_values = property(fget=get_default_object_values)

def get_existing_object_values(self):
    """Gets the existing object values.

    If ``has_value()`` and ``is_required()`` are ``false,`` then
    these values are the default values ````. If ``is_array()`` is
    false, then this method returns at most a single value.

    :return: the existing object values
    :rtype: ``object``
    :raise: ``IllegalState`` -- syntax is not an OBJECT or ``is_value_known()`` is ``false``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # object

existing_object_values = property(fget=get_existing_object_values)
def get_spatial_unit_record_types(self):
    """Gets the set of acceptable spatial unit record types.

    :return: the set of spatial unit types
    :rtype: ``osid.type.Type``
    :raise: ``IllegalState`` -- syntax is not ``SPATIALUNIT``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # osid.type.Type

spatial_unit_record_types = property(fget=get_spatial_unit_record_types)

def supports_spatial_unit_record_type(self, spatial_unit_record_type):
    """Tests if the given spatial unit record type is supported.

    :param spatial_unit_record_type: a spatial unit record Type
    :type spatial_unit_record_type: ``osid.type.Type``
    :return: ``true`` if the type is supported, ``false`` otherwise
    :rtype: ``boolean``
    :raise: ``IllegalState`` -- syntax is not a ``SPATIALUNIT``
    :raise: ``NullArgument`` -- ``spatial_unit_record_type`` is ``null``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # boolean

def get_spatial_unit_set(self):
    """Gets the set of acceptable spatial unit values.

    :return: a set of spatial units or an empty array if not restricted
    :rtype: ``osid.mapping.SpatialUnit``
    :raise: ``IllegalState`` -- syntax is not a ``SPATIALUNIT``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # osid.mapping.SpatialUnit

spatial_unit_set = property(fget=get_spatial_unit_set)

def get_default_spatial_unit_values(self):
    """Gets the default spatial unit values.

    These are the values used if the element value is not provided
    or is cleared. If ``is_array()`` is false, then this method
    returns at most a single value.

    :return: the default spatial unit values
    :rtype: ``osid.mapping.SpatialUnit``
    :raise: ``IllegalState`` -- syntax is not a ``SPATIALUNIT`` or ``is_required()`` is ``true``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # osid.mapping.SpatialUnit

default_spatial_unit_values = property(fget=get_default_spatial_unit_values)

def get_existing_spatial_unit_values(self):
    """Gets the existing spatial unit values.

    If ``has_value()`` and ``is_required()`` are ``false,`` then
    these values are the default values ````. If ``is_array()`` is
    false, then this method returns at most a single value.

    :return: the existing spatial unit values
    :rtype: ``osid.mapping.SpatialUnit``
    :raise: ``IllegalState`` -- syntax is not a SPATIALUNIT or ``is_value_known()`` is ``false``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # osid.mapping.SpatialUnit

existing_spatial_unit_values = property(fget=get_existing_spatial_unit_values)
def get_minimum_speed(self):
    """Gets the minimum speed value.

    :return: the minimum speed
    :rtype: ``osid.mapping.Speed``
    :raise: ``IllegalState`` -- syntax is not a ``SPEED``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # osid.mapping.Speed

minimum_speed = property(fget=get_minimum_speed)

def get_maximum_speed(self):
    """Gets the maximum speed value.

    :return: the maximum speed
    :rtype: ``osid.mapping.Speed``
    :raise: ``IllegalState`` -- syntax is not a ``SPEED``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # osid.mapping.Speed

maximum_speed = property(fget=get_maximum_speed)

def get_speed_set(self):
    """Gets the set of acceptable speed values.

    :return: a set of speeds or an empty array if not restricted
    :rtype: ``osid.mapping.Speed``
    :raise: ``IllegalState`` -- syntax is not a ``SPEED``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # osid.mapping.Speed

speed_set = property(fget=get_speed_set)

def get_default_speed_values(self):
    """Gets the default speed values.

    These are the values used if the element value is not provided
    or is cleared. If ``is_array()`` is false, then this method
    returns at most a single value.

    :return: the default speed values
    :rtype: ``osid.mapping.Speed``
    :raise: ``IllegalState`` -- syntax is not a ``SPEED`` or ``is_required()`` is ``true``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # osid.mapping.Speed

default_speed_values = property(fget=get_default_speed_values)

def get_existing_speed_values(self):
    """Gets the existing speed values.

    If ``has_value()`` and ``is_required()`` are ``false,`` then
    these values are the default values ````. If ``is_array()`` is
    false, then this method returns at most a single value.

    :return: the existing speed values
    :rtype: ``osid.mapping.Speed``
    :raise: ``IllegalState`` -- syntax is not a ``SPEED`` or ``is_value_known()`` is ``false``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # osid.mapping.Speed

existing_speed_values = property(fget=get_existing_speed_values)
def get_minimum_string_length(self):
    """Gets the minimum string length.

    :return: the minimum string length
    :rtype: ``cardinal``
    :raise: ``IllegalState`` -- syntax is not a ``STRING``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # cardinal

minimum_string_length = property(fget=get_minimum_string_length)

def get_maximum_string_length(self):
    """Gets the maximum string length.

    :return: the maximum string length
    :rtype: ``cardinal``
    :raise: ``IllegalState`` -- syntax is not a ``STRING``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # cardinal

maximum_string_length = property(fget=get_maximum_string_length)

def get_string_match_types(self):
    """Gets the set of valid string match types for use in validating a string.

    If the string match type indicates a regular expression then
    ``get_string_expression()`` returns a regular expression.

    :return: the set of string match types
    :rtype: ``osid.type.Type``
    :raise: ``IllegalState`` -- syntax is not a ``STRING``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # osid.type.Type

string_match_types = property(fget=get_string_match_types)

def supports_string_match_type(self, string_match_type):
    """Tests if the given string match type is supported.

    :param string_match_type: a string match type
    :type string_match_type: ``osid.type.Type``
    :return: ``true`` if the given string match type Is supported, ``false`` otherwise
    :rtype: ``boolean``
    :raise: ``IllegalState`` -- syntax is not a ``STRING``
    :raise: ``NullArgument`` -- ``string_match_type`` is ``null``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # boolean

def get_string_expression(self, string_match_type):
    """Gets the regular expression of an acceptable string for the given string match type.

    :param string_match_type: a string match type
    :type string_match_type: ``osid.type.Type``
    :return: the regular expression
    :rtype: ``string``
    :raise: ``NullArgument`` -- ``string_match_type`` is ``null``
    :raise: ``IllegalState`` -- syntax is not a ``STRING``
    :raise: ``Unsupported`` -- ``supports_string_match_type(string_match_type`` ) is ``false``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # string

def get_string_format_types(self):
    """Gets the set of valid string formats.

    :return: the set of valid text format types
    :rtype: ``osid.type.Type``
    :raise: ``IllegalState`` -- syntax is not a ``STRING``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # osid.type.Type

string_format_types = property(fget=get_string_format_types)

def get_string_set(self):
    """Gets the set of acceptable string values.

    :return: a set of strings or an empty array if not restricted
    :rtype: ``string``
    :raise: ``IllegalState`` -- syntax is not a ``STRING``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # string

string_set = property(fget=get_string_set)

def get_default_string_values(self):
    """Gets the default string values.

    These are the values used if the element value is not provided
    or is cleared. If ``is_array()`` is false, then this method
    returns at most a single value.

    :return: the default string values
    :rtype: ``string``
    :raise: ``IllegalState`` -- syntax is not a ``STRING`` or ``is_required()`` is ``true``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # string

default_string_values = property(fget=get_default_string_values)

def get_existing_string_values(self):
    """Gets the existing string values.

    If ``has_value()`` and ``is_required()`` are ``false,`` then
    these values are the default values ````. If ``is_array()`` is
    false, then this method returns at most a single value.

    :return: the existing string values
    :rtype: ``string``
    :raise: ``IllegalState`` -- syntax is not a ``STRING`` or ``is_value_known()`` is ``false``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # string

existing_string_values = property(fget=get_existing_string_values)
def get_minimum_time(self):
    """Gets the minimum time value.

    :return: the minimum time
    :rtype: ``osid.calendaring.Time``
    :raise: ``IllegalState`` -- syntax is not a ``TIME``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # osid.calendaring.Time

minimum_time = property(fget=get_minimum_time)

def get_maximum_time(self):
    """Gets the maximum time value.

    :return: the maximum time
    :rtype: ``osid.calendaring.Time``
    :raise: ``IllegalState`` -- syntax is not a ``TIME``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # osid.calendaring.Time

maximum_time = property(fget=get_maximum_time)

def get_time_set(self):
    """Gets the set of acceptable time values.

    :return: a set of times or an empty array if not restricted
    :rtype: ``osid.calendaring.Time``
    :raise: ``IllegalState`` -- syntax is not a ``TIME``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # osid.calendaring.Time

time_set = property(fget=get_time_set)

def get_default_time_values(self):
    """Gets the default time values.

    These are the values used if the element value is not provided
    or is cleared. If ``is_array()`` is false, then this method
    returns at most a single value.

    :return: the default time values
    :rtype: ``osid.calendaring.Time``
    :raise: ``IllegalState`` -- syntax is not a ``TIME`` or ``is_required()`` is ``true``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # osid.calendaring.Time

default_time_values = property(fget=get_default_time_values)

def get_existing_time_values(self):
    """Gets the existing time values.

    If ``has_value()`` and ``is_required()`` are ``false,`` then
    these values are the default values ````. If ``is_array()`` is
    false, then this method returns at most a single value.

    :return: the existing time values
    :rtype: ``osid.calendaring.Time``
    :raise: ``IllegalState`` -- syntax is not a ``TIME`` or ``is_value_known()`` is ``false``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # osid.calendaring.Time

existing_time_values = property(fget=get_existing_time_values)
def get_type_set(self):
    """Gets the set of acceptable ``Types``.

    :return: a set of ``Types`` or an empty array if not restricted
    :rtype: ``osid.type.Type``
    :raise: ``IllegalState`` -- syntax is not a ``TYPE``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # osid.type.Type

type_set = property(fget=get_type_set)

def get_default_type_values(self):
    """Gets the default type values.

    These are the values used if the element value is not provided
    or is cleared. If ``is_array()`` is false, then this method
    returns at most a single value.

    :return: the default type values
    :rtype: ``osid.type.Type``
    :raise: ``IllegalState`` -- syntax is not a ``TYPE`` or ``is_required()`` is ``true``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # osid.type.Type

default_type_values = property(fget=get_default_type_values)

def get_existing_type_values(self):
    """Gets the existing type values.

    If ``has_value()`` and ``is_required()`` are ``false,`` then
    these values are the default values ````. If ``is_array()`` is
    false, then this method returns at most a single value.

    :return: the existing type values
    :rtype: ``osid.type.Type``
    :raise: ``IllegalState`` -- syntax is not a ``TYPE`` or ``is_value_known()`` is ``false``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # osid.type.Type

existing_type_values = property(fget=get_existing_type_values)
def get_version_types(self):
    """Gets the set of acceptable version types.

    :return: the set of version types
    :rtype: ``osid.type.Type``
    :raise: ``IllegalState`` -- syntax is not a ``VERSION``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # osid.type.Type

version_types = property(fget=get_version_types)

def supports_version_type(self, version_type):
    """Tests if the given version type is supported.

    :param version_type: a version Type
    :type version_type: ``osid.type.Type``
    :return: ``true`` if the type is supported, ``false`` otherwise
    :rtype: ``boolean``
    :raise: ``IllegalState`` -- syntax is not a ``VERSION``
    :raise: ``NullArgument`` -- ``version_type`` is ``null``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # boolean

def get_minimum_version(self):
    """Gets the minimum acceptable ``Version``.

    :return: the minimum ``Version``
    :rtype: ``osid.installation.Version``
    :raise: ``IllegalState`` -- syntax is not a ``VERSION``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # osid.installation.Version

minimum_version = property(fget=get_minimum_version)

def get_maximum_version(self):
    """Gets the maximum acceptable ``Version``.

    :return: the maximum ``Version``
    :rtype: ``osid.installation.Version``
    :raise: ``IllegalState`` -- syntax is not a ``VERSION``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # osid.installation.Version

maximum_version = property(fget=get_maximum_version)

def get_version_set(self):
    """Gets the set of acceptable ``Versions``.

    :return: a set of ``Versions`` or an empty array if not restricted
    :rtype: ``osid.installation.Version``
    :raise: ``IllegalState`` -- syntax is not a ``VERSION``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # osid.installation.Version

version_set = property(fget=get_version_set)

def get_default_version_values(self):
    """Gets the default version values.

    These are the values used if the element value is not provided
    or is cleared. If ``is_array()`` is false, then this method
    returns at most a single value.

    :return: the default version values
    :rtype: ``osid.installation.Version``
    :raise: ``IllegalState`` -- syntax is not a ``VERSION`` or ``is_required()`` is ``true``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # osid.installation.Version

default_version_values = property(fget=get_default_version_values)

def get_existing_version_values(self):
    """Gets the existing version values.

    If ``has_value()`` and ``is_required()`` are ``false,`` then
    these values are the default values ````. If ``is_array()`` is
    false, then this method returns at most a single value.

    :return: the existing version values
    :rtype: ``osid.installation.Version``
    :raise: ``IllegalState`` -- syntax is not a ``VERSION`` or ``is_value_known()`` is ``false``

    *compliance: mandatory -- This method must be implemented.*

    """
    return # osid.installation.Version

existing_version_values = property(fget=get_existing_version_values)
| mit |
Dahlgren/HTPC-Manager | libs/mako/parsetree.py | 60 | 20434 | # mako/parsetree.py
# Copyright (C) 2006-2015 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""defines the parse tree components for Mako templates."""
from mako import exceptions, ast, util, filters, compat
import re
class Node(object):
    """Base class for every node in the Mako parse tree.

    Records the source text plus the (lineno, pos, filename) location
    so that errors raised later can point back at the template.
    """

    def __init__(self, source, lineno, pos, filename):
        self.source = source
        self.lineno = lineno
        self.pos = pos
        self.filename = filename

    @property
    def exception_kwargs(self):
        """Keyword arguments suitable for raising a positioned exception."""
        return dict(source=self.source, lineno=self.lineno,
                    pos=self.pos, filename=self.filename)

    def get_children(self):
        # leaf nodes by default; container subclasses override this
        return []

    def accept_visitor(self, visitor):
        """Dispatch to ``visitor.visit<ClassName>`` if present.

        When the visitor has no matching method, descend into the
        children instead so traversal continues.
        """
        def descend(node):
            for child in node.get_children():
                child.accept_visitor(visitor)
        handler = getattr(
            visitor, "visit" + self.__class__.__name__, descend)
        handler(self)
class TemplateNode(Node):
    """Root container node: holds the template's full node collection."""

    def __init__(self, filename):
        # the root spans the whole document, so its location is (0, 0)
        super(TemplateNode, self).__init__('', 0, 0, filename)
        self.nodes = []
        self.page_attributes = {}

    def get_children(self):
        return self.nodes

    def __repr__(self):
        attrs = util.sorted_dict_repr(self.page_attributes)
        return "TemplateNode(%s, %r)" % (attrs, self.nodes)
class ControlLine(Node):
    """defines a control line, a line-oriented python line or end tag.

    e.g.::

        % if foo:
            (markup)
        % endif

    """

    has_loop_context = False

    def __init__(self, keyword, isend, text, **kwargs):
        super(ControlLine, self).__init__(**kwargs)
        self.text = text
        self.keyword = keyword
        self.isend = isend
        # "primary" keywords open a new block; ternary keywords such as
        # "else"/"except" attach to an already-open primary keyword
        self.is_primary = keyword in ['for', 'if', 'while', 'try', 'with']
        self.nodes = []
        if self.isend:
            # an end tag ("% endif") carries no Python code to analyze
            self._declared_identifiers = []
            self._undeclared_identifiers = []
        else:
            # parse the line as a Python fragment to collect identifiers
            code = ast.PythonFragment(text, **self.exception_kwargs)
            self._declared_identifiers = code.declared_identifiers
            self._undeclared_identifiers = code.undeclared_identifiers

    def get_children(self):
        return self.nodes

    def declared_identifiers(self):
        return self._declared_identifiers

    def undeclared_identifiers(self):
        return self._undeclared_identifiers

    def is_ternary(self, keyword):
        """return true if the given keyword is a ternary keyword
        for this ControlLine"""
        # map each primary keyword to the secondary keywords it accepts
        return keyword in {
            'if': set(['else', 'elif']),
            'try': set(['except', 'finally']),
            'for': set(['else'])
        }.get(self.keyword, [])

    def __repr__(self):
        return "ControlLine(%r, %r, %r, %r)" % (
            self.keyword,
            self.text,
            self.isend,
            (self.lineno, self.pos)
        )
class Text(Node):
    """A run of plain (non-directive) template text."""

    def __init__(self, content, **kwargs):
        super(Text, self).__init__(**kwargs)
        self.content = content

    def __repr__(self):
        position = (self.lineno, self.pos)
        return "Text(%r, %r)" % (self.content, position)
class Code(Node):
    """A Python code block, either inline or module level.

    Inline::

        <%
            x = 12
        %>

    Module level::

        <%!
            import logger
        %>

    """

    def __init__(self, text, ismodule, **kwargs):
        super(Code, self).__init__(**kwargs)
        self.text = text
        self.ismodule = ismodule
        # parse immediately so identifier information is available
        self.code = ast.PythonCode(text, **self.exception_kwargs)

    def declared_identifiers(self):
        return self.code.declared_identifiers

    def undeclared_identifiers(self):
        return self.code.undeclared_identifiers

    def __repr__(self):
        position = (self.lineno, self.pos)
        return "Code(%r, %r, %r)" % (self.text, self.ismodule, position)
class Comment(Node):
    """A template comment line.

    # this is a comment

    """

    def __init__(self, text, **kwargs):
        super(Comment, self).__init__(**kwargs)
        self.text = text

    def __repr__(self):
        position = (self.lineno, self.pos)
        return "Comment(%r, %r)" % (self.text, position)
class Expression(Node):
    """defines an inline expression.

    ${x+y}

    """

    def __init__(self, text, escapes, **kwargs):
        super(Expression, self).__init__(**kwargs)
        self.text = text
        # "escapes" is the raw filter/escape spec following the "|" in
        # ${expr | filt}; parsed both as an argument list and as code
        self.escapes = escapes
        self.escapes_code = ast.ArgumentList(escapes, **self.exception_kwargs)
        self.code = ast.PythonCode(text, **self.exception_kwargs)

    def declared_identifiers(self):
        # an expression cannot declare names in the template scope
        return []

    def undeclared_identifiers(self):
        # TODO: make the "filter" shortcut list configurable at parse/gen time
        # built-in escape names (DEFAULT_ESCAPES) are excluded, as are
        # identifiers the expression itself declares (e.g. comprehensions)
        return self.code.undeclared_identifiers.union(
            self.escapes_code.undeclared_identifiers.difference(
                set(filters.DEFAULT_ESCAPES.keys())
            )
        ).difference(self.code.declared_identifiers)

    def __repr__(self):
        return "Expression(%r, %r, %r)" % (
            self.text,
            self.escapes_code.args,
            (self.lineno, self.pos)
        )
class _TagMeta(type):
    """metaclass to allow Tag to produce a subclass according to
    its keyword"""

    # registry of tag keyword -> Tag subclass, populated as each
    # subclass with a __keyword__ attribute is defined
    _classmap = {}

    def __init__(cls, clsname, bases, dict):
        if getattr(cls, '__keyword__', None) is not None:
            cls._classmap[cls.__keyword__] = cls
        super(_TagMeta, cls).__init__(clsname, bases, dict)

    def __call__(cls, keyword, attributes, **kwargs):
        # a namespaced keyword like "ns:defname" is a def call through a
        # namespace, not a registered tag; route it to CallNamespaceTag
        if ":" in keyword:
            ns, defname = keyword.split(':')
            return type.__call__(CallNamespaceTag, ns, defname,
                                 attributes, **kwargs)
        try:
            cls = _TagMeta._classmap[keyword]
        except KeyError:
            # unknown tag keyword: report it with source position
            raise exceptions.CompileException(
                "No such tag: '%s'" % keyword,
                source=kwargs['source'],
                lineno=kwargs['lineno'],
                pos=kwargs['pos'],
                filename=kwargs['filename']
            )
        return type.__call__(cls, keyword, attributes, **kwargs)
class Tag(compat.with_metaclass(_TagMeta, Node)):
    """abstract base class for tags.

    <%sometag/>

    <%someothertag>
        stuff
    </%someothertag>

    """

    __keyword__ = None

    def __init__(self, keyword, attributes, expressions,
                 nonexpressions, required, **kwargs):
        """construct a new Tag instance.

        this constructor not called directly, and is only called
        by subclasses.

        :param keyword: the tag keyword
        :param attributes: raw dictionary of attribute key/value pairs
        :param expressions: a set of identifiers that are legal attributes,
         which can also contain embedded expressions
        :param nonexpressions: a set of identifiers that are legal
         attributes, which cannot contain embedded expressions
        :param required: a set of attribute identifiers that must be present
        :param \**kwargs:
         other arguments passed to the Node superclass (lineno, pos)

        """
        super(Tag, self).__init__(**kwargs)
        self.keyword = keyword
        self.attributes = attributes
        self._parse_attributes(expressions, nonexpressions)
        missing = [r for r in required if r not in self.parsed_attributes]
        if len(missing):
            raise exceptions.CompileException(
                "Missing attribute(s): %s" %
                ",".join([repr(m) for m in missing]),
                **self.exception_kwargs)
        self.parent = None
        self.nodes = []

    def is_root(self):
        return self.parent is None

    def get_children(self):
        return self.nodes

    def _parse_attributes(self, expressions, nonexpressions):
        # Builds self.parsed_attributes: a map of attribute name to a
        # Python source string, and collects the identifiers referenced
        # by any embedded ${...} expressions.
        undeclared_identifiers = set()
        self.parsed_attributes = {}
        for key in self.attributes:
            if key in expressions:
                expr = []
                # split the raw value on ${...} spans; literal pieces
                # become repr() strings, expression pieces stay as code
                for x in re.compile(r'(\${.+?})',
                                    re.S).split(self.attributes[key]):
                    m = re.compile(r'^\${(.+?)}$', re.S).match(x)
                    if m:
                        code = ast.PythonCode(m.group(1).rstrip(),
                                              **self.exception_kwargs)
                        # we aren't discarding "declared_identifiers" here,
                        # which we do so that list comprehension-declared
                        # variables aren't counted. As yet can't find a
                        # condition that requires it here.
                        undeclared_identifiers = \
                            undeclared_identifiers.union(
                                code.undeclared_identifiers)
                        expr.append('(%s)' % m.group(1))
                    else:
                        if x:
                            expr.append(repr(x))
                # an empty value still produces valid source: repr('')
                self.parsed_attributes[key] = " + ".join(expr) or repr('')
            elif key in nonexpressions:
                if re.search(r'\${.+?}', self.attributes[key]):
                    # NOTE(review): "Attibute" typo is in the user-facing
                    # message text; left byte-identical here.
                    raise exceptions.CompileException(
                        "Attibute '%s' in tag '%s' does not allow embedded "
                        "expressions" % (key, self.keyword),
                        **self.exception_kwargs)
                self.parsed_attributes[key] = repr(self.attributes[key])
            else:
                raise exceptions.CompileException(
                    "Invalid attribute for tag '%s': '%s'" %
                    (self.keyword, key),
                    **self.exception_kwargs)
        self.expression_undeclared_identifiers = undeclared_identifiers

    def declared_identifiers(self):
        return []

    def undeclared_identifiers(self):
        return self.expression_undeclared_identifiers

    def __repr__(self):
        return "%s(%r, %s, %r, %r)" % (self.__class__.__name__,
                                       self.keyword,
                                       util.sorted_dict_repr(self.attributes),
                                       (self.lineno, self.pos),
                                       self.nodes
                                       )
class IncludeTag(Tag):
    """The ``<%include>`` tag; requires a ``file`` attribute."""

    __keyword__ = 'include'

    def __init__(self, keyword, attributes, **kwargs):
        super(IncludeTag, self).__init__(
            keyword,
            attributes,
            ('file', 'import', 'args'),
            (), ('file',), **kwargs)
        # wrap the "args" attribute in a dummy call so it parses as code
        self.page_args = ast.PythonCode(
            "__DUMMY(%s)" % attributes.get('args', ''),
            **self.exception_kwargs)

    def declared_identifiers(self):
        return []

    def undeclared_identifiers(self):
        args = self.page_args
        # drop the synthetic __DUMMY name and anything args declared itself
        from_args = (args.undeclared_identifiers
                     - set(["__DUMMY"])
                     - args.declared_identifiers)
        inherited = super(IncludeTag, self).undeclared_identifiers()
        return from_args | inherited
class NamespaceTag(Tag):
    """The ``<%namespace>`` tag."""

    __keyword__ = 'namespace'

    def __init__(self, keyword, attributes, **kwargs):
        super(NamespaceTag, self).__init__(
            keyword, attributes,
            ('file',),
            ('name', 'inheritable',
             'import', 'module'),
            (), **kwargs)

        # anonymous namespaces get a synthetic name derived from id(self)
        self.name = attributes.get('name', '__anon_%s' % hex(abs(id(self))))
        if 'name' not in attributes and 'import' not in attributes:
            raise exceptions.CompileException(
                "'name' and/or 'import' attributes are required "
                "for <%namespace>",
                **self.exception_kwargs)
        if 'file' in attributes and 'module' in attributes:
            raise exceptions.CompileException(
                "<%namespace> may only have one of 'file' or 'module'",
                **self.exception_kwargs
            )

    def declared_identifiers(self):
        return []
class TextTag(Tag):
    """The ``<%text>`` tag; accepts only a ``filter`` attribute."""

    __keyword__ = 'text'

    def __init__(self, keyword, attributes, **kwargs):
        super(TextTag, self).__init__(
            keyword,
            attributes, (),
            # bug fix: was ('filter') — a plain string, not a tuple, so the
            # "key in nonexpressions" membership test in Tag._parse_attributes
            # did a substring match and would accept bogus attribute names
            # such as "fil" or "ter"
            ('filter',), (), **kwargs)
        self.filter_args = ast.ArgumentList(
            attributes.get('filter', ''),
            **self.exception_kwargs)

    def undeclared_identifiers(self):
        # filter names referenced by the "filter" attribute, minus the
        # built-in escapes, plus identifiers from embedded expressions
        return self.filter_args.\
            undeclared_identifiers.\
            difference(filters.DEFAULT_ESCAPES.keys()).union(
                self.expression_undeclared_identifiers
            )
class DefTag(Tag):
    """Represents a <%def> tag: a named, callable template function."""
    __keyword__ = 'def'
    def __init__(self, keyword, attributes, **kwargs):
        # 'buffered', 'cached' and any 'cache_*' attribute are expressions.
        expressions = ['buffered', 'cached'] + [
                c for c in attributes if c.startswith('cache_')]
        super(DefTag, self).__init__(
                keyword,
                attributes,
                expressions,
                ('name', 'filter', 'decorator'),
                ('name',),
                **kwargs)
        name = attributes['name']
        # A bare word (no parenthesized signature) is invalid for %def.
        if re.match(r'^[\w_]+$', name):
            raise exceptions.CompileException(
                                "Missing parenthesis in %def",
                                **self.exception_kwargs)
        # Parse "def <name>:pass" to extract the function name and signature.
        self.function_decl = ast.FunctionDecl("def " + name + ":pass",
                                                **self.exception_kwargs)
        self.name = self.function_decl.funcname
        self.decorator = attributes.get('decorator', '')
        self.filter_args = ast.ArgumentList(
                                attributes.get('filter', ''),
                                **self.exception_kwargs)
    # Class-level flags distinguishing defs from blocks.
    is_anonymous = False
    is_block = False
    @property
    def funcname(self):
        return self.function_decl.funcname
    def get_argument_expressions(self, **kw):
        return self.function_decl.get_argument_expressions(**kw)
    def declared_identifiers(self):
        # The def's own argument names are its declared identifiers.
        return self.function_decl.allargnames
    def undeclared_identifiers(self):
        # Collect names referenced by default-argument expressions ...
        res = []
        for c in self.function_decl.defaults:
            res += list(ast.PythonCode(c, **self.exception_kwargs).
                                    undeclared_identifiers)
        # ... plus filter names not in the default escapes, plus attribute
        # expressions, minus the def's own argument names.
        return set(res).union(
            self.filter_args.\
                            undeclared_identifiers.\
                            difference(filters.DEFAULT_ESCAPES.keys())
        ).union(
            self.expression_undeclared_identifiers
        ).difference(
            self.function_decl.allargnames
        )
class BlockTag(Tag):
    """Represents a <%block> tag: an inline-rendered, optionally named block."""
    __keyword__ = 'block'
    def __init__(self, keyword, attributes, **kwargs):
        # 'buffered', 'cached', 'args' and any 'cache_*' attribute are expressions.
        expressions = ['buffered', 'cached', 'args'] + [
            c for c in attributes if c.startswith('cache_')]
        super(BlockTag, self).__init__(
                keyword,
                attributes,
                expressions,
                ('name','filter', 'decorator'),
                (),
                **kwargs)
        name = attributes.get('name')
        # Unlike %def, a %block name must be a bare word (no signature).
        if name and not re.match(r'^[\w_]+$',name):
            raise exceptions.CompileException(
                                "%block may not specify an argument signature",
                                **self.exception_kwargs)
        # Only named blocks may receive arguments.
        if not name and attributes.get('args', None):
            raise exceptions.CompileException(
                                "Only named %blocks may specify args",
                                **self.exception_kwargs
                                )
        self.body_decl = ast.FunctionArgs(attributes.get('args', ''),
                                            **self.exception_kwargs)
        self.name = name
        self.decorator = attributes.get('decorator', '')
        self.filter_args = ast.ArgumentList(
                                attributes.get('filter', ''),
                                **self.exception_kwargs)
    # Blocks are always blocks; they are anonymous when unnamed.
    is_block = True
    @property
    def is_anonymous(self):
        return self.name is None
    @property
    def funcname(self):
        # Anonymous blocks get a generated name based on the line number.
        return self.name or "__M_anon_%d" % (self.lineno, )
    def get_argument_expressions(self, **kw):
        return self.body_decl.get_argument_expressions(**kw)
    def declared_identifiers(self):
        return self.body_decl.allargnames
    def undeclared_identifiers(self):
        # Filter names not covered by the default escapes, plus attribute
        # expression identifiers.
        return (self.filter_args.\
                            undeclared_identifiers.\
                            difference(filters.DEFAULT_ESCAPES.keys())
                ).union(self.expression_undeclared_identifiers)
class CallTag(Tag):
    """Represents a <%call> tag: invokes a def expression with a body."""
    __keyword__ = 'call'
    def __init__(self, keyword, attributes, **kwargs):
        # BUGFIX: ('args') is a plain string, not a one-element tuple, so
        # membership tests against it matched substrings of "args".
        super(CallTag, self).__init__(keyword, attributes,
                                    ('args',), ('expr',), ('expr',), **kwargs)
        self.expression = attributes['expr']
        self.code = ast.PythonCode(self.expression, **self.exception_kwargs)
        self.body_decl = ast.FunctionArgs(attributes.get('args', ''),
                                            **self.exception_kwargs)
    def declared_identifiers(self):
        # Names assigned by the call expression plus the body's argument names.
        return self.code.declared_identifiers.union(self.body_decl.allargnames)
    def undeclared_identifiers(self):
        return self.code.undeclared_identifiers.\
                    difference(self.code.declared_identifiers)
class CallNamespaceTag(Tag):
    """Represents a <%ns:defname> call tag: invokes a def in a namespace,
    forwarding all tag attributes as keyword arguments."""
    def __init__(self, namespace, defname, attributes, **kwargs):
        # Every attribute (plus 'args') is treated as an expression.
        super(CallNamespaceTag, self).__init__(
                namespace + ":" + defname,
                attributes,
                tuple(attributes.keys()) + ('args', ),
                (),
                (),
                **kwargs)
        # Build the equivalent Python call "ns.def(k=v, ...)", excluding 'args'.
        self.expression = "%s.%s(%s)" % (
                                namespace,
                                defname,
                                ",".join(["%s=%s" % (k, v) for k, v in
                                            self.parsed_attributes.items()
                                            if k != 'args'])
                            )
        self.code = ast.PythonCode(self.expression, **self.exception_kwargs)
        self.body_decl = ast.FunctionArgs(
                                attributes.get('args', ''),
                                **self.exception_kwargs)
    def declared_identifiers(self):
        # Names assigned by the call expression plus the body's argument names.
        return self.code.declared_identifiers.union(self.body_decl.allargnames)
    def undeclared_identifiers(self):
        return self.code.undeclared_identifiers.\
                    difference(self.code.declared_identifiers)
class InheritTag(Tag):
    """Represents an <%inherit> tag; the 'file' attribute is required and
    may contain an expression."""

    __keyword__ = 'inherit'

    def __init__(self, keyword, attributes, **kwargs):
        super(InheritTag, self).__init__(
            keyword,
            attributes,
            ('file',),
            (),
            ('file',),
            **kwargs)
class PageTag(Tag):
    """Represents a <%page> tag: template-level arguments and options."""
    __keyword__ = 'page'
    def __init__(self, keyword, attributes, **kwargs):
        # 'cached', 'args', 'expression_filter', 'enable_loop' and any
        # 'cache_*' attribute are expressions.
        expressions = ['cached', 'args', 'expression_filter', 'enable_loop'] + [
                c for c in attributes if c.startswith('cache_')]
        super(PageTag, self).__init__(
                keyword,
                attributes,
                expressions,
                (),
                (),
                **kwargs)
        self.body_decl = ast.FunctionArgs(attributes.get('args', ''),
                                            **self.exception_kwargs)
        self.filter_args = ast.ArgumentList(
                                attributes.get('expression_filter', ''),
                                **self.exception_kwargs)
    def declared_identifiers(self):
        # The page's declared identifiers are its argument names.
        return self.body_decl.allargnames
| mit |
40223220/worktogether | wsgi.py | 1 | 44884 | #@+leo-ver=5-thin
#@+node:2014fall.20141212095015.1775: * @file wsgi.py
# coding=utf-8
# 上面的程式內容編碼必須在程式的第一或者第二行才會有作用
################# (1) 模組導入區
# 導入 cherrypy 模組, 為了在 OpenShift 平台上使用 cherrypy 模組, 必須透過 setup.py 安裝
#@@language python
#@@tabwidth -4
#@+<<declarations>>
#@+node:2014fall.20141212095015.1776: ** <<declarations>> (wsgi)
import cherrypy
# os is part of the Python standard library, so no setup.py install is needed
import os
# random module, used by the number-guessing demo pages
import random
# gear module (project-local)
import gear
################# (2) Global variable section
# Directory containing this file (has a trailing backslash on Windows)
_curdir = os.path.join(os.getcwd(), os.path.dirname(__file__))
# Choose data/download directories for cloud (OpenShift) vs local execution
if 'OPENSHIFT_REPO_DIR' in os.environ.keys():
    # running on the OpenShift cloud platform
    download_root_dir = os.environ['OPENSHIFT_DATA_DIR']
    data_dir = os.environ['OPENSHIFT_DATA_DIR']
else:
    # running locally
    download_root_dir = _curdir + "/local_data/"
    data_dir = _curdir + "/local_data/"
'''以下為近端 input() 與 for 迴圈應用的程式碼, 若要將程式送到 OpenShift 執行, 除了採用 CherryPy 網際框架外, 還要轉為 html 列印
# 利用 input() 取得的資料型別為字串
toprint = input("要印甚麼內容?")
# 若要將 input() 取得的字串轉為整數使用, 必須利用 int() 轉換
repeat_no = int(input("重複列印幾次?"))
for i in range(repeat_no):
    print(toprint)
'''
#@-<<declarations>>
#@+others
#@+node:2014fall.20141212095015.1777: ** class Hello
################# (3) 程式類別定義區
# 以下改用 CherryPy 網際框架程式架構
# 以下為 Hello 類別的設計內容, 其中的 object 使用, 表示 Hello 類別繼承 object 的所有特性, 包括方法與屬性設計
class Hello(object):
# Hello 類別的啟動設定
_cp_config = {
'tools.encode.encoding': 'utf-8',
'tools.sessions.on' : True,
'tools.sessions.storage_type' : 'file',
#'tools.sessions.locking' : 'explicit',
# session 以檔案儲存, 而且位於 data_dir 下的 tmp 目錄
'tools.sessions.storage_path' : data_dir+'/tmp',
# session 有效時間設為 60 分鐘
'tools.sessions.timeout' : 60
}
#@+others
#@+node:2014fall.20141212095015.2004: *3* __init__
def __init__(self):
# 配合透過案例啟始建立所需的目錄
if not os.path.isdir(data_dir+'/tmp'):
os.mkdir(data_dir+'/tmp')
if not os.path.isdir(data_dir+"/downloads"):
os.mkdir(data_dir+"/downloads")
if not os.path.isdir(data_dir+"/images"):
os.mkdir(data_dir+"/images")
#@+node:2015.20150330144929.1713: *3* CheckGearSize
    @cherrypy.expose
    # n_g1/n_g2 are tooth counts; M is the module, P the pressure angle
    def CheckGearSize(self, n_g1=15, n_g2=24,M=5, P=15):
        """Validate both tooth counts (must be 15-80); on success store them
        in the session and return links to draw or redesign, otherwise
        return the design form with an error message prepended."""
        n_g1 = int(str(n_g1))
        n_g2 = int(str(n_g2))
        if n_g1 < 15:
            return "齒輪1 低於15" + self.DesignGear()
        elif n_g1 > 80:
            return "齒輪1 超過80 " + self.DesignGear()
        elif n_g2 < 15:
            return "齒輪2 低於15 " + self.DesignGear()
        elif n_g2 > 80:
            return "齒輪2 超過80 " + self.DesignGear()
        else:
            cherrypy.session['g1'] = n_g1
            cherrypy.session['g2'] = n_g2
            outstring = '''
<!DOCTYPE html>
<html>
<head>
齒輪1='''+str(n_g1)+'''<br />
齒輪2='''+str(n_g2)+'''<br />
<br /><a href="\ShowGear\">繪製齒輪</a><br />
<br /><a href="\DesignGear\">重新設計</a><br />
<head>
</html>
'''
            return outstring
#@+node:2015.20150627113431.1: *3* CheckGearSize1
    @cherrypy.expose
    # n_g1..n_g4 are tooth counts; M is the module, P the pressure angle
    def CheckGearSize1(self, n_g1=15, n_g2=24,n_g3=24,n_g4=15,M=5, P=15):
        """Validate four tooth counts (15-80 each, and gear2 must equal gear3
        to avoid interference); on success store them in the session and
        return navigation links, otherwise return the form with an error."""
        n_g1 = int(str(n_g1))
        n_g2 = int(str(n_g2))
        n_g3 = int(str(n_g3))
        n_g4 = int(str(n_g4))
        if n_g1 < 15:
            return "齒輪1 低於15" + self.DesignGear1()
        elif n_g1 > 80:
            return "齒輪1 超過80 " + self.DesignGear1()
        elif n_g2 < 15:
            return "齒輪2 低於15 " + self.DesignGear1()
        elif n_g2 > 80:
            return "齒輪2 超過80 " + self.DesignGear1()
        elif n_g3 < 15:
            return "齒輪3 低於15" + self.DesignGear1()
        elif n_g3 > 80:
            return "齒輪3 超過80 " + self.DesignGear1()
        elif n_g4 < 15:
            return "齒輪4 低於15 " + self.DesignGear1()
        elif n_g4 > 80:
            return "齒輪4 超過80 " + self.DesignGear1()
        elif n_g2!=n_g3:
            return "齒輪2跟齒輪3齒數不同可能會有干涉的問題 " + self.DesignGear1()
        else:
            cherrypy.session['g1'] = n_g1
            cherrypy.session['g2'] = n_g2
            cherrypy.session['g3'] = n_g3
            cherrypy.session['g4'] = n_g4
            outstring = '''
<!DOCTYPE html>
<html>
<head>
齒輪1='''+str(n_g1)+'''<br />
齒輪2='''+str(n_g2)+'''<br />
齒輪3='''+str(n_g3)+'''<br />
齒輪4='''+str(n_g4)+'''<br />
<br /><a href="\ShowGear1\">繪製齒輪</a><br />
<br /><a href="\DesignGear1\">重新設計</a><br />
<head>
</html>
'''
            return outstring
#@+node:2014fall.20141212095015.1778: *3* index_orig
# 以 @ 開頭的 cherrypy.expose 為 decorator, 用來表示隨後的成員方法, 可以直接讓使用者以 URL 連結執行
@cherrypy.expose
# index 方法為 CherryPy 各類別成員方法中的內建(default)方法, 當使用者執行時未指定方法, 系統將會優先執行 index 方法
# 有 self 的方法為類別中的成員方法, Python 程式透過此一 self 在各成員方法間傳遞物件內容
def index_orig(self, toprint="a_40223220"):
return toprint
#@+node:2014fall.20141212095015.1779: *3* hello
@cherrypy.expose
def hello(self, toprint="Hello World!"):
return toprint
#@+node:2014fall.20141215194146.1791: *3* index
@cherrypy.expose
def index(self, guess=None):
# 將標準答案存入 answer session 對應區
theanswer = random.randint(1, 100)
thecount = 0
# 將答案與計算次數變數存進 session 對應變數
cherrypy.session['answer'] = theanswer
cherrypy.session['count'] = thecount
# 印出讓使用者輸入的超文件表單
outstring = '''
a_40223220
'''
return outstring
#@+node:2015.20150615085301.1: *3* index1
    @cherrypy.expose
    def index1(self, guess=None):
        """Demo page: seeds the guessing-game session, then returns a page
        with a guess form, a Brython click handler, and canvas drawing."""
        # Store the random answer (1-100) in the session.
        theanswer = random.randint(1, 100)
        thecount = 0
        # Store the answer and the attempt counter in session variables.
        cherrypy.session['answer'] = theanswer
        cherrypy.session['count'] = thecount
        # HTML form (with embedded Brython scripts) returned to the user.
        outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<form method=POST action=doCheck>
請輸入您所猜的整數:<input type=text name=guess><br />
<input type=submit value=send>
</form>
<hr>
<!-- 以下在網頁內嵌 Brython 程式 -->
<script type="text/python">
from browser import document, alert
def echo(ev):
    alert(document["zone"].value)
# 將文件中名稱為 mybutton 的物件, 透過 click 事件與 echo 函式 bind 在一起
document['mybutton'].bind('click',echo)
</script>
<input id="zone"><button id="mybutton">click !</button>
<hr>
<!-- 以下為 canvas 畫圖程式 -->
<script type="text/python">
# 從 browser 導入 document
from browser import document
import math
# 畫布指定在名稱為 plotarea 的 canvas 上
# 以下使用中文變數名稱
canvas = document["plotarea"]
ctx = canvas.getContext("2d")
# 用紅色畫一條直線
ctx.beginPath()
ctx.lineWidth = 3
ctx.moveTo(0, 0)
ctx.lineTo(0, 500)
ctx.strokeStyle = "red"
ctx.stroke()
# 用藍色再畫一條直線
ctx.beginPath()
ctx.lineWidth = 3
ctx.moveTo(0, 0)
ctx.lineTo(500, 0)
ctx.strokeStyle = "blue"
ctx.stroke()
# 用綠色再畫一條直線
ctx.beginPath()
ctx.lineWidth = 3
ctx.moveTo(0, 0)
ctx.lineTo(500, 500)
ctx.strokeStyle = "green"
ctx.stroke()
# 用黑色畫一個圓
ctx.beginPath()
ctx.lineWidth = 3
ctx.strokeStyle = "black"
ctx.arc(250,250,50,0,2*math.pi)
ctx.stroke()
</script>
<canvas id="plotarea" width="800" height="600"></canvas>
</body>
</html>
'''
        return outstring
#@+node:2015.20150331094055.1733: *3* DesignGear
    @cherrypy.expose
    # n_g1/n_g2 are tooth counts; M is the module, P the pressure angle
    def DesignGear(self, n_g1=15, n_g2=24,M=5, P=15):
        """Return the two-gear design form; submits to CheckGearSize."""
        outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<form method=\"post\" action=\"CheckGearSize\">
<fieldset>
齒數1(範圍:15~80):<input type=text name=n_g1 value='''+str(n_g1)+'''><br />
齒數2(範圍:15~80):<input type=text name=n_g2 value = '''+str(n_g2)+'''><br />
<input type=\"submit\" value=\"繪製齒輪\">
</form>
</body>
</html>
'''
        return outstring
#@+node:2015.20150622102244.1: *3* DesignGear1
    @cherrypy.expose
    # n_g1..n_g4 are tooth counts; M is the module, P the pressure angle
    def DesignGear1(self, n_g1=15, n_g2=24,n_g3=24,n_g4=15,M=5, P=15):
        """Return the four-gear design form; submits to CheckGearSize1."""
        outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<form method=\"post\" action=\"CheckGearSize1\">
<fieldset>
<legend>齒輪2跟齒輪3齒數需相同不然可能會有干涉的問題:</legend>
齒輪1(範圍:15~80):<input type=text name=n_g1 value='''+str(n_g1)+'''><br />
齒輪2(範圍:15~80):<input type=text name=n_g2 value='''+str(n_g2)+'''><br />
齒輪3(範圍:15~80):<input type=text name=n_g3 value='''+str(n_g3)+'''><br />
齒輪4(範圍:15~80):<input type=text name=n_g4 value='''+str(n_g4)+'''><br />
<input type=\"submit\" value=\"繪製齒輪\">
</form>
</body>
</html>
'''
        return outstring
#@+node:2015.20150622011853.1: *3* threeDgear1
    @cherrypy.expose
    # N1..N7 are tooth counts; M is the module, P the pressure angle
    def threeDgear1(self, N1=15, N2=24,N3=24,N4=24,N5=24,N6=24,N7=24,M=5, P=15):
        """Return the seven-gear input form; submits to mygeartest3."""
        outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<form method=\"post\" action=\"mygeartest3\">
<fieldset>
齒數1(範圍:15~80):<br /><input type=\"text\" name=\"N1\"><br />
齒數2(範圍:15~80):<br /><input type=\"text\" name=\"N2\"><br />
齒數3(範圍:15~80):<br /><input type=\"text\" name=\"N3\"><br />
齒數4(範圍:15~80):<br /><input type=\"text\" name=\"N4\"><br />
齒數5(範圍:15~80):<br /><input type=\"text\" name=\"N5\"><br />
齒數6(範圍:15~80):<br /><input type=\"text\" name=\"N6\"><br />
齒數7(範圍:15~80):<br /><input type=\"text\" name=\"N7\"><br />
<input type=\"submit\" value=\"send\">
</form>
</body>
</html>
'''
        return outstring
#@+node:2015.20150330144929.1762: *3* do2Dgear
    @cherrypy.expose
    # N is the tooth count, M the module, P the pressure angle; here N and M
    # are only interpolated into the canvas moveTo() call of the demo page.
    def do2Dgear(self, N=20, M=5, P=15):
        """Return a Brython canvas demo page drawing three lines and a circle."""
        outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<!-- 以下為 canvas 畫圖程式 -->
<script type="text/python">
# 從 browser 導入 document
from browser import document
import math
# 畫布指定在名稱為 plotarea 的 canvas 上
canvas = document["plotarea"]
ctx = canvas.getContext("2d")
# 用紅色畫一條直線
ctx.beginPath()
ctx.lineWidth = 3
'''
        outstring += '''
ctx.moveTo('''+str(N)+","+str(M)+")"
        outstring += '''
ctx.lineTo(0, 500)
ctx.strokeStyle = "red"
ctx.stroke()
# 用藍色再畫一條直線
ctx.beginPath()
ctx.lineWidth = 3
ctx.moveTo(0, 0)
ctx.lineTo(500, 0)
ctx.strokeStyle = "blue"
ctx.stroke()
# 用綠色再畫一條直線
ctx.beginPath()
ctx.lineWidth = 3
ctx.moveTo(0, 0)
ctx.lineTo(500, 500)
ctx.strokeStyle = "green"
ctx.stroke()
# 用黑色畫一個圓
ctx.beginPath()
ctx.lineWidth = 3
ctx.strokeStyle = "black"
ctx.arc(250,250,50,0,2*math.pi)
ctx.stroke()
</script>
<canvas id="plotarea" width="800" height="600"></canvas>
</body>
</html>
'''
        return outstring
#@+node:2015.20150331094055.1735: *3* do3Dgear
    @cherrypy.expose
    # N is the tooth count, M the module, P the pressure angle.
    # NOTE(review): this handler is an exact duplicate of do2Dgear; it does
    # no 3D drawing -- presumably a placeholder to be replaced.
    def do3Dgear(self, N=20, M=5, P=15):
        """Return the same Brython canvas demo page as do2Dgear (duplicate)."""
        outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<!-- 以下為 canvas 畫圖程式 -->
<script type="text/python">
# 從 browser 導入 document
from browser import document
import math
# 畫布指定在名稱為 plotarea 的 canvas 上
canvas = document["plotarea"]
ctx = canvas.getContext("2d")
# 用紅色畫一條直線
ctx.beginPath()
ctx.lineWidth = 3
'''
        outstring += '''
ctx.moveTo('''+str(N)+","+str(M)+")"
        outstring += '''
ctx.lineTo(0, 500)
ctx.strokeStyle = "red"
ctx.stroke()
# 用藍色再畫一條直線
ctx.beginPath()
ctx.lineWidth = 3
ctx.moveTo(0, 0)
ctx.lineTo(500, 0)
ctx.strokeStyle = "blue"
ctx.stroke()
# 用綠色再畫一條直線
ctx.beginPath()
ctx.lineWidth = 3
ctx.moveTo(0, 0)
ctx.lineTo(500, 500)
ctx.strokeStyle = "green"
ctx.stroke()
# 用黑色畫一個圓
ctx.beginPath()
ctx.lineWidth = 3
ctx.strokeStyle = "black"
ctx.arc(250,250,50,0,2*math.pi)
ctx.stroke()
</script>
<canvas id="plotarea" width="800" height="600"></canvas>
</body>
</html>
'''
        return outstring
#@+node:2015.20150330144929.1765: *3* mygeartest
    @cherrypy.expose
    # N is the tooth count, M the module, P the pressure angle; the embedded
    # Brython script currently draws a fixed 41-tooth gear regardless of N/M/P.
    def mygeartest(self, N=20, M=5, P=15):
        """Return a page whose embedded Brython script draws one involute
        spur gear on a canvas."""
        outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<!-- 以下為 canvas 畫圖程式 -->
<script type="text/python">
# 從 browser 導入 document
from browser import document
from math import *
# 準備在 id="plotarea" 的 canvas 中繪圖
canvas = document["plotarea"]
ctx = canvas.getContext("2d")
def create_line(x1, y1, x2, y2, width=3, fill="red"):
    ctx.beginPath()
    ctx.lineWidth = width
    ctx.moveTo(x1, y1)
    ctx.lineTo(x2, y2)
    ctx.strokeStyle = fill
    ctx.stroke()
# 導入數學函式後, 圓周率為 pi
# deg 為角度轉為徑度的轉換因子
deg = pi/180.
#
# 以下分別為正齒輪繪圖與主 tkinter 畫布繪圖
#
# 定義一個繪正齒輪的繪圖函式
# midx 為齒輪圓心 x 座標
# midy 為齒輪圓心 y 座標
# rp 為節圓半徑, n 為齒數
def 齒輪(midx, midy, rp, n, 顏色):
    # 將角度轉換因子設為全域變數
    global deg
    # 齒輪漸開線分成 15 線段繪製
    imax = 15
    # 在輸入的畫布上繪製直線, 由圓心到節圓 y 軸頂點畫一直線
    create_line(midx, midy, midx, midy-rp)
    # 畫出 rp 圓, 畫圓函式尚未定義
    #create_oval(midx-rp, midy-rp, midx+rp, midy+rp, width=2)
    # a 為模數 (代表公制中齒的大小), 模數為節圓直徑(稱為節徑)除以齒數
    # 模數也就是齒冠大小
    a=2*rp/n
    # d 為齒根大小, 為模數的 1.157 或 1.25倍, 這裡採 1.25 倍
    d=2.5*rp/n
    # ra 為齒輪的外圍半徑
    ra=rp+a
    print("ra:", ra)
    # 畫出 ra 圓, 畫圓函式尚未定義
    #create_oval(midx-ra, midy-ra, midx+ra, midy+ra, width=1)
    # rb 則為齒輪的基圓半徑
    # 基圓為漸開線長齒之基準圓
    rb=rp*cos(20*deg)
    print("rp:", rp)
    print("rb:", rb)
    # 畫出 rb 圓 (基圓), 畫圓函式尚未定義
    #create_oval(midx-rb, midy-rb, midx+rb, midy+rb, width=1)
    # rd 為齒根圓半徑
    rd=rp-d
    # 當 rd 大於 rb 時
    print("rd:", rd)
    # 畫出 rd 圓 (齒根圓), 畫圓函式尚未定義
    #create_oval(midx-rd, midy-rd, midx+rd, midy+rd, width=1)
    # dr 則為基圓到齒頂圓半徑分成 imax 段後的每段半徑增量大小
    # 將圓弧分成 imax 段來繪製漸開線
    dr=(ra-rb)/imax
    # tan(20*deg)-20*deg 為漸開線函數
    sigma=pi/(2*n)+tan(20*deg)-20*deg
    for j in range(n):
        ang=-2.*j*pi/n+sigma
        ang2=2.*j*pi/n+sigma
        lxd=midx+rd*sin(ang2-2.*pi/n)
        lyd=midy-rd*cos(ang2-2.*pi/n)
        #for(i=0;i<=imax;i++):
        for i in range(imax+1):
            r=rb+i*dr
            theta=sqrt((r*r)/(rb*rb)-1.)
            alpha=theta-atan(theta)
            xpt=r*sin(alpha-ang)
            ypt=r*cos(alpha-ang)
            xd=rd*sin(-ang)
            yd=rd*cos(-ang)
            # i=0 時, 繪線起點由齒根圓上的點, 作為起點
            if(i==0):
                last_x = midx+xd
                last_y = midy-yd
            # 由左側齒根圓作為起點, 除第一點 (xd,yd) 齒根圓上的起點外, 其餘的 (xpt,ypt)則為漸開線上的分段點
            create_line((midx+xpt),(midy-ypt),(last_x),(last_y),fill=顏色)
            # 最後一點, 則為齒頂圓
            if(i==imax):
                lfx=midx+xpt
                lfy=midy-ypt
            last_x = midx+xpt
            last_y = midy-ypt
        # the line from last end of dedendum point to the recent
        # end of dedendum point
        # lxd 為齒根圓上的左側 x 座標, lyd 則為 y 座標
        # 下列為齒根圓上用來近似圓弧的直線
        create_line((lxd),(lyd),(midx+xd),(midy-yd),fill=顏色)
        #for(i=0;i<=imax;i++):
        for i in range(imax+1):
            r=rb+i*dr
            theta=sqrt((r*r)/(rb*rb)-1.)
            alpha=theta-atan(theta)
            xpt=r*sin(ang2-alpha)
            ypt=r*cos(ang2-alpha)
            xd=rd*sin(ang2)
            yd=rd*cos(ang2)
            # i=0 時, 繪線起點由齒根圓上的點, 作為起點
            if(i==0):
                last_x = midx+xd
                last_y = midy-yd
            # 由右側齒根圓作為起點, 除第一點 (xd,yd) 齒根圓上的起點外, 其餘的 (xpt,ypt)則為漸開線上的分段點
            create_line((midx+xpt),(midy-ypt),(last_x),(last_y),fill=顏色)
            # 最後一點, 則為齒頂圓
            if(i==imax):
                rfx=midx+xpt
                rfy=midy-ypt
            last_x = midx+xpt
            last_y = midy-ypt
        # lfx 為齒頂圓上的左側 x 座標, lfy 則為 y 座標
        # 下列為齒頂圓上用來近似圓弧的直線
        create_line(lfx,lfy,rfx,rfy,fill=顏色)
齒輪(400,400,300,41,"blue")
</script>
<canvas id="plotarea" width="800" height="800"></canvas>
</body>
</html>
'''
        return outstring
#@+node:amd.20150415215023.1: *3* ShowGear
    @cherrypy.expose
    # n_g1/n_g2 defaults are unused: tooth counts come from the session
    # values stored by CheckGearSize.
    def ShowGear(self, n_g1=15, n_g2=24,M=5, P=15):
        """Render the two-gear assembly: reads g1/g2 from the session,
        computes a canvas x-offset from the larger pitch radius, and returns
        a page whose Brython script meshes the two spur gears."""
        g1= int(cherrypy.session.get('g1'))
        g2= int(cherrypy.session.get('g2'))
        # Pitch radii at module 10; the larger one sets the x-offset.
        m_g1 = 10*g1/2
        m_g2 = 10*g2/2
        if m_g1>=m_g2:
            x=m_g1
        else:
            x=m_g2
        outstring = '''
<!DOCTYPE html>
<html>
<head>
<form method=\"post\" action=\"DesignGear\">
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
<input type=\"submit\" value=\"重新繪製\">
</form>
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<fieldset>
<legend>齒輪組合圖:</legend>
<!-- 以下為 canvas 畫圖程式 -->
<script type="text/python">
# 從 browser 導入 document
from browser import document
from math import *
# 請注意, 這裡導入位於 Lib/site-packages 目錄下的 spur.py 檔案
import spur
# 準備在 id="plotarea" 的 canvas 中繪圖
canvas = document["plotarea"]
ctx = canvas.getContext("2d")
# 以下利用 spur.py 程式進行繪圖, 接下來的協同設計運算必須要配合使用者的需求進行設計運算與繪圖
# 其中並將工作分配給其他組員建立類似 spur.py 的相關零件繪圖模組
# midx, midy 為齒輪圓心座標, rp 為節圓半徑, n 為齒數, pa 為壓力角, color 為線的顏色
# Gear(midx, midy, rp, n=20, pa=20, color="black"):
# 模數決定齒的尺寸大小, 囓合齒輪組必須有相同的模數與壓力角
# 壓力角 pa 單位為角度
x='''+str(x)+'''
x=x+20
pa = 10
# m 為模數
m = 10
# 第1齒輪齒數
n_g1 ='''+str( g1)+'''
# 第2齒輪齒數
n_g2 ='''+str( g2)+'''
# 計算兩齒輪的節圓半徑
rp_g1 = m*n_g1/2
rp_g2 = m*n_g2/2
y=rp_g1+20
# 將第1齒輪順時鐘轉 90 度
# 使用 ctx.save() 與 ctx.restore() 以確保各齒輪以相對座標進行旋轉繪圖
ctx.save()
# translate to the origin of second gear
ctx.translate(x,y)
# rotate to engage
ctx.rotate(pi)
# put it back
ctx.translate(-x,-y)
spur.Spur(ctx).Gear(x,y,rp_g1,n_g1, pa, "blue")
ctx.restore()
# 將第2齒輪逆時鐘轉 90 度之後, 再多轉一齒, 以便與第1齒輪進行囓合
ctx.save()
# translate to the origin of second gear
ctx.translate(x,y+rp_g1+rp_g2)
# rotate to engage
ctx.rotate(-pi/n_g2)
# put it back
ctx.translate(-x,-(y+rp_g1+rp_g2))
spur.Spur(ctx).Gear(x,y+rp_g1+rp_g2,rp_g2,n_g2, pa, "black")
ctx.restore()
</script>
<canvas id="plotarea" width="3800" height="4000"></canvas>
</body>
</html>
'''
        return outstring
#@+node:2015.20150622011937.1: *3* mygeartest3
    @cherrypy.expose
    # N1..N7 are tooth counts posted from threeDgear1; M/P defaults unused
    # by the embedded script (it fixes module 20, pressure angle 20).
    def mygeartest3(self, N1=15, N2=24,N3=24,N4=24,N5=24,N6=24,N7=24,M=5, P=15):
        """Render a vertical train of seven meshed spur gears on a canvas,
        rotating each successive gear so its teeth engage the previous one."""
        outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<form method=\"post\" action=\"threeDgear1\">
<fieldset>
<legend>最下面有返回按鈕:</legend>
<!-- 以下為 canvas 畫圖程式 -->
<script type="text/python">
# 從 browser 導入 document
from browser import document
from math import *
# 請注意, 這裡導入位於 Lib/site-packages 目錄下的 spur.py 檔案
import spur
# 準備在 id="plotarea" 的 canvas 中繪圖
canvas = document["plotarea"]
ctx = canvas.getContext("2d")
# 以下利用 spur.py 程式進行繪圖, 接下來的協同設計運算必須要配合使用者的需求進行設計運算與繪圖
# 其中並將工作分配給其他組員建立類似 spur.py 的相關零件繪圖模組
# midx, midy 為齒輪圓心座標, rp 為節圓半徑, n 為齒數, pa 為壓力角, color 為線的顏色
# Gear(midx, midy, rp, n=20, pa=20, color="black"):
# 模數決定齒的尺寸大小, 囓合齒輪組必須有相同的模數與壓力角
# 壓力角 pa 單位為角度
pa = 20
# m 為模數
m = 20
# 第1齒輪齒數
n_g1 = '''+str(N1)+'''
# 第2齒輪齒數
n_g2 = '''+str(N2)+'''
# 第3齒輪齒數
n_g3 = '''+str(N3)+'''
# 第4齒輪齒數
n_g4 = '''+str(N4)+'''
# 第5齒輪齒數
n_g5 = '''+str(N5)+'''
# 第6齒輪齒數
n_g6 = '''+str(N6)+'''
# 第7齒輪齒數
n_g7 = '''+str(N7)+'''
# 計算兩齒輪的節圓半徑
rp_g1 = m*n_g1/2
rp_g2 = m*n_g2/2
rp_g3 = m*n_g3/2
rp_g4 = m*n_g4/2
rp_g5 = m*n_g5/2
rp_g6 = m*n_g6/2
rp_g7 = m*n_g7/2
##########################################################################
# 將第1齒輪順時鐘轉 90 度
# 使用 ctx.save() 與 ctx.restore() 以確保各齒輪以相對座標進行旋轉繪圖
ctx.save()
# translate to the origin of second gear
ctx.translate(820,820)
# rotate to engage
ctx.rotate(pi)
# put it back
ctx.translate(-820,-820)
spur.Spur(ctx).Gear(820,820,rp_g1,n_g1, pa, "blue")
ctx.restore()
##########################################################################
# 將第2齒輪逆時鐘轉 90 度之後, 再多轉一齒, 以便與第1齒輪進行囓合
ctx.save()
# translate to the origin of second gear
ctx.translate(820,820+rp_g1+rp_g2)
# rotate to engage
ctx.rotate(-pi/n_g2)
# put it back
ctx.translate(-820,-(820+rp_g1+rp_g2))
spur.Spur(ctx).Gear(820,820+rp_g1+rp_g2,rp_g2,n_g2, pa, "black")
ctx.restore()
##########################################################################
# 將第3齒輪逆時鐘轉 90 度之後, 再多轉一齒, 以便與第1齒輪進行囓合
ctx.save()
# translate to the origin of second gear
ctx.translate(820,820+rp_g1+2*rp_g2+rp_g3)
# rotate to engage
ctx.rotate(-pi/n_g3*(n_g2%2))
# put it back
ctx.translate(-820,-(820+rp_g1+2*rp_g2+rp_g3))
spur.Spur(ctx).Gear(820,820+rp_g1+2*rp_g2+rp_g3,rp_g3,n_g3, pa, "black")
ctx.restore()
##########################################################################
# 將第4齒輪逆時鐘轉 90 度之後, 再多轉一齒, 以便與第1齒輪進行囓合
ctx.save()
# translate to the origin of second gear
ctx.translate(820,820+rp_g1+2*rp_g2+2*rp_g3+rp_g4)
# rotate to engage
a=(n_g2%2)+(n_g3%2)-1
ctx.rotate(-pi/n_g4*a)
# put it back
ctx.translate(-820,-(820+rp_g1+2*rp_g2+2*rp_g3+rp_g4))
spur.Spur(ctx).Gear(820,820+rp_g1+2*rp_g2+2*rp_g3+rp_g4,rp_g4,n_g4, pa, "black")
ctx.restore()
##########################################################################
# 將第5齒輪逆時鐘轉 90 度之後, 再多轉一齒, 以便與第1齒輪進行囓合
ctx.save()
# translate to the origin of second gear
ctx.translate(820,820+rp_g1+2*rp_g2+2*rp_g3+2*rp_g4+rp_g5)
# rotate to engage
b=(n_g2%2)+(n_g3%2)+(n_g4%2)-2
ctx.rotate(-pi/n_g5*b)
# put it back
ctx.translate(-820,-(820+rp_g1+2*rp_g2+2*rp_g3+2*rp_g4+rp_g5))
spur.Spur(ctx).Gear(820,820+rp_g1+2*rp_g2+2*rp_g3+2*rp_g4+rp_g5,rp_g5,n_g5, pa, "black")
ctx.restore()
##########################################################################
# 將第6齒輪逆時鐘轉 90 度之後, 再多轉一齒, 以便與第1齒輪進行囓合
ctx.save()
# translate to the origin of second gear
ctx.translate(820,820+rp_g1+2*rp_g2+2*rp_g3+2*rp_g4+2*rp_g5+rp_g6)
# rotate to engage
c=(n_g2%2)+(n_g3%2)+(n_g4%2)+(n_g5%2)-3
ctx.rotate(-pi/n_g6*c)
# put it back
ctx.translate(-820,-(820+rp_g1+2*rp_g2+2*rp_g3+2*rp_g4+2*rp_g5+rp_g6))
spur.Spur(ctx).Gear(820,820+rp_g1+2*rp_g2+2*rp_g3+2*rp_g4+2*rp_g5+rp_g6,rp_g6,n_g6, pa, "black")
ctx.restore()
##########################################################################
# 將第7齒輪逆時鐘轉 90 度之後, 再多轉一齒, 以便與第1齒輪進行囓合
ctx.save()
# translate to the origin of second gear
ctx.translate(820,820+rp_g1+2*rp_g2+2*rp_g3+2*rp_g4+2*rp_g5+2*rp_g6+rp_g7)
# rotate to engage
d=(n_g2%2)+(n_g3%2)+(n_g4%2)+(n_g5%2)+(n_g6%2)-4
ctx.rotate(-pi/n_g7*d)
# put it back
ctx.translate(-820,-(820+rp_g1+2*rp_g2+2*rp_g3+2*rp_g4+2*rp_g5+2*rp_g6+rp_g7))
spur.Spur(ctx).Gear(820,820+rp_g1+2*rp_g2+2*rp_g3+2*rp_g4+2*rp_g5+2*rp_g6+rp_g7,rp_g7,n_g7, pa, "black")
ctx.restore()
##########################################################################
</script>
<canvas id="plotarea" width="3800" height="12000"></canvas>
<input type=\"submit\" value=\"return\">
</form>
</body>
</html>
'''
        return outstring
#@+node:2015.20150622102228.1: *3* ShowGear1
    @cherrypy.expose
    # n_g1..n_g4 defaults are unused: tooth counts come from the session
    # values stored by CheckGearSize1.
    def ShowGear1(self, n_g1=15, n_g2=24,n_g3=24,n_g4=15,M=5, P=15):
        """Render the four-gear assembly (gears 1-2 stacked vertically,
        gear 3 offset horizontally, gear 4 below it) from session values."""
        g1= int(cherrypy.session.get('g1'))
        g2= int(cherrypy.session.get('g2'))
        g3= int(cherrypy.session.get('g3'))
        g4= int(cherrypy.session.get('g4'))
        # Pitch radii at module 10; the larger of the first two sets the
        # canvas x-offset.
        m_g1 = 10*g1/2
        m_g2 = 10*g2/2
        if m_g1>=m_g2:
            x=m_g1
        else:
            x=m_g2
        outstring = '''
<!DOCTYPE html>
<html>
<head>
<form method=\"post\" action=\"DesignGear1\">
<fieldset>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
<input type=\"submit\" value=\"重新繪製\">
</form>
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<legend>齒輪組和圖:</legend>
<!-- 以下為 canvas 畫圖程式 -->
<script type="text/python">
# 從 browser 導入 document
from browser import document
from math import *
# 請注意, 這裡導入位於 Lib/site-packages 目錄下的 spur.py 檔案
import spur
# 準備在 id="plotarea" 的 canvas 中繪圖
canvas = document["plotarea"]
ctx = canvas.getContext("2d")
# 以下利用 spur.py 程式進行繪圖, 接下來的協同設計運算必須要配合使用者的需求進行設計運算與繪圖
# 其中並將工作分配給其他組員建立類似 spur.py 的相關零件繪圖模組
# midx, midy 為齒輪圓心座標, rp 為節圓半徑, n 為齒數, pa 為壓力角, color 為線的顏色
# Gear(midx, midy, rp, n=20, pa=20, color="black"):
# 模數決定齒的尺寸大小, 囓合齒輪組必須有相同的模數與壓力角
# 壓力角 pa 單位為角度
pa = 10
# m 為模數
m = 10
x='''+str(x)+'''
x=x+20
# 第1齒輪齒數
n_g1 = '''+str( g1)+'''
# 第2齒輪齒數
n_g2 = '''+str( g2)+'''
# 第3齒輪齒數
n_g3 = '''+str( g3)+'''
# 第4齒輪齒數
n_g4 = '''+str( g4)+'''
# 計算兩齒輪的節圓半徑
rp_g1 = m*n_g1/2
rp_g2 = m*n_g2/2
rp_g3 = m*n_g3/2
rp_g4 = m*n_g4/2
y=rp_g1+20
# 將第1齒輪順時鐘轉 90 度
# 使用 ctx.save() 與 ctx.restore() 以確保各齒輪以相對座標進行旋轉繪圖
ctx.save()
# translate to the origin of second gear
ctx.translate(x,y)
# rotate to engage
ctx.rotate(-pi)
# put it back
ctx.translate(-x,-y)
spur.Spur(ctx).Gear(x,y,rp_g1,n_g1, pa, "blue")
ctx.restore()
# 將第2齒輪逆時鐘轉 90 度之後, 再多轉一齒, 以便與第1齒輪進行囓合
ctx.save()
# translate to the origin of second gear
ctx.translate(x,y+rp_g1+rp_g2)
# rotate to engage
ctx.rotate(-pi/n_g2)
# put it back
ctx.translate(-x,-(y+rp_g1+rp_g2))
spur.Spur(ctx).Gear(x,y+rp_g1+rp_g2,rp_g2,n_g2, pa, "black")
ctx.restore()
# 將第3齒輪逆時鐘轉 90 度之後, 再多轉一齒, 以便與第1齒輪進行囓合
ctx.save()
# translate to the origin of second gear
ctx.translate(x+rp_g2+rp_g3,y+rp_g1+rp_g2)
# rotate to engage
ctx.rotate(-pi/2+(pi/n_g3*0.5)*(n_g2%4))
# put it back
ctx.translate(-(x+rp_g2+rp_g3),-(y+rp_g1+rp_g2))
spur.Spur(ctx).Gear(x+rp_g2+rp_g3,y+rp_g1+rp_g2,rp_g3,n_g3, pa, "black")
ctx.restore()
# 將第4齒輪逆時鐘轉 90 度之後, 再多轉一齒, 以便與第1齒輪進行囓合
ctx.save()
# translate to the origin of second gear
ctx.translate(x+rp_g2+rp_g3,y+rp_g1+rp_g2+rp_g3+rp_g4)
# rotate to engage
a=(n_g2%2)-1
ctx.rotate(-pi/n_g4*a)
# put it back
ctx.translate(-(x+rp_g2+rp_g3),-(y+rp_g1+rp_g2+rp_g3+rp_g4))
spur.Spur(ctx).Gear(x+rp_g2+rp_g3,y+rp_g1+rp_g2+rp_g3+rp_g4,rp_g4,n_g4, pa, "black")
ctx.restore()
</script>
<canvas id="plotarea" width="3800" height="12000"></canvas>
</body>
</html>
'''
        return outstring
#@+node:2015.20150331094055.1737: *3* my3Dgeartest
@cherrypy.expose
# N is the number of teeth, M the module, P the pressure angle (degrees)
def my3Dgeartest(self, N=20, M=5, P=15):
    """Serve an HTML page whose embedded Brython script draws a spur gear.

    The page loads brython.js plus the Cango2D/gearUtils helpers and runs
    the in-page Python on a <canvas id="plotarea">.  NOTE(review): the
    N/M/P parameters are accepted but never interpolated into the page —
    the embedded script hard-codes gear(400,400,300,41,"blue").
    NOTE(review): the Brython source inside this string literal has lost
    its indentation (whitespace-mangled dump); it is kept verbatim here
    because the string is runtime data.
    """
    outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<!-- 以下為 canvas 畫圖程式 -->
<script type="text/python">
# 從 browser 導入 document
from browser import document
from math import *
# 準備在 id="plotarea" 的 canvas 中繪圖
canvas = document["plotarea"]
ctx = canvas.getContext("2d")
def create_line(x1, y1, x2, y2, width=3, fill="red"):
ctx.beginPath()
ctx.lineWidth = width
ctx.moveTo(x1, y1)
ctx.lineTo(x2, y2)
ctx.strokeStyle = fill
ctx.stroke()
# 導入數學函式後, 圓周率為 pi
# deg 為角度轉為徑度的轉換因子
deg = pi/180.
#
# 以下分別為正齒輪繪圖與主 tkinter 畫布繪圖
#
# 定義一個繪正齒輪的繪圖函式
# midx 為齒輪圓心 x 座標
# midy 為齒輪圓心 y 座標
# rp 為節圓半徑, n 為齒數
def gear(midx, midy, rp, n, 顏色):
# 將角度轉換因子設為全域變數
global deg
# 齒輪漸開線分成 15 線段繪製
imax = 15
# 在輸入的畫布上繪製直線, 由圓心到節圓 y 軸頂點畫一直線
create_line(midx, midy, midx, midy-rp)
# 畫出 rp 圓, 畫圓函式尚未定義
#create_oval(midx-rp, midy-rp, midx+rp, midy+rp, width=2)
# a 為模數 (代表公制中齒的大小), 模數為節圓直徑(稱為節徑)除以齒數
# 模數也就是齒冠大小
a=2*rp/n
# d 為齒根大小, 為模數的 1.157 或 1.25倍, 這裡採 1.25 倍
d=2.5*rp/n
# ra 為齒輪的外圍半徑
ra=rp+a
print("ra:", ra)
# 畫出 ra 圓, 畫圓函式尚未定義
#create_oval(midx-ra, midy-ra, midx+ra, midy+ra, width=1)
# rb 則為齒輪的基圓半徑
# 基圓為漸開線長齒之基準圓
rb=rp*cos(20*deg)
print("rp:", rp)
print("rb:", rb)
# 畫出 rb 圓 (基圓), 畫圓函式尚未定義
#create_oval(midx-rb, midy-rb, midx+rb, midy+rb, width=1)
# rd 為齒根圓半徑
rd=rp-d
# 當 rd 大於 rb 時
print("rd:", rd)
# 畫出 rd 圓 (齒根圓), 畫圓函式尚未定義
#create_oval(midx-rd, midy-rd, midx+rd, midy+rd, width=1)
# dr 則為基圓到齒頂圓半徑分成 imax 段後的每段半徑增量大小
# 將圓弧分成 imax 段來繪製漸開線
dr=(ra-rb)/imax
# tan(20*deg)-20*deg 為漸開線函數
sigma=pi/(2*n)+tan(20*deg)-20*deg
for j in range(n):
ang=-2.*j*pi/n+sigma
ang2=2.*j*pi/n+sigma
lxd=midx+rd*sin(ang2-2.*pi/n)
lyd=midy-rd*cos(ang2-2.*pi/n)
#for(i=0;i<=imax;i++):
for i in range(imax+1):
r=rb+i*dr
theta=sqrt((r*r)/(rb*rb)-1.)
alpha=theta-atan(theta)
xpt=r*sin(alpha-ang)
ypt=r*cos(alpha-ang)
xd=rd*sin(-ang)
yd=rd*cos(-ang)
# i=0 時, 繪線起點由齒根圓上的點, 作為起點
if(i==0):
last_x = midx+xd
last_y = midy-yd
# 由左側齒根圓作為起點, 除第一點 (xd,yd) 齒根圓上的起點外, 其餘的 (xpt,ypt)則為漸開線上的分段點
create_line((midx+xpt),(midy-ypt),(last_x),(last_y),fill=顏色)
# 最後一點, 則為齒頂圓
if(i==imax):
lfx=midx+xpt
lfy=midy-ypt
last_x = midx+xpt
last_y = midy-ypt
# the line from last end of dedendum point to the recent
# end of dedendum point
# lxd 為齒根圓上的左側 x 座標, lyd 則為 y 座標
# 下列為齒根圓上用來近似圓弧的直線
create_line((lxd),(lyd),(midx+xd),(midy-yd),fill=顏色)
#for(i=0;i<=imax;i++):
for i in range(imax+1):
r=rb+i*dr
theta=sqrt((r*r)/(rb*rb)-1.)
alpha=theta-atan(theta)
xpt=r*sin(ang2-alpha)
ypt=r*cos(ang2-alpha)
xd=rd*sin(ang2)
yd=rd*cos(ang2)
# i=0 時, 繪線起點由齒根圓上的點, 作為起點
if(i==0):
last_x = midx+xd
last_y = midy-yd
# 由右側齒根圓作為起點, 除第一點 (xd,yd) 齒根圓上的起點外, 其餘的 (xpt,ypt)則為漸開線上的分段點
create_line((midx+xpt),(midy-ypt),(last_x),(last_y),fill=顏色)
# 最後一點, 則為齒頂圓
if(i==imax):
rfx=midx+xpt
rfy=midy-ypt
last_x = midx+xpt
last_y = midy-ypt
# lfx 為齒頂圓上的左側 x 座標, lfy 則為 y 座標
# 下列為齒頂圓上用來近似圓弧的直線
create_line(lfx,lfy,rfx,rfy,fill=顏色)
gear(400,400,300,41,"blue")
</script>
<canvas id="plotarea" width="800" height="800"></canvas>
</body>
</html>
'''
    return outstring
#@+node:2014fall.20141215194146.1793: *3* doCheck
@cherrypy.expose
def doCheck(self, guess=None):
    """Compare the user's guess with the answer stored in the session.

    Returns a hint ("big"/"small") plus a fresh form, an "error" form on
    non-numeric input, or the success message once the guess matches.
    Redirects to the root page when called without a guess or without a
    valid session.
    """
    # User hit doCheck directly (no form data): send them back to the root.
    if guess is None:
        raise cherrypy.HTTPRedirect("/")
    # Fetch the stored answer.  A missing or non-numeric session value means
    # the game was never started, so redirect instead of crashing.
    # (Was a bare `except:`, which would also have swallowed SystemExit,
    # KeyboardInterrupt and CherryPy's own control-flow exceptions.)
    try:
        theanswer = int(cherrypy.session.get('answer'))
    except (TypeError, ValueError):
        raise cherrypy.HTTPRedirect("/")
    # Form data always arrives as a string; reject non-numeric input.
    try:
        theguess = int(guess)
    except (TypeError, ValueError):
        return "error " + self.guessform()
    # Count every valid attempt.
    cherrypy.session['count'] += 1
    # Answer with a hint plus a new form until the guess is exact.
    if theanswer < theguess:
        return "big " + self.guessform()
    elif theanswer > theguess:
        return "small " + self.guessform()
    # Correct guess.  (Dropped the unused `thecount` session lookup.)
    return "exact: <a href=''>再猜</a>"
#@+node:2014fall.20141215194146.1789: *3* guessform
def guessform(self):
    """Return the HTML form through which the user submits a guess.

    The stored answer and the attempt count from the session are shown
    in front of the form (left in by the original author as a debug aid).
    """
    answer = cherrypy.session.get('answer')
    count = cherrypy.session.get('count')
    form = '''<form method=POST action=doCheck>
請輸入您所猜的整數:<input type=text name=guess><br />
<input type=submit value=send>
</form>'''
    return "{0}/{1}".format(answer, count) + form
#@-others
#@-others
################# (4) Program start-up section
# Static directories/files are resolved relative to the script's own directory.
application_conf = {'/static': {
    'tools.staticdir.on': True,
    # A "static" directory must be created manually under the working directory.
    'tools.staticdir.dir': _curdir+"/static"},
    '/downloads': {
        'tools.staticdir.on': True,
        'tools.staticdir.dir': data_dir+"/downloads"},
    '/images': {
        'tools.staticdir.on': True,
        'tools.staticdir.dir': data_dir+"/images"}
}
root = Hello()
# Mount the gear sub-application under /gear.
root.gear = gear.Gear()
cherrypy.server.socket_port = 8081
cherrypy.server.socket_host = '127.0.0.1'
if 'OPENSHIFT_REPO_DIR' in os.environ.keys():
    # Running on OpenShift: expose a WSGI application object.
    application = cherrypy.Application(root, config=application_conf)
else:
    # Running locally: start the built-in CherryPy server.
    cherrypy.quickstart(root, config=application_conf)
#@-leo
| gpl-3.0 |
openmotics/gateway | src/toolbox.py | 1 | 5644 | # Copyright (C) 2019 OpenMotics BV
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
A few helper classes
"""
from __future__ import absolute_import
import inspect
import logging
import time
import traceback
from collections import deque
from threading import Thread
import msgpack
import six
from gateway.daemon_thread import BaseThread
logger = logging.getLogger('openmotics')

# Typing imports are only needed by mypy; the guard never runs at runtime.
if False:  # MYPY
    from typing import Any, Callable, Dict, IO, List, Optional


class Full(Exception):
    """Queue-full error, mirroring the stdlib queue API.

    Not raised by the Queue below: its `size` is accepted but never enforced.
    """
    pass


class Empty(Exception):
    """Raised by Queue.get() when no item becomes available in time."""
    pass
class Queue(object):
    """Minimal FIFO queue built on a deque, mirroring part of the stdlib API.

    `size` is accepted for signature compatibility but never enforced, so
    `put` can neither block nor raise Full.  Blocking `get` is implemented
    by polling rather than a condition variable.
    """

    def __init__(self, size=None):
        self._queue = deque()  # type: deque
        self._size = size  # Kept for API compatibility; the queue is unbounded.

    def put(self, value, block=False):
        """Enqueue `value`; `block` is ignored (an unbounded put never waits)."""
        _ = block
        self._queue.appendleft(value)

    def get(self, block=True, timeout=None):
        """Dequeue the oldest item.

        Raises Empty immediately when block=False, or after `timeout`
        seconds when blocking; waits forever when blocking with no timeout.
        """
        if not block:
            try:
                return self._queue.pop()
            except IndexError:
                raise Empty()
        deadline = None if timeout is None else time.time() + timeout
        while deadline is None or time.time() < deadline:
            try:
                return self._queue.pop()
            except IndexError:
                # Poll coarsely for long/infinite waits, finely for short ones.
                time.sleep(0.1 if (timeout is None or timeout > 1) else 0.025)
        raise Empty()

    def qsize(self):
        """Number of queued items."""
        return len(self._queue)

    def clear(self):
        """Drop every queued item."""
        return self._queue.clear()
class PluginIPCReader(object):
    """
    This class handles IPC communications.
    It uses a stream of msgpack encoded dict values.

    Decoded dicts are either pushed to `command_receiver` (when given) or
    buffered in an internal queue for `get()`.  `logger` here is a plain
    callable(message, exception), not a logging.Logger.
    """

    def __init__(self, stream, logger, command_receiver=None, name=None):
        # type: (IO[bytes], Callable[[str,Exception],None], Callable[[Dict[str,Any]],None],Optional[str]) -> None
        # Buffer used only when no command_receiver callback was supplied.
        self._command_queue = Queue()
        # read_size=1: feed the unpacker byte-by-byte so a message is decoded
        # as soon as it is complete (assumption — verify against msgpack docs).
        self._unpacker = msgpack.Unpacker(stream, read_size=1, raw=False)  # type: msgpack.Unpacker[Dict[str,Any]]
        self._read_thread = None  # type: Optional[Thread]
        self._logger = logger
        self._running = False
        self._command_receiver = command_receiver
        self._name = name  # Only used to label log messages.

    def start(self):
        # type: () -> None
        """Start the background daemon thread that drains the stream."""
        self._running = True
        self._read_thread = BaseThread(name='ipcread', target=self._read)
        self._read_thread.daemon = True
        self._read_thread.start()

    def stop(self):
        # type: () -> None
        """Ask the read loop to stop and wait for the thread to finish.

        NOTE(review): join() can block until the pending next(unpacker)
        returns, i.e. until new data arrives or the stream hits EOF.
        """
        self._running = False
        if self._read_thread is not None:
            self._read_thread.join()

    def _read(self):
        # type: () -> None
        # Thread body: decode dicts from the stream until stopped or EOF.
        while self._running:
            try:
                command = next(self._unpacker)
                # The protocol only carries dicts; anything else is a framing error.
                if not isinstance(command, dict):
                    raise ValueError('invalid value %s' % command)
                if self._command_receiver is not None:
                    self._command_receiver(command)
                else:
                    self._command_queue.put(command)
            except StopIteration as ex:
                # Stream ended (peer closed): leave the loop for good.
                self._logger('PluginIPCReader %s stopped' % self._name, ex)
                self._running = False
            except Exception as ex:
                # Log and keep reading; only EOF terminates the loop.
                self._logger('Unexpected read exception', ex)

    def get(self, block=True, timeout=None):
        """Pop the next buffered command (only useful without a command_receiver)."""
        return self._command_queue.get(block, timeout)
class PluginIPCWriter(object):
    """Writes msgpack-encoded command dicts to an output stream.

    Counterpart of PluginIPCReader.  Also offers logging helpers that ship
    log lines over the same channel (cid 0, action 'logs').
    """

    def __init__(self, stream):
        # type: (IO[bytes]) -> None
        self._packer = msgpack.Packer()  # type: msgpack.Packer[Dict[str,Any]]
        self._stream = stream

    def log(self, msg):
        # type: (str) -> None
        """Send one log line to the peer as a 'logs' action message."""
        payload = {'cid': 0, 'action': 'logs', 'logs': str(msg)}
        self.write(payload)

    def log_exception(self, name, exception):
        # type: (str, BaseException) -> None
        """Log an exception together with the current traceback."""
        message = 'Exception ({0}) in {1}: {2}'.format(exception, name, traceback.format_exc())
        self.log(message)

    def with_catch(self, name, target, args):
        # type: (str, Callable[...,None], List[Any]) -> None
        """Run target(*args); exceptions are logged instead of propagated."""
        try:
            return target(*args)
        except Exception as exception:
            self.log_exception(name, exception)

    def write(self, response):
        # type: (Dict[str,Any]) -> None
        """Pack and flush one dict; I/O errors are deliberately swallowed."""
        try:
            packed = self._packer.pack(response)
            self._stream.write(packed)
            self._stream.flush()
        except IOError:
            # Stream is gone: best effort only, nothing else can be done.
            pass
class Toolbox(object):
    """Assorted small, stateless helpers."""

    @staticmethod
    def nonify(value, default_value):
        """Map the sentinel `default_value` back to None; pass anything else through."""
        if value == default_value:
            return None
        return value

    @staticmethod
    def denonify(value, default_value):
        """Map None to the sentinel `default_value`; pass anything else through."""
        if value is None:
            return default_value
        return value

    @staticmethod
    def get_parameter_names(func):
        """List the parameter names of `func`, on both Python 2 and 3."""
        if six.PY2:
            return inspect.getargspec(func).args
        return [parameter for parameter in inspect.signature(func).parameters]

    @staticmethod
    def shorten_name(name, maxlength=16):
        """Shorten `name` to at most `maxlength` chars, keeping the last one.

        Long names become '<head>~<last char>' so the result is exactly
        `maxlength` characters.
        """
        if len(name) > maxlength:
            return '{0}~{1}'.format(name[:maxlength - 2], name[-1:])
        return name
| agpl-3.0 |
cheehieu/linux | arch/ia64/scripts/unwcheck.py | 13143 | 1714 | #!/usr/bin/python
#
# Usage: unwcheck.py FILE
#
# This script checks the unwind info of each function in file FILE
# and verifies that the sum of the region-lengths matches the total
# length of the function.
#
# Based on a shell/awk script originally written by Harish Patil,
# which was converted to Perl by Matthew Chapman, which was converted
# to Python by David Mosberger.
#
import os
import re
import sys
# Bail out early when the unwind-info file to check was not supplied.
# (Python 2 script: print statements and long literals are intentional.)
if len(sys.argv) != 2:
    print "Usage: %s FILE" % sys.argv[0]
    sys.exit(2)
# Allow overriding the readelf binary via the READELF environment variable.
readelf = os.getenv("READELF", "readelf")
# Matches "<func>: [0xSTART-0xEND]" header lines emitted by "readelf -u".
start_pattern = re.compile("<([^>]*)>: \[0x([0-9a-f]+)-0x([0-9a-f]+)\]")
# Matches "rlen=N" region-length annotations within a function's unwind info.
rlen_pattern = re.compile(".*rlen=([0-9]+)")
def check_func (func, slots, rlen_sum):
    """Verify that func's unwind regions cover exactly its instruction slots.

    func     -- symbol name, or False before the first function is seen
    slots    -- instruction slots computed from the function's address range
    rlen_sum -- sum of the rlen= values of all of the function's regions
    Increments the global error counter on mismatch instead of aborting.
    """
    if slots != rlen_sum:
        global num_errors
        num_errors += 1
        # Fall back to the raw address range when the symbol name is unknown.
        if not func: func = "[%#x-%#x]" % (start, end)
        print "ERROR: %s: %lu slots, total region length = %lu" % (func, slots, rlen_sum)
    return
# Running totals, updated by check_func() via `global`.
num_funcs = 0
num_errors = 0
# Parser state for the function currently being scanned.
func = False
slots = 0
rlen_sum = 0
# Walk the unwind-info dump produced by "readelf -u FILE".
for line in os.popen("%s -u %s" % (readelf, sys.argv[1])):
    m = start_pattern.match(line)
    if m:
        # A new function header: first validate the previous function.
        check_func(func, slots, rlen_sum)
        func = m.group(1)
        start = long(m.group(2), 16)
        end = long(m.group(3), 16)
        # IA-64 bundles hold 3 instruction slots per 16 bytes of code.
        slots = 3 * (end - start) / 16
        rlen_sum = 0L
        num_funcs += 1
    else:
        # Inside a function: accumulate each region's length.
        m = rlen_pattern.match(line)
        if m:
            rlen_sum += long(m.group(1))
# Validate the final function (no trailing header line triggers it).
check_func(func, slots, rlen_sum)
if num_errors == 0:
    print "No errors detected in %u functions." % num_funcs
else:
    if num_errors > 1:
        err="errors"
    else:
        err="error"
    print "%u %s detected in %u functions." % (num_errors, err, num_funcs)
    sys.exit(1)
| gpl-2.0 |
infoxchange/lettuce | tests/integration/lib/Django-1.3/tests/regressiontests/templates/tests.py | 26 | 112369 | # -*- coding: utf-8 -*-
from django.conf import settings
if __name__ == '__main__':
# When running this file in isolation, we need to set up the configuration
# before importing 'template'.
settings.configure()
from datetime import datetime, timedelta
import time
import os
import sys
import traceback
from django import template
from django.template import base as template_base
from django.core import urlresolvers
from django.template import loader
from django.template.loaders import app_directories, filesystem, cached
from django.utils import unittest
from django.utils.translation import activate, deactivate, ugettext as _
from django.utils.safestring import mark_safe
from django.utils.tzinfo import LocalTimezone
from context import ContextTests
from custom import CustomTagTests, CustomFilterTests
from parser import ParserTests
from unicode import UnicodeTests
from nodelist import NodelistTest
from smartif import *
from response import *
try:
from loaders import *
except ImportError:
pass # If setuptools isn't installed, that's fine. Just move on.
import filters
#################################
# Custom template tag for tests #
#################################

register = template.Library()

class EchoNode(template.Node):
    """Node that renders its stored token list joined by single spaces."""
    def __init__(self, contents):
        self.contents = contents
    def render(self, context):
        return " ".join(self.contents)

def do_echo(parser, token):
    """{% echo a b c %} — render the tag's arguments verbatim."""
    return EchoNode(token.contents.split()[1:])

def do_upper(value):
    """Filter that upper-cases its input."""
    return value.upper()

# The same compile function is registered under two names so tests can
# exercise multiple-name registration.
register.tag("echo", do_echo)
register.tag("other_echo", do_echo)
register.filter("upper", do_upper)
template.libraries['testtags'] = register
#####################################
# Helper objects for template tests #
#####################################

class SomeException(Exception):
    """Exception the template engine must swallow (silent_variable_failure)."""
    silent_variable_failure = True

class SomeOtherException(Exception):
    """Exception the template engine must propagate (no silent flag)."""
    pass

class ContextStackException(Exception):
    """Raised by the harness when a render leaves the context stack imbalanced."""
    pass

class SomeClass:
    """Object exercising attribute/method/dict lookups from templates."""
    def __init__(self):
        self.otherclass = OtherClass()

    def method(self):
        return "SomeClass.method"

    def method2(self, o):
        # Takes an argument, so variable resolution must fail silently on it.
        return o

    def method3(self):
        raise SomeException

    def method4(self):
        raise SomeOtherException

    def __getitem__(self, key):
        # Dictionary-style lookups with distinct silent/noisy failure modes.
        if key == 'silent_fail_key':
            raise SomeException
        elif key == 'noisy_fail_key':
            raise SomeOtherException
        raise KeyError

    def silent_fail_attribute(self):
        raise SomeException
    silent_fail_attribute = property(silent_fail_attribute)

    def noisy_fail_attribute(self):
        raise SomeOtherException
    noisy_fail_attribute = property(noisy_fail_attribute)

class OtherClass:
    def method(self):
        return "OtherClass.method"

class TestObj(object):
    """Booleans for {% if %} tests; is_bad is slow to catch non-short-circuiting."""
    def is_true(self):
        return True

    def is_false(self):
        return False

    def is_bad(self):
        # Deliberately slow: {% if %} must not evaluate it when short-circuiting.
        time.sleep(0.3)
        return True

class SilentGetItemClass(object):
    def __getitem__(self, key):
        raise SomeException

class SilentAttrClass(object):
    def b(self):
        raise SomeException
    b = property(b)

class UTF8Class:
    "Class whose __str__ returns non-ASCII data"
    def __str__(self):
        return u'ŠĐĆŽćžšđ'.encode('utf-8')
class Templates(unittest.TestCase):
    def setUp(self):
        # Pin STATIC_URL / MEDIA_URL so URL-emitting tags produce known output.
        self.old_static_url = settings.STATIC_URL
        self.old_media_url = settings.MEDIA_URL
        settings.STATIC_URL = u"/static/"
        settings.MEDIA_URL = u"/media/"

    def tearDown(self):
        # Restore the settings mutated in setUp().
        settings.STATIC_URL = self.old_static_url
        settings.MEDIA_URL = self.old_media_url
def test_loaders_security(self):
    # Both filesystem-based loaders must refuse to serve templates whose
    # path resolves outside the configured template directories.
    ad_loader = app_directories.Loader()
    fs_loader = filesystem.Loader()
    def test_template_sources(path, template_dirs, expected_sources):
        # expected_sources is either a list of paths or an exception class.
        if isinstance(expected_sources, list):
            # Fix expected sources so they are normcased and abspathed
            expected_sources = [os.path.normcase(os.path.abspath(s)) for s in expected_sources]
        # Test the two loaders (app_directores and filesystem).
        func1 = lambda p, t: list(ad_loader.get_template_sources(p, t))
        func2 = lambda p, t: list(fs_loader.get_template_sources(p, t))
        for func in (func1, func2):
            if isinstance(expected_sources, list):
                self.assertEqual(func(path, template_dirs), expected_sources)
            else:
                self.assertRaises(expected_sources, func, path, template_dirs)

    template_dirs = ['/dir1', '/dir2']
    test_template_sources('index.html', template_dirs,
                          ['/dir1/index.html', '/dir2/index.html'])
    test_template_sources('/etc/passwd', template_dirs, [])
    test_template_sources('etc/passwd', template_dirs,
                          ['/dir1/etc/passwd', '/dir2/etc/passwd'])
    test_template_sources('../etc/passwd', template_dirs, [])
    test_template_sources('../../../etc/passwd', template_dirs, [])
    test_template_sources('/dir1/index.html', template_dirs,
                          ['/dir1/index.html'])
    test_template_sources('../dir2/index.html', template_dirs,
                          ['/dir2/index.html'])
    test_template_sources('/dir1blah', template_dirs, [])
    test_template_sources('../dir1blah', template_dirs, [])
    # UTF-8 bytestrings are permitted.
    test_template_sources('\xc3\x85ngstr\xc3\xb6m', template_dirs,
                          [u'/dir1/Ångström', u'/dir2/Ångström'])
    # Unicode strings are permitted.
    test_template_sources(u'Ångström', template_dirs,
                          [u'/dir1/Ångström', u'/dir2/Ångström'])
    test_template_sources(u'Ångström', ['/Straße'], [u'/Straße/Ångström'])
    test_template_sources('\xc3\x85ngstr\xc3\xb6m', ['/Straße'],
                          [u'/Straße/Ångström'])
    # Invalid UTF-8 encoding in bytestrings is not. Should raise a
    # semi-useful error message.
    test_template_sources('\xc3\xc3', template_dirs, UnicodeDecodeError)
    # Case insensitive tests (for win32). Not run unless we're on
    # a case insensitive operating system.
    if os.path.normcase('/TEST') == os.path.normpath('/test'):
        template_dirs = ['/dir1', '/DIR2']
        test_template_sources('index.html', template_dirs,
                              ['/dir1/index.html', '/dir2/index.html'])
        test_template_sources('/DIR1/index.HTML', template_dirs,
                              ['/dir1/index.html'])
def test_loader_debug_origin(self):
    # Turn TEMPLATE_DEBUG on, so that the origin file name will be kept with
    # the compiled templates.
    old_td, settings.TEMPLATE_DEBUG = settings.TEMPLATE_DEBUG, True
    old_loaders = loader.template_source_loaders
    try:
        loader.template_source_loaders = (filesystem.Loader(),)
        # We rely on the fact that runtests.py sets up TEMPLATE_DIRS to
        # point to a directory containing a 404.html file. Also that
        # the file system and app directories loaders both inherit the
        # load_template method from the BaseLoader class, so we only need
        # to test one of them.
        load_name = '404.html'
        template = loader.get_template(load_name)
        template_name = template.nodelist[0].source[0].name
        self.assertTrue(template_name.endswith(load_name),
            'Template loaded by filesystem loader has incorrect name for debug page: %s' % template_name)
        # Also test the cached loader, since it overrides load_template.
        cache_loader = cached.Loader(('',))
        cache_loader._cached_loaders = loader.template_source_loaders
        loader.template_source_loaders = (cache_loader,)
        template = loader.get_template(load_name)
        template_name = template.nodelist[0].source[0].name
        self.assertTrue(template_name.endswith(load_name),
            'Template loaded through cached loader has incorrect name for debug page: %s' % template_name)
        # Load a second time so the cached copy's origin is checked too.
        template = loader.get_template(load_name)
        template_name = template.nodelist[0].source[0].name
        self.assertTrue(template_name.endswith(load_name),
            'Cached template loaded through cached loader has incorrect name for debug page: %s' % template_name)
    finally:
        # Always restore the loader list and TEMPLATE_DEBUG.
        loader.template_source_loaders = old_loaders
        settings.TEMPLATE_DEBUG = old_td
def test_include_missing_template(self):
    """
    Tests that the correct template is identified as not existing
    when {% include %} specifies a template that does not exist.
    """
    # TEMPLATE_DEBUG must be true, otherwise the exception raised
    # during {% include %} processing will be suppressed.
    old_td, settings.TEMPLATE_DEBUG = settings.TEMPLATE_DEBUG, True
    old_loaders = loader.template_source_loaders
    try:
        # Test the base loader class via the app loader. load_template
        # from base is used by all shipped loaders excepting cached,
        # which has its own test.
        loader.template_source_loaders = (app_directories.Loader(),)
        load_name = 'test_include_error.html'
        r = None
        try:
            tmpl = loader.select_template([load_name])
            r = tmpl.render(template.Context({}))
        except template.TemplateDoesNotExist, e:
            # The missing *included* template must be the one reported.
            settings.TEMPLATE_DEBUG = old_td
            self.assertEqual(e.args[0], 'missing.html')
        self.assertEqual(r, None, 'Template rendering unexpectedly succeeded, produced: ->%r<-' % r)
    finally:
        loader.template_source_loaders = old_loaders
        settings.TEMPLATE_DEBUG = old_td
def test_extends_include_missing_baseloader(self):
    """
    Tests that the correct template is identified as not existing
    when {% extends %} specifies a template that does exist, but
    that template has an {% include %} of something that does not
    exist. See #12787.
    """
    # TEMPLATE_DEBUG must be true, otherwise the exception raised
    # during {% include %} processing will be suppressed.
    old_td, settings.TEMPLATE_DEBUG = settings.TEMPLATE_DEBUG, True
    old_loaders = loader.template_source_loaders
    try:
        # Test the base loader class via the app loader. load_template
        # from base is used by all shipped loaders excepting cached,
        # which has its own test.
        loader.template_source_loaders = (app_directories.Loader(),)
        load_name = 'test_extends_error.html'
        tmpl = loader.get_template(load_name)
        r = None
        try:
            r = tmpl.render(template.Context({}))
        except template.TemplateSyntaxError, e:
            # Under TEMPLATE_DEBUG the inner TemplateDoesNotExist is
            # wrapped into a TemplateSyntaxError naming the missing file.
            settings.TEMPLATE_DEBUG = old_td
            self.assertEqual(e.args[0], 'Caught TemplateDoesNotExist while rendering: missing.html')
        self.assertEqual(r, None, 'Template rendering unexpectedly succeeded, produced: ->%r<-' % r)
    finally:
        loader.template_source_loaders = old_loaders
        settings.TEMPLATE_DEBUG = old_td
def test_extends_include_missing_cachedloader(self):
    """
    Same as test_extends_include_missing_baseloader, only tests
    behavior of the cached loader instead of BaseLoader.
    """
    old_td, settings.TEMPLATE_DEBUG = settings.TEMPLATE_DEBUG, True
    old_loaders = loader.template_source_loaders
    try:
        cache_loader = cached.Loader(('',))
        cache_loader._cached_loaders = (app_directories.Loader(),)
        loader.template_source_loaders = (cache_loader,)
        load_name = 'test_extends_error.html'
        tmpl = loader.get_template(load_name)
        r = None
        try:
            r = tmpl.render(template.Context({}))
        except template.TemplateSyntaxError, e:
            self.assertEqual(e.args[0], 'Caught TemplateDoesNotExist while rendering: missing.html')
        self.assertEqual(r, None, 'Template rendering unexpectedly succeeded, produced: ->%r<-' % r)
        # For the cached loader, repeat the test, to ensure the first attempt did not cache a
        # result that behaves incorrectly on subsequent attempts.
        tmpl = loader.get_template(load_name)
        try:
            tmpl.render(template.Context({}))
        except template.TemplateSyntaxError, e:
            self.assertEqual(e.args[0], 'Caught TemplateDoesNotExist while rendering: missing.html')
        self.assertEqual(r, None, 'Template rendering unexpectedly succeeded, produced: ->%r<-' % r)
    finally:
        loader.template_source_loaders = old_loaders
        settings.TEMPLATE_DEBUG = old_td
def test_token_smart_split(self):
    # Regression test for #7027
    # split_contents() must keep quoted (and _()-wrapped) strings intact.
    token = template.Token(template.TOKEN_BLOCK, 'sometag _("Page not found") value|yesno:_("yes,no")')
    split = token.split_contents()
    self.assertEqual(split, ["sometag", '_("Page not found")', 'value|yesno:_("yes,no")'])
def test_url_reverse_no_settings_module(self):
    # Regression test for #9005
    from django.template import Template, Context, TemplateSyntaxError
    # Simulate running without a settings module while debug is on.
    old_settings_module = settings.SETTINGS_MODULE
    old_template_debug = settings.TEMPLATE_DEBUG
    settings.SETTINGS_MODULE = None
    settings.TEMPLATE_DEBUG = True
    t = Template('{% url will_not_match %}')
    c = Context()
    try:
        rendered = t.render(c)
    except TemplateSyntaxError, e:
        # Assert that we are getting the template syntax error and not the
        # string encoding error.
        self.assertEqual(e.args[0], "Caught NoReverseMatch while rendering: Reverse for 'will_not_match' with arguments '()' and keyword arguments '{}' not found.")
    # Restore the globals mutated above.
    settings.SETTINGS_MODULE = old_settings_module
    settings.TEMPLATE_DEBUG = old_template_debug
def test_invalid_block_suggestion(self):
    # See #7876
    # A stray {% endblock %} inside {% if %} must list the tags that
    # would have been valid at that point.
    from django.template import Template, TemplateSyntaxError
    try:
        t = Template("{% if 1 %}lala{% endblock %}{% endif %}")
    except TemplateSyntaxError, e:
        self.assertEqual(e.args[0], "Invalid block tag: 'endblock', expected 'else' or 'endif'")
def test_templates(self):
    """Run every entry of the data-driven template/filter test table.

    Each entry is rendered in three modes (normal, TEMPLATE_STRING_IF_INVALID
    set, TEMPLATE_DEBUG on), each both uncached and via the cached loader,
    and compared against the expected string or exception class.
    """
    template_tests = self.get_template_tests()
    filter_tests = filters.get_filter_tests()

    # Quickly check that we aren't accidentally using a name in both
    # template and filter tests.
    overlapping_names = [name for name in filter_tests if name in template_tests]
    assert not overlapping_names, 'Duplicate test name(s): %s' % ', '.join(overlapping_names)

    template_tests.update(filter_tests)

    # Register our custom template loader.
    def test_template_loader(template_name, template_dirs=None):
        "A custom template loader that loads the unit-test templates."
        try:
            return (template_tests[template_name][0] , "test:%s" % template_name)
        except KeyError:
            raise template.TemplateDoesNotExist(template_name)

    cache_loader = cached.Loader(('test_template_loader',))
    cache_loader._cached_loaders = (test_template_loader,)

    old_template_loaders = loader.template_source_loaders
    loader.template_source_loaders = [cache_loader]

    failures = []
    tests = template_tests.items()
    tests.sort()

    # Turn TEMPLATE_DEBUG off, because tests assume that.
    old_td, settings.TEMPLATE_DEBUG = settings.TEMPLATE_DEBUG, False

    # Set TEMPLATE_STRING_IF_INVALID to a known string.
    old_invalid = settings.TEMPLATE_STRING_IF_INVALID
    expected_invalid_str = 'INVALID'

    #Set ALLOWED_INCLUDE_ROOTS so that ssi works.
    old_allowed_include_roots = settings.ALLOWED_INCLUDE_ROOTS
    settings.ALLOWED_INCLUDE_ROOTS = os.path.dirname(os.path.abspath(__file__))

    # Warm the URL reversing cache. This ensures we don't pay the cost
    # warming the cache during one of the tests.
    urlresolvers.reverse('regressiontests.templates.views.client_action',
                         kwargs={'id':0,'action':"update"})

    for name, vals in tests:
        # vals[2] is either one expected result or a (normal, invalid[,
        # debug]) tuple of per-mode expectations.
        if isinstance(vals[2], tuple):
            normal_string_result = vals[2][0]
            invalid_string_result = vals[2][1]

            if isinstance(invalid_string_result, tuple):
                expected_invalid_str = 'INVALID %s'
                invalid_string_result = invalid_string_result[0] % invalid_string_result[1]
                template_base.invalid_var_format_string = True

            try:
                template_debug_result = vals[2][2]
            except IndexError:
                template_debug_result = normal_string_result
        else:
            normal_string_result = vals[2]
            invalid_string_result = vals[2]
            template_debug_result = vals[2]

        if 'LANGUAGE_CODE' in vals[1]:
            activate(vals[1]['LANGUAGE_CODE'])
        else:
            activate('en-us')

        for invalid_str, template_debug, result in [
                ('', False, normal_string_result),
                (expected_invalid_str, False, invalid_string_result),
                ('', True, template_debug_result)
            ]:
            settings.TEMPLATE_STRING_IF_INVALID = invalid_str
            settings.TEMPLATE_DEBUG = template_debug
            for is_cached in (False, True):
                try:
                    start = datetime.now()
                    test_template = loader.get_template(name)
                    end = datetime.now()
                    if end-start > timedelta(seconds=0.2):
                        failures.append("Template test (Cached='%s', TEMPLATE_STRING_IF_INVALID='%s', TEMPLATE_DEBUG=%s): %s -- FAILED. Took too long to parse test" % (is_cached, invalid_str, template_debug, name))

                    start = datetime.now()
                    output = self.render(test_template, vals)
                    end = datetime.now()
                    if end-start > timedelta(seconds=0.2):
                        failures.append("Template test (Cached='%s', TEMPLATE_STRING_IF_INVALID='%s', TEMPLATE_DEBUG=%s): %s -- FAILED. Took too long to render test" % (is_cached, invalid_str, template_debug, name))
                except ContextStackException:
                    failures.append("Template test (Cached='%s', TEMPLATE_STRING_IF_INVALID='%s', TEMPLATE_DEBUG=%s): %s -- FAILED. Context stack was left imbalanced" % (is_cached, invalid_str, template_debug, name))
                    continue
                except Exception:
                    # Exceptions are expected results too: compare types.
                    exc_type, exc_value, exc_tb = sys.exc_info()
                    if exc_type != result:
                        print "CHECK", name, exc_type, result
                        tb = '\n'.join(traceback.format_exception(exc_type, exc_value, exc_tb))
                        failures.append("Template test (Cached='%s', TEMPLATE_STRING_IF_INVALID='%s', TEMPLATE_DEBUG=%s): %s -- FAILED. Got %s, exception: %s\n%s" % (is_cached, invalid_str, template_debug, name, exc_type, exc_value, tb))
                    continue
                if output != result:
                    failures.append("Template test (Cached='%s', TEMPLATE_STRING_IF_INVALID='%s', TEMPLATE_DEBUG=%s): %s -- FAILED. Expected %r, got %r" % (is_cached, invalid_str, template_debug, name, result, output))
            cache_loader.reset()

        if 'LANGUAGE_CODE' in vals[1]:
            deactivate()

        if template_base.invalid_var_format_string:
            expected_invalid_str = 'INVALID'
            template_base.invalid_var_format_string = False

    # Restore every global this test mutated.
    loader.template_source_loaders = old_template_loaders
    deactivate()
    settings.TEMPLATE_DEBUG = old_td
    settings.TEMPLATE_STRING_IF_INVALID = old_invalid
    settings.ALLOWED_INCLUDE_ROOTS = old_allowed_include_roots

    self.assertEqual(failures, [], "Tests failed:\n%s\n%s" %
        ('-'*70, ("\n%s\n" % ('-'*70)).join(failures)))
def render(self, test_template, vals):
    """Render test_template against vals[1].

    Raises ContextStackException when rendering leaves the context
    stack with a different depth than it started with.
    """
    ctx = template.Context(vals[1])
    depth_before = len(ctx.dicts)
    rendered = test_template.render(ctx)
    if len(ctx.dicts) != depth_before:
        raise ContextStackException
    return rendered
def get_template_tests(self):
# SYNTAX --
# 'template_name': ('template contents', 'context dict', 'expected string output' or Exception class)
return {
### BASIC SYNTAX ################################################
# Plain text should go through the template parser untouched
'basic-syntax01': ("something cool", {}, "something cool"),
# Variables should be replaced with their value in the current
# context
'basic-syntax02': ("{{ headline }}", {'headline':'Success'}, "Success"),
# More than one replacement variable is allowed in a template
'basic-syntax03': ("{{ first }} --- {{ second }}", {"first" : 1, "second" : 2}, "1 --- 2"),
# Fail silently when a variable is not found in the current context
'basic-syntax04': ("as{{ missing }}df", {}, ("asdf","asINVALIDdf")),
# A variable may not contain more than one word
'basic-syntax06': ("{{ multi word variable }}", {}, template.TemplateSyntaxError),
# Raise TemplateSyntaxError for empty variable tags
'basic-syntax07': ("{{ }}", {}, template.TemplateSyntaxError),
'basic-syntax08': ("{{ }}", {}, template.TemplateSyntaxError),
# Attribute syntax allows a template to call an object's attribute
'basic-syntax09': ("{{ var.method }}", {"var": SomeClass()}, "SomeClass.method"),
# Multiple levels of attribute access are allowed
'basic-syntax10': ("{{ var.otherclass.method }}", {"var": SomeClass()}, "OtherClass.method"),
# Fail silently when a variable's attribute isn't found
'basic-syntax11': ("{{ var.blech }}", {"var": SomeClass()}, ("","INVALID")),
# Raise TemplateSyntaxError when trying to access a variable beginning with an underscore
'basic-syntax12': ("{{ var.__dict__ }}", {"var": SomeClass()}, template.TemplateSyntaxError),
# Raise TemplateSyntaxError when trying to access a variable containing an illegal character
'basic-syntax13': ("{{ va>r }}", {}, template.TemplateSyntaxError),
'basic-syntax14': ("{{ (var.r) }}", {}, template.TemplateSyntaxError),
'basic-syntax15': ("{{ sp%am }}", {}, template.TemplateSyntaxError),
'basic-syntax16': ("{{ eggs! }}", {}, template.TemplateSyntaxError),
'basic-syntax17': ("{{ moo? }}", {}, template.TemplateSyntaxError),
# Attribute syntax allows a template to call a dictionary key's value
'basic-syntax18': ("{{ foo.bar }}", {"foo" : {"bar" : "baz"}}, "baz"),
# Fail silently when a variable's dictionary key isn't found
'basic-syntax19': ("{{ foo.spam }}", {"foo" : {"bar" : "baz"}}, ("","INVALID")),
# Fail silently when accessing a non-simple method
'basic-syntax20': ("{{ var.method2 }}", {"var": SomeClass()}, ("","INVALID")),
# Don't get confused when parsing something that is almost, but not
# quite, a template tag.
'basic-syntax21': ("a {{ moo %} b", {}, "a {{ moo %} b"),
'basic-syntax22': ("{{ moo #}", {}, "{{ moo #}"),
# Will try to treat "moo #} {{ cow" as the variable. Not ideal, but
# costly to work around, so this triggers an error.
'basic-syntax23': ("{{ moo #} {{ cow }}", {"cow": "cow"}, template.TemplateSyntaxError),
# Embedded newlines make it not-a-tag.
'basic-syntax24': ("{{ moo\n }}", {}, "{{ moo\n }}"),
# Literal strings are permitted inside variables, mostly for i18n
# purposes.
'basic-syntax25': ('{{ "fred" }}', {}, "fred"),
'basic-syntax26': (r'{{ "\"fred\"" }}', {}, "\"fred\""),
'basic-syntax27': (r'{{ _("\"fred\"") }}', {}, "\"fred\""),
# regression test for ticket #12554
# make sure a silent_variable_failure Exception is supressed
# on dictionary and attribute lookup
'basic-syntax28': ("{{ a.b }}", {'a': SilentGetItemClass()}, ('', 'INVALID')),
'basic-syntax29': ("{{ a.b }}", {'a': SilentAttrClass()}, ('', 'INVALID')),
# Something that starts like a number but has an extra lookup works as a lookup.
'basic-syntax30': ("{{ 1.2.3 }}", {"1": {"2": {"3": "d"}}}, "d"),
'basic-syntax31': ("{{ 1.2.3 }}", {"1": {"2": ("a", "b", "c", "d")}}, "d"),
'basic-syntax32': ("{{ 1.2.3 }}", {"1": (("x", "x", "x", "x"), ("y", "y", "y", "y"), ("a", "b", "c", "d"))}, "d"),
'basic-syntax33': ("{{ 1.2.3 }}", {"1": ("xxxx", "yyyy", "abcd")}, "d"),
'basic-syntax34': ("{{ 1.2.3 }}", {"1": ({"x": "x"}, {"y": "y"}, {"z": "z", "3": "d"})}, "d"),
# Numbers are numbers even if their digits are in the context.
'basic-syntax35': ("{{ 1 }}", {"1": "abc"}, "1"),
'basic-syntax36': ("{{ 1.2 }}", {"1": "abc"}, "1.2"),
# Call methods in the top level of the context
'basic-syntax37': ('{{ callable }}', {"callable": lambda: "foo bar"}, "foo bar"),
# Call methods returned from dictionary lookups
'basic-syntax38': ('{{ var.callable }}', {"var": {"callable": lambda: "foo bar"}}, "foo bar"),
# List-index syntax allows a template to access a certain item of a subscriptable object.
'list-index01': ("{{ var.1 }}", {"var": ["first item", "second item"]}, "second item"),
# Fail silently when the list index is out of range.
'list-index02': ("{{ var.5 }}", {"var": ["first item", "second item"]}, ("", "INVALID")),
# Fail silently when the variable is not a subscriptable object.
'list-index03': ("{{ var.1 }}", {"var": None}, ("", "INVALID")),
# Fail silently when variable is a dict without the specified key.
'list-index04': ("{{ var.1 }}", {"var": {}}, ("", "INVALID")),
# Dictionary lookup wins out when dict's key is a string.
'list-index05': ("{{ var.1 }}", {"var": {'1': "hello"}}, "hello"),
# But list-index lookup wins out when dict's key is an int, which
# behind the scenes is really a dictionary lookup (for a dict)
# after converting the key to an int.
'list-index06': ("{{ var.1 }}", {"var": {1: "hello"}}, "hello"),
# Dictionary lookup wins out when there is a string and int version of the key.
'list-index07': ("{{ var.1 }}", {"var": {'1': "hello", 1: "world"}}, "hello"),
# Basic filter usage
'filter-syntax01': ("{{ var|upper }}", {"var": "Django is the greatest!"}, "DJANGO IS THE GREATEST!"),
# Chained filters
'filter-syntax02': ("{{ var|upper|lower }}", {"var": "Django is the greatest!"}, "django is the greatest!"),
# Raise TemplateSyntaxError for space between a variable and filter pipe
'filter-syntax03': ("{{ var |upper }}", {}, template.TemplateSyntaxError),
# Raise TemplateSyntaxError for space after a filter pipe
'filter-syntax04': ("{{ var| upper }}", {}, template.TemplateSyntaxError),
# Raise TemplateSyntaxError for a nonexistent filter
'filter-syntax05': ("{{ var|does_not_exist }}", {}, template.TemplateSyntaxError),
# Raise TemplateSyntaxError when trying to access a filter containing an illegal character
'filter-syntax06': ("{{ var|fil(ter) }}", {}, template.TemplateSyntaxError),
# Raise TemplateSyntaxError for invalid block tags
'filter-syntax07': ("{% nothing_to_see_here %}", {}, template.TemplateSyntaxError),
# Raise TemplateSyntaxError for empty block tags
'filter-syntax08': ("{% %}", {}, template.TemplateSyntaxError),
# Chained filters, with an argument to the first one
'filter-syntax09': ('{{ var|removetags:"b i"|upper|lower }}', {"var": "<b><i>Yes</i></b>"}, "yes"),
# Literal strings are always "safe" from auto-escaping when used as filter arguments.
'filter-syntax10': (r'{{ var|default_if_none:" endquote\" hah" }}',
{"var": None}, ' endquote" hah'),
# Variable as argument
'filter-syntax11': (r'{{ var|default_if_none:var2 }}', {"var": None, "var2": "happy"}, 'happy'),
# Default argument testing
'filter-syntax12': (r'{{ var|yesno:"yup,nup,mup" }} {{ var|yesno }}', {"var": True}, 'yup yes'),
# Fail silently for methods that raise an exception with a
# "silent_variable_failure" attribute
'filter-syntax13': (r'1{{ var.method3 }}2', {"var": SomeClass()}, ("12", "1INVALID2")),
# In methods that raise an exception without a
# "silent_variable_failure" attribute set to True, the exception propagates
'filter-syntax14': (r'1{{ var.method4 }}2', {"var": SomeClass()}, (SomeOtherException, SomeOtherException, template.TemplateSyntaxError)),
# Escaped backslash in argument
'filter-syntax15': (r'{{ var|default_if_none:"foo\bar" }}', {"var": None}, r'foo\bar'),
# Escaped backslash using known escape char
'filter-syntax16': (r'{{ var|default_if_none:"foo\now" }}', {"var": None}, r'foo\now'),
# Empty strings can be passed as arguments to filters
'filter-syntax17': (r'{{ var|join:"" }}', {'var': ['a', 'b', 'c']}, 'abc'),
# Make sure that any unicode strings are converted to bytestrings
# in the final output.
'filter-syntax18': (r'{{ var }}', {'var': UTF8Class()}, u'\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111'),
# Numbers as filter arguments should work
'filter-syntax19': ('{{ var|truncatewords:1 }}', {"var": "hello world"}, "hello ..."),
# Filters should accept empty string constants
'filter-syntax20': ('{{ ""|default_if_none:"was none" }}', {}, ""),
# Fail silently for non-callable attribute and dict lookups which
# raise an exception with a "silent_variable_failure" attribute
'filter-syntax21': (r'1{{ var.silent_fail_key }}2', {"var": SomeClass()}, ("12", "1INVALID2")),
'filter-syntax22': (r'1{{ var.silent_fail_attribute }}2', {"var": SomeClass()}, ("12", "1INVALID2")),
# In attribute and dict lookups that raise an unexpected exception
# without a "silent_variable_failure" attribute set to True, the
# exception propagates
'filter-syntax23': (r'1{{ var.noisy_fail_key }}2', {"var": SomeClass()}, (SomeOtherException, SomeOtherException, template.TemplateSyntaxError)),
'filter-syntax24': (r'1{{ var.noisy_fail_attribute }}2', {"var": SomeClass()}, (SomeOtherException, SomeOtherException, template.TemplateSyntaxError)),
### COMMENT SYNTAX ########################################################
'comment-syntax01': ("{# this is hidden #}hello", {}, "hello"),
'comment-syntax02': ("{# this is hidden #}hello{# foo #}", {}, "hello"),
# Comments can contain invalid stuff.
'comment-syntax03': ("foo{# {% if %} #}", {}, "foo"),
'comment-syntax04': ("foo{# {% endblock %} #}", {}, "foo"),
'comment-syntax05': ("foo{# {% somerandomtag %} #}", {}, "foo"),
'comment-syntax06': ("foo{# {% #}", {}, "foo"),
'comment-syntax07': ("foo{# %} #}", {}, "foo"),
'comment-syntax08': ("foo{# %} #}bar", {}, "foobar"),
'comment-syntax09': ("foo{# {{ #}", {}, "foo"),
'comment-syntax10': ("foo{# }} #}", {}, "foo"),
'comment-syntax11': ("foo{# { #}", {}, "foo"),
'comment-syntax12': ("foo{# } #}", {}, "foo"),
### COMMENT TAG ###########################################################
'comment-tag01': ("{% comment %}this is hidden{% endcomment %}hello", {}, "hello"),
'comment-tag02': ("{% comment %}this is hidden{% endcomment %}hello{% comment %}foo{% endcomment %}", {}, "hello"),
# Comment tag can contain invalid stuff.
'comment-tag03': ("foo{% comment %} {% if %} {% endcomment %}", {}, "foo"),
'comment-tag04': ("foo{% comment %} {% endblock %} {% endcomment %}", {}, "foo"),
'comment-tag05': ("foo{% comment %} {% somerandomtag %} {% endcomment %}", {}, "foo"),
### CYCLE TAG #############################################################
'cycle01': ('{% cycle a %}', {}, template.TemplateSyntaxError),
'cycle02': ('{% cycle a,b,c as abc %}{% cycle abc %}', {}, 'ab'),
'cycle03': ('{% cycle a,b,c as abc %}{% cycle abc %}{% cycle abc %}', {}, 'abc'),
'cycle04': ('{% cycle a,b,c as abc %}{% cycle abc %}{% cycle abc %}{% cycle abc %}', {}, 'abca'),
'cycle05': ('{% cycle %}', {}, template.TemplateSyntaxError),
'cycle06': ('{% cycle a %}', {}, template.TemplateSyntaxError),
'cycle07': ('{% cycle a,b,c as foo %}{% cycle bar %}', {}, template.TemplateSyntaxError),
'cycle08': ('{% cycle a,b,c as foo %}{% cycle foo %}{{ foo }}{{ foo }}{% cycle foo %}{{ foo }}', {}, 'abbbcc'),
'cycle09': ("{% for i in test %}{% cycle a,b %}{{ i }},{% endfor %}", {'test': range(5)}, 'a0,b1,a2,b3,a4,'),
'cycle10': ("{% cycle 'a' 'b' 'c' as abc %}{% cycle abc %}", {}, 'ab'),
'cycle11': ("{% cycle 'a' 'b' 'c' as abc %}{% cycle abc %}{% cycle abc %}", {}, 'abc'),
'cycle12': ("{% cycle 'a' 'b' 'c' as abc %}{% cycle abc %}{% cycle abc %}{% cycle abc %}", {}, 'abca'),
'cycle13': ("{% for i in test %}{% cycle 'a' 'b' %}{{ i }},{% endfor %}", {'test': range(5)}, 'a0,b1,a2,b3,a4,'),
'cycle14': ("{% cycle one two as foo %}{% cycle foo %}", {'one': '1','two': '2'}, '12'),
'cycle15': ("{% for i in test %}{% cycle aye bee %}{{ i }},{% endfor %}", {'test': range(5), 'aye': 'a', 'bee': 'b'}, 'a0,b1,a2,b3,a4,'),
'cycle16': ("{% cycle one|lower two as foo %}{% cycle foo %}", {'one': 'A','two': '2'}, 'a2'),
'cycle17': ("{% cycle 'a' 'b' 'c' as abc silent %}{% cycle abc %}{% cycle abc %}{% cycle abc %}{% cycle abc %}", {}, ""),
'cycle18': ("{% cycle 'a' 'b' 'c' as foo invalid_flag %}", {}, template.TemplateSyntaxError),
'cycle19': ("{% cycle 'a' 'b' as silent %}{% cycle silent %}", {}, "ab"),
'cycle20': ("{% cycle one two as foo %} & {% cycle foo %}", {'one' : 'A & B', 'two' : 'C & D'}, "A & B & C & D"),
'cycle21': ("{% filter force_escape %}{% cycle one two as foo %} & {% cycle foo %}{% endfilter %}", {'one' : 'A & B', 'two' : 'C & D'}, "A & B & C & D"),
'cycle22': ("{% for x in values %}{% cycle 'a' 'b' 'c' as abc silent %}{{ x }}{% endfor %}", {'values': [1,2,3,4]}, "1234"),
'cycle23': ("{% for x in values %}{% cycle 'a' 'b' 'c' as abc silent %}{{ abc }}{{ x }}{% endfor %}", {'values': [1,2,3,4]}, "a1b2c3a4"),
'included-cycle': ('{{ abc }}', {'abc': 'xxx'}, 'xxx'),
'cycle24': ("{% for x in values %}{% cycle 'a' 'b' 'c' as abc silent %}{% include 'included-cycle' %}{% endfor %}", {'values': [1,2,3,4]}, "abca"),
### EXCEPTIONS ############################################################
# Raise exception for invalid template name
'exception01': ("{% extends 'nonexistent' %}", {}, (template.TemplateDoesNotExist, template.TemplateDoesNotExist, template.TemplateSyntaxError)),
# Raise exception for invalid template name (in variable)
'exception02': ("{% extends nonexistent %}", {}, (template.TemplateSyntaxError, template.TemplateDoesNotExist)),
# Raise exception for extra {% extends %} tags
'exception03': ("{% extends 'inheritance01' %}{% block first %}2{% endblock %}{% extends 'inheritance16' %}", {}, template.TemplateSyntaxError),
# Raise exception for custom tags used in child with {% load %} tag in parent, not in child
'exception04': ("{% extends 'inheritance17' %}{% block first %}{% echo 400 %}5678{% endblock %}", {}, template.TemplateSyntaxError),
### FILTER TAG ############################################################
'filter01': ('{% filter upper %}{% endfilter %}', {}, ''),
'filter02': ('{% filter upper %}django{% endfilter %}', {}, 'DJANGO'),
'filter03': ('{% filter upper|lower %}django{% endfilter %}', {}, 'django'),
'filter04': ('{% filter cut:remove %}djangospam{% endfilter %}', {'remove': 'spam'}, 'django'),
### FIRSTOF TAG ###########################################################
'firstof01': ('{% firstof a b c %}', {'a':0,'b':0,'c':0}, ''),
'firstof02': ('{% firstof a b c %}', {'a':1,'b':0,'c':0}, '1'),
'firstof03': ('{% firstof a b c %}', {'a':0,'b':2,'c':0}, '2'),
'firstof04': ('{% firstof a b c %}', {'a':0,'b':0,'c':3}, '3'),
'firstof05': ('{% firstof a b c %}', {'a':1,'b':2,'c':3}, '1'),
'firstof06': ('{% firstof a b c %}', {'b':0,'c':3}, '3'),
'firstof07': ('{% firstof a b "c" %}', {'a':0}, 'c'),
'firstof08': ('{% firstof a b "c and d" %}', {'a':0,'b':0}, 'c and d'),
'firstof09': ('{% firstof %}', {}, template.TemplateSyntaxError),
### FOR TAG ###############################################################
'for-tag01': ("{% for val in values %}{{ val }}{% endfor %}", {"values": [1, 2, 3]}, "123"),
'for-tag02': ("{% for val in values reversed %}{{ val }}{% endfor %}", {"values": [1, 2, 3]}, "321"),
'for-tag-vars01': ("{% for val in values %}{{ forloop.counter }}{% endfor %}", {"values": [6, 6, 6]}, "123"),
'for-tag-vars02': ("{% for val in values %}{{ forloop.counter0 }}{% endfor %}", {"values": [6, 6, 6]}, "012"),
'for-tag-vars03': ("{% for val in values %}{{ forloop.revcounter }}{% endfor %}", {"values": [6, 6, 6]}, "321"),
'for-tag-vars04': ("{% for val in values %}{{ forloop.revcounter0 }}{% endfor %}", {"values": [6, 6, 6]}, "210"),
'for-tag-vars05': ("{% for val in values %}{% if forloop.first %}f{% else %}x{% endif %}{% endfor %}", {"values": [6, 6, 6]}, "fxx"),
'for-tag-vars06': ("{% for val in values %}{% if forloop.last %}l{% else %}x{% endif %}{% endfor %}", {"values": [6, 6, 6]}, "xxl"),
'for-tag-unpack01': ("{% for key,value in items %}{{ key }}:{{ value }}/{% endfor %}", {"items": (('one', 1), ('two', 2))}, "one:1/two:2/"),
'for-tag-unpack03': ("{% for key, value in items %}{{ key }}:{{ value }}/{% endfor %}", {"items": (('one', 1), ('two', 2))}, "one:1/two:2/"),
'for-tag-unpack04': ("{% for key , value in items %}{{ key }}:{{ value }}/{% endfor %}", {"items": (('one', 1), ('two', 2))}, "one:1/two:2/"),
'for-tag-unpack05': ("{% for key ,value in items %}{{ key }}:{{ value }}/{% endfor %}", {"items": (('one', 1), ('two', 2))}, "one:1/two:2/"),
'for-tag-unpack06': ("{% for key value in items %}{{ key }}:{{ value }}/{% endfor %}", {"items": (('one', 1), ('two', 2))}, template.TemplateSyntaxError),
'for-tag-unpack07': ("{% for key,,value in items %}{{ key }}:{{ value }}/{% endfor %}", {"items": (('one', 1), ('two', 2))}, template.TemplateSyntaxError),
'for-tag-unpack08': ("{% for key,value, in items %}{{ key }}:{{ value }}/{% endfor %}", {"items": (('one', 1), ('two', 2))}, template.TemplateSyntaxError),
# Ensure that a single loopvar doesn't truncate the list in val.
'for-tag-unpack09': ("{% for val in items %}{{ val.0 }}:{{ val.1 }}/{% endfor %}", {"items": (('one', 1), ('two', 2))}, "one:1/two:2/"),
# Otherwise, silently truncate if the length of loopvars differs to the length of each set of items.
'for-tag-unpack10': ("{% for x,y in items %}{{ x }}:{{ y }}/{% endfor %}", {"items": (('one', 1, 'carrot'), ('two', 2, 'orange'))}, "one:1/two:2/"),
'for-tag-unpack11': ("{% for x,y,z in items %}{{ x }}:{{ y }},{{ z }}/{% endfor %}", {"items": (('one', 1), ('two', 2))}, ("one:1,/two:2,/", "one:1,INVALID/two:2,INVALID/")),
'for-tag-unpack12': ("{% for x,y,z in items %}{{ x }}:{{ y }},{{ z }}/{% endfor %}", {"items": (('one', 1, 'carrot'), ('two', 2))}, ("one:1,carrot/two:2,/", "one:1,carrot/two:2,INVALID/")),
'for-tag-unpack13': ("{% for x,y,z in items %}{{ x }}:{{ y }},{{ z }}/{% endfor %}", {"items": (('one', 1, 'carrot'), ('two', 2, 'cheese'))}, ("one:1,carrot/two:2,cheese/", "one:1,carrot/two:2,cheese/")),
'for-tag-unpack14': ("{% for x,y in items %}{{ x }}:{{ y }}/{% endfor %}", {"items": (1, 2)}, (":/:/", "INVALID:INVALID/INVALID:INVALID/")),
'for-tag-empty01': ("{% for val in values %}{{ val }}{% empty %}empty text{% endfor %}", {"values": [1, 2, 3]}, "123"),
'for-tag-empty02': ("{% for val in values %}{{ val }}{% empty %}values array empty{% endfor %}", {"values": []}, "values array empty"),
'for-tag-empty03': ("{% for val in values %}{{ val }}{% empty %}values array not found{% endfor %}", {}, "values array not found"),
### IF TAG ################################################################
'if-tag01': ("{% if foo %}yes{% else %}no{% endif %}", {"foo": True}, "yes"),
'if-tag02': ("{% if foo %}yes{% else %}no{% endif %}", {"foo": False}, "no"),
'if-tag03': ("{% if foo %}yes{% else %}no{% endif %}", {}, "no"),
# Filters
'if-tag-filter01': ("{% if foo|length == 5 %}yes{% else %}no{% endif %}", {'foo': 'abcde'}, "yes"),
'if-tag-filter02': ("{% if foo|upper == 'ABC' %}yes{% else %}no{% endif %}", {}, "no"),
# Equality
'if-tag-eq01': ("{% if foo == bar %}yes{% else %}no{% endif %}", {}, "yes"),
'if-tag-eq02': ("{% if foo == bar %}yes{% else %}no{% endif %}", {'foo': 1}, "no"),
'if-tag-eq03': ("{% if foo == bar %}yes{% else %}no{% endif %}", {'foo': 1, 'bar': 1}, "yes"),
'if-tag-eq04': ("{% if foo == bar %}yes{% else %}no{% endif %}", {'foo': 1, 'bar': 2}, "no"),
'if-tag-eq05': ("{% if foo == '' %}yes{% else %}no{% endif %}", {}, "no"),
# Comparison
'if-tag-gt-01': ("{% if 2 > 1 %}yes{% else %}no{% endif %}", {}, "yes"),
'if-tag-gt-02': ("{% if 1 > 1 %}yes{% else %}no{% endif %}", {}, "no"),
'if-tag-gte-01': ("{% if 1 >= 1 %}yes{% else %}no{% endif %}", {}, "yes"),
'if-tag-gte-02': ("{% if 1 >= 2 %}yes{% else %}no{% endif %}", {}, "no"),
'if-tag-lt-01': ("{% if 1 < 2 %}yes{% else %}no{% endif %}", {}, "yes"),
'if-tag-lt-02': ("{% if 1 < 1 %}yes{% else %}no{% endif %}", {}, "no"),
'if-tag-lte-01': ("{% if 1 <= 1 %}yes{% else %}no{% endif %}", {}, "yes"),
'if-tag-lte-02': ("{% if 2 <= 1 %}yes{% else %}no{% endif %}", {}, "no"),
# Contains
'if-tag-in-01': ("{% if 1 in x %}yes{% else %}no{% endif %}", {'x':[1]}, "yes"),
'if-tag-in-02': ("{% if 2 in x %}yes{% else %}no{% endif %}", {'x':[1]}, "no"),
'if-tag-not-in-01': ("{% if 1 not in x %}yes{% else %}no{% endif %}", {'x':[1]}, "no"),
'if-tag-not-in-02': ("{% if 2 not in x %}yes{% else %}no{% endif %}", {'x':[1]}, "yes"),
# AND
'if-tag-and01': ("{% if foo and bar %}yes{% else %}no{% endif %}", {'foo': True, 'bar': True}, 'yes'),
'if-tag-and02': ("{% if foo and bar %}yes{% else %}no{% endif %}", {'foo': True, 'bar': False}, 'no'),
'if-tag-and03': ("{% if foo and bar %}yes{% else %}no{% endif %}", {'foo': False, 'bar': True}, 'no'),
'if-tag-and04': ("{% if foo and bar %}yes{% else %}no{% endif %}", {'foo': False, 'bar': False}, 'no'),
'if-tag-and05': ("{% if foo and bar %}yes{% else %}no{% endif %}", {'foo': False}, 'no'),
'if-tag-and06': ("{% if foo and bar %}yes{% else %}no{% endif %}", {'bar': False}, 'no'),
'if-tag-and07': ("{% if foo and bar %}yes{% else %}no{% endif %}", {'foo': True}, 'no'),
'if-tag-and08': ("{% if foo and bar %}yes{% else %}no{% endif %}", {'bar': True}, 'no'),
# OR
'if-tag-or01': ("{% if foo or bar %}yes{% else %}no{% endif %}", {'foo': True, 'bar': True}, 'yes'),
'if-tag-or02': ("{% if foo or bar %}yes{% else %}no{% endif %}", {'foo': True, 'bar': False}, 'yes'),
'if-tag-or03': ("{% if foo or bar %}yes{% else %}no{% endif %}", {'foo': False, 'bar': True}, 'yes'),
'if-tag-or04': ("{% if foo or bar %}yes{% else %}no{% endif %}", {'foo': False, 'bar': False}, 'no'),
'if-tag-or05': ("{% if foo or bar %}yes{% else %}no{% endif %}", {'foo': False}, 'no'),
'if-tag-or06': ("{% if foo or bar %}yes{% else %}no{% endif %}", {'bar': False}, 'no'),
'if-tag-or07': ("{% if foo or bar %}yes{% else %}no{% endif %}", {'foo': True}, 'yes'),
'if-tag-or08': ("{% if foo or bar %}yes{% else %}no{% endif %}", {'bar': True}, 'yes'),
# multiple ORs
'if-tag-or09': ("{% if foo or bar or baz %}yes{% else %}no{% endif %}", {'baz': True}, 'yes'),
# NOT
'if-tag-not01': ("{% if not foo %}no{% else %}yes{% endif %}", {'foo': True}, 'yes'),
'if-tag-not02': ("{% if not not foo %}no{% else %}yes{% endif %}", {'foo': True}, 'no'),
# not03 to not05 removed, now TemplateSyntaxErrors
'if-tag-not06': ("{% if foo and not bar %}yes{% else %}no{% endif %}", {}, 'no'),
'if-tag-not07': ("{% if foo and not bar %}yes{% else %}no{% endif %}", {'foo': True, 'bar': True}, 'no'),
'if-tag-not08': ("{% if foo and not bar %}yes{% else %}no{% endif %}", {'foo': True, 'bar': False}, 'yes'),
'if-tag-not09': ("{% if foo and not bar %}yes{% else %}no{% endif %}", {'foo': False, 'bar': True}, 'no'),
'if-tag-not10': ("{% if foo and not bar %}yes{% else %}no{% endif %}", {'foo': False, 'bar': False}, 'no'),
'if-tag-not11': ("{% if not foo and bar %}yes{% else %}no{% endif %}", {}, 'no'),
'if-tag-not12': ("{% if not foo and bar %}yes{% else %}no{% endif %}", {'foo': True, 'bar': True}, 'no'),
'if-tag-not13': ("{% if not foo and bar %}yes{% else %}no{% endif %}", {'foo': True, 'bar': False}, 'no'),
'if-tag-not14': ("{% if not foo and bar %}yes{% else %}no{% endif %}", {'foo': False, 'bar': True}, 'yes'),
'if-tag-not15': ("{% if not foo and bar %}yes{% else %}no{% endif %}", {'foo': False, 'bar': False}, 'no'),
'if-tag-not16': ("{% if foo or not bar %}yes{% else %}no{% endif %}", {}, 'yes'),
'if-tag-not17': ("{% if foo or not bar %}yes{% else %}no{% endif %}", {'foo': True, 'bar': True}, 'yes'),
'if-tag-not18': ("{% if foo or not bar %}yes{% else %}no{% endif %}", {'foo': True, 'bar': False}, 'yes'),
'if-tag-not19': ("{% if foo or not bar %}yes{% else %}no{% endif %}", {'foo': False, 'bar': True}, 'no'),
'if-tag-not20': ("{% if foo or not bar %}yes{% else %}no{% endif %}", {'foo': False, 'bar': False}, 'yes'),
'if-tag-not21': ("{% if not foo or bar %}yes{% else %}no{% endif %}", {}, 'yes'),
'if-tag-not22': ("{% if not foo or bar %}yes{% else %}no{% endif %}", {'foo': True, 'bar': True}, 'yes'),
'if-tag-not23': ("{% if not foo or bar %}yes{% else %}no{% endif %}", {'foo': True, 'bar': False}, 'no'),
'if-tag-not24': ("{% if not foo or bar %}yes{% else %}no{% endif %}", {'foo': False, 'bar': True}, 'yes'),
'if-tag-not25': ("{% if not foo or bar %}yes{% else %}no{% endif %}", {'foo': False, 'bar': False}, 'yes'),
'if-tag-not26': ("{% if not foo and not bar %}yes{% else %}no{% endif %}", {}, 'yes'),
'if-tag-not27': ("{% if not foo and not bar %}yes{% else %}no{% endif %}", {'foo': True, 'bar': True}, 'no'),
'if-tag-not28': ("{% if not foo and not bar %}yes{% else %}no{% endif %}", {'foo': True, 'bar': False}, 'no'),
'if-tag-not29': ("{% if not foo and not bar %}yes{% else %}no{% endif %}", {'foo': False, 'bar': True}, 'no'),
'if-tag-not30': ("{% if not foo and not bar %}yes{% else %}no{% endif %}", {'foo': False, 'bar': False}, 'yes'),
'if-tag-not31': ("{% if not foo or not bar %}yes{% else %}no{% endif %}", {}, 'yes'),
'if-tag-not32': ("{% if not foo or not bar %}yes{% else %}no{% endif %}", {'foo': True, 'bar': True}, 'no'),
'if-tag-not33': ("{% if not foo or not bar %}yes{% else %}no{% endif %}", {'foo': True, 'bar': False}, 'yes'),
'if-tag-not34': ("{% if not foo or not bar %}yes{% else %}no{% endif %}", {'foo': False, 'bar': True}, 'yes'),
'if-tag-not35': ("{% if not foo or not bar %}yes{% else %}no{% endif %}", {'foo': False, 'bar': False}, 'yes'),
# Various syntax errors
'if-tag-error01': ("{% if %}yes{% endif %}", {}, template.TemplateSyntaxError),
'if-tag-error02': ("{% if foo and %}yes{% else %}no{% endif %}", {'foo': True}, template.TemplateSyntaxError),
'if-tag-error03': ("{% if foo or %}yes{% else %}no{% endif %}", {'foo': True}, template.TemplateSyntaxError),
'if-tag-error04': ("{% if not foo and %}yes{% else %}no{% endif %}", {'foo': True}, template.TemplateSyntaxError),
'if-tag-error05': ("{% if not foo or %}yes{% else %}no{% endif %}", {'foo': True}, template.TemplateSyntaxError),
'if-tag-error06': ("{% if abc def %}yes{% endif %}", {}, template.TemplateSyntaxError),
'if-tag-error07': ("{% if not %}yes{% endif %}", {}, template.TemplateSyntaxError),
'if-tag-error08': ("{% if and %}yes{% endif %}", {}, template.TemplateSyntaxError),
'if-tag-error09': ("{% if or %}yes{% endif %}", {}, template.TemplateSyntaxError),
'if-tag-error10': ("{% if == %}yes{% endif %}", {}, template.TemplateSyntaxError),
'if-tag-error11': ("{% if 1 == %}yes{% endif %}", {}, template.TemplateSyntaxError),
'if-tag-error12': ("{% if a not b %}yes{% endif %}", {}, template.TemplateSyntaxError),
# If evaluations are shortcircuited where possible
# These tests will fail by taking too long to run. When the if clause
# is shortcircuiting correctly, the is_bad() function shouldn't be
# evaluated, and the deliberate sleep won't happen.
'if-tag-shortcircuit01': ('{% if x.is_true or x.is_bad %}yes{% else %}no{% endif %}', {'x': TestObj()}, "yes"),
'if-tag-shortcircuit02': ('{% if x.is_false and x.is_bad %}yes{% else %}no{% endif %}', {'x': TestObj()}, "no"),
# Non-existent args
'if-tag-badarg01':("{% if x|default_if_none:y %}yes{% endif %}", {}, ''),
'if-tag-badarg02':("{% if x|default_if_none:y %}yes{% endif %}", {'y': 0}, ''),
'if-tag-badarg03':("{% if x|default_if_none:y %}yes{% endif %}", {'y': 1}, 'yes'),
'if-tag-badarg04':("{% if x|default_if_none:y %}yes{% else %}no{% endif %}", {}, 'no'),
# Additional, more precise parsing tests are in SmartIfTests
### IFCHANGED TAG #########################################################
'ifchanged01': ('{% for n in num %}{% ifchanged %}{{ n }}{% endifchanged %}{% endfor %}', {'num': (1,2,3)}, '123'),
'ifchanged02': ('{% for n in num %}{% ifchanged %}{{ n }}{% endifchanged %}{% endfor %}', {'num': (1,1,3)}, '13'),
'ifchanged03': ('{% for n in num %}{% ifchanged %}{{ n }}{% endifchanged %}{% endfor %}', {'num': (1,1,1)}, '1'),
'ifchanged04': ('{% for n in num %}{% ifchanged %}{{ n }}{% endifchanged %}{% for x in numx %}{% ifchanged %}{{ x }}{% endifchanged %}{% endfor %}{% endfor %}', {'num': (1, 2, 3), 'numx': (2, 2, 2)}, '122232'),
'ifchanged05': ('{% for n in num %}{% ifchanged %}{{ n }}{% endifchanged %}{% for x in numx %}{% ifchanged %}{{ x }}{% endifchanged %}{% endfor %}{% endfor %}', {'num': (1, 1, 1), 'numx': (1, 2, 3)}, '1123123123'),
'ifchanged06': ('{% for n in num %}{% ifchanged %}{{ n }}{% endifchanged %}{% for x in numx %}{% ifchanged %}{{ x }}{% endifchanged %}{% endfor %}{% endfor %}', {'num': (1, 1, 1), 'numx': (2, 2, 2)}, '1222'),
'ifchanged07': ('{% for n in num %}{% ifchanged %}{{ n }}{% endifchanged %}{% for x in numx %}{% ifchanged %}{{ x }}{% endifchanged %}{% for y in numy %}{% ifchanged %}{{ y }}{% endifchanged %}{% endfor %}{% endfor %}{% endfor %}', {'num': (1, 1, 1), 'numx': (2, 2, 2), 'numy': (3, 3, 3)}, '1233323332333'),
'ifchanged08': ('{% for data in datalist %}{% for c,d in data %}{% if c %}{% ifchanged %}{{ d }}{% endifchanged %}{% endif %}{% endfor %}{% endfor %}', {'datalist': [[(1, 'a'), (1, 'a'), (0, 'b'), (1, 'c')], [(0, 'a'), (1, 'c'), (1, 'd'), (1, 'd'), (0, 'e')]]}, 'accd'),
# Test one parameter given to ifchanged.
'ifchanged-param01': ('{% for n in num %}{% ifchanged n %}..{% endifchanged %}{{ n }}{% endfor %}', { 'num': (1,2,3) }, '..1..2..3'),
'ifchanged-param02': ('{% for n in num %}{% for x in numx %}{% ifchanged n %}..{% endifchanged %}{{ x }}{% endfor %}{% endfor %}', { 'num': (1,2,3), 'numx': (5,6,7) }, '..567..567..567'),
# Test multiple parameters to ifchanged.
'ifchanged-param03': ('{% for n in num %}{{ n }}{% for x in numx %}{% ifchanged x n %}{{ x }}{% endifchanged %}{% endfor %}{% endfor %}', { 'num': (1,1,2), 'numx': (5,6,6) }, '156156256'),
# Test a date+hour like construct, where the hour of the last day
# is the same but the date had changed, so print the hour anyway.
'ifchanged-param04': ('{% for d in days %}{% ifchanged %}{{ d.day }}{% endifchanged %}{% for h in d.hours %}{% ifchanged d h %}{{ h }}{% endifchanged %}{% endfor %}{% endfor %}', {'days':[{'day':1, 'hours':[1,2,3]},{'day':2, 'hours':[3]},] }, '112323'),
# Logically the same as above, just written with explicit
# ifchanged for the day.
'ifchanged-param05': ('{% for d in days %}{% ifchanged d.day %}{{ d.day }}{% endifchanged %}{% for h in d.hours %}{% ifchanged d.day h %}{{ h }}{% endifchanged %}{% endfor %}{% endfor %}', {'days':[{'day':1, 'hours':[1,2,3]},{'day':2, 'hours':[3]},] }, '112323'),
# Test the else clause of ifchanged.
'ifchanged-else01': ('{% for id in ids %}{{ id }}{% ifchanged id %}-first{% else %}-other{% endifchanged %},{% endfor %}', {'ids': [1,1,2,2,2,3]}, '1-first,1-other,2-first,2-other,2-other,3-first,'),
'ifchanged-else02': ('{% for id in ids %}{{ id }}-{% ifchanged id %}{% cycle red,blue %}{% else %}grey{% endifchanged %},{% endfor %}', {'ids': [1,1,2,2,2,3]}, '1-red,1-grey,2-blue,2-grey,2-grey,3-red,'),
'ifchanged-else03': ('{% for id in ids %}{{ id }}{% ifchanged id %}-{% cycle red,blue %}{% else %}{% endifchanged %},{% endfor %}', {'ids': [1,1,2,2,2,3]}, '1-red,1,2-blue,2,2,3-red,'),
'ifchanged-else04': ('{% for id in ids %}{% ifchanged %}***{{ id }}*{% else %}...{% endifchanged %}{{ forloop.counter }}{% endfor %}', {'ids': [1,1,2,2,2,3,4]}, '***1*1...2***2*3...4...5***3*6***4*7'),
### IFEQUAL TAG ###########################################################
'ifequal01': ("{% ifequal a b %}yes{% endifequal %}", {"a": 1, "b": 2}, ""),
'ifequal02': ("{% ifequal a b %}yes{% endifequal %}", {"a": 1, "b": 1}, "yes"),
'ifequal03': ("{% ifequal a b %}yes{% else %}no{% endifequal %}", {"a": 1, "b": 2}, "no"),
'ifequal04': ("{% ifequal a b %}yes{% else %}no{% endifequal %}", {"a": 1, "b": 1}, "yes"),
'ifequal05': ("{% ifequal a 'test' %}yes{% else %}no{% endifequal %}", {"a": "test"}, "yes"),
'ifequal06': ("{% ifequal a 'test' %}yes{% else %}no{% endifequal %}", {"a": "no"}, "no"),
'ifequal07': ('{% ifequal a "test" %}yes{% else %}no{% endifequal %}', {"a": "test"}, "yes"),
'ifequal08': ('{% ifequal a "test" %}yes{% else %}no{% endifequal %}', {"a": "no"}, "no"),
'ifequal09': ('{% ifequal a "test" %}yes{% else %}no{% endifequal %}', {}, "no"),
'ifequal10': ('{% ifequal a b %}yes{% else %}no{% endifequal %}', {}, "yes"),
# SMART SPLITTING
'ifequal-split01': ('{% ifequal a "test man" %}yes{% else %}no{% endifequal %}', {}, "no"),
'ifequal-split02': ('{% ifequal a "test man" %}yes{% else %}no{% endifequal %}', {'a': 'foo'}, "no"),
'ifequal-split03': ('{% ifequal a "test man" %}yes{% else %}no{% endifequal %}', {'a': 'test man'}, "yes"),
'ifequal-split04': ("{% ifequal a 'test man' %}yes{% else %}no{% endifequal %}", {'a': 'test man'}, "yes"),
'ifequal-split05': ("{% ifequal a 'i \"love\" you' %}yes{% else %}no{% endifequal %}", {'a': ''}, "no"),
'ifequal-split06': ("{% ifequal a 'i \"love\" you' %}yes{% else %}no{% endifequal %}", {'a': 'i "love" you'}, "yes"),
'ifequal-split07': ("{% ifequal a 'i \"love\" you' %}yes{% else %}no{% endifequal %}", {'a': 'i love you'}, "no"),
'ifequal-split08': (r"{% ifequal a 'I\'m happy' %}yes{% else %}no{% endifequal %}", {'a': "I'm happy"}, "yes"),
'ifequal-split09': (r"{% ifequal a 'slash\man' %}yes{% else %}no{% endifequal %}", {'a': r"slash\man"}, "yes"),
'ifequal-split10': (r"{% ifequal a 'slash\man' %}yes{% else %}no{% endifequal %}", {'a': r"slashman"}, "no"),
# NUMERIC RESOLUTION
'ifequal-numeric01': ('{% ifequal x 5 %}yes{% endifequal %}', {'x': '5'}, ''),
'ifequal-numeric02': ('{% ifequal x 5 %}yes{% endifequal %}', {'x': 5}, 'yes'),
'ifequal-numeric03': ('{% ifequal x 5.2 %}yes{% endifequal %}', {'x': 5}, ''),
'ifequal-numeric04': ('{% ifequal x 5.2 %}yes{% endifequal %}', {'x': 5.2}, 'yes'),
'ifequal-numeric05': ('{% ifequal x 0.2 %}yes{% endifequal %}', {'x': .2}, 'yes'),
'ifequal-numeric06': ('{% ifequal x .2 %}yes{% endifequal %}', {'x': .2}, 'yes'),
'ifequal-numeric07': ('{% ifequal x 2. %}yes{% endifequal %}', {'x': 2}, ''),
'ifequal-numeric08': ('{% ifequal x "5" %}yes{% endifequal %}', {'x': 5}, ''),
'ifequal-numeric09': ('{% ifequal x "5" %}yes{% endifequal %}', {'x': '5'}, 'yes'),
'ifequal-numeric10': ('{% ifequal x -5 %}yes{% endifequal %}', {'x': -5}, 'yes'),
'ifequal-numeric11': ('{% ifequal x -5.2 %}yes{% endifequal %}', {'x': -5.2}, 'yes'),
'ifequal-numeric12': ('{% ifequal x +5 %}yes{% endifequal %}', {'x': 5}, 'yes'),
# FILTER EXPRESSIONS AS ARGUMENTS
'ifequal-filter01': ('{% ifequal a|upper "A" %}x{% endifequal %}', {'a': 'a'}, 'x'),
'ifequal-filter02': ('{% ifequal "A" a|upper %}x{% endifequal %}', {'a': 'a'}, 'x'),
'ifequal-filter03': ('{% ifequal a|upper b|upper %}x{% endifequal %}', {'a': 'x', 'b': 'X'}, 'x'),
'ifequal-filter04': ('{% ifequal x|slice:"1" "a" %}x{% endifequal %}', {'x': 'aaa'}, 'x'),
'ifequal-filter05': ('{% ifequal x|slice:"1"|upper "A" %}x{% endifequal %}', {'x': 'aaa'}, 'x'),
### IFNOTEQUAL TAG ########################################################
'ifnotequal01': ("{% ifnotequal a b %}yes{% endifnotequal %}", {"a": 1, "b": 2}, "yes"),
'ifnotequal02': ("{% ifnotequal a b %}yes{% endifnotequal %}", {"a": 1, "b": 1}, ""),
'ifnotequal03': ("{% ifnotequal a b %}yes{% else %}no{% endifnotequal %}", {"a": 1, "b": 2}, "yes"),
'ifnotequal04': ("{% ifnotequal a b %}yes{% else %}no{% endifnotequal %}", {"a": 1, "b": 1}, "no"),
## INCLUDE TAG ###########################################################
'include01': ('{% include "basic-syntax01" %}', {}, "something cool"),
'include02': ('{% include "basic-syntax02" %}', {'headline': 'Included'}, "Included"),
'include03': ('{% include template_name %}', {'template_name': 'basic-syntax02', 'headline': 'Included'}, "Included"),
'include04': ('a{% include "nonexistent" %}b', {}, ("ab", "ab", template.TemplateDoesNotExist)),
'include 05': ('template with a space', {}, 'template with a space'),
'include06': ('{% include "include 05"%}', {}, 'template with a space'),
# extra inline context
'include07': ('{% include "basic-syntax02" with headline="Inline" %}', {'headline': 'Included'}, 'Inline'),
'include08': ('{% include headline with headline="Dynamic" %}', {'headline': 'basic-syntax02'}, 'Dynamic'),
'include09': ('{{ first }}--{% include "basic-syntax03" with first=second|lower|upper second=first|upper %}--{{ second }}', {'first': 'Ul', 'second': 'lU'}, 'Ul--LU --- UL--lU'),
# isolated context
'include10': ('{% include "basic-syntax03" only %}', {'first': '1'}, (' --- ', 'INVALID --- INVALID')),
'include11': ('{% include "basic-syntax03" only with second=2 %}', {'first': '1'}, (' --- 2', 'INVALID --- 2')),
'include12': ('{% include "basic-syntax03" with first=1 only %}', {'second': '2'}, ('1 --- ', '1 --- INVALID')),
# autoescape context
'include13': ('{% autoescape off %}{% include "basic-syntax03" %}{% endautoescape %}', {'first': '&'}, ('& --- ', '& --- INVALID')),
'include14': ('{% autoescape off %}{% include "basic-syntax03" with first=var1 only %}{% endautoescape %}', {'var1': '&'}, ('& --- ', '& --- INVALID')),
'include-error01': ('{% include "basic-syntax01" with %}', {}, template.TemplateSyntaxError),
'include-error02': ('{% include "basic-syntax01" with "no key" %}', {}, template.TemplateSyntaxError),
'include-error03': ('{% include "basic-syntax01" with dotted.arg="error" %}', {}, template.TemplateSyntaxError),
'include-error04': ('{% include "basic-syntax01" something_random %}', {}, template.TemplateSyntaxError),
'include-error05': ('{% include "basic-syntax01" foo="duplicate" foo="key" %}', {}, template.TemplateSyntaxError),
'include-error06': ('{% include "basic-syntax01" only only %}', {}, template.TemplateSyntaxError),
### INCLUSION ERROR REPORTING #############################################
'include-fail1': ('{% load bad_tag %}{% badtag %}', {}, RuntimeError),
'include-fail2': ('{% load broken_tag %}', {}, template.TemplateSyntaxError),
'include-error07': ('{% include "include-fail1" %}', {}, ('', '', RuntimeError)),
'include-error08': ('{% include "include-fail2" %}', {}, ('', '', template.TemplateSyntaxError)),
'include-error09': ('{% include failed_include %}', {'failed_include': 'include-fail1'}, ('', '', template.TemplateSyntaxError)),
'include-error10': ('{% include failed_include %}', {'failed_include': 'include-fail2'}, ('', '', template.TemplateSyntaxError)),
### NAMED ENDBLOCKS #######################################################
# Basic test
'namedendblocks01': ("1{% block first %}_{% block second %}2{% endblock second %}_{% endblock first %}3", {}, '1_2_3'),
# Unbalanced blocks
'namedendblocks02': ("1{% block first %}_{% block second %}2{% endblock first %}_{% endblock second %}3", {}, template.TemplateSyntaxError),
'namedendblocks03': ("1{% block first %}_{% block second %}2{% endblock %}_{% endblock second %}3", {}, template.TemplateSyntaxError),
'namedendblocks04': ("1{% block first %}_{% block second %}2{% endblock second %}_{% endblock third %}3", {}, template.TemplateSyntaxError),
'namedendblocks05': ("1{% block first %}_{% block second %}2{% endblock first %}", {}, template.TemplateSyntaxError),
# Mixed named and unnamed endblocks
'namedendblocks06': ("1{% block first %}_{% block second %}2{% endblock %}_{% endblock first %}3", {}, '1_2_3'),
'namedendblocks07': ("1{% block first %}_{% block second %}2{% endblock second %}_{% endblock %}3", {}, '1_2_3'),
### INHERITANCE ###########################################################
# Standard template with no inheritance
'inheritance01': ("1{% block first %}&{% endblock %}3{% block second %}_{% endblock %}", {}, '1&3_'),
# Standard two-level inheritance
'inheritance02': ("{% extends 'inheritance01' %}{% block first %}2{% endblock %}{% block second %}4{% endblock %}", {}, '1234'),
# Three-level with no redefinitions on third level
'inheritance03': ("{% extends 'inheritance02' %}", {}, '1234'),
# Two-level with no redefinitions on second level
'inheritance04': ("{% extends 'inheritance01' %}", {}, '1&3_'),
# Two-level with double quotes instead of single quotes
'inheritance05': ('{% extends "inheritance02" %}', {}, '1234'),
# Three-level with variable parent-template name
'inheritance06': ("{% extends foo %}", {'foo': 'inheritance02'}, '1234'),
# Two-level with one block defined, one block not defined
'inheritance07': ("{% extends 'inheritance01' %}{% block second %}5{% endblock %}", {}, '1&35'),
# Three-level with one block defined on this level, two blocks defined next level
'inheritance08': ("{% extends 'inheritance02' %}{% block second %}5{% endblock %}", {}, '1235'),
# Three-level with second and third levels blank
'inheritance09': ("{% extends 'inheritance04' %}", {}, '1&3_'),
# Three-level with space NOT in a block -- should be ignored
'inheritance10': ("{% extends 'inheritance04' %} ", {}, '1&3_'),
# Three-level with both blocks defined on this level, but none on second level
'inheritance11': ("{% extends 'inheritance04' %}{% block first %}2{% endblock %}{% block second %}4{% endblock %}", {}, '1234'),
# Three-level with this level providing one and second level providing the other
'inheritance12': ("{% extends 'inheritance07' %}{% block first %}2{% endblock %}", {}, '1235'),
# Three-level with this level overriding second level
'inheritance13': ("{% extends 'inheritance02' %}{% block first %}a{% endblock %}{% block second %}b{% endblock %}", {}, '1a3b'),
# A block defined only in a child template shouldn't be displayed
'inheritance14': ("{% extends 'inheritance01' %}{% block newblock %}NO DISPLAY{% endblock %}", {}, '1&3_'),
# A block within another block
'inheritance15': ("{% extends 'inheritance01' %}{% block first %}2{% block inner %}inner{% endblock %}{% endblock %}", {}, '12inner3_'),
# A block within another block (level 2)
'inheritance16': ("{% extends 'inheritance15' %}{% block inner %}out{% endblock %}", {}, '12out3_'),
# {% load %} tag (parent -- setup for exception04)
'inheritance17': ("{% load testtags %}{% block first %}1234{% endblock %}", {}, '1234'),
# {% load %} tag (standard usage, without inheritance)
'inheritance18': ("{% load testtags %}{% echo this that theother %}5678", {}, 'this that theother5678'),
# {% load %} tag (within a child template)
'inheritance19': ("{% extends 'inheritance01' %}{% block first %}{% load testtags %}{% echo 400 %}5678{% endblock %}", {}, '140056783_'),
# Two-level inheritance with {{ block.super }}
'inheritance20': ("{% extends 'inheritance01' %}{% block first %}{{ block.super }}a{% endblock %}", {}, '1&a3_'),
# Three-level inheritance with {{ block.super }} from parent
'inheritance21': ("{% extends 'inheritance02' %}{% block first %}{{ block.super }}a{% endblock %}", {}, '12a34'),
# Three-level inheritance with {{ block.super }} from grandparent
'inheritance22': ("{% extends 'inheritance04' %}{% block first %}{{ block.super }}a{% endblock %}", {}, '1&a3_'),
# Three-level inheritance with {{ block.super }} from parent and grandparent
'inheritance23': ("{% extends 'inheritance20' %}{% block first %}{{ block.super }}b{% endblock %}", {}, '1&ab3_'),
# Inheritance from local context without use of template loader
'inheritance24': ("{% extends context_template %}{% block first %}2{% endblock %}{% block second %}4{% endblock %}", {'context_template': template.Template("1{% block first %}_{% endblock %}3{% block second %}_{% endblock %}")}, '1234'),
# Inheritance from local context with variable parent template
'inheritance25': ("{% extends context_template.1 %}{% block first %}2{% endblock %}{% block second %}4{% endblock %}", {'context_template': [template.Template("Wrong"), template.Template("1{% block first %}_{% endblock %}3{% block second %}_{% endblock %}")]}, '1234'),
# Set up a base template to extend
'inheritance26': ("no tags", {}, 'no tags'),
# Inheritance from a template that doesn't have any blocks
'inheritance27': ("{% extends 'inheritance26' %}", {}, 'no tags'),
# Set up a base template with a space in it.
'inheritance 28': ("{% block first %}!{% endblock %}", {}, '!'),
# Inheritance from a template with a space in its name should work.
'inheritance29': ("{% extends 'inheritance 28' %}", {}, '!'),
# Base template, putting block in a conditional {% if %} tag
'inheritance30': ("1{% if optional %}{% block opt %}2{% endblock %}{% endif %}3", {'optional': True}, '123'),
# Inherit from a template with block wrapped in an {% if %} tag (in parent), still gets overridden
'inheritance31': ("{% extends 'inheritance30' %}{% block opt %}two{% endblock %}", {'optional': True}, '1two3'),
'inheritance32': ("{% extends 'inheritance30' %}{% block opt %}two{% endblock %}", {}, '13'),
# Base template, putting block in a conditional {% ifequal %} tag
'inheritance33': ("1{% ifequal optional 1 %}{% block opt %}2{% endblock %}{% endifequal %}3", {'optional': 1}, '123'),
# Inherit from a template with block wrapped in an {% ifequal %} tag (in parent), still gets overridden
'inheritance34': ("{% extends 'inheritance33' %}{% block opt %}two{% endblock %}", {'optional': 1}, '1two3'),
'inheritance35': ("{% extends 'inheritance33' %}{% block opt %}two{% endblock %}", {'optional': 2}, '13'),
# Base template, putting block in a {% for %} tag
'inheritance36': ("{% for n in numbers %}_{% block opt %}{{ n }}{% endblock %}{% endfor %}_", {'numbers': '123'}, '_1_2_3_'),
# Inherit from a template with block wrapped in a {% for %} tag (in parent), still gets overridden
'inheritance37': ("{% extends 'inheritance36' %}{% block opt %}X{% endblock %}", {'numbers': '123'}, '_X_X_X_'),
'inheritance38': ("{% extends 'inheritance36' %}{% block opt %}X{% endblock %}", {}, '_'),
# The super block will still be found.
'inheritance39': ("{% extends 'inheritance30' %}{% block opt %}new{{ block.super }}{% endblock %}", {'optional': True}, '1new23'),
'inheritance40': ("{% extends 'inheritance33' %}{% block opt %}new{{ block.super }}{% endblock %}", {'optional': 1}, '1new23'),
'inheritance41': ("{% extends 'inheritance36' %}{% block opt %}new{{ block.super }}{% endblock %}", {'numbers': '123'}, '_new1_new2_new3_'),
### LOADING TAG LIBRARIES #################################################
# {% load %} tag, importing individual tags
'load1': ("{% load echo from testtags %}{% echo this that theother %}", {}, 'this that theother'),
'load2': ("{% load echo other_echo from testtags %}{% echo this that theother %} {% other_echo and another thing %}", {}, 'this that theother and another thing'),
'load3': ("{% load echo upper from testtags %}{% echo this that theother %} {{ statement|upper }}", {'statement': 'not shouting'}, 'this that theother NOT SHOUTING'),
# {% load %} tag errors
'load4': ("{% load echo other_echo bad_tag from testtags %}", {}, template.TemplateSyntaxError),
'load5': ("{% load echo other_echo bad_tag from %}", {}, template.TemplateSyntaxError),
'load6': ("{% load from testtags %}", {}, template.TemplateSyntaxError),
'load7': ("{% load echo from bad_library %}", {}, template.TemplateSyntaxError),
### SPACELESS TAG ########################################################
# {% spaceless %} tag
'spaceless01': ("{% spaceless %} <b> <i> text </i> </b> {% endspaceless %}", {}, "<b><i> text </i></b>"),
'spaceless02': ("{% spaceless %} <b> \n <i> text </i> \n </b> {% endspaceless %}", {}, "<b><i> text </i></b>"),
'spaceless03': ("{% spaceless %}<b><i>text</i></b>{% endspaceless %}", {}, "<b><i>text</i></b>"),
'spaceless04': ("{% spaceless %}<b> <i>{{ text }}</i> </b>{% endspaceless %}", {'text' : 'This & that'}, "<b><i>This & that</i></b>"),
'spaceless05': ("{% autoescape off %}{% spaceless %}<b> <i>{{ text }}</i> </b>{% endspaceless %}{% endautoescape %}", {'text' : 'This & that'}, "<b><i>This & that</i></b>"),
'spaceless06': ("{% spaceless %}<b> <i>{{ text|safe }}</i> </b>{% endspaceless %}", {'text' : 'This & that'}, "<b><i>This & that</i></b>"),
### I18N ##################################################################
# simple translation of a string delimited by '
'i18n01': ("{% load i18n %}{% trans 'xxxyyyxxx' %}", {}, "xxxyyyxxx"),
# simple translation of a string delimited by "
'i18n02': ('{% load i18n %}{% trans "xxxyyyxxx" %}', {}, "xxxyyyxxx"),
# simple translation of a variable
'i18n03': ('{% load i18n %}{% blocktrans %}{{ anton }}{% endblocktrans %}', {'anton': '\xc3\x85'}, u"Å"),
# simple translation of a variable and filter
'i18n04': ('{% load i18n %}{% blocktrans with berta=anton|lower %}{{ berta }}{% endblocktrans %}', {'anton': '\xc3\x85'}, u'å'),
'legacyi18n04': ('{% load i18n %}{% blocktrans with anton|lower as berta %}{{ berta }}{% endblocktrans %}', {'anton': '\xc3\x85'}, u'å'),
# simple translation of a string with interpolation
'i18n05': ('{% load i18n %}{% blocktrans %}xxx{{ anton }}xxx{% endblocktrans %}', {'anton': 'yyy'}, "xxxyyyxxx"),
# simple translation of a string to german
'i18n06': ('{% load i18n %}{% trans "Page not found" %}', {'LANGUAGE_CODE': 'de'}, "Seite nicht gefunden"),
# translation of singular form
'i18n07': ('{% load i18n %}{% blocktrans count counter=number %}singular{% plural %}{{ counter }} plural{% endblocktrans %}', {'number': 1}, "singular"),
'legacyi18n07': ('{% load i18n %}{% blocktrans count number as counter %}singular{% plural %}{{ counter }} plural{% endblocktrans %}', {'number': 1}, "singular"),
# translation of plural form
'i18n08': ('{% load i18n %}{% blocktrans count number as counter %}singular{% plural %}{{ counter }} plural{% endblocktrans %}', {'number': 2}, "2 plural"),
'legacyi18n08': ('{% load i18n %}{% blocktrans count counter=number %}singular{% plural %}{{ counter }} plural{% endblocktrans %}', {'number': 2}, "2 plural"),
# simple non-translation (only marking) of a string to german
'i18n09': ('{% load i18n %}{% trans "Page not found" noop %}', {'LANGUAGE_CODE': 'de'}, "Page not found"),
# translation of a variable with a translated filter
'i18n10': ('{{ bool|yesno:_("yes,no,maybe") }}', {'bool': True, 'LANGUAGE_CODE': 'de'}, 'Ja'),
# translation of a variable with a non-translated filter
'i18n11': ('{{ bool|yesno:"ja,nein" }}', {'bool': True}, 'ja'),
# usage of the get_available_languages tag
'i18n12': ('{% load i18n %}{% get_available_languages as langs %}{% for lang in langs %}{% ifequal lang.0 "de" %}{{ lang.0 }}{% endifequal %}{% endfor %}', {}, 'de'),
# translation of constant strings
'i18n13': ('{{ _("Password") }}', {'LANGUAGE_CODE': 'de'}, 'Passwort'),
'i18n14': ('{% cycle "foo" _("Password") _(\'Password\') as c %} {% cycle c %} {% cycle c %}', {'LANGUAGE_CODE': 'de'}, 'foo Passwort Passwort'),
'i18n15': ('{{ absent|default:_("Password") }}', {'LANGUAGE_CODE': 'de', 'absent': ""}, 'Passwort'),
'i18n16': ('{{ _("<") }}', {'LANGUAGE_CODE': 'de'}, '<'),
# Escaping inside blocktrans and trans works as if it was directly in the
# template.
'i18n17': ('{% load i18n %}{% blocktrans with berta=anton|escape %}{{ berta }}{% endblocktrans %}', {'anton': 'α & β'}, u'α & β'),
'i18n18': ('{% load i18n %}{% blocktrans with berta=anton|force_escape %}{{ berta }}{% endblocktrans %}', {'anton': 'α & β'}, u'α & β'),
'i18n19': ('{% load i18n %}{% blocktrans %}{{ andrew }}{% endblocktrans %}', {'andrew': 'a & b'}, u'a & b'),
'i18n20': ('{% load i18n %}{% trans andrew %}', {'andrew': 'a & b'}, u'a & b'),
'i18n21': ('{% load i18n %}{% blocktrans %}{{ andrew }}{% endblocktrans %}', {'andrew': mark_safe('a & b')}, u'a & b'),
'i18n22': ('{% load i18n %}{% trans andrew %}', {'andrew': mark_safe('a & b')}, u'a & b'),
'legacyi18n17': ('{% load i18n %}{% blocktrans with anton|escape as berta %}{{ berta }}{% endblocktrans %}', {'anton': 'α & β'}, u'α & β'),
'legacyi18n18': ('{% load i18n %}{% blocktrans with anton|force_escape as berta %}{{ berta }}{% endblocktrans %}', {'anton': 'α & β'}, u'α & β'),
# Use filters with the {% trans %} tag, #5972
'i18n23': ('{% load i18n %}{% trans "Page not found"|capfirst|slice:"6:" %}', {'LANGUAGE_CODE': 'de'}, u'nicht gefunden'),
'i18n24': ("{% load i18n %}{% trans 'Page not found'|upper %}", {'LANGUAGE_CODE': 'de'}, u'SEITE NICHT GEFUNDEN'),
'i18n25': ('{% load i18n %}{% trans somevar|upper %}', {'somevar': 'Page not found', 'LANGUAGE_CODE': 'de'}, u'SEITE NICHT GEFUNDEN'),
# translation of plural form with extra field in singular form (#13568)
'i18n26': ('{% load i18n %}{% blocktrans with extra_field=myextra_field count counter=number %}singular {{ extra_field }}{% plural %}plural{% endblocktrans %}', {'number': 1, 'myextra_field': 'test'}, "singular test"),
'legacyi18n26': ('{% load i18n %}{% blocktrans with myextra_field as extra_field count number as counter %}singular {{ extra_field }}{% plural %}plural{% endblocktrans %}', {'number': 1, 'myextra_field': 'test'}, "singular test"),
# translation of singular form in russian (#14126)
'i18n27': ('{% load i18n %}{% blocktrans count counter=number %}{{ counter }} result{% plural %}{{ counter }} results{% endblocktrans %}', {'number': 1, 'LANGUAGE_CODE': 'ru'}, u'1 \u0440\u0435\u0437\u0443\u043b\u044c\u0442\u0430\u0442'),
'legacyi18n27': ('{% load i18n %}{% blocktrans count number as counter %}{{ counter }} result{% plural %}{{ counter }} results{% endblocktrans %}', {'number': 1, 'LANGUAGE_CODE': 'ru'}, u'1 \u0440\u0435\u0437\u0443\u043b\u044c\u0442\u0430\u0442'),
# simple translation of multiple variables
'i18n28': ('{% load i18n %}{% blocktrans with a=anton b=berta %}{{ a }} + {{ b }}{% endblocktrans %}', {'anton': 'α', 'berta': 'β'}, u'α + β'),
'legacyi18n28': ('{% load i18n %}{% blocktrans with anton as a and berta as b %}{{ a }} + {{ b }}{% endblocktrans %}', {'anton': 'α', 'berta': 'β'}, u'α + β'),
# retrieving language information
# NOTE(review): the 'i18n28' key below duplicates the "multiple variables"
# entry above and silently overrides it -- one of the two should be renamed.
'i18n28': ('{% load i18n %}{% get_language_info for "de" as l %}{{ l.code }}: {{ l.name }}/{{ l.name_local }} bidi={{ l.bidi }}', {}, 'de: German/Deutsch bidi=False'),
'i18n29': ('{% load i18n %}{% get_language_info for LANGUAGE_CODE as l %}{{ l.code }}: {{ l.name }}/{{ l.name_local }} bidi={{ l.bidi }}', {'LANGUAGE_CODE': 'fi'}, 'fi: Finnish/suomi bidi=False'),
'i18n30': ('{% load i18n %}{% get_language_info_list for langcodes as langs %}{% for l in langs %}{{ l.code }}: {{ l.name }}/{{ l.name_local }} bidi={{ l.bidi }}; {% endfor %}', {'langcodes': ['it', 'no']}, u'it: Italian/italiano bidi=False; no: Norwegian/Norsk bidi=False; '),
'i18n31': ('{% load i18n %}{% get_language_info_list for langcodes as langs %}{% for l in langs %}{{ l.code }}: {{ l.name }}/{{ l.name_local }} bidi={{ l.bidi }}; {% endfor %}', {'langcodes': (('sl', 'Slovenian'), ('fa', 'Persian'))}, u'sl: Slovenian/Sloven\u0161\u010dina bidi=False; fa: Persian/\u0641\u0627\u0631\u0633\u06cc bidi=True; '),
'i18n32': ('{% load i18n %}{{ "hu"|language_name }} {{ "hu"|language_name_local }} {{ "hu"|language_bidi }}', {}, u'Hungarian Magyar False'),
'i18n33': ('{% load i18n %}{{ langcode|language_name }} {{ langcode|language_name_local }} {{ langcode|language_bidi }}', {'langcode': 'nl'}, u'Dutch Nederlands False'),
# blocktrans handling of variables which are not in the context.
'i18n34': ('{% load i18n %}{% blocktrans %}{{ missing }}{% endblocktrans %}', {}, u''),
### HANDLING OF TEMPLATE_STRING_IF_INVALID ###################################
'invalidstr01': ('{{ var|default:"Foo" }}', {}, ('Foo','INVALID')),
'invalidstr02': ('{{ var|default_if_none:"Foo" }}', {}, ('','INVALID')),
'invalidstr03': ('{% for v in var %}({{ v }}){% endfor %}', {}, ''),
'invalidstr04': ('{% if var %}Yes{% else %}No{% endif %}', {}, 'No'),
'invalidstr04': ('{% if var|default:"Foo" %}Yes{% else %}No{% endif %}', {}, 'Yes'),
'invalidstr05': ('{{ var }}', {}, ('', ('INVALID %s', 'var'))),
'invalidstr06': ('{{ var.prop }}', {'var': {}}, ('', ('INVALID %s', 'var.prop'))),
### MULTILINE #############################################################
'multiline01': ("""
Hello,
boys.
How
are
you
gentlemen.
""",
{},
"""
Hello,
boys.
How
are
you
gentlemen.
"""),
### REGROUP TAG ###########################################################
'regroup01': ('{% regroup data by bar as grouped %}' + \
'{% for group in grouped %}' + \
'{{ group.grouper }}:' + \
'{% for item in group.list %}' + \
'{{ item.foo }}' + \
'{% endfor %},' + \
'{% endfor %}',
{'data': [ {'foo':'c', 'bar':1},
{'foo':'d', 'bar':1},
{'foo':'a', 'bar':2},
{'foo':'b', 'bar':2},
{'foo':'x', 'bar':3} ]},
'1:cd,2:ab,3:x,'),
# Test for silent failure when target variable isn't found
'regroup02': ('{% regroup data by bar as grouped %}' + \
'{% for group in grouped %}' + \
'{{ group.grouper }}:' + \
'{% for item in group.list %}' + \
'{{ item.foo }}' + \
'{% endfor %},' + \
'{% endfor %}',
{}, ''),
### SSI TAG ########################################################
# Test normal behavior
'old-ssi01': ('{%% ssi %s %%}' % os.path.join(os.path.dirname(os.path.abspath(__file__)), 'templates', 'ssi_include.html'), {}, 'This is for testing an ssi include. {{ test }}\n'),
'old-ssi02': ('{%% ssi %s %%}' % os.path.join(os.path.dirname(os.path.abspath(__file__)), 'not_here'), {}, ''),
# Test parsed output
'old-ssi06': ('{%% ssi %s parsed %%}' % os.path.join(os.path.dirname(os.path.abspath(__file__)), 'templates', 'ssi_include.html'), {'test': 'Look ma! It parsed!'}, 'This is for testing an ssi include. Look ma! It parsed!\n'),
'old-ssi07': ('{%% ssi %s parsed %%}' % os.path.join(os.path.dirname(os.path.abspath(__file__)), 'not_here'), {'test': 'Look ma! It parsed!'}, ''),
# Future compatibility
# Test normal behavior
'ssi01': ('{%% load ssi from future %%}{%% ssi "%s" %%}' % os.path.join(os.path.dirname(os.path.abspath(__file__)), 'templates', 'ssi_include.html'), {}, 'This is for testing an ssi include. {{ test }}\n'),
'ssi02': ('{%% load ssi from future %%}{%% ssi "%s" %%}' % os.path.join(os.path.dirname(os.path.abspath(__file__)), 'not_here'), {}, ''),
'ssi03': ("{%% load ssi from future %%}{%% ssi '%s' %%}" % os.path.join(os.path.dirname(os.path.abspath(__file__)), 'not_here'), {}, ''),
# Test passing as a variable
'ssi04': ('{% load ssi from future %}{% ssi ssi_file %}', {'ssi_file': os.path.join(os.path.dirname(os.path.abspath(__file__)), 'templates', 'ssi_include.html')}, 'This is for testing an ssi include. {{ test }}\n'),
'ssi05': ('{% load ssi from future %}{% ssi ssi_file %}', {'ssi_file': 'no_file'}, ''),
# Test parsed output
'ssi06': ('{%% load ssi from future %%}{%% ssi "%s" parsed %%}' % os.path.join(os.path.dirname(os.path.abspath(__file__)), 'templates', 'ssi_include.html'), {'test': 'Look ma! It parsed!'}, 'This is for testing an ssi include. Look ma! It parsed!\n'),
'ssi07': ('{%% load ssi from future %%}{%% ssi "%s" parsed %%}' % os.path.join(os.path.dirname(os.path.abspath(__file__)), 'not_here'), {'test': 'Look ma! It parsed!'}, ''),
### TEMPLATETAG TAG #######################################################
'templatetag01': ('{% templatetag openblock %}', {}, '{%'),
'templatetag02': ('{% templatetag closeblock %}', {}, '%}'),
'templatetag03': ('{% templatetag openvariable %}', {}, '{{'),
'templatetag04': ('{% templatetag closevariable %}', {}, '}}'),
'templatetag05': ('{% templatetag %}', {}, template.TemplateSyntaxError),
'templatetag06': ('{% templatetag foo %}', {}, template.TemplateSyntaxError),
'templatetag07': ('{% templatetag openbrace %}', {}, '{'),
'templatetag08': ('{% templatetag closebrace %}', {}, '}'),
'templatetag09': ('{% templatetag openbrace %}{% templatetag openbrace %}', {}, '{{'),
'templatetag10': ('{% templatetag closebrace %}{% templatetag closebrace %}', {}, '}}'),
'templatetag11': ('{% templatetag opencomment %}', {}, '{#'),
'templatetag12': ('{% templatetag closecomment %}', {}, '#}'),
### WIDTHRATIO TAG ########################################################
'widthratio01': ('{% widthratio a b 0 %}', {'a':50,'b':100}, '0'),
'widthratio02': ('{% widthratio a b 100 %}', {'a':0,'b':0}, ''),
'widthratio03': ('{% widthratio a b 100 %}', {'a':0,'b':100}, '0'),
'widthratio04': ('{% widthratio a b 100 %}', {'a':50,'b':100}, '50'),
'widthratio05': ('{% widthratio a b 100 %}', {'a':100,'b':100}, '100'),
# 62.5 should round to 63
'widthratio06': ('{% widthratio a b 100 %}', {'a':50,'b':80}, '63'),
# 71.4 should round to 71
'widthratio07': ('{% widthratio a b 100 %}', {'a':50,'b':70}, '71'),
# Raise exception if we don't have 3 args, last one an integer
'widthratio08': ('{% widthratio %}', {}, template.TemplateSyntaxError),
'widthratio09': ('{% widthratio a b %}', {'a':50,'b':100}, template.TemplateSyntaxError),
'widthratio10': ('{% widthratio a b 100.0 %}', {'a':50,'b':100}, '50'),
# #10043: widthratio should allow max_width to be a variable
'widthratio11': ('{% widthratio a b c %}', {'a':50,'b':100, 'c': 100}, '50'),
### WITH TAG ########################################################
'with01': ('{% with key=dict.key %}{{ key }}{% endwith %}', {'dict': {'key': 50}}, '50'),
'legacywith01': ('{% with dict.key as key %}{{ key }}{% endwith %}', {'dict': {'key': 50}}, '50'),
'with02': ('{{ key }}{% with key=dict.key %}{{ key }}-{{ dict.key }}-{{ key }}{% endwith %}{{ key }}', {'dict': {'key': 50}}, ('50-50-50', 'INVALID50-50-50INVALID')),
'legacywith02': ('{{ key }}{% with dict.key as key %}{{ key }}-{{ dict.key }}-{{ key }}{% endwith %}{{ key }}', {'dict': {'key': 50}}, ('50-50-50', 'INVALID50-50-50INVALID')),
'with03': ('{% with a=alpha b=beta %}{{ a }}{{ b }}{% endwith %}', {'alpha': 'A', 'beta': 'B'}, 'AB'),
'with-error01': ('{% with dict.key xx key %}{{ key }}{% endwith %}', {'dict': {'key': 50}}, template.TemplateSyntaxError),
'with-error02': ('{% with dict.key as %}{{ key }}{% endwith %}', {'dict': {'key': 50}}, template.TemplateSyntaxError),
### NOW TAG ########################################################
# Simple case
'now01': ('{% now "j n Y"%}', {}, str(datetime.now().day) + ' ' + str(datetime.now().month) + ' ' + str(datetime.now().year)),
# Check parsing of escaped and special characters
'now02': ('{% now "j "n" Y"%}', {}, template.TemplateSyntaxError),
# 'now03': ('{% now "j \"n\" Y"%}', {}, str(datetime.now().day) + '"' + str(datetime.now().month) + '"' + str(datetime.now().year)),
# 'now04': ('{% now "j \nn\n Y"%}', {}, str(datetime.now().day) + '\n' + str(datetime.now().month) + '\n' + str(datetime.now().year))
### URL TAG ########################################################
# Successes
'legacyurl02': ('{% url regressiontests.templates.views.client_action id=client.id,action="update" %}', {'client': {'id': 1}}, '/url_tag/client/1/update/'),
'legacyurl02a': ('{% url regressiontests.templates.views.client_action client.id,"update" %}', {'client': {'id': 1}}, '/url_tag/client/1/update/'),
'legacyurl02b': ("{% url regressiontests.templates.views.client_action id=client.id,action='update' %}", {'client': {'id': 1}}, '/url_tag/client/1/update/'),
'legacyurl02c': ("{% url regressiontests.templates.views.client_action client.id,'update' %}", {'client': {'id': 1}}, '/url_tag/client/1/update/'),
'legacyurl10': ('{% url regressiontests.templates.views.client_action id=client.id,action="two words" %}', {'client': {'id': 1}}, '/url_tag/client/1/two%20words/'),
'legacyurl13': ('{% url regressiontests.templates.views.client_action id=client.id, action=arg|join:"-" %}', {'client': {'id': 1}, 'arg':['a','b']}, '/url_tag/client/1/a-b/'),
'legacyurl14': ('{% url regressiontests.templates.views.client_action client.id, arg|join:"-" %}', {'client': {'id': 1}, 'arg':['a','b']}, '/url_tag/client/1/a-b/'),
'legacyurl16': ('{% url regressiontests.templates.views.client_action action="update",id="1" %}', {}, '/url_tag/client/1/update/'),
'legacyurl16a': ("{% url regressiontests.templates.views.client_action action='update',id='1' %}", {}, '/url_tag/client/1/update/'),
'legacyurl17': ('{% url regressiontests.templates.views.client_action client_id=client.my_id,action=action %}', {'client': {'my_id': 1}, 'action': 'update'}, '/url_tag/client/1/update/'),
'old-url01': ('{% url regressiontests.templates.views.client client.id %}', {'client': {'id': 1}}, '/url_tag/client/1/'),
'old-url02': ('{% url regressiontests.templates.views.client_action id=client.id action="update" %}', {'client': {'id': 1}}, '/url_tag/client/1/update/'),
'old-url02a': ('{% url regressiontests.templates.views.client_action client.id "update" %}', {'client': {'id': 1}}, '/url_tag/client/1/update/'),
'old-url02b': ("{% url regressiontests.templates.views.client_action id=client.id action='update' %}", {'client': {'id': 1}}, '/url_tag/client/1/update/'),
'old-url02c': ("{% url regressiontests.templates.views.client_action client.id 'update' %}", {'client': {'id': 1}}, '/url_tag/client/1/update/'),
'old-url03': ('{% url regressiontests.templates.views.index %}', {}, '/url_tag/'),
'old-url04': ('{% url named.client client.id %}', {'client': {'id': 1}}, '/url_tag/named-client/1/'),
'old-url05': (u'{% url метка_оператора v %}', {'v': u'Ω'}, '/url_tag/%D0%AE%D0%BD%D0%B8%D0%BA%D0%BE%D0%B4/%CE%A9/'),
'old-url06': (u'{% url метка_оператора_2 tag=v %}', {'v': u'Ω'}, '/url_tag/%D0%AE%D0%BD%D0%B8%D0%BA%D0%BE%D0%B4/%CE%A9/'),
'old-url07': (u'{% url regressiontests.templates.views.client2 tag=v %}', {'v': u'Ω'}, '/url_tag/%D0%AE%D0%BD%D0%B8%D0%BA%D0%BE%D0%B4/%CE%A9/'),
'old-url08': (u'{% url метка_оператора v %}', {'v': 'Ω'}, '/url_tag/%D0%AE%D0%BD%D0%B8%D0%BA%D0%BE%D0%B4/%CE%A9/'),
'old-url09': (u'{% url метка_оператора_2 tag=v %}', {'v': 'Ω'}, '/url_tag/%D0%AE%D0%BD%D0%B8%D0%BA%D0%BE%D0%B4/%CE%A9/'),
'old-url10': ('{% url regressiontests.templates.views.client_action id=client.id action="two words" %}', {'client': {'id': 1}}, '/url_tag/client/1/two%20words/'),
'old-url11': ('{% url regressiontests.templates.views.client_action id=client.id action="==" %}', {'client': {'id': 1}}, '/url_tag/client/1/==/'),
'old-url12': ('{% url regressiontests.templates.views.client_action id=client.id action="," %}', {'client': {'id': 1}}, '/url_tag/client/1/,/'),
'old-url13': ('{% url regressiontests.templates.views.client_action id=client.id action=arg|join:"-" %}', {'client': {'id': 1}, 'arg':['a','b']}, '/url_tag/client/1/a-b/'),
'old-url14': ('{% url regressiontests.templates.views.client_action client.id arg|join:"-" %}', {'client': {'id': 1}, 'arg':['a','b']}, '/url_tag/client/1/a-b/'),
'old-url15': ('{% url regressiontests.templates.views.client_action 12 "test" %}', {}, '/url_tag/client/12/test/'),
'old-url18': ('{% url regressiontests.templates.views.client "1,2" %}', {}, '/url_tag/client/1,2/'),
# Failures
'old-url-fail01': ('{% url %}', {}, template.TemplateSyntaxError),
'old-url-fail02': ('{% url no_such_view %}', {}, (urlresolvers.NoReverseMatch, urlresolvers.NoReverseMatch, template.TemplateSyntaxError)),
'old-url-fail03': ('{% url regressiontests.templates.views.client %}', {}, (urlresolvers.NoReverseMatch, urlresolvers.NoReverseMatch, template.TemplateSyntaxError)),
'old-url-fail04': ('{% url view id, %}', {}, template.TemplateSyntaxError),
'old-url-fail05': ('{% url view id= %}', {}, template.TemplateSyntaxError),
'old-url-fail06': ('{% url view a.id=id %}', {}, template.TemplateSyntaxError),
'old-url-fail07': ('{% url view a.id!id %}', {}, template.TemplateSyntaxError),
'old-url-fail08': ('{% url view id="unterminatedstring %}', {}, template.TemplateSyntaxError),
'old-url-fail09': ('{% url view id=", %}', {}, template.TemplateSyntaxError),
# {% url ... as var %}
'old-url-asvar01': ('{% url regressiontests.templates.views.index as url %}', {}, ''),
'old-url-asvar02': ('{% url regressiontests.templates.views.index as url %}{{ url }}', {}, '/url_tag/'),
'old-url-asvar03': ('{% url no_such_view as url %}{{ url }}', {}, ''),
# forward compatibility
# Successes
'url01': ('{% load url from future %}{% url "regressiontests.templates.views.client" client.id %}', {'client': {'id': 1}}, '/url_tag/client/1/'),
'url02': ('{% load url from future %}{% url "regressiontests.templates.views.client_action" id=client.id action="update" %}', {'client': {'id': 1}}, '/url_tag/client/1/update/'),
'url02a': ('{% load url from future %}{% url "regressiontests.templates.views.client_action" client.id "update" %}', {'client': {'id': 1}}, '/url_tag/client/1/update/'),
'url02b': ("{% load url from future %}{% url 'regressiontests.templates.views.client_action' id=client.id action='update' %}", {'client': {'id': 1}}, '/url_tag/client/1/update/'),
'url02c': ("{% load url from future %}{% url 'regressiontests.templates.views.client_action' client.id 'update' %}", {'client': {'id': 1}}, '/url_tag/client/1/update/'),
'url03': ('{% load url from future %}{% url "regressiontests.templates.views.index" %}', {}, '/url_tag/'),
'url04': ('{% load url from future %}{% url "named.client" client.id %}', {'client': {'id': 1}}, '/url_tag/named-client/1/'),
'url05': (u'{% load url from future %}{% url "метка_оператора" v %}', {'v': u'Ω'}, '/url_tag/%D0%AE%D0%BD%D0%B8%D0%BA%D0%BE%D0%B4/%CE%A9/'),
'url06': (u'{% load url from future %}{% url "метка_оператора_2" tag=v %}', {'v': u'Ω'}, '/url_tag/%D0%AE%D0%BD%D0%B8%D0%BA%D0%BE%D0%B4/%CE%A9/'),
'url07': (u'{% load url from future %}{% url "regressiontests.templates.views.client2" tag=v %}', {'v': u'Ω'}, '/url_tag/%D0%AE%D0%BD%D0%B8%D0%BA%D0%BE%D0%B4/%CE%A9/'),
'url08': (u'{% load url from future %}{% url "метка_оператора" v %}', {'v': 'Ω'}, '/url_tag/%D0%AE%D0%BD%D0%B8%D0%BA%D0%BE%D0%B4/%CE%A9/'),
'url09': (u'{% load url from future %}{% url "метка_оператора_2" tag=v %}', {'v': 'Ω'}, '/url_tag/%D0%AE%D0%BD%D0%B8%D0%BA%D0%BE%D0%B4/%CE%A9/'),
'url10': ('{% load url from future %}{% url "regressiontests.templates.views.client_action" id=client.id action="two words" %}', {'client': {'id': 1}}, '/url_tag/client/1/two%20words/'),
'url11': ('{% load url from future %}{% url "regressiontests.templates.views.client_action" id=client.id action="==" %}', {'client': {'id': 1}}, '/url_tag/client/1/==/'),
'url12': ('{% load url from future %}{% url "regressiontests.templates.views.client_action" id=client.id action="," %}', {'client': {'id': 1}}, '/url_tag/client/1/,/'),
'url13': ('{% load url from future %}{% url "regressiontests.templates.views.client_action" id=client.id action=arg|join:"-" %}', {'client': {'id': 1}, 'arg':['a','b']}, '/url_tag/client/1/a-b/'),
'url14': ('{% load url from future %}{% url "regressiontests.templates.views.client_action" client.id arg|join:"-" %}', {'client': {'id': 1}, 'arg':['a','b']}, '/url_tag/client/1/a-b/'),
'url15': ('{% load url from future %}{% url "regressiontests.templates.views.client_action" 12 "test" %}', {}, '/url_tag/client/12/test/'),
'url18': ('{% load url from future %}{% url "regressiontests.templates.views.client" "1,2" %}', {}, '/url_tag/client/1,2/'),
'url19': ('{% load url from future %}{% url named_url client.id %}', {'named_url': 'regressiontests.templates.views.client', 'client': {'id': 1}}, '/url_tag/client/1/'),
# Failures
'url-fail01': ('{% load url from future %}{% url %}', {}, template.TemplateSyntaxError),
'url-fail02': ('{% load url from future %}{% url "no_such_view" %}', {}, (urlresolvers.NoReverseMatch, urlresolvers.NoReverseMatch, template.TemplateSyntaxError)),
'url-fail03': ('{% load url from future %}{% url "regressiontests.templates.views.client" %}', {}, (urlresolvers.NoReverseMatch, urlresolvers.NoReverseMatch, template.TemplateSyntaxError)),
'url-fail04': ('{% load url from future %}{% url "view" id, %}', {}, template.TemplateSyntaxError),
'url-fail05': ('{% load url from future %}{% url "view" id= %}', {}, template.TemplateSyntaxError),
'url-fail06': ('{% load url from future %}{% url "view" a.id=id %}', {}, template.TemplateSyntaxError),
'url-fail07': ('{% load url from future %}{% url "view" a.id!id %}', {}, template.TemplateSyntaxError),
'url-fail08': ('{% load url from future %}{% url "view" id="unterminatedstring %}', {}, template.TemplateSyntaxError),
'url-fail09': ('{% load url from future %}{% url "view" id=", %}', {}, template.TemplateSyntaxError),
'url-fail11': ('{% load url from future %}{% url named_url %}', {}, (urlresolvers.NoReverseMatch, urlresolvers.NoReverseMatch, template.TemplateSyntaxError)),
'url-fail12': ('{% load url from future %}{% url named_url %}', {'named_url': 'no_such_view'}, (urlresolvers.NoReverseMatch, urlresolvers.NoReverseMatch, template.TemplateSyntaxError)),
'url-fail13': ('{% load url from future %}{% url named_url %}', {'named_url': 'regressiontests.templates.views.client'}, (urlresolvers.NoReverseMatch, urlresolvers.NoReverseMatch, template.TemplateSyntaxError)),
'url-fail14': ('{% load url from future %}{% url named_url id, %}', {'named_url': 'view'}, template.TemplateSyntaxError),
'url-fail15': ('{% load url from future %}{% url named_url id= %}', {'named_url': 'view'}, template.TemplateSyntaxError),
'url-fail16': ('{% load url from future %}{% url named_url a.id=id %}', {'named_url': 'view'}, template.TemplateSyntaxError),
'url-fail17': ('{% load url from future %}{% url named_url a.id!id %}', {'named_url': 'view'}, template.TemplateSyntaxError),
'url-fail18': ('{% load url from future %}{% url named_url id="unterminatedstring %}', {'named_url': 'view'}, template.TemplateSyntaxError),
'url-fail19': ('{% load url from future %}{% url named_url id=", %}', {'named_url': 'view'}, template.TemplateSyntaxError),
# {% url ... as var %}
'url-asvar01': ('{% load url from future %}{% url "regressiontests.templates.views.index" as url %}', {}, ''),
'url-asvar02': ('{% load url from future %}{% url "regressiontests.templates.views.index" as url %}{{ url }}', {}, '/url_tag/'),
'url-asvar03': ('{% load url from future %}{% url "no_such_view" as url %}{{ url }}', {}, ''),
### CACHE TAG ######################################################
'cache03': ('{% load cache %}{% cache 2 test %}cache03{% endcache %}', {}, 'cache03'),
'cache04': ('{% load cache %}{% cache 2 test %}cache04{% endcache %}', {}, 'cache03'),
'cache05': ('{% load cache %}{% cache 2 test foo %}cache05{% endcache %}', {'foo': 1}, 'cache05'),
'cache06': ('{% load cache %}{% cache 2 test foo %}cache06{% endcache %}', {'foo': 2}, 'cache06'),
'cache07': ('{% load cache %}{% cache 2 test foo %}cache07{% endcache %}', {'foo': 1}, 'cache05'),
# Allow first argument to be a variable.
'cache08': ('{% load cache %}{% cache time test foo %}cache08{% endcache %}', {'foo': 2, 'time': 2}, 'cache06'),
# Raise exception if we don't have at least 2 args, first one integer.
'cache11': ('{% load cache %}{% cache %}{% endcache %}', {}, template.TemplateSyntaxError),
'cache12': ('{% load cache %}{% cache 1 %}{% endcache %}', {}, template.TemplateSyntaxError),
'cache13': ('{% load cache %}{% cache foo bar %}{% endcache %}', {}, template.TemplateSyntaxError),
'cache14': ('{% load cache %}{% cache foo bar %}{% endcache %}', {'foo': 'fail'}, template.TemplateSyntaxError),
'cache15': ('{% load cache %}{% cache foo bar %}{% endcache %}', {'foo': []}, template.TemplateSyntaxError),
# Regression test for #7460.
'cache16': ('{% load cache %}{% cache 1 foo bar %}{% endcache %}', {'foo': 'foo', 'bar': 'with spaces'}, ''),
# Regression test for #11270.
'cache17': ('{% load cache %}{% cache 10 long_cache_key poem %}Some Content{% endcache %}', {'poem': 'Oh freddled gruntbuggly/Thy micturations are to me/As plurdled gabbleblotchits/On a lurgid bee/That mordiously hath bitled out/Its earted jurtles/Into a rancid festering/Or else I shall rend thee in the gobberwarts with my blurglecruncheon/See if I dont.'}, 'Some Content'),
### AUTOESCAPE TAG ##############################################
'autoescape-tag01': ("{% autoescape off %}hello{% endautoescape %}", {}, "hello"),
'autoescape-tag02': ("{% autoescape off %}{{ first }}{% endautoescape %}", {"first": "<b>hello</b>"}, "<b>hello</b>"),
'autoescape-tag03': ("{% autoescape on %}{{ first }}{% endautoescape %}", {"first": "<b>hello</b>"}, "<b>hello</b>"),
# Autoescape disabling and enabling nest in a predictable way.
'autoescape-tag04': ("{% autoescape off %}{{ first }} {% autoescape on%}{{ first }}{% endautoescape %}{% endautoescape %}", {"first": "<a>"}, "<a> <a>"),
'autoescape-tag05': ("{% autoescape on %}{{ first }}{% endautoescape %}", {"first": "<b>first</b>"}, "<b>first</b>"),
# Strings (ASCII or unicode) already marked as "safe" are not
# auto-escaped
'autoescape-tag06': ("{{ first }}", {"first": mark_safe("<b>first</b>")}, "<b>first</b>"),
'autoescape-tag07': ("{% autoescape on %}{{ first }}{% endautoescape %}", {"first": mark_safe(u"<b>Apple</b>")}, u"<b>Apple</b>"),
# Literal string arguments to filters, if used in the result, are
# safe.
'autoescape-tag08': (r'{% autoescape on %}{{ var|default_if_none:" endquote\" hah" }}{% endautoescape %}', {"var": None}, ' endquote" hah'),
# Objects which return safe strings as their __unicode__ method
# won't get double-escaped.
'autoescape-tag09': (r'{{ unsafe }}', {'unsafe': filters.UnsafeClass()}, 'you & me'),
'autoescape-tag10': (r'{{ safe }}', {'safe': filters.SafeClass()}, 'you > me'),
# The "safe" and "escape" filters cannot work due to internal
# implementation details (fortunately, the (no)autoescape block
# tags can be used in those cases)
'autoescape-filtertag01': ("{{ first }}{% filter safe %}{{ first }} x<y{% endfilter %}", {"first": "<a>"}, template.TemplateSyntaxError),
            # ifequal compares unescaped values.
'autoescape-ifequal01': ('{% ifequal var "this & that" %}yes{% endifequal %}', { "var": "this & that" }, "yes"),
# Arguments to filters are 'safe' and manipulate their input unescaped.
'autoescape-filters01': ('{{ var|cut:"&" }}', { "var": "this & that" }, "this that" ),
'autoescape-filters02': ('{{ var|join:" & \" }}', { "var": ("Tom", "Dick", "Harry") }, "Tom & Dick & Harry"),
# Literal strings are safe.
'autoescape-literals01': ('{{ "this & that" }}',{}, "this & that"),
# Iterating over strings outputs safe characters.
'autoescape-stringiterations01': ('{% for l in var %}{{ l }},{% endfor %}', {'var': 'K&R'}, "K,&,R,"),
# Escape requirement survives lookup.
'autoescape-lookup01': ('{{ var.key }}', { "var": {"key": "this & that" }}, "this & that"),
# Static template tags
'static-prefixtag01': ('{% load static %}{% get_static_prefix %}', {}, settings.STATIC_URL),
'static-prefixtag02': ('{% load static %}{% get_static_prefix as static_prefix %}{{ static_prefix }}', {}, settings.STATIC_URL),
'static-prefixtag03': ('{% load static %}{% get_media_prefix %}', {}, settings.MEDIA_URL),
'static-prefixtag04': ('{% load static %}{% get_media_prefix as media_prefix %}{{ media_prefix }}', {}, settings.MEDIA_URL),
}
class TemplateTagLoading(unittest.TestCase):
    """Tests for ``{% load %}`` error reporting when a template-tag library
    is broken, or is shipped inside a Python egg."""

    def setUp(self):
        # Snapshot process-global state (sys.path, INSTALLED_APPS and the
        # template-tag module cache) so each test can mutate it freely and
        # tearDown() can restore it.
        self.old_path = sys.path[:]
        self.old_apps = settings.INSTALLED_APPS
        self.egg_dir = '%s/eggs' % os.path.dirname(__file__)
        self.old_tag_modules = template_base.templatetags_modules
        template_base.templatetags_modules = []

    def tearDown(self):
        # Restore the globals captured in setUp().
        settings.INSTALLED_APPS = self.old_apps
        sys.path = self.old_path
        template_base.templatetags_modules = self.old_tag_modules

    def test_load_error(self):
        # Loading a tag library whose import fails must raise
        # TemplateSyntaxError and surface the underlying ImportError text.
        ttext = "{% load broken_tag %}"
        self.assertRaises(template.TemplateSyntaxError, template.Template, ttext)
        try:
            template.Template(ttext)
        except template.TemplateSyntaxError, e:
            self.assertTrue('ImportError' in e.args[0])
            self.assertTrue('Xtemplate' in e.args[0])

    def test_load_error_egg(self):
        # Same as test_load_error, but the broken library lives inside an
        # egg added to sys.path at test time.
        ttext = "{% load broken_egg %}"
        egg_name = '%s/tagsegg.egg' % self.egg_dir
        sys.path.append(egg_name)
        settings.INSTALLED_APPS = ('tagsegg',)
        self.assertRaises(template.TemplateSyntaxError, template.Template, ttext)
        try:
            template.Template(ttext)
        except template.TemplateSyntaxError, e:
            self.assertTrue('ImportError' in e.args[0])
            self.assertTrue('Xtemplate' in e.args[0])

    def test_load_working_egg(self):
        # A valid tag library inside an egg must load without raising; the
        # assignment to `t` only exists to show construction succeeds.
        ttext = "{% load working_egg %}"
        egg_name = '%s/tagsegg.egg' % self.egg_dir
        sys.path.append(egg_name)
        settings.INSTALLED_APPS = ('tagsegg',)
        t = template.Template(ttext)
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    unittest.main()
| gpl-3.0 |
Matt-Deacalion/django | tests/admin_changelist/admin.py | 247 | 3931 | from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from django.core.paginator import Paginator
from .models import Child, Event, Parent, Swallow
# Dedicated AdminSite instance so these test ModelAdmins do not pollute
# the default global admin registry.
site = admin.AdminSite(name="admin")
site.register(User, UserAdmin)
class CustomPaginator(Paginator):
    # Test double: deliberately ignores the requested page_size and forces
    # 5 items per page with 2 orphans, so tests can detect that this custom
    # paginator class was actually used by the changelist.
    def __init__(self, queryset, page_size, orphans=0, allow_empty_first_page=True):
        super(CustomPaginator, self).__init__(queryset, 5, orphans=2,
            allow_empty_first_page=allow_empty_first_page)
class EventAdmin(admin.ModelAdmin):
    # Expose the event date via a callable column rather than a field name.
    list_display = ['event_date_func']

    def event_date_func(self, event):
        return event.date

site.register(Event, EventAdmin)
class ParentAdmin(admin.ModelAdmin):
    # Filter and search across the reverse relation to Child.
    list_filter = ['child__name']
    search_fields = ['child__name']
class ChildAdmin(admin.ModelAdmin):
    list_display = ['name', 'parent']
    list_per_page = 10
    list_filter = ['parent', 'age']

    def get_queryset(self, request):
        # NOTE(review): select_related() normally takes relation paths;
        # "parent__name" traverses into what looks like a plain field —
        # confirm this is intentional for the changelist tests.
        return super(ChildAdmin, self).get_queryset(request).select_related("parent__name")
class CustomPaginationAdmin(ChildAdmin):
    # Same changelist as ChildAdmin, but paginated by CustomPaginator.
    paginator = CustomPaginator
class FilteredChildAdmin(admin.ModelAdmin):
    list_display = ['name', 'parent']
    list_per_page = 10

    def get_queryset(self, request):
        # Restrict the changelist to rows whose name contains 'filtered'.
        return super(FilteredChildAdmin, self).get_queryset(request).filter(
            name__contains='filtered')
# Minimal ModelAdmins exercising list_filter / search_fields across
# many-to-many and related-model lookups.
class BandAdmin(admin.ModelAdmin):
    list_filter = ['genres']

class GroupAdmin(admin.ModelAdmin):
    list_filter = ['members']

class ConcertAdmin(admin.ModelAdmin):
    list_filter = ['group__members']
    search_fields = ['group__members__name']

class QuartetAdmin(admin.ModelAdmin):
    list_filter = ['members']

class ChordsBandAdmin(admin.ModelAdmin):
    list_filter = ['members']

class InvitationAdmin(admin.ModelAdmin):
    # list_select_related narrows the changelist JOIN to 'player' only.
    list_display = ('band', 'player')
    list_select_related = ('player',)
class DynamicListDisplayChildAdmin(admin.ModelAdmin):
    """ModelAdmin whose changelist columns depend on the requesting user."""
    list_display = ('parent', 'name', 'age')

    def get_list_display(self, request):
        # Hide the 'parent' column from the special 'noparents' user;
        # everyone else sees the static list_display unchanged.
        columns = super(DynamicListDisplayChildAdmin, self).get_list_display(request)
        if request.user.username == 'noparents':
            columns = [col for col in columns if col != 'parent']
        return columns
class DynamicListDisplayLinksChildAdmin(admin.ModelAdmin):
    list_display = ('parent', 'name', 'age')
    list_display_links = ['parent', 'name']

    def get_list_display_links(self, request, list_display):
        # Always link from the 'age' column, overriding list_display_links.
        return ['age']

site.register(Child, DynamicListDisplayChildAdmin)
class NoListDisplayLinksParentAdmin(admin.ModelAdmin):
    # None disables changelist row links entirely.
    list_display_links = None

site.register(Parent, NoListDisplayLinksParentAdmin)
class SwallowAdmin(admin.ModelAdmin):
    actions = None  # prevent ['action_checkbox'] + list(list_display)
    list_display = ('origin', 'load', 'speed', 'swallowonetoone')

site.register(Swallow, SwallowAdmin)
class DynamicListFilterChildAdmin(admin.ModelAdmin):
    """ModelAdmin whose filter sidebar depends on the requesting user."""
    list_filter = ('parent', 'name', 'age')

    def get_list_filter(self, request):
        # Drop the 'parent' filter for the special 'noparents' user;
        # everyone else keeps the static list_filter unchanged.
        filters = super(DynamicListFilterChildAdmin, self).get_list_filter(request)
        if request.user.username == 'noparents':
            filters = [item for item in filters if item != 'parent']
        return filters
class DynamicSearchFieldsChildAdmin(admin.ModelAdmin):
    search_fields = ('name',)

    def get_search_fields(self, request):
        # Extend the static search_fields with 'age' at request time.
        search_fields = super(DynamicSearchFieldsChildAdmin, self).get_search_fields(request)
        search_fields += ('age',)
        return search_fields
class EmptyValueChildAdmin(admin.ModelAdmin):
    # Rendered in place of None/empty values in the changelist.
    empty_value_display = '-empty-'
    list_display = ('name', 'age_display', 'age')

    def age_display(self, obj):
        return obj.age
    # Per-callable override of the admin-wide empty-value marker.
    age_display.empty_value_display = '†'
| bsd-3-clause |
ericzolf/ansible | test/support/windows-integration/plugins/modules/win_security_policy.py | 99 | 3842 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# this is a windows documentation stub, actual code lives in the .ps1
# file of the same name
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: win_security_policy
version_added: '2.4'
short_description: Change local security policy settings
description:
- Allows you to set the local security policies that are configured by
SecEdit.exe.
options:
section:
description:
- The ini section the key exists in.
- If the section does not exist then the module will return an error.
- Example sections to use are 'Account Policies', 'Local Policies',
'Event Log', 'Restricted Groups', 'System Services', 'Registry' and
'File System'
- If wanting to edit the C(Privilege Rights) section, use the
M(win_user_right) module instead.
type: str
required: yes
key:
description:
- The ini key of the section or policy name to modify.
- The module will return an error if this key is invalid.
type: str
required: yes
value:
description:
- The value for the ini key or policy name.
- If the key takes in a boolean value then 0 = False and 1 = True.
type: str
required: yes
notes:
- This module uses the SecEdit.exe tool to configure the values, more details
of the areas and keys that can be configured can be found here
U(https://msdn.microsoft.com/en-us/library/bb742512.aspx).
- If you are in a domain environment these policies may be set by a GPO policy,
this module can temporarily change these values but the GPO will override
it if the value differs.
- You can also run C(SecEdit.exe /export /cfg C:\temp\output.ini) to view the
current policies set on your system.
- When assigning user rights, use the M(win_user_right) module instead.
seealso:
- module: win_user_right
author:
- Jordan Borean (@jborean93)
'''
EXAMPLES = r'''
- name: Change the guest account name
win_security_policy:
section: System Access
key: NewGuestName
value: Guest Account
- name: Set the maximum password age
win_security_policy:
section: System Access
key: MaximumPasswordAge
value: 15
- name: Do not store passwords using reversible encryption
win_security_policy:
section: System Access
key: ClearTextPassword
value: 0
- name: Enable system events
win_security_policy:
section: Event Audit
key: AuditSystemEvents
value: 1
'''
RETURN = r'''
rc:
description: The return code after a failure when running SecEdit.exe.
returned: failure with secedit calls
type: int
sample: -1
stdout:
description: The output of the STDOUT buffer after a failure when running
SecEdit.exe.
returned: failure with secedit calls
type: str
sample: check log for error details
stderr:
description: The output of the STDERR buffer after a failure when running
SecEdit.exe.
returned: failure with secedit calls
type: str
sample: failed to import security policy
import_log:
description: The log of the SecEdit.exe /configure job that configured the
local policies. This is used for debugging purposes on failures.
returned: secedit.exe /import run and change occurred
type: str
sample: Completed 6 percent (0/15) \tProcess Privilege Rights area.
key:
description: The key in the section passed to the module to modify.
returned: success
type: str
sample: NewGuestName
section:
description: The section passed to the module to modify.
returned: success
type: str
sample: System Access
value:
description: The value passed to the module to modify to.
returned: success
type: str
sample: Guest Account
'''
| gpl-3.0 |
BIT-SYS/gem5-spm-module | src/arch/x86/isa/insts/simd128/floating_point/data_conversion/convert_floating_point_to_floating_point.py | 90 | 4105 | # Copyright (c) 2007 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
microcode = '''
def macroop CVTSS2SD_XMM_XMM {
cvtf2f xmml, xmmlm, destSize=8, srcSize=4, ext=Scalar
};
def macroop CVTSS2SD_XMM_M {
ldfp ufp1, seg, sib, disp, dataSize=8
cvtf2f xmml, ufp1, destSize=8, srcSize=4, ext=Scalar
};
def macroop CVTSS2SD_XMM_P {
rdip t7
ldfp ufp1, seg, riprel, disp, dataSize=8
cvtf2f xmml, ufp1, destSize=8, srcSize=4, ext=Scalar
};
def macroop CVTSD2SS_XMM_XMM {
cvtf2f xmml, xmmlm, destSize=4, srcSize=8, ext=Scalar
};
def macroop CVTSD2SS_XMM_M {
ldfp ufp1, seg, sib, disp, dataSize=8
cvtf2f xmml, ufp1, destSize=4, srcSize=8, ext=Scalar
};
def macroop CVTSD2SS_XMM_P {
rdip t7
ldfp ufp1, seg, riprel, disp, dataSize=8
cvtf2f xmml, ufp1, destSize=4, srcSize=8, ext=Scalar
};
def macroop CVTPS2PD_XMM_XMM {
cvtf2f xmmh, xmmlm, destSize=8, srcSize=4, ext=2
cvtf2f xmml, xmmlm, destSize=8, srcSize=4, ext=0
};
def macroop CVTPS2PD_XMM_M {
ldfp ufp1, seg, sib, disp, dataSize=8
cvtf2f xmmh, ufp1, destSize=8, srcSize=4, ext=2
cvtf2f xmml, ufp1, destSize=8, srcSize=4, ext=0
};
def macroop CVTPS2PD_XMM_P {
rdip t7
ldfp ufp1, seg, riprel, disp, dataSize=8
cvtf2f xmmh, ufp1, destSize=8, srcSize=4, ext=2
cvtf2f xmml, ufp1, destSize=8, srcSize=4, ext=0
};
def macroop CVTPD2PS_XMM_XMM {
cvtf2f xmml, xmmlm, destSize=4, srcSize=8, ext=0
cvtf2f xmml, xmmhm, destSize=4, srcSize=8, ext=2
lfpimm xmmh, 0
};
def macroop CVTPD2PS_XMM_M {
ldfp ufp1, seg, sib, "DISPLACEMENT", dataSize=8
ldfp ufp2, seg, sib, "DISPLACEMENT + 8", dataSize=8
cvtf2f xmml, ufp1, destSize=4, srcSize=8, ext=0
cvtf2f xmml, ufp2, destSize=4, srcSize=8, ext=2
lfpimm xmmh, 0
};
def macroop CVTPD2PS_XMM_P {
rdip t7
ldfp ufp1, seg, riprel, "DISPLACEMENT", dataSize=8
ldfp ufp2, seg, riprel, "DISPLACEMENT + 8", dataSize=8
cvtf2f xmml, ufp1, destSize=4, srcSize=8, ext=0
cvtf2f xmml, ufp2, destSize=4, srcSize=8, ext=2
lfpimm xmmh, 0
};
'''
| bsd-3-clause |
roisagiv/webrtc-ios | tools/gyp/test/actions/gyptest-all.py | 1 | 3219 | #!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies simple actions when using an explicit build target of 'all'.
"""
import glob
import os
import TestGyp
# Drive gyp's "actions" fixture in an isolated work area.
test = TestGyp.TestGyp(workdir='workarea_all')

test.run_gyp('actions.gyp', chdir='src')

test.relocate('src', 'relocate/src')

# Test that an "always run" action increases a counter on multiple invocations,
# and that a dependent action updates in step.
test.build('actions.gyp', test.ALL, chdir='relocate/src')
test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '1')
test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '1')
test.build('actions.gyp', test.ALL, chdir='relocate/src')
test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2')
test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2')

# The "always run" action only counts to 2, but the dependent target will count
# forever if it's allowed to run. This verifies that the dependent target only
# runs when the "always run" action generates new output, not just because the
# "always run" ran.
test.build('actions.gyp', test.ALL, chdir='relocate/src')
test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2')
test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2')

expect = """\
Hello from program.c
Hello from make-prog1.py
Hello from make-prog2.py
"""

# xcode builds each subdir's products under that subdir.
if test.format == 'xcode':
  chdir = 'relocate/src/subdir1'
else:
  chdir = 'relocate/src'
test.run_built_executable('program', chdir=chdir, stdout=expect)

test.must_match('relocate/src/subdir2/file.out', "Hello from make-file.py\n")

expect = "Hello from generate_main.py\n"

if test.format == 'xcode':
  chdir = 'relocate/src/subdir3'
else:
  chdir = 'relocate/src'
test.run_built_executable('null_input', chdir=chdir, stdout=expect)
# Clean out files which may have been created if test.ALL was run.
def clean_dep_files():
  """Remove dependency marker files created by previous builds.

  Deletes relocate/src/dep_*.txt and relocate/src/deps_all_done_*.txt so
  that later assertions about which dependency actions ran start from a
  clean slate. Missing files are ignored.
  """
  # 'path' rather than 'file': the original loop variable shadowed the
  # builtin name `file`.
  for path in (glob.glob('relocate/src/dep_*.txt') +
               glob.glob('relocate/src/deps_all_done_*.txt')):
    if os.path.exists(path):
      os.remove(path)
# Confirm our clean.
clean_dep_files()
test.must_not_exist('relocate/src/dep_1.txt')
test.must_not_exist('relocate/src/deps_all_done_first_123.txt')

# Make sure all deps finish before an action is run on a 'None' target.
# If using the Make builder, add -j to make things more difficult.
arguments = []
if test.format == 'make':
  arguments = ['-j']
test.build('actions.gyp', 'action_with_dependencies_123', chdir='relocate/src',
           arguments=arguments)
test.must_exist('relocate/src/deps_all_done_first_123.txt')

# Try again with a target that has deps in reverse.  Output files from
# previous tests deleted.  Confirm this execution did NOT run the ALL
# target which would mess up our dep tests.
clean_dep_files()
test.build('actions.gyp', 'action_with_dependencies_321', chdir='relocate/src',
           arguments=arguments)
test.must_exist('relocate/src/deps_all_done_first_321.txt')
test.must_not_exist('relocate/src/deps_all_done_first_123.txt')

# Report success (prints PASSED and exits 0).
test.pass_test()
| bsd-3-clause |
larroy/mxnet | example/gluon/tree_lstm/main.py | 8 | 7577 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# This example is inspired by https://github.com/dasguptar/treelstm.pytorch
import argparse, math, os, random
try:
import cPickle as pickle
except ImportError:
import pickle
import logging
logging.basicConfig(level=logging.INFO)
import numpy as np
from tqdm import tqdm
import mxnet as mx
from mxnet import gluon
from mxnet.gluon import nn
from mxnet import autograd as ag
from tree_lstm import SimilarityTreeLSTM
from dataset import Vocab, SICKDataIter
# Command-line interface and global hyper-parameters.
parser = argparse.ArgumentParser(description='TreeLSTM for Sentence Similarity on Dependency Trees')
parser.add_argument('--data', default='data/sick/',
                    help='path to raw dataset. required when preprocessed dataset is not available.')
parser.add_argument('--word_embed', default='data/glove/glove.840B.300d.txt',
                    help='directory with word embeddings. required when preprocessed dataset is not available.')
parser.add_argument('--batch_size', type=int, default=25,
                    help='training batch size per device (CPU/GPU).')
parser.add_argument('--epochs', default=50, type=int,
                    help='number of total epochs to run')
parser.add_argument('--lr', default=0.02, type=float,
                    help='initial learning rate')
parser.add_argument('--wd', default=0.0001, type=float,
                    help='weight decay factor')
parser.add_argument('--optimizer', default='adagrad',
                    help='optimizer (default: adagrad)')
parser.add_argument('--seed', default=123, type=int,
                    help='random seed (default: 123)')
parser.add_argument('--use-gpu', action='store_true',
                    help='whether to use GPU.')
opt = parser.parse_args()
logging.info(opt)

# Device list, model sizes and a fixed seed for all three RNG sources so
# runs are reproducible.
context = [mx.gpu(0) if opt.use_gpu else mx.cpu()]
rnn_hidden_size, sim_hidden_size, num_classes = 150, 50, 5
optimizer = opt.optimizer.lower()
mx.random.seed(opt.seed)
np.random.seed(opt.seed)
random.seed(opt.seed)
batch_size = opt.batch_size
# read dataset
# NOTE: pickle is only safe here because 'dataset.pickle' is a cache
# produced by this script itself; never unpickle untrusted files.
if os.path.exists('dataset.pickle'):
    with open('dataset.pickle', 'rb') as f:
        train_iter, dev_iter, test_iter, vocab = pickle.load(f)
else:
    # Build vocab and iterators from raw SICK token files, then cache them.
    root_dir = opt.data
    segments = ['train', 'dev', 'test']
    token_files = [os.path.join(root_dir, seg, '%s.toks'%tok)
                   for tok in ['a', 'b']
                   for seg in segments]
    vocab = Vocab(filepaths=token_files, embedpath=opt.word_embed)
    train_iter, dev_iter, test_iter = [SICKDataIter(os.path.join(root_dir, segment), vocab, num_classes)
                                       for segment in segments]
    with open('dataset.pickle', 'wb') as f:
        pickle.dump([train_iter, dev_iter, test_iter, vocab], f)

logging.info('==> SICK vocabulary size : %d ' % vocab.size)
logging.info('==> Size of train data : %d ' % len(train_iter))
logging.info('==> Size of dev data : %d ' % len(dev_iter))
logging.info('==> Size of test data : %d ' % len(test_iter))

# get network
net = SimilarityTreeLSTM(sim_hidden_size, rnn_hidden_size, vocab.size, vocab.embed.shape[1], num_classes)

# use pearson correlation and mean-square error for evaluation
metric = mx.metric.create(['pearsonr', 'mse'])
def to_target(x):
    """Encode a real-valued similarity score x (in [1, num_classes]) as a
    (1, num_classes) probability distribution split between the two
    nearest integer classes."""
    dist = np.zeros((1, num_classes))
    low = int(math.floor(x))
    high = int(math.ceil(x))
    if low == high:
        # x is already an integer score: all mass on that class.
        dist[0][low - 1] = 1
    else:
        # Split mass between floor and ceil proportionally to proximity.
        dist[0][low - 1] = high - x
        dist[0][high - 1] = x - low
    return mx.nd.array(dist)
def to_score(x):
    # Collapse a batch of per-class log-probabilities into one expected
    # similarity score per row: sum over k in 1..5 of k * p_k.
    levels = mx.nd.arange(1, 6, ctx=x.context)
    return [mx.nd.sum(levels*mx.nd.exp(x), axis=1).reshape((-1,1))]
# when evaluating in validation mode, check and see if pearson-r is improved
# if so, checkpoint and run evaluation on test dataset
def test(ctx, data_iter, best, mode='validation', num_iter=-1):
    """Run the (module-global) `net` over `data_iter` and log the metrics.

    In 'validation' mode with num_iter >= 0, checkpoints the model and
    recursively evaluates on the global `test_iter` whenever pearson-r
    improves on `best`. Returns the best pearson-r seen so far.
    """
    data_iter.reset()
    batches = len(data_iter)
    data_iter.set_context(ctx[0])
    preds = []
    labels = [mx.nd.array(data_iter.labels, ctx=ctx[0]).reshape((-1,1))]
    for _ in tqdm(range(batches), desc='Testing in {} mode'.format(mode)):
        l_tree, l_sent, r_tree, r_sent, label = data_iter.next()
        z = net(mx.nd, l_sent, r_sent, l_tree, r_tree)
        preds.append(z)
    # Convert log-probabilities to scalar scores before scoring.
    preds = to_score(mx.nd.concat(*preds, dim=0))
    metric.update(preds, labels)
    names, values = metric.get()
    metric.reset()
    for name, acc in zip(names, values):
        logging.info(mode+' acc: %s=%f'%(name, acc))
        if name == 'pearsonr':
            test_r = acc
    if mode == 'validation' and num_iter >= 0:
        if test_r >= best:
            best = test_r
            logging.info('New optimum found: {}. Checkpointing.'.format(best))
            net.save_parameters('childsum_tree_lstm_{}.params'.format(num_iter))
            # Evaluate the new best model on the held-out test set.
            test(ctx, test_iter, -1, 'test')
    return best
def train(epoch, ctx, train_data, dev_data):
    """Train the global `net` for `epoch` epochs, validating after each one.

    :param epoch: number of epochs to run
    :param ctx: MXNet context (or list of contexts; only the first is used)
    :param train_data: training data iterator
    :param dev_data: validation data iterator
    """
    # initialization with context
    if isinstance(ctx, mx.Context):
        ctx = [ctx]
    net.initialize(mx.init.Xavier(magnitude=2.24), ctx=ctx[0])
    # seed the embedding layer with the pretrained vocabulary embeddings
    net.embed.weight.set_data(vocab.embed.as_in_context(ctx[0]))
    train_data.set_context(ctx[0])
    dev_data.set_context(ctx[0])
    # set up trainer for optimizing the network.
    trainer = gluon.Trainer(net.collect_params(), optimizer, {'learning_rate': opt.lr, 'wd': opt.wd})
    best_r = -1
    # the network outputs log-probabilities, hence KL-divergence loss
    Loss = gluon.loss.KLDivLoss()
    for i in range(epoch):
        train_data.reset()
        num_batches = len(train_data)
        # collect predictions and labels for evaluation metrics
        preds = []
        labels = [mx.nd.array(train_data.labels, ctx=ctx[0]).reshape((-1,1))]
        for j in tqdm(range(num_batches), desc='Training epoch {}'.format(i)):
            # get next batch
            l_tree, l_sent, r_tree, r_sent, label = train_data.next()
            # use autograd to record the forward calculation
            with ag.record():
                # forward calculation. the output is log probability
                z = net(mx.nd, l_sent, r_sent, l_tree, r_tree)
                # calculate loss
                loss = Loss(z, to_target(label).as_in_context(ctx[0]))
            # backward calculation for gradients.
            loss.backward()
            preds.append(z)
            # update weight after every batch_size samples
            if (j+1) % batch_size == 0:
                trainer.step(batch_size)
        # translate log-probability to scores, and evaluate
        preds = to_score(mx.nd.concat(*preds, dim=0))
        metric.update(preds, labels)
        names, values = metric.get()
        metric.reset()
        for name, acc in zip(names, values):
            logging.info('training acc at epoch %d: %s=%f'%(i, name, acc))
        # validate; checkpoints whenever pearson-r improves
        best_r = test(ctx, dev_data, best_r, num_iter=i)
# Entry point: run training with the command-line options parsed earlier
# (opt, context, train_iter and dev_iter are defined above in this script).
train(opt.epochs, context, train_iter, dev_iter)
| apache-2.0 |
davidyezsetz/kuma | vendor/packages/translate-toolkit/translate/convert/__init__.py | 7 | 1265 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2002-2005 Zuza Software Foundation
#
# This file is part of translate.
#
# translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with translate; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""translate.convert is part of the translate package
It contains code to convert between different storage formats for localizations.
@group XLIFF: *xliff*
@group Bilingual: pot2po po2tmx oo2po po2oo csv2tbx *wordfast* *ts*
@group Monolingual: *prop* *dtd* csv2po po2csv *html* *ical* *ini* *rc* *txt* moz2po po2moz *php* *sub* *symb* *monopo* *tiki* *web2py* *lang* skype
@group Support: accesskey convert
@group Other: poreplace
"""
| mpl-2.0 |
bencesomogyi/pyCFD | pyCFD_mesh/vertex.py | 1 | 3347 | #-------------------------------------------------------------------------------
# Name: vertex
# Purpose: class for 3D vertices
#
# Author: bencesomogyi
#
# Created: 19.10.2013
# Copyright: (c) bencesomogyi 2013
# Licence: <your licence>
#-------------------------------------------------------------------------------
import numpy
class Vertex:
    """A point in 3D space that keeps references to the mesh entities
    (faces, cells) it belongs to."""

    def __init__(self, X=0.0, Y=0.0, Z=0.0):
        """Create a vertex at (X, Y, Z).

        :param X: default: 0.0, x coordinate of the vertex
        :type X: float
        :param Y: default: 0.0, y coordinate of the vertex
        :type Y: float
        :param Z: default: 0.0, z coordinate of the vertex
        :type Z: float
        """
        self.X = X  # vertex X coordinate
        self.Y = Y  # vertex Y coordinate
        self.Z = Z  # vertex Z coordinate
        # NOTE: coords duplicates X/Y/Z; it stays in sync only when the
        # coordinates are changed through setX/setY/setZ.
        self.coords = numpy.array([X, Y, Z])
        self.father = []  # reference to father object
        self.faces = []   # references to connected faces
        self.cells = []   # references to connected cells
        self.id = 0.      # vertex id (float, matching the original interface)

    def get_coords(self):
        """Return the coordinates as a fresh numpy array."""
        return numpy.array([self.X, self.Y, self.Z])

    def setX(self, newX):
        """Set the X coordinate, keeping self.coords in sync."""
        self.X = newX
        self.coords[0] = newX

    def setY(self, newY):
        """Set the Y coordinate, keeping self.coords in sync."""
        self.Y = newY
        self.coords[1] = newY

    def setZ(self, newZ):
        """Set the Z coordinate, keeping self.coords in sync."""
        self.Z = newZ
        self.coords[2] = newZ

    def print_coordinates(self):
        """Print the vertex id and coordinates to stdout."""
        # print() with a single argument behaves identically on Python 2
        # (statement with redundant parentheses) and Python 3 (builtin call);
        # the original `print ...` statement form was a SyntaxError on Python 3.
        print("id: " + str(self.id) + " " + str(self.X) + " " + str(self.Y) + " " + str(self.Z))

    def get_cell_ids(self):
        """Return the ids of all cells connected to this vertex."""
        cell_ids = []
        for cell_ in self.cells:
            cell_ids.append(cell_.id)
        return cell_ids

    def __eq__(self, other):
        # Pure coordinate comparison; connectivity and id are ignored.
        # NOTE(review): no __hash__ is defined, so on Python 3 instances are
        # unhashable -- confirm no caller uses vertices as dict/set keys.
        return (self.X == other.X) and (self.Y == other.Y) and (self.Z == other.Z)
def are_vertices_equal(vertex1, vertex2):
    """Return True when both vertices share identical X, Y and Z coordinates."""
    return all(getattr(vertex1, axis) == getattr(vertex2, axis)
               for axis in ("X", "Y", "Z"))
def get_independent_vertices(vertex_list):
    """Return the distinct vertices of *vertex_list*, first occurrence kept.

    Distinctness is decided with ``==`` (coordinate equality for Vertex
    objects), so duplicated positions collapse to a single entry.

    :param vertex_list: list of vertices
    :type vertex_list: dict
    :return: list of independent vertices
    :rtype: dict
    """
    unique = []
    for candidate in vertex_list:
        if not any(candidate == kept for kept in unique):
            unique.append(candidate)
    return unique
def get_list_of_ids(vertex_list):
    """Return the id / list of ids for a single Vertex or a list of vertices.

    A lone Vertex yields a one-element list; any other iterable yields the
    id of each contained vertex, in order.
    """
    if isinstance(vertex_list, Vertex):
        return [vertex_list.id]
    return [vertex_.id for vertex_ in vertex_list]
| gpl-3.0 |
350dotorg/Django | django/utils/autoreload.py | 135 | 4239 | # Autoreloading launcher.
# Borrowed from Peter Hunt and the CherryPy project (http://www.cherrypy.org).
# Some taken from Ian Bicking's Paste (http://pythonpaste.org/).
#
# Portions copyright (c) 2004, CherryPy Team (team@cherrypy.org)
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the CherryPy Team nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os, sys, time
try:
import thread
except ImportError:
import dummy_thread as thread
# This import does nothing, but it's necessary to avoid some race conditions
# in the threading module. See http://code.djangoproject.com/ticket/2330 .
try:
import threading
except ImportError:
pass
# Flag polled by reloader_thread(); clearing it stops the polling loop.
RUN_RELOADER = True

# Mapping of watched source file -> modification time seen on the last scan.
_mtimes = {}
# On Windows, st_mtime is offset by st_ctime before comparison (see code_changed).
_win = (sys.platform == "win32")
def code_changed():
    """Return True if any loaded module's source file changed since last scan.

    Side effect: records new files in the module-level _mtimes cache, and
    resets the whole cache whenever a change is detected.
    """
    global _mtimes, _win
    for module in sys.modules.values():
        filename = getattr(module, "__file__", None)
        if not filename:
            continue
        # Watch the .py source, not the compiled artifact.
        if filename.endswith((".pyc", ".pyo")):
            filename = filename[:-1]
        if not os.path.exists(filename):
            continue  # File might be in an egg, so it can't be reloaded.
        stat = os.stat(filename)
        mtime = stat.st_mtime
        if _win:
            mtime -= stat.st_ctime
        if filename not in _mtimes:
            _mtimes[filename] = mtime
            continue
        if mtime != _mtimes[filename]:
            _mtimes = {}
            return True
    return False
def reloader_thread():
    """Poll for source changes once per second; exit with code 3 (the
    parent process interprets that as 'restart me')."""
    while RUN_RELOADER:
        if not code_changed():
            time.sleep(1)
            continue
        sys.exit(3)  # force reload
def restart_with_reloader():
    """Re-run this script in a child process (with RUN_MAIN=true) until it
    exits with a status other than 3; return that final status."""
    while True:
        child_args = [sys.executable] + sys.argv
        if sys.platform == "win32":
            # spawnve on Windows needs each argument quoted
            child_args = ['"%s"' % a for a in child_args]
        child_env = os.environ.copy()
        child_env["RUN_MAIN"] = 'true'
        status = os.spawnve(os.P_WAIT, sys.executable, child_args, child_env)
        if status != 3:
            return status
def python_reloader(main_func, args, kwargs):
    """CPython reloader: the child runs main_func in a background thread and
    watches for changes; the parent keeps respawning the child."""
    if os.environ.get("RUN_MAIN") == "true":
        # Child process: run the real work and watch files in this thread.
        thread.start_new_thread(main_func, args, kwargs)
        try:
            reloader_thread()
        except KeyboardInterrupt:
            pass
        return
    # Parent process: spawn/respawn the child until it exits normally.
    try:
        sys.exit(restart_with_reloader())
    except KeyboardInterrupt:
        pass
def jython_reloader(main_func, args, kwargs):
    """Jython reloader: run main_func in a background thread and raise
    SystemRestart (handled by Jython's runtime) when source files change.

    :param main_func: callable to run
    :param args: positional arguments for main_func
    :param kwargs: keyword arguments for main_func
    """
    from _systemrestart import SystemRestart
    # Forward kwargs as well, matching python_reloader and main(); the
    # previous version silently dropped them.
    thread.start_new_thread(main_func, args, kwargs)
    while True:
        if code_changed():
            raise SystemRestart
        time.sleep(1)
def main(main_func, args=None, kwargs=None):
    """Run main_func under the autoreloader appropriate for this platform."""
    args = () if args is None else args
    kwargs = {} if kwargs is None else kwargs
    if sys.platform.startswith('java'):
        reloader = jython_reloader
    else:
        reloader = python_reloader
    reloader(main_func, args, kwargs)
| bsd-3-clause |
lrowe/splinter | tests/type.py | 7 | 2407 | # -*- coding: utf-8 -*-
# Copyright 2012 splinter authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
from .fake_webapp import EXAMPLE_APP
class SlowlyTypeTest(object):
    """Mixin tests for the ``type`` API: whole-value typing and key-by-key
    ('slowly') typing, exercised both through the browser and through
    individual elements. Expects ``self.browser`` and unittest asserts."""

    def test_simple_type(self):
        """browser.type should append to the field's current value"""
        self.browser.visit(EXAMPLE_APP)

        self.browser.type('query', ' with type method')
        self.assertEqual('default value with type method',
                         self.browser.find_by_name('query').value)

        self.browser.type('description', 'type into textarea')
        self.assertEqual('type into textarea',
                         self.browser.find_by_name('description').value)

    def test_simple_type_on_element(self):
        """element.type should append to the field's current value"""
        self.browser.visit(EXAMPLE_APP)

        self.browser.find_by_name('query').type(' with type method')
        self.assertEqual('default value with type method',
                         self.browser.find_by_name('query').value)

        self.browser.find_by_name('description').type('type into textarea')
        self.assertEqual('type into textarea',
                         self.browser.find_by_name('description').value)

    def test_slowly_typing(self):
        """iterating browser.type(..., slowly=True) should emit one key at a time"""
        for field_name in ('type-input', 'type-textarea'):
            self.browser.visit(EXAMPLE_APP + 'type')
            keys_seen = 0
            for _ in self.browser.type(field_name, 'typing', slowly=True):
                # after each key, the page shows the number of keys seen so far
                self.assertEqual(self.browser.is_text_present("#%d" % keys_seen), True)
                keys_seen += 1
            self.assertEqual(keys_seen, 6)
            element = self.browser.find_by_name(field_name)
            self.assertEqual(element.value, 'typing')

    def test_slowly_typing_on_element(self):
        """iterating element.type(..., slowly=True) should emit one key at a time"""
        for field_name in ('type-input', 'type-textarea'):
            self.browser.visit(EXAMPLE_APP + 'type')
            keys_seen = 0
            text_input = self.browser.find_by_name(field_name)
            for _ in text_input.type('typing', slowly=True):
                self.assertEqual(self.browser.is_text_present("#%d" % keys_seen), True)
                keys_seen += 1
            self.assertEqual(keys_seen, 6)
            element = self.browser.find_by_name(field_name)
            self.assertEqual(element.value, 'typing')
| bsd-3-clause |
gouravshenoy/airavata | airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/messaging/event/ttypes.py | 3 | 49003 | #
# Autogenerated by Thrift Compiler (0.9.3)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TException, TApplicationException
import apache.airavata.model.status.ttypes
import apache.airavata.model.application.io.ttypes
import apache.airavata.model.commons.ttypes
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol, TProtocol
try:
from thrift.protocol import fastbinary
except:
fastbinary = None
class MessageLevel:
    """Thrift-generated enum emulation (int constants plus name<->value
    lookup maps). Do not hand-edit; regenerate from the .thrift IDL."""
    INFO = 0
    DEBUG = 1
    ERROR = 2
    ACK = 3

    # reverse lookup: wire value -> symbolic name
    _VALUES_TO_NAMES = {
        0: "INFO",
        1: "DEBUG",
        2: "ERROR",
        3: "ACK",
    }

    # forward lookup: symbolic name -> wire value
    _NAMES_TO_VALUES = {
        "INFO": 0,
        "DEBUG": 1,
        "ERROR": 2,
        "ACK": 3,
    }


class MessageType:
    """Thrift-generated enum emulation: kinds of messaging events."""
    EXPERIMENT = 0
    EXPERIMENT_CANCEL = 1
    TASK = 2
    PROCESS = 3
    JOB = 4
    LAUNCHPROCESS = 5
    TERMINATEPROCESS = 6
    PROCESSOUTPUT = 7

    # reverse lookup: wire value -> symbolic name
    _VALUES_TO_NAMES = {
        0: "EXPERIMENT",
        1: "EXPERIMENT_CANCEL",
        2: "TASK",
        3: "PROCESS",
        4: "JOB",
        5: "LAUNCHPROCESS",
        6: "TERMINATEPROCESS",
        7: "PROCESSOUTPUT",
    }

    # forward lookup: symbolic name -> wire value
    _NAMES_TO_VALUES = {
        "EXPERIMENT": 0,
        "EXPERIMENT_CANCEL": 1,
        "TASK": 2,
        "PROCESS": 3,
        "JOB": 4,
        "LAUNCHPROCESS": 5,
        "TERMINATEPROCESS": 6,
        "PROCESSOUTPUT": 7,
    }
class ExperimentStatusChangeEvent:
    """Thrift-generated struct (regenerate from the IDL; do not hand-edit).

    Attributes:
     - state
     - experimentId
     - gatewayId
    """

    # (field-id, wire type, name, type args, default); slot 0 is unused
    # because Thrift field ids start at 1.
    thrift_spec = (
        None, # 0
        (1, TType.I32, 'state', None, None, ), # 1
        (2, TType.STRING, 'experimentId', None, None, ), # 2
        (3, TType.STRING, 'gatewayId', None, None, ), # 3
    )

    def __init__(self, state=None, experimentId=None, gatewayId=None,):
        self.state = state
        self.experimentId = experimentId
        self.gatewayId = gatewayId

    def read(self, iprot):
        # Fast path: C-accelerated decoding when protocol/transport allow it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: decode field by field; unknown fields are skipped.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I32:
                    self.state = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.experimentId = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.gatewayId = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoding when available.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('ExperimentStatusChangeEvent')
        if self.state is not None:
            oprot.writeFieldBegin('state', TType.I32, 1)
            oprot.writeI32(self.state)
            oprot.writeFieldEnd()
        if self.experimentId is not None:
            oprot.writeFieldBegin('experimentId', TType.STRING, 2)
            oprot.writeString(self.experimentId)
            oprot.writeFieldEnd()
        if self.gatewayId is not None:
            oprot.writeFieldBegin('gatewayId', TType.STRING, 3)
            oprot.writeString(self.gatewayId)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # All fields are required by the IDL.
        if self.state is None:
            raise TProtocol.TProtocolException(message='Required field state is unset!')
        if self.experimentId is None:
            raise TProtocol.TProtocolException(message='Required field experimentId is unset!')
        if self.gatewayId is None:
            raise TProtocol.TProtocolException(message='Required field gatewayId is unset!')
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.state)
        value = (value * 31) ^ hash(self.experimentId)
        value = (value * 31) ^ hash(self.gatewayId)
        return value

    def __repr__(self):
        # Python 2 idiom (__dict__.iteritems); this generated module targets Python 2.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.iteritems()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class ProcessIdentifier:
    """Thrift-generated struct (regenerate from the IDL; do not hand-edit).

    Attributes:
     - processId
     - experimentId
     - gatewayId
    """

    # (field-id, wire type, name, type args, default); slot 0 is unused
    # because Thrift field ids start at 1.
    thrift_spec = (
        None, # 0
        (1, TType.STRING, 'processId', None, None, ), # 1
        (2, TType.STRING, 'experimentId', None, None, ), # 2
        (3, TType.STRING, 'gatewayId', None, None, ), # 3
    )

    def __init__(self, processId=None, experimentId=None, gatewayId=None,):
        self.processId = processId
        self.experimentId = experimentId
        self.gatewayId = gatewayId

    def read(self, iprot):
        # Fast path: C-accelerated decoding when protocol/transport allow it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: decode field by field; unknown fields are skipped.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.processId = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.experimentId = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.gatewayId = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoding when available.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('ProcessIdentifier')
        if self.processId is not None:
            oprot.writeFieldBegin('processId', TType.STRING, 1)
            oprot.writeString(self.processId)
            oprot.writeFieldEnd()
        if self.experimentId is not None:
            oprot.writeFieldBegin('experimentId', TType.STRING, 2)
            oprot.writeString(self.experimentId)
            oprot.writeFieldEnd()
        if self.gatewayId is not None:
            oprot.writeFieldBegin('gatewayId', TType.STRING, 3)
            oprot.writeString(self.gatewayId)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # All fields are required by the IDL.
        if self.processId is None:
            raise TProtocol.TProtocolException(message='Required field processId is unset!')
        if self.experimentId is None:
            raise TProtocol.TProtocolException(message='Required field experimentId is unset!')
        if self.gatewayId is None:
            raise TProtocol.TProtocolException(message='Required field gatewayId is unset!')
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.processId)
        value = (value * 31) ^ hash(self.experimentId)
        value = (value * 31) ^ hash(self.gatewayId)
        return value

    def __repr__(self):
        # Python 2 idiom (__dict__.iteritems); this generated module targets Python 2.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.iteritems()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class TaskIdentifier:
    """Thrift-generated struct (regenerate from the IDL; do not hand-edit).

    Attributes:
     - taskId
     - processId
     - experimentId
     - gatewayId
    """

    # (field-id, wire type, name, type args, default); slot 0 is unused
    # because Thrift field ids start at 1.
    thrift_spec = (
        None, # 0
        (1, TType.STRING, 'taskId', None, None, ), # 1
        (2, TType.STRING, 'processId', None, None, ), # 2
        (3, TType.STRING, 'experimentId', None, None, ), # 3
        (4, TType.STRING, 'gatewayId', None, None, ), # 4
    )

    def __init__(self, taskId=None, processId=None, experimentId=None, gatewayId=None,):
        self.taskId = taskId
        self.processId = processId
        self.experimentId = experimentId
        self.gatewayId = gatewayId

    def read(self, iprot):
        # Fast path: C-accelerated decoding when protocol/transport allow it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: decode field by field; unknown fields are skipped.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.taskId = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.processId = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.experimentId = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRING:
                    self.gatewayId = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoding when available.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('TaskIdentifier')
        if self.taskId is not None:
            oprot.writeFieldBegin('taskId', TType.STRING, 1)
            oprot.writeString(self.taskId)
            oprot.writeFieldEnd()
        if self.processId is not None:
            oprot.writeFieldBegin('processId', TType.STRING, 2)
            oprot.writeString(self.processId)
            oprot.writeFieldEnd()
        if self.experimentId is not None:
            oprot.writeFieldBegin('experimentId', TType.STRING, 3)
            oprot.writeString(self.experimentId)
            oprot.writeFieldEnd()
        if self.gatewayId is not None:
            oprot.writeFieldBegin('gatewayId', TType.STRING, 4)
            oprot.writeString(self.gatewayId)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # All fields are required by the IDL.
        if self.taskId is None:
            raise TProtocol.TProtocolException(message='Required field taskId is unset!')
        if self.processId is None:
            raise TProtocol.TProtocolException(message='Required field processId is unset!')
        if self.experimentId is None:
            raise TProtocol.TProtocolException(message='Required field experimentId is unset!')
        if self.gatewayId is None:
            raise TProtocol.TProtocolException(message='Required field gatewayId is unset!')
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.taskId)
        value = (value * 31) ^ hash(self.processId)
        value = (value * 31) ^ hash(self.experimentId)
        value = (value * 31) ^ hash(self.gatewayId)
        return value

    def __repr__(self):
        # Python 2 idiom (__dict__.iteritems); this generated module targets Python 2.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.iteritems()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class TaskStatusChangeEvent:
    """Thrift-generated struct (regenerate from the IDL; do not hand-edit).

    Attributes:
     - state
     - taskIdentity
    """

    # (field-id, wire type, name, type args, default); slot 0 is unused
    # because Thrift field ids start at 1.
    thrift_spec = (
        None, # 0
        (1, TType.I32, 'state', None, None, ), # 1
        (2, TType.STRUCT, 'taskIdentity', (TaskIdentifier, TaskIdentifier.thrift_spec), None, ), # 2
    )

    def __init__(self, state=None, taskIdentity=None,):
        self.state = state
        self.taskIdentity = taskIdentity

    def read(self, iprot):
        # Fast path: C-accelerated decoding when protocol/transport allow it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: decode field by field; unknown fields are skipped.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I32:
                    self.state = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.taskIdentity = TaskIdentifier()
                    self.taskIdentity.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoding when available.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('TaskStatusChangeEvent')
        if self.state is not None:
            oprot.writeFieldBegin('state', TType.I32, 1)
            oprot.writeI32(self.state)
            oprot.writeFieldEnd()
        if self.taskIdentity is not None:
            oprot.writeFieldBegin('taskIdentity', TType.STRUCT, 2)
            self.taskIdentity.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Both fields are required by the IDL.
        if self.state is None:
            raise TProtocol.TProtocolException(message='Required field state is unset!')
        if self.taskIdentity is None:
            raise TProtocol.TProtocolException(message='Required field taskIdentity is unset!')
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.state)
        value = (value * 31) ^ hash(self.taskIdentity)
        return value

    def __repr__(self):
        # Python 2 idiom (__dict__.iteritems); this generated module targets Python 2.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.iteritems()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class TaskStatusChangeRequestEvent:
    """Thrift-generated struct (regenerate from the IDL; do not hand-edit).

    Attributes:
     - state
     - taskIdentity
    """

    # (field-id, wire type, name, type args, default); slot 0 is unused
    # because Thrift field ids start at 1.
    thrift_spec = (
        None, # 0
        (1, TType.I32, 'state', None, None, ), # 1
        (2, TType.STRUCT, 'taskIdentity', (TaskIdentifier, TaskIdentifier.thrift_spec), None, ), # 2
    )

    def __init__(self, state=None, taskIdentity=None,):
        self.state = state
        self.taskIdentity = taskIdentity

    def read(self, iprot):
        # Fast path: C-accelerated decoding when protocol/transport allow it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: decode field by field; unknown fields are skipped.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I32:
                    self.state = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.taskIdentity = TaskIdentifier()
                    self.taskIdentity.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoding when available.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('TaskStatusChangeRequestEvent')
        if self.state is not None:
            oprot.writeFieldBegin('state', TType.I32, 1)
            oprot.writeI32(self.state)
            oprot.writeFieldEnd()
        if self.taskIdentity is not None:
            oprot.writeFieldBegin('taskIdentity', TType.STRUCT, 2)
            self.taskIdentity.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Both fields are required by the IDL.
        if self.state is None:
            raise TProtocol.TProtocolException(message='Required field state is unset!')
        if self.taskIdentity is None:
            raise TProtocol.TProtocolException(message='Required field taskIdentity is unset!')
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.state)
        value = (value * 31) ^ hash(self.taskIdentity)
        return value

    def __repr__(self):
        # Python 2 idiom (__dict__.iteritems); this generated module targets Python 2.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.iteritems()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class ProcessStatusChangeEvent:
    """Thrift-generated struct (regenerate from the IDL; do not hand-edit).

    Attributes:
     - state
     - processIdentity
    """

    # (field-id, wire type, name, type args, default); slot 0 is unused
    # because Thrift field ids start at 1.
    thrift_spec = (
        None, # 0
        (1, TType.I32, 'state', None, None, ), # 1
        (2, TType.STRUCT, 'processIdentity', (ProcessIdentifier, ProcessIdentifier.thrift_spec), None, ), # 2
    )

    def __init__(self, state=None, processIdentity=None,):
        self.state = state
        self.processIdentity = processIdentity

    def read(self, iprot):
        # Fast path: C-accelerated decoding when protocol/transport allow it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: decode field by field; unknown fields are skipped.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I32:
                    self.state = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.processIdentity = ProcessIdentifier()
                    self.processIdentity.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoding when available.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('ProcessStatusChangeEvent')
        if self.state is not None:
            oprot.writeFieldBegin('state', TType.I32, 1)
            oprot.writeI32(self.state)
            oprot.writeFieldEnd()
        if self.processIdentity is not None:
            oprot.writeFieldBegin('processIdentity', TType.STRUCT, 2)
            self.processIdentity.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Both fields are required by the IDL.
        if self.state is None:
            raise TProtocol.TProtocolException(message='Required field state is unset!')
        if self.processIdentity is None:
            raise TProtocol.TProtocolException(message='Required field processIdentity is unset!')
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.state)
        value = (value * 31) ^ hash(self.processIdentity)
        return value

    def __repr__(self):
        # Python 2 idiom (__dict__.iteritems); this generated module targets Python 2.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.iteritems()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class ProcessStatusChangeRequestEvent:
    """Thrift-generated struct (regenerate from the IDL; do not hand-edit).

    Attributes:
     - state
     - processIdentity
    """

    # (field-id, wire type, name, type args, default); slot 0 is unused
    # because Thrift field ids start at 1.
    thrift_spec = (
        None, # 0
        (1, TType.I32, 'state', None, None, ), # 1
        (2, TType.STRUCT, 'processIdentity', (ProcessIdentifier, ProcessIdentifier.thrift_spec), None, ), # 2
    )

    def __init__(self, state=None, processIdentity=None,):
        self.state = state
        self.processIdentity = processIdentity

    def read(self, iprot):
        # Fast path: C-accelerated decoding when protocol/transport allow it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: decode field by field; unknown fields are skipped.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I32:
                    self.state = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.processIdentity = ProcessIdentifier()
                    self.processIdentity.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoding when available.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('ProcessStatusChangeRequestEvent')
        if self.state is not None:
            oprot.writeFieldBegin('state', TType.I32, 1)
            oprot.writeI32(self.state)
            oprot.writeFieldEnd()
        if self.processIdentity is not None:
            oprot.writeFieldBegin('processIdentity', TType.STRUCT, 2)
            self.processIdentity.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Both fields are required by the IDL.
        if self.state is None:
            raise TProtocol.TProtocolException(message='Required field state is unset!')
        if self.processIdentity is None:
            raise TProtocol.TProtocolException(message='Required field processIdentity is unset!')
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.state)
        value = (value * 31) ^ hash(self.processIdentity)
        return value

    def __repr__(self):
        # Python 2 idiom (__dict__.iteritems); this generated module targets Python 2.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.iteritems()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class TaskOutputChangeEvent:
  """
  Attributes:
   - output
   - taskIdentity
  """

  # Autogenerated Thrift struct -- regenerate from the IDL instead of
  # hand-editing.  Python 2 only (xrange, dict.iteritems).

  thrift_spec = (
    None, # 0
    (1, TType.LIST, 'output', (TType.STRUCT,(apache.airavata.model.application.io.ttypes.OutputDataObjectType, apache.airavata.model.application.io.ttypes.OutputDataObjectType.thrift_spec)), None, ), # 1
    (2, TType.STRUCT, 'taskIdentity', (TaskIdentifier, TaskIdentifier.thrift_spec), None, ), # 2
  )

  def __init__(self, output=None, taskIdentity=None,):
    self.output = output
    self.taskIdentity = taskIdentity

  # Deserialize from iprot, delegating to the C fastbinary accelerator
  # when the protocol/transport support it.
  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.LIST:
          self.output = []
          (_etype3, _size0) = iprot.readListBegin()
          for _i4 in xrange(_size0):
            _elem5 = apache.airavata.model.application.io.ttypes.OutputDataObjectType()
            _elem5.read(iprot)
            self.output.append(_elem5)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.taskIdentity = TaskIdentifier()
          self.taskIdentity.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  # Serialize to oprot; the field order below defines the wire layout.
  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TaskOutputChangeEvent')
    if self.output is not None:
      oprot.writeFieldBegin('output', TType.LIST, 1)
      oprot.writeListBegin(TType.STRUCT, len(self.output))
      for iter6 in self.output:
        iter6.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.taskIdentity is not None:
      oprot.writeFieldBegin('taskIdentity', TType.STRUCT, 2)
      self.taskIdentity.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  # Raise TProtocolException if a required field is still None.
  def validate(self):
    if self.output is None:
      raise TProtocol.TProtocolException(message='Required field output is unset!')
    if self.taskIdentity is None:
      raise TProtocol.TProtocolException(message='Required field taskIdentity is unset!')
    return

  # NOTE(review): self.output is a list once populated, and hash(list)
  # raises TypeError -- __hash__ only succeeds while output is None.
  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.output)
    value = (value * 31) ^ hash(self.taskIdentity)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class JobIdentifier:
  """
  Attributes:
   - jobId
   - taskId
   - processId
   - experimentId
   - gatewayId
  """

  # Autogenerated Thrift struct -- regenerate from the IDL instead of
  # hand-editing.  Python 2 only (dict.iteritems in __repr__).

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'jobId', None, None, ), # 1
    (2, TType.STRING, 'taskId', None, None, ), # 2
    (3, TType.STRING, 'processId', None, None, ), # 3
    (4, TType.STRING, 'experimentId', None, None, ), # 4
    (5, TType.STRING, 'gatewayId', None, None, ), # 5
  )

  def __init__(self, jobId=None, taskId=None, processId=None, experimentId=None, gatewayId=None,):
    self.jobId = jobId
    self.taskId = taskId
    self.processId = processId
    self.experimentId = experimentId
    self.gatewayId = gatewayId

  # Deserialize from iprot, delegating to the C fastbinary accelerator
  # when the protocol/transport support it.
  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.jobId = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.taskId = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.processId = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRING:
          self.experimentId = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.STRING:
          self.gatewayId = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  # Serialize to oprot; the field order below defines the wire layout.
  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('JobIdentifier')
    if self.jobId is not None:
      oprot.writeFieldBegin('jobId', TType.STRING, 1)
      oprot.writeString(self.jobId)
      oprot.writeFieldEnd()
    if self.taskId is not None:
      oprot.writeFieldBegin('taskId', TType.STRING, 2)
      oprot.writeString(self.taskId)
      oprot.writeFieldEnd()
    if self.processId is not None:
      oprot.writeFieldBegin('processId', TType.STRING, 3)
      oprot.writeString(self.processId)
      oprot.writeFieldEnd()
    if self.experimentId is not None:
      oprot.writeFieldBegin('experimentId', TType.STRING, 4)
      oprot.writeString(self.experimentId)
      oprot.writeFieldEnd()
    if self.gatewayId is not None:
      oprot.writeFieldBegin('gatewayId', TType.STRING, 5)
      oprot.writeString(self.gatewayId)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  # Raise TProtocolException if a required field is still None.
  def validate(self):
    if self.jobId is None:
      raise TProtocol.TProtocolException(message='Required field jobId is unset!')
    if self.taskId is None:
      raise TProtocol.TProtocolException(message='Required field taskId is unset!')
    if self.processId is None:
      raise TProtocol.TProtocolException(message='Required field processId is unset!')
    if self.experimentId is None:
      raise TProtocol.TProtocolException(message='Required field experimentId is unset!')
    if self.gatewayId is None:
      raise TProtocol.TProtocolException(message='Required field gatewayId is unset!')
    return

  # NOTE(review): hash is computed from mutable attributes -- instances must
  # not be mutated while used as dict/set keys.
  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.jobId)
    value = (value * 31) ^ hash(self.taskId)
    value = (value * 31) ^ hash(self.processId)
    value = (value * 31) ^ hash(self.experimentId)
    value = (value * 31) ^ hash(self.gatewayId)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class ExperimentSubmitEvent:
  """
  Attributes:
   - experimentId
   - gatewayId
  """

  # Autogenerated Thrift struct -- regenerate from the IDL instead of
  # hand-editing.  Python 2 only (dict.iteritems in __repr__).

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'experimentId', None, None, ), # 1
    (2, TType.STRING, 'gatewayId', None, None, ), # 2
  )

  def __init__(self, experimentId=None, gatewayId=None,):
    self.experimentId = experimentId
    self.gatewayId = gatewayId

  # Deserialize from iprot, delegating to the C fastbinary accelerator
  # when the protocol/transport support it.
  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.experimentId = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.gatewayId = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  # Serialize to oprot; the field order below defines the wire layout.
  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('ExperimentSubmitEvent')
    if self.experimentId is not None:
      oprot.writeFieldBegin('experimentId', TType.STRING, 1)
      oprot.writeString(self.experimentId)
      oprot.writeFieldEnd()
    if self.gatewayId is not None:
      oprot.writeFieldBegin('gatewayId', TType.STRING, 2)
      oprot.writeString(self.gatewayId)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  # Raise TProtocolException if a required field is still None.
  def validate(self):
    if self.experimentId is None:
      raise TProtocol.TProtocolException(message='Required field experimentId is unset!')
    if self.gatewayId is None:
      raise TProtocol.TProtocolException(message='Required field gatewayId is unset!')
    return

  # NOTE(review): hash is computed from mutable attributes -- instances must
  # not be mutated while used as dict/set keys.
  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.experimentId)
    value = (value * 31) ^ hash(self.gatewayId)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class ProcessSubmitEvent:
  """
  Attributes:
   - processId
   - gatewayId
   - experimentId
   - tokenId
  """

  # Autogenerated Thrift struct -- regenerate from the IDL instead of
  # hand-editing.  Python 2 only (dict.iteritems in __repr__).

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'processId', None, None, ), # 1
    (2, TType.STRING, 'gatewayId', None, None, ), # 2
    (3, TType.STRING, 'experimentId', None, None, ), # 3
    (4, TType.STRING, 'tokenId', None, None, ), # 4
  )

  def __init__(self, processId=None, gatewayId=None, experimentId=None, tokenId=None,):
    self.processId = processId
    self.gatewayId = gatewayId
    self.experimentId = experimentId
    self.tokenId = tokenId

  # Deserialize from iprot, delegating to the C fastbinary accelerator
  # when the protocol/transport support it.
  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.processId = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.gatewayId = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.experimentId = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRING:
          self.tokenId = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  # Serialize to oprot; the field order below defines the wire layout.
  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('ProcessSubmitEvent')
    if self.processId is not None:
      oprot.writeFieldBegin('processId', TType.STRING, 1)
      oprot.writeString(self.processId)
      oprot.writeFieldEnd()
    if self.gatewayId is not None:
      oprot.writeFieldBegin('gatewayId', TType.STRING, 2)
      oprot.writeString(self.gatewayId)
      oprot.writeFieldEnd()
    if self.experimentId is not None:
      oprot.writeFieldBegin('experimentId', TType.STRING, 3)
      oprot.writeString(self.experimentId)
      oprot.writeFieldEnd()
    if self.tokenId is not None:
      oprot.writeFieldBegin('tokenId', TType.STRING, 4)
      oprot.writeString(self.tokenId)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  # Raise TProtocolException if a required field is still None.
  def validate(self):
    if self.processId is None:
      raise TProtocol.TProtocolException(message='Required field processId is unset!')
    if self.gatewayId is None:
      raise TProtocol.TProtocolException(message='Required field gatewayId is unset!')
    if self.experimentId is None:
      raise TProtocol.TProtocolException(message='Required field experimentId is unset!')
    if self.tokenId is None:
      raise TProtocol.TProtocolException(message='Required field tokenId is unset!')
    return

  # NOTE(review): hash is computed from mutable attributes -- instances must
  # not be mutated while used as dict/set keys.
  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.processId)
    value = (value * 31) ^ hash(self.gatewayId)
    value = (value * 31) ^ hash(self.experimentId)
    value = (value * 31) ^ hash(self.tokenId)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class ProcessTerminateEvent:
  """
  Attributes:
   - processId
   - gatewayId
   - tokenId
  """

  # Autogenerated Thrift struct -- regenerate from the IDL instead of
  # hand-editing.  Python 2 only (dict.iteritems in __repr__).

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'processId', None, None, ), # 1
    (2, TType.STRING, 'gatewayId', None, None, ), # 2
    (3, TType.STRING, 'tokenId', None, None, ), # 3
  )

  def __init__(self, processId=None, gatewayId=None, tokenId=None,):
    self.processId = processId
    self.gatewayId = gatewayId
    self.tokenId = tokenId

  # Deserialize from iprot, delegating to the C fastbinary accelerator
  # when the protocol/transport support it.
  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.processId = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.gatewayId = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.tokenId = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  # Serialize to oprot; the field order below defines the wire layout.
  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('ProcessTerminateEvent')
    if self.processId is not None:
      oprot.writeFieldBegin('processId', TType.STRING, 1)
      oprot.writeString(self.processId)
      oprot.writeFieldEnd()
    if self.gatewayId is not None:
      oprot.writeFieldBegin('gatewayId', TType.STRING, 2)
      oprot.writeString(self.gatewayId)
      oprot.writeFieldEnd()
    if self.tokenId is not None:
      oprot.writeFieldBegin('tokenId', TType.STRING, 3)
      oprot.writeString(self.tokenId)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  # Raise TProtocolException if a required field is still None.
  def validate(self):
    if self.processId is None:
      raise TProtocol.TProtocolException(message='Required field processId is unset!')
    if self.gatewayId is None:
      raise TProtocol.TProtocolException(message='Required field gatewayId is unset!')
    if self.tokenId is None:
      raise TProtocol.TProtocolException(message='Required field tokenId is unset!')
    return

  # NOTE(review): hash is computed from mutable attributes -- instances must
  # not be mutated while used as dict/set keys.
  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.processId)
    value = (value * 31) ^ hash(self.gatewayId)
    value = (value * 31) ^ hash(self.tokenId)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class JobStatusChangeEvent:
  """
  Attributes:
   - state
   - jobIdentity
  """

  # Autogenerated Thrift struct -- regenerate from the IDL instead of
  # hand-editing.  Python 2 only (dict.iteritems in __repr__).

  thrift_spec = (
    None, # 0
    (1, TType.I32, 'state', None, None, ), # 1
    (2, TType.STRUCT, 'jobIdentity', (JobIdentifier, JobIdentifier.thrift_spec), None, ), # 2
  )

  def __init__(self, state=None, jobIdentity=None,):
    self.state = state
    self.jobIdentity = jobIdentity

  # Deserialize from iprot, delegating to the C fastbinary accelerator
  # when the protocol/transport support it.
  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.state = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.jobIdentity = JobIdentifier()
          self.jobIdentity.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  # Serialize to oprot; the field order below defines the wire layout.
  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('JobStatusChangeEvent')
    if self.state is not None:
      oprot.writeFieldBegin('state', TType.I32, 1)
      oprot.writeI32(self.state)
      oprot.writeFieldEnd()
    if self.jobIdentity is not None:
      oprot.writeFieldBegin('jobIdentity', TType.STRUCT, 2)
      self.jobIdentity.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  # Raise TProtocolException if a required field is still None.
  def validate(self):
    if self.state is None:
      raise TProtocol.TProtocolException(message='Required field state is unset!')
    if self.jobIdentity is None:
      raise TProtocol.TProtocolException(message='Required field jobIdentity is unset!')
    return

  # NOTE(review): hash is computed from mutable attributes -- instances must
  # not be mutated while used as dict/set keys.
  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.state)
    value = (value * 31) ^ hash(self.jobIdentity)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class JobStatusChangeRequestEvent:
  """
  Attributes:
   - state
   - jobIdentity
  """

  # Autogenerated Thrift struct -- regenerate from the IDL instead of
  # hand-editing.  Python 2 only (dict.iteritems in __repr__).

  thrift_spec = (
    None, # 0
    (1, TType.I32, 'state', None, None, ), # 1
    (2, TType.STRUCT, 'jobIdentity', (JobIdentifier, JobIdentifier.thrift_spec), None, ), # 2
  )

  def __init__(self, state=None, jobIdentity=None,):
    self.state = state
    self.jobIdentity = jobIdentity

  # Deserialize from iprot, delegating to the C fastbinary accelerator
  # when the protocol/transport support it.
  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.state = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.jobIdentity = JobIdentifier()
          self.jobIdentity.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  # Serialize to oprot; the field order below defines the wire layout.
  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('JobStatusChangeRequestEvent')
    if self.state is not None:
      oprot.writeFieldBegin('state', TType.I32, 1)
      oprot.writeI32(self.state)
      oprot.writeFieldEnd()
    if self.jobIdentity is not None:
      oprot.writeFieldBegin('jobIdentity', TType.STRUCT, 2)
      self.jobIdentity.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  # Raise TProtocolException if a required field is still None.
  def validate(self):
    if self.state is None:
      raise TProtocol.TProtocolException(message='Required field state is unset!')
    if self.jobIdentity is None:
      raise TProtocol.TProtocolException(message='Required field jobIdentity is unset!')
    return

  # NOTE(review): hash is computed from mutable attributes -- instances must
  # not be mutated while used as dict/set keys.
  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.state)
    value = (value * 31) ^ hash(self.jobIdentity)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class Message:
  """
  Attributes:
   - event
   - messageId
   - messageType
   - updatedTime
   - messageLevel
  """

  # Autogenerated Thrift struct -- regenerate from the IDL instead of
  # hand-editing.  Python 2 only (dict.iteritems in __repr__).
  # messageId defaults to the sentinel 'DO_NOT_SET_AT_CLIENTS'
  # (thrift_spec[2][4]); updatedTime and messageLevel are optional
  # (not checked in validate()).

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'event', None, None, ), # 1
    (2, TType.STRING, 'messageId', None, "DO_NOT_SET_AT_CLIENTS", ), # 2
    (3, TType.I32, 'messageType', None, None, ), # 3
    (4, TType.I64, 'updatedTime', None, None, ), # 4
    (5, TType.I32, 'messageLevel', None, None, ), # 5
  )

  def __init__(self, event=None, messageId=thrift_spec[2][4], messageType=None, updatedTime=None, messageLevel=None,):
    self.event = event
    self.messageId = messageId
    self.messageType = messageType
    self.updatedTime = updatedTime
    self.messageLevel = messageLevel

  # Deserialize from iprot, delegating to the C fastbinary accelerator
  # when the protocol/transport support it.
  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.event = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.messageId = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I32:
          self.messageType = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.I64:
          self.updatedTime = iprot.readI64()
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.I32:
          self.messageLevel = iprot.readI32()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  # Serialize to oprot; the field order below defines the wire layout.
  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('Message')
    if self.event is not None:
      oprot.writeFieldBegin('event', TType.STRING, 1)
      oprot.writeString(self.event)
      oprot.writeFieldEnd()
    if self.messageId is not None:
      oprot.writeFieldBegin('messageId', TType.STRING, 2)
      oprot.writeString(self.messageId)
      oprot.writeFieldEnd()
    if self.messageType is not None:
      oprot.writeFieldBegin('messageType', TType.I32, 3)
      oprot.writeI32(self.messageType)
      oprot.writeFieldEnd()
    if self.updatedTime is not None:
      oprot.writeFieldBegin('updatedTime', TType.I64, 4)
      oprot.writeI64(self.updatedTime)
      oprot.writeFieldEnd()
    if self.messageLevel is not None:
      oprot.writeFieldBegin('messageLevel', TType.I32, 5)
      oprot.writeI32(self.messageLevel)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  # Raise TProtocolException if a required field is still None
  # (updatedTime and messageLevel are optional).
  def validate(self):
    if self.event is None:
      raise TProtocol.TProtocolException(message='Required field event is unset!')
    if self.messageId is None:
      raise TProtocol.TProtocolException(message='Required field messageId is unset!')
    if self.messageType is None:
      raise TProtocol.TProtocolException(message='Required field messageType is unset!')
    return

  # NOTE(review): hash is computed from mutable attributes -- instances must
  # not be mutated while used as dict/set keys.
  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.event)
    value = (value * 31) ^ hash(self.messageId)
    value = (value * 31) ^ hash(self.messageType)
    value = (value * 31) ^ hash(self.updatedTime)
    value = (value * 31) ^ hash(self.messageLevel)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
| apache-2.0 |
r39132/airflow | airflow/contrib/operators/oracle_to_azure_data_lake_transfer.py | 4 | 4641 | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow.hooks.oracle_hook import OracleHook
from airflow.contrib.hooks.azure_data_lake_hook import AzureDataLakeHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
from airflow.utils.file import TemporaryDirectory
import unicodecsv as csv
import os
class OracleToAzureDataLakeTransfer(BaseOperator):
    """
    Moves data from Oracle to Azure Data Lake. The operator runs the query
    against Oracle, dumps the result set to a local CSV file, and uploads
    that file to Azure Data Lake.

    :param filename: file name to be used by the csv file.
    :type filename: str
    :param azure_data_lake_conn_id: destination azure data lake connection.
    :type azure_data_lake_conn_id: str
    :param azure_data_lake_path: destination path in azure data lake to put the file.
    :type azure_data_lake_path: str
    :param oracle_conn_id: source Oracle connection.
    :type oracle_conn_id: str
    :param sql: SQL query to execute against the Oracle database. (templated)
    :type sql: str
    :param sql_params: Parameters to use in sql query. (templated)
    :type sql_params: str
    :param delimiter: field delimiter in the file.
    :type delimiter: str
    :param encoding: encoding type for the file.
    :type encoding: str
    :param quotechar: Character to use in quoting.
    :type quotechar: str
    :param quoting: Quoting strategy. See unicodecsv quoting for more information.
    :type quoting: str
    """

    template_fields = ('filename', 'sql', 'sql_params')
    ui_color = '#e08c8c'

    @apply_defaults
    def __init__(
            self,
            filename,
            azure_data_lake_conn_id,
            azure_data_lake_path,
            oracle_conn_id,
            sql,
            sql_params=None,
            delimiter=",",
            encoding="utf-8",
            quotechar='"',
            quoting=csv.QUOTE_MINIMAL,
            *args, **kwargs):
        super(OracleToAzureDataLakeTransfer, self).__init__(*args, **kwargs)
        # Avoid a shared mutable default for sql_params.
        if sql_params is None:
            sql_params = {}
        self.filename = filename
        self.oracle_conn_id = oracle_conn_id
        self.sql = sql
        self.sql_params = sql_params
        self.azure_data_lake_conn_id = azure_data_lake_conn_id
        self.azure_data_lake_path = azure_data_lake_path
        self.delimiter = delimiter
        self.encoding = encoding
        self.quotechar = quotechar
        self.quoting = quoting

    def _write_temp_file(self, cursor, path_to_save):
        """Dump the cursor's result set (header row + data rows) to a CSV file."""
        with open(path_to_save, 'wb') as csvfile:
            csv_writer = csv.writer(csvfile, delimiter=self.delimiter,
                                    encoding=self.encoding, quotechar=self.quotechar,
                                    quoting=self.quoting)
            # First row: column names from the cursor description.
            csv_writer.writerow(map(lambda field: field[0], cursor.description))
            csv_writer.writerows(cursor)
            csvfile.flush()

    def execute(self, context):
        """Run the query, write the CSV locally, then upload it to ADLS."""
        oracle_hook = OracleHook(oracle_conn_id=self.oracle_conn_id)
        azure_data_lake_hook = AzureDataLakeHook(
            azure_data_lake_conn_id=self.azure_data_lake_conn_id)

        self.log.info("Dumping Oracle query results to local file")
        conn = oracle_hook.get_conn()
        cursor = conn.cursor()
        # Close the cursor and connection even if the query or the upload
        # fails; previously they leaked on any exception in this block.
        try:
            cursor.execute(self.sql, self.sql_params)
            with TemporaryDirectory(prefix='airflow_oracle_to_azure_op_') as temp:
                self._write_temp_file(cursor, os.path.join(temp, self.filename))
                self.log.info("Uploading local file to Azure Data Lake")
                azure_data_lake_hook.upload_file(os.path.join(temp, self.filename),
                                                 os.path.join(self.azure_data_lake_path,
                                                              self.filename))
        finally:
            cursor.close()
            conn.close()
| apache-2.0 |
WayneDW/Sentiment-Analysis-in-Event-Driven-Stock-Price-Movement-Prediction | crawler/all_tickers.py | 1 | 2014 | #!/usr/bin/env python3
"""
Download the ticker list from NASDAQ and save as csv.
Output filename: ./input/tickerList.csv
"""
import csv
import sys
from urllib.request import urlopen
import numpy as np
def get_tickers(percent):
    """Download exchange listings from NASDAQ and write the top market-cap
    companies to ./input/tickerList.csv.

    :param percent: keep only companies whose market cap is in the top
        ``percent`` percent across NASDAQ, NYSE and AMEX.
    """
    assert isinstance(percent, int)

    cap_stat, output = np.array([]), []
    for exchange in ["NASDAQ", "NYSE", "AMEX"]:
        url = "http://www.nasdaq.com/screening/companies-by-industry.aspx?exchange="
        repeat_times = 10  # repeat downloading in case of http error
        for _ in range(repeat_times):
            try:
                print("Downloading tickers from {}...".format(exchange))
                response = urlopen(url + exchange + '&render=download')
                content = response.read().decode('utf-8').split('\n')
                for num, line in enumerate(content):
                    line = line.strip().strip('"').split('","')
                    if num == 0 or len(line) != 9:
                        continue  # filter unmatched format
                    # ticker, name, last_sale, market_cap, IPO_year, sector, industry
                    ticker, name, _, market_cap, _, _, _ = line[0:4] + line[5:8]
                    cap_stat = np.append(cap_stat, float(market_cap))
                    output.append([ticker, name.replace(',', '').replace('.', ''),
                                   exchange, market_cap])
                break
            except Exception:
                continue  # best-effort retry on network/parse errors

    # The cutoff is loop-invariant: compute it once, not once per company.
    threshold = np.percentile(cap_stat, 100 - percent)
    # 'with' guarantees the CSV is flushed and closed (the original handle
    # was opened with the builtin-shadowing name 'file' and never closed).
    with open('./input/tickerList.csv', 'w') as csv_file:
        writer = csv.writer(csv_file, delimiter=',')
        for data in output:
            if float(data[3]) < threshold:
                continue
            writer.writerow(data)
def main():
    """Entry point: read the percent argument from argv and fetch tickers."""
    args = sys.argv
    if len(args) >= 2:
        get_tickers(int(args[1]))  # keep the top N% market-cap companies
    else:
        print('Usage: ./all_tickers.py <int_percent>')
# Run only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main()
| mit |
elit3ge/SickRage | sickbeard/notifiers/pytivo.py | 12 | 3574 | # Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
import os
import sickbeard
from urllib import urlencode
from urllib2 import Request, urlopen, HTTPError
from sickbeard import logger
from sickrage.helper.encoding import ek
from sickrage.helper.exceptions import ex
class pyTivoNotifier:
def notify_snatch(self, ep_name):
pass
def notify_download(self, ep_name):
pass
def notify_subtitle_download(self, ep_name, lang):
pass
def notify_git_update(self, new_version):
pass
def update_library(self, ep_obj):
# Values from config
if not sickbeard.USE_PYTIVO:
return False
host = sickbeard.PYTIVO_HOST
shareName = sickbeard.PYTIVO_SHARE_NAME
tsn = sickbeard.PYTIVO_TIVO_NAME
# There are two more values required, the container and file.
#
# container: The share name, show name and season
#
# file: The file name
#
# Some slicing and dicing of variables is required to get at these values.
#
# There might be better ways to arrive at the values, but this is the best I have been able to
# come up with.
#
# Calculated values
showPath = ep_obj.show.location
showName = ep_obj.show.name
rootShowAndSeason = ek(os.path.dirname, ep_obj.location)
absPath = ep_obj.location
# Some show names have colons in them which are illegal in a path location, so strip them out.
# (Are there other characters?)
showName = showName.replace(":", "")
root = showPath.replace(showName, "")
showAndSeason = rootShowAndSeason.replace(root, "")
container = shareName + "/" + showAndSeason
file = "/" + absPath.replace(root, "")
# Finally create the url and make request
requestUrl = "http://" + host + "/TiVoConnect?" + urlencode(
{'Command': 'Push', 'Container': container, 'File': file, 'tsn': tsn})
logger.log(u"pyTivo notification: Requesting " + requestUrl, logger.DEBUG)
request = Request(requestUrl)
try:
response = urlopen(request) #@UnusedVariable
except HTTPError , e:
if hasattr(e, 'reason'):
logger.log(u"pyTivo notification: Error, failed to reach a server - " + e.reason, logger.ERROR)
return False
elif hasattr(e, 'code'):
logger.log(u"pyTivo notification: Error, the server couldn't fulfill the request - " + e.code, logger.ERROR)
return False
except Exception, e:
logger.log(u"PYTIVO: Unknown exception: " + ex(e), logger.ERROR)
return False
else:
logger.log(u"pyTivo notification: Successfully requested transfer of file")
return True
notifier = pyTivoNotifier
| gpl-3.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.