| repo_name | ref | path | copies | content |
|---|---|---|---|---|
| zenodo/invenio | refs/heads/zenodo-master | invenio/legacy/inveniocfg.py | 1 |
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2008, 2009, 2010, 2011, 2012, 2013, 2014, 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
Invenio configuration and administration CLI tool.
Usage: inveniocfg [options]
General options:
-h, --help print this help
-V, --version print version number
Options to finish your installation:
--create-secret-key generate random CFG_SITE_SECRET_KEY
--create-apache-conf create Apache configuration files
--create-tables create DB tables for Invenio
--load-bibfield-conf load the BibField configuration
--load-webstat-conf load the WebStat configuration
--drop-tables drop DB tables of Invenio
--check-openoffice check whether the OpenOffice temporary directory is correctly set up
Options to set up and test a demo site:
--create-demo-site create demo site
--load-demo-records load demo records
--remove-demo-records remove demo records, keeping demo site
--drop-demo-site drop demo site configurations too
--run-unit-tests run unit test suite (needs demo site)
--run-regression-tests run regression test suite (needs demo site)
--run-web-tests run web tests in a browser (needs demo site, Firefox, Selenium IDE)
--run-flask-tests run Flask test suite
Options to update config files in situ:
--update-all perform all the update options
--update-config-py update config.py file from invenio.conf file
--update-dbquery-py update dbquery.py with DB credentials from invenio.conf
--update-dbexec update dbexec with DB credentials from invenio.conf
--update-bibconvert-tpl update bibconvert templates with CFG_SITE_URL from invenio.conf
--update-web-tests update web test cases with CFG_SITE_URL from invenio.conf
Options to update DB tables:
--reset-all perform all the reset options
--reset-sitename reset tables to take account of new CFG_SITE_NAME*
--reset-siteadminemail reset tables to take account of new CFG_SITE_ADMIN_EMAIL
--reset-fieldnames reset tables to take account of new I18N names from PO files
--reset-recstruct-cache reset record structure cache according to CFG_BIBUPLOAD_SERIALIZE_RECORD_STRUCTURE
--reset-recjson-cache reset record json cache according to CFG_BIBUPLOAD_SERIALIZE_RECORD_STRUCTURE
Options to upgrade your installation:
--upgrade apply all pending upgrades
--upgrade-check run pre-upgrade checks for all pending upgrades
--upgrade-show-pending show pending upgrades ready to be applied
--upgrade-show-applied show history of applied upgrades
--upgrade-create-standard-recipe create a new upgrade recipe (for developers)
--upgrade-create-release-recipe create a new release upgrade recipe (for developers)
Options to help the work:
--list print names and values of all options from conf files
--get <some-opt> get value of a given option from conf files
--conf-dir </some/path> path to directory where invenio*.conf files are [optional]
--detect-system-details print system details such as Apache/Python/MySQL versions
"""
from __future__ import print_function
__revision__ = "$Id$"
from ConfigParser import ConfigParser
from optparse import OptionParser, OptionGroup, IndentedHelpFormatter, Option, \
OptionError
import os
import pkg_resources
import random
import re
import shutil
import socket
import string
import sys
import zlib
from six import iteritems
from warnings import warn
from sqlalchemy import exc
def print_usage():
"""Print help."""
print(__doc__)
def get_version():
""" Get running version of Invenio """
from invenio.config import CFG_VERSION
return CFG_VERSION
def print_version():
"""Print version information."""
print(get_version())
def convert_conf_option(option_name, option_value):
"""
Convert conf option into Python config.py line, converting
values to ints or strings as appropriate.
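
    Illustrative examples (hand-traced against the special cases below;
    treat as a sketch of the behaviour, not a specification):
        convert_conf_option('cfg_database_port', '3306')
            -> "CFG_DATABASE_PORT = 3306"
        convert_conf_option('CFG_SITE_LANGS', 'en,fr')
            -> "CFG_SITE_LANGS = ['en', 'fr', ]"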
"""
## 1) convert option name to uppercase:
option_name = option_name.upper()
## 1a) adjust renamed variables:
if option_name in ['CFG_WEBSUBMIT_DOCUMENT_FILE_MANAGER_DOCTYPES',
'CFG_WEBSUBMIT_DOCUMENT_FILE_MANAGER_RESTRICTIONS',
'CFG_WEBSUBMIT_DOCUMENT_FILE_MANAGER_MISC',
'CFG_WEBSUBMIT_FILESYSTEM_BIBDOC_GROUP_LIMIT',
'CFG_WEBSUBMIT_ADDITIONAL_KNOWN_FILE_EXTENSIONS',
'CFG_WEBSUBMIT_DESIRED_CONVERSIONS']:
new_option_name = option_name.replace('WEBSUBMIT', 'BIBDOCFILE')
print(("""WARNING: %s has been renamed to %s.
Please, update your invenio-local.conf file accordingly.""" % (option_name, new_option_name)), file=sys.stderr)
option_name = new_option_name
## 2) convert option value to int or string:
if option_name in ['CFG_BIBUPLOAD_REFERENCE_TAG',
'CFG_BIBUPLOAD_EXTERNAL_SYSNO_TAG',
'CFG_BIBUPLOAD_EXTERNAL_OAIID_TAG',
'CFG_BIBUPLOAD_EXTERNAL_OAIID_PROVENANCE_TAG',
'CFG_BIBUPLOAD_STRONG_TAGS',
'CFG_BIBFORMAT_HIDDEN_TAGS']:
        # some options are supposed to be strings even when they look
        # numeric
option_value = '"' + option_value + '"'
else:
try:
option_value = int(option_value)
except ValueError:
option_value = '"' + option_value + '"'
## 3a) special cases: chars regexps
if option_name in ['CFG_BIBINDEX_CHARS_ALPHANUMERIC_SEPARATORS',
'CFG_BIBINDEX_CHARS_PUNCTUATION']:
option_value = 'r"[' + option_value[1:-1] + ']"'
## 3abis) special cases: real regexps
if option_name in ['CFG_BIBINDEX_PERFORM_OCR_ON_DOCNAMES',
'CFG_BATCHUPLOADER_WEB_ROBOT_AGENTS',
'CFG_BIBUPLOAD_INTERNAL_DOI_PATTERN']:
option_value = 'r"' + option_value[1:-1] + '"'
## 3b) special cases: True, False, None
if option_value in ['"True"', '"False"', '"None"']:
option_value = option_value[1:-1]
## 3c) special cases: dicts and real pythonic lists
if option_name in ['CFG_WEBSEARCH_FIELDS_CONVERT',
'CFG_BATCHUPLOADER_WEB_ROBOT_RIGHTS',
'CFG_WEBSEARCH_FULLTEXT_SNIPPETS',
'CFG_WEBSEARCH_FULLTEXT_SNIPPETS_CHARS',
'CFG_SITE_EMERGENCY_EMAIL_ADDRESSES',
'CFG_BIBMATCH_FUZZY_WORDLIMITS',
'CFG_BIBMATCH_QUERY_TEMPLATES',
'CFG_WEBSEARCH_SYNONYM_KBRS',
'CFG_BIBINDEX_SYNONYM_KBRS',
'CFG_WEBCOMMENT_EMAIL_REPLIES_TO',
'CFG_WEBCOMMENT_RESTRICTION_DATAFIELD',
'CFG_WEBCOMMENT_ROUND_DATAFIELD',
'CFG_BIBUPLOAD_FFT_ALLOWED_EXTERNAL_URLS',
'CFG_BIBSCHED_NODE_TASKS',
'CFG_BIBEDIT_EXTEND_RECORD_WITH_COLLECTION_TEMPLATE',
'CFG_OAI_METADATA_FORMATS',
'CFG_BIBDOCFILE_DESIRED_CONVERSIONS',
'CFG_BIBDOCFILE_BEST_FORMATS_TO_EXTRACT_TEXT_FROM',
'CFG_WEB_API_KEY_ALLOWED_URL',
'CFG_BIBDOCFILE_DOCUMENT_FILE_MANAGER_MISC',
'CFG_BIBDOCFILE_DOCUMENT_FILE_MANAGER_DOCTYPES',
'CFG_BIBDOCFILE_DOCUMENT_FILE_MANAGER_RESTRICTIONS',
'CFG_DEVEL_TEST_DATABASE_ENGINES',
'CFG_REFEXTRACT_KBS_OVERRIDE',
'CFG_OPENID_CONFIGURATIONS',
'CFG_OAUTH1_CONFIGURATIONS',
'CFG_OAUTH2_CONFIGURATIONS',
'CFG_BIBDOCFILE_ADDITIONAL_KNOWN_MIMETYPES',
'CFG_BIBDOCFILE_PREFERRED_MIMETYPES_MAPPING',
'CFG_BIBSCHED_NON_CONCURRENT_TASKS',
'CFG_REDIS_HOSTS',
'CFG_BIBSCHED_INCOMPATIBLE_TASKS',
'CFG_ICON_CREATION_FORMAT_MAPPINGS',
'CFG_BIBEDIT_AUTOCOMPLETE']:
try:
option_value = option_value[1:-1]
if option_name == "CFG_BIBEDIT_EXTEND_RECORD_WITH_COLLECTION_TEMPLATE" and option_value.strip().startswith("{"):
print(("""ERROR: CFG_BIBEDIT_EXTEND_RECORD_WITH_COLLECTION_TEMPLATE
now accepts only a list of tuples, not a dictionary. Check invenio.conf for an example.
Please, update your invenio-local.conf file accordingly."""), file=sys.stderr)
sys.exit(1)
except TypeError:
if option_name in ('CFG_WEBSEARCH_FULLTEXT_SNIPPETS',):
print("""WARNING: CFG_WEBSEARCH_FULLTEXT_SNIPPETS
has changed syntax: it can be customised to display different snippets for
different document types. See the corresponding documentation in invenio.conf.
You may want to customise your invenio-local.conf configuration accordingly.""", file=sys.stderr)
option_value = """{'': %s}""" % option_value
else:
print("ERROR: type error in %s value %s." %
(option_name, option_value), file=sys.stderr)
sys.exit(1)
## 3cbis) very special cases: dicts with backward compatible string
if option_name in ['CFG_BIBINDEX_SPLASH_PAGES']:
if option_value.startswith('"{') and option_value.endswith('}"'):
option_value = option_value[1:-1]
else:
option_value = """{%s: ".*"}""" % option_value
## 3d) special cases: comma-separated lists
if option_name in ['CFG_SITE_LANGS',
'CFG_BIBDOCFILE_ADDITIONAL_KNOWN_FILE_EXTENSIONS',
'CFG_WEBSEARCH_USE_MATHJAX_FOR_FORMATS',
'CFG_BIBUPLOAD_STRONG_TAGS',
'CFG_BIBFORMAT_HIDDEN_TAGS',
'CFG_BIBFORMAT_HIDDEN_RECJSON_FIELDS',
'CFG_BIBSCHED_GC_TASKS_TO_REMOVE',
'CFG_BIBSCHED_GC_TASKS_TO_ARCHIVE',
'CFG_BIBUPLOAD_FFT_ALLOWED_LOCAL_PATHS',
'CFG_BIBUPLOAD_CONTROLLED_PROVENANCE_TAGS',
'CFG_BIBUPLOAD_DELETE_FORMATS',
'CFG_WEBSTYLE_HTTP_STATUS_ALERT_LIST',
'CFG_WEBSEARCH_RSS_I18N_COLLECTIONS',
'CFG_BATCHUPLOADER_FILENAME_MATCHING_POLICY',
'CFG_BIBAUTHORID_EXTERNAL_CLAIMED_RECORDS_KEY',
'CFG_BIBCIRCULATION_ITEM_STATUS_OPTIONAL',
'CFG_PLOTEXTRACTOR_DISALLOWED_TEX',
'CFG_OAI_FRIENDS',
'CFG_WEBSTYLE_REVERSE_PROXY_IPS',
'CFG_BIBEDIT_AUTOCOMPLETE_INSTITUTIONS_FIELDS',
'CFG_BIBFORMAT_DISABLE_I18N_FOR_CACHED_FORMATS',
'CFG_BIBFORMAT_HIDDEN_FILE_FORMATS',
'CFG_FLASK_DISABLED_BLUEPRINTS',
'CFG_DEVEL_TOOLS',
'CFG_BIBFIELD_MASTER_FORMATS',
'CFG_OPENID_PROVIDERS',
'CFG_OAUTH1_PROVIDERS',
'CFG_OAUTH2_PROVIDERS',
'CFG_BIBFORMAT_CACHED_FORMATS',
'CFG_BIBEDIT_ADD_TICKET_RT_QUEUES',
'CFG_BIBAUTHORID_ENABLED_REMOTE_LOGIN_SYSTEMS', ]:
out = "["
for elem in option_value[1:-1].split(","):
if elem:
elem = elem.strip()
# string values
out += "'%s', " % elem
out += "]"
option_value = out
## 3e) special cases: multiline
if option_name == 'CFG_OAI_IDENTIFY_DESCRIPTION':
# make triple quotes
option_value = '""' + option_value + '""'
## 3f) ignore some options:
if option_name.startswith('CFG_SITE_NAME_INTL'):
# treated elsewhere
return
## 3g) special cases: float
if option_name in ['CFG_BIBDOCFILE_MD5_CHECK_PROBABILITY',
'CFG_BIBMATCH_LOCAL_SLEEPTIME',
'CFG_BIBMATCH_REMOTE_SLEEPTIME',
'CFG_PLOTEXTRACTOR_DOWNLOAD_TIMEOUT',
'CFG_BIBMATCH_FUZZY_MATCH_VALIDATION_LIMIT']:
option_value = float(option_value[1:-1])
## 3h) special cases: bibmatch validation list
if option_name in ['CFG_BIBMATCH_MATCH_VALIDATION_RULESETS']:
option_value = option_value[1:-1]
## 4a) dropped variables
if option_name in ['CFG_BATCHUPLOADER_WEB_ROBOT_AGENT']:
print(("""ERROR: CFG_BATCHUPLOADER_WEB_ROBOT_AGENT has been dropped in favour of
CFG_BATCHUPLOADER_WEB_ROBOT_AGENTS.
Please, update your invenio-local.conf file accordingly."""), file=sys.stderr)
option_value = option_value[1:-1]
elif option_name in ['CFG_WEBSUBMIT_DOCUMENT_FILE_MANAGER_DOCTYPES',
'CFG_WEBSUBMIT_DOCUMENT_FILE_MANAGER_RESTRICTIONS',
'CFG_WEBSUBMIT_DOCUMENT_FILE_MANAGER_MISC',
'CFG_WEBSUBMIT_FILESYSTEM_BIBDOC_GROUP_LIMIT',
'CFG_WEBSUBMIT_ADDITIONAL_KNOWN_FILE_EXTENSIONS',
'CFG_WEBSUBMIT_DESIRED_CONVERSIONS']:
new_option_name = option_name.replace('WEBSUBMIT', 'BIBDOCFILE')
print(("""ERROR: %s has been renamed to %s.
Please, update your invenio-local.conf file accordingly.""" % (option_name, new_option_name)), file=sys.stderr)
option_name = new_option_name
elif option_name in ['CFG_WEBSTYLE_INSPECT_TEMPLATES']:
print(("""ERROR: CFG_WEBSTYLE_INSPECT_TEMPLATES has been dropped in favour of
CFG_DEVEL_TOOLS.
Please, update your invenio-local.conf file accordingly."""), file=sys.stderr)
return
## 5) finally, return output line:
return '%s = %s' % (option_name, option_value)
def update_config_py(conf):
print('>>> NOT NEEDED!!!')
    print('>>> quitting ...')
return
def cli_cmd_update_config_py(conf):
"""
Update new config.py from conf options, keeping previous
config.py in a backup copy.
"""
update_config_py(conf)
# from invenio.base.scripts.config import main
# warn('inveniocfg --update-config-py is deprecated. Using instead: inveniomanage config update')
# sys_argv = sys.argv
# sys.argv = 'config_manager.py update'.split()
# main()
# sys.argv = sys_argv
def cli_cmd_update_dbquery_py(conf):
"""
Update lib/dbquery.py file with DB parameters read from conf file.
Note: this edits dbquery.py in situ, taking a backup first.
Use only when you know what you are doing.
"""
print(">>> Going to update dbquery.py...")
## location where dbquery.py is:
dbqueryconfigpyfile = conf.get("Invenio", "CFG_PYLIBDIR") + \
os.sep + 'invenio' + os.sep + 'dbquery_config.py'
## backup current dbquery.py file:
if os.path.exists(dbqueryconfigpyfile + 'c'):
shutil.copy(dbqueryconfigpyfile + 'c', dbqueryconfigpyfile + 'c.OLD')
out = ["%s = '%s'\n" % (item.upper(), value) \
for item, value in conf.items('Invenio') \
if item.upper().startswith('CFG_DATABASE_')]
fdesc = open(dbqueryconfigpyfile, 'w')
fdesc.write("# -*- coding: utf-8 -*-\n")
fdesc.writelines(out)
fdesc.close()
print("You may want to restart Apache now.")
print(">>> dbquery.py updated successfully.")
def cli_cmd_update_dbexec(conf):
"""
Update bin/dbexec file with DB parameters read from conf file.
Note: this edits dbexec in situ, taking a backup first.
Use only when you know what you are doing.
"""
print(">>> Going to update dbexec...")
## location where dbexec is:
dbexecfile = conf.get("Invenio", "CFG_BINDIR") + \
os.sep + 'dbexec'
## backup current dbexec file:
if os.path.exists(dbexecfile):
shutil.copy(dbexecfile, dbexecfile + '.OLD')
## replace db parameters via sed:
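    ## e.g. a line like "CFG_DATABASE_HOST = 'localhost'" is matched below and
    ## rewritten with the value currently in invenio.conf (illustrative value)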
out = ''
for line in open(dbexecfile, 'r').readlines():
match = re.search(r'^CFG_DATABASE_(HOST|PORT|NAME|USER|PASS|SLAVE)(\s*=\s*)\'.*\'$', line)
if match:
dbparam = 'CFG_DATABASE_' + match.group(1)
out += "%s%s'%s'\n" % (dbparam, match.group(2),
conf.get("Invenio", dbparam))
else:
out += line
fdesc = open(dbexecfile, 'w')
fdesc.write(out)
fdesc.close()
print(">>> dbexec updated successfully.")
def cli_cmd_update_bibconvert_tpl(conf):
"""
Update bibconvert/config/*.tpl files looking for 856
http://.../CFG_SITE_RECORD lines, replacing URL with CFG_SITE_URL taken
from conf file. Note: this edits tpl files in situ, taking a
backup first. Use only when you know what you are doing.
"""
from invenio.modules.converter.manage import main
warn('inveniocfg --update-bibconvert-tpl is deprecated. Using instead: inveniomanage converter update')
sys_argv = sys.argv
sys.argv = 'bibconvert_manager.py update'.split()
main()
sys.argv = sys_argv
def cli_cmd_update_web_tests(conf):
"""
Update web test cases lib/webtest/test_*.html looking for
<td>http://.+?[</] strings and replacing them with CFG_SITE_URL
taken from conf file. Note: this edits test files in situ, taking
a backup first. Use only when you know what you are doing.
"""
print(">>> Going to update web tests...")
## location where test_*.html files are:
testdir = conf.get("Invenio", 'CFG_PREFIX') + os.sep + \
'lib' + os.sep + 'webtest' + os.sep + 'invenio'
## find all test_*.html files:
for testfilename in os.listdir(testdir):
if testfilename.startswith("test_") and \
testfilename.endswith(".html"):
## change test file:
testfile = testdir + os.sep + testfilename
shutil.copy(testfile, testfile + '.OLD')
out = ''
for line in open(testfile, 'r').readlines():
match = re.search(r'^(.*<td>)http://.+?([</].*)$', line)
if match:
out += "%s%s%s\n" % (match.group(1),
conf.get("Invenio", 'CFG_SITE_URL'),
match.group(2))
else:
match = re.search(r'^(.*<td>)/opt/invenio(.*)$', line)
if match:
out += "%s%s%s\n" % (match.group(1),
conf.get("Invenio", 'CFG_PREFIX'),
match.group(2))
else:
out += line
fdesc = open(testfile, 'w')
fdesc.write(out)
fdesc.close()
print(">>> web tests updated successfully.")
def cli_cmd_reset_sitename(conf):
"""
Reset collection-related tables with new CFG_SITE_NAME and
CFG_SITE_NAME_INTL* read from conf files.
"""
print(">>> Going to reset CFG_SITE_NAME and CFG_SITE_NAME_INTL...")
from invenio.legacy.dbquery import run_sql, IntegrityError
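    # The INSERT-then-UPDATE-on-IntegrityError pattern below is a portable
    # "upsert": try to create the row first, and fall back to updating it
    # if it already exists.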
# reset CFG_SITE_NAME:
sitename = conf.get("Invenio", "CFG_SITE_NAME")
try:
run_sql("""INSERT INTO collection (id, name, dbquery, reclist) VALUES
(1,%s,NULL,NULL)""", (sitename,))
except IntegrityError:
run_sql("""UPDATE collection SET name=%s WHERE id=1""", (sitename,))
# reset CFG_SITE_NAME_INTL:
for lang in conf.get("Invenio", "CFG_SITE_LANGS"):
sitename_lang = conf.get("Invenio", "CFG_SITE_NAME_INTL")[lang]
try:
run_sql("""INSERT INTO collectionname (id_collection, ln, type, value) VALUES
(%s,%s,%s,%s)""", (1, lang, 'ln', sitename_lang))
except IntegrityError:
run_sql("""UPDATE collectionname SET value=%s
WHERE ln=%s AND id_collection=1 AND type='ln'""",
(sitename_lang, lang))
print("You may want to restart Apache now.")
print(">>> CFG_SITE_NAME and CFG_SITE_NAME_INTL* reset successfully.")
def cli_cmd_reset_recstruct_cache(conf):
"""If CFG_BIBUPLOAD_SERIALIZE_RECORD_STRUCTURE is changed, this function
will adapt the database to either store or not store the recstruct
format."""
from intbitset import intbitset
from invenio.legacy.dbquery import run_sql, serialize_via_marshal
from invenio.legacy.search_engine import get_record, print_record
from invenio.legacy.bibsched.cli import server_pid, pidfile
enable_recstruct_cache = conf.get("Invenio", "CFG_BIBUPLOAD_SERIALIZE_RECORD_STRUCTURE")
enable_recstruct_cache = enable_recstruct_cache in ('True', '1')
pid = server_pid(ping_the_process=False)
if pid:
print("ERROR: bibsched seems to run with pid %d, according to %s." % (pid, pidfile), file=sys.stderr)
print(" Please stop bibsched before running this procedure.", file=sys.stderr)
sys.exit(1)
if enable_recstruct_cache:
print(">>> Searching records which need recstruct cache resetting; this may take a while...")
all_recids = intbitset(run_sql("SELECT id FROM bibrec"))
good_recids = intbitset(run_sql("SELECT bibrec.id FROM bibrec JOIN bibfmt ON bibrec.id = bibfmt.id_bibrec WHERE format='recstruct' AND modification_date < last_updated"))
recids = all_recids - good_recids
print(">>> Generating recstruct cache...")
tot = len(recids)
count = 0
for recid in recids:
try:
value = serialize_via_marshal(get_record(recid))
        except zlib.error:
            print("Looks like XM is corrupted for record %s. Let's recover it from bibxxx" % recid, file=sys.stderr)
run_sql("DELETE FROM bibfmt WHERE id_bibrec=%s AND format='xm'", (recid, ))
xm_value = zlib.compress(print_record(recid, 'xm'))
run_sql("INSERT INTO bibfmt(id_bibrec, format, last_updated, value) VALUES(%s, 'xm', NOW(), %s)", (recid, xm_value))
value = serialize_via_marshal(get_record(recid))
run_sql("DELETE FROM bibfmt WHERE id_bibrec=%s AND format='recstruct'", (recid, ))
run_sql("INSERT INTO bibfmt(id_bibrec, format, last_updated, value) VALUES(%s, 'recstruct', NOW(), %s)", (recid, value))
count += 1
if count % 1000 == 0:
print(" ... done records %s/%s" % (count, tot))
if count % 1000 != 0:
print(" ... done records %s/%s" % (count, tot))
print(">>> recstruct cache generated successfully.")
else:
print(">>> Cleaning recstruct cache...")
run_sql("DELETE FROM bibfmt WHERE format='recstruct'")
def cli_cmd_reset_recjson_cache(conf):
"""If CFG_BIBUPLOAD_SERIALIZE_RECORD_STRUCTURE is changed, this function
will adapt the database to either store or not store the recjson
format."""
from invenio.legacy.bibfield.bibfield_manager import main
warn('inveniocfg --reset-recjson-cache is deprecated. Using instead: inveniomanage bibfield reset')
sys_argv = sys.argv
sys.argv = 'bibfield_manager.py reset'.split()
main()
sys.argv = sys_argv
def cli_cmd_reset_siteadminemail(conf):
"""
Reset user-related tables with new CFG_SITE_ADMIN_EMAIL read from conf files.
"""
print(">>> Going to reset CFG_SITE_ADMIN_EMAIL...")
from invenio.legacy.dbquery import run_sql
siteadminemail = conf.get("Invenio", "CFG_SITE_ADMIN_EMAIL")
from invenio.modules.accounts.models import User
from invenio.ext.sqlalchemy import db
User.query.filter_by(id=1).delete()
u = User(id=1, email=siteadminemail, password='', note=1, nickname='admin')
db.session.add(u)
db.session.commit()
print("You may want to restart Apache now.")
print(">>> CFG_SITE_ADMIN_EMAIL reset successfully.")
def cli_cmd_reset_fieldnames(conf):
"""
    Reset I18N field names such as author, title, etc. and other I18N
ranking method names such as word similarity. Their translations
are taken from the PO files.
"""
print(">>> Going to reset I18N field names...")
from invenio.base.i18n import gettext_set_language, language_list_long
from invenio.legacy.dbquery import run_sql, IntegrityError
## get field id and name list:
field_id_name_list = run_sql("SELECT id, name FROM field")
## get rankmethod id and name list:
rankmethod_id_name_list = run_sql("SELECT id, name FROM rnkMETHOD")
## update names for every language:
for lang, dummy in language_list_long():
_ = gettext_set_language(lang)
        ## this list is put here in order for the PO system to pick up names
        ## suitable for translation
field_name_names = {"any field": _("any field"),
"title": _("title"),
"author": _("author"),
"abstract": _("abstract"),
"keyword": _("keyword"),
"report number": _("report number"),
"subject": _("subject"),
"reference": _("reference"),
"fulltext": _("fulltext"),
"collection": _("collection"),
"division": _("division"),
"year": _("year"),
"journal": _("journal"),
"experiment": _("experiment"),
"record ID": _("record ID")}
## update I18N names for every language:
for (field_id, field_name) in field_id_name_list:
if field_name in field_name_names:
try:
run_sql("""INSERT INTO fieldname (id_field,ln,type,value) VALUES
(%s,%s,%s,%s)""", (field_id, lang, 'ln',
field_name_names[field_name]))
except IntegrityError:
run_sql("""UPDATE fieldname SET value=%s
WHERE id_field=%s AND ln=%s AND type=%s""",
(field_name_names[field_name], field_id, lang, 'ln',))
## ditto for rank methods:
rankmethod_name_names = {"wrd": _("word similarity"),
"demo_jif": _("journal impact factor"),
"citation": _("times cited"),
"citerank_citation_t": _("time-decay cite count"),
"citerank_pagerank_c": _("all-time-best cite rank"),
"citerank_pagerank_t": _("time-decay cite rank"),}
for (rankmethod_id, rankmethod_name) in rankmethod_id_name_list:
if rankmethod_name in rankmethod_name_names:
try:
run_sql("""INSERT INTO rnkMETHODNAME (id_rnkMETHOD,ln,type,value) VALUES
(%s,%s,%s,%s)""", (rankmethod_id, lang, 'ln',
rankmethod_name_names[rankmethod_name]))
except IntegrityError:
run_sql("""UPDATE rnkMETHODNAME SET value=%s
WHERE id_rnkMETHOD=%s AND ln=%s AND type=%s""",
(rankmethod_name_names[rankmethod_name], rankmethod_id, lang, 'ln',))
print(">>> I18N field names reset successfully.")
def cli_check_openoffice(conf):
"""
If OpenOffice.org integration is enabled, checks whether the system is
properly configured.
"""
from invenio.legacy.bibsched.bibtask import check_running_process_user
from invenio.legacy.websubmit.file_converter import can_unoconv, get_file_converter_logger
logger = get_file_converter_logger()
for handler in logger.handlers:
logger.removeHandler(handler)
check_running_process_user()
print(">>> Checking if Libre/OpenOffice.org is correctly integrated...", end=' ')
sys.stdout.flush()
if can_unoconv(True):
print("ok")
else:
sys.exit(1)
def cli_cmd_create_secret_key(conf):
"""Generate and append CFG_SITE_SECRET_KEY to invenio-local.conf.
Useful for the installation process."""
from invenio.base.scripts.config import main
warn('inveniocfg --create-secret-key is deprecated. Using instead: inveniomanage config create secret-key')
sys_argv = sys.argv
sys.argv = 'config_manager.py create secret-key'.split()
main()
sys.argv = sys_argv
def cli_cmd_create_tables(conf):
"""Create and fill Invenio DB tables. Useful for the installation process."""
from invenio.base.scripts.database import main
warn('inveniocfg --create-tables is deprecated. Using instead: inveniomanage database create')
sys_argv = sys.argv
sys.argv = 'database_manager.py create'.split()
main()
sys.argv = sys_argv
def cli_cmd_load_webstat_conf(conf):
print(">>> Going to load WebStat config...")
from invenio.config import CFG_PREFIX
cmd = "%s/bin/webstatadmin --load-config" % CFG_PREFIX
if os.system(cmd):
print("ERROR: failed execution of", cmd)
sys.exit(1)
    print(">>> WebStat config loaded successfully.")
def cli_cmd_load_bibfield_config(conf):
from invenio.legacy.bibfield.bibfield_manager import main
warn('inveniocfg --load-bibfield-conf is deprecated. Using instead: inveniomanage bibfield config load')
sys_argv = sys.argv
sys.argv = 'bibfield_manager.py config load'.split()
main()
sys.argv = sys_argv
def cli_cmd_drop_tables(conf):
"""Drop Invenio DB tables. Useful for the uninstallation process."""
print(">>> Going to drop tables and related data on filesystem ...")
from invenio.base.scripts.database import main
warn('inveniocfg --drop-tables is deprecated. Using instead: inveniomanage database drop')
    sys_argv = sys.argv
    sys.argv = 'database_manager.py drop'.split()
    if '--yes-i-know' in sys_argv:
        sys.argv.append('--yes-i-know')
main()
sys.argv = sys_argv
def cli_cmd_create_demo_site(conf):
"""Create demo site. Useful for testing purposes."""
print(">>> Going to create demo site...")
from invenio.config import CFG_PREFIX
from invenio.legacy.dbquery import run_sql
run_sql("TRUNCATE schTASK")
run_sql("TRUNCATE session")
run_sql("DELETE FROM user WHERE email=''")
for cmd in ["%s/bin/dbexec < %s/lib/sql/invenio/democfgdata.sql" % \
(CFG_PREFIX, CFG_PREFIX),]:
if os.system(cmd):
print("ERROR: failed execution of", cmd)
sys.exit(1)
cli_cmd_reset_fieldnames(conf) # needed for I18N demo ranking method names
for cmd in ["%s/bin/webaccessadmin -u admin -c -r -D" % CFG_PREFIX,
"%s/bin/webcoll -u admin" % CFG_PREFIX,
"%s/bin/webcoll 1" % CFG_PREFIX,
"%s/bin/bibsort -u admin --load-config" % CFG_PREFIX,
"%s/bin/bibsort 2" % CFG_PREFIX, ]:
if os.system(cmd):
print("ERROR: failed execution of", cmd)
sys.exit(1)
print(">>> Demo site created successfully.")
def cli_cmd_load_demo_records(conf):
"""Load demo records. Useful for testing purposes."""
from invenio.config import CFG_PREFIX
from invenio.legacy.dbquery import run_sql
print(">>> Going to load demo records...")
run_sql("TRUNCATE schTASK")
for cmd in ["%s/bin/bibupload -u admin -i %s" % (CFG_PREFIX,
pkg_resources.resource_filename('invenio.testsuite', os.path.join('data','demo_record_marc_data.xml'))),
"%s/bin/bibupload 1" % CFG_PREFIX,
"%s/bin/bibdocfile --textify --with-ocr --recid 97" % CFG_PREFIX,
"%s/bin/bibdocfile --textify --all" % CFG_PREFIX,
"%s/bin/bibindex -u admin" % CFG_PREFIX,
"%s/bin/bibindex 2" % CFG_PREFIX,
"%s/bin/bibindex -u admin -w global" % CFG_PREFIX,
"%s/bin/bibindex 3" % CFG_PREFIX,
"%s/bin/bibreformat -u admin -o HB" % CFG_PREFIX,
"%s/bin/bibreformat 4" % CFG_PREFIX,
"%s/bin/webcoll -u admin" % CFG_PREFIX,
"%s/bin/webcoll 5" % CFG_PREFIX,
"%s/bin/bibrank -u admin" % CFG_PREFIX,
"%s/bin/bibrank 6" % CFG_PREFIX,
"%s/bin/bibsort -u admin -R" % CFG_PREFIX,
"%s/bin/bibsort 7" % CFG_PREFIX,
"%s/bin/oairepositoryupdater -u admin" % CFG_PREFIX,
"%s/bin/oairepositoryupdater 8" % CFG_PREFIX,
"%s/bin/bibupload 9" % CFG_PREFIX,]:
if os.system(cmd):
print("ERROR: failed execution of", cmd)
sys.exit(1)
print(">>> Demo records loaded successfully.")
def cli_cmd_remove_demo_records(conf):
"""Remove demo records. Useful when you are finished testing."""
print(">>> Going to remove demo records...")
from invenio.config import CFG_PREFIX
from invenio.legacy.dbquery import run_sql
from invenio.utils.text import wrap_text_in_a_box, wait_for_user
wait_for_user(wrap_text_in_a_box("""WARNING: You are going to destroy
your records and documents!"""))
if os.path.exists(CFG_PREFIX + os.sep + 'var' + os.sep + 'data'):
shutil.rmtree(CFG_PREFIX + os.sep + 'var' + os.sep + 'data')
run_sql("TRUNCATE schTASK")
for cmd in ["%s/bin/dbexec < %s/lib/sql/invenio/tabbibclean.sql" % (CFG_PREFIX, CFG_PREFIX),
"%s/bin/webcoll -u admin" % CFG_PREFIX,
"%s/bin/webcoll 1" % CFG_PREFIX,]:
if os.system(cmd):
print("ERROR: failed execution of", cmd)
sys.exit(1)
print(">>> Demo records removed successfully.")
def cli_cmd_drop_demo_site(conf):
"""Drop demo site completely. Useful when you are finished testing."""
print(">>> Going to drop demo site...")
from invenio.utils.text import wrap_text_in_a_box, wait_for_user
wait_for_user(wrap_text_in_a_box("""WARNING: You are going to destroy
your site and documents!"""))
cli_cmd_drop_tables(conf)
cli_cmd_create_tables(conf)
cli_cmd_remove_demo_records(conf)
print(">>> Demo site dropped successfully.")
def cli_cmd_run_unit_tests(conf):
"""Run unit tests, usually on the working demo site."""
from invenio.testsuite import build_and_run_unit_test_suite
if not build_and_run_unit_test_suite():
sys.exit(1)
def cli_cmd_run_js_unit_tests(conf):
"""Run JavaScript unit tests, usually on the working demo site."""
from invenio.testsuite import build_and_run_js_unit_test_suite
if not build_and_run_js_unit_test_suite():
sys.exit(1)
def cli_cmd_run_regression_tests(conf):
"""Run regression tests, usually on the working demo site."""
from invenio.testsuite import build_and_run_regression_test_suite
if not build_and_run_regression_test_suite():
sys.exit(1)
def cli_cmd_run_web_tests(conf):
"""Run web tests in a browser. Requires Firefox with Selenium."""
from invenio.testsuite import build_and_run_web_test_suite
if not build_and_run_web_test_suite():
sys.exit(1)
def cli_cmd_run_flask_tests(conf):
"""Run flask tests."""
from invenio.testsuite import build_and_run_flask_test_suite
build_and_run_flask_test_suite()
def _detect_ip_address():
"""Detect IP address of this computer. Useful for creating Apache
    vhost conf snippet on RHEL-like machines. However, if the configured
    site URL contains 0.0.0.0, then use that, since we are running inside Docker.
@return: IP address, or '*' if cannot detect
@rtype: string
@note: creates socket for real in order to detect real IP address,
not the loopback one.
"""
from invenio.base.globals import cfg
if '0.0.0.0' in cfg.get('CFG_SITE_URL'):
return '0.0.0.0'
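    # Note: connect() on a UDP socket sends no packets; it merely selects a
    # route, so getsockname() below yields the address of the outgoing
    # interface.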
try:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(('invenio-software.org', 0))
return s.getsockname()[0]
except Exception:
return '*'
def cli_cmd_create_apache_conf(conf):
"""
Create Apache conf files for this site, keeping previous
files in a backup copy.
"""
from invenio.base.scripts.apache import main
warn('inveniocfg --create-apache-conf is deprecated. Using instead: inveniomanage apache create-config')
sys_argv = sys.argv
sys.argv = 'apache_manager.py create-config'.split()
main()
sys.argv = sys_argv
def cli_cmd_get(conf, varname):
"""
Return value of VARNAME read from CONF files. Useful for
third-party programs to access values of conf options such as
CFG_PREFIX. Return None if VARNAME is not found.
"""
from invenio.base.scripts.config import main
warn('inveniocfg --get="%(varname)s" is deprecated. '
'Using instead: inveniomanage config get "%(varname)s"' % {
'varname': varname
})
sys_argv = sys.argv
sys.argv = 'config_manager.py get'.split()
sys.argv.append(varname)
try:
main()
except SystemExit:
pass
sys.argv = sys_argv
def cli_cmd_list(conf):
"""
Print a list of all conf options and values from CONF.
"""
from invenio.base.scripts.config import main
warn('inveniocfg --list is deprecated. '
'Using instead: inveniomanage config list')
sys_argv = sys.argv
sys.argv = 'config_manager.py list'.split()
main()
sys.argv = sys_argv
def _grep_version_from_executable(path_to_exec, version_regexp):
"""
Try to detect a program version by digging into its binary
PATH_TO_EXEC and looking for VERSION_REGEXP. Return program
    version as a string. Return an empty string on failure.
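
    Hypothetical usage (path and regexp are illustrative only):
        _grep_version_from_executable('/usr/sbin/mysqld', 'mysqld.Ver')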
"""
from invenio.utils.shell import run_shell_command
exec_version = ""
if os.path.exists(path_to_exec):
dummy1, cmd2_out, dummy2 = run_shell_command("strings %s | grep %s",
(path_to_exec, version_regexp))
if cmd2_out:
for cmd2_out_line in cmd2_out.split("\n"):
if len(cmd2_out_line) > len(exec_version):
                    # the longer the better
exec_version = cmd2_out_line
return exec_version
def cli_cmd_detect_system_details(conf):
"""
Detect and print system details such as Apache/Python/MySQL
versions etc. Useful for debugging problems on various OS.
"""
from invenio.base.manage import main
    warn('inveniocfg --detect-system-details is deprecated. Using instead: inveniomanage detect-system-name')
sys_argv = sys.argv
sys.argv = 'inveniomanage detect-system-name'.split()
main()
sys.argv = sys_argv
def cli_cmd_upgrade(conf):
"""
Command for applying upgrades
"""
from invenio.modules.upgrader.manage import main
    warn('inveniocfg --upgrade is deprecated. Using instead: inveniomanage upgrade run')
sys_argv = sys.argv
sys.argv = 'modules.upgrader.manage.py run'.split()
main()
sys.argv = sys_argv
def cli_cmd_upgrade_check(conf):
"""
Command for running pre-upgrade checks
"""
from invenio.modules.upgrader.manage import main
warn('inveniocfg --upgrade-check is deprecated. Using instead: inveniomanage upgrade check')
sys_argv = sys.argv
sys.argv = 'modules.upgrader.manage.py check'.split()
main()
sys.argv = sys_argv
def cli_cmd_upgrade_show_pending(conf):
"""
Command for showing upgrades ready to be applied
"""
from invenio.modules.upgrader.manage import main
warn('inveniocfg --upgrade-show-pending is deprecated. Using instead: inveniomanage upgrade show pending')
sys_argv = sys.argv
sys.argv = 'modules.upgrader.manage.py show pending'.split()
main()
sys.argv = sys_argv
def cli_cmd_upgrade_show_applied(conf):
"""
Command for showing all upgrades already applied.
"""
from invenio.modules.upgrader.manage import main
warn('inveniocfg --upgrade-show-applied is deprecated. Using instead: inveniomanage upgrade show applied')
sys_argv = sys.argv
sys.argv = 'modules.upgrader.manage.py show applied'.split()
main()
sys.argv = sys_argv
def prepare_option_parser():
    """Prepare the command-line option parser."""
class InvenioOption(Option):
"""
Option class that implements the action 'store_append_const' which will
1) append <const> to list in options.<dest>
2) take a value and store in options.<const>
Useful for e.g. appending a const to an actions list, while also taking
an option value and storing it.
This ensures that we can run actions in the order they are given on the
command-line.
Python 2.4 compatibility note: *append_const* action is not available in
Python 2.4, so it is implemented here, together with the new action
*store_append_const*.
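
        Illustrative example (hypothetical option): given
            parser.add_option("--get", dest="actions", const="get",
                              action="store_append_const")
        the invocation "--get CFG_PREFIX" appends 'get' to options.actions
        and stores 'CFG_PREFIX' in options.get.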
"""
ACTIONS = Option.ACTIONS + ("store_append_const", "append_const")
STORE_ACTIONS = Option.STORE_ACTIONS + ("store_append_const", "append_const")
TYPED_ACTIONS = Option.TYPED_ACTIONS + ("store_append_const", )
ALWAYS_TYPED_ACTIONS = Option.ALWAYS_TYPED_ACTIONS + ("store_append_const", )
CONST_ACTIONS = getattr(Option, 'CONST_ACTIONS', ()) + ("store_append_const", "append_const")
def take_action(self, action, dest, opt, value, values, parser):
if action == "store_append_const":
# Combination of 'store' and 'append_const' actions
values.ensure_value(dest, []).append(self.const)
value_dest = self.const.replace('-', '_')
setattr(values, value_dest, value)
elif action == "append_const" and not hasattr(Option, 'CONST_ACTIONS'):
values.ensure_value(dest, []).append(self.const)
else:
Option.take_action(self, action, dest, opt, value, values, parser)
def _check_const(self):
if self.action not in self.CONST_ACTIONS and self.const is not None:
raise OptionError(
"'const' must not be supplied for action %r" % self.action,
self)
CHECK_METHODS = [
Option._check_action,
Option._check_type,
Option._check_choice,
Option._check_dest,
_check_const,
Option._check_nargs,
Option._check_callback,
]
parser = OptionParser(option_class=InvenioOption, description="Invenio configuration and administration CLI tool", formatter=IndentedHelpFormatter(max_help_position=31))
parser.add_option("-V", "--version", action="store_true", help="print version number")
finish_options = OptionGroup(parser, "Options to finish your installation")
finish_options.add_option("", "--create-secret-key", dest='actions', const='create-secret-key', action="append_const", help="generate random CFG_SITE_SECRET_KEY")
finish_options.add_option("", "--create-apache-conf", dest='actions', const='create-apache-conf', action="append_const", help="create Apache configuration files")
finish_options.add_option("", "--create-tables", dest='actions', const='create-tables', action="append_const", help="create DB tables for Invenio")
finish_options.add_option("", "--load-bibfield-conf", dest='actions', const='load-bibfield-conf', action="append_const", help="load bibfield configuration file")
finish_options.add_option("", "--load-webstat-conf", dest='actions', const='load-webstat-conf', action="append_const", help="load the WebStat configuration")
finish_options.add_option("", "--drop-tables", dest='actions', const='drop-tables', action="append_const", help="drop DB tables of Invenio")
    finish_options.add_option("", "--check-openoffice", dest='actions', const='check-openoffice', action="append_const", help="check whether the OpenOffice temporary directory is correctly set up")
parser.add_option_group(finish_options)
demotest_options = OptionGroup(parser, "Options to set up and test a demo site")
demotest_options.add_option("", "--create-demo-site", dest='actions', const='create-demo-site', action="append_const", help="create demo site")
demotest_options.add_option("", "--load-demo-records", dest='actions', const='load-demo-records', action="append_const", help="load demo records")
demotest_options.add_option("", "--remove-demo-records", dest='actions', const='remove-demo-records', action="append_const", help="remove demo records, keeping demo site")
demotest_options.add_option("", "--drop-demo-site", dest='actions', const='drop-demo-site', action="append_const", help="drop demo site configurations too")
demotest_options.add_option("", "--run-unit-tests", dest='actions', const='run-unit-tests', action="append_const", help="run unit test suite (needs demo site)")
demotest_options.add_option("", "--run-js-unit-tests", dest='actions', const='run-js-unit-tests', action="append_const", help="run JS unit test suite (needs demo site)")
demotest_options.add_option("", "--run-regression-tests", dest='actions', const='run-regression-tests', action="append_const", help="run regression test suite (needs demo site)")
demotest_options.add_option("", "--run-web-tests", dest='actions', const='run-web-tests', action="append_const", help="run web tests in a browser (needs demo site, Firefox, Selenium IDE)")
demotest_options.add_option("", "--run-flask-tests", dest='actions', const='run-flask-tests', action="append_const", help="run Flask test suite")
parser.add_option_group(demotest_options)
config_options = OptionGroup(parser, "Options to update config files in situ")
config_options.add_option("", "--update-all", dest='actions', const='update-all', action="append_const", help="perform all the update options")
config_options.add_option("", "--update-config-py", dest='actions', const='update-config-py', action="append_const", help="update config.py file from invenio.conf file")
config_options.add_option("", "--update-dbquery-py", dest='actions', const='update-dbquery-py', action="append_const", help="update dbquery.py with DB credentials from invenio.conf")
config_options.add_option("", "--update-dbexec", dest='actions', const='update-dbexec', action="append_const", help="update dbexec with DB credentials from invenio.conf")
config_options.add_option("", "--update-bibconvert-tpl", dest='actions', const='update-bibconvert-tpl', action="append_const", help="update bibconvert templates with CFG_SITE_URL from invenio.conf")
config_options.add_option("", "--update-web-tests", dest='actions', const='update-web-tests', action="append_const", help="update web test cases with CFG_SITE_URL from invenio.conf")
parser.add_option_group(config_options)
reset_options = OptionGroup(parser, "Options to update DB tables")
reset_options.add_option("", "--reset-all", dest='actions', const='reset-all', action="append_const", help="perform all the reset options")
reset_options.add_option("", "--reset-sitename", dest='actions', const='reset-sitename', action="append_const", help="reset tables to take account of new CFG_SITE_NAME*")
reset_options.add_option("", "--reset-siteadminemail", dest='actions', const='reset-siteadminemail', action="append_const", help="reset tables to take account of new CFG_SITE_ADMIN_EMAIL")
reset_options.add_option("", "--reset-fieldnames", dest='actions', const='reset-fieldnames', action="append_const", help="reset tables to take account of new I18N names from PO files")
reset_options.add_option("", "--reset-recstruct-cache", dest='actions', const='reset-recstruct-cache', action="append_const", help="reset record structure cache according to CFG_BIBUPLOAD_SERIALIZE_RECORD_STRUCTURE")
reset_options.add_option("", "--reset-recjson-cache", dest='actions', const='reset-recjson-cache', action="append_const", help="reset record json structure cache according to CFG_BIBUPLOAD_SERIALIZE_RECORD_STRUCTURE")
parser.add_option_group(reset_options)
upgrade_options = OptionGroup(parser, "Options to upgrade your installation")
upgrade_options.add_option("", "--upgrade", dest='actions', const='upgrade', action="append_const", help="apply all pending upgrades")
upgrade_options.add_option("", "--upgrade-check", dest='actions', const='upgrade-check', action="append_const", help="run pre-upgrade checks for pending upgrades")
upgrade_options.add_option("", "--upgrade-show-pending", dest='actions', const='upgrade-show-pending', action="append_const", help="show pending upgrades")
upgrade_options.add_option("", "--upgrade-show-applied", dest='actions', const='upgrade-show-applied', action="append_const", help="show history of applied upgrades")
upgrade_options.add_option("", "--upgrade-create-standard-recipe", dest='actions', metavar='REPOSITORY[,DIR]', const='upgrade-create-standard-recipe', action="append_const", help="use: inveniomanage upgrade create recipe")
upgrade_options.add_option("", "--upgrade-create-release-recipe", dest='actions', metavar='REPOSITORY[,DIR]', const='upgrade-create-release-recipe', action="append_const", help="use: inveniomanage upgrade create release")
parser.add_option_group(upgrade_options)
helper_options = OptionGroup(parser, "Options to help the work")
helper_options.add_option("", "--list", dest='actions', const='list', action="append_const", help="print names and values of all options from conf files")
helper_options.add_option("", "--get", dest='actions', const='get', action="store_append_const", metavar="OPTION", help="get value of a given option from conf files")
helper_options.add_option("", "--conf-dir", action="store", metavar="PATH", help="path to directory where invenio*.conf files are [optional]")
helper_options.add_option("", "--detect-system-details", dest='actions', const='detect-system-details', action="append_const", help="print system details such as Apache/Python/MySQL versions")
parser.add_option_group(helper_options)
parser.add_option('--yes-i-know', action='store_true', dest='yes-i-know', help='use with care!')
parser.add_option('-x', '--stop', action='store_true', dest='stop_on_error', help='When running tests, stop at first error')
return parser
def prepare_conf(options):
""" Read configuration files """
from flask import current_app
conf = ConfigParser()
conf.add_section('Invenio')
for (k, v) in iteritems(current_app.config):
conf.set('Invenio', k, v)
return conf
def main(*cmd_args):
"""Main entry point."""
# Allow easier testing
if not cmd_args:
cmd_args = sys.argv[1:]
# Parse arguments
parser = prepare_option_parser()
(options, dummy_args) = parser.parse_args(list(cmd_args))
if getattr(options, 'stop_on_error', False):
from invenio.testsuite import wrap_failfast
wrap_failfast()
if getattr(options, 'version', False):
from invenio.base import manage
warn('inveniocfg --version is deprecated. Using instead: inveniomanage version')
sys_argv = sys.argv
sys.argv = 'inveniomanage.py version'.split()
manage.main()
sys.argv = sys_argv
else:
# Read configuration
try:
conf = prepare_conf(options)
except Exception as e:
print(e)
sys.exit(1)
## Decide what to do
actions = getattr(options, 'actions', None)
if not actions:
print("""ERROR: Please specify a command. Please see '--help'.""")
sys.exit(1)
if len(actions) > 1:
print("""ERROR: Please specify only one command. Please see '--help'.""")
sys.exit(1)
for action in actions:
if action == 'get':
cli_cmd_get(conf, getattr(options, 'get', None))
elif action == 'list':
cli_cmd_list(conf)
elif action == 'detect-system-details':
cli_cmd_detect_system_details(conf)
elif action == 'create-secret-key':
cli_cmd_create_secret_key(conf)
elif action == 'create-tables':
cli_cmd_create_tables(conf)
elif action == 'load-webstat-conf':
cli_cmd_load_webstat_conf(conf)
elif action == 'drop-tables':
cli_cmd_drop_tables(conf)
elif action == 'check-openoffice':
cli_check_openoffice(conf)
elif action == 'load-bibfield-conf':
cli_cmd_load_bibfield_config(conf)
elif action == 'create-demo-site':
cli_cmd_create_demo_site(conf)
elif action == 'load-demo-records':
cli_cmd_load_demo_records(conf)
elif action == 'remove-demo-records':
cli_cmd_remove_demo_records(conf)
elif action == 'drop-demo-site':
cli_cmd_drop_demo_site(conf)
elif action == 'run-unit-tests':
cli_cmd_run_unit_tests(conf)
elif action == 'run-js-unit-tests':
cli_cmd_run_js_unit_tests(conf)
elif action == 'run-regression-tests':
cli_cmd_run_regression_tests(conf)
elif action == 'run-web-tests':
cli_cmd_run_web_tests(conf)
elif action == 'run-flask-tests':
cli_cmd_run_flask_tests(conf)
elif action == 'update-all':
for f in [cli_cmd_update_config_py,
cli_cmd_update_dbquery_py,
cli_cmd_update_dbexec,
cli_cmd_update_bibconvert_tpl,
cli_cmd_update_web_tests]:
try:
f(conf)
except:
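                        # tolerate any failure (including sys.exit() from the
                        # helpers) so the remaining update steps still run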
pass
elif action == 'update-config-py':
cli_cmd_update_config_py(conf)
elif action == 'update-dbquery-py':
cli_cmd_update_dbquery_py(conf)
elif action == 'update-dbexec':
cli_cmd_update_dbexec(conf)
elif action == 'update-bibconvert-tpl':
cli_cmd_update_bibconvert_tpl(conf)
elif action == 'update-web-tests':
cli_cmd_update_web_tests(conf)
elif action == 'reset-all':
cli_cmd_reset_sitename(conf)
cli_cmd_reset_siteadminemail(conf)
cli_cmd_reset_fieldnames(conf)
cli_cmd_reset_recstruct_cache(conf)
elif action == 'reset-sitename':
cli_cmd_reset_sitename(conf)
elif action == 'reset-siteadminemail':
cli_cmd_reset_siteadminemail(conf)
elif action == 'reset-fieldnames':
cli_cmd_reset_fieldnames(conf)
elif action == 'reset-recstruct-cache':
cli_cmd_reset_recstruct_cache(conf)
elif action == 'reset-recjson-cache':
cli_cmd_reset_recjson_cache(conf)
elif action == 'create-apache-conf':
cli_cmd_create_apache_conf(conf)
elif action == 'upgrade':
cli_cmd_upgrade(conf)
elif action == 'upgrade-check':
cli_cmd_upgrade_check(conf)
elif action == 'upgrade-show-pending':
cli_cmd_upgrade_show_pending(conf)
elif action == 'upgrade-show-applied':
cli_cmd_upgrade_show_applied(conf)
elif action == 'upgrade-create-standard-recipe':
                print('ERROR: inveniocfg --upgrade-create-standard-recipe is not supported anymore. Use instead: inveniomanage upgrade create recipe', file=sys.stderr)
sys.exit(1)
elif action == 'upgrade-create-release-recipe':
                print('ERROR: inveniocfg --upgrade-create-release-recipe is not supported anymore. Use instead: inveniomanage upgrade create release', file=sys.stderr)
sys.exit(1)
else:
print("ERROR: Unknown command", action)
sys.exit(1)
if __name__ == '__main__':
main()
| vicky2135/lucious | refs/heads/master | oscar/lib/python2.7/site-packages/unidecode/x08c.py | 251 |
data = (
'Yu ', # 0x00
'Shui ', # 0x01
'Shen ', # 0x02
'Diao ', # 0x03
'Chan ', # 0x04
'Liang ', # 0x05
'Zhun ', # 0x06
'Sui ', # 0x07
'Tan ', # 0x08
'Shen ', # 0x09
'Yi ', # 0x0a
'Mou ', # 0x0b
'Chen ', # 0x0c
'Die ', # 0x0d
'Huang ', # 0x0e
'Jian ', # 0x0f
'Xie ', # 0x10
'Nue ', # 0x11
'Ye ', # 0x12
'Wei ', # 0x13
'E ', # 0x14
'Yu ', # 0x15
'Xuan ', # 0x16
'Chan ', # 0x17
'Zi ', # 0x18
'An ', # 0x19
'Yan ', # 0x1a
'Di ', # 0x1b
'Mi ', # 0x1c
'Pian ', # 0x1d
'Xu ', # 0x1e
'Mo ', # 0x1f
'Dang ', # 0x20
'Su ', # 0x21
'Xie ', # 0x22
'Yao ', # 0x23
'Bang ', # 0x24
'Shi ', # 0x25
'Qian ', # 0x26
'Mi ', # 0x27
'Jin ', # 0x28
'Man ', # 0x29
'Zhe ', # 0x2a
'Jian ', # 0x2b
'Miu ', # 0x2c
'Tan ', # 0x2d
'Zen ', # 0x2e
'Qiao ', # 0x2f
'Lan ', # 0x30
'Pu ', # 0x31
'Jue ', # 0x32
'Yan ', # 0x33
'Qian ', # 0x34
'Zhan ', # 0x35
'Chen ', # 0x36
'Gu ', # 0x37
'Qian ', # 0x38
'Hong ', # 0x39
'Xia ', # 0x3a
'Jue ', # 0x3b
'Hong ', # 0x3c
'Han ', # 0x3d
'Hong ', # 0x3e
'Xi ', # 0x3f
'Xi ', # 0x40
'Huo ', # 0x41
'Liao ', # 0x42
'Han ', # 0x43
'Du ', # 0x44
'Long ', # 0x45
'Dou ', # 0x46
'Jiang ', # 0x47
'Qi ', # 0x48
'Shi ', # 0x49
'Li ', # 0x4a
'Deng ', # 0x4b
'Wan ', # 0x4c
'Bi ', # 0x4d
'Shu ', # 0x4e
'Xian ', # 0x4f
'Feng ', # 0x50
'Zhi ', # 0x51
'Zhi ', # 0x52
'Yan ', # 0x53
'Yan ', # 0x54
'Shi ', # 0x55
'Chu ', # 0x56
'Hui ', # 0x57
'Tun ', # 0x58
'Yi ', # 0x59
'Tun ', # 0x5a
'Yi ', # 0x5b
'Jian ', # 0x5c
'Ba ', # 0x5d
'Hou ', # 0x5e
'E ', # 0x5f
'Cu ', # 0x60
'Xiang ', # 0x61
'Huan ', # 0x62
'Jian ', # 0x63
'Ken ', # 0x64
'Gai ', # 0x65
'Qu ', # 0x66
'Fu ', # 0x67
'Xi ', # 0x68
'Bin ', # 0x69
'Hao ', # 0x6a
'Yu ', # 0x6b
'Zhu ', # 0x6c
'Jia ', # 0x6d
'[?] ', # 0x6e
'Xi ', # 0x6f
'Bo ', # 0x70
'Wen ', # 0x71
'Huan ', # 0x72
'Bin ', # 0x73
'Di ', # 0x74
'Zong ', # 0x75
'Fen ', # 0x76
'Yi ', # 0x77
'Zhi ', # 0x78
'Bao ', # 0x79
'Chai ', # 0x7a
'Han ', # 0x7b
'Pi ', # 0x7c
'Na ', # 0x7d
'Pi ', # 0x7e
'Gou ', # 0x7f
'Na ', # 0x80
'You ', # 0x81
'Diao ', # 0x82
'Mo ', # 0x83
'Si ', # 0x84
'Xiu ', # 0x85
'Huan ', # 0x86
'Kun ', # 0x87
'He ', # 0x88
'He ', # 0x89
'Mo ', # 0x8a
'Han ', # 0x8b
'Mao ', # 0x8c
'Li ', # 0x8d
'Ni ', # 0x8e
'Bi ', # 0x8f
'Yu ', # 0x90
'Jia ', # 0x91
'Tuan ', # 0x92
'Mao ', # 0x93
'Pi ', # 0x94
'Xi ', # 0x95
'E ', # 0x96
'Ju ', # 0x97
'Mo ', # 0x98
'Chu ', # 0x99
'Tan ', # 0x9a
'Huan ', # 0x9b
'Jue ', # 0x9c
'Bei ', # 0x9d
'Zhen ', # 0x9e
'Yuan ', # 0x9f
'Fu ', # 0xa0
'Cai ', # 0xa1
'Gong ', # 0xa2
'Te ', # 0xa3
'Yi ', # 0xa4
'Hang ', # 0xa5
'Wan ', # 0xa6
'Pin ', # 0xa7
'Huo ', # 0xa8
'Fan ', # 0xa9
'Tan ', # 0xaa
'Guan ', # 0xab
'Ze ', # 0xac
'Zhi ', # 0xad
'Er ', # 0xae
'Zhu ', # 0xaf
'Shi ', # 0xb0
'Bi ', # 0xb1
'Zi ', # 0xb2
'Er ', # 0xb3
'Gui ', # 0xb4
'Pian ', # 0xb5
'Bian ', # 0xb6
'Mai ', # 0xb7
'Dai ', # 0xb8
'Sheng ', # 0xb9
'Kuang ', # 0xba
'Fei ', # 0xbb
'Tie ', # 0xbc
'Yi ', # 0xbd
'Chi ', # 0xbe
'Mao ', # 0xbf
'He ', # 0xc0
'Bi ', # 0xc1
'Lu ', # 0xc2
'Ren ', # 0xc3
'Hui ', # 0xc4
'Gai ', # 0xc5
'Pian ', # 0xc6
'Zi ', # 0xc7
'Jia ', # 0xc8
'Xu ', # 0xc9
'Zei ', # 0xca
'Jiao ', # 0xcb
'Gai ', # 0xcc
'Zang ', # 0xcd
'Jian ', # 0xce
'Ying ', # 0xcf
'Xun ', # 0xd0
'Zhen ', # 0xd1
'She ', # 0xd2
'Bin ', # 0xd3
'Bin ', # 0xd4
'Qiu ', # 0xd5
'She ', # 0xd6
'Chuan ', # 0xd7
'Zang ', # 0xd8
'Zhou ', # 0xd9
'Lai ', # 0xda
'Zan ', # 0xdb
'Si ', # 0xdc
'Chen ', # 0xdd
'Shang ', # 0xde
'Tian ', # 0xdf
'Pei ', # 0xe0
'Geng ', # 0xe1
'Xian ', # 0xe2
'Mai ', # 0xe3
'Jian ', # 0xe4
'Sui ', # 0xe5
'Fu ', # 0xe6
'Tan ', # 0xe7
'Cong ', # 0xe8
'Cong ', # 0xe9
'Zhi ', # 0xea
'Ji ', # 0xeb
'Zhang ', # 0xec
'Du ', # 0xed
'Jin ', # 0xee
'Xiong ', # 0xef
'Shun ', # 0xf0
'Yun ', # 0xf1
'Bao ', # 0xf2
'Zai ', # 0xf3
'Lai ', # 0xf4
'Feng ', # 0xf5
'Cang ', # 0xf6
'Ji ', # 0xf7
'Sheng ', # 0xf8
'Ai ', # 0xf9
'Zhuan ', # 0xfa
'Fu ', # 0xfb
'Gou ', # 0xfc
'Sai ', # 0xfd
'Ze ', # 0xfe
'Liao ', # 0xff
)
| e-gob/plataforma-kioscos-autoatencion | refs/heads/master | scripts/ansible-play/.venv/lib/python2.7/site-packages/ansible/plugins/lookup/keyring.py | 82 |
# (c) 2016, Samuel Boucher <boucher.samuel.c@gmail.com>
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = """
lookup: keyring
author:
- Samuel Boucher <boucher.samuel.c@gmail.com>
version_added: "2.3"
requirements:
- keyring (python library)
short_description: grab secrets from the OS keyring
description:
- Allows you to access data stored in the OS provided keyring/keychain.
"""
EXAMPLES = """
- name: output secrets to screen (BAD IDEA)
debug:
msg: "Password: {{item}}"
with_keyring:
- 'servicename username'
- name: access mysql with password from keyring
mysql_db: login_password={{lookup('keyring','mysql joe')}} login_user=joe
"""
RETURN = """
_raw:
description: secrets stored
"""
HAS_KEYRING = True
from ansible.errors import AnsibleError
try:
import keyring
except ImportError:
HAS_KEYRING = False
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
from ansible.plugins.lookup import LookupBase
class LookupModule(LookupBase):
def run(self, terms, **kwargs):
if not HAS_KEYRING:
raise AnsibleError(u"Can't LOOKUP(keyring): missing required python library 'keyring'")
display.vvvv(u"keyring: %s" % keyring.get_keyring())
ret = []
for term in terms:
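            # each term is expected to be "servicename username",
            # separated by whitespace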
(servicename, username) = (term.split()[0], term.split()[1])
display.vvvv(u"username: %s, servicename: %s " % (username, servicename))
password = keyring.get_password(servicename, username)
if password is None:
raise AnsibleError(u"servicename: %s for user %s not found" % (servicename, username))
ret.append(password.rstrip())
return ret
| alimuldal/numpy | refs/heads/master | numpy/fft/__init__.py | 83 |
from __future__ import division, absolute_import, print_function
# To get sub-modules
from .info import __doc__
from .fftpack import *
from .helper import *
from numpy.testing.nosetester import _numpy_tester
test = _numpy_tester().test
bench = _numpy_tester().bench
| sampadsaha5/sympy | refs/heads/master | sympy/series/tests/test_limitseq.py | 62 |
from sympy import symbols, oo, Sum, harmonic, Add, S, binomial, factorial
from sympy.series.limitseq import limit_seq
from sympy.series.limitseq import difference_delta as dd
from sympy.utilities.pytest import raises, XFAIL
n, m, k = symbols('n m k', integer=True)
def test_difference_delta():
e = n*(n + 1)
e2 = e * k
assert dd(e) == 2*n + 2
assert dd(e2, n, 2) == k*(4*n + 6)
raises(ValueError, lambda: dd(e2))
raises(ValueError, lambda: dd(e2, n, oo))
def test_difference_delta__Sum():
e = Sum(1/k, (k, 1, n))
assert dd(e, n) == 1/(n + 1)
assert dd(e, n, 5) == Add(*[1/(i + n + 1) for i in range(5)])
e = Sum(1/k, (k, 1, 3*n))
assert dd(e, n) == Add(*[1/(i + 3*n + 1) for i in range(3)])
e = n * Sum(1/k, (k, 1, n))
assert dd(e, n) == 1 + Sum(1/k, (k, 1, n))
e = Sum(1/k, (k, 1, n), (m, 1, n))
assert dd(e, n) == harmonic(n)
def test_difference_delta__Add():
e = n + n*(n + 1)
assert dd(e, n) == 2*n + 3
assert dd(e, n, 2) == 4*n + 8
e = n + Sum(1/k, (k, 1, n))
assert dd(e, n) == 1 + 1/(n + 1)
assert dd(e, n, 5) == 5 + Add(*[1/(i + n + 1) for i in range(5)])
def test_difference_delta__Pow():
e = 4**n
assert dd(e, n) == 3*4**n
assert dd(e, n, 2) == 15*4**n
e = 4**(2*n)
assert dd(e, n) == 15*4**(2*n)
assert dd(e, n, 2) == 255*4**(2*n)
e = n**4
assert dd(e, n) == (n + 1)**4 - n**4
e = n**n
assert dd(e, n) == (n + 1)**(n + 1) - n**n
def test_limit_seq():
e = binomial(2*n, n) / Sum(binomial(2*k, k), (k, 1, n))
assert limit_seq(e) == S(3) / 4
assert limit_seq(e, m) == e
e = (5*n**3 + 3*n**2 + 4) / (3*n**3 + 4*n - 5)
assert limit_seq(e, n) == S(5) / 3
e = (harmonic(n) * Sum(harmonic(k), (k, 1, n))) / (n * harmonic(2*n)**2)
assert limit_seq(e, n) == 1
e = Sum(k**2 * Sum(2**m/m, (m, 1, k)), (k, 1, n)) / (2**n*n)
assert limit_seq(e, n) == 4
e = (Sum(binomial(3*k, k) * binomial(5*k, k), (k, 1, n)) /
(binomial(3*n, n) * binomial(5*n, n)))
assert limit_seq(e, n) == S(84375) / 83351
e = Sum(harmonic(k)**2/k, (k, 1, 2*n)) / harmonic(n)**3
assert limit_seq(e, n) == S(1) / 3
raises(ValueError, lambda: limit_seq(e * m))
@XFAIL
def test_limit_seq_fail():
# improve Summation algorithm or add ad-hoc criteria
e = (harmonic(n)**3 * Sum(1/harmonic(k), (k, 1, n)) /
(n * Sum(harmonic(k)/k, (k, 1, n))))
assert limit_seq(e, n) == 2
# No unique dominant term
e = (Sum(2**k * binomial(2*k, k) / k**2, (k, 1, n)) /
(Sum(2**k/k*2, (k, 1, n)) * Sum(binomial(2*k, k), (k, 1, n))))
assert limit_seq(e, n) == S(3) / 7
    # Simplifications of summations need to be improved.
e = n**3*Sum(2**k/k**2, (k, 1, n))**2 / (2**n * Sum(2**k/k, (k, 1, n)))
assert limit_seq(e, n) == 2
e = (harmonic(n) * Sum(2**k/k, (k, 1, n)) /
(n * Sum(2**k*harmonic(k)/k**2, (k, 1, n))))
assert limit_seq(e, n) == 1
e = (Sum(2**k*factorial(k) / k**2, (k, 1, 2*n)) /
(Sum(4**k/k**2, (k, 1, n)) * Sum(factorial(k), (k, 1, 2*n))))
assert limit_seq(e, n) == S(3) / 16
| sencha/chromium-spacewalk | refs/heads/master | tools/usb_gadget/linux_gadgetfs.py | 54 |
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Linux gadgetfs glue.
Exposes a USB gadget using a USB peripheral controller on Linux. The userspace
ABI is documented here:
https://github.com/torvalds/linux/blob/master/drivers/usb/gadget/inode.c
"""
import errno
import multiprocessing
import os
import struct
from tornado import ioloop
import usb_constants
import usb_descriptors
GADGETFS_NOP = 0
GADGETFS_CONNECT = 1
GADGETFS_DISCONNECT = 2
GADGETFS_SETUP = 3
GADGETFS_SUSPEND = 4
BULK = 0x01
INTERRUPT = 0x02
ISOCHRONOUS = 0x04
USB_TRANSFER_TYPE_TO_MASK = {
usb_constants.TransferType.BULK: BULK,
usb_constants.TransferType.INTERRUPT: INTERRUPT,
usb_constants.TransferType.ISOCHRONOUS: ISOCHRONOUS
}
IN = 0x01
OUT = 0x02
HARDWARE = {
'beaglebone-black': (
'musb-hdrc', # Gadget controller name.
{
0x01: ('ep1out', BULK | INTERRUPT | ISOCHRONOUS, 512),
0x81: ('ep1in', BULK | INTERRUPT | ISOCHRONOUS, 512),
0x02: ('ep2out', BULK | INTERRUPT | ISOCHRONOUS, 512),
0x82: ('ep2in', BULK | INTERRUPT | ISOCHRONOUS, 512),
0x03: ('ep3out', BULK | INTERRUPT | ISOCHRONOUS, 512),
0x83: ('ep3in', BULK | INTERRUPT | ISOCHRONOUS, 512),
0x04: ('ep4out', BULK | INTERRUPT | ISOCHRONOUS, 512),
0x84: ('ep4in', BULK | INTERRUPT | ISOCHRONOUS, 512),
0x05: ('ep5out', BULK | INTERRUPT | ISOCHRONOUS, 512),
0x85: ('ep5in', BULK | INTERRUPT | ISOCHRONOUS, 512),
0x06: ('ep6out', BULK | INTERRUPT | ISOCHRONOUS, 512),
0x86: ('ep6in', BULK | INTERRUPT | ISOCHRONOUS, 512),
0x07: ('ep7out', BULK | INTERRUPT | ISOCHRONOUS, 512),
0x87: ('ep7in', BULK | INTERRUPT | ISOCHRONOUS, 512),
0x08: ('ep8out', BULK | INTERRUPT | ISOCHRONOUS, 512),
0x88: ('ep8in', BULK | INTERRUPT | ISOCHRONOUS, 512),
0x09: ('ep9out', BULK | INTERRUPT | ISOCHRONOUS, 512),
0x89: ('ep9in', BULK | INTERRUPT | ISOCHRONOUS, 512),
0x0A: ('ep10out', BULK | INTERRUPT | ISOCHRONOUS, 512),
0x8A: ('ep10in', BULK | INTERRUPT | ISOCHRONOUS, 512),
0x0B: ('ep11out', BULK | INTERRUPT | ISOCHRONOUS, 512),
0x8B: ('ep11in', BULK | INTERRUPT | ISOCHRONOUS, 512),
0x0C: ('ep12out', BULK | INTERRUPT | ISOCHRONOUS, 512),
0x8C: ('ep12in', BULK | INTERRUPT | ISOCHRONOUS, 512),
0x0D: ('ep13', BULK | INTERRUPT | ISOCHRONOUS, 4096),
0x8D: ('ep13', BULK | INTERRUPT | ISOCHRONOUS, 4096),
0x0E: ('ep14', BULK | INTERRUPT | ISOCHRONOUS, 1024),
0x8E: ('ep14', BULK | INTERRUPT | ISOCHRONOUS, 1024),
0x0F: ('ep15', BULK | INTERRUPT | ISOCHRONOUS, 1024),
0x8F: ('ep15', BULK | INTERRUPT | ISOCHRONOUS, 1024),
}
)
}
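# Note: in the map above, the endpoint address's direction bit (0x80) selects
# the IN variant, per the USB bEndpointAddress convention; the third tuple
# field is the hardware FIFO size in bytes.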
class LinuxGadgetfs(object):
"""Linux gadgetfs-based gadget driver.
"""
def __init__(self, hardware, mountpoint='/dev/gadget'):
"""Initialize bindings to the Linux gadgetfs interface.
Args:
hardware: Hardware type.
mountpoint: Gadget filesystem mount point.
"""
self._chip, self._hw_eps = HARDWARE[hardware]
self._ep_dir = mountpoint
self._gadget = None
self._fd = None
# map from bEndpointAddress to hardware ep name and open file descriptor
self._ep_fds = {}
self._io_loop = ioloop.IOLoop.current()
def Create(self, gadget):
"""Bind a gadget to the USB peripheral controller."""
self._gadget = gadget
self._fd = os.open(os.path.join(self._ep_dir, self._chip), os.O_RDWR)
buf = ''.join([struct.pack('=I', 0),
gadget.GetFullSpeedConfigurationDescriptor().Encode(),
gadget.GetHighSpeedConfigurationDescriptor().Encode(),
gadget.GetDeviceDescriptor().Encode()])
os.write(self._fd, buf)
self._io_loop.add_handler(self._fd, self.HandleEvent, self._io_loop.READ)
def Destroy(self):
"""Unbind the gadget from the USB peripheral controller."""
self.Disconnected()
self._io_loop.remove_handler(self._fd)
os.close(self._fd)
self._gadget = None
self._fd = None
def IsConfigured(self):
return self._gadget is not None
def HandleEvent(self, unused_fd, unused_events):
buf = os.read(self._fd, 12)
event_type, = struct.unpack_from('=I', buf, 8)
if event_type == GADGETFS_NOP:
print 'NOP'
elif event_type == GADGETFS_CONNECT:
speed, = struct.unpack('=Ixxxxxxxx', buf)
self.Connected(speed)
elif event_type == GADGETFS_DISCONNECT:
self.Disconnected()
elif event_type == GADGETFS_SETUP:
request_type, request, value, index, length = struct.unpack(
'<BBHHHxxxx', buf)
self.HandleSetup(request_type, request, value, index, length)
elif event_type == GADGETFS_SUSPEND:
print 'SUSPEND'
else:
print 'Unknown gadgetfs event type:', event_type
def Connected(self, speed):
print 'CONNECT speed={}'.format(speed)
self._gadget.Connected(self, speed)
def Disconnected(self):
print 'DISCONNECT'
for endpoint_addr in self._ep_fds.keys():
self.StopEndpoint(endpoint_addr)
self._ep_fds.clear()
self._gadget.Disconnected()
def HandleSetup(self, request_type, request, value, index, length):
print ('SETUP bmRequestType=0x{:02X} bRequest=0x{:02X} wValue=0x{:04X} '
'wIndex=0x{:04X} wLength={}'
.format(request_type, request, value, index, length))
if request_type & usb_constants.Dir.IN:
data = self._gadget.ControlRead(
request_type, request, value, index, length)
if data is None:
print 'SETUP STALL'
try:
os.read(self._fd, 0) # Backwards I/O stalls the pipe.
except OSError, e:
# gadgetfs always returns EL2HLT which we should ignore.
if e.errno != errno.EL2HLT:
raise
else:
os.write(self._fd, data)
else:
data = ''
if length:
data = os.read(self._fd, length)
result = self._gadget.ControlWrite(
request_type, request, value, index, data)
if result is None:
print 'SETUP STALL'
try:
os.write(self._fd, '') # Backwards I/O stalls the pipe.
except OSError, e:
# gadgetfs always returns EL2HLT which we should ignore.
if e.errno != errno.EL2HLT:
raise
elif not length:
# Only empty OUT transfers can be ACKed.
os.read(self._fd, 0)
def StartEndpoint(self, endpoint_desc):
"""Activate an endpoint.
To enable a hardware endpoint the appropriate endpoint file must be opened
and the endpoint descriptors written to it. Linux requires both full- and
high-speed descriptors to be written for a high-speed device but since the
endpoint is always reinitialized after disconnect only the high-speed
endpoint will be valid in this case.
Args:
endpoint_desc: Endpoint descriptor.
Raises:
RuntimeError: If the hardware endpoint is in use or the configuration
is not supported by the hardware.
"""
endpoint_addr = endpoint_desc.bEndpointAddress
name, hw_ep_type, hw_ep_size = self._hw_eps[endpoint_addr]
if name in self._ep_fds:
raise RuntimeError('Hardware endpoint {} already in use.'.format(name))
ep_type = USB_TRANSFER_TYPE_TO_MASK[
endpoint_desc.bmAttributes & usb_constants.TransferType.MASK]
ep_size = endpoint_desc.wMaxPacketSize
if not hw_ep_type & ep_type:
raise RuntimeError('Hardware endpoint {} does not support this transfer '
'type.'.format(name))
elif hw_ep_size < ep_size:
raise RuntimeError('Hardware endpoint {} only supports a maximum packet '
'size of {}, {} requested.'
.format(name, hw_ep_size, ep_size))
fd = os.open(os.path.join(self._ep_dir, name), os.O_RDWR)
buf = struct.pack('=I', 1)
if self._gadget.GetSpeed() == usb_constants.Speed.HIGH:
# The full speed endpoint descriptor will not be used but Linux requires
# one to be provided.
full_speed_endpoint = usb_descriptors.EndpointDescriptor(
bEndpointAddress=endpoint_desc.bEndpointAddress,
bmAttributes=0,
wMaxPacketSize=0,
bInterval=0)
buf = ''.join([buf, full_speed_endpoint.Encode(), endpoint_desc.Encode()])
else:
buf = ''.join([buf, endpoint_desc.Encode()])
os.write(fd, buf)
pipe_r, pipe_w = multiprocessing.Pipe(False)
child = None
# gadgetfs doesn't support polling on the endpoint file descriptors (why?)
# so we have to start background threads for each.
if endpoint_addr & usb_constants.Dir.IN:
def WriterProcess():
while True:
data = pipe_r.recv()
written = os.write(fd, data)
print('IN bEndpointAddress=0x{:02X} length={}'
.format(endpoint_addr, written))
child = multiprocessing.Process(target=WriterProcess)
self._ep_fds[endpoint_addr] = fd, child, pipe_w
else:
def ReceivePacket(unused_fd, unused_events):
data = pipe_r.recv()
print('OUT bEndpointAddress=0x{:02X} length={}'
.format(endpoint_addr, len(data)))
self._gadget.ReceivePacket(endpoint_addr, data)
def ReaderProcess():
while True:
data = os.read(fd, ep_size)
pipe_w.send(data)
child = multiprocessing.Process(target=ReaderProcess)
pipe_fd = pipe_r.fileno()
self._io_loop.add_handler(pipe_fd, ReceivePacket, self._io_loop.READ)
self._ep_fds[endpoint_addr] = fd, child, pipe_r
child.start()
print 'Started endpoint 0x{:02X}.'.format(endpoint_addr)
def StopEndpoint(self, endpoint_addr):
"""Deactivate the given endpoint."""
fd, child, pipe = self._ep_fds.pop(endpoint_addr)
pipe_fd = pipe.fileno()
child.terminate()
child.join()
if not endpoint_addr & usb_constants.Dir.IN:
self._io_loop.remove_handler(pipe_fd)
os.close(fd)
print 'Stopped endpoint 0x{:02X}.'.format(endpoint_addr)
def SendPacket(self, endpoint_addr, data):
"""Send a packet on the given endpoint."""
_, _, pipe = self._ep_fds[endpoint_addr]
pipe.send(data)
def HaltEndpoint(self, endpoint_addr):
"""Signal a stall condition on the given endpoint."""
fd, _, _ = self._ep_fds[endpoint_addr]  # entries are (fd, process, pipe)
# Reverse I/O direction sets the halt condition on the pipe.
try:
if endpoint_addr & usb_constants.Dir.IN:
os.read(fd, 0)
else:
os.write(fd, '')
except OSError, e:
# gadgetfs always returns EBADMSG which we should ignore.
if e.errno != errno.EBADMSG:
raise
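# Illustrative binding sequence (sketch; my_gadget is a hypothetical gadget
# object implementing the callbacks used above, e.g. Connected, ControlRead
# and ReceivePacket):
#   chip = LinuxGadgetfs('beaglebone-black')
#   chip.Create(my_gadget)            # write descriptors, register with IOLoop
#   ioloop.IOLoop.current().start()   # HandleEvent dispatches gadgetfs events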
|
yatinkumbhare/openstack-nova
|
refs/heads/master
|
nova/tests/unit/virt/vmwareapi/test_network_util.py
|
21
|
# Copyright (c) 2014 VMware, Inc.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import mock
from oslo_vmware import vim_util
from nova import test
from nova.tests.unit.virt.vmwareapi import fake
from nova.tests.unit.virt.vmwareapi import stubs
from nova.virt.vmwareapi import driver
from nova.virt.vmwareapi import network_util
ResultSet = collections.namedtuple('ResultSet', ['objects'])
ObjectContent = collections.namedtuple('ObjectContent', ['obj', 'propSet'])
DynamicProperty = collections.namedtuple('DynamicProperty', ['name', 'val'])
class VMwareNetworkUtilTestCase(test.NoDBTestCase):
def setUp(self):
super(VMwareNetworkUtilTestCase, self).setUp()
fake.reset()
self.stubs.Set(driver.VMwareAPISession, "vim", stubs.fake_vim_prop)
self.stubs.Set(driver.VMwareAPISession, "_is_vim_object",
stubs.fake_is_vim_object)
self._session = driver.VMwareAPISession()
def _build_cluster_networks(self, networks):
"""Returns a set of results for a cluster network lookup.
This is an example:
(ObjectContent){
obj =
(obj){
value = "domain-c7"
_type = "ClusterComputeResource"
}
propSet[] =
(DynamicProperty){
name = "network"
val =
(ArrayOfManagedObjectReference){
ManagedObjectReference[] =
(ManagedObjectReference){
value = "network-54"
_type = "Network"
},
(ManagedObjectReference){
value = "dvportgroup-14"
_type = "DistributedVirtualPortgroup"
},
}
},
}]
"""
objects = []
obj = ObjectContent(obj=vim_util.get_moref("domain-c7",
"ClusterComputeResource"),
propSet=[])
value = fake.DataObject()
value.ManagedObjectReference = []
for network in networks:
value.ManagedObjectReference.append(network)
obj.propSet.append(
DynamicProperty(name='network',
val=value))
objects.append(obj)
return ResultSet(objects=objects)
def test_get_network_no_match(self):
net_morefs = [vim_util.get_moref("dvportgroup-135",
"DistributedVirtualPortgroup"),
vim_util.get_moref("dvportgroup-136",
"DistributedVirtualPortgroup")]
networks = self._build_cluster_networks(net_morefs)
self._continue_retrieval_called = False
def mock_call_method(module, method, *args, **kwargs):
if method == 'get_object_properties':
return networks
if method == 'get_dynamic_property':
result = fake.DataObject()
result.name = 'no-match'
return result
if method == 'continue_retrieval':
self._continue_retrieval_called = True
with mock.patch.object(self._session, '_call_method',
mock_call_method):
res = network_util.get_network_with_the_name(self._session,
'fake_net',
'fake_cluster')
self.assertTrue(self._continue_retrieval_called)
self.assertIsNone(res)
def _get_network_dvs_match(self, name, token=False):
net_morefs = [vim_util.get_moref("dvportgroup-135",
"DistributedVirtualPortgroup")]
networks = self._build_cluster_networks(net_morefs)
def mock_call_method(module, method, *args, **kwargs):
if method == 'get_object_properties':
return networks
if method == 'get_dynamic_property':
result = fake.DataObject()
if not token or self._continue_retrieval_called:
result.name = name
else:
result.name = 'fake_name'
result.key = 'fake_key'
result.distributedVirtualSwitch = 'fake_dvs'
return result
if method == 'continue_retrieval':
if token:
self._continue_retrieval_called = True
return networks
if method == 'cancel_retrieval':
self._cancel_retrieval_called = True
with mock.patch.object(self._session, '_call_method',
mock_call_method):
res = network_util.get_network_with_the_name(self._session,
'fake_net',
'fake_cluster')
self.assertIsNotNone(res)
def test_get_network_dvs_exact_match(self):
self._cancel_retrieval_called = False
self._get_network_dvs_match('fake_net')
self.assertTrue(self._cancel_retrieval_called)
def test_get_network_dvs_match(self):
self._cancel_retrieval_called = False
self._get_network_dvs_match('dvs_7-virtualwire-7-fake_net')
self.assertTrue(self._cancel_retrieval_called)
def test_get_network_dvs_match_with_token(self):
self._continue_retrieval_called = False
self._cancel_retrieval_called = False
self._get_network_dvs_match('dvs_7-virtualwire-7-fake_net',
token=True)
self.assertTrue(self._continue_retrieval_called)
self.assertTrue(self._cancel_retrieval_called)
def test_get_network_network_match(self):
net_morefs = [vim_util.get_moref("network-54", "Network")]
networks = self._build_cluster_networks(net_morefs)
def mock_call_method(module, method, *args, **kwargs):
if method == 'get_object_properties':
return networks
if method == 'get_dynamic_property':
return 'fake_net'
with mock.patch.object(self._session, '_call_method',
mock_call_method):
res = network_util.get_network_with_the_name(self._session,
'fake_net',
'fake_cluster')
self.assertIsNotNone(res)
|
ridfrustum/lettuce
|
refs/heads/master
|
tests/integration/lib/Django-1.3/django/contrib/gis/tests/geogapp/tests.py
|
222
|
"""
Tests for geography support in PostGIS 1.5+
"""
import os
from django.contrib.gis import gdal
from django.contrib.gis.measure import D
from django.test import TestCase
from models import City, County, Zipcode
class GeographyTest(TestCase):
def test01_fixture_load(self):
"Ensure geography features loaded properly."
self.assertEqual(8, City.objects.count())
def test02_distance_lookup(self):
"Testing GeoQuerySet distance lookup support on non-point geography fields."
z = Zipcode.objects.get(code='77002')
cities1 = list(City.objects
.filter(point__distance_lte=(z.poly, D(mi=500)))
.order_by('name')
.values_list('name', flat=True))
cities2 = list(City.objects
.filter(point__dwithin=(z.poly, D(mi=500)))
.order_by('name')
.values_list('name', flat=True))
for cities in [cities1, cities2]:
self.assertEqual(['Dallas', 'Houston', 'Oklahoma City'], cities)
def test03_distance_method(self):
"Testing GeoQuerySet.distance() support on non-point geography fields."
# `GeoQuerySet.distance` is not limited to geometry fields.
htown = City.objects.get(name='Houston')
qs = Zipcode.objects.distance(htown.point)
def test04_invalid_operators_functions(self):
"Ensuring exceptions are raised for operators & functions invalid on geography fields."
# Only a subset of the geometry functions & operator are available
# to PostGIS geography types. For more information, visit:
# http://postgis.refractions.net/documentation/manual-1.5/ch08.html#PostGIS_GeographyFunctions
z = Zipcode.objects.get(code='77002')
# ST_Within not available.
self.assertRaises(ValueError, City.objects.filter(point__within=z.poly).count)
# `@` operator not available.
self.assertRaises(ValueError, City.objects.filter(point__contained=z.poly).count)
# Regression test for #14060, `~=` was never really implemented for PostGIS.
htown = City.objects.get(name='Houston')
self.assertRaises(ValueError, City.objects.get, point__exact=htown.point)
def test05_geography_layermapping(self):
"Testing LayerMapping support on models with geography fields."
# There is a similar test in `layermap` that uses the same data set,
# but the County model here is a bit different.
if not gdal.HAS_GDAL: return
from django.contrib.gis.utils import LayerMapping
# Getting the shapefile and mapping dictionary.
shp_path = os.path.realpath(os.path.join(os.path.dirname(__file__), '..', 'data'))
co_shp = os.path.join(shp_path, 'counties', 'counties.shp')
co_mapping = {'name' : 'Name',
'state' : 'State',
'mpoly' : 'MULTIPOLYGON',
}
# Reference county names, number of polygons, and state names.
names = ['Bexar', 'Galveston', 'Harris', 'Honolulu', 'Pueblo']
num_polys = [1, 2, 1, 19, 1] # Number of polygons for each.
st_names = ['Texas', 'Texas', 'Texas', 'Hawaii', 'Colorado']
lm = LayerMapping(County, co_shp, co_mapping, source_srs=4269, unique='name')
lm.save(silent=True, strict=True)
for c, name, num_poly, state in zip(County.objects.order_by('name'), names, num_polys, st_names):
self.assertEqual(4326, c.mpoly.srid)
self.assertEqual(num_poly, len(c.mpoly))
self.assertEqual(name, c.name)
self.assertEqual(state, c.state)
def test06_geography_area(self):
"Testing that Area calculations work on geography columns."
from django.contrib.gis.measure import A
# SELECT ST_Area(poly) FROM geogapp_zipcode WHERE code='77002';
ref_area = 5439084.70637573
tol = 5
z = Zipcode.objects.area().get(code='77002')
self.assertAlmostEqual(z.area.sq_m, ref_area, tol)
|
hetajen/vnpy161
|
refs/heads/master
|
vn.api/vn.ctp/pyscript/generate_md_functions.py
|
25
|
# encoding: UTF-8
__author__ = 'CHENXY'
from string import join
from ctp_struct import structDict
def processCallBack(line):
orignalLine = line
line = line.replace('\tvirtual void ', '') # strip the unwanted leading text
line = line.replace('{};\n', '') # strip the unwanted trailing text
content = line.split('(')
cbName = content[0] # callback function name
cbArgs = content[1] # callback function arguments
if cbArgs[-1] == ' ':
cbArgs = cbArgs.replace(') ', '')
else:
cbArgs = cbArgs.replace(')', '')
cbArgsList = cbArgs.split(', ') # split the argument string into a list
cbArgsTypeList = []
cbArgsValueList = []
for arg in cbArgsList: # process each argument
content = arg.split(' ')
if len(content) > 1:
cbArgsTypeList.append(content[0]) # list of argument types
cbArgsValueList.append(content[1]) # list of argument names
createTask(cbName, cbArgsTypeList, cbArgsValueList, orignalLine)
createProcess(cbName, cbArgsTypeList, cbArgsValueList)
# generate the process declarations for the .h file
process_line = 'void process' + cbName[2:] + '(Task task);\n'
fheaderprocess.write(process_line)
fheaderprocess.write('\n')
# generate the on callback declarations for the .h file
if 'OnRspError' in cbName:
on_line = 'virtual void on' + cbName[2:] + '(dict error, int id, bool last) {};\n'
elif 'OnRsp' in cbName:
on_line = 'virtual void on' + cbName[2:] + '(dict data, dict error, int id, bool last) {};\n'
elif 'OnRtn' in cbName:
on_line = 'virtual void on' + cbName[2:] + '(dict data) {};\n'
elif 'OnErrRtn' in cbName:
on_line = 'virtual void on' + cbName[2:] + '(dict data, dict error) {};\n'
else:
on_line = ''
fheaderon.write(on_line)
fheaderon.write('\n')
# generate the wrapper section
createWrap(cbName)
#----------------------------------------------------------------------
def createWrap(cbName):
"""在Python封装段代码中进行处理"""
# 生成.h文件中的on部分
if 'OnRspError' in cbName:
on_line = 'virtual void on' + cbName[2:] + '(dict error, int id, bool last)\n'
override_line = '("on' + cbName[2:] + '")(error, id, last);\n'
elif 'OnRsp' in cbName:
on_line = 'virtual void on' + cbName[2:] + '(dict data, dict error, int id, bool last)\n'
override_line = '("on' + cbName[2:] + '")(data, error, id, last);\n'
elif 'OnRtn' in cbName:
on_line = 'virtual void on' + cbName[2:] + '(dict data)\n'
override_line = '("on' + cbName[2:] + '")(data);\n'
elif 'OnErrRtn' in cbName:
on_line = 'virtual void on' + cbName[2:] + '(dict data, dict error)\n'
override_line = '("on' + cbName[2:] + '")(data, error);\n'
else:
on_line = ''
if on_line != '':
fwrap.write(on_line)
fwrap.write('{\n')
fwrap.write('\ttry\n')
fwrap.write('\t{\n')
fwrap.write('\t\tthis->get_override'+override_line)
fwrap.write('\t}\n')
fwrap.write('\tcatch (error_already_set const &)\n')
fwrap.write('\t{\n')
fwrap.write('\t\tPyErr_Print();\n')
fwrap.write('\t}\n')
fwrap.write('};\n')
fwrap.write('\n')
def createTask(cbName, cbArgsTypeList, cbArgsValueList, orignalLine):
# build a Task object from the callback and push it onto the queue
funcline = orignalLine.replace('\tvirtual void ', 'void ' + apiName + '::')
funcline = funcline.replace('{};', '')
ftask.write(funcline)
ftask.write('{\n')
ftask.write("\tTask task = Task();\n")
ftask.write("\ttask.task_name = " + cbName.upper() + ";\n")
# #define constants
global define_count
fdefine.write("#define " + cbName.upper() + ' ' + str(define_count) + '\n')
define_count = define_count + 1
# switch-statement section
fswitch.write("case " + cbName.upper() + ':\n')
fswitch.write("{\n")
fswitch.write("\tthis->" + cbName.replace('On', 'process') + '(task);\n')
fswitch.write("\tbreak;\n")
fswitch.write("}\n")
fswitch.write("\n")
for i, type_ in enumerate(cbArgsTypeList):
if type_ == 'int':
ftask.write("\ttask.task_id = " + cbArgsValueList[i] + ";\n")
elif type_ == 'bool':
ftask.write("\ttask.task_last = " + cbArgsValueList[i] + ";\n")
elif 'RspInfoField' in type_:
ftask.write("\n")
ftask.write("\tif (pRspInfo)\n")
ftask.write("\t{\n")
ftask.write("\t\ttask.task_error = " + cbArgsValueList[i] + ";\n")
ftask.write("\t}\n")
ftask.write("\telse\n")
ftask.write("\t{\n")
ftask.write("\t\tCThostFtdcRspInfoField empty_error = CThostFtdcRspInfoField();\n")
ftask.write("\t\tmemset(&empty_error, 0, sizeof(empty_error));\n")
ftask.write("\t\ttask.task_error = empty_error;\n")
ftask.write("\t}\n")
else:
ftask.write("\n")
ftask.write("\tif (" + cbArgsValueList[i][1:] + ")\n")
ftask.write("\t{\n")
ftask.write("\t\ttask.task_data = " + cbArgsValueList[i] + ";\n")
ftask.write("\t}\n")
ftask.write("\telse\n")
ftask.write("\t{\n")
ftask.write("\t\t" + type_ + " empty_data = " + type_ + "();\n")
ftask.write("\t\tmemset(&empty_data, 0, sizeof(empty_data));\n")
ftask.write("\t\ttask.task_data = empty_data;\n")
ftask.write("\t}\n")
ftask.write("\tthis->task_queue.push(task);\n")
ftask.write("};\n")
ftask.write("\n")
def createProcess(cbName, cbArgsTypeList, cbArgsValueList):
# pop the task from the queue and convert it into a Python dict
fprocess.write("void " + apiName + '::' + cbName.replace('On', 'process') + '(Task task)' + "\n")
fprocess.write("{\n")
fprocess.write("\tPyLock lock;\n")
onArgsList = []
for i, type_ in enumerate(cbArgsTypeList):
if 'RspInfoField' in type_:
fprocess.write("\t"+ type_ + ' task_error = any_cast<' + type_ + '>(task.task_error);\n')
fprocess.write("\t"+ "dict error;\n")
struct = structDict[type_]
for key in struct.keys():
fprocess.write("\t"+ 'error["' + key + '"] = task_error.' + key + ';\n')
fprocess.write("\n")
onArgsList.append('error')
elif type_ in structDict:
fprocess.write("\t"+ type_ + ' task_data = any_cast<' + type_ + '>(task.task_data);\n')
fprocess.write("\t"+ "dict data;\n")
struct = structDict[type_]
for key in struct.keys():
fprocess.write("\t"+ 'data["' + key + '"] = task_data.' + key + ';\n')
fprocess.write("\n")
onArgsList.append('data')
elif type_ == 'bool':
onArgsList.append('task.task_last')
elif type_ == 'int':
onArgsList.append('task.task_id')
onArgs = join(onArgsList, ', ')
fprocess.write('\tthis->' + cbName.replace('On', 'on') + '(' + onArgs +');\n')
fprocess.write("};\n")
fprocess.write("\n")
def processFunction(line):
line = line.replace('\tvirtual int ', '') # strip the unwanted leading text
line = line.replace(') = 0;\n', '') # strip the unwanted trailing text
content = line.split('(')
fcName = content[0] # function name
fcArgs = content[1] # function arguments
fcArgs = fcArgs.replace(')', '')
fcArgsList = fcArgs.split(', ') # split the argument string into a list
fcArgsTypeList = []
fcArgsValueList = []
for arg in fcArgsList: # process each argument
content = arg.split(' ')
if len(content) > 1:
fcArgsTypeList.append(content[0]) # list of argument types
fcArgsValueList.append(content[1]) # list of argument names
if len(fcArgsTypeList) > 0 and fcArgsTypeList[0] in structDict:
createFunction(fcName, fcArgsTypeList, fcArgsValueList)
# generate the request function declarations for the .h file
if 'Req' in fcName:
req_line = 'int req' + fcName[3:] + '(dict req, int nRequestID);\n'
fheaderfunction.write(req_line)
fheaderfunction.write('\n')
def createFunction(fcName, fcArgsTypeList, fcArgsValueList):
type_ = fcArgsTypeList[0]
struct = structDict[type_]
ffunction.write('int MdApi::req' + fcName[3:] + '(dict req, int nRequestID)\n')
ffunction.write('{\n')
ffunction.write('\t' + type_ +' myreq = ' + type_ + '();\n')
ffunction.write('\tmemset(&myreq, 0, sizeof(myreq));\n')
for key, value in struct.items():
if value == 'string':
line = '\tgetStr(req, "' + key + '", myreq.' + key + ');\n'
elif value == 'char':
line = '\tgetChar(req, "' + key + '", &myreq.' + key + ');\n'
elif value == 'int':
line = '\tgetInt(req, "' + key + '", &myreq.' + key + ');\n'
elif value == 'double':
line = '\tgetDouble(req, "' + key + '", &myreq.' + key + ');\n'
ffunction.write(line)
ffunction.write('\tint i = this->api->' + fcName + '(&myreq, nRequestID);\n')
ffunction.write('\treturn i;\n')
ffunction.write('};\n')
ffunction.write('\n')
#########################################################
apiName = 'MdApi'
fcpp = open('ThostFtdcMdApi.h', 'r')
ftask = open('ctp_md_task.cpp', 'w')
fprocess = open('ctp_md_process.cpp', 'w')
ffunction = open('ctp_md_function.cpp', 'w')
fdefine = open('ctp_md_define.cpp', 'w')
fswitch = open('ctp_md_switch.cpp', 'w')
fheaderprocess = open('ctp_md_header_process.h', 'w')
fheaderon = open('ctp_md_header_on.h', 'w')
fheaderfunction = open('ctp_md_header_function.h', 'w')
fwrap = open('ctp_md_wrap.cpp', 'w')
define_count = 1
for line in fcpp:
if "\tvirtual void On" in line:
processCallBack(line)
elif "\tvirtual int" in line:
processFunction(line)
fcpp.close()
ftask.close()
fprocess.close()
ffunction.close()
fswitch.close()
fdefine.close()
fheaderprocess.close()
fheaderon.close()
fheaderfunction.close()
fwrap.close()
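# Illustrative input/output (sketch): given a header line such as
#   virtual int ReqUserLogin(CThostFtdcReqUserLoginField *pReqUserLogin, int nRequestID) = 0;
# processFunction() emits "int reqUserLogin(dict req, int nRequestID);" into
# ctp_md_header_function.h, and createFunction() writes the matching
# MdApi::reqUserLogin() body into ctp_md_function.cpp.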
|
sarvex/django
|
refs/heads/master
|
tests/base/__init__.py
|
12133432
| |
kevinlee12/oppia
|
refs/heads/develop
|
extensions/value_generators/models/__init__.py
|
12133432
| |
pelme/pytest-contextfixture
|
refs/heads/master
|
pytest_contextfixture.py
|
1
|
import pytest
from contextlib import contextmanager
from functools import wraps
def _make_fixture(fn, fixture_args, fixture_kwargs):
ctxmgr = contextmanager(fn)
@pytest.fixture(*fixture_args, **fixture_kwargs)
@wraps(fn)
def actual_fixture(request):
ctxinst = ctxmgr(request)
# TODO: Proper exception propagation?
request.addfinalizer(lambda: ctxinst.__exit__(None, None, None))
return ctxinst.__enter__()
return actual_fixture
def pytest_namespace():
def contextfixture(*args, **kwargs):
if args and callable(args[0]):
# @pytest.contextfixture
# def foo(request): ...
fn = args[0]
return _make_fixture(fn, (), {})
else:
# @pytest.contextfixture(...)
# def foo(request): ...
return lambda fn: _make_fixture(fn, args, kwargs)
return {'contextfixture': contextfixture}
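# Illustrative usage (sketch), mirroring the two decorator forms dispatched in
# pytest_namespace() above; connect() is a hypothetical setup helper:
#   @pytest.contextfixture
#   def db(request):
#       conn = connect()
#       yield conn
#       conn.close()   # runs via the addfinalizer() hook installed above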
|
SerCeMan/intellij-community
|
refs/heads/master
|
python/helpers/pycharm/nose_helper/suite.py
|
85
|
"""
Test Suites
"""
from __future__ import generators
import sys
import unittest
from nose_helper.case import Test
from nose_helper.config import Config
from nose_helper.util import isclass, resolve_name, try_run
PYTHON_VERSION_MAJOR = sys.version_info[0]
class LazySuite(unittest.TestSuite):
"""A suite that may use a generator as its list of tests
"""
def __init__(self, tests=()):
self._set_tests(tests)
def __iter__(self):
return iter(self._tests)
def __hash__(self):
return object.__hash__(self)
def addTest(self, test):
self._precache.append(test)
def __nonzero__(self):
if self._precache:
return True
if self.test_generator is None:
return False
try:
test = self.test_generator.next()
if test is not None:
self._precache.append(test)
return True
except StopIteration:
pass
return False
def _get_tests(self):
if self.test_generator is not None:
for i in self.test_generator:
yield i
for test in self._precache:
yield test
def _set_tests(self, tests):
self._precache = []
is_suite = isinstance(tests, unittest.TestSuite)
if hasattr(tests, '__call__') and not is_suite:
self.test_generator = tests()
self.test_generator_counter = list(tests())
elif is_suite:
self.addTests([tests])
self.test_generator = None
self.test_generator_counter = None
else:
self.addTests(tests)
self.test_generator = None
self.test_generator_counter = None
def countTestCases(self):
counter = 0
generator = self.test_generator_counter
if generator is not None:
for test in generator:
counter += 1
for test in self._precache:
counter += test.countTestCases()
return counter
_tests = property(_get_tests, _set_tests, None,
"Access the tests in this suite.")
class ContextSuite(LazySuite):
"""A suite with context.
"""
was_setup = False
was_torndown = False
classSetup = ('setup_class', 'setup_all', 'setupClass', 'setupAll',
'setUpClass', 'setUpAll')
classTeardown = ('teardown_class', 'teardown_all', 'teardownClass',
'teardownAll', 'tearDownClass', 'tearDownAll')
moduleSetup = ('setup_module', 'setupModule', 'setUpModule', 'setup',
'setUp')
moduleTeardown = ('teardown_module', 'teardownModule', 'tearDownModule',
'teardown', 'tearDown')
packageSetup = ('setup_package', 'setupPackage', 'setUpPackage')
packageTeardown = ('teardown_package', 'teardownPackage',
'tearDownPackage')
def __init__(self, tests=(), context=None, factory=None,
config=None):
self.context = context
self.factory = factory
if config is None:
config = Config()
self.config = config
self.has_run = False
self.error_context = None
LazySuite.__init__(self, tests)
def __hash__(self):
return object.__hash__(self)
def __call__(self, *arg, **kw):
return self.run(*arg, **kw)
def _exc_info(self):
return sys.exc_info()
def addTests(self, tests, context=None):
if context:
self.context = context
if PYTHON_VERSION_MAJOR < 3 and isinstance(tests, basestring):
raise TypeError("tests must be an iterable of tests, not a string")
else:
if isinstance(tests, str):
raise TypeError("tests must be an iterable of tests, not a string")
for test in tests:
self.addTest(test)
def run(self, result):
"""Run tests in suite inside of suite fixtures.
"""
result, orig = result, result
try:
self.setUp()
except KeyboardInterrupt:
raise
except:
self.error_context = 'setup'
result.addError(self, self._exc_info())
return
try:
for test in self._tests:
if result.shouldStop:
break
test(orig)
finally:
self.has_run = True
try:
self.tearDown()
except KeyboardInterrupt:
raise
except:
self.error_context = 'teardown'
result.addError(self, self._exc_info())
def setUp(self):
if not self:
return
if self.was_setup:
return
context = self.context
if context is None:
return
factory = self.factory
if factory:
ancestors = factory.context.get(self, [])[:]
while ancestors:
ancestor = ancestors.pop()
if ancestor in factory.was_setup:
continue
self.setupContext(ancestor)
if not context in factory.was_setup:
self.setupContext(context)
else:
self.setupContext(context)
self.was_setup = True
def setupContext(self, context):
if self.factory:
if context in self.factory.was_setup:
return
self.factory.was_setup[context] = self
if isclass(context):
names = self.classSetup
else:
names = self.moduleSetup
if hasattr(context, '__path__'):
names = self.packageSetup + names
try_run(context, names)
def tearDown(self):
if not self.was_setup or self.was_torndown:
return
self.was_torndown = True
context = self.context
if context is None:
return
factory = self.factory
if factory:
ancestors = factory.context.get(self, []) + [context]
for ancestor in ancestors:
if not ancestor in factory.was_setup:
continue
if ancestor in factory.was_torndown:
continue
setup = factory.was_setup[ancestor]
if setup is self:
self.teardownContext(ancestor)
else:
self.teardownContext(context)
def teardownContext(self, context):
if self.factory:
if context in self.factory.was_torndown:
return
self.factory.was_torndown[context] = self
if isclass(context):
names = self.classTeardown
else:
names = self.moduleTeardown
if hasattr(context, '__path__'):
names = self.packageTeardown + names
try_run(context, names)
def _get_wrapped_tests(self):
for test in self._get_tests():
if isinstance(test, Test) or isinstance(test, unittest.TestSuite):
yield test
else:
yield Test(test,
config=self.config)
_tests = property(_get_wrapped_tests, LazySuite._set_tests, None,
"Access the tests in this suite. Tests are returned "
"inside of a context wrapper.")
class ContextSuiteFactory(object):
suiteClass = ContextSuite
def __init__(self, config=None):
if config is None:
config = Config()
self.config = config
self.suites = {}
self.context = {}
self.was_setup = {}
self.was_torndown = {}
def __call__(self, tests, **kw):
"""Return 'ContextSuite' for tests.
"""
context = kw.pop('context', getattr(tests, 'context', None))
if context is None:
tests = self.wrapTests(tests)
context = self.findContext(tests)
return self.makeSuite(tests, context, **kw)
def ancestry(self, context):
"""Return the ancestry of the context
"""
if context is None:
return
if hasattr(context, 'im_class'):
context = context.im_class
if hasattr(context, '__module__'):
ancestors = context.__module__.split('.')
elif hasattr(context, '__name__'):
ancestors = context.__name__.split('.')[:-1]
else:
raise TypeError("%s has no ancestors?" % context)
while ancestors:
yield resolve_name('.'.join(ancestors))
ancestors.pop()
def findContext(self, tests):
if hasattr(tests, '__call__') or isinstance(tests, unittest.TestSuite):
return None
context = None
for test in tests:
# Don't look at suites for contexts, only tests
ctx = getattr(test, 'context', None)
if ctx is None:
continue
if context is None:
context = ctx
return context
def makeSuite(self, tests, context, **kw):
suite = self.suiteClass(
tests, context=context, config=self.config, factory=self, **kw)
if context is not None:
self.suites.setdefault(context, []).append(suite)
self.context.setdefault(suite, []).append(context)
for ancestor in self.ancestry(context):
self.suites.setdefault(ancestor, []).append(suite)
self.context[suite].append(ancestor)
return suite
def wrapTests(self, tests):
if hasattr(tests, '__call__') or isinstance(tests, unittest.TestSuite):
return tests
wrapped = []
for test in tests:
if isinstance(test, Test) or isinstance(test, unittest.TestSuite):
wrapped.append(test)
elif isinstance(test, ContextList):
wrapped.append(self.makeSuite(test, context=test.context))
else:
wrapped.append(
Test(test, config=self.config)
)
return wrapped
class ContextList(object):
"""a group of tests in a context.
"""
def __init__(self, tests, context=None):
self.tests = tests
self.context = context
def __iter__(self):
return iter(self.tests)
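# Illustrative (sketch): LazySuite also accepts a callable returning a
# generator of tests, which _set_tests() consumes lazily; SomeCase is a
# hypothetical TestCase subclass:
#   def gen():
#       yield SomeCase('test_a')
#   suite = LazySuite(gen)   # tests are pulled from gen() on demand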
|
fredsmith/will
|
refs/heads/master
|
will/acl.py
|
11
|
# -*- coding: utf-8 -*-
from . import settings
def get_acl_members(acl):
acl_members = []
acl = acl.lower()
if getattr(settings, "ACL", None):
try:
# Case-insensitive checks
for k, v in settings.ACL.items():
if k.lower() == acl:
acl_members = settings.ACL[k]
break
except AttributeError:
pass
return acl_members
def is_acl_allowed(nick, acl):
nick = nick.lower()
for a in acl:
acl_members = get_acl_members(a)
if nick in acl_members:
return True
return False
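# Illustrative check (sketch), assuming settings.ACL = {'Admins': ['alice', 'bob']}:
#   is_acl_allowed('Alice', ['admins'])   # -> True
# ACL names are matched case-insensitively above; member nicks must be stored
# in lowercase, since only the incoming nick is lowercased before the check.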
|
lucadealfaro/crowdranker
|
refs/heads/master
|
models/keystore.py
|
1
|
# -*- coding: utf-8 -*-
# Implementation of a key-value store API.
db.define_table('key_value_store',
Field('content', 'text'))
def keystore_write(v):
"""Writes a new string v to the key-value store, returning the string key."""
id = db.key_value_store.insert(content = v)
logger.info("inserting keystore key " + str(id) + " value " + str(v)[:20])
return str(id)
def keystore_read(k, default=None):
"""Returns the string read from the keystore for a key k."""
logger.info("Reading keystore key " + str(k))
try:
id = int(k)
except ValueError:
return default
except TypeError:
return default
v = db.key_value_store(id)
if v is None:
return default
return v.content
def keystore_multi_read(k_list, default=None):
"""Gets a list of keys to read and a default value, and returns a dictionary
mapping every key to the value read, using the default value if the value could
not be found."""
logger.info("Reading keystore keys: %r" % k_list)
r = {}
for k in k_list:
v = keystore_read(k, default=default)
r[k] = v
return r
def keystore_update(k, v):
"""Updates the keystore, replacing the previous value for key k
(if any) with value v. If the key k is invalid, creates a new key.
It returns the key that has been used."""
logger.info("Updating keystore for key " + str(k) + " and value: " + str(v)[:20])
try:
id = int(k)
except ValueError:
id = db.key_value_store.insert(content = v)
return str(id)
except TypeError:
id = db.key_value_store.insert(content = v)
return str(id)
db.key_value_store.update_or_insert(db.key_value_store.id == id, content = v)
return k
def keystore_delete(k):
"""Deletes the entry for key k, if present."""
logger.info("Requesting deletion of keystore key " + str(k))
try:
id = int(k)
except ValueError:
return
except TypeError:
return
db(db.key_value_store.id == id).delete()
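# Illustrative round-trip (sketch; web2py model context, where db and logger
# are supplied by the application):
#   k = keystore_write('hello')
#   assert keystore_read(k) == 'hello'
#   k = keystore_update(k, 'world')
#   keystore_delete(k)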
|
StyXman/ayrton
|
refs/heads/develop
|
ayrton/parser/astcompiler/consts.py
|
1
|
"""
Various flags used during the compilation process.
"""
CO_OPTIMIZED = 0x0001
CO_NEWLOCALS = 0x0002
CO_VARARGS = 0x0004
CO_VARKEYWORDS = 0x0008
CO_NESTED = 0x0010
CO_GENERATOR = 0x0020
CO_NOFREE = 0x0040
CO_COROUTINE = 0x0080
CO_ITERABLE_COROUTINE = 0x0100 # set by @types.coroutine
CO_GENERATOR_ALLOWED = 0x1000
CO_FUTURE_DIVISION = 0x2000
CO_FUTURE_ABSOLUTE_IMPORT = 0x4000
CO_FUTURE_WITH_STATEMENT = 0x8000
CO_FUTURE_PRINT_FUNCTION = 0x10000
CO_FUTURE_UNICODE_LITERALS = 0x20000
CO_FUTURE_BARRY_AS_BDFL = 0x40000
CO_FUTURE_GENERATOR_STOP = 0x80000
#pypy specific:
CO_KILL_DOCSTRING = 0x100000
CO_YIELD_INSIDE_TRY = 0x200000
PyCF_MASK = (CO_FUTURE_DIVISION | CO_FUTURE_ABSOLUTE_IMPORT |
CO_FUTURE_WITH_STATEMENT | CO_FUTURE_PRINT_FUNCTION |
CO_FUTURE_UNICODE_LITERALS | CO_FUTURE_BARRY_AS_BDFL |
CO_FUTURE_GENERATOR_STOP)
PyCF_SOURCE_IS_UTF8 = 0x0100
PyCF_DONT_IMPLY_DEDENT = 0x0200
PyCF_ONLY_AST = 0x0400
PyCF_IGNORE_COOKIE = 0x0800
PyCF_ACCEPT_NULL_BYTES = 0x10000000 # PyPy only, for compile()
PyCF_FOUND_ENCODING = 0x20000000 # PyPy only, for pytokenizer
# Masks and values used by FORMAT_VALUE opcode
FVC_MASK = 0x3
FVC_NONE = 0x0
FVC_STR = 0x1
FVC_REPR = 0x2
FVC_ASCII = 0x3
FVS_MASK = 0x4
FVS_HAVE_SPEC = 0x4
|
invisiblek/python-for-android
|
refs/heads/master
|
python3-alpha/python3-src/Lib/curses/panel.py
|
192
|
"""curses.panel
Module for using panels with curses.
"""
from _curses_panel import *
|
csdms-contrib/gFlex
|
refs/heads/master
|
input/run_in_script_1D.py
|
1
|
#! /usr/bin/env python
import gflex
import numpy as np
from matplotlib import pyplot as plt
flex = gflex.F1D()
flex.Quiet = True
flex.Method = 'SAS' # Solution method: * FD (finite difference)
# * SAS (superposition of analytical solutions)
# * SAS_NG (ungridded SAS)
#flex.Solver = 'direct' # direct or iterative
# convergence = 1E-3 # convergence between iterations, if an iterative solution
# method is chosen
flex.g = 9.8 # acceleration due to gravity
flex.E = 65E9 # Young's Modulus
flex.nu = 0.25 # Poisson's Ratio
flex.rho_m = 3300. # MantleDensity
flex.rho_fill = 1000. # InfillMaterialDensity
flex.Te = 30000.#*np.ones(500) # Elastic thickness -- scalar but may be an array
#flex.Te[-3:] = 0
flex.qs = np.zeros(300); flex.qs[150] += 1E6 # surface load stresses
flex.dx = 4000. # grid cell size [m]
flex.BC_W = '0Displacement0Slope' # west boundary condition
flex.BC_E = '0Moment0Shear' # east boundary condition
flex.initialize()
flex.run()
flex.finalize()
# If you want to plot the output
flex.plotChoice='combo'
# An output file for deflections could also be defined here
# flex.wOutFile =
flex.output() # Plots and/or saves output, or does nothing, depending on
# whether flex.plotChoice and/or flex.wOutFile have been set
# TO OBTAIN OUTPUT DIRECTLY IN PYTHON, you can assign the internal variable,
# flex.w, to another variable -- or as an element in a list if you are looping
# over many runs of gFlex:
deflection = flex.w
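# To inspect the result directly (sketch; matplotlib is already imported above):
#   plt.plot(np.arange(len(deflection)) * flex.dx / 1000., deflection)
#   plt.xlabel('Distance [km]'); plt.ylabel('Deflection [m]')
#   plt.show()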
|
rwillmer/django
|
refs/heads/master
|
tests/syndication_tests/tests.py
|
190
|
from __future__ import unicode_literals
import datetime
from xml.dom import minidom
from django.contrib.sites.models import Site
from django.contrib.syndication import views
from django.core.exceptions import ImproperlyConfigured
from django.test import TestCase, override_settings
from django.test.utils import requires_tz_support
from django.utils import timezone
from django.utils.feedgenerator import rfc2822_date, rfc3339_date
from .models import Article, Entry
try:
import pytz
except ImportError:
pytz = None
TZ = timezone.get_default_timezone()
class FeedTestCase(TestCase):
@classmethod
def setUpTestData(cls):
cls.e1 = Entry.objects.create(
title='My first entry', updated=datetime.datetime(1980, 1, 1, 12, 30),
published=datetime.datetime(1986, 9, 25, 20, 15, 00)
)
cls.e2 = Entry.objects.create(
title='My second entry', updated=datetime.datetime(2008, 1, 2, 12, 30),
published=datetime.datetime(2006, 3, 17, 18, 0)
)
cls.e3 = Entry.objects.create(
title='My third entry', updated=datetime.datetime(2008, 1, 2, 13, 30),
published=datetime.datetime(2005, 6, 14, 10, 45)
)
cls.e4 = Entry.objects.create(
title='A & B < C > D', updated=datetime.datetime(2008, 1, 3, 13, 30),
published=datetime.datetime(2005, 11, 25, 12, 11, 23)
)
cls.e5 = Entry.objects.create(
title='My last entry', updated=datetime.datetime(2013, 1, 20, 0, 0),
published=datetime.datetime(2013, 3, 25, 20, 0)
)
cls.a1 = Article.objects.create(title='My first article', entry=cls.e1)
def assertChildNodes(self, elem, expected):
actual = set(n.nodeName for n in elem.childNodes)
expected = set(expected)
self.assertEqual(actual, expected)
def assertChildNodeContent(self, elem, expected):
for k, v in expected.items():
self.assertEqual(
elem.getElementsByTagName(k)[0].firstChild.wholeText, v)
def assertCategories(self, elem, expected):
self.assertEqual(set(i.firstChild.wholeText for i in elem.childNodes if i.nodeName == 'category'), set(expected))
######################################
# Feed view
######################################
@override_settings(ROOT_URLCONF='syndication_tests.urls')
class SyndicationFeedTest(FeedTestCase):
"""
Tests for the high-level syndication feed framework.
"""
@classmethod
def setUpClass(cls):
super(SyndicationFeedTest, cls).setUpClass()
# This cleanup is necessary because contrib.sites cache
# makes tests interfere with each other, see #11505
Site.objects.clear_cache()
def test_rss2_feed(self):
"""
Test the structure and content of feeds generated by Rss201rev2Feed.
"""
response = self.client.get('/syndication/rss2/')
doc = minidom.parseString(response.content)
# Making sure there's only 1 `rss` element and that the correct
# RSS version was specified.
feed_elem = doc.getElementsByTagName('rss')
self.assertEqual(len(feed_elem), 1)
feed = feed_elem[0]
self.assertEqual(feed.getAttribute('version'), '2.0')
# Making sure there's only one `channel` element w/in the
# `rss` element.
chan_elem = feed.getElementsByTagName('channel')
self.assertEqual(len(chan_elem), 1)
chan = chan_elem[0]
# Find the last build date
d = Entry.objects.latest('published').published
last_build_date = rfc2822_date(timezone.make_aware(d, TZ))
self.assertChildNodes(chan, ['title', 'link', 'description', 'language', 'lastBuildDate', 'item', 'atom:link', 'ttl', 'copyright', 'category'])
self.assertChildNodeContent(chan, {
'title': 'My blog',
'description': 'A more thorough description of my blog.',
'link': 'http://example.com/blog/',
'language': 'en',
'lastBuildDate': last_build_date,
'ttl': '600',
'copyright': 'Copyright (c) 2007, Sally Smith',
})
self.assertCategories(chan, ['python', 'django'])
# Ensure the content of the channel is correct
self.assertChildNodeContent(chan, {
'title': 'My blog',
'link': 'http://example.com/blog/',
})
# Check feed_url is passed
self.assertEqual(
chan.getElementsByTagName('atom:link')[0].getAttribute('href'),
'http://example.com/syndication/rss2/'
)
# Find the pubdate of the first feed item
d = Entry.objects.get(pk=1).published
pub_date = rfc2822_date(timezone.make_aware(d, TZ))
items = chan.getElementsByTagName('item')
self.assertEqual(len(items), Entry.objects.count())
self.assertChildNodeContent(items[0], {
'title': 'My first entry',
'description': 'Overridden description: My first entry',
'link': 'http://example.com/blog/1/',
'guid': 'http://example.com/blog/1/',
'pubDate': pub_date,
'author': 'test@example.com (Sally Smith)',
})
self.assertCategories(items[0], ['python', 'testing'])
for item in items:
self.assertChildNodes(item, ['title', 'link', 'description', 'guid', 'category', 'pubDate', 'author'])
# Assert that <guid> does not have any 'isPermaLink' attribute
self.assertIsNone(item.getElementsByTagName(
'guid')[0].attributes.get('isPermaLink'))
def test_rss2_feed_guid_permalink_false(self):
"""
Test if the 'isPermaLink' attribute of <guid> element of an item
in the RSS feed is 'false'.
"""
response = self.client.get(
'/syndication/rss2/guid_ispermalink_false/')
doc = minidom.parseString(response.content)
chan = doc.getElementsByTagName(
'rss')[0].getElementsByTagName('channel')[0]
items = chan.getElementsByTagName('item')
for item in items:
self.assertEqual(
item.getElementsByTagName('guid')[0].attributes.get(
'isPermaLink').value, "false")
def test_rss2_feed_guid_permalink_true(self):
"""
Test if the 'isPermaLink' attribute of <guid> element of an item
in the RSS feed is 'true'.
"""
response = self.client.get(
'/syndication/rss2/guid_ispermalink_true/')
doc = minidom.parseString(response.content)
chan = doc.getElementsByTagName(
'rss')[0].getElementsByTagName('channel')[0]
items = chan.getElementsByTagName('item')
for item in items:
self.assertEqual(
item.getElementsByTagName('guid')[0].attributes.get(
'isPermaLink').value, "true")
def test_rss091_feed(self):
"""
Test the structure and content of feeds generated by RssUserland091Feed.
"""
response = self.client.get('/syndication/rss091/')
doc = minidom.parseString(response.content)
# Making sure there's only 1 `rss` element and that the correct
# RSS version was specified.
feed_elem = doc.getElementsByTagName('rss')
self.assertEqual(len(feed_elem), 1)
feed = feed_elem[0]
self.assertEqual(feed.getAttribute('version'), '0.91')
# Making sure there's only one `channel` element w/in the
# `rss` element.
chan_elem = feed.getElementsByTagName('channel')
self.assertEqual(len(chan_elem), 1)
chan = chan_elem[0]
self.assertChildNodes(chan, ['title', 'link', 'description', 'language', 'lastBuildDate', 'item', 'atom:link', 'ttl', 'copyright', 'category'])
# Ensure the content of the channel is correct
self.assertChildNodeContent(chan, {
'title': 'My blog',
'link': 'http://example.com/blog/',
})
self.assertCategories(chan, ['python', 'django'])
# Check feed_url is passed
self.assertEqual(
chan.getElementsByTagName('atom:link')[0].getAttribute('href'),
'http://example.com/syndication/rss091/'
)
items = chan.getElementsByTagName('item')
self.assertEqual(len(items), Entry.objects.count())
self.assertChildNodeContent(items[0], {
'title': 'My first entry',
'description': 'Overridden description: My first entry',
'link': 'http://example.com/blog/1/',
})
for item in items:
self.assertChildNodes(item, ['title', 'link', 'description'])
self.assertCategories(item, [])
def test_atom_feed(self):
"""
Test the structure and content of feeds generated by Atom1Feed.
"""
response = self.client.get('/syndication/atom/')
feed = minidom.parseString(response.content).firstChild
self.assertEqual(feed.nodeName, 'feed')
self.assertEqual(feed.getAttribute('xmlns'), 'http://www.w3.org/2005/Atom')
self.assertChildNodes(feed, ['title', 'subtitle', 'link', 'id', 'updated', 'entry', 'rights', 'category', 'author'])
for link in feed.getElementsByTagName('link'):
if link.getAttribute('rel') == 'self':
self.assertEqual(link.getAttribute('href'), 'http://example.com/syndication/atom/')
entries = feed.getElementsByTagName('entry')
self.assertEqual(len(entries), Entry.objects.count())
for entry in entries:
self.assertChildNodes(entry, [
'title',
'link',
'id',
'summary',
'category',
'updated',
'published',
'rights',
'author',
])
summary = entry.getElementsByTagName('summary')[0]
self.assertEqual(summary.getAttribute('type'), 'html')
def test_atom_feed_published_and_updated_elements(self):
"""
Test that the published and updated elements are not
the same and now adhere to RFC 4287.
"""
response = self.client.get('/syndication/atom/')
feed = minidom.parseString(response.content).firstChild
entries = feed.getElementsByTagName('entry')
published = entries[0].getElementsByTagName('published')[0].firstChild.wholeText
updated = entries[0].getElementsByTagName('updated')[0].firstChild.wholeText
self.assertNotEqual(published, updated)
def test_latest_post_date(self):
"""
Test that both the published and updated dates are
considered when determining the latest post date.
"""
# this feed has a `published` element with the latest date
response = self.client.get('/syndication/atom/')
feed = minidom.parseString(response.content).firstChild
updated = feed.getElementsByTagName('updated')[0].firstChild.wholeText
d = Entry.objects.latest('published').published
latest_published = rfc3339_date(timezone.make_aware(d, TZ))
self.assertEqual(updated, latest_published)
# this feed has an `updated` element with the latest date
response = self.client.get('/syndication/latest/')
feed = minidom.parseString(response.content).firstChild
updated = feed.getElementsByTagName('updated')[0].firstChild.wholeText
d = Entry.objects.exclude(pk=5).latest('updated').updated
latest_updated = rfc3339_date(timezone.make_aware(d, TZ))
self.assertEqual(updated, latest_updated)
def test_custom_feed_generator(self):
response = self.client.get('/syndication/custom/')
feed = minidom.parseString(response.content).firstChild
self.assertEqual(feed.nodeName, 'feed')
self.assertEqual(feed.getAttribute('django'), 'rocks')
self.assertChildNodes(feed, ['title', 'subtitle', 'link', 'id', 'updated', 'entry', 'spam', 'rights', 'category', 'author'])
entries = feed.getElementsByTagName('entry')
self.assertEqual(len(entries), Entry.objects.count())
for entry in entries:
self.assertEqual(entry.getAttribute('bacon'), 'yum')
self.assertChildNodes(entry, [
'title',
'link',
'id',
'summary',
'ministry',
'rights',
'author',
'updated',
'published',
'category',
])
summary = entry.getElementsByTagName('summary')[0]
self.assertEqual(summary.getAttribute('type'), 'html')
def test_title_escaping(self):
"""
Tests that titles are escaped correctly in RSS feeds.
"""
response = self.client.get('/syndication/rss2/')
doc = minidom.parseString(response.content)
for item in doc.getElementsByTagName('item'):
link = item.getElementsByTagName('link')[0]
if link.firstChild.wholeText == 'http://example.com/blog/4/':
title = item.getElementsByTagName('title')[0]
self.assertEqual(title.firstChild.wholeText, 'A & B < C > D')
def test_naive_datetime_conversion(self):
"""
Test that datetimes are correctly converted to the local time zone.
"""
# Naive date times passed in get converted to the local time zone, so
# check the received zone offset against the local offset.
response = self.client.get('/syndication/naive-dates/')
doc = minidom.parseString(response.content)
updated = doc.getElementsByTagName('updated')[0].firstChild.wholeText
d = Entry.objects.latest('published').published
latest = rfc3339_date(timezone.make_aware(d, TZ))
self.assertEqual(updated, latest)
def test_aware_datetime_conversion(self):
"""
Test that datetimes with timezones don't get trodden on.
"""
response = self.client.get('/syndication/aware-dates/')
doc = minidom.parseString(response.content)
published = doc.getElementsByTagName('published')[0].firstChild.wholeText
self.assertEqual(published[-6:], '+00:42')
@requires_tz_support
def test_feed_last_modified_time_naive_date(self):
"""
Tests the Last-Modified header with naive publication dates.
"""
response = self.client.get('/syndication/naive-dates/')
self.assertEqual(response['Last-Modified'], 'Tue, 26 Mar 2013 01:00:00 GMT')
def test_feed_last_modified_time(self):
"""
Tests the Last-Modified header with aware publication dates.
"""
response = self.client.get('/syndication/aware-dates/')
self.assertEqual(response['Last-Modified'], 'Mon, 25 Mar 2013 19:18:00 GMT')
# No last-modified when feed has no item_pubdate
response = self.client.get('/syndication/no_pubdate/')
self.assertFalse(response.has_header('Last-Modified'))
def test_feed_url(self):
"""
Test that the feed_url can be overridden.
"""
response = self.client.get('/syndication/feedurl/')
doc = minidom.parseString(response.content)
for link in doc.getElementsByTagName('link'):
if link.getAttribute('rel') == 'self':
self.assertEqual(link.getAttribute('href'), 'http://example.com/customfeedurl/')
def test_secure_urls(self):
"""
Test URLs are prefixed with https:// when feed is requested over HTTPS.
"""
response = self.client.get('/syndication/rss2/', **{
'wsgi.url_scheme': 'https',
})
doc = minidom.parseString(response.content)
chan = doc.getElementsByTagName('channel')[0]
self.assertEqual(
chan.getElementsByTagName('link')[0].firstChild.wholeText[0:5],
'https'
)
atom_link = chan.getElementsByTagName('atom:link')[0]
self.assertEqual(atom_link.getAttribute('href')[0:5], 'https')
for link in doc.getElementsByTagName('link'):
if link.getAttribute('rel') == 'self':
self.assertEqual(link.getAttribute('href')[0:5], 'https')
def test_item_link_error(self):
"""
Test that an ImproperlyConfigured is raised if no link could be found
for the item(s).
"""
self.assertRaises(ImproperlyConfigured,
self.client.get,
'/syndication/articles/')
def test_template_feed(self):
"""
Test that the item title and description can be overridden with
templates.
"""
response = self.client.get('/syndication/template/')
doc = minidom.parseString(response.content)
feed = doc.getElementsByTagName('rss')[0]
chan = feed.getElementsByTagName('channel')[0]
items = chan.getElementsByTagName('item')
self.assertChildNodeContent(items[0], {
'title': 'Title in your templates: My first entry\n',
'description': 'Description in your templates: My first entry\n',
'link': 'http://example.com/blog/1/',
})
def test_template_context_feed(self):
"""
Test that custom context data can be passed to templates for title
and description.
"""
response = self.client.get('/syndication/template_context/')
doc = minidom.parseString(response.content)
feed = doc.getElementsByTagName('rss')[0]
chan = feed.getElementsByTagName('channel')[0]
items = chan.getElementsByTagName('item')
self.assertChildNodeContent(items[0], {
'title': 'My first entry (foo is bar)\n',
'description': 'My first entry (foo is bar)\n',
})
def test_add_domain(self):
"""
Test add_domain() prefixes domains onto the correct URLs.
"""
self.assertEqual(
views.add_domain('example.com', '/foo/?arg=value'),
'http://example.com/foo/?arg=value'
)
self.assertEqual(
views.add_domain('example.com', '/foo/?arg=value', True),
'https://example.com/foo/?arg=value'
)
self.assertEqual(
views.add_domain('example.com', 'http://djangoproject.com/doc/'),
'http://djangoproject.com/doc/'
)
self.assertEqual(
views.add_domain('example.com', 'https://djangoproject.com/doc/'),
'https://djangoproject.com/doc/'
)
self.assertEqual(
views.add_domain('example.com', 'mailto:uhoh@djangoproject.com'),
'mailto:uhoh@djangoproject.com'
)
self.assertEqual(
views.add_domain('example.com', '//example.com/foo/?arg=value'),
'http://example.com/foo/?arg=value'
)
|
DirkHoffmann/indico
|
refs/heads/master
|
indico/modules/announcement/controllers.py
|
4
|
# This file is part of Indico.
# Copyright (C) 2002 - 2021 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from flask import flash, redirect
from indico.modules.admin import RHAdminBase
from indico.modules.announcement import announcement_settings
from indico.modules.announcement.forms import AnnouncementForm
from indico.modules.announcement.views import WPAnnouncement
from indico.util.i18n import _
from indico.web.flask.util import url_for
from indico.web.forms.base import FormDefaults
class RHAnnouncement(RHAdminBase):
def _process(self):
form = AnnouncementForm(obj=FormDefaults(**announcement_settings.get_all()))
if form.validate_on_submit():
announcement_settings.set_multi(form.data)
flash(_('Settings have been saved'), 'success')
return redirect(url_for('announcement.manage'))
return WPAnnouncement.render_template('settings.html', 'announcement', form=form)
|
nsteinme/phy
|
refs/heads/master
|
phy/io/kwik/mock.py
|
2
|
# -*- coding: utf-8 -*-
"""Mock Kwik files."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import os.path as op
import numpy as np
from ...electrode.mea import staggered_positions
from ..mock import (artificial_spike_samples,
artificial_spike_clusters,
artificial_features,
artificial_masks,
artificial_traces)
from ..h5 import open_h5
from .model import _create_clustering
#------------------------------------------------------------------------------
# Mock Kwik file
#------------------------------------------------------------------------------
def create_mock_kwik(dir_path, n_clusters=None, n_spikes=None,
n_channels=None, n_features_per_channel=None,
n_samples_traces=None,
with_kwx=True,
with_kwd=True,
add_original=True,
):
"""Create a test kwik file."""
filename = op.join(dir_path, '_test.kwik')
kwx_filename = op.join(dir_path, '_test.kwx')
kwd_filename = op.join(dir_path, '_test.raw.kwd')
# Create the kwik file.
with open_h5(filename, 'w') as f:
f.write_attr('/', 'kwik_version', 2)
def _write_metadata(key, value):
f.write_attr('/application_data/spikedetekt', key, value)
_write_metadata('sample_rate', 20000.)
# Filter parameters.
_write_metadata('filter_low', 500.)
_write_metadata('filter_high_factor', 0.95 * .5)
_write_metadata('filter_butter_order', 3)
_write_metadata('extract_s_before', 15)
_write_metadata('extract_s_after', 25)
_write_metadata('n_features_per_channel', n_features_per_channel)
# Create spike times.
spike_samples = artificial_spike_samples(n_spikes).astype(np.int64)
spike_recordings = np.zeros(n_spikes, dtype=np.uint16)
# Size of the first recording.
recording_size = 2 * n_spikes // 3
if recording_size > 0:
# Find the recording offset.
recording_offset = spike_samples[recording_size]
recording_offset += spike_samples[recording_size + 1]
recording_offset //= 2
spike_recordings[recording_size:] = 1
# Make sure the spike samples of the second recording start over.
spike_samples[recording_size:] -= spike_samples[recording_size]
spike_samples[recording_size:] += 10
else:
recording_offset = 1
if spike_samples.max() >= n_samples_traces:
raise ValueError("There are too many spikes: decrease 'n_spikes'.")
f.write('/channel_groups/1/spikes/time_samples', spike_samples)
f.write('/channel_groups/1/spikes/recording', spike_recordings)
f.write_attr('/channel_groups/1', 'channel_order',
np.arange(1, n_channels - 1)[::-1])
graph = np.array([[1, 2], [2, 3]])
f.write_attr('/channel_groups/1', 'adjacency_graph', graph)
# Create channels.
positions = staggered_positions(n_channels)
for channel in range(n_channels):
group = '/channel_groups/1/channels/{0:d}'.format(channel)
f.write_attr(group, 'name', str(channel))
f.write_attr(group, 'position', positions[channel])
# Create spike clusters.
clusterings = [('main', n_clusters)]
if add_original:
clusterings += [('original', n_clusters * 2)]
for clustering, n_clusters_rec in clusterings:
spike_clusters = artificial_spike_clusters(n_spikes,
n_clusters_rec)
groups = {0: 0, 1: 1, 2: 2}
_create_clustering(f, clustering, 1, spike_clusters, groups)
# Create recordings.
f.write_attr('/recordings/0', 'name', 'recording_0')
f.write_attr('/recordings/1', 'name', 'recording_1')
f.write_attr('/recordings/0/raw', 'hdf5_path', kwd_filename)
f.write_attr('/recordings/1/raw', 'hdf5_path', kwd_filename)
# Create the kwx file.
if with_kwx:
with open_h5(kwx_filename, 'w') as f:
f.write_attr('/', 'kwik_version', 2)
features = artificial_features(n_spikes,
(n_channels - 2) *
n_features_per_channel)
masks = artificial_masks(n_spikes,
(n_channels - 2) *
n_features_per_channel)
fm = np.dstack((features, masks)).astype(np.float32)
f.write('/channel_groups/1/features_masks', fm)
# Create the raw kwd file.
if with_kwd:
with open_h5(kwd_filename, 'w') as f:
f.write_attr('/', 'kwik_version', 2)
traces = artificial_traces(n_samples_traces, n_channels)
# TODO: int16 traces
f.write('/recordings/0/data',
traces[:recording_offset, ...].astype(np.float32))
f.write('/recordings/1/data',
traces[recording_offset:, ...].astype(np.float32))
return filename
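# Example usage (illustrative values; spike times must stay below
# n_samples_traces or the ValueError above is raised):
#
#     filename = create_mock_kwik('/tmp/kwik-test',
#                                 n_clusters=5,
#                                 n_spikes=50,
#                                 n_channels=10,
#                                 n_features_per_channel=3,
#                                 n_samples_traces=2000)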
|
coxmediagroup/django-json-rpc
|
refs/heads/master
|
test/jsontesturls.py
|
3
|
from django.conf.urls.defaults import *
from jsonrpc.site import jsonrpc_site
urlpatterns = patterns('',
url(r'^json/browse/$', 'jsonrpc.views.browse', name='jsonrpc_browser'),
url(r'^json/$', jsonrpc_site.dispatch, name='jsonrpc_mountpoint'),
    # NB: '-' placed last in the character class so it is a literal hyphen,
    # not an accidental range between '.' and '_'.
    (r'^json/(?P<method>[a-zA-Z0-9._-]+)$', jsonrpc_site.dispatch),
)
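# For these URLs to serve anything, methods have to be registered on the site
# first, typically with the decorator from the jsonrpc package (a sketch; see
# the django-json-rpc README):
#
#     from jsonrpc import jsonrpc_method
#
#     @jsonrpc_method('app.echo')
#     def echo(request, text):
#         return text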
|
jmcarp/django
|
refs/heads/master
|
django/core/management/commands/startproject.py
|
503
|
from importlib import import_module
from django.core.management.base import CommandError
from django.core.management.templates import TemplateCommand
from django.utils.crypto import get_random_string
class Command(TemplateCommand):
help = ("Creates a Django project directory structure for the given "
"project name in the current directory or optionally in the "
"given directory.")
missing_args_message = "You must provide a project name."
def handle(self, **options):
project_name, target = options.pop('name'), options.pop('directory')
self.validate_name(project_name, "project")
# Check that the project_name cannot be imported.
try:
import_module(project_name)
except ImportError:
pass
else:
raise CommandError("%r conflicts with the name of an existing "
"Python module and cannot be used as a "
"project name. Please try another name." %
project_name)
# Create a random SECRET_KEY to put it in the main settings.
chars = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
options['secret_key'] = get_random_string(50, chars)
super(Command, self).handle('project', project_name, target, **options)
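# The generated key is consumed by the project template (settings.py renders
# '{{ secret_key }}'); standalone, the same generation looks like this:
#
#     from django.utils.crypto import get_random_string
#     chars = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
#     secret_key = get_random_string(50, chars)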
|
synicalsyntax/zulip
|
refs/heads/master
|
zerver/views/development/integrations.py
|
3
|
import os
from typing import Any, Dict, List, Optional
import ujson
from django.http import HttpRequest, HttpResponse
from django.shortcuts import render
from django.test import Client
from zerver.lib.integrations import WEBHOOK_INTEGRATIONS
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_error, json_success
from zerver.lib.validator import check_bool
from zerver.lib.webhooks.common import get_fixture_http_headers, standardize_headers
from zerver.models import UserProfile, get_realm
ZULIP_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../../../')
def get_webhook_integrations() -> List[str]:
return [integration.name for integration in WEBHOOK_INTEGRATIONS]
def get_valid_integration_name(name: str) -> Optional[str]:
for integration_name in get_webhook_integrations():
if name == integration_name:
return integration_name
return None
def dev_panel(request: HttpRequest) -> HttpResponse:
integrations = get_webhook_integrations()
bots = UserProfile.objects.filter(is_bot=True, bot_type=UserProfile.INCOMING_WEBHOOK_BOT)
context = {"integrations": integrations, "bots": bots}
return render(request, "zerver/integrations/development/dev_panel.html", context)
def send_webhook_fixture_message(url: str,
body: str,
is_json: bool,
custom_headers: Dict[str, Any]) -> HttpResponse:
client = Client()
realm = get_realm("zulip")
standardized_headers = standardize_headers(custom_headers)
http_host = standardized_headers.pop("HTTP_HOST", realm.host)
if is_json:
content_type = standardized_headers.pop("HTTP_CONTENT_TYPE", "application/json")
else:
content_type = standardized_headers.pop("HTTP_CONTENT_TYPE", "text/plain")
return client.post(url, body, content_type=content_type, HTTP_HOST=http_host,
**standardized_headers)
@has_request_variables
def get_fixtures(request: HttpResponse,
integration_name: str=REQ()) -> HttpResponse:
valid_integration_name = get_valid_integration_name(integration_name)
if not valid_integration_name:
return json_error(f"\"{integration_name}\" is not a valid webhook integration.", status=404)
fixtures = {}
fixtures_dir = os.path.join(ZULIP_PATH, f"zerver/webhooks/{valid_integration_name}/fixtures")
if not os.path.exists(fixtures_dir):
msg = ("The integration \"{valid_integration_name}\" does not have fixtures.").format(
valid_integration_name=valid_integration_name)
return json_error(msg, status=404)
for fixture in os.listdir(fixtures_dir):
fixture_path = os.path.join(fixtures_dir, fixture)
with open(fixture_path) as f:
body = f.read()
try:
body = ujson.loads(body)
except ValueError:
pass # The file extension will be used to determine the type.
headers_raw = get_fixture_http_headers(valid_integration_name,
"".join(fixture.split(".")[:-1]))
def fix_name(header: str) -> str:
if header.startswith("HTTP_"): # HTTP_ is a prefix intended for Django.
return header[len("HTTP_"):]
return header
headers = {fix_name(k): v for k, v in headers_raw.items()}
fixtures[fixture] = {"body": body, "headers": headers}
return json_success({"fixtures": fixtures})
@has_request_variables
def check_send_webhook_fixture_message(request: HttpRequest,
url: str=REQ(),
body: str=REQ(),
is_json: bool=REQ(validator=check_bool),
custom_headers: str=REQ()) -> HttpResponse:
try:
custom_headers_dict = ujson.loads(custom_headers)
except ValueError as ve:
return json_error(f"Custom HTTP headers are not in a valid JSON format. {ve}") # nolint
response = send_webhook_fixture_message(url, body, is_json,
custom_headers_dict)
if response.status_code == 200:
responses = [{"status_code": response.status_code,
"message": response.content}]
return json_success({"responses": responses})
else:
return response
@has_request_variables
def send_all_webhook_fixture_messages(request: HttpRequest,
url: str=REQ(),
integration_name: str=REQ()) -> HttpResponse:
valid_integration_name = get_valid_integration_name(integration_name)
if not valid_integration_name:
return json_error(f"\"{integration_name}\" is not a valid webhook integration.", status=404)
fixtures_dir = os.path.join(ZULIP_PATH, f"zerver/webhooks/{valid_integration_name}/fixtures")
if not os.path.exists(fixtures_dir):
msg = ("The integration \"{valid_integration_name}\" does not have fixtures.").format(
valid_integration_name=valid_integration_name)
return json_error(msg, status=404)
responses = []
for fixture in os.listdir(fixtures_dir):
fixture_path = os.path.join(fixtures_dir, fixture)
with open(fixture_path) as f:
content = f.read()
x = fixture.split(".")
        fixture_name, fixture_format = "".join(x[:-1]), x[-1]
headers = get_fixture_http_headers(valid_integration_name, fixture_name)
if fixture_format == "json":
is_json = True
else:
is_json = False
response = send_webhook_fixture_message(url, content, is_json, headers)
responses.append({"status_code": response.status_code,
"fixture_name": fixture,
"message": response.content})
return json_success({"responses": responses})
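# Quick illustration of the fixture-name handling above: the extension is
# treated as the format and the remaining dot-separated parts, joined without
# dots, become the fixture name.
#
#     x = "push.event.json".split(".")
#     fixture_name, fixture_format = "".join(x[:-1]), x[-1]
#     # fixture_name == "pushevent", fixture_format == "json"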
|
thiagopnts/servo
|
refs/heads/master
|
tests/wpt/web-platform-tests/conformance-checkers/tools/dl.py
|
107
|
# -*- coding: utf-8 -*-
import os
ccdir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
template = """<!DOCTYPE html>
<meta charset=utf-8>
"""
errors = {
"dl-in-p": "<p><dl><dt>text<dd>text</dl></p>",
"header-in-dt": "<dl><dt><header>text</header><dd>text</dl>",
"footer-in-dt": "<dl><dt><footer>text</footer><dd>text</dl>",
"article-in-dt": "<dl><dt><article><h2>text</h2></article><dd>text</dl>",
"aside-in-dt": "<dl><dt><aside><h2>text</h2></aside><dd>text</dl>",
"nav-in-dt": "<dl><dt><nav><h2>text</h2></nav><dd>text</dl>",
"section-in-dt": "<dl><dt><section><h2>text</h2></section><dd>text</dl>",
"h1-in-dt": "<dl><dt><h1>text</h1><dd>text</dl>",
"h2-in-dt": "<dl><dt><h2>text</h2><dd>text</dl>",
"h3-in-dt": "<dl><dt><h3>text</h3><dd>text</dl>",
"h4-in-dt": "<dl><dt><h4>text</h4><dd>text</dl>",
"h5-in-dt": "<dl><dt><h5>text</h5><dd>text</dl>",
"h6-in-dt": "<dl><dt><h6>text</h6><dd>text</dl>",
"hgroup-in-dt": "<dl><dt><hgroup><h1>text</h1></hgroup><dd>text</dl>",
"only-dt": "<dl><dt>1</dl>",
"only-dd": "<dl><dd>a</dl>",
"first-dd": "<dl><dd>a<dt>2<dd>b</dl>",
"last-dt": "<dl><dt>1<dd>a<dt>2</dl>",
"dd-in-template": "<dl><dt>1</dt><template><dd>a</dd></template></dl>",
"dt-in-template": "<dl><template><dt>1</dt></template><dd>a</dl>",
"dl-contains-text": "<dl><dt>1</dt>x</dl>",
"dl-contains-text-2": "<dl><dt>1<dd>a</dd>x</dl>",
"dl-contains-dl": "<dl><dt>1<dd>a</dd><dl></dl></dl>",
# div
"empty-div": "<dl><div></div></dl>",
"empty-div-2": "<dl><div></div><div><dt>2<dd>b</div></dl>",
"mixed-dt-dd-div": "<dl><dt>1<dd>a</dd><div><dt>2<dd>b</div></dl>",
"mixed-div-dt-dd": "<dl><div><dt>1<dd>a</div><dt>2<dd>b</dd></dl>",
"nested-divs": "<dl><div><div><dt>1<dd>a</div></div></dl>",
"div-splitting-groups": "<dl><div><dt>1</div><div><dd>a</div></dl>",
"div-splitting-groups-2": "<dl><div><dt>1<dd>a</div><div><dd>b</div></dl>",
"div-splitting-groups-3": "<dl><div><dt>1</div><div><dt>2<dd>b</div></dl>",
"div-contains-text": "<dl><div>x</div><dt>2<dd>b</div></dl>",
"div-contains-dl": "<dl><div><dl></dl></div><dt>2<dd>b</div></dl>",
"div-multiple-groups": "<dl><div><dt>1<dd>a<dt>2<dd>a<dd>b<dt>3<dt>4<dt>5<dd>a</div></dl>",
}
non_errors_in_head = {
"parent-template-in-head": "<template><dl><dt>text<dd>text</dl></template>",
}
non_errors = {
"basic": "<dl><dt>text<dd>text</dl>",
"empty": "<dl></dl>",
"empty-dt-dd": "<dl><dt><dd></dl>",
"multiple-groups": "<dl><dt>1<dd>a<dt>2<dd>a<dd>b<dt>3<dt>4<dt>5<dd>a</dl>",
"header-in-dd": "<dl><dt>text<dd><header>text</header></dl>",
"footer-in-dd": "<dl><dt>text<dd><footer>text</footer></dl>",
"article-in-dd": "<dl><dt>text<dd><article><h2>text</h2></article></dl>",
"aside-in-dd": "<dl><dt>text<dd><aside><h2>text</h2></aside></dl>",
"nav-in-dd": "<dl><dt>text<dd><nav><h2>text</h2></nav></dl>",
"section-in-dd": "<dl><dt>text<dd><section><h2>text</h2></section></dl>",
"h1-in-dd": "<dl><dt>text<dd><h1>text</h1></dl>",
"h2-in-dd": "<dl><dt>text<dd><h2>text</h2></dl>",
"h3-in-dd": "<dl><dt>text<dd><h3>text</h3></dl>",
"h4-in-dd": "<dl><dt>text<dd><h4>text</h4></dl>",
"h5-in-dd": "<dl><dt>text<dd><h5>text</h5></dl>",
"h6-in-dd": "<dl><dt>text<dd><h6>text</h6></dl>",
"p-in-dt": "<dl><dt><p>1<p>1<dd>a</dl>",
"dl-in-dt": "<dl><dt><dl><dt>1<dd>a</dl><dd>b</dl>",
"dl-in-dd": "<dl><dt>1<dd><dl><dt>2<dd>a</dl></dl>",
"interactive": "<dl><dt><a href='#'>1</a><dd><a href='#'>a</a></dl>",
"script": "<dl><script></script></dl>",
"dt-script-dd": "<dl><dt>1</dt><script></script><dd>a</dl>",
"dt-template-dd": "<dl><dt>1</dt><template></template><dd>a</dl>",
# div
"div-basic": "<dl><div><dt>1<dd>a</div></dl>",
"div-script": "<dl><div><dt>1<dd>a</div><script></script></dl>",
"div-script-2": "<dl><div><dt>1</dt><script></script><dd>a</div></dl>",
"div-template": "<dl><div><dt>1<dd>a</div><template></template></dl>",
"div-template-2": "<dl><div><dt>1</dt><template></template><dd>a</div></dl>",
"div-multiple-groups": "<dl><div><dt>1<dd>a</div><div><dt>2<dd>a<dd>b</div><div><dt>3<dt>4<dt>5<dd>a</div></dl>",
}
for key in errors.keys():
template_error = template
template_error += '<title>invalid %s</title>\n' % key
template_error += errors[key]
file = open(os.path.join(ccdir, "html/elements/dl/%s-novalid.html" % key), 'wb')
file.write(template_error)
file.close()
file = open(os.path.join(ccdir, "html/elements/dl/dl-isvalid.html"), 'wb')
file.write(template + '<title>valid dl</title>\n')
for key in non_errors_in_head.keys():
file.write('%s <!-- %s -->\n' % (non_errors_in_head[key], key))
file.write('<body>\n')
for key in non_errors.keys():
file.write('%s <!-- %s -->\n' % (non_errors[key], key))
file.close()
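# For reference, the file generated for key "only-dt" is
# html/elements/dl/only-dt-novalid.html with this content:
#
#     <!DOCTYPE html>
#     <meta charset=utf-8>
#     <title>invalid only-dt</title>
#     <dl><dt>1</dl>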
# vim: ts=4:sw=4
|
iulian787/spack
|
refs/heads/develop
|
var/spack/repos/builtin/packages/cppzmq/package.py
|
2
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Cppzmq(CMakePackage):
"""C++ binding for 0MQ"""
homepage = "http://www.zeromq.org"
url = "https://github.com/zeromq/cppzmq/archive/v4.2.2.tar.gz"
git = "https://github.com/zeromq/cppzmq.git"
version('master', branch='master')
version('4.6.0', sha256='e9203391a0b913576153a2ad22a2dc1479b1ec325beb6c46a3237c669aef5a52')
version('4.5.0', sha256='64eb4e58eaf0c77505391c6c9a606cffcb57c6086f3431567a1ef4a25b01fa36')
version('4.4.1', sha256='117fc1ca24d98dbe1a60c072cde13be863d429134907797f8e03f654ce679385')
version('4.4.0', sha256='118b9ff117f07d1aabadfb905d7227362049d7940d16b7863b3dd3cebd28be85')
version('4.3.0', sha256='27d1f56406ba94ee779e639203218820975cf68174f92fbeae0f645df0fcada4')
version('4.2.3', sha256='3e6b57bf49115f4ae893b1ff7848ead7267013087dc7be1ab27636a97144d373')
version('4.2.2', sha256='3ef50070ac5877c06c6bb25091028465020e181bbfd08f110294ed6bc419737d')
depends_on('cmake@3.0.0:', type='build')
depends_on('libzmq')
depends_on('libzmq@4.2.2', when='@4.2.2:4.2.3')
def cmake_args(self):
args = []
# https://github.com/zeromq/cppzmq/issues/422
# https://github.com/zeromq/cppzmq/pull/288
args.append('-DCPPZMQ_BUILD_TESTS=OFF')
return args
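# Typical usage from the Spack CLI (illustrative specs):
#
#     spack install cppzmq@4.6.0
#     spack install cppzmq ^libzmq@4.3.2   # pin the libzmq dependency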
|
rghe/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/panos/panos_nat_rule.py
|
16
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016, techbizdev <techbizdev@paloaltonetworks.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = '''
---
module: panos_nat_rule
short_description: create a policy NAT rule
description: >
    - Create a policy NAT rule. Keep in mind that we can either end up configuring source NAT, destination NAT, or
      both. Instead of splitting it into two modules, we make a fair attempt to determine which one the user wants.
author: "Luigi Mori (@jtschichold), Ivan Bojer (@ivanbojer), Robert Hagen (@rnh556)"
version_added: "2.4"
requirements:
- pan-python can be obtained from PyPi U(https://pypi.org/project/pan-python/)
- pandevice can be obtained from PyPi U(https://pypi.org/project/pandevice/)
notes:
- Checkmode is not supported.
- Panorama is supported.
options:
ip_address:
description:
- IP address (or hostname) of PAN-OS device being configured.
required: true
username:
description:
- Username credentials to use for auth unless I(api_key) is set.
default: "admin"
password:
description:
- Password credentials to use for auth unless I(api_key) is set.
required: true
api_key:
description:
- API key that can be used instead of I(username)/I(password) credentials.
operation:
description:
- The action to be taken. Supported values are I(add)/I(update)/I(find)/I(delete).
rule_name:
description:
      - name of the NAT rule
required: true
source_zone:
description:
- list of source zones
required: true
destination_zone:
description:
- destination zone
required: true
source_ip:
description:
- list of source addresses
default: ["any"]
destination_ip:
description:
- list of destination addresses
default: ["any"]
service:
description:
- service
default: "any"
snat_type:
description:
- type of source translation
snat_address_type:
description:
      - type of source translation. Supported values are I(interface-address)/I(translated-address).
    default: 'interface-address'
snat_static_address:
description:
- Source NAT translated address. Used with Static-IP translation.
snat_dynamic_address:
description:
- Source NAT translated address. Used with Dynamic-IP and Dynamic-IP-and-Port.
snat_interface:
description:
- snat interface
snat_interface_address:
description:
- snat interface address
snat_bidirectional:
description:
- bidirectional flag
type: bool
default: 'no'
dnat_address:
description:
- dnat translated address
dnat_port:
description:
- dnat translated port
commit:
description:
- Commit configuration if changed.
type: bool
default: 'yes'
'''
EXAMPLES = '''
# Create a source and destination nat rule
- name: Create NAT SSH rule for 10.0.1.101
panos_nat_rule:
ip_address: '{{ ip_address }}'
username: '{{ username }}'
password: '{{ password }}'
rule_name: "Web SSH"
source_zone: ["external"]
destination_zone: "external"
    source_ip: ["any"]
    destination_ip: ["10.0.0.100"]
service: "service-tcp-221"
snat_type: "dynamic-ip-and-port"
snat_interface: "ethernet1/2"
dnat_address: "10.0.1.101"
dnat_port: "22"
'''
RETURN = '''
# Default return values
'''
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
# import pydevd
# pydevd.settrace('localhost', port=60374, stdoutToServer=True, stderrToServer=True)
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
try:
import pan.xapi
from pan.xapi import PanXapiError
import pandevice
from pandevice import base
from pandevice import firewall
from pandevice import panorama
from pandevice import objects
from pandevice import policies
import xmltodict
import json
HAS_LIB = True
except ImportError:
HAS_LIB = False
def get_devicegroup(device, devicegroup):
dg_list = device.refresh_devices()
for group in dg_list:
if isinstance(group, pandevice.panorama.DeviceGroup):
if group.name == devicegroup:
return group
return False
def get_rulebase(device, devicegroup):
# Build the rulebase
if isinstance(device, pandevice.firewall.Firewall):
rulebase = pandevice.policies.Rulebase()
device.add(rulebase)
elif isinstance(device, pandevice.panorama.Panorama):
dg = panorama.DeviceGroup(devicegroup)
device.add(dg)
rulebase = policies.PreRulebase()
dg.add(rulebase)
else:
return False
policies.NatRule.refreshall(rulebase)
return rulebase
def find_rule(rulebase, rule_name):
# Search for the rule name
rule = rulebase.find(rule_name)
if rule:
return rule
else:
return False
def create_nat_rule(**kwargs):
nat_rule = policies.NatRule(
name=kwargs['rule_name'],
description=kwargs['description'],
fromzone=kwargs['source_zone'],
source=kwargs['source_ip'],
tozone=kwargs['destination_zone'],
destination=kwargs['destination_ip'],
service=kwargs['service'],
to_interface=kwargs['to_interface'],
nat_type=kwargs['nat_type']
)
# Source translation: Static IP
if kwargs['snat_type'] in ['static-ip'] and kwargs['snat_static_address']:
nat_rule.source_translation_type = kwargs['snat_type']
nat_rule.source_translation_static_translated_address = kwargs['snat_static_address']
# Bi-directional flag set?
if kwargs['snat_bidirectional']:
nat_rule.source_translation_static_bi_directional = kwargs['snat_bidirectional']
# Source translation: Dynamic IP and port
elif kwargs['snat_type'] in ['dynamic-ip-and-port']:
nat_rule.source_translation_type = kwargs['snat_type']
nat_rule.source_translation_address_type = kwargs['snat_address_type']
# Interface address?
if kwargs['snat_interface']:
nat_rule.source_translation_interface = kwargs['snat_interface']
# Interface IP?
if kwargs['snat_interface_address']:
nat_rule.source_translation_ip_address = kwargs['snat_interface_address']
else:
nat_rule.source_translation_translated_addresses = kwargs['snat_dynamic_address']
# Source translation: Dynamic IP
elif kwargs['snat_type'] in ['dynamic-ip']:
if kwargs['snat_dynamic_address']:
nat_rule.source_translation_type = kwargs['snat_type']
nat_rule.source_translation_translated_addresses = kwargs['snat_dynamic_address']
else:
return False
# Destination translation
if kwargs['dnat_address']:
nat_rule.destination_translated_address = kwargs['dnat_address']
if kwargs['dnat_port']:
nat_rule.destination_translated_port = kwargs['dnat_port']
# Any tags?
if 'tag_name' in kwargs:
nat_rule.tag = kwargs['tag_name']
return nat_rule
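# Note: create_nat_rule() reads most of its keyword arguments unconditionally,
# so callers (see main() below) pass every parameter, using None for unset
# options; e.g. snat_type=None skips source translation entirely.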
def add_rule(rulebase, nat_rule):
if rulebase:
rulebase.add(nat_rule)
nat_rule.create()
return True
else:
return False
def update_rule(rulebase, nat_rule):
if rulebase:
rulebase.add(nat_rule)
nat_rule.apply()
return True
else:
return False
def main():
argument_spec = dict(
ip_address=dict(required=True),
username=dict(default='admin'),
password=dict(required=True, no_log=True),
api_key=dict(no_log=True),
operation=dict(required=True, choices=['add', 'update', 'delete', 'find']),
rule_name=dict(required=True),
description=dict(),
tag_name=dict(),
source_zone=dict(type='list'),
source_ip=dict(type='list', default=['any']),
destination_zone=dict(),
destination_ip=dict(type='list', default=['any']),
service=dict(default='any'),
to_interface=dict(default='any'),
snat_type=dict(choices=['static-ip', 'dynamic-ip-and-port', 'dynamic-ip']),
snat_address_type=dict(choices=['interface-address', 'translated-address'], default='interface-address'),
snat_static_address=dict(),
snat_dynamic_address=dict(type='list'),
snat_interface=dict(),
snat_interface_address=dict(),
snat_bidirectional=dict(type='bool', default=False),
dnat_address=dict(),
dnat_port=dict(),
devicegroup=dict(),
commit=dict(type='bool', default=True)
)
module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False,
required_one_of=[['api_key', 'password']])
if not HAS_LIB:
module.fail_json(msg='Missing required libraries.')
ip_address = module.params["ip_address"]
password = module.params["password"]
username = module.params['username']
api_key = module.params['api_key']
operation = module.params['operation']
rule_name = module.params['rule_name']
description = module.params['description']
tag_name = module.params['tag_name']
source_zone = module.params['source_zone']
source_ip = module.params['source_ip']
destination_zone = module.params['destination_zone']
destination_ip = module.params['destination_ip']
service = module.params['service']
to_interface = module.params['to_interface']
nat_type = 'ipv4'
snat_type = module.params['snat_type']
snat_address_type = module.params['snat_address_type']
snat_static_address = module.params['snat_static_address']
snat_dynamic_address = module.params['snat_dynamic_address']
snat_interface = module.params['snat_interface']
snat_interface_address = module.params['snat_interface_address']
snat_bidirectional = module.params['snat_bidirectional']
dnat_address = module.params['dnat_address']
dnat_port = module.params['dnat_port']
devicegroup = module.params['devicegroup']
commit = module.params['commit']
# Create the device with the appropriate pandevice type
device = base.PanDevice.create_from_device(ip_address, username, password, api_key=api_key)
# If Panorama, validate the devicegroup
dev_group = None
if devicegroup and isinstance(device, panorama.Panorama):
dev_group = get_devicegroup(device, devicegroup)
if dev_group:
device.add(dev_group)
else:
module.fail_json(msg='\'%s\' device group not found in Panorama. Is the name correct?' % devicegroup)
# Get the rulebase
rulebase = get_rulebase(device, dev_group)
# Which action shall we take on the object?
if operation == "find":
# Search for the rule
match = find_rule(rulebase, rule_name)
# If found, format and return the result
if match:
match_dict = xmltodict.parse(match.element_str())
module.exit_json(
stdout_lines=json.dumps(match_dict, indent=2),
msg='Rule matched'
)
else:
module.fail_json(msg='Rule \'%s\' not found. Is the name correct?' % rule_name)
elif operation == "delete":
# Search for the object
match = find_rule(rulebase, rule_name)
# If found, delete it
if match:
try:
match.delete()
if commit:
device.commit(sync=True)
except PanXapiError as exc:
module.fail_json(msg=to_native(exc))
module.exit_json(changed=True, msg='Rule \'%s\' successfully deleted.' % rule_name)
else:
module.fail_json(msg='Rule \'%s\' not found. Is the name correct?' % rule_name)
elif operation == "add":
# Look for required parameters
if source_zone and destination_zone and nat_type:
pass
else:
module.fail_json(msg='Missing parameter. Required: source_zone, destination_zone, nat_type')
# Search for the rule. Fail if found.
match = find_rule(rulebase, rule_name)
if match:
module.fail_json(msg='Rule \'%s\' already exists. Use operation: \'update\' to change it.' % rule_name)
else:
try:
new_rule = create_nat_rule(
rule_name=rule_name,
description=description,
tag_name=tag_name,
source_zone=source_zone,
destination_zone=destination_zone,
source_ip=source_ip,
destination_ip=destination_ip,
service=service,
to_interface=to_interface,
nat_type=nat_type,
snat_type=snat_type,
snat_address_type=snat_address_type,
snat_static_address=snat_static_address,
snat_dynamic_address=snat_dynamic_address,
snat_interface=snat_interface,
snat_interface_address=snat_interface_address,
snat_bidirectional=snat_bidirectional,
dnat_address=dnat_address,
dnat_port=dnat_port
)
changed = add_rule(rulebase, new_rule)
if changed and commit:
device.commit(sync=True)
except PanXapiError as exc:
module.fail_json(msg=to_native(exc))
module.exit_json(changed=changed, msg='Rule \'%s\' successfully added.' % rule_name)
elif operation == 'update':
# Search for the rule. Update if found.
match = find_rule(rulebase, rule_name)
if match:
try:
new_rule = create_nat_rule(
rule_name=rule_name,
description=description,
tag_name=tag_name,
source_zone=source_zone,
destination_zone=destination_zone,
source_ip=source_ip,
destination_ip=destination_ip,
service=service,
to_interface=to_interface,
nat_type=nat_type,
snat_type=snat_type,
snat_address_type=snat_address_type,
snat_static_address=snat_static_address,
snat_dynamic_address=snat_dynamic_address,
snat_interface=snat_interface,
snat_interface_address=snat_interface_address,
snat_bidirectional=snat_bidirectional,
dnat_address=dnat_address,
dnat_port=dnat_port
)
changed = update_rule(rulebase, new_rule)
if changed and commit:
device.commit(sync=True)
except PanXapiError as exc:
module.fail_json(msg=to_native(exc))
module.exit_json(changed=changed, msg='Rule \'%s\' successfully updated.' % rule_name)
else:
module.fail_json(msg='Rule \'%s\' does not exist. Use operation: \'add\' to add it.' % rule_name)
if __name__ == '__main__':
main()
|
ToonTownInfiniteRepo/ToontownInfinite
|
refs/heads/master
|
Panda3D-1.9.0/python/Lib/encodings/johab.py
|
816
|
#
# johab.py: Python Unicode Codec for JOHAB
#
# Written by Hye-Shik Chang <perky@FreeBSD.org>
#
import _codecs_kr, codecs
import _multibytecodec as mbc
codec = _codecs_kr.getcodec('johab')
class Codec(codecs.Codec):
encode = codec.encode
decode = codec.decode
class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
codecs.IncrementalEncoder):
codec = codec
class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
codecs.IncrementalDecoder):
codec = codec
class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
codec = codec
class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
codec = codec
def getregentry():
return codecs.CodecInfo(
name='johab',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
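# Example round-trip through this codec once registered (illustrative):
#
#     import codecs
#     info = codecs.lookup('johab')          # resolved via getregentry()
#     data, length = info.encode(u'\ud55c')  # U+D55C HANGUL SYLLABLE HAN
#     text, length = info.decode(data)
#     assert text == u'\ud55c'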
|
cntnboys/410Lab6
|
refs/heads/master
|
build/django/django/conf/locale/ko/__init__.py
|
12133432
| |
doduytrung/odoo-8.0
|
refs/heads/master
|
addons/procurement_jit_stock/procurement_jit_stock.py
|
44
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2013 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
class procurement_order(osv.osv):
_inherit = "procurement.order"
def run(self, cr, uid, ids, context=None):
context = dict(context or {}, procurement_autorun_defer=True)
res = super(procurement_order, self).run(cr, uid, ids, context=context)
procurement_ids = self.search(cr, uid, [('move_dest_id.procurement_id', 'in', ids)], order='id', context=context)
if procurement_ids:
return self.run(cr, uid, procurement_ids, context=context)
return res
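    # run() above defers the standard autorun (procurement_autorun_defer=True),
    # then immediately re-runs any procurements chained through move_dest_id,
    # so a whole just-in-time chain is processed in one pass.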
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
IPMITMO/statan
|
refs/heads/master
|
coala/tests/bearlib/aspects/conftest.py
|
12
|
from coalib.bearlib.aspects import Taste, aspectclass
from coalib.bearlib.aspects.base import aspectbase
import pytest
@pytest.fixture
def RootAspect():
"""
An exclusive Root aspectclass for unit tests.
"""
class RootAspect(aspectbase, metaclass=aspectclass):
parent = None
_tastes = {}
return RootAspect
@pytest.fixture
def SubAspect_tastes():
"""
Taste definitions for an exclusive SubAspect class for unit tests.
"""
return {
'salty': Taste[str](
'The saltiness', ('high', 'low'), default='low'),
'sweet': Taste[int](
'The sweetness', (1, 23, 45), default=23,
languages=('py', )),
'sour': Taste[bool](
'Is it sour?', (True, False), default=False),
}
@pytest.fixture
def SubAspect_taste_values():
"""
Taste definitions for an exclusive SubAspect class for unit tests.
"""
return {
'salty': 'high',
'sweet': 45,
'sour': True,
}
@pytest.fixture
def SubAspect_docs():
"""
Docs definitions for an exclusive SubAspect class for unit tests.
"""
class docs:
example = 'An example'
example_language = 'The example language'
importance_reason = 'The reason of importance'
fix_suggestions = 'Suggestions for fixing'
return docs
@pytest.fixture
def SubAspect(RootAspect, SubAspect_docs, SubAspect_tastes):
"""
An exclusive SubAspect class for unit tests.
"""
@RootAspect.subaspect
class SubAspect:
"""
Definition
"""
docs = SubAspect_docs
salty = SubAspect_tastes['salty']
sweet = SubAspect_tastes['sweet']
sour = SubAspect_tastes['sour']
return SubAspect
@pytest.fixture
def SubSubAspect_tastes():
"""
Taste definitions for an exclusive SubSubAspect class for unit tests.
"""
return {
'salty': Taste[str](
'The saltiness', ('high', 'low'), default='low'),
'sweet': Taste[int](
'The sweetness', (1, 23, 45), default=23,
languages=('py', )),
'sour': Taste[bool](
'Is it sour?', (True, False), default=False),
}
@pytest.fixture
def SubSubAspect_taste_values():
"""
Taste definitions for an exclusive SubSubAspect class for unit tests.
"""
return {
'salty': 'high',
'sweet': 45,
'sour': True,
}
@pytest.fixture
def SubSubAspect_docs():
"""
Docs definitions for an exclusive SubSubAspect class for unit tests.
"""
class docs:
example = 'An example'
example_language = 'The example language'
importance_reason = 'The reason of importance'
fix_suggestions = 'Suggestions for fixing'
return docs
@pytest.fixture
def SubSubAspect(SubAspect, SubSubAspect_docs, SubAspect_tastes):
"""
An exclusive SubAspect class for unit tests.
"""
@SubAspect.subaspect
class SubSubAspect:
"""
Definition
"""
docs = SubAspect_docs
salty = SubAspect_tastes['salty']
sweet = SubAspect_tastes['sweet']
sour = SubAspect_tastes['sour']
return SubSubAspect
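# Example of how these fixtures compose in a test (a sketch; the parent link
# set by the subaspect decorator is assumed from the fixtures above):
#
#     def test_subaspect_parent(RootAspect, SubAspect):
#         assert SubAspect.parent is RootAspect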
|
gxyang/hstore
|
refs/heads/master
|
third_party/python/boto/mashups/server.py
|
91
|
# Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
High-level abstraction of an EC2 server
"""
import boto
import boto.utils
from boto.mashups.iobject import IObject
from boto.pyami.config import Config, BotoConfigPath
from boto.mashups.interactive import interactive_shell
from boto.sdb.db.model import Model
from boto.sdb.db.property import StringProperty
import os
import StringIO
class ServerSet(list):
def __getattr__(self, name):
results = []
is_callable = False
for server in self:
try:
val = getattr(server, name)
if callable(val):
is_callable = True
results.append(val)
except:
results.append(None)
if is_callable:
self.map_list = results
return self.map
return results
def map(self, *args):
results = []
for fn in self.map_list:
results.append(fn(*args))
return results
class Server(Model):
@property
def ec2(self):
if self._ec2 is None:
self._ec2 = boto.connect_ec2()
return self._ec2
@classmethod
def Inventory(cls):
"""
Returns a list of Server instances, one for each Server object
persisted in the db
"""
l = ServerSet()
rs = cls.find()
for server in rs:
l.append(server)
return l
@classmethod
def Register(cls, name, instance_id, description=''):
s = cls()
s.name = name
s.instance_id = instance_id
s.description = description
s.save()
return s
def __init__(self, id=None, **kw):
Model.__init__(self, id, **kw)
self._reservation = None
self._instance = None
self._ssh_client = None
self._pkey = None
self._config = None
self._ec2 = None
name = StringProperty(unique=True, verbose_name="Name")
instance_id = StringProperty(verbose_name="Instance ID")
config_uri = StringProperty()
ami_id = StringProperty(verbose_name="AMI ID")
zone = StringProperty(verbose_name="Availability Zone")
security_group = StringProperty(verbose_name="Security Group", default="default")
key_name = StringProperty(verbose_name="Key Name")
elastic_ip = StringProperty(verbose_name="Elastic IP")
instance_type = StringProperty(verbose_name="Instance Type")
description = StringProperty(verbose_name="Description")
log = StringProperty()
def setReadOnly(self, value):
raise AttributeError
def getInstance(self):
if not self._instance:
if self.instance_id:
try:
rs = self.ec2.get_all_instances([self.instance_id])
except:
return None
if len(rs) > 0:
self._reservation = rs[0]
self._instance = self._reservation.instances[0]
return self._instance
instance = property(getInstance, setReadOnly, None, 'The Instance for the server')
def getAMI(self):
if self.instance:
return self.instance.image_id
ami = property(getAMI, setReadOnly, None, 'The AMI for the server')
def getStatus(self):
if self.instance:
self.instance.update()
return self.instance.state
status = property(getStatus, setReadOnly, None,
'The status of the server')
def getHostname(self):
if self.instance:
return self.instance.public_dns_name
hostname = property(getHostname, setReadOnly, None,
'The public DNS name of the server')
def getPrivateHostname(self):
if self.instance:
return self.instance.private_dns_name
private_hostname = property(getPrivateHostname, setReadOnly, None,
'The private DNS name of the server')
def getLaunchTime(self):
if self.instance:
return self.instance.launch_time
launch_time = property(getLaunchTime, setReadOnly, None,
'The time the Server was started')
def getConsoleOutput(self):
if self.instance:
return self.instance.get_console_output()
console_output = property(getConsoleOutput, setReadOnly, None,
'Retrieve the console output for server')
def getGroups(self):
if self._reservation:
return self._reservation.groups
else:
return None
groups = property(getGroups, setReadOnly, None,
'The Security Groups controlling access to this server')
def getConfig(self):
if not self._config:
remote_file = BotoConfigPath
local_file = '%s.ini' % self.instance.id
self.get_file(remote_file, local_file)
self._config = Config(local_file)
return self._config
def setConfig(self, config):
local_file = '%s.ini' % self.instance.id
        fp = open(local_file, 'w')
config.write(fp)
fp.close()
self.put_file(local_file, BotoConfigPath)
self._config = config
config = property(getConfig, setConfig, None,
'The instance data for this server')
def set_config(self, config):
"""
Set SDB based config
"""
self._config = config
self._config.dump_to_sdb("botoConfigs", self.id)
def load_config(self):
self._config = Config(do_load=False)
self._config.load_from_sdb("botoConfigs", self.id)
def stop(self):
if self.instance:
self.instance.stop()
def start(self):
self.stop()
ec2 = boto.connect_ec2()
ami = ec2.get_all_images(image_ids = [str(self.ami_id)])[0]
groups = ec2.get_all_security_groups(groupnames=[str(self.security_group)])
if not self._config:
self.load_config()
if not self._config.has_section("Credentials"):
self._config.add_section("Credentials")
self._config.set("Credentials", "aws_access_key_id", ec2.aws_access_key_id)
self._config.set("Credentials", "aws_secret_access_key", ec2.aws_secret_access_key)
if not self._config.has_section("Pyami"):
self._config.add_section("Pyami")
if self._manager.domain:
self._config.set('Pyami', 'server_sdb_domain', self._manager.domain.name)
self._config.set("Pyami", 'server_sdb_name', self.name)
cfg = StringIO.StringIO()
self._config.write(cfg)
cfg = cfg.getvalue()
r = ami.run(min_count=1,
max_count=1,
key_name=self.key_name,
security_groups = groups,
instance_type = self.instance_type,
placement = self.zone,
user_data = cfg)
i = r.instances[0]
self.instance_id = i.id
self.put()
if self.elastic_ip:
ec2.associate_address(self.instance_id, self.elastic_ip)
def reboot(self):
if self.instance:
self.instance.reboot()
def get_ssh_client(self, key_file=None, host_key_file='~/.ssh/known_hosts',
uname='root'):
import paramiko
if not self.instance:
print 'No instance yet!'
return
if not self._ssh_client:
if not key_file:
iobject = IObject()
key_file = iobject.get_filename('Path to OpenSSH Key file')
self._pkey = paramiko.RSAKey.from_private_key_file(key_file)
self._ssh_client = paramiko.SSHClient()
self._ssh_client.load_system_host_keys()
self._ssh_client.load_host_keys(os.path.expanduser(host_key_file))
self._ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
self._ssh_client.connect(self.instance.public_dns_name,
username=uname, pkey=self._pkey)
return self._ssh_client
def get_file(self, remotepath, localpath):
ssh_client = self.get_ssh_client()
sftp_client = ssh_client.open_sftp()
sftp_client.get(remotepath, localpath)
def put_file(self, localpath, remotepath):
ssh_client = self.get_ssh_client()
sftp_client = ssh_client.open_sftp()
sftp_client.put(localpath, remotepath)
def listdir(self, remotepath):
ssh_client = self.get_ssh_client()
sftp_client = ssh_client.open_sftp()
return sftp_client.listdir(remotepath)
def shell(self, key_file=None):
ssh_client = self.get_ssh_client(key_file)
channel = ssh_client.invoke_shell()
interactive_shell(channel)
def bundle_image(self, prefix, key_file, cert_file, size):
print 'bundling image...'
print '\tcopying cert and pk over to /mnt directory on server'
ssh_client = self.get_ssh_client()
sftp_client = ssh_client.open_sftp()
path, name = os.path.split(key_file)
remote_key_file = '/mnt/%s' % name
self.put_file(key_file, remote_key_file)
path, name = os.path.split(cert_file)
remote_cert_file = '/mnt/%s' % name
self.put_file(cert_file, remote_cert_file)
print '\tdeleting %s' % BotoConfigPath
# delete the metadata.ini file if it exists
try:
sftp_client.remove(BotoConfigPath)
except:
pass
command = 'sudo ec2-bundle-vol '
command += '-c %s -k %s ' % (remote_cert_file, remote_key_file)
command += '-u %s ' % self._reservation.owner_id
command += '-p %s ' % prefix
command += '-s %d ' % size
command += '-d /mnt '
        if self.instance.instance_type == 'm1.small' or self.instance.instance_type == 'c1.medium':
command += '-r i386'
else:
command += '-r x86_64'
print '\t%s' % command
t = ssh_client.exec_command(command)
response = t[1].read()
print '\t%s' % response
print '\t%s' % t[2].read()
print '...complete!'
def upload_bundle(self, bucket, prefix):
print 'uploading bundle...'
command = 'ec2-upload-bundle '
command += '-m /mnt/%s.manifest.xml ' % prefix
command += '-b %s ' % bucket
command += '-a %s ' % self.ec2.aws_access_key_id
command += '-s %s ' % self.ec2.aws_secret_access_key
print '\t%s' % command
ssh_client = self.get_ssh_client()
t = ssh_client.exec_command(command)
response = t[1].read()
print '\t%s' % response
print '\t%s' % t[2].read()
print '...complete!'
def create_image(self, bucket=None, prefix=None, key_file=None, cert_file=None, size=None):
iobject = IObject()
if not bucket:
bucket = iobject.get_string('Name of S3 bucket')
if not prefix:
prefix = iobject.get_string('Prefix for AMI file')
if not key_file:
key_file = iobject.get_filename('Path to RSA private key file')
if not cert_file:
cert_file = iobject.get_filename('Path to RSA public cert file')
if not size:
size = iobject.get_int('Size (in MB) of bundled image')
self.bundle_image(prefix, key_file, cert_file, size)
self.upload_bundle(bucket, prefix)
print 'registering image...'
self.image_id = self.ec2.register_image('%s/%s.manifest.xml' % (bucket, prefix))
return self.image_id
def attach_volume(self, volume, device="/dev/sdp"):
"""
Attach an EBS volume to this server
:param volume: EBS Volume to attach
:type volume: boto.ec2.volume.Volume
:param device: Device to attach to (default to /dev/sdp)
:type device: string
"""
if hasattr(volume, "id"):
volume_id = volume.id
else:
volume_id = volume
return self.ec2.attach_volume(volume_id=volume_id, instance_id=self.instance_id, device=device)
def detach_volume(self, volume):
"""
Detach an EBS volume from this server
:param volume: EBS Volume to detach
:type volume: boto.ec2.volume.Volume
"""
if hasattr(volume, "id"):
volume_id = volume.id
else:
volume_id = volume
return self.ec2.detach_volume(volume_id=volume_id, instance_id=self.instance_id)
def install_package(self, package_name):
print 'installing %s...' % package_name
command = 'yum -y install %s' % package_name
print '\t%s' % command
ssh_client = self.get_ssh_client()
t = ssh_client.exec_command(command)
response = t[1].read()
print '\t%s' % response
print '\t%s' % t[2].read()
print '...complete!'
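# Typical interactive usage (illustrative instance id and paths):
#
#     s = Server.Register('web1', 'i-12345678', 'demo box')
#     print s.hostname
#     s.shell(key_file='~/.ssh/mykey.pem')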
|
wvengen/ndg_oauth_server
|
refs/heads/master
|
ndg/oauth/server/lib/authorize/authorizer_interface.py
|
3
|
"""OAuth 2.0 WSGI server middleware providing MyProxy certificates as access tokens
"""
__author__ = "R B Wilkinson"
__date__ = "12/12/11"
__copyright__ = "(C) 2011 Science and Technology Facilities Council"
__license__ = "BSD - see LICENSE file in top-level directory"
__contact__ = "Philip.Kershaw@stfc.ac.uk"
__revision__ = "$Id$"
from abc import ABCMeta, abstractmethod
class AuthorizerInterface(object):
"""
Interface for generation of authorization grants.
"""
__metaclass__ = ABCMeta
@abstractmethod
def __init__(self, lifetime, **kw):
"""
@type lifetime: int
@param lifetime: lifetimes of generated tokens in seconds
@type kw:dict
@param kw: additional keywords
"""
pass
@abstractmethod
def generate_authorization_grant(self, auth_request, request):
"""Generates an authorization grant.
@type auth_request: ndgoauthserver.lib.oauth.authorize.AuthorizeRequest
@param auth_request: authorization request
@type request: webob.Request
@param request: HTTP request object
@rtype: tuple (
ndgoauthserver.lib.register.authorization_grant.AuthorizationGrant
str
)
@return: tuple (
authorization grant
authorization code
)
"""
return None
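# Minimal concrete subclass sketch; the grant/code generation below is purely
# illustrative (a real implementation would build and persist an
# AuthorizationGrant from ndg.oauth.server.lib.register.authorization_grant).
import base64
import os

class DummyAuthorizer(AuthorizerInterface):
    def __init__(self, lifetime, **kw):
        self.lifetime = lifetime

    def generate_authorization_grant(self, auth_request, request):
        # Opaque, URL-safe authorization code; no grant object is persisted.
        code = base64.urlsafe_b64encode(os.urandom(24))
        return None, code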
|
nhejazi/scikit-learn
|
refs/heads/master
|
sklearn/linear_model/tests/test_huber.py
|
26
|
# Authors: Manoj Kumar mks542@nyu.edu
# License: BSD 3 clause
import numpy as np
from scipy import optimize, sparse
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import assert_false
from sklearn.datasets import make_regression
from sklearn.linear_model import (
HuberRegressor, LinearRegression, SGDRegressor, Ridge)
from sklearn.linear_model.huber import _huber_loss_and_gradient
def make_regression_with_outliers(n_samples=50, n_features=20):
rng = np.random.RandomState(0)
# Generate data with outliers by replacing 10% of the samples with noise.
X, y = make_regression(
n_samples=n_samples, n_features=n_features,
random_state=0, noise=0.05)
# Replace 10% of the sample with noise.
num_noise = int(0.1 * n_samples)
random_samples = rng.randint(0, n_samples, num_noise)
X[random_samples, :] = 2.0 * rng.normal(0, 1, (num_noise, X.shape[1]))
return X, y
def test_huber_equals_lr_for_high_epsilon():
# Test that Ridge matches LinearRegression for large epsilon
X, y = make_regression_with_outliers()
lr = LinearRegression(fit_intercept=True)
lr.fit(X, y)
huber = HuberRegressor(fit_intercept=True, epsilon=1e3, alpha=0.0)
huber.fit(X, y)
assert_almost_equal(huber.coef_, lr.coef_, 3)
assert_almost_equal(huber.intercept_, lr.intercept_, 2)
def test_huber_gradient():
# Test that the gradient calculated by _huber_loss_and_gradient is correct
rng = np.random.RandomState(1)
X, y = make_regression_with_outliers()
sample_weight = rng.randint(1, 3, (y.shape[0]))
loss_func = lambda x, *args: _huber_loss_and_gradient(x, *args)[0]
grad_func = lambda x, *args: _huber_loss_and_gradient(x, *args)[1]
# Check using optimize.check_grad that the gradients are equal.
for _ in range(5):
# Check for both fit_intercept and otherwise.
for n_features in [X.shape[1] + 1, X.shape[1] + 2]:
w = rng.randn(n_features)
w[-1] = np.abs(w[-1])
grad_same = optimize.check_grad(
loss_func, grad_func, w, X, y, 0.01, 0.1, sample_weight)
assert_almost_equal(grad_same, 1e-6, 4)
def test_huber_sample_weights():
    # Test sample_weights implementation in HuberRegressor
X, y = make_regression_with_outliers()
huber = HuberRegressor(fit_intercept=True)
huber.fit(X, y)
huber_coef = huber.coef_
huber_intercept = huber.intercept_
# Rescale coefs before comparing with assert_array_almost_equal to make sure
# that the number of decimal places used is somewhat insensitive to the
# amplitude of the coefficients and therefore to the scale of the data
# and the regularization parameter
scale = max(np.mean(np.abs(huber.coef_)),
np.mean(np.abs(huber.intercept_)))
huber.fit(X, y, sample_weight=np.ones(y.shape[0]))
assert_array_almost_equal(huber.coef_ / scale, huber_coef / scale)
assert_array_almost_equal(huber.intercept_ / scale,
huber_intercept / scale)
X, y = make_regression_with_outliers(n_samples=5, n_features=20)
X_new = np.vstack((X, np.vstack((X[1], X[1], X[3]))))
y_new = np.concatenate((y, [y[1]], [y[1]], [y[3]]))
huber.fit(X_new, y_new)
huber_coef = huber.coef_
huber_intercept = huber.intercept_
sample_weight = np.ones(X.shape[0])
sample_weight[1] = 3
sample_weight[3] = 2
huber.fit(X, y, sample_weight=sample_weight)
assert_array_almost_equal(huber.coef_ / scale, huber_coef / scale)
assert_array_almost_equal(huber.intercept_ / scale,
huber_intercept / scale)
# Test sparse implementation with sample weights.
X_csr = sparse.csr_matrix(X)
huber_sparse = HuberRegressor(fit_intercept=True)
huber_sparse.fit(X_csr, y, sample_weight=sample_weight)
assert_array_almost_equal(huber_sparse.coef_ / scale,
huber_coef / scale)
def test_huber_sparse():
X, y = make_regression_with_outliers()
huber = HuberRegressor(fit_intercept=True, alpha=0.1)
huber.fit(X, y)
X_csr = sparse.csr_matrix(X)
huber_sparse = HuberRegressor(fit_intercept=True, alpha=0.1)
huber_sparse.fit(X_csr, y)
assert_array_almost_equal(huber_sparse.coef_, huber.coef_)
assert_array_equal(huber.outliers_, huber_sparse.outliers_)
def test_huber_scaling_invariant():
# Test that outliers filtering is scaling independent.
X, y = make_regression_with_outliers()
huber = HuberRegressor(fit_intercept=False, alpha=0.0, max_iter=100)
huber.fit(X, y)
n_outliers_mask_1 = huber.outliers_
assert_false(np.all(n_outliers_mask_1))
huber.fit(X, 2. * y)
n_outliers_mask_2 = huber.outliers_
assert_array_equal(n_outliers_mask_2, n_outliers_mask_1)
huber.fit(2. * X, 2. * y)
n_outliers_mask_3 = huber.outliers_
assert_array_equal(n_outliers_mask_3, n_outliers_mask_1)
def test_huber_and_sgd_same_results():
# Test they should converge to same coefficients for same parameters
X, y = make_regression_with_outliers(n_samples=10, n_features=2)
# Fit once to find out the scale parameter. Scale down X and y by scale
# so that the scale parameter is optimized to 1.0
huber = HuberRegressor(fit_intercept=False, alpha=0.0, max_iter=100,
epsilon=1.35)
huber.fit(X, y)
X_scale = X / huber.scale_
y_scale = y / huber.scale_
huber.fit(X_scale, y_scale)
assert_almost_equal(huber.scale_, 1.0, 3)
sgdreg = SGDRegressor(
alpha=0.0, loss="huber", shuffle=True, random_state=0, max_iter=10000,
fit_intercept=False, epsilon=1.35, tol=None)
sgdreg.fit(X_scale, y_scale)
assert_array_almost_equal(huber.coef_, sgdreg.coef_, 1)
def test_huber_warm_start():
X, y = make_regression_with_outliers()
huber_warm = HuberRegressor(
fit_intercept=True, alpha=1.0, max_iter=10000, warm_start=True, tol=1e-1)
huber_warm.fit(X, y)
huber_warm_coef = huber_warm.coef_.copy()
huber_warm.fit(X, y)
# SciPy performs the tol check after doing the coef updates, so
# these would be almost same but not equal.
assert_array_almost_equal(huber_warm.coef_, huber_warm_coef, 1)
# No n_iter_ in old SciPy (<=0.9)
if huber_warm.n_iter_ is not None:
assert_equal(0, huber_warm.n_iter_)
def test_huber_better_r2_score():
    # Test that Huber scores better than Ridge on non-outliers (and worse on outliers).
X, y = make_regression_with_outliers()
huber = HuberRegressor(fit_intercept=True, alpha=0.01, max_iter=100)
huber.fit(X, y)
linear_loss = np.dot(X, huber.coef_) + huber.intercept_ - y
mask = np.abs(linear_loss) < huber.epsilon * huber.scale_
huber_score = huber.score(X[mask], y[mask])
huber_outlier_score = huber.score(X[~mask], y[~mask])
# The Ridge regressor should be influenced by the outliers and hence
# give a worse score on the non-outliers as compared to the huber regressor.
ridge = Ridge(fit_intercept=True, alpha=0.01)
ridge.fit(X, y)
ridge_score = ridge.score(X[mask], y[mask])
ridge_outlier_score = ridge.score(X[~mask], y[~mask])
assert_greater(huber_score, ridge_score)
# The huber model should also fit poorly on the outliers.
assert_greater(ridge_outlier_score, huber_outlier_score)
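# Quick standalone use of the estimator under test (illustrative):
#
#     X, y = make_regression_with_outliers()
#     huber = HuberRegressor(fit_intercept=True, alpha=0.01).fit(X, y)
#     print(huber.score(X, y), huber.outliers_.sum())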
|
Acidburn0zzz/servo
|
refs/heads/master
|
tests/wpt/web-platform-tests/tools/third_party/pytest/src/pytest.py
|
34
|
# PYTHON_ARGCOMPLETE_OK
"""
pytest: unit and functional testing with Python.
"""
# else we are imported
from _pytest.config import main, UsageError, cmdline, hookspec, hookimpl
from _pytest.fixtures import fixture, yield_fixture
from _pytest.assertion import register_assert_rewrite
from _pytest.freeze_support import freeze_includes
from _pytest import __version__
from _pytest.debugging import pytestPDB as __pytestPDB
from _pytest.recwarn import warns, deprecated_call
from _pytest.outcomes import fail, skip, importorskip, exit, xfail
from _pytest.mark import MARK_GEN as mark, param
from _pytest.main import Session
from _pytest.nodes import Item, Collector, File
from _pytest.fixtures import fillfixtures as _fillfuncargs
from _pytest.python import Module, Class, Instance, Function, Generator
from _pytest.python_api import approx, raises
set_trace = __pytestPDB.set_trace
__all__ = [
"main",
"UsageError",
"cmdline",
"hookspec",
"hookimpl",
"__version__",
"register_assert_rewrite",
"freeze_includes",
"set_trace",
"warns",
"deprecated_call",
"fixture",
"yield_fixture",
"fail",
"skip",
"xfail",
"importorskip",
"exit",
"mark",
"param",
"approx",
"_fillfuncargs",
"Item",
"File",
"Collector",
"Session",
"Module",
"Class",
"Instance",
"Function",
"Generator",
"raises",
]
if __name__ == "__main__":
# if run as a script or by 'python -m pytest'
# we trigger the below "else" condition by the following import
import pytest
raise SystemExit(pytest.main())
else:
from _pytest.compat import _setup_collect_fakemodule
_setup_collect_fakemodule()
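# Programmatic invocation mirrors the command line (illustrative path):
#
#     import pytest
#     exit_code = pytest.main(["-q", "tests/"])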
|
mollstam/UnrealPy
|
refs/heads/master
|
UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/django-1.8.2/tests/m2o_recursive/__init__.py
|
12133432
| |
CXQERP/ODOOERP
|
refs/heads/master
|
addons/web/tests/common.py
|
12133432
| |
oppia/oppia
|
refs/heads/develop
|
jobs/io/__init__.py
|
12133432
| |
XiaJieCom/change
|
refs/heads/master
|
stu103151/days15/mysite/IDC/migrations/__init__.py
|
12133432
| |
akaariai/django
|
refs/heads/master
|
tests/template_tests/syntax_tests/test_include.py
|
31
|
from django.template import (
Context, Template, TemplateDoesNotExist, TemplateSyntaxError, engines,
)
from django.test import SimpleTestCase, override_settings
from ..utils import setup
from .test_basic import basic_templates
include_fail_templates = {
'include-fail1': '{% load bad_tag %}{% badtag %}',
'include-fail2': '{% load broken_tag %}',
}
class IncludeTagTests(SimpleTestCase):
@setup({'include01': '{% include "basic-syntax01" %}'}, basic_templates)
def test_include01(self):
output = self.engine.render_to_string('include01')
self.assertEqual(output, 'something cool')
@setup({'include02': '{% include "basic-syntax02" %}'}, basic_templates)
def test_include02(self):
output = self.engine.render_to_string('include02', {'headline': 'Included'})
self.assertEqual(output, 'Included')
@setup({'include03': '{% include template_name %}'}, basic_templates)
def test_include03(self):
output = self.engine.render_to_string(
'include03',
{'template_name': 'basic-syntax02', 'headline': 'Included'},
)
self.assertEqual(output, 'Included')
@setup({'include04': 'a{% include "nonexistent" %}b'})
def test_include04(self):
template = self.engine.get_template('include04')
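        # With template debugging enabled the missing include surfaces at
        # render time; otherwise the failed {% include %} renders as ''.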
if self.engine.debug:
with self.assertRaises(TemplateDoesNotExist):
template.render(Context({}))
else:
output = template.render(Context({}))
self.assertEqual(output, "ab")
@setup({
'include 05': 'template with a space',
'include06': '{% include "include 05"%}',
})
def test_include06(self):
output = self.engine.render_to_string('include06')
self.assertEqual(output, "template with a space")
@setup({'include07': '{% include "basic-syntax02" with headline="Inline" %}'}, basic_templates)
def test_include07(self):
output = self.engine.render_to_string('include07', {'headline': 'Included'})
self.assertEqual(output, 'Inline')
@setup({'include08': '{% include headline with headline="Dynamic" %}'}, basic_templates)
def test_include08(self):
output = self.engine.render_to_string('include08', {'headline': 'basic-syntax02'})
self.assertEqual(output, 'Dynamic')
@setup(
{'include09': '{{ first }}--'
'{% include "basic-syntax03" with first=second|lower|upper second=first|upper %}'
'--{{ second }}'},
basic_templates,
)
def test_include09(self):
output = self.engine.render_to_string('include09', {'first': 'Ul', 'second': 'lU'})
self.assertEqual(output, 'Ul--LU --- UL--lU')
@setup({'include10': '{% include "basic-syntax03" only %}'}, basic_templates)
def test_include10(self):
output = self.engine.render_to_string('include10', {'first': '1'})
if self.engine.string_if_invalid:
self.assertEqual(output, 'INVALID --- INVALID')
else:
self.assertEqual(output, ' --- ')
@setup({'include11': '{% include "basic-syntax03" only with second=2 %}'}, basic_templates)
def test_include11(self):
output = self.engine.render_to_string('include11', {'first': '1'})
if self.engine.string_if_invalid:
self.assertEqual(output, 'INVALID --- 2')
else:
self.assertEqual(output, ' --- 2')
@setup({'include12': '{% include "basic-syntax03" with first=1 only %}'}, basic_templates)
def test_include12(self):
output = self.engine.render_to_string('include12', {'second': '2'})
if self.engine.string_if_invalid:
self.assertEqual(output, '1 --- INVALID')
else:
self.assertEqual(output, '1 --- ')
@setup(
{'include13': '{% autoescape off %}{% include "basic-syntax03" %}{% endautoescape %}'},
basic_templates,
)
def test_include13(self):
output = self.engine.render_to_string('include13', {'first': '&'})
if self.engine.string_if_invalid:
self.assertEqual(output, '& --- INVALID')
else:
self.assertEqual(output, '& --- ')
@setup(
{'include14': '{% autoescape off %}'
'{% include "basic-syntax03" with first=var1 only %}'
'{% endautoescape %}'},
basic_templates,
)
def test_include14(self):
output = self.engine.render_to_string('include14', {'var1': '&'})
if self.engine.string_if_invalid:
self.assertEqual(output, '& --- INVALID')
else:
self.assertEqual(output, '& --- ')
# Include syntax errors
@setup({'include-error01': '{% include "basic-syntax01" with %}'})
def test_include_error01(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('include-error01')
@setup({'include-error02': '{% include "basic-syntax01" with "no key" %}'})
def test_include_error02(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('include-error02')
@setup({'include-error03': '{% include "basic-syntax01" with dotted.arg="error" %}'})
def test_include_error03(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('include-error03')
@setup({'include-error04': '{% include "basic-syntax01" something_random %}'})
def test_include_error04(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('include-error04')
@setup({'include-error05': '{% include "basic-syntax01" foo="duplicate" foo="key" %}'})
def test_include_error05(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('include-error05')
@setup({'include-error06': '{% include "basic-syntax01" only only %}'})
def test_include_error06(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('include-error06')
@setup(include_fail_templates)
def test_include_fail1(self):
with self.assertRaises(RuntimeError):
self.engine.get_template('include-fail1')
@setup(include_fail_templates)
def test_include_fail2(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('include-fail2')
@setup({'include-error07': '{% include "include-fail1" %}'}, include_fail_templates)
def test_include_error07(self):
template = self.engine.get_template('include-error07')
if self.engine.debug:
with self.assertRaises(RuntimeError):
template.render(Context())
else:
self.assertEqual(template.render(Context()), '')
@setup({'include-error08': '{% include "include-fail2" %}'}, include_fail_templates)
def test_include_error08(self):
template = self.engine.get_template('include-error08')
if self.engine.debug:
with self.assertRaises(TemplateSyntaxError):
template.render(Context())
else:
self.assertEqual(template.render(Context()), '')
@setup({'include-error09': '{% include failed_include %}'}, include_fail_templates)
def test_include_error09(self):
context = Context({'failed_include': 'include-fail1'})
template = self.engine.get_template('include-error09')
if self.engine.debug:
with self.assertRaises(RuntimeError):
template.render(context)
else:
self.assertEqual(template.render(context), '')
@setup({'include-error10': '{% include failed_include %}'}, include_fail_templates)
def test_include_error10(self):
context = Context({'failed_include': 'include-fail2'})
template = self.engine.get_template('include-error10')
if self.engine.debug:
with self.assertRaises(TemplateSyntaxError):
template.render(context)
else:
self.assertEqual(template.render(context), '')
class IncludeTests(SimpleTestCase):
# Test the base loader class via the app loader. load_template
# from base is used by all shipped loaders excepting cached,
# which has its own test.
@override_settings(TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
'OPTIONS': {
# Enable debug, otherwise the exception raised during
# {% include %} processing will be suppressed.
'debug': True,
}
}])
def test_include_missing_template(self):
"""
Tests that the correct template is identified as not existing
when {% include %} specifies a template that does not exist.
"""
template = engines['django'].get_template('test_include_error.html')
with self.assertRaises(TemplateDoesNotExist) as e:
template.render()
self.assertEqual(e.exception.args[0], 'missing.html')
# Test the base loader class via the app loader. load_template
# from base is used by all shipped loaders excepting cached,
# which has its own test.
@override_settings(TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
'OPTIONS': {
# Enable debug, otherwise the exception raised during
# {% include %} processing will be suppressed.
'debug': True,
}
}])
def test_extends_include_missing_baseloader(self):
"""
#12787 -- Tests that the correct template is identified as not existing
when {% extends %} specifies a template that does exist, but that
template has an {% include %} of something that does not exist.
"""
template = engines['django'].get_template('test_extends_error.html')
with self.assertRaises(TemplateDoesNotExist) as e:
template.render()
self.assertEqual(e.exception.args[0], 'missing.html')
@override_settings(TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'OPTIONS': {
'debug': True,
'loaders': [
('django.template.loaders.cached.Loader', [
'django.template.loaders.app_directories.Loader',
]),
],
},
}])
def test_extends_include_missing_cachedloader(self):
"""
Test the cache loader separately since it overrides load_template.
"""
template = engines['django'].get_template('test_extends_error.html')
with self.assertRaises(TemplateDoesNotExist) as e:
template.render()
self.assertEqual(e.exception.args[0], 'missing.html')
# Repeat to ensure it still works when loading from the cache
template = engines['django'].get_template('test_extends_error.html')
with self.assertRaises(TemplateDoesNotExist) as e:
template.render()
self.assertEqual(e.exception.args[0], 'missing.html')
def test_include_template_argument(self):
"""
Support any render() supporting object
"""
ctx = Context({
'tmpl': Template('This worked!'),
})
outer_tmpl = Template('{% include tmpl %}')
output = outer_tmpl.render(ctx)
self.assertEqual(output, 'This worked!')
@override_settings(TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'OPTIONS': {
'debug': True,
},
}])
def test_include_immediate_missing(self):
"""
#16417 -- Include tags pointing to missing templates should not raise
an error at parsing time.
"""
template = Template('{% include "this_does_not_exist.html" %}')
self.assertIsInstance(template, Template)
@override_settings(TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
'OPTIONS': {
'debug': True,
},
}])
def test_include_recursive(self):
comments = [
{
'comment': 'A1',
'children': [
{'comment': 'B1', 'children': []},
{'comment': 'B2', 'children': []},
{'comment': 'B3', 'children': [
{'comment': 'C1', 'children': []}
]},
]
}
]
t = engines['django'].get_template('recursive_include.html')
self.assertEqual(
"Recursion! A1 Recursion! B1 B2 B3 Recursion! C1",
t.render({'comments': comments}).replace(' ', '').replace('\n', ' ').strip(),
)
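# For reference, a hypothetical reconstruction of recursive_include.html that
# would yield the output asserted above (the real fixture lives in the app's
# templates directory and is not part of this file):
#
#     Recursion!
#     {% for comment in comments %}
#         {{ comment.comment }}
#         {% if comment.children %}{% include "recursive_include.html" with comments=comment.children only %}{% endif %}
#     {% endfor %}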
|
for-the-repose/comine
|
refs/heads/master
|
source/comine/gdb/__init__.py
|
12133432
| |
jnerin/ansible
|
refs/heads/devel
|
lib/ansible/module_utils/net_tools/nios/__init__.py
|
12133432
| |
vprime/puuuu
|
refs/heads/master
|
env/lib/python2.7/site-packages/pip/_vendor/html5lib/filters/__init__.py
|
12133432
| |
labcodes/django
|
refs/heads/master
|
tests/gis_tests/geoadmin/__init__.py
|
12133432
| |
himanshu-dixit/oppia
|
refs/heads/develop
|
core/storage/__init__.py
|
12133432
| |
GNOME/niepce
|
refs/heads/master
|
third_party/googletest/googletest/test/gtest_xml_test_utils.py
|
364
|
#!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test utilities for gtest_xml_output"""
__author__ = 'eefacm@gmail.com (Sean Mcafee)'
import re
from xml.dom import minidom, Node
import gtest_test_utils
GTEST_OUTPUT_FLAG = '--gtest_output'
GTEST_DEFAULT_OUTPUT_FILE = 'test_detail.xml'
class GTestXMLTestCase(gtest_test_utils.TestCase):
"""
Base class for tests of Google Test's XML output functionality.
"""
def AssertEquivalentNodes(self, expected_node, actual_node):
"""
Asserts that actual_node (a DOM node object) is equivalent to
expected_node (another DOM node object), in that either both of
them are CDATA nodes and have the same value, or both are DOM
elements and actual_node meets all of the following conditions:
* It has the same tag name as expected_node.
* It has the same set of attributes as expected_node, each with
the same value as the corresponding attribute of expected_node.
Exceptions are any attribute named "time", which needs only be
convertible to a floating-point number, and any attribute named
"type_param", which only has to be non-empty.
* It has an equivalent set of child nodes (including elements and
CDATA sections) as expected_node. Note that we ignore the
order of the children as they are not guaranteed to be in any
particular order.
"""
if expected_node.nodeType == Node.CDATA_SECTION_NODE:
self.assertEquals(Node.CDATA_SECTION_NODE, actual_node.nodeType)
self.assertEquals(expected_node.nodeValue, actual_node.nodeValue)
return
self.assertEquals(Node.ELEMENT_NODE, actual_node.nodeType)
self.assertEquals(Node.ELEMENT_NODE, expected_node.nodeType)
self.assertEquals(expected_node.tagName, actual_node.tagName)
expected_attributes = expected_node.attributes
actual_attributes = actual_node.attributes
self.assertEquals(
expected_attributes.length, actual_attributes.length,
'attribute numbers differ in element %s:\nExpected: %r\nActual: %r' % (
actual_node.tagName, expected_attributes.keys(),
actual_attributes.keys()))
for i in range(expected_attributes.length):
expected_attr = expected_attributes.item(i)
actual_attr = actual_attributes.get(expected_attr.name)
self.assert_(
actual_attr is not None,
'expected attribute %s not found in element %s' %
(expected_attr.name, actual_node.tagName))
self.assertEquals(
expected_attr.value, actual_attr.value,
' values of attribute %s in element %s differ: %s vs %s' %
(expected_attr.name, actual_node.tagName,
expected_attr.value, actual_attr.value))
expected_children = self._GetChildren(expected_node)
actual_children = self._GetChildren(actual_node)
self.assertEquals(
len(expected_children), len(actual_children),
'number of child elements differ in element ' + actual_node.tagName)
for child_id, child in expected_children.items():
self.assert_(child_id in actual_children,
'<%s> is not in <%s> (in element %s)' %
(child_id, actual_children, actual_node.tagName))
self.AssertEquivalentNodes(child, actual_children[child_id])
identifying_attribute = {
'testsuites': 'name',
'testsuite': 'name',
'testcase': 'name',
'failure': 'message',
}
def _GetChildren(self, element):
"""
Fetches all of the child nodes of element, a DOM Element object.
Returns them as the values of a dictionary keyed by the IDs of the
children. For <testsuites>, <testsuite> and <testcase> elements, the ID
is the value of their "name" attribute; for <failure> elements, it is
the value of the "message" attribute; CDATA sections and non-whitespace
text nodes are concatenated into a single CDATA section with ID
"detail". An exception is raised if any element other than the above
four is encountered, if two child elements with the same identifying
attributes are encountered, or if any other type of node is encountered.
"""
children = {}
for child in element.childNodes:
if child.nodeType == Node.ELEMENT_NODE:
self.assert_(child.tagName in self.identifying_attribute,
'Encountered unknown element <%s>' % child.tagName)
childID = child.getAttribute(self.identifying_attribute[child.tagName])
self.assert_(childID not in children)
children[childID] = child
elif child.nodeType in [Node.TEXT_NODE, Node.CDATA_SECTION_NODE]:
if 'detail' not in children:
if (child.nodeType == Node.CDATA_SECTION_NODE or
not child.nodeValue.isspace()):
children['detail'] = child.ownerDocument.createCDATASection(
child.nodeValue)
else:
children['detail'].nodeValue += child.nodeValue
else:
self.fail('Encountered unexpected node type %d' % child.nodeType)
return children
def NormalizeXml(self, element):
"""
Normalizes Google Test's XML output to eliminate references to transient
information that may change from run to run.
* The "time" attribute of <testsuites>, <testsuite> and <testcase>
elements is replaced with a single asterisk, if it contains
only digit characters.
* The "timestamp" attribute of <testsuites> elements is replaced with a
single asterisk, if it contains a valid ISO8601 datetime value.
* The "type_param" attribute of <testcase> elements is replaced with a
single asterisk (if it is non-empty) as it is the type name returned
by the compiler and is platform dependent.
* The line info reported in the first line of the "message"
attribute and CDATA section of <failure> elements is replaced with the
file's basename and a single asterisk for the line number.
* The directory names in file paths are removed.
* The stack traces are removed.
"""
if element.tagName == 'testsuites':
timestamp = element.getAttributeNode('timestamp')
timestamp.value = re.sub(r'^\d{4}-\d\d-\d\dT\d\d:\d\d:\d\d$',
'*', timestamp.value)
if element.tagName in ('testsuites', 'testsuite', 'testcase'):
time = element.getAttributeNode('time')
time.value = re.sub(r'^\d+(\.\d+)?$', '*', time.value)
type_param = element.getAttributeNode('type_param')
if type_param and type_param.value:
type_param.value = '*'
elif element.tagName == 'failure':
source_line_pat = r'^.*[/\\](.*:)\d+\n'
# Replaces the source line information with a normalized form.
message = element.getAttributeNode('message')
message.value = re.sub(source_line_pat, '\\1*\n', message.value)
for child in element.childNodes:
if child.nodeType == Node.CDATA_SECTION_NODE:
# Replaces the source line information with a normalized form.
cdata = re.sub(source_line_pat, '\\1*\n', child.nodeValue)
# Removes the actual stack trace.
child.nodeValue = re.sub(r'\nStack trace:\n(.|\n)*',
'', cdata)
for child in element.childNodes:
if child.nodeType == Node.ELEMENT_NODE:
self.NormalizeXml(child)
|
jayceyxc/hue
|
refs/heads/master
|
desktop/core/ext-py/Django-1.6.10/tests/cache/tests.py
|
40
|
# -*- coding: utf-8 -*-
# Unit tests for cache framework
# Uses whatever cache backend is set in the test settings file.
from __future__ import absolute_import, unicode_literals
import hashlib
import os
import random
import re
import string
import tempfile
import time
import warnings
import pickle
from django.conf import settings
from django.core import management
from django.core.cache import get_cache
from django.core.cache.backends.base import (CacheKeyWarning,
InvalidCacheBackendError)
from django.core.context_processors import csrf
from django.db import connections, router, transaction
from django.core.cache.utils import make_template_fragment_key
from django.http import (HttpResponse, HttpRequest, StreamingHttpResponse,
QueryDict)
from django.middleware.cache import (FetchFromCacheMiddleware,
UpdateCacheMiddleware, CacheMiddleware)
from django.middleware.csrf import CsrfViewMiddleware
from django.template import Template
from django.template.response import TemplateResponse
from django.test import TestCase, TransactionTestCase, RequestFactory
from django.test.utils import override_settings, IgnorePendingDeprecationWarningsMixin
from django.utils import six, timezone, translation, unittest
from django.utils.cache import (patch_vary_headers, get_cache_key,
learn_cache_key, patch_cache_control, patch_response_headers)
from django.utils.encoding import force_text
from django.views.decorators.cache import cache_page
from .models import Poll, expensive_calculation
# functions/classes for complex data type tests
def f():
return 42
class C:
def m(n):
return 24
class DummyCacheTests(unittest.TestCase):
# The Dummy cache backend doesn't really behave like a test backend,
# so it has different test requirements.
backend_name = 'django.core.cache.backends.dummy.DummyCache'
def setUp(self):
self.cache = get_cache(self.backend_name)
def test_simple(self):
"Dummy cache backend ignores cache set calls"
self.cache.set("key", "value")
self.assertEqual(self.cache.get("key"), None)
def test_add(self):
"Add doesn't do anything in dummy cache backend"
self.cache.add("addkey1", "value")
result = self.cache.add("addkey1", "newvalue")
self.assertEqual(result, True)
self.assertEqual(self.cache.get("addkey1"), None)
def test_non_existent(self):
"Non-existent keys aren't found in the dummy cache backend"
self.assertEqual(self.cache.get("does_not_exist"), None)
self.assertEqual(self.cache.get("does_not_exist", "bang!"), "bang!")
def test_get_many(self):
"get_many returns nothing for the dummy cache backend"
self.cache.set('a', 'a')
self.cache.set('b', 'b')
self.cache.set('c', 'c')
self.cache.set('d', 'd')
self.assertEqual(self.cache.get_many(['a', 'c', 'd']), {})
self.assertEqual(self.cache.get_many(['a', 'b', 'e']), {})
def test_delete(self):
"Cache deletion is transparently ignored on the dummy cache backend"
self.cache.set("key1", "spam")
self.cache.set("key2", "eggs")
self.assertEqual(self.cache.get("key1"), None)
self.cache.delete("key1")
self.assertEqual(self.cache.get("key1"), None)
self.assertEqual(self.cache.get("key2"), None)
def test_has_key(self):
"The has_key method doesn't ever return True for the dummy cache backend"
self.cache.set("hello1", "goodbye1")
self.assertEqual(self.cache.has_key("hello1"), False)
self.assertEqual(self.cache.has_key("goodbye1"), False)
def test_in(self):
"The in operator doesn't ever return True for the dummy cache backend"
self.cache.set("hello2", "goodbye2")
self.assertEqual("hello2" in self.cache, False)
self.assertEqual("goodbye2" in self.cache, False)
def test_incr(self):
"Dummy cache values can't be incremented"
self.cache.set('answer', 42)
self.assertRaises(ValueError, self.cache.incr, 'answer')
self.assertRaises(ValueError, self.cache.incr, 'does_not_exist')
def test_decr(self):
"Dummy cache values can't be decremented"
self.cache.set('answer', 42)
self.assertRaises(ValueError, self.cache.decr, 'answer')
self.assertRaises(ValueError, self.cache.decr, 'does_not_exist')
def test_data_types(self):
"All data types are ignored equally by the dummy cache"
stuff = {
'string' : 'this is a string',
'int' : 42,
'list' : [1, 2, 3, 4],
'tuple' : (1, 2, 3, 4),
'dict' : {'A': 1, 'B' : 2},
'function' : f,
'class' : C,
}
self.cache.set("stuff", stuff)
self.assertEqual(self.cache.get("stuff"), None)
def test_expiration(self):
"Expiration has no effect on the dummy cache"
self.cache.set('expire1', 'very quickly', 1)
self.cache.set('expire2', 'very quickly', 1)
self.cache.set('expire3', 'very quickly', 1)
time.sleep(2)
self.assertEqual(self.cache.get("expire1"), None)
self.cache.add("expire2", "newvalue")
self.assertEqual(self.cache.get("expire2"), None)
self.assertEqual(self.cache.has_key("expire3"), False)
def test_unicode(self):
"Unicode values are ignored by the dummy cache"
stuff = {
'ascii': 'ascii_value',
'unicode_ascii': 'Iñtërnâtiônàlizætiøn1',
'Iñtërnâtiônàlizætiøn': 'Iñtërnâtiônàlizætiøn2',
'ascii2': {'x' : 1 }
}
for (key, value) in stuff.items():
self.cache.set(key, value)
self.assertEqual(self.cache.get(key), None)
def test_set_many(self):
"set_many does nothing for the dummy cache backend"
self.cache.set_many({'a': 1, 'b': 2})
self.cache.set_many({'a': 1, 'b': 2}, timeout=2, version='1')
def test_delete_many(self):
"delete_many does nothing for the dummy cache backend"
self.cache.delete_many(['a', 'b'])
def test_clear(self):
"clear does nothing for the dummy cache backend"
self.cache.clear()
def test_incr_version(self):
"Dummy cache versions can't be incremented"
self.cache.set('answer', 42)
self.assertRaises(ValueError, self.cache.incr_version, 'answer')
self.assertRaises(ValueError, self.cache.incr_version, 'does_not_exist')
def test_decr_version(self):
"Dummy cache versions can't be decremented"
self.cache.set('answer', 42)
self.assertRaises(ValueError, self.cache.decr_version, 'answer')
self.assertRaises(ValueError, self.cache.decr_version, 'does_not_exist')
class BaseCacheTests(object):
# A common set of tests to apply to all cache backends
def _get_request_cache(self, path):
request = HttpRequest()
request.META = {
'SERVER_NAME': 'testserver',
'SERVER_PORT': 80,
}
request.path = request.path_info = path
request._cache_update_cache = True
request.method = 'GET'
return request
def test_simple(self):
# Simple cache set/get works
self.cache.set("key", "value")
self.assertEqual(self.cache.get("key"), "value")
def test_add(self):
# A key can be added to a cache
self.cache.add("addkey1", "value")
result = self.cache.add("addkey1", "newvalue")
self.assertEqual(result, False)
self.assertEqual(self.cache.get("addkey1"), "value")
def test_prefix(self):
# Test for cache key conflicts between caches sharing the same backend
self.cache.set('somekey', 'value')
# should not be set in the prefixed cache
self.assertFalse(self.prefix_cache.has_key('somekey'))
self.prefix_cache.set('somekey', 'value2')
self.assertEqual(self.cache.get('somekey'), 'value')
self.assertEqual(self.prefix_cache.get('somekey'), 'value2')
def test_non_existent(self):
# Non-existent cache keys return as None/default
# get with non-existent keys
self.assertEqual(self.cache.get("does_not_exist"), None)
self.assertEqual(self.cache.get("does_not_exist", "bang!"), "bang!")
def test_get_many(self):
# Multiple cache keys can be returned using get_many
self.cache.set('a', 'a')
self.cache.set('b', 'b')
self.cache.set('c', 'c')
self.cache.set('d', 'd')
self.assertEqual(self.cache.get_many(['a', 'c', 'd']), {'a' : 'a', 'c' : 'c', 'd' : 'd'})
self.assertEqual(self.cache.get_many(['a', 'b', 'e']), {'a' : 'a', 'b' : 'b'})
def test_delete(self):
# Cache keys can be deleted
self.cache.set("key1", "spam")
self.cache.set("key2", "eggs")
self.assertEqual(self.cache.get("key1"), "spam")
self.cache.delete("key1")
self.assertEqual(self.cache.get("key1"), None)
self.assertEqual(self.cache.get("key2"), "eggs")
def test_has_key(self):
# The cache can be inspected for cache keys
self.cache.set("hello1", "goodbye1")
self.assertEqual(self.cache.has_key("hello1"), True)
self.assertEqual(self.cache.has_key("goodbye1"), False)
def test_in(self):
# The in operator can be used to inspect cache contents
self.cache.set("hello2", "goodbye2")
self.assertEqual("hello2" in self.cache, True)
self.assertEqual("goodbye2" in self.cache, False)
def test_incr(self):
# Cache values can be incremented
self.cache.set('answer', 41)
self.assertEqual(self.cache.incr('answer'), 42)
self.assertEqual(self.cache.get('answer'), 42)
self.assertEqual(self.cache.incr('answer', 10), 52)
self.assertEqual(self.cache.get('answer'), 52)
self.assertEqual(self.cache.incr('answer', -10), 42)
self.assertRaises(ValueError, self.cache.incr, 'does_not_exist')
def test_decr(self):
# Cache values can be decremented
self.cache.set('answer', 43)
self.assertEqual(self.cache.decr('answer'), 42)
self.assertEqual(self.cache.get('answer'), 42)
self.assertEqual(self.cache.decr('answer', 10), 32)
self.assertEqual(self.cache.get('answer'), 32)
self.assertEqual(self.cache.decr('answer', -10), 42)
self.assertRaises(ValueError, self.cache.decr, 'does_not_exist')
def test_close(self):
self.assertTrue(hasattr(self.cache, 'close'))
self.cache.close()
def test_data_types(self):
# Many different data types can be cached
stuff = {
'string' : 'this is a string',
'int' : 42,
'list' : [1, 2, 3, 4],
'tuple' : (1, 2, 3, 4),
'dict' : {'A': 1, 'B' : 2},
'function' : f,
'class' : C,
}
self.cache.set("stuff", stuff)
self.assertEqual(self.cache.get("stuff"), stuff)
def test_cache_read_for_model_instance(self):
# Don't want fields with callable as default to be called on cache read
expensive_calculation.num_runs = 0
Poll.objects.all().delete()
my_poll = Poll.objects.create(question="Well?")
self.assertEqual(Poll.objects.count(), 1)
pub_date = my_poll.pub_date
self.cache.set('question', my_poll)
cached_poll = self.cache.get('question')
self.assertEqual(cached_poll.pub_date, pub_date)
# We only want the default expensive calculation run once
self.assertEqual(expensive_calculation.num_runs, 1)
def test_cache_write_for_model_instance_with_deferred(self):
# Don't want fields with callable as default to be called on cache write
expensive_calculation.num_runs = 0
Poll.objects.all().delete()
my_poll = Poll.objects.create(question="What?")
self.assertEqual(expensive_calculation.num_runs, 1)
defer_qs = Poll.objects.all().defer('question')
self.assertEqual(defer_qs.count(), 1)
self.assertEqual(expensive_calculation.num_runs, 1)
self.cache.set('deferred_queryset', defer_qs)
# cache set should not re-evaluate default functions
self.assertEqual(expensive_calculation.num_runs, 1)
def test_cache_read_for_model_instance_with_deferred(self):
# Don't want fields with callable as default to be called on cache read
expensive_calculation.num_runs = 0
Poll.objects.all().delete()
my_poll = Poll.objects.create(question="What?")
self.assertEqual(expensive_calculation.num_runs, 1)
defer_qs = Poll.objects.all().defer('question')
self.assertEqual(defer_qs.count(), 1)
self.cache.set('deferred_queryset', defer_qs)
self.assertEqual(expensive_calculation.num_runs, 1)
runs_before_cache_read = expensive_calculation.num_runs
cached_polls = self.cache.get('deferred_queryset')
# We only want the default expensive calculation run on creation and set
self.assertEqual(expensive_calculation.num_runs, runs_before_cache_read)
def test_expiration(self):
# Cache values can be set to expire
self.cache.set('expire1', 'very quickly', 1)
self.cache.set('expire2', 'very quickly', 1)
self.cache.set('expire3', 'very quickly', 1)
time.sleep(2)
self.assertEqual(self.cache.get("expire1"), None)
self.cache.add("expire2", "newvalue")
self.assertEqual(self.cache.get("expire2"), "newvalue")
self.assertEqual(self.cache.has_key("expire3"), False)
def test_unicode(self):
# Unicode values can be cached
stuff = {
'ascii': 'ascii_value',
'unicode_ascii': 'Iñtërnâtiônàlizætiøn1',
'Iñtërnâtiônàlizætiøn': 'Iñtërnâtiônàlizætiøn2',
'ascii2': {'x' : 1 }
}
# Test `set`
for (key, value) in stuff.items():
self.cache.set(key, value)
self.assertEqual(self.cache.get(key), value)
# Test `add`
for (key, value) in stuff.items():
self.cache.delete(key)
self.cache.add(key, value)
self.assertEqual(self.cache.get(key), value)
# Test `set_many`
for (key, value) in stuff.items():
self.cache.delete(key)
self.cache.set_many(stuff)
for (key, value) in stuff.items():
self.assertEqual(self.cache.get(key), value)
def test_binary_string(self):
# Binary strings should be cacheable
from zlib import compress, decompress
value = 'value_to_be_compressed'
compressed_value = compress(value.encode())
# Test set
self.cache.set('binary1', compressed_value)
compressed_result = self.cache.get('binary1')
self.assertEqual(compressed_value, compressed_result)
self.assertEqual(value, decompress(compressed_result).decode())
# Test add
self.cache.add('binary1-add', compressed_value)
compressed_result = self.cache.get('binary1-add')
self.assertEqual(compressed_value, compressed_result)
self.assertEqual(value, decompress(compressed_result).decode())
# Test set_many
self.cache.set_many({'binary1-set_many': compressed_value})
compressed_result = self.cache.get('binary1-set_many')
self.assertEqual(compressed_value, compressed_result)
self.assertEqual(value, decompress(compressed_result).decode())
def test_set_many(self):
# Multiple keys can be set using set_many
self.cache.set_many({"key1": "spam", "key2": "eggs"})
self.assertEqual(self.cache.get("key1"), "spam")
self.assertEqual(self.cache.get("key2"), "eggs")
def test_set_many_expiration(self):
# set_many takes a second ``timeout`` parameter
self.cache.set_many({"key1": "spam", "key2": "eggs"}, 1)
time.sleep(2)
self.assertEqual(self.cache.get("key1"), None)
self.assertEqual(self.cache.get("key2"), None)
def test_delete_many(self):
# Multiple keys can be deleted using delete_many
self.cache.set("key1", "spam")
self.cache.set("key2", "eggs")
self.cache.set("key3", "ham")
self.cache.delete_many(["key1", "key2"])
self.assertEqual(self.cache.get("key1"), None)
self.assertEqual(self.cache.get("key2"), None)
self.assertEqual(self.cache.get("key3"), "ham")
def test_clear(self):
# The cache can be emptied using clear
self.cache.set("key1", "spam")
self.cache.set("key2", "eggs")
self.cache.clear()
self.assertEqual(self.cache.get("key1"), None)
self.assertEqual(self.cache.get("key2"), None)
def test_long_timeout(self):
'''
Using a timeout greater than 30 days makes memcached think
it is an absolute expiration timestamp instead of a relative
offset. Test that we honour this convention. Refs #12399.
'''
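        # 60*60*24*30 == 2,592,000 seconds is the memcached cutoff: any larger
        # value is read as an absolute Unix timestamp rather than an offset.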
self.cache.set('key1', 'eggs', 60*60*24*30 + 1) #30 days + 1 second
self.assertEqual(self.cache.get('key1'), 'eggs')
self.cache.add('key2', 'ham', 60*60*24*30 + 1)
self.assertEqual(self.cache.get('key2'), 'ham')
self.cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 60*60*24*30 + 1)
self.assertEqual(self.cache.get('key3'), 'sausage')
self.assertEqual(self.cache.get('key4'), 'lobster bisque')
def test_forever_timeout(self):
'''
Passing in None into timeout results in a value that is cached forever
'''
self.cache.set('key1', 'eggs', None)
self.assertEqual(self.cache.get('key1'), 'eggs')
self.cache.add('key2', 'ham', None)
self.assertEqual(self.cache.get('key2'), 'ham')
self.cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, None)
self.assertEqual(self.cache.get('key3'), 'sausage')
self.assertEqual(self.cache.get('key4'), 'lobster bisque')
def test_zero_timeout(self):
'''
Passing in zero into timeout results in a value that is not cached
'''
self.cache.set('key1', 'eggs', 0)
self.assertEqual(self.cache.get('key1'), None)
self.cache.add('key2', 'ham', 0)
self.assertEqual(self.cache.get('key2'), None)
self.cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 0)
self.assertEqual(self.cache.get('key3'), None)
self.assertEqual(self.cache.get('key4'), None)
def test_float_timeout(self):
# Make sure a timeout given as a float doesn't crash anything.
self.cache.set("key1", "spam", 100.2)
self.assertEqual(self.cache.get("key1"), "spam")
def perform_cull_test(self, initial_count, final_count):
"""This is implemented as a utility method, because only some of the backends
implement culling. The culling algorithm also varies slightly, so the final
number of entries will vary between backends"""
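        # e.g. the DB/locmem tests below insert keys cull1..cull49 against
        # MAX_ENTRIES=30; CULL_FREQUENCY=0 dumps the entire cache once the
        # limit is hit, hence the lower survivor counts in the zero-cull runs.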
# Create initial cache key entries. This will overflow the cache, causing a cull
for i in range(1, initial_count):
self.cache.set('cull%d' % i, 'value', 1000)
count = 0
# Count how many keys are left in the cache.
for i in range(1, initial_count):
if self.cache.has_key('cull%d' % i):
count = count + 1
self.assertEqual(count, final_count)
def test_invalid_keys(self):
"""
All the builtin backends (except memcached, see below) should warn on
keys that would be refused by memcached. This encourages portable
caching code without making it too difficult to use production backends
with more liberal key rules. Refs #6447.
"""
# mimic custom ``make_key`` method being defined since the default will
# never show the below warnings
def func(key, *args):
return key
old_func = self.cache.key_func
self.cache.key_func = func
try:
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
# memcached does not allow whitespace or control characters in keys
self.cache.set('key with spaces', 'value')
self.assertEqual(len(w), 2)
self.assertIsInstance(w[0].message, CacheKeyWarning)
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
# memcached limits key length to 250
self.cache.set('a' * 251, 'value')
self.assertEqual(len(w), 1)
self.assertIsInstance(w[0].message, CacheKeyWarning)
finally:
self.cache.key_func = old_func
def test_cache_versioning_get_set(self):
# set, using default version = 1
self.cache.set('answer1', 42)
self.assertEqual(self.cache.get('answer1'), 42)
self.assertEqual(self.cache.get('answer1', version=1), 42)
self.assertEqual(self.cache.get('answer1', version=2), None)
self.assertEqual(self.v2_cache.get('answer1'), None)
self.assertEqual(self.v2_cache.get('answer1', version=1), 42)
self.assertEqual(self.v2_cache.get('answer1', version=2), None)
# set, default version = 1, but manually override version = 2
self.cache.set('answer2', 42, version=2)
self.assertEqual(self.cache.get('answer2'), None)
self.assertEqual(self.cache.get('answer2', version=1), None)
self.assertEqual(self.cache.get('answer2', version=2), 42)
self.assertEqual(self.v2_cache.get('answer2'), 42)
self.assertEqual(self.v2_cache.get('answer2', version=1), None)
self.assertEqual(self.v2_cache.get('answer2', version=2), 42)
# v2 set, using default version = 2
self.v2_cache.set('answer3', 42)
self.assertEqual(self.cache.get('answer3'), None)
self.assertEqual(self.cache.get('answer3', version=1), None)
self.assertEqual(self.cache.get('answer3', version=2), 42)
self.assertEqual(self.v2_cache.get('answer3'), 42)
self.assertEqual(self.v2_cache.get('answer3', version=1), None)
self.assertEqual(self.v2_cache.get('answer3', version=2), 42)
# v2 set, default version = 2, but manually override version = 1
self.v2_cache.set('answer4', 42, version=1)
self.assertEqual(self.cache.get('answer4'), 42)
self.assertEqual(self.cache.get('answer4', version=1), 42)
self.assertEqual(self.cache.get('answer4', version=2), None)
self.assertEqual(self.v2_cache.get('answer4'), None)
self.assertEqual(self.v2_cache.get('answer4', version=1), 42)
self.assertEqual(self.v2_cache.get('answer4', version=2), None)
def test_cache_versioning_add(self):
# add, default version = 1, but manually override version = 2
self.cache.add('answer1', 42, version=2)
self.assertEqual(self.cache.get('answer1', version=1), None)
self.assertEqual(self.cache.get('answer1', version=2), 42)
self.cache.add('answer1', 37, version=2)
self.assertEqual(self.cache.get('answer1', version=1), None)
self.assertEqual(self.cache.get('answer1', version=2), 42)
self.cache.add('answer1', 37, version=1)
self.assertEqual(self.cache.get('answer1', version=1), 37)
self.assertEqual(self.cache.get('answer1', version=2), 42)
# v2 add, using default version = 2
self.v2_cache.add('answer2', 42)
self.assertEqual(self.cache.get('answer2', version=1), None)
self.assertEqual(self.cache.get('answer2', version=2), 42)
self.v2_cache.add('answer2', 37)
self.assertEqual(self.cache.get('answer2', version=1), None)
self.assertEqual(self.cache.get('answer2', version=2), 42)
self.v2_cache.add('answer2', 37, version=1)
self.assertEqual(self.cache.get('answer2', version=1), 37)
self.assertEqual(self.cache.get('answer2', version=2), 42)
# v2 add, default version = 2, but manually override version = 1
self.v2_cache.add('answer3', 42, version=1)
self.assertEqual(self.cache.get('answer3', version=1), 42)
self.assertEqual(self.cache.get('answer3', version=2), None)
self.v2_cache.add('answer3', 37, version=1)
self.assertEqual(self.cache.get('answer3', version=1), 42)
self.assertEqual(self.cache.get('answer3', version=2), None)
self.v2_cache.add('answer3', 37)
self.assertEqual(self.cache.get('answer3', version=1), 42)
self.assertEqual(self.cache.get('answer3', version=2), 37)
def test_cache_versioning_has_key(self):
self.cache.set('answer1', 42)
# has_key
self.assertTrue(self.cache.has_key('answer1'))
self.assertTrue(self.cache.has_key('answer1', version=1))
self.assertFalse(self.cache.has_key('answer1', version=2))
self.assertFalse(self.v2_cache.has_key('answer1'))
self.assertTrue(self.v2_cache.has_key('answer1', version=1))
self.assertFalse(self.v2_cache.has_key('answer1', version=2))
def test_cache_versioning_delete(self):
self.cache.set('answer1', 37, version=1)
self.cache.set('answer1', 42, version=2)
self.cache.delete('answer1')
self.assertEqual(self.cache.get('answer1', version=1), None)
self.assertEqual(self.cache.get('answer1', version=2), 42)
self.cache.set('answer2', 37, version=1)
self.cache.set('answer2', 42, version=2)
self.cache.delete('answer2', version=2)
self.assertEqual(self.cache.get('answer2', version=1), 37)
self.assertEqual(self.cache.get('answer2', version=2), None)
self.cache.set('answer3', 37, version=1)
self.cache.set('answer3', 42, version=2)
self.v2_cache.delete('answer3')
self.assertEqual(self.cache.get('answer3', version=1), 37)
self.assertEqual(self.cache.get('answer3', version=2), None)
self.cache.set('answer4', 37, version=1)
self.cache.set('answer4', 42, version=2)
self.v2_cache.delete('answer4', version=1)
self.assertEqual(self.cache.get('answer4', version=1), None)
self.assertEqual(self.cache.get('answer4', version=2), 42)
def test_cache_versioning_incr_decr(self):
self.cache.set('answer1', 37, version=1)
self.cache.set('answer1', 42, version=2)
self.cache.incr('answer1')
self.assertEqual(self.cache.get('answer1', version=1), 38)
self.assertEqual(self.cache.get('answer1', version=2), 42)
self.cache.decr('answer1')
self.assertEqual(self.cache.get('answer1', version=1), 37)
self.assertEqual(self.cache.get('answer1', version=2), 42)
self.cache.set('answer2', 37, version=1)
self.cache.set('answer2', 42, version=2)
self.cache.incr('answer2', version=2)
self.assertEqual(self.cache.get('answer2', version=1), 37)
self.assertEqual(self.cache.get('answer2', version=2), 43)
self.cache.decr('answer2', version=2)
self.assertEqual(self.cache.get('answer2', version=1), 37)
self.assertEqual(self.cache.get('answer2', version=2), 42)
self.cache.set('answer3', 37, version=1)
self.cache.set('answer3', 42, version=2)
self.v2_cache.incr('answer3')
self.assertEqual(self.cache.get('answer3', version=1), 37)
self.assertEqual(self.cache.get('answer3', version=2), 43)
self.v2_cache.decr('answer3')
self.assertEqual(self.cache.get('answer3', version=1), 37)
self.assertEqual(self.cache.get('answer3', version=2), 42)
self.cache.set('answer4', 37, version=1)
self.cache.set('answer4', 42, version=2)
self.v2_cache.incr('answer4', version=1)
self.assertEqual(self.cache.get('answer4', version=1), 38)
self.assertEqual(self.cache.get('answer4', version=2), 42)
self.v2_cache.decr('answer4', version=1)
self.assertEqual(self.cache.get('answer4', version=1), 37)
self.assertEqual(self.cache.get('answer4', version=2), 42)
def test_cache_versioning_get_set_many(self):
# set, using default version = 1
self.cache.set_many({'ford1': 37, 'arthur1': 42})
self.assertEqual(self.cache.get_many(['ford1','arthur1']),
{'ford1': 37, 'arthur1': 42})
self.assertEqual(self.cache.get_many(['ford1','arthur1'], version=1),
{'ford1': 37, 'arthur1': 42})
self.assertEqual(self.cache.get_many(['ford1','arthur1'], version=2), {})
self.assertEqual(self.v2_cache.get_many(['ford1','arthur1']), {})
self.assertEqual(self.v2_cache.get_many(['ford1','arthur1'], version=1),
{'ford1': 37, 'arthur1': 42})
self.assertEqual(self.v2_cache.get_many(['ford1','arthur1'], version=2), {})
# set, default version = 1, but manually override version = 2
self.cache.set_many({'ford2': 37, 'arthur2': 42}, version=2)
self.assertEqual(self.cache.get_many(['ford2','arthur2']), {})
self.assertEqual(self.cache.get_many(['ford2','arthur2'], version=1), {})
self.assertEqual(self.cache.get_many(['ford2','arthur2'], version=2),
{'ford2': 37, 'arthur2': 42})
self.assertEqual(self.v2_cache.get_many(['ford2','arthur2']),
{'ford2': 37, 'arthur2': 42})
self.assertEqual(self.v2_cache.get_many(['ford2','arthur2'], version=1), {})
self.assertEqual(self.v2_cache.get_many(['ford2','arthur2'], version=2),
{'ford2': 37, 'arthur2': 42})
# v2 set, using default version = 2
self.v2_cache.set_many({'ford3': 37, 'arthur3': 42})
self.assertEqual(self.cache.get_many(['ford3','arthur3']), {})
self.assertEqual(self.cache.get_many(['ford3','arthur3'], version=1), {})
self.assertEqual(self.cache.get_many(['ford3','arthur3'], version=2),
{'ford3': 37, 'arthur3': 42})
self.assertEqual(self.v2_cache.get_many(['ford3','arthur3']),
{'ford3': 37, 'arthur3': 42})
self.assertEqual(self.v2_cache.get_many(['ford3','arthur3'], version=1), {})
self.assertEqual(self.v2_cache.get_many(['ford3','arthur3'], version=2),
{'ford3': 37, 'arthur3': 42})
# v2 set, default version = 2, but manually override version = 1
self.v2_cache.set_many({'ford4': 37, 'arthur4': 42}, version=1)
self.assertEqual(self.cache.get_many(['ford4','arthur4']),
{'ford4': 37, 'arthur4': 42})
self.assertEqual(self.cache.get_many(['ford4','arthur4'], version=1),
{'ford4': 37, 'arthur4': 42})
self.assertEqual(self.cache.get_many(['ford4','arthur4'], version=2), {})
self.assertEqual(self.v2_cache.get_many(['ford4','arthur4']), {})
self.assertEqual(self.v2_cache.get_many(['ford4','arthur4'], version=1),
{'ford4': 37, 'arthur4': 42})
self.assertEqual(self.v2_cache.get_many(['ford4','arthur4'], version=2), {})
def test_incr_version(self):
self.cache.set('answer', 42, version=2)
self.assertEqual(self.cache.get('answer'), None)
self.assertEqual(self.cache.get('answer', version=1), None)
self.assertEqual(self.cache.get('answer', version=2), 42)
self.assertEqual(self.cache.get('answer', version=3), None)
self.assertEqual(self.cache.incr_version('answer', version=2), 3)
self.assertEqual(self.cache.get('answer'), None)
self.assertEqual(self.cache.get('answer', version=1), None)
self.assertEqual(self.cache.get('answer', version=2), None)
self.assertEqual(self.cache.get('answer', version=3), 42)
self.v2_cache.set('answer2', 42)
self.assertEqual(self.v2_cache.get('answer2'), 42)
self.assertEqual(self.v2_cache.get('answer2', version=1), None)
self.assertEqual(self.v2_cache.get('answer2', version=2), 42)
self.assertEqual(self.v2_cache.get('answer2', version=3), None)
self.assertEqual(self.v2_cache.incr_version('answer2'), 3)
self.assertEqual(self.v2_cache.get('answer2'), None)
self.assertEqual(self.v2_cache.get('answer2', version=1), None)
self.assertEqual(self.v2_cache.get('answer2', version=2), None)
self.assertEqual(self.v2_cache.get('answer2', version=3), 42)
self.assertRaises(ValueError, self.cache.incr_version, 'does_not_exist')
def test_decr_version(self):
self.cache.set('answer', 42, version=2)
self.assertEqual(self.cache.get('answer'), None)
self.assertEqual(self.cache.get('answer', version=1), None)
self.assertEqual(self.cache.get('answer', version=2), 42)
self.assertEqual(self.cache.decr_version('answer', version=2), 1)
self.assertEqual(self.cache.get('answer'), 42)
self.assertEqual(self.cache.get('answer', version=1), 42)
self.assertEqual(self.cache.get('answer', version=2), None)
self.v2_cache.set('answer2', 42)
self.assertEqual(self.v2_cache.get('answer2'), 42)
self.assertEqual(self.v2_cache.get('answer2', version=1), None)
self.assertEqual(self.v2_cache.get('answer2', version=2), 42)
self.assertEqual(self.v2_cache.decr_version('answer2'), 1)
self.assertEqual(self.v2_cache.get('answer2'), None)
self.assertEqual(self.v2_cache.get('answer2', version=1), 42)
self.assertEqual(self.v2_cache.get('answer2', version=2), None)
self.assertRaises(ValueError, self.cache.decr_version, 'does_not_exist', version=2)
def test_custom_key_func(self):
# Two caches with different key functions aren't visible to each other
self.cache.set('answer1', 42)
self.assertEqual(self.cache.get('answer1'), 42)
self.assertEqual(self.custom_key_cache.get('answer1'), None)
self.assertEqual(self.custom_key_cache2.get('answer1'), None)
self.custom_key_cache.set('answer2', 42)
self.assertEqual(self.cache.get('answer2'), None)
self.assertEqual(self.custom_key_cache.get('answer2'), 42)
self.assertEqual(self.custom_key_cache2.get('answer2'), 42)
def test_cache_write_unpickable_object(self):
update_middleware = UpdateCacheMiddleware()
update_middleware.cache = self.cache
fetch_middleware = FetchFromCacheMiddleware()
fetch_middleware.cache = self.cache
request = self._get_request_cache('/cache/test')
get_cache_data = FetchFromCacheMiddleware().process_request(request)
self.assertEqual(get_cache_data, None)
response = HttpResponse()
content = 'Testing cookie serialization.'
response.content = content
response.set_cookie('foo', 'bar')
update_middleware.process_response(request, response)
get_cache_data = fetch_middleware.process_request(request)
self.assertNotEqual(get_cache_data, None)
self.assertEqual(get_cache_data.content, content.encode('utf-8'))
self.assertEqual(get_cache_data.cookies, response.cookies)
update_middleware.process_response(request, get_cache_data)
get_cache_data = fetch_middleware.process_request(request)
self.assertNotEqual(get_cache_data, None)
self.assertEqual(get_cache_data.content, content.encode('utf-8'))
self.assertEqual(get_cache_data.cookies, response.cookies)
def custom_key_func(key, key_prefix, version):
"A customized cache key function"
return 'CUSTOM-' + '-'.join([key_prefix, str(version), key])
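    # e.g. custom_key_func('answer', 'prefix', 2) -> 'CUSTOM-prefix-2-answer'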
class DBCacheTests(BaseCacheTests, TransactionTestCase):
available_apps = ['cache']
backend_name = 'django.core.cache.backends.db.DatabaseCache'
def setUp(self):
# Spaces are used in the table name to ensure quoting/escaping is working
self._table_name = 'test cache table'
management.call_command('createcachetable', self._table_name, verbosity=0, interactive=False)
self.cache = get_cache(self.backend_name, LOCATION=self._table_name, OPTIONS={'MAX_ENTRIES': 30})
self.prefix_cache = get_cache(self.backend_name, LOCATION=self._table_name, KEY_PREFIX='cacheprefix')
self.v2_cache = get_cache(self.backend_name, LOCATION=self._table_name, VERSION=2)
self.custom_key_cache = get_cache(self.backend_name, LOCATION=self._table_name, KEY_FUNCTION=custom_key_func)
self.custom_key_cache2 = get_cache(self.backend_name, LOCATION=self._table_name, KEY_FUNCTION='cache.tests.custom_key_func')
def tearDown(self):
from django.db import connection
cursor = connection.cursor()
cursor.execute('DROP TABLE %s' % connection.ops.quote_name(self._table_name))
connection.commit()
def test_cull(self):
self.perform_cull_test(50, 29)
def test_zero_cull(self):
self.cache = get_cache(self.backend_name, LOCATION=self._table_name, OPTIONS={'MAX_ENTRIES': 30, 'CULL_FREQUENCY': 0})
self.perform_cull_test(50, 18)
def test_old_initialization(self):
self.cache = get_cache('db://%s?max_entries=30&cull_frequency=0' % self._table_name)
self.perform_cull_test(50, 18)
def test_second_call_doesnt_crash(self):
with six.assertRaisesRegex(self, management.CommandError,
"Cache table 'test cache table' could not be created"):
management.call_command(
'createcachetable',
self._table_name,
verbosity=0,
interactive=False
)
def test_clear_commits_transaction(self):
# Ensure the database transaction is committed (#19896)
self.cache.set("key1", "spam")
self.cache.clear()
transaction.rollback()
self.assertEqual(self.cache.get("key1"), None)
@override_settings(USE_TZ=True)
class DBCacheWithTimeZoneTests(DBCacheTests):
pass
class DBCacheRouter(object):
"""A router that puts the cache table on the 'other' database."""
def db_for_read(self, model, **hints):
if model._meta.app_label == 'django_cache':
return 'other'
def db_for_write(self, model, **hints):
if model._meta.app_label == 'django_cache':
return 'other'
def allow_syncdb(self, db, model):
if model._meta.app_label == 'django_cache':
return db == 'other'
class CreateCacheTableForDBCacheTests(TestCase):
multi_db = True
def test_createcachetable_observes_database_router(self):
old_routers = router.routers
try:
router.routers = [DBCacheRouter()]
# cache table should not be created on 'default'
with self.assertNumQueries(0, using='default'):
management.call_command('createcachetable', 'cache_table',
database='default',
verbosity=0, interactive=False)
# cache table should be created on 'other'
# Queries:
# 1: create savepoint (if transactional DDL is supported)
# 2: create the table
# 3: create the index
# 4: release savepoint (if transactional DDL is supported)
from django.db import connections
num = 4 if connections['other'].features.can_rollback_ddl else 2
with self.assertNumQueries(num, using='other'):
management.call_command('createcachetable',
'cache_table',
database='other',
verbosity=0, interactive=False)
finally:
router.routers = old_routers
class LocMemCacheTests(unittest.TestCase, BaseCacheTests):
backend_name = 'django.core.cache.backends.locmem.LocMemCache'
def setUp(self):
self.cache = get_cache(self.backend_name, OPTIONS={'MAX_ENTRIES': 30})
self.prefix_cache = get_cache(self.backend_name, KEY_PREFIX='cacheprefix')
self.v2_cache = get_cache(self.backend_name, VERSION=2)
self.custom_key_cache = get_cache(self.backend_name, OPTIONS={'MAX_ENTRIES': 30}, KEY_FUNCTION=custom_key_func)
self.custom_key_cache2 = get_cache(self.backend_name, OPTIONS={'MAX_ENTRIES': 30}, KEY_FUNCTION='cache.tests.custom_key_func')
# LocMem requires a hack to make the other caches
# share a data store with the 'normal' cache.
self.prefix_cache._cache = self.cache._cache
self.prefix_cache._expire_info = self.cache._expire_info
self.v2_cache._cache = self.cache._cache
self.v2_cache._expire_info = self.cache._expire_info
self.custom_key_cache._cache = self.cache._cache
self.custom_key_cache._expire_info = self.cache._expire_info
self.custom_key_cache2._cache = self.cache._cache
self.custom_key_cache2._expire_info = self.cache._expire_info
def tearDown(self):
self.cache.clear()
def test_cull(self):
self.perform_cull_test(50, 29)
def test_zero_cull(self):
self.cache = get_cache(self.backend_name, OPTIONS={'MAX_ENTRIES': 30, 'CULL_FREQUENCY': 0})
self.perform_cull_test(50, 19)
def test_old_initialization(self):
self.cache = get_cache('locmem://?max_entries=30&cull_frequency=0')
self.perform_cull_test(50, 19)
def test_multiple_caches(self):
"Check that multiple locmem caches are isolated"
mirror_cache = get_cache(self.backend_name)
other_cache = get_cache(self.backend_name, LOCATION='other')
self.cache.set('value1', 42)
self.assertEqual(mirror_cache.get('value1'), 42)
self.assertEqual(other_cache.get('value1'), None)
def test_incr_decr_timeout(self):
"""incr/decr does not modify expiry time (matches memcached behavior)"""
key = 'value'
_key = self.cache.make_key(key)
self.cache.set(key, 1, timeout=self.cache.default_timeout*10)
expire = self.cache._expire_info[_key]
self.cache.incr(key)
self.assertEqual(expire, self.cache._expire_info[_key])
self.cache.decr(key)
self.assertEqual(expire, self.cache._expire_info[_key])
# memcached backend isn't guaranteed to be available.
# To check the memcached backend, the test settings file will
# need to contain at least one cache backend setting that points at
# your memcache server.
@unittest.skipUnless(
any(cache['BACKEND'].startswith('django.core.cache.backends.memcached.')
for cache in settings.CACHES.values()),
"memcached not available")
class MemcachedCacheTests(unittest.TestCase, BaseCacheTests):
def setUp(self):
for cache_key, cache in settings.CACHES.items():
if cache['BACKEND'].startswith('django.core.cache.backends.memcached.'):
break
random_prefix = ''.join(random.choice(string.ascii_letters) for x in range(10))
self.cache = get_cache(cache_key)
self.prefix_cache = get_cache(cache_key, KEY_PREFIX=random_prefix)
self.v2_cache = get_cache(cache_key, VERSION=2)
self.custom_key_cache = get_cache(cache_key, KEY_FUNCTION=custom_key_func)
self.custom_key_cache2 = get_cache(cache_key, KEY_FUNCTION='cache.tests.custom_key_func')
def tearDown(self):
self.cache.clear()
def test_invalid_keys(self):
"""
On memcached, we don't introduce a duplicate key validation
step (for speed reasons), we just let the memcached API
library raise its own exception on bad keys. Refs #6447.
In order to be memcached-API-library agnostic, we only assert
that a generic exception of some kind is raised.
"""
# memcached does not allow whitespace or control characters in keys
self.assertRaises(Exception, self.cache.set, 'key with spaces', 'value')
# memcached limits key length to 250
self.assertRaises(Exception, self.cache.set, 'a' * 251, 'value')
# Explicitly display a skipped test if no configured cache uses MemcachedCache
@unittest.skipUnless(
any(cache['BACKEND'] == 'django.core.cache.backends.memcached.MemcachedCache'
for cache in settings.CACHES.values()),
"cache with python-memcached library not available")
def test_memcached_uses_highest_pickle_version(self):
# Regression test for #19810
for cache_key, cache in settings.CACHES.items():
if cache['BACKEND'] == 'django.core.cache.backends.memcached.MemcachedCache':
self.assertEqual(get_cache(cache_key)._cache.pickleProtocol,
pickle.HIGHEST_PROTOCOL)
class FileBasedCacheTests(unittest.TestCase, BaseCacheTests):
"""
Specific test cases for the file-based cache.
"""
backend_name = 'django.core.cache.backends.filebased.FileBasedCache'
def setUp(self):
self.dirname = tempfile.mkdtemp()
self.cache = get_cache(self.backend_name, LOCATION=self.dirname, OPTIONS={'MAX_ENTRIES': 30})
self.prefix_cache = get_cache(self.backend_name, LOCATION=self.dirname, KEY_PREFIX='cacheprefix')
self.v2_cache = get_cache(self.backend_name, LOCATION=self.dirname, VERSION=2)
self.custom_key_cache = get_cache(self.backend_name, LOCATION=self.dirname, KEY_FUNCTION=custom_key_func)
self.custom_key_cache2 = get_cache(self.backend_name, LOCATION=self.dirname, KEY_FUNCTION='cache.tests.custom_key_func')
def tearDown(self):
self.cache.clear()
def test_hashing(self):
"""Test that keys are hashed into subdirectories correctly"""
self.cache.set("foo", "bar")
key = self.cache.make_key("foo")
keyhash = hashlib.md5(key.encode()).hexdigest()
keypath = os.path.join(self.dirname, keyhash[:2], keyhash[2:4], keyhash[4:])
self.assertTrue(os.path.exists(keypath))
def test_subdirectory_removal(self):
"""
Make sure that the created subdirectories are correctly removed when empty.
"""
self.cache.set("foo", "bar")
key = self.cache.make_key("foo")
keyhash = hashlib.md5(key.encode()).hexdigest()
keypath = os.path.join(self.dirname, keyhash[:2], keyhash[2:4], keyhash[4:])
self.assertTrue(os.path.exists(keypath))
self.cache.delete("foo")
self.assertTrue(not os.path.exists(keypath))
self.assertTrue(not os.path.exists(os.path.dirname(keypath)))
self.assertTrue(not os.path.exists(os.path.dirname(os.path.dirname(keypath))))
def test_cull(self):
self.perform_cull_test(50, 29)
def test_old_initialization(self):
self.cache = get_cache('file://%s?max_entries=30' % self.dirname)
self.perform_cull_test(50, 29)
class CustomCacheKeyValidationTests(unittest.TestCase):
"""
Tests for the ability to mixin a custom ``validate_key`` method to
a custom cache backend that otherwise inherits from a builtin
backend, and override the default key validation. Refs #6447.
"""
def test_custom_key_validation(self):
cache = get_cache('cache.liberal_backend://')
# this key is both longer than 250 characters, and has spaces
key = 'some key with spaces' * 15
val = 'a value'
cache.set(key, val)
self.assertEqual(cache.get(key), val)
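# A minimal sketch of what such a liberal backend might look like (the real
# 'cache.liberal_backend' module ships with the test suite; this is an
# assumed reconstruction, not its verbatim source):
#
#     from django.core.cache.backends.locmem import LocMemCache
#
#     class LiberalKeyValidationMixin(object):
#         def validate_key(self, key):
#             pass  # accept any key; skip the default length/character checks
#
#     class CacheClass(LiberalKeyValidationMixin, LocMemCache):
#         pass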
class GetCacheTests(unittest.TestCase):
def test_simple(self):
cache = get_cache('locmem://')
from django.core.cache.backends.locmem import LocMemCache
self.assertIsInstance(cache, LocMemCache)
from django.core.cache import cache
self.assertIsInstance(cache, get_cache('default').__class__)
cache = get_cache(
'django.core.cache.backends.dummy.DummyCache', **{'TIMEOUT': 120})
self.assertEqual(cache.default_timeout, 120)
self.assertRaises(InvalidCacheBackendError, get_cache, 'does_not_exist')
def test_close(self):
from django.core import signals
cache = get_cache('cache.closeable_cache.CacheClass')
self.assertFalse(cache.closed)
signals.request_finished.send(self.__class__)
self.assertTrue(cache.closed)
@override_settings(
CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix',
CACHE_MIDDLEWARE_SECONDS=1,
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
},
USE_I18N=False,
)
class CacheUtils(TestCase):
"""TestCase for django.utils.cache functions."""
def setUp(self):
self.path = '/cache/test/'
self.cache = get_cache('default')
def tearDown(self):
self.cache.clear()
def _get_request(self, path, method='GET'):
request = HttpRequest()
request.META = {
'SERVER_NAME': 'testserver',
'SERVER_PORT': 80,
}
request.method = method
request.path = request.path_info = "/cache/%s" % path
return request
def test_patch_vary_headers(self):
headers = (
# Initial vary, new headers, resulting vary.
(None, ('Accept-Encoding',), 'Accept-Encoding'),
('Accept-Encoding', ('accept-encoding',), 'Accept-Encoding'),
('Accept-Encoding', ('ACCEPT-ENCODING',), 'Accept-Encoding'),
('Cookie', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
('Cookie, Accept-Encoding', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
(None, ('Accept-Encoding', 'COOKIE'), 'Accept-Encoding, COOKIE'),
('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
('Cookie , Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
)
for initial_vary, newheaders, resulting_vary in headers:
response = HttpResponse()
if initial_vary is not None:
response['Vary'] = initial_vary
patch_vary_headers(response, newheaders)
self.assertEqual(response['Vary'], resulting_vary)
def test_get_cache_key(self):
request = self._get_request(self.path)
response = HttpResponse()
key_prefix = 'localprefix'
# Expect None if no headers have been set yet.
self.assertEqual(get_cache_key(request), None)
# Set headers to an empty list.
learn_cache_key(request, response)
self.assertEqual(get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.a8c87a3d8c44853d7f79474f7ffe4ad5.d41d8cd98f00b204e9800998ecf8427e')
# Verify that a specified key_prefix is taken into account.
learn_cache_key(request, response, key_prefix=key_prefix)
self.assertEqual(get_cache_key(request, key_prefix=key_prefix), 'views.decorators.cache.cache_page.localprefix.GET.a8c87a3d8c44853d7f79474f7ffe4ad5.d41d8cd98f00b204e9800998ecf8427e')
def test_get_cache_key_with_query(self):
request = self._get_request(self.path + '?test=1')
response = HttpResponse()
# Expect None if no headers have been set yet.
self.assertEqual(get_cache_key(request), None)
# Set headers to an empty list.
learn_cache_key(request, response)
# Verify that the querystring is taken into account.
self.assertEqual(get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.bd889c5a59603af44333ed21504db3cd.d41d8cd98f00b204e9800998ecf8427e')
def test_learn_cache_key(self):
request = self._get_request(self.path, 'HEAD')
response = HttpResponse()
response['Vary'] = 'Pony'
# Make sure that the Vary header is added to the key hash
learn_cache_key(request, response)
self.assertEqual(get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.a8c87a3d8c44853d7f79474f7ffe4ad5.d41d8cd98f00b204e9800998ecf8427e')
def test_patch_cache_control(self):
tests = (
# Initial Cache-Control, kwargs to patch_cache_control, expected Cache-Control parts
            (None, {'private': True}, set(['private'])),
            # Test whether private/public attributes are mutually exclusive
            ('private', {'private': True}, set(['private'])),
            ('private', {'public': True}, set(['public'])),
            ('public', {'public': True}, set(['public'])),
            ('public', {'private': True}, set(['private'])),
            ('must-revalidate,max-age=60,private', {'public': True}, set(['must-revalidate', 'max-age=60', 'public'])),
            ('must-revalidate,max-age=60,public', {'private': True}, set(['must-revalidate', 'max-age=60', 'private'])),
            ('must-revalidate,max-age=60', {'public': True}, set(['must-revalidate', 'max-age=60', 'public'])),
)
cc_delim_re = re.compile(r'\s*,\s*')
for initial_cc, newheaders, expected_cc in tests:
response = HttpResponse()
if initial_cc is not None:
response['Cache-Control'] = initial_cc
patch_cache_control(response, **newheaders)
parts = set(cc_delim_re.split(response['Cache-Control']))
self.assertEqual(parts, expected_cc)
@override_settings(
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'KEY_PREFIX': 'cacheprefix',
},
},
)
class PrefixedCacheUtils(CacheUtils):
pass
@override_settings(
CACHE_MIDDLEWARE_SECONDS=60,
CACHE_MIDDLEWARE_KEY_PREFIX='test',
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
},
)
class CacheHEADTest(TestCase):
def setUp(self):
self.path = '/cache/test/'
self.cache = get_cache('default')
def tearDown(self):
self.cache.clear()
def _get_request(self, method):
request = HttpRequest()
request.META = {
'SERVER_NAME': 'testserver',
'SERVER_PORT': 80,
}
request.method = method
request.path = request.path_info = self.path
return request
def _get_request_cache(self, method):
request = self._get_request(method)
request._cache_update_cache = True
return request
def _set_cache(self, request, msg):
response = HttpResponse()
response.content = msg
return UpdateCacheMiddleware().process_response(request, response)
def test_head_caches_correctly(self):
test_content = 'test content'
request = self._get_request_cache('HEAD')
self._set_cache(request, test_content)
request = self._get_request('HEAD')
get_cache_data = FetchFromCacheMiddleware().process_request(request)
self.assertNotEqual(get_cache_data, None)
self.assertEqual(test_content.encode(), get_cache_data.content)
def test_head_with_cached_get(self):
test_content = 'test content'
request = self._get_request_cache('GET')
self._set_cache(request, test_content)
request = self._get_request('HEAD')
get_cache_data = FetchFromCacheMiddleware().process_request(request)
self.assertNotEqual(get_cache_data, None)
self.assertEqual(test_content.encode(), get_cache_data.content)
@override_settings(
CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix',
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
},
LANGUAGES=(
('en', 'English'),
('es', 'Spanish'),
),
)
class CacheI18nTest(TestCase):
def setUp(self):
self.path = '/cache/test/'
self.cache = get_cache('default')
def tearDown(self):
self.cache.clear()
def _get_request(self, method='GET'):
request = HttpRequest()
request.META = {
'SERVER_NAME': 'testserver',
'SERVER_PORT': 80,
}
request.method = method
request.path = request.path_info = self.path
return request
def _get_request_cache(self, query_string=None):
request = HttpRequest()
request.META = {
'SERVER_NAME': 'testserver',
'SERVER_PORT': 80,
}
if query_string:
request.META['QUERY_STRING'] = query_string
request.GET = QueryDict(query_string)
request.path = request.path_info = self.path
request._cache_update_cache = True
request.method = 'GET'
request.session = {}
return request
@override_settings(USE_I18N=True, USE_L10N=False, USE_TZ=False)
def test_cache_key_i18n_translation(self):
request = self._get_request()
lang = translation.get_language()
response = HttpResponse()
key = learn_cache_key(request, response)
self.assertIn(lang, key, "Cache keys should include the language name when translation is active")
key2 = get_cache_key(request)
self.assertEqual(key, key2)
def check_accept_language_vary(self, accept_language, vary, reference_key):
request = self._get_request()
request.META['HTTP_ACCEPT_LANGUAGE'] = accept_language
request.META['HTTP_ACCEPT_ENCODING'] = 'gzip;q=1.0, identity; q=0.5, *;q=0'
response = HttpResponse()
response['Vary'] = vary
key = learn_cache_key(request, response)
key2 = get_cache_key(request)
self.assertEqual(key, reference_key)
self.assertEqual(key2, reference_key)
@override_settings(USE_I18N=True, USE_L10N=False, USE_TZ=False)
def test_cache_key_i18n_translation_accept_language(self):
lang = translation.get_language()
self.assertEqual(lang, 'en')
request = self._get_request()
request.META['HTTP_ACCEPT_ENCODING'] = 'gzip;q=1.0, identity; q=0.5, *;q=0'
response = HttpResponse()
response['Vary'] = 'accept-encoding'
key = learn_cache_key(request, response)
self.assertIn(lang, key, "Cache keys should include the language name when translation is active")
self.check_accept_language_vary(
'en-us',
'cookie, accept-language, accept-encoding',
key
)
self.check_accept_language_vary(
'en-US',
'cookie, accept-encoding, accept-language',
key
)
self.check_accept_language_vary(
'en-US,en;q=0.8',
'accept-encoding, accept-language, cookie',
key
)
self.check_accept_language_vary(
'en-US,en;q=0.8,ko;q=0.6',
'accept-language, cookie, accept-encoding',
key
)
self.check_accept_language_vary(
'ko-kr,ko;q=0.8,en-us;q=0.5,en;q=0.3 ',
'accept-encoding, cookie, accept-language',
key
)
self.check_accept_language_vary(
'ko-KR,ko;q=0.8,en-US;q=0.6,en;q=0.4',
'accept-language, accept-encoding, cookie',
key
)
self.check_accept_language_vary(
'ko;q=1.0,en;q=0.5',
'cookie, accept-language, accept-encoding',
key
)
self.check_accept_language_vary(
'ko, en',
'cookie, accept-encoding, accept-language',
key
)
self.check_accept_language_vary(
'ko-KR, en-US',
'accept-encoding, accept-language, cookie',
key
)
@override_settings(USE_I18N=False, USE_L10N=True, USE_TZ=False)
def test_cache_key_i18n_formatting(self):
request = self._get_request()
lang = translation.get_language()
response = HttpResponse()
key = learn_cache_key(request, response)
self.assertIn(lang, key, "Cache keys should include the language name when formatting is active")
key2 = get_cache_key(request)
self.assertEqual(key, key2)
@override_settings(USE_I18N=False, USE_L10N=False, USE_TZ=True)
def test_cache_key_i18n_timezone(self):
request = self._get_request()
# This is tightly coupled to the implementation,
# but it's the most straightforward way to test the key.
tz = force_text(timezone.get_current_timezone_name(), errors='ignore')
tz = tz.encode('ascii', 'ignore').decode('ascii').replace(' ', '_')
response = HttpResponse()
key = learn_cache_key(request, response)
self.assertIn(tz, key, "Cache keys should include the time zone name when time zones are active")
key2 = get_cache_key(request)
self.assertEqual(key, key2)
@override_settings(USE_I18N=False, USE_L10N=False)
    def test_cache_key_no_i18n(self):
request = self._get_request()
lang = translation.get_language()
tz = force_text(timezone.get_current_timezone_name(), errors='ignore')
tz = tz.encode('ascii', 'ignore').decode('ascii').replace(' ', '_')
response = HttpResponse()
key = learn_cache_key(request, response)
self.assertNotIn(lang, key, "Cache keys shouldn't include the language name when i18n isn't active")
self.assertNotIn(tz, key, "Cache keys shouldn't include the time zone name when i18n isn't active")
@override_settings(USE_I18N=False, USE_L10N=False, USE_TZ=True)
def test_cache_key_with_non_ascii_tzname(self):
# Regression test for #17476
class CustomTzName(timezone.UTC):
name = ''
def tzname(self, dt):
return self.name
request = self._get_request()
response = HttpResponse()
with timezone.override(CustomTzName()):
CustomTzName.name = 'Hora estándar de Argentina'.encode('UTF-8') # UTF-8 string
sanitized_name = 'Hora_estndar_de_Argentina'
self.assertIn(sanitized_name, learn_cache_key(request, response),
"Cache keys should include the time zone name when time zones are active")
CustomTzName.name = 'Hora estándar de Argentina' # unicode
sanitized_name = 'Hora_estndar_de_Argentina'
self.assertIn(sanitized_name, learn_cache_key(request, response),
"Cache keys should include the time zone name when time zones are active")
@override_settings(
CACHE_MIDDLEWARE_KEY_PREFIX="test",
CACHE_MIDDLEWARE_SECONDS=60,
USE_ETAGS=True,
USE_I18N=True,
)
def test_middleware(self):
def set_cache(request, lang, msg):
translation.activate(lang)
response = HttpResponse()
response.content = msg
return UpdateCacheMiddleware().process_response(request, response)
        # cache with non-empty request.GET
request = self._get_request_cache(query_string='foo=bar&other=true')
get_cache_data = FetchFromCacheMiddleware().process_request(request)
# first access, cache must return None
self.assertEqual(get_cache_data, None)
response = HttpResponse()
content = 'Check for cache with QUERY_STRING'
response.content = content
UpdateCacheMiddleware().process_response(request, response)
get_cache_data = FetchFromCacheMiddleware().process_request(request)
# cache must return content
self.assertNotEqual(get_cache_data, None)
self.assertEqual(get_cache_data.content, content.encode())
# different QUERY_STRING, cache must be empty
request = self._get_request_cache(query_string='foo=bar&somethingelse=true')
get_cache_data = FetchFromCacheMiddleware().process_request(request)
self.assertEqual(get_cache_data, None)
# i18n tests
en_message ="Hello world!"
es_message ="Hola mundo!"
request = self._get_request_cache()
set_cache(request, 'en', en_message)
get_cache_data = FetchFromCacheMiddleware().process_request(request)
# Check that we can recover the cache
self.assertNotEqual(get_cache_data, None)
self.assertEqual(get_cache_data.content, en_message.encode())
# Check that we use etags
self.assertTrue(get_cache_data.has_header('ETag'))
# Check that we can disable etags
with self.settings(USE_ETAGS=False):
request._cache_update_cache = True
set_cache(request, 'en', en_message)
get_cache_data = FetchFromCacheMiddleware().process_request(request)
self.assertFalse(get_cache_data.has_header('ETag'))
# change the session language and set content
request = self._get_request_cache()
set_cache(request, 'es', es_message)
        # change the language again
translation.activate('en')
# retrieve the content from cache
get_cache_data = FetchFromCacheMiddleware().process_request(request)
self.assertEqual(get_cache_data.content, en_message.encode())
        # change the language again
translation.activate('es')
get_cache_data = FetchFromCacheMiddleware().process_request(request)
self.assertEqual(get_cache_data.content, es_message.encode())
# reset the language
translation.deactivate()
@override_settings(
CACHE_MIDDLEWARE_KEY_PREFIX="test",
CACHE_MIDDLEWARE_SECONDS=60,
USE_ETAGS=True,
)
def test_middleware_doesnt_cache_streaming_response(self):
request = self._get_request()
get_cache_data = FetchFromCacheMiddleware().process_request(request)
self.assertIsNone(get_cache_data)
# This test passes on Python < 3.3 even without the corresponding code
# in UpdateCacheMiddleware, because pickling a StreamingHttpResponse
# fails (http://bugs.python.org/issue14288). LocMemCache silently
# swallows the exception and doesn't store the response in cache.
content = ['Check for cache with streaming content.']
response = StreamingHttpResponse(content)
UpdateCacheMiddleware().process_response(request, response)
get_cache_data = FetchFromCacheMiddleware().process_request(request)
self.assertIsNone(get_cache_data)
@override_settings(
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'KEY_PREFIX': 'cacheprefix'
},
},
)
class PrefixedCacheI18nTest(CacheI18nTest):
pass
def hello_world_view(request, value):
return HttpResponse('Hello World %s' % value)
def csrf_view(request):
return HttpResponse(csrf(request)['csrf_token'])
@override_settings(
CACHE_MIDDLEWARE_ALIAS='other',
CACHE_MIDDLEWARE_KEY_PREFIX='middlewareprefix',
CACHE_MIDDLEWARE_SECONDS=30,
CACHE_MIDDLEWARE_ANONYMOUS_ONLY=False,
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
'other': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'other',
'TIMEOUT': '1',
},
},
)
class CacheMiddlewareTest(IgnorePendingDeprecationWarningsMixin, TestCase):
def setUp(self):
super(CacheMiddlewareTest, self).setUp()
self.factory = RequestFactory()
self.default_cache = get_cache('default')
self.other_cache = get_cache('other')
def tearDown(self):
self.default_cache.clear()
self.other_cache.clear()
super(CacheMiddlewareTest, self).tearDown()
def test_constructor(self):
"""
Ensure the constructor is correctly distinguishing between usage of CacheMiddleware as
Middleware vs. usage of CacheMiddleware as view decorator and setting attributes
appropriately.
"""
# If no arguments are passed in construction, it's being used as middleware.
middleware = CacheMiddleware()
# Now test object attributes against values defined in setUp above
self.assertEqual(middleware.cache_timeout, 30)
self.assertEqual(middleware.key_prefix, 'middlewareprefix')
self.assertEqual(middleware.cache_alias, 'other')
self.assertEqual(middleware.cache_anonymous_only, False)
# If arguments are being passed in construction, it's being used as a decorator.
# First, test with "defaults":
as_view_decorator = CacheMiddleware(cache_alias=None, key_prefix=None)
self.assertEqual(as_view_decorator.cache_timeout, 300) # Timeout value for 'default' cache, i.e. 300
self.assertEqual(as_view_decorator.key_prefix, '')
self.assertEqual(as_view_decorator.cache_alias, 'default') # Value of DEFAULT_CACHE_ALIAS from django.core.cache
self.assertEqual(as_view_decorator.cache_anonymous_only, False)
# Next, test with custom values:
as_view_decorator_with_custom = CacheMiddleware(cache_anonymous_only=True, cache_timeout=60, cache_alias='other', key_prefix='foo')
self.assertEqual(as_view_decorator_with_custom.cache_timeout, 60)
self.assertEqual(as_view_decorator_with_custom.key_prefix, 'foo')
self.assertEqual(as_view_decorator_with_custom.cache_alias, 'other')
self.assertEqual(as_view_decorator_with_custom.cache_anonymous_only, True)
def test_middleware(self):
middleware = CacheMiddleware()
prefix_middleware = CacheMiddleware(key_prefix='prefix1')
timeout_middleware = CacheMiddleware(cache_timeout=1)
request = self.factory.get('/view/')
# Put the request through the request middleware
result = middleware.process_request(request)
self.assertEqual(result, None)
response = hello_world_view(request, '1')
# Now put the response through the response middleware
response = middleware.process_response(request, response)
# Repeating the request should result in a cache hit
result = middleware.process_request(request)
self.assertNotEqual(result, None)
self.assertEqual(result.content, b'Hello World 1')
# The same request through a different middleware won't hit
result = prefix_middleware.process_request(request)
self.assertEqual(result, None)
# The same request with a timeout _will_ hit
result = timeout_middleware.process_request(request)
self.assertNotEqual(result, None)
self.assertEqual(result.content, b'Hello World 1')
@override_settings(CACHE_MIDDLEWARE_ANONYMOUS_ONLY=True)
def test_cache_middleware_anonymous_only_wont_cause_session_access(self):
""" The cache middleware shouldn't cause a session access due to
CACHE_MIDDLEWARE_ANONYMOUS_ONLY if nothing else has accessed the
session. Refs 13283 """
from django.contrib.sessions.middleware import SessionMiddleware
from django.contrib.auth.middleware import AuthenticationMiddleware
middleware = CacheMiddleware()
session_middleware = SessionMiddleware()
auth_middleware = AuthenticationMiddleware()
request = self.factory.get('/view_anon/')
# Put the request through the request middleware
session_middleware.process_request(request)
auth_middleware.process_request(request)
result = middleware.process_request(request)
self.assertEqual(result, None)
response = hello_world_view(request, '1')
# Now put the response through the response middleware
session_middleware.process_response(request, response)
response = middleware.process_response(request, response)
self.assertEqual(request.session.accessed, False)
@override_settings(CACHE_MIDDLEWARE_ANONYMOUS_ONLY=True)
def test_cache_middleware_anonymous_only_with_cache_page(self):
"""CACHE_MIDDLEWARE_ANONYMOUS_ONLY should still be effective when used
with the cache_page decorator: the response to a request from an
authenticated user should not be cached."""
request = self.factory.get('/view_anon/')
class MockAuthenticatedUser(object):
def is_authenticated(self):
return True
class MockAccessedSession(object):
accessed = True
request.user = MockAuthenticatedUser()
request.session = MockAccessedSession()
response = cache_page(60)(hello_world_view)(request, '1')
self.assertFalse("Cache-Control" in response)
def test_view_decorator(self):
# decorate the same view with different cache decorators
default_view = cache_page(3)(hello_world_view)
default_with_prefix_view = cache_page(3, key_prefix='prefix1')(hello_world_view)
explicit_default_view = cache_page(3, cache='default')(hello_world_view)
explicit_default_with_prefix_view = cache_page(3, cache='default', key_prefix='prefix1')(hello_world_view)
other_view = cache_page(1, cache='other')(hello_world_view)
other_with_prefix_view = cache_page(1, cache='other', key_prefix='prefix2')(hello_world_view)
request = self.factory.get('/view/')
# Request the view once
response = default_view(request, '1')
self.assertEqual(response.content, b'Hello World 1')
# Request again -- hit the cache
response = default_view(request, '2')
self.assertEqual(response.content, b'Hello World 1')
# Requesting the same view with the explicit cache should yield the same result
response = explicit_default_view(request, '3')
self.assertEqual(response.content, b'Hello World 1')
# Requesting with a prefix will hit a different cache key
response = explicit_default_with_prefix_view(request, '4')
self.assertEqual(response.content, b'Hello World 4')
# Hitting the same view again gives a cache hit
response = explicit_default_with_prefix_view(request, '5')
self.assertEqual(response.content, b'Hello World 4')
# And going back to the implicit cache will hit the same cache
response = default_with_prefix_view(request, '6')
self.assertEqual(response.content, b'Hello World 4')
# Requesting from an alternate cache won't hit cache
response = other_view(request, '7')
self.assertEqual(response.content, b'Hello World 7')
# But a repeated hit will hit cache
response = other_view(request, '8')
self.assertEqual(response.content, b'Hello World 7')
# And prefixing the alternate cache yields yet another cache entry
response = other_with_prefix_view(request, '9')
self.assertEqual(response.content, b'Hello World 9')
# But if we wait a couple of seconds...
time.sleep(2)
# ... the default cache will still hit
cache = get_cache('default')
response = default_view(request, '11')
self.assertEqual(response.content, b'Hello World 1')
# ... the default cache with a prefix will still hit
response = default_with_prefix_view(request, '12')
self.assertEqual(response.content, b'Hello World 4')
# ... the explicit default cache will still hit
response = explicit_default_view(request, '13')
self.assertEqual(response.content, b'Hello World 1')
# ... the explicit default cache with a prefix will still hit
response = explicit_default_with_prefix_view(request, '14')
self.assertEqual(response.content, b'Hello World 4')
# .. but a rapidly expiring cache won't hit
response = other_view(request, '15')
self.assertEqual(response.content, b'Hello World 15')
# .. even if it has a prefix
response = other_with_prefix_view(request, '16')
self.assertEqual(response.content, b'Hello World 16')
def test_sensitive_cookie_not_cached(self):
"""
Django must prevent caching of responses that set a user-specific (and
maybe security sensitive) cookie in response to a cookie-less request.
"""
csrf_middleware = CsrfViewMiddleware()
cache_middleware = CacheMiddleware()
request = self.factory.get('/view/')
self.assertIsNone(cache_middleware.process_request(request))
csrf_middleware.process_view(request, csrf_view, (), {})
response = csrf_view(request)
response = csrf_middleware.process_response(request, response)
response = cache_middleware.process_response(request, response)
# Inserting a CSRF cookie in a cookie-less request prevented caching.
self.assertIsNone(cache_middleware.process_request(request))
@override_settings(
CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix',
CACHE_MIDDLEWARE_SECONDS=1,
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
},
USE_I18N=False,
)
class TestWithTemplateResponse(TestCase):
"""
Tests various headers w/ TemplateResponse.
Most are probably redundant since they manipulate the same object
anyway but the Etag header is 'special' because it relies on the
content being complete (which is not necessarily always the case
with a TemplateResponse)
"""
def setUp(self):
self.path = '/cache/test/'
self.cache = get_cache('default')
def tearDown(self):
self.cache.clear()
def _get_request(self, path, method='GET'):
request = HttpRequest()
request.META = {
'SERVER_NAME': 'testserver',
'SERVER_PORT': 80,
}
request.method = method
request.path = request.path_info = "/cache/%s" % path
return request
def test_patch_vary_headers(self):
headers = (
# Initial vary, new headers, resulting vary.
(None, ('Accept-Encoding',), 'Accept-Encoding'),
('Accept-Encoding', ('accept-encoding',), 'Accept-Encoding'),
('Accept-Encoding', ('ACCEPT-ENCODING',), 'Accept-Encoding'),
('Cookie', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
('Cookie, Accept-Encoding', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
(None, ('Accept-Encoding', 'COOKIE'), 'Accept-Encoding, COOKIE'),
('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
('Cookie , Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
)
for initial_vary, newheaders, resulting_vary in headers:
response = TemplateResponse(HttpResponse(), Template("This is a test"))
if initial_vary is not None:
response['Vary'] = initial_vary
patch_vary_headers(response, newheaders)
self.assertEqual(response['Vary'], resulting_vary)
def test_get_cache_key(self):
request = self._get_request(self.path)
response = TemplateResponse(HttpResponse(), Template("This is a test"))
key_prefix = 'localprefix'
# Expect None if no headers have been set yet.
self.assertEqual(get_cache_key(request), None)
# Set headers to an empty list.
learn_cache_key(request, response)
self.assertEqual(get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.a8c87a3d8c44853d7f79474f7ffe4ad5.d41d8cd98f00b204e9800998ecf8427e')
# Verify that a specified key_prefix is taken into account.
learn_cache_key(request, response, key_prefix=key_prefix)
self.assertEqual(get_cache_key(request, key_prefix=key_prefix), 'views.decorators.cache.cache_page.localprefix.GET.a8c87a3d8c44853d7f79474f7ffe4ad5.d41d8cd98f00b204e9800998ecf8427e')
def test_get_cache_key_with_query(self):
request = self._get_request(self.path + '?test=1')
response = TemplateResponse(HttpResponse(), Template("This is a test"))
# Expect None if no headers have been set yet.
self.assertEqual(get_cache_key(request), None)
# Set headers to an empty list.
learn_cache_key(request, response)
# Verify that the querystring is taken into account.
self.assertEqual(get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.bd889c5a59603af44333ed21504db3cd.d41d8cd98f00b204e9800998ecf8427e')
@override_settings(USE_ETAGS=False)
def test_without_etag(self):
response = TemplateResponse(HttpResponse(), Template("This is a test"))
self.assertFalse(response.has_header('ETag'))
patch_response_headers(response)
self.assertFalse(response.has_header('ETag'))
response = response.render()
self.assertFalse(response.has_header('ETag'))
@override_settings(USE_ETAGS=True)
def test_with_etag(self):
response = TemplateResponse(HttpResponse(), Template("This is a test"))
self.assertFalse(response.has_header('ETag'))
patch_response_headers(response)
self.assertFalse(response.has_header('ETag'))
response = response.render()
self.assertTrue(response.has_header('ETag'))
class TestEtagWithAdmin(TestCase):
# See https://code.djangoproject.com/ticket/16003
urls = "admin_views.urls"
def test_admin(self):
with self.settings(USE_ETAGS=False):
response = self.client.get('/test_admin/admin/')
self.assertEqual(response.status_code, 200)
self.assertFalse(response.has_header('ETag'))
with self.settings(USE_ETAGS=True):
response = self.client.get('/test_admin/admin/')
self.assertEqual(response.status_code, 200)
self.assertTrue(response.has_header('ETag'))
class TestMakeTemplateFragmentKey(TestCase):
def test_without_vary_on(self):
key = make_template_fragment_key('a.fragment')
self.assertEqual(key, 'template.cache.a.fragment.d41d8cd98f00b204e9800998ecf8427e')
def test_with_one_vary_on(self):
key = make_template_fragment_key('foo', ['abc'])
self.assertEqual(key,
'template.cache.foo.900150983cd24fb0d6963f7d28e17f72')
def test_with_many_vary_on(self):
key = make_template_fragment_key('bar', ['abc', 'def'])
self.assertEqual(key,
'template.cache.bar.4b35f12ab03cec09beec4c21b2d2fa88')
def test_proper_escaping(self):
key = make_template_fragment_key('spam', ['abc:def%'])
self.assertEqual(key,
'template.cache.spam.f27688177baec990cdf3fbd9d9c3f469')
|
seken/nink
|
refs/heads/master
|
pyglet/window/xlib/__init__.py
|
4
|
# ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
__docformat__ = 'restructuredtext'
__version__ = '$Id$'
from ctypes import *
import unicodedata
import warnings
import pyglet
from pyglet.window import WindowException, NoSuchDisplayException, \
MouseCursorException, MouseCursor, \
DefaultMouseCursor, ImageMouseCursor, BaseWindow, _PlatformEventHandler, \
_ViewEventHandler
from pyglet.window import key
from pyglet.window import mouse
from pyglet.event import EventDispatcher
from pyglet.canvas.xlib import XlibCanvas
from pyglet.libs.x11 import xlib
from pyglet.libs.x11 import cursorfont
from pyglet.compat import asbytes
try:
from pyglet.libs.x11 import xsync
_have_xsync = True
except ImportError:
_have_xsync = False
class mwmhints_t(Structure):
_fields_ = [
('flags', c_uint32),
('functions', c_uint32),
('decorations', c_uint32),
('input_mode', c_int32),
('status', c_uint32)
]
XA_CARDINAL = 6 # Xatom.h:14
# Do we have the November 2000 UTF8 extension?
_have_utf8 = hasattr(xlib._lib, 'Xutf8TextListToTextProperty')
# symbol,ctrl -> motion mapping
_motion_map = {
(key.UP, False): key.MOTION_UP,
(key.RIGHT, False): key.MOTION_RIGHT,
(key.DOWN, False): key.MOTION_DOWN,
(key.LEFT, False): key.MOTION_LEFT,
(key.RIGHT, True): key.MOTION_NEXT_WORD,
(key.LEFT, True): key.MOTION_PREVIOUS_WORD,
(key.HOME, False): key.MOTION_BEGINNING_OF_LINE,
(key.END, False): key.MOTION_END_OF_LINE,
(key.PAGEUP, False): key.MOTION_PREVIOUS_PAGE,
(key.PAGEDOWN, False): key.MOTION_NEXT_PAGE,
(key.HOME, True): key.MOTION_BEGINNING_OF_FILE,
(key.END, True): key.MOTION_END_OF_FILE,
(key.BACKSPACE, False): key.MOTION_BACKSPACE,
(key.DELETE, False): key.MOTION_DELETE,
}
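# Illustrative lookup (hypothetical 'symbol' and 'modifiers' variables):
# given a decoded keysym and whether Ctrl is held, the corresponding text
# motion event, if any, is
#
#     motion = _motion_map.get((symbol, bool(modifiers & key.MOD_CTRL)))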
class XlibException(WindowException):
'''An X11-specific exception. This exception is probably a programming
error in pyglet.'''
pass
class XlibMouseCursor(MouseCursor):
drawable = False
def __init__(self, cursor):
self.cursor = cursor
# Platform event data is single item, so use platform event handler directly.
XlibEventHandler = _PlatformEventHandler
ViewEventHandler = _ViewEventHandler
class XlibWindow(BaseWindow):
_x_display = None # X display connection
_x_screen_id = None # X screen index
_x_ic = None # X input context
_window = None # Xlib window handle
_minimum_size = None
_maximum_size = None
_override_redirect = False
_x = 0
_y = 0 # Last known window position
_width = 0
_height = 0 # Last known window size
_mouse_exclusive_client = None # x,y of "real" mouse during exclusive
_mouse_buttons = [False] * 6 # State of each xlib button
_keyboard_exclusive = False
_active = True
_applied_mouse_exclusive = False
_applied_keyboard_exclusive = False
_mapped = False
_lost_context = False
_lost_context_state = False
_enable_xsync = False
_current_sync_value = None
_current_sync_valid = False
_needs_resize = False # True when resize event has been received but not
# dispatched
_default_event_mask = (0x1ffffff
& ~xlib.PointerMotionHintMask
& ~xlib.ResizeRedirectMask
& ~xlib.SubstructureNotifyMask)
def __init__(self, *args, **kwargs):
# Bind event handlers
self._event_handlers = {}
self._view_event_handlers = {}
for name in self._platform_event_names:
if not hasattr(self, name):
continue
func = getattr(self, name)
for message in func._platform_event_data:
if hasattr(func, '_view'):
self._view_event_handlers[message] = func
else:
self._event_handlers[message] = func
super(XlibWindow, self).__init__(*args, **kwargs)
def _recreate(self, changes):
# If flipping to/from fullscreen, need to recreate the window. (This
# is the case with both override_redirect method and
# _NET_WM_STATE_FULLSCREEN).
#
# A possible improvement could be to just hide the top window,
# destroy the GLX window, and reshow it again when leaving fullscreen.
# This would prevent the floating window from being moved by the
# WM.
if ('fullscreen' in changes or 'resizable' in changes):
# clear out the GLX context
self.context.detach()
xlib.XDestroyWindow(self._x_display, self._window)
del self.display._window_map[self._window]
del self.display._window_map[self._view]
self._window = None
self._mapped = False
# TODO: detect state loss only by examining context share.
if 'context' in changes:
self._lost_context = True
self._lost_context_state = True
self._create()
def _create(self):
# Unmap existing window if necessary while we fiddle with it.
if self._window and self._mapped:
self._unmap()
self._x_display = self.display._display
self._x_screen_id = self.display.x_screen
# Create X window if not already existing.
if not self._window:
root = xlib.XRootWindow(self._x_display, self._x_screen_id)
visual_info = self.config.get_visual_info()
visual = visual_info.visual
visual_id = xlib.XVisualIDFromVisual(visual)
default_visual = xlib.XDefaultVisual(
self._x_display, self._x_screen_id)
default_visual_id = xlib.XVisualIDFromVisual(default_visual)
window_attributes = xlib.XSetWindowAttributes()
if visual_id != default_visual_id:
window_attributes.colormap = xlib.XCreateColormap(
self._x_display, root, visual, xlib.AllocNone)
else:
window_attributes.colormap = xlib.XDefaultColormap(
self._x_display, self._x_screen_id)
window_attributes.bit_gravity = xlib.StaticGravity
# Issue 287: Compiz on Intel/Mesa doesn't draw window decoration
# unless CWBackPixel is given in mask. Should have
# no effect on other systems, so it's set
# unconditionally.
mask = xlib.CWColormap | xlib.CWBitGravity | xlib.CWBackPixel
if self._fullscreen:
width, height = self.screen.width, self.screen.height
self._view_x = (width - self._width) // 2
self._view_y = (height - self._height) // 2
else:
width, height = self._width, self._height
self._view_x = self._view_y = 0
self._window = xlib.XCreateWindow(self._x_display, root,
0, 0, width, height, 0, visual_info.depth,
xlib.InputOutput, visual, mask,
byref(window_attributes))
self._view = xlib.XCreateWindow(self._x_display,
self._window, self._view_x, self._view_y,
self._width, self._height, 0, visual_info.depth,
xlib.InputOutput, visual, mask,
                byref(window_attributes))
xlib.XMapWindow(self._x_display, self._view)
xlib.XSelectInput(
self._x_display, self._view, self._default_event_mask)
self.display._window_map[self._window] = \
self.dispatch_platform_event
self.display._window_map[self._view] = \
self.dispatch_platform_event_view
self.canvas = XlibCanvas(self.display, self._view)
self.context.attach(self.canvas)
self.context.set_vsync(self._vsync) # XXX ?
# Setting null background pixmap disables drawing the background,
# preventing flicker while resizing (in theory).
#
# Issue 287: Compiz on Intel/Mesa doesn't draw window decoration if
# this is called. As it doesn't seem to have any
# effect anyway, it's just commented out.
#xlib.XSetWindowBackgroundPixmap(self._x_display, self._window, 0)
self._enable_xsync = (pyglet.options['xsync'] and
self.display._enable_xsync and
self.config.double_buffer)
# Set supported protocols
protocols = []
protocols.append(xlib.XInternAtom(self._x_display,
asbytes('WM_DELETE_WINDOW'), False))
if self._enable_xsync:
protocols.append(xlib.XInternAtom(self._x_display,
asbytes('_NET_WM_SYNC_REQUEST'),
False))
protocols = (c_ulong * len(protocols))(*protocols)
xlib.XSetWMProtocols(self._x_display, self._window,
protocols, len(protocols))
# Create window resize sync counter
if self._enable_xsync:
value = xsync.XSyncValue()
self._sync_counter = xlib.XID(
xsync.XSyncCreateCounter(self._x_display, value))
atom = xlib.XInternAtom(self._x_display,
asbytes('_NET_WM_SYNC_REQUEST_COUNTER'), False)
ptr = pointer(self._sync_counter)
xlib.XChangeProperty(self._x_display, self._window,
atom, XA_CARDINAL, 32,
xlib.PropModeReplace,
cast(ptr, POINTER(c_ubyte)), 1)
# Set window attributes
attributes = xlib.XSetWindowAttributes()
attributes_mask = 0
self._override_redirect = False
if self._fullscreen:
if pyglet.options['xlib_fullscreen_override_redirect']:
# Try not to use this any more, it causes problems; disabled
# by default in favour of _NET_WM_STATE_FULLSCREEN.
attributes.override_redirect = self._fullscreen
attributes_mask |= xlib.CWOverrideRedirect
self._override_redirect = True
else:
self._set_wm_state('_NET_WM_STATE_FULLSCREEN')
if self._fullscreen:
xlib.XMoveResizeWindow(self._x_display, self._window,
self.screen.x, self.screen.y,
self.screen.width, self.screen.height)
else:
xlib.XResizeWindow(self._x_display, self._window,
self._width, self._height)
xlib.XChangeWindowAttributes(self._x_display, self._window,
attributes_mask, byref(attributes))
# Set style
styles = {
self.WINDOW_STYLE_DEFAULT: '_NET_WM_WINDOW_TYPE_NORMAL',
self.WINDOW_STYLE_DIALOG: '_NET_WM_WINDOW_TYPE_DIALOG',
self.WINDOW_STYLE_TOOL: '_NET_WM_WINDOW_TYPE_UTILITY',
}
if self._style in styles:
self._set_atoms_property('_NET_WM_WINDOW_TYPE',
(styles[self._style],))
elif self._style == self.WINDOW_STYLE_BORDERLESS:
MWM_HINTS_DECORATIONS = 1 << 1
PROP_MWM_HINTS_ELEMENTS = 5
mwmhints = mwmhints_t()
mwmhints.flags = MWM_HINTS_DECORATIONS
mwmhints.decorations = 0
            name = xlib.XInternAtom(self._x_display, asbytes('_MOTIF_WM_HINTS'), False)
xlib.XChangeProperty(self._x_display, self._window,
name, name, 32, xlib.PropModeReplace,
cast(pointer(mwmhints), POINTER(c_ubyte)),
PROP_MWM_HINTS_ELEMENTS)
# Set resizeable
if not self._resizable and not self._fullscreen:
self.set_minimum_size(self._width, self._height)
self.set_maximum_size(self._width, self._height)
# Set caption
self.set_caption(self._caption)
# Create input context. A good but very outdated reference for this
# is http://www.sbin.org/doc/Xlib/chapt_11.html
if _have_utf8 and not self._x_ic:
if not self.display._x_im:
xlib.XSetLocaleModifiers(asbytes('@im=none'))
self.display._x_im = \
xlib.XOpenIM(self._x_display, None, None, None)
            xlib.XFlush(self._x_display)
# Need to set argtypes on this function because it's vararg,
# and ctypes guesses wrong.
xlib.XCreateIC.argtypes = [xlib.XIM,
c_char_p, c_int,
c_char_p, xlib.Window,
c_char_p, xlib.Window,
c_void_p]
self._x_ic = xlib.XCreateIC(self.display._x_im,
asbytes('inputStyle'), xlib.XIMPreeditNothing|xlib.XIMStatusNothing,
asbytes('clientWindow'), self._window,
asbytes('focusWindow'), self._window,
None)
filter_events = c_ulong()
xlib.XGetICValues(self._x_ic,
'filterEvents', byref(filter_events),
None)
self._default_event_mask |= filter_events.value
xlib.XSetICFocus(self._x_ic)
self.switch_to()
if self._visible:
self.set_visible(True)
self.set_mouse_platform_visible()
self._applied_mouse_exclusive = None
self._update_exclusivity()
def _map(self):
if self._mapped:
return
# Map the window, wait for map event before continuing.
xlib.XSelectInput(
self._x_display, self._window, xlib.StructureNotifyMask)
xlib.XMapRaised(self._x_display, self._window)
e = xlib.XEvent()
while True:
xlib.XNextEvent(self._x_display, e)
if e.type == xlib.MapNotify:
break
xlib.XSelectInput(
self._x_display, self._window, self._default_event_mask)
self._mapped = True
if self._override_redirect:
# Possibly an override_redirect issue.
self.activate()
self.dispatch_event('on_resize', self._width, self._height)
self.dispatch_event('on_show')
self.dispatch_event('on_expose')
def _unmap(self):
if not self._mapped:
return
xlib.XSelectInput(
self._x_display, self._window, xlib.StructureNotifyMask)
xlib.XUnmapWindow(self._x_display, self._window)
e = xlib.XEvent()
while True:
xlib.XNextEvent(self._x_display, e)
if e.type == xlib.UnmapNotify:
break
xlib.XSelectInput(
self._x_display, self._window, self._default_event_mask)
self._mapped = False
def _get_root(self):
attributes = xlib.XWindowAttributes()
xlib.XGetWindowAttributes(self._x_display, self._window,
byref(attributes))
return attributes.root
def close(self):
if not self._window:
return
self.context.destroy()
self._unmap()
if self._window:
xlib.XDestroyWindow(self._x_display, self._window)
del self.display._window_map[self._window]
self._window = None
if _have_utf8:
xlib.XDestroyIC(self._x_ic)
self._x_ic = None
super(XlibWindow, self).close()
def switch_to(self):
if self.context:
self.context.set_current()
def flip(self):
self.draw_mouse_cursor()
# TODO canvas.flip?
if self.context:
self.context.flip()
self._sync_resize()
def set_vsync(self, vsync):
if pyglet.options['vsync'] is not None:
vsync = pyglet.options['vsync']
self._vsync = vsync
self.context.set_vsync(vsync)
def set_caption(self, caption):
if caption is None:
caption = ''
self._caption = caption
self._set_text_property('WM_NAME', caption, allow_utf8=False)
self._set_text_property('WM_ICON_NAME', caption, allow_utf8=False)
self._set_text_property('_NET_WM_NAME', caption)
self._set_text_property('_NET_WM_ICON_NAME', caption)
def get_caption(self):
return self._caption
def set_size(self, width, height):
if self._fullscreen:
raise WindowException('Cannot set size of fullscreen window.')
self._width = width
self._height = height
if not self._resizable:
self.set_minimum_size(width, height)
self.set_maximum_size(width, height)
xlib.XResizeWindow(self._x_display, self._window, width, height)
self._update_view_size()
self.dispatch_event('on_resize', width, height)
def _update_view_size(self):
xlib.XResizeWindow(self._x_display, self._view,
self._width, self._height)
def get_size(self):
# XGetGeometry and XWindowAttributes seem to always return the
# original size of the window, which is wrong after the user
# has resized it.
# XXX this is probably fixed now, with fix of resize.
return self._width, self._height
def set_location(self, x, y):
# Assume the window manager has reparented our top-level window
# only once, in which case attributes.x/y give the offset from
# the frame to the content window. Better solution would be
# to use _NET_FRAME_EXTENTS, where supported.
attributes = xlib.XWindowAttributes()
xlib.XGetWindowAttributes(self._x_display, self._window,
byref(attributes))
# XXX at least under KDE's WM these attrs are both 0
x -= attributes.x
y -= attributes.y
xlib.XMoveWindow(self._x_display, self._window, x, y)
def get_location(self):
child = xlib.Window()
x = c_int()
y = c_int()
xlib.XTranslateCoordinates(self._x_display,
self._window,
self._get_root(),
0, 0,
byref(x),
byref(y),
byref(child))
return x.value, y.value
def activate(self):
xlib.XSetInputFocus(self._x_display, self._window,
xlib.RevertToParent, xlib.CurrentTime)
def set_visible(self, visible=True):
if visible:
self._map()
else:
self._unmap()
self._visible = visible
def set_minimum_size(self, width, height):
self._minimum_size = width, height
self._set_wm_normal_hints()
def set_maximum_size(self, width, height):
self._maximum_size = width, height
self._set_wm_normal_hints()
def minimize(self):
xlib.XIconifyWindow(self._x_display, self._window, self._x_screen_id)
def maximize(self):
self._set_wm_state('_NET_WM_STATE_MAXIMIZED_HORZ',
'_NET_WM_STATE_MAXIMIZED_VERT')
def set_mouse_platform_visible(self, platform_visible=None):
if platform_visible is None:
platform_visible = self._mouse_visible and \
not self._mouse_cursor.drawable
if not platform_visible:
# Hide pointer by creating an empty cursor
            black = xlib.XColor()
bmp = xlib.XCreateBitmapFromData(self._x_display, self._window,
c_buffer(8), 8, 8)
cursor = xlib.XCreatePixmapCursor(self._x_display, bmp, bmp,
black, black, 0, 0)
xlib.XDefineCursor(self._x_display, self._window, cursor)
xlib.XFreeCursor(self._x_display, cursor)
xlib.XFreePixmap(self._x_display, bmp)
else:
# Restore cursor
if isinstance(self._mouse_cursor, XlibMouseCursor):
xlib.XDefineCursor(self._x_display, self._window,
self._mouse_cursor.cursor)
else:
xlib.XUndefineCursor(self._x_display, self._window)
def set_mouse_position(self, x, y):
xlib.XWarpPointer(self._x_display,
0, # src window
self._window, # dst window
0, 0, # src x, y
0, 0, # src w, h
x, self._height - y,
)
def _update_exclusivity(self):
mouse_exclusive = self._active and self._mouse_exclusive
keyboard_exclusive = self._active and self._keyboard_exclusive
if mouse_exclusive != self._applied_mouse_exclusive:
if mouse_exclusive:
self.set_mouse_platform_visible(False)
# Restrict to client area
xlib.XGrabPointer(self._x_display, self._window,
True,
0,
xlib.GrabModeAsync,
xlib.GrabModeAsync,
self._window,
0,
xlib.CurrentTime)
# Move pointer to center of window
x = self._width / 2
y = self._height / 2
self._mouse_exclusive_client = x, y
self.set_mouse_position(x, y)
elif self._fullscreen and not self.screen._xinerama:
# Restrict to fullscreen area (prevent viewport scrolling)
self.set_mouse_position(0, 0)
r = xlib.XGrabPointer(self._x_display, self._view,
True, 0,
xlib.GrabModeAsync,
xlib.GrabModeAsync,
self._view,
0,
xlib.CurrentTime)
if r:
# Failed to grab, try again later
self._applied_mouse_exclusive = None
return
self.set_mouse_platform_visible()
else:
# Unclip
xlib.XUngrabPointer(self._x_display, xlib.CurrentTime)
self.set_mouse_platform_visible()
self._applied_mouse_exclusive = mouse_exclusive
if keyboard_exclusive != self._applied_keyboard_exclusive:
if keyboard_exclusive:
xlib.XGrabKeyboard(self._x_display,
self._window,
False,
xlib.GrabModeAsync,
xlib.GrabModeAsync,
xlib.CurrentTime)
else:
xlib.XUngrabKeyboard(self._x_display, xlib.CurrentTime)
self._applied_keyboard_exclusive = keyboard_exclusive
def set_exclusive_mouse(self, exclusive=True):
if exclusive == self._mouse_exclusive:
return
self._mouse_exclusive = exclusive
self._update_exclusivity()
def set_exclusive_keyboard(self, exclusive=True):
if exclusive == self._keyboard_exclusive:
return
self._keyboard_exclusive = exclusive
self._update_exclusivity()
def get_system_mouse_cursor(self, name):
if name == self.CURSOR_DEFAULT:
return DefaultMouseCursor()
# NQR means default shape is not pretty... surely there is another
# cursor font?
cursor_shapes = {
self.CURSOR_CROSSHAIR: cursorfont.XC_crosshair,
self.CURSOR_HAND: cursorfont.XC_hand2,
self.CURSOR_HELP: cursorfont.XC_question_arrow, # NQR
self.CURSOR_NO: cursorfont.XC_pirate, # NQR
self.CURSOR_SIZE: cursorfont.XC_fleur,
self.CURSOR_SIZE_UP: cursorfont.XC_top_side,
self.CURSOR_SIZE_UP_RIGHT: cursorfont.XC_top_right_corner,
self.CURSOR_SIZE_RIGHT: cursorfont.XC_right_side,
self.CURSOR_SIZE_DOWN_RIGHT: cursorfont.XC_bottom_right_corner,
self.CURSOR_SIZE_DOWN: cursorfont.XC_bottom_side,
self.CURSOR_SIZE_DOWN_LEFT: cursorfont.XC_bottom_left_corner,
self.CURSOR_SIZE_LEFT: cursorfont.XC_left_side,
self.CURSOR_SIZE_UP_LEFT: cursorfont.XC_top_left_corner,
self.CURSOR_SIZE_UP_DOWN: cursorfont.XC_sb_v_double_arrow,
self.CURSOR_SIZE_LEFT_RIGHT: cursorfont.XC_sb_h_double_arrow,
self.CURSOR_TEXT: cursorfont.XC_xterm,
self.CURSOR_WAIT: cursorfont.XC_watch,
self.CURSOR_WAIT_ARROW: cursorfont.XC_watch, # NQR
}
if name not in cursor_shapes:
raise MouseCursorException('Unknown cursor name "%s"' % name)
cursor = xlib.XCreateFontCursor(self._x_display, cursor_shapes[name])
return XlibMouseCursor(cursor)
def set_icon(self, *images):
# Careful! XChangeProperty takes an array of long when data type
# is 32-bit (but long can be 64 bit!), so pad high bytes of format if
# necessary.
import sys
format = {
('little', 4): 'BGRA',
('little', 8): 'BGRAAAAA',
('big', 4): 'ARGB',
('big', 8): 'AAAAARGB'
}[(sys.byteorder, sizeof(c_ulong))]
data = ''
for image in images:
image = image.get_image_data()
pitch = -(image.width * len(format))
s = c_buffer(sizeof(c_ulong) * 2)
memmove(s, cast((c_ulong * 2)(image.width, image.height),
POINTER(c_ubyte)), len(s))
data += s.raw + image.get_data(format, pitch)
buffer = (c_ubyte * len(data))()
memmove(buffer, data, len(data))
        atom = xlib.XInternAtom(self._x_display, asbytes('_NET_WM_ICON'), False)
xlib.XChangeProperty(self._x_display, self._window, atom, XA_CARDINAL,
32, xlib.PropModeReplace, buffer, len(data)/sizeof(c_ulong))
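        # Worked example of the padding above (platform assumptions, not
        # asserted by this code): on a little-endian machine where
        # sizeof(c_ulong) == 8, each pixel is emitted as 'BGRAAAAA' -- the
        # four colour bytes plus four pad bytes -- so every pixel fills one
        # 64-bit long, matching XChangeProperty's array-of-long convention
        # for format=32:
        #
        #     >>> 'BGRA'.ljust(8, 'A')
        #     'BGRAAAAA'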
# Private utility
def _set_wm_normal_hints(self):
hints = xlib.XAllocSizeHints().contents
if self._minimum_size:
hints.flags |= xlib.PMinSize
hints.min_width, hints.min_height = self._minimum_size
if self._maximum_size:
hints.flags |= xlib.PMaxSize
hints.max_width, hints.max_height = self._maximum_size
xlib.XSetWMNormalHints(self._x_display, self._window, byref(hints))
def _set_text_property(self, name, value, allow_utf8=True):
atom = xlib.XInternAtom(self._x_display, asbytes(name), False)
if not atom:
raise XlibException('Undefined atom "%s"' % name)
assert type(value) in (str, unicode)
property = xlib.XTextProperty()
if _have_utf8 and allow_utf8:
buf = create_string_buffer(value.encode('utf8'))
result = xlib.Xutf8TextListToTextProperty(self._x_display,
cast(pointer(buf), c_char_p), 1, xlib.XUTF8StringStyle,
byref(property))
if result < 0:
raise XlibException('Could not create UTF8 text property')
else:
buf = create_string_buffer(value.encode('ascii', 'ignore'))
result = xlib.XStringListToTextProperty(
cast(pointer(buf), c_char_p), 1, byref(property))
if result < 0:
raise XlibException('Could not create text property')
xlib.XSetTextProperty(self._x_display,
self._window, byref(property), atom)
# XXX <rj> Xlib doesn't like us freeing this
#xlib.XFree(property.value)
def _set_atoms_property(self, name, values, mode=xlib.PropModeReplace):
name_atom = xlib.XInternAtom(self._x_display, asbytes(name), False)
atoms = []
for value in values:
atoms.append(xlib.XInternAtom(self._x_display, asbytes(value), False))
atom_type = xlib.XInternAtom(self._x_display, asbytes('ATOM'), False)
if len(atoms):
atoms_ar = (xlib.Atom * len(atoms))(*atoms)
xlib.XChangeProperty(self._x_display, self._window,
name_atom, atom_type, 32, mode,
cast(pointer(atoms_ar), POINTER(c_ubyte)), len(atoms))
else:
            xlib.XDeleteProperty(self._x_display, self._window, name_atom)
def _set_wm_state(self, *states):
# Set property
        net_wm_state = xlib.XInternAtom(self._x_display, asbytes('_NET_WM_STATE'), False)
        atoms = []
        for state in states:
            atoms.append(xlib.XInternAtom(self._x_display, asbytes(state), False))
        atom_type = xlib.XInternAtom(self._x_display, asbytes('ATOM'), False)
if len(atoms):
atoms_ar = (xlib.Atom * len(atoms))(*atoms)
xlib.XChangeProperty(self._x_display, self._window,
net_wm_state, atom_type, 32, xlib.PropModePrepend,
cast(pointer(atoms_ar), POINTER(c_ubyte)), len(atoms))
else:
xlib.XDeleteProperty(self._x_display, self._window, net_wm_state)
# Nudge the WM
e = xlib.XEvent()
e.xclient.type = xlib.ClientMessage
e.xclient.message_type = net_wm_state
e.xclient.display = cast(self._x_display, POINTER(xlib.Display))
e.xclient.window = self._window
e.xclient.format = 32
e.xclient.data.l[0] = xlib.PropModePrepend
for i, atom in enumerate(atoms):
e.xclient.data.l[i + 1] = atom
xlib.XSendEvent(self._x_display, self._get_root(),
False, xlib.SubstructureRedirectMask, byref(e))
# Event handling
def dispatch_events(self):
self.dispatch_pending_events()
self._allow_dispatch_event = True
e = xlib.XEvent()
# Cache these in case window is closed from an event handler
_x_display = self._x_display
_window = self._window
_view = self._view
# Check for the events specific to this window
while xlib.XCheckWindowEvent(_x_display, _window,
0x1ffffff, byref(e)):
# Key events are filtered by the xlib window event
# handler so they get a shot at the prefiltered event.
if e.xany.type not in (xlib.KeyPress, xlib.KeyRelease):
if xlib.XFilterEvent(e, 0):
continue
self.dispatch_platform_event(e)
# Check for the events specific to this view
while xlib.XCheckWindowEvent(_x_display, _view,
0x1ffffff, byref(e)):
# Key events are filtered by the xlib window event
# handler so they get a shot at the prefiltered event.
if e.xany.type not in (xlib.KeyPress, xlib.KeyRelease):
if xlib.XFilterEvent(e, 0):
continue
self.dispatch_platform_event_view(e)
# Generic events for this window (the window close event).
while xlib.XCheckTypedWindowEvent(_x_display, _window,
xlib.ClientMessage, byref(e)):
self.dispatch_platform_event(e)
if self._needs_resize:
self.dispatch_event('on_resize', self._width, self._height)
self.dispatch_event('on_expose')
self._needs_resize = False
self._allow_dispatch_event = False
def dispatch_pending_events(self):
while self._event_queue:
EventDispatcher.dispatch_event(self, *self._event_queue.pop(0))
# Dispatch any context-related events
if self._lost_context:
self._lost_context = False
EventDispatcher.dispatch_event(self, 'on_context_lost')
if self._lost_context_state:
self._lost_context_state = False
EventDispatcher.dispatch_event(self, 'on_context_state_lost')
def dispatch_platform_event(self, e):
if self._applied_mouse_exclusive is None:
self._update_exclusivity()
event_handler = self._event_handlers.get(e.type)
if event_handler:
event_handler(e)
def dispatch_platform_event_view(self, e):
event_handler = self._view_event_handlers.get(e.type)
if event_handler:
event_handler(e)
@staticmethod
def _translate_modifiers(state):
modifiers = 0
if state & xlib.ShiftMask:
modifiers |= key.MOD_SHIFT
if state & xlib.ControlMask:
modifiers |= key.MOD_CTRL
if state & xlib.LockMask:
modifiers |= key.MOD_CAPSLOCK
if state & xlib.Mod1Mask:
modifiers |= key.MOD_ALT
if state & xlib.Mod2Mask:
modifiers |= key.MOD_NUMLOCK
if state & xlib.Mod4Mask:
modifiers |= key.MOD_WINDOWS
if state & xlib.Mod5Mask:
modifiers |= key.MOD_SCROLLLOCK
return modifiers
# Event handlers
'''
def _event_symbol(self, event):
# pyglet.self.key keysymbols are identical to X11 keysymbols, no
# need to map the keysymbol.
symbol = xlib.XKeycodeToKeysym(self._x_display, event.xkey.keycode, 0)
if symbol == 0:
# XIM event
return None
elif symbol not in key._key_names.keys():
symbol = key.user_key(event.xkey.keycode)
return symbol
'''
def _event_text_symbol(self, ev):
text = None
symbol = xlib.KeySym()
buffer = create_string_buffer(128)
# Look up raw keysym before XIM filters it (default for keypress and
# keyrelease)
count = xlib.XLookupString(ev.xkey,
buffer, len(buffer) - 1,
byref(symbol), None)
# Give XIM a shot
filtered = xlib.XFilterEvent(ev, ev.xany.window)
if ev.type == xlib.KeyPress and not filtered:
status = c_int()
if _have_utf8:
encoding = 'utf8'
count = xlib.Xutf8LookupString(self._x_ic,
ev.xkey,
buffer, len(buffer) - 1,
byref(symbol), byref(status))
if status.value == xlib.XBufferOverflow:
raise NotImplementedError('TODO: XIM buffer resize')
else:
encoding = 'ascii'
count = xlib.XLookupString(ev.xkey,
buffer, len(buffer) - 1,
byref(symbol), None)
if count:
status.value = xlib.XLookupBoth
if status.value & (xlib.XLookupChars | xlib.XLookupBoth):
text = buffer.value[:count].decode(encoding)
# Don't treat Unicode command codepoints as text, except Return.
if text and unicodedata.category(text) == 'Cc' and text != '\r':
text = None
symbol = symbol.value
# If the event is a XIM filtered event, the keysym will be virtual
# (e.g., aacute instead of A after a dead key). Drop it, we don't
# want these kind of key events.
if ev.xkey.keycode == 0 and not filtered:
symbol = None
# pyglet.self.key keysymbols are identical to X11 keysymbols, no
# need to map the keysymbol. For keysyms outside the pyglet set, map
# raw key code to a user key.
if symbol and symbol not in key._key_names and ev.xkey.keycode:
# Issue 353: Symbol is uppercase when shift key held down.
symbol = ord(unichr(symbol).lower())
# If still not recognised, use the keycode
if symbol not in key._key_names:
symbol = key.user_key(ev.xkey.keycode)
if filtered:
# The event was filtered, text must be ignored, but the symbol is
# still good.
return None, symbol
return text, symbol
def _event_text_motion(self, symbol, modifiers):
if modifiers & key.MOD_ALT:
return None
ctrl = modifiers & key.MOD_CTRL != 0
return _motion_map.get((symbol, ctrl), None)
@ViewEventHandler
@XlibEventHandler(xlib.KeyPress)
@XlibEventHandler(xlib.KeyRelease)
def _event_key_view(self, ev):
if ev.type == xlib.KeyRelease:
# Look in the queue for a matching KeyPress with same timestamp,
# indicating an auto-repeat rather than actual key event.
saved = []
while True:
auto_event = xlib.XEvent()
result = xlib.XCheckWindowEvent(self._x_display,
self._window, xlib.KeyPress|xlib.KeyRelease,
byref(auto_event))
if not result:
break
saved.append(auto_event)
if auto_event.type == xlib.KeyRelease:
# just save this off for restoration back to the queue
continue
if ev.xkey.keycode == auto_event.xkey.keycode:
# Found a key repeat: dispatch EVENT_TEXT* event
text, symbol = self._event_text_symbol(auto_event)
modifiers = self._translate_modifiers(ev.xkey.state)
modifiers_ctrl = modifiers & (key.MOD_CTRL | key.MOD_ALT)
motion = self._event_text_motion(symbol, modifiers)
if motion:
if modifiers & key.MOD_SHIFT:
self.dispatch_event(
'on_text_motion_select', motion)
else:
self.dispatch_event('on_text_motion', motion)
elif text and not modifiers_ctrl:
self.dispatch_event('on_text', text)
                        # Discard the matching KeyPress just consumed as the
                        # auto-repeat; the remaining saved events are pushed
                        # back onto the queue below.
                        ditched = saved.pop()
for auto_event in reversed(saved):
xlib.XPutBackEvent(self._x_display, byref(auto_event))
return
else:
# Key code of press did not match, therefore no repeating
# is going on, stop searching.
break
# Whoops, put the events back, it's for real.
for auto_event in reversed(saved):
xlib.XPutBackEvent(self._x_display, byref(auto_event))
text, symbol = self._event_text_symbol(ev)
modifiers = self._translate_modifiers(ev.xkey.state)
modifiers_ctrl = modifiers & (key.MOD_CTRL | key.MOD_ALT)
motion = self._event_text_motion(symbol, modifiers)
if ev.type == xlib.KeyPress:
if symbol:
self.dispatch_event('on_key_press', symbol, modifiers)
if motion:
if modifiers & key.MOD_SHIFT:
self.dispatch_event('on_text_motion_select', motion)
else:
self.dispatch_event('on_text_motion', motion)
elif text and not modifiers_ctrl:
self.dispatch_event('on_text', text)
elif ev.type == xlib.KeyRelease:
if symbol:
self.dispatch_event('on_key_release', symbol, modifiers)
@XlibEventHandler(xlib.KeyPress)
@XlibEventHandler(xlib.KeyRelease)
def _event_key(self, ev):
return self._event_key_view(ev)
@ViewEventHandler
@XlibEventHandler(xlib.MotionNotify)
def _event_motionnotify_view(self, ev):
x = ev.xmotion.x
y = self.height - ev.xmotion.y
if self._mouse_in_window:
dx = x - self._mouse_x
dy = y - self._mouse_y
else:
dx = dy = 0
if self._applied_mouse_exclusive and \
(ev.xmotion.x, ev.xmotion.y) == self._mouse_exclusive_client:
# Ignore events caused by XWarpPointer
self._mouse_x = x
self._mouse_y = y
return
if self._applied_mouse_exclusive:
# Reset pointer position
ex, ey = self._mouse_exclusive_client
xlib.XWarpPointer(self._x_display,
0,
self._window,
0, 0,
0, 0,
ex, ey)
self._mouse_x = x
self._mouse_y = y
self._mouse_in_window = True
buttons = 0
if ev.xmotion.state & xlib.Button1MotionMask:
buttons |= mouse.LEFT
if ev.xmotion.state & xlib.Button2MotionMask:
buttons |= mouse.MIDDLE
if ev.xmotion.state & xlib.Button3MotionMask:
buttons |= mouse.RIGHT
if buttons:
# Drag event
modifiers = self._translate_modifiers(ev.xmotion.state)
self.dispatch_event('on_mouse_drag',
x, y, dx, dy, buttons, modifiers)
else:
# Motion event
self.dispatch_event('on_mouse_motion', x, y, dx, dy)
@XlibEventHandler(xlib.MotionNotify)
def _event_motionnotify(self, ev):
# Window motion looks for drags that are outside the view but within
# the window.
buttons = 0
if ev.xmotion.state & xlib.Button1MotionMask:
buttons |= mouse.LEFT
if ev.xmotion.state & xlib.Button2MotionMask:
buttons |= mouse.MIDDLE
if ev.xmotion.state & xlib.Button3MotionMask:
buttons |= mouse.RIGHT
if buttons:
# Drag event
x = ev.xmotion.x - self._view_x
y = self._height - (ev.xmotion.y - self._view_y)
if self._mouse_in_window:
dx = x - self._mouse_x
dy = y - self._mouse_y
else:
dx = dy = 0
self._mouse_x = x
self._mouse_y = y
modifiers = self._translate_modifiers(ev.xmotion.state)
self.dispatch_event('on_mouse_drag',
x, y, dx, dy, buttons, modifiers)
@XlibEventHandler(xlib.ClientMessage)
def _event_clientmessage(self, ev):
atom = ev.xclient.data.l[0]
if atom == xlib.XInternAtom(ev.xclient.display,
asbytes('WM_DELETE_WINDOW'), False):
self.dispatch_event('on_close')
elif (self._enable_xsync and
atom == xlib.XInternAtom(ev.xclient.display,
asbytes('_NET_WM_SYNC_REQUEST'), False)):
lo = ev.xclient.data.l[2]
hi = ev.xclient.data.l[3]
self._current_sync_value = xsync.XSyncValue(hi, lo)
def _sync_resize(self):
if self._enable_xsync and self._current_sync_valid:
if xsync.XSyncValueIsZero(self._current_sync_value):
self._current_sync_valid = False
return
xsync.XSyncSetCounter(self._x_display,
self._sync_counter,
self._current_sync_value)
self._current_sync_value = None
self._current_sync_valid = False
@ViewEventHandler
@XlibEventHandler(xlib.ButtonPress)
@XlibEventHandler(xlib.ButtonRelease)
def _event_button(self, ev):
x = ev.xbutton.x
y = self.height - ev.xbutton.y
button = 1 << (ev.xbutton.button - 1) # 1, 2, 3 -> 1, 2, 4
modifiers = self._translate_modifiers(ev.xbutton.state)
if ev.type == xlib.ButtonPress:
# override_redirect issue: manually activate this window if
# fullscreen.
if self._override_redirect and not self._active:
self.activate()
if ev.xbutton.button == 4:
self.dispatch_event('on_mouse_scroll', x, y, 0, 1)
elif ev.xbutton.button == 5:
self.dispatch_event('on_mouse_scroll', x, y, 0, -1)
elif ev.xbutton.button < len(self._mouse_buttons):
self._mouse_buttons[ev.xbutton.button] = True
self.dispatch_event('on_mouse_press',
x, y, button, modifiers)
else:
if ev.xbutton.button < 4:
self._mouse_buttons[ev.xbutton.button] = False
self.dispatch_event('on_mouse_release',
x, y, button, modifiers)
@ViewEventHandler
@XlibEventHandler(xlib.Expose)
def _event_expose(self, ev):
# Ignore all expose events except the last one. We could be told
# about exposure rects - but I don't see the point since we're
# working with OpenGL and we'll just redraw the whole scene.
if ev.xexpose.count > 0: return
self.dispatch_event('on_expose')
@ViewEventHandler
@XlibEventHandler(xlib.EnterNotify)
def _event_enternotify(self, ev):
# figure active mouse buttons
# XXX ignore modifier state?
state = ev.xcrossing.state
self._mouse_buttons[1] = state & xlib.Button1Mask
self._mouse_buttons[2] = state & xlib.Button2Mask
self._mouse_buttons[3] = state & xlib.Button3Mask
self._mouse_buttons[4] = state & xlib.Button4Mask
self._mouse_buttons[5] = state & xlib.Button5Mask
# mouse position
x = self._mouse_x = ev.xcrossing.x
y = self._mouse_y = self.height - ev.xcrossing.y
self._mouse_in_window = True
# XXX there may be more we could do here
self.dispatch_event('on_mouse_enter', x, y)
@ViewEventHandler
@XlibEventHandler(xlib.LeaveNotify)
def _event_leavenotify(self, ev):
x = self._mouse_x = ev.xcrossing.x
y = self._mouse_y = self.height - ev.xcrossing.y
self._mouse_in_window = False
self.dispatch_event('on_mouse_leave', x, y)
@XlibEventHandler(xlib.ConfigureNotify)
def _event_configurenotify(self, ev):
if self._enable_xsync and self._current_sync_value:
self._current_sync_valid = True
if self._fullscreen:
return
self.switch_to()
w, h = ev.xconfigure.width, ev.xconfigure.height
x, y = ev.xconfigure.x, ev.xconfigure.y
if self._width != w or self._height != h:
self._update_view_size()
self._width = w
self._height = h
self._needs_resize = True
if self._x != x or self._y != y:
self.dispatch_event('on_move', x, y)
self._x = x
self._y = y
@XlibEventHandler(xlib.FocusIn)
def _event_focusin(self, ev):
self._active = True
self._update_exclusivity()
self.dispatch_event('on_activate')
xlib.XSetICFocus(self._x_ic)
@XlibEventHandler(xlib.FocusOut)
def _event_focusout(self, ev):
self._active = False
self._update_exclusivity()
self.dispatch_event('on_deactivate')
xlib.XUnsetICFocus(self._x_ic)
@XlibEventHandler(xlib.MapNotify)
def _event_mapnotify(self, ev):
self._mapped = True
self.dispatch_event('on_show')
self._update_exclusivity()
@XlibEventHandler(xlib.UnmapNotify)
def _event_unmapnotify(self, ev):
self._mapped = False
self.dispatch_event('on_hide')
|
rikribbers/smpchecker
|
refs/heads/master
|
smpchecker/model/model.py
|
1
|
from smpchecker import smpcheckerapp as smpchecker
from enum import Enum
from datetime import datetime
class SupportedDocumentTypes(Enum):
SI_10_CREDITNOTE = 'urn:oasis:names:specification:ubl:schema:xsd:CreditNote-2::CreditNote##urn:www.cenbii.eu:transaction:biicoretrdm014:ver1.0:#urn:www.peppol.eu:bis:peppol5a:ver1.0#urn:www.simplerinvoicing.org:si-ubl:credit-note:ver1.0.x::2.0'
SI_10_INVOICE = 'urn:oasis:names:specification:ubl:schema:xsd:Invoice-2::Invoice##urn:www.cenbii.eu:transaction:biicoretrdm010:ver1.0:#urn:www.peppol.eu:bis:peppol4a:ver1.0#urn:www.simplerinvoicing.org:si-ubl:invoice:ver1.0.x::2.0'
PEPPOL_4A = 'urn:oasis:names:specification:ubl:schema:xsd:Invoice-2::Invoice##urn:www.cenbii.eu:transaction:biitrns010:ver2.0:extended:urn:www.peppol.eu:bis:peppol4a:ver2.0::2.1'
SI_11 = 'urn:oasis:names:specification:ubl:schema:xsd:Invoice-2::Invoice##urn:www.cenbii.eu:transaction:biitrns010:ver2.0:extended:urn:www.peppol.eu:bis:peppol4a:ver2.0:extended:urn:www.simplerinvoicing.org:si:si-ubl:ver1.1.x::2.1'
SI_12 = 'urn:oasis:names:specification:ubl:schema:xsd:Invoice-2::Invoice##urn:www.cenbii.eu:transaction:biitrns010:ver2.0:extended:urn:www.peppol.eu:bis:peppol4a:ver2.0:extended:urn:www.simplerinvoicing.org:si:si-ubl:ver1.2::2.1'
class PeppolMember:
'''
Object representing a PeppolMember
'''
def __init__(self, peppolidentifier):
self.id = None
self.peppolidentifier = peppolidentifier
self.firstseen = datetime.now()
self.lastseen = None
def create(self):
smpchecker.query_db('insert into peppolmembers(peppolidentifier,first_seen) values (?,?)',
[self.peppolidentifier, self.firstseen])
self.load(self.peppolidentifier)
def reload(self):
self.load(self.peppolidentifier)
def load(self, peppolidentifier):
rows = smpchecker.query_db(
'select id, peppolidentifier, first_seen, last_seen from peppolmembers where peppolidentifier=?',
[peppolidentifier])
if len(rows) > 0:
row = rows.pop(0)
self.id = row[0]
self.peppolidentifier = row[1]
if row[2] is not None:
self.firstseen = datetime.strptime(row[2],'%Y-%m-%d %H:%M:%S.%f')
if row[3] is not None:
self.lastseen = datetime.strptime(row[3],'%Y-%m-%d %H:%M:%S.%f')
def exists(self):
for row in smpchecker.query_db('select id from peppolmembers where peppolidentifier=?',
[self.peppolidentifier]):
return True
# no rows found
return False
def update(self):
time = datetime.now()
smpchecker.query_db('update peppolmembers set last_seen=? where peppolidentifier=?',
[time, self.peppolidentifier])
self.lastseen = time
def get_scan_result(self):
rows = smpchecker.query_db('select peppolmember_id, documentidentifier from smpentries where peppolmember_id=?',
[self.id])
result = []
for row in rows:
e = SMPEntry(row[1], row[0])
e.reload()
result.append(e)
return SMPScanResult(self, result)
def serialize(self):
return {
'peppolidentifier': self.peppolidentifier,
'firstseen': self.firstseen,
'lastseen': self.lastseen,
}
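# Illustrative lifecycle sketch for PeppolMember (not part of the original
# module; assumes an initialized smpchecker database, and the identifier
# value below is hypothetical):
#
#   member = PeppolMember('0106:12345678')
#   if member.exists():
#       member.reload()
#       member.update()     # stamps last_seen with the current time
#   else:
#       member.create()     # inserts the row, then reloads it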
class SMPEntry:
'''
Object representing SMP entry
'''
def __init__(self, documentidentifier, peppolmember_id):
self.id = None
self.documentidentifier = documentidentifier
self.certificate_not_before = None
self.certificate_not_after = None
self.endpointurl = None
self.peppolmember_id = peppolmember_id
self.firstseen = datetime.now()
self.lastseen = None
def create(self):
sql = 'insert into smpentries(documentidentifier, certificate_not_before, certificate_not_after, '
sql += 'endpointurl, peppolmember_id, first_seen, last_seen) values (?,?,?,?,?,?,?)'
smpchecker.query_db(sql, [self.documentidentifier, self.certificate_not_before,
self.certificate_not_after, self.endpointurl,
self.peppolmember_id, self.firstseen, self.lastseen])
self.load(self.documentidentifier, self.peppolmember_id)
def reload(self):
self.load(self.documentidentifier, self.peppolmember_id)
def load(self, documentidentifier, peppolmember_id):
sql = 'select id, documentidentifier, certificate_not_before, certificate_not_after,'
sql += 'endpointurl, peppolmember_id, first_seen, last_seen from smpentries where documentidentifier=? '
sql += 'and peppolmember_id=?'
rows = smpchecker.query_db(sql, [documentidentifier, peppolmember_id])
if len(rows) > 0:
row = rows.pop(0)
self.id = row[0]
self.documentidentifier = row[1]
self.certificate_not_before = datetime.strptime(row[2],'%Y-%m-%d %H:%M:%S.%f')
self.certificate_not_after = datetime.strptime(row[3],'%Y-%m-%d %H:%M:%S.%f')
self.endpointurl = row[4]
self.peppolmember_id = row[5]
if row[6] is not None:
self.firstseen = datetime.strptime(row[6],'%Y-%m-%d %H:%M:%S.%f')
if row[7] is not None:
self.lastseen = datetime.strptime(row[7],'%Y-%m-%d %H:%M:%S.%f')
def exists(self):
for row in smpchecker.query_db('select id from smpentries where peppolmember_id=? and documentidentifier=?',
[self.peppolmember_id, self.documentidentifier]):
return True
# no rows found
return False
def update(self):
        # TODO: update fields from scan.
time = datetime.now()
smpchecker.query_db('update smpentries set last_seen=? where peppolmember_id=? and documentidentifier=?',
[time, self.peppolmember_id, self.documentidentifier])
self.lastseen = time
class SMPScanResult:
def __init__(self, member, smpentries):
self.peppolidentifier = member.peppolidentifier
self.si_10_creditnote = False
self.si_10_invoice = False
self.si_11 = False
self.si_12 = False
self.peppol4a = False
self.smpentries = smpentries
        # An SMP entry is considered unsupported when its last_seen predates
        # the member's last_seen; in other words, the most recent scan did not
        # refresh that entry's last_seen.
        # TODO: add a unit test for this (a sketch follows this class below).
for smpentry in smpentries:
if SupportedDocumentTypes.SI_10_CREDITNOTE.value == smpentry.documentidentifier:
self.si_10_creditnote = True
elif SupportedDocumentTypes.SI_10_INVOICE.value == smpentry.documentidentifier:
self.si_10_invoice = True
elif SupportedDocumentTypes.SI_11.value == smpentry.documentidentifier:
self.si_11 = True
elif SupportedDocumentTypes.SI_12.value == smpentry.documentidentifier:
self.si_12 = True
elif SupportedDocumentTypes.PEPPOL_4A.value == smpentry.documentidentifier:
self.peppol4a = True
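# A minimal unittest sketch for the flag mapping above, in the spirit of the
# TODO (hypothetical; the stub classes stand in for real member/entry rows):
#
#   import unittest
#
#   class _StubMember:
#       peppolidentifier = '0106:12345678'
#
#   class _StubEntry:
#       def __init__(self, documentidentifier):
#           self.documentidentifier = documentidentifier
#
#   class SMPScanResultTest(unittest.TestCase):
#       def test_si12_flag_set(self):
#           entries = [_StubEntry(SupportedDocumentTypes.SI_12.value)]
#           result = SMPScanResult(_StubMember(), entries)
#           self.assertTrue(result.si_12)
#           self.assertFalse(result.si_11)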
class Accesspoint:
def __init__(self, endpointurl, certificate_not_after):
        self.endpointurl = endpointurl
self.certificate_not_after = certificate_not_after
def serialize(self):
return {
'endpointurl': self.endpointurl,
'certificate_not_after': self.certificate_not_after,
}
class Accesspoints:
def __init__(self):
        self.accesspoints = []
def load(self):
sql = 'select distinct endpointurl,certificate_not_after from smpentries'
rows = smpchecker.query_db(sql)
for row in rows:
            a = Accesspoint(row[0], row[1])
self.accesspoints.append(a)
|
xyzz/vcmi-build
|
refs/heads/master
|
project/jni/python/src/Lib/test/test_fcntl.py
|
51
|
"""Test program for the fcntl C module.
OS/2+EMX doesn't support the file locking operations.
"""
import fcntl
import os
import struct
import sys
import unittest
from test.test_support import verbose, TESTFN, unlink, run_unittest
# TODO - Write tests for flock() and lockf().
def get_lockdata():
if sys.platform.startswith('atheos'):
start_len = "qq"
else:
try:
os.O_LARGEFILE
except AttributeError:
start_len = "ll"
else:
start_len = "qq"
if sys.platform in ('netbsd1', 'netbsd2', 'netbsd3',
'Darwin1.2', 'darwin',
'freebsd2', 'freebsd3', 'freebsd4', 'freebsd5',
'freebsd6', 'freebsd7', 'freebsd8',
'bsdos2', 'bsdos3', 'bsdos4',
'openbsd', 'openbsd2', 'openbsd3', 'openbsd4'):
if struct.calcsize('l') == 8:
off_t = 'l'
pid_t = 'i'
else:
off_t = 'lxxxx'
pid_t = 'l'
lockdata = struct.pack(off_t + off_t + pid_t + 'hh', 0, 0, 0,
fcntl.F_WRLCK, 0)
elif sys.platform in ['aix3', 'aix4', 'hp-uxB', 'unixware7']:
lockdata = struct.pack('hhlllii', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
elif sys.platform in ['os2emx']:
lockdata = None
else:
lockdata = struct.pack('hh'+start_len+'hh', fcntl.F_WRLCK, 0, 0, 0, 0, 0)
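        # The 'hh' + start_len + 'hh' layout roughly mirrors struct flock
        # (l_type, l_whence, l_start, l_len, l_pid, ...) with l_type=F_WRLCK
        # and all other fields zero; exact padding is platform-dependent.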
if lockdata:
if verbose:
print 'struct.pack: ', repr(lockdata)
return lockdata
lockdata = get_lockdata()
class TestFcntl(unittest.TestCase):
def setUp(self):
self.f = None
def tearDown(self):
        if self.f and not self.f.closed:
self.f.close()
unlink(TESTFN)
def test_fcntl_fileno(self):
# the example from the library docs
self.f = open(TESTFN, 'w')
rv = fcntl.fcntl(self.f.fileno(), fcntl.F_SETFL, os.O_NONBLOCK)
if verbose:
print 'Status from fcntl with O_NONBLOCK: ', rv
if sys.platform not in ['os2emx']:
rv = fcntl.fcntl(self.f.fileno(), fcntl.F_SETLKW, lockdata)
if verbose:
print 'String from fcntl with F_SETLKW: ', repr(rv)
self.f.close()
def test_fcntl_file_descriptor(self):
# again, but pass the file rather than numeric descriptor
self.f = open(TESTFN, 'w')
rv = fcntl.fcntl(self.f, fcntl.F_SETFL, os.O_NONBLOCK)
if sys.platform not in ['os2emx']:
rv = fcntl.fcntl(self.f, fcntl.F_SETLKW, lockdata)
self.f.close()
def test_main():
run_unittest(TestFcntl)
if __name__ == '__main__':
test_main()
|
FireWRT/OpenWrt-Firefly-Libraries
|
refs/heads/master
|
staging_dir/target-mipsel_1004kc+dsp_uClibc-0.9.33.2/usr/lib/python3.4/lib2to3/fixes/fix_raw_input.py
|
203
|
"""Fixer that changes raw_input(...) into input(...)."""
# Author: Andre Roberge
# Local imports
from .. import fixer_base
from ..fixer_util import Name
class FixRawInput(fixer_base.BaseFix):
BM_compatible = True
PATTERN = """
power< name='raw_input' trailer< '(' [any] ')' > any* >
"""
def transform(self, node, results):
name = results["name"]
name.replace(Name("input", prefix=name.prefix))
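# Illustrative before/after for this fixer (hypothetical input source):
#
#   before: name = raw_input("prompt: ")
#   after:  name = input("prompt: ")
#
# Only the callable's name node is replaced; its prefix (leading whitespace
# and comments) is carried over via Name("input", prefix=name.prefix).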
|
Zeken/audacity
|
refs/heads/master
|
lib-src/lv2/suil/waflib/Tools/g95.py
|
316
|
#! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
import re
from waflib import Utils
from waflib.Tools import fc,fc_config,fc_scan,ar
from waflib.Configure import conf
@conf
def find_g95(conf):
fc=conf.find_program('g95',var='FC')
fc=conf.cmd_to_list(fc)
conf.get_g95_version(fc)
conf.env.FC_NAME='G95'
@conf
def g95_flags(conf):
v=conf.env
v['FCFLAGS_fcshlib']=['-fPIC']
v['FORTRANMODFLAG']=['-fmod=','']
v['FCFLAGS_DEBUG']=['-Werror']
@conf
def g95_modifier_win32(conf):
fc_config.fortran_modifier_win32(conf)
@conf
def g95_modifier_cygwin(conf):
fc_config.fortran_modifier_cygwin(conf)
@conf
def g95_modifier_darwin(conf):
fc_config.fortran_modifier_darwin(conf)
@conf
def g95_modifier_platform(conf):
dest_os=conf.env['DEST_OS']or Utils.unversioned_sys_platform()
g95_modifier_func=getattr(conf,'g95_modifier_'+dest_os,None)
if g95_modifier_func:
g95_modifier_func()
@conf
def get_g95_version(conf,fc):
version_re=re.compile(r"g95\s*(?P<major>\d*)\.(?P<minor>\d*)").search
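	# e.g. searches version output like "G95 (GCC 4.1.2 (g95 0.91!) ..." for
	# the "g95 0.91" part (illustrative sample string)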
cmd=fc+['--version']
out,err=fc_config.getoutput(conf,cmd,stdin=False)
if out:
match=version_re(out)
else:
match=version_re(err)
if not match:
conf.fatal('cannot determine g95 version')
k=match.groupdict()
conf.env['FC_VERSION']=(k['major'],k['minor'])
def configure(conf):
conf.find_g95()
conf.find_ar()
conf.fc_flags()
conf.fc_add_flags()
conf.g95_flags()
conf.g95_modifier_platform()
|
ninefold/libcloud
|
refs/heads/trunk
|
libcloud/common/types.py
|
4
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = [
"LibcloudError",
"MalformedResponseError",
"InvalidCredsError",
"InvalidCredsException",
"LazyList"
]
class LibcloudError(Exception):
"""The base class for other libcloud exceptions"""
def __init__(self, value, driver=None):
self.value = value
self.driver = driver
def __str__(self):
return self.__repr__()
def __repr__(self):
return ("<LibcloudError in "
+ repr(self.driver)
+ " "
+ repr(self.value) + ">")
class MalformedResponseError(LibcloudError):
"""Exception for the cases when a provider returns a malformed
response, e.g. you request JSON and provider returns
'<h3>something</h3>' due to some error on their side."""
def __init__(self, value, body=None, driver=None):
self.value = value
self.driver = driver
self.body = body
def __str__(self):
return self.__repr__()
def __repr__(self):
return ("<MalformedResponseException in "
+ repr(self.driver)
+ " "
+ repr(self.value)
+ ">: "
+ repr(self.body))
class InvalidCredsError(LibcloudError):
"""Exception used when invalid credentials are used on a provider."""
def __init__(self, value='Invalid credentials with the provider',
driver=None):
self.value = value
self.driver = driver
def __str__(self):
return self.__repr__()
def __repr__(self):
return repr(self.value)
# Deprecated alias of L{InvalidCredsError}
InvalidCredsException = InvalidCredsError
class LazyList(object):
def __init__(self, get_more, value_dict=None):
self._data = []
self._last_key = None
self._exhausted = False
self._all_loaded = False
self._get_more = get_more
self._value_dict = value_dict or {}
def __iter__(self):
if not self._all_loaded:
self._load_all()
data = self._data
for i in data:
yield i
def __getitem__(self, index):
if index >= len(self._data) and not self._all_loaded:
self._load_all()
return self._data[index]
def __len__(self):
self._load_all()
return len(self._data)
def __repr__(self):
self._load_all()
        repr_string = ', '.join([repr(item) for item in self._data])
repr_string = '[%s]' % (repr_string)
return repr_string
def _load_all(self):
while not self._exhausted:
newdata, self._last_key, self._exhausted = \
self._get_more(last_key=self._last_key,
value_dict=self._value_dict)
self._data.extend(newdata)
self._all_loaded = True
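# Illustrative use of LazyList (not part of the original module): get_more is
# any callable returning (new_items, last_key, exhausted). The fetcher below
# is hypothetical.
#
#   def _fetch(last_key=None, value_dict=None):
#       page = [1, 2, 3] if last_key is None else []
#       return page, 'end', last_key is not None
#
#   lazy = LazyList(get_more=_fetch)
#   len(lazy)   # forces _load_all(), draining the fetcher
#   lazy[0]     # -> 1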
|
LuminateWireless/grpc
|
refs/heads/master
|
examples/python/multiplex/route_guide_pb2.py
|
88
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: route_guide.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='route_guide.proto',
package='routeguide',
syntax='proto3',
serialized_pb=_b('\n\x11route_guide.proto\x12\nrouteguide\",\n\x05Point\x12\x10\n\x08latitude\x18\x01 \x01(\x05\x12\x11\n\tlongitude\x18\x02 \x01(\x05\"I\n\tRectangle\x12\x1d\n\x02lo\x18\x01 \x01(\x0b\x32\x11.routeguide.Point\x12\x1d\n\x02hi\x18\x02 \x01(\x0b\x32\x11.routeguide.Point\"<\n\x07\x46\x65\x61ture\x12\x0c\n\x04name\x18\x01 \x01(\t\x12#\n\x08location\x18\x02 \x01(\x0b\x32\x11.routeguide.Point\"A\n\tRouteNote\x12#\n\x08location\x18\x01 \x01(\x0b\x32\x11.routeguide.Point\x12\x0f\n\x07message\x18\x02 \x01(\t\"b\n\x0cRouteSummary\x12\x13\n\x0bpoint_count\x18\x01 \x01(\x05\x12\x15\n\rfeature_count\x18\x02 \x01(\x05\x12\x10\n\x08\x64istance\x18\x03 \x01(\x05\x12\x14\n\x0c\x65lapsed_time\x18\x04 \x01(\x05\x32\x85\x02\n\nRouteGuide\x12\x36\n\nGetFeature\x12\x11.routeguide.Point\x1a\x13.routeguide.Feature\"\x00\x12>\n\x0cListFeatures\x12\x15.routeguide.Rectangle\x1a\x13.routeguide.Feature\"\x00\x30\x01\x12>\n\x0bRecordRoute\x12\x11.routeguide.Point\x1a\x18.routeguide.RouteSummary\"\x00(\x01\x12?\n\tRouteChat\x12\x15.routeguide.RouteNote\x1a\x15.routeguide.RouteNote\"\x00(\x01\x30\x01\x42\x36\n\x1bio.grpc.examples.routeguideB\x0fRouteGuideProtoP\x01\xa2\x02\x03RTGb\x06proto3')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_POINT = _descriptor.Descriptor(
name='Point',
full_name='routeguide.Point',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='latitude', full_name='routeguide.Point.latitude', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='longitude', full_name='routeguide.Point.longitude', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=33,
serialized_end=77,
)
_RECTANGLE = _descriptor.Descriptor(
name='Rectangle',
full_name='routeguide.Rectangle',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='lo', full_name='routeguide.Rectangle.lo', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='hi', full_name='routeguide.Rectangle.hi', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=79,
serialized_end=152,
)
_FEATURE = _descriptor.Descriptor(
name='Feature',
full_name='routeguide.Feature',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='routeguide.Feature.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='location', full_name='routeguide.Feature.location', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=154,
serialized_end=214,
)
_ROUTENOTE = _descriptor.Descriptor(
name='RouteNote',
full_name='routeguide.RouteNote',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='location', full_name='routeguide.RouteNote.location', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='message', full_name='routeguide.RouteNote.message', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=216,
serialized_end=281,
)
_ROUTESUMMARY = _descriptor.Descriptor(
name='RouteSummary',
full_name='routeguide.RouteSummary',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='point_count', full_name='routeguide.RouteSummary.point_count', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='feature_count', full_name='routeguide.RouteSummary.feature_count', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='distance', full_name='routeguide.RouteSummary.distance', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='elapsed_time', full_name='routeguide.RouteSummary.elapsed_time', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=283,
serialized_end=381,
)
_RECTANGLE.fields_by_name['lo'].message_type = _POINT
_RECTANGLE.fields_by_name['hi'].message_type = _POINT
_FEATURE.fields_by_name['location'].message_type = _POINT
_ROUTENOTE.fields_by_name['location'].message_type = _POINT
DESCRIPTOR.message_types_by_name['Point'] = _POINT
DESCRIPTOR.message_types_by_name['Rectangle'] = _RECTANGLE
DESCRIPTOR.message_types_by_name['Feature'] = _FEATURE
DESCRIPTOR.message_types_by_name['RouteNote'] = _ROUTENOTE
DESCRIPTOR.message_types_by_name['RouteSummary'] = _ROUTESUMMARY
Point = _reflection.GeneratedProtocolMessageType('Point', (_message.Message,), dict(
DESCRIPTOR = _POINT,
__module__ = 'route_guide_pb2'
# @@protoc_insertion_point(class_scope:routeguide.Point)
))
_sym_db.RegisterMessage(Point)
Rectangle = _reflection.GeneratedProtocolMessageType('Rectangle', (_message.Message,), dict(
DESCRIPTOR = _RECTANGLE,
__module__ = 'route_guide_pb2'
# @@protoc_insertion_point(class_scope:routeguide.Rectangle)
))
_sym_db.RegisterMessage(Rectangle)
Feature = _reflection.GeneratedProtocolMessageType('Feature', (_message.Message,), dict(
DESCRIPTOR = _FEATURE,
__module__ = 'route_guide_pb2'
# @@protoc_insertion_point(class_scope:routeguide.Feature)
))
_sym_db.RegisterMessage(Feature)
RouteNote = _reflection.GeneratedProtocolMessageType('RouteNote', (_message.Message,), dict(
DESCRIPTOR = _ROUTENOTE,
__module__ = 'route_guide_pb2'
# @@protoc_insertion_point(class_scope:routeguide.RouteNote)
))
_sym_db.RegisterMessage(RouteNote)
RouteSummary = _reflection.GeneratedProtocolMessageType('RouteSummary', (_message.Message,), dict(
DESCRIPTOR = _ROUTESUMMARY,
__module__ = 'route_guide_pb2'
# @@protoc_insertion_point(class_scope:routeguide.RouteSummary)
))
_sym_db.RegisterMessage(RouteSummary)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\033io.grpc.examples.routeguideB\017RouteGuideProtoP\001\242\002\003RTG'))
try:
# THESE ELEMENTS WILL BE DEPRECATED.
# Please use the generated *_pb2_grpc.py files instead.
import grpc
from grpc.framework.common import cardinality
from grpc.framework.interfaces.face import utilities as face_utilities
from grpc.beta import implementations as beta_implementations
from grpc.beta import interfaces as beta_interfaces
class RouteGuideStub(object):
"""Interface exported by the server.
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.GetFeature = channel.unary_unary(
'/routeguide.RouteGuide/GetFeature',
request_serializer=Point.SerializeToString,
response_deserializer=Feature.FromString,
)
self.ListFeatures = channel.unary_stream(
'/routeguide.RouteGuide/ListFeatures',
request_serializer=Rectangle.SerializeToString,
response_deserializer=Feature.FromString,
)
self.RecordRoute = channel.stream_unary(
'/routeguide.RouteGuide/RecordRoute',
request_serializer=Point.SerializeToString,
response_deserializer=RouteSummary.FromString,
)
self.RouteChat = channel.stream_stream(
'/routeguide.RouteGuide/RouteChat',
request_serializer=RouteNote.SerializeToString,
response_deserializer=RouteNote.FromString,
)
class RouteGuideServicer(object):
"""Interface exported by the server.
"""
def GetFeature(self, request, context):
"""A simple RPC.
Obtains the feature at a given position.
A feature with an empty name is returned if there's no feature at the given
position.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ListFeatures(self, request, context):
"""A server-to-client streaming RPC.
Obtains the Features available within the given Rectangle. Results are
streamed rather than returned at once (e.g. in a response message with a
repeated field), as the rectangle may cover a large area and contain a
huge number of features.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def RecordRoute(self, request_iterator, context):
"""A client-to-server streaming RPC.
Accepts a stream of Points on a route being traversed, returning a
RouteSummary when traversal is completed.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def RouteChat(self, request_iterator, context):
"""A Bidirectional streaming RPC.
Accepts a stream of RouteNotes sent while a route is being traversed,
while receiving other RouteNotes (e.g. from other users).
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_RouteGuideServicer_to_server(servicer, server):
rpc_method_handlers = {
'GetFeature': grpc.unary_unary_rpc_method_handler(
servicer.GetFeature,
request_deserializer=Point.FromString,
response_serializer=Feature.SerializeToString,
),
'ListFeatures': grpc.unary_stream_rpc_method_handler(
servicer.ListFeatures,
request_deserializer=Rectangle.FromString,
response_serializer=Feature.SerializeToString,
),
'RecordRoute': grpc.stream_unary_rpc_method_handler(
servicer.RecordRoute,
request_deserializer=Point.FromString,
response_serializer=RouteSummary.SerializeToString,
),
'RouteChat': grpc.stream_stream_rpc_method_handler(
servicer.RouteChat,
request_deserializer=RouteNote.FromString,
response_serializer=RouteNote.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'routeguide.RouteGuide', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
class BetaRouteGuideServicer(object):
"""The Beta API is deprecated for 0.15.0 and later.
It is recommended to use the GA API (classes and functions in this
file not marked beta) for all further purposes. This class was generated
only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0."""
"""Interface exported by the server.
"""
def GetFeature(self, request, context):
"""A simple RPC.
Obtains the feature at a given position.
A feature with an empty name is returned if there's no feature at the given
position.
"""
context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
def ListFeatures(self, request, context):
"""A server-to-client streaming RPC.
Obtains the Features available within the given Rectangle. Results are
streamed rather than returned at once (e.g. in a response message with a
repeated field), as the rectangle may cover a large area and contain a
huge number of features.
"""
context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
def RecordRoute(self, request_iterator, context):
"""A client-to-server streaming RPC.
Accepts a stream of Points on a route being traversed, returning a
RouteSummary when traversal is completed.
"""
context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
def RouteChat(self, request_iterator, context):
"""A Bidirectional streaming RPC.
Accepts a stream of RouteNotes sent while a route is being traversed,
while receiving other RouteNotes (e.g. from other users).
"""
context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
class BetaRouteGuideStub(object):
"""The Beta API is deprecated for 0.15.0 and later.
It is recommended to use the GA API (classes and functions in this
file not marked beta) for all further purposes. This class was generated
only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0."""
"""Interface exported by the server.
"""
def GetFeature(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
"""A simple RPC.
Obtains the feature at a given position.
A feature with an empty name is returned if there's no feature at the given
position.
"""
raise NotImplementedError()
GetFeature.future = None
def ListFeatures(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
"""A server-to-client streaming RPC.
Obtains the Features available within the given Rectangle. Results are
streamed rather than returned at once (e.g. in a response message with a
repeated field), as the rectangle may cover a large area and contain a
huge number of features.
"""
raise NotImplementedError()
def RecordRoute(self, request_iterator, timeout, metadata=None, with_call=False, protocol_options=None):
"""A client-to-server streaming RPC.
Accepts a stream of Points on a route being traversed, returning a
RouteSummary when traversal is completed.
"""
raise NotImplementedError()
RecordRoute.future = None
def RouteChat(self, request_iterator, timeout, metadata=None, with_call=False, protocol_options=None):
"""A Bidirectional streaming RPC.
Accepts a stream of RouteNotes sent while a route is being traversed,
while receiving other RouteNotes (e.g. from other users).
"""
raise NotImplementedError()
def beta_create_RouteGuide_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None):
"""The Beta API is deprecated for 0.15.0 and later.
It is recommended to use the GA API (classes and functions in this
file not marked beta) for all further purposes. This function was
generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0"""
request_deserializers = {
('routeguide.RouteGuide', 'GetFeature'): Point.FromString,
('routeguide.RouteGuide', 'ListFeatures'): Rectangle.FromString,
('routeguide.RouteGuide', 'RecordRoute'): Point.FromString,
('routeguide.RouteGuide', 'RouteChat'): RouteNote.FromString,
}
response_serializers = {
('routeguide.RouteGuide', 'GetFeature'): Feature.SerializeToString,
('routeguide.RouteGuide', 'ListFeatures'): Feature.SerializeToString,
('routeguide.RouteGuide', 'RecordRoute'): RouteSummary.SerializeToString,
('routeguide.RouteGuide', 'RouteChat'): RouteNote.SerializeToString,
}
method_implementations = {
('routeguide.RouteGuide', 'GetFeature'): face_utilities.unary_unary_inline(servicer.GetFeature),
('routeguide.RouteGuide', 'ListFeatures'): face_utilities.unary_stream_inline(servicer.ListFeatures),
('routeguide.RouteGuide', 'RecordRoute'): face_utilities.stream_unary_inline(servicer.RecordRoute),
('routeguide.RouteGuide', 'RouteChat'): face_utilities.stream_stream_inline(servicer.RouteChat),
}
server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout)
return beta_implementations.server(method_implementations, options=server_options)
def beta_create_RouteGuide_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None):
"""The Beta API is deprecated for 0.15.0 and later.
It is recommended to use the GA API (classes and functions in this
file not marked beta) for all further purposes. This function was
generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0"""
request_serializers = {
('routeguide.RouteGuide', 'GetFeature'): Point.SerializeToString,
('routeguide.RouteGuide', 'ListFeatures'): Rectangle.SerializeToString,
('routeguide.RouteGuide', 'RecordRoute'): Point.SerializeToString,
('routeguide.RouteGuide', 'RouteChat'): RouteNote.SerializeToString,
}
response_deserializers = {
('routeguide.RouteGuide', 'GetFeature'): Feature.FromString,
('routeguide.RouteGuide', 'ListFeatures'): Feature.FromString,
('routeguide.RouteGuide', 'RecordRoute'): RouteSummary.FromString,
('routeguide.RouteGuide', 'RouteChat'): RouteNote.FromString,
}
cardinalities = {
'GetFeature': cardinality.Cardinality.UNARY_UNARY,
'ListFeatures': cardinality.Cardinality.UNARY_STREAM,
'RecordRoute': cardinality.Cardinality.STREAM_UNARY,
'RouteChat': cardinality.Cardinality.STREAM_STREAM,
}
stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size)
return beta_implementations.dynamic_stub(channel, 'routeguide.RouteGuide', cardinalities, options=stub_options)
except ImportError:
pass
# @@protoc_insertion_point(module_scope)
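# Illustrative client usage of the GA classes above (not part of the generated
# file; requires grpcio, and the server address is hypothetical):
#
#   channel = grpc.insecure_channel('localhost:50051')
#   stub = RouteGuideStub(channel)
#   feature = stub.GetFeature(Point(latitude=409146138, longitude=-746188906))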
|
EvanK/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/cnos/cnos_vlag.py
|
52
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
#
# Copyright (C) 2017 Lenovo, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# Module to send VLAG commands to Lenovo Switches
# Lenovo Networking
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cnos_vlag
author: "Anil Kumar Muraleedharan (@amuraleedhar)"
short_description: Manage VLAG resources and attributes on devices running
Lenovo CNOS
description:
- This module allows you to work with virtual Link Aggregation Groups
(vLAG) related configurations. The operators used are overloaded to ensure
control over switch vLAG configurations. Apart from the regular device
connection related attributes, there are four vLAG arguments which are
overloaded variables that will perform further configurations. They are
vlagArg1, vlagArg2, vlagArg3, and vlagArg4. For more details on how to use
these arguments, see [Overloaded Variables].
This module uses SSH to manage network device configuration.
The results of the operation will be placed in a directory named 'results'
that must be created by the user in their local directory to where the
playbook is run.
version_added: "2.3"
extends_documentation_fragment: cnos
options:
vlagArg1:
description:
- This is an overloaded vlag first argument. Usage of this argument can
        be found in the User Guide referenced above.
required: Yes
default: Null
choices: [enable, auto-recovery,config-consistency,isl,mac-address-table,
peer-gateway,priority,startup-delay,tier-id,vrrp,instance,hlthchk]
vlagArg2:
description:
- This is an overloaded vlag second argument. Usage of this argument can
        be found in the User Guide referenced above.
required: No
default: Null
choices: [Interval in seconds,disable or strict,Port Aggregation Number,
VLAG priority,Delay time in seconds,VLAG tier-id value,
VLAG instance number,keepalive-attempts,keepalive-interval,
retry-interval,peer-ip]
vlagArg3:
description:
- This is an overloaded vlag third argument. Usage of this argument can
        be found in the User Guide referenced above.
required: No
default: Null
choices: [enable or port-aggregation,Number of keepalive attempts,
Interval in seconds,Interval in seconds,
VLAG health check peer IP4 address]
vlagArg4:
description:
- This is an overloaded vlag fourth argument. Usage of this argument can
        be found in the User Guide referenced above.
required: No
default: Null
choices: [Port Aggregation Number,default or management]
'''
EXAMPLES = '''
Tasks : The following are examples of using the module cnos_vlag. These are
written in the main.yml file of the tasks directory.
---
- name: Test Vlag - enable
cnos_vlag:
deviceType: "{{ hostvars[inventory_hostname]['deviceType']}}"
outputfile: "./results/cnos_vlag_{{ inventory_hostname }}_output.txt"
vlagArg1: "enable"
- name: Test Vlag - autorecovery
cnos_vlag:
deviceType: "{{ hostvars[inventory_hostname]['deviceType']}}"
outputfile: "./results/cnos_vlag_{{ inventory_hostname }}_output.txt"
vlagArg1: "auto-recovery"
vlagArg2: 266
- name: Test Vlag - config-consistency
cnos_vlag:
deviceType: "{{ hostvars[inventory_hostname]['deviceType']}}"
outputfile: "./results/cnos_vlag_{{ inventory_hostname }}_output.txt"
vlagArg1: "config-consistency"
vlagArg2: "strict"
- name: Test Vlag - isl
cnos_vlag:
deviceType: "{{ hostvars[inventory_hostname]['deviceType']}}"
outputfile: "./results/cnos_vlag_{{ inventory_hostname }}_output.txt"
vlagArg1: "isl"
vlagArg2: 23
- name: Test Vlag - mac-address-table
cnos_vlag:
deviceType: "{{ hostvars[inventory_hostname]['deviceType']}}"
outputfile: "./results/cnos_vlag_{{ inventory_hostname }}_output.txt"
vlagArg1: "mac-address-table"
- name: Test Vlag - peer-gateway
cnos_vlag:
deviceType: "{{ hostvars[inventory_hostname]['deviceType']}}"
outputfile: "./results/cnos_vlag_{{ inventory_hostname }}_output.txt"
vlagArg1: "peer-gateway"
- name: Test Vlag - priority
cnos_vlag:
deviceType: "{{ hostvars[inventory_hostname]['deviceType']}}"
outputfile: "./results/cnos_vlag_{{ inventory_hostname }}_output.txt"
vlagArg1: "priority"
vlagArg2: 1313
- name: Test Vlag - startup-delay
cnos_vlag:
deviceType: "{{ hostvars[inventory_hostname]['deviceType']}}"
outputfile: "./results/cnos_vlag_{{ inventory_hostname }}_output.txt"
vlagArg1: "startup-delay"
vlagArg2: 323
- name: Test Vlag - tier-id
cnos_vlag:
deviceType: "{{ hostvars[inventory_hostname]['deviceType']}}"
outputfile: "./results/cnos_vlag_{{ inventory_hostname }}_output.txt"
vlagArg1: "tier-id"
vlagArg2: 313
- name: Test Vlag - vrrp
cnos_vlag:
deviceType: "{{ hostvars[inventory_hostname]['deviceType']}}"
outputfile: "./results/cnos_vlag_{{ inventory_hostname }}_output.txt"
vlagArg1: "vrrp"
- name: Test Vlag - instance
cnos_vlag:
deviceType: "{{ hostvars[inventory_hostname]['deviceType']}}"
outputfile: "./results/cnos_vlag_{{ inventory_hostname }}_output.txt"
vlagArg1: "instance"
vlagArg2: 33
vlagArg3: 333
- name: Test Vlag - instance2
cnos_vlag:
deviceType: "{{ hostvars[inventory_hostname]['deviceType']}}"
outputfile: "./results/cnos_vlag_{{ inventory_hostname }}_output.txt"
vlagArg1: "instance"
vlagArg2: "33"
- name: Test Vlag - keepalive-attempts
cnos_vlag:
deviceType: "{{ hostvars[inventory_hostname]['deviceType']}}"
outputfile: "./results/cnos_vlag_{{ inventory_hostname }}_output.txt"
vlagArg1: "hlthchk"
vlagArg2: "keepalive-attempts"
vlagArg3: 13
- name: Test Vlag - keepalive-interval
cnos_vlag:
deviceType: "{{ hostvars[inventory_hostname]['deviceType']}}"
outputfile: "./results/cnos_vlag_{{ inventory_hostname }}_output.txt"
vlagArg1: "hlthchk"
vlagArg2: "keepalive-interval"
vlagArg3: 131
- name: Test Vlag - retry-interval
cnos_vlag:
deviceType: "{{ hostvars[inventory_hostname]['deviceType']}}"
outputfile: "./results/cnos_vlag_{{ inventory_hostname }}_output.txt"
vlagArg1: "hlthchk"
vlagArg2: "retry-interval"
vlagArg3: 133
- name: Test Vlag - peer ip
cnos_vlag:
deviceType: "{{ hostvars[inventory_hostname]['deviceType']}}"
outputfile: "./results/cnos_vlag_{{ inventory_hostname }}_output.txt"
vlagArg1: "hlthchk"
vlagArg2: "peer-ip"
vlagArg3: "1.2.3.4"
'''
RETURN = '''
msg:
description: Success or failure message
returned: always
type: str
sample: "vLAG configurations accomplished"
'''
import sys
import time
import socket
import array
import json
import re
try:
from ansible.module_utils.network.cnos import cnos
HAS_LIB = True
except Exception:
HAS_LIB = False
from ansible.module_utils.basic import AnsibleModule
from collections import defaultdict
def vlagConfig(module, prompt, answer):
retVal = ''
# vlag config command happens here.
command = 'vlag '
vlagArg1 = module.params['vlagArg1']
vlagArg2 = module.params['vlagArg2']
vlagArg3 = module.params['vlagArg3']
vlagArg4 = module.params['vlagArg4']
deviceType = module.params['deviceType']
if(vlagArg1 == "enable"):
# debugOutput("enable")
command = command + vlagArg1 + " "
elif(vlagArg1 == "auto-recovery"):
# debugOutput("auto-recovery")
command = command + vlagArg1 + " "
value = cnos.checkSanityofVariable(
deviceType, "vlag_auto_recovery", vlagArg2)
if(value == "ok"):
command = command + vlagArg2
else:
retVal = "Error-160"
return retVal
elif(vlagArg1 == "config-consistency"):
# debugOutput("config-consistency")
command = command + vlagArg1 + " "
value = cnos.checkSanityofVariable(
deviceType, "vlag_config_consistency", vlagArg2)
if(value == "ok"):
command = command + vlagArg2
else:
retVal = "Error-161"
return retVal
elif(vlagArg1 == "isl"):
# debugOutput("isl")
command = command + vlagArg1 + " port-channel "
value = cnos.checkSanityofVariable(
deviceType, "vlag_port_aggregation", vlagArg2)
if(value == "ok"):
command = command + vlagArg2
else:
retVal = "Error-162"
return retVal
elif(vlagArg1 == "mac-address-table"):
# debugOutput("mac-address-table")
command = command + vlagArg1 + " refresh"
elif(vlagArg1 == "peer-gateway"):
# debugOutput("peer-gateway")
command = command + vlagArg1 + " "
elif(vlagArg1 == "priority"):
# debugOutput("priority")
command = command + vlagArg1 + " "
value = cnos.checkSanityofVariable(deviceType, "vlag_priority",
vlagArg2)
if(value == "ok"):
command = command + vlagArg2
else:
retVal = "Error-163"
return retVal
elif(vlagArg1 == "startup-delay"):
# debugOutput("startup-delay")
command = command + vlagArg1 + " "
value = cnos.checkSanityofVariable(
deviceType, "vlag_startup_delay", vlagArg2)
if(value == "ok"):
command = command + vlagArg2
else:
retVal = "Error-164"
return retVal
elif(vlagArg1 == "tier-id"):
# debugOutput("tier-id")
command = command + vlagArg1 + " "
value = cnos.checkSanityofVariable(deviceType, "vlag_tier_id", vlagArg2)
if(value == "ok"):
command = command + vlagArg2
else:
retVal = "Error-165"
return retVal
elif(vlagArg1 == "vrrp"):
# debugOutput("vrrp")
command = command + vlagArg1 + " active"
elif(vlagArg1 == "instance"):
# debugOutput("instance")
command = command + vlagArg1 + " "
value = cnos.checkSanityofVariable(deviceType, "vlag_instance",
vlagArg2)
if(value == "ok"):
command = command + vlagArg2
if(vlagArg3 is not None):
command = command + " port-channel "
value = cnos.checkSanityofVariable(
deviceType, "vlag_port_aggregation", vlagArg3)
if(value == "ok"):
command = command + vlagArg3
else:
retVal = "Error-162"
return retVal
else:
command = command + " enable "
else:
retVal = "Error-166"
return retVal
elif(vlagArg1 == "hlthchk"):
# debugOutput("hlthchk")
command = command + vlagArg1 + " "
value = cnos.checkSanityofVariable(
deviceType, "vlag_hlthchk_options", vlagArg2)
if(value == "ok"):
if(vlagArg2 == "keepalive-attempts"):
value = cnos.checkSanityofVariable(
deviceType, "vlag_keepalive_attempts", vlagArg3)
if(value == "ok"):
command = command + vlagArg2 + " " + vlagArg3
else:
retVal = "Error-167"
return retVal
elif(vlagArg2 == "keepalive-interval"):
value = cnos.checkSanityofVariable(
deviceType, "vlag_keepalive_interval", vlagArg3)
if(value == "ok"):
command = command + vlagArg2 + " " + vlagArg3
else:
retVal = "Error-168"
return retVal
elif(vlagArg2 == "retry-interval"):
value = cnos.checkSanityofVariable(
deviceType, "vlag_retry_interval", vlagArg3)
if(value == "ok"):
command = command + vlagArg2 + " " + vlagArg3
else:
retVal = "Error-169"
return retVal
elif(vlagArg2 == "peer-ip"):
                # IPv6 peer addresses are not handled here.
value = cnos.checkSanityofVariable(
deviceType, "vlag_peerip", vlagArg3)
if(value == "ok"):
command = command + vlagArg2 + " " + vlagArg3
if(vlagArg4 is not None):
value = cnos.checkSanityofVariable(
deviceType, "vlag_peerip_vrf", vlagArg4)
if(value == "ok"):
command = command + " vrf " + vlagArg4
else:
retVal = "Error-170"
return retVal
else:
retVal = "Error-171"
return retVal
else:
retVal = "Error-172"
return retVal
# debugOutput(command)
cmd = [{'command': command, 'prompt': None, 'answer': None}]
retVal = retVal + str(cnos.run_cnos_commands(module, cmd))
return retVal
# EOM
def main():
#
# Define parameters for vlag creation entry
#
module = AnsibleModule(
argument_spec=dict(
outputfile=dict(required=True),
host=dict(required=False),
username=dict(required=False),
password=dict(required=False, no_log=True),
enablePassword=dict(required=False, no_log=True),
deviceType=dict(required=True),
vlagArg1=dict(required=True),
vlagArg2=dict(required=False),
vlagArg3=dict(required=False),
vlagArg4=dict(required=False),),
supports_check_mode=False)
outputfile = module.params['outputfile']
output = ""
    # Send the CLI command
output = output + str(vlagConfig(module, '(config)#', None))
# Save it into the file
file = open(outputfile, "a")
file.write(output)
file.close()
    # TODO: add logic to detect whether changes actually occurred
errorMsg = cnos.checkOutputForError(output)
if(errorMsg is None):
module.exit_json(changed=True, msg="VLAG configurations accomplished")
else:
module.fail_json(msg=errorMsg)
if __name__ == '__main__':
main()
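# A minimal playbook task for this module might look like the following
# sketch (the module name and deviceType value are assumptions, since they
# are not shown in this file):
#
#   - name: Configure VLAG tier id
#     cnos_vlag:
#       deviceType: "g8272_cnos"
#       outputfile: "./results/vlag_output.txt"
#       vlagArg1: "tier-id"
#       vlagArg2: "313"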
|
aethaniel/micropython
|
refs/heads/master
|
tests/basics/list_slice_assign_grow.py
|
72
|
x = list(range(2))
l = list(x)
l[0:0] = [10]
print(l)
l = list(x)
l[:0] = [10, 20]
print(l)
l = list(x)
l[0:0] = [10, 20, 30, 40]
print(l)
l = list(x)
l[1:1] = [10, 20, 30, 40]
print(l)
l = list(x)
l[2:] = [10, 20, 30, 40]
print(l)
# Weird cases
l = list(x)
l[1:0] = [10, 20, 30, 40]
print(l)
l = list(x)
l[100:100] = [10, 20, 30, 40]
print(l)
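# For reference, CPython semantics (which these tests rely on):
# with l == [0, 1], l[1:0] = [10, 20, 30, 40] inserts at index 1, giving
# [0, 10, 20, 30, 40, 1]; l[100:100] = [...] clamps to the end, giving
# [0, 1, 10, 20, 30, 40].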
|
ruuk/script.module.youtube.dl
|
refs/heads/master
|
lib/youtube_dl/extractor/clyp.py
|
22
|
from __future__ import unicode_literals
from .common import InfoExtractor
from ..compat import (
compat_parse_qs,
compat_urllib_parse_urlparse,
)
from ..utils import (
float_or_none,
unified_timestamp,
)
class ClypIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?clyp\.it/(?P<id>[a-z0-9]+)'
_TESTS = [{
'url': 'https://clyp.it/ojz2wfah',
'md5': '1d4961036c41247ecfdcc439c0cddcbb',
'info_dict': {
'id': 'ojz2wfah',
'ext': 'mp3',
'title': 'Krisson80 - bits wip wip',
'description': '#Krisson80BitsWipWip #chiptune\n#wip',
'duration': 263.21,
'timestamp': 1443515251,
'upload_date': '20150929',
},
}, {
'url': 'https://clyp.it/b04p1odi?token=b0078e077e15835845c528a44417719d',
'info_dict': {
'id': 'b04p1odi',
'ext': 'mp3',
'title': 'GJ! (Reward Edit)',
'description': 'Metal Resistance (THE ONE edition)',
'duration': 177.789,
'timestamp': 1528241278,
'upload_date': '20180605',
},
'params': {
'skip_download': True,
},
}]
def _real_extract(self, url):
audio_id = self._match_id(url)
qs = compat_parse_qs(compat_urllib_parse_urlparse(url).query)
token = qs.get('token', [None])[0]
query = {}
if token:
query['token'] = token
metadata = self._download_json(
'https://api.clyp.it/%s' % audio_id, audio_id, query=query)
formats = []
for secure in ('', 'Secure'):
for ext in ('Ogg', 'Mp3'):
format_id = '%s%s' % (secure, ext)
format_url = metadata.get('%sUrl' % format_id)
if format_url:
formats.append({
'url': format_url,
'format_id': format_id,
'vcodec': 'none',
})
self._sort_formats(formats)
title = metadata['Title']
description = metadata.get('Description')
duration = float_or_none(metadata.get('Duration'))
timestamp = unified_timestamp(metadata.get('DateCreated'))
return {
'id': audio_id,
'title': title,
'description': description,
'duration': duration,
'timestamp': timestamp,
'formats': formats,
}
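# Note: the format loop in _real_extract assumes the Clyp metadata JSON
# exposes OggUrl / Mp3Url and SecureOggUrl / SecureMp3Url keys; each key
# that is present yields one audio-only format (vcodec 'none').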
|
goodwinnk/intellij-community
|
refs/heads/master
|
python/testData/copyPaste/TryBlockWithBadSelection.src.py
|
35
|
def f():
<selection>try:
x = 1
y = 2</selection>
finally:
pass
|
izzyalonso/tndata_backend
|
refs/heads/master
|
tndata_backend/utils/views.py
|
2
|
import logging
from django import http
from django.conf import settings
from django.contrib import messages
from django.contrib.auth import authenticate, get_user_model, login, logout
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import Group
from django.core.urlresolvers import reverse, reverse_lazy
from django.shortcuts import redirect, render
from django.views.generic import TemplateView, FormView
from django_rq import job
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
from goals.permissions import CONTENT_VIEWERS
from goals.models import Organization, Program
from userprofile.forms import UserForm
from . db import get_object_or_none
from . email import (
send_new_user_request_notification_to_managers,
send_new_enduser_welcome,
send_new_user_welcome,
)
from . forms import EmailForm, SetNewPasswordForm
from . models import ResetToken
from . slack import post_message
from . user_utils import username_hash, get_client_ip
logger = logging.getLogger(__name__)
@job
def _enroll_user_in_program(user_id, program_id):
User = get_user_model()
try:
user = User.objects.get(pk=user_id)
program = Program.objects.get(pk=program_id)
for goal in program.auto_enrolled_goals.all():
goal.enroll(user)
except (User.DoesNotExist, Program.DoesNotExist):
pass
def _setup_content_viewer(request, user, password):
"""Handle addional post-account-creation tasks for content viewers."""
# Add them to the appropriate groups.
for group in Group.objects.filter(name=CONTENT_VIEWERS):
user.groups.add(group)
# Log the user in & set an appropriate message.
user = authenticate(username=user.username, password=password)
login(request, user)
messages.success(request, "Welcome! Your account has been created.")
# Send some email notifications.
send_new_user_request_notification_to_managers(user)
send_new_user_welcome(user)
# Ping slack so this doesn't go unnoticed.
msg = (
":warning: Hey @bkmontgomery, {user} <{email}> just signed up as a "
"content viewer. If necessary, update their permissions at: "
"https://app.tndata.org/admin/auth/user/{id}/"
)
msg = msg.format(user=user.get_full_name(), email=user.email, id=user.id)
post_message("#tech", msg)
def _setup_enduser(request, user, send_welcome_email=True):
"""Handle addional post-account-creation tasks for end-users."""
User = get_user_model()
# Check for any Organization & Program parameters (falling back to the
# session values), and make the user an organization member and enroll
# them in the Program's goals (if applicable).
try:
program_id = request.POST.get('program', request.session.get('program'))
org_id = request.POST.get('organization', request.session.get('organization'))
program = Program.objects.get(pk=program_id, organization__id=org_id)
program.members.add(user)
program.organization.members.add(user)
_enroll_user_in_program.delay(user.id, program.id)
# Don't make them go through onboarding
user.userprofile.needs_onboarding = False
user.userprofile.save()
except Program.DoesNotExist:
pass
# Send some email notifications.
if send_welcome_email:
send_new_enduser_welcome(user)
# Ping slack so this doesn't go unnoticed.
num_users = User.objects.filter(is_active=True).count()
msg = (
":exclamation: :point_right: {user} <{email}> just joined. That "
"makes *{num_users}* active accounts."
)
msg = msg.format(
user=user.get_full_name(), email=user.email, num_users=num_users
)
# Set an appropriate message.
messages.success(request, "Welcome to Compass! Your account has been updated.")
post_message("#tech", msg)
def signup(request, content_viewer=False, enduser=False):
"""This view handles user account creation. There are different scenarios
for different types of users, as indicated by the keyword arguments to
this function:
- content_viewer: If True, the account will be created as a content viewer
      (i.e. a user who may need access to the editing tools at some point). This
user account will be automatically activated, logged in, and added to
some groups.
/utils/signup/
- enduser: If True, the account will be created as if the user intends to
download and use the mobile app. Their account will be created, and the
user will be redirected to links to the mobile app(s).
/join/
For Endusers: The request may also include one or more parameters indicating
the Organization & Program in which the user should be a member. For
example: `/join/?organization=42&program=7`
- Organization ID: The user will be added as a member of the specified
organization.
- Program ID: The user will be added as a member of the specified
      program and will be auto-enrolled in certain goals from the program.
"""
organization = get_object_or_none(Organization,
pk=request.GET.get('organization'))
program = get_object_or_none(Program, pk=request.GET.get('program'))
# Stash these in a session for later... (see `confirm_join`).
if organization or program:
request.session['organization'] = organization.id if organization else None
request.session['program'] = program.id if program else None
# Set the template/redirection based on the type of user signup
if enduser:
template = 'utils/signup_enduser.html'
redirect_to = 'join'
login_url = "{}?next={}".format(reverse('login'), reverse('utils:confirm'))
else:
template = 'utils/signup_content_viewer.html'
redirect_to = '/'
login_url = "{}".format(reverse('login'))
if request.method == "POST":
form = UserForm(request.POST)
password_form = SetNewPasswordForm(request.POST, prefix="pw")
if form.is_valid() and password_form.is_valid():
User = get_user_model()
try:
# Ensure the email isn't already tied to an account
u = User.objects.get(email=form.cleaned_data['email'])
messages.info(request, "It looks like you already have an "
"account! Log in to continue.")
return redirect(login_url)
except User.DoesNotExist:
# Create & activate the account, and do initial record-keeping
u = form.save(commit=False)
u.is_active = True
u.username = username_hash(u.email)
u.set_password(password_form.cleaned_data['password'])
u.save()
# Set their IP address.
u.userprofile.ip_address = get_client_ip(request)
u.userprofile.save()
if content_viewer:
password = password_form.cleaned_data['password']
_setup_content_viewer(request, u, password)
if enduser:
_setup_enduser(request, u)
redirect_to = reverse(redirect_to) + "?c=1"
return redirect(redirect_to)
else:
messages.error(request, "We could not process your request. "
"Please see the details, below.")
elif enduser and request.user.is_authenticated():
# Redirect to the confirmation page.
return redirect(reverse("utils:confirm"))
else:
password_form = SetNewPasswordForm(prefix='pw')
if organization:
form = UserForm(for_organization=organization.name)
elif program:
form = UserForm(for_organization=program.organization.name)
else:
form = UserForm()
# The following is a list of GET request variables that we'll pass along
# as POST request vars once a user submits the login form.
passthru_vars = ['organization', 'program']
passthru_vars = {
key: request.GET.get(key) for key in passthru_vars
if request.GET.get(key)
}
context = {
'organization': organization,
'program': program,
'passthru_vars': passthru_vars,
'form': form,
'password_form': password_form,
'completed': bool(request.GET.get("c", False)),
'android_url': settings.PLAY_APP_URL,
'ios_url': settings.IOS_APP_URL,
'login_url': login_url,
}
return render(request, template, context)
@login_required
def confirm_join(request):
# Intermediary step for the signup--for users with existing accounts.
org = get_object_or_none(Organization, pk=request.session.get('organization'))
program = get_object_or_none(Program, pk=request.session.get('program'))
if org is None and program is None:
logger.warning("Org & Program are None in utils.views.confirm_join")
# POST: enroll the user and redirect to a placeholder page.
if request.POST and bool(request.POST.get('confirmed', False)):
_setup_enduser(request, request.user, send_welcome_email=False)
# Do a little cleanup. At this point, we shouldn't need these anymore.
request.session.pop('organization', None)
request.session.pop('program', None)
return redirect(reverse("utils:confirm") + "?confirmed=1")
template = 'utils/confirm_join.html'
context = {
'organization': org,
'program': program,
'confirmed': bool(request.GET.get('confirmed', False)),
}
return render(request, template, context)
@api_view(['POST'])
def reset_password(request):
"""This defines an API endpoint that allows users to reset their password.
To reset a password, simply send a POST request with the following data:
{email: 'YOUR EMAIL ADDRESS'}
    This will reset the user's password, and temporarily deactivate their
    account. Instructions on resetting their password are emailed to them.
Returns a 200 response upon success, and a 400 response on failure.
----
"""
    # TODO: We should send a text message with a token in it & have the user
    # tap something to reset their password.
# Validate the email
form = EmailForm({'email_address': request.data.get('email')})
if form.is_valid():
logout(request) # Sends the user_logged_out signal
User = get_user_model()
try:
u = User.objects.get(email=form.cleaned_data['email_address'])
            # Set unusable password and disable account
u.set_unusable_password()
u.is_active = False
u.save()
# Generate a token for this session and email the user.
token = ResetToken(request, u.email)
token.generate()
token.send_email_notification()
msg = {'message': 'Password Reset. Please check your email for instructions'}
return Response(msg, status=status.HTTP_200_OK)
except User.DoesNotExist:
pass
error = {"error": "Invalid email address or account"}
return Response(error, status=status.HTTP_400_BAD_REQUEST)
class PasswordResetRequestView(FormView):
form_class = EmailForm
template_name = "utils/password_reset_request.html"
success_url = reverse_lazy('utils:password_reset_notification')
def form_invalid(self, form):
ctx = self.get_context_data(form=form)
ctx['invalid_email'] = True
return self.render_to_response(ctx)
def form_valid(self, form):
User = get_user_model()
try:
u = User.objects.get(email=form.cleaned_data['email_address'])
except User.DoesNotExist:
return self.form_invalid(form)
        # Set unusable password and disable account
u.set_unusable_password()
u.is_active = False
u.save()
# Generate a token for this session and email the user.
token = ResetToken(self.request, u.email)
token.generate()
token.send_email_notification()
return redirect(self.success_url)
class PasswordResetNotificationView(TemplateView):
template_name = "utils/password_reset_notification.html"
class SetNewPasswordView(FormView):
form_class = SetNewPasswordForm
template_name = "utils/set_new_password.html"
success_url = reverse_lazy('utils:password_reset_complete')
def get(self, request, *args, **kwargs):
has_token = 'token' in kwargs and ResetToken.check(request, kwargs['token'])
        authenticated = request.user.is_authenticated()
if not has_token and not authenticated:
# If there's no token, a user must be logged in to reset their pw.
url = "{0}?next={1}".format(
reverse_lazy("login"),
reverse_lazy("utils:set_new_password")
)
return redirect(url)
# Otherwise, the user should have requested this page with a token.
if has_token or authenticated:
return super(SetNewPasswordView, self).get(request, *args, **kwargs)
return http.HttpResponseForbidden()
def form_valid(self, form):
# Update the user's password an re-enable their account
email = ResetToken.get_email(self.request)
if email is None and self.request.user.is_authenticated():
# No email, but the user is logged in, so use theirs
email = self.request.user.email
elif email is None:
# Otherwise, we need to ask them again.
return redirect(reverse_lazy('utils:password_reset'))
u = get_user_model().objects.get(email=email)
u.set_password(form.cleaned_data['password'])
u.is_active = True
u.save()
return redirect(self.get_success_url())
class PasswordResetCompleteView(TemplateView):
template_name = "utils/password_reset_complete.html"
class FiveHundred(TemplateView):
template_name = "500.html"
class FourOhFour(TemplateView):
template_name = "404.html"
class FourOhThree(TemplateView):
template_name = "403.html"
|
macopedia/hr
|
refs/heads/8.0
|
__unported__/hr_payroll_register/report/payroll_register.py
|
28
|
# -*- coding:utf-8 -*-
#
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013 Michael Telahun Makonnen <mmakonnen@gmail.com>
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
import time
from openerp.report import report_sxw
class Parser(report_sxw.rml_parse):
def __init__(self, cr, uid, name, context):
super(Parser, self).__init__(cr, uid, name, context)
self.localcontext.update({
'time': time,
'get_no': self.get_no,
'get_basic': self.get_basic,
'get_ot': self.get_ot,
'get_transportation': self.get_transportation,
'get_allowances': self.get_allowances,
'get_gross': self.get_gross,
'get_taxable_gross': self.get_taxable_gross,
'get_ded_fit': self.get_ded_fit,
'get_ded_pf_ee': self.get_ded_pf_ee,
'get_deduct': self.get_deduct,
'get_total_deduct': self.get_total_deduct,
'get_net': self.get_net,
'get_er_contributions': self.get_er_contributions,
'get_details_by_payslip': self.get_details_by_payslip,
})
self.no = 0
self.salary = 0.0
self.ot = 0.0
self.transportation = 0.0
self.allowances = 0.0
self.gross = 0.0
self.taxable_gross = 0.0
self.ded_fit = 0.0
self.ded_pf_ee = 0.0
self.deduct = 0.0
self.total_deduct = 0.0
self.net = 0.0
self.er_contributions = 0.0
self.saved_run_id = -1
def _reset_values(self, run_id):
self.no = 0
self.salary = 0.0
self.ot = 0.0
self.transportation = 0.0
self.allowances = 0.0
self.gross = 0.0
self.taxable_gross = 0.0
self.ded_fit = 0.0
self.ded_pf_ee = 0.0
self.deduct = 0.0
self.total_deduct = 0.0
self.net = 0.0
self.er_contributions = 0.0
self.saved_run_id = run_id
def get_details_by_payslip(self, payslips):
res = []
for slip in payslips:
if self.saved_run_id != slip.payslip_run_id.id:
self._reset_values(slip.payslip_run_id.id)
tmp = self.get_details_by_rule_category(
slip.details_by_salary_rule_category)
tmp['name'] = slip.employee_id.name
tmp['id_no'] = slip.employee_id.f_employee_no
res.append(tmp)
return res
# Most of this function (except at the end) is copied verbatim from
# the Pay Slip Details Report
#
def get_details_by_rule_category(self, obj):
payslip_line = self.pool.get('hr.payslip.line')
rule_cate_obj = self.pool.get('hr.salary.rule.category')
def get_recursive_parent(rule_categories):
if not rule_categories:
return []
if rule_categories[0].parent_id:
rule_categories.insert(0, rule_categories[0].parent_id)
get_recursive_parent(rule_categories)
return rule_categories
res = []
result = {}
ids = []
# Choose only the categories (or rules) that we want to
# show in the report.
#
reg_line = {
'name': '',
'id_no': '',
'salary': 0,
'ot': 0,
'transportation': 0,
'allowances': 0,
'taxable_gross': 0,
'gross': 0,
'fit': 0,
'ee_pension': 0,
'deductions': 0,
'deductions_total': 0,
'net': 0,
'er_contributions': 0,
}
# Arrange the Pay Slip Lines by category
#
        for rec in obj:
            ids.append(rec.id)
if ids:
self.cr.execute('''
SELECT pl.id, pl.category_id FROM hr_payslip_line as pl
LEFT JOIN hr_salary_rule_category AS rc
on (pl.category_id = rc.id)
WHERE pl.id in %s
GROUP BY rc.parent_id, pl.sequence, pl.id, pl.category_id
ORDER BY pl.sequence, rc.parent_id''', (tuple(ids),))
for x in self.cr.fetchall():
result.setdefault(x[1], [])
result[x[1]].append(x[0])
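        # At this point `result` maps category_id -> [payslip line ids], e.g.
        # {basic_cat_id: [11, 12], alw_cat_id: [13]} (hypothetical ids).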
for key, value in result.iteritems():
rule_categories = rule_cate_obj.browse(
self.cr, self.uid, [key])
parents = get_recursive_parent(rule_categories)
category_total = 0
for line in payslip_line.browse(self.cr, self.uid, value):
category_total += line.total
level = 0
for parent in parents:
res.append({
'rule_category': parent.name,
'name': parent.name,
'code': parent.code,
'level': level,
'total': category_total,
})
level += 1
for line in payslip_line.browse(self.cr, self.uid, value):
res.append({
'rule_category': line.name,
'name': line.name,
'code': line.code,
'total': line.total,
'level': level
})
for r in res:
# Level 0 is the category
if r['code'] == 'BASIC' and r['level'] == 0:
reg_line['salary'] = r['total']
elif r['code'] == 'OT':
reg_line['ot'] = r['total']
elif r['code'] == 'TRA' or r['code'] == 'TRVA':
reg_line['transportation'] = r['total']
elif r['code'] == 'ALW':
reg_line['allowances'] = r['total']
elif r['code'] == 'TXBL':
reg_line['taxable_gross'] = r['total']
elif r['code'] == 'GROSS':
reg_line['gross'] = r['total']
elif r['code'] == 'FITCALC':
reg_line['fit'] = r['total']
elif r['code'] == 'PENFEE':
reg_line['ee_pension'] = r['total']
elif r['code'] == 'DED':
reg_line['deductions'] = r['total']
elif r['code'] == 'DEDTOTAL':
reg_line['deductions_total'] = r['total']
elif r['code'] == 'NET':
reg_line['net'] = r['total']
elif r['code'] == 'ER':
reg_line['er_contributions'] = r['total']
# Make adjustments to subtract from the parent category's total the
# amount of individual rules that we show separately on the sheet.
#
reg_line['allowances'] -= reg_line['transportation']
reg_line['deductions'] -= reg_line['ee_pension']
# Increase running totals
#
self.salary += reg_line['salary']
self.ot += reg_line['ot']
self.transportation += reg_line['transportation']
self.allowances += reg_line['allowances']
self.gross += reg_line['gross']
self.taxable_gross += reg_line['taxable_gross']
self.ded_fit += reg_line['fit']
self.ded_pf_ee += reg_line['ee_pension']
self.deduct += reg_line['deductions']
self.total_deduct += reg_line['deductions_total']
self.net += reg_line['net']
self.er_contributions += reg_line['er_contributions']
return reg_line
def get_basic(self, obj):
return self.salary
def get_ot(self, obj):
return self.ot
def get_transportation(self, obj):
return self.transportation
def get_allowances(self, obj):
return self.allowances
def get_gross(self, obj):
return self.gross
def get_taxable_gross(self, obj):
return self.taxable_gross
def get_ded_fit(self, obj):
return self.ded_fit
def get_ded_pf_ee(self, obj):
return self.ded_pf_ee
def get_deduct(self, obj):
return self.deduct
def get_total_deduct(self, obj):
return self.total_deduct
def get_net(self, obj):
return self.net
def get_er_contributions(self, obj):
return self.er_contributions
def get_no(self):
self.no += 1
return self.no
|
Just-D/chromium-1
|
refs/heads/master
|
tools/telemetry/telemetry/core/discover.py
|
15
|
# Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import fnmatch
import inspect
import os
import re
from telemetry import decorators
from telemetry.internal.util import camel_case
from telemetry.internal.util import classes as classes_module
@decorators.Cache
def DiscoverModules(start_dir, top_level_dir, pattern='*'):
"""Discover all modules in |start_dir| which match |pattern|.
Args:
start_dir: The directory to recursively search.
top_level_dir: The top level of the package, for importing.
pattern: Unix shell-style pattern for filtering the filenames to import.
Returns:
list of modules.
"""
# start_dir and top_level_dir must be consistent with each other.
start_dir = os.path.realpath(start_dir)
top_level_dir = os.path.realpath(top_level_dir)
modules = []
for dir_path, _, filenames in os.walk(start_dir):
for filename in filenames:
# Filter out unwanted filenames.
if filename.startswith('.') or filename.startswith('_'):
continue
if os.path.splitext(filename)[1] != '.py':
continue
if not fnmatch.fnmatch(filename, pattern):
continue
# Find the module.
module_rel_path = os.path.relpath(os.path.join(dir_path, filename),
top_level_dir)
module_name = re.sub(r'[/\\]', '.', os.path.splitext(module_rel_path)[0])
# Import the module.
module = __import__(module_name, fromlist=[True])
modules.append(module)
return modules
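# Example (hypothetical layout): with foo/bar_page_set.py under a package
# rooted at top_level_dir, DiscoverModules('foo', top_level_dir,
# pattern='*_page_set.py') imports foo.bar_page_set and returns it in the
# result list.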
# TODO(dtu): Normalize all discoverable classes to have corresponding module
# and class names, then always index by class name.
@decorators.Cache
def DiscoverClasses(start_dir, top_level_dir, base_class, pattern='*',
index_by_class_name=True, directly_constructable=False):
"""Discover all classes in |start_dir| which subclass |base_class|.
Base classes that contain subclasses are ignored by default.
Args:
start_dir: The directory to recursively search.
top_level_dir: The top level of the package, for importing.
base_class: The base class to search for.
pattern: Unix shell-style pattern for filtering the filenames to import.
index_by_class_name: If True, use class name converted to
lowercase_with_underscores instead of module name in return dict keys.
directly_constructable: If True, will only return classes that can be
constructed without arguments
Returns:
dict of {module_name: class} or {underscored_class_name: class}
"""
modules = DiscoverModules(start_dir, top_level_dir, pattern)
classes = {}
for module in modules:
new_classes = DiscoverClassesInModule(
module, base_class, index_by_class_name, directly_constructable)
classes = dict(classes.items() + new_classes.items())
return classes
@decorators.Cache
def DiscoverClassesInModule(module, base_class, index_by_class_name=False,
directly_constructable=False):
"""Discover all classes in |module| which subclass |base_class|.
Base classes that contain subclasses are ignored by default.
Args:
module: The module to search.
base_class: The base class to search for.
index_by_class_name: If True, use class name converted to
lowercase_with_underscores instead of module name in return dict keys.
Returns:
dict of {module_name: class} or {underscored_class_name: class}
"""
classes = {}
for _, obj in inspect.getmembers(module):
# Ensure object is a class.
if not inspect.isclass(obj):
continue
# Include only subclasses of base_class.
if not issubclass(obj, base_class):
continue
# Exclude the base_class itself.
if obj is base_class:
continue
# Exclude protected or private classes.
if obj.__name__.startswith('_'):
continue
# Include only the module in which the class is defined.
# If a class is imported by another module, exclude those duplicates.
if obj.__module__ != module.__name__:
continue
if index_by_class_name:
key_name = camel_case.ToUnderscore(obj.__name__)
else:
key_name = module.__name__.split('.')[-1]
if (not directly_constructable or
classes_module.IsDirectlyConstructable(obj)):
classes[key_name] = obj
return classes
_counter = [0]
def _GetUniqueModuleName():
_counter[0] += 1
return "module_" + str(_counter[0])
|
rwbogl/gbg
|
refs/heads/master
|
parse.py
|
1
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import pycparser
from pycparser.c_ast import *
from pycparser import c_generator
def get_function(ast, name):
for node in ast.ext:
if type(node) == FuncDef and node.decl.name == name:
return node
return None
def pair_goto_labels(labels, conditional_gotos):
"""Return a dictionary of (label, [goto_partners]) pairs.
    The dictionary's keys are label names, and the values are
    lists of If nodes whose goto statements target the label.
"""
label_dict = {}
for label in labels:
label_dict[label.name] = []
for cond in conditional_gotos:
goto = cond.iftrue
target = goto.name
if target == label.name:
label_dict[label.name].append(cond)
return label_dict
def is_conditional_goto(node):
"""
Return if `node` is a conditional whose only branch is a goto statement.
"""
return (type(node) == If and type(node.iftrue) == Goto and
node.iffalse == None)
def update_parents(compound):
"""
    Set node.parents[-1] = compound for every node in compound.block_items
    (or compound.stmts), making sure their parent is `compound`.
Normally used after a transformation.
"""
if type(compound) == Compound:
for node in compound.block_items:
if "parents" in node.__dict__:
node.parents[-1] = compound
else:
node.parents = [compound]
elif type(compound) == Case:
for node in compound.stmts:
if "parents" in node.__dict__:
node.parents[-1] = compound
else:
node.parents = [compound]
def remove_siblings(label, conditional):
"""Remove a conditional goto/label node pair that are siblings.
Parents need to be updated after the removal, and this function does that."""
assert(are_siblings(label, conditional))
parent = label.parents[-1]
parent_list = []
attr_name = ""
if type(parent) == Case:
attr_name = "stmts"
parent_list = parent.stmts
elif type(parent) == Compound:
attr_name = "block_items"
parent_list = parent.block_items
else:
raise ValueError("got parents for removal that weren't compound or cases!")
label_index = parent_list.index(label)
cond_index = parent_list.index(conditional)
assert(label_index >= 0 and cond_index >= 0)
assert(label_index != cond_index)
if label_index > cond_index:
# Goto is before the label.
# In this case, we guard the statements from the goto to the label in a
# new conditional.
cond = negate(conditional.cond)
in_between = parent_list[cond_index+1:label_index]
between_compound = Compound(in_between)
update_parents(between_compound)
guard = If(cond, between_compound, None)
pre_goto = parent_list[:cond_index]
post_conditional = parent_list[label_index:]
setattr(parent, attr_name, pre_goto + [guard] + post_conditional)
else:
# Goto is after the label (or the goto _is_ the label, which means
# something has gone terribly wrong).
# In this case, we place a do-while loop directly after the label that
# will execute the statements as long as we're jumping.
# We will _not_ grab the statement from the label, because at this point
# every label contains a statement that clears its logical variable.
cond = conditional.cond
between_statements = parent_list[label_index+1:cond_index]
between_compound = Compound(between_statements)
update_parents(between_compound)
do_while = DoWhile(cond, between_compound)
pre_to_label = parent_list[:label_index+1]
after_goto = parent_list[cond_index+1:]
setattr(parent, attr_name, pre_to_label + [do_while] + after_goto)
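# Sketch of the backward-jump case above at the C level (after logic_init has
# moved the label's original statement out and made the label clear goto_l):
#     l: goto_l = 0;              l: goto_l = 0;
#     S1; S2;              =>     do { S1; S2; } while (c);
#     if (c) goto l;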
def are_directly_related(one, two):
"""Check if two nodes are directly related.
If they don't have parents, this should raise an AttributeError.
    Two nodes are directly related iff there exists some sequence of statements
    such that one node is present in it, and the other is either present in it
    or nested inside one of the statements.
Note: Being siblings is a special case of being related.
The check is this:
- At least one has a Compound parent.
- The other has that same compound parent somewhere in its parent
stack.
"""
parent_one = one.parents[-1]
parent_two = two.parents[-1]
if type(parent_one) == Compound and parent_one in two.parents:
# `two` exists in or is nested in the compound that `one` is in.
return True
elif type(parent_two) == Compound and parent_two in one.parents:
# `one` exists in or is nested in the compound that `two` is in.
return True
return False
def are_siblings(one, two):
"""Check if two nodes with parents are siblings.
If they don't have parents, this should raise an AttributeError.
They are siblings iff they both exist unnested in a sequence of
statements. Currently, this is checked by looking to see if
1. Both are under a Compound or Case node, and
2. Both have the _same_ node as a parent.
I justify this by saying that there can't be any sequence of statements if
they aren't inside of a Compound.
"""
one_parent = one.parents[-1]
two_parent = two.parents[-1]
under_compound = (type(one_parent) == Compound and
type(two_parent) == Compound)
under_case = (type(one_parent) == Case and
type(two_parent) == Case)
return (under_compound or under_case) and one_parent == two_parent
class GotoLabelFinder(NodeVisitor):
"""Visitor that will find every goto or label under a given node.
The results will be two lists of Nodes, self.gotos and self.labels,
complete with the offset, level, and parent stack monkey patched on for
each. The self.gotos list is actually a list of If Nodes, with the offset,
level, and parent stack of the If. This isn't so strange, as we're treating
the conditional and goto as one unit.
"""
def __init__(self):
self.offset = 0
self.level = 0
self.gotos = []
self.labels = []
self.parents = []
def level_visit(self, node):
node.parents = list(self.parents)
self.level += 1
self.generic_visit(node)
self.level -= 1
def generic_visit(self, node):
self.parents.append(node)
for access, child in node.children():
self.visit(child)
self.parents.pop()
def visit_Goto(self, node):
"""
        Append the conditional parent of the goto to self.gotos, or raise a
NotImplementedError if that isn't possible.
The conditional parent will have some extra attributes:
parents: List of nodes above the conditional.
The goto will also have some extra attributes:
offset: The offset of the goto.
level: The level of the goto.
"""
parents = list(self.parents)
parent = parents[-1]
if type(parent) != If:
line = node.coord.line
raise NotImplementedError("unsupported unconditional goto statement at line {}".format(line))
# The parent doesn't have itself as a parent.
parent.parents = list(self.parents)[:-1]
# Add some goto-specific data to the goto.
node.offset = self.offset
node.level = self.level
self.gotos.append(parent)
self.offset += 1
def visit_Label(self, node):
"""Append the label to self.labels with some extra attributes.
Attributes:
parents: Nodes above the label.
Offset: Offset of the label.
Level: Level of the label.
"""
node.parents = list(self.parents)
node.offset = self.offset
node.level = self.level
self.labels.append(node)
self.offset += 1
self.generic_visit(node)
def visit_If(self, node):
self.level_visit(node)
def visit_While(self, node):
self.level_visit(node)
def visit_DoWhile(self, node):
self.level_visit(node)
def visit_Switch(self, node):
self.level_visit(node)
def visit_For(self, node):
self.level_visit(node)
def visit_Compound(self, node):
# Only increment the level if we're not a part of a "leveler."
if not type(self.parents[-1]) in [If, While, DoWhile, Switch, For]:
self.level += 1
self.generic_visit(node)
self.level -= 1
else:
self.generic_visit(node)
def is_loop(node):
return type(node) in [While, DoWhile, For]
def under_loop(node):
"""Test if a node is under a compound that is under a loop.
    A node is under a loop if its nearest parents are a Compound, then a loop.
If the node doesn't have parents, this will raise an AttributeError.
"""
if len(node.parents) < 2:
return False
compound = node.parents[-1]
loop = node.parents[-2]
return type(compound) == Compound and is_loop(loop)
def under_if(node):
"""Test if a node is under a compound that is under an If.
If the node doesn't have parents, this will raise an AttributeError.
"""
if len(node.parents) < 2:
return False
compound = node.parents[-1]
conditional = node.parents[-2]
return type(compound) == Compound and type(conditional) == If
def under_switch(node):
"""Test if a node is under a switch statement.
This happens if its parents are case, then compound, then switch.
If the node doesn't have parents, this will raise an AttributeError.
"""
if len(node.parents) < 3:
return False
switch, compound, case = node.parents[-3:]
return (type(case) == Case and type(compound) == Compound and
type(switch) == Switch)
def move_goto_out_switch(conditional):
"""Move a conditional goto out of a switch statement."""
assert(under_switch(conditional))
above_compound, switch, switch_compound, case = conditional.parents[-4:]
if type(above_compound) != Compound:
raise NotImplementedError("Only support switch statements under "
"compounds!")
cond = conditional.cond
goto = conditional.iftrue
name = logical_label_name(goto)
# Set the logical value to the condition of the goto.
set_logical = create_assign(name, cond)
# Make the conditional dependent on the logical variable.
conditional.cond = ID(name)
# If the logical variable is true, then break out of the switch.
guard = If(ID(name), Break(), None)
cond_index = case.stmts.index(conditional)
assert(cond_index >= 0)
case.stmts[cond_index] = set_logical
case.stmts.insert(cond_index+1, guard)
switch_index = above_compound.block_items.index(switch)
above_compound.block_items.insert(switch_index+1, conditional)
# We moved above three parents, so remove three of them from the conditional
# to make sure that later checks work.
conditional.parents = conditional.parents[:-3]
def move_goto_out_loop(conditional):
"""Move a conditional goto out of a loop statement."""
assert(under_loop(conditional))
parent_compound, loop, loop_compound = conditional.parents[-3:]
if type(parent_compound) != Compound:
raise NotImplementedError("Can only pull gotos out of loops that are "
"under a compound!")
cond = conditional.cond
goto = conditional.iftrue
name = logical_label_name(goto)
# Set the logical value to the condition of the goto.
set_logical = create_assign(name, cond)
# Make the conditional dependent on the logical variable.
conditional.cond = ID(name)
# If the logical variable is true, then break out of the loop.
guard = If(ID(name), Break(), None)
cond_index = loop_compound.block_items.index(conditional)
assert(cond_index >= 0)
loop_compound.block_items[cond_index] = set_logical
loop_compound.block_items.insert(cond_index+1, guard)
loop_index = parent_compound.block_items.index(loop)
parent_compound.block_items.insert(loop_index+1, conditional)
# We moved above two parents, so remove two of them from the conditional to
# make sure that later checks work.
conditional.parents = conditional.parents[:-2]
def declare_regular_variable(var_id, type_name, init, function):
"""Declare `type_name var_id = init` at the top of `function`.
This is "regular" in the sense that there are no storage qualifiers.
:var_id: A node of type ID.
:type_name: A string specificing what type the variable is.
:init: A node specifiying what the initial value is.
:function: The FuncDef node where this variable will be declared.
:returns: Nothing.
"""
id_type = IdentifierType([type_name])
type_decl = TypeDecl(var_id.name, [], id_type)
decl = Decl(var_id, [], [], [], type_decl, init, None)
function.body.block_items.insert(0, decl)
def declare_logic_variable(name, function):
"""Declare the variable `int name = 0` at the top of `function`."""
var_id = ID(name)
type_name = "int"
logical_value = Constant(type_name, "0")
declare_regular_variable(var_id, type_name, logical_value, function)
def create_assign(name, val):
var_id = ID(name)
return Assignment("=", var_id, val)
def logical_label_name(goto_label):
return "goto_{}".format(goto_label.name)
def logic_init(labels, func):
"""
Declare a logical variable `goto_LABEL = 0` for each label in `labels` at
the top of `func`. Also, reinitialize it to 0 at the label.
This is only useful if each label is actually _in_ the function.
"""
for label in labels:
parent = label.parents[-1]
to_insert = []
if type(parent) == Compound:
to_insert = parent.block_items
elif type(parent) == Case:
to_insert = parent.stmts
else:
raise NotImplementedError("Can only initialize labels under compounds or cases for now!")
declare_logic_variable("goto_{}".format(label.name), func)
val = Constant("int", "0")
clear_logical_var = create_assign(logical_label_name(label), val)
label_index = to_insert.index(label)
# Move the statement that the label holds to after the label,
# and the setting to 0 into the label. Make sure we update parents.
to_insert.insert(label_index + 1, label.stmt)
label.stmt = clear_logical_var
update_parents(parent)
def move_goto_in_loop(conditional, label):
"""Move a goto in a loop-statement."""
assert(is_conditional_goto(conditional))
assert(under_loop(label))
goto = conditional.iftrue
loop = label.parents[-2]
loop_compound = label.parents[-1]
compound = conditional.parents[-1]
place_inwards_cond_guard(compound, conditional, loop)
logical_name = logical_label_name(label)
loop.cond = BinaryOp("||", ID(logical_name), loop.cond)
loop_compound.block_items.insert(0, conditional)
conditional.cond = ID(logical_name)
update_parents(loop_compound)
def move_goto_out_if(conditional):
"""Move a conditional goto out of an if-statement."""
assert(under_if(conditional))
above_if, if_stmt, if_compound = conditional.parents[-3:]
if (type(above_if) != Compound):
raise NotImplementedError("can't perform OT on an if-statement not under a compound!")
goto = conditional.iftrue
logical_name = logical_label_name(goto)
cond = conditional.cond
set_logical = create_assign(logical_name, cond)
cond_index = if_compound.block_items.index(conditional)
assert(cond_index >= 0)
after_goto = if_compound.block_items[cond_index+1:]
guard_block = Compound(after_goto)
guard = If(negate(ID(logical_name)), guard_block, None)
if_compound.block_items = if_compound.block_items[:cond_index] + [set_logical, guard]
# Make conditional dependent on logical variable.
conditional.cond = ID(logical_name)
update_parents(guard_block)
if_index = above_if.block_items.index(if_stmt)
assert(if_index >= 0)
above_if.block_items.insert(if_index + 1, conditional)
update_parents(above_if)
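# Sketch of this outward transformation at the C level:
#     if (a) {                    if (a) {
#         if (c) goto l;              goto_l = c;
#         S;               =>         if (!goto_l) { S; }
#     }                           }
#                                 if (goto_l) goto l;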
def place_inwards_cond_guard(parent_compound, conditional, in_stmt):
"""Place the guarding conditional and change the goto's condition for IT.
In effect, this places a new conditional that guards the statements between
`conditional` and `in_stmt`, using `conditional.cond` as the guard's
condition.
Returns the guard conditional.
"""
if type(parent_compound) != Compound:
raise NotImplementedError("can only move gotos into statements whose parents are compounds!")
if not are_siblings(conditional, in_stmt):
raise NotImplementedError("nested IT not implemented yet!")
cond_index = parent_compound.block_items.index(conditional)
stmt_index = parent_compound.block_items.index(in_stmt)
if cond_index > stmt_index:
raise NotImplementedError("goto lifting not implemented yet!")
assert(cond_index >= 0 and stmt_index >= 0 and cond_index != stmt_index)
between_stmts = parent_compound.block_items[cond_index+1:stmt_index]
between_compound = Compound(between_stmts)
goto = conditional.iftrue
logical_name = logical_label_name(goto)
cond = conditional.cond
set_logical = create_assign(logical_name, cond)
guard = If(negate(ID(logical_name)), between_compound, None)
bi = parent_compound.block_items
parent_compound.block_items = bi[:cond_index] + [set_logical, guard, in_stmt] + bi[stmt_index+1:]
# The nodes moved inside of a guard have gained two parents, while
# update_parents currently (2016-02-08) only checks for one.
for node in between_compound.block_items:
if "parents" in node.__dict__:
node.parents = node.parents + [guard, guard.iftrue]
update_parents(parent_compound)
return guard
def move_goto_in_if(conditional, label):
"""Move a goto into an if-statement."""
assert(under_if(label))
above_compound = conditional.parents[-1]
if_compound = label.parents[-1]
if_stmt = label.parents[-2]
if if_stmt.iftrue != if_compound:
raise NotImplementedError("only support labels in the 'then' clause for IT!")
place_inwards_cond_guard(above_compound, conditional, if_stmt)
logical_name = logical_label_name(label)
if_stmt.cond = BinaryOp("||", ID(logical_name), if_stmt.cond)
if_compound.block_items.insert(0, conditional)
conditional.cond = ID(logical_name)
update_parents(if_compound)
def logical_switch_name(switch):
# https://stackoverflow.com/questions/279561
if "counter" not in logical_switch_name.__dict__:
logical_switch_name.counter = -1
logical_switch_name.counter += 1
return "switch_var_" + str(logical_switch_name.counter)
def move_goto_in_switch(conditional, label, func):
"""Move a goto into a switch statement."""
assert(under_switch(label))
switch, compound, case = label.parents[-3:]
above_compound = conditional.parents[-1]
guard = place_inwards_cond_guard(above_compound, conditional, switch)
    switch_var = switch.cond
logical_name = logical_switch_name(switch)
declare_logic_variable(logical_switch_name(switch), func)
# This makes the logical variable exactly the switch variable.
continue_switch = create_assign(logical_name, switch_var)
# This makes the logical variable the case that our label is under.
steal_switch = create_assign(logical_name, case.expr)
label_name = logical_label_name(conditional.iftrue)
switch.cond = ID(logical_name)
# If the goto's cond was false, then continue as normal.
guard.iftrue.block_items.append(continue_switch)
# If the goto's cond was true, then force jumping to the switch case.
guard.iffalse = steal_switch
case.stmts.insert(0, conditional)
conditional.cond = ID(label_name)
conditional.parents += [switch, compound, case]
def negate(exp):
return UnaryOp("!", exp)
def do_it(func_node):
t = GotoLabelFinder()
t.visit(func_node)
labels = t.labels
gotos = t.gotos
d = pair_goto_labels(labels, gotos)
logic_init(t.labels, func_node)
for label in t.labels:
for conditional in d[label.name]:
while not are_siblings(label, conditional):
if not are_directly_related(label, conditional):
print("Skipping two indirectly related nodes...")
break
if under_if(conditional):
print("Moving out of a conditional...")
move_goto_out_if(conditional)
elif under_loop(conditional):
print("Moving out of a loop...")
move_goto_out_loop(conditional)
elif under_switch(conditional):
print("Moving out of a switch...")
move_goto_out_switch(conditional)
elif under_loop(label):
print("Moving into a loop...")
move_goto_in_loop(conditional, label)
elif under_switch(label):
print("Moving into a switch...")
                    move_goto_in_switch(conditional, label, func_node)
elif under_if(label):
print("Moving into an if-statement...")
move_goto_in_if(conditional, label)
else:
print("Nothing we can do for the non-looped...")
break
print("One iteration done...")
if are_siblings(label, conditional):
print("Siblings!")
remove_siblings(label, conditional)
else:
print("Well, we tried.")
if __name__ == "__main__":
import sys
filename = ""
function_name = ""
try:
filename = sys.argv[1]
except IndexError:
filename = "./test.c"
try:
function_name = sys.argv[2]
except IndexError:
function_name = "main"
ast = pycparser.parse_file(filename, use_cpp=True,
cpp_args="-I/usr/share/python3-pycparser/fake_libc_include")
func = get_function(ast, function_name)
generator = c_generator.CGenerator()
print(generator.visit(func))
do_it(func)
print(generator.visit(func))
|
stryder199/RyarkAssignments
|
refs/heads/master
|
Assignment2/quizzes/eop/chapter3/sample_statistics.py
|
1
|
question_list = [
# (mark, count, [directories])
(1, 3, 'eop/chapter3/sample_statistics_io_'),
]
practice_mode = True
standalone = False
logged = False
log_dir = ''
|
Soya93/Extract-Refactoring
|
refs/heads/master
|
python/lib/Lib/site-packages/django/conf/locale/vi/__init__.py
|
12133432
| |
xuweiliang/Codelibrary
|
refs/heads/master
|
openstack_dashboard/dashboards/admin/access_and_security/download_sql/__init__.py
|
12133432
| |
mwx1993/TACTIC
|
refs/heads/master
|
src/pyasm/application/houdini/houdini_environment.py
|
10
|
###########################################################
#
# Copyright (c) 2005, Southpaw Technology
# All Rights Reserved
#
# PROPRIETARY INFORMATION. This software is proprietary to
# Southpaw Technology, and is not to be reproduced, transmitted,
# or disclosed in any way without written permission.
#
#
#
__all__ = ['HoudiniEnvironment']
import os
from pyasm.application.common import AppEnvironment
class HoudiniEnvironment(AppEnvironment):
def set_up(info):
env = AppEnvironment.get()
from houdini import Houdini
app = Houdini()
info.app = app
env.set_app(app)
env.set_info(info)
set_up = staticmethod(set_up)
|
QualiSystems/Azure-Shell
|
refs/heads/develop
|
package/tests/test_cp/test_azure/test_domain/test_services/test_task_waiter.py
|
1
|
from unittest import TestCase
import mock
from cloudshell.cp.azure.domain.services.task_waiter import TaskWaiterService
class TestTaskWaiterService(TestCase):
def setUp(self):
self.cancellation_service = mock.MagicMock()
self.task_waiter_service = TaskWaiterService(cancellation_service=self.cancellation_service)
@mock.patch("cloudshell.cp.azure.domain.services.task_waiter.time.sleep")
def test_wait_for_task(self, sleep):
"""Check that method will return azure poller result once operation will end"""
operation_poller = mock.MagicMock()
cancellation_context = mock.MagicMock()
operation_poller.done.side_effect = [False, True]
# Act
result = self.task_waiter_service.wait_for_task(operation_poller=operation_poller,
cancellation_context=cancellation_context)
# Verify
self.assertEqual(result, operation_poller.result())
operation_poller.done.assert_called()
sleep.assert_called_once_with(30)
self.cancellation_service.check_if_cancelled.assert_called_once_with(cancellation_context)
|
sublime1809/django
|
refs/heads/master
|
django/core/management/commands/startapp.py
|
513
|
from importlib import import_module
from django.core.management.base import CommandError
from django.core.management.templates import TemplateCommand
class Command(TemplateCommand):
help = ("Creates a Django app directory structure for the given app "
"name in the current directory or optionally in the given "
"directory.")
missing_args_message = "You must provide an application name."
def handle(self, **options):
app_name, target = options.pop('name'), options.pop('directory')
self.validate_name(app_name, "app")
# Check that the app_name cannot be imported.
try:
import_module(app_name)
except ImportError:
pass
else:
raise CommandError("%r conflicts with the name of an existing "
"Python module and cannot be used as an app "
"name. Please try another name." % app_name)
super(Command, self).handle('app', app_name, target, **options)
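# For example, `django-admin startapp email` raises CommandError because
# `import email` already succeeds against the standard-library module.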
|
jhonatajh/mtasa-blue
|
refs/heads/master
|
vendor/google-breakpad/src/tools/gyp/test/mac/gyptest-depend-on-bundle.py
|
303
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that a dependency on a bundle causes the whole bundle to be built.
"""
import TestGyp
import sys
if sys.platform == 'darwin':
test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
test.run_gyp('test.gyp', chdir='depend-on-bundle')
test.build('test.gyp', 'dependent_on_bundle', chdir='depend-on-bundle')
# Binary itself.
test.built_file_must_exist('dependent_on_bundle', chdir='depend-on-bundle')
# Bundle dependency.
test.built_file_must_exist(
'my_bundle.framework/Versions/A/my_bundle',
chdir='depend-on-bundle')
test.built_file_must_exist( # package_framework
'my_bundle.framework/my_bundle',
chdir='depend-on-bundle')
test.built_file_must_exist( # plist
'my_bundle.framework/Versions/A/Resources/Info.plist',
chdir='depend-on-bundle')
test.built_file_must_exist(
'my_bundle.framework/Versions/A/Resources/English.lproj/' # Resources
'InfoPlist.strings',
chdir='depend-on-bundle')
test.pass_test()
|
spicykaiju/pyvmomi
|
refs/heads/master
|
tests/test_virtual_machine_object.py
|
10
|
# VMware vSphere Python SDK
# Copyright (c) 2008-2015 VMware, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import tests
import vcr
from pyVim import connect
from pyVmomi import vim
class VirtualMachineTests(tests.VCRTestBase):
@vcr.use_cassette('vm_nic_data.yaml',
cassette_library_dir=tests.fixtures_path,
record_mode='never')
def test_vm_nic_data(self):
data = {'ESXi-5.5-16': [],
'ESXi-5.5-17': [],
'ESXi-5.5-18': [],
'ESXi11': ['00:0c:29:e1:e0:f8'],
'ESXi12': ['00:50:56:b4:3c:3c'],
'ESXi20': ['00:50:56:b4:fc:9b', '00:50:56:b4:28:e7'],
'ESXi21': ['00:50:56:b4:8d:7a', '00:50:56:b4:39:b8'],
'ESXi22': ['00:0c:29:36:b5:5a', '00:0c:29:36:b5:64'],
'ESXi23': ['00:50:56:b4:91:f9', '00:50:56:b4:90:9f'],
'ESXi38-v5.0': ['00:0c:29:ce:6a:d8', '00:0c:29:ce:6a:e2'],
'MARVEL-Agents_of_Atlas': [],
'MARVEL-Alex_Power': [],
'MARVEL-Archangel': [],
'MARVEL-Freak': [],
'MARVEL-Hepzibah': [],
'MARVEL-Mach_IV': [],
'MARVEL-Serpent_Society': [],
'MARVEL-Spectrum': [],
'MARVEL-The_Hand': [],
'MARVEL-Vanisher_(Ultimate)': [],
'VCVA-5.5u1-11': ['00:0c:29:9d:a3:8c'],
'VCVA-5.5u1-14': ['00:0c:29:75:21:2e'],
'VCVA33': ['00:0c:29:e3:f9:f7'],
'VCVA36': ['00:0c:29:44:8b:76'],
'VCVA37-v5.0': ['00:50:56:b4:89:db'],
'box': ['00:50:56:82:28:7d'],
'box_copy': ['00:50:56:82:34:02'],
'esx4.1.0': ['00:0c:29:1f:ec:ba', '00:0c:29:1f:ec:c4']}
# see: http://python3porting.com/noconv.html
si = connect.SmartConnect(host='vcsa',
user='my_user',
pwd='my_password')
content = si.RetrieveContent()
virtual_machines = content.viewManager.CreateContainerView(
content.rootFolder, [vim.VirtualMachine], True)
for virtual_machine in virtual_machines.view:
name = virtual_machine.name
self.assertTrue(name in data.keys())
macs = data[name]
if virtual_machine.guest:
for net in virtual_machine.guest.net:
self.assertTrue(net.macAddress in macs)
|
WASPACDC/hmdsm.repository
|
refs/heads/master
|
plugin.video.mrpiracy/resources/lib/tvdb_api.py
|
20
|
import os
import xbmcgui
import xbmc
import time
import urllib
import urllib2
import json
from bs4 import BeautifulSoup
class TVDB:
def __init__(self, api_key, lingua):
self.api_key = api_key
self.lingua = lingua
    def abrir_url(self, url, postData=False):
        # Fetch a URL, optionally POSTing a 'procurar' (search) parameter.
        if postData:
            data = urllib.urlencode({'procurar': postData})
            req = urllib2.Request(url, data)
        else:
            req = urllib2.Request(url)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
response = urllib2.urlopen(req)
        link = response.read()
response.close()
return link
def getCurrentServerTime(self):
        xml = self.abrir_url('http://thetvdb.com/api/Updates.php?type=none')
        soup = BeautifulSoup(xml, 'xml')
self.previoustime = soup.time
return self.previoustime
def getSerieId(self, idIMDb):
        xml = self.abrir_url('http://www.thetvdb.com//api/GetSeriesByRemoteID.php?imdbid='+idIMDb+'&language='+self.lingua)
        soup = BeautifulSoup(xml, 'xml')
self.serieId = soup.find('seriesid').string
self.serieName = soup.find('SeriesName').string
return self.serieId
def getSeasonEpisode(self, serieId, season, episode):
        xml = self.abrir_url('http://thetvdb.com/api/'+self.api_key+'/series/'+serieId+'/default/'+season+'/'+episode+'/'+self.lingua+'.xml')
        soup = BeautifulSoup(xml, 'xml')
data = {}
data['name'] = soup.find('EpisodeName').string
data['overview'] = soup.find('Overview').string
data['actors'] = soup.find('GuestStars').string
data['aired'] = soup.find('FirstAired').string
        data['director'] = soup.find('Director').string
data['writer'] = soup.find('Writer').string
data['poster'] = 'http://thetvdb.com/banners/'+soup.find('filename').string
json_data = json.dumps(data)
return json_data
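# Minimal usage sketch (API key, language, and IMDb/season/episode ids are
# placeholders):
#   tvdb = TVDB('YOUR_API_KEY', 'en')
#   serie_id = tvdb.getSerieId('tt0944947')
#   episode_json = tvdb.getSeasonEpisode(serie_id, '1', '1')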
|
matrixise/odoo
|
refs/heads/8.0
|
addons/website_gengo/controllers/__init__.py
|
7372
|
import main
|
TangHao1987/intellij-community
|
refs/heads/master
|
plugins/hg4idea/testData/bin/mercurial/scmposix.py
|
94
|
import sys, os
import osutil
def _rcfiles(path):
rcs = [os.path.join(path, 'hgrc')]
rcdir = os.path.join(path, 'hgrc.d')
try:
rcs.extend([os.path.join(rcdir, f)
for f, kind in osutil.listdir(rcdir)
if f.endswith(".rc")])
except OSError:
pass
return rcs
def systemrcpath():
path = []
if sys.platform == 'plan9':
root = 'lib/mercurial'
else:
root = 'etc/mercurial'
# old mod_python does not set sys.argv
if len(getattr(sys, 'argv', [])) > 0:
p = os.path.dirname(os.path.dirname(sys.argv[0]))
path.extend(_rcfiles(os.path.join(p, root)))
path.extend(_rcfiles('/' + root))
return path
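# Example: on a non-plan9 system with sys.argv[0] == '/usr/bin/hg', this
# checks /usr/etc/mercurial/hgrc and /usr/etc/mercurial/hgrc.d/*.rc, then
# /etc/mercurial/hgrc and /etc/mercurial/hgrc.d/*.rc.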
def userrcpath():
if sys.platform == 'plan9':
return [os.environ['home'] + '/lib/hgrc']
else:
return [os.path.expanduser('~/.hgrc')]
|
android-ia/platform_external_chromium_org
|
refs/heads/master
|
third_party/cython/src/Cython/Plex/Regexps.py
|
99
|
#=======================================================================
#
# Python Lexical Analyser
#
# Regular Expressions
#
#=======================================================================
import types
from sys import maxint as maxint
import Errors
#
# Constants
#
BOL = 'bol'
EOL = 'eol'
EOF = 'eof'
nl_code = ord('\n')
#
# Helper functions
#
def chars_to_ranges(s):
"""
Return a list of character codes consisting of pairs
[code1a, code1b, code2a, code2b,...] which cover all
the characters in |s|.
"""
char_list = list(s)
char_list.sort()
i = 0
n = len(char_list)
result = []
while i < n:
code1 = ord(char_list[i])
code2 = code1 + 1
i = i + 1
while i < n and code2 >= ord(char_list[i]):
code2 = code2 + 1
i = i + 1
result.append(code1)
result.append(code2)
return result
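# Example: chars_to_ranges("abcx") -> [97, 100, 120, 121], i.e. the two
# half-open ranges [97, 100) covering 'a'..'c' and [120, 121) covering 'x'.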
def uppercase_range(code1, code2):
"""
If the range of characters from code1 to code2-1 includes any
lower case letters, return the corresponding upper case range.
"""
code3 = max(code1, ord('a'))
code4 = min(code2, ord('z') + 1)
if code3 < code4:
d = ord('A') - ord('a')
return (code3 + d, code4 + d)
else:
return None
def lowercase_range(code1, code2):
"""
If the range of characters from code1 to code2-1 includes any
upper case letters, return the corresponding lower case range.
"""
code3 = max(code1, ord('A'))
code4 = min(code2, ord('Z') + 1)
if code3 < code4:
d = ord('a') - ord('A')
return (code3 + d, code4 + d)
else:
return None
def CodeRanges(code_list):
"""
Given a list of codes as returned by chars_to_ranges, return
an RE which will match a character in any of the ranges.
"""
re_list = []
for i in xrange(0, len(code_list), 2):
re_list.append(CodeRange(code_list[i], code_list[i + 1]))
return Alt(*re_list)
def CodeRange(code1, code2):
"""
CodeRange(code1, code2) is an RE which matches any character
with a code |c| in the range |code1| <= |c| < |code2|.
"""
if code1 <= nl_code < code2:
return Alt(RawCodeRange(code1, nl_code),
RawNewline,
RawCodeRange(nl_code + 1, code2))
else:
return RawCodeRange(code1, code2)
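# Illustrative note: CodeRange(0, 128) spans nl_code (10), so it is built
# as Alt(RawCodeRange(0, 10), RawNewline, RawCodeRange(11, 128)); a range
# that avoids '\n', such as CodeRange(ord('a'), ord('z') + 1), remains a
# single RawCodeRange.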
#
# Abstract classes
#
class RE(object):
"""RE is the base class for regular expression constructors.
The following operators are defined on REs:
re1 + re2 is an RE which matches |re1| followed by |re2|
re1 | re2 is an RE which matches either |re1| or |re2|
"""
nullable = 1 # True if this RE can match 0 input symbols
match_nl = 1 # True if this RE can match a string ending with '\n'
str = None # Set to a string to override the class's __str__ result
def build_machine(self, machine, initial_state, final_state,
match_bol, nocase):
"""
This method should add states to |machine| to implement this
RE, starting at |initial_state| and ending at |final_state|.
If |match_bol| is true, the RE must be able to match at the
beginning of a line. If nocase is true, upper and lower case
letters should be treated as equivalent.
"""
raise NotImplementedError("%s.build_machine not implemented" %
self.__class__.__name__)
def build_opt(self, m, initial_state, c):
"""
Given a state |s| of machine |m|, return a new state
reachable from |s| on character |c| or epsilon.
"""
s = m.new_state()
initial_state.link_to(s)
initial_state.add_transition(c, s)
return s
def __add__(self, other):
return Seq(self, other)
def __or__(self, other):
return Alt(self, other)
def __str__(self):
if self.str:
return self.str
else:
return self.calc_str()
def check_re(self, num, value):
if not isinstance(value, RE):
self.wrong_type(num, value, "Plex.RE instance")
def check_string(self, num, value):
if type(value) != type(''):
self.wrong_type(num, value, "string")
def check_char(self, num, value):
self.check_string(num, value)
if len(value) != 1:
raise Errors.PlexValueError("Invalid value for argument %d of Plex.%s."
"Expected a string of length 1, got: %s" % (
num, self.__class__.__name__, repr(value)))
def wrong_type(self, num, value, expected):
if type(value) == types.InstanceType:
got = "%s.%s instance" % (
value.__class__.__module__, value.__class__.__name__)
else:
got = type(value).__name__
raise Errors.PlexTypeError("Invalid type for argument %d of Plex.%s "
"(expected %s, got %s" % (
num, self.__class__.__name__, expected, got))
#
# Primitive RE constructors
# -------------------------
#
# These are the basic REs from which all others are built.
#
## class Char(RE):
## """
## Char(c) is an RE which matches the character |c|.
## """
## nullable = 0
## def __init__(self, char):
## self.char = char
## self.match_nl = char == '\n'
## def build_machine(self, m, initial_state, final_state, match_bol, nocase):
## c = self.char
## if match_bol and c != BOL:
## s1 = self.build_opt(m, initial_state, BOL)
## else:
## s1 = initial_state
## if c == '\n' or c == EOF:
## s1 = self.build_opt(m, s1, EOL)
## if len(c) == 1:
## code = ord(self.char)
## s1.add_transition((code, code+1), final_state)
## if nocase and is_letter_code(code):
## code2 = other_case_code(code)
## s1.add_transition((code2, code2+1), final_state)
## else:
## s1.add_transition(c, final_state)
## def calc_str(self):
## return "Char(%s)" % repr(self.char)
def Char(c):
"""
Char(c) is an RE which matches the character |c|.
"""
if len(c) == 1:
result = CodeRange(ord(c), ord(c) + 1)
else:
result = SpecialSymbol(c)
result.str = "Char(%s)" % repr(c)
return result
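# Illustrative note: Char('a') delegates to CodeRange(97, 98), while
# Char(BOL) falls through to SpecialSymbol(BOL); either way the result's
# .str is set so it prints as "Char('a')" and so on.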
class RawCodeRange(RE):
"""
RawCodeRange(code1, code2) is a low-level RE which matches any character
with a code |c| in the range |code1| <= |c| < |code2|, where the range
does not include newline. For internal use only.
"""
nullable = 0
match_nl = 0
range = None # (code, code)
uppercase_range = None # (code, code) or None
lowercase_range = None # (code, code) or None
def __init__(self, code1, code2):
self.range = (code1, code2)
self.uppercase_range = uppercase_range(code1, code2)
self.lowercase_range = lowercase_range(code1, code2)
def build_machine(self, m, initial_state, final_state, match_bol, nocase):
if match_bol:
initial_state = self.build_opt(m, initial_state, BOL)
initial_state.add_transition(self.range, final_state)
if nocase:
if self.uppercase_range:
initial_state.add_transition(self.uppercase_range, final_state)
if self.lowercase_range:
initial_state.add_transition(self.lowercase_range, final_state)
    def calc_str(self):
        return "CodeRange(%d,%d)" % (self.range[0], self.range[1])
class _RawNewline(RE):
"""
RawNewline is a low-level RE which matches a newline character.
For internal use only.
"""
nullable = 0
match_nl = 1
def build_machine(self, m, initial_state, final_state, match_bol, nocase):
if match_bol:
initial_state = self.build_opt(m, initial_state, BOL)
s = self.build_opt(m, initial_state, EOL)
s.add_transition((nl_code, nl_code + 1), final_state)
RawNewline = _RawNewline()
class SpecialSymbol(RE):
"""
SpecialSymbol(sym) is an RE which matches the special input
symbol |sym|, which is one of BOL, EOL or EOF.
"""
nullable = 0
match_nl = 0
sym = None
def __init__(self, sym):
self.sym = sym
def build_machine(self, m, initial_state, final_state, match_bol, nocase):
# Sequences 'bol bol' and 'bol eof' are impossible, so only need
# to allow for bol if sym is eol
if match_bol and self.sym == EOL:
initial_state = self.build_opt(m, initial_state, BOL)
initial_state.add_transition(self.sym, final_state)
class Seq(RE):
"""Seq(re1, re2, re3...) is an RE which matches |re1| followed by
|re2| followed by |re3|..."""
def __init__(self, *re_list):
nullable = 1
for i in xrange(len(re_list)):
re = re_list[i]
self.check_re(i, re)
nullable = nullable and re.nullable
self.re_list = re_list
self.nullable = nullable
i = len(re_list)
match_nl = 0
while i:
i = i - 1
re = re_list[i]
if re.match_nl:
match_nl = 1
break
if not re.nullable:
break
self.match_nl = match_nl
def build_machine(self, m, initial_state, final_state, match_bol, nocase):
re_list = self.re_list
if len(re_list) == 0:
initial_state.link_to(final_state)
else:
s1 = initial_state
n = len(re_list)
for i in xrange(n):
if i < n - 1:
s2 = m.new_state()
else:
s2 = final_state
re = re_list[i]
re.build_machine(m, s1, s2, match_bol, nocase)
s1 = s2
match_bol = re.match_nl or (match_bol and re.nullable)
def calc_str(self):
return "Seq(%s)" % ','.join(map(str, self.re_list))
class Alt(RE):
"""Alt(re1, re2, re3...) is an RE which matches either |re1| or
|re2| or |re3|..."""
def __init__(self, *re_list):
self.re_list = re_list
nullable = 0
match_nl = 0
nullable_res = []
non_nullable_res = []
i = 1
for re in re_list:
self.check_re(i, re)
if re.nullable:
nullable_res.append(re)
nullable = 1
else:
non_nullable_res.append(re)
if re.match_nl:
match_nl = 1
i = i + 1
self.nullable_res = nullable_res
self.non_nullable_res = non_nullable_res
self.nullable = nullable
self.match_nl = match_nl
def build_machine(self, m, initial_state, final_state, match_bol, nocase):
for re in self.nullable_res:
re.build_machine(m, initial_state, final_state, match_bol, nocase)
if self.non_nullable_res:
if match_bol:
initial_state = self.build_opt(m, initial_state, BOL)
for re in self.non_nullable_res:
re.build_machine(m, initial_state, final_state, 0, nocase)
def calc_str(self):
return "Alt(%s)" % ','.join(map(str, self.re_list))
class Rep1(RE):
"""Rep1(re) is an RE which matches one or more repetitions of |re|."""
def __init__(self, re):
self.check_re(1, re)
self.re = re
self.nullable = re.nullable
self.match_nl = re.match_nl
def build_machine(self, m, initial_state, final_state, match_bol, nocase):
s1 = m.new_state()
s2 = m.new_state()
initial_state.link_to(s1)
self.re.build_machine(m, s1, s2, match_bol or self.re.match_nl, nocase)
s2.link_to(s1)
s2.link_to(final_state)
def calc_str(self):
return "Rep1(%s)" % self.re
class SwitchCase(RE):
"""
SwitchCase(re, nocase) is an RE which matches the same strings as RE,
but treating upper and lower case letters according to |nocase|. If
|nocase| is true, case is ignored, otherwise it is not.
"""
re = None
nocase = None
def __init__(self, re, nocase):
self.re = re
self.nocase = nocase
self.nullable = re.nullable
self.match_nl = re.match_nl
def build_machine(self, m, initial_state, final_state, match_bol, nocase):
self.re.build_machine(m, initial_state, final_state, match_bol,
self.nocase)
def calc_str(self):
if self.nocase:
name = "NoCase"
else:
name = "Case"
return "%s(%s)" % (name, self.re)
#
# Composite RE constructors
# -------------------------
#
# These REs are defined in terms of the primitive REs.
#
Empty = Seq()
Empty.__doc__ = \
"""
Empty is an RE which matches the empty string.
"""
Empty.str = "Empty"
def Str1(s):
"""
Str1(s) is an RE which matches the literal string |s|.
"""
result = Seq(*tuple(map(Char, s)))
result.str = "Str(%s)" % repr(s)
return result
def Str(*strs):
"""
Str(s) is an RE which matches the literal string |s|.
Str(s1, s2, s3, ...) is an RE which matches any of |s1| or |s2| or |s3|...
"""
if len(strs) == 1:
return Str1(strs[0])
else:
result = Alt(*tuple(map(Str1, strs)))
result.str = "Str(%s)" % ','.join(map(repr, strs))
return result
def Any(s):
"""
Any(s) is an RE which matches any character in the string |s|.
"""
#result = apply(Alt, tuple(map(Char, s)))
result = CodeRanges(chars_to_ranges(s))
result.str = "Any(%s)" % repr(s)
return result
def AnyBut(s):
"""
AnyBut(s) is an RE which matches any character (including
newline) which is not in the string |s|.
"""
ranges = chars_to_ranges(s)
ranges.insert(0, -maxint)
ranges.append(maxint)
result = CodeRanges(ranges)
result.str = "AnyBut(%s)" % repr(s)
return result
AnyChar = AnyBut("")
AnyChar.__doc__ = \
"""
AnyChar is an RE which matches any single character (including a newline).
"""
AnyChar.str = "AnyChar"
def Range(s1, s2 = None):
"""
Range(c1, c2) is an RE which matches any single character in the range
|c1| to |c2| inclusive.
Range(s) where |s| is a string of even length is an RE which matches
any single character in the ranges |s[0]| to |s[1]|, |s[2]| to |s[3]|,...
"""
if s2:
result = CodeRange(ord(s1), ord(s2) + 1)
result.str = "Range(%s,%s)" % (s1, s2)
else:
ranges = []
for i in range(0, len(s1), 2):
ranges.append(CodeRange(ord(s1[i]), ord(s1[i+1]) + 1))
result = Alt(*ranges)
result.str = "Range(%s)" % repr(s1)
return result
def Opt(re):
"""
Opt(re) is an RE which matches either |re| or the empty string.
"""
result = Alt(re, Empty)
result.str = "Opt(%s)" % re
return result
def Rep(re):
"""
Rep(re) is an RE which matches zero or more repetitions of |re|.
"""
result = Opt(Rep1(re))
result.str = "Rep(%s)" % re
return result
def NoCase(re):
"""
NoCase(re) is an RE which matches the same strings as RE, but treating
upper and lower case letters as equivalent.
"""
return SwitchCase(re, nocase = 1)
def Case(re):
"""
Case(re) is an RE which matches the same strings as RE, but treating
upper and lower case letters as distinct, i.e. it cancels the effect
of any enclosing NoCase().
"""
return SwitchCase(re, nocase = 0)
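# Minimal sketch of combining these constructors (assumed usage; the names
# below are illustrative, not from this file):
#
#   letter = Range("azAZ")
#   digit = Range("09")
#   ident = letter + Rep(letter | digit)       # identifier-like tokens
#   keyword = NoCase(Str("if", "else"))        # case-insensitive keywords
#   number = Rep1(digit) + Opt(Char('.') + Rep(digit))
#
# Each expression is an RE tree; a Plex scanner later calls build_machine()
# on it to add the corresponding states to an NFA.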
#
# RE Constants
#
Bol = Char(BOL)
Bol.__doc__ = \
"""
Bol is an RE which matches the beginning of a line.
"""
Bol.str = "Bol"
Eol = Char(EOL)
Eol.__doc__ = \
"""
Eol is an RE which matches the end of a line.
"""
Eol.str = "Eol"
Eof = Char(EOF)
Eof.__doc__ = \
"""
Eof is an RE which matches the end of the file.
"""
Eof.str = "Eof"
|
superchilli/webapp
|
refs/heads/master
|
venv/lib/python2.7/site-packages/alembic/templates/pylons/env.py
|
41
|
"""Pylons bootstrap environment.
Place 'pylons_config_file' into alembic.ini, and the application will
be loaded from there.
"""
from alembic import context
from paste.deploy import loadapp
from logging.config import fileConfig
from sqlalchemy.engine.base import Engine
try:
# if pylons app already in, don't create a new app
from pylons import config as pylons_config
pylons_config['__file__']
except Exception:
config = context.config
# can use config['__file__'] here, i.e. the Pylons
# ini file, instead of alembic.ini
config_file = config.get_main_option('pylons_config_file')
fileConfig(config_file)
wsgi_app = loadapp('config:%s' % config_file, relative_to='.')
# customize this section for non-standard engine configurations.
meta = __import__("%s.model.meta" % wsgi_app.config['pylons.package']).model.meta
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = None
def run_migrations_offline():
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
context.configure(
url=meta.engine.url, target_metadata=target_metadata,
literal_binds=True)
with context.begin_transaction():
context.run_migrations()
def run_migrations_online():
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
# specify here how the engine is acquired
# engine = meta.engine
raise NotImplementedError("Please specify engine connectivity here")
with engine.connect() as connection:
context.configure(
connection=connection,
target_metadata=target_metadata
)
with context.begin_transaction():
context.run_migrations()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()
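# Minimal sketch (an assumption, not part of the template): one way to
# resolve the NotImplementedError in run_migrations_online() is to reuse
# the Pylons-bound engine imported above:
#
#   engine = meta.engine
#
# which matches the commented hint inside that function.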
|
tedi3231/openerp
|
refs/heads/master
|
build/lib/openerp/addons/account/installer.py
|
5
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import datetime
from dateutil.relativedelta import relativedelta
import logging
from operator import itemgetter
from os.path import join as opj
import time
from openerp import netsvc, tools
from openerp.tools.translate import _
from openerp.osv import fields, osv
_logger = logging.getLogger(__name__)
class account_installer(osv.osv_memory):
_name = 'account.installer'
_inherit = 'res.config.installer'
def _get_charts(self, cr, uid, context=None):
modules = self.pool.get('ir.module.module')
# Looking for the module with the 'Account Charts' category
category_name, category_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'base', 'module_category_localization_account_charts')
ids = modules.search(cr, uid, [('category_id', '=', category_id)], context=context)
charts = list(
sorted(((m.name, m.shortdesc)
for m in modules.browse(cr, uid, ids, context=context)),
key=itemgetter(1)))
charts.insert(0, ('configurable', _('Custom')))
return charts
_columns = {
# Accounting
'charts': fields.selection(_get_charts, 'Accounting Package',
required=True,
help="Installs localized accounting charts to match as closely as "
"possible the accounting needs of your company based on your "
"country."),
'date_start': fields.date('Start Date', required=True),
'date_stop': fields.date('End Date', required=True),
'period': fields.selection([('month', 'Monthly'), ('3months','3 Monthly')], 'Periods', required=True),
'company_id': fields.many2one('res.company', 'Company', required=True),
'has_default_company' : fields.boolean('Has Default Company', readonly=True),
}
def _default_company(self, cr, uid, context=None):
user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
return user.company_id and user.company_id.id or False
def _default_has_default_company(self, cr, uid, context=None):
count = self.pool.get('res.company').search_count(cr, uid, [], context=context)
return bool(count == 1)
_defaults = {
'date_start': lambda *a: time.strftime('%Y-01-01'),
'date_stop': lambda *a: time.strftime('%Y-12-31'),
'period': 'month',
'company_id': _default_company,
'has_default_company': _default_has_default_company,
'charts': 'configurable'
}
def get_unconfigured_cmp(self, cr, uid, context=None):
""" get the list of companies that have not been configured yet
but don't care about the demo chart of accounts """
cmp_select = []
company_ids = self.pool.get('res.company').search(cr, uid, [], context=context)
cr.execute("SELECT company_id FROM account_account WHERE active = 't' AND account_account.parent_id IS NULL AND name != %s", ("Chart For Automated Tests",))
configured_cmp = [r[0] for r in cr.fetchall()]
return list(set(company_ids)-set(configured_cmp))
def check_unconfigured_cmp(self, cr, uid, context=None):
""" check if there are still unconfigured companies """
if not self.get_unconfigured_cmp(cr, uid, context=context):
raise osv.except_osv(_('No unconfigured company !'), _("There is currently no company without chart of account. The wizard will therefore not be executed."))
def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
        if context is None:
            context = {}
        res = super(account_installer, self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar, submenu=False)
cmp_select = []
# display in the widget selection only the companies that haven't been configured yet
unconfigured_cmp = self.get_unconfigured_cmp(cr, uid, context=context)
for field in res['fields']:
if field == 'company_id':
res['fields'][field]['domain'] = [('id','in',unconfigured_cmp)]
res['fields'][field]['selection'] = [('', '')]
if unconfigured_cmp:
cmp_select = [(line.id, line.name) for line in self.pool.get('res.company').browse(cr, uid, unconfigured_cmp)]
res['fields'][field]['selection'] = cmp_select
return res
def on_change_start_date(self, cr, uid, id, start_date=False):
if start_date:
start_date = datetime.datetime.strptime(start_date, "%Y-%m-%d")
end_date = (start_date + relativedelta(months=12)) - relativedelta(days=1)
return {'value': {'date_stop': end_date.strftime('%Y-%m-%d')}}
return {}
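    # Worked example: given start_date '2013-01-01', on_change_start_date()
    # returns {'value': {'date_stop': '2013-12-31'}}, i.e. a twelve-month
    # fiscal year ending one day before the anniversary of the start date.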
def execute(self, cr, uid, ids, context=None):
self.execute_simple(cr, uid, ids, context)
super(account_installer, self).execute(cr, uid, ids, context=context)
def execute_simple(self, cr, uid, ids, context=None):
if context is None:
context = {}
fy_obj = self.pool.get('account.fiscalyear')
for res in self.read(cr, uid, ids, context=context):
if 'date_start' in res and 'date_stop' in res:
f_ids = fy_obj.search(cr, uid, [('date_start', '<=', res['date_start']), ('date_stop', '>=', res['date_stop']), ('company_id', '=', res['company_id'][0])], context=context)
if not f_ids:
name = code = res['date_start'][:4]
if int(name) != int(res['date_stop'][:4]):
name = res['date_start'][:4] +'-'+ res['date_stop'][:4]
code = res['date_start'][2:4] +'-'+ res['date_stop'][2:4]
vals = {
'name': name,
'code': code,
'date_start': res['date_start'],
'date_stop': res['date_stop'],
'company_id': res['company_id'][0]
}
fiscal_id = fy_obj.create(cr, uid, vals, context=context)
if res['period'] == 'month':
fy_obj.create_period(cr, uid, [fiscal_id])
elif res['period'] == '3months':
fy_obj.create_period3(cr, uid, [fiscal_id])
def modules_to_install(self, cr, uid, ids, context=None):
modules = super(account_installer, self).modules_to_install(
cr, uid, ids, context=context)
chart = self.read(cr, uid, ids, ['charts'],
context=context)[0]['charts']
_logger.debug('Installing chart of accounts %s', chart)
return modules | set([chart])
account_installer()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
xpac1985/ansible
|
refs/heads/devel
|
test/units/parsing/vault/test_vault.py
|
60
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import getpass
import os
import shutil
import time
import tempfile
import six
from binascii import unhexlify
from binascii import hexlify
from nose.plugins.skip import SkipTest
from ansible.compat.tests import unittest
from ansible.utils.unicode import to_bytes, to_unicode
from ansible import errors
from ansible.parsing.vault import VaultLib
# Counter import fails for 2.0.1, requires >= 2.6.1 from pip
try:
from Crypto.Util import Counter
HAS_COUNTER = True
except ImportError:
HAS_COUNTER = False
# KDF import fails for 2.0.1, requires >= 2.6.1 from pip
try:
from Crypto.Protocol.KDF import PBKDF2
HAS_PBKDF2 = True
except ImportError:
HAS_PBKDF2 = False
# AES IMPORTS
try:
from Crypto.Cipher import AES as AES
HAS_AES = True
except ImportError:
HAS_AES = False
class TestVaultLib(unittest.TestCase):
def test_methods_exist(self):
v = VaultLib('ansible')
slots = ['is_encrypted',
'encrypt',
'decrypt',
'_format_output',
'_split_header',]
for slot in slots:
assert hasattr(v, slot), "VaultLib is missing the %s method" % slot
def test_is_encrypted(self):
v = VaultLib(None)
assert not v.is_encrypted(u"foobar"), "encryption check on plaintext failed"
data = u"$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(b"ansible")
assert v.is_encrypted(data), "encryption check on headered text failed"
def test_format_output(self):
v = VaultLib('ansible')
v.cipher_name = "TEST"
sensitive_data = b"ansible"
data = v._format_output(sensitive_data)
lines = data.split(b'\n')
assert len(lines) > 1, "failed to properly add header"
header = to_bytes(lines[0])
        assert header.endswith(b';TEST'), "header does not end with cipher name"
header_parts = header.split(b';')
assert len(header_parts) == 3, "header has the wrong number of parts"
assert header_parts[0] == b'$ANSIBLE_VAULT', "header does not start with $ANSIBLE_VAULT"
assert header_parts[1] == v.b_version, "header version is incorrect"
        assert header_parts[2] == b'TEST', "header does not end with cipher name"
def test_split_header(self):
v = VaultLib('ansible')
data = b"$ANSIBLE_VAULT;9.9;TEST\nansible"
rdata = v._split_header(data)
lines = rdata.split(b'\n')
assert lines[0] == b"ansible"
assert v.cipher_name == 'TEST', "cipher name was not set"
assert v.b_version == b"9.9"
def test_encrypt_decrypt_aes(self):
if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
raise SkipTest
v = VaultLib('ansible')
v.cipher_name = u'AES'
# AES encryption code has been removed, so this is old output for
# AES-encrypted 'foobar' with password 'ansible'.
enc_data = b'$ANSIBLE_VAULT;1.1;AES\n53616c7465645f5fc107ce1ef4d7b455e038a13b053225776458052f8f8f332d554809d3f150bfa3\nfe3db930508b65e0ff5947e4386b79af8ab094017629590ef6ba486814cf70f8e4ab0ed0c7d2587e\n786a5a15efeb787e1958cbdd480d076c\n'
dec_data = v.decrypt(enc_data)
assert dec_data == b"foobar", "decryption failed"
def test_encrypt_decrypt_aes256(self):
if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
raise SkipTest
v = VaultLib('ansible')
v.cipher_name = 'AES256'
enc_data = v.encrypt(b"foobar")
dec_data = v.decrypt(enc_data)
assert enc_data != b"foobar", "encryption failed"
assert dec_data == b"foobar", "decryption failed"
def test_encrypt_encrypted(self):
if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
raise SkipTest
v = VaultLib('ansible')
v.cipher_name = 'AES'
data = "$ANSIBLE_VAULT;9.9;TEST\n%s" % hexlify(six.b("ansible"))
error_hit = False
try:
enc_data = v.encrypt(data)
except errors.AnsibleError as e:
error_hit = True
assert error_hit, "No error was thrown when trying to encrypt data with a header"
def test_decrypt_decrypted(self):
if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
raise SkipTest
v = VaultLib('ansible')
data = "ansible"
error_hit = False
try:
dec_data = v.decrypt(data)
except errors.AnsibleError as e:
error_hit = True
assert error_hit, "No error was thrown when trying to decrypt data without a header"
def test_cipher_not_set(self):
# not setting the cipher should default to AES256
if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
raise SkipTest
v = VaultLib('ansible')
data = "ansible"
error_hit = False
try:
enc_data = v.encrypt(data)
except errors.AnsibleError as e:
error_hit = True
assert not error_hit, "An error was thrown when trying to encrypt data without the cipher set"
assert v.cipher_name == "AES256", "cipher name is not set to AES256: %s" % v.cipher_name
|
npiganeau/odoo
|
refs/heads/master
|
addons/analytic/wizard/account_analytic_cost_ledger_for_journal_report.py
|
378
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import fields, osv
class account_analytic_cost_ledger_journal_report(osv.osv_memory):
_name = 'account.analytic.cost.ledger.journal.report'
_description = 'Account Analytic Cost Ledger For Journal Report'
_columns = {
'date1': fields.date('Start of period', required=True),
'date2': fields.date('End of period', required=True),
'journal': fields.many2many('account.analytic.journal', 'ledger_journal_rel', 'ledger_id', 'journal_id', 'Journals'),
}
_defaults = {
'date1': lambda *a: time.strftime('%Y-01-01'),
'date2': lambda *a: time.strftime('%Y-%m-%d')
}
def check_report(self, cr, uid, ids, context=None):
if context is None:
context = {}
data = self.read(cr, uid, ids)[0]
datas = {
'ids': context.get('active_ids', []),
'model': 'account.analytic.account',
'form': data
}
datas['form']['active_ids'] = context.get('active_ids', False)
return self.pool['report'].get_action(cr, uid, [], 'account.report_analyticcostledgerquantity', data=datas, context=context)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
laslabs/odoo
|
refs/heads/9.0
|
addons/event_sale/__openerp__.py
|
16
|
# -*- coding: utf-8 -*-
{
'name': 'Events Sales',
'version': '1.1',
'category': 'Marketing',
'website': 'https://www.odoo.com/page/events',
'description': """
Creating registration with sale orders.
=======================================
This module allows you to automate and connect your registration creation with
your main sale flow and therefore to enable the invoicing feature of registrations.
It defines a new kind of service product that offers you the possibility to
choose an event category associated with it. When you encode a sale order for
that product, you will be able to choose an existing event of that category, and
when you confirm your sale order it will automatically create a registration for
this event.
""",
'depends': ['event', 'sale'],
'data': [
'views/event.xml',
'views/product.xml',
'views/sale_order.xml',
'event_sale_data.xml',
'report/event_event_templates.xml',
'security/ir.model.access.csv',
'wizard/event_edit_registration.xml',
],
'demo': ['event_demo.xml'],
'test': ['test/confirm.yml'],
'installable': True,
'auto_install': True
}
|
vdemeester/compose
|
refs/heads/master
|
tests/unit/config/config_test.py
|
2
|
import codecs
import os
import shutil
import tempfile
from operator import itemgetter
from random import shuffle
import pytest
import yaml
from ddt import data
from ddt import ddt
from ...helpers import build_config_details
from ...helpers import BUSYBOX_IMAGE_WITH_TAG
from ...helpers import cd
from compose.config import config
from compose.config import types
from compose.config.config import ConfigFile
from compose.config.config import resolve_build_args
from compose.config.config import resolve_environment
from compose.config.environment import Environment
from compose.config.errors import ConfigurationError
from compose.config.errors import VERSION_EXPLANATION
from compose.config.serialize import denormalize_service_dict
from compose.config.serialize import serialize_config
from compose.config.serialize import serialize_ns_time_value
from compose.config.types import VolumeSpec
from compose.const import COMPOSE_SPEC as VERSION
from compose.const import COMPOSEFILE_V1 as V1
from compose.const import IS_WINDOWS_PLATFORM
from tests import mock
from tests import unittest
DEFAULT_VERSION = VERSION
def make_service_dict(name, service_dict, working_dir='.', filename=None):
"""Test helper function to construct a ServiceExtendsResolver
"""
resolver = config.ServiceExtendsResolver(
config.ServiceConfig(
working_dir=working_dir,
filename=filename,
name=name,
config=service_dict),
config.ConfigFile(filename=filename, config={}),
environment=Environment.from_env_file(working_dir)
)
return config.process_service(resolver.run())
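# Illustrative call (hedged; mirrors how the tests below build fixtures):
# make_service_dict('web', {'image': 'busybox'}) resolves any 'extends'
# chain and returns the processed service configuration for 'web'.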
def service_sort(services):
return sorted(services, key=itemgetter('name'))
def secret_sort(secrets):
return sorted(secrets, key=itemgetter('source'))
@ddt
class ConfigTest(unittest.TestCase):
def test_load(self):
service_dicts = config.load(
build_config_details(
{
'services': {
'foo': {'image': 'busybox'},
'bar': {'image': 'busybox', 'environment': ['FOO=1']},
}
},
'tests/fixtures/extends',
'common.yml'
)
).services
assert service_sort(service_dicts) == service_sort([
{
'name': 'bar',
'image': 'busybox',
'environment': {'FOO': '1'},
},
{
'name': 'foo',
'image': 'busybox',
}
])
def test_load_v2(self):
config_data = config.load(
build_config_details({
'version': '2',
'services': {
'foo': {'image': 'busybox'},
'bar': {'image': 'busybox', 'environment': ['FOO=1']},
},
'volumes': {
'hello': {
'driver': 'default',
'driver_opts': {'beep': 'boop'}
}
},
'networks': {
'default': {
'driver': 'bridge',
'driver_opts': {'beep': 'boop'}
},
'with_ipam': {
'ipam': {
'driver': 'default',
'config': [
{'subnet': '172.28.0.0/16'}
]
}
},
'internal': {
'driver': 'bridge',
'internal': True
}
}
}, 'working_dir', 'filename.yml')
)
service_dicts = config_data.services
volume_dict = config_data.volumes
networks_dict = config_data.networks
assert service_sort(service_dicts) == service_sort([
{
'name': 'bar',
'image': 'busybox',
'environment': {'FOO': '1'},
},
{
'name': 'foo',
'image': 'busybox',
}
])
assert volume_dict == {
'hello': {
'driver': 'default',
'driver_opts': {'beep': 'boop'}
}
}
assert networks_dict == {
'default': {
'driver': 'bridge',
'driver_opts': {'beep': 'boop'}
},
'with_ipam': {
'ipam': {
'driver': 'default',
'config': [
{'subnet': '172.28.0.0/16'}
]
}
},
'internal': {
'driver': 'bridge',
'internal': True
}
}
def test_valid_versions(self):
cfg = config.load(
build_config_details({
'services': {
'foo': {'image': 'busybox'},
'bar': {'image': 'busybox', 'environment': ['FOO=1']},
}
})
)
assert cfg.config_version == VERSION
assert cfg.version == VERSION
for version in ['2', '2.0', '2.1', '2.2', '2.3',
'3', '3.0', '3.1', '3.2', '3.3', '3.4', '3.5', '3.6', '3.7', '3.8']:
cfg = config.load(build_config_details({'version': version}))
assert cfg.config_version == version
assert cfg.version == VERSION
def test_v1_file_version(self):
cfg = config.load(build_config_details({'web': {'image': 'busybox'}}))
assert cfg.version == V1
assert list(s['name'] for s in cfg.services) == ['web']
cfg = config.load(build_config_details({'version': {'image': 'busybox'}}))
assert cfg.version == V1
assert list(s['name'] for s in cfg.services) == ['version']
def test_wrong_version_type(self):
for version in [1, 2, 2.0]:
with pytest.raises(ConfigurationError) as excinfo:
config.load(
build_config_details(
{'version': version},
filename='filename.yml',
)
)
assert 'Version in "filename.yml" is invalid - it should be a string.' \
in excinfo.exconly()
def test_unsupported_version(self):
with pytest.raises(ConfigurationError) as excinfo:
config.load(
build_config_details(
{'version': '1'},
filename='filename.yml',
)
)
assert 'Version in "filename.yml" is invalid' in excinfo.exconly()
assert VERSION_EXPLANATION in excinfo.exconly()
def test_version_1_is_invalid(self):
with pytest.raises(ConfigurationError) as excinfo:
config.load(
build_config_details(
{
'version': '1',
'web': {'image': 'busybox'},
},
filename='filename.yml',
)
)
assert 'Version in "filename.yml" is invalid' in excinfo.exconly()
assert VERSION_EXPLANATION in excinfo.exconly()
def test_v1_file_with_version_is_invalid(self):
with pytest.raises(ConfigurationError) as excinfo:
config.load(
build_config_details(
{
'version': '2',
'web': {'image': 'busybox'},
},
filename='filename.yml',
)
)
assert "compose.config.errors.ConfigurationError: " \
"The Compose file 'filename.yml' is invalid because:\n" \
"'web' does not match any of the regexes: '^x-'" in excinfo.exconly()
assert VERSION_EXPLANATION in excinfo.exconly()
def test_named_volume_config_empty(self):
config_details = build_config_details({
'version': '2',
'services': {
'simple': {'image': 'busybox'}
},
'volumes': {
'simple': None,
'other': {},
}
})
config_result = config.load(config_details)
volumes = config_result.volumes
assert 'simple' in volumes
assert volumes['simple'] == {}
assert volumes['other'] == {}
def test_named_volume_numeric_driver_opt(self):
config_details = build_config_details({
'version': '2',
'services': {
'simple': {'image': 'busybox'}
},
'volumes': {
'simple': {'driver_opts': {'size': 42}},
}
})
cfg = config.load(config_details)
assert cfg.volumes['simple']['driver_opts']['size'] == '42'
def test_volume_invalid_driver_opt(self):
config_details = build_config_details({
'version': '2',
'services': {
'simple': {'image': 'busybox'}
},
'volumes': {
'simple': {'driver_opts': {'size': True}},
}
})
with pytest.raises(ConfigurationError) as exc:
config.load(config_details)
assert 'driver_opts.size contains an invalid type' in exc.exconly()
def test_named_volume_invalid_type_list(self):
config_details = build_config_details({
'version': '2',
'services': {
'simple': {'image': 'busybox'}
},
'volumes': []
})
with pytest.raises(ConfigurationError) as exc:
config.load(config_details)
assert "volume must be a mapping, not an array" in exc.exconly()
def test_networks_invalid_type_list(self):
config_details = build_config_details({
'version': '2',
'services': {
'simple': {'image': 'busybox'}
},
'networks': []
})
with pytest.raises(ConfigurationError) as exc:
config.load(config_details)
assert "network must be a mapping, not an array" in exc.exconly()
def test_load_service_with_name_version(self):
with mock.patch('compose.config.config.log') as mock_logging:
config_data = config.load(
build_config_details({
'version': {
'image': 'busybox'
}
}, 'working_dir', 'filename.yml')
)
assert 'Unexpected type for "version" key in "filename.yml"' \
in mock_logging.warning.call_args[0][0]
service_dicts = config_data.services
assert service_sort(service_dicts) == service_sort([
{
'name': 'version',
'image': 'busybox',
}
])
def test_load_throws_error_when_not_dict(self):
with pytest.raises(ConfigurationError):
config.load(
build_config_details(
{'web': BUSYBOX_IMAGE_WITH_TAG},
'working_dir',
'filename.yml'
)
)
def test_load_throws_error_when_not_dict_v2(self):
with pytest.raises(ConfigurationError):
config.load(
build_config_details(
{'version': '2', 'services': {'web': BUSYBOX_IMAGE_WITH_TAG}},
'working_dir',
'filename.yml'
)
)
def test_load_throws_error_with_invalid_network_fields(self):
with pytest.raises(ConfigurationError):
config.load(
build_config_details({
'version': '2',
'services': {'web': BUSYBOX_IMAGE_WITH_TAG},
'networks': {
'invalid': {'foo', 'bar'}
}
}, 'working_dir', 'filename.yml')
)
def test_load_config_link_local_ips_network(self):
base_file = config.ConfigFile(
'base.yaml',
{
'version': '2',
'services': {
'web': {
'image': 'example/web',
'networks': {
'foobar': {
'aliases': ['foo', 'bar'],
'link_local_ips': ['169.254.8.8']
}
}
}
},
'networks': {'foobar': {}}
}
)
details = config.ConfigDetails('.', [base_file])
web_service = config.load(details).services[0]
assert web_service['networks'] == {
'foobar': {
'aliases': ['foo', 'bar'],
'link_local_ips': ['169.254.8.8']
}
}
def test_load_config_service_labels(self):
base_file = config.ConfigFile(
'base.yaml',
{
'version': '2.1',
'services': {
'web': {
'image': 'example/web',
'labels': ['label_key=label_val']
},
'db': {
'image': 'example/db',
'labels': {
'label_key': 'label_val'
}
}
},
}
)
details = config.ConfigDetails('.', [base_file])
service_dicts = config.load(details).services
for service in service_dicts:
assert service['labels'] == {
'label_key': 'label_val'
}
def test_load_config_custom_resource_names(self):
base_file = config.ConfigFile(
'base.yaml', {
'version': '3.5',
'volumes': {
'abc': {
'name': 'xyz'
}
},
'networks': {
'abc': {
'name': 'xyz'
}
},
'secrets': {
'abc': {
'name': 'xyz'
}
},
'configs': {
'abc': {
'name': 'xyz'
}
}
}
)
details = config.ConfigDetails('.', [base_file])
loaded_config = config.load(details)
assert loaded_config.networks['abc'] == {'name': 'xyz'}
assert loaded_config.volumes['abc'] == {'name': 'xyz'}
assert loaded_config.secrets['abc']['name'] == 'xyz'
assert loaded_config.configs['abc']['name'] == 'xyz'
def test_load_config_volume_and_network_labels(self):
base_file = config.ConfigFile(
'base.yaml',
{
'version': '2.1',
'services': {
'web': {
'image': 'example/web',
},
},
'networks': {
'with_label': {
'labels': {
'label_key': 'label_val'
}
}
},
'volumes': {
'with_label': {
'labels': {
'label_key': 'label_val'
}
}
}
}
)
details = config.ConfigDetails('.', [base_file])
loaded_config = config.load(details)
assert loaded_config.networks == {
'with_label': {
'labels': {
'label_key': 'label_val'
}
}
}
assert loaded_config.volumes == {
'with_label': {
'labels': {
'label_key': 'label_val'
}
}
}
def test_load_config_invalid_service_names(self):
for invalid_name in ['?not?allowed', ' ', '', '!', '/', '\xe2']:
with pytest.raises(ConfigurationError) as exc:
config.load(build_config_details(
{
'version': '2',
'services': {
invalid_name:
{
'image': 'busybox'
}
}
}))
assert 'Invalid service name \'%s\'' % invalid_name in exc.exconly()
def test_load_config_invalid_service_names_v2(self):
for invalid_name in ['?not?allowed', ' ', '', '!', '/', '\xe2']:
with pytest.raises(ConfigurationError) as exc:
config.load(build_config_details(
{
'version': '2',
'services': {invalid_name: {'image': 'busybox'}},
}))
assert 'Invalid service name \'%s\'' % invalid_name in exc.exconly()
def test_load_with_invalid_field_name(self):
with pytest.raises(ConfigurationError) as exc:
config.load(build_config_details(
{
'version': '2',
'services': {
'web': {'image': 'busybox', 'name': 'bogus'},
}
},
'working_dir',
'filename.yml',
))
assert "Unsupported config option for services.web: 'name'" in exc.exconly()
def test_load_with_invalid_field_name_v1(self):
with pytest.raises(ConfigurationError) as exc:
config.load(build_config_details(
{
'version': '2',
'services': {
'web': {'image': 'busybox', 'name': 'bogus'}
}
},
'working_dir',
'filename.yml',
))
assert "Unsupported config option for services.web: 'name'" in exc.exconly()
def test_load_invalid_service_definition(self):
config_details = build_config_details(
{
'version': '2',
'services': {
'web': 'wrong'
}
},
'working_dir',
'filename.yml')
with pytest.raises(ConfigurationError) as exc:
config.load(config_details)
assert "service 'web' must be a mapping not a string." in exc.exconly()
def test_load_with_empty_build_args(self):
config_details = build_config_details(
{
'version': '2',
'services': {
'web': {
'build': {
'context': os.getcwd(),
'args': None,
},
},
},
}
)
with pytest.raises(ConfigurationError) as exc:
config.load(config_details)
assert (
"services.web.build.args contains an invalid type, it should be an "
"object, or an array" in exc.exconly()
)
def test_config_integer_service_name_raise_validation_error(self):
with pytest.raises(ConfigurationError) as excinfo:
config.load(
build_config_details(
{
'version': '2',
'services': {1: {'image': 'busybox'}}
},
'working_dir',
'filename.yml'
)
)
assert (
"In file 'filename.yml', the service name 1 must be a quoted string, i.e. '1'" in
excinfo.exconly()
)
def test_config_integer_service_name_raise_validation_error_v2(self):
with pytest.raises(ConfigurationError) as excinfo:
config.load(
build_config_details(
{
'version': '2',
'services': {1: {'image': 'busybox'}}
},
'working_dir',
'filename.yml'
)
)
assert (
"In file 'filename.yml', the service name 1 must be a quoted string, i.e. '1'." in
excinfo.exconly()
)
def test_config_integer_service_name_raise_validation_error_v2_when_no_interpolate(self):
with pytest.raises(ConfigurationError) as excinfo:
config.load(
build_config_details(
{
'version': '2',
'services': {1: {'image': 'busybox'}}
},
'working_dir',
'filename.yml'
),
interpolate=False
)
assert (
"In file 'filename.yml', the service name 1 must be a quoted string, i.e. '1'." in
excinfo.exconly()
)
def test_config_integer_service_property_raise_validation_error(self):
with pytest.raises(ConfigurationError) as excinfo:
config.load(
build_config_details({
'version': '2.1',
'services': {'foobar': {'image': 'busybox', 1234: 'hah'}}
}, 'working_dir', 'filename.yml')
)
assert (
"Unsupported config option for services.foobar: '1234'" in excinfo.exconly()
)
def test_config_invalid_service_name_raise_validation_error(self):
with pytest.raises(ConfigurationError) as excinfo:
config.load(
build_config_details({
'version': '2',
'services': {
'test_app': {'build': '.'},
'mong\\o': {'image': 'mongo'},
}
})
)
assert 'Invalid service name \'mong\\o\'' in excinfo.exconly()
def test_config_duplicate_cache_from_values_no_validation_error(self):
with pytest.raises(ConfigurationError) as exc:
config.load(
build_config_details({
'version': '2.3',
'services': {
'test': {'build': {'context': '.', 'cache_from': ['a', 'b', 'a']}}
}
})
)
assert 'build.cache_from contains non-unique items' not in exc.exconly()
def test_load_with_multiple_files_v1(self):
base_file = config.ConfigFile(
'base.yaml',
{
'web': {
'image': 'example/web',
'links': ['db'],
},
'db': {
'image': 'example/db',
},
})
override_file = config.ConfigFile(
'override.yaml',
{
'web': {
'build': '/',
'volumes': ['/home/user/project:/code'],
},
})
details = config.ConfigDetails('.', [base_file, override_file])
service_dicts = config.load(details).services
expected = [
{
'name': 'web',
'build': {'context': os.path.abspath('/')},
'volumes': [VolumeSpec.parse('/home/user/project:/code')],
'links': ['db'],
},
{
'name': 'db',
'image': 'example/db',
},
]
assert service_sort(service_dicts) == service_sort(expected)
def test_load_with_multiple_files_and_empty_override(self):
base_file = config.ConfigFile(
'base.yml',
{'web': {'image': 'example/web'}})
override_file = config.ConfigFile('override.yml', None)
details = config.ConfigDetails('.', [base_file, override_file])
with pytest.raises(ConfigurationError) as exc:
config.load(details)
error_msg = "Top level object in 'override.yml' needs to be an object"
assert error_msg in exc.exconly()
def test_load_with_multiple_files_and_empty_override_v2(self):
base_file = config.ConfigFile(
'base.yml',
{'version': '2', 'services': {'web': {'image': 'example/web'}}})
override_file = config.ConfigFile('override.yml', None)
details = config.ConfigDetails('.', [base_file, override_file])
with pytest.raises(ConfigurationError) as exc:
config.load(details)
error_msg = "Top level object in 'override.yml' needs to be an object"
assert error_msg in exc.exconly()
def test_load_with_multiple_files_and_empty_base(self):
base_file = config.ConfigFile('base.yml', None)
override_file = config.ConfigFile(
'override.yml',
{'web': {'image': 'example/web'}})
details = config.ConfigDetails('.', [base_file, override_file])
with pytest.raises(ConfigurationError) as exc:
config.load(details)
assert "Top level object in 'base.yml' needs to be an object" in exc.exconly()
def test_load_with_multiple_files_and_empty_base_v2(self):
base_file = config.ConfigFile('base.yml', None)
override_file = config.ConfigFile(
            'override.yml',
{'version': '2', 'services': {'web': {'image': 'example/web'}}}
)
details = config.ConfigDetails('.', [base_file, override_file])
with pytest.raises(ConfigurationError) as exc:
config.load(details)
assert "Top level object in 'base.yml' needs to be an object" in exc.exconly()
def test_load_with_multiple_files_and_extends_in_override_file(self):
base_file = config.ConfigFile(
'base.yaml',
{
'web': {'image': 'example/web'},
})
override_file = config.ConfigFile(
'override.yaml',
{
'web': {
'extends': {
'file': 'common.yml',
'service': 'base',
},
'volumes': ['/home/user/project:/code'],
},
})
details = config.ConfigDetails('.', [base_file, override_file])
tmpdir = tempfile.mkdtemp('config_test')
self.addCleanup(shutil.rmtree, tmpdir)
with open(os.path.join(tmpdir, 'common.yml'), mode="w") as common_fh:
common_fh.write("""
base:
labels: ['label=one']
""")
with cd(tmpdir):
service_dicts = config.load(details).services
expected = [
{
'name': 'web',
'image': 'example/web',
'volumes': [VolumeSpec.parse('/home/user/project:/code')],
'labels': {'label': 'one'},
},
]
assert service_sort(service_dicts) == service_sort(expected)
def test_load_mixed_extends_resolution(self):
main_file = config.ConfigFile(
'main.yml', {
'version': '2.2',
'services': {
'prodweb': {
'extends': {
'service': 'web',
'file': 'base.yml'
},
'environment': {'PROD': 'true'},
},
},
}
)
tmpdir = tempfile.mkdtemp('config_test')
self.addCleanup(shutil.rmtree, tmpdir)
with open(os.path.join(tmpdir, 'base.yml'), mode="w") as base_fh:
base_fh.write("""
version: '2.2'
services:
base:
image: base
web:
extends: base
""")
details = config.ConfigDetails('.', [main_file])
with cd(tmpdir):
service_dicts = config.load(details).services
assert service_dicts[0] == {
'name': 'prodweb',
'image': 'base',
'environment': {'PROD': 'true'},
}
def test_load_with_multiple_files_and_invalid_override(self):
base_file = config.ConfigFile(
'base.yaml',
{'version': '2', 'services': {'web': {'image': 'example/web'}}})
override_file = config.ConfigFile(
'override.yaml',
{'version': '2', 'services': {'bogus': 'thing'}})
details = config.ConfigDetails('.', [base_file, override_file])
with pytest.raises(ConfigurationError) as exc:
config.load(details)
assert "service 'bogus' must be a mapping not a string." in exc.exconly()
assert "In file 'override.yaml'" in exc.exconly()
def test_load_sorts_in_dependency_order(self):
config_details = build_config_details({
'web': {
'image': BUSYBOX_IMAGE_WITH_TAG,
'links': ['db'],
},
'db': {
'image': BUSYBOX_IMAGE_WITH_TAG,
'volumes_from': ['volume:ro']
},
'volume': {
'image': BUSYBOX_IMAGE_WITH_TAG,
'volumes': ['/tmp'],
}
})
services = config.load(config_details).services
assert services[0]['name'] == 'volume'
assert services[1]['name'] == 'db'
assert services[2]['name'] == 'web'
def test_load_with_extensions(self):
config_details = build_config_details({
'version': '2.3',
'x-data': {
'lambda': 3,
'excess': [True, {}]
}
})
config_data = config.load(config_details)
assert config_data.services == []
def test_config_build_configuration(self):
service = config.load(
build_config_details(
{'web': {
'build': '.',
'dockerfile': 'Dockerfile-alt'
}},
'tests/fixtures/extends',
'filename.yml'
)
).services
assert 'context' in service[0]['build']
assert service[0]['build']['dockerfile'] == 'Dockerfile-alt'
def test_config_build_configuration_v2(self):
# service.dockerfile is invalid in v2
with pytest.raises(ConfigurationError):
config.load(
build_config_details(
{
'version': '2',
'services': {
'web': {
'build': '.',
'dockerfile': 'Dockerfile-alt'
}
}
},
'tests/fixtures/extends',
'filename.yml'
)
)
service = config.load(
build_config_details({
'version': '2',
'services': {
'web': {
'build': '.'
}
}
}, 'tests/fixtures/extends', 'filename.yml')
).services[0]
assert 'context' in service['build']
service = config.load(
build_config_details(
{
'version': '2',
'services': {
'web': {
'build': {
'context': '.',
'dockerfile': 'Dockerfile-alt'
}
}
}
},
'tests/fixtures/extends',
'filename.yml'
)
).services
assert 'context' in service[0]['build']
assert service[0]['build']['dockerfile'] == 'Dockerfile-alt'
def test_load_with_buildargs(self):
service = config.load(
build_config_details(
{
'version': '2',
'services': {
'web': {
'build': {
'context': '.',
'dockerfile': 'Dockerfile-alt',
'args': {
'opt1': 42,
'opt2': 'foobar'
}
}
}
}
},
'tests/fixtures/extends',
'filename.yml'
)
).services[0]
assert 'args' in service['build']
assert 'opt1' in service['build']['args']
assert isinstance(service['build']['args']['opt1'], str)
assert service['build']['args']['opt1'] == '42'
assert service['build']['args']['opt2'] == 'foobar'
def test_load_build_labels_dict(self):
service = config.load(
build_config_details(
{
'services': {
'web': {
'build': {
'context': '.',
'dockerfile': 'Dockerfile-alt',
'labels': {
'label1': 42,
'label2': 'foobar'
}
}
}
}
},
'tests/fixtures/extends',
'filename.yml'
)
).services[0]
assert 'labels' in service['build']
assert 'label1' in service['build']['labels']
assert service['build']['labels']['label1'] == '42'
assert service['build']['labels']['label2'] == 'foobar'
def test_load_build_labels_list(self):
base_file = config.ConfigFile(
'base.yml',
{
'version': '2.3',
'services': {
'web': {
'build': {
'context': '.',
'labels': ['foo=bar', 'baz=true', 'foobar=1']
},
},
},
}
)
details = config.ConfigDetails('.', [base_file])
service = config.load(details).services[0]
assert service['build']['labels'] == {
'foo': 'bar', 'baz': 'true', 'foobar': '1'
}
def test_build_args_allow_empty_properties(self):
service = config.load(
build_config_details(
{
'version': '2',
'services': {
'web': {
'build': {
'context': '.',
'dockerfile': 'Dockerfile-alt',
'args': {
'foo': None
}
}
}
}
},
'tests/fixtures/extends',
'filename.yml'
)
).services[0]
assert 'args' in service['build']
assert 'foo' in service['build']['args']
assert service['build']['args']['foo'] == ''
# If build argument is None then it will be converted to the empty
# string. Make sure that int zero kept as it is, i.e. not converted to
# the empty string
def test_build_args_check_zero_preserved(self):
service = config.load(
build_config_details(
{
'version': '2',
'services': {
'web': {
'build': {
'context': '.',
'dockerfile': 'Dockerfile-alt',
'args': {
'foo': 0
}
}
}
}
},
'tests/fixtures/extends',
'filename.yml'
)
).services[0]
assert 'args' in service['build']
assert 'foo' in service['build']['args']
assert service['build']['args']['foo'] == '0'
def test_load_with_multiple_files_mismatched_networks_format(self):
base_file = config.ConfigFile(
'base.yaml',
{
'version': '2',
'services': {
'web': {
'image': 'example/web',
'networks': {
'foobar': {'aliases': ['foo', 'bar']}
}
}
},
'networks': {'foobar': {}, 'baz': {}}
}
)
override_file = config.ConfigFile(
'override.yaml',
{
'version': '2',
'services': {
'web': {
'networks': ['baz']
}
}
}
)
details = config.ConfigDetails('.', [base_file, override_file])
web_service = config.load(details).services[0]
assert web_service['networks'] == {
'foobar': {'aliases': ['bar', 'foo']},
'baz': {}
}
def test_load_with_multiple_files_mismatched_networks_format_inverse_order(self):
base_file = config.ConfigFile(
'override.yaml',
{
'version': '2',
'services': {
'web': {
'networks': ['baz']
}
}
}
)
override_file = config.ConfigFile(
'base.yaml',
{
'version': '2',
'services': {
'web': {
'image': 'example/web',
'networks': {
'foobar': {'aliases': ['foo', 'bar']}
}
}
},
'networks': {'foobar': {}, 'baz': {}}
}
)
details = config.ConfigDetails('.', [base_file, override_file])
web_service = config.load(details).services[0]
assert web_service['networks'] == {
'foobar': {'aliases': ['bar', 'foo']},
'baz': {}
}
def test_load_with_multiple_files_v2(self):
base_file = config.ConfigFile(
'base.yaml',
{
'version': '2',
'services': {
'web': {
'image': 'example/web',
'depends_on': ['db'],
},
'db': {
'image': 'example/db',
}
},
})
override_file = config.ConfigFile(
'override.yaml',
{
'version': '2',
'services': {
'web': {
'build': '/',
'volumes': ['/home/user/project:/code'],
'depends_on': ['other'],
},
'other': {
'image': 'example/other',
}
}
})
details = config.ConfigDetails('.', [base_file, override_file])
service_dicts = config.load(details).services
expected = [
{
'name': 'web',
'build': {'context': os.path.abspath('/')},
'image': 'example/web',
'volumes': [VolumeSpec.parse('/home/user/project:/code')],
'depends_on': {
'db': {'condition': 'service_started'},
'other': {'condition': 'service_started'},
},
},
{
'name': 'db',
'image': 'example/db',
},
{
'name': 'other',
'image': 'example/other',
},
]
assert service_sort(service_dicts) == service_sort(expected)
@mock.patch.dict(os.environ)
def test_load_with_multiple_files_v3_2(self):
os.environ['COMPOSE_CONVERT_WINDOWS_PATHS'] = 'true'
base_file = config.ConfigFile(
'base.yaml',
{
'version': '3.2',
'services': {
'web': {
'image': 'example/web',
'volumes': [
{'source': '/a', 'target': '/b', 'type': 'bind'},
{'source': 'vol', 'target': '/x', 'type': 'volume', 'read_only': True}
],
'stop_grace_period': '30s',
}
},
'volumes': {'vol': {}}
}
)
override_file = config.ConfigFile(
'override.yaml',
{
'version': '3.2',
'services': {
'web': {
'volumes': ['/c:/b', '/anonymous']
}
}
}
)
details = config.ConfigDetails('.', [base_file, override_file])
service_dicts = config.load(details).services
svc_volumes = map(lambda v: v.repr(), service_dicts[0]['volumes'])
for vol in svc_volumes:
assert vol in [
'/anonymous',
'/c:/b:rw',
{'source': 'vol', 'target': '/x', 'type': 'volume', 'read_only': True}
]
assert service_dicts[0]['stop_grace_period'] == '30s'
@mock.patch.dict(os.environ)
def test_volume_mode_override(self):
os.environ['COMPOSE_CONVERT_WINDOWS_PATHS'] = 'true'
base_file = config.ConfigFile(
'base.yaml',
{
'version': '2.3',
'services': {
'web': {
'image': 'example/web',
'volumes': ['/c:/b:rw']
}
},
}
)
override_file = config.ConfigFile(
'override.yaml',
{
'version': '2.3',
'services': {
'web': {
'volumes': ['/c:/b:ro']
}
}
}
)
details = config.ConfigDetails('.', [base_file, override_file])
service_dicts = config.load(details).services
svc_volumes = list(map(lambda v: v.repr(), service_dicts[0]['volumes']))
assert svc_volumes == ['/c:/b:ro']
def test_undeclared_volume_v2(self):
base_file = config.ConfigFile(
'base.yaml',
{
'version': '2',
'services': {
'web': {
'image': BUSYBOX_IMAGE_WITH_TAG,
'volumes': ['data0028:/data:ro'],
},
},
}
)
details = config.ConfigDetails('.', [base_file])
with pytest.raises(ConfigurationError):
config.load(details)
base_file = config.ConfigFile(
'base.yaml',
{
'version': '2',
'services': {
'web': {
'image': BUSYBOX_IMAGE_WITH_TAG,
'volumes': ['./data0028:/data:ro'],
},
},
}
)
details = config.ConfigDetails('.', [base_file])
config_data = config.load(details)
volume = config_data.services[0].get('volumes')[0]
assert not volume.is_named_volume
def test_undeclared_volume_v1(self):
base_file = config.ConfigFile(
'base.yaml',
{
'web': {
'image': BUSYBOX_IMAGE_WITH_TAG,
'volumes': ['data0028:/data:ro'],
},
}
)
details = config.ConfigDetails('.', [base_file])
config_data = config.load(details)
volume = config_data.services[0].get('volumes')[0]
assert volume.external == 'data0028'
assert volume.is_named_volume
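# Long (dictionary) volume syntax: one entry per supported mount type --
# anonymous volume, host bind, Windows named pipe, and tmpfs.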
def test_volumes_long_syntax(self):
base_file = config.ConfigFile(
'base.yaml', {
'version': '2.3',
'services': {
'web': {
'image': BUSYBOX_IMAGE_WITH_TAG,
'volumes': [
{
'target': '/anonymous', 'type': 'volume'
}, {
'source': '/abc', 'target': '/xyz', 'type': 'bind'
}, {
'source': '\\\\.\\pipe\\abcd', 'target': '/named_pipe', 'type': 'npipe'
}, {
'type': 'tmpfs', 'target': '/tmpfs'
}
]
},
},
},
)
details = config.ConfigDetails('.', [base_file])
config_data = config.load(details)
volumes = config_data.services[0].get('volumes')
anon_volume = [v for v in volumes if v.target == '/anonymous'][0]
tmpfs_mount = [v for v in volumes if v.type == 'tmpfs'][0]
host_mount = [v for v in volumes if v.type == 'bind'][0]
npipe_mount = [v for v in volumes if v.type == 'npipe'][0]
assert anon_volume.type == 'volume'
assert not anon_volume.is_named_volume
assert tmpfs_mount.target == '/tmpfs'
assert not tmpfs_mount.is_named_volume
assert host_mount.source == '/abc'
assert host_mount.target == '/xyz'
assert not host_mount.is_named_volume
assert npipe_mount.source == '\\\\.\\pipe\\abcd'
assert npipe_mount.target == '/named_pipe'
assert not npipe_mount.is_named_volume
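# Relative bind-mount sources are resolved against the project working
# directory passed to ConfigDetails ('/tmp' here), using the platform's
# path separators.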
def test_load_bind_mount_relative_path(self):
expected_source = 'C:\\tmp\\web' if IS_WINDOWS_PLATFORM else '/tmp/web'
base_file = config.ConfigFile(
'base.yaml', {
'version': '3.4',
'services': {
'web': {
'image': BUSYBOX_IMAGE_WITH_TAG,
'volumes': [
{'type': 'bind', 'source': './web', 'target': '/web'},
],
},
},
},
)
details = config.ConfigDetails('/tmp', [base_file])
config_data = config.load(details)
mount = config_data.services[0].get('volumes')[0]
assert mount.target == '/web'
assert mount.type == 'bind'
assert mount.source == expected_source
def test_load_bind_mount_relative_path_with_tilde(self):
base_file = config.ConfigFile(
'base.yaml', {
'version': '3.4',
'services': {
'web': {
'image': BUSYBOX_IMAGE_WITH_TAG,
'volumes': [
{'type': 'bind', 'source': '~/web', 'target': '/web'},
],
},
},
},
)
details = config.ConfigDetails('.', [base_file])
config_data = config.load(details)
mount = config_data.services[0].get('volumes')[0]
assert mount.target == '/web'
assert mount.type == 'bind'
assert (
not mount.source.startswith('~') and mount.source.endswith(
'{}web'.format(os.path.sep)
)
)
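# ipam.config entries must be objects; a bare CIDR string is rejected.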
def test_config_invalid_ipam_config(self):
with pytest.raises(ConfigurationError) as excinfo:
config.load(
build_config_details(
{
'version': str(VERSION),
'networks': {
'foo': {
'driver': 'default',
'ipam': {
'driver': 'default',
'config': ['172.18.0.0/16'],
}
}
}
},
filename='filename.yml',
)
)
assert ('networks.foo.ipam.config contains an invalid type,'
' it should be an object') in excinfo.exconly()
def test_config_valid_ipam_config(self):
ipam_config = {
'subnet': '172.28.0.0/16',
'ip_range': '172.28.5.0/24',
'gateway': '172.28.5.254',
'aux_addresses': {
'host1': '172.28.1.5',
'host2': '172.28.1.6',
'host3': '172.28.1.7',
},
}
networks = config.load(
build_config_details(
{
'networks': {
'foo': {
'driver': 'default',
'ipam': {
'driver': 'default',
'config': [ipam_config],
}
}
}
},
filename='filename.yml',
)
).networks
assert 'foo' in networks
assert networks['foo']['ipam']['config'] == [ipam_config]
def test_config_valid_service_names(self):
for valid_name in ['_', '-', '.__.', '_what-up.', 'what_.up----', 'whatup']:
services = config.load(
build_config_details(
{valid_name: {'image': 'busybox'}},
'tests/fixtures/extends',
'common.yml')).services
assert services[0]['name'] == valid_name
def test_config_hint(self):
with pytest.raises(ConfigurationError) as excinfo:
config.load(
build_config_details(
{
'version': str(VERSION),
'services': {
'foo': {'image': 'busybox', 'privilige': 'something'},
}
},
'tests/fixtures/extends',
'filename.yml'
)
)
assert "(did you mean 'privileged'?)" in excinfo.exconly()
def test_load_errors_on_uppercase_with_no_image(self):
with pytest.raises(ConfigurationError) as exc:
config.load(build_config_details({
'Foo': {'build': '.'},
}, 'tests/fixtures/build-ctx'))
assert "Service 'Foo' contains uppercase characters" in exc.exconly()
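# NOTE: despite the `_v1` suffix, the next test loads the current schema
# VERSION; its assertion still passes because the expected text is a
# substring of the prefixed 'services.foo.image ...' message that the
# v2 test below checks in full.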
def test_invalid_config_v1(self):
with pytest.raises(ConfigurationError) as excinfo:
config.load(
build_config_details(
{
'version': str(VERSION),
'services': {
'foo': {'image': 1},
}
},
'tests/fixtures/extends',
'filename.yml'
)
)
assert "foo.image contains an invalid type, it should be a string" \
in excinfo.exconly()
def test_invalid_config_v2(self):
with pytest.raises(ConfigurationError) as excinfo:
config.load(
build_config_details(
{
'version': '2',
'services': {
'foo': {'image': 1},
},
},
'tests/fixtures/extends',
'filename.yml'
)
)
assert "services.foo.image contains an invalid type, it should be a string" \
in excinfo.exconly()
def test_invalid_config_build_and_image_specified_v1(self):
with pytest.raises(ConfigurationError) as excinfo:
config.load(
build_config_details(
{
'foo': {'image': 'busybox', 'build': '.'},
},
'tests/fixtures/extends',
'filename.yml'
)
)
assert "foo has both an image and build path specified." in excinfo.exconly()
def test_invalid_config_type_should_be_an_array(self):
with pytest.raises(ConfigurationError) as excinfo:
config.load(
build_config_details(
{
'version': str(VERSION),
'services': {
'foo': {'image': 'busybox', 'links': 'an_link'},
}
},
'tests/fixtures/extends',
'filename.yml'
)
)
assert "foo.links contains an invalid type, it should be an array" \
in excinfo.exconly()
def test_invalid_config_not_a_dictionary(self):
with pytest.raises(ConfigurationError) as excinfo:
config.load(
build_config_details(
['foo', 'lol'],
'tests/fixtures/extends',
'filename.yml'
)
)
assert "Top level object in 'filename.yml' needs to be an object" \
in excinfo.exconly()
def test_invalid_config_not_unique_items(self):
with pytest.raises(ConfigurationError) as excinfo:
config.load(
build_config_details(
{
'version': str(VERSION),
'services': {
'web': {'build': '.', 'devices': ['/dev/foo:/dev/foo', '/dev/foo:/dev/foo']}
}
},
'tests/fixtures/extends',
'filename.yml'
)
)
assert "has non-unique elements" in excinfo.exconly()
def test_invalid_list_of_strings_format(self):
with pytest.raises(ConfigurationError) as excinfo:
config.load(
build_config_details(
{
'version': str(VERSION),
'services': {
'web': {'build': '.', 'command': [1]}
}
},
'tests/fixtures/extends',
'filename.yml'
)
)
assert "web.command contains 1, which is an invalid type, it should be a string" \
in excinfo.exconly()
def test_load_config_dockerfile_without_build_raises_error_v1(self):
with pytest.raises(ConfigurationError) as exc:
config.load(build_config_details({
'web': {
'image': 'busybox',
'dockerfile': 'Dockerfile.alt'
}
}))
assert "web has both an image and alternate Dockerfile." in exc.exconly()
def test_config_extra_hosts_string_raises_validation_error(self):
with pytest.raises(ConfigurationError) as excinfo:
config.load(
build_config_details(
{
'version': str(VERSION),
'services': {
'web': {
'image': 'busybox',
'extra_hosts': 'somehost:162.242.195.82'}}
},
'working_dir',
'filename.yml'
)
)
assert "web.extra_hosts contains an invalid type" \
in excinfo.exconly()
def test_config_extra_hosts_list_of_dicts_validation_error(self):
with pytest.raises(ConfigurationError) as excinfo:
config.load(
build_config_details(
{
'version': str(VERSION),
'services': {
'web': {
'image': 'busybox',
'extra_hosts': [
{'somehost': '162.242.195.82'},
{'otherhost': '50.31.209.229'}
]}}
},
'working_dir',
'filename.yml'
)
)
assert "web.extra_hosts contains {\"somehost\": \"162.242.195.82\"}, " \
"which is an invalid type, it should be a string" \
in excinfo.exconly()
def test_config_ulimits_invalid_keys_validation_error(self):
with pytest.raises(ConfigurationError) as exc:
config.load(build_config_details(
{
'version': str(VERSION),
'services': {
'web': {
'image': 'busybox',
'ulimits': {
'nofile': {
"not_soft_or_hard": 100,
"soft": 10000,
"hard": 20000,
}
}
}
}
},
'working_dir',
'filename.yml'))
assert "web.ulimits.nofile contains unsupported option: 'not_soft_or_hard'" \
in exc.exconly()
def test_config_ulimits_required_keys_validation_error(self):
with pytest.raises(ConfigurationError) as exc:
config.load(build_config_details(
{
'version': str(VERSION),
'services': {
'web': {
'image': 'busybox',
'ulimits': {'nofile': {"soft": 10000}}
}
}
},
'working_dir',
'filename.yml'))
assert "web.ulimits.nofile" in exc.exconly()
assert "'hard' is a required property" in exc.exconly()
def test_config_ulimits_soft_greater_than_hard_error(self):
expected = "'soft' value can not be greater than 'hard' value"
with pytest.raises(ConfigurationError) as exc:
config.load(build_config_details(
{
'version': str(VERSION),
'services': {
'web': {
'image': 'busybox',
'ulimits': {
'nofile': {"soft": 10000, "hard": 1000}
}
}
}
},
'working_dir',
'filename.yml'))
assert expected in exc.exconly()
def test_valid_config_which_allows_two_type_definitions(self):
expose_values = [["8000"], [8000]]
for expose in expose_values:
service = config.load(
build_config_details(
{
'version': str(VERSION),
'services': {
'web': {
'image': 'busybox',
'expose': expose}}},
'working_dir',
'filename.yml'
)
).services
assert service[0]['expose'] == expose
def test_valid_config_oneof_string_or_list(self):
entrypoint_values = [["sh"], "sh"]
for entrypoint in entrypoint_values:
service = config.load(
build_config_details(
{
'version': str(VERSION),
'services': {
'web': {
'image': 'busybox',
'entrypoint': entrypoint}}},
'working_dir',
'filename.yml'
)
).services
assert service[0]['entrypoint'] == entrypoint
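# Despite the test name, a boolean environment value is rejected with a
# ConfigurationError rather than merely logged as a warning.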
def test_logs_warning_for_boolean_in_environment(self):
config_details = build_config_details({
'version': str(VERSION),
'services': {
'web': {
'image': 'busybox',
'environment': {'SHOW_STUFF': True}
}
}
})
with pytest.raises(ConfigurationError) as exc:
config.load(config_details)
assert "contains true, which is an invalid type" in exc.exconly()
def test_config_valid_environment_dict_key_contains_dashes(self):
services = config.load(
build_config_details(
{
'version': str(VERSION),
'services': {
'web': {
'image': 'busybox',
'environment': {'SPRING_JPA_HIBERNATE_DDL-AUTO': 'none'}}}},
'working_dir',
'filename.yml'
)
).services
assert services[0]['environment']['SPRING_JPA_HIBERNATE_DDL-AUTO'] == 'none'
def test_load_yaml_with_yaml_error(self):
tmpdir = tempfile.mkdtemp('invalid_yaml_test')
self.addCleanup(shutil.rmtree, tmpdir)
invalid_yaml_file = os.path.join(tmpdir, 'docker-compose.yml')
with open(invalid_yaml_file, mode="w") as invalid_yaml_file_fh:
invalid_yaml_file_fh.write("""
web:
this is bogus: ok: what
""")
with pytest.raises(ConfigurationError) as exc:
config.load_yaml(str(invalid_yaml_file))
assert 'line 3, column 22' in exc.exconly()
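# A UTF-8 byte order mark at the start of the file must not confuse the
# YAML loader.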
def test_load_yaml_with_bom(self):
tmpdir = tempfile.mkdtemp('bom_yaml')
self.addCleanup(shutil.rmtree, tmpdir)
bom_yaml = os.path.join(tmpdir, 'docker-compose.yml')
with codecs.open(str(bom_yaml), 'w', encoding='utf-8') as f:
f.write('''\ufeff
version: '2.3'
volumes:
park_bom:
''')
assert config.load_yaml(str(bom_yaml)) == {
'version': '2.3',
'volumes': {'park_bom': None}
}
def test_validate_extra_hosts_invalid(self):
with pytest.raises(ConfigurationError) as exc:
config.load(build_config_details({
'version': str(VERSION),
'services': {
'web': {
'image': 'alpine',
'extra_hosts': "www.example.com: 192.168.0.17",
}
}
}))
assert "web.extra_hosts contains an invalid type" in exc.exconly()
def test_validate_extra_hosts_invalid_list(self):
with pytest.raises(ConfigurationError) as exc:
config.load(build_config_details({
'version': str(VERSION),
'services': {
'web': {
'image': 'alpine',
'extra_hosts': [
{'www.example.com': '192.168.0.17'},
{'api.example.com': '192.168.0.18'}
],
}
}
}))
assert "which is an invalid type" in exc.exconly()
def test_normalize_dns_options(self):
actual = config.load(build_config_details({
'version': str(VERSION),
'services': {
'web': {
'image': 'alpine',
'dns': '8.8.8.8',
'dns_search': 'domain.local',
}
}
}))
assert actual.services == [
{
'name': 'web',
'image': 'alpine',
'dns': ['8.8.8.8'],
'dns_search': ['domain.local'],
}
]
def test_tmpfs_option(self):
actual = config.load(build_config_details({
'version': '2',
'services': {
'web': {
'image': 'alpine',
'tmpfs': '/run',
}
}
}))
assert actual.services == [
{
'name': 'web',
'image': 'alpine',
'tmpfs': ['/run'],
}
]
def test_oom_score_adj_option(self):
actual = config.load(build_config_details({
'version': '2',
'services': {
'web': {
'image': 'alpine',
'oom_score_adj': 500
}
}
}))
assert actual.services == [
{
'name': 'web',
'image': 'alpine',
'oom_score_adj': 500
}
]
def test_swappiness_option(self):
actual = config.load(build_config_details({
'version': '2',
'services': {
'web': {
'image': 'alpine',
'mem_swappiness': 10,
}
}
}))
assert actual.services == [
{
'name': 'web',
'image': 'alpine',
'mem_swappiness': 10,
}
]
@data(
'2 ',
'3.',
'3.0.0',
'3.0.a',
'3.a',
'3a')
def test_invalid_version_formats(self, version):
content = {
'version': version,
'services': {
'web': {
'image': 'alpine',
}
}
}
with pytest.raises(ConfigurationError) as exc:
config.load(build_config_details(content))
assert 'Version "{}" in "filename.yml" is invalid.'.format(version) in exc.exconly()
def test_group_add_option(self):
actual = config.load(build_config_details({
'version': '2',
'services': {
'web': {
'image': 'alpine',
'group_add': ["docker", 777]
}
}
}))
assert actual.services == [
{
'name': 'web',
'image': 'alpine',
'group_add': ["docker", 777]
}
]
def test_dns_opt_option(self):
actual = config.load(build_config_details({
'version': '2',
'services': {
'web': {
'image': 'alpine',
'dns_opt': ["use-vc", "no-tld-query"]
}
}
}))
assert actual.services == [
{
'name': 'web',
'image': 'alpine',
'dns_opt': ["use-vc", "no-tld-query"]
}
]
def test_isolation_option(self):
actual = config.load(build_config_details({
'services': {
'web': {
'image': 'win10',
'isolation': 'hyperv'
}
}
}))
assert actual.services == [
{
'name': 'web',
'image': 'win10',
'isolation': 'hyperv',
}
]
def test_runtime_option(self):
actual = config.load(build_config_details({
'services': {
'web': {
'image': 'nvidia/cuda',
'runtime': 'nvidia'
}
}
}))
assert actual.services == [
{
'name': 'web',
'image': 'nvidia/cuda',
'runtime': 'nvidia',
}
]
def test_merge_service_dicts_from_files_with_extends_in_base(self):
base = {
'volumes': ['.:/app'],
'extends': {'service': 'app'}
}
override = {
'image': 'alpine:edge',
}
actual = config.merge_service_dicts_from_files(
base,
override,
DEFAULT_VERSION)
assert actual == {
'image': 'alpine:edge',
'volumes': ['.:/app'],
'extends': {'service': 'app'}
}
def test_merge_service_dicts_from_files_with_extends_in_override(self):
base = {
'volumes': ['.:/app'],
'extends': {'service': 'app'}
}
override = {
'image': 'alpine:edge',
'extends': {'service': 'foo'}
}
actual = config.merge_service_dicts_from_files(
base,
override,
DEFAULT_VERSION)
assert actual == {
'image': 'alpine:edge',
'volumes': ['.:/app'],
'extends': {'service': 'foo'}
}
def test_merge_service_dicts_heterogeneous(self):
base = {
'volumes': ['.:/app'],
'ports': ['5432']
}
override = {
'image': 'alpine:edge',
'ports': [5432]
}
actual = config.merge_service_dicts_from_files(
base,
override,
DEFAULT_VERSION)
assert actual == {
'image': 'alpine:edge',
'volumes': ['.:/app'],
'ports': types.ServicePort.parse('5432')
}
def test_merge_service_dicts_heterogeneous_2(self):
base = {
'volumes': ['.:/app'],
'ports': [5432]
}
override = {
'image': 'alpine:edge',
'ports': ['5432']
}
actual = config.merge_service_dicts_from_files(
base,
override,
DEFAULT_VERSION)
assert actual == {
'image': 'alpine:edge',
'volumes': ['.:/app'],
'ports': types.ServicePort.parse('5432')
}
def test_merge_service_dicts_ports_sorting(self):
base = {
'ports': [5432]
}
override = {
'image': 'alpine:edge',
'ports': ['5432/udp']
}
actual = config.merge_service_dicts_from_files(
base,
override,
DEFAULT_VERSION)
assert len(actual['ports']) == 2
assert types.ServicePort.parse('5432')[0] in actual['ports']
assert types.ServicePort.parse('5432/udp')[0] in actual['ports']
def test_merge_service_dicts_heterogeneous_volumes(self):
base = {
'volumes': ['/a:/b', '/x:/z'],
}
override = {
'image': 'alpine:edge',
'volumes': [
{'source': '/e', 'target': '/b', 'type': 'bind'},
{'source': '/c', 'target': '/d', 'type': 'bind'}
]
}
actual = config.merge_service_dicts_from_files(
base, override, VERSION
)
assert actual['volumes'] == [
{'source': '/e', 'target': '/b', 'type': 'bind'},
{'source': '/c', 'target': '/d', 'type': 'bind'},
'/x:/z'
]
def test_merge_logging_v1(self):
base = {
'image': 'alpine:edge',
'log_driver': 'something',
'log_opt': {'foo': 'three'},
}
override = {
'image': 'alpine:edge',
'command': 'true',
}
actual = config.merge_service_dicts(base, override, V1)
assert actual == {
'image': 'alpine:edge',
'log_driver': 'something',
'log_opt': {'foo': 'three'},
'command': 'true',
}
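# v2 logging merge: the base driver is kept and the options dictionaries
# are shallow-merged, with override values winning on conflict.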
def test_merge_logging_v2(self):
base = {
'image': 'alpine:edge',
'logging': {
'driver': 'json-file',
'options': {
'frequency': '2000',
'timeout': '23'
}
}
}
override = {
'logging': {
'options': {
'timeout': '360',
'pretty-print': 'on'
}
}
}
actual = config.merge_service_dicts(base, override, VERSION)
assert actual == {
'image': 'alpine:edge',
'logging': {
'driver': 'json-file',
'options': {
'frequency': '2000',
'timeout': '360',
'pretty-print': 'on'
}
}
}
def test_merge_logging_v2_override_driver(self):
base = {
'image': 'alpine:edge',
'logging': {
'driver': 'json-file',
'options': {
'frequency': '2000',
'timeout': '23'
}
}
}
override = {
'logging': {
'driver': 'syslog',
'options': {
'timeout': '360',
'pretty-print': 'on'
}
}
}
actual = config.merge_service_dicts(base, override, VERSION)
assert actual == {
'image': 'alpine:edge',
'logging': {
'driver': 'syslog',
'options': {
'timeout': '360',
'pretty-print': 'on'
}
}
}
def test_merge_logging_v2_no_base_driver(self):
base = {
'image': 'alpine:edge',
'logging': {
'options': {
'frequency': '2000',
'timeout': '23'
}
}
}
override = {
'logging': {
'driver': 'json-file',
'options': {
'timeout': '360',
'pretty-print': 'on'
}
}
}
actual = config.merge_service_dicts(base, override, VERSION)
assert actual == {
'image': 'alpine:edge',
'logging': {
'driver': 'json-file',
'options': {
'frequency': '2000',
'timeout': '360',
'pretty-print': 'on'
}
}
}
def test_merge_logging_v2_no_drivers(self):
base = {
'image': 'alpine:edge',
'logging': {
'options': {
'frequency': '2000',
'timeout': '23'
}
}
}
override = {
'logging': {
'options': {
'timeout': '360',
'pretty-print': 'on'
}
}
}
actual = config.merge_service_dicts(base, override, VERSION)
assert actual == {
'image': 'alpine:edge',
'logging': {
'options': {
'frequency': '2000',
'timeout': '360',
'pretty-print': 'on'
}
}
}
def test_merge_logging_v2_no_override_options(self):
base = {
'image': 'alpine:edge',
'logging': {
'driver': 'json-file',
'options': {
'frequency': '2000',
'timeout': '23'
}
}
}
override = {
'logging': {
'driver': 'syslog'
}
}
actual = config.merge_service_dicts(base, override, VERSION)
assert actual == {
'image': 'alpine:edge',
'logging': {
'driver': 'syslog',
}
}
def test_merge_logging_v2_no_base(self):
base = {
'image': 'alpine:edge'
}
override = {
'logging': {
'driver': 'json-file',
'options': {
'frequency': '2000'
}
}
}
actual = config.merge_service_dicts(base, override, VERSION)
assert actual == {
'image': 'alpine:edge',
'logging': {
'driver': 'json-file',
'options': {
'frequency': '2000'
}
}
}
def test_merge_logging_v2_no_override(self):
base = {
'image': 'alpine:edge',
'logging': {
'driver': 'syslog',
'options': {
'frequency': '2000'
}
}
}
override = {}
actual = config.merge_service_dicts(base, override, VERSION)
assert actual == {
'image': 'alpine:edge',
'logging': {
'driver': 'syslog',
'options': {
'frequency': '2000'
}
}
}
def test_merge_mixed_ports(self):
base = {
'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'top',
'ports': [
{
'target': '1245',
'published': '1245',
'protocol': 'udp',
}
]
}
override = {
'ports': ['1245:1245/udp']
}
actual = config.merge_service_dicts(base, override, VERSION)
assert actual == {
'image': BUSYBOX_IMAGE_WITH_TAG,
'command': 'top',
'ports': [types.ServicePort('1245', '1245', 'udp', None, None)]
}
def test_merge_depends_on_no_override(self):
base = {
'image': 'busybox',
'depends_on': {
'app1': {'condition': 'service_started'},
'app2': {'condition': 'service_healthy'},
'app3': {'condition': 'service_completed_successfully'}
}
}
override = {}
actual = config.merge_service_dicts(base, override, VERSION)
assert actual == base
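# Short (list) depends_on entries in the override are normalized to the
# long condition syntax when merged with a long-form base.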
def test_merge_depends_on_mixed_syntax(self):
base = {
'image': 'busybox',
'depends_on': {
'app1': {'condition': 'service_started'},
'app2': {'condition': 'service_healthy'},
'app3': {'condition': 'service_completed_successfully'}
}
}
override = {
'depends_on': ['app4']
}
actual = config.merge_service_dicts(base, override, VERSION)
assert actual == {
'image': 'busybox',
'depends_on': {
'app1': {'condition': 'service_started'},
'app2': {'condition': 'service_healthy'},
'app3': {'condition': 'service_completed_successfully'},
'app4': {'condition': 'service_started'},
}
}
def test_empty_environment_key_allowed(self):
service_dict = config.load(
build_config_details(
{
'web': {
'build': '.',
'environment': {
'POSTGRES_PASSWORD': ''
},
},
},
'.',
None,
)
).services[0]
assert service_dict['environment']['POSTGRES_PASSWORD'] == ''
def test_merge_pid(self):
# Regression: https://github.com/docker/compose/issues/4184
base = {
'image': 'busybox',
'pid': 'host'
}
override = {
'labels': {'com.docker.compose.test': 'yes'}
}
actual = config.merge_service_dicts(base, override, VERSION)
assert actual == {
'image': 'busybox',
'pid': 'host',
'labels': {'com.docker.compose.test': 'yes'}
}
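# Secrets from base and override are unioned; the short string form is
# normalized to a {'source': ...} mapping before comparison.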
def test_merge_different_secrets(self):
base = {
'image': 'busybox',
'secrets': [
{'source': 'src.txt'}
]
}
override = {'secrets': ['other-src.txt']}
actual = config.merge_service_dicts(base, override, VERSION)
assert secret_sort(actual['secrets']) == secret_sort([
{'source': 'src.txt'},
{'source': 'other-src.txt'}
])
def test_merge_secrets_override(self):
base = {
'image': 'busybox',
'secrets': ['src.txt'],
}
override = {
'secrets': [
{
'source': 'src.txt',
'target': 'data.txt',
'mode': 0o400
}
]
}
actual = config.merge_service_dicts(base, override, VERSION)
assert actual['secrets'] == override['secrets']
def test_merge_different_configs(self):
base = {
'image': 'busybox',
'configs': [
{'source': 'src.txt'}
]
}
override = {'configs': ['other-src.txt']}
actual = config.merge_service_dicts(base, override, VERSION)
assert secret_sort(actual['configs']) == secret_sort([
{'source': 'src.txt'},
{'source': 'other-src.txt'}
])
def test_merge_configs_override(self):
base = {
'image': 'busybox',
'configs': ['src.txt'],
}
override = {
'configs': [
{
'source': 'src.txt',
'target': 'data.txt',
'mode': 0o400
}
]
}
actual = config.merge_service_dicts(base, override, VERSION)
assert actual['configs'] == override['configs']
def test_merge_deploy(self):
base = {
'image': 'busybox',
}
override = {
'deploy': {
'mode': 'global',
'restart_policy': {
'condition': 'on-failure'
}
}
}
actual = config.merge_service_dicts(base, override, VERSION)
assert actual['deploy'] == override['deploy']
def test_merge_deploy_override(self):
base = {
'deploy': {
'endpoint_mode': 'vip',
'labels': ['com.docker.compose.a=1', 'com.docker.compose.b=2'],
'mode': 'replicated',
'placement': {
'max_replicas_per_node': 1,
'constraints': [
'node.role == manager', 'engine.labels.aws == true'
],
'preferences': [
{'spread': 'node.labels.zone'}, {'spread': 'x.d.z'}
]
},
'replicas': 3,
'resources': {
'limits': {'cpus': '0.50', 'memory': '50m'},
'reservations': {
'cpus': '0.1',
'generic_resources': [
{'discrete_resource_spec': {'kind': 'abc', 'value': 123}}
],
'memory': '15m'
}
},
'restart_policy': {'condition': 'any', 'delay': '10s'},
'update_config': {'delay': '10s', 'max_failure_ratio': 0.3}
},
'image': 'hello-world'
}
override = {
'deploy': {
'labels': {
'com.docker.compose.b': '21', 'com.docker.compose.c': '3'
},
'placement': {
'constraints': ['node.role == worker', 'engine.labels.dev == true'],
'preferences': [{'spread': 'node.labels.zone'}, {'spread': 'x.d.s'}]
},
'resources': {
'limits': {'memory': '200m'},
'reservations': {
'cpus': '0.78',
'generic_resources': [
{'discrete_resource_spec': {'kind': 'abc', 'value': 134}},
{'discrete_resource_spec': {'kind': 'xyz', 'value': 0.1}}
]
}
},
'restart_policy': {'condition': 'on-failure', 'max_attempts': 42},
'update_config': {'max_failure_ratio': 0.712, 'parallelism': 4}
}
}
actual = config.merge_service_dicts(base, override, VERSION)
assert actual['deploy'] == {
'mode': 'replicated',
'endpoint_mode': 'vip',
'labels': {
'com.docker.compose.a': '1',
'com.docker.compose.b': '21',
'com.docker.compose.c': '3'
},
'placement': {
'max_replicas_per_node': 1,
'constraints': [
'engine.labels.aws == true', 'engine.labels.dev == true',
'node.role == manager', 'node.role == worker'
],
'preferences': [
{'spread': 'node.labels.zone'}, {'spread': 'x.d.s'}, {'spread': 'x.d.z'}
]
},
'replicas': 3,
'resources': {
'limits': {'cpus': '0.50', 'memory': '200m'},
'reservations': {
'cpus': '0.78',
'memory': '15m',
'generic_resources': [
{'discrete_resource_spec': {'kind': 'abc', 'value': 134}},
{'discrete_resource_spec': {'kind': 'xyz', 'value': 0.1}},
]
}
},
'restart_policy': {
'condition': 'on-failure',
'delay': '10s',
'max_attempts': 42,
},
'update_config': {
'max_failure_ratio': 0.712,
'delay': '10s',
'parallelism': 4
}
}
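# credential_spec is replaced wholesale by the override, not merged key
# by key.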
def test_merge_credential_spec(self):
base = {
'image': 'bb',
'credential_spec': {
'file': '/hello-world',
}
}
override = {
'credential_spec': {
'registry': 'revolution.com',
}
}
actual = config.merge_service_dicts(base, override, VERSION)
assert actual['credential_spec'] == override['credential_spec']
def test_merge_scale(self):
base = {
'image': 'bar',
'scale': 2,
}
override = {
'scale': 4,
}
actual = config.merge_service_dicts(base, override, VERSION)
assert actual == {'image': 'bar', 'scale': 4}
def test_merge_blkio_config(self):
base = {
'image': 'bar',
'blkio_config': {
'weight': 300,
'weight_device': [
{'path': '/dev/sda1', 'weight': 200}
],
'device_read_iops': [
{'path': '/dev/sda1', 'rate': 300}
],
'device_write_iops': [
{'path': '/dev/sda1', 'rate': 1000}
]
}
}
override = {
'blkio_config': {
'weight': 450,
'weight_device': [
{'path': '/dev/sda2', 'weight': 400}
],
'device_read_iops': [
{'path': '/dev/sda1', 'rate': 2000}
],
'device_read_bps': [
{'path': '/dev/sda1', 'rate': 1024}
]
}
}
actual = config.merge_service_dicts(base, override, VERSION)
assert actual == {
'image': 'bar',
'blkio_config': {
'weight': override['blkio_config']['weight'],
'weight_device': (
base['blkio_config']['weight_device'] +
override['blkio_config']['weight_device']
),
'device_read_iops': override['blkio_config']['device_read_iops'],
'device_read_bps': override['blkio_config']['device_read_bps'],
'device_write_iops': base['blkio_config']['device_write_iops']
}
}
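# extra_hosts given as a dict in the base and a list in the override are
# merged into a single dict, with the override winning per host name.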
def test_merge_extra_hosts(self):
base = {
'image': 'bar',
'extra_hosts': {
'foo': '1.2.3.4',
}
}
override = {
'extra_hosts': ['bar:5.6.7.8', 'foo:127.0.0.1']
}
actual = config.merge_service_dicts(base, override, VERSION)
assert actual['extra_hosts'] == {
'foo': '127.0.0.1',
'bar': '5.6.7.8',
}
def test_merge_healthcheck_config(self):
base = {
'image': 'bar',
'healthcheck': {
'start_period': 1000,
'interval': 3000,
'test': ['true']
}
}
override = {
'healthcheck': {
'interval': 5000,
'timeout': 10000,
'test': ['echo', 'OK'],
}
}
actual = config.merge_service_dicts(base, override, VERSION)
assert actual['healthcheck'] == {
'start_period': base['healthcheck']['start_period'],
'test': override['healthcheck']['test'],
'interval': override['healthcheck']['interval'],
'timeout': override['healthcheck']['timeout'],
}
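# Setting 'disabled: true' in the override discards every other
# healthcheck key inherited from the base.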
def test_merge_healthcheck_override_disables(self):
base = {
'image': 'bar',
'healthcheck': {
'start_period': 1000,
'interval': 3000,
'timeout': 2000,
'retries': 3,
'test': ['true']
}
}
override = {
'healthcheck': {
'disabled': True
}
}
actual = config.merge_service_dicts(base, override, VERSION)
assert actual['healthcheck'] == {'disabled': True}
def test_merge_healthcheck_override_enables(self):
base = {
'image': 'bar',
'healthcheck': {
'disabled': True
}
}
override = {
'healthcheck': {
'disabled': False,
'start_period': 1000,
'interval': 3000,
'timeout': 2000,
'retries': 3,
'test': ['true']
}
}
actual = config.merge_service_dicts(base, override, VERSION)
assert actual['healthcheck'] == override['healthcheck']
def test_merge_device_cgroup_rules(self):
base = {
'image': 'bar',
'device_cgroup_rules': ['c 7:128 rwm', 'x 3:244 rw']
}
override = {
'device_cgroup_rules': ['c 7:128 rwm', 'f 0:128 n']
}
actual = config.merge_service_dicts(base, override, VERSION)
assert sorted(actual['device_cgroup_rules']) == sorted(
['c 7:128 rwm', 'x 3:244 rw', 'f 0:128 n']
)
def test_merge_isolation(self):
base = {
'image': 'bar',
'isolation': 'default',
}
override = {
'isolation': 'hyperv',
}
actual = config.merge_service_dicts(base, override, VERSION)
assert actual == {
'image': 'bar',
'isolation': 'hyperv',
}
def test_merge_storage_opt(self):
base = {
'image': 'bar',
'storage_opt': {
'size': '1G',
'readonly': 'false',
}
}
override = {
'storage_opt': {
'size': '2G',
'encryption': 'aes',
}
}
actual = config.merge_service_dicts(base, override, VERSION)
assert actual['storage_opt'] == {
'size': '2G',
'readonly': 'false',
'encryption': 'aes',
}
def test_external_volume_config(self):
config_details = build_config_details({
'version': '2',
'services': {
'bogus': {'image': 'busybox'}
},
'volumes': {
'ext': {'external': True},
'ext2': {'external': {'name': 'aliased'}}
}
})
config_result = config.load(config_details)
volumes = config_result.volumes
assert 'ext' in volumes
assert volumes['ext']['external'] is True
assert 'ext2' in volumes
assert volumes['ext2']['external']['name'] == 'aliased'
def test_external_volume_invalid_config(self):
config_details = build_config_details({
'version': '2',
'services': {
'bogus': {'image': 'busybox'}
},
'volumes': {
'ext': {'external': True, 'driver': 'foo'}
}
})
with pytest.raises(ConfigurationError):
config.load(config_details)
def test_depends_on_orders_services(self):
config_details = build_config_details({
'version': '2',
'services': {
'one': {'image': 'busybox', 'depends_on': ['three', 'two']},
'two': {'image': 'busybox', 'depends_on': ['three']},
'three': {'image': 'busybox'},
},
})
actual = config.load(config_details)
assert (
[service['name'] for service in actual.services] ==
['three', 'two', 'one']
)
def test_depends_on_unknown_service_errors(self):
config_details = build_config_details({
'version': '2',
'services': {
'one': {'image': 'busybox', 'depends_on': ['three']},
},
})
with pytest.raises(ConfigurationError) as exc:
config.load(config_details)
assert "Service 'one' depends on service 'three'" in exc.exconly()
def test_linked_service_is_undefined(self):
with pytest.raises(ConfigurationError):
config.load(
build_config_details({
'version': '2',
'services': {
'web': {'image': 'busybox', 'links': ['db:db']},
},
})
)
def test_load_dockerfile_without_context(self):
config_details = build_config_details({
'version': '2',
'services': {
'one': {'build': {'dockerfile': 'Dockerfile.foo'}},
},
})
with pytest.raises(ConfigurationError) as exc:
config.load(config_details)
assert 'has neither an image nor a build context' in exc.exconly()
def test_load_secrets(self):
base_file = config.ConfigFile(
'base.yaml',
{
'version': '3.1',
'services': {
'web': {
'image': 'example/web',
'secrets': [
'one',
{
'source': 'source',
'target': 'target',
'uid': '100',
'gid': '200',
'mode': 0o777,
},
],
},
},
'secrets': {
'one': {'file': 'secret.txt'},
},
})
details = config.ConfigDetails('.', [base_file])
service_dicts = config.load(details).services
expected = [
{
'name': 'web',
'image': 'example/web',
'secrets': [
types.ServiceSecret('one', None, None, None, None, None),
types.ServiceSecret('source', 'target', '100', '200', 0o777, None),
],
},
]
assert service_sort(service_dicts) == service_sort(expected)
def test_load_secrets_multi_file(self):
base_file = config.ConfigFile(
'base.yaml',
{
'version': '3.1',
'services': {
'web': {
'image': 'example/web',
'secrets': ['one'],
},
},
'secrets': {
'one': {'file': 'secret.txt'},
},
})
override_file = config.ConfigFile(
'base.yaml',
{
'version': '3.1',
'services': {
'web': {
'secrets': [
{
'source': 'source',
'target': 'target',
'uid': '100',
'gid': '200',
'mode': 0o777,
},
],
},
},
})
details = config.ConfigDetails('.', [base_file, override_file])
service_dicts = config.load(details).services
expected = [
{
'name': 'web',
'image': 'example/web',
'secrets': [
types.ServiceSecret('one', None, None, None, None, None),
types.ServiceSecret('source', 'target', '100', '200', 0o777, None),
],
},
]
assert service_sort(service_dicts) == service_sort(expected)
def test_load_configs(self):
base_file = config.ConfigFile(
'base.yaml',
{
'version': '3.3',
'services': {
'web': {
'image': 'example/web',
'configs': [
'one',
{
'source': 'source',
'target': 'target',
'uid': '100',
'gid': '200',
'mode': 0o777,
},
],
},
},
'configs': {
'one': {'file': 'secret.txt'},
},
})
details = config.ConfigDetails('.', [base_file])
service_dicts = config.load(details).services
expected = [
{
'name': 'web',
'image': 'example/web',
'configs': [
types.ServiceConfig('one', None, None, None, None, None),
types.ServiceConfig('source', 'target', '100', '200', 0o777, None),
],
},
]
assert service_sort(service_dicts) == service_sort(expected)
def test_load_configs_multi_file(self):
base_file = config.ConfigFile(
'base.yaml',
{
'version': '3.3',
'services': {
'web': {
'image': 'example/web',
'configs': ['one'],
},
},
'configs': {
'one': {'file': 'secret.txt'},
},
})
override_file = config.ConfigFile(
'base.yaml',
{
'version': '3.3',
'services': {
'web': {
'configs': [
{
'source': 'source',
'target': 'target',
'uid': '100',
'gid': '200',
'mode': 0o777,
},
],
},
},
})
details = config.ConfigDetails('.', [base_file, override_file])
service_dicts = config.load(details).services
expected = [
{
'name': 'web',
'image': 'example/web',
'configs': [
types.ServiceConfig('one', None, None, None, None, None),
types.ServiceConfig('source', 'target', '100', '200', 0o777, None),
],
},
]
assert service_sort(service_dicts) == service_sort(expected)
def test_config_convertible_label_types(self):
config_details = build_config_details(
{
'version': '3.5',
'services': {
'web': {
'build': {
'labels': {'testbuild': True},
'context': os.getcwd()
},
'labels': {
"key": 12345
}
},
},
'networks': {
'foo': {
'labels': {'network.ips.max': 1023}
}
},
'volumes': {
'foo': {
'labels': {'volume.is_readonly': False}
}
},
'secrets': {
'foo': {
'labels': {'secret.data.expires': 1546282120}
}
},
'configs': {
'foo': {
'labels': {'config.data.correction.value': -0.1412}
}
}
}
)
loaded_config = config.load(config_details)
assert loaded_config.services[0]['build']['labels'] == {'testbuild': 'True'}
assert loaded_config.services[0]['labels'] == {'key': '12345'}
assert loaded_config.networks['foo']['labels']['network.ips.max'] == '1023'
assert loaded_config.volumes['foo']['labels']['volume.is_readonly'] == 'False'
assert loaded_config.secrets['foo']['labels']['secret.data.expires'] == '1546282120'
assert loaded_config.configs['foo']['labels']['config.data.correction.value'] == '-0.1412'
def test_config_invalid_label_types(self):
config_details = build_config_details({
'version': '2.3',
'volumes': {
'foo': {'labels': [1, 2, 3]}
}
})
with pytest.raises(ConfigurationError):
config.load(config_details)
def test_service_volume_invalid_config(self):
config_details = build_config_details(
{
'version': '3.2',
'services': {
'web': {
'build': {
'context': '.',
'args': None,
},
'volumes': [
{
"type": "volume",
"source": "/data",
"garbage": {
"and": "error"
}
}
]
}
}
}
)
with pytest.raises(ConfigurationError) as exc:
config.load(config_details)
assert "services.web.volumes contains unsupported option: 'garbage'" in exc.exconly()
def test_config_valid_service_label_validation(self):
config_details = build_config_details(
{
'version': '3.5',
'services': {
'web': {
'image': 'busybox',
'labels': {
"key": "string"
}
},
},
}
)
config.load(config_details)
def test_config_duplicate_mount_points(self):
config1 = build_config_details(
{
'version': '3.5',
'services': {
'web': {
'image': 'busybox',
'volumes': ['/tmp/foo:/tmp/foo', '/tmp/foo:/tmp/foo:rw']
}
}
}
)
config2 = build_config_details(
{
'version': '3.5',
'services': {
'web': {
'image': 'busybox',
'volumes': ['/x:/y', '/z:/y']
}
}
}
)
with self.assertRaises(ConfigurationError) as e:
config.load(config1)
self.assertEqual(str(e.exception), 'Duplicate mount points: [%s]' % (
', '.join(['/tmp/foo:/tmp/foo:rw']*2)))
with self.assertRaises(ConfigurationError) as e:
config.load(config2)
self.assertEqual(str(e.exception), 'Duplicate mount points: [%s]' % (
', '.join(['/x:/y:rw', '/z:/y:rw'])))
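# Tests covering the 'network_mode' option and its v1 predecessor 'net',
# including the 'container:' and 'service:' reference forms.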
class NetworkModeTest(unittest.TestCase):
def test_network_mode_standard(self):
config_data = config.load(build_config_details({
'version': '2',
'services': {
'web': {
'image': 'busybox',
'command': "top",
'network_mode': 'bridge',
},
},
}))
assert config_data.services[0]['network_mode'] == 'bridge'
def test_network_mode_standard_v1(self):
config_data = config.load(build_config_details({
'web': {
'image': 'busybox',
'command': "top",
'net': 'bridge',
},
}))
assert config_data.services[0]['network_mode'] == 'bridge'
assert 'net' not in config_data.services[0]
def test_network_mode_container(self):
config_data = config.load(build_config_details({
'version': '2',
'services': {
'web': {
'image': 'busybox',
'command': "top",
'network_mode': 'container:foo',
},
},
}))
assert config_data.services[0]['network_mode'] == 'container:foo'
def test_network_mode_container_v1(self):
config_data = config.load(build_config_details({
'web': {
'image': 'busybox',
'command': "top",
'net': 'container:foo',
},
}))
assert config_data.services[0]['network_mode'] == 'container:foo'
def test_network_mode_service(self):
config_data = config.load(build_config_details({
'version': '2',
'services': {
'web': {
'image': 'busybox',
'command': "top",
'network_mode': 'service:foo',
},
'foo': {
'image': 'busybox',
'command': "top",
},
},
}))
assert config_data.services[1]['network_mode'] == 'service:foo'
def test_network_mode_service_v1(self):
config_data = config.load(build_config_details({
'web': {
'image': 'busybox',
'command': "top",
'net': 'container:foo',
},
'foo': {
'image': 'busybox',
'command': "top",
},
}))
assert config_data.services[1]['network_mode'] == 'service:foo'
def test_network_mode_service_nonexistent(self):
with pytest.raises(ConfigurationError) as excinfo:
config.load(build_config_details({
'version': '2',
'services': {
'web': {
'image': 'busybox',
'command': "top",
'network_mode': 'service:foo',
},
},
}))
assert "service 'foo' which is undefined" in excinfo.exconly()
def test_network_mode_plus_networks_is_invalid(self):
with pytest.raises(ConfigurationError) as excinfo:
config.load(build_config_details({
'version': '2',
'services': {
'web': {
'image': 'busybox',
'command': "top",
'network_mode': 'bridge',
'networks': ['front'],
},
},
'networks': {
'front': None,
}
}))
assert "'network_mode' and 'networks' cannot be combined" in excinfo.exconly()
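# The shared fixtures below enumerate invalid types, non-unique entries,
# and valid single-port / port-range mappings for 'ports' and 'expose'.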
class PortsTest(unittest.TestCase):
INVALID_PORTS_TYPES = [
{"1": "8000"},
False,
"8000",
8000,
]
NON_UNIQUE_SINGLE_PORTS = [
["8000", "8000"],
]
INVALID_PORT_MAPPINGS = [
["8000-8004:8000-8002"],
["4242:4242-4244"],
]
VALID_SINGLE_PORTS = [
["8000"],
["8000/tcp"],
["8000", "9000"],
[8000],
[8000, 9000],
]
VALID_PORT_MAPPINGS = [
["8000:8050"],
["49153-49154:3002-3003"],
]
def test_config_invalid_ports_type_validation(self):
for invalid_ports in self.INVALID_PORTS_TYPES:
with pytest.raises(ConfigurationError) as exc:
self.check_config({'ports': invalid_ports})
assert "contains an invalid type" in exc.value.msg
def test_config_non_unique_ports_validation(self):
for invalid_ports in self.NON_UNIQUE_SINGLE_PORTS:
with pytest.raises(ConfigurationError) as exc:
self.check_config({'ports': invalid_ports})
assert "non-unique" in exc.value.msg
@pytest.mark.skip(reason="Validator is one_off (generic error)")
def test_config_invalid_ports_format_validation(self):
for invalid_ports in self.INVALID_PORT_MAPPINGS:
with pytest.raises(ConfigurationError) as exc:
self.check_config({'ports': invalid_ports})
assert "Port ranges don't match in length" in exc.value.msg
def test_config_valid_ports_format_validation(self):
for valid_ports in self.VALID_SINGLE_PORTS + self.VALID_PORT_MAPPINGS:
self.check_config({'ports': valid_ports})
def test_config_invalid_expose_type_validation(self):
for invalid_expose in self.INVALID_PORTS_TYPES:
with pytest.raises(ConfigurationError) as exc:
self.check_config({'expose': invalid_expose})
assert "contains an invalid type" in exc.value.msg
def test_config_non_unique_expose_validation(self):
for invalid_expose in self.NON_UNIQUE_SINGLE_PORTS:
with pytest.raises(ConfigurationError) as exc:
self.check_config({'expose': invalid_expose})
assert "non-unique" in exc.value.msg
def test_config_invalid_expose_format_validation(self):
# Valid port mappings ARE NOT valid 'expose' entries
for invalid_expose in self.INVALID_PORT_MAPPINGS + self.VALID_PORT_MAPPINGS:
with pytest.raises(ConfigurationError) as exc:
self.check_config({'expose': invalid_expose})
assert "should be of the format" in exc.value.msg
def test_config_valid_expose_format_validation(self):
# Valid single ports ARE valid 'expose' entries
for valid_expose in self.VALID_SINGLE_PORTS:
self.check_config({'expose': valid_expose})
def check_config(self, cfg):
config.load(
build_config_details({
'version': '2.3',
'services': {
'web': dict(image='busybox', **cfg)
},
}, 'working_dir', 'filename.yml')
)
class SubnetTest(unittest.TestCase):
INVALID_SUBNET_TYPES = [
None,
False,
10,
]
INVALID_SUBNET_MAPPINGS = [
"",
"192.168.0.1/sdfsdfs",
"192.168.0.1/",
"192.168.0.1/33",
"192.168.0.1/01",
"192.168.0.1",
"fe80:0000:0000:0000:0204:61ff:fe9d:f156/sdfsdfs",
"fe80:0000:0000:0000:0204:61ff:fe9d:f156/",
"fe80:0000:0000:0000:0204:61ff:fe9d:f156/129",
"fe80:0000:0000:0000:0204:61ff:fe9d:f156/01",
"fe80:0000:0000:0000:0204:61ff:fe9d:f156",
"ge80:0000:0000:0000:0204:61ff:fe9d:f156/128",
"192.168.0.1/31/31",
]
VALID_SUBNET_MAPPINGS = [
"192.168.0.1/0",
"192.168.0.1/32",
"fe80:0000:0000:0000:0204:61ff:fe9d:f156/0",
"fe80:0000:0000:0000:0204:61ff:fe9d:f156/128",
"1:2:3:4:5:6:7:8/0",
"1::/0",
"1:2:3:4:5:6:7::/0",
"1::8/0",
"1:2:3:4:5:6::8/0",
"::/0",
"::8/0",
"::2:3:4:5:6:7:8/0",
"fe80::7:8%eth0/0",
"fe80::7:8%1/0",
"::255.255.255.255/0",
"::ffff:255.255.255.255/0",
"::ffff:0:255.255.255.255/0",
"2001:db8:3:4::192.0.2.33/0",
"64:ff9b::192.0.2.33/0",
]
def test_config_invalid_subnet_type_validation(self):
for invalid_subnet in self.INVALID_SUBNET_TYPES:
with pytest.raises(ConfigurationError) as exc:
self.check_config(invalid_subnet)
assert "contains an invalid type" in exc.value.msg
def test_config_invalid_subnet_format_validation(self):
for invalid_subnet in self.INVALID_SUBNET_MAPPINGS:
with pytest.raises(ConfigurationError) as exc:
self.check_config(invalid_subnet)
assert "should use the CIDR format" in exc.value.msg
def test_config_valid_subnet_format_validation(self):
for valid_subnet in self.VALID_SUBNET_MAPPINGS:
self.check_config(valid_subnet)
def check_config(self, subnet):
config.load(
build_config_details({
'version': '3.5',
'services': {
'web': {
'image': 'busybox'
}
},
'networks': {
'default': {
'ipam': {
'config': [
{
'subnet': subnet
}
],
'driver': 'default'
}
}
}
})
)
class InterpolationTest(unittest.TestCase):
@mock.patch.dict(os.environ)
def test_config_file_with_environment_file(self):
project_dir = 'tests/fixtures/default-env-file'
service_dicts = config.load(
config.find(
project_dir, None, Environment.from_env_file(project_dir)
)
).services
assert service_dicts[0] == {
'name': 'web',
'image': 'alpine:latest',
'ports': [
types.ServicePort.parse('5643')[0],
types.ServicePort.parse('9999')[0]
],
'command': 'true'
}
@mock.patch.dict(os.environ)
def test_config_file_with_options_environment_file(self):
project_dir = 'tests/fixtures/default-env-file'
# The env file path is resolved relative to the current working directory
env = Environment.from_env_file(project_dir, project_dir + '/.env2')
service_dicts = config.load(
config.find(
project_dir, None, env
)
).services
assert service_dicts[0] == {
'name': 'web',
'image': 'alpine:latest',
'ports': [
types.ServicePort.parse('5644')[0],
types.ServicePort.parse('9998')[0]
],
'command': 'false'
}
@mock.patch.dict(os.environ)
def test_config_file_with_environment_variable(self):
project_dir = 'tests/fixtures/environment-interpolation'
os.environ.update(
IMAGE="busybox",
HOST_PORT="80",
LABEL_VALUE="myvalue",
)
service_dicts = config.load(
config.find(
project_dir, None, Environment.from_env_file(project_dir)
)
).services
assert service_dicts == [
{
'name': 'web',
'image': 'busybox',
'ports': types.ServicePort.parse('80:8000'),
'labels': {'mylabel': 'myvalue'},
'hostname': 'host-',
'command': '${ESCAPED}',
}
]
@mock.patch.dict(os.environ)
def test_config_file_with_environment_variable_with_defaults(self):
project_dir = 'tests/fixtures/environment-interpolation-with-defaults'
os.environ.update(
IMAGE="busybox",
)
service_dicts = config.load(
config.find(
project_dir, None, Environment.from_env_file(project_dir)
)
).services
assert service_dicts == [
{
'name': 'web',
'image': 'busybox',
'ports': types.ServicePort.parse('80:8000'),
'hostname': 'host-',
}
]
@mock.patch.dict(os.environ)
def test_unset_variable_produces_warning(self):
os.environ.pop('FOO', None)
os.environ.pop('BAR', None)
config_details = build_config_details(
{
'web': {
'image': '${FOO}',
'command': '${BAR}',
'container_name': '${BAR}',
},
},
'.',
None,
)
with mock.patch('compose.config.environment.log') as log:
config.load(config_details)
assert 2 == log.warning.call_count
warnings = sorted(args[0][0] for args in log.warning.call_args_list)
assert 'BAR' in warnings[0]
assert 'FOO' in warnings[1]
@pytest.mark.skip(reason='compatibility mode was removed internally')
def test_compatibility_mode_warnings(self):
config_details = build_config_details({
'version': '3.5',
'services': {
'web': {
'deploy': {
'labels': ['abc=def'],
'endpoint_mode': 'dnsrr',
'update_config': {'max_failure_ratio': 0.4},
'placement': {'constraints': ['node.id==deadbeef']},
'resources': {
'reservations': {'cpus': '0.2'}
},
'restart_policy': {
'delay': '2s',
'window': '12s'
}
},
'image': 'busybox'
}
}
})
with mock.patch('compose.config.config.log') as log:
config.load(config_details, compatibility=True)
assert log.warning.call_count == 1
warn_message = log.warning.call_args[0][0]
assert warn_message.startswith(
'The following deploy sub-keys are not supported in compatibility mode'
)
assert 'labels' in warn_message
assert 'endpoint_mode' in warn_message
assert 'update_config' in warn_message
assert 'resources.reservations.cpus' in warn_message
assert 'restart_policy.delay' in warn_message
assert 'restart_policy.window' in warn_message
@pytest.mark.skip(reason='compatibility mode was removed internally')
def test_compatibility_mode_load(self):
config_details = build_config_details({
'version': '3.5',
'services': {
'foo': {
'image': 'alpine:3.10.1',
'deploy': {
'replicas': 3,
'restart_policy': {
'condition': 'any',
'max_attempts': 7,
},
'resources': {
'limits': {'memory': '300M', 'cpus': '0.7'},
'reservations': {'memory': '100M'},
},
},
'credential_spec': {
'file': 'spec.json'
},
},
},
})
with mock.patch('compose.config.config.log') as log:
cfg = config.load(config_details, compatibility=True)
assert log.warning.call_count == 0
service_dict = cfg.services[0]
assert service_dict == {
'image': 'alpine:3.10.1',
'scale': 3,
'restart': {'MaximumRetryCount': 7, 'Name': 'always'},
'mem_limit': '300M',
'mem_reservation': '100M',
'cpus': 0.7,
'name': 'foo',
'security_opt': ['credentialspec=file://spec.json'],
}
@mock.patch.dict(os.environ)
def test_invalid_interpolation(self):
with pytest.raises(config.ConfigurationError) as cm:
config.load(
build_config_details(
{'web': {'image': '${'}},
'working_dir',
'filename.yml'
)
)
assert 'Invalid' in cm.value.msg
assert 'for "image" option' in cm.value.msg
assert 'in service "web"' in cm.value.msg
assert '"${"' in cm.value.msg
@mock.patch.dict(os.environ)
def test_interpolation_secrets_section(self):
os.environ['FOO'] = 'baz.bar'
config_dict = config.load(build_config_details({
'version': '3.1',
'secrets': {
'secretdata': {
'external': {'name': '$FOO'}
}
}
}))
assert config_dict.secrets == {
'secretdata': {
'external': {'name': 'baz.bar'},
'name': 'baz.bar'
}
}
@mock.patch.dict(os.environ)
def test_interpolation_configs_section(self):
os.environ['FOO'] = 'baz.bar'
config_dict = config.load(build_config_details({
'version': '3.3',
'configs': {
'configdata': {
'external': {'name': '$FOO'}
}
}
}))
assert config_dict.configs == {
'configdata': {
'external': {'name': 'baz.bar'},
'name': 'baz.bar'
}
}
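# Volume path handling: environment interpolation, '~' expansion, and
# relative-path resolution against the service's working directory.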
class VolumeConfigTest(unittest.TestCase):
def test_no_binding(self):
d = make_service_dict('foo', {'build': '.', 'volumes': ['/data']}, working_dir='.')
assert d['volumes'] == ['/data']
@mock.patch.dict(os.environ)
def test_volume_binding_with_environment_variable(self):
os.environ['VOLUME_PATH'] = '/host/path'
d = config.load(
build_config_details(
{'foo': {'build': '.', 'volumes': ['${VOLUME_PATH}:/container/path']}},
'.',
None,
)
).services[0]
assert d['volumes'] == [VolumeSpec.parse('/host/path:/container/path')]
@pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason='posix paths')
def test_volumes_order_is_preserved(self):
volumes = ['/{0}:/{0}'.format(i) for i in range(6)]
shuffle(volumes)
cfg = make_service_dict('foo', {'build': '.', 'volumes': volumes})
assert cfg['volumes'] == volumes
@pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason='posix paths')
@mock.patch.dict(os.environ)
def test_volume_binding_with_home(self):
os.environ['HOME'] = '/home/user'
d = make_service_dict('foo', {'build': '.', 'volumes': ['~:/container/path']}, working_dir='.')
assert d['volumes'] == ['/home/user:/container/path']
def test_name_does_not_expand(self):
d = make_service_dict('foo', {'build': '.', 'volumes': ['mydatavolume:/data']}, working_dir='.')
assert d['volumes'] == ['mydatavolume:/data']
def test_absolute_posix_path_does_not_expand(self):
d = make_service_dict('foo', {'build': '.', 'volumes': ['/var/lib/data:/data']}, working_dir='.')
assert d['volumes'] == ['/var/lib/data:/data']
def test_absolute_windows_path_does_not_expand(self):
d = make_service_dict('foo', {'build': '.', 'volumes': ['c:\\data:/data']}, working_dir='.')
assert d['volumes'] == ['c:\\data:/data']
@pytest.mark.skipif(IS_WINDOWS_PLATFORM, reason='posix paths')
def test_relative_path_does_expand_posix(self):
d = make_service_dict(
'foo',
{'build': '.', 'volumes': ['./data:/data']},
working_dir='/home/me/myproject')
assert d['volumes'] == ['/home/me/myproject/data:/data']
d = make_service_dict(
'foo',
{'build': '.', 'volumes': ['.:/data']},
working_dir='/home/me/myproject')
assert d['volumes'] == ['/home/me/myproject:/data']
d = make_service_dict(
'foo',
{'build': '.', 'volumes': ['../otherproject:/data']},
working_dir='/home/me/myproject')
assert d['volumes'] == ['/home/me/otherproject:/data']
@pytest.mark.skipif(not IS_WINDOWS_PLATFORM, reason='windows paths')
def test_relative_path_does_expand_windows(self):
d = make_service_dict(
'foo',
{'build': '.', 'volumes': ['./data:/data']},
working_dir='c:\\Users\\me\\myproject')
assert d['volumes'] == ['c:\\Users\\me\\myproject\\data:/data']
d = make_service_dict(
'foo',
{'build': '.', 'volumes': ['.:/data']},
working_dir='c:\\Users\\me\\myproject')
assert d['volumes'] == ['c:\\Users\\me\\myproject:/data']
d = make_service_dict(
'foo',
{'build': '.', 'volumes': ['../otherproject:/data']},
working_dir='c:\\Users\\me\\myproject')
assert d['volumes'] == ['c:\\Users\\me\\otherproject:/data']
@mock.patch.dict(os.environ)
def test_home_directory_with_driver_does_not_expand(self):
os.environ['NAME'] = 'surprise!'
d = make_service_dict('foo', {
'build': '.',
'volumes': ['~:/data'],
'volume_driver': 'foodriver',
}, working_dir='.')
assert d['volumes'] == ['~:/data']
def test_volume_path_with_non_ascii_directory(self):
volume = '/Füü/data:/data'
container_path = config.resolve_volume_path(".", volume)
assert container_path == volume
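# Mixin parameterized by `config_name`; the concrete subclasses below bind
# it to 'volumes' and 'devices' so both share the same merge expectations.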
class MergePathMappingTest:
config_name = ""
def test_empty(self):
service_dict = config.merge_service_dicts({}, {}, DEFAULT_VERSION)
assert self.config_name not in service_dict
def test_no_override(self):
service_dict = config.merge_service_dicts(
{self.config_name: ['/foo:/code', '/data']},
{},
DEFAULT_VERSION)
assert set(service_dict[self.config_name]) == {'/foo:/code', '/data'}
def test_no_base(self):
service_dict = config.merge_service_dicts(
{},
{self.config_name: ['/bar:/code']},
DEFAULT_VERSION)
assert set(service_dict[self.config_name]) == {'/bar:/code'}
def test_override_explicit_path(self):
service_dict = config.merge_service_dicts(
{self.config_name: ['/foo:/code', '/data']},
{self.config_name: ['/bar:/code']},
DEFAULT_VERSION)
assert set(service_dict[self.config_name]) == {'/bar:/code', '/data'}
def test_add_explicit_path(self):
service_dict = config.merge_service_dicts(
{self.config_name: ['/foo:/code', '/data']},
{self.config_name: ['/bar:/code', '/quux:/data']},
DEFAULT_VERSION)
assert set(service_dict[self.config_name]) == {'/bar:/code', '/quux:/data'}
def test_remove_explicit_path(self):
service_dict = config.merge_service_dicts(
{self.config_name: ['/foo:/code', '/quux:/data']},
{self.config_name: ['/bar:/code', '/data']},
DEFAULT_VERSION)
assert set(service_dict[self.config_name]) == {'/bar:/code', '/data'}
class MergeVolumesTest(unittest.TestCase, MergePathMappingTest):
config_name = 'volumes'
class MergeDevicesTest(unittest.TestCase, MergePathMappingTest):
config_name = 'devices'
class BuildOrImageMergeTest(unittest.TestCase):
def test_merge_build_or_image_no_override(self):
assert config.merge_service_dicts({'build': '.'}, {}, V1) == {'build': '.'}
assert config.merge_service_dicts({'image': 'redis'}, {}, V1) == {'image': 'redis'}
def test_merge_build_or_image_override_with_same(self):
assert config.merge_service_dicts({'build': '.'}, {'build': './web'}, V1) == {'build': './web'}
assert config.merge_service_dicts({'image': 'redis'}, {'image': 'postgres'}, V1) == {
'image': 'postgres'
}
def test_merge_build_or_image_override_with_other(self):
assert config.merge_service_dicts({'build': '.'}, {'image': 'redis'}, V1) == {
'image': 'redis'
}
assert config.merge_service_dicts({'image': 'redis'}, {'build': '.'}, V1) == {'build': '.'}
class MergeListsTest:
config_name = ""
base_config = []
override_config = []
def merged_config(self):
return set(self.base_config) | set(self.override_config)
def test_empty(self):
assert self.config_name not in config.merge_service_dicts({}, {}, DEFAULT_VERSION)
def test_no_override(self):
service_dict = config.merge_service_dicts(
{self.config_name: self.base_config},
{},
DEFAULT_VERSION)
assert set(service_dict[self.config_name]) == set(self.base_config)
def test_no_base(self):
service_dict = config.merge_service_dicts(
{},
{self.config_name: self.base_config},
DEFAULT_VERSION)
assert set(service_dict[self.config_name]) == set(self.base_config)
def test_add_item(self):
service_dict = config.merge_service_dicts(
{self.config_name: self.base_config},
{self.config_name: self.override_config},
DEFAULT_VERSION)
assert set(service_dict[self.config_name]) == set(self.merged_config())
class MergePortsTest(unittest.TestCase, MergeListsTest):
config_name = 'ports'
base_config = ['10:8000', '9000']
override_config = ['20:8000']
def merged_config(self):
return self.convert(self.base_config) | self.convert(self.override_config)
def convert(self, port_config):
return set(config.merge_service_dicts(
{self.config_name: port_config},
{self.config_name: []},
DEFAULT_VERSION
)[self.config_name])
def test_duplicate_port_mappings(self):
service_dict = config.merge_service_dicts(
{self.config_name: self.base_config},
{self.config_name: self.base_config},
DEFAULT_VERSION
)
assert set(service_dict[self.config_name]) == self.convert(self.base_config)
def test_no_override(self):
service_dict = config.merge_service_dicts(
{self.config_name: self.base_config},
{},
DEFAULT_VERSION)
assert set(service_dict[self.config_name]) == self.convert(self.base_config)
def test_no_base(self):
service_dict = config.merge_service_dicts(
{},
{self.config_name: self.base_config},
DEFAULT_VERSION)
assert set(service_dict[self.config_name]) == self.convert(self.base_config)
class MergeNetworksTest(unittest.TestCase, MergeListsTest):
config_name = 'networks'
base_config = {'default': {'aliases': ['foo.bar', 'foo.baz']}}
override_config = {'default': {'ipv4_address': '123.234.123.234'}}
def test_no_network_overrides(self):
service_dict = config.merge_service_dicts(
{self.config_name: self.base_config},
{self.config_name: self.override_config},
DEFAULT_VERSION)
assert service_dict[self.config_name] == {
'default': {
'aliases': ['foo.bar', 'foo.baz'],
'ipv4_address': '123.234.123.234'
}
}
def test_network_has_none_value(self):
service_dict = config.merge_service_dicts(
{self.config_name: {
'default': None
}},
{self.config_name: {
'default': {
'aliases': []
}
}},
DEFAULT_VERSION)
assert service_dict[self.config_name] == {
'default': {
'aliases': []
}
}
def test_all_properties(self):
service_dict = config.merge_service_dicts(
{self.config_name: {
'default': {
'aliases': ['foo.bar', 'foo.baz'],
'link_local_ips': ['192.168.1.10', '192.168.1.11'],
'ipv4_address': '111.111.111.111',
'ipv6_address': 'FE80:CD00:0000:0CDE:1257:0000:211E:729C-first'
}
}},
{self.config_name: {
'default': {
'aliases': ['foo.baz', 'foo.baz2'],
'link_local_ips': ['192.168.1.11', '192.168.1.12'],
'ipv4_address': '123.234.123.234',
'ipv6_address': 'FE80:CD00:0000:0CDE:1257:0000:211E:729C-second'
}
}},
DEFAULT_VERSION)
assert service_dict[self.config_name] == {
'default': {
'aliases': ['foo.bar', 'foo.baz', 'foo.baz2'],
'link_local_ips': ['192.168.1.10', '192.168.1.11', '192.168.1.12'],
'ipv4_address': '123.234.123.234',
'ipv6_address': 'FE80:CD00:0000:0CDE:1257:0000:211E:729C-second'
}
}
def test_no_network_name_overrides(self):
service_dict = config.merge_service_dicts(
{
self.config_name: {
'default': {
'aliases': ['foo.bar', 'foo.baz'],
'ipv4_address': '123.234.123.234'
}
}
},
{
self.config_name: {
'another_network': {
'ipv4_address': '123.234.123.234'
}
}
},
DEFAULT_VERSION)
assert service_dict[self.config_name] == {
'default': {
'aliases': ['foo.bar', 'foo.baz'],
'ipv4_address': '123.234.123.234'
},
'another_network': {
'ipv4_address': '123.234.123.234'
}
}
class MergeStringsOrListsTest(unittest.TestCase):
def test_no_override(self):
service_dict = config.merge_service_dicts(
{'dns': '8.8.8.8'},
{},
DEFAULT_VERSION)
assert set(service_dict['dns']) == {'8.8.8.8'}
def test_no_base(self):
service_dict = config.merge_service_dicts(
{},
{'dns': '8.8.8.8'},
DEFAULT_VERSION)
assert set(service_dict['dns']) == {'8.8.8.8'}
def test_add_string(self):
service_dict = config.merge_service_dicts(
{'dns': ['8.8.8.8']},
{'dns': '9.9.9.9'},
DEFAULT_VERSION)
assert set(service_dict['dns']) == {'8.8.8.8', '9.9.9.9'}
def test_add_list(self):
service_dict = config.merge_service_dicts(
{'dns': '8.8.8.8'},
{'dns': ['9.9.9.9']},
DEFAULT_VERSION)
assert set(service_dict['dns']) == {'8.8.8.8', '9.9.9.9'}
class MergeLabelsTest(unittest.TestCase):
def test_empty(self):
assert 'labels' not in config.merge_service_dicts({}, {}, DEFAULT_VERSION)
def test_no_override(self):
service_dict = config.merge_service_dicts(
make_service_dict('foo', {'build': '.', 'labels': ['foo=1', 'bar']}, 'tests/'),
make_service_dict('foo', {'build': '.'}, 'tests/'),
DEFAULT_VERSION)
assert service_dict['labels'] == {'foo': '1', 'bar': ''}
def test_no_base(self):
service_dict = config.merge_service_dicts(
make_service_dict('foo', {'build': '.'}, 'tests/'),
make_service_dict('foo', {'build': '.', 'labels': ['foo=2']}, 'tests/'),
DEFAULT_VERSION)
assert service_dict['labels'] == {'foo': '2'}
def test_override_explicit_value(self):
service_dict = config.merge_service_dicts(
make_service_dict('foo', {'build': '.', 'labels': ['foo=1', 'bar']}, 'tests/'),
make_service_dict('foo', {'build': '.', 'labels': ['foo=2']}, 'tests/'),
DEFAULT_VERSION)
assert service_dict['labels'] == {'foo': '2', 'bar': ''}
def test_add_explicit_value(self):
service_dict = config.merge_service_dicts(
make_service_dict('foo', {'build': '.', 'labels': ['foo=1', 'bar']}, 'tests/'),
make_service_dict('foo', {'build': '.', 'labels': ['bar=2']}, 'tests/'),
DEFAULT_VERSION)
assert service_dict['labels'] == {'foo': '1', 'bar': '2'}
def test_remove_explicit_value(self):
service_dict = config.merge_service_dicts(
make_service_dict('foo', {'build': '.', 'labels': ['foo=1', 'bar=2']}, 'tests/'),
make_service_dict('foo', {'build': '.', 'labels': ['bar']}, 'tests/'),
DEFAULT_VERSION)
assert service_dict['labels'] == {'foo': '1', 'bar': ''}
class MergeBuildTest(unittest.TestCase):
def test_full(self):
base = {
'context': '.',
'dockerfile': 'Dockerfile',
'args': {
'x': '1',
'y': '2',
},
'cache_from': ['ubuntu'],
'labels': ['com.docker.compose.test=true']
}
override = {
'context': './prod',
'dockerfile': 'Dockerfile.prod',
'args': ['x=12'],
'cache_from': ['debian'],
'labels': {
'com.docker.compose.test': 'false',
'com.docker.compose.prod': 'true',
}
}
result = config.merge_build(None, {'build': base}, {'build': override})
assert result['context'] == override['context']
assert result['dockerfile'] == override['dockerfile']
assert result['args'] == {'x': '12', 'y': '2'}
assert set(result['cache_from']) == {'ubuntu', 'debian'}
assert result['labels'] == override['labels']
def test_empty_override(self):
base = {
'context': '.',
'dockerfile': 'Dockerfile',
'args': {
'x': '1',
'y': '2',
},
'cache_from': ['ubuntu'],
'labels': {
'com.docker.compose.test': 'true'
}
}
override = {}
result = config.merge_build(None, {'build': base}, {'build': override})
assert result == base
def test_empty_base(self):
base = {}
override = {
'context': './prod',
'dockerfile': 'Dockerfile.prod',
'args': {'x': '12'},
'cache_from': ['debian'],
'labels': {
'com.docker.compose.test': 'false',
'com.docker.compose.prod': 'true',
}
}
result = config.merge_build(None, {'build': base}, {'build': override})
assert result == override
class MemoryOptionsTest(unittest.TestCase):
def test_validation_fails_with_just_memswap_limit(self):
"""
Setting 'memswap_limit' is invalid config unless 'mem_limit' is
also set.
"""
with pytest.raises(ConfigurationError) as excinfo:
config.load(
build_config_details(
{
'foo': {'image': 'busybox', 'memswap_limit': 2000000},
},
'tests/fixtures/extends',
'filename.yml'
)
)
assert "foo.memswap_limit is invalid: when defining " \
"'memswap_limit' you must set 'mem_limit' as well" \
in excinfo.exconly()
def test_validation_with_correct_memswap_values(self):
service_dict = config.load(
build_config_details(
{'foo': {'image': 'busybox', 'mem_limit': 1000000, 'memswap_limit': 2000000}},
'tests/fixtures/extends',
'common.yml'
)
).services
assert service_dict[0]['memswap_limit'] == 2000000
def test_memswap_can_be_a_string(self):
service_dict = config.load(
build_config_details(
{'foo': {'image': 'busybox', 'mem_limit': "1G", 'memswap_limit': "512M"}},
'tests/fixtures/extends',
'common.yml'
)
).services
assert service_dict[0]['memswap_limit'] == "512M"
class EnvTest(unittest.TestCase):
def test_parse_environment_as_list(self):
environment = [
'NORMAL=F1',
'CONTAINS_EQUALS=F=2',
'TRAILING_EQUALS=',
]
assert config.parse_environment(environment) == {
'NORMAL': 'F1', 'CONTAINS_EQUALS': 'F=2', 'TRAILING_EQUALS': ''
}
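# A minimal sketch (assumption) of how each list entry above is parsed:
# split on the first '=', so values may themselves contain '=' and a
# trailing '=' yields the empty string, while a bare name with no '=' is
# left to resolve from the host environment (None):
#     'CONTAINS_EQUALS=F=2'.partition('=')[::2]  -> ('CONTAINS_EQUALS', 'F=2')
#     'TRAILING_EQUALS='.partition('=')[::2]     -> ('TRAILING_EQUALS', '')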
def test_parse_environment_as_dict(self):
environment = {
'NORMAL': 'F1',
'CONTAINS_EQUALS': 'F=2',
'TRAILING_EQUALS': None,
}
assert config.parse_environment(environment) == environment
def test_parse_environment_invalid(self):
with pytest.raises(ConfigurationError):
config.parse_environment('a=b')
def test_parse_environment_empty(self):
assert config.parse_environment(None) == {}
@mock.patch.dict(os.environ)
def test_resolve_environment(self):
os.environ['FILE_DEF'] = 'E1'
os.environ['FILE_DEF_EMPTY'] = 'E2'
os.environ['ENV_DEF'] = 'E3'
service_dict = {
'build': '.',
'environment': {
'FILE_DEF': 'F1',
'FILE_DEF_EMPTY': '',
'ENV_DEF': None,
'NO_DEF': None
},
}
assert resolve_environment(
service_dict, Environment.from_env_file(None)
) == {'FILE_DEF': 'F1', 'FILE_DEF_EMPTY': '', 'ENV_DEF': 'E3', 'NO_DEF': None}
def test_resolve_environment_from_env_file(self):
assert resolve_environment({'env_file': ['tests/fixtures/env/one.env']}) == {
'ONE': '2', 'TWO': '1', 'THREE': '3', 'FOO': 'bar'
}
def test_environment_overrides_env_file(self):
assert resolve_environment({
'environment': {'FOO': 'baz'},
'env_file': ['tests/fixtures/env/one.env'],
}) == {'ONE': '2', 'TWO': '1', 'THREE': '3', 'FOO': 'baz'}
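# Precedence sketch (illustration, not compose's code): env_file values form
# the base and the 'environment' mapping is applied on top, so the explicit
# 'FOO': 'baz' above wins over FOO=bar from one.env. With multiple env_files
# (next test), later files override earlier ones before 'environment' applies:
#     merged = dict(env_file_values)
#     merged.update(environment_values)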
def test_resolve_environment_with_multiple_env_files(self):
service_dict = {
'env_file': [
'tests/fixtures/env/one.env',
'tests/fixtures/env/two.env'
]
}
assert resolve_environment(service_dict) == {
'ONE': '2', 'TWO': '1', 'THREE': '3', 'FOO': 'baz', 'DOO': 'dah'
}
def test_resolve_environment_nonexistent_file(self):
with pytest.raises(ConfigurationError) as exc:
config.load(build_config_details(
{'foo': {'image': 'example', 'env_file': 'nonexistent.env'}},
working_dir='tests/fixtures/env'))
assert 'Couldn\'t find env file' in exc.exconly()
assert 'nonexistent.env' in exc.exconly()
@mock.patch.dict(os.environ)
def test_resolve_environment_from_env_file_with_empty_values(self):
os.environ['FILE_DEF'] = 'E1'
os.environ['FILE_DEF_EMPTY'] = 'E2'
os.environ['ENV_DEF'] = 'E3'
assert resolve_environment(
{'env_file': ['tests/fixtures/env/resolve.env']},
Environment.from_env_file(None)
) == {
'FILE_DEF': 'bär',
'FILE_DEF_EMPTY': '',
'ENV_DEF': 'E3',
'NO_DEF': None
}
@mock.patch.dict(os.environ)
def test_resolve_build_args(self):
os.environ['env_arg'] = 'value2'
build = {
'context': '.',
'args': {
'arg1': 'value1',
'empty_arg': '',
'env_arg': None,
'no_env': None
}
}
assert resolve_build_args(build['args'], Environment.from_env_file(build['context'])) == {
'arg1': 'value1', 'empty_arg': '', 'env_arg': 'value2', 'no_env': None
}
@pytest.mark.xfail(IS_WINDOWS_PLATFORM, reason='paths use slash')
@mock.patch.dict(os.environ)
def test_resolve_path(self):
os.environ['HOSTENV'] = '/tmp'
os.environ['CONTAINERENV'] = '/host/tmp'
service_dict = config.load(
build_config_details(
{'services': {
'foo': {'build': '.', 'volumes': ['$HOSTENV:$CONTAINERENV']}}},
"tests/fixtures/env",
)
).services[0]
assert set(service_dict['volumes']) == {VolumeSpec.parse('/tmp:/host/tmp')}
service_dict = config.load(
build_config_details(
{'services': {
'foo': {'build': '.', 'volumes': ['/opt${HOSTENV}:/opt${CONTAINERENV}']}}},
"tests/fixtures/env",
)
).services[0]
assert set(service_dict['volumes']) == {VolumeSpec.parse('/opt/tmp:/opt/host/tmp')}
def load_from_filename(filename, override_dir=None):
return config.load(
config.find('.', [filename], Environment.from_env_file('.'), override_dir=override_dir)
).services
class ExtendsTest(unittest.TestCase):
def test_extends(self):
service_dicts = load_from_filename('tests/fixtures/extends/docker-compose.yml')
assert service_sort(service_dicts) == service_sort([
{
'name': 'mydb',
'image': 'busybox',
'command': 'top',
},
{
'name': 'myweb',
'image': 'busybox',
'command': 'top',
'network_mode': 'bridge',
'links': ['mydb:db'],
'environment': {
"FOO": "1",
"BAR": "2",
"BAZ": "2",
},
}
])
def test_merging_env_labels_ulimits(self):
service_dicts = load_from_filename('tests/fixtures/extends/common-env-labels-ulimits.yml')
assert service_sort(service_dicts) == service_sort([
{
'name': 'web',
'image': 'busybox',
'command': '/bin/true',
'network_mode': 'host',
'environment': {
"FOO": "2",
"BAR": "1",
"BAZ": "3",
},
'labels': {'label': 'one'},
'ulimits': {'nproc': 65535, 'memlock': {'soft': 1024, 'hard': 2048}}
}
])
def test_nested(self):
service_dicts = load_from_filename('tests/fixtures/extends/nested.yml')
assert service_dicts == [
{
'name': 'myweb',
'image': 'busybox',
'command': '/bin/true',
'network_mode': 'host',
'environment': {
"FOO": "2",
"BAR": "2",
},
},
]
def test_self_referencing_file(self):
"""
We specify a 'file' key that points to the file we're already in.
"""
service_dicts = load_from_filename('tests/fixtures/extends/specify-file-as-self.yml')
assert service_sort(service_dicts) == service_sort([
{
'environment':
{
'YEP': '1', 'BAR': '1', 'BAZ': '3'
},
'image': 'busybox',
'name': 'myweb'
},
{
'environment':
{'YEP': '1'},
'image': 'busybox',
'name': 'otherweb'
},
{
'environment':
{'YEP': '1', 'BAZ': '3'},
'image': 'busybox',
'name': 'web'
}
])
def test_circular(self):
with pytest.raises(config.CircularReference) as exc:
load_from_filename('tests/fixtures/extends/circle-1.yml')
path = [
(os.path.basename(filename), service_name)
for (filename, service_name) in exc.value.trail
]
expected = [
('circle-1.yml', 'web'),
('circle-2.yml', 'other'),
('circle-1.yml', 'web'),
]
assert path == expected
def test_extends_validation_empty_dictionary(self):
with pytest.raises(ConfigurationError) as excinfo:
config.load(
build_config_details(
{
'version': '3',
'services':
{
'web': {'image': 'busybox', 'extends': {}},
}
},
'tests/fixtures/extends',
'filename.yml'
)
)
assert 'service' in excinfo.exconly()
def test_extends_validation_missing_service_key(self):
with pytest.raises(ConfigurationError) as excinfo:
config.load(
build_config_details(
{
'version': '3',
'services':
{
'web': {
'image': 'busybox',
'extends': {'file': 'common.yml'}
}
}
},
'tests/fixtures/extends',
'filename.yml'
)
)
assert "'service' is a required property" in excinfo.exconly()
def test_extends_validation_invalid_key(self):
with pytest.raises(ConfigurationError) as excinfo:
config.load(
build_config_details(
{
'version': '3',
'services':
{
'web': {
'image': 'busybox',
'extends': {
'file': 'common.yml',
'service': 'web',
'rogue_key': 'is not allowed'
}
},
}
},
'tests/fixtures/extends',
'filename.yml'
)
)
assert "web.extends contains unsupported option: 'rogue_key'" \
in excinfo.exconly()
def test_extends_validation_sub_property_key(self):
with pytest.raises(ConfigurationError) as excinfo:
config.load(
build_config_details(
{
'version': '3',
'services': {
'web': {
'image': 'busybox',
'extends': {
'file': 1,
'service': 'web',
}
}
},
},
'tests/fixtures/extends',
'filename.yml'
)
)
assert "web.extends.file contains 1, which is an invalid type, it should be a string" \
in excinfo.exconly()
def test_extends_validation_no_file_key_no_filename_set(self):
dictionary = {'extends': {'service': 'web'}}
with pytest.raises(ConfigurationError) as excinfo:
make_service_dict('myweb', dictionary, working_dir='tests/fixtures/extends')
assert 'file' in excinfo.exconly()
def test_extends_validation_valid_config(self):
service = config.load(
build_config_details(
{
'web': {'image': 'busybox', 'extends': {'service': 'web', 'file': 'common.yml'}},
},
'tests/fixtures/extends',
'common.yml'
)
).services
assert len(service) == 1
assert isinstance(service[0], dict)
assert service[0]['command'] == "/bin/true"
def test_extended_service_with_invalid_config(self):
with pytest.raises(ConfigurationError) as exc:
load_from_filename('tests/fixtures/extends/service-with-invalid-schema.yml')
assert (
"myweb has neither an image nor a build context specified" in
exc.exconly()
)
def test_extended_service_with_valid_config(self):
service = load_from_filename('tests/fixtures/extends/service-with-valid-composite-extends.yml')
assert service[0]['command'] == "top"
def test_extends_file_defaults_to_self(self):
"""
Test that, when no file is specified in the extends options, the
config is valid and correctly extends from itself.
"""
service_dicts = load_from_filename('tests/fixtures/extends/no-file-specified.yml')
assert service_sort(service_dicts) == service_sort([
{
'name': 'myweb',
'image': 'busybox',
'environment': {
"BAR": "1",
"BAZ": "3",
}
},
{
'name': 'web',
'image': 'busybox',
'environment': {
"BAZ": "3",
}
}
])
def test_invalid_links_in_extended_service(self):
with pytest.raises(ConfigurationError) as excinfo:
load_from_filename('tests/fixtures/extends/invalid-links.yml')
assert "services with 'links' cannot be extended" in excinfo.exconly()
def test_invalid_volumes_from_in_extended_service(self):
with pytest.raises(ConfigurationError) as excinfo:
load_from_filename('tests/fixtures/extends/invalid-volumes.yml')
assert "services with 'volumes_from' cannot be extended" in excinfo.exconly()
def test_invalid_net_in_extended_service(self):
with pytest.raises(ConfigurationError) as excinfo:
load_from_filename('tests/fixtures/extends/invalid-net-v2.yml')
assert 'network_mode: service' in excinfo.exconly()
assert 'cannot be extended' in excinfo.exconly()
with pytest.raises(ConfigurationError) as excinfo:
load_from_filename('tests/fixtures/extends/invalid-net.yml')
assert 'net: container' in excinfo.exconly()
assert 'cannot be extended' in excinfo.exconly()
@mock.patch.dict(os.environ)
def test_load_config_runs_interpolation_in_extended_service(self):
os.environ.update(HOSTNAME_VALUE="penguin")
expected_interpolated_value = "host-penguin"
service_dicts = load_from_filename(
'tests/fixtures/extends/valid-interpolation.yml')
for service in service_dicts:
assert service['hostname'] == expected_interpolated_value
@pytest.mark.xfail(IS_WINDOWS_PLATFORM, reason='paths use slash')
def test_volume_path(self):
dicts = load_from_filename('tests/fixtures/volume-path/docker-compose.yml')
paths = [
VolumeSpec(
os.path.abspath('tests/fixtures/volume-path/common/foo'),
'/foo',
'rw'),
VolumeSpec(
os.path.abspath('tests/fixtures/volume-path/bar'),
'/bar',
'rw')
]
assert set(dicts[0]['volumes']) == set(paths)
def test_parent_build_path_dne(self):
child = load_from_filename('tests/fixtures/extends/nonexistent-path-child.yml')
assert child == [
{
'name': 'dnechild',
'image': 'busybox',
'command': '/bin/true',
'environment': {
"FOO": "1",
"BAR": "2",
},
},
]
def test_load_throws_error_when_base_service_does_not_exist(self):
with pytest.raises(ConfigurationError) as excinfo:
load_from_filename('tests/fixtures/extends/nonexistent-service.yml')
assert "Cannot extend service 'foo'" in excinfo.exconly()
assert "Service not found" in excinfo.exconly()
def test_partial_service_config_in_extends_is_still_valid(self):
dicts = load_from_filename('tests/fixtures/extends/valid-common-config.yml')
assert dicts[0]['environment'] == {'FOO': '1'}
def test_extended_service_with_verbose_and_shorthand_way(self):
services = load_from_filename('tests/fixtures/extends/verbose-and-shorthand.yml')
assert service_sort(services) == service_sort([
{
'name': 'base',
'image': 'busybox',
'environment': {'BAR': '1'},
},
{
'name': 'verbose',
'image': 'busybox',
'environment': {'BAR': '1', 'FOO': '1'},
},
{
'name': 'shorthand',
'image': 'busybox',
'environment': {'BAR': '1', 'FOO': '2'},
},
])
@mock.patch.dict(os.environ)
def test_extends_with_environment_and_env_files(self):
tmpdir = tempfile.mkdtemp('test_extends_with_environment')
self.addCleanup(shutil.rmtree, tmpdir)
commondir = os.path.join(tmpdir, 'common')
os.mkdir(commondir)
with open(os.path.join(commondir, 'base.yml'), mode="w") as base_fh:
base_fh.write("""
app:
image: 'example/app'
env_file:
- 'envs'
environment:
- SECRET
- TEST_ONE=common
- TEST_TWO=common
""")
with open(os.path.join(tmpdir, 'docker-compose.yml'), mode="w") as docker_compose_fh:
docker_compose_fh.write("""
ext:
extends:
file: common/base.yml
service: app
env_file:
- 'envs'
environment:
- THING
- TEST_ONE=top
""")
with open(os.path.join(commondir, 'envs'), mode="w") as envs_fh:
envs_fh.write("""
COMMON_ENV_FILE
TEST_ONE=common-env-file
TEST_TWO=common-env-file
TEST_THREE=common-env-file
TEST_FOUR=common-env-file
""")
with open(os.path.join(tmpdir, 'envs'), mode="w") as envs_fh:
envs_fh.write("""
TOP_ENV_FILE
TEST_ONE=top-env-file
TEST_TWO=top-env-file
TEST_THREE=top-env-file
""")
expected = [
{
'name': 'ext',
'image': 'example/app',
'environment': {
'SECRET': 'secret',
'TOP_ENV_FILE': 'secret',
'COMMON_ENV_FILE': 'secret',
'THING': 'thing',
'TEST_ONE': 'top',
'TEST_TWO': 'common',
'TEST_THREE': 'top-env-file',
'TEST_FOUR': 'common-env-file',
},
},
]
os.environ['SECRET'] = 'secret'
os.environ['THING'] = 'thing'
os.environ['COMMON_ENV_FILE'] = 'secret'
os.environ['TOP_ENV_FILE'] = 'secret'
config = load_from_filename(str(os.path.join(tmpdir, 'docker-compose.yml')))
assert config == expected
def test_extends_with_mixed_versions_is_error(self):
tmpdir = tempfile.mkdtemp('test_extends_with_mixed_version')
self.addCleanup(shutil.rmtree, tmpdir)
with open(os.path.join(tmpdir, 'docker-compose.yml'), mode="w") as docker_compose_fh:
docker_compose_fh.write("""
version: "2"
services:
web:
extends:
file: base.yml
service: base
image: busybox
""")
with open(os.path.join(tmpdir, 'base.yml'), mode="w") as base_fh:
base_fh.write("""
base:
volumes: ['/foo']
ports: ['3000:3000']
""")
with pytest.raises(ConfigurationError) as exc:
load_from_filename(str(os.path.join(tmpdir, 'docker-compose.yml')))
assert 'Version mismatch' in exc.exconly()
def test_extends_with_defined_version_passes(self):
tmpdir = tempfile.mkdtemp('test_extends_with_defined_version')
self.addCleanup(shutil.rmtree, tmpdir)
with open(os.path.join(tmpdir, 'docker-compose.yml'), mode="w") as docker_compose_fh:
docker_compose_fh.write("""
version: "2"
services:
web:
extends:
file: base.yml
service: base
image: busybox
""")
with open(os.path.join(tmpdir, 'base.yml'), mode="w") as base_fh:
base_fh.write("""
version: "2"
services:
base:
volumes: ['/foo']
ports: ['3000:3000']
command: top
""")
service = load_from_filename(str(os.path.join(tmpdir, 'docker-compose.yml')))
assert service[0]['command'] == "top"
def test_extends_with_depends_on(self):
tmpdir = tempfile.mkdtemp('test_extends_with_depends_on')
self.addCleanup(shutil.rmtree, tmpdir)
with open(os.path.join(tmpdir, 'docker-compose.yml'), mode="w") as docker_compose_fh:
docker_compose_fh.write("""
version: "2"
services:
base:
image: example
web:
extends: base
image: busybox
depends_on: ['other']
other:
image: example
""")
services = load_from_filename(str(os.path.join(tmpdir, 'docker-compose.yml')))
assert service_sort(services)[2]['depends_on'] == {
'other': {'condition': 'service_started'}
}
def test_extends_with_healthcheck(self):
service_dicts = load_from_filename('tests/fixtures/extends/healthcheck-2.yml')
assert service_sort(service_dicts) == [{
'name': 'demo',
'image': 'foobar:latest',
'healthcheck': {
'test': ['CMD', '/health.sh'],
'interval': 10000000000,
'timeout': 5000000000,
'retries': 36,
}
}]
def test_extends_with_ports(self):
tmpdir = tempfile.mkdtemp('test_extends_with_ports')
self.addCleanup(shutil.rmtree, tmpdir)
with open(os.path.join(tmpdir, 'docker-compose.yml'), mode="w") as docker_compose_fh:
docker_compose_fh.write("""
version: '2'
services:
a:
image: nginx
ports:
- 80
b:
extends:
service: a
""")
services = load_from_filename(str(os.path.join(tmpdir, 'docker-compose.yml')))
assert len(services) == 2
for svc in services:
assert svc['ports'] == [types.ServicePort('80', None, None, None, None)]
def test_extends_with_security_opt(self):
tmpdir = tempfile.mkdtemp('test_extends_with_security_opt')
self.addCleanup(shutil.rmtree, tmpdir)
with open(os.path.join(tmpdir, 'docker-compose.yml'), mode="w") as docker_compose_fh:
docker_compose_fh.write("""
version: '2'
services:
a:
image: nginx
security_opt:
- apparmor:unconfined
- seccomp:unconfined
b:
extends:
service: a
""")
services = load_from_filename(str(os.path.join(tmpdir, 'docker-compose.yml')))
assert len(services) == 2
for svc in services:
assert types.SecurityOpt.parse('apparmor:unconfined') in svc['security_opt']
assert types.SecurityOpt.parse('seccomp:unconfined') in svc['security_opt']
@mock.patch.object(ConfigFile, 'from_filename', wraps=ConfigFile.from_filename)
def test_extends_same_file_optimization(self, from_filename_mock):
load_from_filename('tests/fixtures/extends/no-file-specified.yml')
from_filename_mock.assert_called_once()
@pytest.mark.xfail(IS_WINDOWS_PLATFORM, reason='paths use slash')
class ExpandPathTest(unittest.TestCase):
working_dir = '/home/user/somedir'
def test_expand_path_normal(self):
result = config.expand_path(self.working_dir, 'myfile')
assert result == self.working_dir + '/' + 'myfile'
def test_expand_path_absolute(self):
abs_path = '/home/user/otherdir/somefile'
result = config.expand_path(self.working_dir, abs_path)
assert result == abs_path
def test_expand_path_with_tilde(self):
test_path = '~/otherdir/somefile'
with mock.patch.dict(os.environ):
os.environ['HOME'] = user_path = '/home/user/'
result = config.expand_path(self.working_dir, test_path)
assert result == user_path + 'otherdir/somefile'
class VolumePathTest(unittest.TestCase):
def test_split_path_mapping_with_windows_path(self):
host_path = "c:\\Users\\msamblanet\\Documents\\anvil\\connect\\config"
windows_volume_path = host_path + ":/opt/connect/config:ro"
expected_mapping = ("/opt/connect/config", (host_path, 'ro'))
mapping = config.split_path_mapping(windows_volume_path)
assert mapping == expected_mapping
def test_split_path_mapping_with_windows_path_in_container(self):
host_path = 'c:\\Users\\remilia\\data'
container_path = 'c:\\scarletdevil\\data'
expected_mapping = (container_path, (host_path, None))
mapping = config.split_path_mapping('{}:{}'.format(host_path, container_path))
assert mapping == expected_mapping
def test_split_path_mapping_with_root_mount(self):
host_path = '/'
container_path = '/var/hostroot'
expected_mapping = (container_path, (host_path, None))
mapping = config.split_path_mapping('{}:{}'.format(host_path, container_path))
assert mapping == expected_mapping
@pytest.mark.xfail(IS_WINDOWS_PLATFORM, reason='paths use slash')
class BuildPathTest(unittest.TestCase):
def setUp(self):
self.abs_context_path = os.path.join(os.getcwd(), 'tests/fixtures/build-ctx')
def test_nonexistent_path(self):
with pytest.raises(ConfigurationError):
config.load(
build_config_details(
{
'foo': {'build': 'nonexistent.path'},
},
'working_dir',
'filename.yml'
)
)
def test_relative_path(self):
relative_build_path = '../build-ctx/'
service_dict = make_service_dict(
'relpath',
{'build': relative_build_path},
working_dir='tests/fixtures/build-path'
)
assert service_dict['build'] == self.abs_context_path
def test_absolute_path(self):
service_dict = make_service_dict(
'abspath',
{'build': self.abs_context_path},
working_dir='tests/fixtures/build-path'
)
assert service_dict['build'] == self.abs_context_path
def test_from_file(self):
service_dict = load_from_filename('tests/fixtures/build-path/docker-compose.yml')
assert service_dict == [{'name': 'foo', 'build': {'context': self.abs_context_path}}]
def test_from_file_override_dir(self):
override_dir = os.path.join(os.getcwd(), 'tests/fixtures/')
service_dict = load_from_filename(
'tests/fixtures/build-path-override-dir/docker-compose.yml', override_dir=override_dir)
assert service_dict == [{'name': 'foo', 'build': {'context': self.abs_context_path}}]
def test_valid_url_in_build_path(self):
valid_urls = [
'git://github.com/docker/docker',
'git@github.com:docker/docker.git',
'git@bitbucket.org:atlassianlabs/atlassian-docker.git',
'https://github.com/docker/docker.git',
'http://github.com/docker/docker.git',
'github.com/docker/docker.git',
]
for valid_url in valid_urls:
service_dict = config.load(build_config_details({
'validurl': {'build': valid_url},
}, '.', None)).services
assert service_dict[0]['build'] == {'context': valid_url}
def test_invalid_url_in_build_path(self):
invalid_urls = [
'example.com/bogus',
'ftp://example.com/',
'/path/does/not/exist',
]
for invalid_url in invalid_urls:
with pytest.raises(ConfigurationError) as exc:
config.load(build_config_details({
'invalidurl': {'build': invalid_url},
}, '.', None))
assert 'build path' in exc.exconly()
class HealthcheckTest(unittest.TestCase):
def test_healthcheck(self):
config_dict = config.load(
build_config_details({
'version': '2.3',
'services': {
'test': {
'image': 'busybox',
'healthcheck': {
'test': ['CMD', 'true'],
'interval': '1s',
'timeout': '1m',
'retries': 3,
'start_period': '10s',
}
}
}
})
)
serialized_config = yaml.safe_load(serialize_config(config_dict))
serialized_service = serialized_config['services']['test']
assert serialized_service['healthcheck'] == {
'test': ['CMD', 'true'],
'interval': '1s',
'timeout': '1m',
'retries': 3,
'start_period': '10s'
}
def test_disable(self):
config_dict = config.load(
build_config_details({
'version': '2.3',
'services': {
'test': {
'image': 'busybox',
'healthcheck': {
'disable': True,
}
}
}
})
)
serialized_config = yaml.safe_load(serialize_config(config_dict))
serialized_service = serialized_config['services']['test']
assert serialized_service['healthcheck'] == {
'test': ['NONE'],
}
def test_disable_with_other_config_is_invalid(self):
with pytest.raises(ConfigurationError) as excinfo:
config.load(
build_config_details({
'version': '2.3',
'services': {
'invalid-healthcheck': {
'image': 'busybox',
'healthcheck': {
'disable': True,
'interval': '1s',
}
}
}
})
)
assert 'invalid-healthcheck' in excinfo.exconly()
assert '"disable: true" cannot be combined with other options' in excinfo.exconly()
def test_healthcheck_with_invalid_test(self):
with pytest.raises(ConfigurationError) as excinfo:
config.load(
build_config_details({
'version': '2.3',
'services': {
'invalid-healthcheck': {
'image': 'busybox',
'healthcheck': {
'test': ['true'],
'interval': '1s',
'timeout': '1m',
'retries': 3,
'start_period': '10s',
}
}
}
})
)
assert 'invalid-healthcheck' in excinfo.exconly()
assert 'the first item must be either NONE, CMD or CMD-SHELL' in excinfo.exconly()
class GetDefaultConfigFilesTestCase(unittest.TestCase):
files = [
'docker-compose.yml',
'docker-compose.yaml',
'compose.yml',
'compose.yaml',
]
def test_get_config_path_default_file_in_basedir(self):
for index, filename in enumerate(self.files):
assert filename == get_config_filename_for_files(self.files[index:])
with pytest.raises(config.ComposeFileNotFound):
get_config_filename_for_files([])
def test_get_config_path_default_file_in_parent_dir(self):
"""Test with files placed in the subdir"""
def get_config_in_subdir(files):
return get_config_filename_for_files(files, subdir=True)
for index, filename in enumerate(self.files):
assert filename == get_config_in_subdir(self.files[index:])
with pytest.raises(config.ComposeFileNotFound):
get_config_in_subdir([])
def get_config_filename_for_files(filenames, subdir=None):
def make_files(dirname, filenames):
for fname in filenames:
with open(os.path.join(dirname, fname), 'w') as f:
f.write('')
project_dir = tempfile.mkdtemp()
try:
make_files(project_dir, filenames)
if subdir:
base_dir = tempfile.mkdtemp(dir=project_dir)
else:
base_dir = project_dir
filenames = config.get_default_config_files(base_dir)
if not filenames:
raise config.ComposeFileNotFound(config.SUPPORTED_FILENAMES)
return os.path.basename(filenames[0])
finally:
shutil.rmtree(project_dir)
class SerializeTest(unittest.TestCase):
def test_denormalize_depends(self):
service_dict = {
'image': 'busybox',
'command': 'true',
'depends_on': {
'service2': {'condition': 'service_started'},
'service3': {'condition': 'service_started'},
}
}
assert denormalize_service_dict(service_dict, VERSION) == service_dict
def test_serialize_time(self):
data = {
9: '9ns',
9000: '9us',
9000000: '9ms',
90000000: '90ms',
900000000: '900ms',
999999999: '999999999ns',
1000000000: '1s',
60000000000: '1m',
60000000001: '60000000001ns',
9000000000000: '150m',
90000000000000: '25h',
}
for k, v in data.items():
assert serialize_ns_time_value(k) == v
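# The table above implies a simple rule (an assumption, not the actual
# implementation): render the value with the largest unit that divides it
# exactly, falling back to plain nanoseconds. As a sketch:
#     units = [(3600 * 10**9, 'h'), (60 * 10**9, 'm'), (10**9, 's'),
#              (10**6, 'ms'), (10**3, 'us'), (1, 'ns')]
#     next('%d%s' % (ns // f, u) for f, u in units if ns % f == 0)
# which reproduces e.g. 60000000000 -> '1m' and 60000000001 -> '60000000001ns'.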
def test_denormalize_healthcheck(self):
service_dict = {
'image': 'test',
'healthcheck': {
'test': 'exit 1',
'interval': '1m40s',
'timeout': '30s',
'retries': 5,
'start_period': '2s90ms'
}
}
processed_service = config.process_service(config.ServiceConfig(
'.', 'test', 'test', service_dict
))
denormalized_service = denormalize_service_dict(processed_service, VERSION)
assert denormalized_service['healthcheck']['interval'] == '100s'
assert denormalized_service['healthcheck']['timeout'] == '30s'
assert denormalized_service['healthcheck']['start_period'] == '2090ms'
def test_denormalize_image_has_digest(self):
service_dict = {
'image': 'busybox'
}
image_digest = 'busybox@sha256:abcde'
assert denormalize_service_dict(service_dict, VERSION, image_digest) == {
'image': 'busybox@sha256:abcde'
}
def test_denormalize_image_no_digest(self):
service_dict = {
'image': 'busybox'
}
assert denormalize_service_dict(service_dict, VERSION) == {
'image': 'busybox'
}
def test_serialize_secrets(self):
service_dict = {
'image': 'example/web',
'secrets': [
{'source': 'one'},
{
'source': 'source',
'target': 'target',
'uid': '100',
'gid': '200',
'mode': 0o777,
}
]
}
secrets_dict = {
'one': {'file': '/one.txt'},
'source': {'file': '/source.pem'},
'two': {'external': True},
}
config_dict = config.load(build_config_details({
'version': '3.1',
'services': {'web': service_dict},
'secrets': secrets_dict
}))
serialized_config = yaml.safe_load(serialize_config(config_dict))
serialized_service = serialized_config['services']['web']
assert secret_sort(serialized_service['secrets']) == secret_sort(service_dict['secrets'])
assert 'secrets' in serialized_config
assert serialized_config['secrets']['two'] == {'external': True, 'name': 'two'}
def test_serialize_ports(self):
config_dict = config.Config(config_version=VERSION, version=VERSION, services=[
{
'ports': [types.ServicePort('80', '8080', None, None, None)],
'image': 'alpine',
'name': 'web'
}
], volumes={}, networks={}, secrets={}, configs={})
serialized_config = yaml.safe_load(serialize_config(config_dict))
assert [{'published': 8080, 'target': 80}] == serialized_config['services']['web']['ports']
def test_serialize_ports_v1(self):
config_dict = config.Config(config_version=V1, version=V1, services=[
{
'ports': [types.ServicePort('80', '8080', None, None, None)],
'image': 'alpine',
'name': 'web'
}
], volumes={}, networks={}, secrets={}, configs={})
serialized_config = yaml.safe_load(serialize_config(config_dict))
assert ['8080:80/tcp'] == serialized_config['services']['web']['ports']
def test_serialize_ports_with_ext_ip(self):
config_dict = config.Config(config_version=VERSION, version=VERSION, services=[
{
'ports': [types.ServicePort('80', '8080', None, None, '127.0.0.1')],
'image': 'alpine',
'name': 'web'
}
], volumes={}, networks={}, secrets={}, configs={})
serialized_config = yaml.safe_load(serialize_config(config_dict))
assert '127.0.0.1:8080:80/tcp' in serialized_config['services']['web']['ports']
def test_serialize_configs(self):
service_dict = {
'image': 'example/web',
'configs': [
{'source': 'one'},
{
'source': 'source',
'target': 'target',
'uid': '100',
'gid': '200',
'mode': 0o777,
}
]
}
configs_dict = {
'one': {'file': '/one.txt'},
'source': {'file': '/source.pem'},
'two': {'external': True},
}
config_dict = config.load(build_config_details({
'version': '3.3',
'services': {'web': service_dict},
'configs': configs_dict
}))
serialized_config = yaml.safe_load(serialize_config(config_dict))
serialized_service = serialized_config['services']['web']
assert secret_sort(serialized_service['configs']) == secret_sort(service_dict['configs'])
assert 'configs' in serialized_config
assert serialized_config['configs']['two'] == {'external': True, 'name': 'two'}
def test_serialize_bool_string(self):
cfg = {
'version': '2.2',
'services': {
'web': {
'image': 'example/web',
'command': 'true',
'environment': {'FOO': 'Y', 'BAR': 'on'}
}
}
}
config_dict = config.load(build_config_details(cfg))
serialized_config = serialize_config(config_dict)
assert 'command: "true"\n' in serialized_config
assert 'FOO: "Y"\n' in serialized_config
assert 'BAR: "on"\n' in serialized_config
def test_serialize_escape_dollar_sign(self):
cfg = {
'version': '2.2',
'services': {
'web': {
'image': 'busybox',
'command': 'echo $$FOO',
'environment': {
'CURRENCY': '$$'
},
'entrypoint': ['$$SHELL', '-c'],
}
}
}
config_dict = config.load(build_config_details(cfg))
serialized_config = yaml.safe_load(serialize_config(config_dict))
serialized_service = serialized_config['services']['web']
assert serialized_service['environment']['CURRENCY'] == '$$'
assert serialized_service['command'] == 'echo $$FOO'
assert serialized_service['entrypoint'][0] == '$$SHELL'
def test_serialize_escape_dont_interpolate(self):
cfg = {
'version': '2.2',
'services': {
'web': {
'image': 'busybox',
'command': 'echo $FOO',
'environment': {
'CURRENCY': '$'
},
'env_file': ['tests/fixtures/env/three.env'],
'entrypoint': ['$SHELL', '-c'],
}
}
}
config_dict = config.load(build_config_details(cfg, working_dir='.'), interpolate=False)
serialized_config = yaml.safe_load(serialize_config(config_dict, escape_dollar=False))
serialized_service = serialized_config['services']['web']
assert serialized_service['environment']['CURRENCY'] == '$'
# Values coming from env_files are not allowed to have variables
assert serialized_service['environment']['FOO'] == 'NO $$ENV VAR'
assert serialized_service['environment']['DOO'] == 'NO $${ENV} VAR'
assert serialized_service['command'] == 'echo $FOO'
assert serialized_service['entrypoint'][0] == '$SHELL'
def test_serialize_unicode_values(self):
cfg = {
'version': '2.3',
'services': {
'web': {
'image': 'busybox',
'command': 'echo 十六夜 咲夜'
}
}
}
config_dict = config.load(build_config_details(cfg))
serialized_config = yaml.safe_load(serialize_config(config_dict))
serialized_service = serialized_config['services']['web']
assert serialized_service['command'] == 'echo 十六夜 咲夜'
def test_serialize_external_false(self):
cfg = {
'version': '3.4',
'volumes': {
'test': {
'name': 'test-false',
'external': False
}
}
}
config_dict = config.load(build_config_details(cfg))
serialized_config = yaml.safe_load(serialize_config(config_dict))
serialized_volume = serialized_config['volumes']['test']
assert serialized_volume['external'] is False
|
Ironarcher/casso-backend
|
refs/heads/master
|
lib/flask/testsuite/test_apps/lib/python2.5/site-packages/site_app.py
|
1799
|
import flask
app = flask.Flask(__name__)
|
baseblack/ReproWeb
|
refs/heads/master
|
3rdParty/python/flask/testsuite/test_apps/path/installed_package/__init__.py
|
1799
|
import flask
app = flask.Flask(__name__)
|
ChristineLaMuse/mozillians
|
refs/heads/master
|
vendor-local/lib/python/unidecode/x014.py
|
252
|
data = (
'[?]', # 0x00
'e', # 0x01
'aai', # 0x02
'i', # 0x03
'ii', # 0x04
'o', # 0x05
'oo', # 0x06
'oo', # 0x07
'ee', # 0x08
'i', # 0x09
'a', # 0x0a
'aa', # 0x0b
'we', # 0x0c
'we', # 0x0d
'wi', # 0x0e
'wi', # 0x0f
'wii', # 0x10
'wii', # 0x11
'wo', # 0x12
'wo', # 0x13
'woo', # 0x14
'woo', # 0x15
'woo', # 0x16
'wa', # 0x17
'wa', # 0x18
'waa', # 0x19
'waa', # 0x1a
'waa', # 0x1b
'ai', # 0x1c
'w', # 0x1d
'\'', # 0x1e
't', # 0x1f
'k', # 0x20
'sh', # 0x21
's', # 0x22
'n', # 0x23
'w', # 0x24
'n', # 0x25
'[?]', # 0x26
'w', # 0x27
'c', # 0x28
'?', # 0x29
'l', # 0x2a
'en', # 0x2b
'in', # 0x2c
'on', # 0x2d
'an', # 0x2e
'pe', # 0x2f
'paai', # 0x30
'pi', # 0x31
'pii', # 0x32
'po', # 0x33
'poo', # 0x34
'poo', # 0x35
'hee', # 0x36
'hi', # 0x37
'pa', # 0x38
'paa', # 0x39
'pwe', # 0x3a
'pwe', # 0x3b
'pwi', # 0x3c
'pwi', # 0x3d
'pwii', # 0x3e
'pwii', # 0x3f
'pwo', # 0x40
'pwo', # 0x41
'pwoo', # 0x42
'pwoo', # 0x43
'pwa', # 0x44
'pwa', # 0x45
'pwaa', # 0x46
'pwaa', # 0x47
'pwaa', # 0x48
'p', # 0x49
'p', # 0x4a
'h', # 0x4b
'te', # 0x4c
'taai', # 0x4d
'ti', # 0x4e
'tii', # 0x4f
'to', # 0x50
'too', # 0x51
'too', # 0x52
'dee', # 0x53
'di', # 0x54
'ta', # 0x55
'taa', # 0x56
'twe', # 0x57
'twe', # 0x58
'twi', # 0x59
'twi', # 0x5a
'twii', # 0x5b
'twii', # 0x5c
'two', # 0x5d
'two', # 0x5e
'twoo', # 0x5f
'twoo', # 0x60
'twa', # 0x61
'twa', # 0x62
'twaa', # 0x63
'twaa', # 0x64
'twaa', # 0x65
't', # 0x66
'tte', # 0x67
'tti', # 0x68
'tto', # 0x69
'tta', # 0x6a
'ke', # 0x6b
'kaai', # 0x6c
'ki', # 0x6d
'kii', # 0x6e
'ko', # 0x6f
'koo', # 0x70
'koo', # 0x71
'ka', # 0x72
'kaa', # 0x73
'kwe', # 0x74
'kwe', # 0x75
'kwi', # 0x76
'kwi', # 0x77
'kwii', # 0x78
'kwii', # 0x79
'kwo', # 0x7a
'kwo', # 0x7b
'kwoo', # 0x7c
'kwoo', # 0x7d
'kwa', # 0x7e
'kwa', # 0x7f
'kwaa', # 0x80
'kwaa', # 0x81
'kwaa', # 0x82
'k', # 0x83
'kw', # 0x84
'keh', # 0x85
'kih', # 0x86
'koh', # 0x87
'kah', # 0x88
'ce', # 0x89
'caai', # 0x8a
'ci', # 0x8b
'cii', # 0x8c
'co', # 0x8d
'coo', # 0x8e
'coo', # 0x8f
'ca', # 0x90
'caa', # 0x91
'cwe', # 0x92
'cwe', # 0x93
'cwi', # 0x94
'cwi', # 0x95
'cwii', # 0x96
'cwii', # 0x97
'cwo', # 0x98
'cwo', # 0x99
'cwoo', # 0x9a
'cwoo', # 0x9b
'cwa', # 0x9c
'cwa', # 0x9d
'cwaa', # 0x9e
'cwaa', # 0x9f
'cwaa', # 0xa0
'c', # 0xa1
'th', # 0xa2
'me', # 0xa3
'maai', # 0xa4
'mi', # 0xa5
'mii', # 0xa6
'mo', # 0xa7
'moo', # 0xa8
'moo', # 0xa9
'ma', # 0xaa
'maa', # 0xab
'mwe', # 0xac
'mwe', # 0xad
'mwi', # 0xae
'mwi', # 0xaf
'mwii', # 0xb0
'mwii', # 0xb1
'mwo', # 0xb2
'mwo', # 0xb3
'mwoo', # 0xb4
'mwoo', # 0xb5
'mwa', # 0xb6
'mwa', # 0xb7
'mwaa', # 0xb8
'mwaa', # 0xb9
'mwaa', # 0xba
'm', # 0xbb
'm', # 0xbc
'mh', # 0xbd
'm', # 0xbe
'm', # 0xbf
'ne', # 0xc0
'naai', # 0xc1
'ni', # 0xc2
'nii', # 0xc3
'no', # 0xc4
'noo', # 0xc5
'noo', # 0xc6
'na', # 0xc7
'naa', # 0xc8
'nwe', # 0xc9
'nwe', # 0xca
'nwa', # 0xcb
'nwa', # 0xcc
'nwaa', # 0xcd
'nwaa', # 0xce
'nwaa', # 0xcf
'n', # 0xd0
'ng', # 0xd1
'nh', # 0xd2
'le', # 0xd3
'laai', # 0xd4
'li', # 0xd5
'lii', # 0xd6
'lo', # 0xd7
'loo', # 0xd8
'loo', # 0xd9
'la', # 0xda
'laa', # 0xdb
'lwe', # 0xdc
'lwe', # 0xdd
'lwi', # 0xde
'lwi', # 0xdf
'lwii', # 0xe0
'lwii', # 0xe1
'lwo', # 0xe2
'lwo', # 0xe3
'lwoo', # 0xe4
'lwoo', # 0xe5
'lwa', # 0xe6
'lwa', # 0xe7
'lwaa', # 0xe8
'lwaa', # 0xe9
'l', # 0xea
'l', # 0xeb
'l', # 0xec
'se', # 0xed
'saai', # 0xee
'si', # 0xef
'sii', # 0xf0
'so', # 0xf1
'soo', # 0xf2
'soo', # 0xf3
'sa', # 0xf4
'saa', # 0xf5
'swe', # 0xf6
'swe', # 0xf7
'swi', # 0xf8
'swi', # 0xf9
'swii', # 0xfa
'swii', # 0xfb
'swo', # 0xfc
'swo', # 0xfd
'swoo', # 0xfe
'swoo', # 0xff
)
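# A minimal usage sketch (an assumption about how unidecode consumes this
# table): the tuple maps the low byte of a code point in the U+14xx block to
# an ASCII transliteration, so a lookup is a plain index. The helper below is
# hypothetical and only covers this one block.
def _sketch_transliterate(char):
    codepoint = ord(char)
    assert codepoint >> 8 == 0x14, 'this table only covers U+1400..U+14FF'
    return data[codepoint & 0xff]  # e.g. U+1420 -> 'k'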
|
jacquesqiao/Paddle
|
refs/heads/develop
|
python/paddle/trainer_config_helpers/tests/configs/test_print_layer.py
|
7
|
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from paddle.trainer_config_helpers import *
settings(learning_rate=1e-4, batch_size=1000)
din = data_layer(name='input', size=100)
print_layer(input=din)
outputs(din)
|
yhoshino11/pytest_example
|
refs/heads/master
|
.tox/flake8/lib/python2.7/site-packages/pip/index.py
|
31
|
"""Routines related to PyPI, indexes"""
from __future__ import absolute_import
import logging
import cgi
import sys
import os
import re
import mimetypes
import posixpath
import warnings
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves.urllib import request as urllib_request
from pip.compat import ipaddress
from pip.utils import (
Inf, cached_property, normalize_name, splitext, normalize_path)
from pip.utils.deprecation import RemovedInPip7Warning, RemovedInPip8Warning
from pip.utils.logging import indent_log
from pip.exceptions import (
DistributionNotFound, BestVersionAlreadyInstalled, InvalidWheelFilename,
UnsupportedWheel,
)
from pip.download import url_to_path, path_to_url
from pip.models import PyPI
from pip.wheel import Wheel, wheel_ext
from pip.pep425tags import supported_tags, supported_tags_noarch, get_platform
from pip.req.req_requirement import InstallationCandidate
from pip._vendor import html5lib, requests, pkg_resources, six
from pip._vendor.packaging.version import parse as parse_version
from pip._vendor.requests.exceptions import SSLError
__all__ = ['PackageFinder']
# Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC)
SECURE_ORIGINS = [
# protocol, hostname, port
("https", "*", "*"),
("*", "localhost", "*"),
("*", "127.0.0.0/8", "*"),
("*", "::1/128", "*"),
("file", "*", None),
]
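# A minimal sketch (illustration only, not pip's matching code) of how a
# parsed (protocol, hostname, port) origin is compared against one of these
# triples: "*" is a wildcard in any position and a None port matches every
# port. The real check in _validate_secure_origin below additionally treats
# the hostname field as a possible IP network (CIDR), not just a literal.
def _sketch_origin_matches(origin, secure_origin):
    return all(
        pattern in ("*", None) or pattern == value
        for value, pattern in zip(origin, secure_origin)
    )
# e.g. _sketch_origin_matches(("http", "localhost", 8080), ("*", "localhost", "*")) -> True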
logger = logging.getLogger(__name__)
class PackageFinder(object):
"""This finds packages.
This is meant to match easy_install's technique for looking for
packages, by reading pages and looking for appropriate links.
"""
def __init__(self, find_links, index_urls,
use_wheel=True, allow_external=(), allow_unverified=(),
allow_all_external=False, allow_all_prereleases=False,
trusted_hosts=None, process_dependency_links=False,
session=None):
if session is None:
raise TypeError(
"PackageFinder() missing 1 required keyword argument: "
"'session'"
)
# Build find_links. If an argument starts with ~, it may be
# a local file relative to a home directory. So try normalizing
# it and if it exists, use the normalized version.
# This is deliberately conservative - it might be fine just to
# blindly normalize anything starting with a ~...
self.find_links = []
for link in find_links:
if link.startswith('~'):
new_link = normalize_path(link)
if os.path.exists(new_link):
link = new_link
self.find_links.append(link)
self.index_urls = index_urls
self.dependency_links = []
# These are boring links that have already been logged somehow:
self.logged_links = set()
self.use_wheel = use_wheel
# Do we allow (safe and verifiable) externally hosted files?
self.allow_external = set(normalize_name(n) for n in allow_external)
# Which names are allowed to install insecure and unverifiable files?
self.allow_unverified = set(
normalize_name(n) for n in allow_unverified
)
# Anything that is allowed unverified is also allowed external
self.allow_external |= self.allow_unverified
# Do we allow all (safe and verifiable) externally hosted files?
self.allow_all_external = allow_all_external
# Domains that we won't emit warnings for when not using HTTPS
self.secure_origins = [
("*", host, "*")
for host in (trusted_hosts if trusted_hosts else [])
]
# Stores if we ignored any external links so that we can instruct
# end users how to install them if no distributions are available
self.need_warn_external = False
# Stores if we ignored any unsafe links so that we can instruct
# end users how to install them if no distributions are available
self.need_warn_unverified = False
# Do we want to allow _all_ pre-releases?
self.allow_all_prereleases = allow_all_prereleases
# Do we process dependency links?
self.process_dependency_links = process_dependency_links
# The Session we'll use to make requests
self.session = session
def add_dependency_links(self, links):
# # FIXME: this shouldn't be a global list; it should only
# # apply to requirements of the package that specifies the
# # dependency_links value
# # FIXME: also, we should track comes_from (i.e., use Link)
if self.process_dependency_links:
warnings.warn(
"Dependency Links processing has been deprecated and will be "
"removed in a future release.",
RemovedInPip7Warning,
)
self.dependency_links.extend(links)
def _sort_locations(self, locations):
"""
Sort locations into "files" (archives) and "urls", and return
a pair of lists (files, urls).
"""
files = []
urls = []
# puts the url for the given file path into the appropriate list
def sort_path(path):
url = path_to_url(path)
if mimetypes.guess_type(url, strict=False)[0] == 'text/html':
urls.append(url)
else:
files.append(url)
for url in locations:
is_local_path = os.path.exists(url)
is_file_url = url.startswith('file:')
is_find_link = url in self.find_links
if is_local_path or is_file_url:
if is_local_path:
path = url
else:
path = url_to_path(url)
if is_find_link and os.path.isdir(path):
path = os.path.realpath(path)
for item in os.listdir(path):
sort_path(os.path.join(path, item))
elif is_file_url and os.path.isdir(path):
urls.append(url)
elif os.path.isfile(path):
sort_path(path)
else:
urls.append(url)
return files, urls
def _candidate_sort_key(self, candidate):
"""
Function used to generate link sort key for link tuples.
The greater the return value, the more preferred it is.
If not finding wheels, then sorted by version only.
If finding wheels, then the sort order is by version, then:
1. existing installs
2. wheels ordered via Wheel.support_index_min()
3. source archives
Note: it was considered to embed this logic into the Link
comparison operators, but then different sdist links
with the same version would have to be considered equal.
"""
if self.use_wheel:
support_num = len(supported_tags)
if candidate.location == INSTALLED_VERSION:
pri = 1
elif candidate.location.is_wheel:
# can raise InvalidWheelFilename
wheel = Wheel(candidate.location.filename)
if not wheel.supported():
raise UnsupportedWheel(
"%s is not a supported wheel for this platform. It "
"can't be sorted." % wheel.filename
)
pri = -(wheel.support_index_min())
else: # sdist
pri = -(support_num)
return (candidate.version, pri)
else:
return candidate.version
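# Worked example (illustration) of how these keys order candidates of the
# same version once _sort_versions applies reverse=True:
#     installed install -> (version, 1)
#     supported wheel   -> (version, -wheel.support_index_min())  # nearer 0 is better
#     sdist             -> (version, -len(supported_tags))
# so ties on version resolve as installed > best-matching wheel > sdist.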
def _sort_versions(self, applicable_versions):
"""
Bring the latest version (and wheels) to the front, but maintain the
existing ordering as secondary. See the docstring for `_candidate_sort_key`
for details. This function is isolated for easier unit testing.
"""
return sorted(
applicable_versions,
key=self._candidate_sort_key,
reverse=True
)
def _validate_secure_origin(self, logger, location):
# Determine if this url used a secure transport mechanism
parsed = urllib_parse.urlparse(str(location))
origin = (parsed.scheme, parsed.hostname, parsed.port)
# Determine if our origin is a secure origin by looking through our
# hardcoded list of secure origins, as well as any additional ones
# configured on this PackageFinder instance.
for secure_origin in (SECURE_ORIGINS + self.secure_origins):
# Check to see if the protocol matches
if origin[0] != secure_origin[0] and secure_origin[0] != "*":
continue
try:
# We need to do this decode dance to ensure that we have a
# unicode object, even on Python 2.x.
addr = ipaddress.ip_address(
origin[1]
if (
isinstance(origin[1], six.text_type) or
origin[1] is None
)
else origin[1].decode("utf8")
)
network = ipaddress.ip_network(
secure_origin[1]
if isinstance(secure_origin[1], six.text_type)
else secure_origin[1].decode("utf8")
)
except ValueError:
# We don't have both a valid address or a valid network, so
# we'll check this origin against hostnames.
if origin[1] != secure_origin[1] and secure_origin[1] != "*":
continue
else:
# We have a valid address and network, so see if the address
# is contained within the network.
if addr not in network:
continue
# Check to see if the port matches
if (origin[2] != secure_origin[2] and
secure_origin[2] != "*" and
secure_origin[2] is not None):
continue
# If we've gotten here, then this origin matches the current
# secure origin and we should break out of the loop and continue
# on.
break
else:
# If the loop successfully completed without a break, that means
# that the origin we are testing is not a secure origin.
logger.warning(
"This repository located at %s is not a trusted host, if "
"this repository is available via HTTPS it is recommend to "
"use HTTPS instead, otherwise you may silence this warning "
"with '--trusted-host %s'.",
parsed.hostname,
parsed.hostname,
)
warnings.warn(
"Implicitly allowing locations which are not hosted at a "
"secure origin is deprecated and will require the use of "
"--trusted-host in the future.",
RemovedInPip7Warning,
)
def _get_index_urls_locations(self, project_name):
"""Returns the locations found via self.index_urls
Checks the url_name on the main (first in the list) index and
use this url_name to produce all locations
"""
def mkurl_pypi_url(url):
loc = posixpath.join(url, project_url_name)
# For maximum compatibility with easy_install, ensure the path
# ends in a trailing slash. Although this isn't in the spec
# (and PyPI can handle it without the slash) some other index
# implementations might break if they relied on easy_install's
# behavior.
if not loc.endswith('/'):
loc = loc + '/'
return loc
project_url_name = urllib_parse.quote(project_name.lower())
if self.index_urls:
# Check that we have the url_name correctly spelled:
# Only check main index if index URL is given
main_index_url = Link(
mkurl_pypi_url(self.index_urls[0]),
trusted=True,
)
page = self._get_page(main_index_url)
if page is None and PyPI.netloc not in str(main_index_url):
warnings.warn(
"Failed to find %r at %s. It is suggested to upgrade "
"your index to support normalized names as the name in "
"/simple/{name}." % (project_name, main_index_url),
RemovedInPip8Warning,
)
project_url_name = self._find_url_name(
Link(self.index_urls[0], trusted=True),
project_url_name,
) or project_url_name
if project_url_name is not None:
return [mkurl_pypi_url(url) for url in self.index_urls]
return []
def _find_all_versions(self, project_name):
"""Find all available versions for project_name
This checks index_urls, find_links and dependency_links
All versions found are returned
See _link_package_versions for details on which files are accepted
"""
index_locations = self._get_index_urls_locations(project_name)
file_locations, url_locations = self._sort_locations(index_locations)
fl_file_loc, fl_url_loc = self._sort_locations(self.find_links)
file_locations.extend(fl_file_loc)
url_locations.extend(fl_url_loc)
_flocations, _ulocations = self._sort_locations(self.dependency_links)
file_locations.extend(_flocations)
# We trust every url that the user has given us whether it was given
# via --index-url or --find-links
locations = [Link(url, trusted=True) for url in url_locations]
# We explicitly do not trust links that came from dependency_links
locations.extend([Link(url) for url in _ulocations])
logger.debug('%d location(s) to search for versions of %s:',
len(locations), project_name)
for location in locations:
logger.debug('* %s', location)
self._validate_secure_origin(logger, location)
find_links_versions = list(self._package_versions(
# We trust every directly linked archive in find_links
(Link(url, '-f', trusted=True) for url in self.find_links),
project_name.lower()
))
page_versions = []
for page in self._get_pages(locations, project_name):
logger.debug('Analyzing links from page %s', page.url)
with indent_log():
page_versions.extend(
self._package_versions(page.links, project_name.lower())
)
dependency_versions = list(self._package_versions(
(Link(url) for url in self.dependency_links), project_name.lower()
))
if dependency_versions:
logger.debug(
'dependency_links found: %s',
', '.join([
version.location.url for version in dependency_versions
])
)
file_versions = list(
self._package_versions(
(Link(url) for url in file_locations),
project_name.lower()
)
)
if file_versions:
file_versions.sort(reverse=True)
logger.debug(
'Local files found: %s',
', '.join([
url_to_path(candidate.location.url)
for candidate in file_versions
])
)
# This is an intentional priority ordering
return (
file_versions + find_links_versions + page_versions +
dependency_versions
)
def find_requirement(self, req, upgrade):
"""Try to find an InstallationCandidate for req
Expects req, an InstallRequirement and upgrade, a boolean
Returns an InstallationCandidate or None
May raise DistributionNotFound or BestVersionAlreadyInstalled
"""
all_versions = self._find_all_versions(req.name)
# Filter out anything which doesn't match our specifier
_versions = set(
req.specifier.filter(
[x.version for x in all_versions],
prereleases=(
self.allow_all_prereleases
if self.allow_all_prereleases else None
),
)
)
applicable_versions = [
x for x in all_versions if x.version in _versions
]
if req.satisfied_by is not None:
# Finally add our existing versions to the front of our versions.
applicable_versions.insert(
0,
InstallationCandidate(
req.name,
req.satisfied_by.version,
INSTALLED_VERSION,
)
)
existing_applicable = True
else:
existing_applicable = False
applicable_versions = self._sort_versions(applicable_versions)
if not upgrade and existing_applicable:
if applicable_versions[0].location is INSTALLED_VERSION:
logger.debug(
'Existing installed version (%s) is most up-to-date and '
'satisfies requirement',
req.satisfied_by.version,
)
else:
logger.debug(
'Existing installed version (%s) satisfies requirement '
'(most up-to-date version is %s)',
req.satisfied_by.version,
applicable_versions[0][2],
)
return None
if not applicable_versions:
logger.critical(
'Could not find a version that satisfies the requirement %s '
'(from versions: %s)',
req,
', '.join(
sorted(
set(str(i.version) for i in all_versions),
key=parse_version,
)
)
)
if self.need_warn_external:
logger.warning(
"Some externally hosted files were ignored as access to "
"them may be unreliable (use --allow-external %s to "
"allow).",
req.name,
)
if self.need_warn_unverified:
logger.warning(
"Some insecure and unverifiable files were ignored"
" (use --allow-unverified %s to allow).",
req.name,
)
raise DistributionNotFound(
'No matching distribution found for %s' % req
)
if applicable_versions[0].location is INSTALLED_VERSION:
# We have an existing version, and it's the best version
logger.debug(
'Installed version (%s) is most up-to-date (past versions: '
'%s)',
req.satisfied_by.version,
', '.join(str(i.version) for i in applicable_versions[1:]) or
"none",
)
raise BestVersionAlreadyInstalled
if len(applicable_versions) > 1:
logger.debug(
'Using version %s (newest of versions: %s)',
applicable_versions[0].version,
', '.join(str(i.version) for i in applicable_versions)
)
selected_version = applicable_versions[0].location
if (selected_version.verifiable is not None and not
selected_version.verifiable):
logger.warning(
"%s is potentially insecure and unverifiable.", req.name,
)
if selected_version._deprecated_regex:
warnings.warn(
"%s discovered using a deprecated method of parsing, in the "
"future it will no longer be discovered." % req.name,
RemovedInPip7Warning,
)
return selected_version
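# A minimal sketch of the specifier-filtering step above, written
# against the standalone `packaging` library (assumed installed)
# rather than pip's vendored copy; the candidate versions are made up.
#
#   from packaging.specifiers import SpecifierSet
#   from packaging.version import parse
#   spec = SpecifierSet('>=1.0,<2.0')
#   print(sorted(spec.filter(['1.0', '1.5', '2.0b1', '2.0']),
#                key=parse, reverse=True))
#   # ['1.5', '1.0'] -- the pre-release 2.0b1 is excluded by default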
def _find_url_name(self, index_url, url_name):
"""
Finds the true URL name of a package, when the given name isn't quite
correct.
This is usually used to implement case-insensitivity.
"""
if not index_url.url.endswith('/'):
# Vaguely part of the PyPI API... weird but true.
# FIXME: bad to modify this?
index_url.url += '/'
page = self._get_page(index_url)
if page is None:
logger.critical('Cannot fetch index base URL %s', index_url)
return
norm_name = normalize_name(url_name)
for link in page.links:
base = posixpath.basename(link.path.rstrip('/'))
if norm_name == normalize_name(base):
logger.debug(
'Real name of requirement %s is %s', url_name, base,
)
return base
return None
def _get_pages(self, locations, project_name):
"""
Yields HTMLPage objects from the given locations, skipping
locations that have errors, and also crawling the download and
homepage rel links discovered on each page
"""
all_locations = list(locations)
seen = set()
normalized = normalize_name(project_name)
while all_locations:
location = all_locations.pop(0)
if location in seen:
continue
seen.add(location)
page = self._get_page(location)
if page is None:
continue
yield page
for link in page.rel_links():
if (normalized not in self.allow_external and not
self.allow_all_external):
self.need_warn_external = True
logger.debug(
"Not searching %s for files because external "
"urls are disallowed.",
link,
)
continue
if (link.trusted is not None and not
link.trusted and
normalized not in self.allow_unverified):
logger.debug(
"Not searching %s for urls, it is an "
"untrusted link and cannot produce safe or "
"verifiable files.",
link,
)
self.need_warn_unverified = True
continue
all_locations.append(link)
_egg_fragment_re = re.compile(r'#egg=([^&]*)')
_egg_info_re = re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', re.I)
_py_version_re = re.compile(r'-py([123]\.?[0-9]?)$')
def _sort_links(self, links):
"""
Returns elements of links in order, non-egg links first, egg links
second, while eliminating duplicates
"""
eggs, no_eggs = [], []
seen = set()
for link in links:
if link not in seen:
seen.add(link)
if link.egg_fragment:
eggs.append(link)
else:
no_eggs.append(link)
return no_eggs + eggs
def _package_versions(self, links, search_name):
for link in self._sort_links(links):
v = self._link_package_versions(link, search_name)
if v is not None:
yield v
def _known_extensions(self):
extensions = ('.tar.gz', '.tar.bz2', '.tar', '.tgz', '.zip')
if self.use_wheel:
return extensions + (wheel_ext,)
return extensions
def _link_package_versions(self, link, search_name):
"""Return an InstallationCandidate or None"""
platform = get_platform()
version = None
if link.egg_fragment:
egg_info = link.egg_fragment
else:
egg_info, ext = link.splitext()
if not ext:
if link not in self.logged_links:
logger.debug('Skipping link %s; not a file', link)
self.logged_links.add(link)
return
if egg_info.endswith('.tar'):
# Special double-extension case:
egg_info = egg_info[:-4]
ext = '.tar' + ext
if ext not in self._known_extensions():
if link not in self.logged_links:
logger.debug(
'Skipping link %s; unknown archive format: %s',
link,
ext,
)
self.logged_links.add(link)
return
if "macosx10" in link.path and ext == '.zip':
if link not in self.logged_links:
logger.debug('Skipping link %s; macosx10 one', link)
self.logged_links.add(link)
return
if ext == wheel_ext:
try:
wheel = Wheel(link.filename)
except InvalidWheelFilename:
logger.debug(
'Skipping %s because the wheel filename is invalid',
link
)
return
if (pkg_resources.safe_name(wheel.name).lower() !=
pkg_resources.safe_name(search_name).lower()):
logger.debug(
'Skipping link %s; wrong project name (not %s)',
link,
search_name,
)
return
if not wheel.supported():
logger.debug(
'Skipping %s because it is not compatible with this '
'Python',
link,
)
return
# This is a dirty hack to prevent installing Binary Wheels from
# PyPI unless it is a Windows or Mac Binary Wheel. This is
# paired with a change to PyPI disabling uploads for the
# same. Once we have a mechanism for enabling support for
# binary wheels on linux that deals with the inherent problems
# of binary distribution this can be removed.
comes_from = getattr(link, "comes_from", None)
if (
(
not platform.startswith('win') and not
platform.startswith('macosx') and not
platform == 'cli'
) and
comes_from is not None and
urllib_parse.urlparse(
comes_from.url
).netloc.endswith(PyPI.netloc)):
if not wheel.supported(tags=supported_tags_noarch):
logger.debug(
"Skipping %s because it is a pypi-hosted binary "
"Wheel on an unsupported platform",
link,
)
return
version = wheel.version
if not version:
version = self._egg_info_matches(egg_info, search_name, link)
if version is None:
logger.debug(
'Skipping link %s; wrong project name (not %s)',
link,
search_name,
)
return
if (link.internal is not None and not
link.internal and not
normalize_name(search_name).lower()
in self.allow_external and not
self.allow_all_external):
# We have a link that we are sure is external, so we should skip
# it unless we are allowing externals
logger.debug("Skipping %s because it is externally hosted.", link)
self.need_warn_external = True
return
if (link.verifiable is not None and not
link.verifiable and not
(normalize_name(search_name).lower()
in self.allow_unverified)):
# We have a link whose integrity we are sure we cannot verify,
# so we should skip it unless we are allowing unsafe installs
# for this requirement.
logger.debug(
"Skipping %s because it is an insecure and unverifiable file.",
link,
)
self.need_warn_unverified = True
return
match = self._py_version_re.search(version)
if match:
version = version[:match.start()]
py_version = match.group(1)
if py_version != sys.version[:3]:
logger.debug(
'Skipping %s because Python version is incorrect', link
)
return
logger.debug('Found link %s, version: %s', link, version)
return InstallationCandidate(search_name, version, link)
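# Worked example of the -pyX.Y suffix handling above (made-up value):
#
#   match = self._py_version_re.search('1.4-py2.7')
#   '1.4-py2.7'[:match.start()]  ->  '1.4'
#   match.group(1)               ->  '2.7'
#
# and the link is skipped whenever '2.7' != sys.version[:3].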
def _egg_info_matches(self, egg_info, search_name, link):
match = self._egg_info_re.search(egg_info)
if not match:
logger.debug('Could not parse version from link: %s', link)
return None
name = match.group(0).lower()
# To match the "safe" name that pkg_resources creates:
name = name.replace('_', '-')
# project name and version must be separated by a dash
look_for = search_name.lower() + "-"
if name.startswith(look_for):
return match.group(0)[len(look_for):]
else:
return None
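# Worked example (made-up filename): for egg_info 'some_package-1.2.3'
# and search_name 'some_package', the regex matches the whole string,
# underscores are normalized to dashes ('some-package-1.2.3'), the
# 'some-package-' prefix is verified, and '1.2.3' is returned. The
# trailing dash in look_for matters: without it, a search for
# 'some_package' would wrongly accept 'some_package2-0.1'.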
def _get_page(self, link):
return HTMLPage.get_page(link, session=self.session)
class HTMLPage(object):
"""Represents one page, along with its URL"""
# FIXME: these regexes are horrible hacks:
_homepage_re = re.compile(b'<th>\\s*home\\s*page', re.I)
_download_re = re.compile(b'<th>\\s*download\\s+url', re.I)
_href_re = re.compile(
b'href=(?:"([^"]*)"|\'([^\']*)\'|([^>\\s\\n]*))',
re.I | re.S
)
def __init__(self, content, url, headers=None, trusted=None):
# Determine if we have any encoding information in our headers
encoding = None
if headers and "Content-Type" in headers:
content_type, params = cgi.parse_header(headers["Content-Type"])
if "charset" in params:
encoding = params['charset']
self.content = content
self.parsed = html5lib.parse(
self.content,
encoding=encoding,
namespaceHTMLElements=False,
)
self.url = url
self.headers = headers
self.trusted = trusted
def __str__(self):
return self.url
@classmethod
def get_page(cls, link, skip_archives=True, session=None):
if session is None:
raise TypeError(
"get_page() missing 1 required keyword argument: 'session'"
)
url = link.url
url = url.split('#', 1)[0]
# Check for VCS schemes that do not support lookup as web pages.
from pip.vcs import VcsSupport
for scheme in VcsSupport.schemes:
if url.lower().startswith(scheme) and url[len(scheme)] in '+:':
logger.debug('Cannot look at %s URL %s', scheme, link)
return None
try:
if skip_archives:
filename = link.filename
for bad_ext in ['.tar', '.tar.gz', '.tar.bz2', '.tgz', '.zip']:
if filename.endswith(bad_ext):
content_type = cls._get_content_type(
url, session=session,
)
if content_type.lower().startswith('text/html'):
break
else:
logger.debug(
'Skipping page %s because of Content-Type: %s',
link,
content_type,
)
return
logger.debug('Getting page %s', url)
# Tack index.html onto file:// URLs that point to directories
(scheme, netloc, path, params, query, fragment) = \
urllib_parse.urlparse(url)
if (scheme == 'file' and
os.path.isdir(urllib_request.url2pathname(path))):
# add trailing slash if not present so urljoin doesn't trim
# final segment
if not url.endswith('/'):
url += '/'
url = urllib_parse.urljoin(url, 'index.html')
logger.debug(' file: URL is directory, getting %s', url)
resp = session.get(
url,
headers={
"Accept": "text/html",
"Cache-Control": "max-age=600",
},
)
resp.raise_for_status()
# The check for archives above only works if the URL ends with
# something that looks like an archive. However, that is not a
# requirement of a URL. Unless we issue a HEAD request on every
# URL, we cannot know ahead of time for sure whether something is
# HTML or not. However, we can check after we've downloaded it.
content_type = resp.headers.get('Content-Type', 'unknown')
if not content_type.lower().startswith("text/html"):
logger.debug(
'Skipping page %s because of Content-Type: %s',
link,
content_type,
)
return
inst = cls(
resp.content, resp.url, resp.headers,
trusted=link.trusted,
)
except requests.HTTPError as exc:
level = 2 if exc.response.status_code == 404 else 1
cls._handle_fail(link, exc, url, level=level)
except requests.ConnectionError as exc:
cls._handle_fail(link, "connection error: %s" % exc, url)
except requests.Timeout:
cls._handle_fail(link, "timed out", url)
except SSLError as exc:
reason = ("There was a problem confirming the ssl certificate: "
"%s" % exc)
cls._handle_fail(link, reason, url, level=2, meth=logger.info)
else:
return inst
@staticmethod
def _handle_fail(link, reason, url, level=1, meth=None):
if meth is None:
meth = logger.debug
meth("Could not fetch URL %s: %s - skipping", link, reason)
@staticmethod
def _get_content_type(url, session):
"""Get the Content-Type of the given url, using a HEAD request"""
scheme, netloc, path, query, fragment = urllib_parse.urlsplit(url)
if scheme not in ('http', 'https'):
# FIXME: some warning or something?
# assertion error?
return ''
resp = session.head(url, allow_redirects=True)
resp.raise_for_status()
return resp.headers.get("Content-Type", "")
@cached_property
def api_version(self):
metas = [
x for x in self.parsed.findall(".//meta")
if x.get("name", "").lower() == "api-version"
]
if metas:
try:
return int(metas[0].get("value", None))
except (TypeError, ValueError):
pass
return None
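# For reference, a page advertising API version 2 carries a meta tag
# such as (hypothetical markup):
#
#   <meta name="api-version" value="2">
#
# which the lookup above reduces to the integer 2.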
@cached_property
def base_url(self):
bases = [
x for x in self.parsed.findall(".//base")
if x.get("href") is not None
]
if bases and bases[0].get("href"):
return bases[0].get("href")
else:
return self.url
@property
def links(self):
"""Yields all links in the page"""
for anchor in self.parsed.findall(".//a"):
if anchor.get("href"):
href = anchor.get("href")
url = self.clean_link(
urllib_parse.urljoin(self.base_url, href)
)
# Determine if this link is internal. If that distinction
# doesn't make sense in this context, then we don't make
# any distinction.
internal = None
if self.api_version and self.api_version >= 2:
# Only api_versions >= 2 have a distinction between
# external and internal links
internal = bool(
anchor.get("rel") and
"internal" in anchor.get("rel").split()
)
yield Link(url, self, internal=internal)
def rel_links(self):
for url in self.explicit_rel_links():
yield url
for url in self.scraped_rel_links():
yield url
def explicit_rel_links(self, rels=('homepage', 'download')):
"""Yields all links with the given relations"""
rels = set(rels)
for anchor in self.parsed.findall(".//a"):
if anchor.get("rel") and anchor.get("href"):
found_rels = set(anchor.get("rel").split())
# Determine the intersection between what rels were found and
# what rels were being looked for
if found_rels & rels:
href = anchor.get("href")
url = self.clean_link(
urllib_parse.urljoin(self.base_url, href)
)
yield Link(url, self, trusted=False)
def scraped_rel_links(self):
# Can we get rid of this horrible horrible method?
for regex in (self._homepage_re, self._download_re):
match = regex.search(self.content)
if not match:
continue
href_match = self._href_re.search(self.content, pos=match.end())
if not href_match:
continue
url = (
href_match.group(1) or
href_match.group(2) or
href_match.group(3)
)
if not url:
continue
try:
url = url.decode("ascii")
except UnicodeDecodeError:
continue
url = self.clean_link(urllib_parse.urljoin(self.base_url, url))
yield Link(url, self, trusted=False, _deprecated_regex=True)
_clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)
def clean_link(self, url):
"""Makes sure a link is fully encoded. That is, if a ' ' shows up in
the link, it will be rewritten to %20 (while not over-quoting
% or other characters)."""
return self._clean_re.sub(
lambda match: '%%%2x' % ord(match.group(0)), url)
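# Worked example (made-up URL): clean_link() rewrites
#
#   'http://host/some path/archive.tar.gz'
#   ->  'http://host/some%20path/archive.tar.gz'
#
# because ' ' falls outside the allowed character class, while '%'
# is inside it and therefore never double-quoted.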
class Link(object):
def __init__(self, url, comes_from=None, internal=None, trusted=None,
_deprecated_regex=False):
# url can be a UNC windows share
if url != Inf and url.startswith('\\\\'):
url = path_to_url(url)
self.url = url
self.comes_from = comes_from
self.internal = internal
self.trusted = trusted
self._deprecated_regex = _deprecated_regex
def __str__(self):
if self.comes_from:
return '%s (from %s)' % (self.url, self.comes_from)
else:
return str(self.url)
def __repr__(self):
return '<Link %s>' % self
def __eq__(self, other):
if not isinstance(other, Link):
return NotImplemented
return self.url == other.url
def __ne__(self, other):
if not isinstance(other, Link):
return NotImplemented
return self.url != other.url
def __lt__(self, other):
if not isinstance(other, Link):
return NotImplemented
return self.url < other.url
def __le__(self, other):
if not isinstance(other, Link):
return NotImplemented
return self.url <= other.url
def __gt__(self, other):
if not isinstance(other, Link):
return NotImplemented
return self.url > other.url
def __ge__(self, other):
if not isinstance(other, Link):
return NotImplemented
return self.url >= other.url
def __hash__(self):
return hash(self.url)
@property
def filename(self):
_, netloc, path, _, _ = urllib_parse.urlsplit(self.url)
name = posixpath.basename(path.rstrip('/')) or netloc
name = urllib_parse.unquote(name)
assert name, ('URL %r produced no filename' % self.url)
return name
@property
def scheme(self):
return urllib_parse.urlsplit(self.url)[0]
@property
def netloc(self):
return urllib_parse.urlsplit(self.url)[1]
@property
def path(self):
return urllib_parse.unquote(urllib_parse.urlsplit(self.url)[2])
def splitext(self):
return splitext(posixpath.basename(self.path.rstrip('/')))
@property
def ext(self):
return self.splitext()[1]
@property
def url_without_fragment(self):
scheme, netloc, path, query, fragment = urllib_parse.urlsplit(self.url)
return urllib_parse.urlunsplit((scheme, netloc, path, query, None))
_egg_fragment_re = re.compile(r'#egg=([^&]*)')
@property
def egg_fragment(self):
match = self._egg_fragment_re.search(self.url)
if not match:
return None
return match.group(1)
_hash_re = re.compile(
r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)'
)
@property
def hash(self):
match = self._hash_re.search(self.url)
if match:
return match.group(2)
return None
@property
def hash_name(self):
match = self._hash_re.search(self.url)
if match:
return match.group(1)
return None
@property
def show_url(self):
return posixpath.basename(self.url.split('#', 1)[0].split('?', 1)[0])
@property
def verifiable(self):
"""
Returns True if this link can be verified after download, False if it
cannot, and None if we cannot determine.
"""
trusted = self.trusted or getattr(self.comes_from, "trusted", None)
if trusted is not None and trusted:
# This link came from a trusted source. It *may* be verifiable but
# first we need to see if this page is operating under the new
# API version.
try:
api_version = getattr(self.comes_from, "api_version", None)
api_version = int(api_version)
except (ValueError, TypeError):
api_version = None
if api_version is None or api_version <= 1:
# This link is either trusted, or it came from a trusted
# source; however, it is not operating under API version 2,
# so we can't make any claims about whether it's safe or not
return
if self.hash:
# This link came from a trusted source and it has a hash, so we
# can consider it safe.
return True
else:
# This link came from a trusted source, using the new API
# version, and it does not have a hash. It is NOT verifiable
return False
elif trusted is not None:
# This link came from an untrusted source and we cannot trust it
return False
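# Summarizing the cases above:
#   trusted origin, API >= 2, hash present -> True  (verifiable)
#   trusted origin, API >= 2, no hash      -> False
#   trusted origin, API unknown or <= 1    -> None  (no claim made)
#   untrusted origin                       -> False
#   origin unknown (trusted is None)       -> None  (falls through)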
@property
def is_wheel(self):
return self.ext == wheel_ext
# An object to represent the "link" for the installed version of a requirement.
# Using Inf as the url makes it sort higher.
INSTALLED_VERSION = Link(Inf)
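# A small demo of Link's URL-derived properties (illustrative URL; it
# relies only on the stdlib helpers this module already imports).
if __name__ == '__main__':
    _demo_link = Link('https://example.com/pkg/demo-1.0.tar.gz#md5=0123abcd')
    print(_demo_link.filename)   # demo-1.0.tar.gz
    print(_demo_link.show_url)   # demo-1.0.tar.gz
    print('%s=%s' % (_demo_link.hash_name, _demo_link.hash))  # md5=0123abcd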
|
mozilla-mobile/firefox-ios
|
refs/heads/master
|
Client/Assets/Search/run_tests.py
|
43
|
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from lxml import etree
import sys
import unittest
from scrape_plugins import Overlay
class TestOverlays(unittest.TestCase):
def setUp(self):
self.plugin = etree.parse("Tests/Base/testplugin.xml")
def testAppend(self):
overlay = Overlay("Tests/Overlays/append.xml")
overlay.apply(self.plugin)
self.assertEqualsExpectedXML(plugin=self.plugin, expectedPath="Tests/Expected/append.xml")
def testReplace(self):
overlay = Overlay("Tests/Overlays/replace.xml")
overlay.apply(self.plugin)
self.assertEqualsExpectedXML(plugin=self.plugin, expectedPath="Tests/Expected/replace.xml")
def assertEqualsExpectedXML(self, plugin, expectedPath):
actual = etree.tostring(plugin, pretty_print=True)
with open(expectedPath, "r") as file:
expected = file.read()
self.assertEqual(actual, expected, "\nExpected:\n%s\n\nActual:\n%s" % (expected, actual))
if __name__ == '__main__':
unittest.main()
|
fracpete/python-weka-wrapper
|
refs/heads/master
|
tests/wekatests/coretests/dataset.py
|
1
|
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# dataset.py
# Copyright (C) 2014-2019 Fracpete (pythonwekawrapper at gmail dot com)
import unittest
import weka.core.jvm as jvm
import weka.core.dataset as dataset
import weka.core.converters as converters
import wekatests.tests.weka_test as weka_test
from weka.core.dataset import create_instances_from_lists, create_instances_from_matrices
from random import randint
import numpy as np
class TestDataset(weka_test.WekaTest):
def test_attribute(self):
"""
Tests the Attribute class.
"""
name = "Num"
att = dataset.Attribute.create_numeric(name)
self.assertIsNotNone(att, "Failed to create attribute!")
self.assertEqual(name, att.name, "Names differ")
self.assertTrue(att.is_numeric)
name = "Nom"
att = dataset.Attribute.create_nominal(name, ["A", "B", "C"])
self.assertIsNotNone(att, "Failed to create attribute!")
self.assertEqual(name, att.name, "Names differ")
self.assertTrue(att.is_nominal)
name = "Dat1"
att = dataset.Attribute.create_date(name)
self.assertIsNotNone(att, "Failed to create attribute!")
self.assertEqual(name, att.name, "Names differ")
self.assertTrue(att.is_date)
name = "Dat2"
att = dataset.Attribute.create_date(name, formt="yyyy-MM-dd HH:mm")
self.assertIsNotNone(att, "Failed to create attribute!")
self.assertEqual(name, att.name, "Names differ")
self.assertTrue(att.is_date)
name = "Str"
att = dataset.Attribute.create_string(name)
self.assertIsNotNone(att, "Failed to create attribute!")
self.assertEqual(name, att.name, "Names differ")
self.assertTrue(att.is_string)
def test_attributestats(self):
"""
Tests the AttributeStats class.
"""
loader = converters.Loader(classname="weka.core.converters.ArffLoader")
data = loader.load_file(self.datafile("anneal.arff"))
self.assertIsNotNone(data, msg="Failed to load data!")
stats = data.attribute_stats(2)
self.assertIsNotNone(stats, msg="Failed to obtain stats!")
self.assertEqual(8, stats.distinct_count, "distinct_count differs")
self.assertEqual(898, stats.int_count, "int_count differs")
self.assertEqual(0, stats.missing_count, "missing_count differs")
self.assertEqual([86, 256, 440, 0, 51, 20, 10, 19, 16], stats.nominal_counts.tolist(), "nominal_counts differs")
self.assertEqual([86, 256, 440, 0, 51, 20, 10, 19, 16], stats.nominal_weights.tolist(), "nominal_weights differs")
self.assertEqual(898, stats.total_count, "total_count differs")
self.assertEqual(0, stats.unique_count, "unique_count differs")
def test_stats(self):
"""
Tests the Stats class.
"""
loader = converters.Loader(classname="weka.core.converters.ArffLoader")
data = loader.load_file(self.datafile("anneal.arff"))
self.assertIsNotNone(data, msg="Failed to load data!")
stats = data.attribute_stats(3)
numstats = stats.numeric_stats
self.assertAlmostEqual(898, numstats.count, places=3, msg="count differs")
self.assertAlmostEqual(70, numstats.max, places=3, msg="max differs")
self.assertAlmostEqual(3.635, numstats.mean, places=3, msg="mean differs")
self.assertAlmostEqual(0.0, numstats.min, places=3, msg="min differs")
self.assertAlmostEqual(13.717, numstats.stddev, places=3, msg="stddev differs")
self.assertAlmostEqual(3264, numstats.sum, places=3, msg="sum differs")
self.assertAlmostEqual(180636, numstats.sumsq, places=3, msg="sumsq differs")
def test_instance(self):
"""
Tests the Instance class.
"""
loader = converters.Loader(classname="weka.core.converters.ArffLoader")
data = loader.load_file(self.datafile("anneal.ORIG.arff"))
self.assertIsNotNone(data, msg="Failed to load data!")
inst = data.get_instance(0)
self.assertEqual(39, inst.num_attributes, msg="num_attributes differs")
self.assertEqual(-1, data.class_index, msg="class_index differs")
data.class_index = data.num_attributes - 1
self.assertEqual(38, data.class_index, msg="class_index differs")
data.class_is_first()
self.assertEqual(0, data.class_index, msg="class_index differs")
data.class_is_last()
self.assertEqual(38, data.class_index, msg="class_index differs")
self.assertIsNotNone(inst.dataset, msg="Dataset reference should not be None!")
self.assertTrue(inst.has_missing(), msg="Should have missing values")
self.assertTrue(inst.is_missing(0), msg="First value should be missing")
self.assertFalse(inst.is_missing(1), msg="Second value should not be missing")
self.assertEqual("C", inst.get_string_value(1), msg="string value differs")
inst.set_string_value(1, "H")
self.assertEqual("H", inst.get_string_value(1), msg="string value differs")
self.assertEqual(8, inst.get_value(3), msg="numeric value differs")
inst.set_value(3, 6.3)
self.assertEqual(6.3, inst.get_value(3), msg="numeric value differs")
self.assertEqual(1, inst.weight, msg="weight should be 1")
inst.weight = 0.5
self.assertEqual(0.5, inst.weight, msg="weights differ")
values = [1.0, 2.0, 3.0]
inst = dataset.Instance.create_instance(values, classname="weka.core.DenseInstance")
self.assertEqual(3, inst.num_attributes, msg="#attributes differ")
self.assertEqual(1.0, inst.get_value(0), msg="value at #" + str(0) + " differs")
self.assertEqual(2.0, inst.get_value(1), msg="value at #" + str(1) + " differs")
self.assertEqual(3.0, inst.get_value(2), msg="value at #" + str(2) + " differs")
values = [0.0, 2.0, 0.0]
inst = dataset.Instance.create_instance(values, classname="weka.core.SparseInstance")
self.assertEqual(3, inst.num_attributes, msg="#attributes differ")
self.assertEqual(0.0, inst.get_value(0), msg="value at #" + str(0) + " differs")
self.assertEqual(2.0, inst.get_value(1), msg="value at #" + str(1) + " differs")
self.assertEqual(0.0, inst.get_value(2), msg="value at #" + str(2) + " differs")
values = [(1, 2.0)]
inst = dataset.Instance.create_sparse_instance(values, 3, classname="weka.core.SparseInstance")
self.assertEqual(3, inst.num_attributes, msg="#attributes differ")
self.assertEqual(0.0, inst.get_value(0), msg="value at #" + str(0) + " differs")
self.assertEqual(2.0, inst.get_value(1), msg="value at #" + str(1) + " differs")
self.assertEqual(0.0, inst.get_value(2), msg="value at #" + str(2) + " differs")
def test_instances(self):
"""
Tests the Instances class.
"""
loader = converters.Loader(classname="weka.core.converters.ArffLoader")
data = loader.load_file(self.datafile("anneal.arff"))
self.assertIsNotNone(data, msg="Failed to load data!")
self.assertFalse(data.has_class(), msg="Should not have class set!")
count = 0
for i in data:
count += 1
self.assertEqual(898, count, msg="Number of rows differs!")
count = 0
for i in data.attributes():
count += 1
self.assertEqual(39, count, msg="Number of attributes differs!")
self.assertEqual(898, data.num_instances, msg="num_instances differs")
self.assertEqual(39, data.num_attributes, msg="num_attributes differs")
self.assertEqual(-1, data.class_index, msg="class_index differs")
data.class_index = data.num_attributes - 1
self.assertEqual(38, data.class_index, msg="class_index differs")
self.assertEqual(38, data.class_attribute.index, msg="class_index differs")
data.class_is_first()
self.assertEqual(0, data.class_index, msg="class_index differs")
self.assertTrue(data.has_class(), msg="Should have class set!")
data.class_is_last()
self.assertEqual(38, data.class_index, msg="class_index differs")
self.assertTrue(data.has_class(), msg="Should have class set!")
att = data.attribute(0)
self.assertIsNotNone(att, msg="Attribute should not be None!")
self.assertEqual("family", att.name, msg="attribute name differs")
name = "steel"
att = data.attribute_by_name(name)
self.assertIsNotNone(att, msg="Attribute should not be None!")
self.assertEqual(name, att.name, msg="attribute name differs")
self.assertEqual(2, att.index, msg="attribute index differs")
data.delete_attribute(2)
self.assertEqual(38, data.num_attributes, msg="num_attributes differs")
name = "steel"
att = data.attribute_by_name(name)
self.assertIsNone(att, msg="Attribute should be None!")
data.delete(3)
self.assertEqual(897, data.num_instances, msg="num_instances differs")
data.class_is_last()
data.no_class()
self.assertFalse(data.has_class(), msg="Should not have class set!")
# changing rows
data1 = loader.load_file(self.datafile("anneal.arff"))
self.assertIsNotNone(data1, msg="Failed to load data!")
data2 = loader.load_file(self.datafile("anneal.arff"))
self.assertIsNotNone(data2, msg="Failed to load data!")
inst1 = data1.get_instance(0)
inst2 = data2.get_instance(0)
self.assertEqual(inst1.values.tolist(), inst2.values.tolist(), msg="values differ")
data1.add_instance(inst2)
self.assertEqual(899, data1.num_instances, msg="num_instances differs (add)")
inst2 = data2.get_instance(2)
data1.add_instance(inst2, index=10)
inst1 = data1.get_instance(10)
self.assertEqual(900, data1.num_instances, msg="num_instances differs (insert)")
self.assertEqual(inst1.values.tolist(), inst2.values.tolist(), msg="values differ (insert)")
inst2 = data2.get_instance(1)
data1.set_instance(0, inst2)
inst1 = data1.get_instance(0)
self.assertEqual(inst1.values.tolist(), inst2.values.tolist(), msg="values differ (set)")
# create instances
atts = []
atts.append(dataset.Attribute.create_numeric("num"))
atts.append(dataset.Attribute.create_nominal("nom", ["yes", "no"]))
atts.append(dataset.Attribute.create_string("str"))
atts.append(dataset.Attribute.create_date("dat", "yyyy-MM-dd"))
data = dataset.Instances.create_instances("created", atts, 3)
# 1. row
values = []
values.append(1.1)
values.append(data.attribute(1).index_of("no"))
values.append(data.attribute(2).add_string_value("blah de blah"))
values.append(data.attribute(3).parse_date("2015-10-12"))
inst = dataset.Instance.create_instance(values)
data.add_instance(inst)
# 2. row
values = []
values.append(1.2)
values.append(data.attribute(1).index_of("yes"))
values.append(data.attribute(2).add_string_value("pi day!"))
values.append(data.attribute(3).parse_date("2001-12-31"))
inst = dataset.Instance.create_instance(values)
data.add_instance(inst)
# 3. row
values = []
values.append(1.3)
values.append(data.attribute(1).index_of("no"))
values.append(data.attribute(2).add_string_value("hello world"))
values.append(data.attribute(3).parse_date("2011-02-03"))
inst = dataset.Instance.create_instance(values)
data.add_instance(inst)
self.assertEqual(data.get_instance(0).get_value(0), 1.1, msg="Numeric value differs")
self.assertEqual(data.get_instance(1).get_value(0), 1.2, msg="Numeric value differs")
self.assertEqual(data.get_instance(2).get_value(0), 1.3, msg="Numeric value differs")
self.assertEqual(data.get_instance(0).get_string_value(1), "no", msg="Nominal value differs")
self.assertEqual(data.get_instance(1).get_string_value(1), "yes", msg="Nominal value differs")
self.assertEqual(data.get_instance(2).get_string_value(1), "no", msg="Nominal value differs")
self.assertEqual(data.get_instance(0).get_string_value(2), "blah de blah", msg="String value differs")
self.assertEqual(data.get_instance(1).get_string_value(2), "pi day!", msg="String value differs")
self.assertEqual(data.get_instance(2).get_string_value(2), "hello world", msg="String value differs")
self.assertEqual(data.get_instance(0).get_value(3), data.attribute(3).parse_date("2015-10-12"), msg="Date value differs")
self.assertEqual(data.get_instance(1).get_value(3), data.attribute(3).parse_date("2001-12-31"), msg="Date value differs")
self.assertEqual(data.get_instance(2).get_value(3), data.attribute(3).parse_date("2011-02-03"), msg="Date value differs")
# train/test split
loader = converters.Loader(classname="weka.core.converters.ArffLoader")
data = loader.load_file(self.datafile("iris.arff"))
self.assertIsNotNone(data, msg="Failed to load data!")
self.assertFalse(data.has_class(), msg="Should not have class set!")
data.class_is_last()
perc = 66.6
train, test = data.train_test_split(perc)
self.assertIsNotNone(train, msg="Train is None")
self.assertIsNotNone(test, msg="Test is None")
self.assertEqual(data.num_instances, train.num_instances + test.num_instances, msg="Total number of instances differs")
self.assertEqual(train.num_instances, 100, msg="Number of training instances differs")
self.assertEqual(test.num_instances, 50, msg="Number of test instances differs")
# Invalid split percentages must raise an exception.
for perc in [0.0, -1.0, 100.0, 101.0]:
    self.assertRaises(Exception, data.train_test_split, perc)
def test_create_instances_from_lists(self):
"""
Tests the create_instances_from_lists method.
"""
# numeric
x = [[randint(1, 10) for _ in range(5)] for _ in range(10)]
y = [randint(0, 1) for _ in range(10)]
self.assertEqual(len(x), 10)
self.assertEqual(len(y), 10)
dataset = create_instances_from_lists(x, name="generated from lists (no y)")
self.assertEqual(len(dataset), 10)
dataset = create_instances_from_lists(x, y, name="generated from lists")
self.assertEqual(len(dataset), 10)
# mixed
x = [["TEXT", 1, 1.1], ["XXX", 2, 2.2]]
y = ["A", "B"]
self.assertEqual(len(x), 2)
self.assertEqual(len(y), 2)
dataset = create_instances_from_lists(x, name="generated from mixed lists (no y)")
self.assertEqual(len(dataset), 2)
dataset = create_instances_from_lists(x, y, name="generated from mixed lists")
self.assertEqual(len(dataset), 2)
def test_create_instances_from_matrices(self):
"""
Tests the create_instances_from_matrices method.
"""
# numeric
x = np.random.randn(10, 5)
y = np.random.randn(10)
self.assertEqual(len(x), 10)
self.assertEqual(len(y), 10)
dataset = create_instances_from_matrices(x, name="generated from matrices (no y)")
self.assertEqual(len(dataset), 10)
dataset = create_instances_from_matrices(x, y, name="generated from matrices")
self.assertEqual(len(dataset), 10)
# mixed
x = np.array([("TEXT", 1, 1.1), ("XXX", 2, 2.2)], dtype='S20, i4, f8')
y = np.array(["A", "B"], dtype='S20')
self.assertEqual(len(x), 2)
self.assertEqual(len(y), 2)
dataset = create_instances_from_matrices(x, name="generated from mixed matrices (no y)")
self.assertEqual(len(dataset), 2)
dataset = create_instances_from_matrices(x, y, name="generated from mixed matrices")
self.assertEqual(len(dataset), 2)
def suite():
"""
Returns the test suite.
:return: the test suite
:rtype: unittest.TestSuite
"""
return unittest.TestLoader().loadTestsFromTestCase(TestDataset)
if __name__ == '__main__':
jvm.start()
unittest.TextTestRunner().run(suite())
jvm.stop()
|
jptomo/rpython-lang-scheme
|
refs/heads/master
|
rpython/jit/backend/test/support.py
|
2
|
import py
import sys
from rpython.rlib.debug import debug_print
from rpython.translator.translator import TranslationContext, graphof
from rpython.jit.metainterp.optimizeopt import ALL_OPTS_NAMES
from rpython.rlib.rarithmetic import is_valid_int
class BaseCompiledMixin(object):
CPUClass = None
basic = False
def _get_TranslationContext(self):
return TranslationContext()
def _compile_and_run(self, t, entry_point, entry_point_graph, args):
raise NotImplementedError
# XXX backendopt is ignored
def meta_interp(self, function, args, repeat=1, inline=False, trace_limit=sys.maxint,
backendopt=None, listcomp=False, **kwds): # XXX ignored
from rpython.jit.metainterp.warmspot import WarmRunnerDesc
from rpython.annotator.listdef import s_list_of_strings
from rpython.annotator import model as annmodel
for arg in args:
assert is_valid_int(arg)
self.pre_translation_hook()
t = self._get_TranslationContext()
if listcomp:
t.config.translation.list_comprehension_operations = True
arglist = ", ".join(['int(argv[%d])' % (i + 1) for i in range(len(args))])
if len(args) == 1:
arglist += ','
arglist = '(%s)' % arglist
if repeat != 1:
src = py.code.Source("""
def entry_point(argv):
args = %s
res = function(*args)
for k in range(%d - 1):
res = function(*args)
print res
return 0
""" % (arglist, repeat))
else:
src = py.code.Source("""
def entry_point(argv):
args = %s
res = function(*args)
print res
return 0
""" % (arglist,))
exec src.compile() in locals()
t.buildannotator().build_types(function, [int] * len(args),
main_entry_point=True)
t.buildrtyper().specialize()
warmrunnerdesc = WarmRunnerDesc(t, translate_support_code=True,
CPUClass=self.CPUClass,
**kwds)
for jd in warmrunnerdesc.jitdrivers_sd:
jd.warmstate.set_param_threshold(3) # for tests
jd.warmstate.set_param_trace_eagerness(2) # for tests
jd.warmstate.set_param_trace_limit(trace_limit)
jd.warmstate.set_param_inlining(inline)
jd.warmstate.set_param_enable_opts(ALL_OPTS_NAMES)
mixlevelann = warmrunnerdesc.annhelper
entry_point_graph = mixlevelann.getgraph(entry_point, [s_list_of_strings],
annmodel.SomeInteger())
warmrunnerdesc.finish()
self.post_translation_hook()
return self._compile_and_run(t, entry_point, entry_point_graph, args)
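# Note on the arglist construction above: the trailing comma appended
# for a single argument makes the generated source evaluate to a
# 1-tuple -- '(int(argv[1]),)' -- so function(*args) unpacks correctly;
# '(int(argv[1]))' would just be a parenthesized int.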
def pre_translation_hook(self):
pass
def post_translation_hook(self):
pass
def check_loops(self, *args, **kwds):
pass
def check_loop_count(self, *args, **kwds):
pass
def check_tree_loop_count(self, *args, **kwds):
pass
def check_enter_count(self, *args, **kwds):
pass
def check_enter_count_at_most(self, *args, **kwds):
pass
def check_max_trace_length(self, *args, **kwds):
pass
def check_aborted_count(self, *args, **kwds):
pass
def check_aborted_count_at_least(self, *args, **kwds):
pass
def interp_operations(self, *args, **kwds):
py.test.skip("interp_operations test skipped")
class CCompiledMixin(BaseCompiledMixin):
slow = False
def setup_class(cls):
if cls.slow:
from rpython.jit.conftest import option
if not option.run_slow_tests:
py.test.skip("use --slow to execute this long-running test")
def _get_TranslationContext(self):
t = TranslationContext()
t.config.translation.gc = 'boehm'
t.config.translation.list_comprehension_operations = True
return t
def _compile_and_run(self, t, entry_point, entry_point_graph, args):
from rpython.translator.c.genc import CStandaloneBuilder as CBuilder
# XXX patch exceptions
cbuilder = CBuilder(t, entry_point, config=t.config)
cbuilder.generate_source()
self._check_cbuilder(cbuilder)
exe_name = cbuilder.compile()
debug_print('---------- Test starting ----------')
stdout = cbuilder.cmdexec(" ".join([str(arg) for arg in args]))
res = int(stdout)
debug_print('---------- Test done (%d) ----------' % (res,))
return res
def _check_cbuilder(self, cbuilder):
pass
|
maximon93/fabric-bolt
|
refs/heads/master
|
fabric_bolt/web_hooks/__init__.py
|
11
|
from . import receivers
|
akhmadMizkat/odoo
|
refs/heads/master
|
addons/l10n_ma/__openerp__.py
|
27
|
# -*- encoding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# Copyright (c) 2010 kazacube (http://kazacube.com).
{
'name': 'Maroc - Accounting',
'version': '1.0',
'author': 'kazacube',
'category': 'Localization/Account Charts',
'description': """
This is the base module to manage the accounting chart for Morocco.
===================================================================
This module loads the template of the standard Moroccan chart of accounts and
makes it possible to generate accounting statements compliant with Moroccan
standards (balance sheet, CPC (income and expense statement), 6-column general
balance, cumulative general ledger...).
The accounting integration was validated with the help of the Seddik chartered
accountancy firm during the third quarter of 2010.""",
'website': 'http://www.kazacube.com',
'depends': ['base', 'account'],
'data': [
'account_pcg_morocco.xml',
'l10n_ma_tax.xml',
'account_chart_template.yml',
],
'demo': [],
'auto_install': False,
'installable': True,
}
|
AloneRoad/Inforlearn
|
refs/heads/1.0-rc3
|
django/contrib/gis/db/models/query.py
|
10
|
from django.core.exceptions import ImproperlyConfigured
from django.db import connection
from django.db.models.query import QuerySet, Q, ValuesQuerySet, ValuesListQuerySet
from django.contrib.gis.db.backend import SpatialBackend
from django.contrib.gis.db.models import aggregates
from django.contrib.gis.db.models.fields import get_srid_info, GeometryField, PointField
from django.contrib.gis.db.models.sql import AreaField, DistanceField, GeomField, GeoQuery, GeoWhereNode
from django.contrib.gis.measure import Area, Distance
class GeoQuerySet(QuerySet):
"The Geographic QuerySet."
### Methods overloaded from QuerySet ###
def __init__(self, model=None, query=None):
super(GeoQuerySet, self).__init__(model=model, query=query)
self.query = query or GeoQuery(self.model, connection)
def values(self, *fields):
return self._clone(klass=GeoValuesQuerySet, setup=True, _fields=fields)
def values_list(self, *fields, **kwargs):
flat = kwargs.pop('flat', False)
if kwargs:
raise TypeError('Unexpected keyword arguments to values_list: %s'
% (kwargs.keys(),))
if flat and len(fields) > 1:
raise TypeError("'flat' is not valid when values_list is called with more than one field.")
return self._clone(klass=GeoValuesListQuerySet, setup=True, flat=flat,
_fields=fields)
### GeoQuerySet Methods ###
def area(self, tolerance=0.05, **kwargs):
"""
Returns the area of the geographic field in an `area` attribute on
each element of this GeoQuerySet.
"""
# Performing setup here rather than in `_spatial_attribute` so that
# we can get the units for `AreaField`.
procedure_args, geo_field = self._spatial_setup('area', field_name=kwargs.get('field_name', None))
s = {'procedure_args' : procedure_args,
'geo_field' : geo_field,
'setup' : False,
}
if SpatialBackend.oracle:
s['procedure_fmt'] = '%(geo_col)s,%(tolerance)s'
s['procedure_args']['tolerance'] = tolerance
s['select_field'] = AreaField('sq_m') # Oracle returns area in units of meters.
elif SpatialBackend.postgis or SpatialBackend.spatialite:
if not geo_field.geodetic:
# Getting the area units of the geographic field.
s['select_field'] = AreaField(Area.unit_attname(geo_field.units_name))
else:
# TODO: Do we want to support raw number areas for geodetic fields?
raise Exception('Area on geodetic coordinate systems not supported.')
return self._spatial_attribute('area', s, **kwargs)
def centroid(self, **kwargs):
"""
Returns the centroid of the geographic field in a `centroid`
attribute on each element of this GeoQuerySet.
"""
return self._geom_attribute('centroid', **kwargs)
def collect(self, **kwargs):
"""
Performs an aggregate collect operation on the given geometry field.
This is analogous to a union operation, but much faster because
boundaries are not dissolved.
"""
return self._spatial_aggregate(aggregates.Collect, **kwargs)
def difference(self, geom, **kwargs):
"""
Returns the spatial difference of the geographic field in a `difference`
attribute on each element of this GeoQuerySet.
"""
return self._geomset_attribute('difference', geom, **kwargs)
def distance(self, geom, **kwargs):
"""
Returns the distance from the given geographic field name to the
given geometry in a `distance` attribute on each element of the
GeoQuerySet.
Keyword Arguments:
`spheroid` => If the geometry field is geodetic and PostGIS is
the spatial database, then the more accurate
spheroid calculation will be used instead of the
quicker sphere calculation.
`tolerance` => Used only for Oracle. The tolerance is
in meters -- a default of 5 centimeters (0.05)
is used.
"""
return self._distance_attribute('distance', geom, **kwargs)
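# Hypothetical usage (the City model and the coordinates are made up):
#
#   from django.contrib.gis.geos import Point
#   pnt = Point(-95.36, 29.76, srid=4326)
#   for city in City.objects.distance(pnt, spheroid=True):
#       print city.name, city.distance.km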
def envelope(self, **kwargs):
"""
Returns a Geometry representing the bounding box of the
Geometry field in an `envelope` attribute on each element of
the GeoQuerySet.
"""
return self._geom_attribute('envelope', **kwargs)
def extent(self, **kwargs):
"""
Returns the extent (aggregate) of the features in the GeoQuerySet. The
extent will be returned as a 4-tuple, consisting of (xmin, ymin, xmax, ymax).
"""
return self._spatial_aggregate(aggregates.Extent, **kwargs)
def geojson(self, precision=8, crs=False, bbox=False, **kwargs):
"""
Returns a GeoJSON representation of the geometry field in a `geojson`
attribute on each element of the GeoQuerySet.
The `crs` and `bbox` keywords may be set to True if the user wants
the coordinate reference system and the bounding box to be included
in the GeoJSON representation of the geometry.
"""
if not SpatialBackend.postgis or not SpatialBackend.geojson:
raise NotImplementedError('Only PostGIS 1.3.4+ supports GeoJSON serialization.')
if not isinstance(precision, (int, long)):
raise TypeError('Precision keyword must be set with an integer.')
# Setting the options flag -- which depends on which version of
# PostGIS we're using.
major, minor1, minor2 = SpatialBackend.version
if major >= 1 and minor1 >= 4:
options = 0
if crs and bbox: options = 3
elif bbox: options = 1
elif crs: options = 2
else:
options = 0
if crs and bbox: options = 3
elif crs: options = 1
elif bbox: options = 2
s = {'desc' : 'GeoJSON',
'procedure_args' : {'precision' : precision, 'options' : options},
'procedure_fmt' : '%(geo_col)s,%(precision)s,%(options)s',
}
return self._spatial_attribute('geojson', s, **kwargs)
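# On PostGIS >= 1.4 the options flag above is a simple bitmask (bbox
# contributes 1, crs contributes 2):
#
#   crs=False, bbox=False -> options = 0
#   crs=False, bbox=True  -> options = 1
#   crs=True,  bbox=False -> options = 2
#   crs=True,  bbox=True  -> options = 3
#
# Earlier PostGIS versions swap the crs/bbox flag values, which is
# what the else branch handles.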
def gml(self, precision=8, version=2, **kwargs):
"""
Returns GML representation of the given field in a `gml` attribute
on each element of the GeoQuerySet.
"""
s = {'desc' : 'GML', 'procedure_args' : {'precision' : precision}}
if SpatialBackend.postgis:
# PostGIS AsGML() aggregate function parameter order depends on the
# version -- uggh.
major, minor1, minor2 = SpatialBackend.version
if major >= 1 and (minor1 > 3 or (minor1 == 3 and minor2 > 1)):
procedure_fmt = '%(version)s,%(geo_col)s,%(precision)s'
else:
procedure_fmt = '%(geo_col)s,%(precision)s,%(version)s'
s['procedure_args'] = {'precision' : precision, 'version' : version}
return self._spatial_attribute('gml', s, **kwargs)
def intersection(self, geom, **kwargs):
"""
Returns the spatial intersection of the Geometry field in
an `intersection` attribute on each element of this
GeoQuerySet.
"""
return self._geomset_attribute('intersection', geom, **kwargs)
def kml(self, **kwargs):
"""
Returns KML representation of the geometry field in a `kml`
attribute on each element of this GeoQuerySet.
"""
s = {'desc' : 'KML',
'procedure_fmt' : '%(geo_col)s,%(precision)s',
'procedure_args' : {'precision' : kwargs.pop('precision', 8)},
}
return self._spatial_attribute('kml', s, **kwargs)
def length(self, **kwargs):
"""
Returns the length of the geometry field as a `Distance` object
stored in a `length` attribute on each element of this GeoQuerySet.
"""
return self._distance_attribute('length', None, **kwargs)
def make_line(self, **kwargs):
"""
Creates a linestring from all of the PointField geometries in
this GeoQuerySet and returns it. This is a spatial aggregate
method, and thus returns a geometry rather than a GeoQuerySet.
"""
return self._spatial_aggregate(aggregates.MakeLine, geo_field_type=PointField, **kwargs)
def mem_size(self, **kwargs):
"""
Returns the memory size (number of bytes) that the geometry field takes
in a `mem_size` attribute on each element of this GeoQuerySet.
"""
return self._spatial_attribute('mem_size', {}, **kwargs)
def num_geom(self, **kwargs):
"""
Returns the number of geometries in a `num_geom` attribute on
each element of this GeoQuerySet if the field is a
GeometryCollection or Multi* field; otherwise sets the
attribute to None.
"""
return self._spatial_attribute('num_geom', {}, **kwargs)
def num_points(self, **kwargs):
"""
Returns the number of points in the first linestring in the
Geometry field in a `num_points` attribute on each element of
this GeoQuerySet; otherwise sets the attribute to None.
"""
return self._spatial_attribute('num_points', {}, **kwargs)
def perimeter(self, **kwargs):
"""
Returns the perimeter of the geometry field as a `Distance` object
stored in a `perimeter` attribute on each element of this GeoQuerySet.
"""
return self._distance_attribute('perimeter', None, **kwargs)
def point_on_surface(self, **kwargs):
"""
Returns a Point geometry guaranteed to lie on the surface of the
Geometry field in a `point_on_surface` attribute on each element
of this GeoQuerySet; otherwise sets the attribute to None.
"""
return self._geom_attribute('point_on_surface', **kwargs)
def scale(self, x, y, z=0.0, **kwargs):
"""
Scales the geometry to a new size by multiplying the ordinates
with the given x,y,z scale factors.
"""
if SpatialBackend.spatialite:
if z != 0.0:
raise NotImplementedError('SpatiaLite does not support 3D scaling.')
s = {'procedure_fmt' : '%(geo_col)s,%(x)s,%(y)s',
'procedure_args' : {'x' : x, 'y' : y},
'select_field' : GeomField(),
}
else:
s = {'procedure_fmt' : '%(geo_col)s,%(x)s,%(y)s,%(z)s',
'procedure_args' : {'x' : x, 'y' : y, 'z' : z},
'select_field' : GeomField(),
}
return self._spatial_attribute('scale', s, **kwargs)
def snap_to_grid(self, *args, **kwargs):
"""
Snap all points of the input geometry to the grid. How the
geometry is snapped to the grid depends on how many arguments
were given:
- 1 argument : A single size to snap both the X and Y grids to.
- 2 arguments: X and Y sizes to snap the grid to.
- 4 arguments: X, Y sizes and the X, Y origins.
"""
if False in [isinstance(arg, (float, int, long)) for arg in args]:
raise TypeError('Size argument(s) for the grid must be float or integer values.')
nargs = len(args)
if nargs == 1:
size = args[0]
procedure_fmt = '%(geo_col)s,%(size)s'
procedure_args = {'size' : size}
elif nargs == 2:
xsize, ysize = args
procedure_fmt = '%(geo_col)s,%(xsize)s,%(ysize)s'
procedure_args = {'xsize' : xsize, 'ysize' : ysize}
elif nargs == 4:
xsize, ysize, xorigin, yorigin = args
procedure_fmt = '%(geo_col)s,%(xorigin)s,%(yorigin)s,%(xsize)s,%(ysize)s'
procedure_args = {'xsize' : xsize, 'ysize' : ysize,
'xorigin' : xorigin, 'yorigin' : yorigin}
else:
raise ValueError('Must provide 1, 2, or 4 arguments to `snap_to_grid`.')
s = {'procedure_fmt' : procedure_fmt,
'procedure_args' : procedure_args,
'select_field' : GeomField(),
}
return self._spatial_attribute('snap_to_grid', s, **kwargs)
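# Hypothetical usage of the three accepted signatures (queryset name
# and grid sizes are made up):
#
#   qs.snap_to_grid(0.01)                  # one size for both X and Y
#   qs.snap_to_grid(0.01, 0.005)           # separate X and Y sizes
#   qs.snap_to_grid(0.01, 0.005, 10, 20)   # sizes plus X/Y origins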
def svg(self, relative=False, precision=8, **kwargs):
"""
Returns SVG representation of the geographic field in a `svg`
attribute on each element of this GeoQuerySet.
Keyword Arguments:
`relative` => If set to True, this will evaluate the path in
terms of relative moves (rather than absolute).
`precision` => May be used to set the maximum number of decimal
digits used in output (defaults to 8).
"""
relative = int(bool(relative))
if not isinstance(precision, (int, long)):
raise TypeError('SVG precision keyword argument must be an integer.')
s = {'desc' : 'SVG',
'procedure_fmt' : '%(geo_col)s,%(rel)s,%(precision)s',
'procedure_args' : {'rel' : relative,
'precision' : precision,
}
}
return self._spatial_attribute('svg', s, **kwargs)
def sym_difference(self, geom, **kwargs):
"""
Returns the symmetric difference of the geographic field in a
`sym_difference` attribute on each element of this GeoQuerySet.
"""
return self._geomset_attribute('sym_difference', geom, **kwargs)
def translate(self, x, y, z=0.0, **kwargs):
"""
Translates the geometry to a new location using the given numeric
parameters as offsets.
"""
if SpatialBackend.spatialite:
if z != 0.0:
raise NotImplementedError('SpatiaLite does not support 3D translation.')
s = {'procedure_fmt' : '%(geo_col)s,%(x)s,%(y)s',
'procedure_args' : {'x' : x, 'y' : y},
'select_field' : GeomField(),
}
else:
s = {'procedure_fmt' : '%(geo_col)s,%(x)s,%(y)s,%(z)s',
'procedure_args' : {'x' : x, 'y' : y, 'z' : z},
'select_field' : GeomField(),
}
return self._spatial_attribute('translate', s, **kwargs)
def transform(self, srid=4326, **kwargs):
"""
Transforms the given geometry field to the given SRID. If no SRID is
provided, the transformation will default to using 4326 (WGS84).
"""
if not isinstance(srid, (int, long)):
raise TypeError('An integer SRID must be provided.')
field_name = kwargs.get('field_name', None)
tmp, geo_field = self._spatial_setup('transform', field_name=field_name)
# Getting the selection SQL for the given geographic field.
field_col = self._geocol_select(geo_field, field_name)
# Why cascading substitutions? Because spatial backends like
# Oracle and MySQL already require a function call to convert to text, thus
# when there's also a transformation we need to cascade the substitutions.
# For example, 'SDO_UTIL.TO_WKTGEOMETRY(SDO_CS.TRANSFORM( ... )'
geo_col = self.query.custom_select.get(geo_field, field_col)
# Setting the key for the field's column with the custom SELECT SQL to
# override the geometry column returned from the database.
custom_sel = '%s(%s, %s)' % (SpatialBackend.transform, geo_col, srid)
# TODO: Should we have this as an alias?
# custom_sel = '(%s(%s, %s)) AS %s' % (SpatialBackend.transform, geo_col, srid, qn(geo_field.name))
self.query.transformed_srid = srid # So other GeoQuerySet methods
self.query.custom_select[geo_field] = custom_sel
return self._clone()
def union(self, geom, **kwargs):
"""
Returns the union of the geographic field with the given
Geometry in a `union` attribute on each element of this GeoQuerySet.
"""
return self._geomset_attribute('union', geom, **kwargs)
def unionagg(self, **kwargs):
"""
Performs an aggregate union on the given geometry field. Returns
None if the GeoQuerySet is empty. The `tolerance` keyword is for
Oracle backends only.
"""
return self._spatial_aggregate(aggregates.Union, **kwargs)
### Private API -- Abstracted DRY routines. ###
def _spatial_setup(self, att, desc=None, field_name=None, geo_field_type=None):
"""
Performs setup for executing the spatial function.
"""
# Does the spatial backend support this?
func = getattr(SpatialBackend, att, False)
if desc is None: desc = att
if not func: raise ImproperlyConfigured('%s stored procedure not available.' % desc)
# Initializing the procedure arguments.
procedure_args = {'function' : func}
# Is there a geographic field in the model to perform this
# operation on?
geo_field = self.query._geo_field(field_name)
if not geo_field:
raise TypeError('%s output only available on GeometryFields.' % func)
# If the `geo_field_type` keyword was used, then enforce that
# type limitation.
if not geo_field_type is None and not isinstance(geo_field, geo_field_type):
raise TypeError('"%s" stored procedures may only be called on %ss.' % (func, geo_field_type.__name__))
# Setting the procedure args.
procedure_args['geo_col'] = self._geocol_select(geo_field, field_name)
return procedure_args, geo_field
def _spatial_aggregate(self, aggregate, field_name=None,
geo_field_type=None, tolerance=0.05):
"""
DRY routine for calling aggregate spatial stored procedures and
returning their result to the caller of the function.
"""
# Getting the field the geographic aggregate will be called on.
geo_field = self.query._geo_field(field_name)
if not geo_field:
raise TypeError('%s aggregate only available on GeometryFields.' % aggregate.name)
# Checking if there are any geo field type limitations on this
# aggregate (e.g. ST_Makeline only operates on PointFields).
if not geo_field_type is None and not isinstance(geo_field, geo_field_type):
raise TypeError('%s aggregate may only be called on %ss.' % (aggregate.name, geo_field_type.__name__))
# Getting the string expression of the field name, as this is the
# argument taken by `Aggregate` objects.
agg_col = field_name or geo_field.name
# Adding any keyword parameters for the Aggregate object. Oracle backends
# in particular need an additional `tolerance` parameter.
agg_kwargs = {}
if SpatialBackend.oracle: agg_kwargs['tolerance'] = tolerance
# Calling the QuerySet.aggregate, and returning only the value of the aggregate.
return self.aggregate(geoagg=aggregate(agg_col, **agg_kwargs))['geoagg']
def _spatial_attribute(self, att, settings, field_name=None, model_att=None):
"""
DRY routine for calling a spatial stored procedure on a geometry column
and attaching its output as an attribute of the model.
Arguments:
att:
The name of the spatial attribute that holds the spatial
SQL function to call.
settings:
Dictionary of internal settings to customize for the spatial procedure.
Public Keyword Arguments:
field_name:
The name of the geographic field to call the spatial
function on. May also be a lookup to a geometry field
as part of a foreign key relation.
model_att:
The name of the model attribute to attach the output of
the spatial function to.
"""
# Default settings.
settings.setdefault('desc', None)
settings.setdefault('geom_args', ())
settings.setdefault('geom_field', None)
settings.setdefault('procedure_args', {})
settings.setdefault('procedure_fmt', '%(geo_col)s')
settings.setdefault('select_params', [])
# Performing setup for the spatial column, unless told not to.
if settings.get('setup', True):
default_args, geo_field = self._spatial_setup(att, desc=settings['desc'], field_name=field_name)
for k, v in default_args.iteritems(): settings['procedure_args'].setdefault(k, v)
else:
geo_field = settings['geo_field']
# The attribute to attach to the model.
if not isinstance(model_att, basestring): model_att = att
# Special handling for any argument that is a geometry.
for name in settings['geom_args']:
# Using the field's get_db_prep_lookup() to get any needed
# transformation SQL -- we pass in a 'dummy' `contains` lookup.
where, params = geo_field.get_db_prep_lookup('contains', settings['procedure_args'][name])
# Replacing the procedure format with that of any needed
# transformation SQL.
old_fmt = '%%(%s)s' % name
new_fmt = where[0] % '%%s'
settings['procedure_fmt'] = settings['procedure_fmt'].replace(old_fmt, new_fmt)
settings['select_params'].extend(params)
# Getting the format for the stored procedure.
fmt = '%%(function)s(%s)' % settings['procedure_fmt']
# If the result of this function needs to be converted.
if settings.get('select_field', False):
sel_fld = settings['select_field']
if isinstance(sel_fld, GeomField) and SpatialBackend.select:
self.query.custom_select[model_att] = SpatialBackend.select
self.query.extra_select_fields[model_att] = sel_fld
# Finally, setting the extra selection attribute with
# the format string expanded with the stored procedure
# arguments.
return self.extra(select={model_att : fmt % settings['procedure_args']},
select_params=settings['select_params'])
def _distance_attribute(self, func, geom=None, tolerance=0.05, spheroid=False, **kwargs):
"""
DRY routine for GeoQuerySet distance attribute routines.
"""
# Setting up the distance procedure arguments.
procedure_args, geo_field = self._spatial_setup(func, field_name=kwargs.get('field_name', None))
# If geodetic defaulting distance attribute to meters (Oracle and
# PostGIS spherical distances return meters). Otherwise, use the
# units of the geometry field.
if geo_field.geodetic:
dist_att = 'm'
else:
dist_att = Distance.unit_attname(geo_field.units_name)
# Shortcut booleans for what distance function we're using.
distance = func == 'distance'
length = func == 'length'
perimeter = func == 'perimeter'
if not (distance or length or perimeter):
raise ValueError('Unknown distance function: %s' % func)
# The field's get_db_prep_lookup() is used to get any
# extra distance parameters. Here we set up the
# parameters that will be passed in to field's function.
lookup_params = [geom or 'POINT (0 0)', 0]
# If the spheroid calculation is desired, either by the `spheroid`
# keyword or when calculating the length of geodetic field, make
# sure the 'spheroid' distance setting string is passed in so we
# get the correct spatial stored procedure.
if spheroid or (SpatialBackend.postgis and geo_field.geodetic and length):
lookup_params.append('spheroid')
where, params = geo_field.get_db_prep_lookup('distance_lte', lookup_params)
# The `geom_args` flag is set to true if a geometry parameter was
# passed in.
geom_args = bool(geom)
if SpatialBackend.oracle:
if distance:
procedure_fmt = '%(geo_col)s,%(geom)s,%(tolerance)s'
elif length or perimeter:
procedure_fmt = '%(geo_col)s,%(tolerance)s'
procedure_args['tolerance'] = tolerance
else:
# Getting whether this field is in units of degrees since the field may have
# been transformed via the `transform` GeoQuerySet method.
if self.query.transformed_srid:
u, unit_name, s = get_srid_info(self.query.transformed_srid)
geodetic = unit_name in geo_field.geodetic_units
else:
geodetic = geo_field.geodetic
if SpatialBackend.spatialite and geodetic:
raise ValueError('SQLite does not support linear distance calculations on geodetic coordinate systems.')
if distance:
if self.query.transformed_srid:
# Setting the `geom_args` flag to false because we want to handle
# transformation SQL here, rather than the way done by default
# (which will transform to the original SRID of the field rather
# than to what was transformed to).
geom_args = False
procedure_fmt = '%s(%%(geo_col)s, %s)' % (SpatialBackend.transform, self.query.transformed_srid)
if geom.srid is None or geom.srid == self.query.transformed_srid:
# If the geom parameter srid is None, it is assumed the coordinates
# are in the transformed units. A placeholder is used for the
# geometry parameter. `GeomFromText` constructor is also needed
# to wrap geom placeholder for SpatiaLite.
if SpatialBackend.spatialite:
procedure_fmt += ', %s(%%%%s, %s)' % (SpatialBackend.from_text, self.query.transformed_srid)
else:
procedure_fmt += ', %%s'
else:
# We need to transform the geom to the srid specified in `transform()`,
# so wrapping the geometry placeholder in transformation SQL.
# SpatiaLite also needs geometry placeholder wrapped in `GeomFromText`
# constructor.
if SpatialBackend.spatialite:
procedure_fmt += ', %s(%s(%%%%s, %s), %s)' % (SpatialBackend.transform, SpatialBackend.from_text,
geom.srid, self.query.transformed_srid)
else:
procedure_fmt += ', %s(%%%%s, %s)' % (SpatialBackend.transform, self.query.transformed_srid)
else:
# `transform()` was not used on this GeoQuerySet.
procedure_fmt = '%(geo_col)s,%(geom)s'
if geodetic:
# Spherical distance calculation is needed (because the geographic
# field is geodetic). However, the PostGIS ST_distance_sphere/spheroid()
                    # procedures may only do queries from point columns to point
                    # geometries, so some error checking is required.
if not isinstance(geo_field, PointField):
raise ValueError('Spherical distance calculation only supported on PointFields.')
                    if str(SpatialBackend.Geometry(buffer(params[0].wkb)).geom_type) != 'Point':
raise ValueError('Spherical distance calculation only supported with Point Geometry parameters')
# The `function` procedure argument needs to be set differently for
# geodetic distance calculations.
if spheroid:
# Call to distance_spheroid() requires spheroid param as well.
procedure_fmt += ',%(spheroid)s'
procedure_args.update({'function' : SpatialBackend.distance_spheroid, 'spheroid' : where[1]})
else:
procedure_args.update({'function' : SpatialBackend.distance_sphere})
elif length or perimeter:
procedure_fmt = '%(geo_col)s'
if geodetic and length:
# There's no `length_sphere`
procedure_fmt += ',%(spheroid)s'
procedure_args.update({'function' : SpatialBackend.length_spheroid, 'spheroid' : where[1]})
# Setting up the settings for `_spatial_attribute`.
s = {'select_field' : DistanceField(dist_att),
'setup' : False,
'geo_field' : geo_field,
'procedure_args' : procedure_args,
'procedure_fmt' : procedure_fmt,
}
if geom_args:
s['geom_args'] = ('geom',)
s['procedure_args']['geom'] = geom
elif geom:
# The geometry is passed in as a parameter because we handled
# transformation conditions in this routine.
s['select_params'] = [SpatialBackend.Adaptor(geom)]
return self._spatial_attribute(func, s, **kwargs)
def _geom_attribute(self, func, tolerance=0.05, **kwargs):
"""
DRY routine for setting up a GeoQuerySet method that attaches a
Geometry attribute (e.g., `centroid`, `point_on_surface`).
"""
s = {'select_field' : GeomField(),}
if SpatialBackend.oracle:
s['procedure_fmt'] = '%(geo_col)s,%(tolerance)s'
s['procedure_args'] = {'tolerance' : tolerance}
return self._spatial_attribute(func, s, **kwargs)
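    # Illustrative sketch (not part of the original source): a
    # geometry-returning method such as `centroid` would typically
    # delegate to the helper above, e.g.
    #
    #     def centroid(self, tolerance=0.05, **kwargs):
    #         return self._geom_attribute('centroid',
    #                                     tolerance=tolerance, **kwargs)
    #
    # attaching a `centroid` attribute to each model instance returned by
    # the queryset; the SQL function name is resolved by the backend.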
def _geomset_attribute(self, func, geom, tolerance=0.05, **kwargs):
"""
DRY routine for setting up a GeoQuerySet method that attaches a
        Geometry attribute and takes a Geometry parameter. This is used
for geometry set-like operations (e.g., intersection, difference,
union, sym_difference).
"""
s = {'geom_args' : ('geom',),
'select_field' : GeomField(),
'procedure_fmt' : '%(geo_col)s,%(geom)s',
'procedure_args' : {'geom' : geom},
}
if SpatialBackend.oracle:
s['procedure_fmt'] += ',%(tolerance)s'
s['procedure_args']['tolerance'] = tolerance
return self._spatial_attribute(func, s, **kwargs)
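    # Illustrative sketch (not part of the original source): set-like
    # operations are assumed to be defined via the helper above, e.g.
    #
    #     def intersection(self, geom, tolerance=0.05, **kwargs):
    #         return self._geomset_attribute('intersection', geom,
    #                                        tolerance=tolerance, **kwargs)
    #
    # so `qs.intersection(poly)` attaches each geometry's intersection
    # with `poly` as an attribute on the returned models.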
def _geocol_select(self, geo_field, field_name):
"""
Helper routine for constructing the SQL to select the geographic
column. Takes into account if the geographic field is in a
ForeignKey relation to the current model.
"""
opts = self.model._meta
        if geo_field not in opts.fields:
# Is this operation going to be on a related geographic field?
# If so, it'll have to be added to the select related information
# (e.g., if 'location__point' was given as the field name).
self.query.add_select_related([field_name])
self.query.pre_sql_setup()
rel_table, rel_col = self.query.related_select_cols[self.query.related_select_fields.index(geo_field)]
return self.query._field_column(geo_field, rel_table)
        elif geo_field not in opts.local_fields:
# This geographic field is inherited from another model, so we have to
# use the db table for the _parent_ model instead.
tmp_fld, parent_model, direct, m2m = opts.get_field_by_name(geo_field.name)
return self.query._field_column(geo_field, parent_model._meta.db_table)
else:
return self.query._field_column(geo_field)
class GeoValuesQuerySet(ValuesQuerySet):
def __init__(self, *args, **kwargs):
super(GeoValuesQuerySet, self).__init__(*args, **kwargs)
# This flag tells `resolve_columns` to run the values through
# `convert_values`. This ensures that Geometry objects instead
# of string values are returned with `values()` or `values_list()`.
self.query.geo_values = True
class GeoValuesListQuerySet(GeoValuesQuerySet, ValuesListQuerySet):
pass
|
shivam1111/odoo
|
refs/heads/8.0
|
addons/payment_buckaroo/controllers/main.py
|
325
|
# -*- coding: utf-8 -*-
try:
import simplejson as json
except ImportError:
import json
import logging
import pprint
import werkzeug
from openerp import http, SUPERUSER_ID
from openerp.http import request
_logger = logging.getLogger(__name__)
class BuckarooController(http.Controller):
_return_url = '/payment/buckaroo/return'
_cancel_url = '/payment/buckaroo/cancel'
_exception_url = '/payment/buckaroo/error'
_reject_url = '/payment/buckaroo/reject'
@http.route([
'/payment/buckaroo/return',
'/payment/buckaroo/cancel',
'/payment/buckaroo/error',
'/payment/buckaroo/reject',
], type='http', auth='none')
def buckaroo_return(self, **post):
""" Buckaroo."""
_logger.info('Buckaroo: entering form_feedback with post data %s', pprint.pformat(post)) # debug
request.registry['payment.transaction'].form_feedback(request.cr, SUPERUSER_ID, post, 'buckaroo', context=request.context)
return_url = post.pop('return_url', '')
if not return_url:
            data = post.pop('ADD_RETURNDATA', '{}').replace("'", "\"")
custom = json.loads(data)
return_url = custom.pop('return_url', '/')
return werkzeug.utils.redirect(return_url)
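# Illustrative note (assumption, not part of the original controller): the
# feedback POST from Buckaroo carries transaction status fields plus the
# ADD_RETURNDATA blob set when the payment was initiated, e.g.
# (hypothetical values)
#
#   post = {'brq_transactions': 'ABC123', 'brq_statuscode': '190',
#           'ADD_RETURNDATA': "{'return_url': '/shop/confirmation'}"}
#
# which is why the handler above swaps single quotes for double quotes
# before JSON-decoding ADD_RETURNDATA to recover the return_url.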
|
stormi/tsunami
|
refs/heads/master
|
src/test/__init__.py
|
12133432
| |
Quikling/gpdb
|
refs/heads/master
|
gpMgmt/test/behave_utils/gpfdist_utils/__init__.py
|
12133432
| |
TechBK/horizon-dev
|
refs/heads/master
|
openstack_dashboard/dashboards/admin/volumes/volumes/__init__.py
|
12133432
| |
hyperized/ansible
|
refs/heads/devel
|
lib/ansible/plugins/action/group_by.py
|
122
|
# Copyright 2012, Jeroen Hoekx <jeroen@hoekx.be>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.action import ActionBase
from ansible.module_utils.six import string_types
class ActionModule(ActionBase):
''' Create inventory groups based on variables '''
# We need to be able to modify the inventory
TRANSFERS_FILES = False
_VALID_ARGS = frozenset(('key', 'parents'))
def run(self, tmp=None, task_vars=None):
if task_vars is None:
task_vars = dict()
result = super(ActionModule, self).run(tmp, task_vars)
del tmp # tmp no longer has any effect
if 'key' not in self._task.args:
result['failed'] = True
result['msg'] = "the 'key' param is required when using group_by"
return result
group_name = self._task.args.get('key')
parent_groups = self._task.args.get('parents', ['all'])
if isinstance(parent_groups, string_types):
parent_groups = [parent_groups]
result['changed'] = False
result['add_group'] = group_name.replace(' ', '-')
result['parent_groups'] = [name.replace(' ', '-') for name in parent_groups]
return result
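# Illustrative usage (sketch, not part of this plugin): a task such as
#
#   - name: group hosts by distribution
#     group_by:
#       key: "os_{{ ansible_distribution }}"
#       parents: [ managed ]
#
# would make run() above return roughly
#
#   {'changed': False, 'add_group': 'os_Fedora',
#    'parent_groups': ['managed']}
#
# with any spaces replaced by dashes; the actual group creation happens
# later in the strategy/inventory layer, not in this action plugin.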
|
facebook/sparts
|
refs/heads/master
|
sparts/__init__.py
|
3
|
# Copyright (c) 2014, Facebook, Inc. All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
#
__version__ = '0.7.3'
|
vinhlh/bite-project
|
refs/heads/master
|
deps/gdata-python-client/samples/apps/marketplace_sample/domain_mgmt_app.py
|
22
|
#!/usr/bin/python2.4
#
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Apps marketplace sample app.
Demonstrates how to use provisioning data in marketplace apps.
"""
__author__ = 'Gunjan Sharma <gunjansharma@google.com>'
import logging
import os
import re
import urllib
from urlparse import urlparse
from django.utils import simplejson as json
from google.appengine.api import users
from google.appengine.ext import webapp
from google.appengine.ext.webapp import template
from google.appengine.ext.webapp import util
from appengine_utilities.sessions import Session
from gdata.apps.client import AppsClient
from gdata.apps.groups.client import GroupsProvisioningClient
from gdata.apps.organization.client import OrganizationUnitProvisioningClient
import gdata.gauth
CONSUMER_KEY = '965697648820.apps.googleusercontent.com'
CONSUMER_SECRET = '3GBNP4EJykV7wq8tuN0LTFLr'
class TwoLeggedOauthTokenGenerator(webapp.RequestHandler):
def Get2loToken(self):
user = users.get_current_user()
return gdata.gauth.TwoLeggedOAuthHmacToken(
CONSUMER_KEY, CONSUMER_SECRET, user.email())
class MainHandler(TwoLeggedOauthTokenGenerator):
"""Handles initial get request and post request to '/' URL."""
def get(self):
"""Handels the get request for the MainHandler.
It checks if a the user is logged in and also that he belogs to the domain,
if not redirects it to the login page else to the index.html page.
"""
domain = self.request.get('domain')
if not domain:
self.response.out.write(
'Missing required params. To use the app start with following URL: '
'http://domain-mgmt.appspot.com?from=google&domain=yourdomain.com')
return
user = users.get_current_user()
if user and self.CheckEmail(user):
logging.debug('logged in user: %s', user.email())
session = Session()
session['domain'] = domain
else:
      self.redirect('/_ah/login_required?' +
                    urllib.urlencode((self.request.str_params)))
      return
path = os.path.join(os.path.dirname(__file__), 'templates/index.html')
self.response.out.write(template.render(path, {}))
def CheckEmail(self, user):
"""Performs basic validation of the supplied email address.
Args:
user: A User object corresponding to logged in user.
Returns:
True if user is valid, False otherwise.
"""
    domain = urlparse(user.federated_identity()).hostname
    return bool(re.search('.*@' + re.escape(domain), user.email()))
def post(self):
"""Handels the get request for the MainHandler.
Retrieves a list of all of the domain's users and sends it
to the Client as a JSON object.
"""
users_list = []
session = Session()
domain = session['domain']
client = AppsClient(domain=domain)
client.auth_token = self.Get2loToken()
client.ssl = True
feed = client.RetrieveAllUsers()
for entry in feed.entry:
users_list.append(entry.login.user_name)
self.response.out.write(json.dumps(users_list))
class UserDetailsHandler(TwoLeggedOauthTokenGenerator):
"""Handles get request to '/getdetails' URL."""
def get(self, username):
"""Handels the get request for the UserDetailsHandler.
Sends groups, organization unit and nicknames for the user
in a JSON object.
Args:
username: A string denoting the user's username.
"""
session = Session()
domain = session['domain']
    if not domain:
      self.redirect('/')
      return
details = {}
details['groups'] = self.GetGroups(domain, username)
details['orgunit'] = self.GetOrgunit(domain, username)
details['nicknames'] = self.GetNicknames(domain, username)
data = json.dumps(details)
logging.debug('Sending data...')
logging.debug(data)
self.response.out.write(data)
logging.debug('Data sent successfully')
def GetGroups(self, domain, username):
"""Retrieves a list of groups for the given user.
Args:
domain: A string determining the user's domain.
username: A string denoting the user's username.
Returns:
A list of dicts of groups with their name and ID if successful.
      Otherwise a list with a single dict entry containing an error message.
"""
try:
groups_client = GroupsProvisioningClient(domain=domain)
groups_client.auth_token = self.Get2loToken()
groups_client.ssl = True
feed = groups_client.RetrieveGroups(username, True)
groups = []
for entry in feed.entry:
group = {}
group['name'] = entry.group_name
group['id'] = entry.group_id
groups.append(group)
return groups
    except Exception:
      return [{'name': 'An error occurred while retrieving Groups for the user',
               'id': 'An error occurred while retrieving Groups for the user'}]
def GetOrgunit(self, domain, username):
"""Retrieves the Org Unit corresponding to the user.
Args:
domain: A string determining the user's domain.
username: A string denoting the user's username.
Returns:
A dict of orgunit having its name and path if successful.
      Otherwise a dict containing an error message.
"""
try:
ouclient = OrganizationUnitProvisioningClient(domain=domain)
ouclient.auth_token = self.Get2loToken()
ouclient.ssl = True
customer_id = ouclient.RetrieveCustomerId().customer_id
entry = ouclient.RetrieveOrgUser(customer_id, username + '@' + domain)
oupath = entry.org_unit_path
orgunit = {}
if not oupath:
orgunit['name'] = 'MAIN ORG UNIT'
orgunit['path'] = '/'
return orgunit
entry = ouclient.RetrieveOrgUnit(customer_id, oupath)
orgunit['name'] = entry.org_unit_name
orgunit['path'] = entry.org_unit_path
return orgunit
    except Exception:
      return {'name': 'An error occurred while retrieving OrgUnit for the user.',
              'path': 'An error occurred while retrieving OrgUnit for the user.'}
def GetNicknames(self, domain, username):
"""Retrieves the list of all the nicknames for the user.
Args:
domain: A string determining the user's domain.
username: A string denoting the user's username.
Returns:
A list of user's nicknames if successful.
      Otherwise a list with a single entry containing an error message.
"""
try:
client = AppsClient(domain=domain)
client.auth_token = self.Get2loToken()
client.ssl = True
feed = client.RetrieveNicknames(username)
nicknames = []
for entry in feed.entry:
nicknames.append(entry.nickname.name)
return nicknames
    except Exception:
      return ['An error occurred while retrieving Nicknames for the user.']
class OpenIDHandler(webapp.RequestHandler):
def get(self):
"""Begins the OpenID flow for the supplied domain."""
domain = self.request.get('domain')
self.redirect(users.create_login_url(
dest_url='https://domain-mgmt.appspot.com?domain=' + domain,
_auth_domain=None,
federated_identity=domain))
def main():
application = webapp.WSGIApplication([('/', MainHandler),
('/getdetails/(.*)',
UserDetailsHandler),
('/_ah/login_required', OpenIDHandler)],
debug=True)
util.run_wsgi_app(application)
if __name__ == '__main__':
main()
|
lucashmorais/x-Bench
|
refs/heads/master
|
mozmill-env/python/Lib/site-packages/mercurial/hg.py
|
90
|
# hg.py - repository classes for mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from i18n import _
from lock import release
from node import hex, nullid
import localrepo, bundlerepo, unionrepo, httppeer, sshpeer, statichttprepo
import bookmarks, lock, util, extensions, error, node, scmutil, phases, url
import cmdutil, discovery
import merge as mergemod
import verify as verifymod
import errno, os, shutil
def _local(path):
path = util.expandpath(util.urllocalpath(path))
return (os.path.isfile(path) and bundlerepo or localrepo)
def addbranchrevs(lrepo, other, branches, revs):
peer = other.peer() # a courtesy to callers using a localrepo for other
hashbranch, branches = branches
if not hashbranch and not branches:
return revs or None, revs and revs[0] or None
revs = revs and list(revs) or []
if not peer.capable('branchmap'):
if branches:
raise util.Abort(_("remote branch lookup not supported"))
revs.append(hashbranch)
return revs, revs[0]
branchmap = peer.branchmap()
def primary(branch):
if branch == '.':
if not lrepo:
raise util.Abort(_("dirstate branch not accessible"))
branch = lrepo.dirstate.branch()
if branch in branchmap:
revs.extend(node.hex(r) for r in reversed(branchmap[branch]))
return True
else:
return False
for branch in branches:
if not primary(branch):
raise error.RepoLookupError(_("unknown branch '%s'") % branch)
if hashbranch:
if not primary(hashbranch):
revs.append(hashbranch)
return revs, revs[0]
def parseurl(path, branches=None):
'''parse url#branch, returning (url, (branch, branches))'''
u = util.url(path)
branch = None
if u.fragment:
branch = u.fragment
u.fragment = None
return str(u), (branch, branches or [])
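# For example (illustrative):
#   parseurl('http://example.com/repo#stable')
# returns
#   ('http://example.com/repo', ('stable', []))
# i.e. the URL fragment, if present, names the branch to use by default.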
schemes = {
'bundle': bundlerepo,
'union': unionrepo,
'file': _local,
'http': httppeer,
'https': httppeer,
'ssh': sshpeer,
'static-http': statichttprepo,
}
def _peerlookup(path):
u = util.url(path)
scheme = u.scheme or 'file'
thing = schemes.get(scheme) or schemes['file']
try:
return thing(path)
except TypeError:
return thing
def islocal(repo):
'''return true if repo or path is local'''
if isinstance(repo, str):
try:
return _peerlookup(repo).islocal(repo)
except AttributeError:
return False
return repo.local()
def openpath(ui, path):
'''open path with open if local, url.open if remote'''
if islocal(path):
return util.posixfile(util.urllocalpath(path), 'rb')
else:
return url.open(ui, path)
def _peerorrepo(ui, path, create=False):
"""return a repository object for the specified path"""
obj = _peerlookup(path).instance(ui, path, create)
ui = getattr(obj, "ui", ui)
for name, module in extensions.extensions():
hook = getattr(module, 'reposetup', None)
if hook:
hook(ui, obj)
return obj
def repository(ui, path='', create=False):
"""return a repository object for the specified path"""
peer = _peerorrepo(ui, path, create)
repo = peer.local()
if not repo:
raise util.Abort(_("repository '%s' is not local") %
(path or peer.url()))
return repo.filtered('visible')
def peer(uiorrepo, opts, path, create=False):
'''return a repository peer for the specified path'''
rui = remoteui(uiorrepo, opts)
return _peerorrepo(rui, path, create).peer()
def defaultdest(source):
'''return default destination of clone if none is given'''
return os.path.basename(os.path.normpath(util.url(source).path or ''))
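# For example (illustrative), defaultdest('https://example.com/hg/myrepo/')
# returns 'myrepo': the base name of the normalized URL path.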
def share(ui, source, dest=None, update=True):
'''create a shared repository'''
if not islocal(source):
raise util.Abort(_('can only share local repositories'))
if not dest:
dest = defaultdest(source)
else:
dest = ui.expandpath(dest)
if isinstance(source, str):
origsource = ui.expandpath(source)
source, branches = parseurl(origsource)
srcrepo = repository(ui, source)
rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None)
else:
srcrepo = source.local()
origsource = source = srcrepo.url()
checkout = None
sharedpath = srcrepo.sharedpath # if our source is already sharing
root = os.path.realpath(dest)
roothg = os.path.join(root, '.hg')
if os.path.exists(roothg):
raise util.Abort(_('destination already exists'))
if not os.path.isdir(root):
os.mkdir(root)
util.makedir(roothg, notindexed=True)
requirements = ''
try:
requirements = srcrepo.opener.read('requires')
except IOError, inst:
if inst.errno != errno.ENOENT:
raise
requirements += 'shared\n'
util.writefile(os.path.join(roothg, 'requires'), requirements)
util.writefile(os.path.join(roothg, 'sharedpath'), sharedpath)
r = repository(ui, root)
default = srcrepo.ui.config('paths', 'default')
if default:
fp = r.opener("hgrc", "w", text=True)
fp.write("[paths]\n")
fp.write("default = %s\n" % default)
fp.close()
if update:
r.ui.status(_("updating working directory\n"))
if update is not True:
checkout = update
for test in (checkout, 'default', 'tip'):
if test is None:
continue
try:
uprev = r.lookup(test)
break
except error.RepoLookupError:
continue
_update(r, uprev)
def copystore(ui, srcrepo, destpath):
'''copy files from store of srcrepo in destpath
returns destlock
'''
destlock = None
try:
hardlink = None
num = 0
srcpublishing = srcrepo.ui.configbool('phases', 'publish', True)
for f in srcrepo.store.copylist():
if srcpublishing and f.endswith('phaseroots'):
continue
src = os.path.join(srcrepo.sharedpath, f)
dst = os.path.join(destpath, f)
dstbase = os.path.dirname(dst)
if dstbase and not os.path.exists(dstbase):
os.mkdir(dstbase)
if os.path.exists(src):
if dst.endswith('data'):
# lock to avoid premature writing to the target
destlock = lock.lock(os.path.join(dstbase, "lock"))
hardlink, n = util.copyfiles(src, dst, hardlink)
num += n
if hardlink:
ui.debug("linked %d files\n" % num)
else:
ui.debug("copied %d files\n" % num)
return destlock
except: # re-raises
release(destlock)
raise
def clone(ui, peeropts, source, dest=None, pull=False, rev=None,
update=True, stream=False, branch=None):
"""Make a copy of an existing repository.
Create a copy of an existing repository in a new directory. The
source and destination are URLs, as passed to the repository
function. Returns a pair of repository peers, the source and
newly created destination.
The location of the source is added to the new repository's
.hg/hgrc file, as the default to be used for future pulls and
pushes.
If an exception is raised, the partly cloned/updated destination
repository will be deleted.
Arguments:
source: repository object or URL
dest: URL of destination repository to create (defaults to base
name of source repository)
pull: always pull from source repository, even in local case
stream: stream raw data uncompressed from repository (fast over
LAN, slow over WAN)
rev: revision to clone up to (implies pull=True)
update: update working directory after clone completes, if
destination is local repository (True means update to default rev,
anything else is treated as a revision)
branch: branches to clone
"""
if isinstance(source, str):
origsource = ui.expandpath(source)
source, branch = parseurl(origsource, branch)
srcpeer = peer(ui, peeropts, source)
else:
srcpeer = source.peer() # in case we were called with a localrepo
branch = (None, branch or [])
origsource = source = srcpeer.url()
rev, checkout = addbranchrevs(srcpeer, srcpeer, branch, rev)
if dest is None:
dest = defaultdest(source)
ui.status(_("destination directory: %s\n") % dest)
else:
dest = ui.expandpath(dest)
dest = util.urllocalpath(dest)
source = util.urllocalpath(source)
if not dest:
raise util.Abort(_("empty destination path is not valid"))
if os.path.exists(dest):
if not os.path.isdir(dest):
raise util.Abort(_("destination '%s' already exists") % dest)
elif os.listdir(dest):
raise util.Abort(_("destination '%s' is not empty") % dest)
srclock = destlock = cleandir = None
srcrepo = srcpeer.local()
try:
abspath = origsource
if islocal(origsource):
abspath = os.path.abspath(util.urllocalpath(origsource))
if islocal(dest):
cleandir = dest
copy = False
if (srcrepo and srcrepo.cancopy() and islocal(dest)
and not phases.hassecret(srcrepo)):
copy = not pull and not rev
if copy:
try:
# we use a lock here because if we race with commit, we
# can end up with extra data in the cloned revlogs that's
# not pointed to by changesets, thus causing verify to
# fail
srclock = srcrepo.lock(wait=False)
except error.LockError:
copy = False
if copy:
srcrepo.hook('preoutgoing', throw=True, source='clone')
hgdir = os.path.realpath(os.path.join(dest, ".hg"))
if not os.path.exists(dest):
os.mkdir(dest)
else:
# only clean up directories we create ourselves
cleandir = hgdir
try:
destpath = hgdir
util.makedir(destpath, notindexed=True)
except OSError, inst:
if inst.errno == errno.EEXIST:
cleandir = None
raise util.Abort(_("destination '%s' already exists")
% dest)
raise
destlock = copystore(ui, srcrepo, destpath)
# Recomputing branch cache might be slow on big repos,
# so just copy it
dstcachedir = os.path.join(destpath, 'cache')
srcbranchcache = srcrepo.sjoin('cache/branchheads')
dstbranchcache = os.path.join(dstcachedir, 'branchheads')
if os.path.exists(srcbranchcache):
if not os.path.exists(dstcachedir):
os.mkdir(dstcachedir)
util.copyfile(srcbranchcache, dstbranchcache)
# we need to re-init the repo after manually copying the data
# into it
destpeer = peer(srcrepo, peeropts, dest)
srcrepo.hook('outgoing', source='clone',
node=node.hex(node.nullid))
else:
try:
destpeer = peer(srcrepo or ui, peeropts, dest, create=True)
# only pass ui when no srcrepo
except OSError, inst:
if inst.errno == errno.EEXIST:
cleandir = None
raise util.Abort(_("destination '%s' already exists")
% dest)
raise
revs = None
if rev:
if not srcpeer.capable('lookup'):
raise util.Abort(_("src repository does not support "
"revision lookup and so doesn't "
"support clone by revision"))
revs = [srcpeer.lookup(r) for r in rev]
checkout = revs[0]
if destpeer.local():
destpeer.local().clone(srcpeer, heads=revs, stream=stream)
elif srcrepo:
srcrepo.push(destpeer, revs=revs)
else:
raise util.Abort(_("clone from remote to remote not supported"))
cleandir = None
# clone all bookmarks except divergent ones
destrepo = destpeer.local()
if destrepo and srcpeer.capable("pushkey"):
rb = srcpeer.listkeys('bookmarks')
marks = destrepo._bookmarks
for k, n in rb.iteritems():
try:
m = destrepo.lookup(n)
marks[k] = m
except error.RepoLookupError:
pass
if rb:
marks.write()
elif srcrepo and destpeer.capable("pushkey"):
for k, n in srcrepo._bookmarks.iteritems():
destpeer.pushkey('bookmarks', k, '', hex(n))
if destrepo:
fp = destrepo.opener("hgrc", "w", text=True)
fp.write("[paths]\n")
u = util.url(abspath)
u.passwd = None
defaulturl = str(u)
fp.write("default = %s\n" % defaulturl)
fp.close()
destrepo.ui.setconfig('paths', 'default', defaulturl)
if update:
if update is not True:
checkout = srcpeer.lookup(update)
uprev = None
status = None
if checkout is not None:
try:
uprev = destrepo.lookup(checkout)
except error.RepoLookupError:
pass
if uprev is None:
try:
uprev = destrepo._bookmarks['@']
update = '@'
bn = destrepo[uprev].branch()
if bn == 'default':
status = _("updating to bookmark @\n")
else:
status = _("updating to bookmark @ on branch %s\n"
% bn)
except KeyError:
try:
uprev = destrepo.branchtip('default')
except error.RepoLookupError:
uprev = destrepo.lookup('tip')
if not status:
bn = destrepo[uprev].branch()
status = _("updating to branch %s\n") % bn
destrepo.ui.status(status)
_update(destrepo, uprev)
if update in destrepo._bookmarks:
bookmarks.setcurrent(destrepo, update)
return srcpeer, destpeer
finally:
release(srclock, destlock)
if cleandir is not None:
shutil.rmtree(cleandir, True)
if srcpeer is not None:
srcpeer.close()
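# Illustrative call (sketch of how the clone command is assumed to use this
# helper):
#
#   srcpeer, destpeer = clone(ui, {}, 'https://example.com/repo',
#                             dest='repo-copy', update=True)
#
# which copies or pulls the source into 'repo-copy', records the default
# path in its hgrc, and updates the new working directory.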
def _showstats(repo, stats):
repo.ui.status(_("%d files updated, %d files merged, "
"%d files removed, %d files unresolved\n") % stats)
def updaterepo(repo, node, overwrite):
"""Update the working directory to node.
    When overwrite is set, changes are clobbered rather than merged.
    Returns stats (see pydoc mercurial.merge.applyupdates)"""
return mergemod.update(repo, node, False, overwrite, None)
def update(repo, node):
"""update the working directory to node, merging linear changes"""
stats = updaterepo(repo, node, False)
_showstats(repo, stats)
if stats[3]:
repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
return stats[3] > 0
# naming conflict in clone()
_update = update
def clean(repo, node, show_stats=True):
"""forcibly switch the working directory to node, clobbering changes"""
stats = updaterepo(repo, node, True)
if show_stats:
_showstats(repo, stats)
return stats[3] > 0
def merge(repo, node, force=None, remind=True):
"""Branch merge with node, resolving changes. Return true if any
unresolved conflicts."""
stats = mergemod.update(repo, node, True, force, False)
_showstats(repo, stats)
if stats[3]:
repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
"or 'hg update -C .' to abandon\n"))
elif remind:
repo.ui.status(_("(branch merge, don't forget to commit)\n"))
return stats[3] > 0
def _incoming(displaychlist, subreporecurse, ui, repo, source,
opts, buffered=False):
"""
Helper for incoming / gincoming.
displaychlist gets called with
(remoterepo, incomingchangesetlist, displayer) parameters,
and is supposed to contain only code that can't be unified.
"""
source, branches = parseurl(ui.expandpath(source), opts.get('branch'))
other = peer(repo, opts, source)
ui.status(_('comparing with %s\n') % util.hidepassword(source))
revs, checkout = addbranchrevs(repo, other, branches, opts.get('rev'))
if revs:
revs = [other.lookup(rev) for rev in revs]
other, chlist, cleanupfn = bundlerepo.getremotechanges(ui, repo, other,
revs, opts["bundle"], opts["force"])
try:
if not chlist:
ui.status(_("no changes found\n"))
return subreporecurse()
displayer = cmdutil.show_changeset(ui, other, opts, buffered)
# XXX once graphlog extension makes it into core,
# should be replaced by a if graph/else
displaychlist(other, chlist, displayer)
displayer.close()
finally:
cleanupfn()
subreporecurse()
return 0 # exit code is zero since we found incoming changes
def incoming(ui, repo, source, opts):
def subreporecurse():
ret = 1
if opts.get('subrepos'):
ctx = repo[None]
for subpath in sorted(ctx.substate):
sub = ctx.sub(subpath)
ret = min(ret, sub.incoming(ui, source, opts))
return ret
def display(other, chlist, displayer):
limit = cmdutil.loglimit(opts)
if opts.get('newest_first'):
chlist.reverse()
count = 0
for n in chlist:
if limit is not None and count >= limit:
break
parents = [p for p in other.changelog.parents(n) if p != nullid]
if opts.get('no_merges') and len(parents) == 2:
continue
count += 1
displayer.show(other[n])
return _incoming(display, subreporecurse, ui, repo, source, opts)
def _outgoing(ui, repo, dest, opts):
dest = ui.expandpath(dest or 'default-push', dest or 'default')
dest, branches = parseurl(dest, opts.get('branch'))
ui.status(_('comparing with %s\n') % util.hidepassword(dest))
revs, checkout = addbranchrevs(repo, repo, branches, opts.get('rev'))
if revs:
revs = [repo.lookup(rev) for rev in scmutil.revrange(repo, revs)]
other = peer(repo, opts, dest)
outgoing = discovery.findcommonoutgoing(repo.unfiltered(), other, revs,
force=opts.get('force'))
o = outgoing.missing
if not o:
scmutil.nochangesfound(repo.ui, repo, outgoing.excluded)
return None
return o
def outgoing(ui, repo, dest, opts):
def recurse():
ret = 1
if opts.get('subrepos'):
ctx = repo[None]
for subpath in sorted(ctx.substate):
sub = ctx.sub(subpath)
ret = min(ret, sub.outgoing(ui, dest, opts))
return ret
limit = cmdutil.loglimit(opts)
o = _outgoing(ui, repo, dest, opts)
if o is None:
return recurse()
if opts.get('newest_first'):
o.reverse()
displayer = cmdutil.show_changeset(ui, repo, opts)
count = 0
for n in o:
if limit is not None and count >= limit:
break
parents = [p for p in repo.changelog.parents(n) if p != nullid]
if opts.get('no_merges') and len(parents) == 2:
continue
count += 1
displayer.show(repo[n])
displayer.close()
recurse()
return 0 # exit code is zero since we found outgoing changes
def revert(repo, node, choose):
"""revert changes to revision in node without updating dirstate"""
return mergemod.update(repo, node, False, True, choose)[3] > 0
def verify(repo):
"""verify the consistency of a repository"""
return verifymod.verify(repo)
def remoteui(src, opts):
'build a remote ui from ui or repo and opts'
if util.safehasattr(src, 'baseui'): # looks like a repository
dst = src.baseui.copy() # drop repo-specific config
src = src.ui # copy target options from repo
else: # assume it's a global ui object
dst = src.copy() # keep all global options
# copy ssh-specific options
for o in 'ssh', 'remotecmd':
v = opts.get(o) or src.config('ui', o)
if v:
dst.setconfig("ui", o, v)
# copy bundle-specific options
r = src.config('bundle', 'mainreporoot')
if r:
dst.setconfig('bundle', 'mainreporoot', r)
# copy selected local settings to the remote ui
for sect in ('auth', 'hostfingerprints', 'http_proxy'):
for key, val in src.configitems(sect):
dst.setconfig(sect, key, val)
v = src.config('web', 'cacerts')
if v:
dst.setconfig('web', 'cacerts', util.expandpath(v))
return dst
|
openstack/osprofiler
|
refs/heads/master
|
osprofiler/tests/unit/doc/test_specs.py
|
1
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import glob
import os
import re
import docutils.core
from osprofiler.tests import test
class TitlesTestCase(test.TestCase):
specs_path = os.path.join(
os.path.dirname(__file__),
os.pardir, os.pardir, os.pardir, os.pardir,
"doc", "specs")
def _get_title(self, section_tree):
section = {"subtitles": []}
for node in section_tree:
if node.tagname == "title":
section["name"] = node.rawsource
elif node.tagname == "section":
subsection = self._get_title(node)
section["subtitles"].append(subsection["name"])
return section
def _get_titles(self, spec):
titles = {}
for node in spec:
if node.tagname == "section":
# Note subsection subtitles are thrown away
section = self._get_title(node)
titles[section["name"]] = section["subtitles"]
return titles
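    # Illustrative shape of the mapping returned above (assumed example):
    #   {'Problem description': ['Use cases'], 'Proposed change': []}
    # Top-level section titles map to lists of their direct subsection
    # titles; deeper nesting is discarded by _get_title().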
def _check_titles(self, filename, expect, actual):
missing_sections = [x for x in expect.keys() if x not in actual.keys()]
extra_sections = [x for x in actual.keys() if x not in expect.keys()]
msgs = []
if len(missing_sections) > 0:
msgs.append("Missing sections: %s" % missing_sections)
if len(extra_sections) > 0:
msgs.append("Extra sections: %s" % extra_sections)
for section in expect.keys():
missing_subsections = [x for x in expect[section]
if x not in actual.get(section, {})]
# extra subsections are allowed
if len(missing_subsections) > 0:
msgs.append("Section '%s' is missing subsections: %s"
% (section, missing_subsections))
if len(msgs) > 0:
self.fail("While checking '%s':\n %s"
% (filename, "\n ".join(msgs)))
def _check_lines_wrapping(self, tpl, raw):
for i, line in enumerate(raw.split("\n")):
if "http://" in line or "https://" in line:
continue
self.assertTrue(
len(line) < 80,
msg="%s:%d: Line limited to a maximum of 79 characters." %
(tpl, i + 1))
def _check_no_cr(self, tpl, raw):
matches = re.findall("\r", raw)
self.assertEqual(
len(matches), 0,
"Found %s literal carriage returns in file %s" %
(len(matches), tpl))
def _check_trailing_spaces(self, tpl, raw):
for i, line in enumerate(raw.split("\n")):
trailing_spaces = re.findall(" +$", line)
self.assertEqual(
len(trailing_spaces), 0,
"Found trailing spaces on line %s of %s" % (i + 1, tpl))
def test_template(self):
with open(os.path.join(self.specs_path, "template.rst")) as f:
template = f.read()
spec = docutils.core.publish_doctree(template)
template_titles = self._get_titles(spec)
for d in ["implemented", "in-progress"]:
spec_dir = "%s/%s" % (self.specs_path, d)
self.assertTrue(os.path.isdir(spec_dir),
"%s is not a directory" % spec_dir)
for filename in glob.glob(spec_dir + "/*"):
if filename.endswith("README.rst"):
continue
self.assertTrue(
filename.endswith(".rst"),
"spec's file must have .rst ext. Found: %s" % filename)
with open(filename) as f:
data = f.read()
titles = self._get_titles(docutils.core.publish_doctree(data))
self._check_titles(filename, template_titles, titles)
self._check_lines_wrapping(filename, data)
self._check_no_cr(filename, data)
self._check_trailing_spaces(filename, data)
|