| repo_name (string, 5–100 chars) | ref (string, 12–67 chars) | path (string, 4–244 chars) | copies (string, 1–8 chars) | content (string, 0–1.05M chars, nullable) |
|---|---|---|---|---|
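Each metadata row below is followed by the full `content` field of that file. As a minimal consumption sketch (the dataset ID `example-org/source-code-corpus` is a placeholder, not the actual source of this dump), streaming rows with this schema via the Hugging Face `datasets` library might look like:

```python
from datasets import load_dataset

# Placeholder dataset ID; substitute whichever corpus this dump was exported from.
ds = load_dataset("example-org/source-code-corpus", split="train", streaming=True)

for row in ds:
    # Each row carries repo_name, ref, path, copies, and content, per the schema above.
    if row["path"].endswith(".py"):
        print(row["repo_name"], row["ref"], row["path"], row["copies"])
        print((row["content"] or "")[:200])  # content is nullable; preview 200 chars
        break
```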
| orifinkelman/incubator-trafficcontrol | refs/heads/master | docs/source/conf.py | 3 |
#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# -*- coding: utf-8 -*-
#
# Traffic Control documentation build configuration file, created by
# sphinx-quickstart on Thu Nov 20 13:17:23 2014.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
#extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.pngmath', 'sphinx.ext.mathjax', 'sphinx.ext.ifconfig', 'sphinx.ext.viewcode']
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.pngmath', 'sphinx.ext.ifconfig']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Traffic Control'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
# version = '2_19_15 11:44'
# The full version, including alpha/beta/rc tags.
release = '1.8-dev'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
html_theme_path = ["_themes", ]
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = '../../traffic_ops/app/public/images/tc_logo_c_only.png'
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
html_favicon = '../../traffic_ops/app/public/favicon.ico'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
html_domain_indices = True
# If false, no index is generated.
html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
html_show_copyright = False
# html_copy_source = False
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'TrafficControlDoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'TrafficControl.tex', u'Traffic Control Documentation', 'Apache Software Foundation', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'trafficcontrol', u'Traffic Control Documentation',
     [u'Apache Software Foundation'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'Traffic Control', u'Traffic Control Documentation',
     u'Apache Software Foundation', 'Traffic Control', 'Apache Traffic Control documentation.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# -- Options for Epub output ---------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = u'Traffic Control'
epub_author = u'Apache Software Foundation'
epub_publisher = u'Apache Software Foundation'
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
#epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
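# Note: the bare-URL key above is the legacy intersphinx form. Recent Sphinx
# releases expect named keys instead; the equivalent modern mapping (a sketch,
# only needed when building with a newer Sphinx) would be:
#intersphinx_mapping = {'python': ('https://docs.python.org/3', None)}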
def setup(app):
# overrides for wide tables in RTD theme
app.add_stylesheet('theme_overrides.css') # path relative to _static
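One compatibility note on the `setup()` hook above: Sphinx 1.8 renamed `Sphinx.add_stylesheet` to `add_css_file`, and Sphinx 4.0 removed the old name. A version-tolerant sketch of the same hook (not part of the original file) could read:

```python
def setup(app):
    # Overrides for wide tables in the RTD theme; path is relative to _static.
    if hasattr(app, "add_css_file"):      # Sphinx >= 1.8
        app.add_css_file('theme_overrides.css')
    else:                                 # older Sphinx
        app.add_stylesheet('theme_overrides.css')
```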
| grap/OpenUpgrade | refs/heads/8.0 | addons/website_sale/models/payment.py | 46 |
# -*- coding: utf-8 -*-
import logging
from openerp import SUPERUSER_ID
from openerp.osv import orm, fields
from openerp.tools import float_compare
_logger = logging.getLogger(__name__)
class PaymentTransaction(orm.Model):
_inherit = 'payment.transaction'
_columns = {
# link with the sale order
'sale_order_id': fields.many2one('sale.order', 'Sale Order'),
}
def form_feedback(self, cr, uid, data, acquirer_name, context=None):
""" Override to confirm the sale order, if defined, and if the transaction
is done. """
tx = None
res = super(PaymentTransaction, self).form_feedback(cr, uid, data, acquirer_name, context=context)
# fetch the tx, check its state, confirm the potential SO
try:
tx_find_method_name = '_%s_form_get_tx_from_data' % acquirer_name
if hasattr(self, tx_find_method_name):
tx = getattr(self, tx_find_method_name)(cr, uid, data, context=context)
_logger.info('<%s> transaction processed: tx ref:%s, tx amount: %s', acquirer_name, tx.reference if tx else 'n/a', tx.amount if tx else 'n/a')
if tx and tx.sale_order_id:
# verify SO/TX match, excluding tx.fees which are currently not included in SO
amount_matches = (tx.sale_order_id.state in ['draft', 'sent'] and float_compare(tx.amount, tx.sale_order_id.amount_total, 2) == 0)
if amount_matches:
if tx.state == 'done':
_logger.info('<%s> transaction completed, confirming order %s (ID %s)', acquirer_name, tx.sale_order_id.name, tx.sale_order_id.id)
                        self.pool['sale.order'].action_button_confirm(cr, SUPERUSER_ID, [tx.sale_order_id.id], context=dict(context or {}, send_email=True))
elif tx.state != 'cancel' and tx.sale_order_id.state == 'draft':
_logger.info('<%s> transaction pending, sending quote email for order %s (ID %s)', acquirer_name, tx.sale_order_id.name, tx.sale_order_id.id)
self.pool['sale.order'].force_quotation_send(cr, SUPERUSER_ID, [tx.sale_order_id.id], context=context)
else:
_logger.warning('<%s> transaction MISMATCH for order %s (ID %s)', acquirer_name, tx.sale_order_id.name, tx.sale_order_id.id)
except Exception:
            _logger.exception('Failed to confirm the order or send the confirmation email%s', tx and ' for the transaction %s' % tx.reference or '')
return res
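The `'_%s_form_get_tx_from_data' % acquirer_name` lookup in `form_feedback` is a naming-convention dispatch: each acquirer module contributes a method matching that pattern, and `hasattr`/`getattr` select it at runtime. A self-contained sketch of the pattern (class and method names here are illustrative, not the Odoo API):

```python
class TransactionDispatcher(object):
    """Resolve an acquirer-specific handler purely by naming convention."""

    def _paypal_form_get_tx_from_data(self, data):
        # One concrete handler; real acquirer modules each add their own.
        return {'acquirer': 'paypal', 'reference': data.get('txn_id')}

    def find_transaction(self, acquirer_name, data):
        handler_name = '_%s_form_get_tx_from_data' % acquirer_name
        if hasattr(self, handler_name):
            return getattr(self, handler_name)(data)
        return None  # unknown acquirer: nothing to resolve


dispatcher = TransactionDispatcher()
print(dispatcher.find_transaction('paypal', {'txn_id': 'TX-42'}))
# {'acquirer': 'paypal', 'reference': 'TX-42'}
```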
| moylop260/odoo-dev | refs/heads/master | addons/portal_project_issue/tests/test_access_rights.py | 43 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2013-TODAY OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.addons.portal_project.tests.test_access_rights import TestPortalProjectBase
from openerp.exceptions import AccessError
from openerp.osv.orm import except_orm
from openerp.tools import mute_logger
class TestPortalProjectBase(TestPortalProjectBase):
def setUp(self):
super(TestPortalProjectBase, self).setUp()
cr, uid = self.cr, self.uid
# Useful models
self.project_issue = self.registry('project.issue')
# Various test issues
self.issue_1_id = self.project_issue.create(cr, uid, {
'name': 'Test1', 'user_id': False, 'project_id': self.project_pigs_id}, {'mail_create_nolog': True})
self.issue_2_id = self.project_issue.create(cr, uid, {
'name': 'Test2', 'user_id': False, 'project_id': self.project_pigs_id}, {'mail_create_nolog': True})
self.issue_3_id = self.project_issue.create(cr, uid, {
'name': 'Test3', 'user_id': False, 'project_id': self.project_pigs_id}, {'mail_create_nolog': True})
self.issue_4_id = self.project_issue.create(cr, uid, {
'name': 'Test4', 'user_id': self.user_projectuser_id, 'project_id': self.project_pigs_id}, {'mail_create_nolog': True})
self.issue_5_id = self.project_issue.create(cr, uid, {
'name': 'Test5', 'user_id': self.user_portal_id, 'project_id': self.project_pigs_id}, {'mail_create_nolog': True})
self.issue_6_id = self.project_issue.create(cr, uid, {
'name': 'Test6', 'user_id': self.user_public_id, 'project_id': self.project_pigs_id}, {'mail_create_nolog': True})
class TestPortalIssue(TestPortalProjectBase):
@mute_logger('openerp.addons.base.ir.ir_model', 'openerp.osv.orm')
def test_00_project_access_rights(self):
""" Test basic project access rights, for project and portal_project """
cr, uid, pigs_id = self.cr, self.uid, self.project_pigs_id
# ----------------------------------------
# CASE1: public project
# ----------------------------------------
# Do: Alfred reads project -> ok (employee ok public)
# Test: all project issues visible
issue_ids = self.project_issue.search(cr, self.user_projectuser_id, [('project_id', '=', pigs_id)])
test_issue_ids = set([self.issue_1_id, self.issue_2_id, self.issue_3_id, self.issue_4_id, self.issue_5_id, self.issue_6_id])
self.assertEqual(set(issue_ids), test_issue_ids,
'access rights: project user cannot see all issues of a public project')
# Test: all project issues readable
self.project_issue.read(cr, self.user_projectuser_id, issue_ids, ['name'])
# Test: all project issues writable
self.project_issue.write(cr, self.user_projectuser_id, issue_ids, {'description': 'TestDescription'})
# Do: Bert reads project -> crash, no group
# Test: no project issue visible
self.assertRaises(AccessError, self.project_issue.search, cr, self.user_none_id, [('project_id', '=', pigs_id)])
# Test: no project issue readable
self.assertRaises(AccessError, self.project_issue.read, cr, self.user_none_id, issue_ids, ['name'])
# Test: no project issue writable
self.assertRaises(AccessError, self.project_issue.write, cr, self.user_none_id, issue_ids, {'description': 'TestDescription'})
# Do: Chell reads project -> ok (portal ok public)
# Test: all project issues visible
issue_ids = self.project_issue.search(cr, self.user_portal_id, [('project_id', '=', pigs_id)])
self.assertEqual(set(issue_ids), test_issue_ids,
            'access rights: portal user cannot see all issues of a public project')
# Test: all project issues readable
self.project_issue.read(cr, self.user_portal_id, issue_ids, ['name'])
# Test: no project issue writable
self.assertRaises(AccessError, self.project_issue.write, cr, self.user_portal_id, issue_ids, {'description': 'TestDescription'})
# Do: Donovan reads project -> ok (public ok public)
# Test: all project issues visible
issue_ids = self.project_issue.search(cr, self.user_public_id, [('project_id', '=', pigs_id)])
self.assertEqual(set(issue_ids), test_issue_ids,
            'access rights: public user cannot see all issues of a public project')
# ----------------------------------------
# CASE2: portal project
# ----------------------------------------
self.project_project.write(cr, uid, [pigs_id], {'privacy_visibility': 'portal'})
# Do: Alfred reads project -> ok (employee ok public)
# Test: all project issues visible
issue_ids = self.project_issue.search(cr, self.user_projectuser_id, [('project_id', '=', pigs_id)])
self.assertEqual(set(issue_ids), test_issue_ids,
'access rights: project user cannot see all issues of a portal project')
# Do: Bert reads project -> crash, no group
# Test: no project issue searchable
self.assertRaises(AccessError, self.project_issue.search, cr, self.user_none_id, [('project_id', '=', pigs_id)])
# Data: issue follower
self.project_issue.message_subscribe_users(cr, self.user_projectuser_id, [self.issue_1_id, self.issue_3_id], [self.user_portal_id])
# Do: Chell reads project -> ok (portal ok public)
        # Test: only followed and assigned project issues visible
issue_ids = self.project_issue.search(cr, self.user_portal_id, [('project_id', '=', pigs_id)])
test_issue_ids = set([self.issue_1_id, self.issue_3_id, self.issue_5_id])
self.assertEqual(set(issue_ids), test_issue_ids,
            'access rights: portal user should see the followed and assigned issues of a portal project')
# Data: issue follower cleaning
self.project_issue.message_unsubscribe_users(cr, self.user_projectuser_id, [self.issue_1_id, self.issue_3_id], [self.user_portal_id])
# ----------------------------------------
# CASE3: employee project
# ----------------------------------------
self.project_project.write(cr, uid, [pigs_id], {'privacy_visibility': 'employees'})
# Do: Alfred reads project -> ok (employee ok employee)
# Test: all project issues visible
issue_ids = self.project_issue.search(cr, self.user_projectuser_id, [('project_id', '=', pigs_id)])
test_issue_ids = set([self.issue_1_id, self.issue_2_id, self.issue_3_id, self.issue_4_id, self.issue_5_id, self.issue_6_id])
self.assertEqual(set(issue_ids), test_issue_ids,
'access rights: project user cannot see all issues of an employees project')
# Do: Chell reads project -> ko (portal ko employee)
        # Test: no project issues visible, not even assigned ones
issue_ids = self.project_issue.search(cr, self.user_portal_id, [('project_id', '=', pigs_id)])
self.assertFalse(issue_ids, 'access rights: portal user should not see issues of an employees project, even if assigned')
# ----------------------------------------
# CASE4: followers project
# ----------------------------------------
self.project_project.write(cr, uid, [pigs_id], {'privacy_visibility': 'followers'})
# Do: Alfred reads project -> ko (employee ko followers)
        # Test: only assigned issues visible
issue_ids = self.project_issue.search(cr, self.user_projectuser_id, [('project_id', '=', pigs_id)])
test_issue_ids = set([self.issue_4_id])
self.assertEqual(set(issue_ids), test_issue_ids,
            'access rights: employee user should only see assigned issues of a non-followed followers project')
# Do: Chell reads project -> ko (portal ko employee)
        # Test: only assigned issues visible
issue_ids = self.project_issue.search(cr, self.user_portal_id, [('project_id', '=', pigs_id)])
test_issue_ids = set([self.issue_5_id])
self.assertEqual(set(issue_ids), test_issue_ids,
            'access rights: portal user should only see assigned issues of a non-followed followers project')
# Data: subscribe Alfred, Chell and Donovan as follower
self.project_project.message_subscribe_users(cr, uid, [pigs_id], [self.user_projectuser_id, self.user_portal_id, self.user_public_id])
self.project_issue.message_subscribe_users(cr, self.user_manager_id, [self.issue_1_id, self.issue_3_id], [self.user_portal_id, self.user_projectuser_id])
# Do: Alfred reads project -> ok (follower ok followers)
# Test: followed + assigned issues visible
issue_ids = self.project_issue.search(cr, self.user_projectuser_id, [('project_id', '=', pigs_id)])
test_issue_ids = set([self.issue_1_id, self.issue_3_id, self.issue_4_id])
self.assertEqual(set(issue_ids), test_issue_ids,
            'access rights: employee user should see the followed and assigned issues of a followers project')
# Do: Chell reads project -> ok (follower ok follower)
# Test: followed + assigned issues visible
issue_ids = self.project_issue.search(cr, self.user_portal_id, [('project_id', '=', pigs_id)])
test_issue_ids = set([self.issue_1_id, self.issue_3_id, self.issue_5_id])
self.assertEqual(set(issue_ids), test_issue_ids,
            'access rights: portal user should see the followed and assigned issues of a followers project')
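A side note on the assertion style used throughout: the positional form `assertRaises(AccessError, method, *args)` predates the context-manager form, which unittest added in Python 2.7. The two are equivalent; a sketch with a plain callable (not the Odoo registry API):

```python
import unittest

def forbidden():
    raise ValueError("no access")

class TestAssertStyles(unittest.TestCase):
    def test_positional_form(self):
        # Callable plus its arguments, as in the Odoo tests above.
        self.assertRaises(ValueError, forbidden)

    def test_context_manager_form(self):
        # Equivalent, and often more readable for multi-line calls.
        with self.assertRaises(ValueError):
            forbidden()

if __name__ == '__main__':
    unittest.main()
```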
| CG-F16-16-Rutgers/steersuite-rutgers | refs/heads/master | steerstats/run_steersim.py | 8 |
import sys
from steersuite.LogParser import LogParser
from steersuite import init_steerSim
if __name__ == "__main__":
"""
Example run
python run_steersim.py -module scenario,scenarioAI=pprAI,useBenchmark,benchmarkTechnique=compositePLE,checkAgentValid,reducedGoals,fixedSpeed,checkAgentRelevant,minAgents=3,maxFrames=1000,checkAgentInteraction,egocentric,scenarioSetInitId=0,numScenarios=20,skipInsert=True -config configs/pprAI-config.xml -commandLine
"""
data = init_steerSim(len(sys.argv), sys.argv)
print "Done in python"
# print data
    count = 0
    for key, value in data.iteritems():
        print "%s: %s" % (key, value)
        count += 1
    print count
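The script above is Python 2 (`print` statements, `dict.iteritems()`). Under Python 3 the reporting loop would read as follows, assuming `init_steerSim` still returns a dict:

```python
# Python 3 rendering of the same loop: items() replaces iteritems(),
# and enumerate() carries the count instead of manual increments.
count = 0
for count, (key, value) in enumerate(data.items(), start=1):
    print("%s: %s" % (key, value))
print(count)
```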
| mbareta/edx-platform-ft | refs/heads/open-release/eucalyptus.master | common/test/data/capa/prog2.py | 270 |
# prog2
# Make this file long, since that seems to affect how uploaded files are
# handled in webob or cgi.FieldStorage.
moby_dick_ten_chapters = """
CHAPTER 1. Loomings.
Call me Ishmael. Some years ago--never mind how long precisely--having
little or no money in my purse, and nothing particular to interest me on
shore, I thought I would sail about a little and see the watery part of
the world. It is a way I have of driving off the spleen and regulating
the circulation. Whenever I find myself growing grim about the mouth;
whenever it is a damp, drizzly November in my soul; whenever I find
myself involuntarily pausing before coffin warehouses, and bringing up
the rear of every funeral I meet; and especially whenever my hypos get
such an upper hand of me, that it requires a strong moral principle to
prevent me from deliberately stepping into the street, and methodically
knocking people's hats off--then, I account it high time to get to sea
as soon as I can. This is my substitute for pistol and ball. With a
philosophical flourish Cato throws himself upon his sword; I quietly
take to the ship. There is nothing surprising in this. If they but knew
it, almost all men in their degree, some time or other, cherish very
nearly the same feelings towards the ocean with me.
There now is your insular city of the Manhattoes, belted round by
wharves as Indian isles by coral reefs--commerce surrounds it with
her surf. Right and left, the streets take you waterward. Its extreme
downtown is the battery, where that noble mole is washed by waves, and
cooled by breezes, which a few hours previous were out of sight of land.
Look at the crowds of water-gazers there.
Circumambulate the city of a dreamy Sabbath afternoon. Go from Corlears
Hook to Coenties Slip, and from thence, by Whitehall, northward. What
do you see?--Posted like silent sentinels all around the town, stand
thousands upon thousands of mortal men fixed in ocean reveries. Some
leaning against the spiles; some seated upon the pier-heads; some
looking over the bulwarks of ships from China; some high aloft in the
rigging, as if striving to get a still better seaward peep. But these
are all landsmen; of week days pent up in lath and plaster--tied to
counters, nailed to benches, clinched to desks. How then is this? Are
the green fields gone? What do they here?
But look! here come more crowds, pacing straight for the water, and
seemingly bound for a dive. Strange! Nothing will content them but the
extremest limit of the land; loitering under the shady lee of yonder
warehouses will not suffice. No. They must get just as nigh the water
as they possibly can without falling in. And there they stand--miles of
them--leagues. Inlanders all, they come from lanes and alleys, streets
and avenues--north, east, south, and west. Yet here they all unite.
Tell me, does the magnetic virtue of the needles of the compasses of all
those ships attract them thither?
Once more. Say you are in the country; in some high land of lakes. Take
almost any path you please, and ten to one it carries you down in a
dale, and leaves you there by a pool in the stream. There is magic
in it. Let the most absent-minded of men be plunged in his deepest
reveries--stand that man on his legs, set his feet a-going, and he will
infallibly lead you to water, if water there be in all that region.
Should you ever be athirst in the great American desert, try this
experiment, if your caravan happen to be supplied with a metaphysical
professor. Yes, as every one knows, meditation and water are wedded for
ever.
But here is an artist. He desires to paint you the dreamiest, shadiest,
quietest, most enchanting bit of romantic landscape in all the valley of
the Saco. What is the chief element he employs? There stand his trees,
each with a hollow trunk, as if a hermit and a crucifix were within; and
here sleeps his meadow, and there sleep his cattle; and up from yonder
cottage goes a sleepy smoke. Deep into distant woodlands winds a
mazy way, reaching to overlapping spurs of mountains bathed in their
hill-side blue. But though the picture lies thus tranced, and though
this pine-tree shakes down its sighs like leaves upon this shepherd's
head, yet all were vain, unless the shepherd's eye were fixed upon the
magic stream before him. Go visit the Prairies in June, when for scores
on scores of miles you wade knee-deep among Tiger-lilies--what is the
one charm wanting?--Water--there is not a drop of water there! Were
Niagara but a cataract of sand, would you travel your thousand miles to
see it? Why did the poor poet of Tennessee, upon suddenly receiving two
handfuls of silver, deliberate whether to buy him a coat, which he sadly
needed, or invest his money in a pedestrian trip to Rockaway Beach? Why
is almost every robust healthy boy with a robust healthy soul in him, at
some time or other crazy to go to sea? Why upon your first voyage as a
passenger, did you yourself feel such a mystical vibration, when first
told that you and your ship were now out of sight of land? Why did the
old Persians hold the sea holy? Why did the Greeks give it a separate
deity, and own brother of Jove? Surely all this is not without meaning.
And still deeper the meaning of that story of Narcissus, who because
he could not grasp the tormenting, mild image he saw in the fountain,
plunged into it and was drowned. But that same image, we ourselves see
in all rivers and oceans. It is the image of the ungraspable phantom of
life; and this is the key to it all.
Now, when I say that I am in the habit of going to sea whenever I begin
to grow hazy about the eyes, and begin to be over conscious of my lungs,
I do not mean to have it inferred that I ever go to sea as a passenger.
For to go as a passenger you must needs have a purse, and a purse is
but a rag unless you have something in it. Besides, passengers get
sea-sick--grow quarrelsome--don't sleep of nights--do not enjoy
themselves much, as a general thing;--no, I never go as a passenger;
nor, though I am something of a salt, do I ever go to sea as a
Commodore, or a Captain, or a Cook. I abandon the glory and distinction
of such offices to those who like them. For my part, I abominate all
honourable respectable toils, trials, and tribulations of every kind
whatsoever. It is quite as much as I can do to take care of myself,
without taking care of ships, barques, brigs, schooners, and what not.
And as for going as cook,--though I confess there is considerable glory
in that, a cook being a sort of officer on ship-board--yet, somehow,
I never fancied broiling fowls;--though once broiled, judiciously
buttered, and judgmatically salted and peppered, there is no one who
will speak more respectfully, not to say reverentially, of a broiled
fowl than I will. It is out of the idolatrous dotings of the old
Egyptians upon broiled ibis and roasted river horse, that you see the
mummies of those creatures in their huge bake-houses the pyramids.
No, when I go to sea, I go as a simple sailor, right before the mast,
plumb down into the forecastle, aloft there to the royal mast-head.
True, they rather order me about some, and make me jump from spar to
spar, like a grasshopper in a May meadow. And at first, this sort
of thing is unpleasant enough. It touches one's sense of honour,
particularly if you come of an old established family in the land, the
Van Rensselaers, or Randolphs, or Hardicanutes. And more than all,
if just previous to putting your hand into the tar-pot, you have been
lording it as a country schoolmaster, making the tallest boys stand
in awe of you. The transition is a keen one, I assure you, from a
schoolmaster to a sailor, and requires a strong decoction of Seneca and
the Stoics to enable you to grin and bear it. But even this wears off in
time.
What of it, if some old hunks of a sea-captain orders me to get a broom
and sweep down the decks? What does that indignity amount to, weighed,
I mean, in the scales of the New Testament? Do you think the archangel
Gabriel thinks anything the less of me, because I promptly and
respectfully obey that old hunks in that particular instance? Who ain't
a slave? Tell me that. Well, then, however the old sea-captains may
order me about--however they may thump and punch me about, I have the
satisfaction of knowing that it is all right; that everybody else is
one way or other served in much the same way--either in a physical
or metaphysical point of view, that is; and so the universal thump is
passed round, and all hands should rub each other's shoulder-blades, and
be content.
Again, I always go to sea as a sailor, because they make a point of
paying me for my trouble, whereas they never pay passengers a single
penny that I ever heard of. On the contrary, passengers themselves must
pay. And there is all the difference in the world between paying
and being paid. The act of paying is perhaps the most uncomfortable
infliction that the two orchard thieves entailed upon us. But BEING
PAID,--what will compare with it? The urbane activity with which a man
receives money is really marvellous, considering that we so earnestly
believe money to be the root of all earthly ills, and that on no account
can a monied man enter heaven. Ah! how cheerfully we consign ourselves
to perdition!
Finally, I always go to sea as a sailor, because of the wholesome
exercise and pure air of the fore-castle deck. For as in this world,
head winds are far more prevalent than winds from astern (that is,
if you never violate the Pythagorean maxim), so for the most part the
Commodore on the quarter-deck gets his atmosphere at second hand from
the sailors on the forecastle. He thinks he breathes it first; but not
so. In much the same way do the commonalty lead their leaders in many
other things, at the same time that the leaders little suspect it.
But wherefore it was that after having repeatedly smelt the sea as a
merchant sailor, I should now take it into my head to go on a whaling
voyage; this the invisible police officer of the Fates, who has the
constant surveillance of me, and secretly dogs me, and influences me
in some unaccountable way--he can better answer than any one else. And,
doubtless, my going on this whaling voyage, formed part of the grand
programme of Providence that was drawn up a long time ago. It came in as
a sort of brief interlude and solo between more extensive performances.
I take it that this part of the bill must have run something like this:
"GRAND CONTESTED ELECTION FOR THE PRESIDENCY OF THE UNITED STATES.
"WHALING VOYAGE BY ONE ISHMAEL.
"BLOODY BATTLE IN AFFGHANISTAN."
Though I cannot tell why it was exactly that those stage managers, the
Fates, put me down for this shabby part of a whaling voyage, when others
were set down for magnificent parts in high tragedies, and short and
easy parts in genteel comedies, and jolly parts in farces--though
I cannot tell why this was exactly; yet, now that I recall all the
circumstances, I think I can see a little into the springs and motives
which being cunningly presented to me under various disguises, induced
me to set about performing the part I did, besides cajoling me into the
delusion that it was a choice resulting from my own unbiased freewill
and discriminating judgment.
Chief among these motives was the overwhelming idea of the great
whale himself. Such a portentous and mysterious monster roused all my
curiosity. Then the wild and distant seas where he rolled his island
bulk; the undeliverable, nameless perils of the whale; these, with all
the attending marvels of a thousand Patagonian sights and sounds, helped
to sway me to my wish. With other men, perhaps, such things would not
have been inducements; but as for me, I am tormented with an everlasting
itch for things remote. I love to sail forbidden seas, and land on
barbarous coasts. Not ignoring what is good, I am quick to perceive a
horror, and could still be social with it--would they let me--since it
is but well to be on friendly terms with all the inmates of the place
one lodges in.
By reason of these things, then, the whaling voyage was welcome; the
great flood-gates of the wonder-world swung open, and in the wild
conceits that swayed me to my purpose, two and two there floated into
my inmost soul, endless processions of the whale, and, mid most of them
all, one grand hooded phantom, like a snow hill in the air.
CHAPTER 2. The Carpet-Bag.
I stuffed a shirt or two into my old carpet-bag, tucked it under my arm,
and started for Cape Horn and the Pacific. Quitting the good city of
old Manhatto, I duly arrived in New Bedford. It was a Saturday night in
December. Much was I disappointed upon learning that the little packet
for Nantucket had already sailed, and that no way of reaching that place
would offer, till the following Monday.
As most young candidates for the pains and penalties of whaling stop at
this same New Bedford, thence to embark on their voyage, it may as well
be related that I, for one, had no idea of so doing. For my mind was
made up to sail in no other than a Nantucket craft, because there was a
fine, boisterous something about everything connected with that famous
old island, which amazingly pleased me. Besides though New Bedford has
of late been gradually monopolising the business of whaling, and though
in this matter poor old Nantucket is now much behind her, yet Nantucket
was her great original--the Tyre of this Carthage;--the place where the
first dead American whale was stranded. Where else but from Nantucket
did those aboriginal whalemen, the Red-Men, first sally out in canoes to
give chase to the Leviathan? And where but from Nantucket, too, did that
first adventurous little sloop put forth, partly laden with imported
cobblestones--so goes the story--to throw at the whales, in order to
discover when they were nigh enough to risk a harpoon from the bowsprit?
Now having a night, a day, and still another night following before me
in New Bedford, ere I could embark for my destined port, it became a
matter of concernment where I was to eat and sleep meanwhile. It was a
very dubious-looking, nay, a very dark and dismal night, bitingly cold
and cheerless. I knew no one in the place. With anxious grapnels I had
sounded my pocket, and only brought up a few pieces of silver,--So,
wherever you go, Ishmael, said I to myself, as I stood in the middle of
a dreary street shouldering my bag, and comparing the gloom towards the
north with the darkness towards the south--wherever in your wisdom you
may conclude to lodge for the night, my dear Ishmael, be sure to inquire
the price, and don't be too particular.
With halting steps I paced the streets, and passed the sign of "The
Crossed Harpoons"--but it looked too expensive and jolly there. Further
on, from the bright red windows of the "Sword-Fish Inn," there came such
fervent rays, that it seemed to have melted the packed snow and ice from
before the house, for everywhere else the congealed frost lay ten inches
thick in a hard, asphaltic pavement,--rather weary for me, when I struck
my foot against the flinty projections, because from hard, remorseless
service the soles of my boots were in a most miserable plight. Too
expensive and jolly, again thought I, pausing one moment to watch the
broad glare in the street, and hear the sounds of the tinkling glasses
within. But go on, Ishmael, said I at last; don't you hear? get away
from before the door; your patched boots are stopping the way. So on I
went. I now by instinct followed the streets that took me waterward, for
there, doubtless, were the cheapest, if not the cheeriest inns.
Such dreary streets! blocks of blackness, not houses, on either hand,
and here and there a candle, like a candle moving about in a tomb. At
this hour of the night, of the last day of the week, that quarter of
the town proved all but deserted. But presently I came to a smoky light
proceeding from a low, wide building, the door of which stood invitingly
open. It had a careless look, as if it were meant for the uses of the
public; so, entering, the first thing I did was to stumble over an
ash-box in the porch. Ha! thought I, ha, as the flying particles almost
choked me, are these ashes from that destroyed city, Gomorrah? But "The
Crossed Harpoons," and "The Sword-Fish?"--this, then must needs be the
sign of "The Trap." However, I picked myself up and hearing a loud voice
within, pushed on and opened a second, interior door.
It seemed the great Black Parliament sitting in Tophet. A hundred black
faces turned round in their rows to peer; and beyond, a black Angel
of Doom was beating a book in a pulpit. It was a negro church; and the
preacher's text was about the blackness of darkness, and the weeping and
wailing and teeth-gnashing there. Ha, Ishmael, muttered I, backing out,
Wretched entertainment at the sign of 'The Trap!'
Moving on, I at last came to a dim sort of light not far from the docks,
and heard a forlorn creaking in the air; and looking up, saw a swinging
sign over the door with a white painting upon it, faintly representing
a tall straight jet of misty spray, and these words underneath--"The
Spouter Inn:--Peter Coffin."
Coffin?--Spouter?--Rather ominous in that particular connexion, thought
I. But it is a common name in Nantucket, they say, and I suppose this
Peter here is an emigrant from there. As the light looked so dim, and
the place, for the time, looked quiet enough, and the dilapidated little
wooden house itself looked as if it might have been carted here from
the ruins of some burnt district, and as the swinging sign had a
poverty-stricken sort of creak to it, I thought that here was the very
spot for cheap lodgings, and the best of pea coffee.
It was a queer sort of place--a gable-ended old house, one side palsied
as it were, and leaning over sadly. It stood on a sharp bleak corner,
where that tempestuous wind Euroclydon kept up a worse howling than ever
it did about poor Paul's tossed craft. Euroclydon, nevertheless, is a
mighty pleasant zephyr to any one in-doors, with his feet on the hob
quietly toasting for bed. "In judging of that tempestuous wind called
Euroclydon," says an old writer--of whose works I possess the only copy
extant--"it maketh a marvellous difference, whether thou lookest out at
it from a glass window where the frost is all on the outside, or whether
thou observest it from that sashless window, where the frost is on both
sides, and of which the wight Death is the only glazier." True enough,
thought I, as this passage occurred to my mind--old black-letter, thou
reasonest well. Yes, these eyes are windows, and this body of mine is
the house. What a pity they didn't stop up the chinks and the crannies
though, and thrust in a little lint here and there. But it's too late
to make any improvements now. The universe is finished; the copestone
is on, and the chips were carted off a million years ago. Poor Lazarus
there, chattering his teeth against the curbstone for his pillow, and
shaking off his tatters with his shiverings, he might plug up both ears
with rags, and put a corn-cob into his mouth, and yet that would not
keep out the tempestuous Euroclydon. Euroclydon! says old Dives, in his
red silken wrapper--(he had a redder one afterwards) pooh, pooh! What
a fine frosty night; how Orion glitters; what northern lights! Let them
talk of their oriental summer climes of everlasting conservatories; give
me the privilege of making my own summer with my own coals.
But what thinks Lazarus? Can he warm his blue hands by holding them up
to the grand northern lights? Would not Lazarus rather be in Sumatra
than here? Would he not far rather lay him down lengthwise along the
line of the equator; yea, ye gods! go down to the fiery pit itself, in
order to keep out this frost?
Now, that Lazarus should lie stranded there on the curbstone before the
door of Dives, this is more wonderful than that an iceberg should be
moored to one of the Moluccas. Yet Dives himself, he too lives like a
Czar in an ice palace made of frozen sighs, and being a president of a
temperance society, he only drinks the tepid tears of orphans.
But no more of this blubbering now, we are going a-whaling, and there is
plenty of that yet to come. Let us scrape the ice from our frosted feet,
and see what sort of a place this "Spouter" may be.
CHAPTER 3. The Spouter-Inn.
Entering that gable-ended Spouter-Inn, you found yourself in a wide,
low, straggling entry with old-fashioned wainscots, reminding one of
the bulwarks of some condemned old craft. On one side hung a very large
oilpainting so thoroughly besmoked, and every way defaced, that in the
unequal crosslights by which you viewed it, it was only by diligent
study and a series of systematic visits to it, and careful inquiry of
the neighbors, that you could any way arrive at an understanding of its
purpose. Such unaccountable masses of shades and shadows, that at first
you almost thought some ambitious young artist, in the time of the New
England hags, had endeavored to delineate chaos bewitched. But by dint
of much and earnest contemplation, and oft repeated ponderings, and
especially by throwing open the little window towards the back of the
entry, you at last come to the conclusion that such an idea, however
wild, might not be altogether unwarranted.
But what most puzzled and confounded you was a long, limber, portentous,
black mass of something hovering in the centre of the picture over three
blue, dim, perpendicular lines floating in a nameless yeast. A boggy,
soggy, squitchy picture truly, enough to drive a nervous man distracted.
Yet was there a sort of indefinite, half-attained, unimaginable
sublimity about it that fairly froze you to it, till you involuntarily
took an oath with yourself to find out what that marvellous painting
meant. Ever and anon a bright, but, alas, deceptive idea would dart you
through.--It's the Black Sea in a midnight gale.--It's the unnatural
combat of the four primal elements.--It's a blasted heath.--It's a
Hyperborean winter scene.--It's the breaking-up of the icebound stream
of Time. But at last all these fancies yielded to that one portentous
something in the picture's midst. THAT once found out, and all the rest
were plain. But stop; does it not bear a faint resemblance to a gigantic
fish? even the great leviathan himself?
In fact, the artist's design seemed this: a final theory of my own,
partly based upon the aggregated opinions of many aged persons with whom
I conversed upon the subject. The picture represents a Cape-Horner in a
great hurricane; the half-foundered ship weltering there with its three
dismantled masts alone visible; and an exasperated whale, purposing to
spring clean over the craft, is in the enormous act of impaling himself
upon the three mast-heads.
The opposite wall of this entry was hung all over with a heathenish
array of monstrous clubs and spears. Some were thickly set with
glittering teeth resembling ivory saws; others were tufted with knots of
human hair; and one was sickle-shaped, with a vast handle sweeping round
like the segment made in the new-mown grass by a long-armed mower. You
shuddered as you gazed, and wondered what monstrous cannibal and savage
could ever have gone a death-harvesting with such a hacking, horrifying
implement. Mixed with these were rusty old whaling lances and harpoons
all broken and deformed. Some were storied weapons. With this once long
lance, now wildly elbowed, fifty years ago did Nathan Swain kill fifteen
whales between a sunrise and a sunset. And that harpoon--so like a
corkscrew now--was flung in Javan seas, and run away with by a whale,
years afterwards slain off the Cape of Blanco. The original iron entered
nigh the tail, and, like a restless needle sojourning in the body of a
man, travelled full forty feet, and at last was found imbedded in the
hump.
Crossing this dusky entry, and on through yon low-arched way--cut
through what in old times must have been a great central chimney with
fireplaces all round--you enter the public room. A still duskier place
is this, with such low ponderous beams above, and such old wrinkled
planks beneath, that you would almost fancy you trod some old craft's
cockpits, especially of such a howling night, when this corner-anchored
old ark rocked so furiously. On one side stood a long, low, shelf-like
table covered with cracked glass cases, filled with dusty rarities
gathered from this wide world's remotest nooks. Projecting from the
further angle of the room stands a dark-looking den--the bar--a rude
attempt at a right whale's head. Be that how it may, there stands the
vast arched bone of the whale's jaw, so wide, a coach might almost drive
beneath it. Within are shabby shelves, ranged round with old decanters,
bottles, flasks; and in those jaws of swift destruction, like another
cursed Jonah (by which name indeed they called him), bustles a little
withered old man, who, for their money, dearly sells the sailors
deliriums and death.
Abominable are the tumblers into which he pours his poison. Though
true cylinders without--within, the villanous green goggling glasses
deceitfully tapered downwards to a cheating bottom. Parallel meridians
rudely pecked into the glass, surround these footpads' goblets. Fill to
THIS mark, and your charge is but a penny; to THIS a penny more; and so
on to the full glass--the Cape Horn measure, which you may gulp down for
a shilling.
Upon entering the place I found a number of young seamen gathered about
a table, examining by a dim light divers specimens of SKRIMSHANDER. I
sought the landlord, and telling him I desired to be accommodated with a
room, received for answer that his house was full--not a bed unoccupied.
"But avast," he added, tapping his forehead, "you haint no objections
to sharing a harpooneer's blanket, have ye? I s'pose you are goin'
a-whalin', so you'd better get used to that sort of thing."
I told him that I never liked to sleep two in a bed; that if I should
ever do so, it would depend upon who the harpooneer might be, and
that if he (the landlord) really had no other place for me, and the
harpooneer was not decidedly objectionable, why rather than wander
further about a strange town on so bitter a night, I would put up with
the half of any decent man's blanket.
"I thought so. All right; take a seat. Supper?--you want supper?
Supper'll be ready directly."
I sat down on an old wooden settle, carved all over like a bench on the
Battery. At one end a ruminating tar was still further adorning it with
his jack-knife, stooping over and diligently working away at the space
between his legs. He was trying his hand at a ship under full sail, but
he didn't make much headway, I thought.
At last some four or five of us were summoned to our meal in an
adjoining room. It was cold as Iceland--no fire at all--the landlord
said he couldn't afford it. Nothing but two dismal tallow candles, each
in a winding sheet. We were fain to button up our monkey jackets, and
hold to our lips cups of scalding tea with our half frozen fingers. But
the fare was of the most substantial kind--not only meat and potatoes,
but dumplings; good heavens! dumplings for supper! One young fellow in
a green box coat, addressed himself to these dumplings in a most direful
manner.
"My boy," said the landlord, "you'll have the nightmare to a dead
sartainty."
"Landlord," I whispered, "that aint the harpooneer is it?"
"Oh, no," said he, looking a sort of diabolically funny, "the harpooneer
is a dark complexioned chap. He never eats dumplings, he don't--he eats
nothing but steaks, and he likes 'em rare."
"The devil he does," says I. "Where is that harpooneer? Is he here?"
"He'll be here afore long," was the answer.
I could not help it, but I began to feel suspicious of this "dark
complexioned" harpooneer. At any rate, I made up my mind that if it so
turned out that we should sleep together, he must undress and get into
bed before I did.
Supper over, the company went back to the bar-room, when, knowing not
what else to do with myself, I resolved to spend the rest of the evening
as a looker on.
Presently a rioting noise was heard without. Starting up, the landlord
cried, "That's the Grampus's crew. I seed her reported in the offing
this morning; a three years' voyage, and a full ship. Hurrah, boys; now
we'll have the latest news from the Feegees."
A tramping of sea boots was heard in the entry; the door was flung open,
and in rolled a wild set of mariners enough. Enveloped in their shaggy
watch coats, and with their heads muffled in woollen comforters, all
bedarned and ragged, and their beards stiff with icicles, they seemed an
eruption of bears from Labrador. They had just landed from their boat,
and this was the first house they entered. No wonder, then, that they
made a straight wake for the whale's mouth--the bar--when the wrinkled
little old Jonah, there officiating, soon poured them out brimmers all
round. One complained of a bad cold in his head, upon which Jonah
mixed him a pitch-like potion of gin and molasses, which he swore was a
sovereign cure for all colds and catarrhs whatsoever, never mind of how
long standing, or whether caught off the coast of Labrador, or on the
weather side of an ice-island.
The liquor soon mounted into their heads, as it generally does even
with the arrantest topers newly landed from sea, and they began capering
about most obstreperously.
I observed, however, that one of them held somewhat aloof, and though
he seemed desirous not to spoil the hilarity of his shipmates by his own
sober face, yet upon the whole he refrained from making as much noise
as the rest. This man interested me at once; and since the sea-gods
had ordained that he should soon become my shipmate (though but a
sleeping-partner one, so far as this narrative is concerned), I will
here venture upon a little description of him. He stood full six feet
in height, with noble shoulders, and a chest like a coffer-dam. I have
seldom seen such brawn in a man. His face was deeply brown and burnt,
making his white teeth dazzling by the contrast; while in the deep
shadows of his eyes floated some reminiscences that did not seem to give
him much joy. His voice at once announced that he was a Southerner,
and from his fine stature, I thought he must be one of those tall
mountaineers from the Alleghanian Ridge in Virginia. When the revelry
of his companions had mounted to its height, this man slipped away
unobserved, and I saw no more of him till he became my comrade on the
sea. In a few minutes, however, he was missed by his shipmates, and
being, it seems, for some reason a huge favourite with them, they raised
a cry of "Bulkington! Bulkington! where's Bulkington?" and darted out of
the house in pursuit of him.
It was now about nine o'clock, and the room seeming almost
supernaturally quiet after these orgies, I began to congratulate myself
upon a little plan that had occurred to me just previous to the entrance
of the seamen.
No man prefers to sleep two in a bed. In fact, you would a good deal
rather not sleep with your own brother. I don't know how it is, but
people like to be private when they are sleeping. And when it comes to
sleeping with an unknown stranger, in a strange inn, in a strange
town, and that stranger a harpooneer, then your objections indefinitely
multiply. Nor was there any earthly reason why I as a sailor should
sleep two in a bed, more than anybody else; for sailors no more sleep
two in a bed at sea, than bachelor Kings do ashore. To be sure they
all sleep together in one apartment, but you have your own hammock, and
cover yourself with your own blanket, and sleep in your own skin.
The more I pondered over this harpooneer, the more I abominated the
thought of sleeping with him. It was fair to presume that being a
harpooneer, his linen or woollen, as the case might be, would not be of
the tidiest, certainly none of the finest. I began to twitch all over.
Besides, it was getting late, and my decent harpooneer ought to be
home and going bedwards. Suppose now, he should tumble in upon me at
midnight--how could I tell from what vile hole he had been coming?
"Landlord! I've changed my mind about that harpooneer.--I shan't sleep
with him. I'll try the bench here."
"Just as you please; I'm sorry I cant spare ye a tablecloth for a
mattress, and it's a plaguy rough board here"--feeling of the knots and
notches. "But wait a bit, Skrimshander; I've got a carpenter's plane
there in the bar--wait, I say, and I'll make ye snug enough." So saying
he procured the plane; and with his old silk handkerchief first dusting
the bench, vigorously set to planing away at my bed, the while grinning
like an ape. The shavings flew right and left; till at last the
plane-iron came bump against an indestructible knot. The landlord was
near spraining his wrist, and I told him for heaven's sake to quit--the
bed was soft enough to suit me, and I did not know how all the planing
in the world could make eider down of a pine plank. So gathering up the
shavings with another grin, and throwing them into the great stove in
the middle of the room, he went about his business, and left me in a
brown study.
I now took the measure of the bench, and found that it was a foot too
short; but that could be mended with a chair. But it was a foot too
narrow, and the other bench in the room was about four inches higher
than the planed one--so there was no yoking them. I then placed the
first bench lengthwise along the only clear space against the wall,
leaving a little interval between, for my back to settle down in. But I
soon found that there came such a draught of cold air over me from under
the sill of the window, that this plan would never do at all, especially
as another current from the rickety door met the one from the window,
and both together formed a series of small whirlwinds in the immediate
vicinity of the spot where I had thought to spend the night.
The devil fetch that harpooneer, thought I, but stop, couldn't I steal
a march on him--bolt his door inside, and jump into his bed, not to be
wakened by the most violent knockings? It seemed no bad idea; but upon
second thoughts I dismissed it. For who could tell but what the next
morning, so soon as I popped out of the room, the harpooneer might be
standing in the entry, all ready to knock me down!
Still, looking round me again, and seeing no possible chance of spending
a sufferable night unless in some other person's bed, I began to think
that after all I might be cherishing unwarrantable prejudices against
this unknown harpooneer. Thinks I, I'll wait awhile; he must be dropping
in before long. I'll have a good look at him then, and perhaps we may
become jolly good bedfellows after all--there's no telling.
But though the other boarders kept coming in by ones, twos, and threes,
and going to bed, yet no sign of my harpooneer.
"Landlord!" said I, "what sort of a chap is he--does he always keep such
late hours?" It was now hard upon twelve o'clock.
The landlord chuckled again with his lean chuckle, and seemed to
be mightily tickled at something beyond my comprehension. "No," he
answered, "generally he's an early bird--airley to bed and airley to
rise--yes, he's the bird what catches the worm. But to-night he went out
a peddling, you see, and I don't see what on airth keeps him so late,
unless, may be, he can't sell his head."
"Can't sell his head?--What sort of a bamboozingly story is this you
are telling me?" getting into a towering rage. "Do you pretend to say,
landlord, that this harpooneer is actually engaged this blessed Saturday
night, or rather Sunday morning, in peddling his head around this town?"
"That's precisely it," said the landlord, "and I told him he couldn't
sell it here, the market's overstocked."
"With what?" shouted I.
"With heads to be sure; ain't there too many heads in the world?"
"I tell you what it is, landlord," said I quite calmly, "you'd better
stop spinning that yarn to me--I'm not green."
"May be not," taking out a stick and whittling a toothpick, "but I
rayther guess you'll be done BROWN if that ere harpooneer hears you a
slanderin' his head."
"I'll break it for him," said I, now flying into a passion again at this
unaccountable farrago of the landlord's.
"It's broke a'ready," said he.
"Broke," said I--"BROKE, do you mean?"
"Sartain, and that's the very reason he can't sell it, I guess."
"Landlord," said I, going up to him as cool as Mt. Hecla in a
snow-storm--"landlord, stop whittling. You and I must understand one
another, and that too without delay. I come to your house and want a
bed; you tell me you can only give me half a one; that the other half
belongs to a certain harpooneer. And about this harpooneer, whom I
have not yet seen, you persist in telling me the most mystifying and
exasperating stories tending to beget in me an uncomfortable feeling
towards the man whom you design for my bedfellow--a sort of connexion,
landlord, which is an intimate and confidential one in the highest
degree. I now demand of you to speak out and tell me who and what this
harpooneer is, and whether I shall be in all respects safe to spend the
night with him. And in the first place, you will be so good as to unsay
that story about selling his head, which if true I take to be good
evidence that this harpooneer is stark mad, and I've no idea of sleeping
with a madman; and you, sir, YOU I mean, landlord, YOU, sir, by trying
to induce me to do so knowingly, would thereby render yourself liable to
a criminal prosecution."
"Wall," said the landlord, fetching a long breath, "that's a purty long
sarmon for a chap that rips a little now and then. But be easy, be easy,
this here harpooneer I have been tellin' you of has just arrived from
the south seas, where he bought up a lot of 'balmed New Zealand heads
(great curios, you know), and he's sold all on 'em but one, and that one
he's trying to sell to-night, cause to-morrow's Sunday, and it would not
do to be sellin' human heads about the streets when folks is goin' to
churches. He wanted to, last Sunday, but I stopped him just as he was
goin' out of the door with four heads strung on a string, for all the
airth like a string of inions."
This account cleared up the otherwise unaccountable mystery, and showed
that the landlord, after all, had had no idea of fooling me--but at
the same time what could I think of a harpooneer who stayed out of a
Saturday night clean into the holy Sabbath, engaged in such a cannibal
business as selling the heads of dead idolators?
"Depend upon it, landlord, that harpooneer is a dangerous man."
"He pays reg'lar," was the rejoinder. "But come, it's getting dreadful
late, you had better be turning flukes--it's a nice bed; Sal and me
slept in that ere bed the night we were spliced. There's plenty of room
for two to kick about in that bed; it's an almighty big bed that. Why,
afore we give it up, Sal used to put our Sam and little Johnny in the
foot of it. But I got a dreaming and sprawling about one night, and
somehow, Sam got pitched on the floor, and came near breaking his arm.
Arter that, Sal said it wouldn't do. Come along here, I'll give ye a
glim in a jiffy;" and so saying he lighted a candle and held it towards
me, offering to lead the way. But I stood irresolute; when looking at a
clock in the corner, he exclaimed "I vum it's Sunday--you won't see that
harpooneer to-night; he's come to anchor somewhere--come along then; DO
come; WON'T ye come?"
I considered the matter a moment, and then up stairs we went, and I was
ushered into a small room, cold as a clam, and furnished, sure enough,
with a prodigious bed, almost big enough indeed for any four harpooneers
to sleep abreast.
"There," said the landlord, placing the candle on a crazy old sea chest
that did double duty as a wash-stand and centre table; "there, make
yourself comfortable now, and good night to ye." I turned round from
eyeing the bed, but he had disappeared.
Folding back the counterpane, I stooped over the bed. Though none of the
most elegant, it yet stood the scrutiny tolerably well. I then glanced
round the room; and besides the bedstead and centre table, could see
no other furniture belonging to the place, but a rude shelf, the four
walls, and a papered fireboard representing a man striking a whale. Of
things not properly belonging to the room, there was a hammock lashed
up, and thrown upon the floor in one corner; also a large seaman's bag,
containing the harpooneer's wardrobe, no doubt in lieu of a land trunk.
Likewise, there was a parcel of outlandish bone fish hooks on the shelf
over the fire-place, and a tall harpoon standing at the head of the bed.
But what is this on the chest? I took it up, and held it close to the
light, and felt it, and smelt it, and tried every way possible to arrive
at some satisfactory conclusion concerning it. I can compare it to
nothing but a large door mat, ornamented at the edges with little
tinkling tags something like the stained porcupine quills round an
Indian moccasin. There was a hole or slit in the middle of this mat,
as you see the same in South American ponchos. But could it be possible
that any sober harpooneer would get into a door mat, and parade the
streets of any Christian town in that sort of guise? I put it on, to try
it, and it weighed me down like a hamper, being uncommonly shaggy and
thick, and I thought a little damp, as though this mysterious harpooneer
had been wearing it of a rainy day. I went up in it to a bit of glass
stuck against the wall, and I never saw such a sight in my life. I tore
myself out of it in such a hurry that I gave myself a kink in the neck.
I sat down on the side of the bed, and commenced thinking about this
head-peddling harpooneer, and his door mat. After thinking some time on
the bed-side, I got up and took off my monkey jacket, and then stood in
the middle of the room thinking. I then took off my coat, and thought
a little more in my shirt sleeves. But beginning to feel very cold now,
half undressed as I was, and remembering what the landlord said about
the harpooneer's not coming home at all that night, it being so very
late, I made no more ado, but jumped out of my pantaloons and boots, and
then blowing out the light tumbled into bed, and commended myself to the
care of heaven.
Whether that mattress was stuffed with corn-cobs or broken crockery,
there is no telling, but I rolled about a good deal, and could not sleep
for a long time. At last I slid off into a light doze, and had pretty
nearly made a good offing towards the land of Nod, when I heard a heavy
footfall in the passage, and saw a glimmer of light come into the room
from under the door.
Lord save me, thinks I, that must be the harpooneer, the infernal
head-peddler. But I lay perfectly still, and resolved not to say a word
till spoken to. Holding a light in one hand, and that identical New
Zealand head in the other, the stranger entered the room, and without
looking towards the bed, placed his candle a good way off from me on the
floor in one corner, and then began working away at the knotted cords
of the large bag I before spoke of as being in the room. I was all
eagerness to see his face, but he kept it averted for some time while
employed in unlacing the bag's mouth. This accomplished, however, he
turned round--when, good heavens! what a sight! Such a face! It was of
a dark, purplish, yellow colour, here and there stuck over with large
blackish looking squares. Yes, it's just as I thought, he's a terrible
bedfellow; he's been in a fight, got dreadfully cut, and here he is,
just from the surgeon. But at that moment he chanced to turn his face
so towards the light, that I plainly saw they could not be
sticking-plasters at all, those black squares on his cheeks. They were
stains of some sort or other. At first I knew not what to make of this;
but soon an inkling of the truth occurred to me. I remembered a story of
a white man--a whaleman too--who, falling among the cannibals, had been
tattooed by them. I concluded that this harpooneer, in the course of his
distant voyages, must have met with a similar adventure. And what is it,
thought I, after all! It's only his outside; a man can be honest in any
sort of skin. But then, what to make of his unearthly complexion, that
part of it, I mean, lying round about, and completely independent of the
squares of tattooing. To be sure, it might be nothing but a good coat of
tropical tanning; but I never heard of a hot sun's tanning a white man
into a purplish yellow one. However, I had never been in the South Seas;
and perhaps the sun there produced these extraordinary effects upon the
skin. Now, while all these ideas were passing through me like lightning,
this harpooneer never noticed me at all. But, after some difficulty
having opened his bag, he commenced fumbling in it, and presently pulled
out a sort of tomahawk, and a seal-skin wallet with the hair on. Placing
these on the old chest in the middle of the room, he then took the New
Zealand head--a ghastly thing enough--and crammed it down into the bag.
He now took off his hat--a new beaver hat--when I came nigh singing out
with fresh surprise. There was no hair on his head--none to speak of at
least--nothing but a small scalp-knot twisted up on his forehead. His
bald purplish head now looked for all the world like a mildewed skull.
Had not the stranger stood between me and the door, I would have bolted
out of it quicker than ever I bolted a dinner.
Even as it was, I thought something of slipping out of the window, but
it was the second floor back. I am no coward, but what to make of
this head-peddling purple rascal altogether passed my comprehension.
Ignorance is the parent of fear, and being completely nonplussed and
confounded about the stranger, I confess I was now as much afraid of him
as if it was the devil himself who had thus broken into my room at
the dead of night. In fact, I was so afraid of him that I was not
game enough just then to address him, and demand a satisfactory answer
concerning what seemed inexplicable in him.
Meanwhile, he continued the business of undressing, and at last showed
his chest and arms. As I live, these covered parts of him were checkered
with the same squares as his face; his back, too, was all over the same
dark squares; he seemed to have been in a Thirty Years' War, and just
escaped from it with a sticking-plaster shirt. Still more, his very
legs were marked, as if a parcel of dark green frogs were running up
the trunks of young palms. It was now quite plain that he must be some
abominable savage or other shipped aboard of a whaleman in the South
Seas, and so landed in this Christian country. I quaked to think of it.
A peddler of heads too--perhaps the heads of his own brothers. He might
take a fancy to mine--heavens! look at that tomahawk!
But there was no time for shuddering, for now the savage went about
something that completely fascinated my attention, and convinced me that
he must indeed be a heathen. Going to his heavy grego, or wrapall, or
dreadnaught, which he had previously hung on a chair, he fumbled in the
pockets, and produced at length a curious little deformed image with
a hunch on its back, and exactly the colour of a three days' old Congo
baby. Remembering the embalmed head, at first I almost thought that
this black manikin was a real baby preserved in some similar manner. But
seeing that it was not at all limber, and that it glistened a good deal
like polished ebony, I concluded that it must be nothing but a wooden
idol, which indeed it proved to be. For now the savage goes up to the
empty fire-place, and removing the papered fire-board, sets up this
little hunch-backed image, like a tenpin, between the andirons. The
chimney jambs and all the bricks inside were very sooty, so that I
thought this fire-place made a very appropriate little shrine or chapel
for his Congo idol.
I now screwed my eyes hard towards the half hidden image, feeling but
ill at ease meantime--to see what was next to follow. First he takes
about a double handful of shavings out of his grego pocket, and places
them carefully before the idol; then laying a bit of ship biscuit on
top and applying the flame from the lamp, he kindled the shavings into
a sacrificial blaze. Presently, after many hasty snatches into the fire,
and still hastier withdrawals of his fingers (whereby he seemed to be
scorching them badly), he at last succeeded in drawing out the biscuit;
then blowing off the heat and ashes a little, he made a polite offer of
it to the little negro. But the little devil did not seem to fancy such
dry sort of fare at all; he never moved his lips. All these strange
antics were accompanied by still stranger guttural noises from the
devotee, who seemed to be praying in a sing-song or else singing some
pagan psalmody or other, during which his face twitched about in the
most unnatural manner. At last extinguishing the fire, he took the idol
up very unceremoniously, and bagged it again in his grego pocket as
carelessly as if he were a sportsman bagging a dead woodcock.
All these queer proceedings increased my uncomfortableness, and
seeing him now exhibiting strong symptoms of concluding his business
operations, and jumping into bed with me, I thought it was high time,
now or never, before the light was put out, to break the spell in which
I had so long been bound.
But the interval I spent in deliberating what to say, was a fatal one.
Taking up his tomahawk from the table, he examined the head of it for an
instant, and then holding it to the light, with his mouth at the handle,
he puffed out great clouds of tobacco smoke. The next moment the light
was extinguished, and this wild cannibal, tomahawk between his teeth,
sprang into bed with me. I sang out, I could not help it now; and giving
a sudden grunt of astonishment he began feeling me.
Stammering out something, I knew not what, I rolled away from him
against the wall, and then conjured him, whoever or whatever he might
be, to keep quiet, and let me get up and light the lamp again. But his
guttural responses satisfied me at once that he but ill comprehended my
meaning.
"Who-e debel you?"--he at last said--"you no speak-e, dam-me, I kill-e."
And so saying the lighted tomahawk began flourishing about me in the
dark.
"Landlord, for God's sake, Peter Coffin!" shouted I. "Landlord! Watch!
Coffin! Angels! save me!"
"Speak-e! tell-ee me who-ee be, or dam-me, I kill-e!" again growled the
cannibal, while his horrid flourishings of the tomahawk scattered the
hot tobacco ashes about me till I thought my linen would get on fire.
But thank heaven, at that moment the landlord came into the room light
in hand, and leaping from the bed I ran up to him.
"Don't be afraid now," said he, grinning again, "Queequeg here wouldn't
harm a hair of your head."
"Stop your grinning," shouted I, "and why didn't you tell me that that
infernal harpooneer was a cannibal?"
"I thought ye know'd it;--didn't I tell ye, he was a peddlin' heads
around town?--but turn flukes again and go to sleep. Queequeg, look
here--you sabbee me, I sabbee--you this man sleepe you--you sabbee?"
"Me sabbee plenty"--grunted Queequeg, puffing away at his pipe and
sitting up in bed.
"You gettee in," he added, motioning to me with his tomahawk, and
throwing the clothes to one side. He really did this in not only a civil
but a really kind and charitable way. I stood looking at him a moment.
For all his tattooings he was on the whole a clean, comely looking
cannibal. What's all this fuss I have been making about, thought I to
myself--the man's a human being just as I am: he has just as much reason
to fear me, as I have to be afraid of him. Better sleep with a sober
cannibal than a drunken Christian.
"Landlord," said I, "tell him to stash his tomahawk there, or pipe, or
whatever you call it; tell him to stop smoking, in short, and I will
turn in with him. But I don't fancy having a man smoking in bed with me.
It's dangerous. Besides, I ain't insured."
This being told to Queequeg, he at once complied, and again politely
motioned me to get into bed--rolling over to one side as much as to
say--"I won't touch a leg of ye."
"Good night, landlord," said I, "you may go."
I turned in, and never slept better in my life.
CHAPTER 4. The Counterpane.
Upon waking next morning about daylight, I found Queequeg's arm thrown
over me in the most loving and affectionate manner. You had almost
thought I had been his wife. The counterpane was of patchwork, full of
odd little parti-coloured squares and triangles; and this arm of his
tattooed all over with an interminable Cretan labyrinth of a figure,
no two parts of which were of one precise shade--owing I suppose to
his keeping his arm at sea unmethodically in sun and shade, his shirt
sleeves irregularly rolled up at various times--this same arm of his, I
say, looked for all the world like a strip of that same patchwork quilt.
Indeed, partly lying on it as the arm did when I first awoke, I could
hardly tell it from the quilt, they so blended their hues together; and
it was only by the sense of weight and pressure that I could tell that
Queequeg was hugging me.
My sensations were strange. Let me try to explain them. When I was a
child, I well remember a somewhat similar circumstance that befell me;
whether it was a reality or a dream, I never could entirely settle.
The circumstance was this. I had been cutting up some caper or other--I
think it was trying to crawl up the chimney, as I had seen a little
sweep do a few days previous; and my stepmother who, somehow or other,
was all the time whipping me, or sending me to bed supperless,--my
mother dragged me by the legs out of the chimney and packed me off to
bed, though it was only two o'clock in the afternoon of the 21st June,
the longest day in the year in our hemisphere. I felt dreadfully. But
there was no help for it, so up stairs I went to my little room in the
third floor, undressed myself as slowly as possible so as to kill time,
and with a bitter sigh got between the sheets.
I lay there dismally calculating that sixteen entire hours must elapse
before I could hope for a resurrection. Sixteen hours in bed! the
small of my back ached to think of it. And it was so light too; the
sun shining in at the window, and a great rattling of coaches in the
streets, and the sound of gay voices all over the house. I felt worse
and worse--at last I got up, dressed, and softly going down in my
stockinged feet, sought out my stepmother, and suddenly threw myself
at her feet, beseeching her as a particular favour to give me a good
slippering for my misbehaviour; anything indeed but condemning me to lie
abed such an unendurable length of time. But she was the best and most
conscientious of stepmothers, and back I had to go to my room. For
several hours I lay there broad awake, feeling a great deal worse than I
have ever done since, even from the greatest subsequent misfortunes. At
last I must have fallen into a troubled nightmare of a doze; and slowly
waking from it--half steeped in dreams--I opened my eyes, and the before
sun-lit room was now wrapped in outer darkness. Instantly I felt a shock
running through all my frame; nothing was to be seen, and nothing was
to be heard; but a supernatural hand seemed placed in mine. My arm hung
over the counterpane, and the nameless, unimaginable, silent form
or phantom, to which the hand belonged, seemed closely seated by my
bed-side. For what seemed ages piled on ages, I lay there, frozen with
the most awful fears, not daring to drag away my hand; yet ever thinking
that if I could but stir it one single inch, the horrid spell would be
broken. I knew not how this consciousness at last glided away from me;
but waking in the morning, I shudderingly remembered it all, and for
days and weeks and months afterwards I lost myself in confounding
attempts to explain the mystery. Nay, to this very hour, I often puzzle
myself with it.
Now, take away the awful fear, and my sensations at feeling the
supernatural hand in mine were very similar, in their strangeness, to
those which I experienced on waking up and seeing Queequeg's pagan
arm thrown round me. But at length all the past night's events soberly
recurred, one by one, in fixed reality, and then I lay only alive to
the comical predicament. For though I tried to move his arm--unlock his
bridegroom clasp--yet, sleeping as he was, he still hugged me tightly,
as though naught but death should part us twain. I now strove to rouse
him--"Queequeg!"--but his only answer was a snore. I then rolled over,
my neck feeling as if it were in a horse-collar; and suddenly felt a
slight scratch. Throwing aside the counterpane, there lay the tomahawk
sleeping by the savage's side, as if it were a hatchet-faced baby. A
pretty pickle, truly, thought I; abed here in a strange house in the
broad day, with a cannibal and a tomahawk! "Queequeg!--in the name of
goodness, Queequeg, wake!" At length, by dint of much wriggling, and
loud and incessant expostulations upon the unbecomingness of his
hugging a fellow male in that matrimonial sort of style, I succeeded in
extracting a grunt; and presently, he drew back his arm, shook himself
all over like a Newfoundland dog just from the water, and sat up in bed,
stiff as a pike-staff, looking at me, and rubbing his eyes as if he
did not altogether remember how I came to be there, though a dim
consciousness of knowing something about me seemed slowly dawning over
him. Meanwhile, I lay quietly eyeing him, having no serious misgivings
now, and bent upon narrowly observing so curious a creature. When, at
last, his mind seemed made up touching the character of his bedfellow,
and he became, as it were, reconciled to the fact; he jumped out upon
the floor, and by certain signs and sounds gave me to understand that,
if it pleased me, he would dress first and then leave me to dress
afterwards, leaving the whole apartment to myself. Thinks I, Queequeg,
under the circumstances, this is a very civilized overture; but, the
truth is, these savages have an innate sense of delicacy, say what
you will; it is marvellous how essentially polite they are. I pay this
particular compliment to Queequeg, because he treated me with so much
civility and consideration, while I was guilty of great rudeness;
staring at him from the bed, and watching all his toilette motions; for
the time my curiosity getting the better of my breeding. Nevertheless,
a man like Queequeg you don't see every day, he and his ways were well
worth unusual regarding.
He commenced dressing at top by donning his beaver hat, a very tall one,
by the by, and then--still minus his trowsers--he hunted up his boots.
What under the heavens he did it for, I cannot tell, but his next
movement was to crush himself--boots in hand, and hat on--under the bed;
when, from sundry violent gaspings and strainings, I inferred he was
hard at work booting himself; though by no law of propriety that I ever
heard of, is any man required to be private when putting on his
boots. But Queequeg, do you see, was a creature in the transition
stage--neither caterpillar nor butterfly. He was just enough civilized
to show off his outlandishness in the strangest possible manners. His
education was not yet completed. He was an undergraduate. If he had not
been a small degree civilized, he very probably would not have troubled
himself with boots at all; but then, if he had not been still a savage,
he never would have dreamt of getting under the bed to put them on. At
last, he emerged with his hat very much dented and crushed down over his
eyes, and began creaking and limping about the room, as if, not
being much accustomed to boots, his pair of damp, wrinkled cowhide
ones--probably not made to order either--rather pinched and tormented
him at the first go off of a bitter cold morning.
Seeing, now, that there were no curtains to the window, and that the
street being very narrow, the house opposite commanded a plain view
into the room, and observing more and more the indecorous figure that
Queequeg made, staving about with little else but his hat and boots on;
I begged him as well as I could, to accelerate his toilet somewhat,
and particularly to get into his pantaloons as soon as possible. He
complied, and then proceeded to wash himself. At that time in the
morning any Christian would have washed his face; but Queequeg, to
my amazement, contented himself with restricting his ablutions to his
chest, arms, and hands. He then donned his waistcoat, and taking up a
piece of hard soap on the wash-stand centre table, dipped it into water
and commenced lathering his face. I was watching to see where he kept
his razor, when lo and behold, he takes the harpoon from the bed corner,
slips out the long wooden stock, unsheathes the head, whets it a little
on his boot, and striding up to the bit of mirror against the wall,
begins a vigorous scraping, or rather harpooning of his cheeks. Thinks
I, Queequeg, this is using Rogers's best cutlery with a vengeance.
Afterwards I wondered the less at this operation when I came to know of
what fine steel the head of a harpoon is made, and how exceedingly sharp
the long straight edges are always kept.
The rest of his toilet was soon achieved, and he proudly marched out of
the room, wrapped up in his great pilot monkey jacket, and sporting his
harpoon like a marshal's baton.
CHAPTER 5. Breakfast.
I quickly followed suit, and descending into the bar-room accosted the
grinning landlord very pleasantly. I cherished no malice towards him,
though he had been skylarking with me not a little in the matter of my
bedfellow.
However, a good laugh is a mighty good thing, and rather too scarce a
good thing; the more's the pity. So, if any one man, in his own
proper person, afford stuff for a good joke to anybody, let him not be
backward, but let him cheerfully allow himself to spend and be spent in
that way. And the man that has anything bountifully laughable about him,
be sure there is more in that man than you perhaps think for.
The bar-room was now full of the boarders who had been dropping in the
night previous, and whom I had not as yet had a good look at. They were
nearly all whalemen; chief mates, and second mates, and third mates, and
sea carpenters, and sea coopers, and sea blacksmiths, and harpooneers,
and ship keepers; a brown and brawny company, with bosky beards; an
unshorn, shaggy set, all wearing monkey jackets for morning gowns.
You could pretty plainly tell how long each one had been ashore. This
young fellow's healthy cheek is like a sun-toasted pear in hue, and
would seem to smell almost as musky; he cannot have been three days
landed from his Indian voyage. That man next him looks a few shades
lighter; you might say a touch of satin wood is in him. In the
complexion of a third still lingers a tropic tawn, but slightly bleached
withal; HE doubtless has tarried whole weeks ashore. But who could show
a cheek like Queequeg? which, barred with various tints, seemed like the
Andes' western slope, to show forth in one array, contrasting climates,
zone by zone.
"Grub, ho!" now cried the landlord, flinging open a door, and in we went
to breakfast.
They say that men who have seen the world, thereby become quite at ease
in manner, quite self-possessed in company. Not always, though: Ledyard,
the great New England traveller, and Mungo Park, the Scotch one; of all
men, they possessed the least assurance in the parlor. But perhaps the
mere crossing of Siberia in a sledge drawn by dogs as Ledyard did, or
the taking a long solitary walk on an empty stomach, in the negro heart
of Africa, which was the sum of poor Mungo's performances--this kind of
travel, I say, may not be the very best mode of attaining a high social
polish. Still, for the most part, that sort of thing is to be had
anywhere.
These reflections just here are occasioned by the circumstance that
after we were all seated at the table, and I was preparing to hear some
good stories about whaling; to my no small surprise, nearly every
man maintained a profound silence. And not only that, but they looked
embarrassed. Yes, here were a set of sea-dogs, many of whom without the
slightest bashfulness had boarded great whales on the high seas--entire
strangers to them--and duelled them dead without winking; and yet, here
they sat at a social breakfast table--all of the same calling, all of
kindred tastes--looking round as sheepishly at each other as though they
had never been out of sight of some sheepfold among the Green Mountains.
A curious sight; these bashful bears, these timid warrior whalemen!
But as for Queequeg--why, Queequeg sat there among them--at the head of
the table, too, it so chanced; as cool as an icicle. To be sure I cannot
say much for his breeding. His greatest admirer could not have cordially
justified his bringing his harpoon into breakfast with him, and using it
there without ceremony; reaching over the table with it, to the imminent
jeopardy of many heads, and grappling the beefsteaks towards him. But
THAT was certainly very coolly done by him, and every one knows that in
most people's estimation, to do anything coolly is to do it genteelly.
We will not speak of all Queequeg's peculiarities here; how he eschewed
coffee and hot rolls, and applied his undivided attention to beefsteaks,
done rare. Enough, that when breakfast was over he withdrew like the
rest into the public room, lighted his tomahawk-pipe, and was sitting
there quietly digesting and smoking with his inseparable hat on, when I
sallied out for a stroll.
CHAPTER 6. The Street.
If I had been astonished at first catching a glimpse of so outlandish
an individual as Queequeg circulating among the polite society of a
civilized town, that astonishment soon departed upon taking my first
daylight stroll through the streets of New Bedford.
In thoroughfares nigh the docks, any considerable seaport will
frequently offer to view the queerest looking nondescripts from foreign
parts. Even in Broadway and Chestnut streets, Mediterranean mariners
will sometimes jostle the affrighted ladies. Regent Street is not
unknown to Lascars and Malays; and at Bombay, in the Apollo Green, live
Yankees have often scared the natives. But New Bedford beats all Water
Street and Wapping. In these last-mentioned haunts you see only sailors;
but in New Bedford, actual cannibals stand chatting at street corners;
savages outright; many of whom yet carry on their bones unholy flesh. It
makes a stranger stare.
But, besides the Feegeeans, Tongatobooarrs, Erromanggoans, Pannangians,
and Brighggians, and, besides the wild specimens of the whaling-craft
which unheeded reel about the streets, you will see other sights still
more curious, certainly more comical. There weekly arrive in this town
scores of green Vermonters and New Hampshire men, all athirst for gain
and glory in the fishery. They are mostly young, of stalwart frames;
fellows who have felled forests, and now seek to drop the axe and snatch
the whale-lance. Many are as green as the Green Mountains whence they
came. In some things you would think them but a few hours old. Look
there! that chap strutting round the corner. He wears a beaver hat and
swallow-tailed coat, girdled with a sailor-belt and sheath-knife. Here
comes another with a sou'-wester and a bombazine cloak.
No town-bred dandy will compare with a country-bred one--I mean a
downright bumpkin dandy--a fellow that, in the dog-days, will mow his
two acres in buckskin gloves for fear of tanning his hands. Now when a
country dandy like this takes it into his head to make a distinguished
reputation, and joins the great whale-fishery, you should see the
comical things he does upon reaching the seaport. In bespeaking his
sea-outfit, he orders bell-buttons to his waistcoats; straps to his
canvas trowsers. Ah, poor Hay-Seed! how bitterly will burst those straps
in the first howling gale, when thou art driven, straps, buttons, and
all, down the throat of the tempest.
But think not that this famous town has only harpooneers, cannibals, and
bumpkins to show her visitors. Not at all. Still New Bedford is a queer
place. Had it not been for us whalemen, that tract of land would this
day perhaps have been in as howling condition as the coast of Labrador.
As it is, parts of her back country are enough to frighten one, they
look so bony. The town itself is perhaps the dearest place to live
in, in all New England. It is a land of oil, true enough: but not like
Canaan; a land, also, of corn and wine. The streets do not run with
milk; nor in the spring-time do they pave them with fresh eggs. Yet, in
spite of this, nowhere in all America will you find more patrician-like
houses; parks and gardens more opulent, than in New Bedford. Whence came
they? how planted upon this once scraggy scoria of a country?
Go and gaze upon the iron emblematical harpoons round yonder lofty
mansion, and your question will be answered. Yes; all these brave houses
and flowery gardens came from the Atlantic, Pacific, and Indian oceans.
One and all, they were harpooned and dragged up hither from the bottom
of the sea. Can Herr Alexander perform a feat like that?
In New Bedford, fathers, they say, give whales for dowers to their
daughters, and portion off their nieces with a few porpoises a-piece.
You must go to New Bedford to see a brilliant wedding; for, they say,
they have reservoirs of oil in every house, and every night recklessly
burn their lengths in spermaceti candles.
In summer time, the town is sweet to see; full of fine maples--long
avenues of green and gold. And in August, high in air, the beautiful and
bountiful horse-chestnuts, candelabra-wise, proffer the passer-by their
tapering upright cones of congregated blossoms. So omnipotent is art;
which in many a district of New Bedford has superinduced bright terraces
of flowers upon the barren refuse rocks thrown aside at creation's final
day.
And the women of New Bedford, they bloom like their own red roses. But
roses only bloom in summer; whereas the fine carnation of their cheeks
is perennial as sunlight in the seventh heavens. Elsewhere match that
bloom of theirs, ye cannot, save in Salem, where they tell me the young
girls breathe such musk, their sailor sweethearts smell them miles off
shore, as though they were drawing nigh the odorous Moluccas instead of
the Puritanic sands.
CHAPTER 7. The Chapel.
In this same New Bedford there stands a Whaleman's Chapel, and few are
the moody fishermen, shortly bound for the Indian Ocean or Pacific, who
fail to make a Sunday visit to the spot. I am sure that I did not.
Returning from my first morning stroll, I again sallied out upon this
special errand. The sky had changed from clear, sunny cold, to driving
sleet and mist. Wrapping myself in my shaggy jacket of the cloth called
bearskin, I fought my way against the stubborn storm. Entering, I
found a small scattered congregation of sailors, and sailors' wives and
widows. A muffled silence reigned, only broken at times by the shrieks
of the storm. Each silent worshipper seemed purposely sitting apart from
the other, as if each silent grief were insular and incommunicable. The
chaplain had not yet arrived; and there these silent islands of men and
women sat steadfastly eyeing several marble tablets, with black borders,
masoned into the wall on either side the pulpit. Three of them ran
something like the following, but I do not pretend to quote:--
SACRED
TO THE MEMORY
OF
JOHN TALBOT,
Who, at the age of eighteen, was lost overboard,
Near the Isle of Desolation, off Patagonia,
November 1st, 1836.
THIS TABLET
Is erected to his Memory
BY HIS SISTER.

SACRED
TO THE MEMORY
OF
ROBERT LONG, WILLIS ELLERY, NATHAN COLEMAN,
WALTER CANNY, SETH MACY, AND SAMUEL GLEIG,
Forming one of the boats' crews
OF
THE SHIP ELIZA
Who were towed out of sight by a Whale,
On the Off-shore Ground in the
PACIFIC,
December 31st, 1839.
THIS MARBLE
Is here placed by their surviving
SHIPMATES.

SACRED
TO THE MEMORY
OF
The late
CAPTAIN EZEKIEL HARDY,
Who in the bows of his boat was killed by
a Sperm Whale on the coast of Japan,
AUGUST 3d, 1833.
THIS TABLET
Is erected to his Memory
BY HIS WIDOW.
Shaking off the sleet from my ice-glazed hat and jacket, I seated myself
near the door, and turning sideways was surprised to see Queequeg near
me. Affected by the solemnity of the scene, there was a wondering gaze
of incredulous curiosity in his countenance. This savage was the only
person present who seemed to notice my entrance; because he was the only
one who could not read, and, therefore, was not reading those frigid
inscriptions on the wall. Whether any of the relatives of the seamen
whose names appeared there were now among the congregation, I knew not;
but so many are the unrecorded accidents in the fishery, and so plainly
did several women present wear the countenance if not the trappings
of some unceasing grief, that I feel sure that here before me were
assembled those, in whose unhealing hearts the sight of those bleak
tablets sympathetically caused the old wounds to bleed afresh.
Oh! ye whose dead lie buried beneath the green grass; who standing among
flowers can say--here, HERE lies my beloved; ye know not the desolation
that broods in bosoms like these. What bitter blanks in those
black-bordered marbles which cover no ashes! What despair in those
immovable inscriptions! What deadly voids and unbidden infidelities in
the lines that seem to gnaw upon all Faith, and refuse resurrections to
the beings who have placelessly perished without a grave. As well might
those tablets stand in the cave of Elephanta as here.
In what census of living creatures, the dead of mankind are included;
why it is that a universal proverb says of them, that they tell no
tales, though containing more secrets than the Goodwin Sands; how it is
that to his name who yesterday departed for the other world, we prefix
so significant and infidel a word, and yet do not thus entitle him, if
he but embarks for the remotest Indies of this living earth; why the
Life Insurance Companies pay death-forfeitures upon immortals; in what
eternal, unstirring paralysis, and deadly, hopeless trance, yet lies
antique Adam who died sixty round centuries ago; how it is that we
still refuse to be comforted for those who we nevertheless maintain are
dwelling in unspeakable bliss; why all the living so strive to hush all
the dead; wherefore but the rumor of a knocking in a tomb will terrify a
whole city. All these things are not without their meanings.
But Faith, like a jackal, feeds among the tombs, and even from these
dead doubts she gathers her most vital hope.
It needs scarcely to be told, with what feelings, on the eve of a
Nantucket voyage, I regarded those marble tablets, and by the murky
light of that darkened, doleful day read the fate of the whalemen
who had gone before me. Yes, Ishmael, the same fate may be thine. But
somehow I grew merry again. Delightful inducements to embark, fine
chance for promotion, it seems--aye, a stove boat will make me an
immortal by brevet. Yes, there is death in this business of whaling--a
speechlessly quick chaotic bundling of a man into Eternity. But what
then? Methinks we have hugely mistaken this matter of Life and Death.
Methinks that what they call my shadow here on earth is my true
substance. Methinks that in looking at things spiritual, we are too
much like oysters observing the sun through the water, and thinking that
thick water the thinnest of air. Methinks my body is but the lees of my
better being. In fact take my body who will, take it I say, it is not
me. And therefore three cheers for Nantucket; and come a stove boat and
stove body when they will, for stave my soul, Jove himself cannot.
CHAPTER 8. The Pulpit.
I had not been seated very long ere a man of a certain venerable
robustness entered; immediately as the storm-pelted door flew back upon
admitting him, a quick regardful eyeing of him by all the congregation,
sufficiently attested that this fine old man was the chaplain. Yes, it
was the famous Father Mapple, so called by the whalemen, among whom he
was a very great favourite. He had been a sailor and a harpooneer in his
youth, but for many years past had dedicated his life to the ministry.
At the time I now write of, Father Mapple was in the hardy winter of a
healthy old age; that sort of old age which seems merging into a second
flowering youth, for among all the fissures of his wrinkles, there shone
certain mild gleams of a newly developing bloom--the spring verdure
peeping forth even beneath February's snow. No one having previously
heard his history, could for the first time behold Father Mapple without
the utmost interest, because there were certain engrafted clerical
peculiarities about him, imputable to that adventurous maritime life
he had led. When he entered I observed that he carried no umbrella, and
certainly had not come in his carriage, for his tarpaulin hat ran down
with melting sleet, and his great pilot cloth jacket seemed almost to
drag him to the floor with the weight of the water it had absorbed.
However, hat and coat and overshoes were one by one removed, and hung up
in a little space in an adjacent corner; when, arrayed in a decent suit,
he quietly approached the pulpit.
Like most old fashioned pulpits, it was a very lofty one, and since a
regular stairs to such a height would, by its long angle with the floor,
seriously contract the already small area of the chapel, the architect,
it seemed, had acted upon the hint of Father Mapple, and finished the
pulpit without a stairs, substituting a perpendicular side ladder, like
those used in mounting a ship from a boat at sea. The wife of a whaling
captain had provided the chapel with a handsome pair of red worsted
man-ropes for this ladder, which, being itself nicely headed, and
stained with a mahogany colour, the whole contrivance, considering what
manner of chapel it was, seemed by no means in bad taste. Halting for
an instant at the foot of the ladder, and with both hands grasping the
ornamental knobs of the man-ropes, Father Mapple cast a look upwards,
and then with a truly sailor-like but still reverential dexterity, hand
over hand, mounted the steps as if ascending the main-top of his vessel.
The perpendicular parts of this side ladder, as is usually the case with
swinging ones, were of cloth-covered rope, only the rounds were of wood,
so that at every step there was a joint. At my first glimpse of the
pulpit, it had not escaped me that however convenient for a ship,
these joints in the present instance seemed unnecessary. For I was not
prepared to see Father Mapple after gaining the height, slowly turn
round, and stooping over the pulpit, deliberately drag up the ladder
step by step, till the whole was deposited within, leaving him
impregnable in his little Quebec.
I pondered some time without fully comprehending the reason for this.
Father Mapple enjoyed such a wide reputation for sincerity and sanctity,
that I could not suspect him of courting notoriety by any mere tricks
of the stage. No, thought I, there must be some sober reason for this
thing; furthermore, it must symbolize something unseen. Can it be,
then, that by that act of physical isolation, he signifies his spiritual
withdrawal for the time, from all outward worldly ties and connexions?
Yes, for replenished with the meat and wine of the word, to the faithful
man of God, this pulpit, I see, is a self-containing stronghold--a lofty
Ehrenbreitstein, with a perennial well of water within the walls.
But the side ladder was not the only strange feature of the place,
borrowed from the chaplain's former sea-farings. Between the marble
cenotaphs on either hand of the pulpit, the wall which formed its back
was adorned with a large painting representing a gallant ship beating
against a terrible storm off a lee coast of black rocks and snowy
breakers. But high above the flying scud and dark-rolling clouds, there
floated a little isle of sunlight, from which beamed forth an angel's
face; and this bright face shed a distinct spot of radiance upon the
ship's tossed deck, something like that silver plate now inserted into
the Victory's plank where Nelson fell. "Ah, noble ship," the angel
seemed to say, "beat on, beat on, thou noble ship, and bear a hardy
helm; for lo! the sun is breaking through; the clouds are rolling
off--serenest azure is at hand."
Nor was the pulpit itself without a trace of the same sea-taste that
had achieved the ladder and the picture. Its panelled front was in
the likeness of a ship's bluff bows, and the Holy Bible rested on a
projecting piece of scroll work, fashioned after a ship's fiddle-headed
beak.
What could be more full of meaning?--for the pulpit is ever this earth's
foremost part; all the rest comes in its rear; the pulpit leads the
world. From thence it is the storm of God's quick wrath is first
descried, and the bow must bear the earliest brunt. From thence it is
the God of breezes fair or foul is first invoked for favourable winds.
Yes, the world's a ship on its passage out, and not a voyage complete;
and the pulpit is its prow.
CHAPTER 9. The Sermon.
Father Mapple rose, and in a mild voice of unassuming authority ordered
the scattered people to condense. "Starboard gangway, there! side away
to larboard--larboard gangway to starboard! Midships! midships!"
There was a low rumbling of heavy sea-boots among the benches, and a
still slighter shuffling of women's shoes, and all was quiet again, and
every eye on the preacher.
He paused a little; then kneeling in the pulpit's bows, folded his large
brown hands across his chest, uplifted his closed eyes, and offered
a prayer so deeply devout that he seemed kneeling and praying at the
bottom of the sea.
This ended, in prolonged solemn tones, like the continual tolling of
a bell in a ship that is foundering at sea in a fog--in such tones he
commenced reading the following hymn; but changing his manner towards
the concluding stanzas, burst forth with a pealing exultation and joy--
"The ribs and terrors in the whale,
Arched over me a dismal gloom,
While all God's sun-lit waves rolled by,
And lift me deepening down to doom.
"I saw the opening maw of hell,
With endless pains and sorrows there;
Which none but they that feel can tell--
Oh, I was plunging to despair.
"In black distress, I called my God,
When I could scarce believe him mine,
He bowed his ear to my complaints--
No more the whale did me confine.
"With speed he flew to my relief,
As on a radiant dolphin borne;
Awful, yet bright, as lightning shone
The face of my Deliverer God.
"My song for ever shall record
That terrible, that joyful hour;
I give the glory to my God,
His all the mercy and the power."
Nearly all joined in singing this hymn, which swelled high above the
howling of the storm. A brief pause ensued; the preacher slowly turned
over the leaves of the Bible, and at last, folding his hand down upon
the proper page, said: "Beloved shipmates, clinch the last verse of the
first chapter of Jonah--'And God had prepared a great fish to swallow up
Jonah.'"
"Shipmates, this book, containing only four chapters--four yarns--is one
of the smallest strands in the mighty cable of the Scriptures. Yet what
depths of the soul does Jonah's deep sealine sound! what a pregnant
lesson to us is this prophet! What a noble thing is that canticle in the
fish's belly! How billow-like and boisterously grand! We feel the floods
surging over us; we sound with him to the kelpy bottom of the waters;
sea-weed and all the slime of the sea is about us! But WHAT is this
lesson that the book of Jonah teaches? Shipmates, it is a two-stranded
lesson; a lesson to us all as sinful men, and a lesson to me as a pilot
of the living God. As sinful men, it is a lesson to us all, because it
is a story of the sin, hard-heartedness, suddenly awakened fears, the
swift punishment, repentance, prayers, and finally the deliverance and
joy of Jonah. As with all sinners among men, the sin of this son of
Amittai was in his wilful disobedience of the command of God--never
mind now what that command was, or how conveyed--which he found a hard
command. But all the things that God would have us do are hard for us to
do--remember that--and hence, he oftener commands us than endeavors to
persuade. And if we obey God, we must disobey ourselves; and it is in
this disobeying ourselves, wherein the hardness of obeying God consists.
"With this sin of disobedience in him, Jonah still further flouts at
God, by seeking to flee from Him. He thinks that a ship made by men will
carry him into countries where God does not reign, but only the Captains
of this earth. He skulks about the wharves of Joppa, and seeks a ship
that's bound for Tarshish. There lurks, perhaps, a hitherto unheeded
meaning here. By all accounts Tarshish could have been no other city
than the modern Cadiz. That's the opinion of learned men. And where is
Cadiz, shipmates? Cadiz is in Spain; as far by water, from Joppa,
as Jonah could possibly have sailed in those ancient days, when the
Atlantic was an almost unknown sea. Because Joppa, the modern Jaffa,
shipmates, is on the most easterly coast of the Mediterranean, the
Syrian; and Tarshish or Cadiz more than two thousand miles to the
westward from that, just outside the Straits of Gibraltar. See ye
not then, shipmates, that Jonah sought to flee world-wide from God?
Miserable man! Oh! most contemptible and worthy of all scorn; with
slouched hat and guilty eye, skulking from his God; prowling among the
shipping like a vile burglar hastening to cross the seas. So disordered,
self-condemning is his look, that had there been policemen in those
days, Jonah, on the mere suspicion of something wrong, had been arrested
ere he touched a deck. How plainly he's a fugitive! no baggage, not a
hat-box, valise, or carpet-bag,--no friends accompany him to the wharf
with their adieux. At last, after much dodging search, he finds the
Tarshish ship receiving the last items of her cargo; and as he steps on
board to see its Captain in the cabin, all the sailors for the moment
desist from hoisting in the goods, to mark the stranger's evil eye.
Jonah sees this; but in vain he tries to look all ease and confidence;
in vain essays his wretched smile. Strong intuitions of the man assure
the mariners he can be no innocent. In their gamesome but still serious
way, one whispers to the other--"Jack, he's robbed a widow;" or, "Joe,
do you mark him; he's a bigamist;" or, "Harry lad, I guess he's the
adulterer that broke jail in old Gomorrah, or belike, one of the missing
murderers from Sodom." Another runs to read the bill that's stuck
against the spile upon the wharf to which the ship is moored, offering
five hundred gold coins for the apprehension of a parricide, and
containing a description of his person. He reads, and looks from Jonah
to the bill; while all his sympathetic shipmates now crowd round Jonah,
prepared to lay their hands upon him. Frighted Jonah trembles, and
summoning all his boldness to his face, only looks so much the more a
coward. He will not confess himself suspected; but that itself is strong
suspicion. So he makes the best of it; and when the sailors find him
not to be the man that is advertised, they let him pass, and he descends
into the cabin.
"'Who's there?' cries the Captain at his busy desk, hurriedly making
out his papers for the Customs--'Who's there?' Oh! how that harmless
question mangles Jonah! For the instant he almost turns to flee again.
But he rallies. 'I seek a passage in this ship to Tarshish; how soon
sail ye, sir?' Thus far the busy Captain had not looked up to Jonah,
though the man now stands before him; but no sooner does he hear that
hollow voice, than he darts a scrutinizing glance. 'We sail with the
next coming tide,' at last he slowly answered, still intently eyeing
him. 'No sooner, sir?'--'Soon enough for any honest man that goes a
passenger.' Ha! Jonah, that's another stab. But he swiftly calls away
the Captain from that scent. 'I'll sail with ye,'--he says,--'the
passage money how much is that?--I'll pay now.' For it is particularly
written, shipmates, as if it were a thing not to be overlooked in this
history, 'that he paid the fare thereof' ere the craft did sail. And
taken with the context, this is full of meaning.
"Now Jonah's Captain, shipmates, was one whose discernment detects crime
in any, but whose cupidity exposes it only in the penniless. In this
world, shipmates, sin that pays its way can travel freely, and without
a passport; whereas Virtue, if a pauper, is stopped at all frontiers.
So Jonah's Captain prepares to test the length of Jonah's purse, ere he
judge him openly. He charges him thrice the usual sum; and it's assented
to. Then the Captain knows that Jonah is a fugitive; but at the same
time resolves to help a flight that paves its rear with gold. Yet when
Jonah fairly takes out his purse, prudent suspicions still molest the
Captain. He rings every coin to find a counterfeit. Not a forger, any
way, he mutters; and Jonah is put down for his passage. 'Point out my
state-room, Sir,' says Jonah now, 'I'm travel-weary; I need sleep.'
'Thou lookest like it,' says the Captain, 'there's thy room.' Jonah
enters, and would lock the door, but the lock contains no key. Hearing
him foolishly fumbling there, the Captain laughs lowly to himself, and
mutters something about the doors of convicts' cells being never allowed
to be locked within. All dressed and dusty as he is, Jonah throws
himself into his berth, and finds the little state-room ceiling almost
resting on his forehead. The air is close, and Jonah gasps. Then, in
that contracted hole, sunk, too, beneath the ship's water-line, Jonah
feels the heralding presentiment of that stifling hour, when the whale
shall hold him in the smallest of his bowels' wards.
"Screwed at its axis against the side, a swinging lamp slightly
oscillates in Jonah's room; and the ship, heeling over towards the wharf
with the weight of the last bales received, the lamp, flame and all,
though in slight motion, still maintains a permanent obliquity with
reference to the room; though, in truth, infallibly straight itself, it
but made obvious the false, lying levels among which it hung. The lamp
alarms and frightens Jonah; as lying in his berth his tormented eyes
roll round the place, and this thus far successful fugitive finds no
refuge for his restless glance. But that contradiction in the lamp more
and more appals him. The floor, the ceiling, and the side, are all awry.
'Oh! so my conscience hangs in me!' he groans, 'straight upwards, so it
burns; but the chambers of my soul are all in crookedness!'
"Like one who after a night of drunken revelry hies to his bed, still
reeling, but with conscience yet pricking him, as the plungings of the
Roman race-horse but so much the more strike his steel tags into him; as
one who in that miserable plight still turns and turns in giddy anguish,
praying God for annihilation until the fit be passed; and at last amid
the whirl of woe he feels, a deep stupor steals over him, as over the
man who bleeds to death, for conscience is the wound, and there's naught
to staunch it; so, after sore wrestlings in his berth, Jonah's prodigy
of ponderous misery drags him drowning down to sleep.
"And now the time of tide has come; the ship casts off her cables; and
from the deserted wharf the uncheered ship for Tarshish, all careening,
glides to sea. That ship, my friends, was the first of recorded
smugglers! the contraband was Jonah. But the sea rebels; he will not
bear the wicked burden. A dreadful storm comes on, the ship is like to
break. But now when the boatswain calls all hands to lighten her;
when boxes, bales, and jars are clattering overboard; when the wind
is shrieking, and the men are yelling, and every plank thunders with
trampling feet right over Jonah's head; in all this raging tumult, Jonah
sleeps his hideous sleep. He sees no black sky and raging sea, feels not
the reeling timbers, and little hears he or heeds he the far rush of the
mighty whale, which even now with open mouth is cleaving the seas after
him. Aye, shipmates, Jonah was gone down into the sides of the ship--a
berth in the cabin as I have taken it, and was fast asleep. But the
frightened master comes to him, and shrieks in his dead ear, 'What
meanest thou, O, sleeper! arise!' Startled from his lethargy by that
direful cry, Jonah staggers to his feet, and stumbling to the deck,
grasps a shroud, to look out upon the sea. But at that moment he is
sprung upon by a panther billow leaping over the bulwarks. Wave after
wave thus leaps into the ship, and finding no speedy vent runs roaring
fore and aft, till the mariners come nigh to drowning while yet afloat.
And ever, as the white moon shows her affrighted face from the steep
gullies in the blackness overhead, aghast Jonah sees the rearing
bowsprit pointing high upward, but soon beat downward again towards the
tormented deep.
"Terrors upon terrors run shouting through his soul. In all his cringing
attitudes, the God-fugitive is now too plainly known. The sailors mark
him; more and more certain grow their suspicions of him, and at last,
fully to test the truth, by referring the whole matter to high Heaven,
they fall to casting lots, to see for whose cause this great tempest was
upon them. The lot is Jonah's; that discovered, then how furiously they
mob him with their questions. 'What is thine occupation? Whence comest
thou? Thy country? What people?' But mark now, my shipmates, the behavior
of poor Jonah. The eager mariners but ask him who he is, and where
from; whereas, they not only receive an answer to those questions,
but likewise another answer to a question not put by them, but the
unsolicited answer is forced from Jonah by the hard hand of God that is
upon him.
"'I am a Hebrew,' he cries--and then--'I fear the Lord the God of Heaven
who hath made the sea and the dry land!' Fear him, O Jonah? Aye, well
mightest thou fear the Lord God THEN! Straightway, he now goes on to
make a full confession; whereupon the mariners became more and more
appalled, but still are pitiful. For when Jonah, not yet supplicating
God for mercy, since he but too well knew the darkness of his
deserts,--when wretched Jonah cries out to them to take him and cast him
forth into the sea, for he knew that for HIS sake this great tempest
was upon them; they mercifully turn from him, and seek by other means to
save the ship. But all in vain; the indignant gale howls louder;
then, with one hand raised invokingly to God, with the other they not
unreluctantly lay hold of Jonah.
"And now behold Jonah taken up as an anchor and dropped into the sea;
when instantly an oily calmness floats out from the east, and the sea
is still, as Jonah carries down the gale with him, leaving smooth
water behind. He goes down in the whirling heart of such a masterless
commotion that he scarce heeds the moment when he drops seething into
the yawning jaws awaiting him; and the whale shoots-to all his ivory
teeth, like so many white bolts, upon his prison. Then Jonah prayed unto
the Lord out of the fish's belly. But observe his prayer, and learn a
weighty lesson. For sinful as he is, Jonah does not weep and wail for
direct deliverance. He feels that his dreadful punishment is just. He
leaves all his deliverance to God, contenting himself with this, that
spite of all his pains and pangs, he will still look towards His holy
temple. And here, shipmates, is true and faithful repentance; not
clamorous for pardon, but grateful for punishment. And how pleasing to
God was this conduct in Jonah, is shown in the eventual deliverance of
him from the sea and the whale. Shipmates, I do not place Jonah before
you to be copied for his sin but I do place him before you as a model
for repentance. Sin not; but if you do, take heed to repent of it like
Jonah."
While he was speaking these words, the howling of the shrieking,
slanting storm without seemed to add new power to the preacher, who,
when describing Jonah's sea-storm, seemed tossed by a storm himself.
His deep chest heaved as with a ground-swell; his tossed arms seemed the
warring elements at work; and the thunders that rolled away from off his
swarthy brow, and the light leaping from his eye, made all his simple
hearers look on him with a quick fear that was strange to them.
There now came a lull in his look, as he silently turned over the leaves
of the Book once more; and, at last, standing motionless, with closed
eyes, for the moment, seemed communing with God and himself.
But again he leaned over towards the people, and bowing his head lowly,
with an aspect of the deepest yet manliest humility, he spake these
words:
"Shipmates, God has laid but one hand upon you; both his hands press
upon me. I have read ye by what murky light may be mine the lesson that
Jonah teaches to all sinners; and therefore to ye, and still more to me,
for I am a greater sinner than ye. And now how gladly would I come down
from this mast-head and sit on the hatches there where you sit, and
listen as you listen, while some one of you reads ME that other and more
awful lesson which Jonah teaches to ME, as a pilot of the living God.
How being an anointed pilot-prophet, or speaker of true things, and
bidden by the Lord to sound those unwelcome truths in the ears of a
wicked Nineveh, Jonah, appalled at the hostility he should raise, fled
from his mission, and sought to escape his duty and his God by taking
ship at Joppa. But God is everywhere; Tarshish he never reached. As we
have seen, God came upon him in the whale, and swallowed him down to
living gulfs of doom, and with swift slantings tore him along 'into the
midst of the seas,' where the eddying depths sucked him ten thousand
fathoms down, and 'the weeds were wrapped about his head,' and all the
watery world of woe bowled over him. Yet even then beyond the reach of
any plummet--'out of the belly of hell'--when the whale grounded upon
the ocean's utmost bones, even then, God heard the engulphed, repenting
prophet when he cried. Then God spake unto the fish; and from the
shuddering cold and blackness of the sea, the whale came breeching
up towards the warm and pleasant sun, and all the delights of air and
earth; and 'vomited out Jonah upon the dry land;' when the word of the
Lord came a second time; and Jonah, bruised and beaten--his ears, like
two sea-shells, still multitudinously murmuring of the ocean--Jonah
did the Almighty's bidding. And what was that, shipmates? To preach the
Truth to the face of Falsehood! That was it!
"This, shipmates, this is that other lesson; and woe to that pilot of
the living God who slights it. Woe to him whom this world charms from
Gospel duty! Woe to him who seeks to pour oil upon the waters when God
has brewed them into a gale! Woe to him who seeks to please rather than
to appal! Woe to him whose good name is more to him than goodness! Woe
to him who, in this world, courts not dishonour! Woe to him who would
not be true, even though to be false were salvation! Yea, woe to him
who, as the great Pilot Paul has it, while preaching to others is
himself a castaway!"
He dropped and fell away from himself for a moment; then lifting his
face to them again, showed a deep joy in his eyes, as he cried out with
a heavenly enthusiasm,--"But oh! shipmates! on the starboard hand of
every woe, there is a sure delight; and higher the top of that delight,
than the bottom of the woe is deep. Is not the main-truck higher than
the kelson is low? Delight is to him--a far, far upward, and inward
delight--who against the proud gods and commodores of this earth, ever
stands forth his own inexorable self. Delight is to him whose strong
arms yet support him, when the ship of this base treacherous world has
gone down beneath him. Delight is to him, who gives no quarter in the
truth, and kills, burns, and destroys all sin though he pluck it out
from under the robes of Senators and Judges. Delight,--top-gallant
delight is to him, who acknowledges no law or lord, but the Lord his
God, and is only a patriot to heaven. Delight is to him, whom all the
waves of the billows of the seas of the boisterous mob can never shake
from this sure Keel of the Ages. And eternal delight and deliciousness
will be his, who coming to lay him down, can say with his final
breath--O Father!--chiefly known to me by Thy rod--mortal or immortal,
here I die. I have striven to be Thine, more than to be this world's, or
mine own. Yet this is nothing: I leave eternity to Thee; for what is man
that he should live out the lifetime of his God?"
He said no more, but slowly waving a benediction, covered his face with
his hands, and so remained kneeling, till all the people had departed,
and he was left alone in the place.
CHAPTER 10. A Bosom Friend.
Returning to the Spouter-Inn from the Chapel, I found Queequeg there
quite alone; he having left the Chapel before the benediction some time.
He was sitting on a bench before the fire, with his feet on the stove
hearth, and in one hand was holding close up to his face that little
negro idol of his; peering hard into its face, and with a jack-knife
gently whittling away at its nose, meanwhile humming to himself in his
heathenish way.
But being now interrupted, he put up the image; and pretty soon, going
to the table, took up a large book there, and placing it on his lap
began counting the pages with deliberate regularity; at every fiftieth
page--as I fancied--stopping a moment, looking vacantly around him, and
giving utterance to a long-drawn gurgling whistle of astonishment. He
would then begin again at the next fifty; seeming to commence at number
one each time, as though he could not count more than fifty, and it was
only by such a large number of fifties being found together, that his
astonishment at the multitude of pages was excited.
With much interest I sat watching him. Savage though he was, and
hideously marred about the face--at least to my taste--his countenance
yet had a something in it which was by no means disagreeable. You cannot
hide the soul. Through all his unearthly tattooings, I thought I saw
the traces of a simple honest heart; and in his large, deep eyes,
fiery black and bold, there seemed tokens of a spirit that would dare a
thousand devils. And besides all this, there was a certain lofty bearing
about the Pagan, which even his uncouthness could not altogether maim.
He looked like a man who had never cringed and never had had a creditor.
Whether it was, too, that his head being shaved, his forehead was drawn
out in freer and brighter relief, and looked more expansive than it
otherwise would, this I will not venture to decide; but certain it was
his head was phrenologically an excellent one. It may seem ridiculous,
but it reminded me of General Washington's head, as seen in the popular
busts of him. It had the same long regularly graded retreating slope
from above the brows, which were likewise very projecting, like two
long promontories thickly wooded on top. Queequeg was George Washington
cannibalistically developed.
Whilst I was thus closely scanning him, half-pretending meanwhile to be
looking out at the storm from the casement, he never heeded my presence,
never troubled himself with so much as a single glance; but appeared
wholly occupied with counting the pages of the marvellous book.
Considering how sociably we had been sleeping together the night
previous, and especially considering the affectionate arm I had found
thrown over me upon waking in the morning, I thought this indifference
of his very strange. But savages are strange beings; at times you do not
know exactly how to take them. At first they are overawing; their calm
self-collectedness of simplicity seems a Socratic wisdom. I had noticed
also that Queequeg never consorted at all, or but very little, with the
other seamen in the inn. He made no advances whatever; appeared to have
no desire to enlarge the circle of his acquaintances. All this struck
me as mighty singular; yet, upon second thoughts, there was something
almost sublime in it. Here was a man some twenty thousand miles from
home, by the way of Cape Horn, that is--which was the only way he could
get there--thrown among people as strange to him as though he were in
the planet Jupiter; and yet he seemed entirely at his ease; preserving
the utmost serenity; content with his own companionship; always equal to
himself. Surely this was a touch of fine philosophy; though no doubt he
had never heard there was such a thing as that. But, perhaps, to be
true philosophers, we mortals should not be conscious of so living or
so striving. So soon as I hear that such or such a man gives himself
out for a philosopher, I conclude that, like the dyspeptic old woman, he
must have "broken his digester."
As I sat there in that now lonely room; the fire burning low, in that
mild stage when, after its first intensity has warmed the air, it then
only glows to be looked at; the evening shades and phantoms gathering
round the casements, and peering in upon us silent, solitary twain;
the storm booming without in solemn swells; I began to be sensible of
strange feelings. I felt a melting in me. No more my splintered heart
and maddened hand were turned against the wolfish world. This soothing
savage had redeemed it. There he sat, his very indifference speaking a
nature in which there lurked no civilized hypocrisies and bland deceits.
Wild he was; a very sight of sights to see; yet I began to feel myself
mysteriously drawn towards him. And those same things that would have
repelled most others, they were the very magnets that thus drew me. I'll
try a pagan friend, thought I, since Christian kindness has proved but
hollow courtesy. I drew my bench near him, and made some friendly signs
and hints, doing my best to talk with him meanwhile. At first he little
noticed these advances; but presently, upon my referring to his last
night's hospitalities, he made out to ask me whether we were again to be
bedfellows. I told him yes; whereat I thought he looked pleased, perhaps
a little complimented.
We then turned over the book together, and I endeavored to explain to
him the purpose of the printing, and the meaning of the few pictures
that were in it. Thus I soon engaged his interest; and from that we went
to jabbering the best we could about the various outer sights to be seen
in this famous town. Soon I proposed a social smoke; and, producing
his pouch and tomahawk, he quietly offered me a puff. And then we sat
exchanging puffs from that wild pipe of his, and keeping it regularly
passing between us.
If there yet lurked any ice of indifference towards me in the Pagan's
breast, this pleasant, genial smoke we had, soon thawed it out, and left
us cronies. He seemed to take to me quite as naturally and unbiddenly as
I to him; and when our smoke was over, he pressed his forehead against
mine, clasped me round the waist, and said that henceforth we were
married; meaning, in his country's phrase, that we were bosom friends;
he would gladly die for me, if need should be. In a countryman, this
sudden flame of friendship would have seemed far too premature, a thing
to be much distrusted; but in this simple savage those old rules would
not apply.
After supper, and another social chat and smoke, we went to our room
together. He made me a present of his embalmed head; took out his
enormous tobacco wallet, and groping under the tobacco, drew out
some thirty dollars in silver; then spreading them on the table, and
mechanically dividing them into two equal portions, pushed one of them
towards me, and said it was mine. I was going to remonstrate; but he
silenced me by pouring them into my trowsers' pockets. I let them stay.
He then went about his evening prayers, took out his idol, and removed
the paper fireboard. By certain signs and symptoms, I thought he seemed
anxious for me to join him; but well knowing what was to follow, I
deliberated a moment whether, in case he invited me, I would comply or
otherwise.
I was a good Christian; born and bred in the bosom of the infallible
Presbyterian Church. How then could I unite with this wild idolator in
worshipping his piece of wood? But what is worship? thought I. Do
you suppose now, Ishmael, that the magnanimous God of heaven and
earth--pagans and all included--can possibly be jealous of an
insignificant bit of black wood? Impossible! But what is worship?--to do
the will of God--THAT is worship. And what is the will of God?--to do to
my fellow man what I would have my fellow man to do to me--THAT is the
will of God. Now, Queequeg is my fellow man. And what do I wish that
this Queequeg would do to me? Why, unite with me in my particular
Presbyterian form of worship. Consequently, I must then unite with him
in his; ergo, I must turn idolator. So I kindled the shavings; helped
prop up the innocent little idol; offered him burnt biscuit with
Queequeg; salamed before him twice or thrice; kissed his nose; and that
done, we undressed and went to bed, at peace with our own consciences
and all the world. But we did not go to sleep without some little chat.
How it is I know not; but there is no place like a bed for confidential
disclosures between friends. Man and wife, they say, there open the very
bottom of their souls to each other; and some old couples often lie
and chat over old times till nearly morning. Thus, then, in our hearts'
honeymoon, lay I and Queequeg--a cosy, loving pair.
"""
|
pypa/pip
|
refs/heads/main
|
src/pip/_vendor/urllib3/contrib/appengine.py
|
11
|
"""
This module provides a pool manager that uses Google App Engine's
`URLFetch Service <https://cloud.google.com/appengine/docs/python/urlfetch>`_.
Example usage::
from pip._vendor.urllib3 import PoolManager
from pip._vendor.urllib3.contrib.appengine import AppEngineManager, is_appengine_sandbox
if is_appengine_sandbox():
# AppEngineManager uses AppEngine's URLFetch API behind the scenes
http = AppEngineManager()
else:
# PoolManager uses a socket-level API behind the scenes
http = PoolManager()
r = http.request('GET', 'https://google.com/')
There are `limitations <https://cloud.google.com/appengine/docs/python/\
urlfetch/#Python_Quotas_and_limits>`_ to the URLFetch service and it may not be
the best choice for your application. There are three options for using
urllib3 on Google App Engine:
1. You can use :class:`AppEngineManager` with URLFetch. URLFetch is
cost-effective in many circumstances as long as your usage is within the
limitations.
2. You can use a normal :class:`~urllib3.PoolManager` by enabling sockets.
Sockets also have `limitations and restrictions
<https://cloud.google.com/appengine/docs/python/sockets/\
#limitations-and-restrictions>`_ and have a lower free quota than URLFetch.
To use sockets, be sure to specify the following in your ``app.yaml``::
env_variables:
GAE_USE_SOCKETS_HTTPLIB : 'true'
3. If you are using `App Engine Flexible
<https://cloud.google.com/appengine/docs/flexible/>`_, you can use the standard
:class:`PoolManager` without any configuration or special environment variables.
"""
from __future__ import absolute_import
import io
import logging
import warnings
from ..exceptions import (
HTTPError,
HTTPWarning,
MaxRetryError,
ProtocolError,
SSLError,
TimeoutError,
)
from ..packages.six.moves.urllib.parse import urljoin
from ..request import RequestMethods
from ..response import HTTPResponse
from ..util.retry import Retry
from ..util.timeout import Timeout
from . import _appengine_environ
try:
from google.appengine.api import urlfetch
except ImportError:
urlfetch = None
log = logging.getLogger(__name__)
class AppEnginePlatformWarning(HTTPWarning):
pass
class AppEnginePlatformError(HTTPError):
pass
class AppEngineManager(RequestMethods):
"""
Connection manager for Google App Engine sandbox applications.
This manager uses the URLFetch service directly instead of using the
emulated httplib, and is subject to URLFetch limitations as described in
the App Engine documentation `here
<https://cloud.google.com/appengine/docs/python/urlfetch>`_.
Notably it will raise an :class:`AppEnginePlatformError` if:
* URLFetch is not available.
* If you attempt to use this on App Engine Flexible, as full socket
support is available.
* If a request size is more than 10 megabytes.
* If a response size is more than 32 megabytes.
* If you use an unsupported request method such as OPTIONS.
Beyond those cases, it will raise normal urllib3 errors.
"""
def __init__(
self,
headers=None,
retries=None,
validate_certificate=True,
urlfetch_retries=True,
):
if not urlfetch:
raise AppEnginePlatformError(
"URLFetch is not available in this environment."
)
warnings.warn(
"urllib3 is using URLFetch on Google App Engine sandbox instead "
"of sockets. To use sockets directly instead of URLFetch see "
"https://urllib3.readthedocs.io/en/latest/reference/urllib3.contrib.html.",
AppEnginePlatformWarning,
)
RequestMethods.__init__(self, headers)
self.validate_certificate = validate_certificate
self.urlfetch_retries = urlfetch_retries
self.retries = retries or Retry.DEFAULT
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
# Return False to re-raise any potential exceptions
return False
def urlopen(
self,
method,
url,
body=None,
headers=None,
retries=None,
redirect=True,
timeout=Timeout.DEFAULT_TIMEOUT,
**response_kw
):
retries = self._get_retries(retries, redirect)
try:
follow_redirects = redirect and retries.redirect != 0 and retries.total
response = urlfetch.fetch(
url,
payload=body,
method=method,
headers=headers or {},
allow_truncated=False,
follow_redirects=self.urlfetch_retries and follow_redirects,
deadline=self._get_absolute_timeout(timeout),
validate_certificate=self.validate_certificate,
)
except urlfetch.DeadlineExceededError as e:
raise TimeoutError(self, e)
except urlfetch.InvalidURLError as e:
if "too large" in str(e):
raise AppEnginePlatformError(
"URLFetch request too large, URLFetch only "
"supports requests up to 10mb in size.",
e,
)
raise ProtocolError(e)
except urlfetch.DownloadError as e:
if "Too many redirects" in str(e):
raise MaxRetryError(self, url, reason=e)
raise ProtocolError(e)
except urlfetch.ResponseTooLargeError as e:
raise AppEnginePlatformError(
"URLFetch response too large, URLFetch only supports"
"responses up to 32mb in size.",
e,
)
except urlfetch.SSLCertificateError as e:
raise SSLError(e)
except urlfetch.InvalidMethodError as e:
raise AppEnginePlatformError(
"URLFetch does not support method: %s" % method, e
)
http_response = self._urlfetch_response_to_http_response(
response, retries=retries, **response_kw
)
# Handle redirect?
redirect_location = redirect and http_response.get_redirect_location()
if redirect_location:
# Check for redirect response
if self.urlfetch_retries and retries.raise_on_redirect:
raise MaxRetryError(self, url, "too many redirects")
else:
if http_response.status == 303:
method = "GET"
try:
retries = retries.increment(
method, url, response=http_response, _pool=self
)
except MaxRetryError:
if retries.raise_on_redirect:
raise MaxRetryError(self, url, "too many redirects")
return http_response
retries.sleep_for_retry(http_response)
log.debug("Redirecting %s -> %s", url, redirect_location)
redirect_url = urljoin(url, redirect_location)
return self.urlopen(
method,
redirect_url,
body,
headers,
retries=retries,
redirect=redirect,
timeout=timeout,
**response_kw
)
# Check if we should retry the HTTP response.
has_retry_after = bool(http_response.getheader("Retry-After"))
if retries.is_retry(method, http_response.status, has_retry_after):
retries = retries.increment(method, url, response=http_response, _pool=self)
log.debug("Retry: %s", url)
retries.sleep(http_response)
return self.urlopen(
method,
url,
body=body,
headers=headers,
retries=retries,
redirect=redirect,
timeout=timeout,
**response_kw
)
return http_response
def _urlfetch_response_to_http_response(self, urlfetch_resp, **response_kw):
if is_prod_appengine():
# Production GAE handles deflate encoding automatically, but does
# not remove the encoding header.
content_encoding = urlfetch_resp.headers.get("content-encoding")
if content_encoding == "deflate":
del urlfetch_resp.headers["content-encoding"]
transfer_encoding = urlfetch_resp.headers.get("transfer-encoding")
# We have a full response's content,
# so let's make sure we don't report ourselves as chunked data.
if transfer_encoding == "chunked":
encodings = transfer_encoding.split(",")
encodings.remove("chunked")
urlfetch_resp.headers["transfer-encoding"] = ",".join(encodings)
original_response = HTTPResponse(
# In order for decoding to work, we must present the content as
# a file-like object.
body=io.BytesIO(urlfetch_resp.content),
msg=urlfetch_resp.header_msg,
headers=urlfetch_resp.headers,
status=urlfetch_resp.status_code,
**response_kw
)
return HTTPResponse(
body=io.BytesIO(urlfetch_resp.content),
headers=urlfetch_resp.headers,
status=urlfetch_resp.status_code,
original_response=original_response,
**response_kw
)
def _get_absolute_timeout(self, timeout):
if timeout is Timeout.DEFAULT_TIMEOUT:
return None # Defer to URLFetch's default.
if isinstance(timeout, Timeout):
if timeout._read is not None or timeout._connect is not None:
warnings.warn(
"URLFetch does not support granular timeout settings, "
"reverting to total or default URLFetch timeout.",
AppEnginePlatformWarning,
)
return timeout.total
return timeout
def _get_retries(self, retries, redirect):
if not isinstance(retries, Retry):
retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
if retries.connect or retries.read or retries.redirect:
warnings.warn(
"URLFetch only supports total retries and does not "
"recognize connect, read, or redirect retry parameters.",
AppEnginePlatformWarning,
)
return retries
# Alias methods from _appengine_environ to maintain public API interface.
is_appengine = _appengine_environ.is_appengine
is_appengine_sandbox = _appengine_environ.is_appengine_sandbox
is_local_appengine = _appengine_environ.is_local_appengine
is_prod_appengine = _appengine_environ.is_prod_appengine
is_prod_appengine_mvms = _appengine_environ.is_prod_appengine_mvms
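# A minimal usage sketch, assuming a sandboxed App Engine runtime; the
# function name and URL below are illustrative assumptions, not part of the
# upstream API beyond what the docstrings above describe.
def _example_fetch():
    # Pick the URLFetch-backed manager only when the sandbox is detected.
    if not is_appengine_sandbox():
        raise RuntimeError("example only applies inside the GAE sandbox")
    manager = AppEngineManager()
    # Only *total* retries and a *total* timeout are honored here; granular
    # connect/read settings trigger AppEnginePlatformWarning (see
    # _get_absolute_timeout and _get_retries above).
    return manager.request(
        "GET",
        "https://www.example.com/",
        retries=Retry(total=2),
        timeout=Timeout(total=10.0),
    )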
|
ajoaoff/django
|
refs/heads/master
|
django/contrib/flatpages/sitemaps.py
|
729
|
from django.apps import apps as django_apps
from django.contrib.sitemaps import Sitemap
from django.core.exceptions import ImproperlyConfigured
class FlatPageSitemap(Sitemap):
def items(self):
if not django_apps.is_installed('django.contrib.sites'):
raise ImproperlyConfigured("FlatPageSitemap requires django.contrib.sites, which isn't installed.")
Site = django_apps.get_model('sites.Site')
current_site = Site.objects.get_current()
return current_site.flatpage_set.filter(registration_required=False)
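# A minimal wiring sketch, assuming the conventional django.contrib.sitemaps
# setup; this belongs in a project URLconf, not in this module, and the URL
# pattern below is an illustrative assumption:
#
#   from django.contrib.flatpages.sitemaps import FlatPageSitemap
#   from django.contrib.sitemaps.views import sitemap
#
#   urlpatterns = [
#       url(r'^sitemap\.xml$', sitemap,
#           {'sitemaps': {'flatpages': FlatPageSitemap}}),
#   ]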
|
mgpyh/django-contrib-comments
|
refs/heads/master
|
tests/testapp/tests/comment_view_tests.py
|
4
|
from __future__ import absolute_import, unicode_literals
import re
from django.conf import settings
from django.contrib.auth.models import User
from django_comments import signals
from django_comments.models import Comment
from . import CommentTestCase
from ..models import Article, Book
post_redirect_re = re.compile(r'^http://testserver/posted/\?c=(?P<pk>\d+)$')
class CommentViewTests(CommentTestCase):
def testPostCommentHTTPMethods(self):
a = Article.objects.get(pk=1)
data = self.getValidData(a)
response = self.client.get("/post/", data)
self.assertEqual(response.status_code, 405)
self.assertEqual(response["Allow"], "POST")
def testPostCommentMissingCtype(self):
a = Article.objects.get(pk=1)
data = self.getValidData(a)
del data["content_type"]
response = self.client.post("/post/", data)
self.assertEqual(response.status_code, 400)
def testPostCommentBadCtype(self):
a = Article.objects.get(pk=1)
data = self.getValidData(a)
data["content_type"] = "Nobody expects the Spanish Inquisition!"
response = self.client.post("/post/", data)
self.assertEqual(response.status_code, 400)
def testPostCommentMissingObjectPK(self):
a = Article.objects.get(pk=1)
data = self.getValidData(a)
del data["object_pk"]
response = self.client.post("/post/", data)
self.assertEqual(response.status_code, 400)
def testPostCommentBadObjectPK(self):
a = Article.objects.get(pk=1)
data = self.getValidData(a)
data["object_pk"] = "14"
response = self.client.post("/post/", data)
self.assertEqual(response.status_code, 400)
def testPostInvalidIntegerPK(self):
a = Article.objects.get(pk=1)
data = self.getValidData(a)
data["comment"] = "This is another comment"
data["object_pk"] = '\ufffd'
response = self.client.post("/post/", data)
self.assertEqual(response.status_code, 400)
def testPostInvalidDecimalPK(self):
b = Book.objects.get(pk='12.34')
data = self.getValidData(b)
data["comment"] = "This is another comment"
data["object_pk"] = 'cookies'
response = self.client.post("/post/", data)
self.assertEqual(response.status_code, 400)
def testCommentPreview(self):
a = Article.objects.get(pk=1)
data = self.getValidData(a)
data["preview"] = "Preview"
response = self.client.post("/post/", data)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "comments/preview.html")
def testHashTampering(self):
a = Article.objects.get(pk=1)
data = self.getValidData(a)
data["security_hash"] = "Nobody expects the Spanish Inquisition!"
response = self.client.post("/post/", data)
self.assertEqual(response.status_code, 400)
def testDebugCommentErrors(self):
"""The debug error template should be shown only if DEBUG is True"""
olddebug = settings.DEBUG
settings.DEBUG = True
a = Article.objects.get(pk=1)
data = self.getValidData(a)
data["security_hash"] = "Nobody expects the Spanish Inquisition!"
response = self.client.post("/post/", data)
self.assertEqual(response.status_code, 400)
self.assertTemplateUsed(response, "comments/400-debug.html")
settings.DEBUG = False
response = self.client.post("/post/", data)
self.assertEqual(response.status_code, 400)
self.assertTemplateNotUsed(response, "comments/400-debug.html")
settings.DEBUG = olddebug
def testCreateValidComment(self):
address = "1.2.3.4"
a = Article.objects.get(pk=1)
data = self.getValidData(a)
self.response = self.client.post("/post/", data, REMOTE_ADDR=address)
self.assertEqual(self.response.status_code, 302)
self.assertEqual(Comment.objects.count(), 1)
c = Comment.objects.all()[0]
self.assertEqual(c.ip_address, address)
self.assertEqual(c.comment, "This is my comment")
def testCreateValidCommentIPv6(self):
"""
Test creating a valid comment with a long IPv6 address.
Note that this test should fail when Comment.ip_address is an IPAddress instead of a GenericIPAddress,
but does not do so on SQLite or PostgreSQL, because they use the TEXT and INET types, which already
allow storing an IPv6 address internally.
"""
address = "2a02::223:6cff:fe8a:2e8a"
a = Article.objects.get(pk=1)
data = self.getValidData(a)
self.response = self.client.post("/post/", data, REMOTE_ADDR=address)
self.assertEqual(self.response.status_code, 302)
self.assertEqual(Comment.objects.count(), 1)
c = Comment.objects.all()[0]
self.assertEqual(c.ip_address, address)
self.assertEqual(c.comment, "This is my comment")
def testCreateValidCommentIPv6Unpack(self):
address = "::ffff:18.52.18.52"
a = Article.objects.get(pk=1)
data = self.getValidData(a)
self.response = self.client.post("/post/", data, REMOTE_ADDR=address)
self.assertEqual(self.response.status_code, 302)
self.assertEqual(Comment.objects.count(), 1)
c = Comment.objects.all()[0]
# We trim the '::ffff:' bit off because it is an IPv4 addr
self.assertEqual(c.ip_address, address[7:])
self.assertEqual(c.comment, "This is my comment")
def testPostAsAuthenticatedUser(self):
a = Article.objects.get(pk=1)
data = self.getValidData(a)
data['name'] = data['email'] = ''
self.client.login(username="normaluser", password="normaluser")
self.response = self.client.post("/post/", data, REMOTE_ADDR="1.2.3.4")
self.assertEqual(self.response.status_code, 302)
self.assertEqual(Comment.objects.count(), 1)
c = Comment.objects.all()[0]
self.assertEqual(c.ip_address, "1.2.3.4")
u = User.objects.get(username='normaluser')
self.assertEqual(c.user, u)
self.assertEqual(c.user_name, u.get_full_name())
self.assertEqual(c.user_email, u.email)
def testPostAsAuthenticatedUserWithoutFullname(self):
"""
Check that the user's name in the comment is populated for
authenticated users without first_name and last_name.
"""
user = User.objects.create_user(username='jane_other',
email='jane@example.com', password='jane_other')
a = Article.objects.get(pk=1)
data = self.getValidData(a)
data['name'] = data['email'] = ''
self.client.login(username="jane_other", password="jane_other")
self.response = self.client.post("/post/", data, REMOTE_ADDR="1.2.3.4")
c = Comment.objects.get(user=user)
self.assertEqual(c.ip_address, "1.2.3.4")
self.assertEqual(c.user_name, 'jane_other')
user.delete()
def testPreventDuplicateComments(self):
"""Prevent posting the exact same comment twice"""
a = Article.objects.get(pk=1)
data = self.getValidData(a)
self.client.post("/post/", data)
self.client.post("/post/", data)
self.assertEqual(Comment.objects.count(), 1)
# This should not trigger the duplicate prevention
self.client.post("/post/", dict(data, comment="My second comment."))
self.assertEqual(Comment.objects.count(), 2)
def testCommentSignals(self):
"""Test signals emitted by the comment posting view"""
# callback
def receive(sender, **kwargs):
self.assertEqual(kwargs['comment'].comment, "This is my comment")
self.assertTrue('request' in kwargs)
received_signals.append(kwargs.get('signal'))
# Connect signals and keep track of handled ones
received_signals = []
expected_signals = [
signals.comment_will_be_posted, signals.comment_was_posted
]
for signal in expected_signals:
signal.connect(receive)
# Post a comment and check the signals
self.testCreateValidComment()
self.assertEqual(received_signals, expected_signals)
for signal in expected_signals:
signal.disconnect(receive)
def testWillBePostedSignal(self):
"""
Test that the comment_will_be_posted signal can prevent the comment from
actually getting saved
"""
def receive(sender, **kwargs): return False
signals.comment_will_be_posted.connect(receive, dispatch_uid="comment-test")
a = Article.objects.get(pk=1)
data = self.getValidData(a)
response = self.client.post("/post/", data)
self.assertEqual(response.status_code, 400)
self.assertEqual(Comment.objects.count(), 0)
signals.comment_will_be_posted.disconnect(dispatch_uid="comment-test")
def testWillBePostedSignalModifyComment(self):
"""
Test that the comment_will_be_posted signal can modify a comment before
it gets posted
"""
def receive(sender, **kwargs):
# a bad but effective spam filter :)...
kwargs['comment'].is_public = False
signals.comment_will_be_posted.connect(receive)
self.testCreateValidComment()
c = Comment.objects.all()[0]
self.assertFalse(c.is_public)
def testCommentNext(self):
"""Test the different "next" actions the comment view can take"""
a = Article.objects.get(pk=1)
data = self.getValidData(a)
response = self.client.post("/post/", data)
location = response["Location"]
match = post_redirect_re.match(location)
self.assertTrue(match != None, "Unexpected redirect location: %s" % location)
data["next"] = "/somewhere/else/"
data["comment"] = "This is another comment"
response = self.client.post("/post/", data)
location = response["Location"]
match = re.search(r"^http://testserver/somewhere/else/\?c=\d+$", location)
self.assertTrue(match != None, "Unexpected redirect location: %s" % location)
data["next"] = "http://badserver/somewhere/else/"
data["comment"] = "This is another comment with an unsafe next url"
response = self.client.post("/post/", data)
location = response["Location"]
match = post_redirect_re.match(location)
self.assertTrue(match != None, "Unsafe redirection to: %s" % location)
def testCommentDoneView(self):
a = Article.objects.get(pk=1)
data = self.getValidData(a)
response = self.client.post("/post/", data)
location = response["Location"]
match = post_redirect_re.match(location)
self.assertTrue(match != None, "Unexpected redirect location: %s" % location)
pk = int(match.group('pk'))
response = self.client.get(location)
self.assertTemplateUsed(response, "comments/posted.html")
self.assertEqual(response.context[0]["comment"], Comment.objects.get(pk=pk))
def testCommentNextWithQueryString(self):
"""
The `next` key needs to handle already having a query string (#10585)
"""
a = Article.objects.get(pk=1)
data = self.getValidData(a)
data["next"] = "/somewhere/else/?foo=bar"
data["comment"] = "This is another comment"
response = self.client.post("/post/", data)
location = response["Location"]
match = re.search(r"^http://testserver/somewhere/else/\?foo=bar&c=\d+$", location)
self.assertTrue(match != None, "Unexpected redirect location: %s" % location)
def testCommentPostRedirectWithInvalidIntegerPK(self):
"""
        Tests that retrieving the location given in the post redirect,
        after invalid data is appended to the query string it ends with,
        does not cause a server error.
"""
a = Article.objects.get(pk=1)
data = self.getValidData(a)
data["comment"] = "This is another comment"
response = self.client.post("/post/", data)
location = response["Location"]
broken_location = location + "\ufffd"
response = self.client.get(broken_location)
self.assertEqual(response.status_code, 200)
def testCommentNextWithQueryStringAndAnchor(self):
"""
The `next` key needs to handle already having an anchor. Refs #13411.
"""
# With a query string also.
a = Article.objects.get(pk=1)
data = self.getValidData(a)
data["next"] = "/somewhere/else/?foo=bar#baz"
data["comment"] = "This is another comment"
response = self.client.post("/post/", data)
location = response["Location"]
match = re.search(r"^http://testserver/somewhere/else/\?foo=bar&c=\d+#baz$", location)
self.assertTrue(match != None, "Unexpected redirect location: %s" % location)
# Without a query string
a = Article.objects.get(pk=1)
data = self.getValidData(a)
data["next"] = "/somewhere/else/#baz"
data["comment"] = "This is another comment"
response = self.client.post("/post/", data)
location = response["Location"]
match = re.search(r"^http://testserver/somewhere/else/\?c=\d+#baz$", location)
self.assertTrue(match != None, "Unexpected redirect location: %s" % location)
|
proxysh/Safejumper-for-Mac
|
refs/heads/master
|
buildlinux/env32/lib/python2.7/site-packages/twisted/plugins/twisted_inet.py
|
83
|
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
from twisted.application.service import ServiceMaker
TwistedINETD = ServiceMaker(
"Twisted INETD Server",
"twisted.runner.inetdtap",
"An inetd(8) replacement.",
"inetd")
|
alajara/servo
|
refs/heads/master
|
tests/wpt/web-platform-tests/tools/pywebsocket/src/test/test_dispatch.py
|
488
|
#!/usr/bin/env python
#
# Copyright 2011, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests for dispatch module."""
import os
import unittest
import set_sys_path # Update sys.path to locate mod_pywebsocket module.
from mod_pywebsocket import dispatch
from mod_pywebsocket import handshake
from test import mock
_TEST_HANDLERS_DIR = os.path.join(
os.path.split(__file__)[0], 'testdata', 'handlers')
_TEST_HANDLERS_SUB_DIR = os.path.join(_TEST_HANDLERS_DIR, 'sub')
class DispatcherTest(unittest.TestCase):
"""A unittest for dispatch module."""
def test_normalize_path(self):
self.assertEqual(os.path.abspath('/a/b').replace('\\', '/'),
dispatch._normalize_path('/a/b'))
self.assertEqual(os.path.abspath('/a/b').replace('\\', '/'),
dispatch._normalize_path('\\a\\b'))
self.assertEqual(os.path.abspath('/a/b').replace('\\', '/'),
dispatch._normalize_path('/a/c/../b'))
self.assertEqual(os.path.abspath('abc').replace('\\', '/'),
dispatch._normalize_path('abc'))
def test_converter(self):
converter = dispatch._create_path_to_resource_converter('/a/b')
# Python built by MSC inserts a drive name like 'C:\' via realpath().
# Converter Generator expands provided path using realpath() and uses
# the path including a drive name to verify the prefix.
os_root = os.path.realpath('/')
self.assertEqual('/h', converter(os_root + 'a/b/h_wsh.py'))
self.assertEqual('/c/h', converter(os_root + 'a/b/c/h_wsh.py'))
self.assertEqual(None, converter(os_root + 'a/b/h.py'))
self.assertEqual(None, converter('a/b/h_wsh.py'))
converter = dispatch._create_path_to_resource_converter('a/b')
self.assertEqual('/h', converter(dispatch._normalize_path(
'a/b/h_wsh.py')))
converter = dispatch._create_path_to_resource_converter('/a/b///')
self.assertEqual('/h', converter(os_root + 'a/b/h_wsh.py'))
self.assertEqual('/h', converter(dispatch._normalize_path(
'/a/b/../b/h_wsh.py')))
converter = dispatch._create_path_to_resource_converter(
'/a/../a/b/../b/')
self.assertEqual('/h', converter(os_root + 'a/b/h_wsh.py'))
converter = dispatch._create_path_to_resource_converter(r'\a\b')
self.assertEqual('/h', converter(os_root + r'a\b\h_wsh.py'))
self.assertEqual('/h', converter(os_root + r'a/b/h_wsh.py'))
def test_enumerate_handler_file_paths(self):
paths = list(
dispatch._enumerate_handler_file_paths(_TEST_HANDLERS_DIR))
paths.sort()
self.assertEqual(8, len(paths))
expected_paths = [
os.path.join(_TEST_HANDLERS_DIR, 'abort_by_user_wsh.py'),
os.path.join(_TEST_HANDLERS_DIR, 'blank_wsh.py'),
os.path.join(_TEST_HANDLERS_DIR, 'origin_check_wsh.py'),
os.path.join(_TEST_HANDLERS_DIR, 'sub',
'exception_in_transfer_wsh.py'),
os.path.join(_TEST_HANDLERS_DIR, 'sub', 'non_callable_wsh.py'),
os.path.join(_TEST_HANDLERS_DIR, 'sub', 'plain_wsh.py'),
os.path.join(_TEST_HANDLERS_DIR, 'sub',
'wrong_handshake_sig_wsh.py'),
os.path.join(_TEST_HANDLERS_DIR, 'sub',
'wrong_transfer_sig_wsh.py'),
]
for expected, actual in zip(expected_paths, paths):
self.assertEqual(expected, actual)
def test_source_handler_file(self):
self.assertRaises(
dispatch.DispatchException, dispatch._source_handler_file, '')
self.assertRaises(
dispatch.DispatchException, dispatch._source_handler_file, 'def')
self.assertRaises(
dispatch.DispatchException, dispatch._source_handler_file, '1/0')
self.failUnless(dispatch._source_handler_file(
'def web_socket_do_extra_handshake(request):pass\n'
'def web_socket_transfer_data(request):pass\n'))
def test_source_warnings(self):
dispatcher = dispatch.Dispatcher(_TEST_HANDLERS_DIR, None)
warnings = dispatcher.source_warnings()
warnings.sort()
expected_warnings = [
(os.path.realpath(os.path.join(
_TEST_HANDLERS_DIR, 'blank_wsh.py')) +
': web_socket_do_extra_handshake is not defined.'),
(os.path.realpath(os.path.join(
_TEST_HANDLERS_DIR, 'sub', 'non_callable_wsh.py')) +
': web_socket_do_extra_handshake is not callable.'),
(os.path.realpath(os.path.join(
_TEST_HANDLERS_DIR, 'sub', 'wrong_handshake_sig_wsh.py')) +
': web_socket_do_extra_handshake is not defined.'),
(os.path.realpath(os.path.join(
_TEST_HANDLERS_DIR, 'sub', 'wrong_transfer_sig_wsh.py')) +
': web_socket_transfer_data is not defined.'),
]
self.assertEquals(4, len(warnings))
for expected, actual in zip(expected_warnings, warnings):
self.assertEquals(expected, actual)
def test_do_extra_handshake(self):
dispatcher = dispatch.Dispatcher(_TEST_HANDLERS_DIR, None)
request = mock.MockRequest()
request.ws_resource = '/origin_check'
request.ws_origin = 'http://example.com'
dispatcher.do_extra_handshake(request) # Must not raise exception.
request.ws_origin = 'http://bad.example.com'
try:
dispatcher.do_extra_handshake(request)
self.fail('Could not catch HandshakeException with 403 status')
except handshake.HandshakeException, e:
self.assertEquals(403, e.status)
except Exception, e:
self.fail('Unexpected exception: %r' % e)
def test_abort_extra_handshake(self):
dispatcher = dispatch.Dispatcher(_TEST_HANDLERS_DIR, None)
request = mock.MockRequest()
request.ws_resource = '/abort_by_user'
self.assertRaises(handshake.AbortedByUserException,
dispatcher.do_extra_handshake, request)
def test_transfer_data(self):
dispatcher = dispatch.Dispatcher(_TEST_HANDLERS_DIR, None)
request = mock.MockRequest(connection=mock.MockConn('\xff\x00'))
request.ws_resource = '/origin_check'
request.ws_protocol = 'p1'
dispatcher.transfer_data(request)
self.assertEqual('origin_check_wsh.py is called for /origin_check, p1'
'\xff\x00',
request.connection.written_data())
request = mock.MockRequest(connection=mock.MockConn('\xff\x00'))
request.ws_resource = '/sub/plain'
request.ws_protocol = None
dispatcher.transfer_data(request)
self.assertEqual('sub/plain_wsh.py is called for /sub/plain, None'
'\xff\x00',
request.connection.written_data())
request = mock.MockRequest(connection=mock.MockConn('\xff\x00'))
request.ws_resource = '/sub/plain?'
request.ws_protocol = None
dispatcher.transfer_data(request)
self.assertEqual('sub/plain_wsh.py is called for /sub/plain?, None'
'\xff\x00',
request.connection.written_data())
request = mock.MockRequest(connection=mock.MockConn('\xff\x00'))
request.ws_resource = '/sub/plain?q=v'
request.ws_protocol = None
dispatcher.transfer_data(request)
self.assertEqual('sub/plain_wsh.py is called for /sub/plain?q=v, None'
'\xff\x00',
request.connection.written_data())
def test_transfer_data_no_handler(self):
dispatcher = dispatch.Dispatcher(_TEST_HANDLERS_DIR, None)
for resource in ['/blank', '/sub/non_callable',
'/sub/no_wsh_at_the_end', '/does/not/exist']:
request = mock.MockRequest(connection=mock.MockConn(''))
request.ws_resource = resource
request.ws_protocol = 'p2'
try:
dispatcher.transfer_data(request)
self.fail()
except dispatch.DispatchException, e:
self.failUnless(str(e).find('No handler') != -1)
except Exception:
self.fail()
def test_transfer_data_handler_exception(self):
dispatcher = dispatch.Dispatcher(_TEST_HANDLERS_DIR, None)
request = mock.MockRequest(connection=mock.MockConn(''))
request.ws_resource = '/sub/exception_in_transfer'
request.ws_protocol = 'p3'
try:
dispatcher.transfer_data(request)
self.fail()
except Exception, e:
self.failUnless(str(e).find('Intentional') != -1,
'Unexpected exception: %s' % e)
def test_abort_transfer_data(self):
dispatcher = dispatch.Dispatcher(_TEST_HANDLERS_DIR, None)
request = mock.MockRequest()
request.ws_resource = '/abort_by_user'
self.assertRaises(handshake.AbortedByUserException,
dispatcher.transfer_data, request)
def test_scan_dir(self):
disp = dispatch.Dispatcher(_TEST_HANDLERS_DIR, None)
self.assertEqual(4, len(disp._handler_suite_map))
self.failUnless('/origin_check' in disp._handler_suite_map)
self.failUnless(
'/sub/exception_in_transfer' in disp._handler_suite_map)
self.failUnless('/sub/plain' in disp._handler_suite_map)
def test_scan_sub_dir(self):
disp = dispatch.Dispatcher(_TEST_HANDLERS_DIR, _TEST_HANDLERS_SUB_DIR)
self.assertEqual(2, len(disp._handler_suite_map))
self.failIf('/origin_check' in disp._handler_suite_map)
self.failUnless(
'/sub/exception_in_transfer' in disp._handler_suite_map)
self.failUnless('/sub/plain' in disp._handler_suite_map)
def test_scan_sub_dir_as_root(self):
disp = dispatch.Dispatcher(_TEST_HANDLERS_SUB_DIR,
_TEST_HANDLERS_SUB_DIR)
self.assertEqual(2, len(disp._handler_suite_map))
self.failIf('/origin_check' in disp._handler_suite_map)
self.failIf('/sub/exception_in_transfer' in disp._handler_suite_map)
self.failIf('/sub/plain' in disp._handler_suite_map)
self.failUnless('/exception_in_transfer' in disp._handler_suite_map)
self.failUnless('/plain' in disp._handler_suite_map)
def test_scan_dir_must_under_root(self):
dispatch.Dispatcher('a/b', 'a/b/c') # OK
dispatch.Dispatcher('a/b///', 'a/b') # OK
self.assertRaises(dispatch.DispatchException,
dispatch.Dispatcher, 'a/b/c', 'a/b')
def test_resource_path_alias(self):
disp = dispatch.Dispatcher(_TEST_HANDLERS_DIR, None)
disp.add_resource_path_alias('/', '/origin_check')
self.assertEqual(5, len(disp._handler_suite_map))
self.failUnless('/origin_check' in disp._handler_suite_map)
self.failUnless(
'/sub/exception_in_transfer' in disp._handler_suite_map)
self.failUnless('/sub/plain' in disp._handler_suite_map)
self.failUnless('/' in disp._handler_suite_map)
self.assertRaises(dispatch.DispatchException,
disp.add_resource_path_alias, '/alias', '/not-exist')
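# A minimal handler sketch, assuming the *_wsh.py convention exercised by
# the tests above: a file such as echo_wsh.py under the handler root maps
# to the resource /echo, and the two module-level functions below are the
# entry points whose absence test_source_warnings flags. The body shown is
# illustrative only.
#
#   def web_socket_do_extra_handshake(request):
#       pass  # accept any origin and subprotocol
#
#   def web_socket_transfer_data(request):
#       request.connection.write(request.ws_resource)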
if __name__ == '__main__':
unittest.main()
# vi:sts=4 sw=4 et
|
liosha2007/temporary-groupdocs-python-sdk
|
refs/heads/master
|
groupdocs/models/UpdateUserResponse.py
|
1
|
#!/usr/bin/env python
"""
Copyright 2012 GroupDocs.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
class UpdateUserResponse:
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually."""
def __init__(self):
self.swaggerTypes = {
'result': 'UpdateUserResult',
'status': 'str',
'error_message': 'str',
'composedOn': 'long'
}
self.result = None # UpdateUserResult
self.status = None # str
self.error_message = None # str
self.composedOn = None # long
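# A minimal population sketch, assuming the swagger-generated attribute
# names declared above; the field values are illustrative only.
if __name__ == '__main__':
    response = UpdateUserResponse()
    response.status = 'Ok'                # str, per swaggerTypes
    response.error_message = None         # str, left empty on success
    response.composedOn = 1351234567890   # long: server-side timestamp
    print(response.status)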
|
skylarker/jasper-client
|
refs/heads/master
|
client/conversation.py
|
30
|
# -*- coding: utf-8-*-
import logging
from notifier import Notifier
from brain import Brain
class Conversation(object):
def __init__(self, persona, mic, profile):
self._logger = logging.getLogger(__name__)
self.persona = persona
self.mic = mic
self.profile = profile
self.brain = Brain(mic, profile)
self.notifier = Notifier(profile)
def handleForever(self):
"""
Delegates user input to the handling function when activated.
"""
self._logger.info("Starting to handle conversation with keyword '%s'.",
self.persona)
while True:
# Print notifications until empty
notifications = self.notifier.getAllNotifications()
for notif in notifications:
self._logger.info("Received notification: '%s'", str(notif))
self._logger.debug("Started listening for keyword '%s'",
self.persona)
threshold, transcribed = self.mic.passiveListen(self.persona)
self._logger.debug("Stopped listening for keyword '%s'",
self.persona)
if not transcribed or not threshold:
self._logger.info("Nothing has been said or transcribed.")
continue
self._logger.info("Keyword '%s' has been said!", self.persona)
self._logger.debug("Started to listen actively with threshold: %r",
threshold)
input = self.mic.activeListenToAllOptions(threshold)
self._logger.debug("Stopped to listen actively with threshold: %r",
threshold)
if input:
self.brain.query(input)
else:
self.mic.say("Pardon?")
|
ehashman/oh-mainline
|
refs/heads/master
|
vendor/packages/sphinx/sphinx/util/jsonimpl.py
|
16
|
# -*- coding: utf-8 -*-
"""
sphinx.util.jsonimpl
~~~~~~~~~~~~~~~~~~~~
JSON serializer implementation wrapper.
:copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import UserString
try:
import json
# json-py's json module has no JSONEncoder; this will raise AttributeError
# if json-py is imported instead of the built-in json module
JSONEncoder = json.JSONEncoder
except (ImportError, AttributeError):
try:
import simplejson as json
JSONEncoder = json.JSONEncoder
except ImportError:
json = None
JSONEncoder = object
class SphinxJSONEncoder(JSONEncoder):
"""JSONEncoder subclass that forces translation proxies."""
def default(self, obj):
if isinstance(obj, UserString.UserString):
return unicode(obj)
return JSONEncoder.default(self, obj)
def dump(obj, fp, *args, **kwds):
kwds['cls'] = SphinxJSONEncoder
return json.dump(obj, fp, *args, **kwds)
def dumps(obj, *args, **kwds):
kwds['cls'] = SphinxJSONEncoder
return json.dumps(obj, *args, **kwds)
def load(*args, **kwds):
return json.load(*args, **kwds)
def loads(*args, **kwds):
return json.loads(*args, **kwds)
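# A small usage sketch, assuming either the built-in json module or
# simplejson is importable (otherwise json is None and the wrappers fail):
if __name__ == '__main__':
    if json is not None:
        # SphinxJSONEncoder is applied implicitly, so UserString instances
        # (e.g. translation proxies) serialize like plain strings.
        print(dumps({'title': UserString.UserString(u'Overview')}))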
|
stanlyxiang/incubator-hawq
|
refs/heads/master
|
tools/bin/gppylib/operations/test/unit/test_unit_utils.py
|
12
|
#!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import unittest
import pickle
from gppylib.commands.base import ExecutionError
from gppylib.operations.utils import RemoteOperation
from gppylib.operations.test_utils_helper import TestOperation, RaiseOperation, RaiseOperation_Nested, RaiseOperation_Unsafe, RaiseOperation_Unpicklable, RaiseOperation_Safe, MyException, ExceptionWithArgs, ExceptionWithArgsUnsafe
# TODO: much of this code could be improved with assertion context managers that
# exist in a later version of unittest, I believe
class utilsTestCase(unittest.TestCase):
def test_Remote_basic(self):
""" Basic RemoteOperation test """
self.assertTrue(TestOperation().run() == RemoteOperation(TestOperation(), "localhost").run())
def test_Remote_exceptions(self):
""" Test that an Exception returned remotely will be raised locally. """
try:
RemoteOperation(RaiseOperation(), "localhost").run()
except MyException, e:
pass
else:
self.fail("RaiseOperation should have thrown a MyException")
def test_inner_exceptions(self):
""" Verify that an object not at the global level of this file cannot be pickled properly. """
try:
RemoteOperation(RaiseOperation_Nested(), "localhost").run()
except ExecutionError, e:
self.assertTrue(e.cmd.get_results().stderr.strip().endswith("raise RaiseOperation_Nested.MyException2()"))
else:
self.fail("A PicklingError should have been caused remotely, because RaiseOperation_Nested is not at the global-level.")
def test_unsafe_exceptions_with_args(self):
try:
RemoteOperation(RaiseOperation_Unsafe(), "localhost").run()
except TypeError, e: # Because Exceptions don't retain init args, they are not pickle-able normally
pass
else:
self.fail("RaiseOperation_Unsafe should have caused a TypeError, due to an improper Exception idiom. See test_utils.ExceptionWithArgsUnsafe")
def test_proper_exceptions_sanity(self):
try:
RemoteOperation(RaiseOperation_Safe(), "localhost").run()
except ExceptionWithArgs, e:
pass
else:
self.fail("ExceptionWithArgs should have been successfully raised + caught, because proper idiom is used.")
def test_proper_exceptions_with_args(self):
try:
RemoteOperation(RaiseOperation_Safe(), "localhost").run()
except ExceptionWithArgs, e:
self.assertTrue(e.x == 1 and e.y == 2)
else:
self.fail("RaiseOperation_Safe should have thrown ExceptionWithArgs(1, 2)")
# It is crucial that the RMI is debuggable!
def test_Remote_harden(self):
""" Ensure that some logging occurs in event of error. """
# One case encountered thus far is the raising of a pygresql DatabaseError,
# which, due to the import from a shared object (I think), does not behave
# nicely in terms of imports and namespacing.
try:
RemoteOperation(RaiseOperation_Unpicklable(), "localhost").run()
except ExecutionError, e:
self.assertTrue(e.cmd.get_results().stderr.strip().endswith("raise pg.DatabaseError()"))
else:
self.fail("""A pg.DatabaseError should have been raised remotely, and because it cannot
be pickled cleanly (due to a strange import in pickle.py),
an ExecutionError should have ultimately been caused.""")
# TODO: Check logs on disk. With gplogfilter?
if __name__ == '__main__':
unittest.main()
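A hedged sketch of the TODO above: on unittest 2.7+, the try/except/else idiom collapses into an assertion context manager (same helpers assumed importable):
# Equivalent of test_Remote_exceptions, restated with assertRaises:
def test_Remote_exceptions_modern(self):
    with self.assertRaises(MyException):
        RemoteOperation(RaiseOperation(), "localhost").run()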
|
mbroadst/rethinkdb
|
refs/heads/next
|
test/common/http_support/flask/debughelpers.py
|
777
|
# -*- coding: utf-8 -*-
"""
flask.debughelpers
~~~~~~~~~~~~~~~~~~
Various helpers to make the development experience better.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from ._compat import implements_to_string
class UnexpectedUnicodeError(AssertionError, UnicodeError):
"""Raised in places where we want some better error reporting for
unexpected unicode or binary data.
"""
@implements_to_string
class DebugFilesKeyError(KeyError, AssertionError):
"""Raised from request.files during debugging. The idea is that it can
provide a better error message than just a generic KeyError/BadRequest.
"""
def __init__(self, request, key):
form_matches = request.form.getlist(key)
buf = ['You tried to access the file "%s" in the request.files '
'dictionary but it does not exist. The mimetype for the request '
'is "%s" instead of "multipart/form-data" which means that no '
'file contents were transmitted. To fix this error you should '
'provide enctype="multipart/form-data" in your form.' %
(key, request.mimetype)]
if form_matches:
buf.append('\n\nThe browser instead transmitted some file names. '
'This was submitted: %s' % ', '.join('"%s"' % x
for x in form_matches))
self.msg = ''.join(buf)
def __str__(self):
return self.msg
class FormDataRoutingRedirect(AssertionError):
"""This exception is raised by Flask in debug mode if it detects a
redirect caused by the routing system when the request method is not
GET, HEAD or OPTIONS. Reasoning: form data will be dropped.
"""
def __init__(self, request):
exc = request.routing_exception
buf = ['A request was sent to this URL (%s) but a redirect was '
'issued automatically by the routing system to "%s".'
% (request.url, exc.new_url)]
# In case just a slash was appended we can be extra helpful
if request.base_url + '/' == exc.new_url.split('?')[0]:
buf.append(' The URL was defined with a trailing slash so '
'Flask will automatically redirect to the URL '
'with the trailing slash if it was accessed '
'without one.')
buf.append(' Make sure to directly send your %s-request to this URL '
'since we can\'t make browsers or HTTP clients redirect '
'with form data reliably or without user interaction.' %
request.method)
buf.append('\n\nNote: this exception is only raised in debug mode')
AssertionError.__init__(self, ''.join(buf).encode('utf-8'))
def attach_enctype_error_multidict(request):
"""Since Flask 0.8 we're monkeypatching the files object in case a
request is detected that does not use multipart form data but the files
object is accessed.
"""
oldcls = request.files.__class__
class newcls(oldcls):
def __getitem__(self, key):
try:
return oldcls.__getitem__(self, key)
except KeyError as e:
if key not in request.form:
raise
raise DebugFilesKeyError(request, key)
newcls.__name__ = oldcls.__name__
newcls.__module__ = oldcls.__module__
request.files.__class__ = newcls
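A hedged sketch of when DebugFilesKeyError fires (the /upload route and "avatar" field are hypothetical; assumes a Flask app in debug mode receiving a POST without enctype="multipart/form-data"):
from flask import Flask, request

app = Flask(__name__)
app.debug = True

@app.route('/upload', methods=['POST'])
def upload():
    # With a non-multipart POST, debug mode replaces the plain KeyError
    # with the explanatory DebugFilesKeyError built above.
    f = request.files['avatar']
    return f.filename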
|
fstagni/DIRAC
|
refs/heads/v7r0-fixes27
|
Core/Utilities/CFG.py
|
1
|
""" This is the main module that interprets DIRAC cfg format
"""
from __future__ import print_function
__RCSID__ = "$Id$"
import copy
import os
import re
import zipfile
try:
from DIRAC.Core.Utilities import List, ThreadSafe
from DIRAC.Core.Utilities.ReturnValues import S_OK, S_ERROR
gCFGSynchro = ThreadSafe.Synchronizer(recursive=True)
except Exception:
# We're outside of a DIRAC installation, define the required utilities locally
import threading
def S_ERROR(messageString=''):
return {'OK': False, 'Message': str(messageString)}
def S_OK(value=''):
return {'OK': True, 'Value': value}
class ListDummy:
def fromChar(self, inputString, sepChar=","):
if not (isinstance(inputString, basestring) and
isinstance(sepChar, basestring) and
sepChar): # to prevent getting an empty String as argument
return None
return [fieldString.strip() for fieldString in inputString.split(sepChar) if len(fieldString.strip()) > 0]
List = ListDummy()
class Synchronizer:
""" Class enapsulating a lock
allowing it to be used as a synchronizing
decorator making the call thread-safe"""
def __init__(self, lockName="", recursive=False):
self.lockName = lockName
if recursive:
self.lock = threading.RLock()
else:
self.lock = threading.Lock()
def __call__(self, funcToCall):
def lockedFunc(*args, **kwargs):
try:
if self.lockName:
print("LOCKING", self.lockName)
self.lock.acquire()
return funcToCall(*args, **kwargs)
finally:
if self.lockName:
print("UNLOCKING", self.lockName)
self.lock.release()
return lockedFunc
gCFGSynchro = Synchronizer(recursive=True)
# END OF OUT OF DIRAC
# START OF CFG MODULE
class CFG(object):
def __init__(self):
"""
Constructor
"""
self.__orderedList = []
self.__commentDict = {}
self.__dataDict = {}
self.reset()
@gCFGSynchro
def reset(self):
"""
Empty the CFG
"""
self.__orderedList = []
self.__commentDict = {}
self.__dataDict = {}
@gCFGSynchro
def createNewSection(self, sectionName, comment="", contents=False):
"""
Create a new section
:type sectionName: string
:param sectionName: Name of the section
:type comment: string
:param comment: Comment for the section
:type contents: CFG
:param contents: Optional cfg with the contents of the section.
"""
if sectionName == "":
raise ValueError("Creating a section with empty name! You shouldn't do that!")
if sectionName.find("/") > -1:
recDict = self.getRecursive(sectionName, -1)
if not recDict:
return S_ERROR("Parent section does not exist %s" % sectionName)
parentSection = recDict['value']
if isinstance(parentSection, basestring):
raise KeyError("Entry %s doesn't seem to be a section" % recDict['key'])
return parentSection.createNewSection(recDict['levelsBelow'], comment, contents)
self.__addEntry(sectionName, comment)
if sectionName not in self.__dataDict:
if not contents:
self.__dataDict[sectionName] = CFG()
else:
self.__dataDict[sectionName] = contents
else:
raise KeyError("%s key already exists" % sectionName)
return self.__dataDict[sectionName]
def __overrideAndCloneSection(self, sectionName, oCFGToClone):
"""
Replace the contents of a section
:type sectionName: string
:param sectionName: Name of the section
:type oCFGToClone: CFG
:param oCFGToClone: CFG with the contents of the section
"""
if sectionName not in self.listSections():
raise KeyError("Section %s does not exist" % sectionName)
self.__dataDict[sectionName] = oCFGToClone.clone()
@gCFGSynchro
def setOption(self, optionName, value, comment=""):
"""
Create a new option.
:type optionName: string
:param optionName: Name of the option to create
:type value: string
:param value: Value of the option
:type comment: string
:param comment: Comment for the option
"""
if optionName == "":
raise ValueError("Creating an option with empty name! You shouldn't do that!")
if optionName.find("/") > -1:
recDict = self.getRecursive(optionName, -1)
if not recDict:
return S_ERROR("Parent section does not exist %s" % optionName)
parentSection = recDict['value']
if isinstance(parentSection, basestring):
raise KeyError("Entry %s doesn't seem to be a section" % recDict['key'])
return parentSection.setOption(recDict['levelsBelow'], value, comment)
self.__addEntry(optionName, comment)
self.__dataDict[optionName] = str(value)
def __addEntry(self, entryName, comment):
"""
Add an entry and set the comment
:type entryName: string
:param entryName: Name of the entry
:type comment: string
:param comment: Comment for the entry
"""
if entryName not in self.__orderedList:
self.__orderedList.append(entryName)
self.__commentDict[entryName] = comment
def existsKey(self, key):
"""
Check if an option/section with that name exists
:type key: string
:param key: Name of the option/section to check
:return: Boolean with the result
"""
return key in self.__orderedList
def sortAlphabetically(self, ascending=True):
"""
Order this cfg alphabetically.
Returns True if modified.
"""
if not ascending:
return self.sortByKey(reverse=True)
return self.sortByKey()
def sortByKey(self, key=None, reverse=False):
"""
Order this cfg using the function passed as key; the default (None)
corresponds to an alphabetical sort.
Returns True if modified.
"""
unordered = list(self.__orderedList)
self.__orderedList.sort(key=key, reverse=reverse)
return unordered != self.__orderedList
@gCFGSynchro
def deleteKey(self, key):
"""
Delete an option/section
:type key: string
:param key: Name of the option/section to delete
:return: Boolean with the result
"""
result = self.getRecursive(key, -1)
if not result:
raise KeyError("%s does not exist" % "/".join(List.fromChar(key, "/")[:-1]))
cfg = result['value']
end = result['levelsBelow']
if end in cfg.__orderedList:
del cfg.__commentDict[end]
del cfg.__dataDict[end]
cfg.__orderedList.remove(end)
return True
return False
@gCFGSynchro
def copyKey(self, oldName, newName):
"""
Copy an option/section
:type oldName: string
:param oldName: Name of the option / section to copy
:type newName: string
:param newName: Destination name
:return: Boolean with the result
"""
if oldName == newName:
return True
result = self.getRecursive(oldName, -1)
if not result:
raise KeyError("%s does not exist" % "/".join(List.fromChar(oldName, "/")[:-1]))
oldCfg = result['value']
oldEnd = result['levelsBelow']
if oldEnd in oldCfg.__dataDict:
result = self.getRecursive(newName, -1)
if not result:
raise KeyError("%s does not exist" % "/".join(List.fromChar(newName, "/")[:-1]))
newCfg = result['value']
newEnd = result['levelsBelow']
newCfg.__dataDict[newEnd] = oldCfg.__dataDict[oldEnd]
newCfg.__commentDict[newEnd] = oldCfg.__commentDict[oldEnd]
refKeyPos = oldCfg.__orderedList.index(oldEnd)
newCfg.__orderedList.insert(refKeyPos + 1, newEnd)
return True
else:
return False
@gCFGSynchro
def listOptions(self, ordered=True):
"""
List options
:type ordered: boolean
:param ordered: Return the options in order. Defaults to True.
:return: List with the option names
"""
if ordered:
return [sKey for sKey in self.__orderedList if isinstance(self.__dataDict[sKey], basestring)]
else:
return [sKey for sKey in self.__dataDict.keys() if isinstance(self.__dataDict[sKey], basestring)]
@gCFGSynchro
def listSections(self, ordered=True):
"""
List subsections
:type ordered: boolean
:param ordered: Return the subsections in order. Defaults to True.
:return: List with the subsection names
"""
if ordered:
return [sKey for sKey in self.__orderedList if not isinstance(self.__dataDict[sKey], basestring)]
else:
return [sKey for sKey in self.__dataDict.keys() if not isinstance(self.__dataDict[sKey], basestring)]
@gCFGSynchro
def isSection(self, key):
"""
Return whether a section exists
:type key: string
:param key: Name to check
:return: Boolean with the results
"""
if key.find("/") != -1:
keyDict = self.getRecursive(key, -1)
if not keyDict:
return False
section = keyDict['value']
if isinstance(section, basestring):
return False
secKey = keyDict['levelsBelow']
return section.isSection(secKey)
return key in self.__dataDict and not isinstance(self.__dataDict[key], basestring)
@gCFGSynchro
def isOption(self, key):
"""
Return whether an option exists
:type key: string
:param key: Name to check
:return: Boolean with the results
"""
if key.find("/") != -1:
keyDict = self.getRecursive(key, -1)
if not keyDict:
return False
section = keyDict['value']
if isinstance(section, basestring):
return False
secKey = keyDict['levelsBelow']
return section.isOption(secKey)
return key in self.__dataDict and isinstance(self.__dataDict[key], basestring)
def listAll(self):
"""
List all sections and options
:return: List with names of all options and subsections
"""
return self.__orderedList
def __recurse(self, pathList):
"""
Explore recursively a path
:type pathList: list
:param pathList: List containing the path to explore
:return: Dictionary with the contents { key, value, comment }
"""
if pathList[0] in self.__dataDict:
if len(pathList) == 1:
return {'key': pathList[0],
'value': self.__dataDict[pathList[0]],
'comment': self.__commentDict[pathList[0]]}
else:
return self.__dataDict[pathList[0]].__recurse(pathList[1:])
else:
return False
@gCFGSynchro
def getRecursive(self, path, levelsAbove=0):
"""
Get path contents
:type path: string
:param path: Path to explore recursively and get the contents
:type levelsAbove: integer
:param levelsAbove: Number of trailing levels in the path that won't be explored.
For instance, to explore all sections in a path except the last one, use
levelsAbove = 1
:return: Dictionary containing:
key -> name of the entry
value -> content of the key
comment -> comment of the key
"""
pathList = [dirName.strip() for dirName in path.split("/") if not dirName.strip() == ""]
levelsAbove = abs(levelsAbove)
if len(pathList) - levelsAbove < 0:
return None
if len(pathList) - levelsAbove == 0:
lBel = ""
if levelsAbove > 0:
lBel = "/".join(pathList[len(pathList) - levelsAbove:])
return {'key': "", 'value': self, 'comment': "", 'levelsBelow': lBel}
levelsBelow = ""
if levelsAbove > 0:
levelsBelow = "/".join(pathList[-levelsAbove:])
pathList = pathList[:-levelsAbove]
retDict = self.__recurse(pathList)
if not retDict:
return None
retDict['levelsBelow'] = levelsBelow
return retDict
def getOption(self, opName, defaultValue=None):
"""
Get option value with default applied
:type opName: string
:param opName: Path to the option to retrieve
:type defaultValue: optional (any python type)
:param defaultValue: Default value for the option if the option is not defined.
If the option is defined, its value is returned cast to
the type of defaultValue, when one is given.
:return: Value of the option casted to defaultValue type, or defaultValue
"""
levels = List.fromChar(opName, "/")
dataD = self.__dataDict
while len(levels) > 0:
try:
dataV = dataD[levels.pop(0)]
except KeyError:
return defaultValue
dataD = dataV
if not isinstance(dataV, basestring):
optionValue = defaultValue
else:
optionValue = dataV
# Return value if existing, defaultValue if not
if optionValue == defaultValue:
if defaultValue is None or isinstance(defaultValue, type):
return defaultValue
return optionValue
# Value has been returned from the configuration
if defaultValue is None:
return optionValue
# Casting to defaultValue's type
defaultType = defaultValue
if not isinstance(defaultValue, type):
defaultType = type(defaultValue)
if defaultType == list:
try:
return List.fromChar(optionValue, ',')
except Exception:
return defaultValue
elif defaultType == bool:
try:
return optionValue.lower() in ("y", "yes", "true", "1")
except Exception:
return defaultValue
else:
try:
return defaultType(optionValue)
except Exception:
return defaultValue
def getAsCFG(self, path=""):
"""Return subsection as CFG object.
:param str path: Path to the section
:return: CFG object, of path is not found the CFG is empty
"""
if not path:
return self.clone()
splitPath = path.lstrip('/').split('/')
basePath = splitPath[0]
remainingPath = splitPath[1:]
if basePath not in self.__dataDict:
return CFG()
return self.__dataDict[basePath].getAsCFG("/".join(remainingPath))
def getAsDict(self, path=""):
"""
Get the contents below a given path as a dict
:type path: string
:param path: Path to retrieve as dict
:return: Dictionary containing the data
"""
resVal = {}
if path:
reqDict = self.getRecursive(path)
if not reqDict:
return resVal
keyCfg = reqDict['value']
if isinstance(keyCfg, basestring):
return resVal
return keyCfg.getAsDict()
for op in self.listOptions():
resVal[op] = self[op]
for sec in self.listSections():
resVal[sec] = self[sec].getAsDict()
return resVal
@gCFGSynchro
def appendToOption(self, optionName, value):
"""
Append a value to an option; the caller supplies any separator (e.g. a leading comma)
:type optionName: string
:param optionName: Name of the option to append the value
:type value: string
:param value: Value to append to the option
"""
result = self.getRecursive(optionName, -1)
if not result:
raise KeyError("%s does not exist" % "/".join(List.fromChar(optionName, "/")[:-1]))
cfg = result['value']
end = result['levelsBelow']
if end not in cfg.__dataDict:
raise KeyError("Option %s has not been declared" % end)
cfg.__dataDict[end] += str(value)
@gCFGSynchro
def addKey(self, key, value, comment, beforeKey=""):
"""
Add a new entry (option or section)
:type key: string
:param key: Name of the option/section to add
:type value: string/CFG
:param value: Contents of the new option/section
:type comment: string
:param comment: Comment for the option/section
:type beforeKey: string
:param beforeKey: Name of the option/section to add the entry above. By default
the new entry will be added at the end.
"""
result = self.getRecursive(key, -1)
if not result:
raise KeyError("%s does not exist" % "/".join(List.fromChar(key, "/")[:-1]))
cfg = result['value']
end = result['levelsBelow']
if end in cfg.__dataDict:
raise KeyError("%s already exists" % key)
cfg.__dataDict[end] = value
cfg.__commentDict[end] = comment
if beforeKey == "":
cfg.__orderedList.append(end)
else:
refKeyPos = cfg.__orderedList.index(beforeKey)
cfg.__orderedList.insert(refKeyPos, end)
@gCFGSynchro
def renameKey(self, oldName, newName):
"""
Rename an option/section
:type oldName: string
:param oldName: Name of the option/section to change
:type newName: string
:param newName: New name of the option/section
:return: Boolean with the result of the rename
"""
if oldName == newName:
return True
result = self.getRecursive(oldName, -1)
if not result:
raise KeyError("%s does not exist" % "/".join(List.fromChar(oldName, "/")[:-1]))
oldCfg = result['value']
oldEnd = result['levelsBelow']
if oldEnd in oldCfg.__dataDict:
result = self.getRecursive(newName, -1)
if not result:
raise KeyError("%s does not exist" % "/".join(List.fromChar(newName, "/")[:-1]))
newCfg = result['value']
newEnd = result['levelsBelow']
newCfg.__dataDict[newEnd] = oldCfg.__dataDict[oldEnd]
newCfg.__commentDict[newEnd] = oldCfg.__commentDict[oldEnd]
refKeyPos = oldCfg.__orderedList.index(oldEnd)
oldCfg.__orderedList.remove(oldEnd)
newCfg.__orderedList.insert(refKeyPos, newEnd)
del oldCfg.__dataDict[oldEnd]
del oldCfg.__commentDict[oldEnd]
return True
else:
return False
def __getitem__(self, key):
"""
Get the contents of a section/option
:type key: string
:param key: Name of the section/option to retrieve
:return: String/CFG with the contents
"""
if key.find("/") > -1:
subDict = self.getRecursive(key)
if not subDict:
return False
return subDict['value']
return self.__dataDict[key]
def __iter__(self):
"""
Iterate through the contents in order
"""
for key in self.__orderedList:
yield key
def __contains__(self, key):
"""
Check if a key is defined
"""
return self.getRecursive(key)
def __str__(self):
"""
Get a print friendly representation of the CFG
:return: String with the contents of the CFG
"""
return self.serialize()
def __repr__(self):
"""
Get a print friendly representation of the CFG
:return: String with the contents of the CFG
"""
return self.serialize()
def __nonzero__(self):
"""
CFGs are not zeroes! ;)
"""
return True
def __eq__(self, cfg):
"""
Check two CFGs for equality
"""
if not self.__orderedList == cfg.__orderedList:
return False
for key in self.__orderedList:
if not self.__commentDict[key].strip() == cfg.__commentDict[key].strip():
return False
if not self.__dataDict[key] == cfg.__dataDict[key]:
return False
return True
@gCFGSynchro
def getComment(self, entryName):
"""
Get the comment for an option/section
:type entryName: string
:param entryName: Name of the option/section
:return: String with the comment
"""
try:
return self.__commentDict[entryName]
except BaseException:
raise ValueError("%s does not have any comment defined" % entryName)
@gCFGSynchro
def setComment(self, entryName, comment):
"""
Set the comment for an option/section
:type entryName: string
:param entryName: Name of the option/section
:type comment: string
:param comment: Comment for the option/section
"""
if entryName in self.__orderedList:
self.__commentDict[entryName] = comment
return True
return False
@gCFGSynchro
def serialize(self, tabLevelString=""):
"""
Generate a human readable serialization of a CFG
:type tabLevelString: string
:param tabLevelString: Tab string to apply to entries before representing them
:return: String with the contents of the CFG
"""
indentation = " "
cfgString = ""
for entryName in self.__orderedList:
if entryName in self.__commentDict:
for commentLine in List.fromChar(self.__commentDict[entryName], "\n"):
cfgString += "%s#%s\n" % (tabLevelString, commentLine)
if entryName in self.listSections():
cfgString += "%s%s\n%s{\n" % (tabLevelString, entryName, tabLevelString)
cfgString += self.__dataDict[entryName].serialize("%s%s" % (tabLevelString, indentation))
cfgString += "%s}\n" % tabLevelString
elif entryName in self.listOptions():
valueList = List.fromChar(self.__dataDict[entryName])
if len(valueList) == 0:
cfgString += "%s%s = \n" % (tabLevelString, entryName)
else:
cfgString += "%s%s = %s\n" % (tabLevelString, entryName, valueList[0])
for value in valueList[1:]:
cfgString += "%s%s += %s\n" % (tabLevelString, entryName, value)
else:
raise ValueError("Oops. There is an entry in the order which is not a section nor an option")
return cfgString
@gCFGSynchro
def clone(self):
"""
Create a copy of the CFG
:return: CFG copy
"""
clonedCFG = CFG()
clonedCFG.__orderedList = copy.deepcopy(self.__orderedList)
clonedCFG.__commentDict = copy.deepcopy(self.__commentDict)
for option in self.listOptions():
clonedCFG.__dataDict[option] = self[option]
for section in self.listSections():
clonedCFG.__dataDict[section] = self[section].clone()
return clonedCFG
@gCFGSynchro
def mergeWith(self, cfgToMergeWith):
"""
Generate a CFG by merging with the contents of another CFG.
:type cfgToMergeWith: CFG
:param cfgToMergeWith: CFG with the contents to merge with. Its contents take
precedence over the contents of this CFG
:return: CFG with the result of the merge
"""
mergedCFG = CFG()
for option in self.listOptions():
mergedCFG.setOption(option,
self[option],
self.getComment(option))
for option in cfgToMergeWith.listOptions():
mergedCFG.setOption(option,
cfgToMergeWith[option],
cfgToMergeWith.getComment(option))
for section in self.listSections():
if section in cfgToMergeWith.listSections():
oSectionCFG = self[section].mergeWith(cfgToMergeWith[section])
mergedCFG.createNewSection(section,
cfgToMergeWith.getComment(section),
oSectionCFG)
else:
mergedCFG.createNewSection(section,
self.getComment(section),
self[section].clone())
for section in cfgToMergeWith.listSections():
if section not in self.listSections():
mergedCFG.createNewSection(section,
cfgToMergeWith.getComment(section),
cfgToMergeWith[section])
return mergedCFG
def getModifications(self, newerCfg, ignoreMask=None, parentPath="",
ignoreOrder=False, ignoreComments=False):
"""
Compare two cfgs
:type newerCfg: ~DIRAC.Core.Utilities.CFG.CFG
:param newerCfg: Cfg to compare with
:param list ignoreMask: List of paths to ignore
:param str parentPath: Start from this path
:param ignoreOrder: Do not report changes that are only in ordering
:param ignoreComments: Do not report changes for changed comments
:return: A list of modifications
"""
modList = []
# Options
oldOptions = self.listOptions(True)
newOptions = newerCfg.listOptions(True)
for newOption in newOptions:
iPos = newerCfg.__orderedList.index(newOption)
newOptPath = "%s/%s" % (parentPath, newOption)
if ignoreMask and newOptPath in ignoreMask:
continue
if newOption not in oldOptions:
modList.append(('addOpt', newOption, iPos,
newerCfg[newOption],
newerCfg.getComment(newOption)))
else:
modified = False
if iPos != self.__orderedList.index(newOption) and not ignoreOrder:
modified = True
elif newerCfg[newOption] != self[newOption]:
modified = True
elif newerCfg.getComment(newOption) != self.getComment(newOption) and not ignoreComments:
modified = True
if modified:
modList.append(('modOpt', newOption, iPos,
newerCfg[newOption],
newerCfg.getComment(newOption)))
for oldOption in oldOptions:
oldOptPath = "%s/%s" % (parentPath, oldOption)
if ignoreMask and oldOptPath in ignoreMask:
continue
if oldOption not in newOptions:
modList.append(('delOpt', oldOption, -1, ''))
# Sections
oldSections = self.listSections(True)
newSections = newerCfg.listSections(True)
for newSection in newSections:
iPos = newerCfg.__orderedList.index(newSection)
newSecPath = "%s/%s" % (parentPath, newSection)
if ignoreMask and newSecPath in ignoreMask:
continue
if newSection not in oldSections:
modList.append(('addSec', newSection, iPos,
str(newerCfg[newSection]),
newerCfg.getComment(newSection)))
else:
modified = False
if iPos != self.__orderedList.index(newSection):
modified = True
elif newerCfg.getComment(newSection) != self.getComment(newSection):
modified = True
subMod = self[newSection].getModifications(newerCfg[newSection],
ignoreMask, newSecPath,
ignoreOrder, ignoreComments)
if subMod:
modified = True
if modified:
modList.append(('modSec', newSection, iPos,
subMod,
newerCfg.getComment(newSection)))
for oldSection in oldSections:
oldSecPath = "%s/%s" % (parentPath, oldSection)
if ignoreMask and oldSecPath in ignoreMask:
continue
if oldSection not in newSections:
modList.append(('delSec', oldSection, -1, ''))
return modList
def applyModifications(self, modList, parentSection=""):
"""
Apply modifications to a CFG
:type modList: List
:param modList: Modifications from a getModifications call
:return: True/False
"""
for modAction in modList:
action = modAction[0]
key = modAction[1]
iPos = modAction[2]
value = modAction[3]
if action == 'addSec':
if key in self.listSections():
return S_ERROR("Section %s/%s already exists" % (parentSection, key))
# key, value, comment, beforeKey = ""
value = CFG().loadFromBuffer(value)
comment = modAction[4].strip()
if iPos < len(self.__orderedList):
beforeKey = self.__orderedList[iPos]
else:
beforeKey = ""
self.addKey(key, value, comment, beforeKey)
elif action == 'delSec':
if key not in self.listSections():
return S_ERROR("Section %s/%s does not exist" % (parentSection, key))
self.deleteKey(key)
elif action == 'modSec':
if key not in self.listSections():
return S_ERROR("Section %s/%s does not exist" % (parentSection, key))
comment = modAction[4].strip()
self.setComment(key, comment)
if value:
result = self[key].applyModifications(value, "%s/%s" % (parentSection, key))
if not result['OK']:
return result
if iPos >= len(self.__orderedList) or key != self.__orderedList[iPos]:
prevPos = self.__orderedList.index(key)
del self.__orderedList[prevPos]
self.__orderedList.insert(iPos, key)
elif action == "addOpt":
if key in self.listOptions():
return S_ERROR("Option %s/%s exists already" % (parentSection, key))
# key, value, comment, beforeKey = ""
comment = modAction[4].strip()
if iPos < len(self.__orderedList):
beforeKey = self.__orderedList[iPos]
else:
beforeKey = ""
self.addKey(key, value, comment, beforeKey)
elif action == "modOpt":
if key not in self.listOptions():
return S_ERROR("Option %s/%s does not exist" % (parentSection, key))
comment = modAction[4].strip()
self.setOption(key, value, comment)
if iPos >= len(self.__orderedList) or key != self.__orderedList[iPos]:
prevPos = self.__orderedList.index(key)
del self.__orderedList[prevPos]
self.__orderedList.insert(iPos, key)
elif action == "delOpt":
if key not in self.listOptions():
return S_ERROR("Option %s/%s does not exist" % (parentSection, key))
self.deleteKey(key)
return S_OK()
# Functions to load a CFG
def loadFromFile(self, fileName):
"""
Load the contents of the CFG from a file
:type fileName: string
:param fileName: File name to load the contents from
:return: This CFG
"""
if zipfile.is_zipfile(fileName):
# Zipped file
zipHandler = zipfile.ZipFile(fileName)
nameList = zipHandler.namelist()
fileToRead = nameList[0]
fileData = zipHandler.read(fileToRead)
zipHandler.close()
else:
with open(fileName) as fd:
fileData = fd.read()
return self.loadFromBuffer(fileData)
@gCFGSynchro
def loadFromBuffer(self, data):
"""
Load the contents of the CFG from a string
:type data: string
:param data: Contents of the CFG
:return: This CFG
"""
commentRE = re.compile(r"^\s*#")
self.reset()
levelList = []
currentLevel = self
currentlyParsedString = ""
currentComment = ""
for line in data.split("\n"):
line = line.strip()
if len(line) < 1:
continue
if commentRE.match(line):
currentComment += "%s\n" % line.replace("#", "")
continue
for index in range(len(line)):
if line[index] == "{":
currentlyParsedString = currentlyParsedString.strip()
currentLevel.createNewSection(currentlyParsedString, currentComment)
levelList.append(currentLevel)
currentLevel = currentLevel[currentlyParsedString]
currentlyParsedString = ""
currentComment = ""
elif line[index] == "}":
currentLevel = levelList.pop()
elif line[index] == "=":
lFields = line.split("=")
currentLevel.setOption(lFields[0].strip(), "=".join(lFields[1:]).strip(), currentComment)
currentlyParsedString = ""
currentComment = ""
break
elif line[index: index + 2] == "+=":
valueList = line.split("+=")
currentLevel.appendToOption(valueList[0].strip(), ", %s" % "+=".join(valueList[1:]).strip())
currentlyParsedString = ""
currentComment = ""
break
else:
currentlyParsedString += line[index]
return self
@gCFGSynchro
def loadFromDict(self, data):
for k in data:
value = data[k]
if isinstance(value, dict):
self.createNewSection(k, "", CFG().loadFromDict(value))
elif isinstance(value, (list, tuple)):
self.setOption(k, ", ".join(value), "")
else:
self.setOption(k, str(value), "")
return self
def writeToFile(self, fileName):
"""
Write the contents of the cfg to file
:type fileName: string
:param fileName: Name of the file to write the cfg to
:return: True/False
"""
try:
directory = os.path.dirname(fileName)
if directory and (not os.path.exists(directory)):
os.makedirs(directory)
with open(fileName, "w") as fd:
fd.write(str(self))
return True
except Exception:
return False
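A hedged usage sketch for the CFG class defined above:
# Build a small configuration, exercise the string-typed storage and the
# default-driven casting of getOption(), then round-trip the serialization.
cfg = CFG()
systems = cfg.createNewSection("Systems", comment="top-level section")
systems.setOption("Retries", 3, comment="stored as a string")
assert cfg["Systems"]["Retries"] == "3"        # options are stored as str
assert systems.getOption("Retries", 0) == 3    # cast to the default's type
clone = CFG().loadFromBuffer(cfg.serialize())  # serialize/load round-trip
assert clone == cfg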
|
andrew-luhring/yolo-robot
|
refs/heads/master
|
node_modules/sqlite3/deps/extract.py
|
775
|
import sys
import tarfile
import os
# Usage: extract.py <tarball.tar.gz> <destination-dir>
tarball = os.path.abspath(sys.argv[1])
dirname = os.path.abspath(sys.argv[2])
# Open the gzipped tarball read-only and unpack everything into dirname.
tfile = tarfile.open(tarball, 'r:gz')
tfile.extractall(dirname)
sys.exit(0)
|
Drudenhaus/aws-ec2rescue-linux
|
refs/heads/develop
|
lib/requests/certs.py
|
516
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
requests.certs
~~~~~~~~~~~~~~
This module returns the preferred default CA certificate bundle.
If you are packaging Requests, e.g., for a Linux distribution or a managed
environment, you can change the definition of where() to return a separately
packaged CA bundle.
"""
import os.path
try:
from certifi import where
except ImportError:
def where():
"""Return the preferred certificate bundle."""
# vendored bundle inside Requests
return os.path.join(os.path.dirname(__file__), 'cacert.pem')
if __name__ == '__main__':
print(where())
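A hedged sketch of how callers consume where(); requests itself passes the bundle path as the verify argument:
import requests  # assumed importable; any TLS-aware client can use the path

response = requests.get('https://example.org', verify=where())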
|
ejpbruel/servo
|
refs/heads/master
|
tests/wpt/css-tests/tools/wptserve/wptserve/constants.py
|
326
|
import utils
content_types = utils.invert_dict({"text/html": ["htm", "html"],
"application/json": ["json"],
"application/xhtml+xml": ["xht", "xhtm", "xhtml"],
"application/xml": ["xml"],
"application/x-xpinstall": ["xpi"],
"text/javascript": ["js"],
"text/css": ["css"],
"text/plain": ["txt", "md"],
"image/svg+xml": ["svg"],
"image/gif": ["gif"],
"image/jpeg": ["jpg", "jpeg"],
"image/png": ["png"],
"image/bmp": ["bmp"],
"text/event-stream": ["event_stream"],
"text/cache-manifest": ["manifest"],
"video/mp4": ["mp4", "m4v"],
"audio/mp4": ["m4a"],
"audio/mpeg": ["mp3"],
"video/webm": ["webm"],
"audio/webm": ["weba"],
"video/ogg": ["ogg", "ogv"],
"audio/ogg": ["oga"],
"audio/x-wav": ["wav"],
"text/vtt": ["vtt"],})
response_codes = {
100: ('Continue', 'Request received, please continue'),
101: ('Switching Protocols',
'Switching to new protocol; obey Upgrade header'),
200: ('OK', 'Request fulfilled, document follows'),
201: ('Created', 'Document created, URL follows'),
202: ('Accepted',
'Request accepted, processing continues off-line'),
203: ('Non-Authoritative Information', 'Request fulfilled from cache'),
204: ('No Content', 'Request fulfilled, nothing follows'),
205: ('Reset Content', 'Clear input form for further input.'),
206: ('Partial Content', 'Partial content follows.'),
300: ('Multiple Choices',
'Object has several resources -- see URI list'),
301: ('Moved Permanently', 'Object moved permanently -- see URI list'),
302: ('Found', 'Object moved temporarily -- see URI list'),
303: ('See Other', 'Object moved -- see Method and URL list'),
304: ('Not Modified',
'Document has not changed since given time'),
305: ('Use Proxy',
'You must use proxy specified in Location to access this '
'resource.'),
307: ('Temporary Redirect',
'Object moved temporarily -- see URI list'),
400: ('Bad Request',
'Bad request syntax or unsupported method'),
401: ('Unauthorized',
'No permission -- see authorization schemes'),
402: ('Payment Required',
'No payment -- see charging schemes'),
403: ('Forbidden',
'Request forbidden -- authorization will not help'),
404: ('Not Found', 'Nothing matches the given URI'),
405: ('Method Not Allowed',
'Specified method is invalid for this resource.'),
406: ('Not Acceptable', 'URI not available in preferred format.'),
407: ('Proxy Authentication Required', 'You must authenticate with '
'this proxy before proceeding.'),
408: ('Request Timeout', 'Request timed out; try again later.'),
409: ('Conflict', 'Request conflict.'),
410: ('Gone',
'URI no longer exists and has been permanently removed.'),
411: ('Length Required', 'Client must specify Content-Length.'),
412: ('Precondition Failed', 'Precondition in headers is false.'),
413: ('Request Entity Too Large', 'Entity is too large.'),
414: ('Request-URI Too Long', 'URI is too long.'),
415: ('Unsupported Media Type', 'Entity body in unsupported format.'),
416: ('Requested Range Not Satisfiable',
'Cannot satisfy request range.'),
417: ('Expectation Failed',
'Expect condition could not be satisfied.'),
500: ('Internal Server Error', 'Server got itself in trouble'),
501: ('Not Implemented',
'Server does not support this operation'),
502: ('Bad Gateway', 'Invalid responses from another server/proxy.'),
503: ('Service Unavailable',
'The server cannot process the request due to a high load'),
504: ('Gateway Timeout',
'The gateway server did not receive a timely response'),
505: ('HTTP Version Not Supported', 'Cannot fulfill request.'),
}
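A hedged lookup sketch against the tables above (assuming utils.invert_dict flips the mapping so keys are file extensions):
# Resolve a MIME type for an extension and a reason phrase for a status code.
mime = content_types.get("html", "application/octet-stream")  # "text/html"
reason, message = response_codes[404]  # ("Not Found", "Nothing matches ...")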
|
anirudhSK/chromium
|
refs/heads/master
|
native_client_sdk/src/build_tools/tests/verify_filelist_test.py
|
132
|
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
import unittest
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
BUILD_TOOLS_DIR = os.path.dirname(SCRIPT_DIR)
sys.path.append(BUILD_TOOLS_DIR)
import verify_filelist
def Verify(platform, rules_contents, directory_list):
rules = verify_filelist.Rules('test', platform, rules_contents)
rules.VerifyDirectoryList(directory_list)
class VerifyFilelistTestCase(unittest.TestCase):
def testBasic(self):
rules = """\
foo/file1
foo/file2
foo/file3
bar/baz/other
"""
dirlist = ['foo/file1', 'foo/file2', 'foo/file3', 'bar/baz/other']
Verify('linux', rules, dirlist)
def testGlob(self):
rules = 'foo/*'
dirlist = ['foo/file1', 'foo/file2', 'foo/file3/and/subdir']
Verify('linux', rules, dirlist)
def testPlatformVar(self):
rules = 'dir/${PLATFORM}/blah'
dirlist = ['dir/linux/blah']
Verify('linux', rules, dirlist)
def testPlatformVarGlob(self):
rules = 'dir/${PLATFORM}/*'
dirlist = ['dir/linux/file1', 'dir/linux/file2']
Verify('linux', rules, dirlist)
def testPlatformRule(self):
rules = """\
[linux]dir/linux/only
all/platforms
"""
linux_dirlist = ['dir/linux/only', 'all/platforms']
other_dirlist = ['all/platforms']
Verify('linux', rules, linux_dirlist)
Verify('mac', rules, other_dirlist)
def testMultiPlatformRule(self):
rules = """\
[linux,win]dir/no/macs
all/platforms
"""
nonmac_dirlist = ['dir/no/macs', 'all/platforms']
mac_dirlist = ['all/platforms']
Verify('linux', rules, nonmac_dirlist)
Verify('win', rules, nonmac_dirlist)
Verify('mac', rules, mac_dirlist)
def testPlatformRuleBadPlatform(self):
rules = '[frob]bad/platform'
self.assertRaises(verify_filelist.ParseException, Verify,
'linux', rules, [])
def testMissingFile(self):
rules = """\
foo/file1
foo/missing
"""
dirlist = ['foo/file1']
self.assertRaises(verify_filelist.VerifyException, Verify,
'linux', rules, dirlist)
def testExtraFile(self):
rules = 'foo/file1'
dirlist = ['foo/file1', 'foo/extra_file']
self.assertRaises(verify_filelist.VerifyException, Verify,
'linux', rules, dirlist)
def testEmptyGlob(self):
rules = 'foo/*'
dirlist = ['foo'] # Directory existing is not enough!
self.assertRaises(verify_filelist.VerifyException, Verify,
'linux', rules, dirlist)
def testBadGlob(self):
rules = '*/foo/bar'
dirlist = []
self.assertRaises(verify_filelist.ParseException, Verify,
'linux', rules, dirlist)
def testUnknownPlatform(self):
rules = 'foo'
dirlist = ['foo']
for platform in ('linux', 'mac', 'win'):
Verify(platform, rules, dirlist)
self.assertRaises(verify_filelist.ParseException, Verify,
'foobar', rules, dirlist)
def testUnexpectedPlatformFile(self):
rules = '[mac,win]foo/file1'
dirlist = ['foo/file1']
self.assertRaises(verify_filelist.VerifyException, Verify,
'linux', rules, dirlist)
def testWindowsPaths(self):
if os.path.sep != '/':
rules = 'foo/bar/baz'
dirlist = ['foo\\bar\\baz']
Verify('win', rules, dirlist)
else:
rules = 'foo/bar/baz\\foo'
dirlist = ['foo/bar/baz\\foo']
Verify('linux', rules, dirlist)
def testNestedGlobs(self):
rules = """\
foo/*
foo/bar/*"""
dirlist = ['foo/file', 'foo/bar/file']
Verify('linux', rules, dirlist)
rules = """\
foo/bar/*
foo/*"""
dirlist = ['foo/file', 'foo/bar/file']
Verify('linux', rules, dirlist)
if __name__ == '__main__':
unittest.main()
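A hedged summary of the rule grammar exercised by the tests above:
# One path per line; a [platform,...] prefix restricts an entry,
# ${PLATFORM} is substituted, and a trailing /* globs files below a directory:
#
#     [linux,win]dir/no/macs
#     dir/${PLATFORM}/*
#     all/platforms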
|
zemogle/raspberrysky
|
refs/heads/master
|
app.py
|
1
|
#!/usr/bin/env python
from importlib import import_module
import os
from flask import Flask, render_template, Response, send_from_directory, request
import socket
import json
from allsky import single_image_raspistill, check_image_status
from tasks import background_task
app = Flask(__name__)
# The Celery instance (with redis as its task queue, or broker) is created in tasks.py
@app.route('/')
def index():
"""All sky streaming home page."""
return render_template('index.html', name=socket.gethostname())
# Background task triggered without any page refresh
@app.route('/snap')
def background_process():
task = background_task.apply_async()
return json.dumps({'pid':task.id})
@app.route('/status', methods=['GET'])
def check_camera_exposure():
if 'pid' in request.args:
pid = request.args['pid']
return check_image_status(taskid=pid)
else:
return json.dumps({'status':'FAILED'})
# The celery task run by the worker in the background lives in tasks.py,
# where it carries the celery decorator that denotes it as such.
if __name__ == '__main__':
app.run(host='0.0.0.0', threaded=True, port=8000)
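A hedged client-side sketch against the routes above (assumes the server runs on localhost:8000; requests is used for brevity, any HTTP client works):
import requests

# Trigger an exposure, then ask for its status by task id.
pid = requests.get('http://localhost:8000/snap').json()['pid']
status = requests.get('http://localhost:8000/status', params={'pid': pid}).text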
|
arju88nair/projectCulminate
|
refs/heads/master
|
venv/lib/python3.5/site-packages/nltk/app/nemo_app.py
|
2
|
# Finding (and Replacing) Nemo, Version 1.1, Aristide Grange 2006/06/06
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/496783
"""
Finding (and Replacing) Nemo
Instant Regular Expressions
Created by Aristide Grange
"""
from six.moves.tkinter import (Frame, Label, PhotoImage, Scrollbar, Text, Tk,
SEL_FIRST, SEL_LAST)
import re
import itertools
windowTitle = "Finding (and Replacing) Nemo"
initialFind = r"n(.*?)e(.*?)m(.*?)o"
initialRepl = r"M\1A\2K\3I"
initialText = """\
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat.
Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur.
Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
"""
images = {
"FIND":"R0lGODlhMAAiAPcAMf/////37//35//n1v97Off///f/9/f37/fexvfOvfeEQvd7QvdrQvdrKfdaKfdSMfdSIe/v9+/v7+/v5+/n3u/e1u/Wxu/Gre+1lO+tnO+thO+Ua+97Y+97Oe97Me9rOe9rMe9jOe9jMe9jIe9aMefe5+fe3ufezuece+eEWudzQudaIedSIedKMedKIedCKedCId7e1t7Wzt7Oxt7Gvd69vd69rd61pd6ljN6UjN6Ue96EY95zY95rUt5rQt5jMd5SId5KIdbn59be3tbGztbGvda1rdaEa9Z7a9Z7WtZzQtZzOdZzMdZjMdZaQtZSOdZSMdZKMdZCKdZCGNY5Ic7W1s7Oxs7Gtc69xs69tc69rc6tpc6llM6clM6cjM6Ue86EY85zWs5rSs5SKc5KKc5KGMa1tcatrcalvcalnMaUpcZ7c8ZzMcZrUsZrOcZrMcZaQsZSOcZSMcZKMcZCKcZCGMYxIcYxGL3Gxr21tb21rb2lpb2crb2cjL2UnL2UlL2UhL2Ec717Wr17Ur1zWr1rMb1jUr1KMb1KIb1CIb0xGLWlrbWlpbWcnLWEe7V7c7VzY7VzUrVSKbVKMbVCMbVCIbU5KbUxIbUxEK2lta2lpa2clK2UjK2MnK2MlK2Ea617e61za61rY61rMa1jSq1aUq1aSq1SQq1KKa0xEKWlnKWcnKWUnKWUhKWMjKWEa6Vza6VrWqVjMaVaUqVaKaVSMaVCMaU5KaUxIaUxGJyclJyMe5yElJyEhJx7e5x7c5xrOZxaQpxSOZxKQpw5IZSMhJSEjJR7c5Rre5RrY5RrUpRSQpRSKZRCOZRCKZQxKZQxIYyEhIx7hIxza4xzY4xrc4xjUoxaa4xaUoxSSoxKQoxCMYw5GIR7c4Rzc4Rre4RjY4RjWoRaa4RSWoRSUoRSMYRKQoRCOYQ5KYQxIXtra3taY3taSntKOXtCMXtCKXNCMXM5MXMxIWtSUmtKSmtKQmtCOWs5MWs5KWs5IWNCKWMxIVIxKUIQCDkhGAAAACH+AS4ALAAAAAAwACIAAAj/AAEIHEiwoMGDCBMqXMiwoUOHMqxIeEiRoZVp7cpZ29WrF4WKIAd208dGAQEVbiTVChUjZMU9+pYQmPmBZpxgvVw+nDdKwQICNVcIXQEkTgKdDdUJ+/nggVAXK1xI3TEA6UIr2uJ8iBqka1cXXTlkqGoVYRZ7iLyqBSs0iiEtZQVKiDGxBI1u3NR6lUpGDKg8MSgEQCphU7Z22vhg0dILXRCpYLuSCcYJT4wqXASBQaBzU7klHxC127OHD7ZDJFpERqRt0x5OnwQpmZmCLEhrbgg4WIHO1RY+nbQ9WRGEDJlmnXwJ+9FBgXMCIzYMVijBBgYMFxIMqJBMSc0Ht7qh/+Gjpte2rnYsYeNlasWIBgQ6yCewIoPCCp/cyP/wgUGbXVu0QcADZNBDnh98gHMLGXYQUw02w61QU3wdbNWDbQVVIIhMMwFF1DaZiPLBAy7E04kafrjSizaK3LFNNc0AAYRQDsAHHQlJ2IDQJ2zE1+EKDjiAijShkECCC8Qgw4cr7ZgyzC2WaHPNLWWoNeNWPiRAw0QFWQFMhz8C+QQ20yAiVSrY+MGOJCsccsst2GCzoHFxxEGGC+8hgs0MB2kyCpgzrUDCbs1Es41UdtATHFFkWELMOtsoQsYcgvRRQw5RSDgGOjZMR1AvPQIq6KCo9AKOJWDd48owQlHR4DXEKP9iyRrK+DNNBTu4RwIPFeTAGUG7hAomkA84gEg1m6ADljy9PBKGGJY4ig0xlsTBRSn98FOFDUC8pwQOPkgHbCGAzhTkA850s0c7j6Hjix9+gBIrMXLeAccWXUCyiRBcBEECdEJ98KtAqtBCYQc/OvDENnl4gYpUxISCIjjzylkGGV9okYUVNogRhAOBuuAEhjG08wOgDYzAgA5bCjIoCe5uwUk80RKTTSppPREGGGCIISOQ9AXBg6cC6WIywvCpoMHAocRBwhP4bHLFLujYkV42xNxBRhAyGrc113EgYtRBerDDDHMoDCyQEL5sE083EkgwQyBhxGFHMM206DUixGxmE0wssbQjCQ4JCaFKFwgQTVAVVhQUwAVPIFJKrHfYYRwi6OCDzzuIJIFhXAD0EccPsYRiSyqKSDpFcWSMIcZRoBMkQyA2BGZDIKSYcggih8TRRg4VxM5QABVYYLxgwiev/PLMCxQQADs=",
"find":"R0lGODlhMAAiAPQAMf////f39+/v7+fn597e3tbW1s7OzsbGxr29vbW1ta2traWlpZycnJSUlIyMjISEhHt7e3Nzc2tra2NjY1paWlJSUkpKSkJCQjk5OSkpKRgYGAAAAAAAAAAAAAAAAAAAACH+AS4ALAAAAAAwACIAAAX/ICCOZGmeaKquY2AGLiuvMCAUBuHWc48Kh0iFInEYCb4kSQCxPBiMxkMigRQEgJiSFVBYHNGG0RiZOHjblWAiiY4fkDhEYoBp06dAWfyAQyKAgAwDaHgnB0RwgYASgQ0IhDuGJDAIFhMRVFSLEX8QCJJ4AQM5AgQHTZqqjBAOCQQEkWkCDRMUFQsICQ4Vm5maEwwHOAsPDTpKMAsUDlO4CssTcb+2DAp8YGCyNFoCEsZwFQ3QDRTTVBRS0g1QbgsCd5QAAwgIBwYFAwStzQ8UEdCKVchky0yVBw7YuXkAKt4IAg74vXHVagqFBRgXSCAyYWAVCH0SNhDTitCJfSL5/4RbAPKPhQYYjVCYYAvCP0BxEDaD8CheAAHNwqh8MMGPSwgLeJWhwHSjqkYI+xg4MMCEgQjtRvZ7UAYCpghMF7CxONOWJkYR+rCpY4JlVpVxKDwYWEactKW9mhYRtqCTgwgWEMArERSK1j5q//6T8KXonFsShpiJkAECgQYVjykooCVA0JGHEWNiYCHThTFeb3UkoiCCBgwGEKQ1kuAJlhFwhA71h5SukwUM5qqeCSGBgicEWkfNiWSERtBad4JNIBaQBaQah1ToyGZBAnsIuIJs1qnqiAIVjIE2gnAB1T5x0icgzXT79ipgMOOEH6HBbREBMJCeGEY08IoLAkzB1YYFwjxwSUGSNULQJnNUwRYlCcyEkALIxECAP9cNMMABYpRhy3ZsSLDaR70oUAiABGCkAxowCGCAAfDYIQACXoElGRsdXWDBdg2Y90IWktDYGYAB9PWHP0PMdFZaF07SQgAFNDAMAQg0QA1UC8xoZQl22JGFPgWkOUCOL1pZQyhjxinnnCWEAAA7",
"REPL":"R0lGODlhMAAjAPcAMf/////3//+lOf+UKf+MEPf///f39/f35/fv7/ecQvecOfecKfeUIfeUGPeUEPeUCPeMAO/37+/v9+/v3u/n3u/n1u+9jO+9c++1hO+ta++tY++tWu+tUu+tSu+lUu+lQu+lMe+UMe+UKe+UGO+UEO+UAO+MCOfv5+fvxufn7+fn5+fnzue9lOe9c+e1jOe1e+e1c+e1a+etWuetUuelQuecOeeUUueUCN7e597e3t7e1t7ezt7evd7Wzt7Oxt7Ovd7Otd7Opd7OnN7Gtd7Gpd69lN61hN6ta96lStbextberdbW3tbWztbWxtbOvdbOrda1hNalUtaECM7W1s7Ozs7Oxs7Otc7Gxs7Gvc69tc69rc69pc61jM6lc8bWlMbOvcbGxsbGpca9tca9pca1nMaMAL3OhL3Gtb21vb21tb2tpb2tnL2tlLW9tbW9pbW9e7W1pbWtjLWcKa21nK2tra2tnK2tlK2lpa2llK2ljK2le6WlnKWljKWUe6WUc6WUY5y1QpyclJycjJychJyUc5yMY5StY5SUe5SMhJSMe5SMc5SMWpSEa5SESoyUe4yMhIyEY4SlKYScWoSMe4SEe4SEa4R7c4R7Y3uMY3uEe3t7e3t7c3tza3tzY3trKXtjIXOcAHOUMXOEY3Nzc3NzWnNrSmulCGuUMWuMGGtzWmtrY2taMWtaGGOUOWOMAGNzUmNjWmNjSmNaUmNaQmNaOWNaIWNSCFqcAFpjUlpSMVpSIVpSEFpKKVKMAFJSUlJSSlJSMVJKMVJKGFJKAFI5CEqUAEqEAEpzQkpKIUpCQkpCGEpCAEo5EEoxAEJjOUJCOUJCAEI5IUIxADl7ADlaITlCOTkxMTkxKTkxEDkhADFzADFrGDE5OTExADEpEClrCCkxKSkpKSkpISkpACkhCCkhACkYACFzACFrACEhCCEYGBhjEBhjABghABgYCBgYABgQEBgQABAQABAIAAhjAAhSAAhKAAgIEAgICABaAABCAAAhAAAQAAAIAAAAAAAAACH+AS4ALAAAAAAwACMAAAj/AAEIHEiwoMGDCBMqXMiwocOHAA4cgEixIIIJO3JMmAjADIqKFU/8MHIkg5EgYXx4iaTkI0iHE6wE2TCggYILQayEAgXIy8uGCKz8sDCAQAMRG3iEcXULlJkJPwli3OFjh9UdYYLE6NBhA04UXHoVA2XoTZgfPKBWlOBDphAWOdfMcfMDLloeO3hIMjbWVCQ5Fn6E2UFxgpsgFjYIEBADrZU6luqEEfqjTqpt54z1uuWqTIcgWAk7PECGzIUQDRosDmxlUrVJkwQJkqVuX71v06YZcyUlROAdbnLAJKPFyAYFAhoMwFlnEh0rWkpz8raPHm7dqKKc/KFFkBUrVn1M/ziBcEIeLUEQI8/AYk0i9Be4sqjsrN66c9/OnbobhpR3HkIUoZ0WVnBE0AGLFKKFD0HAFUQe77HQgQI1hRBDEHMcY0899bBzihZuCPILJD8EccEGGzwAQhFaUHHQH82sUkgeNHISDBk8WCCCcsqFUEQWmOyzjz3sUGNNOO5Y48YOEgowAAQhnBScQV00k82V47jzjy9CXZBcjziFoco//4CDiSOyhPMPLkJZkEBqJmRQxA9uZGEQD8Ncmc044/zzDF2IZQBCCDYE8QMZz/iiCSx0neHGI7BIhhhNn+1gxRpokEcQAp7seWU7/PwTyxqG/iCEEVzQmUombnDRxRExzP9nBR2PCKLFD3UJwcMPa/SRqUGNWJmNOVn+M44ukMRB4KGcWDNLVhuUMEIJAlzwA3DJBHMJIXm4sQYhqyxCRQQGLSIsn1qac2UzysQSyzX/hLMGD0F0IMCODYAQBA9W/PKPOcRiw0wzwxTiokF9dLMnuv/Mo+fCZF7jBr0xbDDCACWEYKgb1vzjDp/jZNOMLX0IZxAKq2TZTjtaOjwOsXyG+s8sZJTIQsUdIGHoJPf8w487QI/TDSt5mGwQFZxc406o8HiDJchk/ltLHpSlJwSvz5DpTjvmuGNOM57koelBOaAhiCaaPBLL0wwbm003peRBnBZqJMJL1ECz/HXYYx/NdAIOOVCxQyLorswymU93o0wuwfAiTDNR/xz0MLXU0XdCE+UwSTRZAq2lsSATu+4wkGvt+TjNzPLrQyegAUku2Hij5cd8LhxyM8QIg4w18HgcdC6BTBFSDmfQqsovttveDcG7lFLHI75cE841sARCxeWsnxC4G9HADPK6ywzDCRqBo0EHHWhMgT1IJzziNci1N7PMKnSYfML96/90AiJKey/0KtbLX1QK0rrNnQ541xugQ7SHhkXBghN0SKACWRc4KlAhBwKcIOYymJCAAAA7",
"repl":"R0lGODlhMAAjAPQAMf////f39+/v7+fn597e3tbW1s7OzsbGxr29vbW1ta2traWlpZycnJSUlIyMjISEhHt7e3Nzc2tra2NjY1paWlJSUkpKSkJCQjk5OTExMSkpKSEhIRgYGBAQEAgICAAAACH+AS4ALAAAAAAwACMAAAX/ICCOZGmeaKqubOu+gCDANBkIQ1EMQhAghFptYEAkEgjEwXBo7ISvweGgWCwUysPjwTgEoCafTySYIhYMxgLBjEQgCULvCw0QdAZdoVhUIJUFChISEAxYeQM1N1OMTAp+UwZ5eA4TEhFbDWYFdC4ECVMJjwl5BwsQa0umEhUVlhESDgqlBp0rAn5nVpBMDxeZDRQbHBgWFBSWDgtLBnFjKwRYCI9VqQsPs0YKEcMXFq0UEalFDWx4BAO2IwPjppAKDkrTWKYUGd7fEJJFEZpM00cOzCgh4EE8SaoWxKNixQooBRMyZMBwAYIRBhUgLDGS4MoBJeoANMhAgQsaCRZm/5lqaCUJhA4cNHjDoKEDBlJUHqkBlYBTiQUZNGjYMMxDhY3VWk6R4MEDBoMUak5AqoYBqANIBo4wcGGDUKIeLlzVZmWJggsVIkwAZaQSA3kdZzlKkIiEAAlDvW5oOkEBs488JTw44oeUIwdvVTFTUK7uiAAPgubt8GFDhQepqETAQCFU1UMGzlqAgFhUsAcCS0AO6lUDhw8xNRSbENGDhgWSHjWUe6ACbKITizmopZoBa6KvOwj9uuHDhwxyj3xekgDDhw5EvWKo0IB4iQLCOCC/njc7ZQ8UeGvza+ABZZgcxJNc4FO1gc0cOsCUrHevc8tdIMTIAhc4F198G2Qwwd8CBIQUAwEINABBBJUwR9R5wElgVRLwWODBBx4cGB8GEzDQIAo33CGJA8gh+JoH/clUgQU0YvDhdfmJdwEFC6Sjgg8yEPAABsPkh2F22cl2AQbn6QdTghTQ5eAJAQyQAAQV0MSBB9gRVZ4GE1mw5JZOAmiAVi1UWcAZDrDyZXYTeaOhA/bIVuIBPtKQ4h7ViYekUPdcEAEbzTzCRp5CADmAAwj+ORGPBcgwAAHo9ABGCYtm0ChwFHShlRiXhmHlkAcCiOeUodqQw5W0oXLAiamy4MOkjOyAaqxUymApDCEAADs=",
}
colors = ["#FF7B39","#80F121"]
emphColors = ["#DAFC33","#F42548"]
fieldParams = {
"height":3,
"width":70,
"font":("monaco",14),
"highlightthickness":0,
"borderwidth":0,
"background":"white",
}
textParams = {
"bg":"#F7E0D4",
"fg":"#2321F1",
"highlightthickness":0,
"width":1,
"height":10,
"font":("verdana",16),
"wrap":"word",
}
class Zone:
def __init__(self, image, initialField, initialText):
frm = Frame(root)
frm.config(background="white")
self.image = PhotoImage(format='gif',data=images[image.upper()])
self.imageDimmed = PhotoImage(format='gif',data=images[image])
self.img = Label(frm)
self.img.config(borderwidth=0)
self.img.pack(side = "left")
self.fld = Text(frm, **fieldParams)
self.initScrollText(frm,self.fld,initialField)
frm = Frame(root)
self.txt = Text(frm, **textParams)
self.initScrollText(frm,self.txt,initialText)
for i in range(2):
self.txt.tag_config(colors[i], background = colors[i])
self.txt.tag_config("emph"+colors[i], foreground = emphColors[i])
def initScrollText(self,frm,txt,contents):
scl = Scrollbar(frm)
scl.config(command = txt.yview)
scl.pack(side="right",fill="y")
txt.pack(side = "left", expand=True, fill="x")
txt.config(yscrollcommand = scl.set)
txt.insert("1.0",contents)
frm.pack(fill = "x")
Frame(height=2, bd=1, relief="ridge").pack(fill="x")
def refresh(self):
self.colorCycle = itertools.cycle(colors)
try:
self.substitute()
self.img.config(image = self.image)
except re.error:
self.img.config(image = self.imageDimmed)
class FindZone(Zone):
def addTags(self,m):
color = next(self.colorCycle)
self.txt.tag_add(color,"1.0+%sc"%m.start(),"1.0+%sc"%m.end())
try:
self.txt.tag_add("emph"+color,"1.0+%sc"%m.start("emph"),
"1.0+%sc"%m.end("emph"))
except:  # regexp has no "emph" group (no selection); skip emphasis
pass
def substitute(self,*args):
for color in colors:
self.txt.tag_remove(color,"1.0","end")
self.txt.tag_remove("emph"+color,"1.0","end")
self.rex = re.compile("") # default value in case of misformed regexp
self.rex = re.compile(self.fld.get("1.0","end")[:-1],re.MULTILINE)
try:
re.compile("(?P<emph>%s)" % self.fld.get(SEL_FIRST,
SEL_LAST))
self.rexSel = re.compile("%s(?P<emph>%s)%s" % (
self.fld.get("1.0",SEL_FIRST),
self.fld.get(SEL_FIRST,SEL_LAST),
self.fld.get(SEL_LAST,"end")[:-1],
),re.MULTILINE)
except:  # no selection or unbalanced sub-pattern; fall back to the full regexp
self.rexSel = self.rex
self.rexSel.sub(self.addTags,self.txt.get("1.0","end"))
class ReplaceZone(Zone):
def addTags(self,m):
s = sz.rex.sub(self.repl,m.group())
self.txt.delete("1.0+%sc"%(m.start()+self.diff),
"1.0+%sc"%(m.end()+self.diff))
self.txt.insert("1.0+%sc"%(m.start()+self.diff),s,
next(self.colorCycle))
self.diff += len(s) - (m.end() - m.start())
def substitute(self):
self.txt.delete("1.0","end")
self.txt.insert("1.0",sz.txt.get("1.0","end")[:-1])
self.diff = 0
self.repl = rex0.sub(r"\\g<\1>",self.fld.get("1.0","end")[:-1])
sz.rex.sub(self.addTags,sz.txt.get("1.0","end")[:-1])
def launchRefresh(_):
sz.fld.after_idle(sz.refresh)
rz.fld.after_idle(rz.refresh)
def app():
global root, sz, rz, rex0
root = Tk()
root.resizable(height=False,width=True)
root.title(windowTitle)
root.minsize(width=250,height=0)
sz = FindZone("find",initialFind,initialText)
sz.fld.bind("<Button-1>",launchRefresh)
sz.fld.bind("<ButtonRelease-1>",launchRefresh)
sz.fld.bind("<B1-Motion>",launchRefresh)
sz.rexSel = re.compile("")
rz = ReplaceZone("repl",initialRepl,"")
rex0 = re.compile(r"(?<!\\)\\([0-9]+)")
root.bind_all("<Key>",launchRefresh)
launchRefresh(None)
root.mainloop()
if __name__ == '__main__':
app()
__all__ = ['app']
|
prashanthr/wakatime
|
refs/heads/master
|
wakatime/packages/pygments_py2/pygments/lexers/actionscript.py
|
72
|
# -*- coding: utf-8 -*-
"""
pygments.lexers.actionscript
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Lexers for ActionScript and MXML.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, bygroups, using, this, words, default
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation
__all__ = ['ActionScriptLexer', 'ActionScript3Lexer', 'MxmlLexer']
class ActionScriptLexer(RegexLexer):
"""
For ActionScript source code.
.. versionadded:: 0.9
"""
name = 'ActionScript'
aliases = ['as', 'actionscript']
filenames = ['*.as']
mimetypes = ['application/x-actionscript', 'text/x-actionscript',
'text/actionscript']
flags = re.DOTALL
tokens = {
'root': [
(r'\s+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
(r'/(\\\\|\\/|[^/\n])*/[gim]*', String.Regex),
(r'[~^*!%&<>|+=:;,/?\\-]+', Operator),
(r'[{}\[\]();.]+', Punctuation),
(words((
'case', 'default', 'for', 'each', 'in', 'while', 'do', 'break',
'return', 'continue', 'if', 'else', 'throw', 'try', 'catch',
'var', 'with', 'new', 'typeof', 'arguments', 'instanceof', 'this',
'switch'), suffix=r'\b'),
Keyword),
(words((
'class', 'public', 'final', 'internal', 'native', 'override', 'private',
'protected', 'static', 'import', 'extends', 'implements', 'interface',
'intrinsic', 'return', 'super', 'dynamic', 'function', 'const', 'get',
'namespace', 'package', 'set'), suffix=r'\b'),
Keyword.Declaration),
(r'(true|false|null|NaN|Infinity|-Infinity|undefined|Void)\b',
Keyword.Constant),
(words((
'Accessibility', 'AccessibilityProperties', 'ActionScriptVersion',
'ActivityEvent', 'AntiAliasType', 'ApplicationDomain', 'AsBroadcaster', 'Array',
'AsyncErrorEvent', 'AVM1Movie', 'BevelFilter', 'Bitmap', 'BitmapData',
'BitmapDataChannel', 'BitmapFilter', 'BitmapFilterQuality', 'BitmapFilterType',
'BlendMode', 'BlurFilter', 'Boolean', 'ByteArray', 'Camera', 'Capabilities', 'CapsStyle',
'Class', 'Color', 'ColorMatrixFilter', 'ColorTransform', 'ContextMenu',
'ContextMenuBuiltInItems', 'ContextMenuEvent', 'ContextMenuItem',
'ConvultionFilter', 'CSMSettings', 'DataEvent', 'Date', 'DefinitionError',
'DeleteObjectSample', 'Dictionary', 'DisplacmentMapFilter', 'DisplayObject',
'DisplacmentMapFilterMode', 'DisplayObjectContainer', 'DropShadowFilter',
'Endian', 'EOFError', 'Error', 'ErrorEvent', 'EvalError', 'Event', 'EventDispatcher',
'EventPhase', 'ExternalInterface', 'FileFilter', 'FileReference',
'FileReferenceList', 'FocusDirection', 'FocusEvent', 'Font', 'FontStyle', 'FontType',
'FrameLabel', 'FullScreenEvent', 'Function', 'GlowFilter', 'GradientBevelFilter',
'GradientGlowFilter', 'GradientType', 'Graphics', 'GridFitType', 'HTTPStatusEvent',
'IBitmapDrawable', 'ID3Info', 'IDataInput', 'IDataOutput', 'IDynamicPropertyOutput',
'IDynamicPropertyWriter', 'IEventDispatcher', 'IExternalizable',
'IllegalOperationError', 'IME', 'IMEConversionMode', 'IMEEvent', 'int',
'InteractiveObject', 'InterpolationMethod', 'InvalidSWFError', 'InvokeEvent',
'IOError', 'IOErrorEvent', 'JointStyle', 'Key', 'Keyboard', 'KeyboardEvent', 'KeyLocation',
'LineScaleMode', 'Loader', 'LoaderContext', 'LoaderInfo', 'LoadVars', 'LocalConnection',
'Locale', 'Math', 'Matrix', 'MemoryError', 'Microphone', 'MorphShape', 'Mouse', 'MouseEvent',
'MovieClip', 'MovieClipLoader', 'Namespace', 'NetConnection', 'NetStatusEvent',
'NetStream', 'NewObjectSample', 'Number', 'Object', 'ObjectEncoding', 'PixelSnapping',
'Point', 'PrintJob', 'PrintJobOptions', 'PrintJobOrientation', 'ProgressEvent', 'Proxy',
'QName', 'RangeError', 'Rectangle', 'ReferenceError', 'RegExp', 'Responder', 'Sample',
'Scene', 'ScriptTimeoutError', 'Security', 'SecurityDomain', 'SecurityError',
'SecurityErrorEvent', 'SecurityPanel', 'Selection', 'Shape', 'SharedObject',
'SharedObjectFlushStatus', 'SimpleButton', 'Socket', 'Sound', 'SoundChannel',
'SoundLoaderContext', 'SoundMixer', 'SoundTransform', 'SpreadMethod', 'Sprite',
'StackFrame', 'StackOverflowError', 'Stage', 'StageAlign', 'StageDisplayState',
'StageQuality', 'StageScaleMode', 'StaticText', 'StatusEvent', 'String', 'StyleSheet',
'SWFVersion', 'SyncEvent', 'SyntaxError', 'System', 'TextColorType', 'TextField',
'TextFieldAutoSize', 'TextFieldType', 'TextFormat', 'TextFormatAlign',
'TextLineMetrics', 'TextRenderer', 'TextSnapshot', 'Timer', 'TimerEvent', 'Transform',
'TypeError', 'uint', 'URIError', 'URLLoader', 'URLLoaderDataFormat', 'URLRequest',
'URLRequestHeader', 'URLRequestMethod', 'URLStream', 'URLVariabeles', 'VerifyError',
'Video', 'XML', 'XMLDocument', 'XMLList', 'XMLNode', 'XMLNodeType', 'XMLSocket',
'XMLUI'), suffix=r'\b'),
Name.Builtin),
(words((
'decodeURI', 'decodeURIComponent', 'encodeURI', 'escape', 'eval', 'isFinite', 'isNaN',
'isXMLName', 'clearInterval', 'fscommand', 'getTimer', 'getURL', 'getVersion',
'parseFloat', 'parseInt', 'setInterval', 'trace', 'updateAfterEvent',
'unescape'), suffix=r'\b'),
Name.Function),
(r'[$a-zA-Z_]\w*', Name.Other),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-f]+', Number.Hex),
(r'[0-9]+', Number.Integer),
(r'"(\\\\|\\"|[^"])*"', String.Double),
(r"'(\\\\|\\'|[^'])*'", String.Single),
]
}
class ActionScript3Lexer(RegexLexer):
"""
For ActionScript 3 source code.
.. versionadded:: 0.11
"""
name = 'ActionScript 3'
aliases = ['as3', 'actionscript3']
filenames = ['*.as']
mimetypes = ['application/x-actionscript3', 'text/x-actionscript3',
'text/actionscript3']
identifier = r'[$a-zA-Z_]\w*'
typeidentifier = identifier + r'(?:\.<\w+>)?'
flags = re.DOTALL | re.MULTILINE
tokens = {
'root': [
(r'\s+', Text),
(r'(function\s+)(' + identifier + r')(\s*)(\()',
bygroups(Keyword.Declaration, Name.Function, Text, Operator),
'funcparams'),
(r'(var|const)(\s+)(' + identifier + r')(\s*)(:)(\s*)(' +
typeidentifier + r')',
bygroups(Keyword.Declaration, Text, Name, Text, Punctuation, Text,
Keyword.Type)),
(r'(import|package)(\s+)((?:' + identifier + r'|\.)+)(\s*)',
bygroups(Keyword, Text, Name.Namespace, Text)),
(r'(new)(\s+)(' + typeidentifier + r')(\s*)(\()',
bygroups(Keyword, Text, Keyword.Type, Text, Operator)),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
(r'/(\\\\|\\/|[^\n])*/[gisx]*', String.Regex),
(r'(\.)(' + identifier + r')', bygroups(Operator, Name.Attribute)),
(r'(case|default|for|each|in|while|do|break|return|continue|if|else|'
r'throw|try|catch|with|new|typeof|arguments|instanceof|this|'
r'switch|import|include|as|is)\b',
Keyword),
(r'(class|public|final|internal|native|override|private|protected|'
r'static|import|extends|implements|interface|intrinsic|return|super|'
r'dynamic|function|const|get|namespace|package|set)\b',
Keyword.Declaration),
(r'(true|false|null|NaN|Infinity|-Infinity|undefined|void)\b',
Keyword.Constant),
(r'(decodeURI|decodeURIComponent|encodeURI|escape|eval|isFinite|isNaN|'
r'isXMLName|clearInterval|fscommand|getTimer|getURL|getVersion|'
r'isFinite|parseFloat|parseInt|setInterval|trace|updateAfterEvent|'
r'unescape)\b', Name.Function),
(identifier, Name),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-f]+', Number.Hex),
(r'[0-9]+', Number.Integer),
(r'"(\\\\|\\"|[^"])*"', String.Double),
(r"'(\\\\|\\'|[^'])*'", String.Single),
(r'[~^*!%&<>|+=:;,/?\\{}\[\]().-]+', Operator),
],
'funcparams': [
(r'\s+', Text),
(r'(\s*)(\.\.\.)?(' + identifier + r')(\s*)(:)(\s*)(' +
typeidentifier + r'|\*)(\s*)',
bygroups(Text, Punctuation, Name, Text, Operator, Text,
Keyword.Type, Text), 'defval'),
(r'\)', Operator, 'type')
],
'type': [
(r'(\s*)(:)(\s*)(' + typeidentifier + r'|\*)',
bygroups(Text, Operator, Text, Keyword.Type), '#pop:2'),
(r'\s+', Text, '#pop:2'),
default('#pop:2')
],
'defval': [
(r'(=)(\s*)([^(),]+)(\s*)(,?)',
bygroups(Operator, Text, using(this), Text, Operator), '#pop'),
(r',', Operator, '#pop'),
default('#pop')
]
}
def analyse_text(text):
if re.match(r'\w+\s*:\s*\w', text):
return 0.3
return 0
class MxmlLexer(RegexLexer):
"""
For MXML markup.
Nested AS3 in <script> tags is highlighted by the appropriate lexer.
.. versionadded:: 1.1
"""
flags = re.MULTILINE | re.DOTALL
name = 'MXML'
aliases = ['mxml']
filenames = ['*.mxml']
mimetypes = ['text/xml', 'application/xml']
tokens = {
'root': [
('[^<&]+', Text),
(r'&\S*?;', Name.Entity),
(r'(\<\!\[CDATA\[)(.*?)(\]\]\>)',
bygroups(String, using(ActionScript3Lexer), String)),
('<!--', Comment, 'comment'),
(r'<\?.*?\?>', Comment.Preproc),
('<![^>]*>', Comment.Preproc),
(r'<\s*[\w:.-]+', Name.Tag, 'tag'),
(r'<\s*/\s*[\w:.-]+\s*>', Name.Tag),
],
'comment': [
('[^-]+', Comment),
('-->', Comment, '#pop'),
('-', Comment),
],
'tag': [
(r'\s+', Text),
(r'[\w.:-]+\s*=', Name.Attribute, 'attr'),
(r'/?\s*>', Name.Tag, '#pop'),
],
'attr': [
(r'\s+', Text),
('".*?"', String, '#pop'),
("'.*?'", String, '#pop'),
(r'[^\s>]+', String, '#pop'),
],
}
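# A minimal usage sketch for the lexers above (assuming the Pygments
# package and its TerminalFormatter are available; the sample source
# string is hypothetical):
#
#   from pygments import highlight
#   from pygments.formatters import TerminalFormatter
#
#   as3_code = 'function add(a:int, b:int):int { return a + b; }'
#   print(highlight(as3_code, ActionScript3Lexer(), TerminalFormatter()))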
|
jaggu303619/asylum-v2.0
|
refs/heads/master
|
openerp/tools/config.py
|
17
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
# Copyright (C) 2010-2012 OpenERP s.a. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import ConfigParser
import optparse
import os
import sys
import openerp
import openerp.conf
import openerp.loglevels as loglevels
import logging
import openerp.release as release
class MyOption (optparse.Option, object):
""" optparse Option with two additional attributes.
The list of command line options (getopt.Option) is used to create the
list of the configuration file options. When reading the file, and then
reading the command line arguments, we don't want optparse.parse results
to override the configuration file values. But if we provide default
values to optparse, optparse will return them and we can't know if they
were really provided by the user or not. A solution is to not use
optparse's default attribute, but use a custom one (that will be copied
to create the default values of the configuration file).
"""
def __init__(self, *opts, **attrs):
self.my_default = attrs.pop('my_default', None)
super(MyOption, self).__init__(*opts, **attrs)
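# A minimal sketch of the intended usage (the option name below is
# hypothetical): optparse's own default stays None, so parse_args() only
# reports values the user actually typed, while my_default seeds the
# configuration-file defaults gathered by configmanager below.
#
#   parser = optparse.OptionParser(option_class=MyOption)
#   parser.add_option("--demo-flag", dest="demo_flag", my_default=True)
#   opt, _ = parser.parse_args([])
#   assert opt.demo_flag is None  # not given on the command line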
#.apidoc title: Server Configuration Loader
def check_ssl():
try:
from OpenSSL import SSL
import socket
return hasattr(socket, 'ssl') and hasattr(SSL, "Connection")
except Exception:
# pyOpenSSL is missing or unusable; report SSL as unavailable.
return False
DEFAULT_LOG_HANDLER = [':INFO']
class configmanager(object):
def __init__(self, fname=None):
# Options not exposed on the command line. Command line options will be added
# from optparse's parser.
self.options = {
'admin_passwd': 'admin',
'csv_internal_sep': ',',
'login_message': False,
'publisher_warranty_url': 'http://services.openerp.com/publisher-warranty/',
'reportgz': False,
'root_path': None,
}
# Not exposed in the configuration file.
self.blacklist_for_save = set(
['publisher_warranty_url', 'load_language', 'root_path',
'init', 'save', 'config', 'update', 'stop_after_init'])
# dictionary mapping option destination (keys in self.options) to MyOptions.
self.casts = {}
self.misc = {}
self.config_file = fname
self.has_ssl = check_ssl()
self._LOGLEVELS = dict([(getattr(loglevels, 'LOG_%s' % x), getattr(logging, x)) for x in ('CRITICAL', 'ERROR', 'WARNING', 'INFO', 'TEST', 'DEBUG', 'NOTSET')])
version = "%s %s" % (release.description, release.version)
self.parser = parser = optparse.OptionParser(version=version, option_class=MyOption)
# Server startup config
group = optparse.OptionGroup(parser, "Common options")
group.add_option("-c", "--config", dest="config", help="specify alternate config file")
group.add_option("-s", "--save", action="store_true", dest="save", default=False,
help="save configuration to ~/.openerp_serverrc")
group.add_option("-i", "--init", dest="init", help="install one or more modules (comma-separated list, use \"all\" for all modules), requires -d")
group.add_option("-u", "--update", dest="update",
help="update one or more modules (comma-separated list, use \"all\" for all modules). Requires -d.")
group.add_option("--without-demo", dest="without_demo",
help="disable loading demo data for modules to be installed (comma-separated, use \"all\" for all modules). Requires -d and -i. Default is %default",
my_default=False)
group.add_option("-P", "--import-partial", dest="import_partial", my_default='',
help="Use this for big data importation, if it crashes you will be able to continue at the current state. Provide a filename to store intermediate importation states.")
group.add_option("--pidfile", dest="pidfile", help="file where the server pid will be stored")
group.add_option("--addons-path", dest="addons_path",
help="specify additional addons paths (separated by commas).",
action="callback", callback=self._check_addons_path, nargs=1, type="string")
group.add_option("--load", dest="server_wide_modules", help="Comma-separated list of server-wide modules default=web")
parser.add_option_group(group)
# XML-RPC / HTTP
group = optparse.OptionGroup(parser, "XML-RPC Configuration")
group.add_option("--xmlrpc-interface", dest="xmlrpc_interface", my_default='',
help="Specify the TCP IP address for the XML-RPC protocol. The empty string binds to all interfaces.")
group.add_option("--xmlrpc-port", dest="xmlrpc_port", my_default=8069,
help="specify the TCP port for the XML-RPC protocol", type="int")
group.add_option("--no-xmlrpc", dest="xmlrpc", action="store_false", my_default=True,
help="disable the XML-RPC protocol")
group.add_option("--proxy-mode", dest="proxy_mode", action="store_true", my_default=False,
help="Enable correct behavior when behind a reverse proxy")
parser.add_option_group(group)
# XML-RPC / HTTPS
title = "XML-RPC Secure Configuration"
if not self.has_ssl:
title += " (disabled as ssl is unavailable)"
group = optparse.OptionGroup(parser, title)
group.add_option("--xmlrpcs-interface", dest="xmlrpcs_interface", my_default='',
help="Specify the TCP IP address for the XML-RPC Secure protocol. The empty string binds to all interfaces.")
group.add_option("--xmlrpcs-port", dest="xmlrpcs_port", my_default=8071,
help="specify the TCP port for the XML-RPC Secure protocol", type="int")
group.add_option("--no-xmlrpcs", dest="xmlrpcs", action="store_false", my_default=True,
help="disable the XML-RPC Secure protocol")
group.add_option("--cert-file", dest="secure_cert_file", my_default='server.cert',
help="specify the certificate file for the SSL connection")
group.add_option("--pkey-file", dest="secure_pkey_file", my_default='server.pkey',
help="specify the private key file for the SSL connection")
parser.add_option_group(group)
# NET-RPC
group = optparse.OptionGroup(parser, "NET-RPC Configuration")
group.add_option("--netrpc-interface", dest="netrpc_interface", my_default='',
help="specify the TCP IP address for the NETRPC protocol")
group.add_option("--netrpc-port", dest="netrpc_port", my_default=8070,
help="specify the TCP port for the NETRPC protocol", type="int")
# Still needed for a few days for runbot and saas
group.add_option("--no-netrpc", dest="netrpc", action="store_false", my_default=False, help="disable the NETRPC protocol")
group.add_option("--netrpc", dest="netrpc", action="store_true", my_default=False, help="enable the NETRPC protocol")
parser.add_option_group(group)
# WEB
# TODO move to web addons after MetaOption merge
group = optparse.OptionGroup(parser, "Web interface Configuration")
group.add_option("--db-filter", dest="dbfilter", my_default='.*',
help="Filter listed database", metavar="REGEXP")
parser.add_option_group(group)
# Static HTTP
group = optparse.OptionGroup(parser, "Static HTTP service")
group.add_option("--static-http-enable", dest="static_http_enable", action="store_true", my_default=False, help="enable static HTTP service for serving plain HTML files")
group.add_option("--static-http-document-root", dest="static_http_document_root", help="specify the directory containing your static HTML files (e.g '/var/www/')")
group.add_option("--static-http-url-prefix", dest="static_http_url_prefix", help="specify the URL root prefix where you want web browsers to access your static HTML files (e.g '/')")
parser.add_option_group(group)
# Testing Group
group = optparse.OptionGroup(parser, "Testing Configuration")
group.add_option("--test-file", dest="test_file", my_default=False,
help="Launch a YML test file.")
group.add_option("--test-report-directory", dest="test_report_directory", my_default=False,
help="If set, will save sample of all reports in this directory.")
group.add_option("--test-enable", action="store_true", dest="test_enable",
my_default=False, help="Enable YAML and unit tests.")
group.add_option("--test-commit", action="store_true", dest="test_commit",
my_default=False, help="Commit database changes performed by YAML or XML tests.")
parser.add_option_group(group)
# Logging Group
group = optparse.OptionGroup(parser, "Logging Configuration")
group.add_option("--logfile", dest="logfile", help="file where the server log will be stored")
group.add_option("--no-logrotate", dest="logrotate", action="store_false", my_default=True, help="do not rotate the logfile")
group.add_option("--syslog", action="store_true", dest="syslog", my_default=False, help="Send the log to the syslog server")
group.add_option('--log-handler', action="append", default=DEFAULT_LOG_HANDLER, my_default=DEFAULT_LOG_HANDLER, metavar="PREFIX:LEVEL", help='setup a handler at LEVEL for a given PREFIX. An empty PREFIX indicates the root logger. This option can be repeated. Example: "openerp.orm:DEBUG" or "werkzeug:CRITICAL" (default: ":INFO")')
group.add_option('--log-request', action="append_const", dest="log_handler", const="openerp.netsvc.rpc.request:DEBUG", help='shortcut for --log-handler=openerp.netsvc.rpc.request:DEBUG')
group.add_option('--log-response', action="append_const", dest="log_handler", const="openerp.netsvc.rpc.response:DEBUG", help='shortcut for --log-handler=openerp.netsvc.rpc.response:DEBUG')
group.add_option('--log-web', action="append_const", dest="log_handler", const="openerp.addons.web.http:DEBUG", help='shortcut for --log-handler=openerp.addons.web.http:DEBUG')
group.add_option('--log-sql', action="append_const", dest="log_handler", const="openerp.sql_db:DEBUG", help='shortcut for --log-handler=openerp.sql_db:DEBUG')
# For backward-compatibility, map the old log levels to something
# quite close.
levels = ['info', 'debug_rpc', 'warn', 'test', 'critical',
'debug_sql', 'error', 'debug', 'debug_rpc_answer', 'notset']
group.add_option('--log-level', dest='log_level', type='choice', choices=levels,
my_default='info', help='specify the level of the logging. Accepted values: ' + str(levels))
parser.add_option_group(group)
# SMTP Group
group = optparse.OptionGroup(parser, "SMTP Configuration")
group.add_option('--email-from', dest='email_from', my_default=False,
help='specify the SMTP email address for sending email')
group.add_option('--smtp', dest='smtp_server', my_default='localhost',
help='specify the SMTP server for sending email')
group.add_option('--smtp-port', dest='smtp_port', my_default=25,
help='specify the SMTP port', type="int")
group.add_option('--smtp-ssl', dest='smtp_ssl', action='store_true', my_default=False,
help='if passed, SMTP connections will be encrypted with SSL (STARTTLS)')
group.add_option('--smtp-user', dest='smtp_user', my_default=False,
help='specify the SMTP username for sending email')
group.add_option('--smtp-password', dest='smtp_password', my_default=False,
help='specify the SMTP password for sending email')
parser.add_option_group(group)
group = optparse.OptionGroup(parser, "Database related options")
group.add_option("-d", "--database", dest="db_name", my_default=False,
help="specify the database name")
group.add_option("-r", "--db_user", dest="db_user", my_default=False,
help="specify the database user name")
group.add_option("-w", "--db_password", dest="db_password", my_default=False,
help="specify the database password")
group.add_option("--pg_path", dest="pg_path", help="specify the pg executable path")
group.add_option("--db_host", dest="db_host", my_default=False,
help="specify the database host")
group.add_option("--db_port", dest="db_port", my_default=False,
help="specify the database port", type="int")
group.add_option("--db_maxconn", dest="db_maxconn", type='int', my_default=64,
help="specify the the maximum number of physical connections to posgresql")
group.add_option("--db-template", dest="db_template", my_default="template1",
help="specify a custom database template to create a new database")
parser.add_option_group(group)
group = optparse.OptionGroup(parser, "Internationalisation options",
"Use these options to translate OpenERP to another language."
"See i18n section of the user manual. Option '-d' is mandatory."
"Option '-l' is mandatory in case of importation"
)
group.add_option('--load-language', dest="load_language",
help="specifies the languages for the translations you want to be loaded")
group.add_option('-l', "--language", dest="language",
help="specify the language of the translation file. Use it with --i18n-export or --i18n-import")
group.add_option("--i18n-export", dest="translate_out",
help="export all sentences to be translated to a CSV file, a PO file or a TGZ archive and exit")
group.add_option("--i18n-import", dest="translate_in",
help="import a CSV or a PO file with translations and exit. The '-l' option is required.")
group.add_option("--i18n-overwrite", dest="overwrite_existing_translations", action="store_true", my_default=False,
help="overwrites existing translation terms on updating a module or importing a CSV or a PO file.")
group.add_option("--modules", dest="translate_modules",
help="specify modules to export. Use in combination with --i18n-export")
parser.add_option_group(group)
security = optparse.OptionGroup(parser, 'Security-related options')
security.add_option('--no-database-list', action="store_false", dest='list_db', my_default=True,
help="disable the ability to return the list of databases")
parser.add_option_group(security)
# Advanced options
group = optparse.OptionGroup(parser, "Advanced options")
group.add_option('--debug', dest='debug_mode', action='store_true', my_default=False, help='enable debug mode')
group.add_option("--stop-after-init", action="store_true", dest="stop_after_init", my_default=False,
help="stop the server after its initialization")
group.add_option("-t", "--timezone", dest="timezone", my_default=False,
help="specify reference timezone for the server (e.g. Europe/Brussels")
group.add_option("--osv-memory-count-limit", dest="osv_memory_count_limit", my_default=False,
help="Force a limit on the maximum number of records kept in the virtual "
"osv_memory tables. The default is False, which means no count-based limit.",
type="int")
group.add_option("--osv-memory-age-limit", dest="osv_memory_age_limit", my_default=1.0,
help="Force a limit on the maximum age of records kept in the virtual "
"osv_memory tables. This is a decimal value expressed in hours, "
"and the default is 1 hour.",
type="float")
group.add_option("--max-cron-threads", dest="max_cron_threads", my_default=2,
help="Maximum number of threads processing concurrently cron jobs (default 2).",
type="int")
group.add_option("--unaccent", dest="unaccent", my_default=False, action="store_true",
help="Use the unaccent function provided by the database when available.")
parser.add_option_group(group)
group = optparse.OptionGroup(parser, "Multiprocessing options")
# TODO sensible default for the three following limits.
group.add_option("--workers", dest="workers", my_default=0,
help="Specify the number of workers, 0 disable prefork mode.",
type="int")
group.add_option("--limit-memory-soft", dest="limit_memory_soft", my_default=640 * 1024 * 1024,
help="Maximum allowed virtual memory per worker, when reached the worker be reset after the current request (default 671088640 aka 640MB).",
type="int")
group.add_option("--limit-memory-hard", dest="limit_memory_hard", my_default=768 * 1024 * 1024,
help="Maximum allowed virtual memory per worker, when reached, any memory allocation will fail (default 805306368 aka 768MB).",
type="int")
group.add_option("--limit-time-cpu", dest="limit_time_cpu", my_default=60,
help="Maximum allowed CPU time per request (default 60).",
type="int")
group.add_option("--limit-time-real", dest="limit_time_real", my_default=120,
help="Maximum allowed Real time per request (default 120).",
type="int")
group.add_option("--limit-request", dest="limit_request", my_default=8192,
help="Maximum number of request to be processed per worker (default 8192).",
type="int")
parser.add_option_group(group)
# Copy all optparse options (i.e. MyOption) into self.options.
for group in parser.option_groups:
for option in group.option_list:
if option.dest not in self.options:
self.options[option.dest] = option.my_default
self.casts[option.dest] = option
self.parse_config(None, False)
def parse_config(self, args=None, complete=True):
""" Parse the configuration file (if any) and the command-line
arguments.
This method initializes openerp.tools.config and openerp.conf (the
former should be removed in the future) with library-wide
configuration values.
This method must be called before proper usage of this library can be
made.
Typical usage of this method:
openerp.tools.config.parse_config(sys.argv[1:])
:param complete: this is a hack used in __init__(), leave it to True.
"""
if args is None:
args = []
opt, args = self.parser.parse_args(args)
def die(cond, msg):
if cond:
self.parser.error(msg)
# Ensures no illegitimate argument is silently discarded (avoids insidious "hyphen to dash" problem)
die(args, "unrecognized parameters: '%s'" % " ".join(args))
die(bool(opt.syslog) and bool(opt.logfile),
"the syslog and logfile options are exclusive")
die(opt.translate_in and (not opt.language or not opt.db_name),
"the i18n-import option cannot be used without the language (-l) and the database (-d) options")
die(opt.overwrite_existing_translations and not (opt.translate_in or opt.update),
"the i18n-overwrite option cannot be used without the i18n-import option or without the update option")
die(opt.translate_out and (not opt.db_name),
"the i18n-export option cannot be used without the database (-d) option")
# Check if the config file exists (-c used, but not -s)
die(not opt.save and opt.config and not os.access(opt.config, os.R_OK),
"The config file '%s' selected with -c/--config doesn't exist or is not readable, "\
"use -s/--save if you want to generate it"% opt.config)
# place/search the config file on Win32 near the server installation
# (../etc from the server)
# if the server is run by an unprivileged user, they have to specify the location of a config file where they have write access,
# otherwise they won't be able to save the configuration, or even to start the server...
if os.name == 'nt':
rcfilepath = os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), 'openerp-server.conf')
else:
rcfilepath = os.path.expanduser('~/.openerp_serverrc')
self.rcfile = os.path.abspath(
self.config_file or opt.config \
or os.environ.get('OPENERP_SERVER') or rcfilepath)
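# Precedence for the rc file, highest first: the fname passed to
# configmanager(), then -c/--config, then the OPENERP_SERVER environment
# variable, then the per-platform default path computed above.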
self.load()
# Check whether we want to log to a file; if not, output goes to stdout
if self.options['logfile'] in ('None', 'False'):
self.options['logfile'] = False
# the same for the pidfile
if self.options['pidfile'] in ('None', 'False'):
self.options['pidfile'] = False
# if defined, don't take the config file value, even if the defined value is None
keys = ['xmlrpc_interface', 'xmlrpc_port', 'db_name', 'db_user', 'db_password', 'db_host',
'db_port', 'db_template', 'logfile', 'pidfile', 'smtp_port',
'email_from', 'smtp_server', 'smtp_user', 'smtp_password',
'netrpc_interface', 'netrpc_port', 'db_maxconn', 'import_partial', 'addons_path',
'netrpc', 'xmlrpc', 'syslog', 'without_demo', 'timezone',
'xmlrpcs_interface', 'xmlrpcs_port', 'xmlrpcs',
'static_http_enable', 'static_http_document_root', 'static_http_url_prefix',
'secure_cert_file', 'secure_pkey_file', 'dbfilter', 'log_handler', 'log_level'
]
for arg in keys:
# Copy the command-line argument (except for the special case of log_handler:
# action=append requires a real default, so we cannot use the my_default workaround)
if getattr(opt, arg) and getattr(opt, arg) != DEFAULT_LOG_HANDLER:
self.options[arg] = getattr(opt, arg)
# ... or keep, but cast, the config file value.
elif isinstance(self.options[arg], basestring) and self.casts[arg].type in optparse.Option.TYPE_CHECKER:
self.options[arg] = optparse.Option.TYPE_CHECKER[self.casts[arg].type](self.casts[arg], arg, self.options[arg])
if isinstance(self.options['log_handler'], basestring):
self.options['log_handler'] = self.options['log_handler'].split(',')
# if defined but None take the configfile value
keys = [
'language', 'translate_out', 'translate_in', 'overwrite_existing_translations',
'debug_mode', 'smtp_ssl', 'load_language',
'stop_after_init', 'logrotate', 'without_demo', 'netrpc', 'xmlrpc', 'syslog',
'list_db', 'xmlrpcs', 'proxy_mode',
'test_file', 'test_enable', 'test_commit', 'test_report_directory',
'osv_memory_count_limit', 'osv_memory_age_limit', 'max_cron_threads', 'unaccent',
'workers', 'limit_memory_hard', 'limit_memory_soft', 'limit_time_cpu', 'limit_time_real', 'limit_request'
]
for arg in keys:
# Copy the command-line argument...
if getattr(opt, arg) is not None:
self.options[arg] = getattr(opt, arg)
# ... or keep, but cast, the config file value.
elif isinstance(self.options[arg], basestring) and self.casts[arg].type in optparse.Option.TYPE_CHECKER:
self.options[arg] = optparse.Option.TYPE_CHECKER[self.casts[arg].type](self.casts[arg], arg, self.options[arg])
self.options['root_path'] = os.path.abspath(os.path.expanduser(os.path.expandvars(os.path.dirname(openerp.__file__))))
if not self.options['addons_path'] or self.options['addons_path']=='None':
self.options['addons_path'] = os.path.join(self.options['root_path'], 'addons')
else:
self.options['addons_path'] = ",".join(
os.path.abspath(os.path.expanduser(os.path.expandvars(x)))
for x in self.options['addons_path'].split(','))
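# For example (hypothetical paths), an addons_path value of
# "~/custom,$HOME/extra" is normalized here to a comma-separated list of
# absolute paths such as "/home/user/custom,/home/user/extra".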
self.options['init'] = opt.init and dict.fromkeys(opt.init.split(','), 1) or {}
self.options["demo"] = not opt.without_demo and self.options['init'] or {}
self.options['update'] = opt.update and dict.fromkeys(opt.update.split(','), 1) or {}
self.options['translate_modules'] = opt.translate_modules and map(lambda m: m.strip(), opt.translate_modules.split(',')) or ['all']
self.options['translate_modules'].sort()
# TODO checking the type of the parameters should be done for every
# parameters, not just the timezone.
# The call to get_server_timezone() sets the timezone; this should
# probably be done here.
if self.options['timezone']:
# Prevent the timezone from being True. (The config file parsing changes
# the string 'True' to the boolean value True. It would probably be
# better to remove that conversion.)
die(not isinstance(self.options['timezone'], basestring),
"Invalid timezone value in configuration or environment: %r.\n"
"Please fix this in your configuration." %(self.options['timezone']))
# If an explicit TZ was provided in the config, make sure it is known
try:
import pytz
pytz.timezone(self.options['timezone'])
except pytz.UnknownTimeZoneError:
die(True, "The specified timezone (%s) is invalid" % self.options['timezone'])
except Exception:
# If pytz is missing, don't check the provided TZ, it will be ignored anyway.
pass
if opt.pg_path:
self.options['pg_path'] = opt.pg_path
if self.options.get('language', False):
if len(self.options['language']) > 5:
raise Exception('ERROR: The language code must be at most 5 characters, e.g.: -lfr_BE')
if not self.options['db_user']:
try:
import getpass
self.options['db_user'] = getpass.getuser()
except Exception:
self.options['db_user'] = None
die(not self.options['db_user'], 'ERROR: No user specified for the connection to the database')
if self.options['db_password']:
if sys.platform == 'win32' and not self.options['db_host']:
self.options['db_host'] = 'localhost'
#if self.options['db_host']:
# self._generate_pgpassfile()
if opt.save:
self.save()
openerp.conf.addons_paths = self.options['addons_path'].split(',')
if opt.server_wide_modules:
openerp.conf.server_wide_modules = map(lambda m: m.strip(), opt.server_wide_modules.split(','))
else:
openerp.conf.server_wide_modules = ['web','web_kanban']
if complete:
openerp.modules.module.initialize_sys_path()
openerp.modules.loading.open_openerp_namespace()
def _generate_pgpassfile(self):
"""
Generate the pgpass file with the parameters from the command line (db_host, db_user,
db_password)
Used because pg_dump and pg_restore cannot accept the password on the command line.
"""
is_win32 = sys.platform == 'win32'
if is_win32:
filename = os.path.join(os.environ['APPDATA'], 'pgpass.conf')
else:
filename = os.path.join(os.environ['HOME'], '.pgpass')
text_to_add = "%(db_host)s:*:*:%(db_user)s:%(db_password)s" % self.options
if os.path.exists(filename):
content = [x.strip() for x in file(filename, 'r').readlines()]
if text_to_add in content:
return
fp = file(filename, 'a+')
fp.write(text_to_add + "\n")
fp.close()
if is_win32:
try:
import _winreg
except ImportError:
_winreg = None
if _winreg is not None:
x = _winreg.ConnectRegistry(None, _winreg.HKEY_LOCAL_MACHINE)
y = _winreg.OpenKey(x, r"SYSTEM\CurrentControlSet\Control\Session Manager\Environment", 0, _winreg.KEY_ALL_ACCESS)
_winreg.SetValueEx(y, "PGPASSFILE", 0, _winreg.REG_EXPAND_SZ, filename)
_winreg.CloseKey(y)
_winreg.CloseKey(x)
else:
import stat
os.chmod(filename, stat.S_IRUSR + stat.S_IWUSR)
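# For example (hypothetical credentials), running with
# --db_host=db.example.com -r odoo -w secret appends this line to
# ~/.pgpass (or %APPDATA%\pgpass.conf on Windows):
#   db.example.com:*:*:odoo:secret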
def _is_addons_path(self, path):
for f in os.listdir(path):
modpath = os.path.join(path, f)
if os.path.isdir(modpath):
def hasfile(filename):
return os.path.isfile(os.path.join(modpath, filename))
if hasfile('__init__.py') and (hasfile('__openerp__.py') or hasfile('__terp__.py')):
return True
return False
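# For example, /path/to/addons (hypothetical) is accepted as soon as it
# contains at least one module directory such as:
#   /path/to/addons/my_module/__init__.py
#   /path/to/addons/my_module/__openerp__.py  (or the legacy __terp__.py)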
def _check_addons_path(self, option, opt, value, parser):
ad_paths = []
for path in value.split(','):
path = path.strip()
res = os.path.abspath(os.path.expanduser(path))
if not os.path.isdir(res):
raise optparse.OptionValueError("option %s: no such directory: %r" % (opt, path))
if not self._is_addons_path(res):
raise optparse.OptionValueError("option %s: The addons-path %r does not seem to a be a valid Addons Directory!" % (opt, path))
ad_paths.append(res)
setattr(parser.values, option.dest, ",".join(ad_paths))
def load(self):
p = ConfigParser.ConfigParser()
try:
p.read([self.rcfile])
for (name,value) in p.items('options'):
if value=='True' or value=='true':
value = True
if value=='False' or value=='false':
value = False
self.options[name] = value
# parse the other sections as well
for sec in p.sections():
if sec == 'options':
continue
if sec not in self.misc:
self.misc[sec]= {}
for (name, value) in p.items(sec):
if value=='True' or value=='true':
value = True
if value=='False' or value=='false':
value = False
self.misc[sec][name] = value
except IOError:
pass
except ConfigParser.NoSectionError:
pass
def save(self):
p = ConfigParser.ConfigParser()
loglevelnames = dict(zip(self._LOGLEVELS.values(), self._LOGLEVELS.keys()))
p.add_section('options')
for opt in sorted(self.options.keys()):
if opt in ('version', 'language', 'translate_out', 'translate_in', 'overwrite_existing_translations', 'init', 'update'):
continue
if opt in self.blacklist_for_save:
continue
if opt in ('log_level',):
p.set('options', opt, loglevelnames.get(self.options[opt], self.options[opt]))
else:
p.set('options', opt, self.options[opt])
for sec in sorted(self.misc.keys()):
p.add_section(sec)
for opt in sorted(self.misc[sec].keys()):
p.set(sec,opt,self.misc[sec][opt])
# try to create the directories and write the file
try:
rc_exists = os.path.exists(self.rcfile)
if not rc_exists and not os.path.exists(os.path.dirname(self.rcfile)):
os.makedirs(os.path.dirname(self.rcfile))
try:
p.write(file(self.rcfile, 'w'))
if not rc_exists:
os.chmod(self.rcfile, 0600)
except IOError:
sys.stderr.write("ERROR: couldn't write the config file\n")
except OSError:
# what to do if impossible?
sys.stderr.write("ERROR: couldn't create the config directory\n")
def get(self, key, default=None):
return self.options.get(key, default)
def get_misc(self, sect, key, default=None):
return self.misc.get(sect,{}).get(key, default)
def __setitem__(self, key, value):
self.options[key] = value
if key in self.options and isinstance(self.options[key], basestring) and \
key in self.casts and self.casts[key].type in optparse.Option.TYPE_CHECKER:
self.options[key] = optparse.Option.TYPE_CHECKER[self.casts[key].type](self.casts[key], key, self.options[key])
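# A small illustration of the cast above (hypothetical values): because
# 'xmlrpc_port' was declared with type="int", assigning a string is
# coerced back through optparse's type checker:
#   config['xmlrpc_port'] = '8069'
#   assert config['xmlrpc_port'] == 8069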
def __getitem__(self, key):
return self.options[key]
config = configmanager()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
avadacatavra/servo
|
refs/heads/master
|
tests/wpt/web-platform-tests/tools/third_party/funcsigs/tests/test_inspect.py
|
40
|
# Copyright 2001-2013 Python Software Foundation; All Rights Reserved
from __future__ import absolute_import, division, print_function
import collections
import sys
try:
import unittest2 as unittest
except ImportError:
import unittest
import funcsigs as inspect
class TestSignatureObject(unittest.TestCase):
@staticmethod
def signature(func):
sig = inspect.signature(func)
return (tuple((param.name,
(Ellipsis if param.default is param.empty else param.default),
(Ellipsis if param.annotation is param.empty
else param.annotation),
str(param.kind).lower())
for param in sig.parameters.values()),
(Ellipsis if sig.return_annotation is sig.empty
else sig.return_annotation))
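# For example (Python 3 syntax), given
#     def f(a, b=1) -> int: ...
# this helper returns:
#     ((('a', Ellipsis, Ellipsis, 'positional_or_keyword'),
#       ('b', 1, Ellipsis, 'positional_or_keyword')),
#      int)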
def __init__(self, *args, **kwargs):
unittest.TestCase.__init__(self, *args, **kwargs)
if not hasattr(self, 'assertRaisesRegex'):
self.assertRaisesRegex = self.assertRaisesRegexp
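# Tests below that need Python-3-only syntax (annotations, keyword-only
# arguments) are wrapped in exec() strings so that this module still
# parses under Python 2, where such syntax is a SyntaxError.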
if sys.version_info[0] > 2:
exec("""
def test_signature_object(self):
S = inspect.Signature
P = inspect.Parameter
self.assertEqual(str(S()), '()')
def test(po, pk, *args, ko, **kwargs):
pass
sig = inspect.signature(test)
po = sig.parameters['po'].replace(kind=P.POSITIONAL_ONLY)
pk = sig.parameters['pk']
args = sig.parameters['args']
ko = sig.parameters['ko']
kwargs = sig.parameters['kwargs']
S((po, pk, args, ko, kwargs))
with self.assertRaisesRegex(ValueError, 'wrong parameter order'):
S((pk, po, args, ko, kwargs))
with self.assertRaisesRegex(ValueError, 'wrong parameter order'):
S((po, args, pk, ko, kwargs))
with self.assertRaisesRegex(ValueError, 'wrong parameter order'):
S((args, po, pk, ko, kwargs))
with self.assertRaisesRegex(ValueError, 'wrong parameter order'):
S((po, pk, args, kwargs, ko))
kwargs2 = kwargs.replace(name='args')
with self.assertRaisesRegex(ValueError, 'duplicate parameter name'):
S((po, pk, args, kwargs2, ko))
""")
def test_signature_immutability(self):
def test(a):
pass
sig = inspect.signature(test)
with self.assertRaises(AttributeError):
sig.foo = 'bar'
# Python < 3.3 does not have the MappingProxyType class
if sys.version_info[:2] < (3, 3):
return
with self.assertRaises(TypeError):
sig.parameters['a'] = None
def test_signature_on_noarg(self):
def test():
pass
self.assertEqual(self.signature(test), ((), Ellipsis))
if sys.version_info[0] > 2:
exec("""
def test_signature_on_wargs(self):
def test(a, b:'foo') -> 123:
pass
self.assertEqual(self.signature(test),
((('a', Ellipsis, Ellipsis, "positional_or_keyword"),
('b', Ellipsis, 'foo', "positional_or_keyword")),
123))
""")
if sys.version_info[0] > 2:
exec("""
def test_signature_on_wkwonly(self):
def test(*, a:float, b:str) -> int:
pass
self.assertEqual(self.signature(test),
((('a', Ellipsis, float, "keyword_only"),
('b', Ellipsis, str, "keyword_only")),
int))
""")
if sys.version_info[0] > 2:
exec("""
def test_signature_on_complex_args(self):
def test(a, b:'foo'=10, *args:'bar', spam:'baz', ham=123, **kwargs:int):
pass
self.assertEqual(self.signature(test),
((('a', Ellipsis, Ellipsis, "positional_or_keyword"),
('b', 10, 'foo', "positional_or_keyword"),
('args', Ellipsis, 'bar', "var_positional"),
('spam', Ellipsis, 'baz', "keyword_only"),
('ham', 123, Ellipsis, "keyword_only"),
('kwargs', Ellipsis, int, "var_keyword")),
Ellipsis))
""")
def test_signature_on_builtin_function(self):
with self.assertRaisesRegex(ValueError, 'not supported by signature'):
inspect.signature(type)
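# On PyPy, signature() does support some of these wrapper types, so the
# tests below raise the expected ValueError themselves to keep the
# assertRaisesRegex assertions satisfied on that interpreter.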
with self.assertRaisesRegex(ValueError, 'not supported by signature'):
# support for 'wrapper_descriptor'
inspect.signature(type.__call__)
if hasattr(sys, 'pypy_version_info'):
raise ValueError('not supported by signature')
with self.assertRaisesRegex(ValueError, 'not supported by signature'):
# support for 'method-wrapper'
inspect.signature(min.__call__)
if hasattr(sys, 'pypy_version_info'):
raise ValueError('not supported by signature')
with self.assertRaisesRegex(ValueError,
'no signature found for builtin function'):
# support for builtin functions
inspect.signature(min)
def test_signature_on_non_function(self):
with self.assertRaisesRegex(TypeError, 'is not a callable object'):
inspect.signature(42)
with self.assertRaisesRegex(TypeError, 'is not a Python function'):
inspect.Signature.from_function(42)
if sys.version_info[0] > 2:
exec("""
def test_signature_on_method(self):
class Test:
def foo(self, arg1, arg2=1) -> int:
pass
meth = Test().foo
self.assertEqual(self.signature(meth),
((('arg1', Ellipsis, Ellipsis, "positional_or_keyword"),
('arg2', 1, Ellipsis, "positional_or_keyword")),
int))
""")
if sys.version_info[0] > 2:
exec("""
def test_signature_on_classmethod(self):
class Test:
@classmethod
def foo(cls, arg1, *, arg2=1):
pass
meth = Test().foo
self.assertEqual(self.signature(meth),
((('arg1', Ellipsis, Ellipsis, "positional_or_keyword"),
('arg2', 1, Ellipsis, "keyword_only")),
Ellipsis))
meth = Test.foo
self.assertEqual(self.signature(meth),
((('arg1', Ellipsis, Ellipsis, "positional_or_keyword"),
('arg2', 1, Ellipsis, "keyword_only")),
Ellipsis))
""")
if sys.version_info[0] > 2:
exec("""
def test_signature_on_staticmethod(self):
class Test:
@staticmethod
def foo(cls, *, arg):
pass
meth = Test().foo
self.assertEqual(self.signature(meth),
((('cls', Ellipsis, Ellipsis, "positional_or_keyword"),
('arg', Ellipsis, Ellipsis, "keyword_only")),
Ellipsis))
meth = Test.foo
self.assertEqual(self.signature(meth),
((('cls', Ellipsis, Ellipsis, "positional_or_keyword"),
('arg', Ellipsis, Ellipsis, "keyword_only")),
Ellipsis))
""")
if sys.version_info[0] > 2:
exec("""
def test_signature_on_partial(self):
from functools import partial
def test():
pass
self.assertEqual(self.signature(partial(test)), ((), Ellipsis))
with self.assertRaisesRegex(ValueError, "has incorrect arguments"):
inspect.signature(partial(test, 1))
with self.assertRaisesRegex(ValueError, "has incorrect arguments"):
inspect.signature(partial(test, a=1))
def test(a, b, *, c, d):
pass
self.assertEqual(self.signature(partial(test)),
((('a', Ellipsis, Ellipsis, "positional_or_keyword"),
('b', Ellipsis, Ellipsis, "positional_or_keyword"),
('c', Ellipsis, Ellipsis, "keyword_only"),
('d', Ellipsis, Ellipsis, "keyword_only")),
Ellipsis))
self.assertEqual(self.signature(partial(test, 1)),
((('b', Ellipsis, Ellipsis, "positional_or_keyword"),
('c', Ellipsis, Ellipsis, "keyword_only"),
('d', Ellipsis, Ellipsis, "keyword_only")),
Ellipsis))
self.assertEqual(self.signature(partial(test, 1, c=2)),
((('b', Ellipsis, Ellipsis, "positional_or_keyword"),
('c', 2, Ellipsis, "keyword_only"),
('d', Ellipsis, Ellipsis, "keyword_only")),
Ellipsis))
self.assertEqual(self.signature(partial(test, b=1, c=2)),
((('a', Ellipsis, Ellipsis, "positional_or_keyword"),
('b', 1, Ellipsis, "positional_or_keyword"),
('c', 2, Ellipsis, "keyword_only"),
('d', Ellipsis, Ellipsis, "keyword_only")),
Ellipsis))
self.assertEqual(self.signature(partial(test, 0, b=1, c=2)),
((('b', 1, Ellipsis, "positional_or_keyword"),
('c', 2, Ellipsis, "keyword_only"),
('d', Ellipsis, Ellipsis, "keyword_only"),),
Ellipsis))
def test(a, *args, b, **kwargs):
pass
self.assertEqual(self.signature(partial(test, 1)),
((('args', Ellipsis, Ellipsis, "var_positional"),
('b', Ellipsis, Ellipsis, "keyword_only"),
('kwargs', Ellipsis, Ellipsis, "var_keyword")),
Ellipsis))
self.assertEqual(self.signature(partial(test, 1, 2, 3)),
((('args', Ellipsis, Ellipsis, "var_positional"),
('b', Ellipsis, Ellipsis, "keyword_only"),
('kwargs', Ellipsis, Ellipsis, "var_keyword")),
Ellipsis))
self.assertEqual(self.signature(partial(test, 1, 2, 3, test=True)),
((('args', Ellipsis, Ellipsis, "var_positional"),
('b', Ellipsis, Ellipsis, "keyword_only"),
('kwargs', Ellipsis, Ellipsis, "var_keyword")),
Ellipsis))
self.assertEqual(self.signature(partial(test, 1, 2, 3, test=1, b=0)),
((('args', Ellipsis, Ellipsis, "var_positional"),
('b', 0, Ellipsis, "keyword_only"),
('kwargs', Ellipsis, Ellipsis, "var_keyword")),
Ellipsis))
self.assertEqual(self.signature(partial(test, b=0)),
((('a', Ellipsis, Ellipsis, "positional_or_keyword"),
('args', Ellipsis, Ellipsis, "var_positional"),
('b', 0, Ellipsis, "keyword_only"),
('kwargs', Ellipsis, Ellipsis, "var_keyword")),
Ellipsis))
self.assertEqual(self.signature(partial(test, b=0, test=1)),
((('a', Ellipsis, Ellipsis, "positional_or_keyword"),
('args', Ellipsis, Ellipsis, "var_positional"),
('b', 0, Ellipsis, "keyword_only"),
('kwargs', Ellipsis, Ellipsis, "var_keyword")),
Ellipsis))
def test(a, b, c:int) -> 42:
pass
sig = test.__signature__ = inspect.signature(test)
self.assertEqual(self.signature(partial(partial(test, 1))),
((('b', Ellipsis, Ellipsis, "positional_or_keyword"),
('c', Ellipsis, int, "positional_or_keyword")),
42))
self.assertEqual(self.signature(partial(partial(test, 1), 2)),
((('c', Ellipsis, int, "positional_or_keyword"),),
42))
psig = inspect.signature(partial(partial(test, 1), 2))
def foo(a):
return a
_foo = partial(partial(foo, a=10), a=20)
self.assertEqual(self.signature(_foo),
((('a', 20, Ellipsis, "positional_or_keyword"),),
Ellipsis))
# check that we don't have any side-effects in signature(),
# and the partial object is still functioning
self.assertEqual(_foo(), 20)
def foo(a, b, c):
return a, b, c
_foo = partial(partial(foo, 1, b=20), b=30)
self.assertEqual(self.signature(_foo),
((('b', 30, Ellipsis, "positional_or_keyword"),
('c', Ellipsis, Ellipsis, "positional_or_keyword")),
Ellipsis))
self.assertEqual(_foo(c=10), (1, 30, 10))
_foo = partial(_foo, 2) # now 'b' has two values -
# positional and keyword
with self.assertRaisesRegex(ValueError, "has incorrect arguments"):
inspect.signature(_foo)
def foo(a, b, c, *, d):
return a, b, c, d
_foo = partial(partial(foo, d=20, c=20), b=10, d=30)
self.assertEqual(self.signature(_foo),
((('a', Ellipsis, Ellipsis, "positional_or_keyword"),
('b', 10, Ellipsis, "positional_or_keyword"),
('c', 20, Ellipsis, "positional_or_keyword"),
('d', 30, Ellipsis, "keyword_only")),
Ellipsis))
ba = inspect.signature(_foo).bind(a=200, b=11)
self.assertEqual(_foo(*ba.args, **ba.kwargs), (200, 11, 20, 30))
def foo(a=1, b=2, c=3):
return a, b, c
_foo = partial(foo, a=10, c=13)
ba = inspect.signature(_foo).bind(11)
self.assertEqual(_foo(*ba.args, **ba.kwargs), (11, 2, 13))
ba = inspect.signature(_foo).bind(11, 12)
self.assertEqual(_foo(*ba.args, **ba.kwargs), (11, 12, 13))
ba = inspect.signature(_foo).bind(11, b=12)
self.assertEqual(_foo(*ba.args, **ba.kwargs), (11, 12, 13))
ba = inspect.signature(_foo).bind(b=12)
self.assertEqual(_foo(*ba.args, **ba.kwargs), (10, 12, 13))
_foo = partial(_foo, b=10)
ba = inspect.signature(_foo).bind(12, 14)
self.assertEqual(_foo(*ba.args, **ba.kwargs), (12, 14, 13))
""")
if sys.version_info[0] > 2:
exec("""
def test_signature_on_decorated(self):
import functools
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs) -> int:
return func(*args, **kwargs)
return wrapper
class Foo:
@decorator
def bar(self, a, b):
pass
self.assertEqual(self.signature(Foo.bar),
((('self', Ellipsis, Ellipsis, "positional_or_keyword"),
('a', Ellipsis, Ellipsis, "positional_or_keyword"),
('b', Ellipsis, Ellipsis, "positional_or_keyword")),
Ellipsis))
self.assertEqual(self.signature(Foo().bar),
((('a', Ellipsis, Ellipsis, "positional_or_keyword"),
('b', Ellipsis, Ellipsis, "positional_or_keyword")),
Ellipsis))
# Test that we handle method wrappers correctly
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs) -> int:
return func(42, *args, **kwargs)
sig = inspect.signature(func)
new_params = tuple(sig.parameters.values())[1:]
wrapper.__signature__ = sig.replace(parameters=new_params)
return wrapper
class Foo:
@decorator
def __call__(self, a, b):
pass
self.assertEqual(self.signature(Foo.__call__),
((('a', Ellipsis, Ellipsis, "positional_or_keyword"),
('b', Ellipsis, Ellipsis, "positional_or_keyword")),
Ellipsis))
self.assertEqual(self.signature(Foo().__call__),
((('b', Ellipsis, Ellipsis, "positional_or_keyword"),),
Ellipsis))
""")
if sys.version_info[0] > 2:
exec("""
def test_signature_on_class(self):
class C:
def __init__(self, a):
pass
self.assertEqual(self.signature(C),
((('a', Ellipsis, Ellipsis, "positional_or_keyword"),),
Ellipsis))
class CM(type):
def __call__(cls, a):
pass
class C(metaclass=CM):
def __init__(self, b):
pass
self.assertEqual(self.signature(C),
((('a', Ellipsis, Ellipsis, "positional_or_keyword"),),
Ellipsis))
class CM(type):
def __new__(mcls, name, bases, dct, *, foo=1):
return super().__new__(mcls, name, bases, dct)
class C(metaclass=CM):
def __init__(self, b):
pass
self.assertEqual(self.signature(C),
((('b', Ellipsis, Ellipsis, "positional_or_keyword"),),
Ellipsis))
self.assertEqual(self.signature(CM),
((('name', Ellipsis, Ellipsis, "positional_or_keyword"),
('bases', Ellipsis, Ellipsis, "positional_or_keyword"),
('dct', Ellipsis, Ellipsis, "positional_or_keyword"),
('foo', 1, Ellipsis, "keyword_only")),
Ellipsis))
class CMM(type):
def __new__(mcls, name, bases, dct, *, foo=1):
return super().__new__(mcls, name, bases, dct)
def __call__(cls, nm, bs, dt):
return type(nm, bs, dt)
class CM(type, metaclass=CMM):
def __new__(mcls, name, bases, dct, *, bar=2):
return super().__new__(mcls, name, bases, dct)
class C(metaclass=CM):
def __init__(self, b):
pass
self.assertEqual(self.signature(CMM),
((('name', Ellipsis, Ellipsis, "positional_or_keyword"),
('bases', Ellipsis, Ellipsis, "positional_or_keyword"),
('dct', Ellipsis, Ellipsis, "positional_or_keyword"),
('foo', 1, Ellipsis, "keyword_only")),
Ellipsis))
self.assertEqual(self.signature(CM),
((('nm', Ellipsis, Ellipsis, "positional_or_keyword"),
('bs', Ellipsis, Ellipsis, "positional_or_keyword"),
('dt', Ellipsis, Ellipsis, "positional_or_keyword")),
Ellipsis))
self.assertEqual(self.signature(C),
((('b', Ellipsis, Ellipsis, "positional_or_keyword"),),
Ellipsis))
class CM(type):
def __init__(cls, name, bases, dct, *, bar=2):
return super().__init__(name, bases, dct)
class C(metaclass=CM):
def __init__(self, b):
pass
self.assertEqual(self.signature(CM),
((('name', Ellipsis, Ellipsis, "positional_or_keyword"),
('bases', Ellipsis, Ellipsis, "positional_or_keyword"),
('dct', Ellipsis, Ellipsis, "positional_or_keyword"),
('bar', 2, Ellipsis, "keyword_only")),
Ellipsis))
""")
def test_signature_on_callable_objects(self):
class Foo(object):
def __call__(self, a):
pass
self.assertEqual(self.signature(Foo()),
((('a', Ellipsis, Ellipsis, "positional_or_keyword"),),
Ellipsis))
class Spam(object):
pass
with self.assertRaisesRegex(TypeError, "is not a callable object"):
inspect.signature(Spam())
class Bar(Spam, Foo):
pass
self.assertEqual(self.signature(Bar()),
((('a', Ellipsis, Ellipsis, "positional_or_keyword"),),
Ellipsis))
class ToFail(object):
__call__ = type
with self.assertRaisesRegex(ValueError, "not supported by signature"):
inspect.signature(ToFail())
if sys.version_info[0] < 3:
return
class Wrapped(object):
pass
Wrapped.__wrapped__ = lambda a: None
self.assertEqual(self.signature(Wrapped),
((('a', Ellipsis, Ellipsis, "positional_or_keyword"),),
Ellipsis))
def test_signature_on_lambdas(self):
self.assertEqual(self.signature((lambda a=10: a)),
((('a', 10, Ellipsis, "positional_or_keyword"),),
Ellipsis))
if sys.version_info[0] > 2:
exec("""
def test_signature_equality(self):
def foo(a, *, b:int) -> float: pass
self.assertNotEqual(inspect.signature(foo), 42)
def bar(a, *, b:int) -> float: pass
self.assertEqual(inspect.signature(foo), inspect.signature(bar))
def bar(a, *, b:int) -> int: pass
self.assertNotEqual(inspect.signature(foo), inspect.signature(bar))
def bar(a, *, b:int): pass
self.assertNotEqual(inspect.signature(foo), inspect.signature(bar))
def bar(a, *, b:int=42) -> float: pass
self.assertNotEqual(inspect.signature(foo), inspect.signature(bar))
def bar(a, *, c) -> float: pass
self.assertNotEqual(inspect.signature(foo), inspect.signature(bar))
def bar(a, b:int) -> float: pass
self.assertNotEqual(inspect.signature(foo), inspect.signature(bar))
def spam(b:int, a) -> float: pass
self.assertNotEqual(inspect.signature(spam), inspect.signature(bar))
def foo(*, a, b, c): pass
def bar(*, c, b, a): pass
self.assertEqual(inspect.signature(foo), inspect.signature(bar))
def foo(*, a=1, b, c): pass
def bar(*, c, b, a=1): pass
self.assertEqual(inspect.signature(foo), inspect.signature(bar))
def foo(pos, *, a=1, b, c): pass
def bar(pos, *, c, b, a=1): pass
self.assertEqual(inspect.signature(foo), inspect.signature(bar))
def foo(pos, *, a, b, c): pass
def bar(pos, *, c, b, a=1): pass
self.assertNotEqual(inspect.signature(foo), inspect.signature(bar))
def foo(pos, *args, a=42, b, c, **kwargs:int): pass
def bar(pos, *args, c, b, a=42, **kwargs:int): pass
self.assertEqual(inspect.signature(foo), inspect.signature(bar))
""")
def test_signature_unhashable(self):
def foo(a): pass
sig = inspect.signature(foo)
with self.assertRaisesRegex(TypeError, 'unhashable type'):
hash(sig)
if sys.version_info[0] > 2:
exec("""
def test_signature_str(self):
def foo(a:int=1, *, b, c=None, **kwargs) -> 42:
pass
self.assertEqual(str(inspect.signature(foo)),
'(a:int=1, *, b, c=None, **kwargs) -> 42')
def foo(a:int=1, *args, b, c=None, **kwargs) -> 42:
pass
self.assertEqual(str(inspect.signature(foo)),
'(a:int=1, *args, b, c=None, **kwargs) -> 42')
def foo():
pass
self.assertEqual(str(inspect.signature(foo)), '()')
""")
if sys.version_info[0] > 2:
exec("""
def test_signature_str_positional_only(self):
P = inspect.Parameter
def test(a_po, *, b, **kwargs):
return a_po, kwargs
sig = inspect.signature(test)
new_params = list(sig.parameters.values())
new_params[0] = new_params[0].replace(kind=P.POSITIONAL_ONLY)
test.__signature__ = sig.replace(parameters=new_params)
self.assertEqual(str(inspect.signature(test)),
'(<a_po>, *, b, **kwargs)')
sig = inspect.signature(test)
new_params = list(sig.parameters.values())
new_params[0] = new_params[0].replace(name=None)
test.__signature__ = sig.replace(parameters=new_params)
self.assertEqual(str(inspect.signature(test)),
'(<0>, *, b, **kwargs)')
""")
if sys.version_info[0] > 2:
exec("""
def test_signature_replace_anno(self):
def test() -> 42:
pass
sig = inspect.signature(test)
sig = sig.replace(return_annotation=None)
self.assertIs(sig.return_annotation, None)
sig = sig.replace(return_annotation=sig.empty)
self.assertIs(sig.return_annotation, sig.empty)
sig = sig.replace(return_annotation=42)
self.assertEqual(sig.return_annotation, 42)
self.assertEqual(sig, inspect.signature(test))
""")
class TestParameterObject(unittest.TestCase):
def __init__(self, *args, **kwargs):
unittest.TestCase.__init__(self, *args, **kwargs)
if not hasattr(self, 'assertRaisesRegex'):
self.assertRaisesRegex = self.assertRaisesRegexp
def test_signature_parameter_kinds(self):
P = inspect.Parameter
self.assertTrue(P.POSITIONAL_ONLY < P.POSITIONAL_OR_KEYWORD < \
P.VAR_POSITIONAL < P.KEYWORD_ONLY < P.VAR_KEYWORD)
self.assertEqual(str(P.POSITIONAL_ONLY), 'POSITIONAL_ONLY')
self.assertTrue('POSITIONAL_ONLY' in repr(P.POSITIONAL_ONLY))
def test_signature_parameter_object(self):
p = inspect.Parameter('foo', default=10,
kind=inspect.Parameter.POSITIONAL_ONLY)
self.assertEqual(p.name, 'foo')
self.assertEqual(p.default, 10)
self.assertIs(p.annotation, p.empty)
self.assertEqual(p.kind, inspect.Parameter.POSITIONAL_ONLY)
with self.assertRaisesRegex(ValueError, 'invalid value'):
inspect.Parameter('foo', default=10, kind='123')
with self.assertRaisesRegex(ValueError, 'not a valid parameter name'):
inspect.Parameter('1', kind=inspect.Parameter.VAR_KEYWORD)
with self.assertRaisesRegex(ValueError,
'non-positional-only parameter'):
inspect.Parameter(None, kind=inspect.Parameter.VAR_KEYWORD)
with self.assertRaisesRegex(ValueError, 'cannot have default values'):
inspect.Parameter('a', default=42,
kind=inspect.Parameter.VAR_KEYWORD)
with self.assertRaisesRegex(ValueError, 'cannot have default values'):
inspect.Parameter('a', default=42,
kind=inspect.Parameter.VAR_POSITIONAL)
p = inspect.Parameter('a', default=42,
kind=inspect.Parameter.POSITIONAL_OR_KEYWORD)
with self.assertRaisesRegex(ValueError, 'cannot have default values'):
p.replace(kind=inspect.Parameter.VAR_POSITIONAL)
self.assertTrue(repr(p).startswith('<Parameter'))
def test_signature_parameter_equality(self):
P = inspect.Parameter
p = P('foo', default=42, kind=inspect.Parameter.KEYWORD_ONLY)
self.assertEqual(p, p)
self.assertNotEqual(p, 42)
self.assertEqual(p, P('foo', default=42,
kind=inspect.Parameter.KEYWORD_ONLY))
def test_signature_parameter_unhashable(self):
p = inspect.Parameter('foo', default=42,
kind=inspect.Parameter.KEYWORD_ONLY)
with self.assertRaisesRegex(TypeError, 'unhashable type'):
hash(p)
def test_signature_parameter_replace(self):
p = inspect.Parameter('foo', default=42,
kind=inspect.Parameter.KEYWORD_ONLY)
self.assertIsNot(p, p.replace())
self.assertEqual(p, p.replace())
p2 = p.replace(annotation=1)
self.assertEqual(p2.annotation, 1)
p2 = p2.replace(annotation=p2.empty)
self.assertEqual(p, p2)
p2 = p2.replace(name='bar')
self.assertEqual(p2.name, 'bar')
self.assertNotEqual(p2, p)
with self.assertRaisesRegex(ValueError, 'not a valid parameter name'):
p2 = p2.replace(name=p2.empty)
p2 = p2.replace(name='foo', default=None)
self.assertIs(p2.default, None)
self.assertNotEqual(p2, p)
p2 = p2.replace(name='foo', default=p2.empty)
self.assertIs(p2.default, p2.empty)
p2 = p2.replace(default=42, kind=p2.POSITIONAL_OR_KEYWORD)
self.assertEqual(p2.kind, p2.POSITIONAL_OR_KEYWORD)
self.assertNotEqual(p2, p)
with self.assertRaisesRegex(ValueError, 'invalid value for'):
p2 = p2.replace(kind=p2.empty)
p2 = p2.replace(kind=p2.KEYWORD_ONLY)
self.assertEqual(p2, p)
def test_signature_parameter_positional_only(self):
p = inspect.Parameter(None, kind=inspect.Parameter.POSITIONAL_ONLY)
self.assertEqual(str(p), '<>')
p = p.replace(name='1')
self.assertEqual(str(p), '<1>')
def test_signature_parameter_immutability(self):
p = inspect.Parameter(None, kind=inspect.Parameter.POSITIONAL_ONLY)
with self.assertRaises(AttributeError):
p.foo = 'bar'
with self.assertRaises(AttributeError):
p.kind = 123
class TestSignatureBind(unittest.TestCase):
@staticmethod
def call(func, *args, **kwargs):
sig = inspect.signature(func)
ba = sig.bind(*args, **kwargs)
return func(*ba.args, **ba.kwargs)
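# For example, self.call(f, 1, b=2) first binds the arguments against
# inspect.signature(f) and then invokes f with the normalized
# ba.args/ba.kwargs, so binding errors surface as TypeError before the
# function itself runs.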
def __init__(self, *args, **kwargs):
unittest.TestCase.__init__(self, *args, **kwargs)
if not hasattr(self, 'assertRaisesRegex'):
self.assertRaisesRegex = self.assertRaisesRegexp
def test_signature_bind_empty(self):
def test():
return 42
self.assertEqual(self.call(test), 42)
with self.assertRaisesRegex(TypeError, 'too many positional arguments'):
self.call(test, 1)
with self.assertRaisesRegex(TypeError, 'too many positional arguments'):
self.call(test, 1, spam=10)
with self.assertRaisesRegex(TypeError, 'too many keyword arguments'):
self.call(test, spam=1)
def test_signature_bind_var(self):
def test(*args, **kwargs):
return args, kwargs
self.assertEqual(self.call(test), ((), {}))
self.assertEqual(self.call(test, 1), ((1,), {}))
self.assertEqual(self.call(test, 1, 2), ((1, 2), {}))
self.assertEqual(self.call(test, foo='bar'), ((), {'foo': 'bar'}))
self.assertEqual(self.call(test, 1, foo='bar'), ((1,), {'foo': 'bar'}))
self.assertEqual(self.call(test, args=10), ((), {'args': 10}))
self.assertEqual(self.call(test, 1, 2, foo='bar'),
((1, 2), {'foo': 'bar'}))
def test_signature_bind_just_args(self):
def test(a, b, c):
return a, b, c
self.assertEqual(self.call(test, 1, 2, 3), (1, 2, 3))
with self.assertRaisesRegex(TypeError, 'too many positional arguments'):
self.call(test, 1, 2, 3, 4)
with self.assertRaisesRegex(TypeError, "'b' parameter lacking default"):
self.call(test, 1)
with self.assertRaisesRegex(TypeError, "'a' parameter lacking default"):
self.call(test)
def test(a, b, c=10):
return a, b, c
self.assertEqual(self.call(test, 1, 2, 3), (1, 2, 3))
self.assertEqual(self.call(test, 1, 2), (1, 2, 10))
def test(a=1, b=2, c=3):
return a, b, c
self.assertEqual(self.call(test, a=10, c=13), (10, 2, 13))
self.assertEqual(self.call(test, a=10), (10, 2, 3))
self.assertEqual(self.call(test, b=10), (1, 10, 3))
def test_signature_bind_varargs_order(self):
def test(*args):
return args
self.assertEqual(self.call(test), ())
self.assertEqual(self.call(test, 1, 2, 3), (1, 2, 3))
def test_signature_bind_args_and_varargs(self):
def test(a, b, c=3, *args):
return a, b, c, args
self.assertEqual(self.call(test, 1, 2, 3, 4, 5), (1, 2, 3, (4, 5)))
self.assertEqual(self.call(test, 1, 2), (1, 2, 3, ()))
self.assertEqual(self.call(test, b=1, a=2), (2, 1, 3, ()))
self.assertEqual(self.call(test, 1, b=2), (1, 2, 3, ()))
with self.assertRaisesRegex(TypeError,
"multiple values for argument 'c'"):
self.call(test, 1, 2, 3, c=4)
def test_signature_bind_just_kwargs(self):
def test(**kwargs):
return kwargs
self.assertEqual(self.call(test), {})
self.assertEqual(self.call(test, foo='bar', spam='ham'),
{'foo': 'bar', 'spam': 'ham'})
def test_signature_bind_args_and_kwargs(self):
def test(a, b, c=3, **kwargs):
return a, b, c, kwargs
self.assertEqual(self.call(test, 1, 2), (1, 2, 3, {}))
self.assertEqual(self.call(test, 1, 2, foo='bar', spam='ham'),
(1, 2, 3, {'foo': 'bar', 'spam': 'ham'}))
self.assertEqual(self.call(test, b=2, a=1, foo='bar', spam='ham'),
(1, 2, 3, {'foo': 'bar', 'spam': 'ham'}))
self.assertEqual(self.call(test, a=1, b=2, foo='bar', spam='ham'),
(1, 2, 3, {'foo': 'bar', 'spam': 'ham'}))
self.assertEqual(self.call(test, 1, b=2, foo='bar', spam='ham'),
(1, 2, 3, {'foo': 'bar', 'spam': 'ham'}))
self.assertEqual(self.call(test, 1, b=2, c=4, foo='bar', spam='ham'),
(1, 2, 4, {'foo': 'bar', 'spam': 'ham'}))
self.assertEqual(self.call(test, 1, 2, 4, foo='bar'),
(1, 2, 4, {'foo': 'bar'}))
self.assertEqual(self.call(test, c=5, a=4, b=3),
(4, 3, 5, {}))
if sys.version_info[0] > 2:
exec("""
def test_signature_bind_kwonly(self):
def test(*, foo):
return foo
with self.assertRaisesRegex(TypeError,
'too many positional arguments'):
self.call(test, 1)
self.assertEqual(self.call(test, foo=1), 1)
def test(a, *, foo=1, bar):
return foo
with self.assertRaisesRegex(TypeError,
"'bar' parameter lacking default value"):
self.call(test, 1)
def test(foo, *, bar):
return foo, bar
self.assertEqual(self.call(test, 1, bar=2), (1, 2))
self.assertEqual(self.call(test, bar=2, foo=1), (1, 2))
with self.assertRaisesRegex(TypeError,
'too many keyword arguments'):
self.call(test, bar=2, foo=1, spam=10)
with self.assertRaisesRegex(TypeError,
'too many positional arguments'):
self.call(test, 1, 2)
with self.assertRaisesRegex(TypeError,
'too many positional arguments'):
self.call(test, 1, 2, bar=2)
with self.assertRaisesRegex(TypeError,
'too many keyword arguments'):
self.call(test, 1, bar=2, spam='ham')
with self.assertRaisesRegex(TypeError,
"'bar' parameter lacking default value"):
self.call(test, 1)
def test(foo, *, bar, **bin):
return foo, bar, bin
self.assertEqual(self.call(test, 1, bar=2), (1, 2, {}))
self.assertEqual(self.call(test, foo=1, bar=2), (1, 2, {}))
self.assertEqual(self.call(test, 1, bar=2, spam='ham'),
(1, 2, {'spam': 'ham'}))
self.assertEqual(self.call(test, spam='ham', foo=1, bar=2),
(1, 2, {'spam': 'ham'}))
with self.assertRaisesRegex(TypeError,
"'foo' parameter lacking default value"):
self.call(test, spam='ham', bar=2)
self.assertEqual(self.call(test, 1, bar=2, bin=1, spam=10),
(1, 2, {'bin': 1, 'spam': 10}))
""")
#
if sys.version_info[0] > 2:
exec("""
def test_signature_bind_arguments(self):
def test(a, *args, b, z=100, **kwargs):
pass
sig = inspect.signature(test)
ba = sig.bind(10, 20, b=30, c=40, args=50, kwargs=60)
# we won't have 'z' argument in the bound arguments object, as we didn't
# pass it to the 'bind'
self.assertEqual(tuple(ba.arguments.items()),
(('a', 10), ('args', (20,)), ('b', 30),
('kwargs', {'c': 40, 'args': 50, 'kwargs': 60})))
self.assertEqual(ba.kwargs,
{'b': 30, 'c': 40, 'args': 50, 'kwargs': 60})
self.assertEqual(ba.args, (10, 20))
""")
#
if sys.version_info[0] > 2:
exec("""
def test_signature_bind_positional_only(self):
P = inspect.Parameter
def test(a_po, b_po, c_po=3, foo=42, *, bar=50, **kwargs):
return a_po, b_po, c_po, foo, bar, kwargs
sig = inspect.signature(test)
new_params = collections.OrderedDict(tuple(sig.parameters.items()))
for name in ('a_po', 'b_po', 'c_po'):
new_params[name] = new_params[name].replace(kind=P.POSITIONAL_ONLY)
new_sig = sig.replace(parameters=new_params.values())
test.__signature__ = new_sig
self.assertEqual(self.call(test, 1, 2, 4, 5, bar=6),
(1, 2, 4, 5, 6, {}))
with self.assertRaisesRegex(TypeError, "parameter is positional only"):
self.call(test, 1, 2, c_po=4)
with self.assertRaisesRegex(TypeError, "parameter is positional only"):
self.call(test, a_po=1, b_po=2)
""")
class TestBoundArguments(unittest.TestCase):
def __init__(self, *args, **kwargs):
unittest.TestCase.__init__(self, *args, **kwargs)
if not hasattr(self, 'assertRaisesRegex'):
self.assertRaisesRegex = self.assertRaisesRegexp
def test_signature_bound_arguments_unhashable(self):
def foo(a): pass
ba = inspect.signature(foo).bind(1)
with self.assertRaisesRegex(TypeError, 'unhashable type'):
hash(ba)
def test_signature_bound_arguments_equality(self):
def foo(a): pass
ba = inspect.signature(foo).bind(1)
self.assertEqual(ba, ba)
ba2 = inspect.signature(foo).bind(1)
self.assertEqual(ba, ba2)
ba3 = inspect.signature(foo).bind(2)
self.assertNotEqual(ba, ba3)
ba3.arguments['a'] = 1
self.assertEqual(ba, ba3)
def bar(b): pass
ba4 = inspect.signature(bar).bind(1)
self.assertNotEqual(ba, ba4)
if __name__ == "__main__":
unittest.main()
|
vmanoria/bluemix-hue-filebrowser
|
refs/heads/master
|
hue-3.8.1-bluemix/desktop/core/ext-py/kazoo-2.0/kazoo/tests/test_security.py
|
51
|
import unittest
from nose.tools import eq_
from kazoo.security import Permissions
class TestACL(unittest.TestCase):
def _makeOne(self, *args, **kwargs):
from kazoo.security import make_acl
return make_acl(*args, **kwargs)
def test_read_acl(self):
acl = self._makeOne("digest", ":", read=True)
eq_(acl.perms & Permissions.READ, Permissions.READ)
def test_all_perms(self):
acl = self._makeOne("digest", ":", read=True, write=True,
create=True, delete=True, admin=True)
for perm in [Permissions.READ, Permissions.CREATE, Permissions.WRITE,
Permissions.DELETE, Permissions.ADMIN]:
eq_(acl.perms & perm, perm)
def test_perm_listing(self):
from kazoo.security import ACL
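# ZooKeeper permission values are bit flags: READ=1, WRITE=2, CREATE=4,
# DELETE=8, ADMIN=16; 15 == READ|WRITE|CREATE|DELETE and 31 == ALL.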
f = ACL(15, 'fred')
self.assert_('READ' in f.acl_list)
self.assert_('WRITE' in f.acl_list)
self.assert_('CREATE' in f.acl_list)
self.assert_('DELETE' in f.acl_list)
f = ACL(16, 'fred')
self.assert_('ADMIN' in f.acl_list)
f = ACL(31, 'george')
self.assert_('ALL' in f.acl_list)
def test_perm_repr(self):
from kazoo.security import ACL
f = ACL(16, 'fred')
self.assert_("ACL(perms=16, acl_list=['ADMIN']" in repr(f))
|
ylbian/robot-d
|
refs/heads/master
|
robotx/template/project_template/results/__init__.py
|
9
|
#this directory is used for saving running results
|
zhouzhenghui/python-for-android
|
refs/heads/master
|
python3-alpha/extra_modules/gdata/contentforshopping/client.py
|
63
|
#!/usr/bin/python
#
# Copyright (C) 2010-2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Extend the gdata client for the Content API for Shopping.
TODO:
1. Proper MCA Support.
2. Add classes for datafeed functions instead of asking for raw XML.
"""
__author__ = 'afshar (Ali Afshar), dhermes (Daniel Hermes)'
import atom.data
import gdata.client
from gdata.contentforshopping.data import ClientAccount
from gdata.contentforshopping.data import ClientAccountFeed
from gdata.contentforshopping.data import DatafeedEntry
from gdata.contentforshopping.data import DatafeedFeed
from gdata.contentforshopping.data import ProductEntry
from gdata.contentforshopping.data import ProductFeed
CFS_VERSION = 'v1'
CFS_HOST = 'content.googleapis.com'
CFS_URI = 'https://%s/content' % CFS_HOST
CFS_PROJECTION = 'generic'
class ContentForShoppingClient(gdata.client.GDClient):
"""Client for Content for Shopping API.
:param account_id: Merchant account ID. This value will be used by default
for all requests, but may be overridden on a
request-by-request basis.
:param api_version: The version of the API to target. Default value: 'v1'.
:param **kwargs: Pass all additional keywords to the GDClient constructor.
"""
api_version = '1.0'
def __init__(self, account_id=None, api_version=CFS_VERSION, **kwargs):
self.cfs_account_id = account_id
self.cfs_api_version = api_version
gdata.client.GDClient.__init__(self, **kwargs)
def _create_uri(self, account_id, resource, path=(), use_projection=True,
dry_run=False, warnings=False):
"""Create a request uri from the given arguments.
If arguments are None, use the default client attributes.
"""
account_id = account_id or self.cfs_account_id
if account_id is None:
raise ValueError('No Account ID set. '
'Either set for the client, or per request')
segments = [CFS_URI, self.cfs_api_version, account_id, resource]
if use_projection:
segments.append(CFS_PROJECTION)
segments.extend(path)
result = '/'.join(segments)
request_params = []
if dry_run:
request_params.append('dry-run')
if warnings:
request_params.append('warnings')
request_params = '&'.join(request_params)
if request_params:
result = '%s?%s' % (result, request_params)
return result
def _create_product_id(self, id, country, language):
return 'online:%s:%s:%s' % (language, country, id)
def _create_batch_feed(self, entries, operation, feed=None):
if feed is None:
feed = ProductFeed()
for entry in entries:
entry.batch_operation = gdata.data.BatchOperation(type=operation)
feed.entry.append(entry)
return feed
# Operations on a single product
def get_product(self, id, country, language, account_id=None,
auth_token=None):
"""Get a product by id, country and language.
:param id: The product ID
:param country: The country (target_country)
:param language: The language (content_language)
:param account_id: The Merchant Center Account ID. If omitted, the default
Account ID will be used for this client
:param auth_token: An object which sets the Authorization HTTP header in its
modify_request method.
"""
pid = self._create_product_id(id, country, language)
uri = self._create_uri(account_id, 'items/products', path=[pid])
return self.get_entry(uri, desired_class=ProductEntry,
auth_token=auth_token)
def insert_product(self, product, account_id=None, auth_token=None,
dry_run=False, warnings=False):
"""Create a new product, by posting the product entry feed.
:param product: A :class:`gdata.contentforshopping.data.ProductEntry` with
the required product data.
:param account_id: The Merchant Center Account ID. If omitted, the default
Account ID will be used for this client
:param auth_token: An object which sets the Authorization HTTP header in its
modify_request method.
:param dry_run: Flag to run all requests that modify persistent data in
dry-run mode. False by default.
:param warnings: Flag to include warnings in response. False by default.
"""
uri = self._create_uri(account_id, 'items/products',
dry_run=dry_run, warnings=warnings)
return self.post(product, uri=uri, auth_token=auth_token)
def update_product(self, product, account_id=None, auth_token=None,
dry_run=False, warnings=False):
"""Update a product, by putting the product entry feed.
:param product: A :class:`gdata.contentforshopping.data.ProductEntry` with
the required product data.
:param account_id: The Merchant Center Account ID. If omitted, the default
Account ID will be used for this client
:param auth_token: An object which sets the Authorization HTTP header in its
modify_request method.
:param dry_run: Flag to run all requests that modify persistent data in
dry-run mode. False by default.
:param warnings: Flag to include warnings in response. False
by default.
"""
pid = self._create_product_id(product.product_id.text,
product.target_country.text,
product.content_language.text)
uri = self._create_uri(account_id, 'items/products', path=[pid],
dry_run=dry_run, warnings=warnings)
return self.update(product, uri=uri, auth_token=auth_token)
def delete_product(self, product, account_id=None, auth_token=None,
dry_run=False, warnings=False):
"""Delete a product
:param product: A :class:`gdata.contentforshopping.data.ProductEntry` with
the required product data.
:param account_id: The Merchant Center Account ID. If omitted, the default
Account ID will be used for this client
:param auth_token: An object which sets the Authorization HTTP header in its
modify_request method.
:param dry_run: Flag to run all requests that modify persistent data in
dry-run mode. False by default.
:param warnings: Flag to include warnings in response. False by default.
"""
pid = self._create_product_id(product.product_id.text,
product.target_country.text,
product.content_language.text)
uri = self._create_uri(account_id, 'items/products', path=[pid],
dry_run=dry_run, warnings=warnings)
return self.delete(uri, auth_token=auth_token)
# Operations on multiple products
def get_products(self, start_index=None, max_results=None, account_id=None,
auth_token=None):
"""Get a feed of products for the account.
:param max_results: The maximum number of results to return (default 25,
maximum 250).
:param start_index: The starting index of the feed to return (default 1,
maximum 10000)
:param account_id: The Merchant Center Account ID. If omitted, the default
Account ID will be used for this client
:param auth_token: An object which sets the Authorization HTTP header in its
modify_request method.
"""
uri = self._create_uri(account_id, 'items/products')
# Forward the standard GData paging parameters; the original code accepted
# start_index and max_results but never used them.
params = []
if start_index is not None:
params.append('start-index=%s' % start_index)
if max_results is not None:
params.append('max-results=%s' % max_results)
if params:
uri = '%s%s%s' % (uri, '&' if '?' in uri else '?', '&'.join(params))
return self.get_feed(uri, auth_token=auth_token,
desired_class=ProductFeed)
def batch(self, feed, account_id=None, auth_token=None,
dry_run=False, warnings=False):
"""Send a batch request.
:param feed: The feed of batch entries to send.
:param account_id: The Merchant Center Account ID. If omitted, the default
Account ID will be used for this client
:param auth_token: An object which sets the Authorization HTTP header in its
modify_request method.
:param dry_run: Flag to run all requests that modify persistent data in
dry-run mode. False by default.
:param warnings: Flag to include warnings in response. False by default.
"""
uri = self._create_uri(account_id, 'items/products', path=['batch'],
dry_run=dry_run, warnings=warnings)
return self.post(feed, uri=uri, auth_token=auth_token,
desired_class=ProductFeed)
def insert_products(self, products, account_id=None, auth_token=None,
dry_run=False, warnings=False):
"""Insert the products using a batch request
:param products: A list of product entries
:param account_id: The Merchant Center Account ID. If omitted, the default
Account ID will be used for this client
:param auth_token: An object which sets the Authorization HTTP header in its
modify_request method.
:param dry_run: Flag to run all requests that modify persistent data in
dry-run mode. False by default.
:param warnings: Flag to include warnings in response. False by default.
"""
feed = self._create_batch_feed(products, 'insert')
return self.batch(feed, account_id=account_id, auth_token=auth_token,
dry_run=dry_run, warnings=warnings)
def update_products(self, products, account_id=None, auth_token=None,
dry_run=False, warnings=False):
"""Update the products using a batch request
:param products: A list of product entries
:param account_id: The Merchant Center Account ID. If omitted, the default
Account ID will be used for this client
:param auth_token: An object which sets the Authorization HTTP header in its
modify_request method.
:param dry_run: Flag to run all requests that modify persistent data in
dry-run mode. False by default.
:param warnings: Flag to include warnings in response. False by default.
.. note:: Entries must have the atom:id element set.
"""
feed = self._create_batch_feed(products, 'update')
return self.batch(feed, account_id=account_id, auth_token=auth_token,
dry_run=dry_run, warnings=warnings)
def delete_products(self, products, account_id=None, auth_token=None,
dry_run=False, warnings=False):
"""Delete the products using a batch request.
:param products: A list of product entries
:param account_id: The Merchant Center Account ID. If omitted, the default
Account ID will be used for this client
:param auth_token: An object which sets the Authorization HTTP header in its
modify_request method.
:param dry_run: Flag to run all requests that modify persistent data in
dry-run mode. False by default.
:param warnings: Flag to include warnings in response. False by default.
.. note:: Entries must have the atom:id element set.
"""
feed = self._create_batch_feed(products, 'delete')
return self.batch(feed, account_id=account_id, auth_token=auth_token,
dry_run=dry_run, warnings=warnings)
# Operations on datafeeds
def get_datafeeds(self, account_id=None):
"""Get the feed of datafeeds.
:param account_id: The Sub-Account ID. If omitted, the default
Account ID will be used for this client.
"""
uri = self._create_uri(account_id, 'datafeeds/products',
use_projection=False)
return self.get_feed(uri, desired_class=DatafeedFeed)
# Operations on a single datafeed
def get_datafeed(self, feed_id, account_id=None, auth_token=None):
"""Get the feed of a single datafeed.
:param feed_id: The ID of the desired datafeed.
:param account_id: The Sub-Account ID. If omitted, the default
Account ID will be used for this client.
:param auth_token: An object which sets the Authorization HTTP header in its
modify_request method.
"""
uri = self._create_uri(account_id, 'datafeeds/products', path=[feed_id],
use_projection=False)
return self.get_feed(uri, auth_token=auth_token,
desired_class=DatafeedEntry)
def insert_datafeed(self, entry, account_id=None, auth_token=None,
dry_run=False, warnings=False):
"""Insert a datafeed.
:param entry: XML Content of post request required for registering a
datafeed.
:param account_id: The Sub-Account ID. If omitted, the default
Account ID will be used for this client.
:param auth_token: An object which sets the Authorization HTTP header in its
modify_request method.
:param dry_run: Flag to run all requests that modify persistent data in
dry-run mode. False by default.
:param warnings: Flag to include warnings in response. False by default.
"""
uri = self._create_uri(account_id, 'datafeeds/products',
use_projection=False, dry_run=dry_run,
warnings=warnings)
return self.post(entry, uri=uri, auth_token=auth_token)
def update_datafeed(self, entry, feed_id, account_id=None, auth_token=None,
dry_run=False, warnings=False):
"""Update the feed of a single datafeed.
:param entry: XML Content of put request required for updating a
datafeed.
:param feed_id: The ID of the desired datafeed.
:param account_id: The Sub-Account ID. If omitted, the default
Account ID will be used for this client.
:param auth_token: An object which sets the Authorization HTTP header in its
modify_request method.
:param dry_run: Flag to run all requests that modify persistent data in
dry-run mode. False by default.
:param warnings: Flag to include warnings in response. False by default.
"""
uri = self._create_uri(account_id, 'datafeeds/products', path=[feed_id],
use_projection=False, dry_run=dry_run,
warnings=warnings)
return self.update(entry, auth_token=auth_token, uri=uri)
def delete_datafeed(self, feed_id, account_id=None, auth_token=None):
"""Delete a single datafeed.
:param feed_id: The ID of the desired datafeed.
:param account_id: The Sub-Account ID. If omitted, the default
Account ID will be used for this client.
:param auth_token: An object which sets the Authorization HTTP header in its
modify_request method.
"""
uri = self._create_uri(account_id, 'datafeeds/products', path=[feed_id],
use_projection=False)
return self.delete(uri, auth_token=auth_token)
# Operations on client accounts
def get_client_accounts(self, account_id=None, auth_token=None):
"""Get the feed of managed accounts
:param account_id: The Merchant Center Account ID. If omitted, the default
Account ID will be used for this client
:param auth_token: An object which sets the Authorization HTTP header in its
modify_request method.
"""
uri = self._create_uri(account_id, 'managedaccounts/products',
use_projection=False)
return self.get_feed(uri, desired_class=ClientAccountFeed,
auth_token=auth_token)
def insert_client_account(self, entry, account_id=None, auth_token=None,
dry_run=False, warnings=False):
"""Insert a client account entry
:param entry: An entry of type ClientAccount
:param account_id: The Merchant Center Account ID. If omitted, the default
Account ID will be used for this client
:param auth_token: An object which sets the Authorization HTTP header in its
modify_request method.
:param dry_run: Flag to run all requests that modify persistent data in
dry-run mode. False by default.
:param warnings: Flag to include warnings in response. False by default.
"""
uri = self._create_uri(account_id, 'managedaccounts/products',
use_projection=False, dry_run=dry_run,
warnings=warnings)
return self.post(entry, uri=uri, auth_token=auth_token)
def update_client_account(self, entry, client_account_id, account_id=None,
auth_token=None, dry_run=False, warnings=False):
"""Update a client account
:param entry: An entry of type ClientAccount to update to
:param client_account_id: The client account ID
:param account_id: The Merchant Center Account ID. If omitted, the default
Account ID will be used for this client
:param auth_token: An object which sets the Authorization HTTP header in its
modify_request method.
:param dry_run: Flag to run all requests that modify persistent data in
dry-run mode. False by default.
:param warnings: Flag to include warnings in response. False by default.
"""
uri = self._create_uri(account_id, 'managedaccounts/products',
path=[client_account_id], use_projection=False,
dry_run=dry_run, warnings=warnings)
return self.update(entry, uri=uri, auth_token=auth_token)
def delete_client_account(self, client_account_id, account_id=None,
auth_token=None, dry_run=False, warnings=False):
"""Delete a client account
:param client_account_id: The client account ID
:param account_id: The Merchant Center Account ID. If omitted, the default
Account ID will be used for this client
:param auth_token: An object which sets the Authorization HTTP header in its
modify_request method.
:param dry_run: Flag to run all requests that modify persistent data in
dry-run mode. False by default.
:param warnings: Flag to include warnings in response. False by default.
"""
uri = self._create_uri(account_id, 'managedaccounts/products',
path=[client_account_id], use_projection=False,
dry_run=dry_run, warnings=warnings)
return self.delete(uri, auth_token=auth_token)
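# A minimal usage sketch (illustrative only; the account ID and `token`
# below are placeholders, and a real gdata auth token object is assumed):
#
# client = ContentForShoppingClient(account_id='1234567')
# feed = client.get_products(max_results=50, auth_token=token)
# for product in feed.entry:
# print product.product_id.text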
|
RealTimeWeb/datasets
|
refs/heads/master
|
builder/languages/detect_distribution.py
|
1
|
import warnings
import numpy as np
import pandas as pd
import scipy.stats as st
import statsmodels as sm
import matplotlib
import matplotlib.pyplot as plt
import random
matplotlib.rcParams['figure.figsize'] = (16.0, 12.0)
matplotlib.style.use('ggplot')
# Create models from data
def best_fit_distribution(data, bins=200, ax=None):
"""Model data by finding best fit distribution to data"""
# Get histogram of original data
y, x = np.histogram(data, bins=bins, normed=True)
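# average adjacent bin edges to get the bin centres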
x = (x + np.roll(x, -1))[:-1] / 2.0
# Distributions to check
DISTRIBUTIONS = [
st.alpha,st.anglit,st.arcsine,st.beta,st.betaprime,st.bradford,st.burr,st.cauchy,st.chi,st.chi2,st.cosine,
st.dgamma,st.dweibull,st.erlang,st.expon,st.exponnorm,st.exponweib,st.exponpow,st.f,st.fatiguelife,st.fisk,
st.foldcauchy,st.foldnorm,st.frechet_r,st.frechet_l,st.genlogistic,st.genpareto,st.gennorm,st.genexpon,
st.genextreme,st.gausshyper,st.gamma,st.gengamma,st.genhalflogistic,st.gilbrat,st.gompertz,st.gumbel_r,
st.gumbel_l,st.halfcauchy,st.halflogistic,st.halfnorm,st.halfgennorm,st.hypsecant,st.invgamma,st.invgauss,
st.invweibull,st.johnsonsb,st.johnsonsu,st.ksone,st.kstwobign,st.laplace,st.levy,st.levy_l,st.levy_stable,
st.logistic,st.loggamma,st.loglaplace,st.lognorm,st.lomax,st.maxwell,st.mielke,st.nakagami,st.ncx2,st.ncf,
st.nct,st.norm,st.pareto,st.pearson3,st.powerlaw,st.powerlognorm,st.powernorm,st.rdist,st.reciprocal,
st.rayleigh,st.rice,st.recipinvgauss,st.semicircular,st.t,st.triang,st.truncexpon,st.truncnorm,st.tukeylambda,
st.uniform,st.vonmises,st.vonmises_line,st.wald,st.weibull_min,st.weibull_max,st.wrapcauchy
]
# Best holders
best_distribution = st.norm
best_params = (0.0, 1.0)
best_sse = np.inf
# Estimate distribution parameters from data
for distribution in DISTRIBUTIONS:
# Try to fit the distribution
try:
# Ignore warnings from data that can't be fit
with warnings.catch_warnings():
warnings.filterwarnings('ignore')
# fit dist to data
params = distribution.fit(data)
# Separate parts of parameters
arg = params[:-2]
loc = params[-2]
scale = params[-1]
# Calculate fitted PDF and error with fit in distribution
pdf = distribution.pdf(x, loc=loc, scale=scale, *arg)
sse = np.sum(np.power(y - pdf, 2.0))
# if an axis was passed in, add this fit to the plot
try:
if ax:
pd.Series(pdf, x).plot(ax=ax)
except Exception:
pass
# identify if this distribution is better
if best_sse > sse > 0:
best_distribution = distribution
best_params = params
best_sse = sse
except Exception:
pass
return (best_distribution.name, best_params)
def make_pdf(dist, params, size=10000):
"""Generate distributions's Propbability Distribution Function """
# Separate parts of parameters
arg = params[:-2]
loc = params[-2]
scale = params[-1]
# Get sane start and end points of distribution
start = dist.ppf(0.01, *arg, loc=loc, scale=scale) if arg else dist.ppf(0.01, loc=loc, scale=scale)
end = dist.ppf(0.99, *arg, loc=loc, scale=scale) if arg else dist.ppf(0.99, loc=loc, scale=scale)
# Build PDF and turn into pandas Series
x = np.linspace(start, end, size)
y = dist.pdf(x, loc=loc, scale=scale, *arg)
pdf = pd.Series(y, x)
return pdf
# Load data from statsmodels datasets
data = pd.Series(sm.datasets.elnino.load_pandas().data.set_index('YEAR').values.ravel())
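# NOTE: the synthetic sample below overrides the El Nino data loaded above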
data = pd.Series([random.normalvariate(10, 5) for x in range(5000)])
# Plot for comparison
plt.figure(figsize=(12,8))
ax = data.plot(kind='hist', bins=50, normed=True, alpha=0.5, color=plt.rcParams['axes.color_cycle'][1])
# Save plot limits
dataYLim = ax.get_ylim()
# Find best fit distribution
best_fit_name, best_fit_params = best_fit_distribution(data, 200, ax)
best_dist = getattr(st, best_fit_name)
# Update plots
ax.set_ylim(dataYLim)
ax.set_title(u'El Niño sea temp.\n All Fitted Distributions')
ax.set_xlabel(u'Temp (°C)')
ax.set_ylabel('Frequency')
# Make PDF
pdf = make_pdf(best_dist, best_fit_params)
# Display
plt.figure(figsize=(12,8))
ax = pdf.plot(lw=2, label='PDF', legend=True)
data.plot(kind='hist', bins=50, normed=True, alpha=0.5, label='Data', legend=True, ax=ax)
param_names = (best_dist.shapes + ', loc, scale').split(', ') if best_dist.shapes else ['loc', 'scale']
param_str = ', '.join(['{}={:0.2f}'.format(k,v) for k,v in zip(param_names, best_fit_params)])
dist_str = '{}({})'.format(best_fit_name, param_str)
ax.set_title(u'El Niño sea temp. with best fit distribution \n' + dist_str)
ax.set_xlabel(u'Temp. (°C)')
ax.set_ylabel('Frequency')
|
tbinjiayou/Odoo
|
refs/heads/master
|
addons/web_tests_demo/__openerp__.py
|
384
|
{
'name': "Demonstration of web/javascript tests",
'category': 'Hidden',
'description': """
OpenERP Web demo of a test suite
================================
Test suite example, same code as that used in the testing documentation.
""",
'depends': ['web'],
'data' : [
'views/web_tests_demo.xml',
],
'qweb': ['static/src/xml/demo.xml'],
}
|
noisyboiler/wampy
|
refs/heads/master
|
wampy/messages/registered.py
|
1
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
class Registered(object):
""" [REGISTERED, REGISTER.Request|id, Registration|id]
"""
WAMP_CODE = 65
name = "registered"
def __init__(self, request_id, registration_id):
super(Registered, self).__init__()
self.request_id = request_id
self.registration_id = registration_id
@property
def message(self):
return [
self.WAMP_CODE, self.request_id, self.registration_id,
]
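# A minimal sketch of the wire format this message produces (illustrative):
#
# >>> Registered(request_id=25349185, registration_id=2103333224).message
# [65, 25349185, 2103333224]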
|
wdkk/iSuperColliderKit
|
refs/heads/master
|
editors/sced/sced/Settings.py
|
46
|
# sced (SuperCollider mode for gedit)
# Copyright 2009 Artem Popov and other contributors (see AUTHORS)
#
# sced is free software:
# you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import gobject
import gconf
# map gconf options to gobject properties
class Settings(gobject.GObject):
__gproperties__ = {
"runtime-folder": (gobject.TYPE_STRING,
"runtime folder",
"sclang runtime folder",
None,
gobject.PARAM_READWRITE),
} # __gproperties__
def __init__(self):
gobject.GObject.__init__(self)
self.base = "/apps/gedit-2/plugins/sced"
self.client = gconf.client_get_default()
self.client.add_dir(self.base, gconf.CLIENT_PRELOAD_NONE)
self.client.notify_add(self.base + "/runtime_folder",
self.__on_runtime_folder_changed)
def do_get_property(self, property):
if property.name == "runtime-folder":
return self.client.get_string(self.base + "/runtime_folder")
else:
raise AttributeError("Unknown property %s" % property.name)
def do_set_property(self, property, value):
if property.name == "runtime-folder":
self.freeze_notify()
self.client.set_string(self.base + "/runtime_folder", value)
self.thaw_notify()
else:
raise AttributeError("Unknown property %s" % property.name)
def __on_runtime_folder_changed(self, client, cnxn_id, entry, user_data):
self.notify("runtime-folder")
|
omarestrella/repolab
|
refs/heads/master
|
repolab/urls.py
|
1
|
from django.conf.urls import patterns, url
from repolab import views
urlpatterns = patterns('',
url(r'^$', views.Homepage.as_view(), name='homepage_url'),
url(r'^repo/add/$', views.AddRepo.as_view(), name='add_repo_url'),
url(r'^repo/(?P<slug>[\w\d_-]+)/$', views.ViewRepo.as_view(), name='repo_url'),
url(r'^repo/(?P<slug>[\w\d_-]+)/tree/(?P<changeset>[\w\d]+)/$',
views.ViewChangesetPath.as_view(), name='repo_changeset_url', kwargs={'path': '/'}),
url(r'^repo/(?P<slug>[\w\d_-]+)/tree/(?P<changeset>[\w\d]+)/(?P<path>.*)/$',
views.ViewChangesetPath.as_view(), name='repo_path_url'),
url(r'^repo/(?P<slug>[\w\d_-]+)/changesets/$',
views.ListChangesets.as_view(), name='list_changesets_url'),
url(r'^repo/(?P<slug>[\w\d_-]+)/edit/(?P<changeset>[\w\d]+)/(?P<path>.*)/$',
views.EditChangesetPath.as_view(), name='repo_edit_url'),
url(r'^repo/(?P<slug>[\w\d_-]+)/commit/(?P<changeset>[\w\d]+)/(?P<path>.*)/$',
views.ViewChangesetCommit.as_view(), name='repo_commit_url'),
)
|
mattclark/osf.io
|
refs/heads/develop
|
osf/models/spam.py
|
7
|
import abc
import logging
from django.db import models
from django.utils import timezone
from osf.exceptions import ValidationValueError, ValidationTypeError
from osf.utils.datetime_aware_jsonfield import DateTimeAwareJSONField
from osf.utils.fields import NonNaiveDateTimeField
from osf.utils import akismet
from website import settings
logger = logging.getLogger(__name__)
def _get_client():
return akismet.AkismetClient(
apikey=settings.AKISMET_APIKEY,
website=settings.DOMAIN,
verify=True
)
def _validate_reports(value, *args, **kwargs):
from osf.models import OSFUser
for key, val in value.items():
if not OSFUser.load(key):
raise ValidationValueError('Keys must be user IDs')
if not isinstance(val, dict):
raise ValidationTypeError('Values must be dictionaries')
if ('category' not in val or 'text' not in val or 'date' not in val or 'retracted' not in val):
raise ValidationValueError(
'Values must include `date`, `category`, '
'`text`, `retracted` keys'
)
class SpamStatus(object):
UNKNOWN = None
FLAGGED = 1
SPAM = 2
HAM = 4
class SpamMixin(models.Model):
"""Mixin to add to objects that can be marked as spam.
"""
class Meta:
abstract = True
# # Node fields that trigger an update to search on save
# SPAM_UPDATE_FIELDS = {
# 'spam_status',
# }
spam_status = models.IntegerField(default=SpamStatus.UNKNOWN, null=True, blank=True, db_index=True)
spam_pro_tip = models.CharField(default=None, null=True, blank=True, max_length=200)
# Data representing the original spam indication
# - author: author name
# - author_email: email of the author
# - content: data flagged
# - headers: request headers
# - Remote-Addr: ip address from request
# - User-Agent: user agent from request
# - Referer: referrer header from request (the "Referer" misspelling is part of the HTTP spec)
spam_data = DateTimeAwareJSONField(default=dict, blank=True)
date_last_reported = NonNaiveDateTimeField(default=None, null=True, blank=True, db_index=True)
# Reports is a dict of reports keyed on reporting user
# Each report is a dictionary including:
# - date: date reported
# - retracted: if a report has been retracted
# - category: What type of spam does the reporter believe this is
# - text: Comment on the comment
reports = DateTimeAwareJSONField(
default=dict, blank=True, validators=[_validate_reports]
)
def flag_spam(self):
# Only flag content whose status is still unknown; HAM/SPAM decisions stick
if self.spam_status == SpamStatus.UNKNOWN:
self.spam_status = SpamStatus.FLAGGED
def remove_flag(self, save=False):
if self.spam_status != SpamStatus.FLAGGED:
return
for report in self.reports.values():
if not report.get('retracted', True):
return
self.spam_status = SpamStatus.UNKNOWN
if save:
self.save()
@property
def is_spam(self):
return self.spam_status == SpamStatus.SPAM
@property
def is_spammy(self):
return self.spam_status in [SpamStatus.FLAGGED, SpamStatus.SPAM]
def report_abuse(self, user, save=False, **kwargs):
"""Report object is spam or other abuse of OSF
:param user: User submitting report
:param save: Save changes
:param kwargs: Should include category and message
:raises ValueError: if user is reporting self
"""
if user == self.user:
raise ValueError('User cannot report self.')
self.flag_spam()
date = timezone.now()
report = {'date': date, 'retracted': False}
report.update(kwargs)
if 'text' not in report:
report['text'] = None
self.reports[user._id] = report
self.date_last_reported = report['date']
if save:
self.save()
def retract_report(self, user, save=False):
"""Retract last report by user
Only marks the last report as retracted because there could be
history in how the object is edited that requires a user
to flag or retract even if object is marked as HAM.
:param user: User retracting
:param save: Save changes
"""
if user._id in self.reports:
if not self.reports[user._id]['retracted']:
self.reports[user._id]['retracted'] = True
self.remove_flag()
else:
raise ValueError('User has not reported this content')
if save:
self.save()
def confirm_ham(self, save=False):
# not all mixins will implement check spam pre-req, only submit ham when it was incorrectly flagged
if (
settings.SPAM_CHECK_ENABLED and
self.spam_data and self.spam_status in [SpamStatus.FLAGGED, SpamStatus.SPAM]
):
client = _get_client()
client.submit_ham(
user_ip=self.spam_data['headers']['Remote-Addr'],
user_agent=self.spam_data['headers'].get('User-Agent'),
referrer=self.spam_data['headers'].get('Referer'),
comment_content=self.spam_data['content'],
comment_author=self.spam_data['author'],
comment_author_email=self.spam_data['author_email'],
)
logger.info('confirm_ham update sent')
self.spam_status = SpamStatus.HAM
if save:
self.save()
def confirm_spam(self, save=False):
# not all mixins will implement check spam pre-req, only submit spam when it was incorrectly flagged
if (
settings.SPAM_CHECK_ENABLED and
self.spam_data and self.spam_status in [SpamStatus.UNKNOWN, SpamStatus.HAM]
):
client = _get_client()
client.submit_spam(
user_ip=self.spam_data['headers']['Remote-Addr'],
user_agent=self.spam_data['headers'].get('User-Agent'),
referrer=self.spam_data['headers'].get('Referer'),
comment_content=self.spam_data['content'],
comment_author=self.spam_data['author'],
comment_author_email=self.spam_data['author_email'],
)
logger.info('confirm_spam update sent')
self.spam_status = SpamStatus.SPAM
if save:
self.save()
@abc.abstractmethod
def check_spam(self, user, saved_fields, request_headers, save=False):
"""Must return is_spam"""
pass
def do_check_spam(self, author, author_email, content, request_headers, update=True):
if self.spam_status == SpamStatus.HAM:
return False
if self.is_spammy:
return True
client = _get_client()
remote_addr = request_headers['Remote-Addr']
user_agent = request_headers.get('User-Agent')
referer = request_headers.get('Referer')
try:
is_spam, pro_tip = client.check_comment(
user_ip=remote_addr,
user_agent=user_agent,
referrer=referer,
comment_content=content,
comment_author=author,
comment_author_email=author_email
)
except akismet.AkismetClientError:
logger.exception('Error performing SPAM check')
return False
if update:
self.spam_pro_tip = pro_tip
self.spam_data['headers'] = {
'Remote-Addr': remote_addr,
'User-Agent': user_agent,
'Referer': referer,
}
self.spam_data['content'] = content
self.spam_data['author'] = author
self.spam_data['author_email'] = author_email
if is_spam:
self.flag_spam()
return is_spam
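# A minimal sketch of a concrete subclass (hypothetical model; the field
# names and the `user` attribute are assumptions, not part of this module):
#
# class Comment(SpamMixin):
# user = models.ForeignKey('OSFUser', on_delete=models.CASCADE)
# content = models.TextField()
#
# def check_spam(self, user, saved_fields, request_headers, save=False):
# is_spam = self.do_check_spam(
# user.fullname, user.username, self.content, request_headers)
# if save:
# self.save()
# return is_spam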
|
crawln45/Flexget
|
refs/heads/develop
|
flexget/plugins/modify/plugin_priority.py
|
13
|
from __future__ import unicode_literals, division, absolute_import
from builtins import * # noqa pylint: disable=unused-import, redefined-builtin
import logging
from flexget import plugin
from flexget.event import event
log = logging.getLogger('p_priority')
class PluginPriority(object):
"""
Allows modifying plugin priorities from default values.
Example:
plugin_priority:
ignore: 50
series: 100
"""
schema = {'type': 'object', 'additionalProperties': {'type': 'integer'}}
def __init__(self):
self.priorities = {}
def on_task_start(self, task, config):
self.priorities = {}
names = []
for name, priority in config.items():
names.append(name)
originals = self.priorities.setdefault(name, {})
for phase, phase_event in plugin.plugins[name].phase_handlers.items():
originals[phase] = phase_event.priority
log.debug('stored %s original value %s' % (phase, phase_event.priority))
phase_event.priority = priority
log.debug('set %s new value %s' % (phase, priority))
log.debug('Changed priority for: %s' % ', '.join(names))
def on_task_exit(self, task, config):
if not self.priorities:
log.debug('nothing changed, aborting restore')
return
names = []
for name in list(config.keys()):
names.append(name)
originals = self.priorities[name]
for phase, priority in originals.items():
plugin.plugins[name].phase_handlers[phase].priority = priority
log.debug('Restored priority for: %s' % ', '.join(names))
self.priorities = {}
on_task_abort = on_task_exit
@event('plugin.register')
def register_plugin():
plugin.register(PluginPriority, 'plugin_priority', api_ver=2)
|
jianghuaw/nova
|
refs/heads/master
|
nova/tests/unit/utils.py
|
4
|
# Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import errno
import platform
import socket
import sys
import mock
from six.moves import range
from nova.compute import flavors
import nova.conf
import nova.context
import nova.db
from nova import exception
from nova.image import glance
from nova.network import minidns
from nova.network import model as network_model
from nova import objects
from nova.objects import base as obj_base
import nova.utils
CONF = nova.conf.CONF
def get_test_admin_context():
return nova.context.get_admin_context()
def get_test_image_object(context, instance_ref):
if not context:
context = get_test_admin_context()
image_ref = instance_ref['image_ref']
image_service, image_id = glance.get_remote_image_service(context,
image_ref)
return objects.ImageMeta.from_dict(
image_service.show(context, image_id))
def get_test_flavor(context=None, options=None):
options = options or {}
if not context:
context = get_test_admin_context()
test_flavor = {'name': 'kinda.big',
'flavorid': 'someid',
'memory_mb': 2048,
'vcpus': 4,
'root_gb': 40,
'ephemeral_gb': 80,
'swap': 1024}
test_flavor.update(options)
try:
flavor_ref = nova.db.flavor_create(context, test_flavor)
except (exception.FlavorExists, exception.FlavorIdExists):
flavor_ref = nova.db.flavor_get_by_name(context, 'kinda.big')
return flavor_ref
def get_test_instance(context=None, flavor=None, obj=False):
if not context:
context = get_test_admin_context()
if not flavor:
flavor = get_test_flavor(context)
test_instance = {'memory_kb': '2048000',
'basepath': '/some/path',
'bridge_name': 'br100',
'vcpus': 4,
'root_gb': 40,
'bridge': 'br101',
'image_ref': 'cedef40a-ed67-4d10-800e-17455edce175',
'instance_type_id': flavor['id'],
'system_metadata': {},
'extra_specs': {},
'user_id': context.user_id,
'project_id': context.project_id,
}
if obj:
instance = objects.Instance(context, **test_instance)
instance.flavor = objects.Flavor.get_by_id(context, flavor['id'])
instance.create()
else:
flavors.save_flavor_info(test_instance['system_metadata'], flavor, '')
instance = nova.db.instance_create(context, test_instance)
return instance
FAKE_NETWORK_VLAN = 100
FAKE_NETWORK_BRIDGE = 'br0'
FAKE_NETWORK_INTERFACE = 'eth0'
FAKE_NETWORK_IP4_ADDR1 = '10.0.0.73'
FAKE_NETWORK_IP4_ADDR2 = '10.0.0.74'
FAKE_NETWORK_IP6_ADDR1 = '2001:b105:f00d::1'
FAKE_NETWORK_IP6_ADDR2 = '2001:b105:f00d::2'
FAKE_NETWORK_IP6_ADDR3 = '2001:b105:f00d::3'
FAKE_NETWORK_IP4_GATEWAY = '10.0.0.254'
FAKE_NETWORK_IP6_GATEWAY = '2001:b105:f00d::ff'
FAKE_NETWORK_IP4_CIDR = '10.0.0.0/24'
FAKE_NETWORK_IP6_CIDR = '2001:b105:f00d::0/64'
FAKE_NETWORK_DNS_IP4_ADDR1 = '192.168.122.1'
FAKE_NETWORK_DNS_IP4_ADDR2 = '192.168.122.2'
FAKE_NETWORK_DNS_IP6_ADDR1 = '2001:dead:beef::1'
FAKE_NETWORK_DNS_IP6_ADDR2 = '2001:dead:beef::2'
FAKE_NETWORK_DHCP_IP4_ADDR = '192.168.122.253'
FAKE_NETWORK_UUID = '4587c867-a2e6-4356-8c5b-bc077dcb8620'
FAKE_VIF_UUID = '51a9642b-1414-4bd6-9a92-1320ddc55a63'
FAKE_VIF_MAC = 'de:ad:be:ef:ca:fe'
def get_test_network_info(count=1):
ipv6 = CONF.use_ipv6
def current():
subnet_4 = network_model.Subnet(
cidr=FAKE_NETWORK_IP4_CIDR,
dns=[network_model.IP(FAKE_NETWORK_DNS_IP4_ADDR1),
network_model.IP(FAKE_NETWORK_DNS_IP4_ADDR2)],
gateway=network_model.IP(FAKE_NETWORK_IP4_GATEWAY),
ips=[network_model.IP(FAKE_NETWORK_IP4_ADDR1),
network_model.IP(FAKE_NETWORK_IP4_ADDR2)],
routes=None,
dhcp_server=FAKE_NETWORK_DHCP_IP4_ADDR)
subnet_6 = network_model.Subnet(
cidr=FAKE_NETWORK_IP6_CIDR,
gateway=network_model.IP(FAKE_NETWORK_IP6_GATEWAY),
ips=[network_model.IP(FAKE_NETWORK_IP6_ADDR1),
network_model.IP(FAKE_NETWORK_IP6_ADDR2),
network_model.IP(FAKE_NETWORK_IP6_ADDR3)],
routes=None,
version=6)
subnets = [subnet_4]
if ipv6:
subnets.append(subnet_6)
network = network_model.Network(
id=FAKE_NETWORK_UUID,
bridge=FAKE_NETWORK_BRIDGE,
label=None,
subnets=subnets,
vlan=FAKE_NETWORK_VLAN,
bridge_interface=FAKE_NETWORK_INTERFACE,
injected=False)
if CONF.use_neutron:
vif_type = network_model.VIF_TYPE_OVS
else:
vif_type = network_model.VIF_TYPE_BRIDGE
vif = network_model.VIF(
id=FAKE_VIF_UUID,
address=FAKE_VIF_MAC,
network=network,
type=vif_type,
devname=None,
ovs_interfaceid=None)
return vif
return network_model.NetworkInfo([current() for x in range(0, count)])
def is_osx():
return platform.mac_ver()[0] != ''
def is_linux():
return platform.system() == 'Linux'
def coreutils_readlink_available():
_out, err = nova.utils.trycmd('readlink', '-nm', '/')
return err == ''
test_dns_managers = []
def dns_manager():
global test_dns_managers
manager = minidns.MiniDNS()
test_dns_managers.append(manager)
return manager
def cleanup_dns_managers():
global test_dns_managers
for manager in test_dns_managers:
manager.delete_dns_file()
test_dns_managers = []
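# Builds a "billion laughs"-style XML payload (nested entity expansion) used
# to check that XML parsers under test reject entity-expansion attacks.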
def killer_xml_body():
return (("""<!DOCTYPE x [
<!ENTITY a "%(a)s">
<!ENTITY b "%(b)s">
<!ENTITY c "%(c)s">]>
<foo>
<bar>
<v1>%(d)s</v1>
</bar>
</foo>""") % {
'a': 'A' * 10,
'b': '&a;' * 10,
'c': '&b;' * 10,
'd': '&c;' * 9999,
}).strip()
def is_ipv6_supported():
has_ipv6_support = socket.has_ipv6
try:
s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
s.close()
except socket.error as e:
if e.errno == errno.EAFNOSUPPORT:
has_ipv6_support = False
else:
raise
# check if there is at least one interface with ipv6
if has_ipv6_support and sys.platform.startswith('linux'):
try:
with open('/proc/net/if_inet6') as f:
if not f.read():
has_ipv6_support = False
except IOError:
has_ipv6_support = False
return has_ipv6_support
def get_api_version(request):
if request.path[2:3].isdigit():
return int(request.path[2:3])
def compare_obj_primitive(thing1, thing2):
if isinstance(thing1, obj_base.NovaObject):
return thing1.obj_to_primitive() == thing2.obj_to_primitive()
else:
return thing1 == thing2
def _compare_args(args1, args2, cmp):
return all(cmp(*pair) for pair in zip(args1, args2))
def _compare_kwargs(kwargs1, kwargs2, cmp):
return all(cmp(kwargs1[k], kwargs2[k])
for k in set(list(kwargs1.keys()) + list(kwargs2.keys())))
def _obj_called_with(the_mock, *args, **kwargs):
if 'obj_cmp' in kwargs:
cmp = kwargs.pop('obj_cmp')
else:
cmp = compare_obj_primitive
count = 0
for call in the_mock.call_args_list:
if (_compare_args(call[0], args, cmp) and
_compare_kwargs(call[1], kwargs, cmp)):
count += 1
return count
def obj_called_with(the_mock, *args, **kwargs):
return _obj_called_with(the_mock, *args, **kwargs) != 0
def obj_called_once_with(the_mock, *args, **kwargs):
return _obj_called_with(the_mock, *args, **kwargs) == 1
class CustomMockCallMatcher(object):
def __init__(self, comparator):
self.comparator = comparator
def __eq__(self, other):
return self.comparator(other)
def assert_instance_delete_notification_by_uuid(
mock_notify, expected_instance_uuid, expected_notifier,
expected_context):
match_by_instance_uuid = CustomMockCallMatcher(
lambda instance:
instance.uuid == expected_instance_uuid)
mock_notify.assert_has_calls([
mock.call(expected_notifier,
expected_context,
match_by_instance_uuid,
'delete.start'),
mock.call(expected_notifier,
expected_context,
match_by_instance_uuid,
'delete.end',
system_metadata={})])
|
gibil5/openhealth
|
refs/heads/master
|
models/product/exc_vars.py
|
1
|
# -*- coding: utf-8 -*-
# Used by: Product Template
_family_list = [
# Products
'topical',
'card',
'kit',
# Services
'consultation',
'laser',
'medical',
'cosmetology',
'gynecology',
'promotion',
'echography',
'other',
'test',
]
_subfamily_list = [
# Products
'chavarri',
'commercial',
# Consultation
'consultation',
# Laser
'co2',
'quick',
'excilite',
'm22',
# Cosmetology
#'cosmetology', # Dep
'carboxytherapy',
'diamond_tip',
'laser_triactive_carboxytherapy',
# Medical
#'medical', # Dep
'botox',
'cryosurgery',
'hyaluronic_acid',
'infiltrations',
'mesotherapy',
'plasma',
'redux',
'sclerotherapy',
'vitamin_c_intravenous',
# New
'echography',
'gynecology',
'promotion',
'other',
'commission',
'test',
]
|
eusoubrasileiro/fatiando_seismic
|
refs/heads/sim-class-migration
|
cookbook/seismic_wavefd_love_wave.py
|
9
|
"""
Seismic: 2D finite difference simulation of elastic SH wave propagation in a
medium with a discontinuity (i.e., Moho), generating Love waves.
"""
import numpy as np
from matplotlib import animation
from fatiando import gridder
from fatiando.seismic import wavefd
from fatiando.vis import mpl
# Set the parameters of the finite difference grid
shape = (200, 1000)
area = [0, 800000, 0, 160000]
# Make a density and S wave velocity model
density = 2400 * np.ones(shape)
svel = 3500 * np.ones(shape)
moho = 50
density[moho:] = 2800
svel[moho:] = 4500
mu = wavefd.lame_mu(svel, density)
# Make a wave source from a mexican hat wavelet
sources = [wavefd.MexHatSource(
10000, 10000, area, shape, 100000, 0.5, delay=2)]
# Get the iterator. This part only generates an iterator object. The actual
# computations take place at each iteration in the for loop below
dt = wavefd.maxdt(area, shape, svel.max())
duration = 250
maxit = int(duration / dt)
stations = [[100000, 0], [700000, 0]]
snapshots = int(1. / dt)
simulation = wavefd.elastic_sh(mu, density, area, dt, maxit, sources, stations,
snapshots, padding=70, taper=0.005)
# This part makes an animation using matplotlibs animation API
background = svel * 5 * 10 ** -7
fig = mpl.figure(figsize=(10, 8))
mpl.subplots_adjust(right=0.98, left=0.11, hspace=0.3, top=0.93)
mpl.subplot(3, 1, 1)
mpl.title('Seismogram 1')
seismogram1, = mpl.plot([], [], '-k')
mpl.xlim(0, duration)
mpl.ylim(-0.1, 0.1)
mpl.ylabel('Amplitude')
mpl.subplot(3, 1, 2)
mpl.title('Seismogram 2')
seismogram2, = mpl.plot([], [], '-k')
mpl.xlim(0, duration)
mpl.ylim(-0.1, 0.1)
mpl.ylabel('Amplitude')
ax = mpl.subplot(3, 1, 3)
mpl.title('time: 0.0 s')
wavefield = mpl.imshow(background, extent=area, cmap=mpl.cm.gray_r,
vmin=-0.005, vmax=0.005)
mpl.points(stations, '^b', size=8)
mpl.text(750000, 20000, 'Crust')
mpl.text(740000, 100000, 'Mantle')
fig.text(0.82, 0.33, 'Seismometer 2')
fig.text(0.16, 0.33, 'Seismometer 1')
mpl.ylim(area[2:][::-1])
mpl.xlabel('x (km)')
mpl.ylabel('z (km)')
mpl.m2km()
times = np.linspace(0, dt * maxit, maxit)
# This function updates the plot every few timesteps
def animate(i):
t, u, seismogram = simulation.next()
mpl.title('time: %0.1f s' % (times[t]))
wavefield.set_array((background + u)[::-1])
seismogram1.set_data(times[:t + 1], seismogram[0][:t + 1])
seismogram2.set_data(times[:t + 1], seismogram[1][:t + 1])
return wavefield, seismogram1, seismogram2
anim = animation.FuncAnimation(
fig, animate, frames=maxit / snapshots, interval=1)
mpl.show()
|
TanguyPatte/phantomjs-packaging
|
refs/heads/master
|
src/qt/qtwebkit/Tools/QueueStatusServer/main.py
|
119
|
# Copyright (C) 2009 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Request a modern Django
from google.appengine.dist import use_library
use_library('django', '1.2') # Must agree with __init__.py
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
from handlers.activebots import ActiveBots
from handlers.dashboard import Dashboard
from handlers.gc import GC
from handlers.nextpatch import NextPatch
from handlers.patch import Patch
from handlers.patchstatus import PatchStatus
from handlers.queuecharts import QueueCharts
from handlers.queuestatus import QueueStatus
from handlers.recentstatus import QueuesOverview
from handlers.releasepatch import ReleasePatch
from handlers.showresults import ShowResults
from handlers.statusbubble import StatusBubble
from handlers.submittoews import SubmitToEWS
from handlers.svnrevision import SVNRevision
from handlers.syncqueuelogs import SyncQueueLogs
from handlers.updatestatus import UpdateStatus
from handlers.updatesvnrevision import UpdateSVNRevision
from handlers.updateworkitems import UpdateWorkItems
webapp.template.register_template_library('filters.webkit_extras')
routes = [
('/', QueuesOverview),
('/dashboard', Dashboard),
('/gc', GC),
('/sync-queue-logs', SyncQueueLogs),
(r'/patch-status/(.*)/(.*)', PatchStatus),
(r'/patch/(.*)', Patch),
(r'/submit-to-ews', SubmitToEWS),
(r'/results/(.*)', ShowResults),
(r'/status-bubble/(.*)', StatusBubble),
(r'/svn-revision/(.*)', SVNRevision),
(r'/queue-charts/(.*)', QueueCharts),
(r'/queue-status/(.*)/bots/(.*)', QueueStatus),
(r'/queue-status/(.*)', QueueStatus),
(r'/next-patch/(.*)', NextPatch),
(r'/release-patch', ReleasePatch),
('/update-status', UpdateStatus),
('/update-work-items', UpdateWorkItems),
('/update-svn-revision', UpdateSVNRevision),
('/active-bots', ActiveBots),
]
application = webapp.WSGIApplication(routes, debug=True)
def main():
run_wsgi_app(application)
if __name__ == "__main__":
main()
|
phillxnet/rockstor-core
|
refs/heads/master
|
src/rockstor/storageadmin/models/validators.py
|
2
|
"""
Copyright (c) 2012-2020 RockStor, Inc. <http://rockstor.com>
This file is part of RockStor.
RockStor is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation; either version 2 of the License,
or (at your option) any later version.
RockStor is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import re
from django.core.exceptions import ValidationError
from django.core.validators import validate_ipv46_address
def validate_nfs_host_str(value):
error_count = 0
host_regex = (
"^(([a-zA-Z0-9\*]|[a-zA-Z0-9\*][a-zA-Z0-9\-\*]*"
"[a-zA-Z0-9\*])\.)*([A-Za-z0-9\*]|[A-Za-z0-9\*]"
"[A-Za-z0-9\-\*]*[A-Za-z0-9\*])$"
)
if re.match(host_regex, value) is None:
error_count += 1
# ip networks
try:
validate_ipv46_address(value)
except ValidationError:
error_count += 1
if error_count == 2:
raise ValidationError("Invalid host string: %s" % value)
def validate_nfs_modify_str(value):
if value != "ro" and value != "rw":
raise ValidationError("Invalid mod choice. Possible options: ro, rw")
def validate_nfs_sync_choice(value):
if value != "async" and value != "sync":
msg = "Invalid sync choice. Possible options: async, sync"
raise ValidationError(msg)
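# A minimal usage sketch (assumes a configured Django settings module, which
# these validators need only when rendering error messages):
#
# validate_nfs_host_str("*.example.com") # passes: wildcard hostname
# validate_nfs_host_str("10.0.0.1") # passes: IPv4 address
# validate_nfs_host_str("bad_host!") # raises ValidationError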
|
vaporry/pyethereum
|
refs/heads/master
|
ethereum/tester.py
|
2
|
import shutil
import tempfile
import time
from ethereum import spv
import ethereum
import ethereum.db as db
import ethereum.opcodes as opcodes
import ethereum.abi as abi
from ethereum.slogging import LogRecorder, configure_logging, set_level
from ethereum.utils import to_string
from ethereum.config import Env
from ethereum._solidity import get_solidity
import rlp
from rlp.utils import decode_hex, encode_hex, ascii_chr
serpent = None
u = ethereum.utils
t = ethereum.transactions
b = ethereum.blocks
pb = ethereum.processblock
vm = ethereum.vm
accounts = []
keys = []
for i in range(10):
keys.append(u.sha3(to_string(i)))
accounts.append(u.privtoaddr(keys[-1]))
k0, k1, k2, k3, k4, k5, k6, k7, k8, k9 = keys[:10]
a0, a1, a2, a3, a4, a5, a6, a7, a8, a9 = accounts[:10]
languages = {}
_solidity = get_solidity()
if _solidity:
languages['solidity'] = _solidity
seed = 3 ** 160
def dict_without(d, *args):
o = {}
for k, v in list(d.items()):
if k not in args:
o[k] = v
return o
def dict_with(d, **kwargs):
o = {}
for k, v in list(d.items()):
o[k] = v
for k, v in list(kwargs.items()):
o[k] = v
return o
# Pseudo-RNG (deterministic for now for testing purposes)
def rand():
global seed
seed = pow(seed, 2, 2 ** 512)
return seed % 2 ** 256
class TransactionFailed(Exception):
pass
class ContractCreationFailed(Exception):
pass
class ABIContract():
def __init__(self, _state, _abi, address, listen=True, log_listener=None):
self.address = address
self._translator = abi.ContractTranslator(_abi)
self.abi = _abi
if listen:
if not log_listener:
listener = lambda log: self._translator.listen(log, noprint=False)
else:
def listener(log):
r = self._translator.listen(log, noprint=True)
if r:
log_listener(r)
_state.block.log_listeners.append(listener)
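# Build one Python callable per ABI function: calling it encodes the
# arguments, sends a transaction to the contract and decodes the output.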
def kall_factory(f):
def kall(*args, **kwargs):
o = _state._send(kwargs.get('sender', k0),
self.address,
kwargs.get('value', 0),
self._translator.encode(f, args),
**dict_without(kwargs, 'sender', 'value', 'output'))
# Compute output data
if kwargs.get('output', '') == 'raw':
outdata = o['output']
elif not o['output']:
outdata = None
else:
outdata = self._translator.decode(f, o['output'])
outdata = outdata[0] if len(outdata) == 1 else outdata
# Format output
if kwargs.get('profiling', ''):
return dict_with(o, output=outdata)
else:
return outdata
return kall
for f in self._translator.function_data:
vars(self)[f] = kall_factory(f)
class state():
def __init__(self, num_accounts=len(keys)):
global serpent
if not serpent:
serpent = __import__('serpent')
self.temp_data_dir = tempfile.mkdtemp()
self.db = db.EphemDB()
self.env = Env(self.db)
o = {}
for i in range(num_accounts):
o[accounts[i]] = {"wei": 10 ** 24}
for i in range(1, 5):
o[u.int_to_addr(i)] = {"wei": 1}
self.block = b.genesis(self.env, start_alloc=o)
self.blocks = [self.block]
self.block.timestamp = 1410973349
self.block.coinbase = a0
self.block.gas_limit = 10 ** 9
def __del__(self):
shutil.rmtree(self.temp_data_dir)
def contract(self, code, sender=k0, endowment=0, language='serpent', gas=None):
if language not in languages:
languages[language] = __import__(language)
language = languages[language]
evm = language.compile(code)
o = self.evm(evm, sender, endowment)
assert len(self.block.get_code(o)), "Contract code empty"
return o
def abi_contract(self, code, sender=k0, endowment=0, language='serpent', contract_name='',
gas=None, log_listener=None, listen=True):
if contract_name:
assert language == 'solidity'
cn_args = dict(contract_name=contract_name)
else:
cn_args = {}
if language not in languages:
languages[language] = __import__(language)
language = languages[language]
evm = language.compile(code, **cn_args)
address = self.evm(evm, sender, endowment, gas)
assert len(self.block.get_code(address)), "Contract code empty"
_abi = language.mk_full_signature(code, **cn_args)
return ABIContract(self, _abi, address, listen=listen, log_listener=log_listener)
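    # Illustrative only -- a typical round trip through the helpers above
    # (the serpent source and method name are hypothetical):
    #
    #     s = state()
    #     code = "def double(x):\n    return(x * 2)"
    #     c = s.abi_contract(code)
    #     assert c.double(21) == 42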
def evm(self, evm, sender=k0, endowment=0, gas=None):
sendnonce = self.block.get_nonce(u.privtoaddr(sender))
tx = t.contract(sendnonce, gas_price, gas_limit, endowment, evm)
tx.sign(sender)
if gas is not None:
tx.startgas = gas
# print('starting', tx.startgas, gas_limit)
(s, a) = pb.apply_transaction(self.block, tx)
if not s:
raise ContractCreationFailed()
return a
def call(*args, **kwargs):
raise Exception("Call deprecated. Please use the abi_contract "
"mechanism or send(sender, to, value, "
"data) directly, using the abi module to generate "
"data if needed")
def _send(self, sender, to, value, evmdata='', output=None,
funid=None, abi=None, profiling=0):
if funid is not None or abi is not None:
raise Exception("Send with funid+abi is deprecated. Please use"
" the abi_contract mechanism")
tm, g = time.time(), self.block.gas_used
sendnonce = self.block.get_nonce(u.privtoaddr(sender))
tx = t.Transaction(sendnonce, gas_price, gas_limit, to, value, evmdata)
self.last_tx = tx
tx.sign(sender)
recorder = LogRecorder() if profiling > 1 else None
(s, o) = pb.apply_transaction(self.block, tx)
if not s:
raise TransactionFailed()
out = {"output": o}
if profiling > 0:
zero_bytes = tx.data.count(ascii_chr(0))
non_zero_bytes = len(tx.data) - zero_bytes
intrinsic_gas_used = opcodes.GTXDATAZERO * zero_bytes + \
opcodes.GTXDATANONZERO * non_zero_bytes
ntm, ng = time.time(), self.block.gas_used
out["time"] = ntm - tm
out["gas"] = ng - g - intrinsic_gas_used
if profiling > 1:
trace = recorder.pop_records()
ops = [x['op'] for x in trace if x['event'] == 'vm']
opdict = {}
for op in ops:
opdict[op] = opdict.get(op, 0) + 1
out["ops"] = opdict
return out
def profile(self, *args, **kwargs):
kwargs['profiling'] = True
return self._send(*args, **kwargs)
def send(self, *args, **kwargs):
return self._send(*args, **kwargs)["output"]
def mkspv(self, sender, to, value, data=[], funid=None, abi=None):
sendnonce = self.block.get_nonce(u.privtoaddr(sender))
if funid is not None:
evmdata = serpent.encode_abi(funid, *abi)
else:
evmdata = serpent.encode_datalist(*data)
tx = t.Transaction(sendnonce, gas_price, gas_limit, to, value, evmdata)
self.last_tx = tx
tx.sign(sender)
return spv.mk_transaction_spv_proof(self.block, tx)
def verifyspv(self, sender, to, value, data=[],
funid=None, abi=None, proof=[]):
sendnonce = self.block.get_nonce(u.privtoaddr(sender))
if funid is not None:
evmdata = serpent.encode_abi(funid, *abi)
else:
evmdata = serpent.encode_datalist(*data)
tx = t.Transaction(sendnonce, gas_price, gas_limit, to, value, evmdata)
self.last_tx = tx
tx.sign(sender)
return spv.verify_transaction_spv_proof(self.block, tx, proof)
def trace(self, sender, to, value, data=[]):
# collect log events (independent of loglevel filters)
recorder = LogRecorder()
self.send(sender, to, value, data)
return recorder.pop_records()
def mine(self, n=1, coinbase=a0):
for i in range(n):
self.block.finalize()
self.block.commit_state()
self.db.put(self.block.hash, rlp.encode(self.block))
t = self.block.timestamp + 6 + rand() % 12
x = b.Block.init_from_parent(self.block, coinbase, timestamp=t)
# copy listeners
x.log_listeners = self.block.log_listeners
self.block = x
self.blocks.append(self.block)
def snapshot(self):
return rlp.encode(self.block)
def revert(self, data):
self.block = rlp.decode(data, b.Block, env=self.env)
# logging
def set_logging_level(lvl=0):
trace_lvl_map = [
':info',
'eth.vm.log:trace',
':info,eth.vm.log:trace,eth.vm.exit:trace',
':info,eth.vm.log:trace,eth.vm.op:trace,eth.vm.stack:trace',
':info,eth.vm.log:trace,eth.vm.op:trace,eth.vm.stack:trace,' +
'eth.vm.storage:trace,eth.vm.memory:trace'
]
configure_logging(config_string=trace_lvl_map[lvl])
if lvl == 0:
set_level(None, 'info')
print('Set logging level: %d' % lvl)
def set_log_trace(logger_names=[]):
"""
sets all named loggers to level 'trace'
attention: vm.op.* are only active if vm.op is active
"""
for name in logger_names:
assert name in slogging.get_logger_names()
slogging.set_level(name, 'trace')
def enable_logging():
set_logging_level(1)
def disable_logging():
set_logging_level(0)
gas_limit = 1000000
gas_price = 1
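# A minimal end-to-end sketch of this module (assumes serpent is installed;
# the flow below is illustrative, not a test in itself):
#
#     s = state()                   # fresh genesis chain with funded a0..a9
#     snap = s.snapshot()           # RLP-encoded copy of the head block
#     s.send(k0, a1, 10 ** 18)      # transfer 1 ether from a0 to a1
#     s.mine(3)                     # advance three blocks
#     s.revert(snap)                # roll the chain back to the snapshot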
|
maljac/connector
|
refs/heads/8.0
|
connector/unit/synchronizer.py
|
18
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Guewen Baconnier
# Copyright 2013 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from ..connector import ConnectorUnit
from .mapper import Mapper, ImportMapper, ExportMapper
from .backend_adapter import BackendAdapter
class Synchronizer(ConnectorUnit):
""" Base class for synchronizers """
# implement in sub-classes
_model_name = None
_base_mapper = Mapper
_base_backend_adapter = BackendAdapter
def __init__(self, connector_env):
super(Synchronizer, self).__init__(connector_env)
self._backend_adapter = None
self._binder = None
self._mapper = None
def run(self):
""" Run the synchronization """
raise NotImplementedError
@property
def mapper(self):
""" Return an instance of ``Mapper`` for the synchronization.
The instanciation is delayed because some synchronisations do
not need such an unit and the unit may not exist.
:rtype: :py:class:`connector.unit.mapper.Mapper`
"""
if self._mapper is None:
self._mapper = self.unit_for(self._base_mapper)
return self._mapper
@property
def binder(self):
""" Return an instance of ``Binder`` for the synchronization.
The instanciation is delayed because some synchronisations do
not need such an unit and the unit may not exist.
:rtype: :py:class:`connector.unit.binder.Binder`
"""
if self._binder is None:
self._binder = self.binder_for()
return self._binder
@property
def backend_adapter(self):
""" Return an instance of ``BackendAdapter`` for the
synchronization.
The instanciation is delayed because some synchronisations do
not need such an unit and the unit may not exist.
:rtype: :py:class:`connector.unit.backend_adapter.BackendAdapter`
"""
if self._backend_adapter is None:
self._backend_adapter = self.unit_for(self._base_backend_adapter)
return self._backend_adapter
class Exporter(Synchronizer):
""" Synchronizer for exporting data from OpenERP to a backend """
_base_mapper = ExportMapper
ExportSynchronizer = Exporter # deprecated
class Importer(Synchronizer):
""" Synchronizer for importing data from a backend to OpenERP """
_base_mapper = ImportMapper
ImportSynchronizer = Importer # deprecated
class Deleter(Synchronizer):
""" Synchronizer for deleting a record on the backend """
DeleteSynchronizer = Deleter # deprecated
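# Illustrative subclass sketch (model name, adapter method and arguments are
# hypothetical, not part of this module):
#
#     class PartnerImporter(Importer):
#         _model_name = 'res.partner'
#
#         def run(self, external_id):
#             record = self.backend_adapter.read(external_id)
#             ...  # map `record` with self.mapper, then bind via self.binder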
|
NeovaHealth/odoo
|
refs/heads/8.0
|
openerp/tests/addons/test_translation_import/tests/test_term_count.py
|
323
|
# -*- coding: utf-8 -*-
import openerp
from openerp.tests import common
class TestTermCount(common.TransactionCase):
def test_count_term(self):
"""
Just make sure we have as many translation entries as we wanted.
"""
openerp.tools.trans_load(self.cr, 'test_translation_import/i18n/fr.po', 'fr_FR', verbose=False)
ids = self.registry('ir.translation').search(self.cr, self.uid,
[('src', '=', '1XBUO5PUYH2RYZSA1FTLRYS8SPCNU1UYXMEYMM25ASV7JC2KTJZQESZYRV9L8CGB')])
self.assertEqual(len(ids), 2)
|
Centre-Alt-Rendiment-Esportiu/att
|
refs/heads/master
|
old_project/Python/win_libs/scipy/sparse/tests/test_spfuncs.py
|
122
|
from __future__ import division, print_function, absolute_import
from numpy import array, kron, matrix, diag
from numpy.testing import TestCase, run_module_suite, assert_, assert_equal
from scipy.sparse import spfuncs
from scipy.sparse import csr_matrix, csc_matrix, bsr_matrix
from scipy.sparse._sparsetools import csr_scale_rows, csr_scale_columns, \
bsr_scale_rows, bsr_scale_columns
class TestSparseFunctions(TestCase):
def test_scale_rows_and_cols(self):
D = matrix([[1,0,0,2,3],
[0,4,0,5,0],
[0,0,6,7,0]])
#TODO expose through function
S = csr_matrix(D)
v = array([1,2,3])
csr_scale_rows(3,5,S.indptr,S.indices,S.data,v)
assert_equal(S.todense(), diag(v)*D)
S = csr_matrix(D)
v = array([1,2,3,4,5])
csr_scale_columns(3,5,S.indptr,S.indices,S.data,v)
assert_equal(S.todense(), D*diag(v))
# blocks
E = kron(D,[[1,2],[3,4]])
S = bsr_matrix(E,blocksize=(2,2))
v = array([1,2,3,4,5,6])
bsr_scale_rows(3,5,2,2,S.indptr,S.indices,S.data,v)
assert_equal(S.todense(), diag(v)*E)
S = bsr_matrix(E,blocksize=(2,2))
v = array([1,2,3,4,5,6,7,8,9,10])
bsr_scale_columns(3,5,2,2,S.indptr,S.indices,S.data,v)
assert_equal(S.todense(), E*diag(v))
E = kron(D,[[1,2,3],[4,5,6]])
S = bsr_matrix(E,blocksize=(2,3))
v = array([1,2,3,4,5,6])
bsr_scale_rows(3,5,2,3,S.indptr,S.indices,S.data,v)
assert_equal(S.todense(), diag(v)*E)
S = bsr_matrix(E,blocksize=(2,3))
v = array([1,2,3,4,5,6,7,8,9,10,11,12,13,14,15])
bsr_scale_columns(3,5,2,3,S.indptr,S.indices,S.data,v)
assert_equal(S.todense(), E*diag(v))
def test_estimate_blocksize(self):
mats = []
mats.append([[0,1],[1,0]])
mats.append([[1,1,0],[0,0,1],[1,0,1]])
mats.append([[0],[0],[1]])
mats = [array(x) for x in mats]
blks = []
blks.append([[1]])
blks.append([[1,1],[1,1]])
blks.append([[1,1],[0,1]])
blks.append([[1,1,0],[1,0,1],[1,1,1]])
blks = [array(x) for x in blks]
for A in mats:
for B in blks:
X = kron(A,B)
r,c = spfuncs.estimate_blocksize(X)
assert_(r >= B.shape[0])
assert_(c >= B.shape[1])
def test_count_blocks(self):
def gold(A,bs):
R,C = bs
I,J = A.nonzero()
return len(set(zip(I//R,J//C)))
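        # gold() above is the reference implementation: I//R and J//C are the
        # block coordinates of each nonzero entry, so the set size is the
        # number of distinct R-by-C blocks containing at least one nonzero.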
mats = []
mats.append([[0]])
mats.append([[1]])
mats.append([[1,0]])
mats.append([[1,1]])
mats.append([[0,1],[1,0]])
mats.append([[1,1,0],[0,0,1],[1,0,1]])
mats.append([[0],[0],[1]])
for A in mats:
for B in mats:
X = kron(A,B)
Y = csr_matrix(X)
for R in range(1,6):
for C in range(1,6):
assert_equal(spfuncs.count_blocks(Y, (R, C)), gold(X, (R, C)))
X = kron([[1,1,0],[0,0,1],[1,0,1]],[[1,1]])
Y = csc_matrix(X)
assert_equal(spfuncs.count_blocks(X, (1, 2)), gold(X, (1, 2)))
assert_equal(spfuncs.count_blocks(Y, (1, 2)), gold(X, (1, 2)))
if __name__ == "__main__":
run_module_suite()
|
zjsxzy/datahub
|
refs/heads/master
|
src/apps/viz2/urls.py
|
7
|
from django.conf.urls import patterns, include, url
urlpatterns = patterns('',
url(r'^$', 'viz2.views.index'),
)
|
bernardorufino/pick
|
refs/heads/master
|
src/view.py
|
1
|
import curses
from utils import printstr
class View(object):
DIRECTIONS = {
curses.KEY_UP: (-1, 0),
curses.KEY_RIGHT: (0, 1),
curses.KEY_DOWN: (1, 0),
curses.KEY_LEFT: (0, -1),
}
MARGIN = (1, 2)
def __init__(self, table, selectors, output_processors, delimiter):
self._table = table
self.delimiter = delimiter
self.table_offset = (0, 0)
self._selectors = selectors[:]
self._set_selector(0)
self._output_processors = output_processors[:]
self._set_output_processor(0)
# To be assigned when run is called by curses
self.screen = None
self.table_pad = None
self.output_pad = None
def _set_selector(self, i):
old_position = self._selector.position if hasattr(self, '_selector') else (0, 0)
self._selector = self._selectors[i]
self._selector.setup(self._table, self)
self._selector.position = old_position
def _set_output_processor(self, i):
self._output_processor = self._output_processors[i]
self._output_processor.setup(self._table, self.delimiter)
def _setup_curses(self, screen):
curses.curs_set(0)
curses.init_pair(1, 251, curses.COLOR_BLACK)
curses.init_pair(2, curses.COLOR_BLACK, curses.COLOR_YELLOW)
curses.init_pair(3, curses.COLOR_GREEN, curses.COLOR_BLACK)
curses.init_pair(4, 237, curses.COLOR_BLACK)
curses.init_pair(5, curses.COLOR_WHITE, curses.COLOR_RED)
curses.init_pair(6, curses.COLOR_WHITE, curses.COLOR_BLUE)
curses.init_pair(7, 240, curses.COLOR_BLACK)
self.screen = screen
self.table_pad = curses.newpad(self._table.height + 1, self._table.width)
# Have to account for header size (width and height)
# TODO: Remove hard-coded 50 value
self.output_pad = curses.newpad(self._table.ncells + 2, max(self._table.width, 50))
def run(self, screen):
self._setup_curses(screen)
self.draw(resizing=False, redraw_output=True)
while True:
c = self.screen.getch()
redraw_output = False
if c == ord('q'):
return
elif c == ord('i'):
i = self._selectors.index(self._selector)
i = (i + 1) % len(self._selectors)
self._set_selector(i)
elif c == ord('o'):
# TODO: Support switch from table to list with different orientations (row-first or column-first)
i = self._next_output_processor_index()
self._set_output_processor(i)
redraw_output = True
elif c == ord('p'):
return self._table.get(self._selector.position)
elif c in self.DIRECTIONS:
di, dj = self.DIRECTIONS[c]
self._selector.move(di, dj)
elif self.is_enter(c):
return self._output_processor.process()
else:
# TODO: Revisit selector precedence over input handling (mainly capture <enter>)
redraw_output = self._selector.handle_input(c)
resizing = (c == -1)
self.draw(resizing=resizing, redraw_output=redraw_output)
def _next_output_processor_index(self):
i = self._output_processors.index(self._output_processor)
i = (i + 1) % len(self._output_processors)
return i
def draw(self, resizing, redraw_output):
# Compute table_pad dimensions
top_margin, left_margin = self.MARGIN
mi, mj = self.screen.getmaxyx()
        table_pad_height = mi // 2 - top_margin  # floor division keeps this an int on Python 3 too
table_pad_width = mj - left_margin
# Clear all pads and windows
self.screen.clear()
self._selector.clear(self.table_pad)
if redraw_output:
self.output_pad.clear()
# Draw table
self._selector.draw(self.table_pad)
# Scroll up/down
top_offset, left_offset = self.table_offset
i, j = self._selector.position
if i > top_offset + table_pad_height - 1:
top_offset += 1
elif i < top_offset:
top_offset -= 1
        # Scroll left/right
        # There's no guarantee that shifting the table one column to the right will make the entire column of the
        # current position visible, because unlike rows, columns can have variable width. So we shift until the
        # column is fully visible.
shift_left = lambda left: self._table.column_offset(j + 1) > self._table.column_offset(left) + table_pad_width - 1
if shift_left(left_offset):
while shift_left(left_offset):
left_offset += 1
elif resizing:
while left_offset >= 1 and self._table.column_offset(j + 1) - self._table.column_offset(left_offset - 1) < table_pad_width:
left_offset -= 1
if j < left_offset:
left_offset -= 1
# Set h/v scroll
self.table_offset = (top_offset, left_offset)
# Draw instructions
self.screen.move(top_margin + table_pad_height + 1, left_margin)
h1, _ = self.screen.getyx()
self._selector.draw_instructions(self.screen)
printstr(self.screen, self._output_processor.name, curses.color_pair(7))
next_output_processor = self._output_processors[self._next_output_processor_index()]
printstr(self.screen, " [o] {} mode".format(next_output_processor.name), curses.color_pair(3))
printstr(self.screen)
h2, _ = self.screen.getyx()
instructions_h = h2 - h1
# Output preview
self._output_processor.draw_preview(self.output_pad)
# Refresh
self.screen.noutrefresh()
self.table_pad.noutrefresh(top_offset, self._table.column_offset(left_offset), top_margin, left_margin,
top_margin + table_pad_height - 1, left_margin + table_pad_width - 1)
self.output_pad.noutrefresh(0, 0, top_margin + table_pad_height + instructions_h + 1, left_margin, mi - 1, mj - 1)
curses.doupdate()
@staticmethod
def is_enter(c):
return c == ord('\n') or c == curses.KEY_ENTER
|
EdLogan18/logan-repository
|
refs/heads/master
|
plugin.video.SportsDevil/dateutil/tz/__init__.py
|
105
|
from .tz import *
__all__ = ["tzutc", "tzoffset", "tzlocal", "tzfile", "tzrange",
"tzstr", "tzical", "tzwin", "tzwinlocal", "gettz"]
|
mitocw/edx-platform
|
refs/heads/master
|
lms/djangoapps/commerce/constants.py
|
6
|
""" Constants for this app as well as the external API. """
class OrderStatus(object):
"""Constants representing all known order statuses. """
OPEN = 'Open'
FULFILLMENT_ERROR = 'Fulfillment Error'
COMPLETE = 'Complete'
class Messages(object):
""" Strings used to populate response messages. """
NO_ECOM_API = u'E-Commerce API not setup. Enrolled {username} in {course_id} directly.'
NO_SKU_ENROLLED = u'The {enrollment_mode} mode for {course_id}, {course_name}, does not have a SKU. Enrolling ' \
u'{username} directly. Course announcement is {announcement}.'
ENROLL_DIRECTLY = u'Enroll {username} in {course_id} directly because no need for E-Commerce baskets and orders.'
ORDER_COMPLETED = u'Order {order_number} was completed.'
ORDER_INCOMPLETE_ENROLLED = u'Order {order_number} was created, but is not yet complete. User was enrolled.'
NO_HONOR_MODE = u'Course {course_id} does not have an honor mode.'
NO_DEFAULT_ENROLLMENT_MODE = u'Course {course_id} does not have an honor or audit mode.'
ENROLLMENT_EXISTS = u'User {username} is already enrolled in {course_id}.'
ENROLLMENT_CLOSED = u'Enrollment is closed for {course_id}.'
|
shichao-an/python-daemon
|
refs/heads/master
|
daemon/runner.py
|
9
|
# -*- coding: utf-8 -*-
# daemon/runner.py
# Part of ‘python-daemon’, an implementation of PEP 3143.
#
# Copyright © 2009–2015 Ben Finney <ben+python@benfinney.id.au>
# Copyright © 2007–2008 Robert Niederreiter, Jens Klein
# Copyright © 2003 Clark Evans
# Copyright © 2002 Noah Spurrier
# Copyright © 2001 Jürgen Hermann
#
# This is free software: you may copy, modify, and/or distribute this work
# under the terms of the Apache License, version 2.0 as published by the
# Apache Software Foundation.
# No warranty expressed or implied. See the file ‘LICENSE.ASF-2’ for details.
""" Daemon runner library.
"""
from __future__ import (absolute_import, unicode_literals)
import sys
import os
import signal
import errno
try:
# Python 3 standard library.
ProcessLookupError
except NameError:
# No such class in Python 2.
ProcessLookupError = NotImplemented
import lockfile
from . import pidfile
from .daemon import (basestring, unicode)
from .daemon import DaemonContext
from .daemon import _chain_exception_from_existing_exception_context
class DaemonRunnerError(Exception):
""" Abstract base class for errors from DaemonRunner. """
def __init__(self, *args, **kwargs):
self._chain_from_context()
super(DaemonRunnerError, self).__init__(*args, **kwargs)
def _chain_from_context(self):
_chain_exception_from_existing_exception_context(self, as_cause=True)
class DaemonRunnerInvalidActionError(DaemonRunnerError, ValueError):
""" Raised when specified action for DaemonRunner is invalid. """
def _chain_from_context(self):
# This exception is normally not caused by another.
_chain_exception_from_existing_exception_context(self, as_cause=False)
class DaemonRunnerStartFailureError(DaemonRunnerError, RuntimeError):
""" Raised when failure starting DaemonRunner. """
class DaemonRunnerStopFailureError(DaemonRunnerError, RuntimeError):
""" Raised when failure stopping DaemonRunner. """
class DaemonRunner:
""" Controller for a callable running in a separate background process.
The first command-line argument is the action to take:
* 'start': Become a daemon and call `app.run()`.
* 'stop': Exit the daemon process specified in the PID file.
* 'restart': Stop, then start.
"""
__metaclass__ = type
start_message = "started with pid {pid:d}"
def __init__(self, app):
""" Set up the parameters of a new runner.
:param app: The application instance; see below.
:return: ``None``.
The `app` argument must have the following attributes:
* `stdin_path`, `stdout_path`, `stderr_path`: Filesystem paths
to open and replace the existing `sys.stdin`, `sys.stdout`,
`sys.stderr`.
* `pidfile_path`: Absolute filesystem path to a file that will
be used as the PID file for the daemon. If ``None``, no PID
file will be used.
* `pidfile_timeout`: Used as the default acquisition timeout
value supplied to the runner's PID lock file.
* `run`: Callable that will be invoked when the daemon is
started.
"""
self.parse_args()
self.app = app
self.daemon_context = DaemonContext()
self.daemon_context.stdin = open(app.stdin_path, 'rt')
self.daemon_context.stdout = open(app.stdout_path, 'w+t')
self.daemon_context.stderr = open(
app.stderr_path, 'w+t', buffering=0)
self.pidfile = None
if app.pidfile_path is not None:
self.pidfile = make_pidlockfile(
app.pidfile_path, app.pidfile_timeout)
self.daemon_context.pidfile = self.pidfile
def _usage_exit(self, argv):
""" Emit a usage message, then exit.
:param argv: The command-line arguments used to invoke the
program, as a sequence of strings.
:return: ``None``.
"""
progname = os.path.basename(argv[0])
usage_exit_code = 2
action_usage = "|".join(self.action_funcs.keys())
message = "usage: {progname} {usage}".format(
progname=progname, usage=action_usage)
emit_message(message)
sys.exit(usage_exit_code)
def parse_args(self, argv=None):
""" Parse command-line arguments.
:param argv: The command-line arguments used to invoke the
program, as a sequence of strings.
:return: ``None``.
The parser expects the first argument as the program name, the
second argument as the action to perform.
If the parser fails to parse the arguments, emit a usage
message and exit the program.
"""
if argv is None:
argv = sys.argv
min_args = 2
if len(argv) < min_args:
self._usage_exit(argv)
self.action = unicode(argv[1])
if self.action not in self.action_funcs:
self._usage_exit(argv)
def _start(self):
""" Open the daemon context and run the application.
:return: ``None``.
:raises DaemonRunnerStartFailureError: If the PID file cannot
be locked by this process.
"""
if is_pidfile_stale(self.pidfile):
self.pidfile.break_lock()
try:
self.daemon_context.open()
except lockfile.AlreadyLocked:
error = DaemonRunnerStartFailureError(
"PID file {pidfile.path!r} already locked".format(
pidfile=self.pidfile))
raise error
pid = os.getpid()
message = self.start_message.format(pid=pid)
emit_message(message)
self.app.run()
def _terminate_daemon_process(self):
""" Terminate the daemon process specified in the current PID file.
:return: ``None``.
:raises DaemonRunnerStopFailureError: If terminating the daemon
fails with an OS error.
"""
pid = self.pidfile.read_pid()
try:
os.kill(pid, signal.SIGTERM)
except OSError as exc:
error = DaemonRunnerStopFailureError(
"Failed to terminate {pid:d}: {exc}".format(
pid=pid, exc=exc))
raise error
def _stop(self):
""" Exit the daemon process specified in the current PID file.
:return: ``None``.
:raises DaemonRunnerStopFailureError: If the PID file is not
already locked.
"""
if not self.pidfile.is_locked():
error = DaemonRunnerStopFailureError(
"PID file {pidfile.path!r} not locked".format(
pidfile=self.pidfile))
raise error
if is_pidfile_stale(self.pidfile):
self.pidfile.break_lock()
else:
self._terminate_daemon_process()
def _restart(self):
""" Stop, then start.
"""
self._stop()
self._start()
action_funcs = {
'start': _start,
'stop': _stop,
'restart': _restart,
}
def _get_action_func(self):
""" Get the function for the specified action.
:return: The function object corresponding to the specified
action.
:raises DaemonRunnerInvalidActionError: if the action is
unknown.
The action is specified by the `action` attribute, which is set
during `parse_args`.
"""
try:
func = self.action_funcs[self.action]
except KeyError:
error = DaemonRunnerInvalidActionError(
"Unknown action: {action!r}".format(
action=self.action))
raise error
return func
def do_action(self):
""" Perform the requested action.
:return: ``None``.
The action is specified by the `action` attribute, which is set
during `parse_args`.
"""
func = self._get_action_func()
func(self)
def emit_message(message, stream=None):
""" Emit a message to the specified stream (default `sys.stderr`). """
if stream is None:
stream = sys.stderr
stream.write("{message}\n".format(message=message))
stream.flush()
def make_pidlockfile(path, acquire_timeout):
""" Make a PIDLockFile instance with the given filesystem path. """
if not isinstance(path, basestring):
error = ValueError("Not a filesystem path: {path!r}".format(
path=path))
raise error
if not os.path.isabs(path):
error = ValueError("Not an absolute path: {path!r}".format(
path=path))
raise error
lockfile = pidfile.TimeoutPIDLockFile(path, acquire_timeout)
return lockfile
def is_pidfile_stale(pidfile):
""" Determine whether a PID file is stale.
:return: ``True`` iff the PID file is stale; otherwise ``False``.
The PID file is “stale” if its contents are valid but do not
match the PID of a currently-running process.
"""
result = False
pidfile_pid = pidfile.read_pid()
if pidfile_pid is not None:
try:
os.kill(pidfile_pid, signal.SIG_DFL)
except ProcessLookupError:
# The specified PID does not exist.
result = True
except OSError as exc:
if exc.errno == errno.ESRCH:
# Under Python 2, process lookup error is an OSError.
# The specified PID does not exist.
result = True
return result
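# Illustrative only -- the intended entry point looks roughly like this
# (the App class and every attribute value below are hypothetical):
#
#     class App(object):
#         stdin_path = '/dev/null'
#         stdout_path = '/dev/null'
#         stderr_path = '/dev/null'
#         pidfile_path = '/var/run/myapp.pid'
#         pidfile_timeout = 5
#         def run(self):
#             ...  # the long-running work
#
#     runner = DaemonRunner(App())  # reads the action from sys.argv
#     runner.do_action()            # start | stop | restart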
# Local variables:
# coding: utf-8
# mode: python
# End:
# vim: fileencoding=utf-8 filetype=python :
|
cartwheelweb/packaginator
|
refs/heads/master
|
apps/pypi/__init__.py
|
4
|
""" All connection points with PyPI """
|
hyperized/ansible
|
refs/heads/devel
|
test/units/modules/network/fortios/test_fortios_firewall_ssl_server.py
|
21
|
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
from mock import ANY
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
try:
from ansible.modules.network.fortios import fortios_firewall_ssl_server
except ImportError:
pytest.skip("Could not load required modules for testing", allow_module_level=True)
@pytest.fixture(autouse=True)
def connection_mock(mocker):
connection_class_mock = mocker.patch('ansible.modules.network.fortios.fortios_firewall_ssl_server.Connection')
return connection_class_mock
fos_instance = FortiOSHandler(connection_mock)
def test_firewall_ssl_server_creation(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'firewall_ssl_server': {
'add_header_x_forwarded_proto': 'enable',
'ip': 'test_value_4',
'mapped_port': '5',
'name': 'default_name_6',
'port': '7',
'ssl_algorithm': 'high',
'ssl_cert': 'test_value_9',
'ssl_client_renegotiation': 'allow',
'ssl_dh_bits': '768',
'ssl_max_version': 'tls-1.0',
'ssl_min_version': 'tls-1.0',
'ssl_mode': 'half',
'ssl_send_empty_frags': 'enable',
'url_rewrite': 'enable'
},
'vdom': 'root'}
is_error, changed, response = fortios_firewall_ssl_server.fortios_firewall(input_data, fos_instance)
expected_data = {
'add-header-x-forwarded-proto': 'enable',
'ip': 'test_value_4',
'mapped-port': '5',
'name': 'default_name_6',
'port': '7',
'ssl-algorithm': 'high',
'ssl-cert': 'test_value_9',
'ssl-client-renegotiation': 'allow',
'ssl-dh-bits': '768',
'ssl-max-version': 'tls-1.0',
'ssl-min-version': 'tls-1.0',
'ssl-mode': 'half',
'ssl-send-empty-frags': 'enable',
'url-rewrite': 'enable'
}
set_method_mock.assert_called_with('firewall', 'ssl-server', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
def test_firewall_ssl_server_creation_fails(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'firewall_ssl_server': {
'add_header_x_forwarded_proto': 'enable',
'ip': 'test_value_4',
'mapped_port': '5',
'name': 'default_name_6',
'port': '7',
'ssl_algorithm': 'high',
'ssl_cert': 'test_value_9',
'ssl_client_renegotiation': 'allow',
'ssl_dh_bits': '768',
'ssl_max_version': 'tls-1.0',
'ssl_min_version': 'tls-1.0',
'ssl_mode': 'half',
'ssl_send_empty_frags': 'enable',
'url_rewrite': 'enable'
},
'vdom': 'root'}
is_error, changed, response = fortios_firewall_ssl_server.fortios_firewall(input_data, fos_instance)
expected_data = {
'add-header-x-forwarded-proto': 'enable',
'ip': 'test_value_4',
'mapped-port': '5',
'name': 'default_name_6',
'port': '7',
'ssl-algorithm': 'high',
'ssl-cert': 'test_value_9',
'ssl-client-renegotiation': 'allow',
'ssl-dh-bits': '768',
'ssl-max-version': 'tls-1.0',
'ssl-min-version': 'tls-1.0',
'ssl-mode': 'half',
'ssl-send-empty-frags': 'enable',
'url-rewrite': 'enable'
}
set_method_mock.assert_called_with('firewall', 'ssl-server', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 500
def test_firewall_ssl_server_removal(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
delete_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)
input_data = {
'username': 'admin',
'state': 'absent',
'firewall_ssl_server': {
'add_header_x_forwarded_proto': 'enable',
'ip': 'test_value_4',
'mapped_port': '5',
'name': 'default_name_6',
'port': '7',
'ssl_algorithm': 'high',
'ssl_cert': 'test_value_9',
'ssl_client_renegotiation': 'allow',
'ssl_dh_bits': '768',
'ssl_max_version': 'tls-1.0',
'ssl_min_version': 'tls-1.0',
'ssl_mode': 'half',
'ssl_send_empty_frags': 'enable',
'url_rewrite': 'enable'
},
'vdom': 'root'}
is_error, changed, response = fortios_firewall_ssl_server.fortios_firewall(input_data, fos_instance)
delete_method_mock.assert_called_with('firewall', 'ssl-server', mkey=ANY, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
def test_firewall_ssl_server_deletion_fails(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
delete_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)
input_data = {
'username': 'admin',
'state': 'absent',
'firewall_ssl_server': {
'add_header_x_forwarded_proto': 'enable',
'ip': 'test_value_4',
'mapped_port': '5',
'name': 'default_name_6',
'port': '7',
'ssl_algorithm': 'high',
'ssl_cert': 'test_value_9',
'ssl_client_renegotiation': 'allow',
'ssl_dh_bits': '768',
'ssl_max_version': 'tls-1.0',
'ssl_min_version': 'tls-1.0',
'ssl_mode': 'half',
'ssl_send_empty_frags': 'enable',
'url_rewrite': 'enable'
},
'vdom': 'root'}
is_error, changed, response = fortios_firewall_ssl_server.fortios_firewall(input_data, fos_instance)
delete_method_mock.assert_called_with('firewall', 'ssl-server', mkey=ANY, vdom='root')
schema_method_mock.assert_not_called()
assert is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 500
def test_firewall_ssl_server_idempotent(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'error', 'http_method': 'DELETE', 'http_status': 404}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'firewall_ssl_server': {
'add_header_x_forwarded_proto': 'enable',
'ip': 'test_value_4',
'mapped_port': '5',
'name': 'default_name_6',
'port': '7',
'ssl_algorithm': 'high',
'ssl_cert': 'test_value_9',
'ssl_client_renegotiation': 'allow',
'ssl_dh_bits': '768',
'ssl_max_version': 'tls-1.0',
'ssl_min_version': 'tls-1.0',
'ssl_mode': 'half',
'ssl_send_empty_frags': 'enable',
'url_rewrite': 'enable'
},
'vdom': 'root'}
is_error, changed, response = fortios_firewall_ssl_server.fortios_firewall(input_data, fos_instance)
expected_data = {
'add-header-x-forwarded-proto': 'enable',
'ip': 'test_value_4',
'mapped-port': '5',
'name': 'default_name_6',
'port': '7',
'ssl-algorithm': 'high',
'ssl-cert': 'test_value_9',
'ssl-client-renegotiation': 'allow',
'ssl-dh-bits': '768',
'ssl-max-version': 'tls-1.0',
'ssl-min-version': 'tls-1.0',
'ssl-mode': 'half',
'ssl-send-empty-frags': 'enable',
'url-rewrite': 'enable'
}
set_method_mock.assert_called_with('firewall', 'ssl-server', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 404
def test_firewall_ssl_server_filter_foreign_attributes(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'firewall_ssl_server': {
'random_attribute_not_valid': 'tag',
'add_header_x_forwarded_proto': 'enable',
'ip': 'test_value_4',
'mapped_port': '5',
'name': 'default_name_6',
'port': '7',
'ssl_algorithm': 'high',
'ssl_cert': 'test_value_9',
'ssl_client_renegotiation': 'allow',
'ssl_dh_bits': '768',
'ssl_max_version': 'tls-1.0',
'ssl_min_version': 'tls-1.0',
'ssl_mode': 'half',
'ssl_send_empty_frags': 'enable',
'url_rewrite': 'enable'
},
'vdom': 'root'}
is_error, changed, response = fortios_firewall_ssl_server.fortios_firewall(input_data, fos_instance)
expected_data = {
'add-header-x-forwarded-proto': 'enable',
'ip': 'test_value_4',
'mapped-port': '5',
'name': 'default_name_6',
'port': '7',
'ssl-algorithm': 'high',
'ssl-cert': 'test_value_9',
'ssl-client-renegotiation': 'allow',
'ssl-dh-bits': '768',
'ssl-max-version': 'tls-1.0',
'ssl-min-version': 'tls-1.0',
'ssl-mode': 'half',
'ssl-send-empty-frags': 'enable',
'url-rewrite': 'enable'
}
set_method_mock.assert_called_with('firewall', 'ssl-server', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
|
fairbird/OpenPLI-BlackHole
|
refs/heads/master
|
lib/python/Components/Element.py
|
47
|
from Tools.CList import CList
from functools import reduce  # a builtin on Python 2; needed explicitly on Python 3
# down up
# Render Converter Converter Source
# a bidirectional connection
def cached(f):
name = f.__name__
def wrapper(self):
cache = self.cache
if cache is None:
return f(self)
if name not in cache:
cache[name] = (True, f(self))
return cache[name][1]
return wrapper
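# @cached memoizes a zero-argument method for the duration of a single
# changed() call: Element.changed() below installs self.cache = {}, repeated
# getter calls then reuse the stored value, and the cache is dropped again
# afterwards. Illustrative only (Clock and its helper are hypothetical):
#
#     class Clock(Element):
#         @cached
#         def getText(self):
#             return format_current_time()
#         text = property(getText)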
class ElementError(Exception):
def __init__(self, message):
self.msg = message
def __str__(self):
return self.msg
class Element(object):
CHANGED_DEFAULT = 0 # initial "pull" state
CHANGED_ALL = 1 # really everything changed
CHANGED_CLEAR = 2 # we're expecting a real update soon. don't bother polling NOW, but clear data.
CHANGED_SPECIFIC = 3 # second tuple will specify what exactly changed
CHANGED_POLL = 4 # a timer expired
SINGLE_SOURCE = True
def __init__(self):
self.downstream_elements = CList()
self.master = None
self.sources = [ ]
self.source = None
self.__suspended = True
self.cache = None
def connectDownstream(self, downstream):
self.downstream_elements.append(downstream)
if self.master is None:
self.master = downstream
def connectUpstream(self, upstream):
assert not self.SINGLE_SOURCE or self.source is None
self.sources.append(upstream)
        # self.source always refers to the most recently added source.
self.source = upstream
self.changed((self.CHANGED_DEFAULT,))
def connect(self, upstream):
self.connectUpstream(upstream)
upstream.connectDownstream(self)
# we disconnect from down to up
def disconnectAll(self):
# we should not disconnect from upstream if
# there are still elements depending on us.
assert len(self.downstream_elements) == 0, "there are still downstream elements left"
        # Sources don't have a source themselves, so don't do anything here.
for s in self.sources:
s.disconnectDownstream(self)
if self.source:
# sources are owned by the Screen, so don't destroy them here.
self.destroy()
self.source = None
self.sources = [ ]
def disconnectDownstream(self, downstream):
self.downstream_elements.remove(downstream)
if self.master == downstream:
self.master = None
if len(self.downstream_elements) == 0:
self.disconnectAll()
# default action: push downstream
def changed(self, *args, **kwargs):
self.cache = { }
self.downstream_elements.changed(*args, **kwargs)
self.cache = None
def setSuspend(self, suspended):
changed = self.__suspended != suspended
if not self.__suspended and suspended:
self.doSuspend(1)
elif self.__suspended and not suspended:
self.doSuspend(0)
self.__suspended = suspended
if changed:
for s in self.sources:
s.checkSuspend()
suspended = property(lambda self: self.__suspended, setSuspend)
def checkSuspend(self):
self.suspended = reduce(lambda x, y: x and y.__suspended, self.downstream_elements, True)
def doSuspend(self, suspend):
pass
def destroy(self):
pass
|
angelapper/odoo
|
refs/heads/9.0
|
addons/website/models/ir_qweb.py
|
29
|
# -*- coding: utf-8 -*-
import re
from openerp.addons.web.http import request
from openerp.osv import orm
class QWeb(orm.AbstractModel):
""" QWeb object for rendering stuff in the website context
"""
_inherit = 'ir.qweb'
URL_ATTRS = {
'form': 'action',
'a': 'href',
}
CDN_TRIGGERS = {
'link': 'href',
'script': 'src',
'img': 'src',
}
    PRESERVE_WHITESPACE = [
        'pre',
        'textarea',
        'script',
        'style',
    ]
    # render_text/render_tail below reference self.re_remove_spaces; restore it
    # here (assumed definition: collapse runs of whitespace into one space).
    re_remove_spaces = re.compile(r'\s+')
def render_attribute(self, element, name, value, qwebcontext):
context = qwebcontext.context or {}
if not context.get('rendering_bundle'):
if name == self.URL_ATTRS.get(element.tag) and qwebcontext.get('url_for'):
value = qwebcontext.get('url_for')(value)
elif request and request.website and request.website.cdn_activated and (name == self.URL_ATTRS.get(element.tag) or name == self.CDN_TRIGGERS.get(element.tag)):
value = request.website.get_cdn_url(value)
return super(QWeb, self).render_attribute(element, name, value, qwebcontext)
def render_text(self, text, element, qwebcontext):
compress = request and not request.debug and request.website and request.website.compress_html
if compress and element.tag not in self.PRESERVE_WHITESPACE:
text = self.re_remove_spaces.sub(' ', text)
return super(QWeb, self).render_text(text, element, qwebcontext)
def render_tail(self, tail, element, qwebcontext):
compress = request and not request.debug and request.website and request.website.compress_html
if compress and element.getparent().tag not in self.PRESERVE_WHITESPACE:
            # No need to recurse, because those tags' children are not html5-parser friendly
tail = self.re_remove_spaces.sub(' ', tail.rstrip())
return super(QWeb, self).render_tail(tail, element, qwebcontext)
|
jjkester/django-auditlog
|
refs/heads/master
|
auditlog/migrations/0003_logentry_remote_addr.py
|
1
|
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('auditlog', '0002_auto_support_long_primary_keys'),
]
operations = [
migrations.AddField(
model_name='logentry',
name='remote_addr',
field=models.GenericIPAddressField(null=True, verbose_name='remote address', blank=True),
),
]
|
xiang12835/python_web
|
refs/heads/master
|
py2_web2py/web2py/gluon/contrib/pypyodbc.py
|
21
|
# -*- coding: utf-8 -*-
# PyPyODBC is developed from RealPyODBC 0.1 beta released in 2004 by Michele Petrazzo. Thanks Michele.
# The MIT License (MIT)
#
# Copyright (c) 2014 Henry Zhou <jiangwen365@gmail.com> and PyPyODBC contributors
# Copyright (c) 2004 Michele Petrazzo
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions
# of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
# THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
# CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
pooling = True
apilevel = '2.0'
paramstyle = 'qmark'
threadsafety = 1
version = '1.3.3'
lowercase=True
DEBUG = 0
# Comment out all "if DEBUG:" statements like below for production
#if DEBUG:print 'DEBUGGING'
import sys, os, datetime, ctypes, threading
from decimal import Decimal
py_ver = sys.version[:3]
py_v3 = py_ver >= '3.0'
if py_v3:
long = int
unicode = str
str_8b = bytes
buffer = memoryview
BYTE_1 = bytes('1','ascii')
use_unicode = True
else:
str_8b = str
BYTE_1 = '1'
use_unicode = False
if py_ver < '2.6':
bytearray = str
if not hasattr(ctypes, 'c_ssize_t'):
if ctypes.sizeof(ctypes.c_uint) == ctypes.sizeof(ctypes.c_void_p):
ctypes.c_ssize_t = ctypes.c_int
elif ctypes.sizeof(ctypes.c_ulong) == ctypes.sizeof(ctypes.c_void_p):
ctypes.c_ssize_t = ctypes.c_long
elif ctypes.sizeof(ctypes.c_ulonglong) == ctypes.sizeof(ctypes.c_void_p):
ctypes.c_ssize_t = ctypes.c_longlong
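# The shim above backfills ctypes.c_ssize_t (only present from Python 2.7 on)
# by finding the unsigned integer type whose size matches a pointer and using
# its signed counterpart.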
lock = threading.Lock()
shared_env_h = None
SQLWCHAR_SIZE = ctypes.sizeof(ctypes.c_wchar)
# Determine the size of Py_UNICODE
# (sys.maxunicode > 65536 means UCS4, otherwise UCS2)
UNICODE_SIZE = sys.maxunicode > 65536 and 4 or 2
# Define ODBC constants. They are widely used in ODBC documents and programs.
# They are defined in the C/C++ header files sql.h, sqlext.h, sqltypes.h and sqlucode.h;
# you can get these files from the mingw32-runtime_3.13-1_all.deb package
SQL_ATTR_ODBC_VERSION, SQL_OV_ODBC2, SQL_OV_ODBC3 = 200, 2, 3
SQL_DRIVER_NOPROMPT = 0
SQL_ATTR_CONNECTION_POOLING = 201; SQL_CP_ONE_PER_HENV = 2
SQL_FETCH_NEXT, SQL_FETCH_FIRST, SQL_FETCH_LAST = 0x01, 0x02, 0x04
SQL_NULL_HANDLE, SQL_HANDLE_ENV, SQL_HANDLE_DBC, SQL_HANDLE_STMT = 0, 1, 2, 3
SQL_SUCCESS, SQL_SUCCESS_WITH_INFO, SQL_ERROR = 0, 1, -1
SQL_NO_DATA = 100; SQL_NO_TOTAL = -4
SQL_ATTR_ACCESS_MODE = SQL_ACCESS_MODE = 101
SQL_ATTR_AUTOCOMMIT = SQL_AUTOCOMMIT = 102
SQL_MODE_DEFAULT = SQL_MODE_READ_WRITE = 0; SQL_MODE_READ_ONLY = 1
SQL_AUTOCOMMIT_OFF, SQL_AUTOCOMMIT_ON = 0, 1
SQL_IS_UINTEGER = -5
SQL_ATTR_LOGIN_TIMEOUT = 103; SQL_ATTR_CONNECTION_TIMEOUT = 113;SQL_ATTR_QUERY_TIMEOUT = 0
SQL_COMMIT, SQL_ROLLBACK = 0, 1
SQL_INDEX_UNIQUE,SQL_INDEX_ALL = 0,1
SQL_QUICK,SQL_ENSURE = 0,1
SQL_FETCH_NEXT = 1
SQL_COLUMN_DISPLAY_SIZE = 6
SQL_INVALID_HANDLE = -2
SQL_NO_DATA_FOUND = 100; SQL_NULL_DATA = -1; SQL_NTS = -3
SQL_HANDLE_DESCR = 4
SQL_TABLE_NAMES = 3
SQL_PARAM_INPUT = 1; SQL_PARAM_INPUT_OUTPUT = 2
SQL_PARAM_TYPE_UNKNOWN = 0
SQL_RESULT_COL = 3
SQL_PARAM_OUTPUT = 4
SQL_RETURN_VALUE = 5
SQL_PARAM_TYPE_DEFAULT = SQL_PARAM_INPUT_OUTPUT
SQL_RESET_PARAMS = 3
SQL_UNBIND = 2
SQL_CLOSE = 0
# Below are the constants for the SQLGetInfo method, and their corresponding return types
SQL_QUALIFIER_LOCATION = 114
SQL_QUALIFIER_NAME_SEPARATOR = 41
SQL_QUALIFIER_TERM = 42
SQL_QUALIFIER_USAGE = 92
SQL_OWNER_TERM = 39
SQL_OWNER_USAGE = 91
SQL_ACCESSIBLE_PROCEDURES = 20
SQL_ACCESSIBLE_TABLES = 19
SQL_ACTIVE_ENVIRONMENTS = 116
SQL_AGGREGATE_FUNCTIONS = 169
SQL_ALTER_DOMAIN = 117
SQL_ALTER_TABLE = 86
SQL_ASYNC_MODE = 10021
SQL_BATCH_ROW_COUNT = 120
SQL_BATCH_SUPPORT = 121
SQL_BOOKMARK_PERSISTENCE = 82
SQL_CATALOG_LOCATION = SQL_QUALIFIER_LOCATION
SQL_CATALOG_NAME = 10003
SQL_CATALOG_NAME_SEPARATOR = SQL_QUALIFIER_NAME_SEPARATOR
SQL_CATALOG_TERM = SQL_QUALIFIER_TERM
SQL_CATALOG_USAGE = SQL_QUALIFIER_USAGE
SQL_COLLATION_SEQ = 10004
SQL_COLUMN_ALIAS = 87
SQL_CONCAT_NULL_BEHAVIOR = 22
SQL_CONVERT_FUNCTIONS = 48
SQL_CONVERT_VARCHAR = 70
SQL_CORRELATION_NAME = 74
SQL_CREATE_ASSERTION = 127
SQL_CREATE_CHARACTER_SET = 128
SQL_CREATE_COLLATION = 129
SQL_CREATE_DOMAIN = 130
SQL_CREATE_SCHEMA = 131
SQL_CREATE_TABLE = 132
SQL_CREATE_TRANSLATION = 133
SQL_CREATE_VIEW = 134
SQL_CURSOR_COMMIT_BEHAVIOR = 23
SQL_CURSOR_ROLLBACK_BEHAVIOR = 24
SQL_DATABASE_NAME = 16
SQL_DATA_SOURCE_NAME = 2
SQL_DATA_SOURCE_READ_ONLY = 25
SQL_DATETIME_LITERALS = 119
SQL_DBMS_NAME = 17
SQL_DBMS_VER = 18
SQL_DDL_INDEX = 170
SQL_DEFAULT_TXN_ISOLATION = 26
SQL_DESCRIBE_PARAMETER = 10002
SQL_DM_VER = 171
SQL_DRIVER_NAME = 6
SQL_DRIVER_ODBC_VER = 77
SQL_DRIVER_VER = 7
SQL_DROP_ASSERTION = 136
SQL_DROP_CHARACTER_SET = 137
SQL_DROP_COLLATION = 138
SQL_DROP_DOMAIN = 139
SQL_DROP_SCHEMA = 140
SQL_DROP_TABLE = 141
SQL_DROP_TRANSLATION = 142
SQL_DROP_VIEW = 143
SQL_DYNAMIC_CURSOR_ATTRIBUTES1 = 144
SQL_DYNAMIC_CURSOR_ATTRIBUTES2 = 145
SQL_EXPRESSIONS_IN_ORDERBY = 27
SQL_FILE_USAGE = 84
SQL_FORWARD_ONLY_CURSOR_ATTRIBUTES1 = 146
SQL_FORWARD_ONLY_CURSOR_ATTRIBUTES2 = 147
SQL_GETDATA_EXTENSIONS = 81
SQL_GROUP_BY = 88
SQL_IDENTIFIER_CASE = 28
SQL_IDENTIFIER_QUOTE_CHAR = 29
SQL_INDEX_KEYWORDS = 148
SQL_INFO_SCHEMA_VIEWS = 149
SQL_INSERT_STATEMENT = 172
SQL_INTEGRITY = 73
SQL_KEYSET_CURSOR_ATTRIBUTES1 = 150
SQL_KEYSET_CURSOR_ATTRIBUTES2 = 151
SQL_KEYWORDS = 89
SQL_LIKE_ESCAPE_CLAUSE = 113
SQL_MAX_ASYNC_CONCURRENT_STATEMENTS = 10022
SQL_MAX_BINARY_LITERAL_LEN = 112
SQL_MAX_CATALOG_NAME_LEN = 34
SQL_MAX_CHAR_LITERAL_LEN = 108
SQL_MAX_COLUMNS_IN_GROUP_BY = 97
SQL_MAX_COLUMNS_IN_INDEX = 98
SQL_MAX_COLUMNS_IN_ORDER_BY = 99
SQL_MAX_COLUMNS_IN_SELECT = 100
SQL_MAX_COLUMNS_IN_TABLE = 101
SQL_MAX_COLUMN_NAME_LEN = 30
SQL_MAX_CONCURRENT_ACTIVITIES = 1
SQL_MAX_CURSOR_NAME_LEN = 31
SQL_MAX_DRIVER_CONNECTIONS = 0
SQL_MAX_IDENTIFIER_LEN = 10005
SQL_MAX_INDEX_SIZE = 102
SQL_MAX_PROCEDURE_NAME_LEN = 33
SQL_MAX_ROW_SIZE = 104
SQL_MAX_ROW_SIZE_INCLUDES_LONG = 103
SQL_MAX_SCHEMA_NAME_LEN = 32
SQL_MAX_STATEMENT_LEN = 105
SQL_MAX_TABLES_IN_SELECT = 106
SQL_MAX_TABLE_NAME_LEN = 35
SQL_MAX_USER_NAME_LEN = 107
SQL_MULTIPLE_ACTIVE_TXN = 37
SQL_MULT_RESULT_SETS = 36
SQL_NEED_LONG_DATA_LEN = 111
SQL_NON_NULLABLE_COLUMNS = 75
SQL_NULL_COLLATION = 85
SQL_NUMERIC_FUNCTIONS = 49
SQL_ODBC_INTERFACE_CONFORMANCE = 152
SQL_ODBC_VER = 10
SQL_OJ_CAPABILITIES = 65003
SQL_ORDER_BY_COLUMNS_IN_SELECT = 90
SQL_PARAM_ARRAY_ROW_COUNTS = 153
SQL_PARAM_ARRAY_SELECTS = 154
SQL_PROCEDURES = 21
SQL_PROCEDURE_TERM = 40
SQL_QUOTED_IDENTIFIER_CASE = 93
SQL_ROW_UPDATES = 11
SQL_SCHEMA_TERM = SQL_OWNER_TERM
SQL_SCHEMA_USAGE = SQL_OWNER_USAGE
SQL_SCROLL_OPTIONS = 44
SQL_SEARCH_PATTERN_ESCAPE = 14
SQL_SERVER_NAME = 13
SQL_SPECIAL_CHARACTERS = 94
SQL_SQL92_DATETIME_FUNCTIONS = 155
SQL_SQL92_FOREIGN_KEY_DELETE_RULE = 156
SQL_SQL92_FOREIGN_KEY_UPDATE_RULE = 157
SQL_SQL92_GRANT = 158
SQL_SQL92_NUMERIC_VALUE_FUNCTIONS = 159
SQL_SQL92_PREDICATES = 160
SQL_SQL92_RELATIONAL_JOIN_OPERATORS = 161
SQL_SQL92_REVOKE = 162
SQL_SQL92_ROW_VALUE_CONSTRUCTOR = 163
SQL_SQL92_STRING_FUNCTIONS = 164
SQL_SQL92_VALUE_EXPRESSIONS = 165
SQL_SQL_CONFORMANCE = 118
SQL_STANDARD_CLI_CONFORMANCE = 166
SQL_STATIC_CURSOR_ATTRIBUTES1 = 167
SQL_STATIC_CURSOR_ATTRIBUTES2 = 168
SQL_STRING_FUNCTIONS = 50
SQL_SUBQUERIES = 95
SQL_SYSTEM_FUNCTIONS = 51
SQL_TABLE_TERM = 45
SQL_TIMEDATE_ADD_INTERVALS = 109
SQL_TIMEDATE_DIFF_INTERVALS = 110
SQL_TIMEDATE_FUNCTIONS = 52
SQL_TXN_CAPABLE = 46
SQL_TXN_ISOLATION_OPTION = 72
SQL_UNION = 96
SQL_USER_NAME = 47
SQL_XOPEN_CLI_YEAR = 10000
aInfoTypes = {
SQL_ACCESSIBLE_PROCEDURES : 'GI_YESNO',SQL_ACCESSIBLE_TABLES : 'GI_YESNO',SQL_ACTIVE_ENVIRONMENTS : 'GI_USMALLINT',
SQL_AGGREGATE_FUNCTIONS : 'GI_UINTEGER',SQL_ALTER_DOMAIN : 'GI_UINTEGER',
SQL_ALTER_TABLE : 'GI_UINTEGER',SQL_ASYNC_MODE : 'GI_UINTEGER',SQL_BATCH_ROW_COUNT : 'GI_UINTEGER',
SQL_BATCH_SUPPORT : 'GI_UINTEGER',SQL_BOOKMARK_PERSISTENCE : 'GI_UINTEGER',SQL_CATALOG_LOCATION : 'GI_USMALLINT',
SQL_CATALOG_NAME : 'GI_YESNO',SQL_CATALOG_NAME_SEPARATOR : 'GI_STRING',SQL_CATALOG_TERM : 'GI_STRING',
SQL_CATALOG_USAGE : 'GI_UINTEGER',SQL_COLLATION_SEQ : 'GI_STRING',SQL_COLUMN_ALIAS : 'GI_YESNO',
SQL_CONCAT_NULL_BEHAVIOR : 'GI_USMALLINT',SQL_CONVERT_FUNCTIONS : 'GI_UINTEGER',SQL_CONVERT_VARCHAR : 'GI_UINTEGER',
SQL_CORRELATION_NAME : 'GI_USMALLINT',SQL_CREATE_ASSERTION : 'GI_UINTEGER',SQL_CREATE_CHARACTER_SET : 'GI_UINTEGER',
SQL_CREATE_COLLATION : 'GI_UINTEGER',SQL_CREATE_DOMAIN : 'GI_UINTEGER',SQL_CREATE_SCHEMA : 'GI_UINTEGER',
SQL_CREATE_TABLE : 'GI_UINTEGER',SQL_CREATE_TRANSLATION : 'GI_UINTEGER',SQL_CREATE_VIEW : 'GI_UINTEGER',
SQL_CURSOR_COMMIT_BEHAVIOR : 'GI_USMALLINT',SQL_CURSOR_ROLLBACK_BEHAVIOR : 'GI_USMALLINT',SQL_DATABASE_NAME : 'GI_STRING',
SQL_DATA_SOURCE_NAME : 'GI_STRING',SQL_DATA_SOURCE_READ_ONLY : 'GI_YESNO',SQL_DATETIME_LITERALS : 'GI_UINTEGER',
SQL_DBMS_NAME : 'GI_STRING',SQL_DBMS_VER : 'GI_STRING',SQL_DDL_INDEX : 'GI_UINTEGER',
SQL_DEFAULT_TXN_ISOLATION : 'GI_UINTEGER',SQL_DESCRIBE_PARAMETER : 'GI_YESNO',SQL_DM_VER : 'GI_STRING',
SQL_DRIVER_NAME : 'GI_STRING',SQL_DRIVER_ODBC_VER : 'GI_STRING',SQL_DRIVER_VER : 'GI_STRING',SQL_DROP_ASSERTION : 'GI_UINTEGER',
SQL_DROP_CHARACTER_SET : 'GI_UINTEGER', SQL_DROP_COLLATION : 'GI_UINTEGER',SQL_DROP_DOMAIN : 'GI_UINTEGER',
SQL_DROP_SCHEMA : 'GI_UINTEGER',SQL_DROP_TABLE : 'GI_UINTEGER',SQL_DROP_TRANSLATION : 'GI_UINTEGER',
SQL_DROP_VIEW : 'GI_UINTEGER',SQL_DYNAMIC_CURSOR_ATTRIBUTES1 : 'GI_UINTEGER',SQL_DYNAMIC_CURSOR_ATTRIBUTES2 : 'GI_UINTEGER',
SQL_EXPRESSIONS_IN_ORDERBY : 'GI_YESNO',SQL_FILE_USAGE : 'GI_USMALLINT',
SQL_FORWARD_ONLY_CURSOR_ATTRIBUTES1 : 'GI_UINTEGER',SQL_FORWARD_ONLY_CURSOR_ATTRIBUTES2 : 'GI_UINTEGER',
SQL_GETDATA_EXTENSIONS : 'GI_UINTEGER',SQL_GROUP_BY : 'GI_USMALLINT',SQL_IDENTIFIER_CASE : 'GI_USMALLINT',
SQL_IDENTIFIER_QUOTE_CHAR : 'GI_STRING',SQL_INDEX_KEYWORDS : 'GI_UINTEGER',SQL_INFO_SCHEMA_VIEWS : 'GI_UINTEGER',
SQL_INSERT_STATEMENT : 'GI_UINTEGER',SQL_INTEGRITY : 'GI_YESNO',SQL_KEYSET_CURSOR_ATTRIBUTES1 : 'GI_UINTEGER',
SQL_KEYSET_CURSOR_ATTRIBUTES2 : 'GI_UINTEGER',SQL_KEYWORDS : 'GI_STRING',
SQL_LIKE_ESCAPE_CLAUSE : 'GI_YESNO',SQL_MAX_ASYNC_CONCURRENT_STATEMENTS : 'GI_UINTEGER',
SQL_MAX_BINARY_LITERAL_LEN : 'GI_UINTEGER',SQL_MAX_CATALOG_NAME_LEN : 'GI_USMALLINT',
SQL_MAX_CHAR_LITERAL_LEN : 'GI_UINTEGER',SQL_MAX_COLUMNS_IN_GROUP_BY : 'GI_USMALLINT',
SQL_MAX_COLUMNS_IN_INDEX : 'GI_USMALLINT',SQL_MAX_COLUMNS_IN_ORDER_BY : 'GI_USMALLINT',
SQL_MAX_COLUMNS_IN_SELECT : 'GI_USMALLINT',SQL_MAX_COLUMNS_IN_TABLE : 'GI_USMALLINT',
SQL_MAX_COLUMN_NAME_LEN : 'GI_USMALLINT',SQL_MAX_CONCURRENT_ACTIVITIES : 'GI_USMALLINT',
SQL_MAX_CURSOR_NAME_LEN : 'GI_USMALLINT',SQL_MAX_DRIVER_CONNECTIONS : 'GI_USMALLINT',
SQL_MAX_IDENTIFIER_LEN : 'GI_USMALLINT',SQL_MAX_INDEX_SIZE : 'GI_UINTEGER',
SQL_MAX_PROCEDURE_NAME_LEN : 'GI_USMALLINT',SQL_MAX_ROW_SIZE : 'GI_UINTEGER',
SQL_MAX_ROW_SIZE_INCLUDES_LONG : 'GI_YESNO',SQL_MAX_SCHEMA_NAME_LEN : 'GI_USMALLINT',
SQL_MAX_STATEMENT_LEN : 'GI_UINTEGER',SQL_MAX_TABLES_IN_SELECT : 'GI_USMALLINT',
SQL_MAX_TABLE_NAME_LEN : 'GI_USMALLINT',SQL_MAX_USER_NAME_LEN : 'GI_USMALLINT',
SQL_MULTIPLE_ACTIVE_TXN : 'GI_YESNO',SQL_MULT_RESULT_SETS : 'GI_YESNO',
SQL_NEED_LONG_DATA_LEN : 'GI_YESNO',SQL_NON_NULLABLE_COLUMNS : 'GI_USMALLINT',
SQL_NULL_COLLATION : 'GI_USMALLINT',SQL_NUMERIC_FUNCTIONS : 'GI_UINTEGER',
SQL_ODBC_INTERFACE_CONFORMANCE : 'GI_UINTEGER',SQL_ODBC_VER : 'GI_STRING',SQL_OJ_CAPABILITIES : 'GI_UINTEGER',
SQL_ORDER_BY_COLUMNS_IN_SELECT : 'GI_YESNO',SQL_PARAM_ARRAY_ROW_COUNTS : 'GI_UINTEGER',
SQL_PARAM_ARRAY_SELECTS : 'GI_UINTEGER',SQL_PROCEDURES : 'GI_YESNO',SQL_PROCEDURE_TERM : 'GI_STRING',
SQL_QUOTED_IDENTIFIER_CASE : 'GI_USMALLINT',SQL_ROW_UPDATES : 'GI_YESNO',SQL_SCHEMA_TERM : 'GI_STRING',
SQL_SCHEMA_USAGE : 'GI_UINTEGER',SQL_SCROLL_OPTIONS : 'GI_UINTEGER',SQL_SEARCH_PATTERN_ESCAPE : 'GI_STRING',
SQL_SERVER_NAME : 'GI_STRING',SQL_SPECIAL_CHARACTERS : 'GI_STRING',SQL_SQL92_DATETIME_FUNCTIONS : 'GI_UINTEGER',
SQL_SQL92_FOREIGN_KEY_DELETE_RULE : 'GI_UINTEGER',SQL_SQL92_FOREIGN_KEY_UPDATE_RULE : 'GI_UINTEGER',
SQL_SQL92_GRANT : 'GI_UINTEGER',SQL_SQL92_NUMERIC_VALUE_FUNCTIONS : 'GI_UINTEGER',
SQL_SQL92_PREDICATES : 'GI_UINTEGER',SQL_SQL92_RELATIONAL_JOIN_OPERATORS : 'GI_UINTEGER',
SQL_SQL92_REVOKE : 'GI_UINTEGER',SQL_SQL92_ROW_VALUE_CONSTRUCTOR : 'GI_UINTEGER',
SQL_SQL92_STRING_FUNCTIONS : 'GI_UINTEGER',SQL_SQL92_VALUE_EXPRESSIONS : 'GI_UINTEGER',
SQL_SQL_CONFORMANCE : 'GI_UINTEGER',SQL_STANDARD_CLI_CONFORMANCE : 'GI_UINTEGER',
SQL_STATIC_CURSOR_ATTRIBUTES1 : 'GI_UINTEGER',SQL_STATIC_CURSOR_ATTRIBUTES2 : 'GI_UINTEGER',
SQL_STRING_FUNCTIONS : 'GI_UINTEGER',SQL_SUBQUERIES : 'GI_UINTEGER',
SQL_SYSTEM_FUNCTIONS : 'GI_UINTEGER',SQL_TABLE_TERM : 'GI_STRING',SQL_TIMEDATE_ADD_INTERVALS : 'GI_UINTEGER',
SQL_TIMEDATE_DIFF_INTERVALS : 'GI_UINTEGER',SQL_TIMEDATE_FUNCTIONS : 'GI_UINTEGER',
SQL_TXN_CAPABLE : 'GI_USMALLINT',SQL_TXN_ISOLATION_OPTION : 'GI_UINTEGER',
SQL_UNION : 'GI_UINTEGER',SQL_USER_NAME : 'GI_STRING',SQL_XOPEN_CLI_YEAR : 'GI_STRING',
}
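# Each entry above tags a SQLGetInfo constant with how its result is read
# back: GI_STRING as a character buffer, GI_YESNO as a 'Y'/'N' flag, and
# GI_USMALLINT / GI_UINTEGER as 16-/32-bit unsigned integers.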
# Definitions for types
BINARY = bytearray
Binary = bytearray
DATETIME = datetime.datetime
Date = datetime.date
Time = datetime.time
Timestamp = datetime.datetime
STRING = str
NUMBER = float
ROWID = int
DateFromTicks = datetime.date.fromtimestamp
TimeFromTicks = lambda x: datetime.datetime.fromtimestamp(x).time()
TimestampFromTicks = datetime.datetime.fromtimestamp
#Define exceptions
class OdbcNoLibrary(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class OdbcLibraryError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class OdbcInvalidHandle(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class OdbcGenericError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class Warning(Exception):
def __init__(self, error_code, error_desc):
self.value = (error_code, error_desc)
self.args = (error_code, error_desc)
class Error(Exception):
def __init__(self, error_code, error_desc):
self.value = (error_code, error_desc)
self.args = (error_code, error_desc)
class InterfaceError(Error):
def __init__(self, error_code, error_desc):
self.value = (error_code, error_desc)
self.args = (error_code, error_desc)
class DatabaseError(Error):
def __init__(self, error_code, error_desc):
self.value = (error_code, error_desc)
self.args = (error_code, error_desc)
class InternalError(DatabaseError):
def __init__(self, error_code, error_desc):
self.value = (error_code, error_desc)
self.args = (error_code, error_desc)
class ProgrammingError(DatabaseError):
def __init__(self, error_code, error_desc):
self.value = (error_code, error_desc)
self.args = (error_code, error_desc)
class DataError(DatabaseError):
def __init__(self, error_code, error_desc):
self.value = (error_code, error_desc)
self.args = (error_code, error_desc)
class IntegrityError(DatabaseError):
def __init__(self, error_code, error_desc):
self.value = (error_code, error_desc)
self.args = (error_code, error_desc)
class NotSupportedError(Error):
def __init__(self, error_code, error_desc):
self.value = (error_code, error_desc)
self.args = (error_code, error_desc)
class OperationalError(DatabaseError):
def __init__(self, error_code, error_desc):
self.value = (error_code, error_desc)
self.args = (error_code, error_desc)
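# Since the DB-API exception classes above form a hierarchy rooted at Error,
# callers can catch at whatever granularity they need. A hedged sketch,
# assuming an existing cursor `cur` and a table `t`:
'''
try:
    cur.execute("INSERT INTO t VALUES (?)", (1,))
except IntegrityError:
    pass   # constraint violations (SQLSTATE class 23, 40002)
except DatabaseError:
    pass   # any other database-side failure
except Error:
    pass   # interface/driver-level problems
'''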
############################################################################
#
# Find the ODBC library on the platform and connect to it using ctypes
#
############################################################################
# Get references to the platform's ODBC functions via ctypes
odbc_decoding = 'utf_16'
odbc_encoding = 'utf_16_le'
ucs_length = 2
if sys.platform in ('win32','cli'):
ODBC_API = ctypes.windll.odbc32
# On Windows, the size of SQLWCHAR is hardcoded to 2-bytes.
SQLWCHAR_SIZE = ctypes.sizeof(ctypes.c_ushort)
else:
# Try to load the ODBC library on Linux and other POSIX platforms
try:
# First try direct loading libodbc.so
ODBC_API = ctypes.cdll.LoadLibrary('libodbc.so')
except:
# If loading libodbc.so directly failed,
# try locating libodbc.so via find_library
from ctypes.util import find_library
library = find_library('odbc')
if library is None:
# If find_library still cannot find the library,
# look in the paths where libodbc.so usually lives
lib_paths = ("/usr/lib/libodbc.so","/usr/lib/i386-linux-gnu/libodbc.so","/usr/lib/x86_64-linux-gnu/libodbc.so","/usr/lib/libiodbc.dylib")
lib_paths = [path for path in lib_paths if os.path.exists(path)]
if len(lib_paths) == 0 :
raise OdbcNoLibrary('ODBC Library is not found. Is LD_LIBRARY_PATH set?')
else:
library = lib_paths[0]
# Then try loading the library found above
try:
ODBC_API = ctypes.cdll.LoadLibrary(library)
except:
# If loading still fails, abort.
raise OdbcLibraryError('Error while loading ' + library)
# Only iODBC uses utf-32 / UCS4 encoded data; other driver managers normally
# use utf-16 / UCS2, so set the codecs accordingly.
if 'libiodbc.dylib' in library:
odbc_decoding = 'utf_32'
odbc_encoding = 'utf_32_le'
ucs_length = 4
# unixODBC defaults to 2-bytes SQLWCHAR, unless "-DSQL_WCHART_CONVERT" was
# added to CFLAGS, in which case it will be the size of wchar_t.
# Note that using 4-bytes SQLWCHAR will break most ODBC drivers, as driver
# development mostly targets the Windows platform.
if py_v3:
from subprocess import getstatusoutput
else:
from commands import getstatusoutput
status, output = getstatusoutput('odbc_config --cflags')
if status == 0 and 'SQL_WCHART_CONVERT' in output:
SQLWCHAR_SIZE = ctypes.sizeof(ctypes.c_wchar)
else:
SQLWCHAR_SIZE = ctypes.sizeof(ctypes.c_ushort)
create_buffer_u = ctypes.create_unicode_buffer
create_buffer = ctypes.create_string_buffer
wchar_pointer = ctypes.c_wchar_p
UCS_buf = lambda s: s
def UCS_dec(buffer):
i = 0
uchars = []
while True:
uchar = buffer.raw[i:i + ucs_length].decode(odbc_decoding)
if uchar == unicode('\x00'):
break
uchars.append(uchar)
i += ucs_length
return ''.join(uchars)
from_buffer_u = lambda buffer: buffer.value
# This is the common case on Linux, which uses wide Python build together with
# the default unixODBC without the "-DSQL_WCHART_CONVERT" CFLAGS.
if sys.platform not in ('win32','cli'):
if UNICODE_SIZE >= SQLWCHAR_SIZE:
# We can only use unicode buffer if the size of wchar_t (UNICODE_SIZE) is
# the same as the size expected by the driver manager (SQLWCHAR_SIZE).
create_buffer_u = create_buffer
wchar_pointer = ctypes.c_char_p
def UCS_buf(s):
return s.encode(odbc_encoding)
from_buffer_u = UCS_dec
# Esoteric case (narrow Python build with a wider SQLWCHAR); not supported.
elif UNICODE_SIZE < SQLWCHAR_SIZE:
raise OdbcLibraryError('Using narrow Python build with ODBC library '
'expecting wide unicode is not supported.')
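# A hedged round-trip sketch of the wide-character helpers configured above
# (results differ by platform: UCS_buf is the identity on Windows and wide
# builds, and returns utf_16_le / utf_32_le bytes otherwise):
'''
payload = UCS_buf(u'abc')     # encode a Python unicode string for SQLWCHAR use
buf = create_buffer_u(32)     # allocate a buffer suitable for wide characters
text = from_buffer_u(buf)     # decode a driver-filled buffer back to unicode
'''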
############################################################
# Database value to Python data type mappings
SQL_TYPE_NULL = 0
SQL_DECIMAL = 3
SQL_FLOAT = 6
SQL_DATE = 9
SQL_TIME = 10
SQL_TIMESTAMP = 11
SQL_VARCHAR = 12
SQL_LONGVARCHAR = -1
SQL_VARBINARY = -3
SQL_LONGVARBINARY = -4
SQL_BIGINT = -5
SQL_WVARCHAR = -9
SQL_WLONGVARCHAR = -10
SQL_ALL_TYPES = 0
SQL_SIGNED_OFFSET = -20
SQL_SS_VARIANT = -150
SQL_SS_UDT = -151
SQL_SS_XML = -152
SQL_SS_TIME2 = -154
SQL_C_CHAR = SQL_CHAR = 1
SQL_C_NUMERIC = SQL_NUMERIC = 2
SQL_C_LONG = SQL_INTEGER = 4
SQL_C_SLONG = SQL_C_LONG + SQL_SIGNED_OFFSET
SQL_C_SHORT = SQL_SMALLINT = 5
SQL_C_FLOAT = SQL_REAL = 7
SQL_C_DOUBLE = SQL_DOUBLE = 8
SQL_C_TYPE_DATE = SQL_TYPE_DATE = 91
SQL_C_TYPE_TIME = SQL_TYPE_TIME = 92
SQL_C_BINARY = SQL_BINARY = -2
SQL_C_SBIGINT = SQL_BIGINT + SQL_SIGNED_OFFSET
SQL_C_TINYINT = SQL_TINYINT = -6
SQL_C_BIT = SQL_BIT = -7
SQL_C_WCHAR = SQL_WCHAR = -8
SQL_C_GUID = SQL_GUID = -11
SQL_C_TYPE_TIMESTAMP = SQL_TYPE_TIMESTAMP = 93
SQL_C_DEFAULT = 99
SQL_DESC_DISPLAY_SIZE = SQL_COLUMN_DISPLAY_SIZE
def dttm_cvt(x):
if py_v3:
x = x.decode('ascii')
if x == '': return None
x = x.ljust(26,'0')
return datetime.datetime(int(x[0:4]),int(x[5:7]),int(x[8:10]),int(x[10:13]),int(x[14:16]),int(x[17:19]),int(x[20:26]))
def tm_cvt(x):
if py_v3:
x = x.decode('ascii')
if x == '': return None
x = x.ljust(15,'0')
return datetime.time(int(x[0:2]),int(x[3:5]),int(x[6:8]),int(x[9:15]))
def dt_cvt(x):
if py_v3:
x = x.decode('ascii')
if x == '': return None
else:return datetime.date(int(x[0:4]),int(x[5:7]),int(x[8:10]))
def Decimal_cvt(x):
if py_v3:
x = x.decode('ascii')
return Decimal(x)
bytearray_cvt = bytearray
if sys.platform == 'cli':
bytearray_cvt = lambda x: bytearray(buffer(x))
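# Illustrative inputs for the output converters above (the byte strings are
# hypothetical ODBC character renderings in the shape the converters expect):
'''
dt_cvt(b'2014-11-20')                    # -> datetime.date(2014, 11, 20)
tm_cvt(b'13:17:23')                      # -> datetime.time(13, 17, 23)
dttm_cvt(b'2014-11-20 13:17:23.000001')  # -> datetime.datetime(2014, 11, 20, 13, 17, 23, 1)
Decimal_cvt(b'1.25')                     # -> Decimal('1.25')
'''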
# The data type mappings below reference the document at
# http://infocenter.sybase.com/help/index.jsp?topic=/com.sybase.help.sdk_12.5.1.aseodbc/html/aseodbc/CACFDIGH.htm
SQL_data_type_dict = {
#SQL Data Type 0.Python Data Type 1.Default Output Converter 2.Buffer Type 3.Buffer Allocator 4.Default Size 5.Variable Length
SQL_TYPE_NULL : (None, lambda x: None, SQL_C_CHAR, create_buffer, 2 , False ),
SQL_CHAR : (str, lambda x: x, SQL_C_CHAR, create_buffer, 2048 , False ),
SQL_NUMERIC : (Decimal, Decimal_cvt, SQL_C_CHAR, create_buffer, 150 , False ),
SQL_DECIMAL : (Decimal, Decimal_cvt, SQL_C_CHAR, create_buffer, 150 , False ),
SQL_INTEGER : (int, int, SQL_C_CHAR, create_buffer, 150 , False ),
SQL_SMALLINT : (int, int, SQL_C_CHAR, create_buffer, 150 , False ),
SQL_FLOAT : (float, float, SQL_C_CHAR, create_buffer, 150 , False ),
SQL_REAL : (float, float, SQL_C_CHAR, create_buffer, 150 , False ),
SQL_DOUBLE : (float, float, SQL_C_CHAR, create_buffer, 200 , False ),
SQL_DATE : (datetime.date, dt_cvt, SQL_C_CHAR, create_buffer, 30 , False ),
SQL_TIME : (datetime.time, tm_cvt, SQL_C_CHAR, create_buffer, 20 , False ),
SQL_SS_TIME2 : (datetime.time, tm_cvt, SQL_C_CHAR, create_buffer, 20 , False ),
SQL_TIMESTAMP : (datetime.datetime, dttm_cvt, SQL_C_CHAR, create_buffer, 30 , False ),
SQL_VARCHAR : (str, lambda x: x, SQL_C_CHAR, create_buffer, 2048 , False ),
SQL_LONGVARCHAR : (str, lambda x: x, SQL_C_CHAR, create_buffer, 20500 , True ),
SQL_BINARY : (bytearray, bytearray_cvt, SQL_C_BINARY, create_buffer, 5120 , True ),
SQL_VARBINARY : (bytearray, bytearray_cvt, SQL_C_BINARY, create_buffer, 5120 , True ),
SQL_LONGVARBINARY : (bytearray, bytearray_cvt, SQL_C_BINARY, create_buffer, 20500 , True ),
SQL_BIGINT : (long, long, SQL_C_CHAR, create_buffer, 150 , False ),
SQL_TINYINT : (int, int, SQL_C_CHAR, create_buffer, 150 , False ),
SQL_BIT : (bool, lambda x:x == BYTE_1, SQL_C_CHAR, create_buffer, 2 , False ),
SQL_WCHAR : (unicode, lambda x: x, SQL_C_WCHAR, create_buffer_u, 2048 , False ),
SQL_WVARCHAR : (unicode, lambda x: x, SQL_C_WCHAR, create_buffer_u, 2048 , False ),
SQL_GUID : (str, str, SQL_C_CHAR, create_buffer, 2048 , False ),
SQL_WLONGVARCHAR : (unicode, lambda x: x, SQL_C_WCHAR, create_buffer_u, 20500 , True ),
SQL_TYPE_DATE : (datetime.date, dt_cvt, SQL_C_CHAR, create_buffer, 30 , False ),
SQL_TYPE_TIME : (datetime.time, tm_cvt, SQL_C_CHAR, create_buffer, 20 , False ),
SQL_TYPE_TIMESTAMP : (datetime.datetime, dttm_cvt, SQL_C_CHAR, create_buffer, 30 , False ),
SQL_SS_VARIANT : (str, lambda x: x, SQL_C_CHAR, create_buffer, 2048 , True ),
SQL_SS_XML : (unicode, lambda x: x, SQL_C_WCHAR, create_buffer_u, 20500 , True ),
SQL_SS_UDT : (bytearray, bytearray_cvt, SQL_C_BINARY, create_buffer, 5120 , True ),
}
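# Each entry of SQL_data_type_dict profiles one SQL type; a lookup sketch:
'''
py_type, converter, c_type, allocator, size, var_len = SQL_data_type_dict[SQL_VARCHAR]
# py_type  -> str            converter -> identity lambda
# c_type   -> SQL_C_CHAR     allocator -> create_buffer
# size     -> 2048           var_len   -> False
'''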
"""
Types mapping, applicable for 32-bit and 64-bit Linux / Windows / Mac OS X.
SQLPointer -> ctypes.c_void_p
SQLCHAR * -> ctypes.c_char_p
SQLWCHAR * -> ctypes.c_wchar_p on Windows, ctypes.c_char_p with unixODBC
SQLINT -> ctypes.c_int
SQLSMALLINT -> ctypes.c_short
SQMUSMALLINT -> ctypes.c_ushort
SQLLEN -> ctypes.c_ssize_t
SQLULEN -> ctypes.c_size_t
SQLRETURN -> ctypes.c_short
"""
# Declare the Python return type (SQLRETURN, a c_short) for the ODBC functions below.
funcs_with_ret = [
"SQLAllocHandle",
"SQLBindParameter",
"SQLBindCol",
"SQLCloseCursor",
"SQLColAttribute",
"SQLColumns",
"SQLColumnsW",
"SQLConnect",
"SQLConnectW",
"SQLDataSources",
"SQLDataSourcesW",
"SQLDescribeCol",
"SQLDescribeColW",
"SQLDescribeParam",
"SQLDisconnect",
"SQLDriverConnect",
"SQLDriverConnectW",
"SQLDrivers",
"SQLDriversW",
"SQLEndTran",
"SQLExecDirect",
"SQLExecDirectW",
"SQLExecute",
"SQLFetch",
"SQLFetchScroll",
"SQLForeignKeys",
"SQLForeignKeysW",
"SQLFreeHandle",
"SQLFreeStmt",
"SQLGetData",
"SQLGetDiagRec",
"SQLGetDiagRecW",
"SQLGetInfo",
"SQLGetInfoW",
"SQLGetTypeInfo",
"SQLMoreResults",
"SQLNumParams",
"SQLNumResultCols",
"SQLPrepare",
"SQLPrepareW",
"SQLPrimaryKeys",
"SQLPrimaryKeysW",
"SQLProcedureColumns",
"SQLProcedureColumnsW",
"SQLProcedures",
"SQLProceduresW",
"SQLRowCount",
"SQLSetConnectAttr",
"SQLSetEnvAttr",
"SQLStatistics",
"SQLStatisticsW",
"SQLTables",
"SQLTablesW",
"SQLSetStmtAttr"
]
for func_name in funcs_with_ret:
getattr(ODBC_API, func_name).restype = ctypes.c_short
if sys.platform not in ('cli',):
#IronPython seems unable to declare ctypes.POINTER argument types
ODBC_API.SQLAllocHandle.argtypes = [
ctypes.c_short, ctypes.c_void_p, ctypes.POINTER(ctypes.c_void_p),
]
ODBC_API.SQLBindParameter.argtypes = [
ctypes.c_void_p, ctypes.c_ushort, ctypes.c_short,
ctypes.c_short, ctypes.c_short, ctypes.c_size_t,
ctypes.c_short, ctypes.c_void_p, ctypes.c_ssize_t, ctypes.POINTER(ctypes.c_ssize_t),
]
ODBC_API.SQLColAttribute.argtypes = [
ctypes.c_void_p, ctypes.c_ushort, ctypes.c_ushort,
ctypes.c_void_p, ctypes.c_short, ctypes.POINTER(ctypes.c_short), ctypes.POINTER(ctypes.c_ssize_t),
]
ODBC_API.SQLDataSources.argtypes = [
ctypes.c_void_p, ctypes.c_ushort, ctypes.c_char_p,
ctypes.c_short, ctypes.POINTER(ctypes.c_short),
ctypes.c_char_p, ctypes.c_short, ctypes.POINTER(ctypes.c_short),
]
ODBC_API.SQLDescribeCol.argtypes = [
ctypes.c_void_p, ctypes.c_ushort, ctypes.c_char_p, ctypes.c_short,
ctypes.POINTER(ctypes.c_short), ctypes.POINTER(ctypes.c_short),
ctypes.POINTER(ctypes.c_size_t), ctypes.POINTER(ctypes.c_short), ctypes.POINTER(ctypes.c_short),
]
ODBC_API.SQLDescribeParam.argtypes = [
ctypes.c_void_p, ctypes.c_ushort,
ctypes.POINTER(ctypes.c_short), ctypes.POINTER(ctypes.c_size_t),
ctypes.POINTER(ctypes.c_short), ctypes.POINTER(ctypes.c_short),
]
ODBC_API.SQLDriverConnect.argtypes = [
ctypes.c_void_p, ctypes.c_void_p, ctypes.c_char_p,
ctypes.c_short, ctypes.c_char_p, ctypes.c_short,
ctypes.POINTER(ctypes.c_short), ctypes.c_ushort,
]
ODBC_API.SQLDrivers.argtypes = [
ctypes.c_void_p, ctypes.c_ushort,
ctypes.c_char_p, ctypes.c_short, ctypes.POINTER(ctypes.c_short),
ctypes.c_char_p, ctypes.c_short, ctypes.POINTER(ctypes.c_short),
]
ODBC_API.SQLGetData.argtypes = [
ctypes.c_void_p, ctypes.c_ushort, ctypes.c_short,
ctypes.c_void_p, ctypes.c_ssize_t, ctypes.POINTER(ctypes.c_ssize_t),
]
ODBC_API.SQLGetDiagRec.argtypes = [
ctypes.c_short, ctypes.c_void_p, ctypes.c_short,
ctypes.c_char_p, ctypes.POINTER(ctypes.c_int),
ctypes.c_char_p, ctypes.c_short, ctypes.POINTER(ctypes.c_short),
]
ODBC_API.SQLGetInfo.argtypes = [
ctypes.c_void_p, ctypes.c_ushort, ctypes.c_void_p,
ctypes.c_short, ctypes.POINTER(ctypes.c_short),
]
ODBC_API.SQLRowCount.argtypes = [
ctypes.c_void_p, ctypes.POINTER(ctypes.c_ssize_t),
]
ODBC_API.SQLNumParams.argtypes = [
ctypes.c_void_p, ctypes.POINTER(ctypes.c_short),
]
ODBC_API.SQLNumResultCols.argtypes = [
ctypes.c_void_p, ctypes.POINTER(ctypes.c_short),
]
ODBC_API.SQLCloseCursor.argtypes = [ctypes.c_void_p]
ODBC_API.SQLColumns.argtypes = [
ctypes.c_void_p, ctypes.c_char_p, ctypes.c_short,
ctypes.c_char_p, ctypes.c_short, ctypes.c_char_p,
ctypes.c_short, ctypes.c_char_p, ctypes.c_short,
]
ODBC_API.SQLConnect.argtypes = [
ctypes.c_void_p, ctypes.c_char_p, ctypes.c_short,
ctypes.c_char_p, ctypes.c_short, ctypes.c_char_p, ctypes.c_short,
]
ODBC_API.SQLDisconnect.argtypes = [ctypes.c_void_p]
ODBC_API.SQLEndTran.argtypes = [
ctypes.c_short, ctypes.c_void_p, ctypes.c_short,
]
ODBC_API.SQLExecute.argtypes = [ctypes.c_void_p]
ODBC_API.SQLExecDirect.argtypes = [
ctypes.c_void_p, ctypes.c_char_p, ctypes.c_int,
]
ODBC_API.SQLFetch.argtypes = [ctypes.c_void_p]
ODBC_API.SQLFetchScroll.argtypes = [
ctypes.c_void_p, ctypes.c_short, ctypes.c_ssize_t,
]
ODBC_API.SQLForeignKeys.argtypes = [
ctypes.c_void_p, ctypes.c_char_p, ctypes.c_short,
ctypes.c_char_p, ctypes.c_short, ctypes.c_char_p,
ctypes.c_short, ctypes.c_char_p, ctypes.c_short,
ctypes.c_char_p, ctypes.c_short, ctypes.c_char_p, ctypes.c_short,
]
ODBC_API.SQLFreeHandle.argtypes = [
ctypes.c_short, ctypes.c_void_p,
]
ODBC_API.SQLFreeStmt.argtypes = [
ctypes.c_void_p, ctypes.c_ushort,
]
ODBC_API.SQLGetTypeInfo.argtypes = [
ctypes.c_void_p, ctypes.c_short,
]
ODBC_API.SQLMoreResults.argtypes = [ctypes.c_void_p]
ODBC_API.SQLPrepare.argtypes = [
ctypes.c_void_p, ctypes.c_char_p, ctypes.c_int,
]
ODBC_API.SQLPrimaryKeys.argtypes = [
ctypes.c_void_p, ctypes.c_char_p, ctypes.c_short,
ctypes.c_char_p, ctypes.c_short, ctypes.c_char_p, ctypes.c_short,
]
ODBC_API.SQLProcedureColumns.argtypes = [
ctypes.c_void_p, ctypes.c_char_p, ctypes.c_short,
ctypes.c_char_p, ctypes.c_short, ctypes.c_char_p,
ctypes.c_short, ctypes.c_char_p, ctypes.c_short,
]
ODBC_API.SQLProcedures.argtypes = [
ctypes.c_void_p, ctypes.c_char_p, ctypes.c_short,
ctypes.c_char_p, ctypes.c_short, ctypes.c_char_p, ctypes.c_short,
]
ODBC_API.SQLSetConnectAttr.argtypes = [
ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p, ctypes.c_int,
]
ODBC_API.SQLSetEnvAttr.argtypes = [
ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p, ctypes.c_int,
]
ODBC_API.SQLStatistics.argtypes = [
ctypes.c_void_p, ctypes.c_char_p, ctypes.c_short,
ctypes.c_char_p, ctypes.c_short, ctypes.c_char_p,
ctypes.c_short, ctypes.c_ushort, ctypes.c_ushort,
]
ODBC_API.SQLTables.argtypes = [
ctypes.c_void_p, ctypes.c_char_p, ctypes.c_short,
ctypes.c_char_p, ctypes.c_short, ctypes.c_char_p,
ctypes.c_short, ctypes.c_char_p, ctypes.c_short,
]
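# Declaring argtypes lets ctypes coerce Python arguments and catch arity
# mistakes at call time. A hedged illustration using SQLFreeStmt as declared
# above (stmt_handle stands for any valid statement handle):
'''
ret = ODBC_API.SQLFreeStmt(stmt_handle, SQL_CLOSE)
# ctypes converts SQL_CLOSE (a plain Python int) to c_ushort per the declaration
'''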
def to_wchar(argtypes):
if argtypes: # Under IronPython some argtypes are not declared
result = []
for x in argtypes:
if x == ctypes.c_char_p:
result.append(wchar_pointer)
else:
result.append(x)
return result
else:
return argtypes
ODBC_API.SQLColumnsW.argtypes = to_wchar(ODBC_API.SQLColumns.argtypes)
ODBC_API.SQLConnectW.argtypes = to_wchar(ODBC_API.SQLConnect.argtypes)
ODBC_API.SQLDataSourcesW.argtypes = to_wchar(ODBC_API.SQLDataSources.argtypes)
ODBC_API.SQLDescribeColW.argtypes = to_wchar(ODBC_API.SQLDescribeCol.argtypes)
ODBC_API.SQLDriverConnectW.argtypes = to_wchar(ODBC_API.SQLDriverConnect.argtypes)
ODBC_API.SQLDriversW.argtypes = to_wchar(ODBC_API.SQLDrivers.argtypes)
ODBC_API.SQLExecDirectW.argtypes = to_wchar(ODBC_API.SQLExecDirect.argtypes)
ODBC_API.SQLForeignKeysW.argtypes = to_wchar(ODBC_API.SQLForeignKeys.argtypes)
ODBC_API.SQLPrepareW.argtypes = to_wchar(ODBC_API.SQLPrepare.argtypes)
ODBC_API.SQLPrimaryKeysW.argtypes = to_wchar(ODBC_API.SQLPrimaryKeys.argtypes)
ODBC_API.SQLProcedureColumnsW.argtypes = to_wchar(ODBC_API.SQLProcedureColumns.argtypes)
ODBC_API.SQLProceduresW.argtypes = to_wchar(ODBC_API.SQLProcedures.argtypes)
ODBC_API.SQLStatisticsW.argtypes = to_wchar(ODBC_API.SQLStatistics.argtypes)
ODBC_API.SQLTablesW.argtypes = to_wchar(ODBC_API.SQLTables.argtypes)
ODBC_API.SQLGetDiagRecW.argtypes = to_wchar(ODBC_API.SQLGetDiagRec.argtypes)
ODBC_API.SQLGetInfoW.argtypes = to_wchar(ODBC_API.SQLGetInfo.argtypes)
# Set aliases for the ctypes functions for better code readability and performance.
ADDR = ctypes.byref
c_short = ctypes.c_short
c_ssize_t = ctypes.c_ssize_t
SQLFetch = ODBC_API.SQLFetch
SQLExecute = ODBC_API.SQLExecute
SQLBindParameter = ODBC_API.SQLBindParameter
SQLGetData = ODBC_API.SQLGetData
SQLRowCount = ODBC_API.SQLRowCount
SQLNumResultCols = ODBC_API.SQLNumResultCols
SQLEndTran = ODBC_API.SQLEndTran
# More aliases for better code readability and performance.
NO_FREE_STATEMENT = 0
FREE_STATEMENT = 1
BLANK_BYTE = str_8b()
def ctrl_err(ht, h, val_ret, ansi):
"""Classify type of ODBC error from (type of handle, handle, return value)
, and raise with a list"""
if ansi:
state = create_buffer(22)
Message = create_buffer(1024*4)
ODBC_func = ODBC_API.SQLGetDiagRec
if py_v3:
raw_s = lambda s: bytes(s,'ascii')
else:
raw_s = str_8b
else:
state = create_buffer_u(22)
Message = create_buffer_u(1024*4)
ODBC_func = ODBC_API.SQLGetDiagRecW
raw_s = unicode
NativeError = ctypes.c_int()
Buffer_len = c_short()
err_list = []
number_errors = 1
while 1:
ret = ODBC_func(ht, h, number_errors, state, \
ADDR(NativeError), Message, 1024, ADDR(Buffer_len))
if ret == SQL_NO_DATA_FOUND:
#No more diagnostic records; classify the first error and raise
#print(err_list[0][1])
state = err_list[0][0]
err_text = raw_s('[')+state+raw_s('] ')+err_list[0][1]
if state[:2] in (raw_s('24'),raw_s('25'),raw_s('42')):
raise ProgrammingError(state,err_text)
elif state[:2] == raw_s('22'):
raise DataError(state,err_text)
elif state[:2] == raw_s('23') or state == raw_s('40002'):
raise IntegrityError(state,err_text)
elif state == raw_s('0A000'):
raise NotSupportedError(state,err_text)
elif state in (raw_s('HYT00'),raw_s('HYT01')):
raise OperationalError(state,err_text)
elif state[:2] in (raw_s('IM'),raw_s('HY')):
raise Error(state,err_text)
else:
raise DatabaseError(state,err_text)
break
elif ret == SQL_INVALID_HANDLE:
#The handle passed in is invalid
raise ProgrammingError('', 'SQL_INVALID_HANDLE')
elif ret == SQL_SUCCESS:
if ansi:
err_list.append((state.value, Message.value, NativeError.value))
else:
err_list.append((from_buffer_u(state), from_buffer_u(Message), NativeError.value))
number_errors += 1
elif ret == SQL_ERROR:
raise ProgrammingError('', 'SQL_ERROR')
def check_success(ODBC_obj, ret):
""" Validate return value, if not success, raise exceptions based on the handle """
if ret not in (SQL_SUCCESS, SQL_SUCCESS_WITH_INFO, SQL_NO_DATA):
if isinstance(ODBC_obj, Cursor):
ctrl_err(SQL_HANDLE_STMT, ODBC_obj.stmt_h, ret, ODBC_obj.ansi)
elif isinstance(ODBC_obj, Connection):
ctrl_err(SQL_HANDLE_DBC, ODBC_obj.dbc_h, ret, ODBC_obj.ansi)
else:
ctrl_err(SQL_HANDLE_ENV, ODBC_obj, ret, False)
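# For reference, the SQLSTATE-to-exception mapping applied in ctrl_err above
# (a summary sketch; the authoritative logic is the if/elif chain itself):
'''
classes 24, 25, 42      -> ProgrammingError
class 22                -> DataError
class 23, state 40002   -> IntegrityError
state 0A000             -> NotSupportedError
states HYT00, HYT01     -> OperationalError
classes IM, HY          -> Error
anything else           -> DatabaseError
'''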
def AllocateEnv():
if pooling:
ret = ODBC_API.SQLSetEnvAttr(SQL_NULL_HANDLE, SQL_ATTR_CONNECTION_POOLING, SQL_CP_ONE_PER_HENV, SQL_IS_UINTEGER)
check_success(SQL_NULL_HANDLE, ret)
'''
Allocate an ODBC environment by initializing the handle shared_env_h.
An ODBC environment must be created before connections can be created
under it, and connection pooling can be shared within one environment.
'''
global shared_env_h
shared_env_h = ctypes.c_void_p()
ret = ODBC_API.SQLAllocHandle(SQL_HANDLE_ENV, SQL_NULL_HANDLE, ADDR(shared_env_h))
check_success(shared_env_h, ret)
# Set the ODBC environment's compatibility level to ODBC 3.0
ret = ODBC_API.SQLSetEnvAttr(shared_env_h, SQL_ATTR_ODBC_VERSION, SQL_OV_ODBC3, 0)
check_success(shared_env_h, ret)
"""
Here, we have a few callables that determine how a result row is returned.
A new one can be added by creating a callable that:
- accepts a cursor as its parameter.
- returns a callable that accepts an iterable containing the row values.
"""
def TupleRow(cursor):
"""Normal tuple with added attribute `cursor_description`, as in pyodbc.
This is the default.
"""
class Row(tuple):
cursor_description = cursor.description
def get(self, field):
if not hasattr(self, 'field_dict'):
self.field_dict = {}
for i,item in enumerate(self):
self.field_dict[self.cursor_description[i][0]] = item
return self.field_dict.get(field)
def __getitem__(self, field):
if isinstance(field, (unicode,str)):
return self.get(field)
else:
return tuple.__getitem__(self,field)
return Row
def NamedTupleRow(cursor):
"""Named tuple to allow attribute lookup by name.
Requires py2.6 or above.
"""
from collections import namedtuple
attr_names = [x[0] for x in cursor._ColBufferList]
class Row(namedtuple('Row', attr_names, rename=True)):
cursor_description = cursor.description
def __new__(cls, iterable):
return super(Row, cls).__new__(cls, *iterable)
return Row
def MutableNamedTupleRow(cursor):
"""Mutable named tuple to allow attribute to be replaced. This should be
compatible with pyodbc's Row type.
Requires 3rd-party library "recordtype".
"""
from recordtype import recordtype
attr_names = [x[0] for x in cursor._ColBufferList]
class Row(recordtype('Row', attr_names, rename=True)):
cursor_description = cursor.description
def __init__(self, iterable):
super(Row, self).__init__(*iterable)
def __iter__(self):
for field_name in self.__slots__:
yield getattr(self, field_name)
def __getitem__(self, index):
if isinstance(index, slice):
return tuple(getattr(self, x) for x in self.__slots__[index])
return getattr(self, self.__slots__[index])
def __setitem__(self, index, value):
setattr(self, self.__slots__[index], value)
return Row
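# A hedged usage sketch of the row-type callables above (assumes an open
# connection `conn`; NamedTupleRow needs Python 2.6+, MutableNamedTupleRow
# needs the third-party "recordtype" package):
'''
cur = conn.cursor(row_type_callable=NamedTupleRow)
cur.execute("SELECT 1 AS a, 2 AS b")
row = cur.fetchone()
row.a, row.b      # attribute access by column name
row[0], row[1]    # positional access still works
'''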
# When NULL is used for a binary parameter, the database usually will not
# accept a plain None for a binary field, so the workaround is a special
# None marker that tells the pypyodbc module this NULL is destined for
# a binary field.
class BinaryNullType(): pass
BinaryNull = BinaryNullType()
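# A hedged sketch: pass BinaryNull instead of None when a NULL must be bound
# to a binary column (assumes an open cursor `cur` and a table `t` with a
# binary column `blob_col`):
'''
cur.execute("UPDATE t SET blob_col = ? WHERE id = ?", (BinaryNull, 1))
'''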
# The get_type function determines whether parameters need to be re-bound
# when the parameter types have changed.
# 'b' for bool, 'U' for long unicode string, 'u' for short unicode string
# 'S' for long 8 bit string, 's' for short 8 bit string, 'l' for big integer, 'i' for normal integer
# 'f' for float, 'D' for Decimal, 't' for datetime.time, 'd' for datetime.date, 'dt' for datetime.datetime
# 'bi' for binary
def get_type(v):
if isinstance(v, bool):
return ('b',)
elif isinstance(v, unicode):
if len(v) >= 255:
return ('U',(len(v)//1000+1)*1000)
else:
return ('u',)
elif isinstance(v, (str_8b,str)):
if len(v) >= 255:
return ('S',(len(v)//1000+1)*1000)
else:
return ('s',)
elif isinstance(v, (int, long)):
#SQL_BIGINT definition: http://msdn.microsoft.com/en-us/library/ms187745.aspx
if v > 2147483647 or v < -2147483648:
return ('l',)
else:
return ('i',)
elif isinstance(v, float):
return ('f',)
elif isinstance(v, BinaryNullType):
return ('BN',)
elif v is None:
return ('N',)
elif isinstance(v, Decimal):
t = v.as_tuple() #1.23 -> (1,2,3),-2 ; 1.23E7 -> (1,2,3),5
return ('D',(len(t[1]),0 - t[2])) # number of digits, and number of decimal digits
elif isinstance (v, datetime.datetime):
return ('dt',)
elif isinstance (v, datetime.date):
return ('d',)
elif isinstance(v, datetime.time):
return ('t',)
elif isinstance (v, (bytearray, buffer)):
return ('bi',(len(v)//1000+1)*1000)
return type(v)
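# Illustrative get_type results used by the rebinding check (buffer sizes are
# rounded up to the next multiple of 1000):
'''
get_type(True)             # ('b',)
get_type(3)                # ('i',)
get_type(2**40)            # ('l',)  -- outside 32-bit range
get_type(u'x' * 300)       # ('U', 1000)
get_type(Decimal('1.23'))  # ('D', (3, 2))  -- 3 digits, 2 decimal places
get_type(bytearray(b'x'))  # ('bi', 1000)
'''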
# The Cursor Class.
class Cursor:
def __init__(self, conx, row_type_callable=None):
""" Initialize self.stmt_h, which is the handle of a statement
A statement is actually the basis of a python"cursor" object
"""
self.stmt_h = ctypes.c_void_p()
self.connection = conx
self.ansi = conx.ansi
self.row_type_callable = row_type_callable or TupleRow
self.statement = None
self._last_param_types = None
self._ParamBufferList = []
self._ColBufferList = []
self._row_type = None
self._buf_cvt_func = []
self.rowcount = -1
self.description = None
self.autocommit = None
self._ColTypeCodeList = []
self._outputsize = {}
self._inputsizers = []
self.arraysize = 1
ret = ODBC_API.SQLAllocHandle(SQL_HANDLE_STMT, self.connection.dbc_h, ADDR(self.stmt_h))
check_success(self, ret)
self.timeout = conx.timeout
if self.timeout != 0:
self.set_timeout(self.timeout)
self._PARAM_SQL_TYPE_LIST = []
self.closed = False
def set_timeout(self, timeout):
self.timeout = timeout
ret = ODBC_API.SQLSetStmtAttr(self.stmt_h, SQL_ATTR_QUERY_TIMEOUT, self.timeout, 0)
check_success(self, ret)
def prepare(self, query_string):
"""prepare a query"""
#self._free_results(FREE_STATEMENT)
if not self.connection:
self.close()
if type(query_string) == unicode:
c_query_string = wchar_pointer(UCS_buf(query_string))
ret = ODBC_API.SQLPrepareW(self.stmt_h, c_query_string, len(query_string))
else:
c_query_string = ctypes.c_char_p(query_string)
ret = ODBC_API.SQLPrepare(self.stmt_h, c_query_string, len(query_string))
if ret != SQL_SUCCESS:
check_success(self, ret)
self._PARAM_SQL_TYPE_LIST = []
if self.connection.support_SQLDescribeParam:
# SQLServer's SQLDescribeParam only supports DML SQL, so avoid the SELECT statement
if True:# 'SELECT' not in query_string.upper():
#self._free_results(NO_FREE_STATEMENT)
NumParams = c_short()
ret = ODBC_API.SQLNumParams(self.stmt_h, ADDR(NumParams))
if ret != SQL_SUCCESS:
check_success(self, ret)
for col_num in range(NumParams.value):
ParameterNumber = ctypes.c_ushort(col_num + 1)
DataType = c_short()
ParameterSize = ctypes.c_size_t()
DecimalDigits = c_short()
Nullable = c_short()
ret = ODBC_API.SQLDescribeParam(
self.stmt_h,
ParameterNumber,
ADDR(DataType),
ADDR(ParameterSize),
ADDR(DecimalDigits),
ADDR(Nullable),
)
if ret != SQL_SUCCESS:
try:
check_success(self, ret)
except DatabaseError:
if sys.exc_info()[1].value[0] == '07009':
self._PARAM_SQL_TYPE_LIST = []
break
else:
raise sys.exc_info()[1]
except:
raise sys.exc_info()[1]
self._PARAM_SQL_TYPE_LIST.append((DataType.value,DecimalDigits.value))
self.statement = query_string
def _BindParams(self, param_types, pram_io_list = []):
"""Create parameter buffers based on param types, and bind them to the statement"""
# Clear the old Parameters
if not self.connection:
self.close()
#self._free_results(NO_FREE_STATEMENT)
# Get the number of query parameters judged by database.
NumParams = c_short()
ret = ODBC_API.SQLNumParams(self.stmt_h, ADDR(NumParams))
if ret != SQL_SUCCESS:
check_success(self, ret)
if len(param_types) != NumParams.value:
# The number of parameters provided does not match the number required
error_desc = "The SQL contains %d parameter markers, but %d parameters were supplied" \
%(NumParams.value,len(param_types))
raise ProgrammingError('HY000',error_desc)
# Every parameter needs to be bound to a buffer
ParamBufferList = []
# Temporary holder since we can only call SQLDescribeParam before
# calling SQLBindParam.
temp_holder = []
for col_num in range(NumParams.value):
dec_num = 0
buf_size = 512
if param_types[col_num][0] == 'u':
sql_c_type = SQL_C_WCHAR
sql_type = SQL_WVARCHAR
buf_size = 255
ParameterBuffer = create_buffer_u(buf_size)
elif param_types[col_num][0] == 's':
sql_c_type = SQL_C_CHAR
sql_type = SQL_VARCHAR
buf_size = 255
ParameterBuffer = create_buffer(buf_size)
elif param_types[col_num][0] == 'U':
sql_c_type = SQL_C_WCHAR
sql_type = SQL_WLONGVARCHAR
buf_size = param_types[col_num][1]#len(self._inputsizers)>col_num and self._inputsizers[col_num] or 20500
ParameterBuffer = create_buffer_u(buf_size)
elif param_types[col_num][0] == 'S':
sql_c_type = SQL_C_CHAR
sql_type = SQL_LONGVARCHAR
buf_size = param_types[col_num][1]#len(self._inputsizers)>col_num and self._inputsizers[col_num] or 20500
ParameterBuffer = create_buffer(buf_size)
# bool subclasses int, thus has to go first
elif param_types[col_num][0] == 'b':
sql_c_type = SQL_C_CHAR
sql_type = SQL_BIT
buf_size = SQL_data_type_dict[sql_type][4]
ParameterBuffer = create_buffer(buf_size)
elif param_types[col_num][0] == 'i':
sql_c_type = SQL_C_CHAR
sql_type = SQL_INTEGER
buf_size = SQL_data_type_dict[sql_type][4]
ParameterBuffer = create_buffer(buf_size)
elif param_types[col_num][0] == 'l':
sql_c_type = SQL_C_CHAR
sql_type = SQL_BIGINT
buf_size = SQL_data_type_dict[sql_type][4]
ParameterBuffer = create_buffer(buf_size)
elif param_types[col_num][0] == 'D': #Decimal
sql_c_type = SQL_C_CHAR
sql_type = SQL_NUMERIC
digit_num, dec_num = param_types[col_num][1]
if dec_num > 0:
# has decimal
buf_size = digit_num
dec_num = dec_num
else:
# no decimal
buf_size = digit_num - dec_num
dec_num = 0
ParameterBuffer = create_buffer(buf_size + 4)# add extra length for sign and dot
elif param_types[col_num][0] == 'f':
sql_c_type = SQL_C_CHAR
sql_type = SQL_DOUBLE
buf_size = SQL_data_type_dict[sql_type][4]
ParameterBuffer = create_buffer(buf_size)
# datetime subclasses date, thus has to go first
elif param_types[col_num][0] == 'dt':
sql_c_type = SQL_C_CHAR
sql_type = SQL_TYPE_TIMESTAMP
buf_size = self.connection.type_size_dic[SQL_TYPE_TIMESTAMP][0]
ParameterBuffer = create_buffer(buf_size)
dec_num = self.connection.type_size_dic[SQL_TYPE_TIMESTAMP][1]
elif param_types[col_num][0] == 'd':
sql_c_type = SQL_C_CHAR
if SQL_TYPE_DATE in self.connection.type_size_dic:
#if DEBUG:print('conx.type_size_dic.has_key(SQL_TYPE_DATE)')
sql_type = SQL_TYPE_DATE
buf_size = self.connection.type_size_dic[SQL_TYPE_DATE][0]
ParameterBuffer = create_buffer(buf_size)
dec_num = self.connection.type_size_dic[SQL_TYPE_DATE][1]
else:
# SQL Server <2008 doesn't have a DATE type.
sql_type = SQL_TYPE_TIMESTAMP
buf_size = 10
ParameterBuffer = create_buffer(buf_size)
elif param_types[col_num][0] == 't':
sql_c_type = SQL_C_CHAR
if SQL_TYPE_TIME in self.connection.type_size_dic:
sql_type = SQL_TYPE_TIME
buf_size = self.connection.type_size_dic[SQL_TYPE_TIME][0]
ParameterBuffer = create_buffer(buf_size)
dec_num = self.connection.type_size_dic[SQL_TYPE_TIME][1]
elif SQL_SS_TIME2 in self.connection.type_size_dic:
# TIME type added in SQL Server 2008
sql_type = SQL_SS_TIME2
buf_size = self.connection.type_size_dic[SQL_SS_TIME2][0]
ParameterBuffer = create_buffer(buf_size)
dec_num = self.connection.type_size_dic[SQL_SS_TIME2][1]
else:
# SQL Server <2008 doesn't have a TIME type.
sql_type = SQL_TYPE_TIMESTAMP
buf_size = self.connection.type_size_dic[SQL_TYPE_TIMESTAMP][0]
ParameterBuffer = create_buffer(buf_size)
dec_num = 3
elif param_types[col_num][0] == 'BN':
sql_c_type = SQL_C_BINARY
sql_type = SQL_VARBINARY
buf_size = 1
ParameterBuffer = create_buffer(buf_size)
elif param_types[col_num][0] == 'N':
if len(self._PARAM_SQL_TYPE_LIST) > 0:
sql_c_type = SQL_C_DEFAULT
sql_type = self._PARAM_SQL_TYPE_LIST[col_num][0]
buf_size = 1
ParameterBuffer = create_buffer(buf_size)
else:
sql_c_type = SQL_C_CHAR
sql_type = SQL_CHAR
buf_size = 1
ParameterBuffer = create_buffer(buf_size)
elif param_types[col_num][0] == 'bi':
sql_c_type = SQL_C_BINARY
sql_type = SQL_LONGVARBINARY
buf_size = param_types[col_num][1]#len(self._inputsizers)>col_num and self._inputsizers[col_num] or 20500
ParameterBuffer = create_buffer(buf_size)
else:
sql_c_type = SQL_C_CHAR
sql_type = SQL_LONGVARCHAR
buf_size = len(self._inputsizers)>col_num and self._inputsizers[col_num] or 20500
ParameterBuffer = create_buffer(buf_size)
temp_holder.append((sql_c_type, sql_type, buf_size, dec_num, ParameterBuffer))
for col_num, (sql_c_type, sql_type, buf_size, dec_num, ParameterBuffer) in enumerate(temp_holder):
BufferLen = c_ssize_t(buf_size)
LenOrIndBuf = c_ssize_t()
InputOutputType = SQL_PARAM_INPUT
if len(pram_io_list) > col_num:
InputOutputType = pram_io_list[col_num]
ret = SQLBindParameter(self.stmt_h, col_num + 1, InputOutputType, sql_c_type, sql_type, buf_size,\
dec_num, ADDR(ParameterBuffer), BufferLen,ADDR(LenOrIndBuf))
if ret != SQL_SUCCESS:
check_success(self, ret)
# Append the value buffer and the length buffer to the array
ParamBufferList.append((ParameterBuffer,LenOrIndBuf,sql_type))
self._last_param_types = param_types
self._ParamBufferList = ParamBufferList
def execute(self, query_string, params=None, many_mode=False, call_mode=False):
""" Execute the query string, with optional parameters.
If parameters are provided, the query would first be prepared, then executed with parameters;
If parameters are not provided, only th query sting, it would be executed directly
"""
if not self.connection:
self.close()
self._free_stmt(SQL_CLOSE)
if params:
# If parameters exist, first prepare the query, then execute it with the parameters
if not isinstance(params, (tuple, list)):
raise TypeError("Params must be in a list, tuple, or Row")
if query_string != self.statement:
# if the query is not the same as the last one, it has not been prepared yet
self.prepare(query_string)
param_types = list(map(get_type, params))
if call_mode:
self._free_stmt(SQL_RESET_PARAMS)
self._BindParams(param_types, self._pram_io_list)
else:
if self._last_param_types is None:
self._free_stmt(SQL_RESET_PARAMS)
self._BindParams(param_types)
elif len(param_types) != len(self._last_param_types):
self._free_stmt(SQL_RESET_PARAMS)
self._BindParams(param_types)
elif sum([p_type[0] != 'N' and p_type != self._last_param_types[i] for i,p_type in enumerate(param_types)]) > 0:
self._free_stmt(SQL_RESET_PARAMS)
self._BindParams(param_types)
# With query prepared, now put parameters into buffers
col_num = 0
for param_buffer, param_buffer_len, sql_type in self._ParamBufferList:
c_char_buf, c_buf_len = '', 0
param_val = params[col_num]
if param_types[col_num][0] in ('N','BN'):
param_buffer_len.value = SQL_NULL_DATA
col_num += 1
continue
elif param_types[col_num][0] in ('i','l','f'):
if py_v3:
c_char_buf = bytes(str(param_val),'ascii')
else:
c_char_buf = str(param_val)
c_buf_len = len(c_char_buf)
elif param_types[col_num][0] in ('s','S'):
c_char_buf = param_val
c_buf_len = len(c_char_buf)
elif param_types[col_num][0] in ('u','U'):
c_char_buf = UCS_buf(param_val)
c_buf_len = len(c_char_buf)
elif param_types[col_num][0] == 'dt':
max_len = self.connection.type_size_dic[SQL_TYPE_TIMESTAMP][0]
datetime_str = param_val.strftime('%Y-%m-%d %H:%M:%S.%f')
c_char_buf = datetime_str[:max_len]
if py_v3:
c_char_buf = bytes(c_char_buf,'ascii')
c_buf_len = len(c_char_buf)
# print c_buf_len, c_char_buf
elif param_types[col_num][0] == 'd':
if SQL_TYPE_DATE in self.connection.type_size_dic:
max_len = self.connection.type_size_dic[SQL_TYPE_DATE][0]
else:
max_len = 10
c_char_buf = param_val.isoformat()[:max_len]
if py_v3:
c_char_buf = bytes(c_char_buf,'ascii')
c_buf_len = len(c_char_buf)
#print c_char_buf
elif param_types[col_num][0] == 't':
if SQL_TYPE_TIME in self.connection.type_size_dic:
max_len = self.connection.type_size_dic[SQL_TYPE_TIME][0]
c_char_buf = param_val.isoformat()[:max_len]
c_buf_len = len(c_char_buf)
elif SQL_SS_TIME2 in self.connection.type_size_dic:
max_len = self.connection.type_size_dic[SQL_SS_TIME2][0]
c_char_buf = param_val.isoformat()[:max_len]
c_buf_len = len(c_char_buf)
else:
c_buf_len = self.connection.type_size_dic[SQL_TYPE_TIMESTAMP][0]
time_str = param_val.isoformat()
if len(time_str) == 8:
time_str += '.000'
c_char_buf = '1900-01-01 '+time_str[0:c_buf_len - 11]
if py_v3:
c_char_buf = bytes(c_char_buf,'ascii')
#print c_buf_len, c_char_buf
elif param_types[col_num][0] == 'b':
if param_val == True:
c_char_buf = '1'
else:
c_char_buf = '0'
if py_v3:
c_char_buf = bytes(c_char_buf,'ascii')
c_buf_len = 1
elif param_types[col_num][0] == 'D': #Decimal
sign = param_val.as_tuple()[0] == 0 and '+' or '-'
digit_string = ''.join([str(x) for x in param_val.as_tuple()[1]])
digit_num, dec_num = param_types[col_num][1]
if dec_num > 0:
# has decimal
left_part = digit_string[:digit_num - dec_num]
right_part = digit_string[0-dec_num:]
else:
# no decimal
left_part = digit_string + '0'*(0-dec_num)
right_part = ''
v = ''.join((sign, left_part,'.', right_part))
if py_v3:
c_char_buf = bytes(v,'ascii')
else:
c_char_buf = v
c_buf_len = len(c_char_buf)
elif param_types[col_num][0] == 'bi':
c_char_buf = str_8b(param_val)
c_buf_len = len(c_char_buf)
else:
c_char_buf = param_val
if param_types[col_num][0] == 'bi':
param_buffer.raw = str_8b(param_val)
else:
#print (type(param_val),param_buffer, param_buffer.value)
param_buffer.value = c_char_buf
if param_types[col_num][0] in ('U','u','S','s'):
#The ODBC driver finds the NUL terminator in unicode and string data to determine the length
param_buffer_len.value = SQL_NTS
else:
param_buffer_len.value = c_buf_len
col_num += 1
ret = SQLExecute(self.stmt_h)
if ret != SQL_SUCCESS:
#print(param_val, param_buffer, param_buffer.value)
check_success(self, ret)
if not many_mode:
self._NumOfRows()
self._UpdateDesc()
#self._BindCols()
else:
self.execdirect(query_string)
return self
def _SQLExecute(self):
if not self.connection:
self.close()
ret = SQLExecute(self.stmt_h)
if ret != SQL_SUCCESS:
check_success(self, ret)
def execdirect(self, query_string):
"""Execute a query directly"""
if not self.connection:
self.close()
self._free_stmt()
self._last_param_types = None
self.statement = None
if type(query_string) == unicode:
c_query_string = wchar_pointer(UCS_buf(query_string))
ret = ODBC_API.SQLExecDirectW(self.stmt_h, c_query_string, len(query_string))
else:
c_query_string = ctypes.c_char_p(query_string)
ret = ODBC_API.SQLExecDirect(self.stmt_h, c_query_string, len(query_string))
check_success(self, ret)
self._NumOfRows()
self._UpdateDesc()
#self._BindCols()
return self
def callproc(self, procname, args):
if not self.connection:
self.close()
raise Warning('', 'Still not fully implemented')
self._pram_io_list = [row[4] for row in self.procedurecolumns(procedure = procname).fetchall() if row[4] not in (SQL_RESULT_COL, SQL_RETURN_VALUE)]
print('pram_io_list: '+str(self._pram_io_list))
call_escape = '{CALL '+procname
if args:
call_escape += '(' + ','.join(['?' for params in args]) + ')'
call_escape += '}'
self.execute(call_escape, args, call_mode = True)
result = []
for buf, buf_len, sql_type in self._ParamBufferList:
if buf_len.value == -1:
result.append(None)
else:
result.append(self.connection.output_converter[sql_type](buf.value))
return result
def executemany(self, query_string, params_list = [None]):
if not self.connection:
self.close()
for params in params_list:
self.execute(query_string, params, many_mode = True)
self._NumOfRows()
self.rowcount = -1
self._UpdateDesc()
#self._BindCols()
def _CreateColBuf(self):
if not self.connection:
self.close()
self._free_stmt(SQL_UNBIND)
NOC = self._NumOfCols()
self._ColBufferList = []
bind_data = True
for col_num in range(NOC):
col_name = self.description[col_num][0]
col_size = self.description[col_num][2]
col_sql_data_type = self._ColTypeCodeList[col_num]
target_type = SQL_data_type_dict[col_sql_data_type][2]
dynamic_length = SQL_data_type_dict[col_sql_data_type][5]
# set the default size based on the column's sql data type
total_buf_len = SQL_data_type_dict[col_sql_data_type][4]
# over-write if there's pre-set size value for "large columns"
if total_buf_len > 20500:
total_buf_len = self._outputsize.get(None,total_buf_len)
# overwrite if there is a pre-set size value for the "col_num" column
total_buf_len = self._outputsize.get(col_num, total_buf_len)
# if the buffer would be very large, do not bind it,
# because a large bound buffer decreases performance and sometimes yields only a NULL value.
# in that case use SQLGetData instead.
if col_size >= 1024:
dynamic_length = True
alloc_buffer = SQL_data_type_dict[col_sql_data_type][3](total_buf_len)
used_buf_len = c_ssize_t()
force_unicode = self.connection.unicode_results
if force_unicode and col_sql_data_type in (SQL_CHAR,SQL_VARCHAR,SQL_LONGVARCHAR):
target_type = SQL_C_WCHAR
alloc_buffer = create_buffer_u(total_buf_len)
buf_cvt_func = self.connection.output_converter[self._ColTypeCodeList[col_num]]
if bind_data:
if dynamic_length:
bind_data = False
self._ColBufferList.append([col_name, target_type, used_buf_len, ADDR(used_buf_len), alloc_buffer, ADDR(alloc_buffer), total_buf_len, buf_cvt_func, bind_data])
if bind_data:
ret = ODBC_API.SQLBindCol(self.stmt_h, col_num + 1, target_type, ADDR(alloc_buffer), total_buf_len, ADDR(used_buf_len))
if ret != SQL_SUCCESS:
check_success(self, ret)
def _UpdateDesc(self):
"Get the information of (name, type_code, display_size, internal_size, col_precision, scale, null_ok)"
if not self.connection:
self.close()
force_unicode = self.connection.unicode_results
if force_unicode:
Cname = create_buffer_u(1024)
else:
Cname = create_buffer(1024)
Cname_ptr = c_short()
Ctype_code = c_short()
Csize = ctypes.c_size_t()
Cdisp_size = c_ssize_t(0)
CDecimalDigits = c_short()
Cnull_ok = c_short()
ColDescr = []
self._ColTypeCodeList = []
NOC = self._NumOfCols()
for col in range(1, NOC+1):
ret = ODBC_API.SQLColAttribute(self.stmt_h, col, SQL_DESC_DISPLAY_SIZE, ADDR(create_buffer(10)),
10, ADDR(c_short()),ADDR(Cdisp_size))
if ret != SQL_SUCCESS:
check_success(self, ret)
if force_unicode:
ret = ODBC_API.SQLDescribeColW(self.stmt_h, col, Cname, len(Cname), ADDR(Cname_ptr),\
ADDR(Ctype_code),ADDR(Csize),ADDR(CDecimalDigits), ADDR(Cnull_ok))
if ret != SQL_SUCCESS:
check_success(self, ret)
else:
ret = ODBC_API.SQLDescribeCol(self.stmt_h, col, Cname, len(Cname), ADDR(Cname_ptr),\
ADDR(Ctype_code),ADDR(Csize),ADDR(CDecimalDigits), ADDR(Cnull_ok))
if ret != SQL_SUCCESS:
check_success(self, ret)
col_name = Cname.value
if lowercase:
col_name = col_name.lower()
#(name, type_code, display_size, internal_size, precision, scale, null_ok)
ColDescr.append((col_name, SQL_data_type_dict.get(Ctype_code.value,(Ctype_code.value,))[0],Cdisp_size.value,\
Csize.value, Csize.value,CDecimalDigits.value,Cnull_ok.value == 1 and True or False))
self._ColTypeCodeList.append(Ctype_code.value)
if len(ColDescr) > 0:
self.description = ColDescr
# Create the row type before fetching.
self._row_type = self.row_type_callable(self)
else:
self.description = None
self._CreateColBuf()
def _NumOfRows(self):
"""Get the number of rows"""
if not self.connection:
self.close()
NOR = c_ssize_t()
ret = SQLRowCount(self.stmt_h, ADDR(NOR))
if ret != SQL_SUCCESS:
check_success(self, ret)
self.rowcount = NOR.value
return self.rowcount
def _NumOfCols(self):
"""Get the number of cols"""
if not self.connection:
self.close()
NOC = c_short()
ret = SQLNumResultCols(self.stmt_h, ADDR(NOC))
if ret != SQL_SUCCESS:
check_success(self, ret)
return NOC.value
def fetchall(self):
if not self.connection:
self.close()
rows = []
while True:
row = self.fetchone()
if row is None:
break
rows.append(row)
return rows
def fetchmany(self, num = None):
if not self.connection:
self.close()
if num is None:
num = self.arraysize
rows = []
while len(rows) < num:
row = self.fetchone()
if row is None:
break
rows.append(row)
return rows
def fetchone(self):
if not self.connection:
self.close()
ret = SQLFetch(self.stmt_h)
if ret in (SQL_SUCCESS,SQL_SUCCESS_WITH_INFO):
'''Retrieve the value of each column of the fetched record'''
value_list = []
col_num = 1
for col_name, target_type, used_buf_len, ADDR_used_buf_len, alloc_buffer, ADDR_alloc_buffer, total_buf_len, buf_cvt_func, bind_data in self._ColBufferList:
raw_data_parts = []
while 1:
if bind_data:
ret = SQL_SUCCESS
else:
ret = SQLGetData(self.stmt_h, col_num, target_type, ADDR_alloc_buffer, total_buf_len, ADDR_used_buf_len)
if ret == SQL_SUCCESS:
if used_buf_len.value == SQL_NULL_DATA:
value_list.append(None)
else:
if raw_data_parts == []:
# No previously fetched partial data, so no need to combine
if target_type == SQL_C_BINARY:
value_list.append(buf_cvt_func(alloc_buffer.raw[:used_buf_len.value]))
elif target_type == SQL_C_WCHAR:
value_list.append(buf_cvt_func(from_buffer_u(alloc_buffer)))
else:
value_list.append(buf_cvt_func(alloc_buffer.value))
else:
# There is previously fetched raw data to combine
if target_type == SQL_C_BINARY:
raw_data_parts.append(alloc_buffer.raw[:used_buf_len.value])
elif target_type == SQL_C_WCHAR:
raw_data_parts.append(from_buffer_u(alloc_buffer))
else:
raw_data_parts.append(alloc_buffer.value)
break
elif ret == SQL_SUCCESS_WITH_INFO:
# The buffer holds only part of the data; keep fetching
if target_type == SQL_C_BINARY:
raw_data_parts.append(alloc_buffer.raw)
else:
raw_data_parts.append(alloc_buffer.value)
elif ret == SQL_NO_DATA:
# All data has been transmitted
break
else:
check_success(self, ret)
if raw_data_parts != []:
if py_v3:
if target_type != SQL_C_BINARY:
raw_value = ''.join(raw_data_parts)
else:
raw_value = BLANK_BYTE.join(raw_data_parts)
else:
raw_value = ''.join(raw_data_parts)
value_list.append(buf_cvt_func(raw_value))
col_num += 1
return self._row_type(value_list)
else:
if ret == SQL_NO_DATA_FOUND:
return None
else:
check_success(self, ret)
def __next__(self):
return self.next()
def next(self):
row = self.fetchone()
if row is None:
raise(StopIteration)
return row
def __iter__(self):
return self
def skip(self, count = 0):
if not self.connection:
self.close()
for i in range(count):
ret = ODBC_API.SQLFetchScroll(self.stmt_h, SQL_FETCH_NEXT, 0)
if ret != SQL_SUCCESS:
check_success(self, ret)
return None
def nextset(self):
if not self.connection:
self.close()
ret = ODBC_API.SQLMoreResults(self.stmt_h)
if ret not in (SQL_SUCCESS, SQL_NO_DATA):
check_success(self, ret)
if ret == SQL_NO_DATA:
self._free_stmt()
return False
else:
self._NumOfRows()
self._UpdateDesc()
#self._BindCols()
return True
def _free_stmt(self, free_type = None):
if not self.connection:
self.close()
if not self.connection.connected:
raise ProgrammingError('HY000','Attempt to use a closed connection.')
#self.description = None
#self.rowcount = -1
if free_type in (SQL_CLOSE, None):
ret = ODBC_API.SQLFreeStmt(self.stmt_h, SQL_CLOSE)
if ret != SQL_SUCCESS:
check_success(self, ret)
if free_type in (SQL_UNBIND, None):
ret = ODBC_API.SQLFreeStmt(self.stmt_h, SQL_UNBIND)
if ret != SQL_SUCCESS:
check_success(self, ret)
if free_type in (SQL_RESET_PARAMS, None):
ret = ODBC_API.SQLFreeStmt(self.stmt_h, SQL_RESET_PARAMS)
if ret != SQL_SUCCESS:
check_success(self, ret)
def getTypeInfo(self, sqlType = None):
if not self.connection:
self.close()
if sqlType is None:
type = SQL_ALL_TYPES
else:
type = sqlType
ret = ODBC_API.SQLGetTypeInfo(self.stmt_h, type)
if ret in (SQL_SUCCESS, SQL_SUCCESS_WITH_INFO):
self._NumOfRows()
self._UpdateDesc()
#self._BindCols()
return self.fetchone()
def tables(self, table=None, catalog=None, schema=None, tableType=None):
"""Return a list with all tables"""
if not self.connection:
self.close()
l_catalog = l_schema = l_table = l_tableType = 0
if unicode in [type(x) for x in (table, catalog, schema,tableType)]:
string_p = lambda x:wchar_pointer(UCS_buf(x))
API_f = ODBC_API.SQLTablesW
else:
string_p = ctypes.c_char_p
API_f = ODBC_API.SQLTables
if catalog is not None:
l_catalog = len(catalog)
catalog = string_p(catalog)
if schema is not None:
l_schema = len(schema)
schema = string_p(schema)
if table is not None:
l_table = len(table)
table = string_p(table)
if tableType is not None:
l_tableType = len(tableType)
tableType = string_p(tableType)
self._free_stmt()
self._last_param_types = None
self.statement = None
ret = API_f(self.stmt_h,
catalog, l_catalog,
schema, l_schema,
table, l_table,
tableType, l_tableType)
check_success(self, ret)
self._NumOfRows()
self._UpdateDesc()
#self._BindCols()
return self
def columns(self, table=None, catalog=None, schema=None, column=None):
"""Return a list with all columns"""
if not self.connection:
self.close()
l_catalog = l_schema = l_table = l_column = 0
if unicode in [type(x) for x in (table, catalog, schema,column)]:
string_p = lambda x:wchar_pointer(UCS_buf(x))
API_f = ODBC_API.SQLColumnsW
else:
string_p = ctypes.c_char_p
API_f = ODBC_API.SQLColumns
if catalog is not None:
l_catalog = len(catalog)
catalog = string_p(catalog)
if schema is not None:
l_schema = len(schema)
schema = string_p(schema)
if table is not None:
l_table = len(table)
table = string_p(table)
if column is not None:
l_column = len(column)
column = string_p(column)
self._free_stmt()
self._last_param_types = None
self.statement = None
ret = API_f(self.stmt_h,
catalog, l_catalog,
schema, l_schema,
table, l_table,
column, l_column)
check_success(self, ret)
self._NumOfRows()
self._UpdateDesc()
#self._BindCols()
return self
def primaryKeys(self, table=None, catalog=None, schema=None):
if not self.connection:
self.close()
l_catalog = l_schema = l_table = 0
if unicode in [type(x) for x in (table, catalog, schema)]:
string_p = lambda x:wchar_pointer(UCS_buf(x))
API_f = ODBC_API.SQLPrimaryKeysW
else:
string_p = ctypes.c_char_p
API_f = ODBC_API.SQLPrimaryKeys
if catalog is not None:
l_catalog = len(catalog)
catalog = string_p(catalog)
if schema is not None:
l_schema = len(schema)
schema = string_p(schema)
if table is not None:
l_table = len(table)
table = string_p(table)
self._free_stmt()
self._last_param_types = None
self.statement = None
ret = API_f(self.stmt_h,
catalog, l_catalog,
schema, l_schema,
table, l_table)
check_success(self, ret)
self._NumOfRows()
self._UpdateDesc()
#self._BindCols()
return self
def foreignKeys(self, table=None, catalog=None, schema=None, foreignTable=None, foreignCatalog=None, foreignSchema=None):
if not self.connection:
self.close()
l_catalog = l_schema = l_table = l_foreignTable = l_foreignCatalog = l_foreignSchema = 0
if unicode in [type(x) for x in (table, catalog, schema,foreignTable,foreignCatalog,foreignSchema)]:
string_p = lambda x:wchar_pointer(UCS_buf(x))
API_f = ODBC_API.SQLForeignKeysW
else:
string_p = ctypes.c_char_p
API_f = ODBC_API.SQLForeignKeys
if catalog is not None:
l_catalog = len(catalog)
catalog = string_p(catalog)
if schema is not None:
l_schema = len(schema)
schema = string_p(schema)
if table is not None:
l_table = len(table)
table = string_p(table)
if foreignTable is not None:
l_foreignTable = len(foreignTable)
foreignTable = string_p(foreignTable)
if foreignCatalog is not None:
l_foreignCatalog = len(foreignCatalog)
foreignCatalog = string_p(foreignCatalog)
if foreignSchema is not None:
l_foreignSchema = len(foreignSchema)
foreignSchema = string_p(foreignSchema)
self._free_stmt()
self._last_param_types = None
self.statement = None
ret = API_f(self.stmt_h,
catalog, l_catalog,
schema, l_schema,
table, l_table,
foreignCatalog, l_foreignCatalog,
foreignSchema, l_foreignSchema,
foreignTable, l_foreignTable)
check_success(self, ret)
self._NumOfRows()
self._UpdateDesc()
#self._BindCols()
return self
def procedurecolumns(self, procedure=None, catalog=None, schema=None, column=None):
if not self.connection:
self.close()
l_catalog = l_schema = l_procedure = l_column = 0
if unicode in [type(x) for x in (procedure, catalog, schema,column)]:
string_p = lambda x:wchar_pointer(UCS_buf(x))
API_f = ODBC_API.SQLProcedureColumnsW
else:
string_p = ctypes.c_char_p
API_f = ODBC_API.SQLProcedureColumns
if catalog is not None:
l_catalog = len(catalog)
catalog = string_p(catalog)
if schema is not None:
l_schema = len(schema)
schema = string_p(schema)
if procedure is not None:
l_procedure = len(procedure)
procedure = string_p(procedure)
if column is not None:
l_column = len(column)
column = string_p(column)
self._free_stmt()
self._last_param_types = None
self.statement = None
ret = API_f(self.stmt_h,
catalog, l_catalog,
schema, l_schema,
procedure, l_procedure,
column, l_column)
check_success(self, ret)
self._NumOfRows()
self._UpdateDesc()
return self
def procedures(self, procedure=None, catalog=None, schema=None):
if not self.connection:
self.close()
l_catalog = l_schema = l_procedure = 0
if unicode in [type(x) for x in (procedure, catalog, schema)]:
string_p = lambda x:wchar_pointer(UCS_buf(x))
API_f = ODBC_API.SQLProceduresW
else:
string_p = ctypes.c_char_p
API_f = ODBC_API.SQLProcedures
if catalog is not None:
l_catalog = len(catalog)
catalog = string_p(catalog)
if schema is not None:
l_schema = len(schema)
schema = string_p(schema)
if procedure is not None:
l_procedure = len(procedure)
procedure = string_p(procedure)
self._free_stmt()
self._last_param_types = None
self.statement = None
ret = API_f(self.stmt_h,
catalog, l_catalog,
schema, l_schema,
procedure, l_procedure)
check_success(self, ret)
self._NumOfRows()
self._UpdateDesc()
return self
def statistics(self, table, catalog=None, schema=None, unique=False, quick=True):
if not self.connection:
self.close()
l_table = l_catalog = l_schema = 0
if unicode in [type(x) for x in (table, catalog, schema)]:
string_p = lambda x:wchar_pointer(UCS_buf(x))
API_f = ODBC_API.SQLStatisticsW
else:
string_p = ctypes.c_char_p
API_f = ODBC_API.SQLStatistics
if catalog is not None:
l_catalog = len(catalog)
catalog = string_p(catalog)
if schema is not None:
l_schema = len(schema)
schema = string_p(schema)
if table is not None:
l_table = len(table)
table = string_p(table)
if unique:
Unique = SQL_INDEX_UNIQUE
else:
Unique = SQL_INDEX_ALL
if quick:
Reserved = SQL_QUICK
else:
Reserved = SQL_ENSURE
self._free_stmt()
self._last_param_types = None
self.statement = None
ret = API_f(self.stmt_h,
catalog, l_catalog,
schema, l_schema,
table, l_table,
Unique, Reserved)
check_success(self, ret)
self._NumOfRows()
self._UpdateDesc()
#self._BindCols()
return self
def commit(self):
if not self.connection:
self.close()
self.connection.commit()
def rollback(self):
if not self.connection:
self.close()
self.connection.rollback()
def setoutputsize(self, size, column = None):
if not self.connection:
self.close()
self._outputsize[column] = size
def setinputsizes(self, sizes):
if not self.connection:
self.close()
self._inputsizers = [size for size in sizes]
def close(self):
""" Call SQLCloseCursor API to free the statement handle"""
# ret = ODBC_API.SQLCloseCursor(self.stmt_h)
# check_success(self, ret)
#
if self.connection.connected:
ret = ODBC_API.SQLFreeStmt(self.stmt_h, SQL_CLOSE)
check_success(self, ret)
ret = ODBC_API.SQLFreeStmt(self.stmt_h, SQL_UNBIND)
check_success(self, ret)
ret = ODBC_API.SQLFreeStmt(self.stmt_h, SQL_RESET_PARAMS)
check_success(self, ret)
ret = ODBC_API.SQLFreeHandle(SQL_HANDLE_STMT, self.stmt_h)
check_success(self, ret)
self.closed = True
def __del__(self):
if not self.closed:
self.close()
def __exit__(self, type, value, traceback):
if not self.connection:
self.close()
if value:
self.rollback()
else:
self.commit()
self.close()
def __enter__(self):
return self
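# The Cursor defined above supports iteration and the context-manager
# protocol; a hedged usage sketch (assumes an open connection `conn` and a
# table `t`):
'''
with conn.cursor() as cur:
    cur.execute("SELECT a FROM t")
    for row in cur:        # __iter__/next drive fetchone()
        print(row[0])
# __exit__ commits on a clean exit and rolls back if an exception escaped
'''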
# This class implements an ODBC connection.
#
#
connection_timeout = 0
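# A hedged end-to-end sketch of the Connection class defined below (the
# connection string is a placeholder, not a tested DSN):
'''
conn = Connection('DRIVER={SQL Server};SERVER=host;DATABASE=db;UID=u;PWD=p',
                  autocommit=False, timeout=30)
cur = conn.cursor()
cur.execute("SELECT 1")
conn.commit()
'''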
class Connection:
def __init__(self, connectString = '', autocommit = False, ansi = False, timeout = 0, unicode_results = use_unicode, readonly = False, **kargs):
"""Init variables and connect to the engine"""
self.connected = 0
self.type_size_dic = {}
self.ansi = False
self.unicode_results = False
self.dbc_h = ctypes.c_void_p()
self.autocommit = autocommit
self.readonly = False
# the query timeout value
self.timeout = 0
# self._cursors = []
for key, value in list(kargs.items()):
connectString = connectString + key + '=' + value + ';'
self.connectString = connectString
self.clear_output_converters()
try:
lock.acquire()
if shared_env_h is None:
#Initialize an environment if one has not been created.
AllocateEnv()
finally:
lock.release()
# Allocate a DBC handle self.dbc_h under the environment shared_env_h.
# This DBC handle is actually the basis of a "connection".
# The handle self.dbc_h will be used to connect to a data source
# in the self.connect and self.ConnectByDSN methods
ret = ODBC_API.SQLAllocHandle(SQL_HANDLE_DBC, shared_env_h, ADDR(self.dbc_h))
check_success(self, ret)
self.connection_timeout = connection_timeout
if self.connection_timeout != 0:
self.set_connection_timeout(connection_timeout)
self.connect(connectString, autocommit, ansi, timeout, unicode_results, readonly)
def set_connection_timeout(self,connection_timeout):
self.connection_timeout = connection_timeout
ret = ODBC_API.SQLSetConnectAttr(self.dbc_h, SQL_ATTR_CONNECTION_TIMEOUT, connection_timeout, SQL_IS_UINTEGER);
check_success(self, ret)
def connect(self, connectString = '', autocommit = False, ansi = False, timeout = 0, unicode_results = use_unicode, readonly = False):
"""Connect to odbc, using connect strings and set the connection's attributes like autocommit and timeout
by calling SQLSetConnectAttr
"""
# Before establishing the connection via the connection string,
# set the connection's "timeout" attribute (actually SQL_ATTR_LOGIN_TIMEOUT)
if timeout != 0:
ret = ODBC_API.SQLSetConnectAttr(self.dbc_h, SQL_ATTR_LOGIN_TIMEOUT, timeout, SQL_IS_UINTEGER);
check_success(self, ret)
# Create one connection with a connect string by calling SQLDriverConnect
# and make self.dbc_h the handle of this connection
        # Convert the connect string to an encoded string
        # so it can be converted to a ctypes c_char array object
self.ansi = ansi
if not ansi:
c_connectString = wchar_pointer(UCS_buf(self.connectString))
odbc_func = ODBC_API.SQLDriverConnectW
else:
c_connectString = ctypes.c_char_p(self.connectString)
odbc_func = ODBC_API.SQLDriverConnect
# With unixODBC, SQLDriverConnect will intermittently fail with error:
# [01000] [unixODBC][Driver Manager]Can't open lib '/path/to/so' : file not found"
# or:
# [01000] [unixODBC][Driver Manager]Can't open lib '/path/to/so' : (null)"
        # when called concurrently by more than one thread. So, we have to
# use a lock to serialize the calls. By the way, the error is much
# less likely to happen if ODBC Tracing is enabled, likely due to the
# implicit serialization caused by writing to trace file.
if ODBC_API._name != 'odbc32':
try:
lock.acquire()
ret = odbc_func(self.dbc_h, 0, c_connectString, len(self.connectString), None, 0, None, SQL_DRIVER_NOPROMPT)
finally:
lock.release()
else:
ret = odbc_func(self.dbc_h, 0, c_connectString, len(self.connectString), None, 0, None, SQL_DRIVER_NOPROMPT)
check_success(self, ret)
# Set the connection's attribute of "autocommit"
#
self.autocommit = autocommit
if self.autocommit == True:
ret = ODBC_API.SQLSetConnectAttr(self.dbc_h, SQL_ATTR_AUTOCOMMIT, SQL_AUTOCOMMIT_ON, SQL_IS_UINTEGER)
else:
ret = ODBC_API.SQLSetConnectAttr(self.dbc_h, SQL_ATTR_AUTOCOMMIT, SQL_AUTOCOMMIT_OFF, SQL_IS_UINTEGER)
check_success(self, ret)
# Set the connection's attribute of "readonly"
#
self.readonly = readonly
if self.readonly == True:
ret = ODBC_API.SQLSetConnectAttr(self.dbc_h, SQL_ATTR_ACCESS_MODE, SQL_MODE_READ_ONLY, SQL_IS_UINTEGER)
check_success(self, ret)
self.unicode_results = unicode_results
self.connected = 1
self.update_db_special_info()
def clear_output_converters(self):
self.output_converter = {}
for sqltype, profile in SQL_data_type_dict.items():
self.output_converter[sqltype] = profile[1]
def add_output_converter(self, sqltype, func):
self.output_converter[sqltype] = func
def ConnectByDSN(self, dsn, user, passwd = ''):
"""Connect to odbc, we need dsn, user and optionally password"""
self.dsn = dsn
self.user = user
self.passwd = passwd
sn = create_buffer(dsn)
un = create_buffer(user)
pw = create_buffer(passwd)
ret = ODBC_API.SQLConnect(self.dbc_h, sn, len(sn), un, len(un), pw, len(pw))
check_success(self, ret)
self.update_db_special_info()
self.connected = 1
def cursor(self, row_type_callable=None):
#self.settimeout(self.timeout)
if not self.connected:
raise ProgrammingError('HY000','Attempt to use a closed connection.')
cur = Cursor(self, row_type_callable=row_type_callable)
# self._cursors.append(cur)
return cur
def update_db_special_info(self):
for sql_type in (
SQL_TYPE_TIMESTAMP,
SQL_TYPE_DATE,
SQL_TYPE_TIME,
SQL_SS_TIME2,
):
cur = Cursor(self)
try:
info_tuple = cur.getTypeInfo(sql_type)
if info_tuple is not None:
self.type_size_dic[sql_type] = info_tuple[2], info_tuple[14]
except:
pass
cur.close()
self.support_SQLDescribeParam = False
try:
driver_name = self.getinfo(SQL_DRIVER_NAME)
if any(x in driver_name for x in ('SQLSRV','ncli','libsqlncli')):
self.support_SQLDescribeParam = True
except:
pass
def commit(self):
if not self.connected:
raise ProgrammingError('HY000','Attempt to use a closed connection.')
ret = SQLEndTran(SQL_HANDLE_DBC, self.dbc_h, SQL_COMMIT)
if ret != SQL_SUCCESS:
check_success(self, ret)
def rollback(self):
if not self.connected:
raise ProgrammingError('HY000','Attempt to use a closed connection.')
ret = SQLEndTran(SQL_HANDLE_DBC, self.dbc_h, SQL_ROLLBACK)
if ret != SQL_SUCCESS:
check_success(self, ret)
def getinfo(self,infotype):
if infotype not in list(aInfoTypes.keys()):
raise ProgrammingError('HY000','Invalid getinfo value: '+str(infotype))
if aInfoTypes[infotype] == 'GI_UINTEGER':
total_buf_len = 1000
alloc_buffer = ctypes.c_ulong()
used_buf_len = c_short()
ret = ODBC_API.SQLGetInfo(self.dbc_h,infotype,ADDR(alloc_buffer), total_buf_len,\
ADDR(used_buf_len))
check_success(self, ret)
result = alloc_buffer.value
elif aInfoTypes[infotype] == 'GI_USMALLINT':
total_buf_len = 1000
alloc_buffer = ctypes.c_ushort()
used_buf_len = c_short()
ret = ODBC_API.SQLGetInfo(self.dbc_h,infotype,ADDR(alloc_buffer), total_buf_len,\
ADDR(used_buf_len))
check_success(self, ret)
result = alloc_buffer.value
else:
total_buf_len = 1000
alloc_buffer = create_buffer(total_buf_len)
used_buf_len = c_short()
if self.ansi:
API_f = ODBC_API.SQLGetInfo
else:
API_f = ODBC_API.SQLGetInfoW
ret = API_f(self.dbc_h,infotype,ADDR(alloc_buffer), total_buf_len,\
ADDR(used_buf_len))
check_success(self, ret)
if self.ansi:
result = alloc_buffer.value
else:
result = UCS_dec(alloc_buffer)
if aInfoTypes[infotype] == 'GI_YESNO':
if unicode(result[0]) == unicode('Y'):
result = True
else:
result = False
return result
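    # Illustrative usage sketch for getinfo(); SQL_DRIVER_NAME is the only
    # info type used elsewhere in this module, but any key of aInfoTypes works:
    #
    #     name = conn.getinfo(SQL_DRIVER_NAME)   # `conn` is an open Connection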
def __exit__(self, type, value, traceback):
if value:
self.rollback()
else:
self.commit()
if self.connected:
self.close()
def __enter__(self):
return self
def __del__(self):
if self.connected:
self.close()
def close(self):
if not self.connected:
raise ProgrammingError('HY000','Attempt to close a closed connection.')
# for cur in self._cursors:
# if not cur is None:
# if not cur.closed:
# cur.close()
if self.connected:
#if DEBUG:print 'disconnect'
if not self.autocommit:
self.rollback()
ret = ODBC_API.SQLDisconnect(self.dbc_h)
check_success(self, ret)
#if DEBUG:print 'free dbc'
ret = ODBC_API.SQLFreeHandle(SQL_HANDLE_DBC, self.dbc_h)
check_success(self, ret)
# if shared_env_h.value:
# #if DEBUG:print 'env'
# ret = ODBC_API.SQLFreeHandle(SQL_HANDLE_ENV, shared_env_h)
# check_success(shared_env_h, ret)
self.connected = 0
odbc = Connection
connect = odbc
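# Illustrative usage sketch for the connect alias above (DSN and credentials
# are assumptions):
#
#     conn = connect('DSN=mydsn;UID=user;PWD=secret;', autocommit=False)
#     cur = conn.cursor()
#     ...
#     conn.commit()
#     conn.close()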
'''
def connect(connectString = '', autocommit = False, ansi = False, timeout = 0, unicode_results = False, readonly = False, **kargs):
return odbc(connectString, autocommit, ansi, timeout, unicode_results, readonly, kargs)
'''
def drivers():
if sys.platform not in ('win32','cli'):
raise Exception('This function is available for use in Windows only.')
try:
lock.acquire()
if shared_env_h is None:
AllocateEnv()
finally:
lock.release()
DriverDescription = create_buffer_u(1000)
BufferLength1 = c_short(1000)
DescriptionLength = c_short()
DriverAttributes = create_buffer_u(1000)
BufferLength2 = c_short(1000)
AttributesLength = c_short()
ret = SQL_SUCCESS
DriverList = []
Direction = SQL_FETCH_FIRST
while ret != SQL_NO_DATA:
ret = ODBC_API.SQLDriversW(shared_env_h, Direction , DriverDescription , BufferLength1
, ADDR(DescriptionLength), DriverAttributes, BufferLength2, ADDR(AttributesLength))
check_success(shared_env_h, ret)
DriverList.append(DriverDescription.value)
if Direction == SQL_FETCH_FIRST:
Direction = SQL_FETCH_NEXT
return DriverList
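# Illustrative usage for drivers() (Windows only, per the platform check
# above); the returned names vary from machine to machine:
#
#     for name in drivers():
#         print(name)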
def win_create_mdb(mdb_path, sort_order = "General\0\0"):
if sys.platform not in ('win32','cli'):
raise Exception('This function is available for use in Windows only.')
mdb_driver = [d for d in drivers() if 'Microsoft Access Driver (*.mdb' in d]
if mdb_driver == []:
raise Exception('Access Driver is not found.')
else:
driver_name = mdb_driver[0].encode('mbcs')
#CREATE_DB=<path name> <sort order>
ctypes.windll.ODBCCP32.SQLConfigDataSource.argtypes = [ctypes.c_void_p,ctypes.c_ushort,ctypes.c_char_p,ctypes.c_char_p]
if py_v3:
c_Path = bytes("CREATE_DB=" + mdb_path + " " + sort_order,'mbcs')
else:
c_Path = "CREATE_DB=" + mdb_path + " " + sort_order
ODBC_ADD_SYS_DSN = 1
ret = ctypes.windll.ODBCCP32.SQLConfigDataSource(None,ODBC_ADD_SYS_DSN,driver_name, c_Path)
if not ret:
raise Exception('Failed to create Access mdb file - "%s". Please check file path, permission and Access driver readiness.' %mdb_path)
def win_connect_mdb(mdb_path):
if sys.platform not in ('win32','cli'):
raise Exception('This function is available for use in Windows only.')
mdb_driver = [d for d in drivers() if 'Microsoft Access Driver (*.mdb' in d]
if mdb_driver == []:
raise Exception('Access Driver is not found.')
else:
driver_name = mdb_driver[0]
return connect('Driver={'+driver_name+"};DBQ="+mdb_path, unicode_results = use_unicode, readonly = False)
def win_compact_mdb(mdb_path, compacted_mdb_path, sort_order = "General\0\0"):
if sys.platform not in ('win32','cli'):
raise Exception('This function is available for use in Windows only.')
mdb_driver = [d for d in drivers() if 'Microsoft Access Driver (*.mdb' in d]
if mdb_driver == []:
raise Exception('Access Driver is not found.')
else:
driver_name = mdb_driver[0].encode('mbcs')
#COMPACT_DB=<source path> <destination path> <sort order>
ctypes.windll.ODBCCP32.SQLConfigDataSource.argtypes = [ctypes.c_void_p,ctypes.c_ushort,ctypes.c_char_p,ctypes.c_char_p]
#driver_name = "Microsoft Access Driver (*.mdb)"
if py_v3:
c_Path = bytes("COMPACT_DB=" + mdb_path + " " + compacted_mdb_path + " " + sort_order,'mbcs')
#driver_name = bytes(driver_name,'mbcs')
else:
c_Path = "COMPACT_DB=" + mdb_path + " " + compacted_mdb_path + " " + sort_order
ODBC_ADD_SYS_DSN = 1
ret = ctypes.windll.ODBCCP32.SQLConfigDataSource(None,ODBC_ADD_SYS_DSN,driver_name, c_Path)
if not ret:
raise Exception('Failed to compact Access mdb file - "%s". Please check file path, permission and Access driver readiness.' %compacted_mdb_path)
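# Illustrative usage sketch for the three Access helpers above (the .mdb
# paths are assumptions):
#
#     win_create_mdb(r'C:\temp\new.mdb')
#     conn = win_connect_mdb(r'C:\temp\new.mdb')
#     win_compact_mdb(r'C:\temp\new.mdb', r'C:\temp\compacted.mdb')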
def dataSources():
"""Return a list with [name, descrition]"""
dsn = create_buffer(1024)
desc = create_buffer(1024)
dsn_len = c_short()
desc_len = c_short()
dsn_list = {}
try:
lock.acquire()
if shared_env_h is None:
AllocateEnv()
finally:
lock.release()
while 1:
ret = ODBC_API.SQLDataSources(shared_env_h, SQL_FETCH_NEXT, \
dsn, len(dsn), ADDR(dsn_len), desc, len(desc), ADDR(desc_len))
if ret == SQL_NO_DATA_FOUND:
break
elif not ret in (SQL_SUCCESS, SQL_SUCCESS_WITH_INFO):
ctrl_err(SQL_HANDLE_ENV, shared_env_h, ret)
else:
dsn_list[dsn.value] = desc.value
return dsn_list
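# Illustrative usage for dataSources():
#
#     for name, description in dataSources().items():
#         print(name, description)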
def monkey_patch_for_gevent():
import functools, gevent
apply_e = gevent.get_hub().threadpool.apply_e
def monkey_patch(func):
@functools.wraps(func)
def wrap(*args, **kwargs):
#if DEBUG:print('%s called with %s %s' % (func, args, kwargs))
return apply_e(Exception, func, args, kwargs)
return wrap
for attr in dir(ODBC_API):
if attr.startswith('SQL') and hasattr(getattr(ODBC_API, attr), 'argtypes'):
setattr(ODBC_API, attr, monkey_patch(getattr(ODBC_API, attr)))
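# Illustrative usage sketch: calling this once at startup wraps every
# ODBC_API.SQL* function so subsequent calls run in gevent's threadpool
# instead of blocking the event loop:
#
#     monkey_patch_for_gevent()
#     conn = connect('DSN=mydsn;')   # hypothetical DSN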
|
deemoowoor/.vim
|
refs/heads/master
|
ftplugin/python/pyflakes/setup.py
|
37
|
#!/usr/bin/python
# (c) 2005-2009 Divmod, Inc. See LICENSE file for details
from distutils.core import setup
setup(
name="pyflakes",
license="MIT",
version="0.4.0",
description="passive checker of Python programs",
author="Phil Frost",
maintainer="Moe Aboulkheir",
maintainer_email="moe@divmod.com",
url="http://www.divmod.org/trac/wiki/DivmodPyflakes",
packages=["pyflakes", "pyflakes.scripts", "pyflakes.test"],
scripts=["bin/pyflakes"],
long_description="""Pyflakes is program to analyze Python programs and detect various errors. It
works by parsing the source file, not importing it, so it is safe to use on
modules with side effects. It's also much faster.""",
classifiers=[
"Development Status :: 6 - Mature",
"Environment :: Console",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Topic :: Software Development",
"Topic :: Utilities",
])
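# Illustrative usage after installation (e.g. `python setup.py install`);
# the bin/pyflakes script listed above checks the files named on the
# command line:
#
#     pyflakes some_module.py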
|
kthordarson/youtube-dl-ruv
|
refs/heads/master
|
youtube_dl/extractor/brightcove.py
|
2
|
# encoding: utf-8
from __future__ import unicode_literals
import re
import json
import xml.etree.ElementTree
from .common import InfoExtractor
from ..utils import (
compat_urllib_parse,
find_xpath_attr,
fix_xml_ampersands,
compat_urlparse,
compat_str,
compat_urllib_request,
compat_parse_qs,
compat_urllib_parse_urlparse,
determine_ext,
ExtractorError,
unsmuggle_url,
unescapeHTML,
)
class BrightcoveIE(InfoExtractor):
_VALID_URL = r'https?://.*brightcove\.com/(services|viewer).*?\?(?P<query>.*)'
_FEDERATED_URL_TEMPLATE = 'http://c.brightcove.com/services/viewer/htmlFederated?%s'
_TESTS = [
{
# From http://www.8tv.cat/8aldia/videos/xavier-sala-i-martin-aquesta-tarda-a-8-al-dia/
'url': 'http://c.brightcove.com/services/viewer/htmlFederated?playerID=1654948606001&flashID=myExperience&%40videoPlayer=2371591881001',
'md5': '5423e113865d26e40624dce2e4b45d95',
'note': 'Test Brightcove downloads and detection in GenericIE',
'info_dict': {
'id': '2371591881001',
'ext': 'mp4',
'title': 'Xavier Sala i Martín: “Un banc que no presta és un banc zombi que no serveix per a res”',
'uploader': '8TV',
'description': 'md5:a950cc4285c43e44d763d036710cd9cd',
}
},
{
# From http://medianetwork.oracle.com/video/player/1785452137001
'url': 'http://c.brightcove.com/services/viewer/htmlFederated?playerID=1217746023001&flashID=myPlayer&%40videoPlayer=1785452137001',
'info_dict': {
'id': '1785452137001',
'ext': 'flv',
'title': 'JVMLS 2012: Arrays 2.0 - Opportunities and Challenges',
'description': 'John Rose speaks at the JVM Language Summit, August 1, 2012.',
'uploader': 'Oracle',
},
},
{
# From http://mashable.com/2013/10/26/thermoelectric-bracelet-lets-you-control-your-body-temperature/
'url': 'http://c.brightcove.com/services/viewer/federated_f9?&playerID=1265504713001&publisherID=AQ%7E%7E%2CAAABBzUwv1E%7E%2CxP-xFHVUstiMFlNYfvF4G9yFnNaqCw_9&videoID=2750934548001',
'info_dict': {
'id': '2750934548001',
'ext': 'mp4',
'title': 'This Bracelet Acts as a Personal Thermostat',
'description': 'md5:547b78c64f4112766ccf4e151c20b6a0',
'uploader': 'Mashable',
},
},
{
# test that the default referer works
# from http://national.ballet.ca/interact/video/Lost_in_Motion_II/
'url': 'http://link.brightcove.com/services/player/bcpid756015033001?bckey=AQ~~,AAAApYJi_Ck~,GxhXCegT1Dp39ilhXuxMJxasUhVNZiil&bctid=2878862109001',
'info_dict': {
'id': '2878862109001',
'ext': 'mp4',
'title': 'Lost in Motion II',
'description': 'md5:363109c02998fee92ec02211bd8000df',
'uploader': 'National Ballet of Canada',
},
},
{
# test flv videos served by akamaihd.net
# From http://www.redbull.com/en/bike/stories/1331655643987/replay-uci-dh-world-cup-2014-from-fort-william
'url': 'http://c.brightcove.com/services/viewer/htmlFederated?%40videoPlayer=ref%3ABC2996102916001&linkBaseURL=http%3A%2F%2Fwww.redbull.com%2Fen%2Fbike%2Fvideos%2F1331655630249%2Freplay-uci-fort-william-2014-dh&playerKey=AQ%7E%7E%2CAAAApYJ7UqE%7E%2Cxqr_zXk0I-zzNndy8NlHogrCb5QdyZRf&playerID=1398061561001#__youtubedl_smuggle=%7B%22Referer%22%3A+%22http%3A%2F%2Fwww.redbull.com%2Fen%2Fbike%2Fstories%2F1331655643987%2Freplay-uci-dh-world-cup-2014-from-fort-william%22%7D',
# The md5 checksum changes on each download
'info_dict': {
'id': '2996102916001',
'ext': 'flv',
'title': 'UCI MTB World Cup 2014: Fort William, UK - Downhill Finals',
'uploader': 'Red Bull TV',
'description': 'UCI MTB World Cup 2014: Fort William, UK - Downhill Finals',
},
},
{
# playlist test
# from http://support.brightcove.com/en/video-cloud/docs/playlist-support-single-video-players
'url': 'http://c.brightcove.com/services/viewer/htmlFederated?playerID=3550052898001&playerKey=AQ%7E%7E%2CAAABmA9XpXk%7E%2C-Kp7jNgisre1fG5OdqpAFUTcs0lP_ZoL',
'info_dict': {
'title': 'Sealife',
},
'playlist_mincount': 7,
},
]
@classmethod
def _build_brighcove_url(cls, object_str):
"""
        Build a Brightcove URL from an XML string containing
<object class="BrightcoveExperience">{params}</object>
"""
# Fix up some stupid HTML, see https://github.com/rg3/youtube-dl/issues/1553
object_str = re.sub(r'(<param name="[^"]+" value="[^"]+")>',
lambda m: m.group(1) + '/>', object_str)
# Fix up some stupid XML, see https://github.com/rg3/youtube-dl/issues/1608
object_str = object_str.replace('<--', '<!--')
object_str = fix_xml_ampersands(object_str)
object_doc = xml.etree.ElementTree.fromstring(object_str.encode('utf-8'))
fv_el = find_xpath_attr(object_doc, './param', 'name', 'flashVars')
if fv_el is not None:
flashvars = dict(
(k, v[0])
for k, v in compat_parse_qs(fv_el.attrib['value']).items())
else:
flashvars = {}
def find_param(name):
if name in flashvars:
return flashvars[name]
node = find_xpath_attr(object_doc, './param', 'name', name)
if node is not None:
return node.attrib['value']
return None
params = {}
playerID = find_param('playerID')
if playerID is None:
raise ExtractorError('Cannot find player ID')
params['playerID'] = playerID
playerKey = find_param('playerKey')
# Not all pages define this value
if playerKey is not None:
params['playerKey'] = playerKey
        # Any one of these three fields may hold the id of the video
videoPlayer = find_param('@videoPlayer') or find_param('videoId') or find_param('videoID')
if videoPlayer is not None:
params['@videoPlayer'] = videoPlayer
linkBase = find_param('linkBaseURL')
if linkBase is not None:
params['linkBaseURL'] = linkBase
data = compat_urllib_parse.urlencode(params)
return cls._FEDERATED_URL_TEMPLATE % data
@classmethod
def _extract_brightcove_url(cls, webpage):
"""Try to extract the brightcove url from the webpage, returns None
if it can't be found
"""
urls = cls._extract_brightcove_urls(webpage)
return urls[0] if urls else None
@classmethod
def _extract_brightcove_urls(cls, webpage):
"""Return a list of all Brightcove URLs from the webpage """
url_m = re.search(
r'<meta\s+property="og:video"\s+content="(https?://(?:secure|c)\.brightcove.com/[^"]+)"',
webpage)
if url_m:
url = unescapeHTML(url_m.group(1))
# Some sites don't add it, we can't download with this url, for example:
# http://www.ktvu.com/videos/news/raw-video-caltrain-releases-video-of-man-almost/vCTZdY/
if 'playerKey' in url or 'videoId' in url:
return [url]
matches = re.findall(
r'''(?sx)<object
(?:
[^>]+?class=[\'"][^>]*?BrightcoveExperience.*?[\'"] |
[^>]*?>\s*<param\s+name="movie"\s+value="https?://[^/]*brightcove\.com/
).+?</object>''',
webpage)
return [cls._build_brighcove_url(m) for m in matches]
def _real_extract(self, url):
url, smuggled_data = unsmuggle_url(url, {})
        # Change the 'videoId' and other fields to '@videoPlayer'
url = re.sub(r'(?<=[?&])(videoI(d|D)|bctid)', '%40videoPlayer', url)
# Change bckey (used by bcove.me urls) to playerKey
url = re.sub(r'(?<=[?&])bckey', 'playerKey', url)
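        # Illustrative effect of the two substitutions above on a
        # hypothetical query string:
        #   '...?videoId=123&bckey=AQ~~' -> '...?%40videoPlayer=123&playerKey=AQ~~'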
mobj = re.match(self._VALID_URL, url)
query_str = mobj.group('query')
query = compat_urlparse.parse_qs(query_str)
videoPlayer = query.get('@videoPlayer')
if videoPlayer:
# We set the original url as the default 'Referer' header
referer = smuggled_data.get('Referer', url)
return self._get_video_info(
videoPlayer[0], query_str, query, referer=referer)
elif 'playerKey' in query:
player_key = query['playerKey']
return self._get_playlist_info(player_key[0])
else:
raise ExtractorError(
'Cannot find playerKey= variable. Did you forget quotes in a shell invocation?',
expected=True)
def _get_video_info(self, video_id, query_str, query, referer=None):
request_url = self._FEDERATED_URL_TEMPLATE % query_str
req = compat_urllib_request.Request(request_url)
linkBase = query.get('linkBaseURL')
if linkBase is not None:
referer = linkBase[0]
if referer is not None:
req.add_header('Referer', referer)
webpage = self._download_webpage(req, video_id)
error_msg = self._html_search_regex(
r"<h1>We're sorry.</h1>\s*<p>(.*?)</p>", webpage,
'error message', default=None)
if error_msg is not None:
raise ExtractorError(
'brightcove said: %s' % error_msg, expected=True)
self.report_extraction(video_id)
info = self._search_regex(r'var experienceJSON = ({.*});', webpage, 'json')
info = json.loads(info)['data']
video_info = info['programmedContent']['videoPlayer']['mediaDTO']
video_info['_youtubedl_adServerURL'] = info.get('adServerURL')
return self._extract_video_info(video_info)
def _get_playlist_info(self, player_key):
info_url = 'http://c.brightcove.com/services/json/experience/runtime/?command=get_programming_for_experience&playerKey=%s' % player_key
playlist_info = self._download_webpage(
info_url, player_key, 'Downloading playlist information')
json_data = json.loads(playlist_info)
if 'videoList' not in json_data:
raise ExtractorError('Empty playlist')
playlist_info = json_data['videoList']
videos = [self._extract_video_info(video_info) for video_info in playlist_info['mediaCollectionDTO']['videoDTOs']]
return self.playlist_result(videos, playlist_id=playlist_info['id'],
playlist_title=playlist_info['mediaCollectionDTO']['displayName'])
def _extract_video_info(self, video_info):
info = {
'id': compat_str(video_info['id']),
'title': video_info['displayName'].strip(),
'description': video_info.get('shortDescription'),
'thumbnail': video_info.get('videoStillURL') or video_info.get('thumbnailURL'),
'uploader': video_info.get('publisherName'),
}
renditions = video_info.get('renditions')
if renditions:
formats = []
for rend in renditions:
url = rend['defaultURL']
if not url:
continue
if rend['remote']:
url_comp = compat_urllib_parse_urlparse(url)
if url_comp.path.endswith('.m3u8'):
formats.extend(
self._extract_m3u8_formats(url, info['id'], 'mp4'))
continue
elif 'akamaihd.net' in url_comp.netloc:
                        # These renditions are served through
                        # akamaihd.net, but they don't use f4m manifests
url = url.replace('control/', '') + '?&v=3.3.0&fp=13&r=FEEFJ&g=RTSJIMBMPFPB'
ext = 'flv'
else:
ext = determine_ext(url)
size = rend.get('size')
formats.append({
'url': url,
'ext': ext,
'height': rend.get('frameHeight'),
'width': rend.get('frameWidth'),
'filesize': size if size != 0 else None,
})
self._sort_formats(formats)
info['formats'] = formats
elif video_info.get('FLVFullLengthURL') is not None:
info.update({
'url': video_info['FLVFullLengthURL'],
})
if self._downloader.params.get('include_ads', False):
adServerURL = video_info.get('_youtubedl_adServerURL')
if adServerURL:
ad_info = {
'_type': 'url',
'url': adServerURL,
}
if 'url' in info:
return {
'_type': 'playlist',
'title': info['title'],
'entries': [ad_info, info],
}
else:
return ad_info
if 'url' not in info and not info.get('formats'):
raise ExtractorError('Unable to extract video url for %s' % info['id'])
return info
|
monsta/pluma
|
refs/heads/master
|
plugins/quickopen/quickopen/windowhelper.py
|
1
|
# -*- coding: utf-8 -*-
# Copyright (C) 2009 - Jesse van den Kieboom
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor,
# Boston, MA 02110-1301, USA.
import pluma
import gtk
from popup import Popup
import os
import pluma.commands
import gio
import glib
from virtualdirs import RecentDocumentsDirectory
from virtualdirs import CurrentDocumentsDirectory
ui_str = """<ui>
<menubar name="MenuBar">
<menu name="FileMenu" action="File">
<placeholder name="FileOps_2">
<menuitem name="QuickOpen" action="QuickOpen"/>
</placeholder>
</menu>
</menubar>
</ui>
"""
class WindowHelper:
def __init__(self, window, plugin):
self._window = window
self._plugin = plugin
self._popup = None
self._install_menu()
def deactivate(self):
self._uninstall_menu()
self._window = None
self._plugin = None
def update_ui(self):
pass
def _uninstall_menu(self):
manager = self._window.get_ui_manager()
manager.remove_ui(self._ui_id)
manager.remove_action_group(self._action_group)
manager.ensure_update()
def _install_menu(self):
manager = self._window.get_ui_manager()
self._action_group = gtk.ActionGroup("PlumaQuickOpenPluginActions")
self._action_group.add_actions([
("QuickOpen", gtk.STOCK_OPEN, _("Quick open"),
'<Ctrl><Alt>O', _("Quickly open documents"),
self.on_quick_open_activate)
])
manager.insert_action_group(self._action_group, -1)
self._ui_id = manager.add_ui_from_string(ui_str)
def _create_popup(self):
paths = []
# Open documents
paths.append(CurrentDocumentsDirectory(self._window))
doc = self._window.get_active_document()
# Current document directory
if doc and doc.is_local():
gfile = doc.get_location()
paths.append(gfile.get_parent())
# File browser root directory
bus = self._window.get_message_bus()
try:
msg = bus.send_sync('/plugins/filebrowser', 'get_root')
if msg:
uri = msg.get_value('uri')
if uri:
gfile = gio.File(uri)
if gfile.is_native():
paths.append(gfile)
except StandardError:
pass
# Recent documents
paths.append(RecentDocumentsDirectory(screen=self._window.get_screen()))
# Local bookmarks
for path in self._local_bookmarks():
paths.append(path)
# Desktop directory
desktopdir = self._desktop_dir()
if desktopdir:
paths.append(gio.File(desktopdir))
# Home directory
paths.append(gio.File(os.path.expanduser('~')))
self._popup = Popup(self._window, paths, self.on_activated)
self._popup.set_default_size(*self._plugin.get_popup_size())
self._popup.set_transient_for(self._window)
self._popup.set_position(gtk.WIN_POS_CENTER_ON_PARENT)
self._window.get_group().add_window(self._popup)
self._popup.connect('destroy', self.on_popup_destroy)
def _local_bookmarks(self):
filename = os.path.expanduser('~/.gtk-bookmarks')
if not os.path.isfile(filename):
return []
paths = []
for line in file(filename, 'r').xreadlines():
uri = line.strip().split(" ")[0]
f = gio.File(uri)
if f.is_native():
try:
info = f.query_info("standard::type")
if info and info.get_file_type() == gio.FILE_TYPE_DIRECTORY:
paths.append(f)
except glib.GError:
pass
return paths
def _desktop_dir(self):
config = os.getenv('XDG_CONFIG_HOME')
if not config:
config = os.path.expanduser('~/.config')
config = os.path.join(config, 'user-dirs.dirs')
desktopdir = None
if os.path.isfile(config):
for line in file(config, 'r').xreadlines():
line = line.strip()
if line.startswith('XDG_DESKTOP_DIR'):
parts = line.split('=', 1)
desktopdir = os.path.expandvars(parts[1].strip('"').strip("'"))
break
if not desktopdir:
desktopdir = os.path.expanduser('~/Desktop')
return desktopdir
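    # Illustrative user-dirs.dirs line parsed above (hypothetical contents):
    #
    #   XDG_DESKTOP_DIR="$HOME/Desktop"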
# Callbacks
def on_quick_open_activate(self, action):
if not self._popup:
self._create_popup()
self._popup.show()
def on_popup_destroy(self, popup):
alloc = popup.get_allocation()
self._plugin.set_popup_size((alloc.width, alloc.height))
self._popup = None
def on_activated(self, gfile):
pluma.commands.load_uri(self._window, gfile.get_uri(), None, -1)
return True
# ex:ts=8:et:
|
hgl888/chromium-crosswalk
|
refs/heads/master
|
tools/git/git-diff-ide.py
|
197
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Invokes git diff [args...] and inserts file:line in front of each line of diff
output where possible.
This is useful from an IDE that allows you to double-click lines that begin
with file:line to open and jump to that point in the file.
Synopsis:
%prog [git diff args...]
Examples:
%prog
%prog HEAD
"""
import subprocess
import sys
def GitShell(args, ignore_return=False):
"""A shell invocation suitable for communicating with git. Returns
  output as a list of lines; raises an exception on error.
"""
job = subprocess.Popen(args,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
(out, err) = job.communicate()
if job.returncode != 0 and not ignore_return:
print out
raise Exception("Error %d running command %s" % (
job.returncode, args))
return out.split('\n')
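# Illustrative usage sketch for GitShell (any git command line works; this
# one is just an example):
#
#   lines = GitShell('git rev-parse HEAD')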
def PrintGitDiff(extra_args):
"""Outputs git diff extra_args with file:line inserted into relevant lines."""
  current_file = ''
  line_num = 0
lines = GitShell('git diff %s' % ' '.join(extra_args))
for line in lines:
# Pass-through lines:
# diff --git a/file.c b/file.c
# index 0e38c2d..8cd69ae 100644
# --- a/file.c
if (line.startswith('diff ') or
line.startswith('index ') or
line.startswith('--- ')):
print line
continue
# Get the filename from the +++ line:
# +++ b/file.c
if line.startswith('+++ '):
# Filename might be /dev/null or a/file or b/file.
# Skip the first two characters unless it starts with /.
current_file = line[4:] if line[4] == '/' else line[6:]
print line
continue
# Update line number from the @@ lines:
# @@ -41,9 +41,9 @@ def MyFunc():
# ^^
if line.startswith('@@ '):
_, old_nr, new_nr, _ = line.split(' ', 3)
line_num = int(new_nr.split(',')[0])
print line
continue
print current_file + ':' + repr(line_num) + ':' + line
# Increment line number for lines that start with ' ' or '+':
# @@ -41,4 +41,4 @@ def MyFunc():
# file.c:41: // existing code
# file.c:42: // existing code
# file.c:43:-// deleted code
# file.c:43:-// deleted code
# file.c:43:+// inserted code
# file.c:44:+// inserted code
if line.startswith(' ') or line.startswith('+'):
line_num += 1
def main():
PrintGitDiff(sys.argv[1:])
if __name__ == '__main__':
main()
|
os2sd/android_kernel_lge_msm7x27-3.0.x
|
refs/heads/cm-11.0
|
tools/perf/scripts/python/sctop.py
|
11180
|
# system call top
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Periodically displays system-wide system call totals, broken down by
# syscall. If a [comm] arg is specified, only syscalls called by
# [comm] are displayed. If an [interval] arg is specified, the display
# will be refreshed every [interval] seconds. The default interval is
# 3 seconds.
import os, sys, thread, time
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
usage = "perf script -s sctop.py [comm] [interval]\n";
for_comm = None
default_interval = 3
interval = default_interval
if len(sys.argv) > 3:
sys.exit(usage)
if len(sys.argv) > 2:
for_comm = sys.argv[1]
interval = int(sys.argv[2])
elif len(sys.argv) > 1:
try:
interval = int(sys.argv[1])
except ValueError:
for_comm = sys.argv[1]
interval = default_interval
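# Illustrative invocations, per the usage string above:
#
#   perf script -s sctop.py              # all comms, default 3s refresh
#   perf script -s sctop.py 5            # all comms, refresh every 5 seconds
#   perf script -s sctop.py firefox 5    # only syscalls made by 'firefox'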
syscalls = autodict()
def trace_begin():
thread.start_new_thread(print_syscall_totals, (interval,))
pass
def raw_syscalls__sys_enter(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, args):
if for_comm is not None:
if common_comm != for_comm:
return
try:
syscalls[id] += 1
except TypeError:
syscalls[id] = 1
def print_syscall_totals(interval):
while 1:
clear_term()
if for_comm is not None:
print "\nsyscall events for %s:\n\n" % (for_comm),
else:
print "\nsyscall events:\n\n",
print "%-40s %10s\n" % ("event", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"----------"),
for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \
reverse = True):
try:
print "%-40s %10d\n" % (syscall_name(id), val),
except TypeError:
pass
syscalls.clear()
time.sleep(interval)
|
kumarkrishna/sympy
|
refs/heads/master
|
sympy/solvers/tests/test_inequalities.py
|
24
|
"""Tests for tools for solving inequalities and systems of inequalities. """
from sympy import (And, Eq, FiniteSet, Ge, Gt, Interval, Le, Lt, Ne, oo,
Or, S, sin, sqrt, Symbol, Union, Integral, Sum,
Function, Poly, PurePoly, pi, root)
from sympy.solvers.inequalities import (reduce_inequalities,
solve_poly_inequality as psolve,
reduce_rational_inequalities,
solve_univariate_inequality as isolve,
reduce_abs_inequality)
from sympy.polys.rootoftools import RootOf
from sympy.solvers.solvers import solve
from sympy.abc import x, y
from sympy.utilities.pytest import raises, slow
inf = oo.evalf()
def test_solve_poly_inequality():
assert psolve(Poly(0, x), '==') == [S.Reals]
assert psolve(Poly(1, x), '==') == [S.EmptySet]
assert psolve(PurePoly(x + 1, x), ">") == [Interval(-1, oo, True, False)]
def test_reduce_poly_inequalities_real_interval():
assert reduce_rational_inequalities(
[[Eq(x**2, 0)]], x, relational=False) == FiniteSet(0)
assert reduce_rational_inequalities(
[[Le(x**2, 0)]], x, relational=False) == FiniteSet(0)
assert reduce_rational_inequalities(
[[Lt(x**2, 0)]], x, relational=False) == S.EmptySet
assert reduce_rational_inequalities(
[[Ge(x**2, 0)]], x, relational=False) == \
S.Reals if x.is_real else Interval(-oo, oo)
assert reduce_rational_inequalities(
[[Gt(x**2, 0)]], x, relational=False) == \
FiniteSet(0).complement(S.Reals)
assert reduce_rational_inequalities(
[[Ne(x**2, 0)]], x, relational=False) == \
FiniteSet(0).complement(S.Reals)
assert reduce_rational_inequalities(
[[Eq(x**2, 1)]], x, relational=False) == FiniteSet(-1, 1)
assert reduce_rational_inequalities(
[[Le(x**2, 1)]], x, relational=False) == Interval(-1, 1)
assert reduce_rational_inequalities(
[[Lt(x**2, 1)]], x, relational=False) == Interval(-1, 1, True, True)
assert reduce_rational_inequalities(
[[Ge(x**2, 1)]], x, relational=False) == \
Union(Interval(-oo, -1), Interval(1, oo))
assert reduce_rational_inequalities(
[[Gt(x**2, 1)]], x, relational=False) == \
Interval(-1, 1).complement(S.Reals)
assert reduce_rational_inequalities(
[[Ne(x**2, 1)]], x, relational=False) == \
FiniteSet(-1, 1).complement(S.Reals)
assert reduce_rational_inequalities([[Eq(
x**2, 1.0)]], x, relational=False) == FiniteSet(-1.0, 1.0).evalf()
assert reduce_rational_inequalities(
[[Le(x**2, 1.0)]], x, relational=False) == Interval(-1.0, 1.0)
assert reduce_rational_inequalities([[Lt(
x**2, 1.0)]], x, relational=False) == Interval(-1.0, 1.0, True, True)
assert reduce_rational_inequalities(
[[Ge(x**2, 1.0)]], x, relational=False) == \
Union(Interval(-inf, -1.0), Interval(1.0, inf))
assert reduce_rational_inequalities(
[[Gt(x**2, 1.0)]], x, relational=False) == \
Union(Interval(-inf, -1.0, right_open=True),
Interval(1.0, inf, left_open=True))
assert reduce_rational_inequalities([[Ne(
x**2, 1.0)]], x, relational=False) == \
FiniteSet(-1.0, 1.0).complement(S.Reals)
s = sqrt(2)
assert reduce_rational_inequalities([[Lt(
x**2 - 1, 0), Gt(x**2 - 1, 0)]], x, relational=False) == S.EmptySet
assert reduce_rational_inequalities([[Le(x**2 - 1, 0), Ge(
x**2 - 1, 0)]], x, relational=False) == FiniteSet(-1, 1)
assert reduce_rational_inequalities(
[[Le(x**2 - 2, 0), Ge(x**2 - 1, 0)]], x, relational=False
) == Union(Interval(-s, -1, False, False), Interval(1, s, False, False))
assert reduce_rational_inequalities(
[[Le(x**2 - 2, 0), Gt(x**2 - 1, 0)]], x, relational=False
) == Union(Interval(-s, -1, False, True), Interval(1, s, True, False))
assert reduce_rational_inequalities(
[[Lt(x**2 - 2, 0), Ge(x**2 - 1, 0)]], x, relational=False
) == Union(Interval(-s, -1, True, False), Interval(1, s, False, True))
assert reduce_rational_inequalities(
[[Lt(x**2 - 2, 0), Gt(x**2 - 1, 0)]], x, relational=False
) == Union(Interval(-s, -1, True, True), Interval(1, s, True, True))
assert reduce_rational_inequalities(
[[Lt(x**2 - 2, 0), Ne(x**2 - 1, 0)]], x, relational=False
) == Union(Interval(-s, -1, True, True), Interval(-1, 1, True, True),
Interval(1, s, True, True))
def test_reduce_poly_inequalities_complex_relational():
assert reduce_rational_inequalities(
[[Eq(x**2, 0)]], x, relational=True) == Eq(x, 0)
assert reduce_rational_inequalities(
[[Le(x**2, 0)]], x, relational=True) == Eq(x, 0)
assert reduce_rational_inequalities(
[[Lt(x**2, 0)]], x, relational=True) == False
assert reduce_rational_inequalities(
[[Ge(x**2, 0)]], x, relational=True) == And(Lt(-oo, x), Lt(x, oo))
assert reduce_rational_inequalities(
[[Gt(x**2, 0)]], x, relational=True) == \
And(Or(And(Lt(-oo, x), Lt(x, 0)), And(Lt(0, x), Lt(x, oo))))
assert reduce_rational_inequalities(
[[Ne(x**2, 0)]], x, relational=True) == \
And(Or(And(Lt(-oo, x), Lt(x, 0)), And(Lt(0, x), Lt(x, oo))))
for one in (S(1), S(1.0)):
inf = one*oo
assert reduce_rational_inequalities(
[[Eq(x**2, one)]], x, relational=True) == \
Or(Eq(x, -one), Eq(x, one))
assert reduce_rational_inequalities(
[[Le(x**2, one)]], x, relational=True) == \
And(And(Le(-one, x), Le(x, one)))
assert reduce_rational_inequalities(
[[Lt(x**2, one)]], x, relational=True) == \
And(And(Lt(-one, x), Lt(x, one)))
assert reduce_rational_inequalities(
[[Ge(x**2, one)]], x, relational=True) == \
And(Or(And(Le(one, x), Lt(x, inf)), And(Le(x, -one), Lt(-inf, x))))
assert reduce_rational_inequalities(
[[Gt(x**2, one)]], x, relational=True) == \
And(Or(And(Lt(-inf, x), Lt(x, -one)), And(Lt(one, x), Lt(x, inf))))
assert reduce_rational_inequalities(
[[Ne(x**2, one)]], x, relational=True) == \
Or(And(Lt(-inf, x), Lt(x, -one)),
And(Lt(-one, x), Lt(x, one)),
And(Lt(one, x), Lt(x, inf)))
def test_reduce_rational_inequalities_real_relational():
assert reduce_rational_inequalities([], x) == False
assert reduce_rational_inequalities(
[[(x**2 + 3*x + 2)/(x**2 - 16) >= 0]], x, relational=False) == \
Union(Interval.open(-oo, -4), Interval(-2, -1), Interval.open(4, oo))
assert reduce_rational_inequalities(
[[((-2*x - 10)*(3 - x))/((x**2 + 5)*(x - 2)**2) < 0]], x,
relational=False) == \
Union(Interval.open(-5, 2), Interval.open(2, 3))
assert reduce_rational_inequalities([[(x + 1)/(x - 5) <= 0]], x,
relational=False) == \
Interval.Ropen(-1, 5)
assert reduce_rational_inequalities([[(x**2 + 4*x + 3)/(x - 1) > 0]], x,
relational=False) == \
Union(Interval.open(-3, -1), Interval.open(1, oo))
assert reduce_rational_inequalities([[(x**2 - 16)/(x - 1)**2 < 0]], x,
relational=False) == \
Union(Interval.open(-4, 1), Interval.open(1, 4))
assert reduce_rational_inequalities([[(3*x + 1)/(x + 4) >= 1]], x,
relational=False) == \
Union(Interval.open(-oo, -4), Interval.Ropen(S(3)/2, oo))
assert reduce_rational_inequalities([[(x - 8)/x <= 3 - x]], x,
relational=False) == \
Union(Interval.Lopen(-oo, -2), Interval.Lopen(0, 4))
def test_reduce_abs_inequalities():
e = abs(x - 5) < 3
ans = And(Lt(2, x), Lt(x, 8))
assert reduce_inequalities(e) == ans
assert reduce_inequalities(e, x) == ans
assert reduce_inequalities(abs(x - 5)) == Eq(x, 5)
assert reduce_inequalities(
abs(2*x + 3) >= 8) == Or(And(Le(S(5)/2, x), Lt(x, oo)),
And(Le(x, -S(11)/2), Lt(-oo, x)))
assert reduce_inequalities(abs(x - 4) + abs(
3*x - 5) < 7) == And(Lt(S(1)/2, x), Lt(x, 4))
assert reduce_inequalities(abs(x - 4) + abs(3*abs(x) - 5) < 7) == \
Or(And(S(-2) < x, x < -1), And(S(1)/2 < x, x < 4))
nr = Symbol('nr', real=False)
raises(TypeError, lambda: reduce_inequalities(abs(nr - 5) < 3))
def test_reduce_inequalities_general():
assert reduce_inequalities(Ge(sqrt(2)*x, 1)) == And(sqrt(2)/2 <= x, x < oo)
assert reduce_inequalities(PurePoly(x + 1, x) > 0) == And(S(-1) < x, x < oo)
def test_reduce_inequalities_boolean():
assert reduce_inequalities(
[Eq(x**2, 0), True]) == Eq(x, 0)
assert reduce_inequalities([Eq(x**2, 0), False]) == False
def test_reduce_inequalities_multivariate():
assert reduce_inequalities([Ge(x**2, 1), Ge(y**2, 1)]) == And(
Or(And(Le(1, x), Lt(x, oo)), And(Le(x, -1), Lt(-oo, x))),
Or(And(Le(1, y), Lt(y, oo)), And(Le(y, -1), Lt(-oo, y))))
def test_reduce_inequalities_errors():
raises(NotImplementedError, lambda: reduce_inequalities(Ge(sin(x) + x, 1)))
raises(NotImplementedError, lambda: reduce_inequalities(Ge(x**2*y + y, 1)))
def test_hacky_inequalities():
assert reduce_inequalities(x + y < 1, symbols=[x]) == (x < 1 - y)
assert reduce_inequalities(x + y >= 1, symbols=[x]) == (x >= 1 - y)
def test_issue_6343():
eq = -3*x**2/2 - 45*x/4 + S(33)/2 > 0
assert reduce_inequalities(eq) == \
And(x < -S(15)/4 + sqrt(401)/4, -sqrt(401)/4 - S(15)/4 < x)
def test_issue_8235():
assert reduce_inequalities(x**2 - 1 < 0) == \
And(S(-1) < x, x < S(1))
assert reduce_inequalities(x**2 - 1 <= 0) == \
And(S(-1) <= x, x <= 1)
assert reduce_inequalities(x**2 - 1 > 0) == \
Or(And(-oo < x, x < -1), And(x < oo, S(1) < x))
assert reduce_inequalities(x**2 - 1 >= 0) == \
Or(And(-oo < x, x <= S(-1)), And(S(1) <= x, x < oo))
eq = x**8 + x - 9 # we want RootOf solns here
sol = solve(eq >= 0)
tru = Or(And(RootOf(eq, 1) <= x, x < oo), And(-oo < x, x <= RootOf(eq, 0)))
assert sol == tru
# recast vanilla as real
assert solve(sqrt((-x + 1)**2) < 1) == And(S(0) < x, x < 2)
def test_issue_5526():
assert reduce_inequalities(S(0) <=
x + Integral(y**2, (y, 1, 3)) - 1, [x]) == \
(-Integral(y**2, (y, 1, 3)) + 1 <= x)
f = Function('f')
e = Sum(f(x), (x, 1, 3))
assert reduce_inequalities(S(0) <= x + e + y**2, [x]) == \
(-y**2 - Sum(f(x), (x, 1, 3)) <= x)
def test_solve_univariate_inequality():
assert isolve(x**2 >= 4, x, relational=False) == Union(Interval(-oo, -2),
Interval(2, oo))
assert isolve(x**2 >= 4, x) == Or(And(Le(2, x), Lt(x, oo)), And(Le(x, -2),
Lt(-oo, x)))
assert isolve((x - 1)*(x - 2)*(x - 3) >= 0, x, relational=False) == \
Union(Interval(1, 2), Interval(3, oo))
assert isolve((x - 1)*(x - 2)*(x - 3) >= 0, x) == \
Or(And(Le(1, x), Le(x, 2)), And(Le(3, x), Lt(x, oo)))
# issue 2785:
assert isolve(x**3 - 2*x - 1 > 0, x, relational=False) == \
Union(Interval(-1, -sqrt(5)/2 + S(1)/2, True, True),
Interval(S(1)/2 + sqrt(5)/2, oo, True, True))
# issue 2794:
assert isolve(x**3 - x**2 + x - 1 > 0, x, relational=False) == \
Interval(1, oo, True)
# XXX should be limited in domain, e.g. between 0 and 2*pi
assert isolve(sin(x) < S.Half, x) == \
Or(And(-oo < x, x < pi/6), And(5*pi/6 < x, x < oo))
assert isolve(sin(x) > S.Half, x) == And(pi/6 < x, x < 5*pi/6)
# numerical testing in valid() is needed
assert isolve(x**7 - x - 2 > 0, x) == \
And(RootOf(x**7 - x - 2, 0) < x, x < oo)
# handle numerator and denominator; although these would be handled as
# rational inequalities, these test confirm that the right thing is done
# when the domain is EX (e.g. when 2 is replaced with sqrt(2))
assert isolve(1/(x - 2) > 0, x) == And(S(2) < x, x < oo)
den = ((x - 1)*(x - 2)).expand()
assert isolve((x - 1)/den <= 0, x) == \
Or(And(-oo < x, x < 1), And(S(1) < x, x < 2))
@slow
def test_slow_general_univariate():
r = RootOf(x**5 - x**2 + 1, 0)
assert solve(sqrt(x) + 1/root(x, 3) > 1) == \
Or(And(S(0) < x, x < r**6), And(r**6 < x, x < oo))
def test_issue_8545():
eq = 1 - x - abs(1 - x)
ans = And(Lt(1, x), Lt(x, oo))
assert reduce_abs_inequality(eq, '<', x) == ans
eq = 1 - x - sqrt((1 - x)**2)
assert reduce_inequalities(eq < 0) == ans
def test_issue_8974():
assert isolve(-oo < x, x) == And(-oo < x, x < oo)
assert isolve(oo > x, x) == And(-oo < x, x < oo)
|
dougbenjamin/panda-harvester
|
refs/heads/master
|
pandaharvester/harvesterbody/command_manager.py
|
2
|
import socket
import datetime
from future.utils import iteritems
from pandaharvester.harvesterconfig import harvester_config
from pandaharvester.harvestercore import core_utils
from pandaharvester.harvestercore.db_proxy_pool import DBProxyPool as DBProxy
from pandaharvester.harvesterbody.agent_base import AgentBase
from pandaharvester.harvestercore.command_spec import CommandSpec
from pandaharvester import commit_timestamp
from pandaharvester import panda_pkg_info
# logger
_logger = core_utils.setup_logger('command_manager')
# class to retrieve commands from panda server
class CommandManager(AgentBase):
# constructor
def __init__(self, communicator, queue_config_mapper, single_mode=False):
AgentBase.__init__(self, single_mode)
self.db_proxy = DBProxy()
self.communicator = communicator
self.queueConfigMapper = queue_config_mapper
self.nodeName = socket.gethostname()
self.lastHeartbeat = None
# set single mode
def set_single_mode(self, single_mode):
self.singleMode = single_mode
def convert_to_command_specs(self, commands):
"""
Generates a list of CommandSpec objects
"""
command_specs = []
for command in commands:
command_spec = CommandSpec()
command_spec.convert_command_json(command)
for comStr, receiver in iteritems(CommandSpec.receiver_map):
if command_spec.command.startswith(comStr):
command_spec.receiver = receiver
break
if command_spec.receiver is not None:
command_specs.append(command_spec)
return command_specs
def run(self):
"""
main
"""
main_log = self.make_logger(_logger, 'id={0}'.format(self.get_pid()), method_name='run')
bulk_size = harvester_config.commandmanager.commands_bulk_size
locked = self.db_proxy.get_process_lock('commandmanager', self.get_pid(),
harvester_config.commandmanager.sleepTime)
if locked:
# send command list to be received
siteNames = set()
commandList = []
for queueName, queueConfig in iteritems(self.queueConfigMapper.get_active_queues()):
if queueConfig is None or queueConfig.runMode != 'slave':
continue
# one command for all queues in one site
if queueConfig.siteName not in siteNames:
commandItem = {'command': CommandSpec.COM_reportWorkerStats,
'computingSite': queueConfig.siteName,
'resourceType': queueConfig.resourceType
}
commandList.append(commandItem)
siteNames.add(queueConfig.siteName)
# one command for each queue
commandItem = {'command': CommandSpec.COM_setNWorkers,
'computingSite': queueConfig.siteName,
'resourceType': queueConfig.resourceType
}
commandList.append(commandItem)
data = {'startTime': datetime.datetime.utcnow(),
'sw_version': panda_pkg_info.release_version,
'commit_stamp': commit_timestamp.timestamp}
if len(commandList) > 0:
main_log.debug('sending command list to receive')
data['commands'] = commandList
self.communicator.is_alive(data)
# main loop
while True:
# get lock
locked = self.db_proxy.get_process_lock('commandmanager', self.get_pid(),
harvester_config.commandmanager.sleepTime)
if locked or self.singleMode:
main_log.debug('polling commands loop')
# send heartbeat
if self.lastHeartbeat is None \
or self.lastHeartbeat < datetime.datetime.utcnow() - datetime.timedelta(minutes=10):
self.lastHeartbeat = datetime.datetime.utcnow()
self.communicator.is_alive({})
continuous_loop = True # as long as there are commands, retrieve them
while continuous_loop:
# get commands from panda server for this harvester instance
commands = self.communicator.get_commands(bulk_size)
main_log.debug('got {0} commands (bulk size: {1})'.format(len(commands), bulk_size))
command_specs = self.convert_to_command_specs(commands)
# cache commands in internal DB
self.db_proxy.store_commands(command_specs)
main_log.debug('cached {0} commands in internal DB'.format(len(command_specs)))
# retrieve processed commands from harvester cache
command_ids_ack = self.db_proxy.get_commands_ack()
for shard in core_utils.create_shards(command_ids_ack, bulk_size):
# post acknowledgements to panda server
self.communicator.ack_commands(shard)
main_log.debug('acknowledged {0} commands to panda server'.format(len(shard)))
# clean acknowledged commands
self.db_proxy.clean_commands_by_id(shard)
# clean commands that have been processed and do not need acknowledgement
self.db_proxy.clean_processed_commands()
# if we didn't collect the full bulk, give panda server a break
if len(commands) < bulk_size:
continuous_loop = False
# check if being terminated
if self.terminated(harvester_config.commandmanager.sleepTime, randomize=False):
main_log.debug('terminated')
return
|
gencer/python-phonenumbers
|
refs/heads/dev
|
python/phonenumbers/data/alt_format_676.py
|
13
|
"""Auto-generated file, do not edit by hand. 676 metadata"""
from ..phonemetadata import NumberFormat
PHONE_ALT_FORMAT_676 = [NumberFormat(pattern='(\\d{2})(\\d{5})', format='\\1 \\2', leading_digits_pattern=['7[5-9]|8[47-9]'])]
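# Illustrative reading of the format above: a number whose leading digits
# match '7[5-9]|8[47-9]' is split into groups of 2 and 5 digits, e.g. a
# hypothetical 7712345 renders as '77 12345'.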
|
stephenmcd/cartridge
|
refs/heads/master
|
cartridge/shop/__init__.py
|
17
|
from __future__ import unicode_literals
from cartridge import __version__
|
nubark/odoo
|
refs/heads/9.0
|
addons/payment_ogone/tests/test_ogone.py
|
430
|
# -*- coding: utf-8 -*-
from lxml import objectify
import time
import urlparse
from openerp.addons.payment.models.payment_acquirer import ValidationError
from openerp.addons.payment.tests.common import PaymentAcquirerCommon
from openerp.addons.payment_ogone.controllers.main import OgoneController
from openerp.tools import mute_logger
class OgonePayment(PaymentAcquirerCommon):
def setUp(self):
super(OgonePayment, self).setUp()
cr, uid = self.cr, self.uid
self.base_url = self.registry('ir.config_parameter').get_param(cr, uid, 'web.base.url')
        # get the ogone account
model, self.ogone_id = self.registry('ir.model.data').get_object_reference(cr, uid, 'payment_ogone', 'payment_acquirer_ogone')
def test_10_ogone_form_render(self):
cr, uid, context = self.cr, self.uid, {}
        # be sure not to do anything stupid
ogone = self.payment_acquirer.browse(self.cr, self.uid, self.ogone_id, None)
self.assertEqual(ogone.environment, 'test', 'test without test environment')
# ----------------------------------------
# Test: button direct rendering + shasign
# ----------------------------------------
form_values = {
'PSPID': 'dummy',
'ORDERID': 'test_ref0',
'AMOUNT': '1',
'CURRENCY': 'EUR',
'LANGUAGE': 'en_US',
'CN': 'Norbert Buyer',
'EMAIL': 'norbert.buyer@example.com',
'OWNERZIP': '1000',
'OWNERADDRESS': 'Huge Street 2/543',
'OWNERCTY': 'Belgium',
'OWNERTOWN': 'Sin City',
'OWNERTELNO': '0032 12 34 56 78',
'SHASIGN': '815f67b8ff70d234ffcf437c13a9fa7f807044cc',
'ACCEPTURL': '%s' % urlparse.urljoin(self.base_url, OgoneController._accept_url),
'DECLINEURL': '%s' % urlparse.urljoin(self.base_url, OgoneController._decline_url),
'EXCEPTIONURL': '%s' % urlparse.urljoin(self.base_url, OgoneController._exception_url),
'CANCELURL': '%s' % urlparse.urljoin(self.base_url, OgoneController._cancel_url),
}
# render the button
res = self.payment_acquirer.render(
cr, uid, self.ogone_id,
'test_ref0', 0.01, self.currency_euro_id,
partner_id=None,
partner_values=self.buyer_values,
context=context)
# check form result
tree = objectify.fromstring(res)
self.assertEqual(tree.get('action'), 'https://secure.ogone.com/ncol/test/orderstandard.asp', 'ogone: wrong form POST url')
for form_input in tree.input:
if form_input.get('name') in ['submit']:
continue
self.assertEqual(
form_input.get('value'),
form_values[form_input.get('name')],
'ogone: wrong value for input %s: received %s instead of %s' % (form_input.get('name'), form_input.get('value'), form_values[form_input.get('name')])
)
# ----------------------------------------
# Test2: button using tx + validation
# ----------------------------------------
# create a new draft tx
tx_id = self.payment_transaction.create(
cr, uid, {
'amount': 0.01,
'acquirer_id': self.ogone_id,
'currency_id': self.currency_euro_id,
'reference': 'test_ref0',
'partner_id': self.buyer_id,
}, context=context
)
# render the button
res = self.payment_acquirer.render(
cr, uid, self.ogone_id,
'should_be_erased', 0.01, self.currency_euro,
tx_id=tx_id,
partner_id=None,
partner_values=self.buyer_values,
context=context)
# check form result
tree = objectify.fromstring(res)
self.assertEqual(tree.get('action'), 'https://secure.ogone.com/ncol/test/orderstandard.asp', 'ogone: wrong form POST url')
for form_input in tree.input:
if form_input.get('name') in ['submit']:
continue
self.assertEqual(
form_input.get('value'),
form_values[form_input.get('name')],
'ogone: wrong value for form input %s: received %s instead of %s' % (form_input.get('name'), form_input.get('value'), form_values[form_input.get('name')])
)
@mute_logger('openerp.addons.payment_ogone.models.ogone', 'ValidationError')
def test_20_ogone_form_management(self):
cr, uid, context = self.cr, self.uid, {}
        # be sure not to do anything stupid
ogone = self.payment_acquirer.browse(self.cr, self.uid, self.ogone_id, None)
self.assertEqual(ogone.environment, 'test', 'test without test environment')
# typical data posted by ogone after client has successfully paid
ogone_post_data = {
'orderID': u'test_ref_2',
'STATUS': u'9',
'CARDNO': u'XXXXXXXXXXXX0002',
'PAYID': u'25381582',
'CN': u'Norbert Buyer',
'NCERROR': u'0',
'TRXDATE': u'11/15/13',
'IP': u'85.201.233.72',
'BRAND': u'VISA',
'ACCEPTANCE': u'test123',
'currency': u'EUR',
'amount': u'1.95',
'SHASIGN': u'7B7B0ED9CBC4A85543A9073374589033A62A05A5',
'ED': u'0315',
'PM': u'CreditCard'
}
# should raise error about unknown tx
with self.assertRaises(ValidationError):
self.payment_transaction.ogone_form_feedback(cr, uid, ogone_post_data, context=context)
# create tx
tx_id = self.payment_transaction.create(
cr, uid, {
'amount': 1.95,
'acquirer_id': self.ogone_id,
'currency_id': self.currency_euro_id,
'reference': 'test_ref_2',
'partner_name': 'Norbert Buyer',
'partner_country_id': self.country_france_id,
}, context=context
)
# validate it
self.payment_transaction.ogone_form_feedback(cr, uid, ogone_post_data, context=context)
# check state
tx = self.payment_transaction.browse(cr, uid, tx_id, context=context)
self.assertEqual(tx.state, 'done', 'ogone: validation did not put tx into done state')
self.assertEqual(tx.ogone_payid, ogone_post_data.get('PAYID'), 'ogone: validation did not update tx payid')
# reset tx
tx.write({'state': 'draft', 'date_validate': False, 'ogone_payid': False})
# now ogone post is ok: try to modify the SHASIGN
ogone_post_data['SHASIGN'] = 'a4c16bae286317b82edb49188d3399249a784691'
with self.assertRaises(ValidationError):
self.payment_transaction.ogone_form_feedback(cr, uid, ogone_post_data, context=context)
# simulate an error
ogone_post_data['STATUS'] = 2
ogone_post_data['SHASIGN'] = 'a4c16bae286317b82edb49188d3399249a784691'
self.payment_transaction.ogone_form_feedback(cr, uid, ogone_post_data, context=context)
# check state
tx = self.payment_transaction.browse(cr, uid, tx_id, context=context)
self.assertEqual(tx.state, 'error', 'ogone: erroneous validation did not put tx into error state')
def test_30_ogone_s2s(self):
test_ref = 'test_ref_%.15f' % time.time()
cr, uid, context = self.cr, self.uid, {}
        # be sure not to do anything stupid
ogone = self.payment_acquirer.browse(self.cr, self.uid, self.ogone_id, None)
self.assertEqual(ogone.environment, 'test', 'test without test environment')
# create a new draft tx
tx_id = self.payment_transaction.create(
cr, uid, {
'amount': 0.01,
'acquirer_id': self.ogone_id,
'currency_id': self.currency_euro_id,
'reference': test_ref,
'partner_id': self.buyer_id,
'type': 'server2server',
}, context=context
)
# create an alias
res = self.payment_transaction.ogone_s2s_create_alias(
cr, uid, tx_id, {
'expiry_date_mm': '01',
'expiry_date_yy': '2015',
'holder_name': 'Norbert Poilu',
'number': '4000000000000002',
'brand': 'VISA',
}, context=context)
# check an alias is set, containing at least OPENERP
tx = self.payment_transaction.browse(cr, uid, tx_id, context=context)
self.assertIn('OPENERP', tx.partner_reference, 'ogone: wrong partner reference after creating an alias')
res = self.payment_transaction.ogone_s2s_execute(cr, uid, tx_id, {}, context=context)
# print res
# {
# 'orderID': u'reference',
# 'STATUS': u'9',
# 'CARDNO': u'XXXXXXXXXXXX0002',
# 'PAYID': u'24998692',
# 'CN': u'Norbert Poilu',
# 'NCERROR': u'0',
# 'TRXDATE': u'11/05/13',
# 'IP': u'85.201.233.72',
# 'BRAND': u'VISA',
# 'ACCEPTANCE': u'test123',
# 'currency': u'EUR',
# 'amount': u'1.95',
# 'SHASIGN': u'EFDC56879EF7DE72CCF4B397076B5C9A844CB0FA',
# 'ED': u'0314',
# 'PM': u'CreditCard'
# }
|
MQQiang/kbengine
|
refs/heads/master
|
kbe/res/scripts/common/Lib/idlelib/macosxSupport.py
|
67
|
"""
A number of functions that enhance IDLE on Mac OSX.
"""
import sys
import tkinter
from os import path
import warnings
def runningAsOSXApp():
warnings.warn("runningAsOSXApp() is deprecated, use isAquaTk()",
DeprecationWarning, stacklevel=2)
return isAquaTk()
def isCarbonAquaTk(root):
warnings.warn("isCarbonAquaTk(root) is deprecated, use isCarbonTk()",
DeprecationWarning, stacklevel=2)
return isCarbonTk()
_tk_type = None
def _initializeTkVariantTests(root):
"""
Initializes OS X Tk variant values for
isAquaTk(), isCarbonTk(), isCocoaTk(), and isXQuartz().
"""
global _tk_type
if sys.platform == 'darwin':
ws = root.tk.call('tk', 'windowingsystem')
if 'x11' in ws:
_tk_type = "xquartz"
elif 'aqua' not in ws:
_tk_type = "other"
elif 'AppKit' in root.tk.call('winfo', 'server', '.'):
_tk_type = "cocoa"
else:
_tk_type = "carbon"
else:
_tk_type = "other"
def isAquaTk():
"""
Returns True if IDLE is using a native OS X Tk (Cocoa or Carbon).
"""
assert _tk_type is not None
return _tk_type == "cocoa" or _tk_type == "carbon"
def isCarbonTk():
"""
Returns True if IDLE is using a Carbon Aqua Tk (instead of the
newer Cocoa Aqua Tk).
"""
assert _tk_type is not None
return _tk_type == "carbon"
def isCocoaTk():
"""
Returns True if IDLE is using a Cocoa Aqua Tk.
"""
assert _tk_type is not None
return _tk_type == "cocoa"
def isXQuartz():
"""
Returns True if IDLE is using an OS X X11 Tk.
"""
assert _tk_type is not None
return _tk_type == "xquartz"
def tkVersionWarning(root):
"""
Returns a string warning message if the Tk version in use appears to
be one known to cause problems with IDLE.
1. Apple Cocoa-based Tk 8.5.7 shipped with Mac OS X 10.6 is unusable.
2. Apple Cocoa-based Tk 8.5.9 in OS X 10.7 and 10.8 is better but
can still crash unexpectedly.
"""
if isCocoaTk():
patchlevel = root.tk.call('info', 'patchlevel')
if patchlevel not in ('8.5.7', '8.5.9'):
return False
return (r"WARNING: The version of Tcl/Tk ({0}) in use may"
r" be unstable.\n"
r"Visit http://www.python.org/download/mac/tcltk/"
r" for current information.".format(patchlevel))
else:
return False
def addOpenEventSupport(root, flist):
"""
This ensures that the application will respond to open AppleEvents, which
    makes it feasible to use IDLE as the default application for python files.
"""
def doOpenFile(*args):
for fn in args:
flist.open(fn)
# The command below is a hook in aquatk that is called whenever the app
# receives a file open event. The callback can have multiple arguments,
# one for every file that should be opened.
root.createcommand("::tk::mac::OpenDocument", doOpenFile)
def hideTkConsole(root):
try:
root.tk.call('console', 'hide')
except tkinter.TclError:
# Some versions of the Tk framework don't have a console object
pass
def overrideRootMenu(root, flist):
"""
Replace the Tk root menu by something that is more appropriate for
IDLE with an Aqua Tk.
"""
# The menu that is attached to the Tk root (".") is also used by AquaTk for
# all windows that don't specify a menu of their own. The default menubar
# contains a number of menus, none of which are appropriate for IDLE. The
    # most annoying of those is an 'About Tcl/Tk...' menu in the application
# menu.
#
    # This function replaces the default menubar by a mostly empty one that
    # should contain only the correct application menu and the window menu.
#
# Due to a (mis-)feature of TkAqua the user will also see an empty Help
# menu.
    from tkinter import Menu
from idlelib.EditorWindow import prepstr, get_accelerator
from idlelib import Bindings
from idlelib import WindowList
from idlelib.MultiCall import MultiCallCreator
closeItem = Bindings.menudefs[0][1][-2]
# Remove the last 3 items of the file menu: a separator, close window and
# quit. Close window will be reinserted just above the save item, where
# it should be according to the HIG. Quit is in the application menu.
del Bindings.menudefs[0][1][-3:]
Bindings.menudefs[0][1].insert(6, closeItem)
# Remove the 'About' entry from the help menu, it is in the application
# menu
del Bindings.menudefs[-1][1][0:2]
# Remove the 'Configure' entry from the options menu, it is in the
# application menu as 'Preferences'
del Bindings.menudefs[-2][1][0:2]
menubar = Menu(root)
root.configure(menu=menubar)
menudict = {}
menudict['windows'] = menu = Menu(menubar, name='windows')
menubar.add_cascade(label='Window', menu=menu, underline=0)
def postwindowsmenu(menu=menu):
end = menu.index('end')
if end is None:
end = -1
if end > 0:
menu.delete(0, end)
WindowList.add_windows_to_menu(menu)
WindowList.register_callback(postwindowsmenu)
def about_dialog(event=None):
from idlelib import aboutDialog
aboutDialog.AboutDialog(root, 'About IDLE')
def config_dialog(event=None):
from idlelib import configDialog
# Ensure that the root object has an instance_dict attribute,
# mirrors code in EditorWindow (although that sets the attribute
# on an EditorWindow instance that is then passed as the first
# argument to ConfigDialog)
        root.instance_dict = flist.inversedict
configDialog.ConfigDialog(root, 'Settings')
def help_dialog(event=None):
from idlelib import textView
fn = path.join(path.abspath(path.dirname(__file__)), 'help.txt')
textView.view_file(root, 'Help', fn)
root.bind('<<about-idle>>', about_dialog)
root.bind('<<open-config-dialog>>', config_dialog)
root.createcommand('::tk::mac::ShowPreferences', config_dialog)
if flist:
root.bind('<<close-all-windows>>', flist.close_all_callback)
# The binding above doesn't reliably work on all versions of Tk
# on MacOSX. Adding command definition below does seem to do the
# right thing for now.
root.createcommand('exit', flist.close_all_callback)
if isCarbonTk():
# for Carbon AquaTk, replace the default Tk apple menu
menudict['application'] = menu = Menu(menubar, name='apple')
menubar.add_cascade(label='IDLE', menu=menu)
Bindings.menudefs.insert(0,
('application', [
('About IDLE', '<<about-idle>>'),
None,
]))
tkversion = root.tk.eval('info patchlevel')
if tuple(map(int, tkversion.split('.'))) < (8, 4, 14):
# for earlier AquaTk versions, supply a Preferences menu item
Bindings.menudefs[0][1].append(
('_Preferences....', '<<open-config-dialog>>'),
)
if isCocoaTk():
# replace default About dialog with About IDLE one
root.createcommand('tkAboutDialog', about_dialog)
# replace default "Help" item in Help menu
root.createcommand('::tk::mac::ShowHelp', help_dialog)
# remove redundant "IDLE Help" from menu
del Bindings.menudefs[-1][1][0]
def setupApp(root, flist):
"""
Perform initial OS X customizations if needed.
Called from PyShell.main() after initial calls to Tk()
There are currently three major versions of Tk in use on OS X:
1. Aqua Cocoa Tk (native default since OS X 10.6)
2. Aqua Carbon Tk (original native, 32-bit only, deprecated)
3. X11 (supported by some third-party distributors, deprecated)
There are various differences among the three that affect IDLE
behavior, primarily with menus, mouse key events, and accelerators.
Some one-time customizations are performed here.
Others are dynamically tested throughout idlelib by calls to the
isAquaTk(), isCarbonTk(), isCocoaTk(), isXQuartz() functions which
are initialized here as well.
"""
_initializeTkVariantTests(root)
if isAquaTk():
hideTkConsole(root)
overrideRootMenu(root, flist)
addOpenEventSupport(root, flist)
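# --- Illustrative usage sketch (editor's addition, not part of the original
# module). PyShell.main() is the real caller; this minimal driver only shows
# the expected call order, and the FileList import path is an assumption
# about idlelib's layout.
if __name__ == '__main__':
    from idlelib.FileList import FileList  # assumed module/class names
    root = tkinter.Tk()
    flist = FileList(root)
    setupApp(root, flist)  # initializes _tk_type, then applies Aqua tweaks
    # The variant tests are only meaningful after setupApp() has run:
    print('Aqua:', isAquaTk(), 'Cocoa:', isCocoaTk(), 'XQuartz:', isXQuartz())
    root.mainloop()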
|
getsentry/zeus
|
refs/heads/master
|
zeus/migrations/2174d4350f40_multiple_revision_authors.py
|
1
|
"""multiple_revision_authors
Revision ID: 2174d4350f40
Revises: f8851082b9d9
Create Date: 2020-01-06 15:57:52.051760
"""
import zeus
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "2174d4350f40"
down_revision = "f8851082b9d9"
branch_labels = ()
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"revision_author",
sa.Column("repository_id", zeus.db.types.guid.GUID(), nullable=False),
sa.Column("revision_sha", sa.String(length=40), nullable=False),
sa.Column("author_id", zeus.db.types.guid.GUID(), nullable=False),
sa.ForeignKeyConstraint(["author_id"], ["author.id"], ondelete="CASCADE"),
sa.ForeignKeyConstraint(
["repository_id", "revision_sha"],
["revision.repository_id", "revision.sha"],
ondelete="CASCADE",
),
sa.ForeignKeyConstraint(
["repository_id"], ["repository.id"], ondelete="CASCADE"
),
sa.PrimaryKeyConstraint("repository_id", "revision_sha", "author_id"),
)
op.drop_index("ix_revision_repository_id", table_name="revision")
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_index(
"ix_revision_repository_id", "revision", ["repository_id"], unique=False
)
op.drop_table("revision_author")
# ### end Alembic commands ###
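# Editor's note (illustrative, not part of the generated migration): with this
# file on the Alembic version path, the change is applied or reverted from the
# command line, e.g.:
#
#   alembic upgrade 2174d4350f40     # create revision_author, drop the index
#   alembic downgrade f8851082b9d9   # drop revision_author, restore the index
#
# The composite ForeignKeyConstraint mirrors revision's composite key
# (repository_id, sha), so each row ties one author to one revision within a
# repository; the ondelete="CASCADE" clauses keep the association table clean
# when authors, revisions, or repositories are removed.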
|
klonage/nlt-gcs
|
refs/heads/master
|
packages/IronPython.StdLib.2.7.4/content/Lib/lib2to3/fixes/fix_callable.py
|
59
|
# Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Fixer for callable().
This converts callable(obj) into isinstance(obj, collections.Callable), adding a
collections import if needed."""
# Local imports
from lib2to3 import fixer_base
from lib2to3.fixer_util import Call, Name, String, Attr, touch_import
class FixCallable(fixer_base.BaseFix):
BM_compatible = True
order = "pre"
# Ignore callable(*args) or use of keywords.
# Either could be a hint that the builtin callable() is not being used.
PATTERN = """
power< 'callable'
trailer< lpar='('
( not(arglist | argument<any '=' any>) func=any
| func=arglist<(not argument<any '=' any>) any ','> )
rpar=')' >
after=any*
>
"""
def transform(self, node, results):
func = results['func']
touch_import(None, u'collections', node=node)
args = [func.clone(), String(u', ')]
args.extend(Attr(Name(u'collections'), Name(u'Callable')))
return Call(Name(u'isinstance'), args, prefix=node.prefix)
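# Illustrative driver (editor's addition, not part of the fixer): running this
# fixer through lib2to3's RefactoringTool shows the intended rewrite.
if __name__ == '__main__':
    from lib2to3.refactor import RefactoringTool
    rt = RefactoringTool(['lib2to3.fixes.fix_callable'])
    tree = rt.refactor_string(u'callable(f)\n', '<example>')
    print(tree)
    # touch_import adds "import collections" at the top, so this prints:
    #   import collections
    #   isinstance(f, collections.Callable)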
|
yuewu001/sol
|
refs/heads/master
|
exp/demo_util.py
|
2
|
#!/usr/bin/env python
# AUTHOR: Yue Wu (yuewu@outlook.com)
# FILE: demo_util.py
# ROLE: shared parts for demo
# CREATED: 2015-02-08 16:09:41
# MODIFIED: 2015-02-08 22:24:12
import os
import sys
import dataset
import util
import run_ofs
import run_sol
import run_liblinear
import run_fgm
import run_mRMR
import run_bif
#train feature selection
def train_fs(dataset, model, model_config, result_file):
if model == 'liblinear':
result_once = run_liblinear.run(dataset, model_config, result_file)
elif model == 'FGM':
result_once = run_fgm.run(dataset, model_config, result_file)
elif model == 'mRMR':
run_mRMR.run(dataset, model_config)
elif model == 'BIF':
run_bif.run(dataset, model_config)
else:
param_config = ''
#get parameters
        if not model_config['is_default_param']:
param_config = dataset.get_best_param(model)
result_once = run_ofs.run(dataset,model, model_config, param_config, result_file)
return result_once
#train sparse online learning
def train_sol(dataset, model, model_config, result_file):
param_config = ''
if model_config['sol_type'] == 'run_all':
result_once = run_sol.run_all(dataset,model, model_config, result_file)
elif model_config['sol_type'] == 'search_l1':
result_once = run_sol.search_best_l1(dataset,model, model_config,result_file)
else:
print 'unrecognized sol_type: ', model_config['sol_type']
sys.exit()
return result_once
#train model
def train_model(dataset, model_list, model_config):
model_result_dict = {}
for model in model_list:
model_result_dict[model] = util.ResultItem()
model_type = model_config['type'].lower()
rand_num = model_config['rand_num']
for k in range(0,rand_num):
if rand_num > 1:
            print 'shuffle dataset...'
dataset.shuffle_file()
for model in model_list:
print '-----------------------------------'
print ' Experiment on %s' %model + ' Random %d' %k
print '-----------------------------------'
#create destination folder
dst_folder = dataset.name + '/%s' %model
            if not os.path.exists(dst_folder):
os.makedirs(dst_folder)
if model_type == 'fs':
#output file
result_file = '{0}/{1}_rand_{2}_type_{3}.txt'.format(dst_folder,model, k, model_type)
elif model_type == 'sol' and model_config['sol_type'] == 'run_all':
#output file
result_file = '{0}/{1}_rand_{2}_type_{3}_soltype_{4}.txt'.format(dst_folder,model, k,model_type, model_config['sol_type'])
elif model_type == 'sol' and model_config['sol_type'] == 'search_l1':
result_file = '{0}/{1}_rand_{2}_type_{3}_soltype_{4}_tolerance_{5}.txt'.format(dst_folder,model, k,model_type, model_config['sol_type'], model_config['tolerance'])
else:
                print 'unrecognized model type: ', model_type
sys.exit()
result_file = result_file.replace('/',os.sep)
#clear the file if it already exists
open(result_file,'w').close()
if model_type == 'fs':
result_once = train_fs(dataset, model, model_config, result_file)
elif model_type == 'sol':
result_once = train_sol(dataset, model, model_config, result_file)
model_result_dict[model].Add(result_once)
dataset.del_rand_file()
#average the result
if (rand_num > 1):
for key,val in model_result_dict.iteritems():
val.Divide(rand_num)
return model_result_dict
def demo(ds_list, model_list, model_config):
#train the model
for ds in ds_list:
dt = dataset.dt_dict[ds]
model_result_dict = train_model(dt, model_list, model_config)
#write the result to file
dst_folder = dt.name
for key,val in model_result_dict.iteritems():
result_file = dst_folder +'/%s' %key + '.txt'
val.save_result(result_file)
|
intermezzo-fr/onedrive-d
|
refs/heads/master
|
onedrive_d/api/resources.py
|
2
|
__author__ = 'xb'
import json
from onedrive_d import str_to_datetime
from onedrive_d.api import options
class UserProfile:
VERSION_KEY = '@version'
VERSION_VALUE = 1
def __init__(self, data):
self._data = data
@property
def user_id(self):
"""
:rtype: str
"""
return self._data['id']
@property
def gender(self):
"""
:rtype: str | None
"""
return self._data['gender']
@property
def locale(self):
"""
:rtype: str
"""
return self._data['locale']
@property
def first_name(self):
"""
:rtype: str
"""
return self._data['first_name']
@property
def last_name(self):
"""
:rtype: str
"""
return self._data['last_name']
@property
def name(self):
"""
:rtype: str
"""
return self._data['name']
def dump(self):
return json.dumps({'data': self._data, self.VERSION_KEY: self.VERSION_VALUE})
@classmethod
def load(cls, s):
"""
:param str s: Some value previously returned by dump() call.
:rtype: onedrive_d.api.resources.UserProfile
"""
data = json.loads(s)
if cls.VERSION_KEY not in data:
raise ValueError('Unsupported user profile serialization data.')
if data[cls.VERSION_KEY] != cls.VERSION_VALUE:
raise ValueError('Outdated user profile serialization.')
return UserProfile(data['data'])
class ItemReference:
"""
https://github.com/OneDrive/onedrive-api-docs/blob/master/resources/itemReference.md
"""
def __init__(self, data):
"""
:param dict[str, str] data: Deserialized JSON dict of ItemReference resource.
"""
self.data = data
@property
def drive_id(self):
"""
Unique identifier for the Drive that contains the item.
:rtype: str
"""
return self.data['driveId']
@property
def id(self):
"""
Unique identifier for the item.
:rtype: str
"""
return self.data['id']
@property
def path(self):
"""
        Path that can be used to navigate to the item.
:rtype: str
"""
return self.data['path']
@classmethod
def build(cls, drive_id=None, id=None, path=None):
"""
        Build an ItemReference object from parameters.
:param str | None drive_id: (Optional) ID of the root drive.
:param str | None id: (Optional) ID of the item.
:param str | None path: (Optional) Path to the item relative to drive root.
:rtype: ItemReference
"""
if id is None and path is None:
raise ValueError('id and path cannot be both None.')
data = {}
if drive_id is not None:
data['driveId'] = drive_id
if id is not None:
data['id'] = id
if path is not None:
data['path'] = path
return ItemReference(data)
class UploadSession:
"""
https://github.com/OneDrive/onedrive-api-docs/blob/master/resources/uploadSession.md
"""
def __init__(self, data):
self.update(data)
# noinspection PyAttributeOutsideInit
def update(self, data):
if 'uploadUrl' in data:
self.upload_url = data['uploadUrl']
if 'expirationDateTime' in data:
self.expires_at = str_to_datetime(data['expirationDateTime'])
self.next_ranges = []
if 'nextExpectedRanges' in data:
for s in data['nextExpectedRanges']:
f, t = s.split('-', 1)
f = int(f)
if t == '':
t = None
else:
t = int(t)
self.next_ranges.append((f, t))
class AsyncCopySession:
"""
Track the state of an async copy request.
"""
ACCEPTABLE_STATUS_CODES = {200, 202, 303}
def __init__(self, drive, headers):
self.drive = drive
self.url = headers['Location']
self._status = options.AsyncOperationStatuses.NOT_STARTED
def update_status(self):
request = self.drive.root.account.session.get(self.url, ok_status_code=self.ACCEPTABLE_STATUS_CODES)
if request.status_code == 202:
data = request.json()
self._operation = data['operation']
self._percentage_complete = data['percentageComplete']
self._status = data['status']
elif request.status_code == 200:
self._percentage_complete = 100
self._status = options.AsyncOperationStatuses.COMPLETED
self._item = self.drive.build_item(request.json())
@property
def operation(self):
"""
:return str: The type of job being run.
"""
return self._operation
@property
def percentage_complete(self):
"""
        :return float: A float value between 0 and 100 that indicates the percentage complete.
"""
return self._percentage_complete
@property
def status(self):
"""
:return str: An enum value in options.AsyncOperationStatuses to indicate the status of the job.
"""
return self._status
def get_item(self):
return self._item
class Identity:
def __init__(self, data):
self._data = data
@property
def id(self):
"""
:rtype: str
"""
return self._data['id']
@property
def display_name(self):
"""
:rtype: str
"""
return self._data['displayName']
class IdentitySet:
"""
Python representations of identity types.
https://github.com/OneDrive/onedrive-api-docs/blob/master/resources/identitySet.md
"""
ALL_IDENTITIES = ['user', 'application', 'device']
def __init__(self, data):
self._data = {}
for i in self.ALL_IDENTITIES:
if i in data:
self._data[i] = Identity(data[i])
else:
self._data[i] = None
@property
def user(self):
"""
An Identity resource that represents a user.
:rtype: Identity
"""
return self._data['user']
@property
def application(self):
"""
An Identity resource that represents the application.
:rtype: Identity
"""
return self._data['application']
@property
def device(self):
"""
An Identity resource that represents the device.
:rtype: Identity
"""
return self._data['device']
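# Illustrative usage sketch (editor's addition, not part of the original
# module), exercising the classes defined above. The field values are made
# up; the dict keys follow the properties each class reads.
if __name__ == '__main__':
    ref = ItemReference.build(drive_id='0123456789abcdef', path='/drive/root:/Documents')
    print(ref.drive_id, ref.path)
    profile = UserProfile({
        'id': 'u1', 'gender': None, 'locale': 'en_US',
        'first_name': 'Ada', 'last_name': 'Lovelace', 'name': 'Ada Lovelace',
    })
    restored = UserProfile.load(profile.dump())  # round-trip through JSON
    print(restored.name)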
|
silentfuzzle/calibre
|
refs/heads/master
|
src/calibre/db/tests/profiling.py
|
14
|
#!/usr/bin/env python2
# vim:fileencoding=utf-8
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2013, Kovid Goyal <kovid at kovidgoyal.net>'
import os, cProfile
from tempfile import gettempdir
from calibre.db.legacy import LibraryDatabase
db = None
def initdb(path):
global db
db = LibraryDatabase(os.path.expanduser(path))
def show_stats(path):
from pstats import Stats
s = Stats(path)
s.sort_stats('cumulative')
s.print_stats(30)
def main():
stats = os.path.join(gettempdir(), 'read_db.stats')
pr = cProfile.Profile()
initdb('~/test library')
all_ids = db.new_api.all_book_ids() # noqa
pr.enable()
for book_id in all_ids:
db.new_api._composite_for('#isbn', book_id)
db.new_api._composite_for('#formats', book_id)
pr.disable()
pr.dump_stats(stats)
show_stats(stats)
    print('Stats saved to', stats)
if __name__ == '__main__':
main()
|
thundernixon/dailydecode
|
refs/heads/master
|
node_modules/gulp-sass/node_modules/node-sass/node_modules/pangyp/gyp/pylib/gyp/MSVSUtil.py
|
566
|
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utility functions shared amongst the Windows generators."""
import copy
import os
_TARGET_TYPE_EXT = {
'executable': '.exe',
'loadable_module': '.dll',
'shared_library': '.dll',
}
def _GetLargePdbShimCcPath():
"""Returns the path of the large_pdb_shim.cc file."""
this_dir = os.path.abspath(os.path.dirname(__file__))
src_dir = os.path.abspath(os.path.join(this_dir, '..', '..'))
win_data_dir = os.path.join(src_dir, 'data', 'win')
large_pdb_shim_cc = os.path.join(win_data_dir, 'large-pdb-shim.cc')
return large_pdb_shim_cc
def _DeepCopySomeKeys(in_dict, keys):
"""Performs a partial deep-copy on |in_dict|, only copying the keys in |keys|.
Arguments:
in_dict: The dictionary to copy.
keys: The keys to be copied. If a key is in this list and doesn't exist in
|in_dict| this is not an error.
Returns:
The partially deep-copied dictionary.
"""
d = {}
for key in keys:
if key not in in_dict:
continue
d[key] = copy.deepcopy(in_dict[key])
return d
def _SuffixName(name, suffix):
"""Add a suffix to the end of a target.
Arguments:
name: name of the target (foo#target)
suffix: the suffix to be added
Returns:
Target name with suffix added (foo_suffix#target)
"""
parts = name.rsplit('#', 1)
parts[0] = '%s_%s' % (parts[0], suffix)
return '#'.join(parts)
def _ShardName(name, number):
"""Add a shard number to the end of a target.
Arguments:
name: name of the target (foo#target)
number: shard number
Returns:
Target name with shard added (foo_1#target)
"""
return _SuffixName(name, str(number))
def ShardTargets(target_list, target_dicts):
  """Shard some targets apart to work around the linker's limits.
Arguments:
target_list: List of target pairs: 'base/base.gyp:base'.
target_dicts: Dict of target properties keyed on target pair.
Returns:
Tuple of the new sharded versions of the inputs.
"""
# Gather the targets to shard, and how many pieces.
targets_to_shard = {}
for t in target_dicts:
shards = int(target_dicts[t].get('msvs_shard', 0))
if shards:
targets_to_shard[t] = shards
# Shard target_list.
new_target_list = []
for t in target_list:
if t in targets_to_shard:
for i in range(targets_to_shard[t]):
new_target_list.append(_ShardName(t, i))
else:
new_target_list.append(t)
# Shard target_dict.
new_target_dicts = {}
for t in target_dicts:
if t in targets_to_shard:
for i in range(targets_to_shard[t]):
name = _ShardName(t, i)
new_target_dicts[name] = copy.copy(target_dicts[t])
new_target_dicts[name]['target_name'] = _ShardName(
new_target_dicts[name]['target_name'], i)
sources = new_target_dicts[name].get('sources', [])
new_sources = []
for pos in range(i, len(sources), targets_to_shard[t]):
new_sources.append(sources[pos])
new_target_dicts[name]['sources'] = new_sources
else:
new_target_dicts[t] = target_dicts[t]
# Shard dependencies.
for t in new_target_dicts:
dependencies = copy.copy(new_target_dicts[t].get('dependencies', []))
new_dependencies = []
for d in dependencies:
if d in targets_to_shard:
for i in range(targets_to_shard[d]):
new_dependencies.append(_ShardName(d, i))
else:
new_dependencies.append(d)
new_target_dicts[t]['dependencies'] = new_dependencies
return (new_target_list, new_target_dicts)
def _GetPdbPath(target_dict, config_name, vars):
"""Returns the path to the PDB file that will be generated by a given
configuration.
The lookup proceeds as follows:
- Look for an explicit path in the VCLinkerTool configuration block.
- Look for an 'msvs_large_pdb_path' variable.
- Use '<(PRODUCT_DIR)/<(product_name).(exe|dll).pdb' if 'product_name' is
specified.
- Use '<(PRODUCT_DIR)/<(target_name).(exe|dll).pdb'.
Arguments:
target_dict: The target dictionary to be searched.
config_name: The name of the configuration of interest.
vars: A dictionary of common GYP variables with generator-specific values.
Returns:
The path of the corresponding PDB file.
"""
config = target_dict['configurations'][config_name]
msvs = config.setdefault('msvs_settings', {})
linker = msvs.get('VCLinkerTool', {})
pdb_path = linker.get('ProgramDatabaseFile')
if pdb_path:
return pdb_path
variables = target_dict.get('variables', {})
pdb_path = variables.get('msvs_large_pdb_path', None)
if pdb_path:
return pdb_path
pdb_base = target_dict.get('product_name', target_dict['target_name'])
pdb_base = '%s%s.pdb' % (pdb_base, _TARGET_TYPE_EXT[target_dict['type']])
pdb_path = vars['PRODUCT_DIR'] + '/' + pdb_base
return pdb_path
def InsertLargePdbShims(target_list, target_dicts, vars):
"""Insert a shim target that forces the linker to use 4KB pagesize PDBs.
This is a workaround for targets with PDBs greater than 1GB in size, the
limit for the 1KB pagesize PDBs created by the linker by default.
Arguments:
target_list: List of target pairs: 'base/base.gyp:base'.
target_dicts: Dict of target properties keyed on target pair.
vars: A dictionary of common GYP variables with generator-specific values.
Returns:
Tuple of the shimmed version of the inputs.
"""
# Determine which targets need shimming.
targets_to_shim = []
for t in target_dicts:
target_dict = target_dicts[t]
# We only want to shim targets that have msvs_large_pdb enabled.
if not int(target_dict.get('msvs_large_pdb', 0)):
continue
# This is intended for executable, shared_library and loadable_module
# targets where every configuration is set up to produce a PDB output.
# If any of these conditions is not true then the shim logic will fail
# below.
targets_to_shim.append(t)
large_pdb_shim_cc = _GetLargePdbShimCcPath()
for t in targets_to_shim:
target_dict = target_dicts[t]
target_name = target_dict.get('target_name')
base_dict = _DeepCopySomeKeys(target_dict,
['configurations', 'default_configuration', 'toolset'])
# This is the dict for copying the source file (part of the GYP tree)
# to the intermediate directory of the project. This is necessary because
# we can't always build a relative path to the shim source file (on Windows
# GYP and the project may be on different drives), and Ninja hates absolute
# paths (it ends up generating the .obj and .obj.d alongside the source
    # file, polluting GYP's tree).
copy_suffix = 'large_pdb_copy'
copy_target_name = target_name + '_' + copy_suffix
full_copy_target_name = _SuffixName(t, copy_suffix)
shim_cc_basename = os.path.basename(large_pdb_shim_cc)
shim_cc_dir = vars['SHARED_INTERMEDIATE_DIR'] + '/' + copy_target_name
shim_cc_path = shim_cc_dir + '/' + shim_cc_basename
copy_dict = copy.deepcopy(base_dict)
copy_dict['target_name'] = copy_target_name
copy_dict['type'] = 'none'
copy_dict['sources'] = [ large_pdb_shim_cc ]
copy_dict['copies'] = [{
'destination': shim_cc_dir,
'files': [ large_pdb_shim_cc ]
}]
# This is the dict for the PDB generating shim target. It depends on the
# copy target.
shim_suffix = 'large_pdb_shim'
shim_target_name = target_name + '_' + shim_suffix
full_shim_target_name = _SuffixName(t, shim_suffix)
shim_dict = copy.deepcopy(base_dict)
shim_dict['target_name'] = shim_target_name
shim_dict['type'] = 'static_library'
shim_dict['sources'] = [ shim_cc_path ]
shim_dict['dependencies'] = [ full_copy_target_name ]
# Set up the shim to output its PDB to the same location as the final linker
# target.
for config_name, config in shim_dict.get('configurations').iteritems():
pdb_path = _GetPdbPath(target_dict, config_name, vars)
# A few keys that we don't want to propagate.
for key in ['msvs_precompiled_header', 'msvs_precompiled_source', 'test']:
config.pop(key, None)
msvs = config.setdefault('msvs_settings', {})
# Update the compiler directives in the shim target.
compiler = msvs.setdefault('VCCLCompilerTool', {})
compiler['DebugInformationFormat'] = '3'
compiler['ProgramDataBaseFileName'] = pdb_path
# Set the explicit PDB path in the appropriate configuration of the
# original target.
config = target_dict['configurations'][config_name]
msvs = config.setdefault('msvs_settings', {})
linker = msvs.setdefault('VCLinkerTool', {})
linker['GenerateDebugInformation'] = 'true'
linker['ProgramDatabaseFile'] = pdb_path
# Add the new targets. They must go to the beginning of the list so that
# the dependency generation works as expected in ninja.
target_list.insert(0, full_copy_target_name)
target_list.insert(0, full_shim_target_name)
target_dicts[full_copy_target_name] = copy_dict
target_dicts[full_shim_target_name] = shim_dict
# Update the original target to depend on the shim target.
target_dict.setdefault('dependencies', []).append(full_shim_target_name)
return (target_list, target_dicts)
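# Illustrative sketch (editor's addition, not part of the original module):
# a target with msvs_shard set is split into that many copies, its sources
# are dealt out round-robin, and any dependents are rewired to every shard.
if __name__ == '__main__':
  demo_list = ['base/base.gyp:base']
  demo_dicts = {
      'base/base.gyp:base': {
          'target_name': 'base#target',
          'msvs_shard': 2,
          'sources': ['a.cc', 'b.cc', 'c.cc'],
      },
  }
  new_list, new_dicts = ShardTargets(demo_list, demo_dicts)
  print new_list
  # -> ['base/base.gyp:base_0', 'base/base.gyp:base_1']
  print sorted(d['sources'] for d in new_dicts.values())
  # -> [['a.cc', 'c.cc'], ['b.cc']]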
|
laugh26/clapcoin
|
refs/heads/master
|
share/qt/make_spinner.py
|
4415
|
#!/usr/bin/env python
# W.J. van der Laan, 2011
# Make spinning .mng animation from a .png
# Requires imagemagick 6.7+
from __future__ import division
from os import path
from PIL import Image
from subprocess import Popen
SRC='img/reload_scaled.png'
DST='../../src/qt/res/movies/update_spinner.mng'
TMPDIR='/tmp'
TMPNAME='tmp-%03i.png'
NUMFRAMES=35
FRAMERATE=10.0
CONVERT='convert'
CLOCKWISE=True
DSIZE=(16,16)
im_src = Image.open(SRC)
if CLOCKWISE:
im_src = im_src.transpose(Image.FLIP_LEFT_RIGHT)
def frame_to_filename(frame):
return path.join(TMPDIR, TMPNAME % frame)
frame_files = []
for frame in xrange(NUMFRAMES):
rotation = (frame + 0.5) / NUMFRAMES * 360.0
if CLOCKWISE:
rotation = -rotation
im_new = im_src.rotate(rotation, Image.BICUBIC)
im_new.thumbnail(DSIZE, Image.ANTIALIAS)
outfile = frame_to_filename(frame)
im_new.save(outfile, 'png')
frame_files.append(outfile)
p = Popen([CONVERT, "-delay", str(FRAMERATE), "-dispose", "2"] + frame_files + [DST])
p.communicate()
|
axinging/sky_engine
|
refs/heads/master
|
mojo/tools/testing/mojom_fetcher/mojom_file_tests.py
|
10
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os.path
import unittest
from fetcher.mojom_file import MojomFile
from fetcher.dependency import Dependency
from fetcher.repository import Repository
from fakes import FakeDependency, FakeMojomFile
class TestMojomFile(unittest.TestCase):
def test_add_dependency(self):
mojom = MojomFile(Repository("/base/repo", "third_party/external"),
"mojom_name")
mojom.add_dependency("dependency_name")
self.assertEqual(1, len(mojom.deps))
self.assertEqual("mojom_name", mojom.deps[0].get_importer())
self.assertEqual("dependency_name", mojom.deps[0].get_imported())
def test_jinja_parameters(self):
mojom = FakeMojomFile(
Repository("/base/repo", "third_party/external"),
"/base/repo/third_party/external/domokit.org/bar/baz/foo.mojom")
mojom.add_dependency("example.com/dir/example.mojom")
mojom.add_dependency("example.com/dir/dir.mojom")
mojom.add_dependency("buzz.mojom")
mojom.add_dependency("foo/bar.mojom")
mojom.add_dependency(
"mojo/public/interfaces/application/shell.mojom")
params = mojom.get_jinja_parameters([])
        self.assertEqual({
"target_name": "foo",
"filename": "foo.mojom",
"import_dirs": [".."],
"mojo_sdk_deps": ["mojo/public/interfaces/application"],
"deps": [
'//third_party/external/example.com/dir:example',
'//third_party/external/example.com/dir:dir_mojom',
':buzz',
'../foo:bar']
}, params)
|
camptocamp/c2c-rd-addons
|
refs/heads/8.0
|
picking_invoice_rel/purchase.py
|
4
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2011 Tiny SPRL (<http://tiny.be>).
# Copyright (C) 2011 ChriCar Beteiligungs- und Beratungs- GmbH (<http://www.camptocamp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
import logging
class purchase_order(osv.osv):
_inherit = "purchase.order"
_logger = logging.getLogger(__name__)
def action_invoice_create(self, cr, uid, ids, context=None):
res = super(purchase_order, self).action_invoice_create(cr, uid, ids, context)
self._logger.debug('PO inv create ids,res:%s %s', ids, res)
invoice_ids = res
if not isinstance(invoice_ids,list):
invoice_ids = [invoice_ids]
picking_obj = self.pool.get('stock.picking')
picking_ids = picking_obj.search(cr, uid, [('purchase_id','in',ids)])
self._logger.debug('PO inv create picking_ids:%s', picking_ids)
for picking_id in picking_ids:
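            # (6, 0, ids) is the ORM "replace" command: link exactly these
            # invoice ids to the picking's invoice_ids relation.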
picking_obj.write(cr, uid, picking_id, {'invoice_ids' : [(6,0, invoice_ids )]}, context=context)
return res
purchase_order()
|
thumbimigwe/echorizr
|
refs/heads/master
|
lib/python2.7/site-packages/django/template/defaulttags.py
|
31
|
"""Default tags used by the template system, available to all templates."""
from __future__ import unicode_literals
import os
import re
import sys
import warnings
from datetime import datetime
from itertools import cycle as itertools_cycle, groupby
from django.conf import settings
from django.utils import six, timezone
from django.utils.deprecation import RemovedInDjango110Warning
from django.utils.encoding import force_text, smart_text
from django.utils.html import conditional_escape, format_html
from django.utils.lorem_ipsum import paragraphs, words
from django.utils.safestring import mark_safe
from .base import (
BLOCK_TAG_END, BLOCK_TAG_START, COMMENT_TAG_END, COMMENT_TAG_START,
SINGLE_BRACE_END, SINGLE_BRACE_START, VARIABLE_ATTRIBUTE_SEPARATOR,
VARIABLE_TAG_END, VARIABLE_TAG_START, Context, Node, NodeList, Template,
TemplateSyntaxError, VariableDoesNotExist, kwarg_re,
render_value_in_context, token_kwargs,
)
from .defaultfilters import date
from .library import Library
from .smartif import IfParser, Literal
register = Library()
class AutoEscapeControlNode(Node):
"""Implements the actions of the autoescape tag."""
def __init__(self, setting, nodelist):
self.setting, self.nodelist = setting, nodelist
def render(self, context):
old_setting = context.autoescape
context.autoescape = self.setting
output = self.nodelist.render(context)
context.autoescape = old_setting
if self.setting:
return mark_safe(output)
else:
return output
class CommentNode(Node):
def render(self, context):
return ''
class CsrfTokenNode(Node):
def render(self, context):
csrf_token = context.get('csrf_token')
if csrf_token:
if csrf_token == 'NOTPROVIDED':
return format_html("")
else:
return format_html("<input type='hidden' name='csrfmiddlewaretoken' value='{}' />", csrf_token)
else:
# It's very probable that the token is missing because of
# misconfiguration, so we raise a warning
if settings.DEBUG:
warnings.warn(
"A {% csrf_token %} was used in a template, but the context "
"did not provide the value. This is usually caused by not "
"using RequestContext."
)
return ''
class CycleNode(Node):
def __init__(self, cyclevars, variable_name=None, silent=False):
self.cyclevars = cyclevars
self.variable_name = variable_name
self.silent = silent
def render(self, context):
if self not in context.render_context:
# First time the node is rendered in template
context.render_context[self] = itertools_cycle(self.cyclevars)
cycle_iter = context.render_context[self]
value = next(cycle_iter).resolve(context)
if self.variable_name:
context[self.variable_name] = value
if self.silent:
return ''
return render_value_in_context(value, context)
class DebugNode(Node):
def render(self, context):
from pprint import pformat
output = [force_text(pformat(val)) for val in context]
output.append('\n\n')
output.append(force_text(pformat(sys.modules)))
return ''.join(output)
class FilterNode(Node):
def __init__(self, filter_expr, nodelist):
self.filter_expr, self.nodelist = filter_expr, nodelist
def render(self, context):
output = self.nodelist.render(context)
# Apply filters.
with context.push(var=output):
return self.filter_expr.resolve(context)
class FirstOfNode(Node):
def __init__(self, variables, asvar=None):
self.vars = variables
self.asvar = asvar
def render(self, context):
for var in self.vars:
value = var.resolve(context, True)
if value:
first = render_value_in_context(value, context)
if self.asvar:
context[self.asvar] = first
return ''
return first
return ''
class ForNode(Node):
child_nodelists = ('nodelist_loop', 'nodelist_empty')
def __init__(self, loopvars, sequence, is_reversed, nodelist_loop, nodelist_empty=None):
self.loopvars, self.sequence = loopvars, sequence
self.is_reversed = is_reversed
self.nodelist_loop = nodelist_loop
if nodelist_empty is None:
self.nodelist_empty = NodeList()
else:
self.nodelist_empty = nodelist_empty
def __repr__(self):
reversed_text = ' reversed' if self.is_reversed else ''
return "<For Node: for %s in %s, tail_len: %d%s>" % \
(', '.join(self.loopvars), self.sequence, len(self.nodelist_loop),
reversed_text)
def __iter__(self):
for node in self.nodelist_loop:
yield node
for node in self.nodelist_empty:
yield node
def render(self, context):
if 'forloop' in context:
parentloop = context['forloop']
else:
parentloop = {}
with context.push():
try:
values = self.sequence.resolve(context, True)
except VariableDoesNotExist:
values = []
if values is None:
values = []
if not hasattr(values, '__len__'):
values = list(values)
len_values = len(values)
if len_values < 1:
return self.nodelist_empty.render(context)
nodelist = []
if self.is_reversed:
values = reversed(values)
num_loopvars = len(self.loopvars)
unpack = num_loopvars > 1
# Create a forloop value in the context. We'll update counters on each
# iteration just below.
loop_dict = context['forloop'] = {'parentloop': parentloop}
for i, item in enumerate(values):
# Shortcuts for current loop iteration number.
loop_dict['counter0'] = i
loop_dict['counter'] = i + 1
# Reverse counter iteration numbers.
loop_dict['revcounter'] = len_values - i
loop_dict['revcounter0'] = len_values - i - 1
# Boolean values designating first and last times through loop.
loop_dict['first'] = (i == 0)
loop_dict['last'] = (i == len_values - 1)
pop_context = False
if unpack:
# If there are multiple loop variables, unpack the item into
# them.
# To complete this deprecation, remove from here to the
# try/except block as well as the try/except itself,
# leaving `unpacked_vars = ...` and the "else" statements.
if not isinstance(item, (list, tuple)):
len_item = 1
else:
len_item = len(item)
# Check loop variable count before unpacking
if num_loopvars != len_item:
warnings.warn(
"Need {} values to unpack in for loop; got {}. "
"This will raise an exception in Django 1.10."
.format(num_loopvars, len_item),
RemovedInDjango110Warning)
try:
unpacked_vars = dict(zip(self.loopvars, item))
except TypeError:
pass
else:
pop_context = True
context.update(unpacked_vars)
else:
context[self.loopvars[0]] = item
for node in self.nodelist_loop:
nodelist.append(node.render_annotated(context))
if pop_context:
# The loop variables were pushed on to the context so pop them
# off again. This is necessary because the tag lets the length
                    # of loopvars differ from the length of each set of items and we
# don't want to leave any vars from the previous loop on the
# context.
context.pop()
return mark_safe(''.join(force_text(n) for n in nodelist))
class IfChangedNode(Node):
child_nodelists = ('nodelist_true', 'nodelist_false')
def __init__(self, nodelist_true, nodelist_false, *varlist):
self.nodelist_true, self.nodelist_false = nodelist_true, nodelist_false
self._varlist = varlist
def render(self, context):
# Init state storage
state_frame = self._get_context_stack_frame(context)
if self not in state_frame:
state_frame[self] = None
nodelist_true_output = None
try:
if self._varlist:
# Consider multiple parameters. This automatically behaves
# like an OR evaluation of the multiple variables.
compare_to = [var.resolve(context, True) for var in self._varlist]
else:
# The "{% ifchanged %}" syntax (without any variables) compares the rendered output.
compare_to = nodelist_true_output = self.nodelist_true.render(context)
except VariableDoesNotExist:
compare_to = None
if compare_to != state_frame[self]:
state_frame[self] = compare_to
# render true block if not already rendered
return nodelist_true_output or self.nodelist_true.render(context)
elif self.nodelist_false:
return self.nodelist_false.render(context)
return ''
def _get_context_stack_frame(self, context):
# The Context object behaves like a stack where each template tag can create a new scope.
# Find the place where to store the state to detect changes.
if 'forloop' in context:
# Ifchanged is bound to the local for loop.
# When there is a loop-in-loop, the state is bound to the inner loop,
# so it resets when the outer loop continues.
return context['forloop']
else:
# Using ifchanged outside loops. Effectively this is a no-op because the state is associated with 'self'.
return context.render_context
class IfEqualNode(Node):
child_nodelists = ('nodelist_true', 'nodelist_false')
def __init__(self, var1, var2, nodelist_true, nodelist_false, negate):
self.var1, self.var2 = var1, var2
self.nodelist_true, self.nodelist_false = nodelist_true, nodelist_false
self.negate = negate
def __repr__(self):
return "<IfEqualNode>"
def render(self, context):
val1 = self.var1.resolve(context, True)
val2 = self.var2.resolve(context, True)
if (self.negate and val1 != val2) or (not self.negate and val1 == val2):
return self.nodelist_true.render(context)
return self.nodelist_false.render(context)
class IfNode(Node):
def __init__(self, conditions_nodelists):
self.conditions_nodelists = conditions_nodelists
def __repr__(self):
return "<IfNode>"
def __iter__(self):
for _, nodelist in self.conditions_nodelists:
for node in nodelist:
yield node
@property
def nodelist(self):
return NodeList(node for _, nodelist in self.conditions_nodelists for node in nodelist)
def render(self, context):
for condition, nodelist in self.conditions_nodelists:
if condition is not None: # if / elif clause
try:
match = condition.eval(context)
except VariableDoesNotExist:
match = None
else: # else clause
match = True
if match:
return nodelist.render(context)
return ''
class LoremNode(Node):
def __init__(self, count, method, common):
self.count, self.method, self.common = count, method, common
def render(self, context):
try:
count = int(self.count.resolve(context))
except (ValueError, TypeError):
count = 1
if self.method == 'w':
return words(count, common=self.common)
else:
paras = paragraphs(count, common=self.common)
if self.method == 'p':
paras = ['<p>%s</p>' % p for p in paras]
return '\n\n'.join(paras)
class RegroupNode(Node):
def __init__(self, target, expression, var_name):
self.target, self.expression = target, expression
self.var_name = var_name
def resolve_expression(self, obj, context):
# This method is called for each object in self.target. See regroup()
# for the reason why we temporarily put the object in the context.
context[self.var_name] = obj
return self.expression.resolve(context, True)
def render(self, context):
obj_list = self.target.resolve(context, True)
if obj_list is None:
# target variable wasn't found in context; fail silently.
context[self.var_name] = []
return ''
# List of dictionaries in the format:
# {'grouper': 'key', 'list': [list of contents]}.
context[self.var_name] = [
{'grouper': key, 'list': list(val)}
for key, val in
groupby(obj_list, lambda obj: self.resolve_expression(obj, context))
]
return ''
def include_is_allowed(filepath, allowed_include_roots):
filepath = os.path.abspath(filepath)
for root in allowed_include_roots:
if filepath.startswith(root):
return True
return False
class SsiNode(Node):
def __init__(self, filepath, parsed):
self.filepath = filepath
self.parsed = parsed
def render(self, context):
filepath = self.filepath.resolve(context)
if not include_is_allowed(filepath, context.template.engine.allowed_include_roots):
if settings.DEBUG:
return "[Didn't have permission to include file]"
else:
return '' # Fail silently for invalid includes.
try:
with open(filepath, 'r') as fp:
output = fp.read()
except IOError:
output = ''
if self.parsed:
try:
t = Template(output, name=filepath, engine=context.template.engine)
return t.render(context)
except TemplateSyntaxError as e:
if settings.DEBUG:
return "[Included template had syntax error: %s]" % e
else:
return '' # Fail silently for invalid included templates.
return output
class LoadNode(Node):
def render(self, context):
return ''
class NowNode(Node):
def __init__(self, format_string, asvar=None):
self.format_string = format_string
self.asvar = asvar
def render(self, context):
tzinfo = timezone.get_current_timezone() if settings.USE_TZ else None
formatted = date(datetime.now(tz=tzinfo), self.format_string)
if self.asvar:
context[self.asvar] = formatted
return ''
else:
return formatted
class SpacelessNode(Node):
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
from django.utils.html import strip_spaces_between_tags
return strip_spaces_between_tags(self.nodelist.render(context).strip())
class TemplateTagNode(Node):
mapping = {'openblock': BLOCK_TAG_START,
'closeblock': BLOCK_TAG_END,
'openvariable': VARIABLE_TAG_START,
'closevariable': VARIABLE_TAG_END,
'openbrace': SINGLE_BRACE_START,
'closebrace': SINGLE_BRACE_END,
'opencomment': COMMENT_TAG_START,
'closecomment': COMMENT_TAG_END,
}
def __init__(self, tagtype):
self.tagtype = tagtype
def render(self, context):
return self.mapping.get(self.tagtype, '')
class URLNode(Node):
def __init__(self, view_name, args, kwargs, asvar):
self.view_name = view_name
self.args = args
self.kwargs = kwargs
self.asvar = asvar
def render(self, context):
from django.core.urlresolvers import reverse, NoReverseMatch
args = [arg.resolve(context) for arg in self.args]
kwargs = {
smart_text(k, 'ascii'): v.resolve(context)
for k, v in self.kwargs.items()
}
view_name = self.view_name.resolve(context)
try:
current_app = context.request.current_app
except AttributeError:
# Leave only the else block when the deprecation path for
# Context.current_app completes in Django 1.10.
# Can also remove the Context.is_current_app_set property.
if context.is_current_app_set:
current_app = context.current_app
else:
try:
current_app = context.request.resolver_match.namespace
except AttributeError:
current_app = None
# Try to look up the URL twice: once given the view name, and again
# relative to what we guess is the "main" app. If they both fail,
# re-raise the NoReverseMatch unless we're using the
# {% url ... as var %} construct in which case return nothing.
url = ''
try:
url = reverse(view_name, args=args, kwargs=kwargs, current_app=current_app)
except NoReverseMatch:
exc_info = sys.exc_info()
if settings.SETTINGS_MODULE:
project_name = settings.SETTINGS_MODULE.split('.')[0]
try:
url = reverse(project_name + '.' + view_name,
args=args, kwargs=kwargs,
current_app=current_app)
except NoReverseMatch:
if self.asvar is None:
# Re-raise the original exception, not the one with
# the path relative to the project. This makes a
# better error message.
six.reraise(*exc_info)
else:
if self.asvar is None:
raise
if self.asvar:
context[self.asvar] = url
return ''
else:
if context.autoescape:
url = conditional_escape(url)
return url
class VerbatimNode(Node):
def __init__(self, content):
self.content = content
def render(self, context):
return self.content
class WidthRatioNode(Node):
def __init__(self, val_expr, max_expr, max_width, asvar=None):
self.val_expr = val_expr
self.max_expr = max_expr
self.max_width = max_width
self.asvar = asvar
def render(self, context):
try:
value = self.val_expr.resolve(context)
max_value = self.max_expr.resolve(context)
max_width = int(self.max_width.resolve(context))
except VariableDoesNotExist:
return ''
except (ValueError, TypeError):
raise TemplateSyntaxError("widthratio final argument must be a number")
try:
value = float(value)
max_value = float(max_value)
ratio = (value / max_value) * max_width
result = str(int(round(ratio)))
except ZeroDivisionError:
return '0'
except (ValueError, TypeError, OverflowError):
return ''
if self.asvar:
context[self.asvar] = result
return ''
else:
return result
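# Editor's note (illustrative, not part of Django): with value=175,
# max_value=200 and max_width=100, render() computes
# ratio = (175.0 / 200.0) * 100 = 87.5 and returns '88'.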
class WithNode(Node):
def __init__(self, var, name, nodelist, extra_context=None):
self.nodelist = nodelist
# var and name are legacy attributes, being left in case they are used
# by third-party subclasses of this Node.
self.extra_context = extra_context or {}
if name:
self.extra_context[name] = var
def __repr__(self):
return "<WithNode>"
def render(self, context):
values = {key: val.resolve(context) for key, val in
six.iteritems(self.extra_context)}
with context.push(**values):
return self.nodelist.render(context)
@register.tag
def autoescape(parser, token):
"""
Force autoescape behavior for this block.
"""
    # token.split_contents() isn't useful here because this tag doesn't accept variables as arguments
args = token.contents.split()
if len(args) != 2:
raise TemplateSyntaxError("'autoescape' tag requires exactly one argument.")
arg = args[1]
if arg not in ('on', 'off'):
raise TemplateSyntaxError("'autoescape' argument should be 'on' or 'off'")
nodelist = parser.parse(('endautoescape',))
parser.delete_first_token()
return AutoEscapeControlNode((arg == 'on'), nodelist)
@register.tag
def comment(parser, token):
"""
Ignores everything between ``{% comment %}`` and ``{% endcomment %}``.
"""
parser.skip_past('endcomment')
return CommentNode()
@register.tag
def cycle(parser, token):
"""
Cycles among the given strings each time this tag is encountered.
Within a loop, cycles among the given strings each time through
the loop::
{% for o in some_list %}
<tr class="{% cycle 'row1' 'row2' %}">
...
</tr>
{% endfor %}
Outside of a loop, give the values a unique name the first time you call
it, then use that name each successive time through::
<tr class="{% cycle 'row1' 'row2' 'row3' as rowcolors %}">...</tr>
<tr class="{% cycle rowcolors %}">...</tr>
<tr class="{% cycle rowcolors %}">...</tr>
You can use any number of values, separated by spaces. Commas can also
be used to separate values; if a comma is used, the cycle values are
interpreted as literal strings.
The optional flag "silent" can be used to prevent the cycle declaration
from returning any value::
{% for o in some_list %}
{% cycle 'row1' 'row2' as rowcolors silent %}
<tr class="{{ rowcolors }}">{% include "subtemplate.html " %}</tr>
{% endfor %}
"""
# Note: This returns the exact same node on each {% cycle name %} call;
# that is, the node object returned from {% cycle a b c as name %} and the
# one returned from {% cycle name %} are the exact same object. This
# shouldn't cause problems (heh), but if it does, now you know.
#
# Ugly hack warning: This stuffs the named template dict into parser so
# that names are only unique within each template (as opposed to using
# a global variable, which would make cycle names have to be unique across
    # *all* templates).
args = token.split_contents()
if len(args) < 2:
raise TemplateSyntaxError("'cycle' tag requires at least two arguments")
if ',' in args[1]:
warnings.warn(
"The old {% cycle %} syntax with comma-separated arguments is deprecated.",
RemovedInDjango110Warning,
)
# Backwards compatibility: {% cycle a,b %} or {% cycle a,b as foo %}
# case.
args[1:2] = ['"%s"' % arg for arg in args[1].split(",")]
if len(args) == 2:
# {% cycle foo %} case.
name = args[1]
if not hasattr(parser, '_namedCycleNodes'):
raise TemplateSyntaxError("No named cycles in template. '%s' is not defined" % name)
if name not in parser._namedCycleNodes:
raise TemplateSyntaxError("Named cycle '%s' does not exist" % name)
return parser._namedCycleNodes[name]
as_form = False
if len(args) > 4:
# {% cycle ... as foo [silent] %} case.
if args[-3] == "as":
if args[-1] != "silent":
raise TemplateSyntaxError("Only 'silent' flag is allowed after cycle's name, not '%s'." % args[-1])
as_form = True
silent = True
args = args[:-1]
elif args[-2] == "as":
as_form = True
silent = False
if as_form:
name = args[-1]
values = [parser.compile_filter(arg) for arg in args[1:-2]]
node = CycleNode(values, name, silent=silent)
if not hasattr(parser, '_namedCycleNodes'):
parser._namedCycleNodes = {}
parser._namedCycleNodes[name] = node
else:
values = [parser.compile_filter(arg) for arg in args[1:]]
node = CycleNode(values)
return node
@register.tag
def csrf_token(parser, token):
return CsrfTokenNode()
@register.tag
def debug(parser, token):
"""
Outputs a whole load of debugging information, including the current
context and imported modules.
Sample usage::
<pre>
{% debug %}
</pre>
"""
return DebugNode()
@register.tag('filter')
def do_filter(parser, token):
"""
Filters the contents of the block through variable filters.
Filters can also be piped through each other, and they can have
arguments -- just like in variable syntax.
Sample usage::
{% filter force_escape|lower %}
This text will be HTML-escaped, and will appear in lowercase.
{% endfilter %}
Note that the ``escape`` and ``safe`` filters are not acceptable arguments.
Instead, use the ``autoescape`` tag to manage autoescaping for blocks of
template code.
"""
    # token.split_contents() isn't useful here because this tag doesn't accept variables as arguments
_, rest = token.contents.split(None, 1)
filter_expr = parser.compile_filter("var|%s" % (rest))
for func, unused in filter_expr.filters:
filter_name = getattr(func, '_filter_name', None)
if filter_name in ('escape', 'safe'):
raise TemplateSyntaxError('"filter %s" is not permitted. Use the "autoescape" tag instead.' % filter_name)
nodelist = parser.parse(('endfilter',))
parser.delete_first_token()
return FilterNode(filter_expr, nodelist)
@register.tag
def firstof(parser, token):
"""
Outputs the first variable passed that is not False, without escaping.
Outputs nothing if all the passed variables are False.
Sample usage::
{% firstof var1 var2 var3 as myvar %}
This is equivalent to::
{% if var1 %}
{{ var1|safe }}
{% elif var2 %}
{{ var2|safe }}
{% elif var3 %}
{{ var3|safe }}
{% endif %}
but obviously much cleaner!
You can also use a literal string as a fallback value in case all
passed variables are False::
{% firstof var1 var2 var3 "fallback value" %}
If you want to disable auto-escaping of variables you can use::
{% autoescape off %}
{% firstof var1 var2 var3 "<strong>fallback value</strong>" %}
        {% endautoescape %}
Or if only some variables should be escaped, you can use::
{% firstof var1 var2|safe var3 "<strong>fallback value</strong>"|safe %}
"""
bits = token.split_contents()[1:]
asvar = None
if len(bits) < 1:
raise TemplateSyntaxError("'firstof' statement requires at least one argument")
if len(bits) >= 2 and bits[-2] == 'as':
asvar = bits[-1]
bits = bits[:-2]
return FirstOfNode([parser.compile_filter(bit) for bit in bits], asvar)
@register.tag('for')
def do_for(parser, token):
"""
Loops over each item in an array.
For example, to display a list of athletes given ``athlete_list``::
<ul>
{% for athlete in athlete_list %}
<li>{{ athlete.name }}</li>
{% endfor %}
</ul>
You can loop over a list in reverse by using
``{% for obj in list reversed %}``.
You can also unpack multiple values from a two-dimensional array::
{% for key,value in dict.items %}
{{ key }}: {{ value }}
{% endfor %}
The ``for`` tag can take an optional ``{% empty %}`` clause that will
be displayed if the given array is empty or could not be found::
<ul>
{% for athlete in athlete_list %}
<li>{{ athlete.name }}</li>
{% empty %}
<li>Sorry, no athletes in this list.</li>
{% endfor %}
        </ul>
The above is equivalent to -- but shorter, cleaner, and possibly faster
than -- the following::
<ul>
{% if athlete_list %}
{% for athlete in athlete_list %}
<li>{{ athlete.name }}</li>
{% endfor %}
{% else %}
<li>Sorry, no athletes in this list.</li>
{% endif %}
</ul>
The for loop sets a number of variables available within the loop:
========================== ================================================
Variable Description
========================== ================================================
``forloop.counter`` The current iteration of the loop (1-indexed)
``forloop.counter0`` The current iteration of the loop (0-indexed)
``forloop.revcounter`` The number of iterations from the end of the
loop (1-indexed)
``forloop.revcounter0`` The number of iterations from the end of the
loop (0-indexed)
``forloop.first`` True if this is the first time through the loop
``forloop.last`` True if this is the last time through the loop
``forloop.parentloop`` For nested loops, this is the loop "above" the
current one
========================== ================================================
"""
bits = token.split_contents()
if len(bits) < 4:
raise TemplateSyntaxError("'for' statements should have at least four"
" words: %s" % token.contents)
is_reversed = bits[-1] == 'reversed'
in_index = -3 if is_reversed else -2
if bits[in_index] != 'in':
raise TemplateSyntaxError("'for' statements should use the format"
" 'for x in y': %s" % token.contents)
loopvars = re.split(r' *, *', ' '.join(bits[1:in_index]))
for var in loopvars:
if not var or ' ' in var:
raise TemplateSyntaxError("'for' tag received an invalid argument:"
" %s" % token.contents)
sequence = parser.compile_filter(bits[in_index + 1])
nodelist_loop = parser.parse(('empty', 'endfor',))
token = parser.next_token()
if token.contents == 'empty':
nodelist_empty = parser.parse(('endfor',))
parser.delete_first_token()
else:
nodelist_empty = None
return ForNode(loopvars, sequence, is_reversed, nodelist_loop, nodelist_empty)
def do_ifequal(parser, token, negate):
bits = list(token.split_contents())
if len(bits) != 3:
raise TemplateSyntaxError("%r takes two arguments" % bits[0])
end_tag = 'end' + bits[0]
nodelist_true = parser.parse(('else', end_tag))
token = parser.next_token()
if token.contents == 'else':
nodelist_false = parser.parse((end_tag,))
parser.delete_first_token()
else:
nodelist_false = NodeList()
val1 = parser.compile_filter(bits[1])
val2 = parser.compile_filter(bits[2])
return IfEqualNode(val1, val2, nodelist_true, nodelist_false, negate)
@register.tag
def ifequal(parser, token):
"""
Outputs the contents of the block if the two arguments equal each other.
Examples::
{% ifequal user.id comment.user_id %}
...
{% endifequal %}
{% ifnotequal user.id comment.user_id %}
...
{% else %}
...
{% endifnotequal %}
"""
return do_ifequal(parser, token, False)
@register.tag
def ifnotequal(parser, token):
"""
Outputs the contents of the block if the two arguments are not equal.
See ifequal.
"""
return do_ifequal(parser, token, True)
class TemplateLiteral(Literal):
def __init__(self, value, text):
self.value = value
self.text = text # for better error messages
def display(self):
return self.text
def eval(self, context):
return self.value.resolve(context, ignore_failures=True)
class TemplateIfParser(IfParser):
error_class = TemplateSyntaxError
def __init__(self, parser, *args, **kwargs):
self.template_parser = parser
super(TemplateIfParser, self).__init__(*args, **kwargs)
def create_var(self, value):
return TemplateLiteral(self.template_parser.compile_filter(value), value)
@register.tag('if')
def do_if(parser, token):
"""
The ``{% if %}`` tag evaluates a variable, and if that variable is "true"
(i.e., exists, is not empty, and is not a false boolean value), the
contents of the block are output:
::
{% if athlete_list %}
Number of athletes: {{ athlete_list|count }}
{% elif athlete_in_locker_room_list %}
Athletes should be out of the locker room soon!
{% else %}
No athletes.
{% endif %}
In the above, if ``athlete_list`` is not empty, the number of athletes will
be displayed by the ``{{ athlete_list|count }}`` variable.
    As you can see, the ``if`` tag may take one or several ``{% elif %}``
clauses, as well as an ``{% else %}`` clause that will be displayed if all
previous conditions fail. These clauses are optional.
``if`` tags may use ``or``, ``and`` or ``not`` to test a number of
variables or to negate a given variable::
{% if not athlete_list %}
There are no athletes.
{% endif %}
{% if athlete_list or coach_list %}
There are some athletes or some coaches.
{% endif %}
{% if athlete_list and coach_list %}
Both athletes and coaches are available.
{% endif %}
{% if not athlete_list or coach_list %}
There are no athletes, or there are some coaches.
{% endif %}
{% if athlete_list and not coach_list %}
There are some athletes and absolutely no coaches.
{% endif %}
Comparison operators are also available, and the use of filters is also
allowed, for example::
{% if articles|length >= 5 %}...{% endif %}
Arguments and operators _must_ have a space between them, so
``{% if 1>2 %}`` is not a valid if tag.
    All supported operators are: ``or``, ``and``, ``in``, ``not in``,
``==``, ``!=``, ``>``, ``>=``, ``<`` and ``<=``.
Operator precedence follows Python.
"""
# {% if ... %}
bits = token.split_contents()[1:]
condition = TemplateIfParser(parser, bits).parse()
nodelist = parser.parse(('elif', 'else', 'endif'))
conditions_nodelists = [(condition, nodelist)]
token = parser.next_token()
# {% elif ... %} (repeatable)
while token.contents.startswith('elif'):
bits = token.split_contents()[1:]
condition = TemplateIfParser(parser, bits).parse()
nodelist = parser.parse(('elif', 'else', 'endif'))
conditions_nodelists.append((condition, nodelist))
token = parser.next_token()
# {% else %} (optional)
if token.contents == 'else':
nodelist = parser.parse(('endif',))
conditions_nodelists.append((None, nodelist))
token = parser.next_token()
# {% endif %}
assert token.contents == 'endif'
return IfNode(conditions_nodelists)
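# Sketch of the structure built above: for a template like
# {% if a %}A{% elif b %}B{% else %}C{% endif %}, conditions_nodelists is
#     [(<cond a>, <A nodes>), (<cond b>, <B nodes>), (None, <C nodes>)]
# IfNode renders the first nodelist whose condition is truthy; the trailing
# (None, ...) pair is the unconditional else branch.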
@register.tag
def ifchanged(parser, token):
"""
Checks if a value has changed from the last iteration of a loop.
The ``{% ifchanged %}`` block tag is used within a loop. It has two
possible uses.
1. Checks its own rendered contents against its previous state and only
displays the content if it has changed. For example, this displays a
list of days, only displaying the month if it changes::
<h1>Archive for {{ year }}</h1>
{% for date in days %}
{% ifchanged %}<h3>{{ date|date:"F" }}</h3>{% endifchanged %}
<a href="{{ date|date:"M/d"|lower }}/">{{ date|date:"j" }}</a>
{% endfor %}
2. If given one or more variables, check whether any variable has changed.
For example, the following shows the date every time it changes, while
showing the hour if either the hour or the date has changed::
{% for date in days %}
{% ifchanged date.date %} {{ date.date }} {% endifchanged %}
{% ifchanged date.hour date.date %}
{{ date.hour }}
{% endifchanged %}
{% endfor %}
"""
bits = token.split_contents()
nodelist_true = parser.parse(('else', 'endifchanged'))
token = parser.next_token()
if token.contents == 'else':
nodelist_false = parser.parse(('endifchanged',))
parser.delete_first_token()
else:
nodelist_false = NodeList()
values = [parser.compile_filter(bit) for bit in bits[1:]]
return IfChangedNode(nodelist_true, nodelist_false, *values)
@register.tag
def ssi(parser, token):
"""
Outputs the contents of a given file into the page.
Like a simple "include" tag, the ``ssi`` tag includes the contents
of another file -- which must be specified using an absolute path --
in the current page::
{% ssi "/home/html/ljworld.com/includes/right_generic.html" %}
If the optional "parsed" parameter is given, the contents of the included
file are evaluated as template code, with the current context::
{% ssi "/home/html/ljworld.com/includes/right_generic.html" parsed %}
"""
warnings.warn(
"The {% ssi %} tag is deprecated. Use the {% include %} tag instead.",
RemovedInDjango110Warning,
)
bits = token.split_contents()
parsed = False
if len(bits) not in (2, 3):
raise TemplateSyntaxError("'ssi' tag takes one argument: the path to"
" the file to be included")
if len(bits) == 3:
if bits[2] == 'parsed':
parsed = True
else:
raise TemplateSyntaxError("Second (optional) argument to %s tag"
" must be 'parsed'" % bits[0])
filepath = parser.compile_filter(bits[1])
return SsiNode(filepath, parsed)
def find_library(parser, name):
try:
return parser.libraries[name]
except KeyError:
raise TemplateSyntaxError(
"'%s' is not a registered tag library. Must be one of:\n%s" % (
name, "\n".join(sorted(parser.libraries.keys())),
),
)
def load_from_library(library, label, names):
"""
Return a subset of tags and filters from a library.
"""
subset = Library()
for name in names:
found = False
if name in library.tags:
found = True
subset.tags[name] = library.tags[name]
if name in library.filters:
found = True
subset.filters[name] = library.filters[name]
if found is False:
raise TemplateSyntaxError(
"'%s' is not a valid tag or filter in tag library '%s'" % (
name, label,
),
)
return subset
@register.tag
def load(parser, token):
"""
Loads a custom template tag library into the parser.
For example, to load the template tags in
``django/templatetags/news/photos.py``::
{% load news.photos %}
Can also be used to load an individual tag/filter from
a library::
{% load byline from news %}
"""
    # token.split_contents() isn't useful here because this tag doesn't accept variables as arguments
bits = token.contents.split()
if len(bits) >= 4 and bits[-2] == "from":
# from syntax is used; load individual tags from the library
name = bits[-1]
lib = find_library(parser, name)
subset = load_from_library(lib, name, bits[1:-2])
parser.add_library(subset)
else:
# one or more libraries are specified; load and add them to the parser
for name in bits[1:]:
lib = find_library(parser, name)
parser.add_library(lib)
return LoadNode()
@register.tag
def lorem(parser, token):
"""
Creates random Latin text useful for providing test data in templates.
Usage format::
{% lorem [count] [method] [random] %}
``count`` is a number (or variable) containing the number of paragraphs or
words to generate (default is 1).
``method`` is either ``w`` for words, ``p`` for HTML paragraphs, ``b`` for
plain-text paragraph blocks (default is ``b``).
``random`` is the word ``random``, which if given, does not use the common
paragraph (starting "Lorem ipsum dolor sit amet, consectetuer...").
Examples:
* ``{% lorem %}`` will output the common "lorem ipsum" paragraph
* ``{% lorem 3 p %}`` will output the common "lorem ipsum" paragraph
and two random paragraphs each wrapped in HTML ``<p>`` tags
* ``{% lorem 2 w random %}`` will output two random latin words
"""
bits = list(token.split_contents())
tagname = bits[0]
# Random bit
common = bits[-1] != 'random'
if not common:
bits.pop()
# Method bit
if bits[-1] in ('w', 'p', 'b'):
method = bits.pop()
else:
method = 'b'
# Count bit
if len(bits) > 1:
count = bits.pop()
else:
count = '1'
count = parser.compile_filter(count)
if len(bits) != 1:
raise TemplateSyntaxError("Incorrect format for %r tag" % tagname)
return LoremNode(count, method, common)
@register.tag
def now(parser, token):
"""
Displays the date, formatted according to the given string.
Uses the same format as PHP's ``date()`` function; see http://php.net/date
for all the possible values.
Sample usage::
It is {% now "jS F Y H:i" %}
"""
bits = token.split_contents()
asvar = None
if len(bits) == 4 and bits[-2] == 'as':
asvar = bits[-1]
bits = bits[:-2]
if len(bits) != 2:
raise TemplateSyntaxError("'now' statement takes one argument")
format_string = bits[1][1:-1]
return NowNode(format_string, asvar)
@register.tag
def regroup(parser, token):
"""
Regroups a list of alike objects by a common attribute.
This complex tag is best illustrated by use of an example: say that
``people`` is a list of ``Person`` objects that have ``first_name``,
``last_name``, and ``gender`` attributes, and you'd like to display a list
that looks like:
* Male:
* George Bush
* Bill Clinton
* Female:
* Margaret Thatcher
            * Condoleezza Rice
* Unknown:
* Pat Smith
The following snippet of template code would accomplish this dubious task::
{% regroup people by gender as grouped %}
<ul>
{% for group in grouped %}
<li>{{ group.grouper }}
<ul>
{% for item in group.list %}
<li>{{ item }}</li>
{% endfor %}
</ul>
{% endfor %}
</ul>
As you can see, ``{% regroup %}`` populates a variable with a list of
objects with ``grouper`` and ``list`` attributes. ``grouper`` contains the
item that was grouped by; ``list`` contains the list of objects that share
that ``grouper``. In this case, ``grouper`` would be ``Male``, ``Female``
and ``Unknown``, and ``list`` is the list of people with those genders.
Note that ``{% regroup %}`` does not work when the list to be grouped is not
sorted by the key you are grouping by! This means that if your list of
people was not sorted by gender, you'd need to make sure it is sorted
before using it, i.e.::
{% regroup people|dictsort:"gender" by gender as grouped %}
"""
bits = token.split_contents()
if len(bits) != 6:
raise TemplateSyntaxError("'regroup' tag takes five arguments")
target = parser.compile_filter(bits[1])
if bits[2] != 'by':
raise TemplateSyntaxError("second argument to 'regroup' tag must be 'by'")
if bits[4] != 'as':
raise TemplateSyntaxError("next-to-last argument to 'regroup' tag must"
" be 'as'")
var_name = bits[5]
# RegroupNode will take each item in 'target', put it in the context under
# 'var_name', evaluate 'var_name'.'expression' in the current context, and
# group by the resulting value. After all items are processed, it will
# save the final result in the context under 'var_name', thus clearing the
# temporary values. This hack is necessary because the template engine
# doesn't provide a context-aware equivalent of Python's getattr.
expression = parser.compile_filter(var_name +
VARIABLE_ATTRIBUTE_SEPARATOR +
bits[3])
return RegroupNode(target, expression, var_name)
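# Concretely, for {% regroup people by gender as grouped %} the compiled
# expression above is the template lookup "grouped.gender": each item is
# bound to "grouped" in turn and the attribute lookup yields the group key.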
@register.tag
def spaceless(parser, token):
"""
Removes whitespace between HTML tags, including tab and newline characters.
Example usage::
{% spaceless %}
<p>
<a href="foo/">Foo</a>
</p>
{% endspaceless %}
This example would return this HTML::
<p><a href="foo/">Foo</a></p>
Only space between *tags* is normalized -- not space between tags and text.
In this example, the space around ``Hello`` won't be stripped::
{% spaceless %}
<strong>
Hello
</strong>
{% endspaceless %}
"""
nodelist = parser.parse(('endspaceless',))
parser.delete_first_token()
return SpacelessNode(nodelist)
@register.tag
def templatetag(parser, token):
"""
Outputs one of the bits used to compose template tags.
Since the template system has no concept of "escaping", to display one of
the bits used in template tags, you must use the ``{% templatetag %}`` tag.
The argument tells which template bit to output:
================== =======
Argument Outputs
================== =======
``openblock`` ``{%``
``closeblock`` ``%}``
``openvariable`` ``{{``
``closevariable`` ``}}``
``openbrace`` ``{``
``closebrace`` ``}``
``opencomment`` ``{#``
``closecomment`` ``#}``
================== =======
"""
    # token.split_contents() isn't useful here because this tag doesn't accept variables as arguments
bits = token.contents.split()
if len(bits) != 2:
raise TemplateSyntaxError("'templatetag' statement takes one argument")
tag = bits[1]
if tag not in TemplateTagNode.mapping:
raise TemplateSyntaxError("Invalid templatetag argument: '%s'."
" Must be one of: %s" %
(tag, list(TemplateTagNode.mapping)))
return TemplateTagNode(tag)
@register.tag
def url(parser, token):
"""
Returns an absolute URL matching given view with its parameters.
This is a way to define links that aren't tied to a particular URL
configuration::
{% url "path.to.some_view" arg1 arg2 %}
or
{% url "path.to.some_view" name1=value1 name2=value2 %}
The first argument is a path to a view. It can be an absolute Python path
or just ``app_name.view_name`` without the project name if the view is
located inside the project.
Other arguments are space-separated values that will be filled in place of
positional and keyword arguments in the URL. Don't mix positional and
keyword arguments.
All arguments for the URL should be present.
For example if you have a view ``app_name.client`` taking client's id and
the corresponding line in a URLconf looks like this::
('^client/(\d+)/$', 'app_name.client')
and this app's URLconf is included into the project's URLconf under some
path::
('^clients/', include('project_name.app_name.urls'))
then in a template you can create a link for a certain client like this::
{% url "app_name.client" client.id %}
The URL will look like ``/clients/client/123/``.
The first argument can also be a named URL instead of the Python path to
the view callable. For example if the URLconf entry looks like this::
url('^client/(\d+)/$', name='client-detail-view')
then in the template you can use::
{% url "client-detail-view" client.id %}
There is even another possible value type for the first argument. It can be
the name of a template variable that will be evaluated to obtain the view
name or the URL name, e.g.::
{% with view_path="app_name.client" %}
{% url view_path client.id %}
{% endwith %}
or,
{% with url_name="client-detail-view" %}
{% url url_name client.id %}
{% endwith %}
"""
bits = token.split_contents()
if len(bits) < 2:
raise TemplateSyntaxError("'%s' takes at least one argument"
" (path to a view)" % bits[0])
viewname = parser.compile_filter(bits[1])
args = []
kwargs = {}
asvar = None
bits = bits[2:]
if len(bits) >= 2 and bits[-2] == 'as':
asvar = bits[-1]
bits = bits[:-2]
if len(bits):
for bit in bits:
match = kwarg_re.match(bit)
if not match:
raise TemplateSyntaxError("Malformed arguments to url tag")
name, value = match.groups()
if name:
kwargs[name] = parser.compile_filter(value)
else:
args.append(parser.compile_filter(value))
return URLNode(viewname, args, kwargs, asvar)
@register.tag
def verbatim(parser, token):
"""
Stops the template engine from rendering the contents of this block tag.
Usage::
{% verbatim %}
{% don't process this %}
{% endverbatim %}
You can also designate a specific closing tag block (allowing the
unrendered use of ``{% endverbatim %}``)::
{% verbatim myblock %}
...
{% endverbatim myblock %}
"""
nodelist = parser.parse(('endverbatim',))
parser.delete_first_token()
return VerbatimNode(nodelist.render(Context()))
@register.tag
def widthratio(parser, token):
"""
For creating bar charts and such, this tag calculates the ratio of a given
value to a maximum value, and then applies that ratio to a constant.
For example::
<img src="bar.png" alt="Bar"
height="10" width="{% widthratio this_value max_value max_width %}" />
If ``this_value`` is 175, ``max_value`` is 200, and ``max_width`` is 100,
the image in the above example will be 88 pixels wide
(because 175/200 = .875; .875 * 100 = 87.5 which is rounded up to 88).
In some cases you might want to capture the result of widthratio in a
variable. It can be useful for instance in a blocktrans like this::
{% widthratio this_value max_value max_width as width %}
{% blocktrans %}The width is: {{ width }}{% endblocktrans %}
"""
bits = token.split_contents()
if len(bits) == 4:
tag, this_value_expr, max_value_expr, max_width = bits
asvar = None
elif len(bits) == 6:
tag, this_value_expr, max_value_expr, max_width, as_, asvar = bits
if as_ != 'as':
raise TemplateSyntaxError("Invalid syntax in widthratio tag. Expecting 'as' keyword")
else:
raise TemplateSyntaxError("widthratio takes at least three arguments")
return WidthRatioNode(parser.compile_filter(this_value_expr),
parser.compile_filter(max_value_expr),
parser.compile_filter(max_width),
asvar=asvar)
@register.tag('with')
def do_with(parser, token):
"""
Adds one or more values to the context (inside of this block) for caching
and easy access.
For example::
{% with total=person.some_sql_method %}
{{ total }} object{{ total|pluralize }}
{% endwith %}
Multiple values can be added to the context::
{% with foo=1 bar=2 %}
...
{% endwith %}
The legacy format of ``{% with person.some_sql_method as total %}`` is
still accepted.
"""
bits = token.split_contents()
remaining_bits = bits[1:]
extra_context = token_kwargs(remaining_bits, parser, support_legacy=True)
if not extra_context:
raise TemplateSyntaxError("%r expected at least one variable "
"assignment" % bits[0])
if remaining_bits:
raise TemplateSyntaxError("%r received an invalid token: %r" %
(bits[0], remaining_bits[0]))
nodelist = parser.parse(('endwith',))
parser.delete_first_token()
return WithNode(None, None, nodelist, extra_context=extra_context)
|
swarna-k/MyDiary
|
refs/heads/master
|
flask/lib/python2.7/site-packages/pytz/lazy.py
|
514
|
from threading import RLock
try:
    from UserDict import DictMixin
except ImportError:
    # Python 3: Mapping lives in collections.abc (and was removed from the
    # collections top level in Python 3.10)
    from collections.abc import Mapping as DictMixin
# With lazy loading, we might end up with multiple threads triggering
# it at the same time. We need a lock.
_fill_lock = RLock()
class LazyDict(DictMixin):
"""Dictionary populated on first use."""
data = None
def __getitem__(self, key):
if self.data is None:
_fill_lock.acquire()
try:
if self.data is None:
self._fill()
finally:
_fill_lock.release()
return self.data[key.upper()]
def __contains__(self, key):
if self.data is None:
_fill_lock.acquire()
try:
if self.data is None:
self._fill()
finally:
_fill_lock.release()
return key in self.data
def __iter__(self):
if self.data is None:
_fill_lock.acquire()
try:
if self.data is None:
self._fill()
finally:
_fill_lock.release()
return iter(self.data)
def __len__(self):
if self.data is None:
_fill_lock.acquire()
try:
if self.data is None:
self._fill()
finally:
_fill_lock.release()
return len(self.data)
def keys(self):
if self.data is None:
_fill_lock.acquire()
try:
if self.data is None:
self._fill()
finally:
_fill_lock.release()
return self.data.keys()
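# Minimal usage sketch (hypothetical subclass, not part of pytz): concrete
# subclasses provide _fill() to populate self.data on first access. Note that
# __getitem__ upper-cases the key, so _fill() must store upper-case keys.
#
# class _Example(LazyDict):
#     def _fill(self):
#         self.data = {'UTC': 0}
#
# _Example()['utc']  # triggers _fill() once, then returns 0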
class LazyList(list):
"""List populated on first use."""
_props = [
'__str__', '__repr__', '__unicode__',
'__hash__', '__sizeof__', '__cmp__',
'__lt__', '__le__', '__eq__', '__ne__', '__gt__', '__ge__',
'append', 'count', 'index', 'extend', 'insert', 'pop', 'remove',
'reverse', 'sort', '__add__', '__radd__', '__iadd__', '__mul__',
'__rmul__', '__imul__', '__contains__', '__len__', '__nonzero__',
'__getitem__', '__setitem__', '__delitem__', '__iter__',
'__reversed__', '__getslice__', '__setslice__', '__delslice__']
def __new__(cls, fill_iter=None):
if fill_iter is None:
return list()
# We need a new class as we will be dynamically messing with its
# methods.
class LazyList(list):
pass
fill_iter = [fill_iter]
def lazy(name):
def _lazy(self, *args, **kw):
_fill_lock.acquire()
try:
if len(fill_iter) > 0:
list.extend(self, fill_iter.pop())
for method_name in cls._props:
delattr(LazyList, method_name)
finally:
_fill_lock.release()
return getattr(list, name)(self, *args, **kw)
return _lazy
for name in cls._props:
setattr(LazyList, name, lazy(name))
new_list = LazyList()
return new_list
# Not all versions of Python declare the same magic methods.
# Filter out properties that don't exist in this version of Python
# from the list.
LazyList._props = [prop for prop in LazyList._props if hasattr(list, prop)]
class LazySet(set):
"""Set populated on first use."""
_props = (
'__str__', '__repr__', '__unicode__',
'__hash__', '__sizeof__', '__cmp__',
'__lt__', '__le__', '__eq__', '__ne__', '__gt__', '__ge__',
'__contains__', '__len__', '__nonzero__',
'__getitem__', '__setitem__', '__delitem__', '__iter__',
'__sub__', '__and__', '__xor__', '__or__',
'__rsub__', '__rand__', '__rxor__', '__ror__',
'__isub__', '__iand__', '__ixor__', '__ior__',
'add', 'clear', 'copy', 'difference', 'difference_update',
'discard', 'intersection', 'intersection_update', 'isdisjoint',
'issubset', 'issuperset', 'pop', 'remove',
'symmetric_difference', 'symmetric_difference_update',
'union', 'update')
def __new__(cls, fill_iter=None):
if fill_iter is None:
return set()
class LazySet(set):
pass
fill_iter = [fill_iter]
def lazy(name):
def _lazy(self, *args, **kw):
_fill_lock.acquire()
try:
if len(fill_iter) > 0:
for i in fill_iter.pop():
set.add(self, i)
for method_name in cls._props:
delattr(LazySet, method_name)
finally:
_fill_lock.release()
return getattr(set, name)(self, *args, **kw)
return _lazy
for name in cls._props:
setattr(LazySet, name, lazy(name))
new_set = LazySet()
return new_set
# Not all versions of Python declare the same magic methods.
# Filter out properties that don't exist in this version of Python
# from the list.
LazySet._props = [prop for prop in LazySet._props if hasattr(set, prop)]
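# Usage sketch: construction consumes nothing; the first proxied method call
# fills the container under _fill_lock and deletes the lazy wrappers, so all
# later calls go straight to the builtin list/set methods.
#
# lazy = LazyList(iter(range(3)))  # the iterator is not consumed yet
# len(lazy)                        # fills the list -> 3
# lazy[0]                          # plain list access from here on -> 0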
|
mick-d/nipype_source
|
refs/heads/master
|
nipype/interfaces/afni/tests/test_auto_Merge.py
|
5
|
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from nipype.testing import assert_equal
from nipype.interfaces.afni.preprocess import Merge
def test_Merge_inputs():
input_map = dict(args=dict(argstr='%s',
),
blurfwhm=dict(argstr='-1blur_fwhm %d',
units='mm',
),
doall=dict(argstr='-doall',
),
environ=dict(nohash=True,
usedefault=True,
),
ignore_exception=dict(nohash=True,
usedefault=True,
),
in_files=dict(argstr='%s',
copyfile=False,
mandatory=True,
position=-1,
),
out_file=dict(argstr='-prefix %s',
name_source='in_file',
name_template='%s_merge',
),
outputtype=dict(),
terminal_output=dict(mandatory=True,
nohash=True,
),
)
inputs = Merge.input_spec()
for key, metadata in input_map.items():
for metakey, value in metadata.items():
yield assert_equal, getattr(inputs.traits()[key], metakey), value
def test_Merge_outputs():
output_map = dict(out_file=dict(),
)
outputs = Merge.output_spec()
for key, metadata in output_map.items():
for metakey, value in metadata.items():
yield assert_equal, getattr(outputs.traits()[key], metakey), value
|
demarle/VTK
|
refs/heads/master
|
IO/EnSight/Testing/Python/EnSight6ElementsBin.py
|
17
|
#!/usr/bin/env python
import vtk
from vtk.test import Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
# create a rendering window and renderer
ren1 = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren1)
renWin.StereoCapableWindowOn()
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
reader = vtk.vtkGenericEnSightReader()
reader.SetCaseFileName("" + str(VTK_DATA_ROOT) + "/Data/EnSight/elements6-bin.case")
reader.UpdateInformation()
reader.GetOutputInformation(0).Set(vtk.vtkStreamingDemandDrivenPipeline.UPDATE_TIME_STEP(), 0.1)
geom = vtk.vtkGeometryFilter()
geom.SetInputConnection(reader.GetOutputPort())
mapper = vtk.vtkHierarchicalPolyDataMapper()
mapper.SetInputConnection(geom.GetOutputPort())
mapper.SetColorModeToMapScalars()
mapper.SetScalarModeToUsePointFieldData()
mapper.ColorByArrayComponent("pointTensors",0)
mapper.SetScalarRange(0,300)
actor = vtk.vtkActor()
actor.SetMapper(mapper)
# assign our actor to the renderer
ren1.AddActor(actor)
# enable user interface interactor
iren.Initialize()
renWin.Render()
# reset the default executive prototype before exiting
reader.SetDefaultExecutivePrototype(None)
# --- end of script --
|
TimothyVandenbrande/ansible-modules-extras
|
refs/heads/devel
|
packaging/os/zypper.py
|
6
|
#!/usr/bin/python -tt
# -*- coding: utf-8 -*-
# (c) 2013, Patrick Callahan <pmc@patrickcallahan.com>
# based on
# openbsd_pkg
# (c) 2013
# Patrik Lundin <patrik.lundin.swe@gmail.com>
#
# yum
# (c) 2012, Red Hat, Inc
# Written by Seth Vidal <skvidal at fedoraproject.org>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from xml.dom.minidom import parseString as parseXML
import re
DOCUMENTATION = '''
---
module: zypper
author:
- "Patrick Callahan (@dirtyharrycallahan)"
- "Alexander Gubin (@alxgu)"
- "Thomas O'Donnell (@andytom)"
- "Robin Roth (@robinro)"
- "Andrii Radyk (@AnderEnder)"
version_added: "1.2"
short_description: Manage packages on SUSE and openSUSE
description:
- Manage packages on SUSE and openSUSE using the zypper and rpm tools.
options:
name:
description:
- Package name C(name) or package specifier.
- Can include a version like C(name=1.0), C(name>3.4) or C(name<=2.7). If a version is given, C(oldpackage) is implied and zypper is allowed to update the package within the version range given.
- You can also pass a url or a local path to a rpm file.
- When using state=latest, this can be '*', which updates all installed packages.
required: true
aliases: [ 'pkg' ]
state:
description:
- C(present) will make sure the package is installed.
C(latest) will make sure the latest version of the package is installed.
C(absent) will make sure the specified package is not installed.
required: false
choices: [ present, latest, absent ]
default: "present"
type:
description:
- The type of package to be operated on.
required: false
choices: [ package, patch, pattern, product, srcpackage, application ]
default: "package"
version_added: "2.0"
disable_gpg_check:
description:
      - Whether to disable the GPG signature checking of the package
        being installed. Has an effect only if state is
I(present) or I(latest).
required: false
default: "no"
choices: [ "yes", "no" ]
disable_recommends:
version_added: "1.8"
description:
      - Corresponds to the C(--no-recommends) option for I(zypper). The default (C(yes)) passes C(--no-recommends), so recommended packages are not installed; set C(no) to let zypper install recommended packages as well.
required: false
default: "yes"
choices: [ "yes", "no" ]
force:
version_added: "2.2"
description:
      - Adds C(--force) option to I(zypper). Allows downgrading packages and changing the vendor or architecture.
required: false
default: "no"
choices: [ "yes", "no" ]
update_cache:
version_added: "2.2"
description:
- Run the equivalent of C(zypper refresh) before the operation.
required: false
default: "no"
choices: [ "yes", "no" ]
aliases: [ "refresh" ]
oldpackage:
version_added: "2.2"
description:
      - Adds C(--oldpackage) option to I(zypper). Allows downgrading packages with fewer side effects than force. This is implied as soon as a version is specified as part of the package name.
required: false
default: "no"
choices: [ "yes", "no" ]
# informational: requirements for nodes
requirements:
- "zypper >= 1.0 # included in openSuSE >= 11.1 or SuSE Linux Enterprise Server/Desktop >= 11.0"
- rpm
'''
EXAMPLES = '''
# Install "nmap"
- zypper: name=nmap state=present
# Install apache2 with recommended packages
- zypper: name=apache2 state=present disable_recommends=no
# Apply a given patch
- zypper: name=openSUSE-2016-128 state=present type=patch
# Remove the "nmap" package
- zypper: name=nmap state=absent
# Install the nginx rpm from a remote repo
- zypper: name=http://nginx.org/packages/sles/12/x86_64/RPMS/nginx-1.8.0-1.sles12.ngx.x86_64.rpm state=present
# Install local rpm file
- zypper: name=/tmp/fancy-software.rpm state=present
# Update all packages
- zypper: name=* state=latest
# Apply all available patches
- zypper: name=* state=latest type=patch
# Refresh repositories and update package "openssl"
- zypper: name=openssl state=present update_cache=yes
# Install specific version (possible comparisons: <, >, <=, >=, =)
- zypper: name=docker>=1.10 state=installed
'''
def split_name_version(name):
"""splits of the package name and desired version
example formats:
- docker>=1.10
- apache=2.4
Allowed version specifiers: <, >, <=, >=, =
Allowed version format: [0-9.-]*
Also allows a prefix indicating remove "-", "~" or install "+"
"""
prefix = ''
if name[0] in ['-', '~', '+']:
prefix = name[0]
name = name[1:]
    # two-character operators must be tried before single-character ones,
    # otherwise e.g. 'docker<=1.10' would split as ('docker<', '=1.10')
    version_check = re.compile('^(.*?)((?:<=|>=|<|>|=)[0-9.-]*)?$')
try:
reres = version_check.match(name)
name, version = reres.groups()
return prefix, name, version
except:
return prefix, name, None
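# Examples of the split performed above (sketch):
#   split_name_version('docker>=1.10')  ->  ('',  'docker',  '>=1.10')
#   split_name_version('-apache2')      ->  ('-', 'apache2', None)
#   split_name_version('nmap')          ->  ('',  'nmap',    None)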
def get_want_state(m, names, remove=False):
packages_install = {}
packages_remove = {}
urls = []
for name in names:
if '://' in name or name.endswith('.rpm'):
urls.append(name)
else:
prefix, pname, version = split_name_version(name)
if prefix in ['-', '~']:
packages_remove[pname] = version
elif prefix == '+':
packages_install[pname] = version
else:
if remove:
packages_remove[pname] = version
else:
packages_install[pname] = version
return packages_install, packages_remove, urls
def get_installed_state(m, packages):
"get installed state of packages"
cmd = get_cmd(m, 'search')
cmd.extend(['--match-exact', '--details', '--installed-only'])
cmd.extend(packages)
return parse_zypper_xml(m, cmd, fail_not_found=False)[0]
def parse_zypper_xml(m, cmd, fail_not_found=True, packages=None):
rc, stdout, stderr = m.run_command(cmd, check_rc=False)
dom = parseXML(stdout)
if rc == 104:
# exit code 104 is ZYPPER_EXIT_INF_CAP_NOT_FOUND (no packages found)
if fail_not_found:
errmsg = dom.getElementsByTagName('message')[-1].childNodes[0].data
m.fail_json(msg=errmsg, rc=rc, stdout=stdout, stderr=stderr, cmd=cmd)
else:
return {}, rc, stdout, stderr
elif rc in [0, 106, 103]:
# zypper exit codes
# 0: success
# 106: signature verification failed
# 103: zypper was upgraded, run same command again
        firstrun = packages is None
        if firstrun:
            packages = {}
solvable_list = dom.getElementsByTagName('solvable')
for solvable in solvable_list:
name = solvable.getAttribute('name')
packages[name] = {}
packages[name]['version'] = solvable.getAttribute('edition')
packages[name]['oldversion'] = solvable.getAttribute('edition-old')
status = solvable.getAttribute('status')
packages[name]['installed'] = status == "installed"
packages[name]['group'] = solvable.parentNode.nodeName
if rc == 103 and firstrun:
# if this was the first run and it failed with 103
# run zypper again with the same command to complete update
return parse_zypper_xml(m, cmd, fail_not_found=fail_not_found, packages=packages)
return packages, rc, stdout, stderr
m.fail_json(msg='Zypper run command failed with return code %s.'%rc, rc=rc, stdout=stdout, stderr=stderr, cmd=cmd)
def get_cmd(m, subcommand):
"puts together the basic zypper command arguments with those passed to the module"
is_install = subcommand in ['install', 'update', 'patch']
is_refresh = subcommand == 'refresh'
cmd = ['/usr/bin/zypper', '--quiet', '--non-interactive', '--xmlout']
# add global options before zypper command
if (is_install or is_refresh) and m.params['disable_gpg_check']:
cmd.append('--no-gpg-checks')
cmd.append(subcommand)
if subcommand != 'patch' and not is_refresh:
cmd.extend(['--type', m.params['type']])
if m.check_mode and subcommand != 'search':
cmd.append('--dry-run')
if is_install:
cmd.append('--auto-agree-with-licenses')
if m.params['disable_recommends']:
cmd.append('--no-recommends')
if m.params['force']:
cmd.append('--force')
if m.params['oldpackage']:
cmd.append('--oldpackage')
return cmd
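# For example, with the module's default parameters, get_cmd(m, 'install')
# yields (sketch):
#   ['/usr/bin/zypper', '--quiet', '--non-interactive', '--xmlout', 'install',
#    '--type', 'package', '--auto-agree-with-licenses', '--no-recommends']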
def set_diff(m, retvals, result):
# TODO: if there is only one package, set before/after to version numbers
packages = {'installed': [], 'removed': [], 'upgraded': []}
for p in result:
group = result[p]['group']
if group == 'to-upgrade':
versions = ' (' + result[p]['oldversion'] + ' => ' + result[p]['version'] + ')'
packages['upgraded'].append(p + versions)
elif group == 'to-install':
packages['installed'].append(p)
elif group == 'to-remove':
packages['removed'].append(p)
output = ''
for state in packages:
if packages[state]:
output += state + ': ' + ', '.join(packages[state]) + '\n'
if 'diff' not in retvals:
retvals['diff'] = {}
if 'prepared' not in retvals['diff']:
retvals['diff']['prepared'] = output
else:
retvals['diff']['prepared'] += '\n' + output
def package_present(m, name, want_latest):
"install and update (if want_latest) the packages in name_install, while removing the packages in name_remove"
retvals = {'rc': 0, 'stdout': '', 'stderr': ''}
name_install, name_remove, urls = get_want_state(m, name)
# if a version string is given, pass it to zypper
install_version = [p+name_install[p] for p in name_install if name_install[p]]
remove_version = [p+name_remove[p] for p in name_remove if name_remove[p]]
# add oldpackage flag when a version is given to allow downgrades
if install_version or remove_version:
m.params['oldpackage'] = True
if not want_latest:
# for state=present: filter out already installed packages
install_and_remove = name_install.copy()
install_and_remove.update(name_remove)
prerun_state = get_installed_state(m, install_and_remove)
# generate lists of packages to install or remove
name_install = [p for p in name_install if p not in prerun_state]
name_remove = [p for p in name_remove if p in prerun_state]
if not any((name_install, name_remove, urls, install_version, remove_version)):
# nothing to install/remove and nothing to update
return None, retvals
# zypper install also updates packages
cmd = get_cmd(m, 'install')
cmd.append('--')
cmd.extend(urls)
# pass packages with version information
cmd.extend(install_version)
cmd.extend(['-%s' % p for p in remove_version])
# allow for + or - prefixes in install/remove lists
# do this in one zypper run to allow for dependency-resolution
# for example "-exim postfix" runs without removing packages depending on mailserver
cmd.extend(name_install)
cmd.extend(['-%s' % p for p in name_remove])
retvals['cmd'] = cmd
result, retvals['rc'], retvals['stdout'], retvals['stderr'] = parse_zypper_xml(m, cmd)
return result, retvals
def package_update_all(m):
"run update or patch on all available packages"
retvals = {'rc': 0, 'stdout': '', 'stderr': ''}
if m.params['type'] == 'patch':
cmdname = 'patch'
else:
cmdname = 'update'
cmd = get_cmd(m, cmdname)
retvals['cmd'] = cmd
result, retvals['rc'], retvals['stdout'], retvals['stderr'] = parse_zypper_xml(m, cmd)
return result, retvals
def package_absent(m, name):
"remove the packages in name"
retvals = {'rc': 0, 'stdout': '', 'stderr': ''}
# Get package state
name_install, name_remove, urls = get_want_state(m, name, remove=True)
if name_install:
m.fail_json(msg="Can not combine '+' prefix with state=remove/absent.")
if urls:
m.fail_json(msg="Can not remove via URL.")
if m.params['type'] == 'patch':
m.fail_json(msg="Can not remove patches.")
prerun_state = get_installed_state(m, name_remove)
remove_version = [p+name_remove[p] for p in name_remove if name_remove[p]]
name_remove = [p for p in name_remove if p in prerun_state]
if not name_remove and not remove_version:
return None, retvals
cmd = get_cmd(m, 'remove')
cmd.extend(name_remove)
cmd.extend(remove_version)
retvals['cmd'] = cmd
result, retvals['rc'], retvals['stdout'], retvals['stderr'] = parse_zypper_xml(m, cmd)
return result, retvals
def repo_refresh(m):
"update the repositories"
retvals = {'rc': 0, 'stdout': '', 'stderr': ''}
cmd = get_cmd(m, 'refresh')
retvals['cmd'] = cmd
result, retvals['rc'], retvals['stdout'], retvals['stderr'] = parse_zypper_xml(m, cmd)
return retvals
# ===========================================
# Main control flow
def main():
module = AnsibleModule(
argument_spec = dict(
name = dict(required=True, aliases=['pkg'], type='list'),
state = dict(required=False, default='present', choices=['absent', 'installed', 'latest', 'present', 'removed']),
type = dict(required=False, default='package', choices=['package', 'patch', 'pattern', 'product', 'srcpackage', 'application']),
disable_gpg_check = dict(required=False, default='no', type='bool'),
disable_recommends = dict(required=False, default='yes', type='bool'),
force = dict(required=False, default='no', type='bool'),
update_cache = dict(required=False, aliases=['refresh'], default='no', type='bool'),
oldpackage = dict(required=False, default='no', type='bool'),
),
supports_check_mode = True
)
name = module.params['name']
state = module.params['state']
update_cache = module.params['update_cache']
# remove empty strings from package list
name = filter(None, name)
# Refresh repositories
if update_cache:
retvals = repo_refresh(module)
if retvals['rc'] != 0:
module.fail_json(msg="Zypper refresh run failed.", **retvals)
# Perform requested action
if name == ['*'] and state == 'latest':
packages_changed, retvals = package_update_all(module)
else:
if state in ['absent', 'removed']:
packages_changed, retvals = package_absent(module, name)
elif state in ['installed', 'present', 'latest']:
packages_changed, retvals = package_present(module, name, state == 'latest')
retvals['changed'] = retvals['rc'] == 0 and bool(packages_changed)
if module._diff:
set_diff(module, retvals, packages_changed)
if retvals['rc'] != 0:
module.fail_json(msg="Zypper run failed.", **retvals)
if not retvals['changed']:
del retvals['stdout']
del retvals['stderr']
module.exit_json(name=name, state=state, update_cache=update_cache, **retvals)
# import module snippets
from ansible.module_utils.basic import AnsibleModule
if __name__ == "__main__":
main()
|
GREO/GNU-Radio
|
refs/heads/master
|
grc/grc_gnuradio/wxgui/panel.py
|
33
|
# Copyright 2009 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
import wx
class Panel(wx.Panel):
def __init__(self, parent, orient=wx.VERTICAL):
wx.Panel.__init__(self, parent)
self._box = wx.BoxSizer(orient)
self._grid = wx.GridBagSizer(5, 5)
self.Add(self._grid)
self.SetSizer(self._box)
def GetWin(self): return self
def Add(self, win):
"""
Add a window to the wx vbox.
@param win the wx window
"""
self._box.Add(win, 0, wx.EXPAND)
def GridAdd(self, win, row, col, row_span=1, col_span=1):
"""
Add a window to the wx grid at the given position.
@param win the wx window
@param row the row specification (integer >= 0)
@param col the column specification (integer >= 0)
@param row_span the row span specification (integer >= 1)
@param col_span the column span specification (integer >= 1)
"""
self._grid.Add(win, wx.GBPosition(row, col), wx.GBSpan(row_span, col_span), wx.EXPAND)
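# Usage sketch (hypothetical parent frame; names are illustrative only):
#
# app = wx.App()
# frame = wx.Frame(None)
# panel = Panel(frame)
# panel.Add(wx.StaticText(panel, label='status'))          # stacked in the box sizer
# panel.GridAdd(wx.Button(panel, label='go'), 0, 1, 1, 2)  # row 0, col 1, spans 2 cols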
|
mlaitinen/odoo
|
refs/heads/8.0
|
openerp/cli/deploy.py
|
369
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import os
import requests
import sys
import tempfile
import zipfile
from . import Command
class Deploy(Command):
"""Deploy a module on an Odoo instance"""
def __init__(self):
super(Deploy, self).__init__()
self.session = requests.session()
def deploy_module(self, module_path, url, login, password, db='', force=False):
url = url.rstrip('/')
self.authenticate(url, login, password, db)
module_file = self.zip_module(module_path)
try:
return self.upload_module(url, module_file, force=force)
finally:
os.remove(module_file)
def upload_module(self, server, module_file, force=False):
print("Uploading module file...")
url = server + '/base_import_module/upload'
files = dict(mod_file=open(module_file, 'rb'))
force = '1' if force else ''
res = self.session.post(url, files=files, data=dict(force=force))
if res.status_code != 200:
raise Exception("Could not authenticate on server '%s'" % server)
return res.text
def authenticate(self, server, login, password, db=''):
print("Authenticating on server '%s' ..." % server)
# Fixate session with a given db if any
self.session.get(server + '/web/login', params=dict(db=db))
args = dict(login=login, password=password, db=db)
res = self.session.post(server + '/base_import_module/login', args)
if res.status_code == 404:
raise Exception("The server '%s' does not have the 'base_import_module' installed." % server)
elif res.status_code != 200:
raise Exception(res.text)
def zip_module(self, path):
path = os.path.abspath(path)
if not os.path.isdir(path):
raise Exception("Could not find module directory '%s'" % path)
container, module_name = os.path.split(path)
temp = tempfile.mktemp(suffix='.zip')
try:
print("Zipping module directory...")
with zipfile.ZipFile(temp, 'w') as zfile:
for root, dirs, files in os.walk(path):
for file in files:
file_path = os.path.join(root, file)
zfile.write(file_path, file_path.split(container).pop())
return temp
except Exception:
os.remove(temp)
raise
def run(self, cmdargs):
parser = argparse.ArgumentParser(
prog="%s deploy" % sys.argv[0].split(os.path.sep)[-1],
description=self.__doc__
)
parser.add_argument('path', help="Path of the module to deploy")
parser.add_argument('url', nargs='?', help='Url of the server (default=http://localhost:8069)', default="http://localhost:8069")
parser.add_argument('--db', dest='db', help='Database to use if server does not use db-filter.')
parser.add_argument('--login', dest='login', default="admin", help='Login (default=admin)')
parser.add_argument('--password', dest='password', default="admin", help='Password (default=admin)')
parser.add_argument('--verify-ssl', action='store_true', help='Verify SSL certificate')
parser.add_argument('--force', action='store_true', help='Force init even if module is already installed. (will update `noupdate="1"` records)')
if not cmdargs:
sys.exit(parser.print_help())
args = parser.parse_args(args=cmdargs)
if not args.verify_ssl:
self.session.verify = False
try:
if not args.url.startswith(('http://', 'https://')):
args.url = 'https://%s' % args.url
result = self.deploy_module(args.path, args.url, args.login, args.password, args.db, force=args.force)
print(result)
except Exception, e:
sys.exit("ERROR: %s" % e)
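# Typical invocation (hypothetical host and module path):
#   odoo.py deploy ./my_module https://odoo.example.com --login admin --password admin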
|
99cloud/keystone_register
|
refs/heads/master
|
horizon/models.py
|
172
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Stub file to work around django bug: https://code.djangoproject.com/ticket/7198
"""
|
mlaitinen/odoo
|
refs/heads/8.0
|
addons/payment_transfer/controllers/main.py
|
395
|
# -*- coding: utf-8 -*-
import logging
import pprint
import werkzeug
from openerp import http, SUPERUSER_ID
from openerp.http import request
_logger = logging.getLogger(__name__)
class OgoneController(http.Controller):
_accept_url = '/payment/transfer/feedback'
@http.route([
'/payment/transfer/feedback',
], type='http', auth='none')
def transfer_form_feedback(self, **post):
cr, uid, context = request.cr, SUPERUSER_ID, request.context
_logger.info('Beginning form_feedback with post data %s', pprint.pformat(post)) # debug
request.registry['payment.transaction'].form_feedback(cr, uid, post, 'transfer', context)
return werkzeug.utils.redirect(post.pop('return_url', '/'))
|
jur/linux
|
refs/heads/master
|
scripts/tracing/draw_functrace.py
|
14679
|
#!/usr/bin/python
"""
Copyright 2008 (c) Frederic Weisbecker <fweisbec@gmail.com>
Licensed under the terms of the GNU GPL License version 2
This script parses a trace provided by the function tracer in
kernel/trace/trace_functions.c
The resulting trace is processed into a tree to give a more readable
view of the call stack, drawn as a textual but hierarchical tree of
calls. Only the functions' names and the call time are provided.
Usage:
Be sure that you have CONFIG_FUNCTION_TRACER
# mount -t debugfs nodev /sys/kernel/debug
# echo function > /sys/kernel/debug/tracing/current_tracer
$ cat /sys/kernel/debug/tracing/trace_pipe > ~/raw_trace_func
Wait a while, but not too long: the script is a bit slow.
Break the pipe (Ctrl + Z)
$ scripts/draw_functrace.py < raw_trace_func > draw_functrace
Then you have your drawn trace in draw_functrace
"""
import sys, re
class CallTree:
""" This class provides a tree representation of the functions
call stack. If a function has no parent in the kernel (interrupt,
syscall, kernel thread...) then it is attached to a virtual parent
called ROOT.
"""
ROOT = None
def __init__(self, func, time = None, parent = None):
self._func = func
self._time = time
if parent is None:
self._parent = CallTree.ROOT
else:
self._parent = parent
self._children = []
def calls(self, func, calltime):
""" If a function calls another one, call this method to insert it
into the tree at the appropriate place.
@return: A reference to the newly created child node.
"""
child = CallTree(func, calltime, self)
self._children.append(child)
return child
def getParent(self, func):
""" Retrieve the last parent of the current node that
	has the name given by func. If no parent with that name
	exists, create the function as a new child of root.
@return: A reference to the parent.
"""
tree = self
while tree != CallTree.ROOT and tree._func != func:
tree = tree._parent
if tree == CallTree.ROOT:
child = CallTree.ROOT.calls(func, None)
return child
return tree
def __repr__(self):
return self.__toString("", True)
def __toString(self, branch, lastChild):
if self._time is not None:
s = "%s----%s (%s)\n" % (branch, self._func, self._time)
else:
s = "%s----%s\n" % (branch, self._func)
i = 0
if lastChild:
branch = branch[:-1] + " "
while i < len(self._children):
if i != len(self._children) - 1:
s += "%s" % self._children[i].__toString(branch +\
" |", False)
else:
s += "%s" % self._children[i].__toString(branch +\
" |", True)
i += 1
return s
class BrokenLineException(Exception):
"""If the last line is not complete because of the pipe breakage,
we want to stop the processing and ignore this line.
"""
pass
class CommentLineException(Exception):
""" If the line is a comment (as in the beginning of the trace file),
just ignore it.
"""
pass
def parseLine(line):
line = line.strip()
if line.startswith("#"):
raise CommentLineException
m = re.match("[^]]+?\\] +([0-9.]+): (\\w+) <-(\\w+)", line)
if m is None:
raise BrokenLineException
return (m.group(1), m.group(2), m.group(3))
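# Example (hypothetical trace line, after stripping leading whitespace):
#   "bash-4251  [000]  1234.567890: do_sys_open <-sys_open"
# parseLine() returns ('1234.567890', 'do_sys_open', 'sys_open').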
def main():
CallTree.ROOT = CallTree("Root (Nowhere)", None, None)
tree = CallTree.ROOT
for line in sys.stdin:
try:
calltime, callee, caller = parseLine(line)
except BrokenLineException:
break
except CommentLineException:
continue
tree = tree.getParent(caller)
tree = tree.calls(callee, calltime)
print CallTree.ROOT
if __name__ == "__main__":
main()
|
anitahitouch/mediadrop
|
refs/heads/master
|
mediacore/templates/admin/players/__init__.py
|
14
|
from mediadrop.templates.admin.players import *
|
MobinRanjbar/hue
|
refs/heads/master
|
desktop/core/ext-py/Django-1.6.10/tests/utils_tests/test_checksums.py
|
246
|
import unittest
from django.utils import checksums
class TestUtilsChecksums(unittest.TestCase):
def check_output(self, function, value, output=None):
"""
Check that function(value) equals output. If output is None,
check that function(value) equals value.
"""
if output is None:
output = value
self.assertEqual(function(value), output)
def test_luhn(self):
f = checksums.luhn
items = (
(4111111111111111, True), ('4111111111111111', True),
(4222222222222, True), (378734493671000, True),
(5424000000000015, True), (5555555555554444, True),
(1008, True), ('0000001008', True), ('000000001008', True),
(4012888888881881, True), (1234567890123456789012345678909, True),
(4111111111211111, False), (42222222222224, False),
(100, False), ('100', False), ('0000100', False),
('abc', False), (None, False), (object(), False),
)
for value, output in items:
self.check_output(f, value, output)
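# For reference, a minimal Luhn check matching the cases above (a sketch,
# not the actual django.utils.checksums implementation):
#
# def luhn(candidate):
#     try:
#         digits = [int(d) for d in str(candidate)]
#     except (TypeError, ValueError):
#         return False  # non-numeric input such as 'abc', None or object()
#     evens = sum(digits[-1::-2])
#     odds = sum(sum(divmod(d * 2, 10)) for d in digits[-2::-2])
#     return (evens + odds) % 10 == 0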
|
popazerty/enigma2-4.3
|
refs/heads/master
|
lib/python/Screens/PluginBrowser.py
|
4
|
from Screen import Screen
from Components.Language import language
from enigma import eConsoleAppContainer, eDVBDB
from boxbranding import getImageVersion
from Components.ActionMap import ActionMap
from Components.PluginComponent import plugins
from Components.PluginList import *
from Components.Label import Label
from Components.Pixmap import Pixmap
from Components.Button import Button
from Components.Harddisk import harddiskmanager
from Components.Sources.StaticText import StaticText
from Components import Ipkg
from Components.config import config, ConfigSubsection, ConfigYesNo, getConfigListEntry, configfile, ConfigText
from Components.ConfigList import ConfigListScreen
from Screens.MessageBox import MessageBox
from Screens.ChoiceBox import ChoiceBox
from Screens.Console import Console
from Screens.VirtualKeyBoard import VirtualKeyBoard
from Plugins.Plugin import PluginDescriptor
from Tools.Directories import resolveFilename, SCOPE_PLUGINS, SCOPE_ACTIVE_SKIN
from Tools.LoadPixmap import LoadPixmap
from time import time
import os
config.pluginfilter = ConfigSubsection()
config.pluginfilter.opendroid = ConfigYesNo(default = True)
config.pluginfilter.po = ConfigYesNo(default = False)
config.pluginfilter.src = ConfigYesNo(default = False)
config.pluginfilter.kernel = ConfigYesNo(default = False)
config.pluginfilter.drivers = ConfigYesNo(default = True)
config.pluginfilter.extensions = ConfigYesNo(default = True)
config.pluginfilter.gigabluesupportnet = ConfigYesNo(default = False)
config.pluginfilter.picons = ConfigYesNo(default = True)
config.pluginfilter.pli = ConfigYesNo(default = False)
config.pluginfilter.security = ConfigYesNo(default = True)
config.pluginfilter.settings = ConfigYesNo(default = True)
config.pluginfilter.skins = ConfigYesNo(default = True)
config.pluginfilter.display = ConfigYesNo(default = True)
config.pluginfilter.softcams = ConfigYesNo(default = True)
config.pluginfilter.systemplugins = ConfigYesNo(default = True)
config.pluginfilter.vix = ConfigYesNo(default = False)
config.pluginfilter.weblinks = ConfigYesNo(default = True)
config.pluginfilter.userfeed = ConfigText(default = 'http://', fixed_size=False)
## command to find ipk with status hold
## grep -B 4 hold /var/lib/opkg/status | sed '/Provides/d' | sed '/Version/d' | sed '/Status/d' | sed '/Depends/d' | sed '/--/d' | sed -e '/^ *$/d'
#language.addCallback(plugins.reloadPlugins)
def languageChanged():
plugins.clearPluginList()
plugins.readPluginList(resolveFilename(SCOPE_PLUGINS))
def CreateFeedConfig():
fileconf = "/etc/opkg/user-feed.conf"
feedurl = "src/gz user-feeds %s\n" % config.pluginfilter.userfeed.value
f = open(fileconf, "w")
f.write(feedurl)
f.close()
os.system("ipkg update")
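# With config.pluginfilter.userfeed.value set to e.g. "http://example.org/feed"
# (hypothetical), /etc/opkg/user-feed.conf ends up containing the single line:
#   src/gz user-feeds http://example.org/feed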
class PluginBrowserSummary(Screen):
def __init__(self, session, parent):
Screen.__init__(self, session, parent = parent)
self["entry"] = StaticText("")
self["desc"] = StaticText("")
self.onShow.append(self.addWatcher)
self.onHide.append(self.removeWatcher)
def addWatcher(self):
self.parent.onChangedEntry.append(self.selectionChanged)
self.parent.selectionChanged()
def removeWatcher(self):
self.parent.onChangedEntry.remove(self.selectionChanged)
def selectionChanged(self, name, desc):
self["entry"].text = name
self["desc"].text = desc
class PluginBrowser(Screen):
def __init__(self, session):
Screen.__init__(self, session)
Screen.setTitle(self, _("Plugin Browser"))
self.firsttime = True
self["red"] = Label(_("Remove plugins"))
self["green"] = Label(_("Download plugins"))
self["blue"] = Label(_("Hold plugins"))
self.list = []
self["list"] = PluginList(self.list)
if config.usage.sort_pluginlist.value:
self["list"].list.sort()
self["actions"] = ActionMap(["SetupActions","WizardActions"],
{
"ok": self.save,
"back": self.close,
"menu": self.menu,
})
self["PluginDownloadActions"] = ActionMap(["ColorActions"],
{
"red": self.delete,
"green": self.download,
"blue": self.toogle
})
self.onFirstExecBegin.append(self.checkWarnings)
self.onShown.append(self.updateList)
self.onChangedEntry = []
self["list"].onSelectionChanged.append(self.selectionChanged)
self.onLayoutFinish.append(self.saveListsize)
if config.pluginfilter.userfeed.value != "http://":
if not os.path.exists("/etc/opkg/user-feed.conf"):
CreateFeedConfig()
def menu(self):
self.session.openWithCallback(self.PluginDownloadBrowserClosed, PluginFilter)
def saveListsize(self):
listsize = self["list"].instance.size()
self.listWidth = listsize.width()
self.listHeight = listsize.height()
def createSummary(self):
return PluginBrowserSummary
def selectionChanged(self):
item = self["list"].getCurrent()
if item:
p = item[0]
name = p.name
desc = p.description
else:
name = "-"
desc = ""
for cb in self.onChangedEntry:
cb(name, desc)
def checkWarnings(self):
if len(plugins.warnings):
text = _("Some plugins are not available:\n")
for (pluginname, error) in plugins.warnings:
text += _("%s (%s)\n") % (pluginname, error)
plugins.resetWarnings()
self.session.open(MessageBox, text = text, type = MessageBox.TYPE_WARNING)
def save(self):
self.run()
def run(self):
plugin = self["list"].l.getCurrentSelection()[0]
plugin(session=self.session)
def updateList(self):
self.pluginlist = plugins.getPlugins(PluginDescriptor.WHERE_PLUGINMENU)
self.list = [PluginEntryComponent(plugin, self.listWidth) for plugin in self.pluginlist]
self["list"].l.setList(self.list)
def delete(self):
self.session.openWithCallback(self.PluginDownloadBrowserClosed, PluginDownloadBrowser, PluginDownloadBrowser.REMOVE)
def toogle(self):
self.session.openWithCallback(self.PluginDownloadBrowserClosed, PluginDownloadBrowser, PluginDownloadBrowser.TOOGLE)
def download(self):
self.session.openWithCallback(self.PluginDownloadBrowserClosed, PluginDownloadBrowser, PluginDownloadBrowser.DOWNLOAD, self.firsttime)
self.firsttime = False
def PluginDownloadBrowserClosed(self):
self.updateList()
self.checkWarnings()
def openExtensionmanager(self):
if fileExists(resolveFilename(SCOPE_PLUGINS, "SystemPlugins/SoftwareManager/plugin.py")):
try:
from Plugins.SystemPlugins.SoftwareManager.plugin import PluginManager
except ImportError:
self.session.open(MessageBox, _("The software management extension is not installed!\nPlease install it."), type = MessageBox.TYPE_INFO,timeout = 10 )
else:
self.session.openWithCallback(self.PluginDownloadBrowserClosed, PluginManager)
class PluginDownloadBrowser(Screen):
DOWNLOAD = 0
REMOVE = 1
UPDATE = 2
TOOGLE = 3
PLUGIN_PREFIX = 'enigma2-plugin-'
PLUGIN_PREFIX2 = []
lastDownloadDate = None
def __init__(self, session, type = 0, needupdate = True):
Screen.__init__(self, session)
Screen.setTitle(self, _("Downloadable plugins"))
self.type = type
self.needupdate = needupdate
self.createPluginFilter()
self.LanguageList = language.getLanguageListSelection()
self.container = eConsoleAppContainer()
self.container.appClosed.append(self.runFinished)
self.container.dataAvail.append(self.dataAvail)
self.onLayoutFinish.append(self.startRun)
self.onShown.append(self.setWindowTitle)
self.list = []
self["list"] = PluginList(self.list)
self.pluginlist = []
self.expanded = []
self.installedplugins = []
self.plugins_changed = False
self.reload_settings = False
self.check_settings = False
self.check_bootlogo = False
self.install_settings_name = ''
self.remove_settings_name = ''
self.onChangedEntry = []
self["list"].onSelectionChanged.append(self.selectionChanged)
if self.type == self.DOWNLOAD:
self["text"] = Label(_("Downloading plugin information. Please wait..."))
if self.type == self.REMOVE:
self["text"] = Label(_("Getting plugin information. Please wait..."))
elif self.type == self.TOOGLE:
self["text"] = Label(_("Getting plugin information. Please wait..."))
self.run = 0
self.remainingdata = ""
self["actions"] = ActionMap(["WizardActions"],
{
"ok": self.go,
"back": self.requestClose,
})
if os.path.isfile('/usr/bin/opkg'):
self.ipkg = '/usr/bin/opkg'
self.ipkg_install = self.ipkg + ' install --force-overwrite'
self.ipkg_remove = self.ipkg + ' remove --autoremove --force-depends'
self.ipkg_toogle = self.ipkg + ' flag hold'
else:
self.ipkg = 'ipkg'
self.ipkg_install = 'ipkg install --force-overwrite -force-defaults'
self.ipkg_remove = self.ipkg + ' remove'
self.ipkg_toogle = self.ipkg + ' flag hold'
def createSummary(self):
return PluginBrowserSummary
def selectionChanged(self):
item = self["list"].getCurrent()
try:
if isinstance(item[0], str): # category
name = item[0]
desc = ""
else:
p = item[0]
name = item[1][7]
desc = p.description
except:
name = ""
desc = ""
for cb in self.onChangedEntry:
cb(name, desc)
def createPluginFilter(self):
#Create Plugin Filter
self.PLUGIN_PREFIX2 = []
if config.pluginfilter.opendroid.value:
self.PLUGIN_PREFIX2.append(self.PLUGIN_PREFIX + 'opendroid')
if config.pluginfilter.po.value:
self.PLUGIN_PREFIX2.append(self.PLUGIN_PREFIX + 'po')
if config.pluginfilter.src.value:
self.PLUGIN_PREFIX2.append(self.PLUGIN_PREFIX + 'src')
if config.pluginfilter.drivers.value:
self.PLUGIN_PREFIX2.append(self.PLUGIN_PREFIX + 'drivers')
if config.pluginfilter.extensions.value:
self.PLUGIN_PREFIX2.append(self.PLUGIN_PREFIX + 'extensions')
if config.pluginfilter.gigabluesupportnet.value:
self.PLUGIN_PREFIX2.append(self.PLUGIN_PREFIX + 'gigabluesupportnet')
if config.pluginfilter.picons.value:
self.PLUGIN_PREFIX2.append(self.PLUGIN_PREFIX + 'picons')
if config.pluginfilter.pli.value:
self.PLUGIN_PREFIX2.append(self.PLUGIN_PREFIX + 'pli')
if config.pluginfilter.security.value:
self.PLUGIN_PREFIX2.append(self.PLUGIN_PREFIX + 'security')
if config.pluginfilter.settings.value:
self.PLUGIN_PREFIX2.append(self.PLUGIN_PREFIX + 'settings')
if config.pluginfilter.skins.value:
self.PLUGIN_PREFIX2.append(self.PLUGIN_PREFIX + 'skins')
if config.pluginfilter.display.value:
self.PLUGIN_PREFIX2.append(self.PLUGIN_PREFIX + 'display')
if config.pluginfilter.softcams.value:
self.PLUGIN_PREFIX2.append(self.PLUGIN_PREFIX + 'softcams')
if config.pluginfilter.systemplugins.value:
self.PLUGIN_PREFIX2.append(self.PLUGIN_PREFIX + 'systemplugins')
if config.pluginfilter.vix.value:
self.PLUGIN_PREFIX2.append(self.PLUGIN_PREFIX + 'vix')
if config.pluginfilter.weblinks.value:
self.PLUGIN_PREFIX2.append(self.PLUGIN_PREFIX + 'weblinks')
if config.pluginfilter.kernel.value:
self.PLUGIN_PREFIX2.append('kernel-module-')
self.PLUGIN_PREFIX2.append('enigma2-locale-')
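# Illustrative result (assuming only the 'extensions' and 'kernel' filters are
# enabled): createPluginFilter() would leave PLUGIN_PREFIX2 as
# ['enigma2-plugin-extensions', 'kernel-module-', 'enigma2-locale-'];
# note that 'enigma2-locale-' is always appended, regardless of the filters.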
def go(self):
sel = self["list"].l.getCurrentSelection()
if sel is None:
return
sel = sel[0]
if isinstance(sel, str): # category
if sel in self.expanded:
self.expanded.remove(sel)
else:
self.expanded.append(sel)
self.updateList()
else:
if self.type == self.DOWNLOAD:
mbox=self.session.openWithCallback(self.runInstall, MessageBox, _("Do you really want to download the plugin \"%s\"?") % sel.name)
mbox.setTitle(_("Download plugins"))
elif self.type == self.REMOVE:
mbox=self.session.openWithCallback(self.runInstall, MessageBox, _("Do you really want to remove the plugin \"%s\"?") % sel.name, default = False)
mbox.setTitle(_("Remove plugins"))
elif self.type == self.TOOGLE:
if 'hold' in os.popen("opkg status " + Ipkg.opkgExtraDestinations() + " " + self.PLUGIN_PREFIX + sel.name).read():
mbox=self.session.openWithCallback(self.runInstall, MessageBox, _("Do you really want to unhold the plugin \"%s\"?") % sel.name, default = False)
else:
mbox=self.session.openWithCallback(self.runInstall, MessageBox, _("Do you really want to hold the plugin \"%s\"?") % sel.name, default = False)
mbox.setTitle(_("Hold plugins"))
def requestClose(self):
if self.plugins_changed:
plugins.readPluginList(resolveFilename(SCOPE_PLUGINS))
if self.reload_settings:
self["text"].setText(_("Reloading bouquets and services..."))
eDVBDB.getInstance().reloadBouquets()
eDVBDB.getInstance().reloadServicelist()
plugins.readPluginList(resolveFilename(SCOPE_PLUGINS))
self.container.appClosed.remove(self.runFinished)
self.container.dataAvail.remove(self.dataAvail)
self.close()
def resetPostInstall(self):
try:
del self.postInstallCall
except:
pass
def installDestinationCallback(self, result):
if result is not None:
dest = result[1]
if dest.startswith('/'):
# Custom install path, add it to the list too
dest = os.path.normpath(dest)
extra = '--add-dest %s:%s -d %s' % (dest,dest,dest)
Ipkg.opkgAddDestination(dest)
else:
extra = '-d ' + dest
self.doInstall(self.installFinished, self["list"].l.getCurrentSelection()[0].name + ' ' + extra)
else:
self.resetPostInstall()
def runInstall(self, val):
if val:
if self.type == self.DOWNLOAD:
if self["list"].l.getCurrentSelection()[0].name.startswith("picons-"):
supported_filesystems = frozenset(('vfat','ext4', 'ext3', 'ext2', 'reiser', 'reiser4', 'jffs2', 'ubifs', 'rootfs'))
candidates = []
import Components.Harddisk
mounts = Components.Harddisk.getProcMounts()
for partition in harddiskmanager.getMountedPartitions(False, mounts):
if partition.filesystem(mounts) in supported_filesystems:
candidates.append((partition.description, partition.mountpoint))
if candidates:
from Components.Renderer import Picon
self.postInstallCall = Picon.initPiconPaths
self.session.openWithCallback(self.installDestinationCallback, ChoiceBox, title=_("Install picons on"), list=candidates)
return
elif self["list"].l.getCurrentSelection()[0].name.startswith("display-picon"):
supported_filesystems = frozenset(('vfat','ext4', 'ext3', 'ext2', 'reiser', 'reiser4', 'jffs2', 'ubifs', 'rootfs'))
candidates = []
import Components.Harddisk
mounts = Components.Harddisk.getProcMounts()
for partition in harddiskmanager.getMountedPartitions(False, mounts):
if partition.filesystem(mounts) in supported_filesystems:
candidates.append((partition.description, partition.mountpoint))
if candidates:
from Components.Renderer import LcdPicon
self.postInstallCall = LcdPicon.initLcdPiconPaths
self.session.openWithCallback(self.installDestinationCallback, ChoiceBox, title=_("Install lcd picons on"), list=candidates)
return
self.install_settings_name = self["list"].l.getCurrentSelection()[0].name
self.install_bootlogo_name = self["list"].l.getCurrentSelection()[0].name
if self["list"].l.getCurrentSelection()[0].name.startswith('settings-'):
self.check_settings = True
self.startIpkgListInstalled(self.PLUGIN_PREFIX + 'settings-*')
elif self["list"].l.getCurrentSelection()[0].name.startswith('bootlogo-'):
self.check_bootlogo = True
self.startIpkgListInstalled(self.PLUGIN_PREFIX + 'bootlogo-*')
else:
self.runSettingsInstall()
elif self.type == self.REMOVE:
if self["list"].l.getCurrentSelection()[0].name.startswith("bootlogo-"):
self.doRemove(self.installFinished, self["list"].l.getCurrentSelection()[0].name + " --force-remove --force-depends")
else:
self.doRemove(self.installFinished, self["list"].l.getCurrentSelection()[0].name)
elif self.type == self.TOOGLE:
self.doToogle(self.installFinished, self["list"].l.getCurrentSelection()[0].name)
def doRemove(self, callback, pkgname):
if pkgname.startswith('kernel-module-') or pkgname.startswith('enigma2-locale-'):
self.session.openWithCallback(callback, Console, cmdlist = [self.ipkg_remove + Ipkg.opkgExtraDestinations() + " " + pkgname, "sync"], closeOnSuccess = True)
else:
self.session.openWithCallback(callback, Console, cmdlist = [self.ipkg_remove + Ipkg.opkgExtraDestinations() + " " + self.PLUGIN_PREFIX + pkgname, "sync"], closeOnSuccess = True)
def doToogle(self, callback, pkgname):
if 'hold' in os.popen("opkg status " + Ipkg.opkgExtraDestinations() + " " + self.PLUGIN_PREFIX + pkgname).read():
self.ipkg_toogle = self.ipkg + ' flag user'
self.session.openWithCallback(callback, Console, cmdlist = [self.ipkg_toogle + " " + self.PLUGIN_PREFIX + pkgname, "sync"], closeOnSuccess = False)
else:
self.ipkg_toogle = self.ipkg + ' flag hold'
self.session.openWithCallback(callback, Console, cmdlist = [self.ipkg_toogle + " " + self.PLUGIN_PREFIX + pkgname, "sync"], closeOnSuccess = False)
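# Background for the two branches above: 'opkg flag hold' pins a package so
# that upgrades skip it, while 'opkg flag user' clears that pin again; the
# method simply inverts whichever state 'opkg status' currently reports.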
def doInstall(self, callback, pkgname):
if pkgname.startswith('kernel-module-') or pkgname.startswith('enigma2-locale-'):
self.session.openWithCallback(callback, Console, cmdlist = [self.ipkg_install + " " + pkgname, "sync"], closeOnSuccess = True)
else:
self.session.openWithCallback(callback, Console, cmdlist = [self.ipkg_install + " " + self.PLUGIN_PREFIX + pkgname, "sync"], closeOnSuccess = True)
def runSettingsRemove(self, val):
if val:
self.doRemove(self.runSettingsInstall, self.remove_settings_name)
def runBootlogoRemove(self, val):
if val:
self.doRemove(self.runSettingsInstall, self.remove_bootlogo_name + " --force-remove --force-depends")
def runSettingsInstall(self):
self.doInstall(self.installFinished, self.install_settings_name)
def setWindowTitle(self):
if self.type == self.DOWNLOAD:
self.setTitle(_("Install plugins"))
elif self.type == self.REMOVE:
self.setTitle(_("Remove plugins"))
elif self.type == self.TOOGLE:
self.setTitle(_("Hold plugins"))
def startIpkgListInstalled(self, pkgname = PLUGIN_PREFIX + '*'):
self.container.execute(self.ipkg + Ipkg.opkgExtraDestinations() + " list_installed '%s'" % pkgname)
def startIpkgListAvailable(self):
self.container.execute(self.ipkg + Ipkg.opkgExtraDestinations() + " list")
def startRun(self):
listsize = self["list"].instance.size()
self["list"].instance.hide()
self.listWidth = listsize.width()
self.listHeight = listsize.height()
if self.type == self.DOWNLOAD:
self.type = self.UPDATE
self.container.execute(self.ipkg + " update")
elif self.type == self.REMOVE:
self.run = 1
self.startIpkgListInstalled()
elif self.type == self.TOOGLE:
self.run = 1
self.startIpkgListInstalled()
def installFinished(self):
if hasattr(self, 'postInstallCall'):
try:
self.postInstallCall()
except Exception, ex:
print "[PluginBrowser] postInstallCall failed:", ex
self.resetPostInstall()
try:
os.unlink('/tmp/opkg.conf')
except:
pass
if self.type != self.TOOGLE:
for plugin in self.pluginlist:
if plugin[3] == self["list"].l.getCurrentSelection()[0].name or plugin[0] == self["list"].l.getCurrentSelection()[0].name:
self.pluginlist.remove(plugin)
break
self.plugins_changed = True
if self["list"].l.getCurrentSelection()[0].name.startswith("settings-"):
self.reload_settings = True
self.expanded = []
self.updateList()
self["list"].moveToIndex(0)
def runFinished(self, retval):
if self.check_settings:
self.check_settings = False
self.runSettingsInstall()
return
if self.check_bootlogo:
self.check_bootlogo = False
self.runSettingsInstall()
return
self.remainingdata = ""
if self.run == 0:
self.run = 1
if self.type == self.UPDATE:
self.type = self.DOWNLOAD
self.startIpkgListInstalled()
elif self.run == 1 and self.type == self.DOWNLOAD:
self.run = 2
self.startIpkgListAvailable()
else:
if len(self.pluginlist) > 0:
self.updateList()
self["list"].instance.show()
else:
if self.type == self.DOWNLOAD:
self["text"].setText(_("Sorry feeds are down for maintenance"))
def dataAvail(self, data):
if self.type == self.DOWNLOAD and data.find('404 Not Found') >= 0:
self["text"].setText(_("Sorry, feeds are down for maintenance"))
self.run = 3
return
#prepend any remaining data from the previous call
data = self.remainingdata + data
#split in lines
lines = data.split('\n')
#'data' should end with '\n', so when splitting, the last line should be empty. If this is not the case, we received an incomplete line
if len(lines[-1]):
#remember this data for next time
self.remainingdata = lines[-1]
lines = lines[0:-1]
else:
self.remainingdata = ""
if self.check_settings:
self.check_settings = False
self.remove_settings_name = data.split(' - ')[0].replace(self.PLUGIN_PREFIX, '')
self.session.openWithCallback(self.runSettingsRemove, MessageBox, _('You already have a channel list installed,\nwould you like to remove\n"%s"?') % self.remove_settings_name)
return
if self.check_bootlogo:
self.check_bootlogo = False
self.remove_bootlogo_name = data.split(' - ')[0].replace(self.PLUGIN_PREFIX, '')
self.session.openWithCallback(self.runBootlogoRemove, MessageBox, _('You already have a bootlogo installed,\nwould you like to remove\n"%s"?') % self.remove_bootlogo_name)
return
for x in lines:
plugin = x.split(" - ", 2)
# 'opkg list_installed' only returns name + version, no description field
if len(plugin) >= 1:
if not plugin[0].endswith(('-dev', '-staticdev', '-dbg', '-doc', '-src', '-meta')):
# Plugin filter
for s in self.PLUGIN_PREFIX2:
if plugin[0].startswith(s):
if self.run == 1 and self.type == self.DOWNLOAD:
if plugin[0] not in self.installedplugins:
self.installedplugins.append(plugin[0])
else:
if plugin[0] not in self.installedplugins:
if len(plugin) == 2:
# 'opkg list_installed' does not return descriptions, append empty description
if plugin[0].startswith('enigma2-locale-'):
lang = plugin[0].split('-')
if len(lang) > 3:
plugin.append(lang[2] + '-' + lang[3])
else:
plugin.append(lang[2])
else:
plugin.append('')
plugin.append(plugin[0][15:]) # strip the 15-char 'enigma2-plugin-'/'enigma2-locale-' prefix
self.pluginlist.append(plugin)
def updateList(self):
list = []
expandableIcon = LoadPixmap(resolveFilename(SCOPE_ACTIVE_SKIN, "icons/expandable-plugins.png"))
expandedIcon = LoadPixmap(resolveFilename(SCOPE_ACTIVE_SKIN, "icons/expanded-plugins.png"))
verticallineIcon = LoadPixmap(resolveFilename(SCOPE_ACTIVE_SKIN, "icons/verticalline-plugins.png"))
self.plugins = {}
if self.type == self.UPDATE:
self.list = list
self["list"].l.setList(list)
return
for x in self.pluginlist:
split = x[3].split('-', 1)
if x[0][0:14] == 'kernel-module-':
split[0] = "kernel modules"
elif x[0][0:15] == 'enigma2-locale-':
split[0] = "languages"
if split[0] not in self.plugins:
self.plugins[split[0]] = []
if split[0] == "kernel modules":
self.plugins[split[0]].append((PluginDescriptor(name = x[0], description = x[2], icon = verticallineIcon), x[0][14:], x[1]))
elif split[0] == "languages":
for t in self.LanguageList:
if len(x[2])>2:
tmpT = t[0].lower()
tmpT = tmpT.replace('_','-')
if tmpT == x[2]:
countryIcon = LoadPixmap(resolveFilename(SCOPE_ACTIVE_SKIN, "countries/" + t[0] + ".png"))
if countryIcon is None:
countryIcon = LoadPixmap(resolveFilename(SCOPE_ACTIVE_SKIN, "countries/missing.png"))
self.plugins[split[0]].append((PluginDescriptor(name = x[0], description = x[2], icon = countryIcon), t[1], x[1]))
break
else:
if t[0][:2] == x[2] and t[0][3:] != 'GB':
countryIcon = LoadPixmap(resolveFilename(SCOPE_ACTIVE_SKIN, "countries/" + t[0] + ".png"))
if countryIcon is None:
countryIcon = LoadPixmap(resolveFilename(SCOPE_ACTIVE_SKIN, "countries/missing.png"))
self.plugins[split[0]].append((PluginDescriptor(name = x[0], description = x[2], icon = countryIcon), t[1], x[1]))
break
else:
if len(split) < 2:
continue
self.plugins[split[0]].append((PluginDescriptor(name = x[3], description = x[2], icon = verticallineIcon), split[1], x[1]))
temp = self.plugins.keys()
if config.usage.sort_pluginlist.value:
temp.sort()
for x in temp:
if x in self.expanded:
list.append(PluginCategoryComponent(x, expandedIcon, self.listWidth))
for plugin in self.plugins[x]:
if self.type == self.TOOGLE or self.type == self.REMOVE:
if "hold" in os.popen("opkg status " + self.PLUGIN_PREFIX + "*" + plugin[1]).read():
list.extend([PluginDownloadComponent(plugin[0], plugin[1] + ' held', plugin[2], self.listWidth)])
else:
list.extend([PluginDownloadComponent(plugin[0], plugin[1], plugin[2], self.listWidth)])
else:
list.extend([PluginDownloadComponent(plugin[0], plugin[1], plugin[2], self.listWidth)])
else:
list.append(PluginCategoryComponent(x, expandableIcon, self.listWidth))
self.list = list
self["list"].l.setList(list)
class PluginFilter(ConfigListScreen, Screen):
def __init__(self, session):
Screen.__init__(self, session)
self.session = session
self.skinName = "Setup"
Screen.setTitle(self, _("Plugin Filter..."))
self["HelpWindow"] = Pixmap()
self["HelpWindow"].hide()
self["status"] = StaticText()
self["labelExitsave"] = Label("[Exit] = " +_("Cancel") +" [Ok] =" +_("Save"))
self.onChangedEntry = [ ]
self.list = []
ConfigListScreen.__init__(self, self.list, session = self.session, on_change = self.changedEntry)
self.createSetup()
self["actions"] = ActionMap(["SetupActions", 'ColorActions', 'VirtualKeyboardActions'],
{
"ok": self.keySave,
"cancel": self.keyCancel,
"red": self.keyCancel,
"green": self.keySave,
"menu": self.keyCancel,
"showVirtualKeyboard": self.KeyText
}, -2)
self["key_red"] = StaticText(_("Cancel"))
self["key_green"] = StaticText(_("OK"))
if not self.selectionChanged in self["config"].onSelectionChanged:
self["config"].onSelectionChanged.append(self.selectionChanged)
self.selectionChanged()
def createSetup(self):
self.editListEntry = None
self.list = []
self.list.append(getConfigListEntry(_("opendroid"), config.pluginfilter.opendroid, _("This allows you to show opendroid modules in downloads")))
self.list.append(getConfigListEntry(_("PO"), config.pluginfilter.po, _("If set to 'yes' it will show the 'PO' packages in browser.")))
self.list.append(getConfigListEntry(_("Src"), config.pluginfilter.src, _("If set to 'yes' it will show the 'SRC' packages in browser.")))
self.list.append(getConfigListEntry(_("Drivers"), config.pluginfilter.drivers, _("This allows you to show drivers modules in downloads")))
self.list.append(getConfigListEntry(_("Extensions"), config.pluginfilter.extensions, _("This allows you to show extensions modules in downloads")))
self.list.append(getConfigListEntry(_("Systemplugins"), config.pluginfilter.systemplugins, _("This allows you to show systemplugins modules in downloads")))
self.list.append(getConfigListEntry(_("Softcams"), config.pluginfilter.softcams, _("This allows you to show softcams modules in downloads")))
self.list.append(getConfigListEntry(_("Skins"), config.pluginfilter.skins, _("This allows you to show skins modules in downloads")))
self.list.append(getConfigListEntry(_("LCD Skins"), config.pluginfilter.skins, _("This allows you to show lcd skins in downloads")))
self.list.append(getConfigListEntry(_("Picons"), config.pluginfilter.picons, _("This allows you to show picons modules in downloads")))
self.list.append(getConfigListEntry(_("Settings"), config.pluginfilter.settings, _("This allows you to show settings modules in downloads")))
self.list.append(getConfigListEntry(_("Weblinks"), config.pluginfilter.weblinks, _("This allows you to show weblinks modules in downloads")))
self.list.append(getConfigListEntry(_("PLi"), config.pluginfilter.pli, _("This allows you to show pli modules in downloads")))
self.list.append(getConfigListEntry(_("ViX"), config.pluginfilter.vix, _("This allows you to show vix modules in downloads")))
self.list.append(getConfigListEntry(_("Security"), config.pluginfilter.security, _("This allows you to show security modules in downloads")))
self.list.append(getConfigListEntry(_("Kernel Modules"), config.pluginfilter.kernel, _("This allows you to show kernel modules in downloads")))
self.list.append(getConfigListEntry(_("Gigabluesupportnet"), config.pluginfilter.gigabluesupportnet, _("This allows you to show gigabluesupportnet modules in downloads")))
self.list.append(getConfigListEntry(_("User Feed URL"), config.pluginfilter.userfeed, _("Please enter your personal feed URL")))
self["config"].list = self.list
self["config"].setList(self.list)
if config.usage.sort_settings.value:
self["config"].list.sort()
def selectionChanged(self):
self["status"].setText(self["config"].getCurrent()[2])
def changedEntry(self):
for x in self.onChangedEntry:
x()
self.selectionChanged()
def getCurrentEntry(self):
return self["config"].getCurrent()[0]
def getCurrentValue(self):
return str(self["config"].getCurrent()[1].getText())
def saveAll(self):
for x in self["config"].list:
x[1].save()
configfile.save()
if config.pluginfilter.userfeed.value != "http://":
CreateFeedConfig()
def keySave(self):
self.saveAll()
self.close()
def cancelConfirm(self, result):
if not result:
return
for x in self["config"].list:
x[1].cancel()
self.close()
def keyCancel(self):
if self["config"].isChanged():
self.session.openWithCallback(self.cancelConfirm, MessageBox, _("Really close without saving settings?"))
else:
self.close()
def KeyText(self):
sel = self['config'].getCurrent()
if sel:
self.session.openWithCallback(self.VirtualKeyBoardCallback, VirtualKeyBoard, title = self["config"].getCurrent()[0], text = self["config"].getCurrent()[1].value)
def VirtualKeyBoardCallback(self, callback = None):
if callback is not None and len(callback):
self["config"].getCurrent()[1].value = callback
self["config"].invalidate(self["config"].getCurrent())
language.addCallback(languageChanged)
|
gurneyalex/odoo
|
refs/heads/13.0-improve_sale_coupon_perf
|
addons/hr_holidays/tests/common.py
|
7
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.addons.test_mail.tests.common import mail_new_test_user
from odoo.tests import common
class TestHrHolidaysBase(common.TransactionCase):
def setUp(self):
super(TestHrHolidaysBase, self).setUp()
# Test users to use through the various tests
self.user_hruser = mail_new_test_user(self.env, login='armande', groups='base.group_user,hr_holidays.group_hr_holidays_user')
self.user_hruser_id = self.user_hruser.id
self.user_hrmanager = mail_new_test_user(self.env, login='bastien', groups='base.group_user,hr_holidays.group_hr_holidays_manager')
self.user_hrmanager_id = self.user_hrmanager.id
self.user_employee = mail_new_test_user(self.env, login='david', groups='base.group_user')
self.user_employee_id = self.user_employee.id
# Hr Data
Department = self.env['hr.department'].with_context(tracking_disable=True)
self.hr_dept = Department.create({
'name': 'Human Resources',
})
self.rd_dept = Department.create({
'name': 'Research and development',
})
self.employee_emp = self.env['hr.employee'].create({
'name': 'David Employee',
'user_id': self.user_employee_id,
'department_id': self.rd_dept.id,
})
self.employee_emp_id = self.employee_emp.id
self.employee_hruser = self.env['hr.employee'].create({
'name': 'Armande HrUser',
'user_id': self.user_hruser_id,
'department_id': self.rd_dept.id,
})
self.employee_hruser_id = self.employee_hruser.id
self.employee_hrmanager = self.env['hr.employee'].create({
'name': 'Bastien HrManager',
'user_id': self.user_hrmanager_id,
'department_id': self.hr_dept.id,
'parent_id': self.employee_hruser_id,
})
self.employee_hrmanager_id = self.employee_hrmanager.id
self.rd_dept.write({'manager_id': self.employee_hruser_id})
|
Russell-IO/ansible
|
refs/heads/devel
|
lib/ansible/modules/storage/purestorage/purefa_volume.py
|
21
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Simon Dodsley (simon@purestorage.com)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: purefa_volume
version_added: '2.4'
short_description: Manage volumes on Pure Storage FlashArrays
description:
- Create, delete or extend the capacity of a volume on Pure Storage FlashArray.
author:
- Simon Dodsley (@sdodsley)
options:
name:
description:
- The name of the volume.
required: true
target:
description:
- The name of the target volume, if copying.
state:
description:
- Define whether the volume should exist or not.
default: present
choices: [ absent, present ]
eradicate:
description:
- Define whether to eradicate the volume on delete or leave in trash.
type: bool
default: 'no'
overwrite:
description:
- Define whether to overwrite a target volume if it already exists.
type: bool
default: 'no'
size:
description:
- Volume size in M, G, T or P units.
extends_documentation_fragment:
- purestorage.fa
'''
EXAMPLES = r'''
- name: Create new volume named foo
purefa_volume:
name: foo
size: 1T
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
state: present
- name: Extend the size of an existing volume named foo
purefa_volume:
name: foo
size: 2T
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
state: present
- name: Delete and eradicate volume named foo
purefa_volume:
name: foo
eradicate: yes
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
state: absent
- name: Create clone of volume bar named foo
purefa_volume:
name: foo
target: bar
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
state: present
- name: Overwrite volume bar with volume foo
purefa_volume:
name: foo
target: bar
overwrite: yes
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
state: present
'''
RETURN = r'''
'''
try:
from purestorage import purestorage
HAS_PURESTORAGE = True
except ImportError:
HAS_PURESTORAGE = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pure import get_system, purefa_argument_spec
def human_to_bytes(size):
"""Given a human-readable byte string (e.g. 2G, 30M),
return the number of bytes. Will return 0 if the argument has
unexpected form.
"""
num_bytes = size[:-1]
unit = size[-1]
if num_bytes.isdigit():
num_bytes = int(num_bytes)
if unit == 'P':
num_bytes *= 1125899906842624
elif unit == 'T':
num_bytes *= 1099511627776
elif unit == 'G':
num_bytes *= 1073741824
elif unit == 'M':
num_bytes *= 1048576
else:
num_bytes = 0
else:
num_bytes = 0
return num_bytes
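# A few illustrative values for the conversion above:
# human_to_bytes('2G') -> 2147483648 (2 * 1073741824)
# human_to_bytes('30M') -> 31457280 (30 * 1048576)
# human_to_bytes('10K') -> 0 ('K' is not a supported unit)
# human_to_bytes('xG') -> 0 (non-numeric size part)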
def get_volume(module, array):
"""Return Volume or None"""
try:
return array.get_volume(module.params['name'])
except Exception:
return None
def get_target(module, array):
"""Return Volume or None"""
try:
return array.get_volume(module.params['target'])
except Exception:
return None
def create_volume(module, array):
"""Create Volume"""
size = module.params['size']
if not module.check_mode:
array.create_volume(module.params['name'], size)
module.exit_json(changed=True)
def copy_from_volume(module, array):
"""Create Volume Clone"""
changed = False
tgt = get_target(module, array)
if tgt is None:
changed = True
if not module.check_mode:
array.copy_volume(module.params['name'],
module.params['target'])
elif tgt is not None and module.params['overwrite']:
changed = True
if not module.check_mode:
array.copy_volume(module.params['name'],
module.params['target'],
overwrite=module.params['overwrite'])
module.exit_json(changed=changed)
def update_volume(module, array):
"""Update Volume"""
changed = True
vol = array.get_volume(module.params['name'])
if human_to_bytes(module.params['size']) > vol['size']:
if not module.check_mode:
array.extend_volume(module.params['name'], module.params['size'])
else:
changed = False
module.exit_json(changed=changed)
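# Note on the comparison above: extend_volume() only runs when the requested
# size is strictly larger than the current one; the module never shrinks a
# volume, so e.g. asking for 1T on an existing 2T volume reports changed=False.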
def delete_volume(module, array):
""" Delete Volume"""
if not module.check_mode:
array.destroy_volume(module.params['name'])
if module.params['eradicate']:
array.eradicate_volume(module.params['name'])
module.exit_json(changed=True)
def main():
argument_spec = purefa_argument_spec()
argument_spec.update(dict(
name=dict(type='str', required=True),
target=dict(type='str'),
overwrite=dict(type='bool', default=False),
eradicate=dict(type='bool', default=False),
state=dict(type='str', default='present', choices=['absent', 'present']),
size=dict(type='str'),
))
mutually_exclusive = [['size', 'target']]
module = AnsibleModule(argument_spec,
mutually_exclusive=mutually_exclusive,
supports_check_mode=True)
if not HAS_PURESTORAGE:
module.fail_json(msg='purestorage SDK is required for this module')
size = module.params['size']
state = module.params['state']
array = get_system(module)
volume = get_volume(module, array)
target = get_target(module, array)
if state == 'present' and not volume and size:
create_volume(module, array)
elif state == 'present' and volume and size:
update_volume(module, array)
elif state == 'present' and volume and target:
copy_from_volume(module, array)
elif state == 'present' and volume and not target:
copy_from_volume(module, array)
elif state == 'absent' and volume:
delete_volume(module, array)
elif state == 'present' and not volume or not size:  # note: parses as (present and not volume) or (not size)
module.exit_json(changed=False)
elif state == 'absent' and not volume:
module.exit_json(changed=False)
if __name__ == '__main__':
main()
|
leimaohui/dnf-yocto
|
refs/heads/dnf-yocto2.3
|
doc/examples/install_plugin.py
|
5
|
# Copyright (C) 2015 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
"""A plugin that ensures that given features are present."""
import dnf.cli
import dnf.exceptions
from dnf.cli.option_parser import OptionParser
from dnf.i18n import _
# The parent class allows registration to the CLI manager.
class Command(dnf.cli.Command):
"""A command that ensures that given features are present."""
# An alias is needed to invoke the command from command line.
aliases = ['foo'] # <-- SET YOUR ALIAS HERE.
def configure(self):
"""Setup the demands."""
# Repositories are needed if we want to install anything.
self.cli.demands.available_repos = True
# A sack is required by marking methods and dependency resolving.
self.cli.demands.sack_activation = True
# Resolving performs a transaction that installs the packages.
self.cli.demands.resolving = True
# Based on the system, privileges are required to do an installation.
self.cli.demands.root_user = True # <-- SET YOUR FLAG HERE.
@staticmethod
def set_argparser(parser):
"""Parse command line arguments."""
parser.add_argument('package', nargs='+', metavar=_('PACKAGE'),
action=OptionParser.ParseSpecGroupFileCallback,
help=_('Package to install'))
def run(self):
"""Run the command."""
# Feature marking methods set the user request.
for ftr_spec in self.opts.pkg_specs:
try:
self.base.install(ftr_spec)
except dnf.exceptions.MarkingError:
raise dnf.exceptions.Error('feature(s) not found: ' + ftr_spec)
# Package marking methods set the user request.
try:
self.base.package_install(self.base.add_remote_rpms(self.opts.filenames, strict=False))
except EnvironmentError as e:
raise dnf.exceptions.Error(e)
# Comps data reading initializes the base.comps attribute.
if self.opts.grp_specs:
self.base.read_comps(arch_filter=True)
# Group marking methods set the user request.
for grp_spec in self.opts.grp_specs:
group = self.base.comps.group_by_pattern(grp_spec)
if not group:
raise dnf.exceptions.Error('group not found: ' + grp_spec)
self.base.group_install(group, ['mandatory', 'default'])
# Every plugin must be a subclass of dnf.Plugin.
class Plugin(dnf.Plugin):
"""A plugin that registers our custom command."""
# Every plugin must provide its name.
name = 'foo' # <-- SET YOUR NAME HERE.
# Every plugin must provide its own initialization function.
def __init__(self, base, cli):
"""Initialize the plugin."""
super(Plugin, self).__init__(base, cli)
if cli:
cli.register_command(Command)
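# Illustrative invocation (hypothetical spec names): once this example is
# installed as a DNF plugin, the command could be run as
#
# dnf foo httpd @web-server /tmp/local.rpm
#
# which feeds the package spec, the comps group and the local RPM through the
# marking calls in Command.run() above.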
|
boris-savic/swampdragon
|
refs/heads/master
|
tests/test_selfpub_model.py
|
13
|
from swampdragon.route_handler import ModelRouter
from swampdragon.pubsub_providers.base_provider import PUBACTIONS
from swampdragon.testing.dragon_testcase import DragonTestCase
from .models import FooSelfPub, BarSelfPub
from .serializers import FooSelfPubSerializer, BarSelfPubSerializer
from datetime import datetime
class FooModelRouter(ModelRouter):
serializer_class = FooSelfPubSerializer
class BarModelRouter(ModelRouter):
serializer_class = BarSelfPubSerializer
class TestSelfPubModel(DragonTestCase):
def test_self_pub_model(self):
router = FooModelRouter(self.connection)
router.subscribe(**{'channel': 'testchan'})
self.assertIsNone(self.connection.last_pub)
FooSelfPub.objects.create(name='test')
self.assertIsNotNone(self.connection.last_pub)
def test_self_pub_model_with_fk(self):
router = BarModelRouter(self.connection)
router.subscribe(**{'channel': 'testchan'})
self.assertIsNone(self.connection.last_pub)
foo = FooSelfPub.objects.create(name='test')
BarSelfPub.objects.create(date=datetime.now(), foo=foo)
self.assertIsNotNone(self.connection.last_pub)
def test_ignore_id_when_getting_updated_fields(self):
FooSelfPubSerializer.Meta.publish_fields += ('pk', )
FooSelfPub.objects.create(name='test')
def test_get_changes(self):
foo = FooSelfPub.objects.create(name='test')
self.assertListEqual(foo.get_changed_fields(), [])
foo.number = 12
self.assertListEqual(foo.get_changed_fields(), ['number'])
foo.name = 'updated'
self.assertIn('number', foo.get_changed_fields())
self.assertIn('name', foo.get_changed_fields())
bar = BarSelfPub.objects.create(date=datetime.now(), foo=foo)
self.assertListEqual(bar.get_changed_fields(), [])
update_date = datetime.now()
bar.date = update_date
self.assertListEqual(bar.get_changed_fields(), ['date'])
def test_serialize(self):
foo = FooSelfPub.objects.create(name='test')
data = foo.serialize()
self.assertEqual(data['name'], foo.name)
def test_create(self):
router = FooModelRouter(self.connection)
router.subscribe(**{'channel': 'testchan'})
FooSelfPub.objects.create(name='test')
self.assertEqual(self.connection.last_pub['action'], 'created')
def test_update(self):
router = FooModelRouter(self.connection)
router.subscribe(**{'channel': 'testchan'})
foo = FooSelfPub.objects.create(name='test')
foo.name = 'updated'
foo.save()
self.assertEqual(self.connection.last_pub['action'], 'updated')
def test_remove_on_update(self):
router = FooModelRouter(self.connection)
router.subscribe(**{'channel': 'testchan', 'name__contains': 'findme'})
foo = FooSelfPub.objects.create(name='test')
self.assertIsNone(self.connection.last_pub)
foo.name = 'findme'
foo.save()
self.assertEqual(self.connection.last_pub['action'], PUBACTIONS.updated)
foo.name = 'hideme'
foo.save()
self.assertEqual(self.connection.last_pub['action'], PUBACTIONS.deleted)
foo.name = 'findmeagain'
foo.save()
self.assertEqual(self.connection.last_pub['action'], PUBACTIONS.updated)
|
ianstalk/Flexget
|
refs/heads/develop
|
flexget/tests/test_pathscrub.py
|
3
|
import pytest
from flexget.utils.pathscrub import pathscrub
class TestPathscrub:
def test_windows_filenames(self):
# Windows filename tests
# Mapping from input filename to the expected result after path scrub
win_fn = {
'afilename': 'afilename',
'filename/with/slash': 'filename with slash',
'filename\\with\\backslash': 'filename with backslash',
'afilename.': 'afilename', # filenames can't end in dot
'a<b>c:d"e/f\\g|h?i*j': 'a b c d e f g h i j', # Can't contain invalid characters
'a<<b?*?c: d': 'a b c d', # try with some repeated bad characters
'something.>': 'something', # Don't leave dots at the end
'something *': 'something', # Don't leave spaces at the end
'aoeu. > * . * <': 'aoeu', # Really don't leave spaces or dots at the end
}
for test in win_fn:
result = pathscrub(test, os='windows', filename=True)
assert result == win_fn[test], '%s != %s' % (result, win_fn[test])
def test_windows_paths(self):
win_path = {
'aoeu/aoeu': 'aoeu/aoeu', # Don't strip slashes in path mode
'aoeu\\aoeu': 'aoeu\\aoeu', # Or backslashes
'aoeu / aoeu ': 'aoeu/aoeu', # Don't leave spaces at the begin or end of folder names
'aoeu \\aoeu ': 'aoeu\\aoeu',
'aoeu./aoeu.\\aoeu.': 'aoeu/aoeu\\aoeu', # Or dots
}
for test in win_path:
result = pathscrub(test, os='windows', filename=False)
assert result == win_path[test], '%s != %s' % (result, win_path[test])
def test_degenerate(self):
# If path is reduced to nothing, make sure it complains
with pytest.raises(ValueError):
pathscrub('<<<<:>>>>', os='windows', filename=True)
def test_space_around(self):
# We don't want folder or file names to end or start with spaces on any platform
space_paths = {' / aoeu /aoeu ': '/aoeu/aoeu', '/ a/a ': '/a/a', '/a /': '/a/'}
for platform in ['windows', 'linux', 'mac']:
for test in space_paths:
result = pathscrub(test, os=platform, filename=False)
assert result == space_paths[test], '%s != %s (%s)' % (
result,
space_paths[test],
platform,
)
# Windows only should also use backslashes as dir separators
test = ['c:\\ aoeu \\aoeu /aoeu ', 'c:\\aoeu\\aoeu/aoeu']
result = pathscrub(test[0], os='windows', filename=False)
assert result == test[1], '%s != %s' % (result, test[1])
|
w495/scrapy
|
refs/heads/master
|
scrapy/contrib/spidermiddleware/referer.py
|
144
|
import warnings
from scrapy.exceptions import ScrapyDeprecationWarning
warnings.warn("Module `scrapy.contrib.spidermiddleware.referer` is deprecated, "
"use `scrapy.spidermiddlewares.referer` instead",
ScrapyDeprecationWarning, stacklevel=2)
from scrapy.spidermiddlewares.referer import *
|
mozilla/splice
|
refs/heads/master
|
tests/api/test_tile.py
|
3
|
from nose.tools import assert_equal
from flask import url_for, json
from tests.base import BaseTestCase
from tests.populate_database import parse_csv
from collections import defaultdict
class TestTile(BaseTestCase):
def setUp(self):
self.tile_fixture = defaultdict(list)
self.new_adgroup_id = 1
self.new_tile = {
'title': "New Tile",
'target_url': "http://www.newtile.com",
'adgroup_id': self.new_adgroup_id,
'type': "affiliate",
'status': "unapproved",
'paused': False,
'image_uri': "data:image/image_uri_new_tile",
'enhanced_image_uri': "data:image/enhanced_image_uri_new_tile",
}
for tile in parse_csv("tiles.csv"):
self.tile_fixture[int(tile["adgroup_id"])].append(tile)
super(TestTile, self).setUp()
def test_cors(self):
"""Test the support for CORS"""
url = url_for('api.tile.tiles')
data = json.dumps(self.new_tile)
res = self.client.post(url,
data=data,
headers={"Origin": "foo.com"},
content_type='application/json')
assert_equal(res.status_code, 201)
assert_equal(res.headers['Access-Control-Allow-Origin'], 'foo.com')
# test CORS gets set properly in failures
res = self.client.post(url,
data=data,
headers={"Origin": "foo.com"},
content_type='application/json')
assert_equal(res.status_code, 400)
assert_equal(res.headers['Access-Control-Allow-Origin'], 'foo.com')
def test_get_tiles_by_adgroup_id(self):
""" Test for getting all tiles for a given adgroup id
"""
for adgroup_id, tiles in self.tile_fixture.iteritems():
url = url_for('api.tile.tiles', adgroup_id=adgroup_id)
response = self.client.get(url)
assert_equal(response.status_code, 200)
resp = json.loads(response.data)
assert_equal(len(resp["results"]), len(tiles))
def test_get_tiles_for_missing_adgroups(self):
""" Test for getting tiles for a missing adgroup id """
url = url_for('api.tile.tiles', adgroup_id=10001)
response = self.client.get(url)
assert_equal(response.status_code, 200)
resp = json.loads(response.data)
assert_equal(len(resp["results"]), 0)
def test_post_and_get(self):
""" Test for HTTP POST and GET
"""
url = url_for('api.tile.tiles')
data = json.dumps(self.new_tile)
response = self.client.post(url, data=data, content_type="application/json")
assert_equal(response.status_code, 201)
new = json.loads(response.data)["result"]
url = url_for('api.tile.tile', adgroup_id=self.new_adgroup_id, tile_id=new["id"])
response = self.client.get(url)
resp = json.loads(response.data)
assert_equal(new, resp["result"])
def test_post_400_missing_argument(self):
""" Test the failure case of HTTP POST
"""
url = url_for('api.tile.tiles')
tile = dict(self.new_tile)
del tile["title"]
data = json.dumps(tile)
response = self.client.post(url, data=data, content_type="application/json")
assert_equal(response.status_code, 400)
def test_post_400_invalid_argument(self):
""" Test the failure case of HTTP POST
"""
url = url_for('api.tile.tiles')
tile = dict(self.new_tile)
tile["target_url"] = "someinsane.site.com.*"
data = json.dumps(tile)
response = self.client.post(url, data=data, content_type="application/json")
assert_equal(response.status_code, 400)
tile = dict(self.new_tile)
tile["image_uri"] = "invalid_image_code"
data = json.dumps(tile)
response = self.client.post(url, data=data, content_type="application/json")
assert_equal(response.status_code, 400)
def test_post_duplicate(self):
""" Test HTTP POST the same data twice, it should reject the second one as
invalid arguments
"""
url = url_for('api.tile.tiles')
data = json.dumps(self.new_tile)
response = self.client.post(url, data=data, content_type="application/json")
assert_equal(response.status_code, 201)
response = self.client.post(url, data=data, content_type="application/json")
assert_equal(response.status_code, 400)
def test_http_put(self):
""" Test the success case of HTTP PUT
"""
url = url_for('api.tile.tiles')
data = json.dumps(self.new_tile)
response = self.client.post(url, data=data, content_type="application/json")
assert_equal(response.status_code, 201)
new = json.loads(response.data)["result"]
url = url_for('api.tile.tile', tile_id=new["id"])
new["status"] = "approved"
assert_equal(new["position_priority"], "medium") # test the default value
new["position_priority"] = "high" # update the position_priority here
data = json.dumps(new)
response = self.client.put(url, data=data, content_type="application/json")
assert_equal(response.status_code, 200)
updated = json.loads(response.data)["result"]
url = url_for('api.tile.tile', tile_id=new["id"])
response = self.client.get(url)
resp = json.loads(response.data)
assert_equal(updated, resp["result"])
assert_equal("high", resp["result"]["position_priority"]) # test the update
def test_http_put_404(self):
""" Test the failure case of HTTP PUT. Editing a missing tile ends up with a 404 error
"""
url = url_for('api.tile.tiles')
data = json.dumps(self.new_tile)
response = self.client.post(url, data=data, content_type="application/json")
assert_equal(response.status_code, 201)
url = url_for('api.tile.tile', adgroup_id=1000, tile_id=1001)
response = self.client.put(url, data=data, content_type="application/json")
assert_equal(response.status_code, 404)
def test_http_put_400(self):
""" Test the failure case of HTTP PUT. Sending request without required fileds should
get a 400 error
"""
url = url_for('api.tile.tiles')
data = json.dumps(self.new_tile)
response = self.client.post(url, data=data, content_type="application/json")
assert_equal(response.status_code, 201)
new_tile = json.loads(response.data)["result"]
url = url_for('api.tile.tile', tile_id=new_tile["id"])
new_tile["bg_color"] = "invalid_bg_color"
data = json.dumps(new_tile)
response = self.client.put(url, data=data, content_type="application/json")
assert_equal(response.status_code, 400)
|
mbalasso/mynumpy
|
refs/heads/master
|
numpy/fft/setupscons.py
|
100
|
def configuration(parent_package = '', top_path = None):
from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
config = Configuration('fft', parent_package, top_path)
config.add_data_dir('tests')
config.add_sconscript('SConstruct',
source_files = ['fftpack_litemodule.c', 'fftpack.c',
'fftpack.h'])
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(configuration=configuration)
|
cydhaselton/coreclr
|
refs/heads/master
|
src/ToolBox/SOS/tests/t_cmd_histroot.py
|
43
|
import lldb
import re
import testutils as test
def runScenario(assembly, debugger, target):
process = target.GetProcess()
res = lldb.SBCommandReturnObject()
ci = debugger.GetCommandInterpreter()
# Run debugger, wait until libcoreclr is loaded,
# set breakpoint at Test.Main and stop there
test.stop_in_main(debugger, assembly)
ci.HandleCommand("dso", res)
print(res.GetOutput())
print(res.GetError())
# Interpreter must have this command and be able to run it
test.assertTrue(res.Succeeded())
output = res.GetOutput()
# Output is not empty
test.assertTrue(len(output) > 0)
# Get all objects
objects = []
for line in output.split('\n'):
match = re.match(r'([0-9a-fA-F]+)\s+([0-9a-fA-F]+)\s', line)
# Not all lines list objects
if match:
groups = match.groups()
# Match has exactly two subgroups
test.assertEqual(len(groups), 2)
obj_addr = groups[1]
# Address must be a hex number
test.assertTrue(test.is_hexnum(obj_addr))
objects.append(obj_addr)
# There must be at least one object
test.assertTrue(len(objects) > 0)
for obj in objects:
ci.HandleCommand("histroot " + obj, res)
print(res.GetOutput())
print(res.GetError())
# Interpreter must have this command and be able to run it
test.assertTrue(res.Succeeded())
output = res.GetOutput()
# Output is not empty
test.assertTrue(len(output) > 0)
match = re.search('GCCount', output)
test.assertTrue(match)
# TODO: test other use cases
# Continue current process and checks its exit code
test.exit_lldb(debugger, assembly)
|
MakingMexico/Eaco
|
refs/heads/master
|
Eacos/settings.py
|
1
|
"""
Django settings for Eacos project.
Generated by 'django-admin startproject' using Django 1.8.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '2v+u!j4-@*w$%vd#-5+mbrr3_(41w0%t7e-1b)t(f^#e1ui2zi'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'Eacos.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'Eacos.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
|
nharraud/b2share
|
refs/heads/master
|
invenio/modules/workflows/signals.py
|
16
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2014 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Contain signals emitted from workflows module."""
from flask.signals import Namespace
_signals = Namespace()
workflow_halted = _signals.signal('workflow_halted')
"""
This signal is sent when a workflow engine's halt function is called.
Sender is the bibworkflow object that was running before the workflow
was halted.
"""
workflow_started = _signals.signal('workflow_started')
"""
This signal is sent when a workflow is started.
Sender is the workflow engine object running the workflow.
"""
workflow_finished = _signals.signal('workflow_finished')
"""
This signal is sent when a workflow is finished.
Sender is the workflow engine object running the workflow.
"""
|
xorpaul/shinken
|
refs/heads/master
|
windows/Tools/Charp_Services/Shinken_Services/install.d/tools/skonf.py
|
6
|
#!/usr/bin/env python
#
# Copyright (C) 2009-2012:
# Gabes Jean, naparuba@gmail.com
# Gerhard Lausser, Gerhard.Lausser@consol.de
# David GUENAULT, dguenault@monitoring-fr.org
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
import os
import cmd
import sys
import time
import datetime
import copy
import socket
#try:
# from shinken.bin import VERSION
# import shinken
#except ImportError:
# # If importing shinken fails, try to load from current directory
# # or parent directory to support running without installation.
# # Submodules will then be loaded from there, too.
# import imp
# imp.load_module('shinken', *imp.find_module('shinken', [os.path.realpath("."), os.path.realpath(".."), os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), "..")]))
from shinken.bin import VERSION
from shinken.objects.config import Config
import getopt, sys
def usage():
print "skonf.py -a action -f configfile -o objecttype -d directive -v value -r directive=value,directive=value"
print ""
print " * actions:"
print " - control (control action is specified with -d [stop|start|restart]). Apply action on all satellites"
print " - sync: deploy shinken-specific on all satellites"
print " - deploy deploy shinken on hosts defined in authfile (-f path/to/auth)"
print " - macros: execute macros file"
print " - delobject: remove a shinken object from the shinken configuration file"
print " - cloneobject: clone an object (currently only pollers are suported"
print " - showconfig: display configuration of object"
print " - setparam: set directive value for an object"
print " - delparam: remove directive for an object"
print " - getdirective: get a directive value from an object"
print " - getobjectnames: get a list of objects names (required parameters: configfile, objectype)"
print " * configfile: full path to the shinken-specific.cfg file"
print " * objectype: configuration object type on which the action apply"
print " * directive: the directive name of a configuration object"
print " * value: the directive value of a configuration object"
print " * r: this parameter restric the application to objects matching the directive/value pair list"
def main():
config = ()
action = ""
configfile = ""
objectype = ""
directive = ""
value = ""
filters = ""
commit = True
try:
opts, args = getopt.getopt(sys.argv[1:], "qa:f:o:d:v:r:", [])
except getopt.GetoptError, err:
print str(err)
usage()
sys.exit(2)
for o, a in opts:
if o == "-a":
actions = ["setparam", "delparam", "showconfig", "addobject", "getobjectnames", "getdirective", "getaddresses", "delobject", "cloneobject", "macros", "sync", "control", "deploy", "removemodule"]
if a in actions:
action = a
else:
print "Invalid action"
usage()
sys.exit(2)
elif o == "-f":
configfile = a
elif o == "-q":
quiet = 1
elif o == "-o":
objectype = a
elif o == "-d":
directive = a
elif o == "-v":
value = a
elif o == "-r":
filters = a
else:
assert False, "unhandled option"
sys.exit(2)
if action == "":
print "action is mandatory"
usage()
sys.exit(2)
if configfile == "" and action != "control":
print "config file is mandatory"
usage()
sys.exit(2)
if objectype == "" and action != "getaddresses" and action != "showconfig" and action != "macros" and action != "sync" and action != "control" and action != "deploy":
print "object type is mandatory"
usage()
sys.exit(2)
if directive == "" and (action == "setparam" or action == "addobject"):
print "directive is mandatory"
usage()
sys.exit(2)
if filters == "" and action == "delobject":
print "filters is mandatory"
usage()
sys.exit(2)
if value == "" and action == "setparam":
print "value is mandatory"
usage()
sys.exit(2)
if action != "macros" and action != "control" and action != "deploy":
result, config = loadconfig([configfile])
if not result:
print config
sys.exit(2)
commit = False
else:
config = None
allowed = ['poller', 'arbiter', 'scheduler', 'broker', 'receiver', 'reactionner']
if action == "setparam":
result, content = setparam(config, objectype, directive, value, filters)
print content
if not result:
print content
sys.exit(2)
else:
result, content = writeconfig(config, configfile)
if not result:
sys.exit(2)
else:
sys.exit(0)
if action == "delparam":
result, content = delparam(config, objectype, directive, filters)
print content
if not result:
print content
sys.exit(2)
else:
result, content = writeconfig(config, configfile)
if not result:
sys.exit(2)
else:
sys.exit(0)
elif action == "macros":
if directive != "":
result, content = domacros(configfile, directive.split(','))
else:
result, content = domacros(configfile)
if not result:
print content
sys.exit(2)
else:
sys.exit(0)
elif action == "sync":
if directive == "":
print "You must specify the authentication file with -d option"
sys.exit(2)
result, content = sync(config, configfile, directive)
if not result:
print content
sys.exit(2)
else:
sys.exit(0)
elif action == "control":
if directive == "":
print "You must specify the authentication file with -d option"
sys.exit(2)
result, content = control(config, directive)
if not result:
print content
sys.exit(2)
else:
sys.exit(0)
elif action == "showconfig":
allowed = ['poller', 'arbiter', 'scheduler', 'broker', 'receiver', 'reactionner', 'module']
dumpconfig(objectype, config, allowed)
elif action == "getobjectnames":
allowed = ['poller', 'arbiter', 'scheduler', 'broker', 'receiver', 'reactionner', 'module']
getobjectnames(objectype, config, allowed)
elif action == "cloneobject":
allowed = ['poller', 'arbiter', 'scheduler', 'broker', 'receiver', 'reactionner', 'module']
if objectype not in allowed:
print "Clone of %s is not supported" % (objectype)
sys.exit(2)
else:
result, confignew = cloneobject(config, objectype, directive, filters)
if not result:
print confignew
sys.exit(2)
else:
result, message = writeconfig(confignew, configfile)
if not result:
print message
sys.exit(2)
print "The objectype %s has been cloned with the new attributes: %s" % (objectype, filter)
elif action == "addobject":
print "Not implemented"
sys.exit(2)
elif action == "delobject":
result, confignew = delobject(config, objectype, filters)
if not result:
print confignew
sys.exit(2)
else:
result, message = writeconfig(confignew, configfile)
print message
if not result:
sys.exit(2)
else:
sys.exit(0)
elif action == "deploy":
""" deploy shinken on remote hosts """
result, content = deploy(configfile)
if not result:
print content
sys.exit(2)
else:
print "Deploy ok"
elif action == "getdirective":
result, content = getdirective(config, objectype, directive, filters)
if not result:
print content
sys.exit(2)
else:
print content
sys.exit(0)
elif action == "getaddresses":
getaddresses(config)
else:
print "Unknown action %s" % (action)
sys.exit(2)
def domacros(configfile, args=[]):
import string
import re
""" load macro """
try:
fd = open(configfile, 'r')
data = map(string.strip, fd.readlines())
fd.close()
except:
return (False, "Error while reading macros file")
authfile = ""
""" remove comments lines """
index_line = 0
cleandata = []
for line in data:
if re.match(r"^#", line) == None:
cleandata.append(line)
index_line += 1
index_line = 0
data = cleandata
""" merge arguments with macro file content """
if len(args) > 0:
index_line = 0
while index_line < len(data):
index_args = 0
tmp = data[index_line]
while index_args < len(args):
tmp = tmp.replace("ARG%d" % (index_args+1), args[index_args])
data[index_line] = tmp
index_args += 1
index_line += 1
allowed = ["arbiter", "scheduler", "poller", "broker", "reactionner", "receiver"]
commands = {
"onerror": r"(?P<action>\w+)",
"setconfigfile": r"(?P<configfile>.*)",
"setauthfile": r"(?P<authfile>.*)",
"clone": r"(?P<object>\w+) set (?P<directives>.*) where (?P<clauses>.*)",
"delete": r"(?P<object>\w+) where (?P<clauses>.*)",
"showconfig": r"(?P<object>\w+)",
"setparam": r"(?P<directive>\w+)=(?P<value>.*) from (?P<object>\w+) where (?P<clauses>.*)",
"delparam": r"(?P<directive>\w+)=(?P<value>.*) from (?P<object>\w+) where (?P<clauses>.*)",
"getdirective": r"(?P<directives>\w+) from (?P<object>\w+) where (?P<clauses>.*)",
"removemodule": r"(?P<module>\w+) from (?P<object>\w+) where (?P<clauses>.*)",
"control": r"(?P<action>\w+)",
"writeconfig": r"",
"sync": r""
}
""" Compile regexp """
ccommands = {}
for cmd, reg in commands.items():
if reg != "":
creg = re.compile(r"^(" + cmd + ") " + reg)
ccommands[cmd] = creg
else:
ccommands[cmd] = False
last = False
indexline = 1
""" macros execution """
for line in data:
maction = "stop"
matched = False
if last != False:
line = line.replace("LAST", last)
else:
line = line.replace("LAST,", "")
for command, regexp in ccommands.items():
if re.match("^" + command, line):
if type(regexp).__name__ == "SRE_Pattern":
result = regexp.match(line)
if result == None:
return (False, "There was an error with %s" % (command))
if command == "setconfigfile":
code, config = loadconfig([result.group('configfile')])
if not code:
return (code, config)
configfile = result.group('configfile')
if command == "setauthfile":
authfile = result.group('authfile')
elif command == "delete":
code, message = delobject(config, result.group('object'), result.group('clauses'))
if not code:
if maction == "stop": return (code, message)
elif command == "control":
code, message = control(authfile, result.group('action'))
if not code:
if maction == "stop": return (code, message)
elif command == "onerror":
if result.group('action') in ('continue', 'stop'):
maction = result.group('action')
else:
return (False, "Unknown action on error %s" % (result.group('action')))
elif command == "clone":
code, message = cloneobject(config, result.group('object'), result.group('directives'), result.group('clauses'))
if not code:
if maction == "stop": return (code, message)
elif command == "showconfig":
dumpconfig(result.group('object'), config, allowed)
elif command == "getdirective":
code, last = getdirective(config, result.group('object'), result.group('directives'), result.group('clauses'))
if not code:
last = False
#return (code,last)
elif command == "setparam":
code, message = setparam(config, result.group('object'), result.group('directive'), result.group('value'), result.group('clauses'))
if not code:
if maction == "stop": return (code, message)
elif command == "delparam":
code, message = delparam(config, result.group('object'), result.group('directive'), result.group('clauses'))
if not code:
if maction == "stop": return (code, message)
elif command == "removemodule":
code, message = removemodule(config, result.group('module'), result.group('object'), result.group('clauses'))
if not code:
if maction == "stop": return (code, message)
else:
if command == "writeconfig":
code, message = writeconfig(config, configfile)
if not code:
if maction == "stop": return (code, message)
elif command == "sync":
code, message = sync(config, configfile, authfile)
if not code:
if maction == "stop": return (code, message)
matched = True
if not matched:
if not line == "":
return (False, "Error Unknown command %s" % (line))
indexline += 1
return (True, "Macro execution success")
def delobject(config, objectype, filters):
    """ remove every objectype object matching all the given filters """
    dfilters = {}
    if len(filters) > 0:
        t = filters.split(',')
        for i in range(len(t)):
            (k, v) = t[i].split('=')
            dfilters[k] = v
    else:
        return (False, "Filter is mandatory")
    if config.has_key(objectype):
        # build the list of kept objects instead of popping from the list
        # being iterated, which would skip elements and corrupt indices
        kept = []
        removed = 0
        for o in config[objectype]:
            filterok = 0
            for (d, v) in dfilters.items():
                if o.has_key(d) and o[d] == v:
                    filterok = filterok + 1
            if filterok == len(dfilters):
                removed = removed + 1
            else:
                kept.append(o)
        config[objectype] = kept
        if removed == 0:
            return (False, "Filter did not return any result")
        else:
            return (True, "%d objects removed" % (removed))
    else:
        return (False, "No %s objects found" % (objectype))
def cloneobject(config, objectype, directive, filter):
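    """ duplicate the object matching filter, overriding the directives given as comma separated d=v pairs """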
directives = {}
filters = {}
newobj = {}
# extract directives to be modified
for pair in directive.split(','):
(d, v) = pair.split('=')
directives[d] = v
# extract filters
for pair in filter.split(','):
(d, v) = pair.split('=')
filters[d] = v
    if not config.has_key(objectype):
        return (False, "No %s objects found" % (objectype))
    # find the matching object; reset the match counter for each candidate
    for o in config[objectype]:
        filterok = 0
        for (d, v) in filters.items():
            if o.has_key(d) and o[d] == v:
                filterok = filterok + 1
        if filterok == len(filters):
            newobj = copy.deepcopy(o)
if len(newobj) == 0:
return (False, "I was unable to find the object to be cloned")
# create the new object
for (d, v) in directives.items():
newobj[d] = v
    # verify the uniqueness of the object name
    for o in config[objectype]:
        if o.has_key(objectype + "_name") and o[objectype + "_name"] == newobj[objectype + "_name"]:
            return (False, "An object of type %s with the name %s already exists" % (objectype, newobj[objectype + "_name"]))
    config[objectype].append(newobj)
return (True, config)
def getaddresses(config):
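    """ print every non-local daemon address found in the configuration """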
allowed = ['poller', 'arbiter', 'scheduler', 'broker', 'receiver', 'reactionner']
addresses = []
for (ot, oc) in config.items():
if ot in allowed:
for o in oc:
for (d, v) in o.items():
if d == "address" and v != "localhost" and v != "127.0.01":
if not v in addresses:
addresses.append(v)
print v
def showconfig(config, objectype, filters=""):
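    """ print the configuration of every objectype object matching the optional filters """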
dfilters = {}
if len(filters) > 0:
t = filters.split(',')
for i in range(len(t)):
(k, v) = t[i].split('=')
dfilters[k] = v
if config.has_key(objectype):
max = len(config[objectype])
filterok = 0
for i in range(max):
filterok = 0
for (d, v) in dfilters.items():
filterok = filterok + 1
if config[objectype][i].has_key(d):
if config[objectype][i][d] != v:
filterok = filterok - 1
else:
filterok = filterok - 1
if filterok == len(dfilters):
print "%s[%d]" % (objectype, i)
for (d, v) in config[objectype][i].items():
print " %s = %s" % (d, v)
else:
print "Unknown object type %s" % (o)
return config
def getsatellitesaddresses(config):
import netifaces
import re
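    """ return the list of satellite addresses defined in the configuration, excluding local ones """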
allowed = ['poller', 'arbiter', 'scheduler', 'broker', 'receiver', 'reactionner']
addresses = []
local = []
""" detect local adresses """
for ifname in netifaces.interfaces():
for t in netifaces.ifaddresses(ifname).items():
for e in t[1]:
if e.has_key('addr'):
if re.match(r"[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}", e['addr']):
if e['addr'] != "127.0.0.1":
local.append(e['addr'])
""" get all adresses defined in configuration """
for (ot, oc) in config.items():
if ot in allowed:
for o in oc:
for (d, v) in o.items():
if d == "address" and v != "localhost" and v != "127.0.01":
if not v in local and not v in addresses:
addresses.append(v)
return (True, addresses)
def getauthdata(authfile):
import re
import string
""" load authentication data """
auth = {}
creg = re.compile(r"^(?P<address>.*):(?P<login>.*):(?P<password>.*)")
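    # expected file format, one entry per line: address:login:password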
try:
fd = open(authfile, 'r')
data = map(string.strip, fd.readlines())
fd.close()
for line in data:
if line != "":
result = creg.match(line)
if result == None:
return "There was an error in the authentication file at line: %s" % (line)
auth[result.group("address")] = {"login": result.group("login"), "password": result.group("password")}
return (True, auth)
except:
return (False, "Error while loading authentication data")
def sync(config, configfile, authfile):
import re
import paramiko
import string
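    """ push the configuration file to every satellite over SFTP """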
code, addresses = getsatellitesaddresses(config)
code, auth = getauthdata(authfile)
if not code:
return (False, auth)
""" now push configuration to each satellite """
try:
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
for address in addresses:
print "Synch with: %s" % (address)
if not auth.has_key(address):
return (False, "Auth informations for %s does not exist in authfile" % (address))
else:
ssh.connect(address, username=auth[address]["login"], password=auth[address]["password"])
ftp = ssh.open_sftp()
ftp.put(configfile, configfile)
ftp.close()
ssh.close()
except:
return (False, "There was an error trying to push configuration to %s" % (address))
return (True, addresses)
def deploy(authfile):
import paramiko
import tarfile
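    """ archive the local shinken tree and install it on every host listed in authfile """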
code, auths = getauthdata(authfile)
if not code:
return (False, auths)
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
""" current user """
user = os.getlogin()
""" define home """
if user == "root":
home = "/root"
else:
home = "/home/%s" % (user)
""" compress shinken in tar gz format """
print "Make archive"
source = os.path.abspath(os.getcwd() + '/../../../../')
tar = tarfile.open('/tmp/shinken.tar.gz', 'w:gz')
tar.add(source)
tar.close()
""" upload shinken archive to remote server """
for address, auth in auths.items():
print "Upload archive on %s"
ssh.connect(address, username=auth["login"], password=auth["password"])
ftp = ssh.open_sftp()
ftp.put('/tmp/shinken.tar.gz', os.path.abspath('/tmp/shinken.tar.gz'))
ftp.close()
print "Extract archive"
stdin, stdout, stderr = ssh.exec_command('cd /tmp && tar zxvf shinken.tar.gz && rm -Rf %s/shinken && mv %s/shinken %s/' % (home, user, home))
out = stdout.read()
err = stderr.read()
print "Launch installation"
stdin, stdout, stderr = ssh.exec_command('cd %s/shinken/contrib/alternative-installation/shinken-install/ && ./shinken.sh -d && ./shinken.sh -i' % (home))
out = stdout.read()
err = stderr.read()
print out
print err
ssh.close()
return (True, "OK")
def control(authfile, action):
    import socket
    import paramiko
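    """ run a start/stop/restart of the shinken service on every satellite listed in authfile """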
code, auth = getauthdata(authfile)
if not code:
return (False, auth)
""" which command for an action """
commands = {"stop": "service shinken stop", "start": "service shinken start", "restart": "service shinken restart"}
if not commands.has_key(action):
return (False, "Unknown action command")
""" now apply control action to all elements """
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
for address, authdata in auth.items():
try:
ssh.connect(address, username=authdata["login"], password=authdata["password"])
ssh.exec_command("service shinken %s" % (action))
ssh.close()
except socket.error:
return (False, "socket error (%s)" % (address))
except paramiko.BadAuthenticationType:
return (False, "BadAuthenticationType (%s)" % (address))
except paramiko.BadHostKeyException:
return (False, "BadHostKeyException (%s)" % (address))
        except paramiko.ChannelException:
            return (False, "ChannelException (%s)" % (address))
except paramiko.PasswordRequiredException:
return (False, "PasswordRequiredException (%s)" % (address))
except paramiko.SSHException:
return (False, "SSHException (%s)" % (address))
except paramiko.AuthenticationException:
return (False, "AuthenticationException (%s)" % (address))
return (True, "Action completed")
def writeconfig(config, configfile):
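    """ write the configuration back to configfile, backing up the previous file """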
bck = "%s.%d" % (configfile, time.time())
os.rename(configfile, bck)
fd = open(configfile, 'w')
objects = ["arbiter", "poller", "scheduler", "broker", "reactionner", "receiver", "module", "realm"]
for (t, s) in config.items():
if t in objects:
for o in range(len(config[t])):
buff = "define %s {\n" % (t)
fd.write(buff)
for (d, v) in config[t][o].items():
if d != "imported_from":
buff = " %s %s\n" % (d, v)
fd.write(buff)
buff = "}\n\n"
fd.write(buff)
fd.close()
return (True, "Config saved")
def addobject(config, objectype, directive):
# allowed objects types to be added
allowed = ['poller', 'arbiter', 'scheduler', 'broker', 'receiver', 'reactionner']
    # verify if the object type is allowed
if not objectype in allowed:
print "Invalid objectype"
sys.exit(2)
# get a dict of directives
try:
directives = {}
for pair in directive.split(','):
(d, v) = pair.split('=')
directives[d] = v
except:
print "An unrecoverable error occured while checking directives"
sys.exit(2)
# at least the directive objectype_name should exist
if not directives.has_key(objectype + "_name"):
print "The object definition should have at least an object name directive"
sys.exit(2)
    # check if an object with the same name and type already exists
    if config.has_key(objectype):
        # an object of the same type already exists so check that it has a different name
        name = directives[objectype + "_name"]
        for o in config[objectype]:
            if o.has_key(objectype + "_name") and o[objectype + "_name"] == name:
                # ouch, the same object is already defined
                print "%s %s already exists" % (objectype, name)
                sys.exit(2)
    else:
        # first object of this type: create the list
        config[objectype] = []
    # so we can create the new object
    newobject = {}
    for (d, v) in directives.items():
        if d != "imported_from":
            newobject[d] = v
    config[objectype].append(newobject)
return config
def splitCount(s, count):
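    """ split string s into chunks of at most count characters """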
return [s[i:i + count] for i in range(0, len(s), count)]
def dumpconfig(type, config, allowed):
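    """ pretty-print every object of the requested type as an ASCII table """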
for (k, oc) in config.items():
if k in allowed:
if type != "" and type == k:
display = 1
else:
display = 0
if display == 1:
print "".center(100, "=")
print "| " + k.center(97, " ") + "|"
print "".center(100, "=")
for o in oc:
print "+".ljust(99, "-") + "+"
for (d, v) in o.items():
if d != "imported_from":
if len(v) > 48:
vp = splitCount(v, 47)
col1 = "| " + d.ljust(47, " ") + "| "
col2 = vp[0].ljust(48, " ") + "|"
print col1 + col2
vp.pop(0)
for vpe in vp:
col1 = "| " + " ".ljust(47, " ") + "| "
col2 = vpe.ljust(48, " ") + "|"
print col1 + col2
else:
col1 = "| " + d.ljust(47, " ") + "| "
col2 = v.ljust(48, " ") + "|"
print col1 + col2
print "+".ljust(99, "-") + "+"
def getobjectnames(objectype, config, allowed):
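    """ print and return the comma separated names of every objectype object """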
names = []
for (k, oc) in config.items():
if k in allowed and k == objectype:
for o in oc:
for (d, v) in o.items():
if objectype + "_name" == d:
names.append(v)
print ','.join(names)
return (True, ','.join(names))
def getdirective(config, objectype, directive, filters):
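    """ return the value of a directive from the first objectype object matching the filters """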
    try:
        dfilters = {}
        if len(filters) > 0:
            t = filters.split(',')
            for i in range(len(t)):
                (k, v) = t[i].split('=')
                dfilters[k] = v
        if config.has_key(objectype):
            # inspect every object of this type, not only the first one
            for o in config[objectype]:
                filterok = 0
                for (d, v) in dfilters.items():
                    if o.has_key(d) and o[d] == v:
                        filterok = filterok + 1
                if filterok == len(dfilters):
                    if not o.has_key(directive):
                        return (False, "Directive not found %s for object %s" % (directive, objectype))
                    else:
                        return (True, o[directive])
            return (False, "Filters not matched")
        else:
            return (False, "%s not found" % (objectype))
    except:
        return (False, "Unknown error in getdirective")
def setparam(config, objectype, directive, value, filters):
import re
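    """ set directive=value on every objectype object matching the filters """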
dfilters = {}
if len(filters) > 0:
t = filters.split(',')
for i in range(len(t)):
(k, v) = t[i].split('=')
dfilters[k] = v
    if config.has_key(objectype):
        message = "Filters did not match any object"
        for i in range(len(config[objectype])):
            filterok = 0
            for (d, v) in dfilters.items():
                if config[objectype][i].has_key(d) and config[objectype][i][d] == v:
                    filterok = filterok + 1
            if filterok == len(dfilters):
                """ if the directive does not exist create it! """
                if not config[objectype][i].has_key(directive):
                    config[objectype][i][directive] = value
                    message = "Added configuration %s[%d] %s=%s" % (objectype, i, directive, value)
                else:
                    """ check if the directive value already exists """
                    if re.search(value, config[objectype][i][directive]) != None:
                        message = "Directive value already exists"
                    else:
                        config[objectype][i][directive] = value
                        message = "Updated configuration of %s[%d] %s=%s" % (objectype, i, directive, value)
                print message
        return (True, message)
    else:
        return (False, "Unknown object type %s" % (objectype))
def removemodule(config, module, objectype, filters):
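    """ remove a module from the modules list of every objectype object matching the filters """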
dfilters = {}
if len(filters) > 0:
t = filters.split(',')
for i in range(len(t)):
(k, v) = t[i].split('=')
dfilters[k.strip()] = v.strip()
    # check whether objectype is defined or not
    if config.has_key(objectype):
        removed = False
        # apply each filter (directive, value) to every object of this type
        for (directive, value) in dfilters.items():
            for o in config[objectype]:
                if o.has_key(directive) and o[directive] == value and o.has_key("modules"):
                    modules = []
                    for m in o["modules"].split(','):
                        modules.append(m.strip())
                    if module in modules:
                        while module in modules:
                            modules.remove(module)
                        o["modules"] = ",".join(modules)
                        removed = True
        if removed:
            message = "removed module %s from objects of type %s" % (module, objectype)
        else:
            message = "No module %s found in objects of type %s" % (module, objectype)
        print message
        return (True, message)
    else:
        message = "no objectype %s was found in configuration" % (objectype)
        print message
        return (True, message)
def delparam(config, objectype, directive, filters):
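    """ remove a directive from every objectype object matching the filters """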
dfilters = {}
if len(filters) > 0:
t = filters.split(',')
for i in range(len(t)):
(k, v) = t[i].split('=')
dfilters[k] = v
    if config.has_key(objectype):
        message = "Nothing to remove"
        for i in range(len(config[objectype])):
            filterok = 0
            for (d, v) in dfilters.items():
                if config[objectype][i].has_key(d) and config[objectype][i][d] == v:
                    filterok = filterok + 1
            if filterok == len(dfilters):
                """ if the directive exists remove it! """
                if config[objectype][i].has_key(directive):
                    config[objectype][i].pop(directive)
                    message = "Removed directive %s from %s[%d]" % (directive, objectype, i)
        return (True, message)
    else:
        return (False, "Unknown object type %s" % (objectype))
def loadconfig(configfile):
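    """ read the shinken configuration file and return its parsed content """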
try:
c = Config()
c.read_config_silent = 1
r = c.read_config(configfile)
b = c.read_config_buf(r)
return (True, b)
except:
return (False, "There was an error reading the configuration file")
if __name__ == "__main__":
main()