repo_name (string, 5-100 chars) | ref (string, 12-67 chars) | path (string, 4-244 chars) | copies (string, 1-8 chars) | content (string, 0-1.05M chars, nullable)
|---|---|---|---|---|
sodafree/backend
|
refs/heads/master
|
build/lib.linux-i686-2.7/django/contrib/gis/admin/options.py
|
83
|
from django.contrib.admin import ModelAdmin
from django.contrib.gis.admin.widgets import OpenLayersWidget
from django.contrib.gis.gdal import OGRGeomType
from django.contrib.gis.db import models
class GeoModelAdmin(ModelAdmin):
"""
The administration options class for Geographic models. Map settings
may be overloaded from their defaults to create custom maps.
"""
# The default map settings that may be overloaded -- still subject
# to API changes.
default_lon = 0
default_lat = 0
default_zoom = 4
display_wkt = False
display_srid = False
extra_js = []
num_zoom = 18
max_zoom = False
min_zoom = False
units = False
max_resolution = False
max_extent = False
modifiable = True
mouse_position = True
scale_text = True
layerswitcher = True
scrollable = True
map_width = 600
map_height = 400
map_srid = 4326
map_template = 'gis/admin/openlayers.html'
openlayers_url = 'http://openlayers.org/api/2.11/OpenLayers.js'
point_zoom = num_zoom - 6
wms_url = 'http://vmap0.tiles.osgeo.org/wms/vmap0'
wms_layer = 'basic'
wms_name = 'OpenLayers WMS'
debug = False
widget = OpenLayersWidget
@property
def media(self):
"Injects OpenLayers JavaScript into the admin."
media = super(GeoModelAdmin, self).media
media.add_js([self.openlayers_url])
media.add_js(self.extra_js)
return media
def formfield_for_dbfield(self, db_field, **kwargs):
"""
Overloaded from ModelAdmin so that an OpenLayersWidget is used
for viewing/editing GeometryFields.
"""
if isinstance(db_field, models.GeometryField):
request = kwargs.pop('request', None)
# Use the map widget built for this geometry field.
kwargs['widget'] = self.get_map_widget(db_field)
return db_field.formfield(**kwargs)
else:
return super(GeoModelAdmin, self).formfield_for_dbfield(db_field, **kwargs)
def get_map_widget(self, db_field):
"""
Returns a subclass of the OpenLayersWidget (or whatever was specified
in the `widget` attribute) using the settings from the attributes set
in this class.
"""
is_collection = db_field.geom_type in ('MULTIPOINT', 'MULTILINESTRING', 'MULTIPOLYGON', 'GEOMETRYCOLLECTION')
if is_collection:
if db_field.geom_type == 'GEOMETRYCOLLECTION': collection_type = 'Any'
else: collection_type = OGRGeomType(db_field.geom_type.replace('MULTI', ''))
else:
collection_type = 'None'
class OLMap(self.widget):
template = self.map_template
geom_type = db_field.geom_type
params = {'default_lon' : self.default_lon,
'default_lat' : self.default_lat,
'default_zoom' : self.default_zoom,
'display_wkt' : self.debug or self.display_wkt,
'geom_type' : OGRGeomType(db_field.geom_type),
'field_name' : db_field.name,
'is_collection' : is_collection,
'scrollable' : self.scrollable,
'layerswitcher' : self.layerswitcher,
'collection_type' : collection_type,
'is_linestring' : db_field.geom_type in ('LINESTRING', 'MULTILINESTRING'),
'is_polygon' : db_field.geom_type in ('POLYGON', 'MULTIPOLYGON'),
'is_point' : db_field.geom_type in ('POINT', 'MULTIPOINT'),
'num_zoom' : self.num_zoom,
'max_zoom' : self.max_zoom,
'min_zoom' : self.min_zoom,
'units' : self.units, # likely should get from object
'max_resolution' : self.max_resolution,
'max_extent' : self.max_extent,
'modifiable' : self.modifiable,
'mouse_position' : self.mouse_position,
'scale_text' : self.scale_text,
'map_width' : self.map_width,
'map_height' : self.map_height,
'point_zoom' : self.point_zoom,
'srid' : self.map_srid,
'display_srid' : self.display_srid,
'wms_url' : self.wms_url,
'wms_layer' : self.wms_layer,
'wms_name' : self.wms_name,
'debug' : self.debug,
}
return OLMap
from django.contrib.gis import gdal
if gdal.HAS_GDAL:
# Use the official spherical mercator projection SRID on versions
# of GDAL that support it; otherwise, fallback to 900913.
if gdal.GDAL_VERSION >= (1, 7):
spherical_mercator_srid = 3857
else:
spherical_mercator_srid = 900913
class OSMGeoAdmin(GeoModelAdmin):
map_template = 'gis/admin/osm.html'
num_zoom = 20
map_srid = spherical_mercator_srid
max_extent = '-20037508,-20037508,20037508,20037508'
max_resolution = '156543.0339'
point_zoom = num_zoom - 6
units = 'm'
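# A minimal, hypothetical sketch (not part of this module) of overloading the
# map defaults above from a project's own admin.py, as the GeoModelAdmin
# docstring describes. The app, model, and coordinates are assumptions.
#
#   from django.contrib.gis import admin
#   from myapp.models import City  # hypothetical model with a geometry field
#
#   class CityAdmin(admin.OSMGeoAdmin):
#       default_lon = 260000       # assumed spherical-mercator centre
#       default_lat = 6250000
#       default_zoom = 10
#       map_width = 800
#       map_height = 500
#
#   admin.site.register(City, CityAdmin)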
|
qwattash/mpm
|
refs/heads/master
|
conf.py
|
1
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Minecraft Package Manager documentation build configuration file, created by
# sphinx-quickstart on Wed May 13 23:19:08 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.ifconfig',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['docs/_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Minecraft Package Manager'
copyright = '2015, Alfredo Mazzinghi (qwattash)'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['docs', 'venv']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['docs/_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'MinecraftPackageManagerdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'MinecraftPackageManager.tex', 'Minecraft Package Manager Documentation',
'Alfredo Mazzinghi (qwattash)', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'minecraftpackagemanager', 'Minecraft Package Manager Documentation',
['Alfredo Mazzinghi (qwattash)'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'MinecraftPackageManager', 'Minecraft Package Manager Documentation',
'Alfredo Mazzinghi (qwattash)', 'MinecraftPackageManager', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
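# A hedged alternative (assumes a Sphinx release that supports named mapping
# keys) expressing the same Python-stdlib mapping:
#intersphinx_mapping = {'python': ('https://docs.python.org/3', None)}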
|
lakshayg/tensorflow
|
refs/heads/master
|
tensorflow/contrib/distributions/python/kernel_tests/bijectors/reshape_test.py
|
9
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Reshape Bijector."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.distributions.python.ops.bijectors.reshape import Reshape
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops.distributions.bijector_test_util import assert_bijective_and_finite
from tensorflow.python.platform import test
class _ReshapeBijectorTest(object):
"""Base class for testing the reshape transformation.
Methods defined in this class call a method self.build_shapes() that
is implemented by subclasses defined below, returning respectively
ReshapeBijectorTestStatic: static shapes,
ReshapeBijectorTestDynamic: shape placeholders of known ndims, and
ReshapeBijectorTestDynamicNdims: shape placeholders of unspecified ndims,
so that each test in this base class is automatically run over all
three cases. The subclasses also implement assertRaisesError to test
for either Python exceptions (in the case of static shapes) or
TensorFlow op errors (dynamic shapes).
"""
def setUp(self):
self._rng = np.random.RandomState(42)
def testBijector(self):
"""Do a basic sanity check of forward, inverse, jacobian."""
expected_x = np.random.randn(4, 3, 2)
expected_y = np.reshape(expected_x, [4, 6])
with self.test_session() as sess:
shape_in, shape_out, feed_dict = self.build_shapes([3, 2], [6,])
bijector = Reshape(
event_shape_out=shape_out,
event_shape_in=shape_in,
validate_args=True)
(x_,
y_,
fldj_,
ildj_) = sess.run((
bijector.inverse(expected_y),
bijector.forward(expected_x),
bijector.forward_log_det_jacobian(expected_x),
bijector.inverse_log_det_jacobian(expected_y),
), feed_dict=feed_dict)
self.assertEqual("reshape", bijector.name)
self.assertAllClose(expected_y, y_, rtol=1e-6, atol=0)
self.assertAllClose(expected_x, x_, rtol=1e-6, atol=0)
self.assertAllClose(0., fldj_, rtol=1e-6, atol=0)
self.assertAllClose(0., ildj_, rtol=1e-6, atol=0)
def testEventShapeTensor(self):
"""Test event_shape_tensor methods when even ndims may be dynamic."""
shape_in_static = [2, 3]
shape_out_static = [6,]
shape_in, shape_out, feed_dict = self.build_shapes(shape_in_static,
shape_out_static)
bijector = Reshape(
event_shape_out=shape_out,
event_shape_in=shape_in, validate_args=True)
# using the _tensor methods, we should always get a fully-specified
# result since these are evaluated at graph runtime.
with self.test_session() as sess:
(shape_out_,
shape_in_) = sess.run((
bijector.forward_event_shape_tensor(shape_in),
bijector.inverse_event_shape_tensor(shape_out),
), feed_dict=feed_dict)
self.assertAllEqual(shape_out_static, shape_out_)
self.assertAllEqual(shape_in_static, shape_in_)
def testScalarReshape(self):
"""Test reshaping to and from a scalar shape ()."""
expected_x = np.random.randn(4, 3, 1)
expected_y = np.reshape(expected_x, [4, 3])
expected_x_scalar = np.random.randn(1,)
expected_y_scalar = expected_x_scalar[0]
shape_in, shape_out, feed_dict = self.build_shapes([], [1,])
with self.test_session() as sess:
bijector = Reshape(
event_shape_out=shape_in,
event_shape_in=shape_out, validate_args=True)
(x_,
y_,
x_scalar_,
y_scalar_
) = sess.run((
bijector.inverse(expected_y),
bijector.forward(expected_x),
bijector.inverse(expected_y_scalar),
bijector.forward(expected_x_scalar),
), feed_dict=feed_dict)
self.assertAllClose(expected_y, y_, rtol=1e-6, atol=0)
self.assertAllClose(expected_x, x_, rtol=1e-6, atol=0)
self.assertAllClose(expected_y_scalar, y_scalar_, rtol=1e-6, atol=0)
self.assertAllClose(expected_x_scalar, x_scalar_, rtol=1e-6, atol=0)
def testMultipleUnspecifiedDimensionsOpError(self):
with self.test_session() as sess:
shape_in, shape_out, feed_dict = self.build_shapes([2, 3], [4, -1, -1,])
bijector = Reshape(
event_shape_out=shape_out,
event_shape_in=shape_in,
validate_args=True)
with self.assertRaisesError(
"elements must have at most one `-1`."):
sess.run(bijector.forward_event_shape_tensor(shape_in),
feed_dict=feed_dict)
def testInvalidDimensionsOpError(self):
with self.test_session() as sess:
shape_in, shape_out, feed_dict = self.build_shapes([2, 3], [1, 2, -2,])
bijector = Reshape(
event_shape_out=shape_out,
event_shape_in=shape_in,
validate_args=True)
with self.assertRaisesError(
"elements must be either positive integers or `-1`."):
sess.run(bijector.forward_event_shape_tensor(shape_in),
feed_dict=feed_dict)
def testValidButNonMatchingInputOpError(self):
x = np.random.randn(4, 3, 2)
with self.test_session() as sess:
shape_in, shape_out, feed_dict = self.build_shapes([2, 3], [1, 6, 1,])
bijector = Reshape(
event_shape_out=shape_out,
event_shape_in=shape_in,
validate_args=True)
# Here we pass in a tensor (x) whose shape is compatible with
# the output shape, so tf.reshape will throw no error, but whose
# shape doesn't match the expected input shape.
with self.assertRaisesError(
"Input `event_shape` does not match `event_shape_in`."):
sess.run(bijector.forward(x),
feed_dict=feed_dict)
def testValidButNonMatchingInputPartiallySpecifiedOpError(self):
x = np.random.randn(4, 3, 2)
with self.test_session() as sess:
shape_in, shape_out, feed_dict = self.build_shapes([2, -1], [1, 6, 1,])
bijector = Reshape(
event_shape_out=shape_out,
event_shape_in=shape_in,
validate_args=True)
with self.assertRaisesError(
"Input `event_shape` does not match `event_shape_in`."):
sess.run(bijector.forward(x),
feed_dict=feed_dict)
def testInputOutputMismatchOpError(self):
x1 = np.random.randn(4, 2, 3)
x2 = np.random.randn(4, 1, 1, 5)
with self.test_session() as sess:
shape_in, shape_out, fd_mismatched = self.build_shapes([2, 3],
[1, 1, 5])
bijector = Reshape(
event_shape_out=shape_out,
event_shape_in=shape_in,
validate_args=True)
# test that *all* methods check basic assertions
with self.assertRaisesError(
"Input to reshape is a tensor with"):
sess.run(bijector.forward(x1), feed_dict=fd_mismatched)
with self.assertRaisesError(
"Input to reshape is a tensor with"):
sess.run(bijector.inverse(x2), feed_dict=fd_mismatched)
def testOneShapePartiallySpecified(self):
expected_x = np.random.randn(4, 6)
expected_y = np.reshape(expected_x, [4, 2, 3])
with self.test_session() as sess:
# one of input/output shapes is partially specified
shape_in, shape_out, feed_dict = self.build_shapes([-1,], [2, 3])
bijector = Reshape(
event_shape_out=shape_out,
event_shape_in=shape_in,
validate_args=True)
(x_,
y_,
) = sess.run((
bijector.inverse(expected_y),
bijector.forward(expected_x),
), feed_dict=feed_dict)
self.assertAllClose(expected_y, y_, rtol=1e-6, atol=0)
self.assertAllClose(expected_x, x_, rtol=1e-6, atol=0)
def testBothShapesPartiallySpecified(self):
expected_x = np.random.randn(4, 2, 3)
expected_y = np.reshape(expected_x, [4, 3, 2])
with self.test_session() as sess:
shape_in, shape_out, feed_dict = self.build_shapes([-1, 3], [-1, 2])
bijector = Reshape(
event_shape_out=shape_out,
event_shape_in=shape_in,
validate_args=True)
(x_,
y_,
) = sess.run((
bijector.inverse(expected_y),
bijector.forward(expected_x),
), feed_dict=feed_dict)
self.assertAllClose(expected_y, y_, rtol=1e-6, atol=0)
self.assertAllClose(expected_x, x_, rtol=1e-6, atol=0)
def testDefaultVectorShape(self):
expected_x = np.random.randn(4, 4)
expected_y = np.reshape(expected_x, [4, 2, 2])
with self.test_session() as sess:
_, shape_out, feed_dict = self.build_shapes([-1,], [-1, 2])
bijector = Reshape(shape_out,
validate_args=True)
(x_,
y_,
) = sess.run((
bijector.inverse(expected_y),
bijector.forward(expected_x),
), feed_dict=feed_dict)
self.assertAllClose(expected_y, y_, rtol=1e-6, atol=0)
self.assertAllClose(expected_x, x_, rtol=1e-6, atol=0)
def build_shapes(self, *args, **kwargs):
raise NotImplementedError("Subclass failed to implement `build_shapes`.")
class ReshapeBijectorTestStatic(test.TestCase, _ReshapeBijectorTest):
def build_shapes(self, shape_in, shape_out):
shape_in_static = shape_in
shape_out_static = shape_out
feed_dict = {}
return shape_in_static, shape_out_static, feed_dict
def assertRaisesError(self, msg):
return self.assertRaisesRegexp(Exception, msg)
def testEventShape(self):
shape_in_static = tensor_shape.TensorShape([2, 3])
shape_out_static = tensor_shape.TensorShape([6,])
bijector = Reshape(
event_shape_out=shape_out_static,
event_shape_in=shape_in_static, validate_args=True)
# test that forward_ and inverse_event_shape do sensible things
# when shapes are statically known.
self.assertEqual(
bijector.forward_event_shape(shape_in_static),
shape_out_static)
self.assertEqual(
bijector.inverse_event_shape(shape_out_static),
shape_in_static)
def testBijectiveAndFinite(self):
x = np.random.randn(4, 2, 3)
y = np.reshape(x, [4, 1, 2, 3])
with self.test_session():
bijector = Reshape(
event_shape_in=[2, 3],
event_shape_out=[1, 2, 3],
validate_args=True)
assert_bijective_and_finite(bijector, x, y, rtol=1e-6, atol=0)
class ReshapeBijectorTestDynamic(test.TestCase, _ReshapeBijectorTest):
def build_shapes(self, shape_in, shape_out):
shape_in_ph = array_ops.placeholder(shape=(len(shape_in),),
dtype=dtypes.int32)
shape_out_ph = array_ops.placeholder(shape=(len(shape_out),),
dtype=dtypes.int32)
feed_dict = {shape_in_ph: shape_in, shape_out_ph: shape_out}
return shape_in_ph, shape_out_ph, feed_dict
def assertRaisesError(self, msg):
return self.assertRaisesOpError(msg)
class ReshapeBijectorTestDynamicNdims(test.TestCase, _ReshapeBijectorTest):
def build_shapes(self, shape_in, shape_out):
shape_in_ph = array_ops.placeholder(shape=None, dtype=dtypes.int32)
shape_out_ph = array_ops.placeholder(shape=None, dtype=dtypes.int32)
feed_dict = {shape_in_ph: shape_in, shape_out_ph: shape_out}
return shape_in_ph, shape_out_ph, feed_dict
def assertRaisesError(self, msg):
return self.assertRaisesOpError(msg)
if __name__ == "__main__":
test.main()
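# A minimal usage sketch (not part of the test suite; assumes a TF 1.x graph
# session and the Reshape import above) restating the behaviour testBijector
# exercises: forward() reshapes the event dims [3, 2] -> [6], inverse() undoes
# it, and both log-det-Jacobians are zero.
#
#   import tensorflow as tf
#   x = np.random.randn(4, 3, 2)
#   bijector = Reshape(event_shape_in=[3, 2], event_shape_out=[6],
#                      validate_args=True)
#   with tf.Session() as sess:
#       y = sess.run(bijector.forward(x))        # shape (4, 6)
#       x_back = sess.run(bijector.inverse(y))   # shape (4, 3, 2)
#       fldj = sess.run(bijector.forward_log_det_jacobian(x))  # 0.0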
|
konono/equlipse
|
refs/heads/master
|
openstack-install/charm/trusty/charm-keystone/tests/charmhelpers/contrib/openstack/__init__.py
|
43
|
# Copyright 2014-2015 Canonical Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
onaio/django-guardian
|
refs/heads/devel
|
guardian/migrations/0001_initial.py
|
15
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0001_initial'),
('auth', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='GroupObjectPermission',
fields=[
('id', models.AutoField(primary_key=True, serialize=False, auto_created=True, verbose_name='ID')),
('object_pk', models.CharField(max_length=255, verbose_name='object ID')),
('content_type', models.ForeignKey(to='contenttypes.ContentType')),
('group', models.ForeignKey(to='auth.Group')),
('permission', models.ForeignKey(to='auth.Permission')),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='UserObjectPermission',
fields=[
('id', models.AutoField(primary_key=True, serialize=False, auto_created=True, verbose_name='ID')),
('object_pk', models.CharField(max_length=255, verbose_name='object ID')),
('content_type', models.ForeignKey(to='contenttypes.ContentType')),
('permission', models.ForeignKey(to='auth.Permission')),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
options={
},
bases=(models.Model,),
),
migrations.AlterUniqueTogether(
name='userobjectpermission',
unique_together=set([('user', 'permission', 'object_pk')]),
),
migrations.AlterUniqueTogether(
name='groupobjectpermission',
unique_together=set([('group', 'permission', 'object_pk')]),
),
]
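# A minimal usage sketch (not part of this migration; assumes django-guardian's
# shortcuts API and a hypothetical Task model) of the per-object permissions
# these two tables back:
#
#   from guardian.shortcuts import assign_perm, get_objects_for_user
#   assign_perm('change_task', some_user, task)    # row in UserObjectPermission
#   assign_perm('change_task', some_group, task)   # row in GroupObjectPermission
#   get_objects_for_user(some_user, 'app.change_task')  # filtered by object_pk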
|
spbguru/repo1
|
refs/heads/master
|
tests/integration/py2/nupic/opf/opf_checkpoint_test/experiments/backwards_compatibility/a/description.py
|
14
|
# ----------------------------------------------------------------------
# Copyright (C) 2011 Numenta Inc. All rights reserved.
#
# The information and source code contained herein is the
# exclusive property of Numenta Inc. No part of this software
# may be used, reproduced, stored or distributed in any form,
# without explicit written authorization from Numenta Inc.
# ----------------------------------------------------------------------
## This file defines parameters for a prediction experiment.
###############################################################################
# IMPORTANT!!!
# This params file is dynamically generated by the RunExperimentPermutations
# script. Any changes made manually will be over-written the next time
# RunExperimentPermutations is run!!!
###############################################################################
from nupic.frameworks.opf.expdescriptionhelpers import importBaseDescription
# the sub-experiment configuration
config ={
'modelParams' : {'sensorParams': {'encoders': {u'c0_timeOfDay': None, u'c0_dayOfWeek': None, u'c1': {'name': 'c1', 'clipInput': True, 'n': 275, 'fieldname': 'c1', 'w': 21, 'type': 'AdaptiveScalarEncoder'}, u'c0_weekend': None}}, 'inferenceType': 'NontemporalMultiStep', 'spParams': {'synPermInactiveDec': 0.052500000000000005}, 'tpParams': {'minThreshold': 11, 'activationThreshold': 14, 'pamLength': 3}, 'clParams': {'alpha': 0.050050000000000004}},
'firstRecord': 0,
'lastRecord': 10,
}
mod = importBaseDescription('../base.py', config)
locals().update(mod.__dict__)
|
talishte/ctigre
|
refs/heads/master
|
env/lib/python2.7/site-packages/django/contrib/sessions/backends/signed_cookies.py
|
288
|
from django.conf import settings
from django.core import signing
from django.contrib.sessions.backends.base import SessionBase
class SessionStore(SessionBase):
def load(self):
"""
We load the data from the key itself instead of fetching from
some external data store. Opposite of _get_session_key(),
raises BadSignature if signature fails.
"""
try:
return signing.loads(self.session_key,
serializer=self.serializer,
# This doesn't handle non-default expiry dates, see #19201
max_age=settings.SESSION_COOKIE_AGE,
salt='django.contrib.sessions.backends.signed_cookies')
except (signing.BadSignature, ValueError):
self.create()
return {}
def create(self):
"""
To create a new key, we simply make sure that the modified flag is set
so that the cookie is set on the client for the current request.
"""
self.modified = True
def save(self, must_create=False):
"""
To save, we get the session key as a securely signed string and then
set the modified flag so that the cookie is set on the client for the
current request.
"""
self._session_key = self._get_session_key()
self.modified = True
def exists(self, session_key=None):
"""
This method makes sense when you're talking to a shared resource, but
it doesn't matter when you're storing the information in the client's
cookie.
"""
return False
def delete(self, session_key=None):
"""
To delete, we clear the session key and the underlying data structure
and set the modified flag so that the cookie is set on the client for
the current request.
"""
self._session_key = ''
self._session_cache = {}
self.modified = True
def cycle_key(self):
"""
Keeps the same data but with a new key. To do this, we just have to
call ``save()`` and it will automatically save a cookie with a new key
at the end of the request.
"""
self.save()
def _get_session_key(self):
"""
Most session backends don't need to override this method, but we do,
because instead of generating a random string, we want to actually
generate a secure url-safe Base64-encoded string of data as our
session key.
"""
session_cache = getattr(self, '_session_cache', {})
return signing.dumps(session_cache, compress=True,
salt='django.contrib.sessions.backends.signed_cookies',
serializer=self.serializer)
@classmethod
def clear_expired(cls):
pass
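# A minimal sketch (not part of this module; assumes a configured Django
# settings module) of the signing round-trip the backend above relies on: the
# session data itself becomes the signed cookie value, so nothing is stored
# server-side.
#
#   from django.core import signing
#   salt = 'django.contrib.sessions.backends.signed_cookies'
#   token = signing.dumps({'cart': [1, 2, 3]}, compress=True, salt=salt)
#   data = signing.loads(token, salt=salt, max_age=1209600)  # {'cart': [1, 2, 3]}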
|
msabramo/kallithea
|
refs/heads/master
|
kallithea/controllers/forks.py
|
2
|
# -*- coding: utf-8 -*-
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
kallithea.controllers.forks
~~~~~~~~~~~~~~~~~~~~~~~~~~~
forks controller for Kallithea
This file was forked by the Kallithea project in July 2014.
Original author and date, and relevant copyright and licensing information is below:
:created_on: Apr 23, 2011
:author: marcink
:copyright: (c) 2013 RhodeCode GmbH, and others.
:license: GPLv3, see LICENSE.md for more details.
"""
import logging
import formencode
import traceback
from formencode import htmlfill
from pylons import tmpl_context as c, request, url
from pylons.controllers.util import redirect
from pylons.i18n.translation import _
import kallithea.lib.helpers as h
from kallithea.lib.helpers import Page
from kallithea.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator, \
NotAnonymous, HasRepoPermissionAny, HasPermissionAnyDecorator
from kallithea.lib.base import BaseRepoController, render
from kallithea.model.db import Repository, RepoGroup, UserFollowing, User,\
Ui
from kallithea.model.repo import RepoModel
from kallithea.model.forms import RepoForkForm
from kallithea.model.scm import ScmModel, RepoGroupList
from kallithea.lib.utils2 import safe_int
log = logging.getLogger(__name__)
class ForksController(BaseRepoController):
def __before__(self):
super(ForksController, self).__before__()
def __load_defaults(self):
acl_groups = RepoGroupList(RepoGroup.query().all(),
perm_set=['group.write', 'group.admin'])
c.repo_groups = RepoGroup.groups_choices(groups=acl_groups)
c.repo_groups_choices = map(lambda k: unicode(k[0]), c.repo_groups)
choices, c.landing_revs = ScmModel().get_repo_landing_revs()
c.landing_revs_choices = choices
c.can_update = Ui.get_by_key(Ui.HOOK_UPDATE).ui_active
def __load_data(self, repo_name=None):
"""
Load default settings for edit and update
:param repo_name:
"""
self.__load_defaults()
c.repo_info = db_repo = Repository.get_by_repo_name(repo_name)
repo = db_repo.scm_instance
if c.repo_info is None:
h.not_mapped_error(repo_name)
return redirect(url('repos'))
c.default_user_id = User.get_default_user().user_id
c.in_public_journal = UserFollowing.query()\
.filter(UserFollowing.user_id == c.default_user_id)\
.filter(UserFollowing.follows_repository == c.repo_info).scalar()
if c.repo_info.stats:
last_rev = c.repo_info.stats.stat_on_revision+1
else:
last_rev = 0
c.stats_revision = last_rev
c.repo_last_rev = repo.count() if repo.revisions else 0
if last_rev == 0 or c.repo_last_rev == 0:
c.stats_percentage = 0
else:
c.stats_percentage = '%.2f' % ((float((last_rev)) /
c.repo_last_rev) * 100)
defaults = RepoModel()._get_defaults(repo_name)
# alter the description to indicate a fork
defaults['description'] = ('fork of repository: %s \n%s'
% (defaults['repo_name'],
defaults['description']))
# add suffix to fork
defaults['repo_name'] = '%s-fork' % defaults['repo_name']
return defaults
@LoginRequired()
@HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
'repository.admin')
def forks(self, repo_name):
p = safe_int(request.GET.get('page', 1), 1)
repo_id = c.db_repo.repo_id
d = []
for r in Repository.get_repo_forks(repo_id):
if not HasRepoPermissionAny(
'repository.read', 'repository.write', 'repository.admin'
)(r.repo_name, 'get forks check'):
continue
d.append(r)
c.forks_pager = Page(d, page=p, items_per_page=20)
if request.environ.get('HTTP_X_PARTIAL_XHR'):
return render('/forks/forks_data.html')
return render('/forks/forks.html')
@LoginRequired()
@NotAnonymous()
@HasPermissionAnyDecorator('hg.admin', 'hg.fork.repository')
@HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
'repository.admin')
def fork(self, repo_name):
c.repo_info = Repository.get_by_repo_name(repo_name)
if not c.repo_info:
h.not_mapped_error(repo_name)
return redirect(url('home'))
defaults = self.__load_data(repo_name)
return htmlfill.render(
render('forks/fork.html'),
defaults=defaults,
encoding="UTF-8",
force_defaults=False)
@LoginRequired()
@NotAnonymous()
@HasPermissionAnyDecorator('hg.admin', 'hg.fork.repository')
@HasRepoPermissionAnyDecorator('repository.read', 'repository.write',
'repository.admin')
def fork_create(self, repo_name):
self.__load_defaults()
c.repo_info = Repository.get_by_repo_name(repo_name)
_form = RepoForkForm(old_data={'repo_type': c.repo_info.repo_type},
repo_groups=c.repo_groups_choices,
landing_revs=c.landing_revs_choices)()
form_result = {}
task_id = None
try:
form_result = _form.to_python(dict(request.POST))
# an approximation that is better than nothing
if not Ui.get_by_key(Ui.HOOK_UPDATE).ui_active:
form_result['update_after_clone'] = False
# create fork is done sometimes async on celery, db transaction
# management is handled there.
task = RepoModel().create_fork(form_result, self.authuser.user_id)
from celery.result import BaseAsyncResult
if isinstance(task, BaseAsyncResult):
task_id = task.task_id
except formencode.Invalid, errors:
c.new_repo = errors.value['repo_name']
return htmlfill.render(
render('forks/fork.html'),
defaults=errors.value,
errors=errors.error_dict or {},
prefix_error=False,
encoding="UTF-8",
force_defaults=False)
except Exception:
log.error(traceback.format_exc())
h.flash(_('An error occurred during repository forking %s') %
repo_name, category='error')
return redirect(h.url('repo_creating_home',
repo_name=form_result['repo_name_full'],
task_id=task_id))
|
jeroanan/GameCollection
|
refs/heads/master
|
UI/Tests/Handlers/TestChangePasswordHandler.py
|
1
|
# Copyright (C) 2015 David Wilson
# Icarus is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Icarus is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Icarus. If not, see <http://www.gnu.org/licenses/>.
import unittest
from unittest.mock import Mock
from Interactors.InteractorFactory import InteractorFactory
from Interactors.UserInteractors import ChangePasswordInteractor
from UI.Handlers.ChangePasswordHandler import ChangePasswordHandler
from UI.Handlers.Handler import Handler
from User import User
class TestChangePasswordHandler(unittest.TestCase):
"""Unit tests for the ChangePasswordHandler class"""
def setUp(self):
"""setUp function for all unit tests in this class"""
interactor_factory = Mock(InteractorFactory)
self.__interactor = Mock(ChangePasswordInteractor)
interactor_factory.create = Mock(return_value=self.__interactor)
self.__target = ChangePasswordHandler(interactor_factory, None)
def test_is_handler(self):
"""Test that ChangePasswordHandler is an instance of Handler"""
self.assertIsInstance(self.__target, Handler)
def test_get_page_missing_required_param_raises_value_error(self):
"""Test that calling ChangePasswordHandler.get_page with missing required parameters raises a ValueError"""
required_params = ["user_id", "password"]
for rp in required_params:
p = self.__get_params()
del p[rp]
self.assertRaises(ValueError, self.__target.get_page, p)
def test_get_page_empty_required_params_raises_value_error(self):
"""Test that calling ChangePasswordHandler.get_page with empty required parameters raises a ValueError"""
required_params = ["user_id", "password"]
for rp in required_params:
p = self.__get_params()
p[rp] = ""
self.assertRaises(ValueError, self.__target.get_page, p)
def test_get_page_executes_interactor(self):
"""Test that caling ChangePasswordHandler.get_page correctly causes ChangePasswordInteractor.execute to be
called"""
self.__target.get_page(self.__get_params())
self.__interactor.execute.assert_called_with(self.__get_user())
def __get_params(self):
return {"user_id": "user",
"password": "password"}
def __get_user(self):
p = self.__get_params()
u = User()
u.user_id = p["user_id"]
u.password = p["password"]
return u
|
ElDeveloper/qiita
|
refs/heads/master
|
qiita_pet/handlers/artifact_handlers/tests/__init__.py
|
76
|
# -----------------------------------------------------------------------------
# Copyright (c) 2014--, The Qiita Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
|
macdiesel/mongo-python-driver
|
refs/heads/master
|
test/test_bulk.py
|
7
|
# Copyright 2014-2015 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test the bulk API."""
import sys
sys.path[0:0] = [""]
from bson import InvalidDocument, SON
from bson.objectid import ObjectId
from bson.py3compat import string_type
from pymongo import MongoClient
from pymongo.operations import *
from pymongo.common import partition_node
from pymongo.errors import (BulkWriteError,
ConfigurationError,
InvalidOperation,
OperationFailure)
from pymongo.write_concern import WriteConcern
from test import (client_context,
unittest,
host,
port,
IntegrationTest,
SkipTest)
from test.utils import oid_generated_on_client, remove_all_users, wait_until
class BulkTestBase(IntegrationTest):
@classmethod
def setUpClass(cls):
super(BulkTestBase, cls).setUpClass()
cls.coll = cls.db.test
ismaster = client_context.client.admin.command('ismaster')
cls.has_write_commands = (ismaster.get("maxWireVersion", 0) > 1)
def setUp(self):
super(BulkTestBase, self).setUp()
self.coll.drop()
def assertEqualResponse(self, expected, actual):
"""Compare response from bulk.execute() to expected response."""
for key, value in expected.items():
if key == 'nModified':
if self.has_write_commands:
self.assertEqual(value, actual['nModified'])
else:
# Legacy servers don't include nModified in the response.
self.assertFalse('nModified' in actual)
elif key == 'upserted':
expected_upserts = value
actual_upserts = actual['upserted']
self.assertEqual(
len(expected_upserts), len(actual_upserts),
'Expected %d elements in "upserted", got %d' % (
len(expected_upserts), len(actual_upserts)))
for e, a in zip(expected_upserts, actual_upserts):
self.assertEqualUpsert(e, a)
elif key == 'writeErrors':
expected_errors = value
actual_errors = actual['writeErrors']
self.assertEqual(
len(expected_errors), len(actual_errors),
'Expected %d elements in "writeErrors", got %d' % (
len(expected_errors), len(actual_errors)))
for e, a in zip(expected_errors, actual_errors):
self.assertEqualWriteError(e, a)
else:
self.assertEqual(
actual.get(key), value,
'%r value of %r does not match expected %r' %
(key, actual.get(key), value))
def assertEqualUpsert(self, expected, actual):
"""Compare bulk.execute()['upserts'] to expected value.
Like: {'index': 0, '_id': ObjectId()}
"""
self.assertEqual(expected['index'], actual['index'])
if expected['_id'] == '...':
# Unspecified value.
self.assertTrue('_id' in actual)
else:
self.assertEqual(expected['_id'], actual['_id'])
def assertEqualWriteError(self, expected, actual):
"""Compare bulk.execute()['writeErrors'] to expected value.
Like: {'index': 0, 'code': 123, 'errmsg': '...', 'op': { ... }}
"""
self.assertEqual(expected['index'], actual['index'])
self.assertEqual(expected['code'], actual['code'])
if expected['errmsg'] == '...':
# Unspecified value.
self.assertTrue('errmsg' in actual)
else:
self.assertEqual(expected['errmsg'], actual['errmsg'])
expected_op = expected['op'].copy()
actual_op = actual['op'].copy()
if expected_op.get('_id') == '...':
# Unspecified _id.
self.assertTrue('_id' in actual_op)
actual_op.pop('_id')
expected_op.pop('_id')
self.assertEqual(expected_op, actual_op)
class TestBulk(BulkTestBase):
def test_empty(self):
bulk = self.coll.initialize_ordered_bulk_op()
self.assertRaises(InvalidOperation, bulk.execute)
def test_find(self):
# find() requires a selector.
bulk = self.coll.initialize_ordered_bulk_op()
self.assertRaises(TypeError, bulk.find)
self.assertRaises(TypeError, bulk.find, 'foo')
# No error.
bulk.find({})
def test_insert(self):
expected = {
'nMatched': 0,
'nModified': 0,
'nUpserted': 0,
'nInserted': 1,
'nRemoved': 0,
'upserted': [],
'writeErrors': [],
'writeConcernErrors': []
}
bulk = self.coll.initialize_ordered_bulk_op()
self.assertRaises(TypeError, bulk.insert, 1)
# find() before insert() is prohibited.
self.assertRaises(AttributeError, lambda: bulk.find({}).insert({}))
# We don't allow multiple documents per call.
self.assertRaises(TypeError, bulk.insert, [{}, {}])
self.assertRaises(TypeError, bulk.insert, ({} for _ in range(2)))
bulk.insert({})
result = bulk.execute()
self.assertEqualResponse(expected, result)
self.assertEqual(1, self.coll.count())
doc = self.coll.find_one()
self.assertTrue(oid_generated_on_client(doc['_id']))
bulk = self.coll.initialize_unordered_bulk_op()
bulk.insert({})
result = bulk.execute()
self.assertEqualResponse(expected, result)
self.assertEqual(2, self.coll.count())
result = self.coll.bulk_write([InsertOne({})])
self.assertEqualResponse(expected, result.bulk_api_result)
self.assertEqual(1, result.inserted_count)
self.assertEqual(3, self.coll.count())
def test_insert_check_keys(self):
bulk = self.coll.initialize_ordered_bulk_op()
bulk.insert({'$dollar': 1})
self.assertRaises(InvalidDocument, bulk.execute)
bulk = self.coll.initialize_ordered_bulk_op()
bulk.insert({'a.b': 1})
self.assertRaises(InvalidDocument, bulk.execute)
def test_update(self):
expected = {
'nMatched': 2,
'nModified': 2,
'nUpserted': 0,
'nInserted': 0,
'nRemoved': 0,
'upserted': [],
'writeErrors': [],
'writeConcernErrors': []
}
self.coll.insert_many([{}, {}])
bulk = self.coll.initialize_ordered_bulk_op()
# update() requires find() first.
self.assertRaises(
AttributeError,
lambda: bulk.update({'$set': {'x': 1}}))
self.assertRaises(TypeError, bulk.find({}).update, 1)
self.assertRaises(ValueError, bulk.find({}).update, {})
# All fields must be $-operators.
self.assertRaises(ValueError, bulk.find({}).update, {'foo': 'bar'})
bulk.find({}).update({'$set': {'foo': 'bar'}})
result = bulk.execute()
self.assertEqualResponse(expected, result)
self.assertEqual(self.coll.find({'foo': 'bar'}).count(), 2)
self.coll.delete_many({})
self.coll.insert_many([{}, {}])
result = self.coll.bulk_write([UpdateMany({},
{'$set': {'foo': 'bar'}})])
self.assertEqualResponse(expected, result.bulk_api_result)
self.assertEqual(2, result.matched_count)
self.assertTrue(result.modified_count in (2, None))
# All fields must be $-operators -- validated server-side.
bulk = self.coll.initialize_ordered_bulk_op()
updates = SON([('$set', {'x': 1}), ('y', 1)])
bulk.find({}).update(updates)
self.assertRaises(BulkWriteError, bulk.execute)
self.coll.delete_many({})
self.coll.insert_many([{}, {}])
bulk = self.coll.initialize_unordered_bulk_op()
bulk.find({}).update({'$set': {'bim': 'baz'}})
result = bulk.execute()
self.assertEqualResponse(
{'nMatched': 2,
'nModified': 2,
'nUpserted': 0,
'nInserted': 0,
'nRemoved': 0,
'upserted': [],
'writeErrors': [],
'writeConcernErrors': []},
result)
self.assertEqual(self.coll.find({'bim': 'baz'}).count(), 2)
self.coll.insert_one({'x': 1})
bulk = self.coll.initialize_unordered_bulk_op()
bulk.find({'x': 1}).update({'$set': {'x': 42}})
result = bulk.execute()
self.assertEqualResponse(
{'nMatched': 1,
'nModified': 1,
'nUpserted': 0,
'nInserted': 0,
'nRemoved': 0,
'upserted': [],
'writeErrors': [],
'writeConcernErrors': []},
result)
self.assertEqual(1, self.coll.find({'x': 42}).count())
# Second time, x is already 42 so nModified is 0.
bulk = self.coll.initialize_unordered_bulk_op()
bulk.find({'x': 42}).update({'$set': {'x': 42}})
result = bulk.execute()
self.assertEqualResponse(
{'nMatched': 1,
'nModified': 0,
'nUpserted': 0,
'nInserted': 0,
'nRemoved': 0,
'upserted': [],
'writeErrors': [],
'writeConcernErrors': []},
result)
def test_update_one(self):
expected = {
'nMatched': 1,
'nModified': 1,
'nUpserted': 0,
'nInserted': 0,
'nRemoved': 0,
'upserted': [],
'writeErrors': [],
'writeConcernErrors': []
}
self.coll.insert_many([{}, {}])
bulk = self.coll.initialize_ordered_bulk_op()
# update_one() requires find() first.
self.assertRaises(
AttributeError,
lambda: bulk.update_one({'$set': {'x': 1}}))
self.assertRaises(TypeError, bulk.find({}).update_one, 1)
self.assertRaises(ValueError, bulk.find({}).update_one, {})
self.assertRaises(ValueError, bulk.find({}).update_one, {'foo': 'bar'})
bulk.find({}).update_one({'$set': {'foo': 'bar'}})
result = bulk.execute()
self.assertEqualResponse(expected, result)
self.assertEqual(self.coll.find({'foo': 'bar'}).count(), 1)
self.coll.delete_many({})
self.coll.insert_many([{}, {}])
result = self.coll.bulk_write([UpdateOne({},
{'$set': {'foo': 'bar'}})])
self.assertEqualResponse(expected, result.bulk_api_result)
self.assertEqual(1, result.matched_count)
self.assertTrue(result.modified_count in (1, None))
self.coll.delete_many({})
self.coll.insert_many([{}, {}])
bulk = self.coll.initialize_unordered_bulk_op()
bulk.find({}).update_one({'$set': {'bim': 'baz'}})
result = bulk.execute()
self.assertEqualResponse(expected, result)
self.assertEqual(self.coll.find({'bim': 'baz'}).count(), 1)
# All fields must be $-operators -- validated server-side.
bulk = self.coll.initialize_ordered_bulk_op()
updates = SON([('$set', {'x': 1}), ('y', 1)])
bulk.find({}).update_one(updates)
self.assertRaises(BulkWriteError, bulk.execute)
def test_replace_one(self):
expected = {
'nMatched': 1,
'nModified': 1,
'nUpserted': 0,
'nInserted': 0,
'nRemoved': 0,
'upserted': [],
'writeErrors': [],
'writeConcernErrors': []
}
self.coll.insert_many([{}, {}])
bulk = self.coll.initialize_ordered_bulk_op()
self.assertRaises(TypeError, bulk.find({}).replace_one, 1)
self.assertRaises(ValueError,
bulk.find({}).replace_one, {'$set': {'foo': 'bar'}})
bulk.find({}).replace_one({'foo': 'bar'})
result = bulk.execute()
self.assertEqualResponse(expected, result)
self.assertEqual(self.coll.find({'foo': 'bar'}).count(), 1)
self.coll.delete_many({})
self.coll.insert_many([{}, {}])
result = self.coll.bulk_write([ReplaceOne({}, {'foo': 'bar'})])
self.assertEqualResponse(expected, result.bulk_api_result)
self.assertEqual(1, result.matched_count)
self.assertTrue(result.modified_count in (1, None))
self.coll.delete_many({})
self.coll.insert_many([{}, {}])
bulk = self.coll.initialize_unordered_bulk_op()
bulk.find({}).replace_one({'bim': 'baz'})
result = bulk.execute()
self.assertEqualResponse(expected, result)
self.assertEqual(self.coll.find({'bim': 'baz'}).count(), 1)
def test_remove(self):
# Test removing all documents, ordered.
expected = {
'nMatched': 0,
'nModified': 0,
'nUpserted': 0,
'nInserted': 0,
'nRemoved': 2,
'upserted': [],
'writeErrors': [],
'writeConcernErrors': []
}
self.coll.insert_many([{}, {}])
bulk = self.coll.initialize_ordered_bulk_op()
# remove() must be preceded by find().
self.assertRaises(AttributeError, lambda: bulk.remove())
bulk.find({}).remove()
result = bulk.execute()
self.assertEqualResponse(expected, result)
self.assertEqual(self.coll.count(), 0)
self.coll.insert_many([{}, {}])
result = self.coll.bulk_write([DeleteMany({})])
self.assertEqualResponse(expected, result.bulk_api_result)
self.assertEqual(2, result.deleted_count)
# Test removing some documents, ordered.
self.coll.insert_many([{}, {'x': 1}, {}, {'x': 1}])
bulk = self.coll.initialize_ordered_bulk_op()
bulk.find({'x': 1}).remove()
result = bulk.execute()
self.assertEqualResponse(
{'nMatched': 0,
'nModified': 0,
'nUpserted': 0,
'nInserted': 0,
'nRemoved': 2,
'upserted': [],
'writeErrors': [],
'writeConcernErrors': []},
result)
self.assertEqual(self.coll.count(), 2)
self.coll.delete_many({})
# Test removing all documents, unordered.
self.coll.insert_many([{}, {}])
bulk = self.coll.initialize_unordered_bulk_op()
bulk.find({}).remove()
result = bulk.execute()
self.assertEqualResponse(
{'nMatched': 0,
'nModified': 0,
'nUpserted': 0,
'nInserted': 0,
'nRemoved': 2,
'upserted': [],
'writeErrors': [],
'writeConcernErrors': []},
result)
# Test removing some documents, unordered.
self.assertEqual(self.coll.count(), 0)
self.coll.insert_many([{}, {'x': 1}, {}, {'x': 1}])
bulk = self.coll.initialize_unordered_bulk_op()
bulk.find({'x': 1}).remove()
result = bulk.execute()
self.assertEqualResponse(
{'nMatched': 0,
'nModified': 0,
'nUpserted': 0,
'nInserted': 0,
'nRemoved': 2,
'upserted': [],
'writeErrors': [],
'writeConcernErrors': []},
result)
self.assertEqual(self.coll.count(), 2)
self.coll.delete_many({})
def test_remove_one(self):
bulk = self.coll.initialize_ordered_bulk_op()
# remove_one() must be preceded by find().
self.assertRaises(AttributeError, lambda: bulk.remove_one())
# Test removing one document, empty selector.
# First ordered, then unordered.
self.coll.insert_many([{}, {}])
expected = {
'nMatched': 0,
'nModified': 0,
'nUpserted': 0,
'nInserted': 0,
'nRemoved': 1,
'upserted': [],
'writeErrors': [],
'writeConcernErrors': []
}
bulk.find({}).remove_one()
result = bulk.execute()
self.assertEqualResponse(expected, result)
self.assertEqual(self.coll.count(), 1)
self.coll.insert_one({})
result = self.coll.bulk_write([DeleteOne({})])
self.assertEqualResponse(expected, result.bulk_api_result)
self.assertEqual(1, result.deleted_count)
self.assertEqual(self.coll.count(), 1)
self.coll.insert_one({})
bulk = self.coll.initialize_unordered_bulk_op()
bulk.find({}).remove_one()
result = bulk.execute()
self.assertEqualResponse(expected, result)
self.assertEqual(self.coll.count(), 1)
# Test removing one document, with a selector.
# First ordered, then unordered.
self.coll.insert_one({'x': 1})
bulk = self.coll.initialize_ordered_bulk_op()
bulk.find({'x': 1}).remove_one()
result = bulk.execute()
self.assertEqualResponse(expected, result)
self.assertEqual([{}], list(self.coll.find({}, {'_id': False})))
self.coll.insert_one({'x': 1})
bulk = self.coll.initialize_unordered_bulk_op()
bulk.find({'x': 1}).remove_one()
result = bulk.execute()
self.assertEqualResponse(expected, result)
self.assertEqual([{}], list(self.coll.find({}, {'_id': False})))
def test_upsert(self):
bulk = self.coll.initialize_ordered_bulk_op()
# upsert() requires find() first.
self.assertRaises(
AttributeError,
lambda: bulk.upsert())
expected = {
'nMatched': 0,
'nModified': 0,
'nUpserted': 1,
'nInserted': 0,
'nRemoved': 0,
'upserted': [{'index': 0, '_id': '...'}]
}
# Note, in MongoDB 2.4 the server won't return the
# "upserted" field unless _id is an ObjectId
bulk.find({}).upsert().replace_one({'foo': 'bar'})
result = bulk.execute()
self.assertEqualResponse(expected, result)
self.coll.delete_many({})
result = self.coll.bulk_write([ReplaceOne({},
{'foo': 'bar'},
upsert=True)])
self.assertEqualResponse(expected, result.bulk_api_result)
self.assertEqual(1, result.upserted_count)
self.assertEqual(1, len(result.upserted_ids))
self.assertTrue(isinstance(result.upserted_ids.get(0), ObjectId))
self.assertEqual(self.coll.find({'foo': 'bar'}).count(), 1)
bulk = self.coll.initialize_ordered_bulk_op()
bulk.find({}).upsert().update_one({'$set': {'bim': 'baz'}})
result = bulk.execute()
self.assertEqualResponse(
{'nMatched': 1,
'nModified': 1,
'nUpserted': 0,
'nInserted': 0,
'nRemoved': 0,
'upserted': [],
'writeErrors': [],
'writeConcernErrors': []},
result)
self.assertEqual(self.coll.find({'bim': 'baz'}).count(), 1)
bulk = self.coll.initialize_ordered_bulk_op()
bulk.find({}).upsert().update({'$set': {'bim': 'bop'}})
# Non-upsert, no matches.
bulk.find({'x': 1}).update({'$set': {'x': 2}})
result = bulk.execute()
self.assertEqualResponse(
{'nMatched': 1,
'nModified': 1,
'nUpserted': 0,
'nInserted': 0,
'nRemoved': 0,
'upserted': [],
'writeErrors': [],
'writeConcernErrors': []},
result)
self.assertEqual(self.coll.find({'bim': 'bop'}).count(), 1)
self.assertEqual(self.coll.find({'x': 2}).count(), 0)
def test_upsert_large(self):
big = 'a' * (client_context.client.max_bson_size - 37)
bulk = self.coll.initialize_ordered_bulk_op()
bulk.find({'x': 1}).upsert().update({'$set': {'s': big}})
result = bulk.execute()
self.assertEqualResponse(
{'nMatched': 0,
'nModified': 0,
'nUpserted': 1,
'nInserted': 0,
'nRemoved': 0,
'upserted': [{'index': 0, '_id': '...'}]},
result)
self.assertEqual(1, self.coll.find({'x': 1}).count())
def test_client_generated_upsert_id(self):
batch = self.coll.initialize_ordered_bulk_op()
batch.find({'_id': 0}).upsert().update_one({'$set': {'a': 0}})
batch.find({'a': 1}).upsert().replace_one({'_id': 1})
if not client_context.version.at_least(2, 6, 0):
# This case is only possible in MongoDB versions before 2.6.
batch.find({'_id': 3}).upsert().replace_one({'_id': 2})
else:
# This is just here to make the counts right in all cases.
batch.find({'_id': 2}).upsert().replace_one({'_id': 2})
result = batch.execute()
self.assertEqualResponse(
{'nMatched': 0,
'nModified': 0,
'nUpserted': 3,
'nInserted': 0,
'nRemoved': 0,
'upserted': [{'index': 0, '_id': 0},
{'index': 1, '_id': 1},
{'index': 2, '_id': 2}]},
result)
def test_single_ordered_batch(self):
batch = self.coll.initialize_ordered_bulk_op()
batch.insert({'a': 1})
batch.find({'a': 1}).update_one({'$set': {'b': 1}})
batch.find({'a': 2}).upsert().update_one({'$set': {'b': 2}})
batch.insert({'a': 3})
batch.find({'a': 3}).remove()
result = batch.execute()
self.assertEqualResponse(
{'nMatched': 1,
'nModified': 1,
'nUpserted': 1,
'nInserted': 2,
'nRemoved': 1,
'upserted': [{'index': 2, '_id': '...'}]},
result)
def test_single_error_ordered_batch(self):
self.coll.create_index('a', unique=True)
self.addCleanup(self.coll.drop_index, [('a', 1)])
batch = self.coll.initialize_ordered_bulk_op()
batch.insert({'b': 1, 'a': 1})
batch.find({'b': 2}).upsert().update_one({'$set': {'a': 1}})
batch.insert({'b': 3, 'a': 2})
try:
batch.execute()
except BulkWriteError as exc:
result = exc.details
self.assertEqual(exc.code, 65)
else:
self.fail("Error not raised")
self.assertEqualResponse(
{'nMatched': 0,
'nModified': 0,
'nUpserted': 0,
'nInserted': 1,
'nRemoved': 0,
'upserted': [],
'writeConcernErrors': [],
'writeErrors': [
{'index': 1,
'code': 11000,
'errmsg': '...',
'op': {'q': {'b': 2},
'u': {'$set': {'a': 1}},
'multi': False,
'upsert': True}}]},
result)
def test_multiple_error_ordered_batch(self):
self.coll.create_index('a', unique=True)
self.addCleanup(self.coll.drop_index, [('a', 1)])
batch = self.coll.initialize_ordered_bulk_op()
batch.insert({'b': 1, 'a': 1})
batch.find({'b': 2}).upsert().update_one({'$set': {'a': 1}})
batch.find({'b': 3}).upsert().update_one({'$set': {'a': 2}})
batch.find({'b': 2}).upsert().update_one({'$set': {'a': 1}})
batch.insert({'b': 4, 'a': 3})
batch.insert({'b': 5, 'a': 1})
try:
batch.execute()
except BulkWriteError as exc:
result = exc.details
self.assertEqual(exc.code, 65)
else:
self.fail("Error not raised")
self.assertEqualResponse(
{'nMatched': 0,
'nModified': 0,
'nUpserted': 0,
'nInserted': 1,
'nRemoved': 0,
'upserted': [],
'writeConcernErrors': [],
'writeErrors': [
{'index': 1,
'code': 11000,
'errmsg': '...',
'op': {'q': {'b': 2},
'u': {'$set': {'a': 1}},
'multi': False,
'upsert': True}}]},
result)
def test_single_unordered_batch(self):
batch = self.coll.initialize_unordered_bulk_op()
batch.insert({'a': 1})
batch.find({'a': 1}).update_one({'$set': {'b': 1}})
batch.find({'a': 2}).upsert().update_one({'$set': {'b': 2}})
batch.insert({'a': 3})
batch.find({'a': 3}).remove()
result = batch.execute()
self.assertEqualResponse(
{'nMatched': 1,
'nModified': 1,
'nUpserted': 1,
'nInserted': 2,
'nRemoved': 1,
'upserted': [{'index': 2, '_id': '...'}],
'writeErrors': [],
'writeConcernErrors': []},
result)
def test_single_error_unordered_batch(self):
self.coll.create_index('a', unique=True)
self.addCleanup(self.coll.drop_index, [('a', 1)])
batch = self.coll.initialize_unordered_bulk_op()
batch.insert({'b': 1, 'a': 1})
batch.find({'b': 2}).upsert().update_one({'$set': {'a': 1}})
batch.insert({'b': 3, 'a': 2})
try:
batch.execute()
except BulkWriteError as exc:
result = exc.details
self.assertEqual(exc.code, 65)
else:
self.fail("Error not raised")
self.assertEqualResponse(
{'nMatched': 0,
'nModified': 0,
'nUpserted': 0,
'nInserted': 2,
'nRemoved': 0,
'upserted': [],
'writeConcernErrors': [],
'writeErrors': [
{'index': 1,
'code': 11000,
'errmsg': '...',
'op': {'q': {'b': 2},
'u': {'$set': {'a': 1}},
'multi': False,
'upsert': True}}]},
result)
def test_multiple_error_unordered_batch(self):
self.coll.create_index('a', unique=True)
self.addCleanup(self.coll.drop_index, [('a', 1)])
batch = self.coll.initialize_unordered_bulk_op()
batch.insert({'b': 1, 'a': 1})
batch.find({'b': 2}).upsert().update_one({'$set': {'a': 3}})
batch.find({'b': 3}).upsert().update_one({'$set': {'a': 4}})
batch.find({'b': 4}).upsert().update_one({'$set': {'a': 3}})
batch.insert({'b': 5, 'a': 2})
batch.insert({'b': 6, 'a': 1})
try:
batch.execute()
except BulkWriteError as exc:
result = exc.details
self.assertEqual(exc.code, 65)
else:
self.fail("Error not raised")
# Assume the update at index 1 runs before the update at index 3,
# although the spec does not require it. Same for inserts.
self.assertEqualResponse(
{'nMatched': 0,
'nModified': 0,
'nUpserted': 2,
'nInserted': 2,
'nRemoved': 0,
'upserted': [
{'index': 1, '_id': '...'},
{'index': 2, '_id': '...'}],
'writeConcernErrors': [],
'writeErrors': [
{'index': 3,
'code': 11000,
'errmsg': '...',
'op': {'q': {'b': 4},
'u': {'$set': {'a': 3}},
'multi': False,
'upsert': True}},
{'index': 5,
'code': 11000,
'errmsg': '...',
'op': {'_id': '...', 'b': 6, 'a': 1}}]},
result)
def test_large_inserts_ordered(self):
big = 'x' * self.coll.database.client.max_bson_size
batch = self.coll.initialize_ordered_bulk_op()
batch.insert({'b': 1, 'a': 1})
batch.insert({'big': big})
batch.insert({'b': 2, 'a': 2})
try:
batch.execute()
except BulkWriteError as exc:
result = exc.details
self.assertEqual(exc.code, 65)
else:
self.fail("Error not raised")
self.assertEqual(1, result['nInserted'])
self.coll.delete_many({})
big = 'x' * (1024 * 1024 * 4)
batch = self.coll.initialize_ordered_bulk_op()
batch.insert({'a': 1, 'big': big})
batch.insert({'a': 2, 'big': big})
batch.insert({'a': 3, 'big': big})
batch.insert({'a': 4, 'big': big})
batch.insert({'a': 5, 'big': big})
batch.insert({'a': 6, 'big': big})
result = batch.execute()
self.assertEqual(6, result['nInserted'])
self.assertEqual(6, self.coll.count())
def test_large_inserts_unordered(self):
big = 'x' * self.coll.database.client.max_bson_size
batch = self.coll.initialize_unordered_bulk_op()
batch.insert({'b': 1, 'a': 1})
batch.insert({'big': big})
batch.insert({'b': 2, 'a': 2})
try:
batch.execute()
except BulkWriteError as exc:
result = exc.details
self.assertEqual(exc.code, 65)
else:
self.fail("Error not raised")
self.assertEqual(2, result['nInserted'])
self.coll.delete_many({})
big = 'x' * (1024 * 1024 * 4)
batch = self.coll.initialize_ordered_bulk_op()
batch.insert({'a': 1, 'big': big})
batch.insert({'a': 2, 'big': big})
batch.insert({'a': 3, 'big': big})
batch.insert({'a': 4, 'big': big})
batch.insert({'a': 5, 'big': big})
batch.insert({'a': 6, 'big': big})
result = batch.execute()
self.assertEqual(6, result['nInserted'])
self.assertEqual(6, self.coll.count())
def test_numerous_inserts(self):
# Ensure we don't exceed server's 1000-document batch size limit.
n_docs = 2100
batch = self.coll.initialize_unordered_bulk_op()
for _ in range(n_docs):
batch.insert({})
result = batch.execute()
self.assertEqual(n_docs, result['nInserted'])
self.assertEqual(n_docs, self.coll.count())
# Same with ordered bulk.
self.coll.delete_many({})
batch = self.coll.initialize_ordered_bulk_op()
for _ in range(n_docs):
batch.insert({})
result = batch.execute()
self.assertEqual(n_docs, result['nInserted'])
self.assertEqual(n_docs, self.coll.count())
def test_multiple_execution(self):
batch = self.coll.initialize_ordered_bulk_op()
batch.insert({})
batch.execute()
self.assertRaises(InvalidOperation, batch.execute)
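# A minimal, illustrative sketch of the equivalent flow using the modern
# Collection.bulk_write() API instead of the legacy initialize_*_bulk_op()
# builders exercised above. The database/collection names are assumptions
# chosen only for illustration; the function is not called anywhere.
def _bulk_write_sketch(client):
    from pymongo import InsertOne, UpdateOne, DeleteOne
    coll = client.pymongo_test.bulk_sketch
    requests = [
        InsertOne({'a': 1}),
        UpdateOne({'a': 1}, {'$set': {'b': 1}}),
        UpdateOne({'a': 2}, {'$set': {'b': 2}}, upsert=True),
        DeleteOne({'a': 1}),
    ]
    # ordered=True stops at the first error, mirroring an ordered bulk op.
    result = coll.bulk_write(requests, ordered=True)
    return (result.inserted_count,
            result.upserted_count,
            result.deleted_count)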
class TestBulkWriteConcern(BulkTestBase):
@classmethod
def setUpClass(cls):
super(TestBulkWriteConcern, cls).setUpClass()
cls.w = client_context.w
cls.secondary = None
if cls.w > 1:
for member in client_context.ismaster['hosts']:
if member != client_context.ismaster['primary']:
cls.secondary = MongoClient(*partition_node(member))
break
# We tested wtimeout errors by specifying a write concern greater than
# the number of members, but in MongoDB 2.7.8+ this causes a different
# sort of error, "Not enough data-bearing nodes". In recent servers we
# use a failpoint to pause replication on a secondary.
cls.need_replication_stopped = client_context.version.at_least(2, 7, 8)
def cause_wtimeout(self, batch):
if self.need_replication_stopped:
if not client_context.test_commands_enabled:
raise SkipTest("Test commands must be enabled.")
self.secondary.admin.command('configureFailPoint',
'rsSyncApplyStop',
mode='alwaysOn')
try:
return batch.execute({'w': self.w, 'wtimeout': 1})
finally:
self.secondary.admin.command('configureFailPoint',
'rsSyncApplyStop',
mode='off')
else:
return batch.execute({'w': self.w + 1, 'wtimeout': 1})
def test_fsync_and_j(self):
batch = self.coll.initialize_ordered_bulk_op()
batch.insert({'a': 1})
self.assertRaises(
ConfigurationError,
batch.execute, {'fsync': True, 'j': True})
@client_context.require_replica_set
def test_write_concern_failure_ordered(self):
# Ensure we don't raise on wnote.
batch = self.coll.initialize_ordered_bulk_op()
batch.find({"something": "that does no exist"}).remove()
self.assertTrue(batch.execute({"w": self.w}))
batch = self.coll.initialize_ordered_bulk_op()
batch.insert({'a': 1})
batch.insert({'a': 2})
# Replication wtimeout is a 'soft' error.
# It shouldn't stop batch processing.
try:
self.cause_wtimeout(batch)
except BulkWriteError as exc:
result = exc.details
self.assertEqual(exc.code, 65)
else:
self.fail("Error not raised")
self.assertEqualResponse(
{'nMatched': 0,
'nModified': 0,
'nUpserted': 0,
'nInserted': 2,
'nRemoved': 0,
'upserted': [],
'writeErrors': []},
result)
# When talking to legacy servers there will be a
# write concern error for each operation.
self.assertTrue(len(result['writeConcernErrors']) > 0)
failed = result['writeConcernErrors'][0]
self.assertEqual(64, failed['code'])
self.assertTrue(isinstance(failed['errmsg'], string_type))
self.coll.delete_many({})
self.coll.create_index('a', unique=True)
self.addCleanup(self.coll.drop_index, [('a', 1)])
# Fail due to write concern support as well
# as duplicate key error on ordered batch.
batch = self.coll.initialize_ordered_bulk_op()
batch.insert({'a': 1})
batch.find({'a': 3}).upsert().replace_one({'b': 1})
batch.insert({'a': 1})
batch.insert({'a': 2})
try:
self.cause_wtimeout(batch)
except BulkWriteError as exc:
result = exc.details
self.assertEqual(exc.code, 65)
else:
self.fail("Error not raised")
self.assertEqualResponse(
{'nMatched': 0,
'nModified': 0,
'nUpserted': 1,
'nInserted': 1,
'nRemoved': 0,
'upserted': [{'index': 1, '_id': '...'}],
'writeErrors': [
{'index': 2,
'code': 11000,
'errmsg': '...',
'op': {'_id': '...', 'a': 1}}]},
result)
self.assertEqual(2, len(result['writeConcernErrors']))
failed = result['writeErrors'][0]
self.assertTrue("duplicate" in failed['errmsg'])
@client_context.require_replica_set
def test_write_concern_failure_unordered(self):
# Ensure we don't raise on wnote.
batch = self.coll.initialize_unordered_bulk_op()
batch.find({"something": "that does no exist"}).remove()
self.assertTrue(batch.execute({"w": self.w}))
batch = self.coll.initialize_unordered_bulk_op()
batch.insert({'a': 1})
batch.find({'a': 3}).upsert().update_one({'$set': {'a': 3, 'b': 1}})
batch.insert({'a': 2})
# Replication wtimeout is a 'soft' error.
# It shouldn't stop batch processing.
try:
self.cause_wtimeout(batch)
except BulkWriteError as exc:
result = exc.details
self.assertEqual(exc.code, 65)
else:
self.fail("Error not raised")
self.assertEqual(2, result['nInserted'])
self.assertEqual(1, result['nUpserted'])
self.assertEqual(0, len(result['writeErrors']))
# When talking to legacy servers there will be a
# write concern error for each operation.
self.assertTrue(len(result['writeConcernErrors']) > 1)
self.coll.delete_many({})
self.coll.create_index('a', unique=True)
self.addCleanup(self.coll.drop_index, [('a', 1)])
# Fail due to write concern support as well
# as duplicate key error on unordered batch.
batch = self.coll.initialize_unordered_bulk_op()
batch.insert({'a': 1})
batch.find({'a': 3}).upsert().update_one({'$set': {'a': 3,
'b': 1}})
batch.insert({'a': 1})
batch.insert({'a': 2})
try:
self.cause_wtimeout(batch)
except BulkWriteError as exc:
result = exc.details
self.assertEqual(exc.code, 65)
else:
self.fail("Error not raised")
self.assertEqual(2, result['nInserted'])
self.assertEqual(1, result['nUpserted'])
self.assertEqual(1, len(result['writeErrors']))
# When talking to legacy servers there will be a
# write concern error for each operation.
self.assertTrue(len(result['writeConcernErrors']) > 1)
failed = result['writeErrors'][0]
self.assertEqual(2, failed['index'])
self.assertEqual(11000, failed['code'])
self.assertTrue(isinstance(failed['errmsg'], string_type))
self.assertEqual(1, failed['op']['a'])
failed = result['writeConcernErrors'][0]
self.assertEqual(64, failed['code'])
self.assertTrue(isinstance(failed['errmsg'], string_type))
upserts = result['upserted']
self.assertEqual(1, len(upserts))
self.assertEqual(1, upserts[0]['index'])
self.assertTrue(upserts[0].get('_id'))
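# A minimal, illustrative sketch of how the same wtimeout scenario looks with
# the modern API: the write concern is attached to the collection via
# with_options() and failures surface as BulkWriteError, with write concern
# errors reported under 'writeConcernErrors' in exc.details, just as in the
# legacy results above. The w/wtimeout values are assumptions chosen only to
# force a timeout; the function is not called anywhere.
def _bulk_write_concern_sketch(coll):
    from pymongo import InsertOne
    from pymongo.errors import BulkWriteError
    from pymongo.write_concern import WriteConcern
    wc_coll = coll.with_options(write_concern=WriteConcern(w=3, wtimeout=1))
    try:
        wc_coll.bulk_write([InsertOne({'a': 1}), InsertOne({'a': 2})])
    except BulkWriteError as exc:
        return exc.details.get('writeConcernErrors', [])
    return []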
class TestBulkNoResults(BulkTestBase):
def test_no_results_ordered_success(self):
batch = self.coll.initialize_ordered_bulk_op()
batch.insert({'_id': 1})
batch.find({'_id': 3}).upsert().update_one({'$set': {'b': 1}})
batch.insert({'_id': 2})
batch.find({'_id': 1}).remove_one()
self.assertTrue(batch.execute({'w': 0}) is None)
wait_until(lambda: 2 == self.coll.count(),
'insert 2 documents')
def test_no_results_ordered_failure(self):
batch = self.coll.initialize_ordered_bulk_op()
batch.insert({'_id': 1})
batch.find({'_id': 3}).upsert().update_one({'$set': {'b': 1}})
batch.insert({'_id': 2})
batch.insert({'_id': 1})
batch.find({'_id': 1}).remove_one()
self.assertTrue(batch.execute({'w': 0}) is None)
wait_until(lambda: 3 == self.coll.count(),
'insert 3 documents')
def test_no_results_unordered_success(self):
batch = self.coll.initialize_unordered_bulk_op()
batch.insert({'_id': 1})
batch.find({'_id': 3}).upsert().update_one({'$set': {'b': 1}})
batch.insert({'_id': 2})
batch.find({'_id': 1}).remove_one()
self.assertTrue(batch.execute({'w': 0}) is None)
wait_until(lambda: 2 == self.coll.count(),
'insert 2 documents')
def test_no_results_unordered_failure(self):
batch = self.coll.initialize_unordered_bulk_op()
batch.insert({'_id': 1})
batch.find({'_id': 3}).upsert().update_one({'$set': {'b': 1}})
batch.insert({'_id': 2})
batch.insert({'_id': 1})
batch.find({'_id': 1}).remove_one()
self.assertTrue(batch.execute({'w': 0}) is None)
wait_until(lambda: 2 == self.coll.count(),
'insert 2 documents')
self.assertTrue(self.coll.find_one({'_id': 1}) is None)
def test_bulk_write_no_results(self):
coll = self.coll.with_options(write_concern=WriteConcern(w=0))
result = coll.bulk_write([InsertOne({})])
self.assertFalse(result.acknowledged)
self.assertRaises(InvalidOperation, lambda: result.inserted_count)
self.assertRaises(InvalidOperation, lambda: result.matched_count)
self.assertRaises(InvalidOperation, lambda: result.modified_count)
self.assertRaises(InvalidOperation, lambda: result.deleted_count)
self.assertRaises(InvalidOperation, lambda: result.upserted_count)
self.assertRaises(InvalidOperation, lambda: result.upserted_ids)
class TestBulkAuthorization(BulkTestBase):
@classmethod
@client_context.require_auth
@client_context.require_version_min(2, 5, 3)
def setUpClass(cls):
super(TestBulkAuthorization, cls).setUpClass()
def setUp(self):
super(TestBulkAuthorization, self).setUp()
self.db.add_user('readonly', 'pw', roles=['read'])
self.db.command(
'createRole', 'noremove',
privileges=[{
'actions': ['insert', 'update', 'find'],
'resource': {'db': 'pymongo_test', 'collection': 'test'}
}],
roles=[])
self.db.add_user('noremove', 'pw', roles=['noremove'])
def tearDown(self):
self.db.command('dropRole', 'noremove')
remove_all_users(self.db)
def test_readonly(self):
# We test that an authorization failure aborts the batch and is raised
# as OperationFailure.
cli = MongoClient(host, port)
db = cli.pymongo_test
coll = db.test
db.authenticate('readonly', 'pw')
bulk = coll.initialize_ordered_bulk_op()
bulk.insert({'x': 1})
self.assertRaises(OperationFailure, bulk.execute)
def test_no_remove(self):
# We test that an authorization failure aborts the batch and is raised
# as OperationFailure.
cli = MongoClient(host, port)
db = cli.pymongo_test
coll = db.test
db.authenticate('noremove', 'pw')
bulk = coll.initialize_ordered_bulk_op()
bulk.insert({'x': 1})
bulk.find({'x': 2}).upsert().replace_one({'x': 2})
bulk.find({}).remove() # Prohibited.
bulk.insert({'x': 3}) # Never attempted.
self.assertRaises(OperationFailure, bulk.execute)
self.assertEqual(set([1, 2]), set(self.coll.distinct('x')))
if __name__ == "__main__":
unittest.main()
|
40223148/finaltest
|
refs/heads/master
|
static/Brython3.1.1-20150328-091302/Lib/importlib/machinery.py
|
635
|
"""The machinery of importlib: finders, loaders, hooks, etc."""
import _imp
from ._bootstrap import (SOURCE_SUFFIXES, DEBUG_BYTECODE_SUFFIXES,
OPTIMIZED_BYTECODE_SUFFIXES, #BYTECODE_SUFFIXES,
EXTENSION_SUFFIXES)
from ._bootstrap import BuiltinImporter
from ._bootstrap import FrozenImporter
from ._bootstrap import WindowsRegistryFinder
from ._bootstrap import PathFinder
from ._bootstrap import FileFinder
from ._bootstrap import SourceFileLoader
from ._bootstrap import SourcelessFileLoader
from ._bootstrap import ExtensionFileLoader
#def all_suffixes():
# """Returns a list of all recognized module suffixes for this process"""
# return SOURCE_SUFFIXES + BYTECODE_SUFFIXES + EXTENSION_SUFFIXES
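#A minimal, illustrative sketch (kept commented out, like all_suffixes above)
#of using one of the re-exported loaders to load a module straight from a
#file path; "/tmp/example.py" is an assumed path used only for illustration.
#
# from importlib.machinery import SourceFileLoader
# example = SourceFileLoader("example", "/tmp/example.py").load_module("example")
# print(example.__doc__)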
|
rsjohnco/rez
|
refs/heads/resources2
|
src/rez/backport/ordereddict.py
|
1047
|
# Copyright (c) 2009 Raymond Hettinger
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
from UserDict import DictMixin
class OrderedDict(dict, DictMixin):
def __init__(self, *args, **kwds):
if len(args) > 1:
raise TypeError('expected at most 1 arguments, got %d' % len(args))
try:
self.__end
except AttributeError:
self.clear()
self.update(*args, **kwds)
def clear(self):
self.__end = end = []
end += [None, end, end] # sentinel node for doubly linked list
self.__map = {} # key --> [key, prev, next]
dict.clear(self)
def __setitem__(self, key, value):
if key not in self:
end = self.__end
curr = end[1]
curr[2] = end[1] = self.__map[key] = [key, curr, end]
dict.__setitem__(self, key, value)
def __delitem__(self, key):
dict.__delitem__(self, key)
key, prev, next = self.__map.pop(key)
prev[2] = next
next[1] = prev
def __iter__(self):
end = self.__end
curr = end[2]
while curr is not end:
yield curr[0]
curr = curr[2]
def __reversed__(self):
end = self.__end
curr = end[1]
while curr is not end:
yield curr[0]
curr = curr[1]
def popitem(self, last=True):
if not self:
raise KeyError('dictionary is empty')
if last:
key = reversed(self).next()
else:
key = iter(self).next()
value = self.pop(key)
return key, value
def __reduce__(self):
items = [[k, self[k]] for k in self]
tmp = self.__map, self.__end
del self.__map, self.__end
inst_dict = vars(self).copy()
self.__map, self.__end = tmp
if inst_dict:
return (self.__class__, (items,), inst_dict)
return self.__class__, (items,)
def keys(self):
return list(self)
setdefault = DictMixin.setdefault
update = DictMixin.update
pop = DictMixin.pop
values = DictMixin.values
items = DictMixin.items
iterkeys = DictMixin.iterkeys
itervalues = DictMixin.itervalues
iteritems = DictMixin.iteritems
def __repr__(self):
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, self.items())
def copy(self):
return self.__class__(self)
@classmethod
def fromkeys(cls, iterable, value=None):
d = cls()
for key in iterable:
d[key] = value
return d
def __eq__(self, other):
if isinstance(other, OrderedDict):
if len(self) != len(other):
return False
for p, q in zip(self.items(), other.items()):
if p != q:
return False
return True
return dict.__eq__(self, other)
def __ne__(self, other):
return not self == other
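# A minimal usage sketch of the backport above: keys iterate in insertion
# order thanks to the sentinel-based doubly linked list, and popitem() can
# remove from either end. Runs only when the module is executed directly.
if __name__ == '__main__':
    d = OrderedDict()
    d['first'] = 1
    d['second'] = 2
    d['third'] = 3
    assert d.keys() == ['first', 'second', 'third']
    assert d.popitem() == ('third', 3)  # newest entry by default
    assert d.popitem(last=False) == ('first', 1)  # or the oldest entry
    assert d.keys() == ['second']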
|
BT-fgarbely/odoo
|
refs/heads/8.0
|
addons/account_voucher/account_voucher.py
|
132
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from lxml import etree
from openerp.osv import fields, osv
import openerp.addons.decimal_precision as dp
from openerp.tools.translate import _
from openerp.tools import float_compare
from openerp.report import report_sxw
import openerp
class res_currency(osv.osv):
_inherit = "res.currency"
def _get_current_rate(self, cr, uid, ids, raise_on_no_rate=True, context=None):
if context is None:
context = {}
res = super(res_currency, self)._get_current_rate(cr, uid, ids, raise_on_no_rate, context=context)
if context.get('voucher_special_currency') in ids and context.get('voucher_special_currency_rate'):
res[context.get('voucher_special_currency')] = context.get('voucher_special_currency_rate')
return res
class account_voucher(osv.osv):
def _check_paid(self, cr, uid, ids, name, args, context=None):
res = {}
for voucher in self.browse(cr, uid, ids, context=context):
res[voucher.id] = any([(line.account_id.type in ('receivable', 'payable')) and line.reconcile_id for line in voucher.move_ids])
return res
def _get_type(self, cr, uid, context=None):
if context is None:
context = {}
return context.get('type', False)
def _get_period(self, cr, uid, context=None):
if context is None: context = {}
if context.get('period_id', False):
return context.get('period_id')
periods = self.pool.get('account.period').find(cr, uid, context=context)
return periods and periods[0] or False
def _make_journal_search(self, cr, uid, ttype, context=None):
journal_pool = self.pool.get('account.journal')
return journal_pool.search(cr, uid, [('type', '=', ttype)], limit=1)
def _get_journal(self, cr, uid, context=None):
if context is None: context = {}
invoice_pool = self.pool.get('account.invoice')
journal_pool = self.pool.get('account.journal')
if context.get('invoice_id', False):
invoice = invoice_pool.browse(cr, uid, context['invoice_id'], context=context)
journal_id = journal_pool.search(cr, uid, [
('currency', '=', invoice.currency_id.id), ('company_id', '=', invoice.company_id.id)
], limit=1, context=context)
return journal_id and journal_id[0] or False
if context.get('journal_id', False):
return context.get('journal_id')
if not context.get('journal_id', False) and context.get('search_default_journal_id', False):
return context.get('search_default_journal_id')
ttype = context.get('type', 'bank')
if ttype in ('payment', 'receipt'):
ttype = 'bank'
res = self._make_journal_search(cr, uid, ttype, context=context)
return res and res[0] or False
def _get_tax(self, cr, uid, context=None):
if context is None: context = {}
journal_pool = self.pool.get('account.journal')
journal_id = context.get('journal_id', False)
if not journal_id:
ttype = context.get('type', 'bank')
res = journal_pool.search(cr, uid, [('type', '=', ttype)], limit=1)
if not res:
return False
journal_id = res[0]
if not journal_id:
return False
journal = journal_pool.browse(cr, uid, journal_id, context=context)
account_id = journal.default_credit_account_id or journal.default_debit_account_id
if account_id and account_id.tax_ids:
tax_id = account_id.tax_ids[0].id
return tax_id
return False
def _get_payment_rate_currency(self, cr, uid, context=None):
"""
Return the default value for field payment_rate_currency_id: the currency of the journal
if there is one, otherwise the currency of the user's company
"""
if context is None: context = {}
journal_pool = self.pool.get('account.journal')
journal_id = context.get('journal_id', False)
if journal_id:
journal = journal_pool.browse(cr, uid, journal_id, context=context)
if journal.currency:
return journal.currency.id
#no journal given in the context, use company currency as default
return self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.currency_id.id
def _get_currency(self, cr, uid, context=None):
if context is None: context = {}
journal_pool = self.pool.get('account.journal')
journal_id = context.get('journal_id', False)
if journal_id:
if isinstance(journal_id, (list, tuple)):
# sometimes journal_id is a pair (id, display_name)
journal_id = journal_id[0]
journal = journal_pool.browse(cr, uid, journal_id, context=context)
if journal.currency:
return journal.currency.id
return self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.currency_id.id
def _get_partner(self, cr, uid, context=None):
if context is None: context = {}
return context.get('partner_id', False)
def _get_reference(self, cr, uid, context=None):
if context is None: context = {}
return context.get('reference', False)
def _get_narration(self, cr, uid, context=None):
if context is None: context = {}
return context.get('narration', False)
def _get_amount(self, cr, uid, context=None):
if context is None:
context= {}
return context.get('amount', 0.0)
def name_get(self, cr, uid, ids, context=None):
if not ids:
return []
if context is None: context = {}
return [(r['id'], (r['number'] or _('Voucher'))) for r in self.read(cr, uid, ids, ['number'], context, load='_classic_write')]
def fields_view_get(self, cr, uid, view_id=None, view_type=False, context=None, toolbar=False, submenu=False):
mod_obj = self.pool.get('ir.model.data')
if context is None: context = {}
if view_type == 'form':
if not view_id and context.get('invoice_type'):
if context.get('invoice_type') in ('out_invoice', 'out_refund'):
result = mod_obj.get_object_reference(cr, uid, 'account_voucher', 'view_vendor_receipt_form')
else:
result = mod_obj.get_object_reference(cr, uid, 'account_voucher', 'view_vendor_payment_form')
result = result and result[1] or False
view_id = result
if not view_id and context.get('line_type'):
if context.get('line_type') == 'customer':
result = mod_obj.get_object_reference(cr, uid, 'account_voucher', 'view_vendor_receipt_form')
else:
result = mod_obj.get_object_reference(cr, uid, 'account_voucher', 'view_vendor_payment_form')
result = result and result[1] or False
view_id = result
res = super(account_voucher, self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar, submenu=submenu)
doc = etree.XML(res['arch'])
if context.get('type', 'sale') in ('purchase', 'payment'):
nodes = doc.xpath("//field[@name='partner_id']")
for node in nodes:
node.set('context', "{'default_customer': 0, 'search_default_supplier': 1, 'default_supplier': 1}")
if context.get('invoice_type','') in ('in_invoice', 'in_refund'):
node.set('string', _("Supplier"))
res['arch'] = etree.tostring(doc)
return res
def _compute_writeoff_amount(self, cr, uid, line_dr_ids, line_cr_ids, amount, type):
debit = credit = 0.0
sign = type == 'payment' and -1 or 1
for l in line_dr_ids:
if isinstance(l, dict):
debit += l['amount']
for l in line_cr_ids:
if isinstance(l, dict):
credit += l['amount']
return amount - sign * (credit - debit)
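#worked example (for clarity): on a 'receipt' voucher (sign = 1) of 100.0 with
#credit allocations of 60.0 and 20.0 and no debit allocations, the write-off
#amount is 100.0 - 1 * (80.0 - 0.0) = 20.0, i.e. the unallocated part of the
#payment.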
def onchange_line_ids(self, cr, uid, ids, line_dr_ids, line_cr_ids, amount, voucher_currency, type, context=None):
context = context or {}
if not line_dr_ids and not line_cr_ids:
return {'value':{'writeoff_amount': 0.0}}
# resolve lists of commands into lists of dicts
line_dr_ids = self.resolve_2many_commands(cr, uid, 'line_dr_ids', line_dr_ids, ['amount'], context)
line_cr_ids = self.resolve_2many_commands(cr, uid, 'line_cr_ids', line_cr_ids, ['amount'], context)
#compute the field is_multi_currency that is used to hide/display options linked to secondary currency on the voucher
is_multi_currency = False
#loop on the voucher lines to see if one of them has a secondary currency. If so, we need to show the options
for voucher_line in line_dr_ids+line_cr_ids:
line_id = voucher_line.get('id') and self.pool.get('account.voucher.line').browse(cr, uid, voucher_line['id'], context=context).move_line_id.id or voucher_line.get('move_line_id')
if line_id and self.pool.get('account.move.line').browse(cr, uid, line_id, context=context).currency_id:
is_multi_currency = True
break
return {'value': {'writeoff_amount': self._compute_writeoff_amount(cr, uid, line_dr_ids, line_cr_ids, amount, type), 'is_multi_currency': is_multi_currency}}
def _get_journal_currency(self, cr, uid, ids, name, args, context=None):
res = {}
for voucher in self.browse(cr, uid, ids, context=context):
res[voucher.id] = voucher.journal_id.currency and voucher.journal_id.currency.id or voucher.company_id.currency_id.id
return res
def _get_writeoff_amount(self, cr, uid, ids, name, args, context=None):
if not ids: return {}
currency_obj = self.pool.get('res.currency')
res = {}
for voucher in self.browse(cr, uid, ids, context=context):
debit = credit = 0.0
sign = voucher.type == 'payment' and -1 or 1
for l in voucher.line_dr_ids:
debit += l.amount
for l in voucher.line_cr_ids:
credit += l.amount
currency = voucher.currency_id or voucher.company_id.currency_id
res[voucher.id] = currency_obj.round(cr, uid, currency, voucher.amount - sign * (credit - debit))
return res
def _paid_amount_in_company_currency(self, cr, uid, ids, name, args, context=None):
if context is None:
context = {}
res = {}
ctx = context.copy()
for v in self.browse(cr, uid, ids, context=context):
ctx.update({'date': v.date})
#make a new call to browse in order to have the right date in the context, to get the right currency rate
voucher = self.browse(cr, uid, v.id, context=ctx)
ctx.update({
'voucher_special_currency': voucher.payment_rate_currency_id and voucher.payment_rate_currency_id.id or False,
'voucher_special_currency_rate': voucher.currency_id.rate * voucher.payment_rate,})
res[voucher.id] = self.pool.get('res.currency').compute(cr, uid, voucher.currency_id.id, voucher.company_id.currency_id.id, voucher.amount, context=ctx)
return res
def _get_currency_help_label(self, cr, uid, currency_id, payment_rate, payment_rate_currency_id, context=None):
"""
This function builds a string to help the users to understand the behavior of the payment rate fields they can specify on the voucher.
This string is only used to improve the usability in the voucher form view and has no other effect.
:param currency_id: the voucher currency
:type currency_id: integer
:param payment_rate: the value of the payment_rate field of the voucher
:type payment_rate: float
:param payment_rate_currency_id: the value of the payment_rate_currency_id field of the voucher
:type payment_rate_currency_id: integer
:return: translated string giving a tip on what's the effect of the current payment rate specified
:rtype: str
"""
rml_parser = report_sxw.rml_parse(cr, uid, 'currency_help_label', context=context)
currency_pool = self.pool.get('res.currency')
currency_str = payment_rate_str = ''
if currency_id:
currency_str = rml_parser.formatLang(1, currency_obj=currency_pool.browse(cr, uid, currency_id, context=context))
if payment_rate_currency_id:
payment_rate_str = rml_parser.formatLang(payment_rate, currency_obj=currency_pool.browse(cr, uid, payment_rate_currency_id, context=context))
currency_help_label = _('At the operation date, the exchange rate was\n%s = %s') % (currency_str, payment_rate_str)
return currency_help_label
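#illustration (hypothetical values): for a voucher in EUR with a payment rate
#of 1.10 expressed in USD, the label reads roughly
#'At the operation date, the exchange rate was 1.00 EUR = 1.10 USD'
#(the exact figures depend on each currency's display formatting).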
def _fnct_currency_help_label(self, cr, uid, ids, name, args, context=None):
res = {}
for voucher in self.browse(cr, uid, ids, context=context):
res[voucher.id] = self._get_currency_help_label(cr, uid, voucher.currency_id.id, voucher.payment_rate, voucher.payment_rate_currency_id.id, context=context)
return res
_name = 'account.voucher'
_description = 'Accounting Voucher'
_inherit = ['mail.thread']
_order = "date desc, id desc"
# _rec_name = 'number'
_track = {
'state': {
'account_voucher.mt_voucher_state_change': lambda self, cr, uid, obj, ctx=None: True,
},
}
_columns = {
'type':fields.selection([
('sale','Sale'),
('purchase','Purchase'),
('payment','Payment'),
('receipt','Receipt'),
],'Default Type', readonly=True, states={'draft':[('readonly',False)]}),
'name':fields.char('Memo', readonly=True, states={'draft':[('readonly',False)]}),
'date':fields.date('Date', readonly=True, select=True, states={'draft':[('readonly',False)]},
help="Effective date for accounting entries", copy=False),
'journal_id':fields.many2one('account.journal', 'Journal', required=True, readonly=True, states={'draft':[('readonly',False)]}),
'account_id':fields.many2one('account.account', 'Account', required=True, readonly=True, states={'draft':[('readonly',False)]}),
'line_ids':fields.one2many('account.voucher.line', 'voucher_id', 'Voucher Lines',
readonly=True, copy=True,
states={'draft':[('readonly',False)]}),
'line_cr_ids':fields.one2many('account.voucher.line','voucher_id','Credits',
domain=[('type','=','cr')], context={'default_type':'cr'}, readonly=True, states={'draft':[('readonly',False)]}),
'line_dr_ids':fields.one2many('account.voucher.line','voucher_id','Debits',
domain=[('type','=','dr')], context={'default_type':'dr'}, readonly=True, states={'draft':[('readonly',False)]}),
'period_id': fields.many2one('account.period', 'Period', required=True, readonly=True, states={'draft':[('readonly',False)]}),
'narration':fields.text('Notes', readonly=True, states={'draft':[('readonly',False)]}),
'currency_id': fields.function(_get_journal_currency, type='many2one', relation='res.currency', string='Currency', readonly=True, required=True),
'company_id': fields.many2one('res.company', 'Company', required=True, readonly=True, states={'draft':[('readonly',False)]}),
'state':fields.selection(
[('draft','Draft'),
('cancel','Cancelled'),
('proforma','Pro-forma'),
('posted','Posted')
], 'Status', readonly=True, track_visibility='onchange', copy=False,
help=' * The \'Draft\' status is used when a user is encoding a new and unconfirmed voucher. \
\n* The \'Pro-forma\' status is used when the voucher is in pro-forma state and does not yet have a voucher number. \
\n* The \'Posted\' status is used when the user posts the voucher: a voucher number is generated and the voucher entries are created in the accounts. \
\n* The \'Cancelled\' status is used when the user cancels the voucher.'),
'amount': fields.float('Total', digits_compute=dp.get_precision('Account'), required=True, readonly=True, states={'draft':[('readonly',False)]}),
'tax_amount':fields.float('Tax Amount', digits_compute=dp.get_precision('Account'), readonly=True),
'reference': fields.char('Ref #', readonly=True, states={'draft':[('readonly',False)]},
help="Transaction reference number.", copy=False),
'number': fields.char('Number', readonly=True, copy=False),
'move_id':fields.many2one('account.move', 'Account Entry', copy=False),
'move_ids': fields.related('move_id','line_id', type='one2many', relation='account.move.line', string='Journal Items', readonly=True),
'partner_id':fields.many2one('res.partner', 'Partner', change_default=1, readonly=True, states={'draft':[('readonly',False)]}),
'audit': fields.related('move_id','to_check', type='boolean', help='Check this box if you are unsure of that journal entry and if you want to note it as \'to be reviewed\' by an accounting expert.', relation='account.move', string='To Review'),
'paid': fields.function(_check_paid, string='Paid', type='boolean', help="The Voucher has been totally paid."),
'pay_now':fields.selection([
('pay_now','Pay Directly'),
('pay_later','Pay Later or Group Funds'),
],'Payment', select=True, readonly=True, states={'draft':[('readonly',False)]}),
'tax_id': fields.many2one('account.tax', 'Tax', readonly=True, states={'draft':[('readonly',False)]}, domain=[('price_include','=', False)], help="Only for tax excluded from price"),
'pre_line':fields.boolean('Previous Payments ?', required=False),
'date_due': fields.date('Due Date', readonly=True, select=True, states={'draft':[('readonly',False)]}),
'payment_option':fields.selection([
('without_writeoff', 'Keep Open'),
('with_writeoff', 'Reconcile Payment Balance'),
], 'Payment Difference', required=True, readonly=True, states={'draft': [('readonly', False)]}, help="This field helps you choose what to do with any difference between the paid amount and the sum of the allocated amounts. You can either keep this difference open on the partner's account, or reconcile it with the payment(s)."),
'writeoff_acc_id': fields.many2one('account.account', 'Counterpart Account', readonly=True, states={'draft': [('readonly', False)]}),
'comment': fields.char('Counterpart Comment', required=True, readonly=True, states={'draft': [('readonly', False)]}),
'analytic_id': fields.many2one('account.analytic.account','Write-Off Analytic Account', readonly=True, states={'draft': [('readonly', False)]}),
'writeoff_amount': fields.function(_get_writeoff_amount, string='Difference Amount', type='float', readonly=True, help="Computed as the difference between the amount stated in the voucher and the sum of allocation on the voucher lines."),
'payment_rate_currency_id': fields.many2one('res.currency', 'Payment Rate Currency', required=True, readonly=True, states={'draft':[('readonly',False)]}),
'payment_rate': fields.float('Exchange Rate', digits=(12,6), required=True, readonly=True, states={'draft': [('readonly', False)]},
help='The specific rate that will be used, in this voucher, between the selected currency (in \'Payment Rate Currency\' field) and the voucher currency.'),
'paid_amount_in_company_currency': fields.function(_paid_amount_in_company_currency, string='Paid Amount in Company Currency', type='float', readonly=True),
'is_multi_currency': fields.boolean('Multi Currency Voucher', help='Field for internal purposes only, indicating whether the voucher is a multi-currency one or not'),
'currency_help_label': fields.function(_fnct_currency_help_label, type='text', string="Helping Sentence", help="This sentence helps you understand the direct effect of the payment rate you specify"),
}
_defaults = {
'period_id': _get_period,
'partner_id': _get_partner,
'journal_id':_get_journal,
'currency_id': _get_currency,
'reference': _get_reference,
'narration':_get_narration,
'amount': _get_amount,
'type':_get_type,
'state': 'draft',
'pay_now': 'pay_now',
'name': '',
'date': fields.date.context_today,
'company_id': lambda self,cr,uid,c: self.pool.get('res.company')._company_default_get(cr, uid, 'account.voucher',context=c),
'tax_id': _get_tax,
'payment_option': 'without_writeoff',
'comment': _('Write-Off'),
'payment_rate': 1.0,
'payment_rate_currency_id': _get_payment_rate_currency,
}
def compute_tax(self, cr, uid, ids, context=None):
tax_pool = self.pool.get('account.tax')
partner_pool = self.pool.get('res.partner')
position_pool = self.pool.get('account.fiscal.position')
voucher_line_pool = self.pool.get('account.voucher.line')
voucher_pool = self.pool.get('account.voucher')
if context is None: context = {}
for voucher in voucher_pool.browse(cr, uid, ids, context=context):
voucher_amount = 0.0
for line in voucher.line_ids:
voucher_amount += line.untax_amount or line.amount
line.amount = line.untax_amount or line.amount
voucher_line_pool.write(cr, uid, [line.id], {'amount':line.amount, 'untax_amount':line.untax_amount})
if not voucher.tax_id:
self.write(cr, uid, [voucher.id], {'amount':voucher_amount, 'tax_amount':0.0})
continue
tax = [tax_pool.browse(cr, uid, voucher.tax_id.id, context=context)]
partner = partner_pool.browse(cr, uid, voucher.partner_id.id, context=context) or False
taxes = position_pool.map_tax(cr, uid, partner and partner.property_account_position or False, tax)
tax = tax_pool.browse(cr, uid, taxes, context=context)
total = voucher_amount
total_tax = 0.0
if not tax[0].price_include:
for line in voucher.line_ids:
for tax_line in tax_pool.compute_all(cr, uid, tax, line.amount, 1).get('taxes', []):
total_tax += tax_line.get('amount', 0.0)
total += total_tax
else:
for line in voucher.line_ids:
line_total = 0.0
line_tax = 0.0
for tax_line in tax_pool.compute_all(cr, uid, tax, line.untax_amount or line.amount, 1).get('taxes', []):
line_tax += tax_line.get('amount', 0.0)
line_total += tax_line.get('price_unit')
total_tax += line_tax
untax_amount = line.untax_amount or line.amount
voucher_line_pool.write(cr, uid, [line.id], {'amount':line_total, 'untax_amount':untax_amount})
self.write(cr, uid, [voucher.id], {'amount':total, 'tax_amount':total_tax})
return True
def onchange_price(self, cr, uid, ids, line_ids, tax_id, partner_id=False, context=None):
context = context or {}
tax_pool = self.pool.get('account.tax')
partner_pool = self.pool.get('res.partner')
position_pool = self.pool.get('account.fiscal.position')
if not line_ids:
line_ids = []
res = {
'tax_amount': False,
'amount': False,
}
voucher_total = 0.0
# resolve the list of commands into a list of dicts
line_ids = self.resolve_2many_commands(cr, uid, 'line_ids', line_ids, ['amount'], context)
total_tax = 0.0
for line in line_ids:
line_amount = 0.0
line_amount = line.get('amount',0.0)
if tax_id:
tax = [tax_pool.browse(cr, uid, tax_id, context=context)]
if partner_id:
partner = partner_pool.browse(cr, uid, partner_id, context=context) or False
taxes = position_pool.map_tax(cr, uid, partner and partner.property_account_position or False, tax)
tax = tax_pool.browse(cr, uid, taxes, context=context)
if not tax[0].price_include:
for tax_line in tax_pool.compute_all(cr, uid, tax, line_amount, 1).get('taxes', []):
total_tax += tax_line.get('amount')
voucher_total += line_amount
total = voucher_total + total_tax
res.update({
'amount': total or voucher_total,
'tax_amount': total_tax
})
return {
'value': res
}
def onchange_term_id(self, cr, uid, ids, term_id, amount):
term_pool = self.pool.get('account.payment.term')
terms = False
due_date = False
default = {'date_due':False}
if term_id and amount:
terms = term_pool.compute(cr, uid, term_id, amount)
if terms:
due_date = terms[-1][0]
default.update({
'date_due':due_date
})
return {'value':default}
def onchange_journal_voucher(self, cr, uid, ids, line_ids=False, tax_id=False, price=0.0, partner_id=False, journal_id=False, ttype=False, company_id=False, context=None):
"""price
Returns a dict that contains new values and context
@param partner_id: latest value from user input for field partner_id
@param args: other arguments
@param context: context arguments, like lang, time zone
@return: Returns a dict which contains new values, and context
"""
default = {
'value':{},
}
if not partner_id or not journal_id:
return default
partner_pool = self.pool.get('res.partner')
journal_pool = self.pool.get('account.journal')
journal = journal_pool.browse(cr, uid, journal_id, context=context)
partner = partner_pool.browse(cr, uid, partner_id, context=context)
account_id = False
tr_type = False
if journal.type in ('sale','sale_refund'):
account_id = partner.property_account_receivable.id
tr_type = 'sale'
elif journal.type in ('purchase', 'purchase_refund','expense'):
account_id = partner.property_account_payable.id
tr_type = 'purchase'
else:
if not journal.default_credit_account_id or not journal.default_debit_account_id:
raise osv.except_osv(_('Error!'), _('Please define default credit/debit accounts on the journal "%s".') % (journal.name))
if ttype in ('sale', 'receipt'):
account_id = journal.default_debit_account_id.id
elif ttype in ('purchase', 'payment'):
account_id = journal.default_credit_account_id.id
else:
account_id = journal.default_credit_account_id.id or journal.default_debit_account_id.id
tr_type = 'receipt'
default['value']['account_id'] = account_id
default['value']['type'] = ttype or tr_type
vals = self.onchange_journal(cr, uid, ids, journal_id, line_ids, tax_id, partner_id, time.strftime('%Y-%m-%d'), price, ttype, company_id, context)
default['value'].update(vals.get('value'))
return default
def onchange_rate(self, cr, uid, ids, rate, amount, currency_id, payment_rate_currency_id, company_id, context=None):
res = {'value': {'paid_amount_in_company_currency': amount, 'currency_help_label': self._get_currency_help_label(cr, uid, currency_id, rate, payment_rate_currency_id, context=context)}}
if rate and amount and currency_id:
company_currency = self.pool.get('res.company').browse(cr, uid, company_id, context=context).currency_id
#context should contain the date, the payment currency and the payment rate specified on the voucher
amount_in_company_currency = self.pool.get('res.currency').compute(cr, uid, currency_id, company_currency.id, amount, context=context)
res['value']['paid_amount_in_company_currency'] = amount_in_company_currency
return res
def onchange_amount(self, cr, uid, ids, amount, rate, partner_id, journal_id, currency_id, ttype, date, payment_rate_currency_id, company_id, context=None):
if context is None:
context = {}
ctx = context.copy()
ctx.update({'date': date})
#read the voucher rate with the right date in the context
currency_id = currency_id or self.pool.get('res.company').browse(cr, uid, company_id, context=ctx).currency_id.id
voucher_rate = self.pool.get('res.currency').read(cr, uid, [currency_id], ['rate'], context=ctx)[0]['rate']
ctx.update({
'voucher_special_currency': payment_rate_currency_id,
'voucher_special_currency_rate': rate * voucher_rate})
res = self.recompute_voucher_lines(cr, uid, ids, partner_id, journal_id, amount, currency_id, ttype, date, context=ctx)
vals = self.onchange_rate(cr, uid, ids, rate, amount, currency_id, payment_rate_currency_id, company_id, context=ctx)
for key in vals.keys():
res[key].update(vals[key])
return res
def recompute_payment_rate(self, cr, uid, ids, vals, currency_id, date, ttype, journal_id, amount, context=None):
if context is None:
context = {}
#on change of the journal, we need to set also the default value for payment_rate and payment_rate_currency_id
currency_obj = self.pool.get('res.currency')
journal = self.pool.get('account.journal').browse(cr, uid, journal_id, context=context)
company_id = journal.company_id.id
payment_rate = 1.0
currency_id = currency_id or journal.company_id.currency_id.id
payment_rate_currency_id = currency_id
ctx = context.copy()
ctx.update({'date': date})
o2m_to_loop = False
if ttype == 'receipt':
o2m_to_loop = 'line_cr_ids'
elif ttype == 'payment':
o2m_to_loop = 'line_dr_ids'
if o2m_to_loop and 'value' in vals and o2m_to_loop in vals['value']:
for voucher_line in vals['value'][o2m_to_loop]:
if not isinstance(voucher_line, dict):
continue
if voucher_line['currency_id'] != currency_id:
# we take as default value for the payment_rate_currency_id, the currency of the first invoice that
# is not in the voucher currency
payment_rate_currency_id = voucher_line['currency_id']
tmp = currency_obj.browse(cr, uid, payment_rate_currency_id, context=ctx).rate
payment_rate = tmp / currency_obj.browse(cr, uid, currency_id, context=ctx).rate
break
vals['value'].update({
'payment_rate': payment_rate,
'currency_id': currency_id,
'payment_rate_currency_id': payment_rate_currency_id
})
#read the voucher rate with the right date in the context
voucher_rate = self.pool.get('res.currency').read(cr, uid, [currency_id], ['rate'], context=ctx)[0]['rate']
ctx.update({
'voucher_special_currency_rate': payment_rate * voucher_rate,
'voucher_special_currency': payment_rate_currency_id})
res = self.onchange_rate(cr, uid, ids, payment_rate, amount, currency_id, payment_rate_currency_id, company_id, context=ctx)
for key in res.keys():
vals[key].update(res[key])
return vals
def basic_onchange_partner(self, cr, uid, ids, partner_id, journal_id, ttype, context=None):
partner_pool = self.pool.get('res.partner')
journal_pool = self.pool.get('account.journal')
res = {'value': {'account_id': False}}
if not partner_id or not journal_id:
return res
journal = journal_pool.browse(cr, uid, journal_id, context=context)
partner = partner_pool.browse(cr, uid, partner_id, context=context)
account_id = False
if journal.type in ('sale','sale_refund'):
account_id = partner.property_account_receivable.id
elif journal.type in ('purchase', 'purchase_refund','expense'):
account_id = partner.property_account_payable.id
elif ttype in ('sale', 'receipt'):
account_id = journal.default_debit_account_id.id
elif ttype in ('purchase', 'payment'):
account_id = journal.default_credit_account_id.id
else:
account_id = journal.default_credit_account_id.id or journal.default_debit_account_id.id
res['value']['account_id'] = account_id
return res
def onchange_partner_id(self, cr, uid, ids, partner_id, journal_id, amount, currency_id, ttype, date, context=None):
if not journal_id:
return {}
if context is None:
context = {}
#TODO: comment me and use me directly in the sales/purchases views
res = self.basic_onchange_partner(cr, uid, ids, partner_id, journal_id, ttype, context=context)
if ttype in ['sale', 'purchase']:
return res
ctx = context.copy()
# not passing the payment_rate currency and the payment_rate in the context but it's ok because they are reset in recompute_payment_rate
ctx.update({'date': date})
vals = self.recompute_voucher_lines(cr, uid, ids, partner_id, journal_id, amount, currency_id, ttype, date, context=ctx)
vals2 = self.recompute_payment_rate(cr, uid, ids, vals, currency_id, date, ttype, journal_id, amount, context=context)
for key in vals.keys():
res[key].update(vals[key])
for key in vals2.keys():
res[key].update(vals2[key])
#TODO: can probably be removed now
#TODO: onchange_partner_id() should not return [pre_line, line_dr_ids, payment_rate...] for type sale, nor
# [pre_line, line_cr_ids, payment_rate...] for type purchase.
# We should definitely split the account.voucher object in two and write distinct onchange functions. In the
# meantime, the lines below must be there because the fields aren't present in the view, which crashes if the
# onchange returns a value for them.
if ttype == 'sale':
del(res['value']['line_dr_ids'])
del(res['value']['pre_line'])
del(res['value']['payment_rate'])
elif ttype == 'purchase':
del(res['value']['line_cr_ids'])
del(res['value']['pre_line'])
del(res['value']['payment_rate'])
return res
def recompute_voucher_lines(self, cr, uid, ids, partner_id, journal_id, price, currency_id, ttype, date, context=None):
"""
Returns a dict that contains new values and context
@param partner_id: latest value from user input for field partner_id
@param args: other arguments
@param context: context arguments, like lang, time zone
@return: Returns a dict which contains new values, and context
"""
def _remove_noise_in_o2m():
"""if the line is partially reconciled, then we must pay attention to display it only once and
in the good o2m.
This function returns True if the line is considered as noise and should not be displayed
"""
if line.reconcile_partial_id:
if currency_id == line.currency_id.id:
if line.amount_residual_currency <= 0:
return True
else:
if line.amount_residual <= 0:
return True
return False
if context is None:
context = {}
context_multi_currency = context.copy()
currency_pool = self.pool.get('res.currency')
move_line_pool = self.pool.get('account.move.line')
partner_pool = self.pool.get('res.partner')
journal_pool = self.pool.get('account.journal')
line_pool = self.pool.get('account.voucher.line')
#set default values
default = {
'value': {'line_dr_ids': [], 'line_cr_ids': [], 'pre_line': False},
}
# drop existing lines
line_ids = ids and line_pool.search(cr, uid, [('voucher_id', '=', ids[0])])
for line in line_pool.browse(cr, uid, line_ids, context=context):
if line.type == 'cr':
default['value']['line_cr_ids'].append((2, line.id))
else:
default['value']['line_dr_ids'].append((2, line.id))
if not partner_id or not journal_id:
return default
journal = journal_pool.browse(cr, uid, journal_id, context=context)
partner = partner_pool.browse(cr, uid, partner_id, context=context)
currency_id = currency_id or journal.company_id.currency_id.id
total_credit = 0.0
total_debit = 0.0
account_type = None
if context.get('account_id'):
account_type = self.pool['account.account'].browse(cr, uid, context['account_id'], context=context).type
if ttype == 'payment':
if not account_type:
account_type = 'payable'
total_debit = price or 0.0
else:
total_credit = price or 0.0
if not account_type:
account_type = 'receivable'
if not context.get('move_line_ids', False):
ids = move_line_pool.search(cr, uid, [('state','=','valid'), ('account_id.type', '=', account_type), ('reconcile_id', '=', False), ('partner_id', '=', partner_id)], context=context)
else:
ids = context['move_line_ids']
invoice_id = context.get('invoice_id', False)
company_currency = journal.company_id.currency_id.id
move_lines_found = []
#order the lines from oldest to newest
ids.reverse()
account_move_lines = move_line_pool.browse(cr, uid, ids, context=context)
#compute the total debit/credit and look for a matching open amount or invoice
for line in account_move_lines:
if _remove_noise_in_o2m():
continue
if invoice_id:
if line.invoice.id == invoice_id:
#if the invoice linked to the voucher line is the invoice_id in the context,
#we assign the amount to that line, regardless of the other voucher lines
move_lines_found.append(line.id)
elif currency_id == company_currency:
#otherwise the treatment is the same but with other field names
if line.amount_residual == price:
#if the residual amount equals the voucher amount, we assign it to that voucher
#line, regardless of the other voucher lines
move_lines_found.append(line.id)
break
#otherwise we will split the voucher amount across the lines (oldest first)
total_credit += line.credit or 0.0
total_debit += line.debit or 0.0
elif currency_id == line.currency_id.id:
if line.amount_residual_currency == price:
move_lines_found.append(line.id)
break
total_credit += line.credit and line.amount_currency or 0.0
total_debit += line.debit and line.amount_currency or 0.0
remaining_amount = price
#voucher line creation
for line in account_move_lines:
if _remove_noise_in_o2m():
continue
if line.currency_id and currency_id == line.currency_id.id:
amount_original = abs(line.amount_currency)
amount_unreconciled = abs(line.amount_residual_currency)
else:
#always use the amount booked in the company currency as the basis of the conversion into the voucher currency
amount_original = currency_pool.compute(cr, uid, company_currency, currency_id, line.credit or line.debit or 0.0, context=context_multi_currency)
amount_unreconciled = currency_pool.compute(cr, uid, company_currency, currency_id, abs(line.amount_residual), context=context_multi_currency)
line_currency_id = line.currency_id and line.currency_id.id or company_currency
rs = {
'name':line.move_id.name,
'type': line.credit and 'dr' or 'cr',
'move_line_id':line.id,
'account_id':line.account_id.id,
'amount_original': amount_original,
'amount': (line.id in move_lines_found) and min(abs(remaining_amount), amount_unreconciled) or 0.0,
'date_original':line.date,
'date_due':line.date_maturity,
'amount_unreconciled': amount_unreconciled,
'currency_id': line_currency_id,
}
remaining_amount -= rs['amount']
#in case a corresponding move_line hasn't been found, we now try to assign the voucher amount
#to the existing invoices: we split the voucher amount oldest first, but only over lines in the same currency
if not move_lines_found:
if currency_id == line_currency_id:
if line.credit:
amount = min(amount_unreconciled, abs(total_debit))
rs['amount'] = amount
total_debit -= amount
else:
amount = min(amount_unreconciled, abs(total_credit))
rs['amount'] = amount
total_credit -= amount
if rs['amount_unreconciled'] == rs['amount']:
rs['reconcile'] = True
if rs['type'] == 'cr':
default['value']['line_cr_ids'].append(rs)
else:
default['value']['line_dr_ids'].append(rs)
if len(default['value']['line_cr_ids']) > 0:
default['value']['pre_line'] = 1
elif len(default['value']['line_dr_ids']) > 0:
default['value']['pre_line'] = 1
default['value']['writeoff_amount'] = self._compute_writeoff_amount(cr, uid, default['value']['line_dr_ids'], default['value']['line_cr_ids'], price, ttype)
return default
def onchange_payment_rate_currency(self, cr, uid, ids, currency_id, payment_rate, payment_rate_currency_id, date, amount, company_id, context=None):
if context is None:
context = {}
res = {'value': {}}
if currency_id:
#set the default payment rate of the voucher and compute the paid amount in company currency
ctx = context.copy()
ctx.update({'date': date})
#read the voucher rate with the right date in the context
voucher_rate = self.pool.get('res.currency').read(cr, uid, [currency_id], ['rate'], context=ctx)[0]['rate']
ctx.update({
'voucher_special_currency_rate': payment_rate * voucher_rate,
'voucher_special_currency': payment_rate_currency_id})
vals = self.onchange_rate(cr, uid, ids, payment_rate, amount, currency_id, payment_rate_currency_id, company_id, context=ctx)
for key in vals.keys():
res[key].update(vals[key])
return res
def onchange_date(self, cr, uid, ids, date, currency_id, payment_rate_currency_id, amount, company_id, context=None):
"""
@param date: latest value from user input for field date
@param args: other arguments
@param context: context arguments, like lang, time zone
@return: Returns a dict which contains new values, and context
"""
if context is None:
context ={}
res = {'value': {}}
#set the period of the voucher
period_pool = self.pool.get('account.period')
currency_obj = self.pool.get('res.currency')
ctx = context.copy()
ctx.update({'company_id': company_id, 'account_period_prefer_normal': True})
voucher_currency_id = currency_id or self.pool.get('res.company').browse(cr, uid, company_id, context=ctx).currency_id.id
pids = period_pool.find(cr, uid, date, context=ctx)
if pids:
res['value'].update({'period_id':pids[0]})
if payment_rate_currency_id:
ctx.update({'date': date})
payment_rate = 1.0
if payment_rate_currency_id != currency_id:
tmp = currency_obj.browse(cr, uid, payment_rate_currency_id, context=ctx).rate
payment_rate = tmp / currency_obj.browse(cr, uid, voucher_currency_id, context=ctx).rate
vals = self.onchange_payment_rate_currency(cr, uid, ids, voucher_currency_id, payment_rate, payment_rate_currency_id, date, amount, company_id, context=context)
vals['value'].update({'payment_rate': payment_rate})
for key in vals.keys():
res[key].update(vals[key])
return res
def onchange_journal(self, cr, uid, ids, journal_id, line_ids, tax_id, partner_id, date, amount, ttype, company_id, context=None):
if context is None:
context = {}
if not journal_id:
return False
journal_pool = self.pool.get('account.journal')
journal = journal_pool.browse(cr, uid, journal_id, context=context)
if ttype in ('sale', 'receipt'):
account_id = journal.default_debit_account_id
elif ttype in ('purchase', 'payment'):
account_id = journal.default_credit_account_id
else:
account_id = journal.default_credit_account_id or journal.default_debit_account_id
tax_id = False
if account_id and account_id.tax_ids:
tax_id = account_id.tax_ids[0].id
vals = {'value':{} }
if ttype in ('sale', 'purchase'):
vals = self.onchange_price(cr, uid, ids, line_ids, tax_id, partner_id, context)
vals['value'].update({'tax_id':tax_id,'amount': amount})
currency_id = False
if journal.currency:
currency_id = journal.currency.id
else:
currency_id = journal.company_id.currency_id.id
period_ids = self.pool['account.period'].find(cr, uid, dt=date, context=dict(context, company_id=company_id))
vals['value'].update({
'currency_id': currency_id,
'payment_rate_currency_id': currency_id,
'period_id': period_ids and period_ids[0] or False
})
#in case we want to register the payment directly from an invoice, it's confusing to allow switching the journal
#without seeing that the amount is expressed in the journal currency, and not in the invoice currency. So to avoid
#this common mistake, we simply reset the amount to 0 if the currency is not the invoice currency.
if context.get('payment_expected_currency') and currency_id != context.get('payment_expected_currency'):
vals['value']['amount'] = 0
amount = 0
if partner_id:
res = self.onchange_partner_id(cr, uid, ids, partner_id, journal_id, amount, currency_id, ttype, date, context)
for key in res.keys():
vals[key].update(res[key])
return vals
def onchange_company(self, cr, uid, ids, partner_id, journal_id, currency_id, company_id, context=None):
"""
If the company changes, check that the journal is in the right company.
If not, fetch a new journal.
"""
journal_pool = self.pool['account.journal']
journal = journal_pool.browse(cr, uid, journal_id, context=context)
if journal.company_id.id != company_id:
# cannot guess the journal type, better to remove it
return {'value': {'journal_id': False}}
return {}
def button_proforma_voucher(self, cr, uid, ids, context=None):
self.signal_workflow(cr, uid, ids, 'proforma_voucher')
return {'type': 'ir.actions.act_window_close'}
def proforma_voucher(self, cr, uid, ids, context=None):
self.action_move_line_create(cr, uid, ids, context=context)
return True
def action_cancel_draft(self, cr, uid, ids, context=None):
self.create_workflow(cr, uid, ids)
self.write(cr, uid, ids, {'state':'draft'})
return True
def cancel_voucher(self, cr, uid, ids, context=None):
reconcile_pool = self.pool.get('account.move.reconcile')
move_pool = self.pool.get('account.move')
move_line_pool = self.pool.get('account.move.line')
for voucher in self.browse(cr, uid, ids, context=context):
# refresh to make sure you don't unlink an already removed move
voucher.refresh()
for line in voucher.move_ids:
# refresh to make sure you don't unreconcile an already unreconciled entry
line.refresh()
if line.reconcile_id:
move_lines = [move_line.id for move_line in line.reconcile_id.line_id]
move_lines.remove(line.id)
reconcile_pool.unlink(cr, uid, [line.reconcile_id.id])
if len(move_lines) >= 2:
move_line_pool.reconcile_partial(cr, uid, move_lines, 'auto',context=context)
if voucher.move_id:
move_pool.button_cancel(cr, uid, [voucher.move_id.id])
move_pool.unlink(cr, uid, [voucher.move_id.id])
res = {
'state':'cancel',
'move_id':False,
}
self.write(cr, uid, ids, res)
return True
def unlink(self, cr, uid, ids, context=None):
for t in self.read(cr, uid, ids, ['state'], context=context):
if t['state'] not in ('draft', 'cancel'):
raise osv.except_osv(_('Invalid Action!'), _('Cannot delete voucher(s) which are already opened or paid.'))
return super(account_voucher, self).unlink(cr, uid, ids, context=context)
def onchange_payment(self, cr, uid, ids, pay_now, journal_id, partner_id, ttype='sale'):
res = {}
if not partner_id:
return res
res = {}
partner_pool = self.pool.get('res.partner')
journal_pool = self.pool.get('account.journal')
if pay_now == 'pay_later':
partner = partner_pool.browse(cr, uid, partner_id)
journal = journal_pool.browse(cr, uid, journal_id)
if journal.type in ('sale','sale_refund'):
account_id = partner.property_account_receivable.id
elif journal.type in ('purchase', 'purchase_refund','expense'):
account_id = partner.property_account_payable.id
elif ttype in ('sale', 'receipt'):
account_id = journal.default_debit_account_id.id
elif ttype in ('purchase', 'payment'):
account_id = journal.default_credit_account_id.id
else:
account_id = journal.default_credit_account_id.id or journal.default_debit_account_id.id
if account_id:
res['account_id'] = account_id
return {'value':res}
def _sel_context(self, cr, uid, voucher_id, context=None):
"""
Select the context to use, depending on whether the voucher is multi-currency or not.
:param voucher_id: id of the voucher being processed
:return: the context given as parameter if the voucher currency is the same as the company
currency, otherwise a copy of that context with an extra key 'date' containing the date of
the voucher.
:rtype: dict
"""
company_currency = self._get_company_currency(cr, uid, voucher_id, context)
current_currency = self._get_current_currency(cr, uid, voucher_id, context)
if current_currency <> company_currency:
context_multi_currency = context.copy()
voucher = self.pool.get('account.voucher').browse(cr, uid, voucher_id, context)
context_multi_currency.update({'date': voucher.date})
return context_multi_currency
return context
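    # Sketch of the behaviour above: for a voucher whose currency differs from the company currency,
    # the returned context is a copy extended with {'date': voucher.date}, so later currency
    # conversions use the rate valid at the voucher date; otherwise the context is returned unchanged.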
def first_move_line_get(self, cr, uid, voucher_id, move_id, company_currency, current_currency, context=None):
'''
Return a dict used to create the first account move line of the given voucher.
:param voucher_id: id of the voucher for which we are creating the account move.
:param move_id: id of the account move where this line will be added.
:param company_currency: id of the currency of the company to which the voucher belongs
:param current_currency: id of the currency of the voucher
:return: mapping between field name and value of the account move line to create
:rtype: dict
'''
voucher = self.pool.get('account.voucher').browse(cr,uid,voucher_id,context)
debit = credit = 0.0
# TODO: is there any other alternative than the voucher type ??
# ANSWER: We can have payment and receipt "In Advance".
# TODO: Make this logic available.
# - for sale and purchase we have it, but for payment and receipt we do not, because from the bank/cash journal alone we cannot tell whether it is a payment or a receipt
if voucher.type in ('purchase', 'payment'):
credit = voucher.paid_amount_in_company_currency
elif voucher.type in ('sale', 'receipt'):
debit = voucher.paid_amount_in_company_currency
if debit < 0: credit = -debit; debit = 0.0
if credit < 0: debit = -credit; credit = 0.0
sign = debit - credit < 0 and -1 or 1
#set the first line of the voucher
move_line = {
'name': voucher.name or '/',
'debit': debit,
'credit': credit,
'account_id': voucher.account_id.id,
'move_id': move_id,
'journal_id': voucher.journal_id.id,
'period_id': voucher.period_id.id,
'partner_id': voucher.partner_id.id,
'currency_id': company_currency <> current_currency and current_currency or False,
'amount_currency': (sign * abs(voucher.amount) # amount < 0 for refunds
if company_currency != current_currency else 0.0),
'date': voucher.date,
'date_maturity': voucher.date_due
}
return move_line
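    # Sign convention of this first line, as implemented above: purchase/payment vouchers book the
    # paid amount as a credit and sale/receipt vouchers as a debit; a negative paid amount (e.g. a
    # refund) flips the side, and amount_currency is only filled when the voucher currency differs
    # from the company currency.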
def account_move_get(self, cr, uid, voucher_id, context=None):
'''
This method prepares the creation of the account move related to the given voucher.
:param voucher_id: id of the voucher for which we are creating the account move.
:return: mapping between field name and value of the account move to create
:rtype: dict
'''
seq_obj = self.pool.get('ir.sequence')
voucher = self.pool.get('account.voucher').browse(cr,uid,voucher_id,context)
if voucher.number:
name = voucher.number
elif voucher.journal_id.sequence_id:
if not voucher.journal_id.sequence_id.active:
raise osv.except_osv(_('Configuration Error !'),
_('Please activate the sequence of selected journal !'))
c = dict(context)
c.update({'fiscalyear_id': voucher.period_id.fiscalyear_id.id})
name = seq_obj.next_by_id(cr, uid, voucher.journal_id.sequence_id.id, context=c)
else:
raise osv.except_osv(_('Error!'),
_('Please define a sequence on the journal.'))
if not voucher.reference:
ref = name.replace('/','')
else:
ref = voucher.reference
move = {
'name': name,
'journal_id': voucher.journal_id.id,
'narration': voucher.narration,
'date': voucher.date,
'ref': ref,
'period_id': voucher.period_id.id,
}
return move
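    # Expected behaviour (hypothetical journal): a voucher without a number but with an active
    # sequence on its journal takes its move name from ir.sequence within the voucher's fiscal year,
    # and the move reference falls back to that name with '/' stripped when no reference is set.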
def _get_exchange_lines(self, cr, uid, line, move_id, amount_residual, company_currency, current_currency, context=None):
'''
Prepare the two lines in company currency due to the currency rate difference.
:param line: browse record of the voucher line for which we want to create the currency rate
difference accounting entries
:param move_id: id of the account move where the move lines will be added.
:param amount_residual: amount to be posted.
:param company_currency: id of the currency of the company to which the voucher belongs
:param current_currency: id of the currency of the voucher
:return: the account move line and its counterpart to create, expressed as mappings between field name and value
:rtype: tuple of dict
'''
if amount_residual > 0:
account_id = line.voucher_id.company_id.expense_currency_exchange_account_id
if not account_id:
model, action_id = self.pool['ir.model.data'].get_object_reference(cr, uid, 'account', 'action_account_form')
msg = _("You should configure the 'Loss Exchange Rate Account' to manage automatically the booking of accounting entries related to differences between exchange rates.")
raise openerp.exceptions.RedirectWarning(msg, action_id, _('Go to the configuration panel'))
else:
account_id = line.voucher_id.company_id.income_currency_exchange_account_id
if not account_id:
model, action_id = self.pool['ir.model.data'].get_object_reference(cr, uid, 'account', 'action_account_form')
msg = _("You should configure the 'Gain Exchange Rate Account' to manage automatically the booking of accounting entries related to differences between exchange rates.")
raise openerp.exceptions.RedirectWarning(msg, action_id, _('Go to the configuration panel'))
# Even if the amount_currency is never filled, we need to pass the foreign currency because otherwise
# the receivable/payable account may have a secondary currency, which renders this field mandatory
if line.account_id.currency_id:
account_currency_id = line.account_id.currency_id.id
else:
account_currency_id = company_currency <> current_currency and current_currency or False
move_line = {
'journal_id': line.voucher_id.journal_id.id,
'period_id': line.voucher_id.period_id.id,
'name': _('change')+': '+(line.name or '/'),
'account_id': line.account_id.id,
'move_id': move_id,
'partner_id': line.voucher_id.partner_id.id,
'currency_id': account_currency_id,
'amount_currency': 0.0,
'quantity': 1,
'credit': amount_residual > 0 and amount_residual or 0.0,
'debit': amount_residual < 0 and -amount_residual or 0.0,
'date': line.voucher_id.date,
}
move_line_counterpart = {
'journal_id': line.voucher_id.journal_id.id,
'period_id': line.voucher_id.period_id.id,
'name': _('change')+': '+(line.name or '/'),
'account_id': account_id.id,
'move_id': move_id,
'amount_currency': 0.0,
'partner_id': line.voucher_id.partner_id.id,
'currency_id': account_currency_id,
'quantity': 1,
'debit': amount_residual > 0 and amount_residual or 0.0,
'credit': amount_residual < 0 and -amount_residual or 0.0,
'date': line.voucher_id.date,
}
return (move_line, move_line_counterpart)
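    # Illustrative sketch (hypothetical amount): with amount_residual = 15.0 the first line credits
    # 15.0 on the voucher line account and the counterpart debits 15.0 on the company's loss exchange
    # rate account; a negative residual uses the gain account with debit and credit swapped.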
def _convert_amount(self, cr, uid, amount, voucher_id, context=None):
'''
This function converts the given amount into the company currency. It uses either the rate set on the
voucher (if the payment_rate_currency_id is relevant) or the rate encoded in the system.
:param amount: float. The amount to convert
:param voucher_id: id of the voucher for which we want the conversion
:param context: the context to use for the conversion. It may contain the key 'date' set to the voucher
date in order to select the right rate to use.
:return: the amount in the currency of the voucher's company
:rtype: float
'''
if context is None:
context = {}
currency_obj = self.pool.get('res.currency')
voucher = self.browse(cr, uid, voucher_id, context=context)
return currency_obj.compute(cr, uid, voucher.currency_id.id, voucher.company_id.currency_id.id, amount, context=context)
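    # Usage sketch (hypothetical figures): self._convert_amount(cr, uid, 100.0, voucher.id, context=ctx),
    # with ctx carrying the voucher date and, when relevant, the voucher_special_currency keys, returns
    # the equivalent of 100.0 voucher-currency units expressed in the company currency.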
def voucher_move_line_create(self, cr, uid, voucher_id, line_total, move_id, company_currency, current_currency, context=None):
'''
Create one account move line, on the given account move, per voucher line where the amount is not 0.0.
It returns a tuple made of the total difference between debit and credit and a list of lists of
move line ids to be reconciled together, i.e. (total_deb_cred, list_of_lists).
:param voucher_id: id of the voucher we are working with
:param line_total: amount of the first line, which corresponds to the amount we should split among all the voucher lines.
:param move_id: id of the account move where those lines will be added.
:param company_currency: id of the currency of the company to which the voucher belongs
:param current_currency: id of the currency of the voucher
:return: tuple built as (remaining amount not allocated on the voucher lines, list of lists of the move line ids to reconcile)
:rtype: tuple(float, list of list of int)
'''
if context is None:
context = {}
move_line_obj = self.pool.get('account.move.line')
currency_obj = self.pool.get('res.currency')
tax_obj = self.pool.get('account.tax')
tot_line = line_total
rec_lst_ids = []
date = self.read(cr, uid, [voucher_id], ['date'], context=context)[0]['date']
ctx = context.copy()
ctx.update({'date': date})
voucher = self.pool.get('account.voucher').browse(cr, uid, voucher_id, context=ctx)
voucher_currency = voucher.journal_id.currency or voucher.company_id.currency_id
ctx.update({
'voucher_special_currency_rate': voucher_currency.rate * voucher.payment_rate ,
'voucher_special_currency': voucher.payment_rate_currency_id and voucher.payment_rate_currency_id.id or False,})
prec = self.pool.get('decimal.precision').precision_get(cr, uid, 'Account')
for line in voucher.line_ids:
#create one move line per voucher line where amount is not 0.0
# AND (second part of the clause) only if the original move line did not have debit = credit = 0 (which is a legal value)
if not line.amount and not (line.move_line_id and not float_compare(line.move_line_id.debit, line.move_line_id.credit, precision_digits=prec) and not float_compare(line.move_line_id.debit, 0.0, precision_digits=prec)):
continue
# convert the amount set on the voucher line into the currency of the voucher's company
# this calls the compute() method of res.currency with the right context, so that it will either take the rate on the voucher if it is relevant or use the default behaviour
amount = self._convert_amount(cr, uid, line.untax_amount or line.amount, voucher.id, context=ctx)
# if the amount encoded in voucher is equal to the amount unreconciled, we need to compute the
# currency rate difference
if line.amount == line.amount_unreconciled:
if not line.move_line_id:
raise osv.except_osv(_('Wrong voucher line'),_("The invoice you are willing to pay is not valid anymore."))
sign = line.type =='dr' and -1 or 1
currency_rate_difference = sign * (line.move_line_id.amount_residual - amount)
else:
currency_rate_difference = 0.0
move_line = {
'journal_id': voucher.journal_id.id,
'period_id': voucher.period_id.id,
'name': line.name or '/',
'account_id': line.account_id.id,
'move_id': move_id,
'partner_id': voucher.partner_id.id,
'currency_id': line.move_line_id and (company_currency <> line.move_line_id.currency_id.id and line.move_line_id.currency_id.id) or False,
'analytic_account_id': line.account_analytic_id and line.account_analytic_id.id or False,
'quantity': 1,
'credit': 0.0,
'debit': 0.0,
'date': voucher.date
}
if amount < 0:
amount = -amount
if line.type == 'dr':
line.type = 'cr'
else:
line.type = 'dr'
if (line.type=='dr'):
tot_line += amount
move_line['debit'] = amount
else:
tot_line -= amount
move_line['credit'] = amount
if voucher.tax_id and voucher.type in ('sale', 'purchase'):
move_line.update({
'account_tax_id': voucher.tax_id.id,
})
# compute the amount in foreign currency
foreign_currency_diff = 0.0
amount_currency = False
if line.move_line_id:
# We want to set it on the account move line whenever the original line had a foreign currency
if line.move_line_id.currency_id and line.move_line_id.currency_id.id != company_currency:
# we compute the amount in that foreign currency.
if line.move_line_id.currency_id.id == current_currency:
# if the voucher and the voucher line share the same currency, there is no computation to do
sign = (move_line['debit'] - move_line['credit']) < 0 and -1 or 1
amount_currency = sign * (line.amount)
else:
# if the rate is specified on the voucher, it will be used thanks to the special keys in the context
# otherwise we use the rates of the system
amount_currency = currency_obj.compute(cr, uid, company_currency, line.move_line_id.currency_id.id, move_line['debit']-move_line['credit'], context=ctx)
if line.amount == line.amount_unreconciled:
foreign_currency_diff = line.move_line_id.amount_residual_currency - abs(amount_currency)
move_line['amount_currency'] = amount_currency
voucher_line = move_line_obj.create(cr, uid, move_line)
rec_ids = [voucher_line, line.move_line_id.id]
if not currency_obj.is_zero(cr, uid, voucher.company_id.currency_id, currency_rate_difference):
# Exchange rate difference entry in company currency
exch_lines = self._get_exchange_lines(cr, uid, line, move_id, currency_rate_difference, company_currency, current_currency, context=context)
new_id = move_line_obj.create(cr, uid, exch_lines[0],context)
move_line_obj.create(cr, uid, exch_lines[1], context)
rec_ids.append(new_id)
if line.move_line_id and line.move_line_id.currency_id and not currency_obj.is_zero(cr, uid, line.move_line_id.currency_id, foreign_currency_diff):
# Exchange rate difference entry in voucher currency
move_line_foreign_currency = {
'journal_id': line.voucher_id.journal_id.id,
'period_id': line.voucher_id.period_id.id,
'name': _('change')+': '+(line.name or '/'),
'account_id': line.account_id.id,
'move_id': move_id,
'partner_id': line.voucher_id.partner_id.id,
'currency_id': line.move_line_id.currency_id.id,
'amount_currency': -1 * foreign_currency_diff,
'quantity': 1,
'credit': 0.0,
'debit': 0.0,
'date': line.voucher_id.date,
}
new_id = move_line_obj.create(cr, uid, move_line_foreign_currency, context=context)
rec_ids.append(new_id)
if line.move_line_id.id:
rec_lst_ids.append(rec_ids)
return (tot_line, rec_lst_ids)
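    # Note on the returned value: tot_line is what remains to be balanced by the write-off line, and
    # rec_lst_ids groups, per voucher line, the ids of the created move line, its original
    # move_line_id and any exchange rate difference lines, so each group can be reconciled together.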
def writeoff_move_line_get(self, cr, uid, voucher_id, line_total, move_id, name, company_currency, current_currency, context=None):
'''
Return a dict used to create the write-off move line.
:param voucher_id: id of the voucher for which we are creating the account move.
:param line_total: amount remaining to be allocated on the lines.
:param move_id: id of the account move where this line will be added.
:param name: description of the account move line.
:param company_currency: id of the currency of the company to which the voucher belongs
:param current_currency: id of the currency of the voucher
:return: mapping between field name and value of the account move line to create
:rtype: dict
'''
currency_obj = self.pool.get('res.currency')
move_line = {}
voucher = self.pool.get('account.voucher').browse(cr,uid,voucher_id,context)
current_currency_obj = voucher.currency_id or voucher.journal_id.company_id.currency_id
if not currency_obj.is_zero(cr, uid, current_currency_obj, line_total):
diff = line_total
account_id = False
write_off_name = ''
if voucher.payment_option == 'with_writeoff':
account_id = voucher.writeoff_acc_id.id
write_off_name = voucher.comment
elif voucher.partner_id:
if voucher.type in ('sale', 'receipt'):
account_id = voucher.partner_id.property_account_receivable.id
else:
account_id = voucher.partner_id.property_account_payable.id
else:
# fallback on account of voucher
account_id = voucher.account_id.id
sign = voucher.type == 'payment' and -1 or 1
move_line = {
'name': write_off_name or name,
'account_id': account_id,
'move_id': move_id,
'partner_id': voucher.partner_id.id,
'date': voucher.date,
'credit': diff > 0 and diff or 0.0,
'debit': diff < 0 and -diff or 0.0,
'amount_currency': company_currency <> current_currency and (sign * -1 * voucher.writeoff_amount) or 0.0,
'currency_id': company_currency <> current_currency and current_currency or False,
'analytic_account_id': voucher.analytic_id and voucher.analytic_id.id or False,
}
return move_line
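    # Illustrative example (hypothetical amount): with line_total = 5.0 left unallocated and the
    # 'with_writeoff' payment option, the dict above books a 5.0 credit on the configured write-off
    # account; with line_total = -5.0 it would book a 5.0 debit instead.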
def _get_company_currency(self, cr, uid, voucher_id, context=None):
'''
Get the currency of the voucher's company.
:param voucher_id: id of the voucher from which we want to obtain the company currency.
:return: currency id of the company of the voucher
:rtype: int
'''
return self.pool.get('account.voucher').browse(cr,uid,voucher_id,context).journal_id.company_id.currency_id.id
def _get_current_currency(self, cr, uid, voucher_id, context=None):
'''
Get the currency of the voucher.
:param voucher_id: id of the voucher from which we want to obtain the current currency.
:return: currency id of the voucher
:rtype: int
'''
voucher = self.pool.get('account.voucher').browse(cr,uid,voucher_id,context)
return voucher.currency_id.id or self._get_company_currency(cr,uid,voucher.id,context)
def action_move_line_create(self, cr, uid, ids, context=None):
'''
Confirm the vouchers given in ids and create the journal entries for each of them
'''
if context is None:
context = {}
move_pool = self.pool.get('account.move')
move_line_pool = self.pool.get('account.move.line')
for voucher in self.browse(cr, uid, ids, context=context):
local_context = dict(context, force_company=voucher.journal_id.company_id.id)
if voucher.move_id:
continue
company_currency = self._get_company_currency(cr, uid, voucher.id, context)
current_currency = self._get_current_currency(cr, uid, voucher.id, context)
# we select the context to use, depending on whether it is a multi-currency case or not
context = self._sel_context(cr, uid, voucher.id, context)
# But for the operations made by _convert_amount, we always need to give the date in the context
ctx = context.copy()
ctx.update({'date': voucher.date})
# Create the account move record.
move_id = move_pool.create(cr, uid, self.account_move_get(cr, uid, voucher.id, context=context), context=context)
# Get the name of the account_move just created
name = move_pool.browse(cr, uid, move_id, context=context).name
# Create the first line of the voucher
move_line_id = move_line_pool.create(cr, uid, self.first_move_line_get(cr,uid,voucher.id, move_id, company_currency, current_currency, local_context), local_context)
move_line_brw = move_line_pool.browse(cr, uid, move_line_id, context=context)
line_total = move_line_brw.debit - move_line_brw.credit
rec_list_ids = []
if voucher.type == 'sale':
line_total = line_total - self._convert_amount(cr, uid, voucher.tax_amount, voucher.id, context=ctx)
elif voucher.type == 'purchase':
line_total = line_total + self._convert_amount(cr, uid, voucher.tax_amount, voucher.id, context=ctx)
# Create one move line per voucher line where amount is not 0.0
line_total, rec_list_ids = self.voucher_move_line_create(cr, uid, voucher.id, line_total, move_id, company_currency, current_currency, context)
# Create the writeoff line if needed
ml_writeoff = self.writeoff_move_line_get(cr, uid, voucher.id, line_total, move_id, name, company_currency, current_currency, local_context)
if ml_writeoff:
move_line_pool.create(cr, uid, ml_writeoff, local_context)
# We post the voucher.
self.write(cr, uid, [voucher.id], {
'move_id': move_id,
'state': 'posted',
'number': name,
})
if voucher.journal_id.entry_posted:
move_pool.post(cr, uid, [move_id], context={})
# We automatically reconcile the account move lines.
reconcile = False
for rec_ids in rec_list_ids:
if len(rec_ids) >= 2:
reconcile = move_line_pool.reconcile_partial(cr, uid, rec_ids, writeoff_acc_id=voucher.writeoff_acc_id.id, writeoff_period_id=voucher.period_id.id, writeoff_journal_id=voucher.journal_id.id)
return True
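    # Posting flow implemented above, summarised: create the account move, add the first counterpart
    # line, add one line per voucher line, add a write-off line for any remainder, mark the voucher
    # as posted, then partially reconcile each group of related move lines.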
class account_voucher_line(osv.osv):
_name = 'account.voucher.line'
_description = 'Voucher Lines'
_order = "move_line_id"
# If the payment is in the same currency as the invoice, we keep the same amount
# Otherwise, we compute from invoice currency to payment currency
def _compute_balance(self, cr, uid, ids, name, args, context=None):
currency_pool = self.pool.get('res.currency')
rs_data = {}
for line in self.browse(cr, uid, ids, context=context):
ctx = context.copy()
ctx.update({'date': line.voucher_id.date})
voucher_rate = self.pool.get('res.currency').read(cr, uid, line.voucher_id.currency_id.id, ['rate'], context=ctx)['rate']
ctx.update({
'voucher_special_currency': line.voucher_id.payment_rate_currency_id and line.voucher_id.payment_rate_currency_id.id or False,
'voucher_special_currency_rate': line.voucher_id.payment_rate * voucher_rate})
res = {}
company_currency = line.voucher_id.journal_id.company_id.currency_id.id
voucher_currency = line.voucher_id.currency_id and line.voucher_id.currency_id.id or company_currency
move_line = line.move_line_id or False
if not move_line:
res['amount_original'] = 0.0
res['amount_unreconciled'] = 0.0
elif move_line.currency_id and voucher_currency==move_line.currency_id.id:
res['amount_original'] = abs(move_line.amount_currency)
res['amount_unreconciled'] = abs(move_line.amount_residual_currency)
else:
#always use the amount booked in the company currency as the basis of the conversion into the voucher currency
res['amount_original'] = currency_pool.compute(cr, uid, company_currency, voucher_currency, move_line.credit or move_line.debit or 0.0, context=ctx)
res['amount_unreconciled'] = currency_pool.compute(cr, uid, company_currency, voucher_currency, abs(move_line.amount_residual), context=ctx)
rs_data[line.id] = res
return rs_data
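    # Illustrative note: when the move line is in the voucher currency, the open balance comes directly
    # from amount_residual_currency; for any other currency the amounts are converted from the
    # company-currency figures at the voucher date, using the special rate keys injected above.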
def _currency_id(self, cr, uid, ids, name, args, context=None):
'''
This function returns the currency id of a voucher line. It's either the currency of the
associated move line (if any) or the currency of the voucher or the company currency.
'''
res = {}
for line in self.browse(cr, uid, ids, context=context):
move_line = line.move_line_id
if move_line:
res[line.id] = move_line.currency_id and move_line.currency_id.id or move_line.company_id.currency_id.id
else:
res[line.id] = line.voucher_id.currency_id and line.voucher_id.currency_id.id or line.voucher_id.company_id.currency_id.id
return res
_columns = {
'voucher_id':fields.many2one('account.voucher', 'Voucher', required=1, ondelete='cascade'),
'name':fields.char('Description',),
'account_id':fields.many2one('account.account','Account', required=True),
'partner_id':fields.related('voucher_id', 'partner_id', type='many2one', relation='res.partner', string='Partner'),
'untax_amount':fields.float('Untax Amount'),
'amount':fields.float('Amount', digits_compute=dp.get_precision('Account')),
'reconcile': fields.boolean('Full Reconcile'),
'type':fields.selection([('dr','Debit'),('cr','Credit')], 'Dr/Cr'),
'account_analytic_id': fields.many2one('account.analytic.account', 'Analytic Account'),
'move_line_id': fields.many2one('account.move.line', 'Journal Item', copy=False),
'date_original': fields.related('move_line_id','date', type='date', relation='account.move.line', string='Date', readonly=1),
'date_due': fields.related('move_line_id','date_maturity', type='date', relation='account.move.line', string='Due Date', readonly=1),
'amount_original': fields.function(_compute_balance, multi='dc', type='float', string='Original Amount', store=True, digits_compute=dp.get_precision('Account')),
'amount_unreconciled': fields.function(_compute_balance, multi='dc', type='float', string='Open Balance', store=True, digits_compute=dp.get_precision('Account')),
'company_id': fields.related('voucher_id','company_id', relation='res.company', type='many2one', string='Company', store=True, readonly=True),
'currency_id': fields.function(_currency_id, string='Currency', type='many2one', relation='res.currency', readonly=True),
}
_defaults = {
'name': '',
}
def onchange_reconcile(self, cr, uid, ids, reconcile, amount, amount_unreconciled, context=None):
vals = {'amount': 0.0}
if reconcile:
vals = { 'amount': amount_unreconciled}
return {'value': vals}
def onchange_amount(self, cr, uid, ids, amount, amount_unreconciled, context=None):
vals = {}
if amount:
vals['reconcile'] = (amount == amount_unreconciled)
return {'value': vals}
def onchange_move_line_id(self, cr, user, ids, move_line_id, context=None):
"""
Returns a dict that contains new values and context
@param move_line_id: latest value from user input for field move_line_id
@param args: other arguments
@param context: context arguments, like lang, time zone
@return: Returns a dict which contains new values, and context
"""
res = {}
move_line_pool = self.pool.get('account.move.line')
if move_line_id:
move_line = move_line_pool.browse(cr, user, move_line_id, context=context)
if move_line.credit:
ttype = 'dr'
else:
ttype = 'cr'
res.update({
'account_id': move_line.account_id.id,
'type': ttype,
'currency_id': move_line.currency_id and move_line.currency_id.id or move_line.company_id.currency_id.id,
})
return {
'value':res,
}
def default_get(self, cr, user, fields_list, context=None):
"""
Returns default values for fields
@param fields_list: list of fields, for which default values are required to be read
@param context: context arguments, like lang, time zone
@return: Returns a dict that contains default values for fields
"""
if context is None:
context = {}
journal_id = context.get('journal_id', False)
partner_id = context.get('partner_id', False)
journal_pool = self.pool.get('account.journal')
partner_pool = self.pool.get('res.partner')
values = super(account_voucher_line, self).default_get(cr, user, fields_list, context=context)
if (not journal_id) or ('account_id' not in fields_list):
return values
journal = journal_pool.browse(cr, user, journal_id, context=context)
account_id = False
ttype = 'cr'
if journal.type in ('sale', 'sale_refund'):
account_id = journal.default_credit_account_id and journal.default_credit_account_id.id or False
ttype = 'cr'
elif journal.type in ('purchase', 'expense', 'purchase_refund'):
account_id = journal.default_debit_account_id and journal.default_debit_account_id.id or False
ttype = 'dr'
elif partner_id:
partner = partner_pool.browse(cr, user, partner_id, context=context)
if context.get('type') == 'payment':
ttype = 'dr'
account_id = partner.property_account_payable.id
elif context.get('type') == 'receipt':
account_id = partner.property_account_receivable.id
values.update({
'account_id':account_id,
'type':ttype
})
return values
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
hy-2013/scrapy
|
refs/heads/master
|
tests/test_mail.py
|
129
|
import unittest
from io import BytesIO
from scrapy.mail import MailSender
class MailSenderTest(unittest.TestCase):
def test_send(self):
mailsender = MailSender(debug=True)
mailsender.send(to=['test@scrapy.org'], subject='subject', body='body', _callback=self._catch_mail_sent)
assert self.catched_msg
self.assertEqual(self.catched_msg['to'], ['test@scrapy.org'])
self.assertEqual(self.catched_msg['subject'], 'subject')
self.assertEqual(self.catched_msg['body'], 'body')
msg = self.catched_msg['msg']
self.assertEqual(msg['to'], 'test@scrapy.org')
self.assertEqual(msg['subject'], 'subject')
self.assertEqual(msg.get_payload(), 'body')
self.assertEqual(msg.get('Content-Type'), 'text/plain')
def test_send_html(self):
mailsender = MailSender(debug=True)
mailsender.send(to=['test@scrapy.org'], subject='subject', body='<p>body</p>', mimetype='text/html', _callback=self._catch_mail_sent)
msg = self.catched_msg['msg']
self.assertEqual(msg.get_payload(), '<p>body</p>')
self.assertEqual(msg.get('Content-Type'), 'text/html')
def test_send_attach(self):
attach = BytesIO()
attach.write(b'content')
attach.seek(0)
attachs = [('attachment', 'text/plain', attach)]
mailsender = MailSender(debug=True)
mailsender.send(to=['test@scrapy.org'], subject='subject', body='body',
attachs=attachs, _callback=self._catch_mail_sent)
assert self.catched_msg
self.assertEqual(self.catched_msg['to'], ['test@scrapy.org'])
self.assertEqual(self.catched_msg['subject'], 'subject')
self.assertEqual(self.catched_msg['body'], 'body')
msg = self.catched_msg['msg']
self.assertEqual(msg['to'], 'test@scrapy.org')
self.assertEqual(msg['subject'], 'subject')
payload = msg.get_payload()
assert isinstance(payload, list)
self.assertEqual(len(payload), 2)
text, attach = payload
self.assertEqual(text.get_payload(decode=True), 'body')
self.assertEqual(attach.get_payload(decode=True), 'content')
def _catch_mail_sent(self, **kwargs):
self.catched_msg = dict(**kwargs)
if __name__ == "__main__":
unittest.main()
|
dsavoiu/kafe2
|
refs/heads/master
|
kafe2/core/__init__.py
|
1
|
"""Core API: core components for fitting with kafe2
"""
|
shahbaz17/zamboni
|
refs/heads/master
|
mkt/purchase/management/__init__.py
|
12133432
| |
wathsalav/xos
|
refs/heads/master
|
xos/core/migrations/__init__.py
|
12133432
| |
chengsoonong/crowdastro
|
refs/heads/master
|
crowdastro/crowd/__init__.py
|
12133432
| |
kionetworks/openstack-dashboard-havana
|
refs/heads/master
|
openstack_dashboard/dashboards/project/images_and_snapshots/snapshots/__init__.py
|
12133432
| |
madmath/sous-chef
|
refs/heads/dev
|
src/billing/migrations/0001_initial.py
|
3
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-09-12 19:23
from __future__ import unicode_literals
import annoying.fields
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
('order', '0008_auto_20160912_1509'),
]
operations = [
migrations.CreateModel(
name='Billing',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('total_amount', models.DecimalField(decimal_places=2, max_digits=6, verbose_name='total_amount')),
('billing_month', models.IntegerField()),
('billing_year', models.IntegerField()),
('created', models.DateTimeField(auto_now=True)),
('detail', annoying.fields.JSONField()),
('orders', models.ManyToManyField(to='order.Order')),
],
),
]
|
mancoast/CPythonPyc_test
|
refs/heads/master
|
cpython/234_test_types.py
|
2
|
# Python test set -- part 6, built-in types
from test.test_support import *
print '6. Built-in types'
print '6.1 Truth value testing'
if None: raise TestFailed, 'None is true instead of false'
if 0: raise TestFailed, '0 is true instead of false'
if 0L: raise TestFailed, '0L is true instead of false'
if 0.0: raise TestFailed, '0.0 is true instead of false'
if '': raise TestFailed, '\'\' is true instead of false'
if (): raise TestFailed, '() is true instead of false'
if []: raise TestFailed, '[] is true instead of false'
if {}: raise TestFailed, '{} is true instead of false'
if not 1: raise TestFailed, '1 is false instead of true'
if not 1L: raise TestFailed, '1L is false instead of true'
if not 1.0: raise TestFailed, '1.0 is false instead of true'
if not 'x': raise TestFailed, '\'x\' is false instead of true'
if not (1, 1): raise TestFailed, '(1, 1) is false instead of true'
if not [1]: raise TestFailed, '[1] is false instead of true'
if not {'x': 1}: raise TestFailed, '{\'x\': 1} is false instead of true'
def f(): pass
class C: pass
import sys
x = C()
if not f: raise TestFailed, 'f is false instead of true'
if not C: raise TestFailed, 'C is false instead of true'
if not sys: raise TestFailed, 'sys is false instead of true'
if not x: raise TestFailed, 'x is false instead of true'
print '6.2 Boolean operations'
if 0 or 0: raise TestFailed, '0 or 0 is true instead of false'
if 1 and 1: pass
else: raise TestFailed, '1 and 1 is false instead of true'
if not 1: raise TestFailed, 'not 1 is true instead of false'
print '6.3 Comparisons'
if 0 < 1 <= 1 == 1 >= 1 > 0 != 1: pass
else: raise TestFailed, 'int comparisons failed'
if 0L < 1L <= 1L == 1L >= 1L > 0L != 1L: pass
else: raise TestFailed, 'long int comparisons failed'
if 0.0 < 1.0 <= 1.0 == 1.0 >= 1.0 > 0.0 != 1.0: pass
else: raise TestFailed, 'float comparisons failed'
if '' < 'a' <= 'a' == 'a' < 'abc' < 'abd' < 'b': pass
else: raise TestFailed, 'string comparisons failed'
if 0 in [0] and 0 not in [1]: pass
else: raise TestFailed, 'membership test failed'
if None is None and [] is not []: pass
else: raise TestFailed, 'identity test failed'
try: float('')
except ValueError: pass
else: raise TestFailed, "float('') didn't raise ValueError"
try: float('5\0')
except ValueError: pass
else: raise TestFailed, "float('5\0') didn't raise ValueError"
try: 5.0 / 0.0
except ZeroDivisionError: pass
else: raise TestFailed, "5.0 / 0.0 didn't raise ZeroDivisionError"
try: 5.0 // 0.0
except ZeroDivisionError: pass
else: raise TestFailed, "5.0 // 0.0 didn't raise ZeroDivisionError"
try: 5.0 % 0.0
except ZeroDivisionError: pass
else: raise TestFailed, "5.0 % 0.0 didn't raise ZeroDivisionError"
try: 5 / 0L
except ZeroDivisionError: pass
else: raise TestFailed, "5 / 0L didn't raise ZeroDivisionError"
try: 5 // 0L
except ZeroDivisionError: pass
else: raise TestFailed, "5 // 0L didn't raise ZeroDivisionError"
try: 5 % 0L
except ZeroDivisionError: pass
else: raise TestFailed, "5 % 0L didn't raise ZeroDivisionError"
print '6.4 Numeric types (mostly conversions)'
if 0 != 0L or 0 != 0.0 or 0L != 0.0: raise TestFailed, 'mixed comparisons'
if 1 != 1L or 1 != 1.0 or 1L != 1.0: raise TestFailed, 'mixed comparisons'
if -1 != -1L or -1 != -1.0 or -1L != -1.0:
raise TestFailed, 'int/long/float value not equal'
# calling built-in types without argument must return 0
if int() != 0: raise TestFailed, 'int() does not return 0'
if long() != 0L: raise TestFailed, 'long() does not return 0L'
if float() != 0.0: raise TestFailed, 'float() does not return 0.0'
if int(1.9) == 1 == int(1.1) and int(-1.1) == -1 == int(-1.9): pass
else: raise TestFailed, 'int() does not round properly'
if long(1.9) == 1L == long(1.1) and long(-1.1) == -1L == long(-1.9): pass
else: raise TestFailed, 'long() does not round properly'
if float(1) == 1.0 and float(-1) == -1.0 and float(0) == 0.0: pass
else: raise TestFailed, 'float() does not work properly'
print '6.4.1 32-bit integers'
if 12 + 24 != 36: raise TestFailed, 'int op'
if 12 + (-24) != -12: raise TestFailed, 'int op'
if (-12) + 24 != 12: raise TestFailed, 'int op'
if (-12) + (-24) != -36: raise TestFailed, 'int op'
if not 12 < 24: raise TestFailed, 'int op'
if not -24 < -12: raise TestFailed, 'int op'
# Test for a particular bug in integer multiply
xsize, ysize, zsize = 238, 356, 4
if not (xsize*ysize*zsize == zsize*xsize*ysize == 338912):
raise TestFailed, 'int mul commutativity'
# And another.
m = -sys.maxint - 1
for divisor in 1, 2, 4, 8, 16, 32:
j = m // divisor
prod = divisor * j
if prod != m:
raise TestFailed, "%r * %r == %r != %r" % (divisor, j, prod, m)
if type(prod) is not int:
raise TestFailed, ("expected type(prod) to be int, not %r" %
type(prod))
# Check for expected * overflow to long.
for divisor in 1, 2, 4, 8, 16, 32:
j = m // divisor - 1
prod = divisor * j
if type(prod) is not long:
raise TestFailed, ("expected type(%r) to be long, not %r" %
(prod, type(prod)))
# Check for expected * overflow to long.
m = sys.maxint
for divisor in 1, 2, 4, 8, 16, 32:
j = m // divisor + 1
prod = divisor * j
if type(prod) is not long:
raise TestFailed, ("expected type(%r) to be long, not %r" %
(prod, type(prod)))
print '6.4.2 Long integers'
if 12L + 24L != 36L: raise TestFailed, 'long op'
if 12L + (-24L) != -12L: raise TestFailed, 'long op'
if (-12L) + 24L != 12L: raise TestFailed, 'long op'
if (-12L) + (-24L) != -36L: raise TestFailed, 'long op'
if not 12L < 24L: raise TestFailed, 'long op'
if not -24L < -12L: raise TestFailed, 'long op'
x = sys.maxint
if int(long(x)) != x: raise TestFailed, 'long op'
try: y = int(long(x)+1L)
except OverflowError: raise TestFailed, 'long op'
if not isinstance(y, long): raise TestFailed, 'long op'
x = -x
if int(long(x)) != x: raise TestFailed, 'long op'
x = x-1
if int(long(x)) != x: raise TestFailed, 'long op'
try: y = int(long(x)-1L)
except OverflowError: raise TestFailed, 'long op'
if not isinstance(y, long): raise TestFailed, 'long op'
try: 5 << -5
except ValueError: pass
else: raise TestFailed, 'int negative shift <<'
try: 5L << -5L
except ValueError: pass
else: raise TestFailed, 'long negative shift <<'
try: 5 >> -5
except ValueError: pass
else: raise TestFailed, 'int negative shift >>'
try: 5L >> -5L
except ValueError: pass
else: raise TestFailed, 'long negative shift >>'
print '6.4.3 Floating point numbers'
if 12.0 + 24.0 != 36.0: raise TestFailed, 'float op'
if 12.0 + (-24.0) != -12.0: raise TestFailed, 'float op'
if (-12.0) + 24.0 != 12.0: raise TestFailed, 'float op'
if (-12.0) + (-24.0) != -36.0: raise TestFailed, 'float op'
if not 12.0 < 24.0: raise TestFailed, 'float op'
if not -24.0 < -12.0: raise TestFailed, 'float op'
print '6.5 Sequence types'
print '6.5.1 Strings'
if len('') != 0: raise TestFailed, 'len(\'\')'
if len('a') != 1: raise TestFailed, 'len(\'a\')'
if len('abcdef') != 6: raise TestFailed, 'len(\'abcdef\')'
if 'xyz' + 'abcde' != 'xyzabcde': raise TestFailed, 'string concatenation'
if 'xyz'*3 != 'xyzxyzxyz': raise TestFailed, 'string repetition *3'
if 0*'abcde' != '': raise TestFailed, 'string repetition 0*'
if min('abc') != 'a' or max('abc') != 'c': raise TestFailed, 'min/max string'
if 'a' in 'abc' and 'b' in 'abc' and 'c' in 'abc' and 'd' not in 'abc': pass
else: raise TestFailed, 'in/not in string'
x = 'x'*103
if '%s!'%x != x+'!': raise TestFailed, 'nasty string formatting bug'
#extended slices for strings
a = '0123456789'
vereq(a[::], a)
vereq(a[::2], '02468')
vereq(a[1::2], '13579')
vereq(a[::-1],'9876543210')
vereq(a[::-2], '97531')
vereq(a[3::-2], '31')
vereq(a[-100:100:], a)
vereq(a[100:-100:-1], a[::-1])
vereq(a[-100L:100L:2L], '02468')
if have_unicode:
a = unicode('0123456789', 'ascii')
vereq(a[::], a)
vereq(a[::2], unicode('02468', 'ascii'))
vereq(a[1::2], unicode('13579', 'ascii'))
vereq(a[::-1], unicode('9876543210', 'ascii'))
vereq(a[::-2], unicode('97531', 'ascii'))
vereq(a[3::-2], unicode('31', 'ascii'))
vereq(a[-100:100:], a)
vereq(a[100:-100:-1], a[::-1])
vereq(a[-100L:100L:2L], unicode('02468', 'ascii'))
print '6.5.2 Tuples'
# calling built-in types without argument must return empty
if tuple() != (): raise TestFailed,'tuple() does not return ()'
if len(()) != 0: raise TestFailed, 'len(())'
if len((1,)) != 1: raise TestFailed, 'len((1,))'
if len((1,2,3,4,5,6)) != 6: raise TestFailed, 'len((1,2,3,4,5,6))'
if (1,2)+(3,4) != (1,2,3,4): raise TestFailed, 'tuple concatenation'
if (1,2)*3 != (1,2,1,2,1,2): raise TestFailed, 'tuple repetition *3'
if 0*(1,2,3) != (): raise TestFailed, 'tuple repetition 0*'
if min((1,2)) != 1 or max((1,2)) != 2: raise TestFailed, 'min/max tuple'
if 0 in (0,1,2) and 1 in (0,1,2) and 2 in (0,1,2) and 3 not in (0,1,2): pass
else: raise TestFailed, 'in/not in tuple'
try: ()[0]
except IndexError: pass
else: raise TestFailed, "tuple index error didn't raise IndexError"
x = ()
x += ()
if x != (): raise TestFailed, 'tuple inplace add from () to () failed'
x += (1,)
if x != (1,): raise TestFailed, 'tuple resize from () failed'
# extended slicing - subscript only for tuples
a = (0,1,2,3,4)
vereq(a[::], a)
vereq(a[::2], (0,2,4))
vereq(a[1::2], (1,3))
vereq(a[::-1], (4,3,2,1,0))
vereq(a[::-2], (4,2,0))
vereq(a[3::-2], (3,1))
vereq(a[-100:100:], a)
vereq(a[100:-100:-1], a[::-1])
vereq(a[-100L:100L:2L], (0,2,4))
# Check that a specific bug in _PyTuple_Resize() is squashed.
def f():
for i in range(1000):
yield i
vereq(list(tuple(f())), range(1000))
# Verify that __getitem__ overrides are not recognized by __iter__
class T(tuple):
def __getitem__(self, key):
return str(key) + '!!!'
vereq(iter(T((1,2))).next(), 1)
print '6.5.3 Lists'
# calling built-in types without argument must return empty
if list() != []: raise TestFailed,'list() does not return []'
if len([]) != 0: raise TestFailed, 'len([])'
if len([1,]) != 1: raise TestFailed, 'len([1,])'
if len([1,2,3,4,5,6]) != 6: raise TestFailed, 'len([1,2,3,4,5,6])'
if [1,2]+[3,4] != [1,2,3,4]: raise TestFailed, 'list concatenation'
if [1,2]*3 != [1,2,1,2,1,2]: raise TestFailed, 'list repetition *3'
if [1,2]*3L != [1,2,1,2,1,2]: raise TestFailed, 'list repetition *3L'
if 0*[1,2,3] != []: raise TestFailed, 'list repetition 0*'
if 0L*[1,2,3] != []: raise TestFailed, 'list repetition 0L*'
if min([1,2]) != 1 or max([1,2]) != 2: raise TestFailed, 'min/max list'
if 0 in [0,1,2] and 1 in [0,1,2] and 2 in [0,1,2] and 3 not in [0,1,2]: pass
else: raise TestFailed, 'in/not in list'
a = [1, 2, 3, 4, 5]
a[:-1] = a
if a != [1, 2, 3, 4, 5, 5]:
raise TestFailed, "list self-slice-assign (head)"
a = [1, 2, 3, 4, 5]
a[1:] = a
if a != [1, 1, 2, 3, 4, 5]:
raise TestFailed, "list self-slice-assign (tail)"
a = [1, 2, 3, 4, 5]
a[1:-1] = a
if a != [1, 1, 2, 3, 4, 5, 5]:
raise TestFailed, "list self-slice-assign (center)"
try: [][0]
except IndexError: pass
else: raise TestFailed, "list index error didn't raise IndexError"
try: [][0] = 5
except IndexError: pass
else: raise TestFailed, "list assignment index error didn't raise IndexError"
try: [].pop()
except IndexError: pass
else: raise TestFailed, "empty list.pop() didn't raise IndexError"
try: [1].pop(5)
except IndexError: pass
else: raise TestFailed, "[1].pop(5) didn't raise IndexError"
try: [][0:1] = 5
except TypeError: pass
else: raise TestFailed, "bad list slice assignment didn't raise TypeError"
try: [].extend(None)
except TypeError: pass
else: raise TestFailed, "list.extend(None) didn't raise TypeError"
a = [1, 2, 3, 4]
a *= 0
if a != []:
raise TestFailed, "list inplace repeat"
a = []
a[:] = tuple(range(10))
if a != range(10):
raise TestFailed, "assigning tuple to slice"
print '6.5.3a Additional list operations'
a = [0,1,2,3,4]
a[0L] = 1
a[1L] = 2
a[2L] = 3
if a != [1,2,3,3,4]: raise TestFailed, 'list item assignment [0L], [1L], [2L]'
a[0] = 5
a[1] = 6
a[2] = 7
if a != [5,6,7,3,4]: raise TestFailed, 'list item assignment [0], [1], [2]'
a[-2L] = 88
a[-1L] = 99
if a != [5,6,7,88,99]: raise TestFailed, 'list item assignment [-2L], [-1L]'
a[-2] = 8
a[-1] = 9
if a != [5,6,7,8,9]: raise TestFailed, 'list item assignment [-2], [-1]'
a[:2] = [0,4]
a[-3:] = []
a[1:1] = [1,2,3]
if a != [0,1,2,3,4]: raise TestFailed, 'list slice assignment'
a[ 1L : 4L] = [7,8,9]
if a != [0,7,8,9,4]: raise TestFailed, 'list slice assignment using long ints'
del a[1:4]
if a != [0,4]: raise TestFailed, 'list slice deletion'
del a[0]
if a != [4]: raise TestFailed, 'list item deletion [0]'
del a[-1]
if a != []: raise TestFailed, 'list item deletion [-1]'
a=range(0,5)
del a[1L:4L]
if a != [0,4]: raise TestFailed, 'list slice deletion'
del a[0L]
if a != [4]: raise TestFailed, 'list item deletion [0]'
del a[-1L]
if a != []: raise TestFailed, 'list item deletion [-1]'
a.append(0)
a.append(1)
a.append(2)
if a != [0,1,2]: raise TestFailed, 'list append'
a.insert(0, -2)
a.insert(1, -1)
a.insert(2,0)
if a != [-2,-1,0,0,1,2]: raise TestFailed, 'list insert'
b = a[:]
b.insert(-2, "foo")
b.insert(-200, "left")
b.insert(200, "right")
if b != ["left",-2,-1,0,0,"foo",1,2,"right"]: raise TestFailed, 'list insert2'
# a = [-2,-1,0,0,1,2]
if a.count(0) != 2: raise TestFailed, ' list count'
if a.index(0) != 2: raise TestFailed, 'list index'
if a.index(0,2) != 2: raise TestFailed, 'list index, start argument'
if a.index(0,-4) != 2: raise TestFailed, 'list index, -start argument'
if a.index(-2,-10) != 0: raise TestFailed, 'list index, very -start argument'
if a.index(0,3) != 3: raise TestFailed, 'list index, start argument'
if a.index(0,-3) != 3: raise TestFailed, 'list index, -start argument'
if a.index(0,3,4) != 3: raise TestFailed, 'list index, stop argument'
if a.index(0,-3,-2) != 3: raise TestFailed, 'list index, -stop argument'
if a.index(0,-4*sys.maxint,4*sys.maxint) != 2:
raise TestFailed, 'list index, -maxint, maxint argument'
try:
a.index(0, 4*sys.maxint,-4*sys.maxint)
except ValueError:
pass
else:
raise TestFailed, 'list index, maxint,-maxint argument'
try:
a.index(2,0,-10)
except ValueError:
pass
else:
raise TestFailed, 'list index, very -stop argument'
a.remove(0)
try:
a.index(2,0,4)
except ValueError:
pass
else:
raise TestFailed, 'list index, stop argument.'
if a != [-2,-1,0,1,2]: raise TestFailed, 'list remove'
a.reverse()
if a != [2,1,0,-1,-2]: raise TestFailed, 'list reverse'
a.sort()
if a != [-2,-1,0,1,2]: raise TestFailed, 'list sort'
def revcmp(a, b): return cmp(b, a)
a.sort(revcmp)
if a != [2,1,0,-1,-2]: raise TestFailed, 'list sort with cmp func'
# The following dumps core in unpatched Python 1.5:
def myComparison(x,y):
return cmp(x%3, y%7)
z = range(12)
z.sort(myComparison)
try: z.sort(2)
except TypeError: pass
else: raise TestFailed, 'list sort compare function is not callable'
def selfmodifyingComparison(x,y):
z.append(1)
return cmp(x, y)
try: z.sort(selfmodifyingComparison)
except ValueError: pass
else: raise TestFailed, 'modifying list during sort'
try: z.sort(lambda x, y: 's')
except TypeError: pass
else: raise TestFailed, 'list sort compare function does not return int'
# Test extreme cases with long ints
a = [0,1,2,3,4]
if a[ -pow(2,128L): 3 ] != [0,1,2]:
raise TestFailed, "list slicing with too-small long integer"
if a[ 3: pow(2,145L) ] != [3,4]:
raise TestFailed, "list slicing with too-large long integer"
# extended slicing
# subscript
a = [0,1,2,3,4]
vereq(a[::], a)
vereq(a[::2], [0,2,4])
vereq(a[1::2], [1,3])
vereq(a[::-1], [4,3,2,1,0])
vereq(a[::-2], [4,2,0])
vereq(a[3::-2], [3,1])
vereq(a[-100:100:], a)
vereq(a[100:-100:-1], a[::-1])
vereq(a[-100L:100L:2L], [0,2,4])
vereq(a[1000:2000:2], [])
vereq(a[-1000:-2000:-2], [])
# deletion
del a[::2]
vereq(a, [1,3])
a = range(5)
del a[1::2]
vereq(a, [0,2,4])
a = range(5)
del a[1::-2]
vereq(a, [0,2,3,4])
a = range(10)
del a[::1000]
vereq(a, [1, 2, 3, 4, 5, 6, 7, 8, 9])
# assignment
a = range(10)
a[::2] = [-1]*5
vereq(a, [-1, 1, -1, 3, -1, 5, -1, 7, -1, 9])
a = range(10)
a[::-4] = [10]*3
vereq(a, [0, 10, 2, 3, 4, 10, 6, 7, 8 ,10])
a = range(4)
a[::-1] = a
vereq(a, [3, 2, 1, 0])
a = range(10)
b = a[:]
c = a[:]
a[2:3] = ["two", "elements"]
b[slice(2,3)] = ["two", "elements"]
c[2:3:] = ["two", "elements"]
vereq(a, b)
vereq(a, c)
a = range(10)
a[::2] = tuple(range(5))
vereq(a, [0, 1, 1, 3, 2, 5, 3, 7, 4, 9])
# Verify that __getitem__ overrides are not recognized by __iter__
class L(list):
def __getitem__(self, key):
return str(key) + '!!!'
vereq(iter(L([1,2])).next(), 1)
print '6.6 Mappings == Dictionaries'
# calling built-in types without argument must return empty
if dict() != {}: raise TestFailed,'dict() does not return {}'
d = {}
if d.keys() != []: raise TestFailed, '{}.keys()'
if d.values() != []: raise TestFailed, '{}.values()'
if d.items() != []: raise TestFailed, '{}.items()'
if d.has_key('a') != 0: raise TestFailed, '{}.has_key(\'a\')'
if ('a' in d) != 0: raise TestFailed, "'a' in {}"
if ('a' not in d) != 1: raise TestFailed, "'a' not in {}"
if len(d) != 0: raise TestFailed, 'len({})'
d = {'a': 1, 'b': 2}
if len(d) != 2: raise TestFailed, 'len(dict)'
k = d.keys()
k.sort()
if k != ['a', 'b']: raise TestFailed, 'dict keys()'
if d.has_key('a') and d.has_key('b') and not d.has_key('c'): pass
else: raise TestFailed, 'dict keys()'
if 'a' in d and 'b' in d and 'c' not in d: pass
else: raise TestFailed, 'dict keys() # in/not in version'
if d['a'] != 1 or d['b'] != 2: raise TestFailed, 'dict item'
d['c'] = 3
d['a'] = 4
if d['c'] != 3 or d['a'] != 4: raise TestFailed, 'dict item assignment'
del d['b']
if d != {'a': 4, 'c': 3}: raise TestFailed, 'dict item deletion'
# dict.clear()
d = {1:1, 2:2, 3:3}
d.clear()
if d != {}: raise TestFailed, 'dict clear'
# dict.update()
d.update({1:100})
d.update({2:20})
d.update({1:1, 2:2, 3:3})
if d != {1:1, 2:2, 3:3}: raise TestFailed, 'dict update'
d.clear()
try: d.update(None)
except AttributeError: pass
else: raise TestFailed, 'dict.update(None), AttributeError expected'
class SimpleUserDict:
def __init__(self):
self.d = {1:1, 2:2, 3:3}
def keys(self):
return self.d.keys()
def __getitem__(self, i):
return self.d[i]
d.update(SimpleUserDict())
if d != {1:1, 2:2, 3:3}: raise TestFailed, 'dict.update(instance)'
d.clear()
class FailingUserDict:
def keys(self):
raise ValueError
try: d.update(FailingUserDict())
except ValueError: pass
else: raise TestFailed, 'dict.keys() expected ValueError'
class FailingUserDict:
def keys(self):
class BogonIter:
def __iter__(self):
raise ValueError
return BogonIter()
try: d.update(FailingUserDict())
except ValueError: pass
else: raise TestFailed, 'iter(dict.keys()) expected ValueError'
class FailingUserDict:
def keys(self):
class BogonIter:
def __init__(self):
self.i = 1
def __iter__(self):
return self
def next(self):
if self.i:
self.i = 0
return 'a'
raise ValueError
return BogonIter()
def __getitem__(self, key):
return key
try: d.update(FailingUserDict())
except ValueError: pass
else: raise TestFailed, 'iter(dict.keys()).next() expected ValueError'
class FailingUserDict:
def keys(self):
class BogonIter:
def __init__(self):
self.i = ord('a')
def __iter__(self):
return self
def next(self):
if self.i <= ord('z'):
rtn = chr(self.i)
self.i += 1
return rtn
raise StopIteration
return BogonIter()
def __getitem__(self, key):
raise ValueError
try: d.update(FailingUserDict())
except ValueError: pass
else: raise TestFailed, 'dict.update(), __getitem__ expected ValueError'
# dict.fromkeys()
if dict.fromkeys('abc') != {'a':None, 'b':None, 'c':None}:
raise TestFailed, 'dict.fromkeys did not work as a class method'
d = {}
if d.fromkeys('abc') is d:
raise TestFailed, 'dict.fromkeys did not return a new dict'
if d.fromkeys('abc') != {'a':None, 'b':None, 'c':None}:
raise TestFailed, 'dict.fromkeys failed with default value'
if d.fromkeys((4,5),0) != {4:0, 5:0}:
raise TestFailed, 'dict.fromkeys failed with specified value'
if d.fromkeys([]) != {}:
raise TestFailed, 'dict.fromkeys failed with null sequence'
def g():
yield 1
if d.fromkeys(g()) != {1:None}:
raise TestFailed, 'dict.fromkeys failed with a generator'
try: {}.fromkeys(3)
except TypeError: pass
else: raise TestFailed, 'dict.fromkeys failed to raise TypeError'
class dictlike(dict): pass
if dictlike.fromkeys('a') != {'a':None}:
raise TestFailed, 'dictsubclass.fromkeys did not inherit'
if dictlike().fromkeys('a') != {'a':None}:
raise TestFailed, 'dictsubclass.fromkeys did not inherit'
if type(dictlike.fromkeys('a')) is not dictlike:
raise TestFailed, 'dictsubclass.fromkeys created wrong type'
if type(dictlike().fromkeys('a')) is not dictlike:
raise TestFailed, 'dictsubclass.fromkeys created wrong type'
from UserDict import UserDict
class mydict(dict):
def __new__(cls):
return UserDict()
ud = mydict.fromkeys('ab')
if ud != {'a':None, 'b':None} or not isinstance(ud,UserDict):
raise TestFailed, 'fromkeys did not instantiate using __new__'
# dict.copy()
d = {1:1, 2:2, 3:3}
if d.copy() != {1:1, 2:2, 3:3}: raise TestFailed, 'dict copy'
if {}.copy() != {}: raise TestFailed, 'empty dict copy'
# dict.get()
d = {}
if d.get('c') is not None: raise TestFailed, 'missing {} get, no 2nd arg'
if d.get('c', 3) != 3: raise TestFailed, 'missing {} get, w/ 2nd arg'
d = {'a' : 1, 'b' : 2}
if d.get('c') is not None: raise TestFailed, 'missing dict get, no 2nd arg'
if d.get('c', 3) != 3: raise TestFailed, 'missing dict get, w/ 2nd arg'
if d.get('a') != 1: raise TestFailed, 'present dict get, no 2nd arg'
if d.get('a', 3) != 1: raise TestFailed, 'present dict get, w/ 2nd arg'
# dict.setdefault()
d = {}
if d.setdefault('key0') is not None:
raise TestFailed, 'missing {} setdefault, no 2nd arg'
if d.setdefault('key0') is not None:
raise TestFailed, 'present {} setdefault, no 2nd arg'
d.setdefault('key', []).append(3)
if d['key'][0] != 3:
raise TestFailed, 'missing {} setdefault, w/ 2nd arg'
d.setdefault('key', []).append(4)
if len(d['key']) != 2:
raise TestFailed, 'present {} setdefault, w/ 2nd arg'
# dict.popitem()
for copymode in -1, +1:
# -1: b has same structure as a
# +1: b is a.copy()
for log2size in range(12):
size = 2**log2size
a = {}
b = {}
for i in range(size):
a[`i`] = i
if copymode < 0:
b[`i`] = i
if copymode > 0:
b = a.copy()
for i in range(size):
ka, va = ta = a.popitem()
if va != int(ka): raise TestFailed, "a.popitem: %s" % str(ta)
kb, vb = tb = b.popitem()
if vb != int(kb): raise TestFailed, "b.popitem: %s" % str(tb)
if copymode < 0 and ta != tb:
raise TestFailed, "a.popitem != b.popitem: %s, %s" % (
str(ta), str(tb))
if a: raise TestFailed, 'a not empty after popitems: %s' % str(a)
if b: raise TestFailed, 'b not empty after popitems: %s' % str(b)
d.clear()
try: d.popitem()
except KeyError: pass
else: raise TestFailed, "{}.popitem doesn't raise KeyError"
# Tests for pop with specified key
d.clear()
k, v = 'abc', 'def'
d[k] = v
try: d.pop('ghi')
except KeyError: pass
else: raise TestFailed, "{}.pop(k) doesn't raise KeyError when k not in dictionary"
if d.pop(k) != v: raise TestFailed, "{}.pop(k) doesn't find known key/value pair"
if len(d) > 0: raise TestFailed, "{}.pop(k) failed to remove the specified pair"
try: d.pop(k)
except KeyError: pass
else: raise TestFailed, "{}.pop(k) doesn't raise KeyError when dictionary is empty"
# verify longs/ints get same value when key > 32 bits (for 64-bit archs)
# see SF bug #689659
x = 4503599627370496L
y = 4503599627370496
h = {x: 'anything', y: 'something else'}
if h[x] != h[y]:
raise TestFailed, "long/int key should match"
if d.pop(k, v) != v: raise TestFailed, "{}.pop(k, v) doesn't return default value"
d[k] = v
if d.pop(k, 1) != v: raise TestFailed, "{}.pop(k, v) doesn't find known key/value pair"
d[1] = 1
try:
for i in d:
d[i+1] = 1
except RuntimeError:
pass
else:
raise TestFailed, "changing dict size during iteration doesn't raise Error"
try: type(1, 2)
except TypeError: pass
else: raise TestFailed, 'type(), w/2 args expected TypeError'
try: type(1, 2, 3, 4)
except TypeError: pass
else: raise TestFailed, 'type(), w/4 args expected TypeError'
print 'Buffers'
try: buffer('asdf', -1)
except ValueError: pass
else: raise TestFailed, "buffer('asdf', -1) should raise ValueError"
try: buffer(None)
except TypeError: pass
else: raise TestFailed, "buffer(None) should raise TypeError"
a = buffer('asdf')
hash(a)
b = a * 5
if a == b:
raise TestFailed, 'buffers should not be equal'
if str(b) != ('asdf' * 5):
raise TestFailed, 'repeated buffer has wrong content'
if str(a * 0) != '':
raise TestFailed, 'repeated buffer zero times has wrong content'
if str(a + buffer('def')) != 'asdfdef':
raise TestFailed, 'concatenation of buffers yields wrong content'
try: a[1] = 'g'
except TypeError: pass
else: raise TestFailed, "buffer assignment should raise TypeError"
try: a[0:1] = 'g'
except TypeError: pass
else: raise TestFailed, "buffer slice assignment should raise TypeError"
|
cpcloud/odo
|
refs/heads/master
|
odo/tests/test_core.py
|
2
|
from __future__ import absolute_import, division, print_function
import warnings
from odo.core import NetworkDispatcher, path, FailedConversionWarning
from datashape import discover
d = NetworkDispatcher('foo')
@d.register(float, int, cost=1.0)
def f(x, **kwargs):
return float(x)
@d.register(str, float, cost=1.0)
def g(x, **kwargs):
return str(x)
def test_basic():
assert [func for a, b, func in d.path(int, str)] == [f, g]
assert d.path(int, str) == d.path(1, '')
def test_convert_is_robust_to_failures():
foo = NetworkDispatcher('foo')
def badfunc(*args, **kwargs):
raise NotImplementedError()
class A(object): pass
class B(object): pass
class C(object): pass
discover.register((A, B, C))(lambda x: 'int')
foo.register(B, A, cost=1.0)(lambda x, **kwargs: 1)
foo.register(C, B, cost=1.0)(badfunc)
foo.register(C, A, cost=10.0)(lambda x, **kwargs: 2)
with warnings.catch_warnings(record=True) as ws:
warnings.simplefilter('always')
assert foo(C, A()) == 2
assert len(ws) == 1
w = ws[0].message
assert isinstance(w, FailedConversionWarning)
assert 'B -> C' in str(w)
def test_ooc_behavior():
foo = NetworkDispatcher('foo')
class A(object): pass
class B(object): pass
class C(object): pass
discover.register((A, B, C))(lambda x: 'int')
foo.register(B, A, cost=1.0)(lambda x, **kwargs: 1)
foo.register(C, B, cost=1.0)(lambda x, **kwargs: x / 0) # note that this errs
foo.register(C, A, cost=10.0)(lambda x, **kwargs: 2)
assert [(a, b) for a, b, func in path(foo.graph, A, C)] == [(A, B), (B, C)]
ooc = set([A, C])
assert [(a, b) for a, b, func in path(foo.graph, A, C, ooc_types=ooc)] == \
[(A, C)]
|
stoewer/nixpy
|
refs/heads/master
|
nixio/file.py
|
1
|
# Copyright (c) 2014, German Neuroinformatics Node (G-Node)
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted under the terms of the BSD License. See
# LICENSE file in the root of the Project.
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import nixio.util.find as finders
from nixio.util.proxy_list import ProxyList
try:
from sys import maxint
except ImportError:
from sys import maxsize as maxint
class BlockProxyList(ProxyList):
def __init__(self, obj):
super(BlockProxyList, self).__init__(obj, "_block_count",
"_get_block_by_id",
"_get_block_by_pos",
"_delete_block_by_id")
class SectionProxyList(ProxyList):
def __init__(self, obj):
super(SectionProxyList, self).__init__(obj, "_section_count",
"_get_section_by_id",
"_get_section_by_pos",
"_delete_section_by_id")
class FileMixin(object):
@property
def blocks(self):
"""
A property containing all blocks of a file. Blocks can be obtained by
        their id or their index. Blocks can be deleted from the list; when a
        block is deleted, all its content (data arrays, tags and sources) is
        also deleted from the file. Adding a new Block is done via the
        create_block method of File. This is a read-only attribute.
:type: ProxyList of Block entities.
"""
if not hasattr(self, "_blocks"):
setattr(self, "_blocks", BlockProxyList(self))
return self._blocks
def find_sections(self, filtr=lambda _: True, limit=None):
"""
Get all sections and their child sections recursively.
This method traverses the trees of all sections. The traversal is
        accomplished breadth-first and can be limited in depth. A filter is
        applied to each section; if the filter returns true, the section is
        added to the result list.
By default a filter is used that accepts all sections.
:param filtr: A filter function
:type filtr: function
:param limit: The maximum depth of traversal
:type limit: int
:returns: A list containing the matching sections.
:rtype: list of Section
"""
if limit is None:
limit = maxint
return finders._find_sections(self, filtr, limit)
@property
def sections(self):
"""
A property containing all root sections of a file. Specific root
sections can be obtained by their id or their index. Sections can be
        deleted from this list. Notice: when a section is deleted, all its
        child sections and properties are removed too. Adding a new Section is
        done via the create_section method of File.
This is a read-only property.
:type: ProxyList of Section entities.
"""
if not hasattr(self, "_sections"):
setattr(self, "_sections", SectionProxyList(self))
return self._sections
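def _example_find_sections(nix_file):
    # A minimal usage sketch, not part of nixio: `nix_file` is assumed to be an
    # open NIX file whose class mixes in FileMixin, and the predicate below is
    # an arbitrary example (section names are assumed to be plain strings).
    return nix_file.find_sections(filtr=lambda sec: "stimulus" in sec.name,
                                  limit=2)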
|
JioCloud/oslo.config
|
refs/heads/master
|
tests/test_warning.py
|
2
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import imp
import os
import warnings
import mock
from oslotest import base as test_base
import six
class DeprecationWarningTest(test_base.BaseTestCase):
@mock.patch('warnings.warn')
def test_warning(self, mock_warn):
import oslo.config
imp.reload(oslo.config)
self.assertTrue(mock_warn.called)
args = mock_warn.call_args
self.assertIn('oslo_config', args[0][0])
self.assertIn('deprecated', args[0][0])
self.assertTrue(issubclass(args[0][1], DeprecationWarning))
def test_real_warning(self):
with warnings.catch_warnings(record=True) as warning_msgs:
warnings.resetwarnings()
warnings.simplefilter('always', DeprecationWarning)
import oslo.config
# Use a separate function to get the stack level correct
# so we know the message points back to this file. This
# corresponds to an import or reload, which isn't working
# inside the test under Python 3.3. That may be due to a
# difference in the import implementation not triggering
# warnings properly when the module is reloaded, or
# because the warnings module is mostly implemented in C
# and something isn't cleanly resetting the global state
# used to track whether a warning needs to be
# emitted. Whatever the cause, we definitely see the
# warnings.warn() being invoked on a reload (see the test
# above) and warnings are reported on the console when we
# run the tests. A simpler test script run outside of
# testr does correctly report the warnings.
def foo():
oslo.config.deprecated()
foo()
self.assertEqual(1, len(warning_msgs))
msg = warning_msgs[0]
self.assertIn('oslo_config', six.text_type(msg.message))
self.assertEqual('test_warning.py', os.path.basename(msg.filename))
|
adedayo/intellij-community
|
refs/heads/master
|
python/testData/quickdoc/CallFunc.py
|
83
|
# directly in function
def foo():
"<the_doc>Doc of foo."
pass
<the_ref>foo()
|
40223114/w16
|
refs/heads/master
|
static/Brython3.1.3-20150514-095342/Lib/site-packages/pygame/draw.py
|
603
|
from javascript import console
from browser import timer
import math
class Queue:
def __init__(self):
self._list=[]
def empty(self):
return len(self._list) == 0
def put(self, element):
self._list.append(element)
def get(self):
if len(self._list) == 0:
            raise IndexError("get from an empty Queue")
_element=self._list[0]
if len(self._list) == 1:
self._list=[]
else:
self._list=self._list[1:]
return _element
dm={}
def aaline(canvas, color, startpos, endpos, width, outline, blend=1):
#console.log("aaline")
if canvas not in dm:
dm[canvas]=DrawManager(canvas)
dm[canvas].process()
_dl=DrawLine(startpos[0], startpos[1], endpos[0], endpos[1], color,
width, outline, speed=10)
dm[canvas].add_line(_dl) #color, startpos, endpos, width, outline)
def aapolygon(canvas, color, coordinates, width, outline, blend=1):
#console.log("aapolygon")
if canvas not in dm:
dm[canvas]=DrawManager(canvas)
dm[canvas].process()
_dp=DrawPolygon(coordinates, color, width, outline, speed=10)
dm[canvas].add_polygon(_dp)
def aapolygon_bg(canvas, shape):
if canvas not in dm:
dm[canvas]=DrawManager(canvas)
dm[canvas].process()
dm[canvas].add_polygon_bg(shape)
class DrawPolygon:
def __init__(self, coordinates, color, width, outline, speed=10):
self.moveTo=coordinates[0]
self.segments=coordinates[1:]
self.color=color
self.width=width
self.outline=outline
class DrawLine:
def __init__(self, x0, y0, x1, y1, color, width, outline, speed=None):
self._type='LINE'
self._x0=x0
self._x1=x1
self._y0=y0
self._y1=y1
self._speed=speed
self._color=color
self._width=width
self._outline=outline
def get_segments(self):
        if self._speed==0: # no animation when speed is 0 (return one segment)
return [{'type': self._type, 'x0':self._x0, 'y0': self._y0,
'x1': self._x1, 'y1': self._y1, 'color': self._color}]
#need to figure out how to translate speed into pixels, etc
#maybe speed is pixels per ms? 10 = 10 pixels per millisecond?
_x=(self._x1 - self._x0)
_x*=_x
_y=(self._y1 - self._y0)
_y*=_y
_distance=math.sqrt(_x + _y)
if _distance < self._speed: # we can do this in one segment
return [{'type': self._type, 'x0':self._x0, 'y0': self._y0,
'x1': self._x1, 'y1': self._y1, 'color': self._color}]
_segments=[]
_num_segments=math.floor(_distance/self._speed)
_pos_x=self._x0
_pos_y=self._y0
_x_diff=self._x1 - self._x0
_y_diff=self._y1 - self._y0
for _i in range(1,_num_segments+1):
_x=self._x0 + _i/_num_segments * _x_diff
_y=self._y0 + _i/_num_segments * _y_diff
            _segments.append({'type': 'LINE', 'x0': _pos_x, 'y0': _pos_y,
                              'x1': _x, 'y1': _y, 'color': self._color})
_pos_x=_x
_pos_y=_y
if _pos_x != self._x1 or _pos_y != self._y1:
            # close the remaining gap with one final segment to the endpoint
            _segments.append({'type': 'LINE', 'x0': _pos_x, 'y0': _pos_y,
                              'x1': self._x1, 'y1': self._y1, 'color': self._color})
return _segments
class DrawManager:
def __init__(self, canvas):
self._queue=Queue()
self._canvas=canvas
self._ctx=canvas.getContext('2d')
self._interval=None
self._bg=None #used to capture bg before polygon is drawn
def __del__(self):
if self._interval is not None:
            timer.clear_interval(self._interval)
self._interval=None
del self._queue
def rect_from_shape(self, points):
_width=self._canvas.width
_height=self._canvas.height
_min_x=_width
_max_x=0
_min_y=_height
_max_y=0
for _point in points:
_x, _y = _point
_min_x=min(_min_x, _x)
_min_y=min(_min_y, _y)
_max_x=max(_max_x, _x)
_max_y=max(_max_y, _y)
_w2=_width/2
_h2=_height/2
        return math.floor(_min_x-0.5)+_w2, math.floor(_min_y-0.5)+_h2, \
               math.ceil(_max_x+0.5)+_w2, math.ceil(_max_y+0.5)+_h2
def __interval(self):
if not self._queue.empty():
_dict=self._queue.get()
if _dict['type'] == 'LINE':
self._ctx.beginPath()
self._ctx.moveTo(_dict['x0'], _dict['y0'])
self._ctx.lineTo(_dict['x1'], _dict['y1'])
#if _dict['outline'] is not None:
# self._ctx.strokeStyle=_dict['outline'] #set line color
if _dict['color'] is not None:
self._ctx.fillStyle=_dict['color']
self._ctx.stroke()
elif _dict['type'] == 'POLYGON':
if self._bg is not None:
self._ctx.putImageData(self._bg[0], self._bg[1], self._bg[2])
console.log(self._bg[0])
self._bg=None
self._ctx.beginPath()
_moveTo=_dict['moveTo']
self._ctx.moveTo(_moveTo[0], _moveTo[1])
for _segment in _dict['segments']:
self._ctx.lineTo(_segment[0], _segment[1])
if _dict['width']:
self._ctx.lineWidth=_dict['width']
if _dict['outline']:
self._ctx.strokeStyle=_dict['outline']
if _dict['color']:
self._ctx.fillStyle=_dict['color']
self._ctx.fill()
self._ctx.closePath()
self._ctx.stroke()
elif _dict['type'] == 'POLYGON_BG':
_x0,_y0,_x1,_y1=self.rect_from_shape(_dict['shape'])
console.log(_x0,_y0,_x1, _y1)
self._bg=[]
self._bg.append(self._ctx.getImageData(_x0,_y0,abs(_x1)-abs(_x0),abs(_y1)-abs(_y0)))
self._bg.append(_x0)
self._bg.append(_y0)
def process(self):
self._interval=timer.set_interval(self.__interval, 10)
def add_line(self, dl): #color, startpos, endpos, width, outline, speed=None):
for _segment in dl.get_segments():
self._queue.put(_segment)
def add_polygon(self, dp):
self._queue.put({'type': 'POLYGON', 'moveTo': dp.moveTo,
'segments': dp.segments, 'color': dp.color,
'outline': dp.outline, 'width': dp.width})
def add_polygon_bg(self, shape):
self._queue.put({'type': 'POLYGON_BG', 'shape': shape})
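def _example_draw_diagonal(canvas):
    # A minimal usage sketch, not part of the original module: queue an
    # animated diagonal line on a browser <canvas> element; the colour, width
    # and outline values here are arbitrary.
    aaline(canvas, '#ffffff', (0, 0), (100, 100), 1, None)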
|
nekohayo/meld
|
refs/heads/master
|
meld/melddoc.py
|
2
|
### Copyright (C) 2002-2006 Stephen Kennedy <stevek@gnome.org>
### Copyright (C) 2011 Kai Willadsen <kai.willadsen@gmail.com>
### This program is free software; you can redistribute it and/or modify
### it under the terms of the GNU General Public License as published by
### the Free Software Foundation; either version 2 of the License, or
### (at your option) any later version.
### This program is distributed in the hope that it will be useful,
### but WITHOUT ANY WARRANTY; without even the implied warranty of
### MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
### GNU General Public License for more details.
### You should have received a copy of the GNU General Public License
### along with this program; if not, write to the Free Software
### Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import subprocess
import sys
import gobject
import task
import undo
import gtk
import os
from gettext import gettext as _
class MeldDoc(gobject.GObject):
"""Base class for documents in the meld application.
"""
__gsignals__ = {
'label-changed': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE,
(gobject.TYPE_STRING, gobject.TYPE_STRING)),
'file-changed': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE,
(gobject.TYPE_STRING,)),
'create-diff': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE,
(gobject.TYPE_PYOBJECT,)),
'status-changed': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE,
(gobject.TYPE_PYOBJECT,)),
'current-diff-changed': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE,
()),
'next-diff-changed': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE,
(bool, bool)),
}
def __init__(self, prefs):
gobject.GObject.__init__(self)
self.undosequence = undo.UndoSequence()
self.scheduler = task.FifoScheduler()
self.prefs = prefs
self.prefs.notify_add(self.on_preference_changed)
self.num_panes = 0
self.label_text = _("untitled")
self.tooltip_text = _("untitled")
def save(self):
pass
def save_as(self):
pass
def stop(self):
if self.scheduler.tasks_pending():
self.scheduler.remove_task(self.scheduler.get_current_task())
def _open_files(self, selected):
files = [f for f in selected if os.path.isfile(f)]
dirs = [d for d in selected if os.path.isdir(d)]
def os_open(paths):
for path in paths:
if sys.platform == "win32":
subprocess.Popen(["start", path], shell=True)
elif sys.platform == "darwin":
subprocess.Popen(["open", path])
else:
subprocess.Popen(["xdg-open", path])
if len(files):
cmd = self.prefs.get_editor_command(files)
if cmd:
os.spawnvp(os.P_NOWAIT, cmd[0], cmd)
else:
os_open(files)
os_open(dirs)
def open_external(self):
pass
def on_undo_activate(self):
if self.undosequence.can_undo():
self.undosequence.undo()
def on_redo_activate(self):
if self.undosequence.can_redo():
self.undosequence.redo()
def on_refresh_activate(self, *extra):
self.on_reload_activate(self, *extra)
def on_reload_activate(self, *extra):
pass
def on_find_activate(self, *extra):
pass
def on_find_next_activate(self, *extra):
pass
def on_find_previous_activate(self, *extra):
pass
def on_replace_activate(self, *extra):
pass
def on_preference_changed(self, key, value):
pass
def on_file_changed(self, filename):
pass
def label_changed(self):
self.emit("label-changed", self.label_text, self.tooltip_text)
def set_labels(self, lst):
pass
def on_container_switch_in_event(self, uimanager):
"""Called when the container app switches to this tab.
"""
self.ui_merge_id = uimanager.add_ui_from_file(self.ui_file)
uimanager.insert_action_group(self.actiongroup, -1)
self.popup_menu = uimanager.get_widget("/Popup")
uimanager.ensure_update()
def on_container_switch_out_event(self, uimanager):
"""Called when the container app switches away from this tab.
"""
uimanager.remove_action_group(self.actiongroup)
uimanager.remove_ui(self.ui_merge_id)
def on_delete_event(self, appquit=0):
"""Called when the docs container is about to close.
A doc normally returns gtk.RESPONSE_OK but may return
gtk.RESPONSE_CANCEL which requests the container
to not delete it. In the special case when the
app is about to quit, gtk.RESPONSE_CLOSE may be returned
which instructs the container to quit without any
more callbacks.
"""
return gtk.RESPONSE_OK
def on_quit_event(self):
"""Called when the docs container is about to close.
There is no way to interrupt the quit event.
"""
pass
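class _ExampleDoc(MeldDoc):
    # A hedged sketch, not part of meld: illustrates the close-veto protocol
    # documented in on_delete_event above. The `dirty` flag is an assumed,
    # purely illustrative attribute.
    def __init__(self, prefs):
        MeldDoc.__init__(self, prefs)
        self.dirty = False
    def on_delete_event(self, appquit=0):
        # Ask the container not to close this tab while there are unsaved edits.
        return gtk.RESPONSE_CANCEL if self.dirty else gtk.RESPONSE_OK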
|
dcroc16/skunk_works
|
refs/heads/master
|
google_appengine/lib/django-1.3/django/contrib/localflavor/is_/is_postalcodes.py
|
438
|
# -*- coding: utf-8 -*-
IS_POSTALCODES = (
('101', u'101 Reykjavík'),
('103', u'103 Reykjavík'),
('104', u'104 Reykjavík'),
('105', u'105 Reykjavík'),
('107', u'107 Reykjavík'),
('108', u'108 Reykjavík'),
('109', u'109 Reykjavík'),
('110', u'110 Reykjavík'),
('111', u'111 Reykjavík'),
('112', u'112 Reykjavík'),
('113', u'113 Reykjavík'),
('116', u'116 Kjalarnes'),
('121', u'121 Reykjavík'),
('123', u'123 Reykjavík'),
('124', u'124 Reykjavík'),
('125', u'125 Reykjavík'),
('127', u'127 Reykjavík'),
('128', u'128 Reykjavík'),
('129', u'129 Reykjavík'),
('130', u'130 Reykjavík'),
('132', u'132 Reykjavík'),
('150', u'150 Reykjavík'),
('155', u'155 Reykjavík'),
('170', u'170 Seltjarnarnes'),
('172', u'172 Seltjarnarnes'),
('190', u'190 Vogar'),
('200', u'200 Kópavogur'),
('201', u'201 Kópavogur'),
('202', u'202 Kópavogur'),
('203', u'203 Kópavogur'),
('210', u'210 Garðabær'),
('212', u'212 Garðabær'),
('220', u'220 Hafnarfjörður'),
('221', u'221 Hafnarfjörður'),
('222', u'222 Hafnarfjörður'),
('225', u'225 Álftanes'),
('230', u'230 Reykjanesbær'),
('232', u'232 Reykjanesbær'),
('233', u'233 Reykjanesbær'),
('235', u'235 Keflavíkurflugvöllur'),
('240', u'240 Grindavík'),
('245', u'245 Sandgerði'),
('250', u'250 Garður'),
('260', u'260 Reykjanesbær'),
('270', u'270 Mosfellsbær'),
('300', u'300 Akranes'),
('301', u'301 Akranes'),
('302', u'302 Akranes'),
('310', u'310 Borgarnes'),
('311', u'311 Borgarnes'),
('320', u'320 Reykholt í Borgarfirði'),
('340', u'340 Stykkishólmur'),
('345', u'345 Flatey á Breiðafirði'),
('350', u'350 Grundarfjörður'),
('355', u'355 Ólafsvík'),
('356', u'356 Snæfellsbær'),
('360', u'360 Hellissandur'),
('370', u'370 Búðardalur'),
('371', u'371 Búðardalur'),
('380', u'380 Reykhólahreppur'),
('400', u'400 Ísafjörður'),
('401', u'401 Ísafjörður'),
('410', u'410 Hnífsdalur'),
('415', u'415 Bolungarvík'),
('420', u'420 Súðavík'),
('425', u'425 Flateyri'),
('430', u'430 Suðureyri'),
('450', u'450 Patreksfjörður'),
('451', u'451 Patreksfjörður'),
('460', u'460 Tálknafjörður'),
('465', u'465 Bíldudalur'),
('470', u'470 Þingeyri'),
('471', u'471 Þingeyri'),
('500', u'500 Staður'),
('510', u'510 Hólmavík'),
('512', u'512 Hólmavík'),
('520', u'520 Drangsnes'),
('522', u'522 Kjörvogur'),
('523', u'523 Bær'),
('524', u'524 Norðurfjörður'),
('530', u'530 Hvammstangi'),
('531', u'531 Hvammstangi'),
('540', u'540 Blönduós'),
('541', u'541 Blönduós'),
('545', u'545 Skagaströnd'),
('550', u'550 Sauðárkrókur'),
('551', u'551 Sauðárkrókur'),
('560', u'560 Varmahlíð'),
('565', u'565 Hofsós'),
('566', u'566 Hofsós'),
('570', u'570 Fljót'),
('580', u'580 Siglufjörður'),
('600', u'600 Akureyri'),
('601', u'601 Akureyri'),
('602', u'602 Akureyri'),
('603', u'603 Akureyri'),
('610', u'610 Grenivík'),
('611', u'611 Grímsey'),
('620', u'620 Dalvík'),
('621', u'621 Dalvík'),
('625', u'625 Ólafsfjörður'),
('630', u'630 Hrísey'),
('640', u'640 Húsavík'),
('641', u'641 Húsavík'),
('645', u'645 Fosshóll'),
('650', u'650 Laugar'),
('660', u'660 Mývatn'),
('670', u'670 Kópasker'),
('671', u'671 Kópasker'),
('675', u'675 Raufarhöfn'),
('680', u'680 Þórshöfn'),
('681', u'681 Þórshöfn'),
('685', u'685 Bakkafjörður'),
('690', u'690 Vopnafjörður'),
('700', u'700 Egilsstaðir'),
('701', u'701 Egilsstaðir'),
('710', u'710 Seyðisfjörður'),
('715', u'715 Mjóifjörður'),
('720', u'720 Borgarfjörður eystri'),
('730', u'730 Reyðarfjörður'),
('735', u'735 Eskifjörður'),
('740', u'740 Neskaupstaður'),
('750', u'750 Fáskrúðsfjörður'),
('755', u'755 Stöðvarfjörður'),
('760', u'760 Breiðdalsvík'),
('765', u'765 Djúpivogur'),
('780', u'780 Höfn í Hornafirði'),
('781', u'781 Höfn í Hornafirði'),
('785', u'785 Öræfi'),
('800', u'800 Selfoss'),
('801', u'801 Selfoss'),
('802', u'802 Selfoss'),
('810', u'810 Hveragerði'),
('815', u'815 Þorlákshöfn'),
('820', u'820 Eyrarbakki'),
('825', u'825 Stokkseyri'),
('840', u'840 Laugarvatn'),
('845', u'845 Flúðir'),
('850', u'850 Hella'),
('851', u'851 Hella'),
('860', u'860 Hvolsvöllur'),
('861', u'861 Hvolsvöllur'),
('870', u'870 Vík'),
('871', u'871 Vík'),
('880', u'880 Kirkjubæjarklaustur'),
('900', u'900 Vestmannaeyjar'),
('902', u'902 Vestmannaeyjar')
)
|
horance-liu/tensorflow
|
refs/heads/master
|
tensorflow/contrib/quantize/python/fold_batch_norms_test.py
|
8
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Unit tests for folding batch norm layers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.layers.python.layers import layers
from tensorflow.contrib.quantize.python import fold_batch_norms
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.platform import googletest
batch_norm = layers.batch_norm
conv2d = layers.conv2d
fully_connected = layers.fully_connected
separable_conv2d = layers.separable_conv2d
# TODO(suharshs): Use parameterized test once OSS TF supports it.
class FoldBatchNormsTest(test_util.TensorFlowTestCase):
def _RunTestOverParameters(self, test_fn):
parameters_list = [
# (relu, relu_op_name, with_bypass, has_scaling, fused_batch_norm)
(nn_ops.relu6, 'Relu6', False, False, False),
(nn_ops.relu, 'Relu', False, False, False),
(nn_ops.relu6, 'Relu6', True, False, False),
(nn_ops.relu, 'Relu', True, False, False),
(nn_ops.relu6, 'Relu6', False, True, False),
(nn_ops.relu, 'Relu', False, True, False),
(nn_ops.relu6, 'Relu6', True, True, False),
(nn_ops.relu, 'Relu', True, True, False),
# Fused batch norm always has scaling enabled.
(nn_ops.relu6, 'Relu6', False, True, True),
(nn_ops.relu, 'Relu', False, True, True),
(nn_ops.relu6, 'Relu6', True, True, True),
(nn_ops.relu, 'Relu', True, True, True),
]
for params in parameters_list:
test_fn(params[0], params[1], params[2], params[3], params[4])
def _TestFoldConv2d(self, relu, relu_op_name, with_bypass, has_scaling,
fused_batch_norm):
"""Tests folding cases: inputs -> Conv2d with batch norm -> Relu*.
Args:
relu: Callable that returns an Operation, a factory method for the Relu*.
relu_op_name: String, name of the Relu* operation.
with_bypass: Bool, when true there is an extra connection added from
inputs to just before Relu*.
has_scaling: Bool, when true the batch norm has scaling.
fused_batch_norm: Bool, when true the batch norm is fused.
"""
g = ops.Graph()
with g.as_default():
batch_size, height, width = 5, 128, 128
inputs = array_ops.zeros((batch_size, height, width, 3))
out_depth = 3 if with_bypass else 32
stride = 1 if with_bypass else 2
activation_fn = None if with_bypass else relu
scope = 'test/test2' if with_bypass else 'test'
node = conv2d(
inputs,
out_depth, [5, 5],
stride=stride,
padding='SAME',
weights_initializer=self._WeightInit(0.09),
activation_fn=activation_fn,
normalizer_fn=batch_norm,
normalizer_params=self._BatchNormParams(
scale=has_scaling, fused=fused_batch_norm),
scope=scope)
if with_bypass:
node = math_ops.add(inputs, node, name='test/Add')
relu(node, name='test/' + relu_op_name)
fold_batch_norms.FoldBatchNorms(g)
folded_mul = g.get_operation_by_name(scope + '/mul_fold')
self.assertEqual(folded_mul.type, 'Mul')
self._AssertInputOpsAre(folded_mul, [
scope + '/weights/read',
self._BatchNormMultiplierName(scope, has_scaling, fused_batch_norm)
])
self._AssertOutputGoesToOps(folded_mul, g, [scope + '/Conv2D_Fold'])
folded_conv = g.get_operation_by_name(scope + '/Conv2D_Fold')
self.assertEqual(folded_conv.type, 'Conv2D')
self._AssertInputOpsAre(folded_conv,
[scope + '/mul_fold', inputs.op.name])
self._AssertOutputGoesToOps(folded_conv, g, [scope + '/add_fold'])
folded_add = g.get_operation_by_name(scope + '/add_fold')
self.assertEqual(folded_add.type, 'Add')
self._AssertInputOpsAre(folded_add, [
scope + '/Conv2D_Fold',
self._BathNormBiasName(scope, fused_batch_norm)
])
output_op_names = ['test/Add' if with_bypass else 'test/' + relu_op_name]
self._AssertOutputGoesToOps(folded_add, g, output_op_names)
def testFoldConv2d(self):
self._RunTestOverParameters(self._TestFoldConv2d)
def _TestFoldConv2dUnknownShape(self, relu, relu_op_name, with_bypass,
has_scaling, fused_batch_norm):
"""Tests folding cases: inputs -> Conv2d with batch norm -> Relu*.
Tests that folding works even with an input shape where some dimensions are
not known (i.e. None).
Args:
relu: Callable that returns an Operation, a factory method for the Relu*.
relu_op_name: String, name of the Relu* operation.
with_bypass: Bool, when true there is an extra connection added from
inputs to just before Relu*.
has_scaling: Bool, when true the batch norm has scaling.
fused_batch_norm: Bool, when true the batch norm is fused.
"""
g = ops.Graph()
with g.as_default():
inputs = array_ops.placeholder(dtypes.float32, shape=(5, None, None, 3))
out_depth = 3 if with_bypass else 32
stride = 1 if with_bypass else 2
activation_fn = None if with_bypass else relu
scope = 'test/test2' if with_bypass else 'test'
node = conv2d(
inputs,
out_depth, [5, 5],
stride=stride,
padding='SAME',
weights_initializer=self._WeightInit(0.09),
activation_fn=activation_fn,
normalizer_fn=batch_norm,
normalizer_params=self._BatchNormParams(
scale=has_scaling, fused=fused_batch_norm),
scope=scope)
if with_bypass:
node = math_ops.add(inputs, node, name='test/Add')
relu(node, name='test/' + relu_op_name)
fold_batch_norms.FoldBatchNorms(g)
folded_mul = g.get_operation_by_name(scope + '/mul_fold')
self.assertEqual(folded_mul.type, 'Mul')
self._AssertInputOpsAre(folded_mul, [
scope + '/weights/read',
self._BatchNormMultiplierName(scope, has_scaling, fused_batch_norm)
])
self._AssertOutputGoesToOps(folded_mul, g, [scope + '/Conv2D_Fold'])
folded_conv = g.get_operation_by_name(scope + '/Conv2D_Fold')
self.assertEqual(folded_conv.type, 'Conv2D')
self._AssertInputOpsAre(folded_conv, [scope + '/mul_fold', inputs.op.name])
self._AssertOutputGoesToOps(folded_conv, g, [scope + '/add_fold'])
folded_add = g.get_operation_by_name(scope + '/add_fold')
self.assertEqual(folded_add.type, 'Add')
self._AssertInputOpsAre(folded_add, [
scope + '/Conv2D_Fold',
self._BathNormBiasName(scope, fused_batch_norm)
])
output_op_names = ['test/Add' if with_bypass else 'test/' + relu_op_name]
self._AssertOutputGoesToOps(folded_add, g, output_op_names)
def testFoldConv2dUnknownShape(self):
self._RunTestOverParameters(self._TestFoldConv2dUnknownShape)
def _TestFoldFullyConnectedLayer(self, relu, relu_op_name, with_bypass,
has_scaling, fused_batch_norm):
"""Tests folding cases: inputs -> FC with batch norm -> Relu*.
Args:
relu: Callable that returns an Operation, a factory method for the Relu*.
relu_op_name: String, name of the Relu* operation.
with_bypass: Bool, when true there is an extra connection added from
inputs to just before Relu*.
has_scaling: Bool, when true the batch norm has scaling.
fused_batch_norm: Bool, when true the batch norm is fused.
"""
g = ops.Graph()
with g.as_default():
batch_size, depth = 5, 256
inputs = array_ops.zeros((batch_size, depth))
out_depth = 256 if with_bypass else 128
activation_fn = None if with_bypass else relu
scope = 'test/test2' if with_bypass else 'test'
node = fully_connected(
inputs,
out_depth,
weights_initializer=self._WeightInit(0.03),
activation_fn=activation_fn,
normalizer_fn=batch_norm,
normalizer_params=self._BatchNormParams(
scale=has_scaling, fused=fused_batch_norm),
scope=scope)
if with_bypass:
node = math_ops.add(inputs, node, name='test/Add')
relu(node, name='test/' + relu_op_name)
fold_batch_norms.FoldBatchNorms(g)
folded_mul = g.get_operation_by_name(scope + '/mul_fold')
self.assertEqual(folded_mul.type, 'Mul')
self._AssertInputOpsAre(folded_mul, [
scope + '/weights/read',
self._BatchNormMultiplierName(scope, has_scaling, fused_batch_norm)
])
self._AssertOutputGoesToOps(folded_mul, g, [scope + '/MatMul_Fold'])
folded_conv = g.get_operation_by_name(scope + '/MatMul_Fold')
self.assertEqual(folded_conv.type, 'MatMul')
self._AssertInputOpsAre(folded_conv,
[scope + '/mul_fold', inputs.op.name])
self._AssertOutputGoesToOps(folded_conv, g, [scope + '/add_fold'])
folded_add = g.get_operation_by_name(scope + '/add_fold')
self.assertEqual(folded_add.type, 'Add')
self._AssertInputOpsAre(folded_add, [
scope + '/MatMul_Fold',
self._BathNormBiasName(scope, fused_batch_norm)
])
output_op_names = ['test/Add' if with_bypass else 'test/' + relu_op_name]
self._AssertOutputGoesToOps(folded_add, g, output_op_names)
def testFoldFullyConnectedLayer(self):
self._RunTestOverParameters(self._TestFoldFullyConnectedLayer)
def _TestFoldDepthwiseConv2d(self, relu, relu_op_name, with_bypass,
has_scaling, fused_batch_norm):
"""Tests folding: inputs -> DepthwiseConv2d with batch norm -> Relu*.
Args:
relu: Callable that returns an Operation, a factory method for the Relu*.
relu_op_name: String, name of the Relu* operation.
with_bypass: Bool, when true there is an extra connection added from
inputs to just before Relu*.
has_scaling: Bool, when true the batch norm has scaling.
fused_batch_norm: Bool, when true the batch norm is fused.
"""
g = ops.Graph()
with g.as_default():
batch_size, height, width = 5, 128, 128
inputs = array_ops.zeros((batch_size, height, width, 3))
stride = 1 if with_bypass else 2
activation_fn = None if with_bypass else relu
scope = 'test/test2' if with_bypass else 'test'
node = separable_conv2d(
inputs,
None, [5, 5],
stride=stride,
depth_multiplier=1.0,
padding='SAME',
weights_initializer=self._WeightInit(0.09),
activation_fn=activation_fn,
normalizer_fn=batch_norm,
normalizer_params=self._BatchNormParams(
scale=has_scaling, fused=fused_batch_norm),
scope=scope)
if with_bypass:
node = math_ops.add(inputs, node, name='test/Add')
relu(node, name='test/' + relu_op_name)
fold_batch_norms.FoldBatchNorms(g)
folded_mul = g.get_operation_by_name(scope + '/mul_fold')
self.assertEqual(folded_mul.type, 'Mul')
self._AssertInputOpsAre(folded_mul,
[scope + '/depthwise_weights/read',
scope + '/scale_reshape'])
self._AssertOutputGoesToOps(folded_mul, g, [scope + '/depthwise_Fold'])
scale_reshape = g.get_operation_by_name(scope + '/scale_reshape')
self.assertEqual(scale_reshape.type, 'Reshape')
self._AssertInputOpsAre(scale_reshape, [
self._BatchNormMultiplierName(scope, has_scaling, fused_batch_norm),
scope + '/scale_reshape/shape'
])
self._AssertOutputGoesToOps(scale_reshape, g, [scope + '/mul_fold'])
folded_conv = g.get_operation_by_name(scope + '/depthwise_Fold')
self.assertEqual(folded_conv.type, 'DepthwiseConv2dNative')
self._AssertInputOpsAre(folded_conv,
[scope + '/mul_fold', inputs.op.name])
self._AssertOutputGoesToOps(folded_conv, g, [scope + '/add_fold'])
folded_add = g.get_operation_by_name(scope + '/add_fold')
self.assertEqual(folded_add.type, 'Add')
self._AssertInputOpsAre(folded_add, [
scope + '/depthwise_Fold',
self._BathNormBiasName(scope, fused_batch_norm)
])
output_op_names = ['test/Add' if with_bypass else 'test/' + relu_op_name]
self._AssertOutputGoesToOps(folded_add, g, output_op_names)
def testFoldDepthwiseConv2d(self):
self._RunTestOverParameters(self._TestFoldDepthwiseConv2d)
def _BatchNormParams(self, scale=True, fused=False):
return {
'center': True,
'scale': scale,
'decay': 1.0 - 0.003,
'fused': fused
}
def _BatchNormMultiplierName(self, scope, has_scaling, fused):
if has_scaling:
if fused:
return scope + '/mul'
return scope + '/BatchNorm/batchnorm/mul'
return scope + '/BatchNorm/batchnorm/Rsqrt'
def _BathNormBiasName(self, scope, fused):
if fused:
return scope + '/bias'
return scope + '/BatchNorm/batchnorm/sub'
def _WeightInit(self, stddev):
"""Returns a truncated normal variable initializer.
Function is defined purely to shorten the name so that it stops wrapping.
Args:
stddev: Standard deviation of normal variable.
Returns:
An initializer that initializes with a truncated normal variable.
"""
return init_ops.truncated_normal_initializer(stddev=stddev)
def _AssertInputOpsAre(self, op, in_op_names):
"""Asserts that all inputs to op come from in_op_names (disregarding order).
Args:
op: Operation to check inputs for.
in_op_names: List of strings, operations where all op's inputs should
come from.
"""
expected_inputs = [in_op_name + ':0' for in_op_name in in_op_names]
self.assertItemsEqual([t.name for t in op.inputs], expected_inputs)
def _AssertOutputGoesToOps(self, op, graph, out_op_names):
"""Asserts that outputs from op go to out_op_names (and perhaps others).
Args:
op: Operation to check outputs for.
graph: Graph where output operations are located.
out_op_names: List of strings, operations where op's outputs should go.
"""
for out_op_name in out_op_names:
out_op = graph.get_operation_by_name(out_op_name)
self.assertIn(op.outputs[0].name, [str(t.name) for t in out_op.inputs])
if __name__ == '__main__':
googletest.main()
|
django-bmf/django-bmf
|
refs/heads/develop
|
djangobmf/contrib/project/migrations/0001_initial.py
|
2
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.db.models.deletion
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Project',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('modified', models.DateTimeField(auto_now=True, verbose_name='Modified', null=True)),
('created', models.DateTimeField(auto_now_add=True, verbose_name='Created', null=True)),
('uuid', models.CharField(editable=False, max_length=100, blank=True, null=True, verbose_name='UUID', db_index=True)),
('name', models.CharField(max_length=255, verbose_name='Name')),
('is_bound', models.BooleanField(default=False, editable=False)),
('is_active', models.BooleanField(default=True, verbose_name='Is active')),
('notes', models.TextField(blank=True)),
('created_by', models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, blank=True, editable=False, to=settings.AUTH_USER_MODEL, null=True, related_name="+")),
('modified_by', models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, blank=True, editable=False, to=settings.AUTH_USER_MODEL, null=True, related_name="+")),
],
options={
'ordering': ['name'],
'abstract': False,
'verbose_name': 'Project',
'verbose_name_plural': 'Project',
},
bases=(models.Model,),
),
]
|
XiaosongWei/crosswalk-test-suite
|
refs/heads/master
|
usecase/usecase-wrt-auto-tests/samples/ApkBuildTemporary/projectdir.py
|
5
|
#!/usr/bin/env python
#
# Copyright (c) 2015 Intel Corporation.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of works must retain the original copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the original copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Intel Corporation nor the names of its contributors
# may be used to endorse or promote products derived from this work without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors:
# Hongjuan, Wang<hongjuanx.wang@intel.com>
import unittest
import os
import sys
import commands
import shutil
import comm
class TestPackertoolsFunctions(unittest.TestCase):
def test_projectdir_antbuild(self):
comm.setUp()
if os.path.exists(comm.Pck_Tools + "example"):
try:
shutil.rmtree(comm.Pck_Tools + "example")
os.remove(comm.Pck_Tools + comm.AppName)
except Exception as e:
os.system("rm -rf " + comm.Pck_Tools + "example &>/dev/null")
os.system("rm -rf " + comm.Pck_Tools + "*apk &>/dev/null")
os.chdir(comm.Pck_Tools)
manifestPath = comm.ConstPath + "/res/manifest.json"
cmd = "python make_apk.py --package=org.xwalk.example --arch=%s --mode=%s --manifest=%s --project-dir=example" % \
(comm.ARCH, comm.MODE, manifestPath)
packstatus = commands.getstatusoutput(cmd)
self.assertEqual(packstatus[0], 0)
os.remove(comm.Pck_Tools + comm.AppName)
buildDir = comm.Pck_Tools + "example/Example"
buildList = os.listdir(buildDir)
print 'buildList', buildList
self.assertIn("res", buildList)
self.assertIn("bin", buildList)
self.assertIn("AndroidManifest.xml", buildList)
buildstatus = commands.getstatusoutput(
"ant release -f " +
buildDir +
"/build.xml")
self.assertEqual(buildstatus[0], 0)
apkName = "Example-release.apk"
self.assertIn(apkName, os.listdir(buildDir + "/bin"))
shutil.copyfile(
buildDir +
"/bin/" +
apkName,
comm.Pck_Tools +
comm.AppName)
inststatus = commands.getstatusoutput(
"adb -s " +
comm.device +
" install -r " +
comm.AppName)
self.assertEquals(0, inststatus[0])
print "Install APK ----------------> OK"
pmstatus = commands.getstatusoutput(
"adb -s " +
comm.device +
" shell pm list packages |grep org.xwalk.example")
self.assertEquals(0, pmstatus[0])
print "Find Package in comm.device ---------------->O.K"
launchstatus = commands.getstatusoutput(
"adb -s " +
comm.device +
" shell am start -n org.xwalk.example/.ExampleActivity")
self.assertEquals(0, launchstatus[0])
print "Launch APK ---------------->OK"
stopstatus = commands.getstatusoutput(
"adb -s " +
comm.device +
" shell am force-stop org.xwalk.example")
if stopstatus[0] == 0:
print "Stop APK ---------------->O.K"
unistatus = commands.getstatusoutput(
"adb -s " +
comm.device +
" uninstall org.xwalk.example")
self.assertEquals(0, unistatus[0])
print "Uninstall APK ---------------->O.K"
else:
print "Stop APK ---------------->Error"
os.system("adb -s " + comm.device + " uninstall org.xwalk.example")
if os.path.exists(comm.Pck_Tools + "example"):
try:
shutil.rmtree(comm.Pck_Tools + "example")
os.remove(comm.Pck_Tools + comm.AppName)
except Exception as e:
os.system("rm -rf " + comm.Pck_Tools + "example &>/dev/null")
os.system("rm -rf " + comm.Pck_Tools + "*apk &>/dev/null")
if __name__ == '__main__':
unittest.main()
|
fabioz/PyDev.Debugger
|
refs/heads/master
|
tests_python/resources/_debugger_case_import_main.py
|
15
|
import _debugger_case_import_imported # break here
print('TEST SUCEEDED')
|
camptocamp/odoo
|
refs/heads/master
|
openerp/addons/test_workflow/tests/test_workflow.py
|
46
|
# -*- coding: utf-8 -*-
import openerp
from openerp import SUPERUSER_ID
from openerp.tests import common
class test_workflows(common.TransactionCase):
def check_activities(self, model_name, i, names):
""" Check that the record i has workitems in the given activity names.
"""
instance = self.registry('workflow.instance')
workitem = self.registry('workflow.workitem')
# Given the workflow instance associated to the record ...
instance_id = instance.search(
self.cr, SUPERUSER_ID,
[('res_type', '=', model_name), ('res_id', '=', i)])
        self.assertTrue(instance_id, 'A workflow instance is expected.')
# ... get all its workitems ...
workitem_ids = workitem.search(
self.cr, SUPERUSER_ID,
[('inst_id', '=', instance_id[0])])
self.assertTrue(
workitem_ids,
'The workflow instance should have workitems.')
        # ... and check the activities they are in against the provided names.
workitem_records = workitem.browse(
self.cr, SUPERUSER_ID, workitem_ids)
self.assertEqual(
sorted([item.act_id.name for item in workitem_records]),
sorted(names))
def check_value(self, model_name, i, value):
""" Check that the record i has the given value.
"""
model = self.registry(model_name)
record = model.read(self.cr, SUPERUSER_ID, [i], ['value'])[0]
self.assertEqual(record['value'], value)
def test_workflow(self):
model = self.registry('test.workflow.model')
trigger = self.registry('test.workflow.trigger')
i = model.create(self.cr, SUPERUSER_ID, {})
self.check_activities(model._name, i, ['a'])
# a -> b is just a signal.
model.signal_workflow(self.cr, SUPERUSER_ID, [i], 'a-b')
self.check_activities(model._name, i, ['b'])
# b -> c is a trigger (which is False),
# so we remain in the b activity.
model.trigger(self.cr, SUPERUSER_ID, [i])
self.check_activities(model._name, i, ['b'])
# b -> c is a trigger (which is set to True).
# so we go in c when the trigger is called.
trigger.write(self.cr, SUPERUSER_ID, [1], {'value': True})
model.trigger(self.cr, SUPERUSER_ID)
self.check_activities(model._name, i, ['c'])
self.assertEqual(
True,
True)
model.unlink(self.cr, SUPERUSER_ID, [i])
def test_workflow_a(self):
model = self.registry('test.workflow.model.a')
i = model.create(self.cr, SUPERUSER_ID, {})
self.check_activities(model._name, i, ['a'])
self.check_value(model._name, i, 0)
model.unlink(self.cr, SUPERUSER_ID, [i])
def test_workflow_b(self):
model = self.registry('test.workflow.model.b')
i = model.create(self.cr, SUPERUSER_ID, {})
self.check_activities(model._name, i, ['a'])
self.check_value(model._name, i, 1)
model.unlink(self.cr, SUPERUSER_ID, [i])
def test_workflow_c(self):
model = self.registry('test.workflow.model.c')
i = model.create(self.cr, SUPERUSER_ID, {})
self.check_activities(model._name, i, ['a'])
self.check_value(model._name, i, 0)
model.unlink(self.cr, SUPERUSER_ID, [i])
def test_workflow_d(self):
model = self.registry('test.workflow.model.d')
i = model.create(self.cr, SUPERUSER_ID, {})
self.check_activities(model._name, i, ['a'])
self.check_value(model._name, i, 1)
model.unlink(self.cr, SUPERUSER_ID, [i])
def test_workflow_e(self):
model = self.registry('test.workflow.model.e')
i = model.create(self.cr, SUPERUSER_ID, {})
self.check_activities(model._name, i, ['b'])
self.check_value(model._name, i, 2)
model.unlink(self.cr, SUPERUSER_ID, [i])
def test_workflow_f(self):
model = self.registry('test.workflow.model.f')
i = model.create(self.cr, SUPERUSER_ID, {})
self.check_activities(model._name, i, ['a'])
self.check_value(model._name, i, 1)
model.signal_workflow(self.cr, SUPERUSER_ID, [i], 'a-b')
self.check_activities(model._name, i, ['b'])
self.check_value(model._name, i, 2)
model.unlink(self.cr, SUPERUSER_ID, [i])
def test_workflow_g(self):
model = self.registry('test.workflow.model.g')
i = model.create(self.cr, SUPERUSER_ID, {})
self.check_activities(model._name, i, ['a'])
self.check_value(model._name, i, 1)
model.unlink(self.cr, SUPERUSER_ID, [i])
def test_workflow_h(self):
model = self.registry('test.workflow.model.h')
i = model.create(self.cr, SUPERUSER_ID, {})
self.check_activities(model._name, i, ['b', 'c'])
self.check_value(model._name, i, 2)
model.unlink(self.cr, SUPERUSER_ID, [i])
def test_workflow_i(self):
model = self.registry('test.workflow.model.i')
i = model.create(self.cr, SUPERUSER_ID, {})
self.check_activities(model._name, i, ['b'])
self.check_value(model._name, i, 2)
model.unlink(self.cr, SUPERUSER_ID, [i])
def test_workflow_j(self):
model = self.registry('test.workflow.model.j')
i = model.create(self.cr, SUPERUSER_ID, {})
self.check_activities(model._name, i, ['a'])
self.check_value(model._name, i, 1)
model.unlink(self.cr, SUPERUSER_ID, [i])
def test_workflow_k(self):
model = self.registry('test.workflow.model.k')
i = model.create(self.cr, SUPERUSER_ID, {})
        # Non-deterministic: can be b or c
# self.check_activities(model._name, i, ['b'])
# self.check_activities(model._name, i, ['c'])
self.check_value(model._name, i, 2)
model.unlink(self.cr, SUPERUSER_ID, [i])
def test_workflow_l(self):
model = self.registry('test.workflow.model.l')
i = model.create(self.cr, SUPERUSER_ID, {})
self.check_activities(model._name, i, ['c', 'c', 'd'])
self.check_value(model._name, i, 3)
model.unlink(self.cr, SUPERUSER_ID, [i])
|
azureplus/chromium_depot_tools
|
refs/heads/master
|
third_party/coverage/templite.py
|
123
|
"""A simple Python template renderer, for a nano-subset of Django syntax."""
# Coincidentally named the same as http://code.activestate.com/recipes/496702/
import re, sys
class Templite(object):
"""A simple template renderer, for a nano-subset of Django syntax.
Supported constructs are extended variable access::
        {{var.modifier.modifier|filter|filter}}
loops::
{% for var in list %}...{% endfor %}
and ifs::
{% if var %}...{% endif %}
Comments are within curly-hash markers::
{# This will be ignored #}
Construct a Templite with the template text, then use `render` against a
dictionary context to create a finished string.
"""
def __init__(self, text, *contexts):
"""Construct a Templite with the given `text`.
`contexts` are dictionaries of values to use for future renderings.
These are good for filters and global values.
"""
self.text = text
self.context = {}
for context in contexts:
self.context.update(context)
# Split the text to form a list of tokens.
toks = re.split(r"(?s)({{.*?}}|{%.*?%}|{#.*?#})", text)
# Parse the tokens into a nested list of operations. Each item in the
# list is a tuple with an opcode, and arguments. They'll be
# interpreted by TempliteEngine.
#
# When parsing an action tag with nested content (if, for), the current
# ops list is pushed onto ops_stack, and the parsing continues in a new
# ops list that is part of the arguments to the if or for op.
ops = []
ops_stack = []
for tok in toks:
if tok.startswith('{{'):
# Expression: ('exp', expr)
ops.append(('exp', tok[2:-2].strip()))
elif tok.startswith('{#'):
# Comment: ignore it and move on.
continue
elif tok.startswith('{%'):
# Action tag: split into words and parse further.
words = tok[2:-2].strip().split()
if words[0] == 'if':
# If: ('if', (expr, body_ops))
if_ops = []
assert len(words) == 2
ops.append(('if', (words[1], if_ops)))
ops_stack.append(ops)
ops = if_ops
elif words[0] == 'for':
# For: ('for', (varname, listexpr, body_ops))
assert len(words) == 4 and words[2] == 'in'
for_ops = []
ops.append(('for', (words[1], words[3], for_ops)))
ops_stack.append(ops)
ops = for_ops
elif words[0].startswith('end'):
                    # End tag ('endif' or 'endfor'): pop the ops stack.
ops = ops_stack.pop()
assert ops[-1][0] == words[0][3:]
else:
raise SyntaxError("Don't understand tag %r" % words)
else:
ops.append(('lit', tok))
assert not ops_stack, "Unmatched action tag: %r" % ops_stack[-1][0]
self.ops = ops
def render(self, context=None):
"""Render this template by applying it to `context`.
`context` is a dictionary of values to use in this rendering.
"""
# Make the complete context we'll use.
ctx = dict(self.context)
if context:
ctx.update(context)
# Run it through an engine, and return the result.
engine = _TempliteEngine(ctx)
engine.execute(self.ops)
return "".join(engine.result)
class _TempliteEngine(object):
"""Executes Templite objects to produce strings."""
def __init__(self, context):
self.context = context
self.result = []
def execute(self, ops):
"""Execute `ops` in the engine.
Called recursively for the bodies of if's and loops.
"""
for op, args in ops:
if op == 'lit':
self.result.append(args)
elif op == 'exp':
try:
self.result.append(str(self.evaluate(args)))
except:
exc_class, exc, _ = sys.exc_info()
new_exc = exc_class("Couldn't evaluate {{ %s }}: %s"
% (args, exc))
raise new_exc
elif op == 'if':
expr, body = args
if self.evaluate(expr):
self.execute(body)
elif op == 'for':
var, lis, body = args
vals = self.evaluate(lis)
for val in vals:
self.context[var] = val
self.execute(body)
else:
raise AssertionError("TempliteEngine doesn't grok op %r" % op)
def evaluate(self, expr):
"""Evaluate an expression.
`expr` can have pipes and dots to indicate data access and filtering.
"""
if "|" in expr:
pipes = expr.split("|")
value = self.evaluate(pipes[0])
for func in pipes[1:]:
value = self.evaluate(func)(value)
elif "." in expr:
dots = expr.split('.')
value = self.evaluate(dots[0])
for dot in dots[1:]:
try:
value = getattr(value, dot)
except AttributeError:
value = value[dot]
if hasattr(value, '__call__'):
value = value()
else:
value = self.context[expr]
return value
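if __name__ == '__main__':
    # A hedged demo, not part of the original module: render a tiny template
    # using only the constructs documented in the class docstring above.
    demo = Templite(
        "Hello {{name|upper}}!"
        "{% if items %} You have:{% for it in items %} {{it}}{% endfor %}{% endif %}",
        {'upper': lambda s: s.upper()},
    )
    print demo.render({'name': 'world', 'items': ['a', 'b']})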
|
TheTypoMaster/chromium-crosswalk
|
refs/heads/master
|
tools/telemetry/third_party/pyserial/serial/urlhandler/protocol_hwgrep.py
|
159
|
#! python
#
# Python Serial Port Extension for Win32, Linux, BSD, Jython
# see __init__.py
#
# This module implements a special URL handler that uses the port listing to
# find ports by searching the string descriptions.
#
# (C) 2011 Chris Liechti <cliechti@gmx.net>
# this is distributed under a free software license, see license.txt
#
# URL format: hwgrep://regexp
import serial
import serial.tools.list_ports
class Serial(serial.Serial):
"""Just inherit the native Serial port implementation and patch the open function."""
def setPort(self, value):
"""translate port name before storing it"""
if isinstance(value, basestring) and value.startswith('hwgrep://'):
serial.Serial.setPort(self, self.fromURL(value))
else:
serial.Serial.setPort(self, value)
def fromURL(self, url):
"""extract host and port from an URL string"""
if url.lower().startswith("hwgrep://"): url = url[9:]
# use a for loop to get the 1st element from the generator
for port, desc, hwid in serial.tools.list_ports.grep(url):
return port
else:
raise serial.SerialException('no ports found matching regexp %r' % (url,))
# override property
port = property(serial.Serial.getPort, setPort, doc="Port setting")
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
if __name__ == '__main__':
#~ s = Serial('hwgrep://ttyS0')
s = Serial(None)
s.port = 'hwgrep://ttyS0'
print s
|
mancoast/CPythonPyc_test
|
refs/heads/master
|
cpython/263_test_wsgiref.py
|
55
|
from __future__ import nested_scopes # Backward compat for 2.1
from unittest import TestCase
from wsgiref.util import setup_testing_defaults
from wsgiref.headers import Headers
from wsgiref.handlers import BaseHandler, BaseCGIHandler
from wsgiref import util
from wsgiref.validate import validator
from wsgiref.simple_server import WSGIServer, WSGIRequestHandler, demo_app
from wsgiref.simple_server import make_server
from StringIO import StringIO
from SocketServer import BaseServer
import re, sys
from test import test_support
class MockServer(WSGIServer):
"""Non-socket HTTP server"""
def __init__(self, server_address, RequestHandlerClass):
BaseServer.__init__(self, server_address, RequestHandlerClass)
self.server_bind()
def server_bind(self):
host, port = self.server_address
self.server_name = host
self.server_port = port
self.setup_environ()
class MockHandler(WSGIRequestHandler):
"""Non-socket HTTP handler"""
def setup(self):
self.connection = self.request
self.rfile, self.wfile = self.connection
def finish(self):
pass
def hello_app(environ,start_response):
start_response("200 OK", [
('Content-Type','text/plain'),
('Date','Mon, 05 Jun 2006 18:49:54 GMT')
])
return ["Hello, world!"]
def run_amock(app=hello_app, data="GET / HTTP/1.0\n\n"):
server = make_server("", 80, app, MockServer, MockHandler)
inp, out, err, olderr = StringIO(data), StringIO(), StringIO(), sys.stderr
sys.stderr = err
try:
server.finish_request((inp,out), ("127.0.0.1",8888))
finally:
sys.stderr = olderr
return out.getvalue(), err.getvalue()
def compare_generic_iter(make_it,match):
"""Utility to compare a generic 2.1/2.2+ iterator with an iterable
If running under Python 2.2+, this tests the iterator using iter()/next(),
as well as __getitem__. 'make_it' must be a function returning a fresh
iterator to be tested (since this may test the iterator twice)."""
it = make_it()
n = 0
for item in match:
if not it[n]==item: raise AssertionError
n+=1
try:
it[n]
except IndexError:
pass
else:
raise AssertionError("Too many items from __getitem__",it)
try:
iter, StopIteration
except NameError:
pass
else:
# Only test iter mode under 2.2+
it = make_it()
if not iter(it) is it: raise AssertionError
for item in match:
if not it.next()==item: raise AssertionError
try:
it.next()
except StopIteration:
pass
else:
raise AssertionError("Too many items from .next()",it)
class IntegrationTests(TestCase):
def check_hello(self, out, has_length=True):
self.assertEqual(out,
"HTTP/1.0 200 OK\r\n"
"Server: WSGIServer/0.1 Python/"+sys.version.split()[0]+"\r\n"
"Content-Type: text/plain\r\n"
"Date: Mon, 05 Jun 2006 18:49:54 GMT\r\n" +
(has_length and "Content-Length: 13\r\n" or "") +
"\r\n"
"Hello, world!"
)
def test_plain_hello(self):
out, err = run_amock()
self.check_hello(out)
def test_validated_hello(self):
out, err = run_amock(validator(hello_app))
# the middleware doesn't support len(), so content-length isn't there
self.check_hello(out, has_length=False)
def test_simple_validation_error(self):
def bad_app(environ,start_response):
start_response("200 OK", ('Content-Type','text/plain'))
return ["Hello, world!"]
out, err = run_amock(validator(bad_app))
self.failUnless(out.endswith(
"A server error occurred. Please contact the administrator."
))
self.assertEqual(
err.splitlines()[-2],
"AssertionError: Headers (('Content-Type', 'text/plain')) must"
" be of type list: <type 'tuple'>"
)
class UtilityTests(TestCase):
def checkShift(self,sn_in,pi_in,part,sn_out,pi_out):
env = {'SCRIPT_NAME':sn_in,'PATH_INFO':pi_in}
util.setup_testing_defaults(env)
self.assertEqual(util.shift_path_info(env),part)
self.assertEqual(env['PATH_INFO'],pi_out)
self.assertEqual(env['SCRIPT_NAME'],sn_out)
return env
def checkDefault(self, key, value, alt=None):
# Check defaulting when empty
env = {}
util.setup_testing_defaults(env)
if isinstance(value,StringIO):
self.failUnless(isinstance(env[key],StringIO))
else:
self.assertEqual(env[key],value)
# Check existing value
env = {key:alt}
util.setup_testing_defaults(env)
self.failUnless(env[key] is alt)
def checkCrossDefault(self,key,value,**kw):
util.setup_testing_defaults(kw)
self.assertEqual(kw[key],value)
def checkAppURI(self,uri,**kw):
util.setup_testing_defaults(kw)
self.assertEqual(util.application_uri(kw),uri)
def checkReqURI(self,uri,query=1,**kw):
util.setup_testing_defaults(kw)
self.assertEqual(util.request_uri(kw,query),uri)
def checkFW(self,text,size,match):
def make_it(text=text,size=size):
return util.FileWrapper(StringIO(text),size)
compare_generic_iter(make_it,match)
it = make_it()
self.failIf(it.filelike.closed)
for item in it:
pass
self.failIf(it.filelike.closed)
it.close()
self.failUnless(it.filelike.closed)
def testSimpleShifts(self):
self.checkShift('','/', '', '/', '')
self.checkShift('','/x', 'x', '/x', '')
self.checkShift('/','', None, '/', '')
self.checkShift('/a','/x/y', 'x', '/a/x', '/y')
self.checkShift('/a','/x/', 'x', '/a/x', '/')
def testNormalizedShifts(self):
self.checkShift('/a/b', '/../y', '..', '/a', '/y')
self.checkShift('', '/../y', '..', '', '/y')
self.checkShift('/a/b', '//y', 'y', '/a/b/y', '')
self.checkShift('/a/b', '//y/', 'y', '/a/b/y', '/')
self.checkShift('/a/b', '/./y', 'y', '/a/b/y', '')
self.checkShift('/a/b', '/./y/', 'y', '/a/b/y', '/')
self.checkShift('/a/b', '///./..//y/.//', '..', '/a', '/y/')
self.checkShift('/a/b', '///', '', '/a/b/', '')
self.checkShift('/a/b', '/.//', '', '/a/b/', '')
self.checkShift('/a/b', '/x//', 'x', '/a/b/x', '/')
self.checkShift('/a/b', '/.', None, '/a/b', '')
def testDefaults(self):
for key, value in [
('SERVER_NAME','127.0.0.1'),
('SERVER_PORT', '80'),
('SERVER_PROTOCOL','HTTP/1.0'),
('HTTP_HOST','127.0.0.1'),
('REQUEST_METHOD','GET'),
('SCRIPT_NAME',''),
('PATH_INFO','/'),
('wsgi.version', (1,0)),
('wsgi.run_once', 0),
('wsgi.multithread', 0),
('wsgi.multiprocess', 0),
('wsgi.input', StringIO("")),
('wsgi.errors', StringIO()),
('wsgi.url_scheme','http'),
]:
self.checkDefault(key,value)
def testCrossDefaults(self):
self.checkCrossDefault('HTTP_HOST',"foo.bar",SERVER_NAME="foo.bar")
self.checkCrossDefault('wsgi.url_scheme',"https",HTTPS="on")
self.checkCrossDefault('wsgi.url_scheme',"https",HTTPS="1")
self.checkCrossDefault('wsgi.url_scheme',"https",HTTPS="yes")
self.checkCrossDefault('wsgi.url_scheme',"http",HTTPS="foo")
self.checkCrossDefault('SERVER_PORT',"80",HTTPS="foo")
self.checkCrossDefault('SERVER_PORT',"443",HTTPS="on")
def testGuessScheme(self):
self.assertEqual(util.guess_scheme({}), "http")
self.assertEqual(util.guess_scheme({'HTTPS':"foo"}), "http")
self.assertEqual(util.guess_scheme({'HTTPS':"on"}), "https")
self.assertEqual(util.guess_scheme({'HTTPS':"yes"}), "https")
self.assertEqual(util.guess_scheme({'HTTPS':"1"}), "https")
def testAppURIs(self):
self.checkAppURI("http://127.0.0.1/")
self.checkAppURI("http://127.0.0.1/spam", SCRIPT_NAME="/spam")
self.checkAppURI("http://spam.example.com:2071/",
HTTP_HOST="spam.example.com:2071", SERVER_PORT="2071")
self.checkAppURI("http://spam.example.com/",
SERVER_NAME="spam.example.com")
self.checkAppURI("http://127.0.0.1/",
HTTP_HOST="127.0.0.1", SERVER_NAME="spam.example.com")
self.checkAppURI("https://127.0.0.1/", HTTPS="on")
self.checkAppURI("http://127.0.0.1:8000/", SERVER_PORT="8000",
HTTP_HOST=None)
def testReqURIs(self):
self.checkReqURI("http://127.0.0.1/")
self.checkReqURI("http://127.0.0.1/spam", SCRIPT_NAME="/spam")
self.checkReqURI("http://127.0.0.1/spammity/spam",
SCRIPT_NAME="/spammity", PATH_INFO="/spam")
self.checkReqURI("http://127.0.0.1/spammity/spam?say=ni",
SCRIPT_NAME="/spammity", PATH_INFO="/spam",QUERY_STRING="say=ni")
self.checkReqURI("http://127.0.0.1/spammity/spam", 0,
SCRIPT_NAME="/spammity", PATH_INFO="/spam",QUERY_STRING="say=ni")
def testFileWrapper(self):
self.checkFW("xyz"*50, 120, ["xyz"*40,"xyz"*10])
def testHopByHop(self):
for hop in (
"Connection Keep-Alive Proxy-Authenticate Proxy-Authorization "
"TE Trailers Transfer-Encoding Upgrade"
).split():
for alt in hop, hop.title(), hop.upper(), hop.lower():
self.failUnless(util.is_hop_by_hop(alt))
# Not comprehensive, just a few random header names
for hop in (
"Accept Cache-Control Date Pragma Trailer Via Warning"
).split():
for alt in hop, hop.title(), hop.upper(), hop.lower():
self.failIf(util.is_hop_by_hop(alt))
class HeaderTests(TestCase):
def testMappingInterface(self):
test = [('x','y')]
self.assertEqual(len(Headers([])),0)
self.assertEqual(len(Headers(test[:])),1)
self.assertEqual(Headers(test[:]).keys(), ['x'])
self.assertEqual(Headers(test[:]).values(), ['y'])
self.assertEqual(Headers(test[:]).items(), test)
self.failIf(Headers(test).items() is test) # must be copy!
h=Headers([])
del h['foo'] # should not raise an error
h['Foo'] = 'bar'
for m in h.has_key, h.__contains__, h.get, h.get_all, h.__getitem__:
self.failUnless(m('foo'))
self.failUnless(m('Foo'))
self.failUnless(m('FOO'))
self.failIf(m('bar'))
self.assertEqual(h['foo'],'bar')
h['foo'] = 'baz'
self.assertEqual(h['FOO'],'baz')
self.assertEqual(h.get_all('foo'),['baz'])
self.assertEqual(h.get("foo","whee"), "baz")
self.assertEqual(h.get("zoo","whee"), "whee")
self.assertEqual(h.setdefault("foo","whee"), "baz")
self.assertEqual(h.setdefault("zoo","whee"), "whee")
self.assertEqual(h["foo"],"baz")
self.assertEqual(h["zoo"],"whee")
def testRequireList(self):
self.assertRaises(TypeError, Headers, "foo")
def testExtras(self):
h = Headers([])
self.assertEqual(str(h),'\r\n')
h.add_header('foo','bar',baz="spam")
self.assertEqual(h['foo'], 'bar; baz="spam"')
self.assertEqual(str(h),'foo: bar; baz="spam"\r\n\r\n')
h.add_header('Foo','bar',cheese=None)
self.assertEqual(h.get_all('foo'),
['bar; baz="spam"', 'bar; cheese'])
self.assertEqual(str(h),
'foo: bar; baz="spam"\r\n'
'Foo: bar; cheese\r\n'
'\r\n'
)
class ErrorHandler(BaseCGIHandler):
"""Simple handler subclass for testing BaseHandler"""
def __init__(self,**kw):
setup_testing_defaults(kw)
BaseCGIHandler.__init__(
self, StringIO(''), StringIO(), StringIO(), kw,
multithread=True, multiprocess=True
)
class TestHandler(ErrorHandler):
"""Simple handler subclass for testing BaseHandler, w/error passthru"""
def handle_error(self):
raise # for testing, we want to see what's happening
class HandlerTests(TestCase):
def checkEnvironAttrs(self, handler):
env = handler.environ
for attr in [
'version','multithread','multiprocess','run_once','file_wrapper'
]:
if attr=='file_wrapper' and handler.wsgi_file_wrapper is None:
continue
self.assertEqual(getattr(handler,'wsgi_'+attr),env['wsgi.'+attr])
def checkOSEnviron(self,handler):
empty = {}; setup_testing_defaults(empty)
env = handler.environ
from os import environ
for k,v in environ.items():
if not empty.has_key(k):
self.assertEqual(env[k],v)
for k,v in empty.items():
self.failUnless(env.has_key(k))
def testEnviron(self):
h = TestHandler(X="Y")
h.setup_environ()
self.checkEnvironAttrs(h)
self.checkOSEnviron(h)
self.assertEqual(h.environ["X"],"Y")
def testCGIEnviron(self):
h = BaseCGIHandler(None,None,None,{})
h.setup_environ()
for key in 'wsgi.url_scheme', 'wsgi.input', 'wsgi.errors':
self.assert_(h.environ.has_key(key))
def testScheme(self):
h=TestHandler(HTTPS="on"); h.setup_environ()
self.assertEqual(h.environ['wsgi.url_scheme'],'https')
h=TestHandler(); h.setup_environ()
self.assertEqual(h.environ['wsgi.url_scheme'],'http')
def testAbstractMethods(self):
h = BaseHandler()
for name in [
'_flush','get_stdin','get_stderr','add_cgi_vars'
]:
self.assertRaises(NotImplementedError, getattr(h,name))
self.assertRaises(NotImplementedError, h._write, "test")
def testContentLength(self):
# Demo one reason iteration is better than write()... ;)
def trivial_app1(e,s):
s('200 OK',[])
return [e['wsgi.url_scheme']]
def trivial_app2(e,s):
s('200 OK',[])(e['wsgi.url_scheme'])
return []
h = TestHandler()
h.run(trivial_app1)
self.assertEqual(h.stdout.getvalue(),
"Status: 200 OK\r\n"
"Content-Length: 4\r\n"
"\r\n"
"http")
h = TestHandler()
h.run(trivial_app2)
self.assertEqual(h.stdout.getvalue(),
"Status: 200 OK\r\n"
"\r\n"
"http")
def testBasicErrorOutput(self):
def non_error_app(e,s):
s('200 OK',[])
return []
def error_app(e,s):
raise AssertionError("This should be caught by handler")
h = ErrorHandler()
h.run(non_error_app)
self.assertEqual(h.stdout.getvalue(),
"Status: 200 OK\r\n"
"Content-Length: 0\r\n"
"\r\n")
self.assertEqual(h.stderr.getvalue(),"")
h = ErrorHandler()
h.run(error_app)
self.assertEqual(h.stdout.getvalue(),
"Status: %s\r\n"
"Content-Type: text/plain\r\n"
"Content-Length: %d\r\n"
"\r\n%s" % (h.error_status,len(h.error_body),h.error_body))
self.failUnless(h.stderr.getvalue().find("AssertionError")<>-1)
def testErrorAfterOutput(self):
MSG = "Some output has been sent"
def error_app(e,s):
s("200 OK",[])(MSG)
raise AssertionError("This should be caught by handler")
h = ErrorHandler()
h.run(error_app)
self.assertEqual(h.stdout.getvalue(),
"Status: 200 OK\r\n"
"\r\n"+MSG)
self.failUnless(h.stderr.getvalue().find("AssertionError")<>-1)
def testHeaderFormats(self):
def non_error_app(e,s):
s('200 OK',[])
return []
stdpat = (
r"HTTP/%s 200 OK\r\n"
r"Date: \w{3}, [ 0123]\d \w{3} \d{4} \d\d:\d\d:\d\d GMT\r\n"
r"%s" r"Content-Length: 0\r\n" r"\r\n"
)
shortpat = (
"Status: 200 OK\r\n" "Content-Length: 0\r\n" "\r\n"
)
for ssw in "FooBar/1.0", None:
sw = ssw and "Server: %s\r\n" % ssw or ""
for version in "1.0", "1.1":
for proto in "HTTP/0.9", "HTTP/1.0", "HTTP/1.1":
h = TestHandler(SERVER_PROTOCOL=proto)
h.origin_server = False
h.http_version = version
h.server_software = ssw
h.run(non_error_app)
self.assertEqual(shortpat,h.stdout.getvalue())
h = TestHandler(SERVER_PROTOCOL=proto)
h.origin_server = True
h.http_version = version
h.server_software = ssw
h.run(non_error_app)
if proto=="HTTP/0.9":
self.assertEqual(h.stdout.getvalue(),"")
else:
self.failUnless(
re.match(stdpat%(version,sw), h.stdout.getvalue()),
(stdpat%(version,sw), h.stdout.getvalue())
)
# This epilogue is needed for compatibility with the Python 2.5 regrtest module
def test_main():
test_support.run_unittest(__name__)
if __name__ == "__main__":
test_main()
# the above lines intentionally left blank
|
ryfeus/lambda-packs
|
refs/heads/master
|
Tensorflow_LightGBM_Scipy_nightly/source/setuptools/windows_support.py
|
1015
|
import platform
import ctypes
def windows_only(func):
if platform.system() != 'Windows':
return lambda *args, **kwargs: None
return func
@windows_only
def hide_file(path):
"""
Set the hidden attribute on a file or directory.
From http://stackoverflow.com/questions/19622133/
`path` must be text.
"""
__import__('ctypes.wintypes')
SetFileAttributes = ctypes.windll.kernel32.SetFileAttributesW
SetFileAttributes.argtypes = ctypes.wintypes.LPWSTR, ctypes.wintypes.DWORD
SetFileAttributes.restype = ctypes.wintypes.BOOL
FILE_ATTRIBUTE_HIDDEN = 0x02
ret = SetFileAttributes(path, FILE_ATTRIBUTE_HIDDEN)
if not ret:
raise ctypes.WinError()
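# A minimal usage sketch (the path below is hypothetical); on non-Windows
# platforms the windows_only decorator turns hide_file into a no-op:
#     hide_file(u'C:\\projects\\.eggs')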
|
NaturalHistoryMuseum/inselect
|
refs/heads/master
|
inselect/gui/views/boxes/box_item.py
|
1
|
import sys
from itertools import chain
from PyQt5.QtCore import Qt, QRect, QRectF
from PyQt5.QtGui import QPen
from PyQt5.QtWidgets import QGraphicsItem, QGraphicsRectItem
from inselect.lib.utils import debug_print
from inselect.gui.colours import colour_scheme_choice
from inselect.gui.utils import painter_state
from .resize_handle import ResizeHandle
from .reticle import Reticle
class BoxItem(QGraphicsRectItem):
# Might be some relevant stuff here:
# http://stackoverflow.com/questions/10590881/events-and-signals-in-qts-qgraphicsitem-how-is-this-supposed-to-work
# The width of the line (in pixels) drawn around the box.
# A width of 1 on Mac OS X is too thin. 2 is too thick on Windows.
BOX_WIDTH = 2 if 'darwin' == sys.platform else 1
def __init__(self, x, y, w, h, isvalid, parent=None):
super(BoxItem, self).__init__(x, y, w, h, parent)
self.setFlags(QGraphicsItem.ItemIsFocusable |
QGraphicsItem.ItemIsSelectable |
QGraphicsItem.ItemSendsGeometryChanges |
QGraphicsItem.ItemIsMovable)
self.setCursor(Qt.OpenHandCursor)
self.setAcceptHoverEvents(True)
# True if the box has valid metadata
self._isvalid = isvalid
# Points of interest as represented by instances of Reticle
self._pois = []
# Resize handles
positions = (Qt.TopLeftCorner, Qt.TopRightCorner, Qt.BottomLeftCorner,
Qt.BottomRightCorner)
self._handles = []
self._handles = [self._create_handle(pos) for pos in positions]
self._layout_children()
self._set_z_index()
def paint(self, painter, option, widget=None):
"""QGraphicsRectItem virtual
"""
# TODO LH Is there a way to clip to overlapping
# QAbstractGraphicsItems with a larger zorder
# TODO LH Get pixmap without tight coupling to scene
if not self.has_mouse():
painter.drawPixmap(self.boundingRect(),
self.scene().pixmap,
self.sceneBoundingRect())
with painter_state(painter):
outline_colour, fill_colour = self.colours
# Cosmetic pens "...draw strokes that have a constant width
# regardless of any transformations applied to the QPainter they are
# used with."
pen = QPen(outline_colour, self.BOX_WIDTH, Qt.SolidLine)
pen.setCosmetic(True)
painter.setPen(pen)
r = self.boundingRect()
painter.drawRect(r)
if fill_colour:
painter.fillRect(r, fill_colour)
def has_mouse(self):
"""True if self or self._handles has grabbed the mouse
"""
return self.scene().mouseGrabberItem() in chain([self], self._handles)
@property
def colours(self):
"""Tuple of two QColors to use for the box's border and fill
respectively. Fill might be None.
"""
colours = colour_scheme_choice().current['Colours']
has_mouse = self.has_mouse()
if has_mouse:
outline = colours['Resizing']
elif self.isSelected():
outline = colours['Selected']
elif self._isvalid:
outline = colours['Valid']
else:
outline = colours['Invalid']
if not self._isvalid and not has_mouse:
fill = colours['InvalidFill']
else:
fill = None
return outline, fill
def update(self, rect=QRectF()):
"""QGraphicsRectItem function
"""
# TODO LH QGraphicsRectItem::update is not a virtual function - is it
# OK to implement this function and call the base class's
# implementation?
super(BoxItem, self).update(rect)
for item in self._handles:
item.update()
def hoverEnterEvent(self, event):
"""QGraphicsRectItem virtual
"""
debug_print('BoxItem.hoverEnterEvent')
super(BoxItem, self).hoverEnterEvent(event)
self._set_handles_visible(True)
self._set_z_index()
self.update()
def hoverLeaveEvent(self, event):
"""QGraphicsRectItem virtual
"""
debug_print('BoxItem.hoverLeaveEvent')
super(BoxItem, self).hoverLeaveEvent(event)
self._set_handles_visible(False)
self._set_z_index()
self.update()
def _set_handles_visible(self, visible):
for handle in self._handles:
handle.setVisible(visible)
def _create_handle(self, corner):
# Creates and returns a new ResizeHandle at the given Qt.Corner
handle = ResizeHandle(corner, self)
handle.setVisible(False)
handle.setFlags(QGraphicsItem.ItemStacksBehindParent |
QGraphicsItem.ItemIgnoresTransformations)
return handle
def _layout_children(self):
"""Moves child graphics items to the appropriate positions
"""
bounding = self.boundingRect()
for child in chain(self._handles, self._pois):
child.layout(bounding)
def setRect(self, rect):
"""QGraphicsRectItem function
"""
debug_print('BoxItem.setRect')
super(BoxItem, self).setRect(rect)
self._set_z_index()
self._layout_children()
def mousePressEvent(self, event):
"""QGraphicsRectItem virtual
"""
debug_print('BoxItem.mousePressEvent')
super(BoxItem, self).mousePressEvent(event)
self._set_z_index()
if Qt.ShiftModifier == event.modifiers():
# Add a point of interest
self.append_point_of_interest(event.pos())
else:
# Starting a move
self.setCursor(Qt.ClosedHandCursor)
self.update()
def mouseReleaseEvent(self, event):
"""QGraphicsRectItem virtual
"""
debug_print('BoxItem.mouseReleaseEvent')
super(BoxItem, self).mouseReleaseEvent(event)
self.setCursor(Qt.OpenHandCursor)
self._set_z_index()
self.update()
def itemChange(self, change, value):
"""QGraphicsItem virtual
"""
if change == self.ItemSelectedHasChanged:
# Clear points of interest
scene = self.scene()
while self._pois:
scene.removeItem(self._pois.pop())
# Item has gained or lost selection
self._set_z_index()
return super(BoxItem, self).itemChange(change, value)
def set_rect(self, new_rect):
"""Sets a new QRect in integer coordinates
"""
# Cumbersome conversion to ints
current = self.sceneBoundingRect()
current = QRect(current.left(), current.top(),
current.width(), current.height())
if current != new_rect:
msg = 'Update rect for [{0}] from [{1}] to [{2}]'
debug_print(msg.format(self, current, new_rect))
self.prepareGeometryChange()
# setrect() expects floating point rect
self.setRect(QRectF(new_rect))
def set_isvalid(self, isvalid):
"""Sets a new 'is valid'
"""
if isvalid != self._isvalid:
self._isvalid = isvalid
self.update()
def _set_z_index(self):
"""Updates the Z-index of the box
This sorts the boxes such that the bigger the area of a box, the lower
        its Z-index is; and boxes that are selected and have mouse or keyboard
focus are always above other boxes.
"""
rect = self.rect()
# Smaller items have a higher z
z = 1.0
if rect.width() and rect.height():
            z += 1.0 / float(rect.width() * rect.height())
if self.isSelected():
z += 1.0
else:
# Newly created items have zero width and height
pass
self.setZValue(z)
def adjust_rect(self, dx1, dy1, dx2, dy2):
"""Adjusts rect
"""
r = self.rect()
r.adjust(dx1, dy1, dx2, dy2)
if r.width() > 1.0 and r.height() > 1.0:
self.prepareGeometryChange()
self.setRect(r)
def append_point_of_interest(self, pos):
"""Appends pos (a QPoint relative to the top-left of this box) to the
list of points of interest
"""
debug_print('New point of interest at [{0}]'.format(pos))
self._pois.append(Reticle(pos - self.boundingRect().topLeft(), self))
self._pois[-1].layout(self.boundingRect())
self._pois[-1].setFlags(QGraphicsItem.ItemIgnoresTransformations)
@property
def points_of_interest(self):
"""An iterable of QPointFs in item coordinates
"""
return [poi.offset for poi in self._pois]
|
modulexcite/PTVS
|
refs/heads/master
|
Python/Product/TestAdapter/visualstudio_py_testlauncher.py
|
13
|
# ############################################################################
#
# Copyright (c) Microsoft Corporation.
#
# This source code is subject to terms and conditions of the Apache License, Version 2.0. A
# copy of the license can be found in the License.html file at the root of this distribution. If
# you cannot locate the Apache License, Version 2.0, please send an email to
# vspython@microsoft.com. By using this source code in any fashion, you are agreeing to be bound
# by the terms of the Apache License, Version 2.0.
#
# You must not remove this notice, or any other, from this software.
#
# ###########################################################################
def main():
import os
import sys
import unittest
from optparse import OptionParser
parser = OptionParser(prog = 'visualstudio_py_testlauncher', usage = 'Usage: %prog [<option>] <test names>... ')
parser.add_option('-s', '--secret', metavar='<secret>', help='restrict server to only allow clients that specify <secret> when connecting')
parser.add_option('-p', '--port', type='int', metavar='<port>', help='listen for debugger connections on <port>')
parser.add_option('-x', '--mixed-mode', action='store_true', help='wait for mixed-mode debugger to attach')
parser.add_option('-t', '--test', type='str', dest='tests', action='append', help='specifies a test to run')
parser.add_option('-m', '--module', type='str', help='name of the module to import the tests from')
(opts, _) = parser.parse_args()
sys.path[0] = os.getcwd()
if opts.secret and opts.port:
from ptvsd.visualstudio_py_debugger import DONT_DEBUG, DEBUG_ENTRYPOINTS, get_code
from ptvsd.attach_server import DEFAULT_PORT, enable_attach, wait_for_attach
DONT_DEBUG.append(os.path.normcase(__file__))
DEBUG_ENTRYPOINTS.add(get_code(main))
enable_attach(opts.secret, ('127.0.0.1', getattr(opts, 'port', DEFAULT_PORT)), redirect_output = True)
wait_for_attach()
elif opts.mixed_mode:
# For mixed-mode attach, there's no ptvsd and hence no wait_for_attach(),
# so we have to use Win32 API in a loop to do the same thing.
from time import sleep
from ctypes import windll, c_char
while True:
if windll.kernel32.IsDebuggerPresent() != 0:
break
sleep(0.1)
try:
debugger_helper = windll['Microsoft.PythonTools.Debugger.Helper.x86.dll']
except WinError:
debugger_helper = windll['Microsoft.PythonTools.Debugger.Helper.x64.dll']
isTracing = c_char.in_dll(debugger_helper, "isTracing")
while True:
if isTracing.value != 0:
break
sleep(0.1)
__import__(opts.module)
module = sys.modules[opts.module]
test = unittest.defaultTestLoader.loadTestsFromNames(opts.tests, module)
runner = unittest.TextTestRunner(verbosity=0)
result = runner.run(test)
sys.exit(not result.wasSuccessful())
if __name__ == '__main__':
main()
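# Illustrative invocation (the module and test names below are hypothetical):
#     python visualstudio_py_testlauncher.py -m tests.test_math \
#         -t tests.test_math.MathTests.test_add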
|
virneo/opencog
|
refs/heads/master
|
opencog/python/pln_old/__init__.py
|
32
|
from agents import InferenceAgent, TestInferenceAgent, \
BackwardInferenceAgent, ForwardInferenceAgent
|
lykahb/aeson
|
refs/heads/master
|
benchmarks/bench-parse.py
|
22
|
#!/usr/bin/env python
import os, re, subprocess, sys
result_re = re.compile(r'^\s*(\d+) good, (\d+\.\d+)s$', re.M)
if len(sys.argv) > 1:
parser_exe = sys.argv[1]
else:
parser_exe = ('dist/build/aeson-benchmark-aeson-parse/' +
'aeson-benchmark-aeson-parse')
def run(count, filename):
print ' %s :: %s times' % (filename, count)
p = subprocess.Popen([parser_exe, '65536', str(count), filename],
stdout=subprocess.PIPE)
output = p.stdout.read()
p.wait()
m = result_re.search(output)
if not m:
print >> sys.stderr, 'run gave confusing output!?'
sys.stderr.write(output)
return
else:
#sys.stdout.write(output)
pass
good, elapsed = m.groups()
good, elapsed = int(good), float(elapsed)
st = os.stat(filename)
parses_per_second = good / elapsed
mb_per_second = st.st_size * parses_per_second / 1048576
print (' %.3f seconds, %d parses/sec, %.3f MB/sec' %
(elapsed, parses_per_second, mb_per_second))
return parses_per_second, mb_per_second, st.st_size, elapsed
def runtimes(count, filename, times=1):
for i in xrange(times):
yield run(count, filename)
info = '''
json-data/twitter1.json 60000
json-data/twitter10.json 13000
json-data/twitter20.json 7500
json-data/twitter50.json 2500
json-data/twitter100.json 1000
json-data/jp10.json 4000
json-data/jp50.json 1200
json-data/jp100.json 700
'''
for i in info.strip().splitlines():
name, count = i.split()
best = sorted(runtimes(int(count), name, times=3), reverse=True)[0]
parses_per_second, mb_per_second, size, elapsed = best
print ('%.1f KB: %d msg\\/sec (%.1f MB\\/sec)' %
(size / 1024.0, int(round(parses_per_second)), mb_per_second))
|
crossgovernmentservices/people
|
refs/heads/master
|
application/home_blueprint/views.py
|
1
|
from flask import (
Blueprint,
)
from flask.ext.pushrod import pushrod_view
blueprint = Blueprint(
'home',
__name__,
template_folder='templates')
@blueprint.route('/')
@pushrod_view(jinja_template='index.html')
def index():
return {'msg':'todo home page stuff'}
|
kcpawan/django
|
refs/heads/master
|
tests/user_commands/management/commands/transaction.py
|
553
|
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = "Say hello."
args = ''
output_transaction = True
def handle(self, *args, **options):
return 'Hello!'
|
r-lyeh/scriptorium
|
refs/heads/master
|
python/micropython/tests/io/file_seek.py
|
18
|
f = open("io/data/file1", "rb")
print(f.seek(6))
print(f.read(5))
print(f.seek(0, 1))
print(f.read(4))
print(f.seek(-6, 2))
print(f.read(20))
print(f.seek(0, 0))
print(f.read(5))
f.close()
# test text mode
f = open("io/data/file1", "rt")
print(f.seek(6))
print(f.read(5))
f.close()
|
badbytes/pymeg
|
refs/heads/master
|
meg/leadfield_parallel.py
|
1
|
# Copyright 2008 Dan Collins
# This is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# And is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Build; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# A portion of this code was used and adapted from meg_leadfield1.m function distributed with
# the matlab fieldtrip package at fcdonders, which was adapted from Luetkenhoener, Habilschrift '92.
"""Compute leadfields
pos=R point
pos= channel location vector
ori=signal channel orientation
ex. x=leadfield.calclf(channelinstance, grid) ....
will return the leadfield sum from both upper and lower coils
grid is an N X 3 array. Make sure to reshape it to 1 X 3 if Ndim=1
ex. IF....shape(grid) RETURNS (3,) THEN grid=grid.reshape(1,3)
or just make grid like... grid=array([[0, 6, 3]])"""
from numpy import * #zeros, array, dot, cross, shape, append, reshape, size
from numpy.linalg import norm
from pdf2py import pdf
from time import time,sleep
import sys, logging
from misc import progressbar as pb
from multiprocessing import Pool,Process,cpu_count
class shared_data:
def __init__(self,grid=None):
self.grid = grid
class calc:
def __init__(self,channelinstance=None,grid=None,centerofsphere=None,chlowerpos=None,chupperpos=None,chlowerdir=None,chupperdir=None):
'''calc leadfield script to use pos.py and getlf class to get the sum of upper and lower coils
returns leadfield
grid=voxel location. dimensions: N X 3'''
self.ext = 'pymlf'
global chlpos,chldir,cos,chupos,chudir
if channelinstance == None:
if chlpos == None or chupos == None or chldir == None or chudir == None:
print('if channelobject not supplied, you need to supply ch position and direction for upper and lower coil')
raise Exception('InputError')
else:
chlpos=channelinstance.chlpos;
chldir=channelinstance.chldir;
chupos=channelinstance.chupos;
chudir=channelinstance.chudir;
#check grid and guess if units are correct
if grid.max() < 15 and grid.min() > -15:
print ''
print 'Warning...!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!'
print 'Warning...!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!'
print 'Warning...!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n'
print 'Your grid point values are small. Max ==', grid.max(), 'Are you sure they are in mm\n'
ts = time()
coilsperch=2; #gradiometer config
channelinstance.getposition()
if centerofsphere == None:
self.spherecenter = array([0,0,40])#c=cos.center()
else:
self.spherecenter = centerofsphere
if array(self.spherecenter).max() < 10:
print 'COS Warning!!! Your center of sphere is quite close to 0,0,0. Make sure your units are in mm'
cos = self.spherecenter
gridshape=shape(grid)
if len(shape(grid)) == 1:
grid = grid.reshape([1,3])
print 'make sure your grid units are in mm'
if size(grid,1) != 3: #check grid dimensions
if size(grid,0) != 3:
print 'grid dimensions wrong'
return
grid = grid.transpose()
print 'reshaping grid'
global y; y = float(len(grid))/float(cpu_count())
global pbar; pbar = pb.ProgressBar().start()
#Parallel Compute Leadfields
logging.basicConfig(level=logging.DEBUG,format='(%(threadName)-10s) %(message)s',)
pool = Pool(processes=cpu_count())
self.p = pool.map(code, grid)
te = time()
print 'Done. Elapsed time', te-ts, 'seconds'
self.leadfield = squeeze(array(self.p))
del(self.p)
self.grid = grid
return
import threading
import os
def code(grid):#=None, pos=None, ori=None, cent=None):
#print("Task(%s) processid = %s" % ('layer', os.getpid()))
try: x;
except NameError: global x; x = 0
x = x +1
#print y
"""grid=voxel location. dimensions: N X 3
pos=position of channels. returned by pos.py
ori=orientation of channels. returned by pos.py"""
#danc changed hdr read to convert to mm upfront.
#pos=pos*1000 #convert channel pos from meters to mm
#pbar = pb.ProgressBar().start()
#sys.stdout.flush()
if (float(x)/float(y))*100 < 100:
pbar.update((float(x)/float(y))*100)
#for i in [10,20,30,40,50,60,70,80,90,100]:
#if (float(x)/float(y))*100 > 10:
#pass
#print (float(x)/float(y))*100, '%complete'
R = grid - cos
nchans = len(chupos)
ncoils = 2
nrank = 3
lf=zeros((ncoils,nchans,nrank))
#pbar = progressbar.ProgressBar().start()
for h in [[chupos,chudir,0],[chlpos,chldir,1]]:
#for j in range(0,len(loc)): #for each grid point
"for each chp calculate the leadfield"
chp = h[0] - cos
position = chp
orientation = h[1]
lftmp = zeros((nchans,3),float);
tmp2 = norm(R);
for i in range(0,nchans):
t = position[i]
o = orientation[i]
tmp1 = norm(t);
tmp3 = norm(t-R);
tmp4 = dot(t,R);
tmp5 = dot(t,t-R);
tmp6 = dot(R,t-R);
tmp7 = (tmp1*tmp2)**2 - tmp4**2; #% cross(r,R)**2
alpha = 1 / (-tmp3 * (tmp1*tmp3+tmp5));
A = 1/tmp3 - 2*alpha*tmp2**2 - 1/tmp1;
B = 2*alpha*tmp4;
C = -tmp6/(tmp3**3);
beta = dot(A*t + B*R + C*(t-R), o)/tmp7;
lftmp[i,:] = cross(alpha*o + beta*t, R);
#lf = append(lf, 1e-7*lftmp); #% multiply with u0/4pi
lf[h[2]] = 1e-7*lftmp; #For each coil, multiply with u0/4pi
if ncoils > 1 and ncoils <= 2:
lf = lf[1] + lf[0] #Average LeadField between the two coils
return lf
if __name__ == '__main__':
from numpy import *
from gui.gtk import progressbar
MT = progressbar.MainThread()
def leadfieldthread():
fn = '/home/danc/python/data/0611/0611SEF/e,rfhp1.0Hz,n,x,baha001-1SEF,f50lp'
from pdf2py import pdf
p = pdf.read(fn)
p.data.setchannels('meg')
grid=random.randn(3,10)
lft = calc(p.data.channels,grid)
print type(lft.leadfield), shape(lft.leadfield)
MT.main(leadfieldthread)#,progresstype='fraction')
|
olegpshenichniy/Booktype
|
refs/heads/master
|
lib/booktype/importer/utils.py
|
7
|
import os
import urllib
import importlib
from booktype.utils import config
from booktype.utils.misc import booktype_slugify
def convert_file_name(file_name):
name = os.path.basename(file_name)
if name.rfind('.') != -1:
_np = name[:name.rfind('.')]
_ext = name[name.rfind('.'):]
name = booktype_slugify(_np)+_ext
name = urllib.unquote(name)
name = name.replace(' ', '_')
return name
def get_importer_module(ext):
"""
Function to retrieve the right module to import a file into a book
based on a given extension
"""
IMPORTER_MAP = config.get_configuration('BOOKTYPE_IMPORTERS')
if ext not in IMPORTER_MAP.keys():
        raise NotImplementedError("Importer for extension {} hasn't been implemented yet".format(ext))
module_path, import_func = IMPORTER_MAP[ext]
module = importlib.import_module(module_path)
return getattr(module, import_func)
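# A minimal usage sketch, assuming an 'epub' entry exists in the
# BOOKTYPE_IMPORTERS configuration:
#     import_func = get_importer_module('epub')
#     # import_func can then be called with the importer-specific arguments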
|
dashea/pykickstart
|
refs/heads/master
|
pykickstart/commands/btrfs.py
|
2
|
#
# Chris Lumens <clumens@redhat.com>
# David Lehman <dlehman@redhat.com>
#
# Copyright 2005, 2006, 2007, 2011 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use, modify,
# copy, or redistribute it subject to the terms and conditions of the GNU
# General Public License v.2. This program is distributed in the hope that it
# will be useful, but WITHOUT ANY WARRANTY expressed or implied, including the
# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. Any Red Hat
# trademarks that are incorporated in the source code or documentation are not
# subject to the GNU General Public License and may only be used or replicated
# with the express permission of Red Hat, Inc.
#
from pykickstart.base import BaseData, KickstartCommand
from pykickstart.errors import KickstartValueError, formatErrorMsg
from pykickstart.options import KSOptionParser
import warnings
from pykickstart.i18n import _
class F17_BTRFSData(BaseData):
removedKeywords = BaseData.removedKeywords
removedAttrs = BaseData.removedAttrs
def __init__(self, *args, **kwargs):
BaseData.__init__(self, *args, **kwargs)
self.format = kwargs.get("format", True)
self.preexist = kwargs.get("preexist", False)
self.label = kwargs.get("label", "")
self.mountpoint = kwargs.get("mountpoint", "")
self.devices = kwargs.get("devices", [])
self.dataLevel = kwargs.get("data", None)
self.metaDataLevel = kwargs.get("metadata", None)
# subvolume-specific
self.subvol = kwargs.get("subvol", False)
self.name = kwargs.get("name", None) # required
def __eq__(self, y):
if not y:
return False
return self.mountpoint == y.mountpoint
def __ne__(self, y):
return not self == y
def _getArgsAsStr(self):
retval = ""
if not self.format:
retval += " --noformat"
if self.preexist:
retval += " --useexisting"
if self.label:
retval += " --label=%s" % self.label
if self.dataLevel:
retval += " --data=%s" % self.dataLevel.lower()
if self.metaDataLevel:
retval += " --metadata=%s" % self.metaDataLevel.lower()
if self.subvol:
retval += " --subvol --name=%s" % self.name
return retval
def __str__(self):
retval = BaseData.__str__(self)
retval += "btrfs %s" % self.mountpoint
retval += self._getArgsAsStr()
return retval + " " + " ".join(self.devices) + "\n"
class F17_BTRFS(KickstartCommand):
removedKeywords = KickstartCommand.removedKeywords
removedAttrs = KickstartCommand.removedAttrs
def __init__(self, writePriority=132, *args, **kwargs):
KickstartCommand.__init__(self, writePriority, *args, **kwargs)
self.op = self._getParser()
# A dict of all the RAID levels we support. This means that if we
# support more levels in the future, subclasses don't have to
# duplicate too much.
self.levelMap = { "raid0": "raid0", "0": "raid0",
"raid1": "raid1", "1": "raid1",
"raid10": "raid10", "10": "raid10",
"single": "single" }
self.btrfsList = kwargs.get("btrfsList", [])
def __str__(self):
retval = ""
for btr in self.btrfsList:
retval += btr.__str__()
return retval
def _getParser(self):
# Have to be a little more complicated to set two values.
def btrfs_cb (option, opt_str, value, parser):
parser.values.format = False
parser.values.preexist = True
def level_cb (option, opt_str, value, parser):
if value.lower() in self.levelMap:
parser.values.ensure_value(option.dest, self.levelMap[value.lower()])
op = KSOptionParser()
op.add_option("--noformat", action="callback", callback=btrfs_cb,
dest="format", default=True, nargs=0)
op.add_option("--useexisting", action="callback", callback=btrfs_cb,
dest="preexist", default=False, nargs=0)
# label, data, metadata
op.add_option("--label", dest="label", default="")
op.add_option("--data", dest="dataLevel", action="callback",
callback=level_cb, type="string", nargs=1)
op.add_option("--metadata", dest="metaDataLevel", action="callback",
callback=level_cb, type="string", nargs=1)
#
# subvolumes
#
op.add_option("--subvol", dest="subvol", action="store_true",
default=False)
# parent must be a device spec (LABEL, UUID, &c)
op.add_option("--parent", dest="parent", default="")
op.add_option("--name", dest="name", default="")
return op
def parse(self, args):
(opts, extra) = self.op.parse_args(args=args, lineno=self.lineno)
data = self.handler.BTRFSData()
self._setToObj(self.op, opts, data)
data.lineno = self.lineno
if len(extra) == 0:
raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("btrfs must be given a mountpoint")))
if len(extra) == 1 and not data.subvol:
raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("btrfs must be given a list of partitions")))
elif len(extra) == 1:
raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("btrfs subvol requires specification of parent volume")))
if data.subvol and not data.name:
raise KickstartValueError(formatErrorMsg(self.lineno, msg=_("btrfs subvolume requires a name")))
data.mountpoint = extra[0]
data.devices = extra[1:]
# Check for duplicates in the data list.
if data in self.dataList():
            warnings.warn(_("A btrfs volume with the mountpoint %s has already been defined.") % data.mountpoint)
return data
def dataList(self):
return self.btrfsList
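# Illustrative kickstart lines that parse() accepts (device names and labels
# below are hypothetical):
#     btrfs /data --data=raid1 --metadata=raid1 btrfs.01 btrfs.02
#     btrfs /data/sub --subvol --name=mysub LABEL=data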
|
googleapis/python-bigquery-datatransfer
|
refs/heads/master
|
google/cloud/bigquery_datatransfer_v1/types/transfer.py
|
1
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.protobuf import struct_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
from google.rpc import status_pb2 # type: ignore
__protobuf__ = proto.module(
package="google.cloud.bigquery.datatransfer.v1",
manifest={
"TransferType",
"TransferState",
"EmailPreferences",
"ScheduleOptions",
"TransferConfig",
"TransferRun",
"TransferMessage",
},
)
class TransferType(proto.Enum):
r"""DEPRECATED. Represents data transfer type."""
_pb_options = {"deprecated": True}
TRANSFER_TYPE_UNSPECIFIED = 0
BATCH = 1
STREAMING = 2
class TransferState(proto.Enum):
r"""Represents data transfer run state."""
TRANSFER_STATE_UNSPECIFIED = 0
PENDING = 2
RUNNING = 3
SUCCEEDED = 4
FAILED = 5
CANCELLED = 6
class EmailPreferences(proto.Message):
r"""Represents preferences for sending email notifications for
transfer run events.
Attributes:
enable_failure_email (bool):
If true, email notifications will be sent on
transfer run failures.
"""
enable_failure_email = proto.Field(proto.BOOL, number=1,)
class ScheduleOptions(proto.Message):
r"""Options customizing the data transfer schedule.
Attributes:
disable_auto_scheduling (bool):
If true, automatic scheduling of data
transfer runs for this configuration will be
            disabled. The runs can be started on an ad-hoc
            basis using the StartManualTransferRuns API. When
automatic scheduling is disabled, the
TransferConfig.schedule field will be ignored.
start_time (google.protobuf.timestamp_pb2.Timestamp):
Specifies time to start scheduling transfer
runs. The first run will be scheduled at or
after the start time according to a recurrence
pattern defined in the schedule string. The
start time can be changed at any moment. The
            time when a data transfer can be triggered
manually is not limited by this option.
end_time (google.protobuf.timestamp_pb2.Timestamp):
Defines time to stop scheduling transfer
runs. A transfer run cannot be scheduled at or
after the end time. The end time can be changed
at any moment. The time when a data transfer can
            be triggered manually is not limited by this
option.
"""
disable_auto_scheduling = proto.Field(proto.BOOL, number=3,)
start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,)
end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,)
class TransferConfig(proto.Message):
r"""Represents a data transfer configuration. A transfer configuration
contains all metadata needed to perform a data transfer. For
example, ``destination_dataset_id`` specifies where data should be
stored. When a new transfer configuration is created, the specified
``destination_dataset_id`` is created when needed and shared with
the appropriate data source service account.
Attributes:
name (str):
The resource name of the transfer config. Transfer config
names have the form of
``projects/{project_id}/locations/{region}/transferConfigs/{config_id}``.
The name is automatically generated based on the config_id
specified in CreateTransferConfigRequest along with
            project_id and region. If config_id is not provided, a value
            (usually a uuid, though this is not guaranteed or required)
            will be generated for config_id.
destination_dataset_id (str):
The BigQuery target dataset id.
display_name (str):
User specified display name for the data
transfer.
data_source_id (str):
Data source id. Cannot be changed once data
transfer is created.
params (google.protobuf.struct_pb2.Struct):
Data transfer specific parameters.
schedule (str):
Data transfer schedule. If the data source does not support
a custom schedule, this should be empty. If it is empty, the
default value for the data source will be used. The
specified times are in UTC. Examples of valid format:
``1st,3rd monday of month 15:30``,
``every wed,fri of jan,jun 13:15``, and
``first sunday of quarter 00:00``. See more explanation
about the format here:
https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format
NOTE: the granularity should be at least 8 hours, or less
frequent.
schedule_options (google.cloud.bigquery_datatransfer_v1.types.ScheduleOptions):
Options customizing the data transfer
schedule.
data_refresh_window_days (int):
The number of days to look back to automatically refresh the
data. For example, if ``data_refresh_window_days = 10``,
then every day BigQuery reingests data for [today-10,
today-1], rather than ingesting data for just [today-1].
Only valid if the data source supports the feature. Set the
value to 0 to use the default value.
disabled (bool):
Is this config disabled. When set to true, no
runs are scheduled for a given transfer.
update_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. Data transfer modification time.
Ignored by server on input.
next_run_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. Next time when data transfer
will run.
state (google.cloud.bigquery_datatransfer_v1.types.TransferState):
Output only. State of the most recently
updated transfer run.
user_id (int):
Deprecated. Unique ID of the user on whose
behalf transfer is done.
dataset_region (str):
Output only. Region in which BigQuery dataset
is located.
notification_pubsub_topic (str):
Pub/Sub topic where notifications will be
sent after transfer runs associated with this
transfer config finish.
email_preferences (google.cloud.bigquery_datatransfer_v1.types.EmailPreferences):
Email notifications will be sent according to
these preferences to the email address of the
user who owns this transfer config.
"""
name = proto.Field(proto.STRING, number=1,)
destination_dataset_id = proto.Field(proto.STRING, number=2, oneof="destination",)
display_name = proto.Field(proto.STRING, number=3,)
data_source_id = proto.Field(proto.STRING, number=5,)
params = proto.Field(proto.MESSAGE, number=9, message=struct_pb2.Struct,)
schedule = proto.Field(proto.STRING, number=7,)
schedule_options = proto.Field(proto.MESSAGE, number=24, message="ScheduleOptions",)
data_refresh_window_days = proto.Field(proto.INT32, number=12,)
disabled = proto.Field(proto.BOOL, number=13,)
update_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,)
next_run_time = proto.Field(
proto.MESSAGE, number=8, message=timestamp_pb2.Timestamp,
)
state = proto.Field(proto.ENUM, number=10, enum="TransferState",)
user_id = proto.Field(proto.INT64, number=11,)
dataset_region = proto.Field(proto.STRING, number=14,)
notification_pubsub_topic = proto.Field(proto.STRING, number=15,)
email_preferences = proto.Field(
proto.MESSAGE, number=18, message="EmailPreferences",
)
class TransferRun(proto.Message):
r"""Represents a data transfer run.
Attributes:
name (str):
The resource name of the transfer run. Transfer run names
have the form
``projects/{project_id}/locations/{location}/transferConfigs/{config_id}/runs/{run_id}``.
The name is ignored when creating a transfer run.
schedule_time (google.protobuf.timestamp_pb2.Timestamp):
Minimum time after which a transfer run can
be started.
run_time (google.protobuf.timestamp_pb2.Timestamp):
For batch transfer runs, specifies the date
            and time that the data should be ingested.
error_status (google.rpc.status_pb2.Status):
Status of the transfer run.
start_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. Time when transfer run was
started. Parameter ignored by server for input
requests.
end_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. Time when transfer run ended.
Parameter ignored by server for input requests.
update_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. Last time the data transfer run
state was updated.
params (google.protobuf.struct_pb2.Struct):
Output only. Data transfer specific
parameters.
destination_dataset_id (str):
Output only. The BigQuery target dataset id.
data_source_id (str):
Output only. Data source id.
state (google.cloud.bigquery_datatransfer_v1.types.TransferState):
Data transfer run state. Ignored for input
requests.
user_id (int):
Deprecated. Unique ID of the user on whose
behalf transfer is done.
schedule (str):
Output only. Describes the schedule of this transfer run if
it was created as part of a regular schedule. For batch
transfer runs that are scheduled manually, this is empty.
NOTE: the system might choose to delay the schedule
depending on the current load, so ``schedule_time`` doesn't
always match this.
notification_pubsub_topic (str):
Output only. Pub/Sub topic where a
notification will be sent after this transfer
run finishes
email_preferences (google.cloud.bigquery_datatransfer_v1.types.EmailPreferences):
Output only. Email notifications will be sent
according to these preferences to the email
address of the user who owns the transfer config
this run was derived from.
"""
name = proto.Field(proto.STRING, number=1,)
schedule_time = proto.Field(
proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp,
)
run_time = proto.Field(proto.MESSAGE, number=10, message=timestamp_pb2.Timestamp,)
error_status = proto.Field(proto.MESSAGE, number=21, message=status_pb2.Status,)
start_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,)
end_time = proto.Field(proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp,)
update_time = proto.Field(proto.MESSAGE, number=6, message=timestamp_pb2.Timestamp,)
params = proto.Field(proto.MESSAGE, number=9, message=struct_pb2.Struct,)
destination_dataset_id = proto.Field(proto.STRING, number=2, oneof="destination",)
data_source_id = proto.Field(proto.STRING, number=7,)
state = proto.Field(proto.ENUM, number=8, enum="TransferState",)
user_id = proto.Field(proto.INT64, number=11,)
schedule = proto.Field(proto.STRING, number=12,)
notification_pubsub_topic = proto.Field(proto.STRING, number=23,)
email_preferences = proto.Field(
proto.MESSAGE, number=25, message="EmailPreferences",
)
class TransferMessage(proto.Message):
r"""Represents a user facing message for a particular data
transfer run.
Attributes:
message_time (google.protobuf.timestamp_pb2.Timestamp):
Time when message was logged.
severity (google.cloud.bigquery_datatransfer_v1.types.TransferMessage.MessageSeverity):
Message severity.
message_text (str):
Message text.
"""
class MessageSeverity(proto.Enum):
r"""Represents data transfer user facing message severity."""
MESSAGE_SEVERITY_UNSPECIFIED = 0
INFO = 1
WARNING = 2
ERROR = 3
message_time = proto.Field(
proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,
)
severity = proto.Field(proto.ENUM, number=2, enum=MessageSeverity,)
message_text = proto.Field(proto.STRING, number=3,)
__all__ = tuple(sorted(__protobuf__.manifest))
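# Illustrative construction of a TransferConfig message (field values below are
# hypothetical; the schedule string is one of the documented example formats):
#     config = TransferConfig(
#         display_name="my transfer",
#         data_source_id="scheduled_query",
#         schedule="first sunday of quarter 00:00",
#     )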
|
vadimtk/chrome4sdp
|
refs/heads/master
|
tools/telemetry/telemetry/internal/backends/chrome/extension_backend.py
|
32
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import collections
from telemetry.internal.backends.chrome_inspector import inspector_backend_list
from telemetry.internal.browser import extension_page
class ExtensionBackendList(inspector_backend_list.InspectorBackendList):
"""A dynamic sequence of extension_page.ExtensionPages."""
def __init__(self, browser_backend):
super(ExtensionBackendList, self).__init__(browser_backend)
def ShouldIncludeContext(self, context):
return context['url'].startswith('chrome-extension://')
def CreateWrapper(self, inspector_backend):
return extension_page.ExtensionPage(inspector_backend)
class ExtensionBackendDict(collections.Mapping):
"""A dynamic mapping of extension_id to extension_page.ExtensionPages."""
def __init__(self, browser_backend):
self._extension_backend_list = ExtensionBackendList(browser_backend)
def __getitem__(self, extension_id):
extensions = []
for context_id in self._extension_backend_list.IterContextIds():
if self.ContextIdToExtensionId(context_id) == extension_id:
extensions.append(
self._extension_backend_list.GetBackendFromContextId(context_id))
if not extensions:
raise KeyError('Cannot find an extension with id=%s' % extension_id)
return extensions
def __iter__(self):
for context_id in self._extension_backend_list.IterContextIds():
yield self._extension_backend_list.GetBackendFromContextId(context_id)
def __len__(self):
return len(self._extension_backend_list)
def ContextIdToExtensionId(self, context_id):
context = self._extension_backend_list.GetContextInfo(context_id)
return extension_page.UrlToExtensionId(context['url'])
|
Jaironlanda/yowsup
|
refs/heads/master
|
yowsup/layers/protocol_groups/protocolentities/iq_groups_participants_add_success.py
|
61
|
from yowsup.structs import ProtocolTreeNode
from yowsup.layers.protocol_iq.protocolentities import ResultIqProtocolEntity
class SuccessAddParticipantsIqProtocolEntity(ResultIqProtocolEntity):
'''
<iq type="result" from="{{group_jid}}" id="{{id}}">
<add type="success" participant="{{jid}}"></add>
<add type="success" participant="{{jid}}"></add>
</iq>
'''
def __init__(self, _id, groupId, participantList):
super(SuccessAddParticipantsIqProtocolEntity, self).__init__(_from = groupId, _id = _id)
self.setProps(groupId, participantList)
def setProps(self, groupId, participantList):
self.groupId = groupId
self.participantList = participantList
self.action = 'add'
def getAction(self):
return self.action
def toProtocolTreeNode(self):
node = super(SuccessAddParticipantsIqProtocolEntity, self).toProtocolTreeNode()
participantNodes = [
ProtocolTreeNode("add", {
"type": "success",
"participant": participant
})
for participant in self.participantList
]
node.addChildren(participantNodes)
return node
@staticmethod
def fromProtocolTreeNode(node):
entity = super(SuccessAddParticipantsIqProtocolEntity, SuccessAddParticipantsIqProtocolEntity).fromProtocolTreeNode(node)
entity.__class__ = SuccessAddParticipantsIqProtocolEntity
participantList = []
for participantNode in node.getAllChildren():
if participantNode["type"]=="success":
participantList.append(participantNode["participant"])
entity.setProps(node.getAttributeValue("from"), participantList)
return entity
|
Filechaser/nzbToMedia
|
refs/heads/master
|
core/nzbToMediaConfig.py
|
2
|
# coding=utf-8
from six import iteritems
import os
import shutil
import copy
import core
from configobj import *
from core import logger
from itertools import chain
class Section(configobj.Section, object):
def isenabled(section):
        # Checks whether this section is enabled. A section without subsections is
        # returned as-is when its 'enabled' flag is 1; otherwise a copy is returned
        # with disabled (and empty) subsections removed.
if not section.sections:
try:
value = list(ConfigObj.find_key(section, 'enabled'))[0]
except:
value = 0
if int(value) == 1:
return section
else:
to_return = copy.deepcopy(section)
for section_name, subsections in to_return.items():
for subsection in subsections:
try:
value = list(ConfigObj.find_key(subsections, 'enabled'))[0]
except:
value = 0
if int(value) != 1:
del to_return[section_name][subsection]
# cleanout empty sections and subsections
for key in [k for (k, v) in to_return.items() if not v]:
del to_return[key]
return to_return
def findsection(section, key):
to_return = copy.deepcopy(section)
for subsection in to_return:
try:
value = list(ConfigObj.find_key(to_return[subsection], key))[0]
except:
value = None
if not value:
del to_return[subsection]
else:
for category in to_return[subsection]:
if category != key:
del to_return[subsection][category]
# cleanout empty sections and subsections
for key in [k for (k, v) in to_return.items() if not v]:
del to_return[key]
return to_return
def __getitem__(self, key):
if key in self.keys():
return dict.__getitem__(self, key)
to_return = copy.deepcopy(self)
for section, subsections in to_return.items():
if section in key:
continue
if isinstance(subsections, Section) and subsections.sections:
for subsection, options in subsections.items():
if subsection in key:
continue
if key in options:
return options[key]
del subsections[subsection]
else:
if section not in key:
del to_return[section]
# cleanout empty sections and subsections
for key in [k for (k, v) in to_return.items() if not v]:
del to_return[key]
return to_return
class ConfigObj(configobj.ConfigObj, Section):
def __init__(self, *args, **kw):
if len(args) == 0:
args = (core.CONFIG_FILE,)
super(configobj.ConfigObj, self).__init__(*args, **kw)
self.interpolation = False
@staticmethod
def find_key(node, kv):
if isinstance(node, list):
for i in node:
for x in ConfigObj.find_key(i, kv):
yield x
elif isinstance(node, dict):
if kv in node:
yield node[kv]
for j in node.values():
for x in ConfigObj.find_key(j, kv):
yield x
@staticmethod
def migrate():
global CFG_NEW, CFG_OLD
CFG_NEW = None
CFG_OLD = None
try:
# check for autoProcessMedia.cfg and create if it does not exist
if not os.path.isfile(core.CONFIG_FILE):
shutil.copyfile(core.CONFIG_SPEC_FILE, core.CONFIG_FILE)
CFG_OLD = config(core.CONFIG_FILE)
except Exception as error:
logger.debug("Error {msg} when copying to .cfg".format(msg=error))
try:
# check for autoProcessMedia.cfg.spec and create if it does not exist
if not os.path.isfile(core.CONFIG_SPEC_FILE):
shutil.copyfile(core.CONFIG_FILE, core.CONFIG_SPEC_FILE)
CFG_NEW = config(core.CONFIG_SPEC_FILE)
except Exception as error:
logger.debug("Error {msg} when copying to .spec".format(msg=error))
# check for autoProcessMedia.cfg and autoProcessMedia.cfg.spec and if they don't exist return and fail
if CFG_NEW is None or CFG_OLD is None:
return False
subsections = {}
# gather all new-style and old-style sub-sections
for newsection, newitems in CFG_NEW.items():
if CFG_NEW[newsection].sections:
subsections.update({newsection: CFG_NEW[newsection].sections})
for section, items in CFG_OLD.items():
if CFG_OLD[section].sections:
subsections.update({section: CFG_OLD[section].sections})
for option, value in CFG_OLD[section].items():
if option in ["category", "cpsCategory", "sbCategory", "hpCategory", "mlCategory", "gzCategory", "raCategory", "ndCategory"]:
if not isinstance(value, list):
value = [value]
# add subsection
subsections.update({section: value})
CFG_OLD[section].pop(option)
continue
def cleanup_values(values, section):
for option, value in iteritems(values):
if section in ['CouchPotato']:
                    if option == 'outputDirectory':
CFG_NEW['Torrent'][option] = os.path.split(os.path.normpath(value))[0]
values.pop(option)
if section in ['CouchPotato', 'HeadPhones', 'Gamez']:
if option in ['username', 'password']:
values.pop(option)
if section in ["SickBeard", "Mylar"]:
if option == "wait_for": # remove old format
values.pop(option)
if section in ["SickBeard", "NzbDrone"]:
if option == "failed_fork": # change this old format
values['failed'] = 'auto'
values.pop(option)
if option == "outputDirectory": # move this to new location format
CFG_NEW['Torrent'][option] = os.path.split(os.path.normpath(value))[0]
values.pop(option)
if section in ["Torrent"]:
if option in ["compressedExtensions", "mediaExtensions", "metaExtensions", "minSampleSize"]:
CFG_NEW['Extensions'][option] = value
values.pop(option)
if option == "useLink": # Sym links supported now as well.
if value in ['1', 1]:
value = 'hard'
elif value in ['0', 0]:
value = 'no'
values[option] = value
if option == "forceClean":
CFG_NEW['General']['force_clean'] = value
values.pop(option)
if section in ["Transcoder"]:
if option in ["niceness"]:
CFG_NEW['Posix'][option] = value
values.pop(option)
if option == "remote_path":
if value and value not in ['0', '1', 0, 1]:
value = 1
elif not value:
value = 0
values[option] = value
# remove any options that we no longer need so they don't migrate into our new config
if not list(ConfigObj.find_key(CFG_NEW, option)):
try:
values.pop(option)
except:
pass
return values
def process_section(section, subsections=None):
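# Copy the cleaned-up options from CFG_OLD into CFG_NEW, creating the category
# sub-section under the target section if it does not exist yet.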
if subsections:
for subsection in subsections:
if subsection in CFG_OLD.sections:
values = cleanup_values(CFG_OLD[subsection], section)
if subsection not in CFG_NEW[section].sections:
CFG_NEW[section][subsection] = {}
for option, value in values.items():
CFG_NEW[section][subsection][option] = value
elif subsection in CFG_OLD[section].sections:
values = cleanup_values(CFG_OLD[section][subsection], section)
if subsection not in CFG_NEW[section].sections:
CFG_NEW[section][subsection] = {}
for option, value in values.items():
CFG_NEW[section][subsection][option] = value
else:
values = cleanup_values(CFG_OLD[section], section)
if section not in CFG_NEW.sections:
CFG_NEW[section] = {}
for option, value in values.items():
CFG_NEW[section][option] = value
# convert old-style categories to new-style sub-sections
for section in CFG_OLD.keys():
subsection = None
if section in list(chain.from_iterable(subsections.values())):
subsection = section
section = ''.join([k for k, v in iteritems(subsections) if subsection in v])
process_section(section, subsection)
elif section in subsections.keys():
subsection = subsections[section]
process_section(section, subsection)
elif section in CFG_OLD.keys():
process_section(section, subsection)
# create a backup of our old config
CFG_OLD.filename = "{config}.old".format(config=core.CONFIG_FILE)
CFG_OLD.write()
# write our new config to autoProcessMedia.cfg
CFG_NEW.filename = core.CONFIG_FILE
CFG_NEW.write()
return True
@staticmethod
def addnzbget():
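# Apply settings exported by NZBGet (NZBOP_*/NZBPO_* environment variables)
# on top of the current config and return the updated config object.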
# load configs into memory
CFG_NEW = config()
try:
if 'NZBPO_NDCATEGORY' in os.environ and 'NZBPO_SBCATEGORY' in os.environ:
if os.environ['NZBPO_NDCATEGORY'] == os.environ['NZBPO_SBCATEGORY']:
logger.warning("{x} category is set for SickBeard and Sonarr. "
"Please check your config in NZBGet".format
(x=os.environ['NZBPO_NDCATEGORY']))
if 'NZBPO_RACATEGORY' in os.environ and 'NZBPO_CPSCATEGORY' in os.environ:
if os.environ['NZBPO_RACATEGORY'] == os.environ['NZBPO_CPSCATEGORY']:
logger.warning("{x} category is set for CouchPotato and Radarr. "
"Please check your config in NZBGet".format
(x=os.environ['NZBPO_RACATEGORY']))
if 'NZBPO_LICATEGORY' in os.environ and 'NZBPO_HPCATEGORY' in os.environ:
if os.environ['NZBPO_LICATEGORY'] == os.environ['NZBPO_HPCATEGORY']:
logger.warning("{x} category is set for HeadPhones and Lidarr. "
"Please check your config in NZBGet".format
(x=os.environ['NZBPO_LICATEGORY']))
section = "Nzb"
key = 'NZBOP_DESTDIR'
if key in os.environ:
option = 'default_downloadDirectory'
value = os.environ[key]
CFG_NEW[section][option] = value
section = "General"
envKeys = ['AUTO_UPDATE', 'CHECK_MEDIA', 'SAFE_MODE', 'NO_EXTRACT_FAILED']
cfgKeys = ['auto_update', 'check_media', 'safe_mode', 'no_extract_failed']
for index in range(len(envKeys)):
key = 'NZBPO_{index}'.format(index=envKeys[index])
if key in os.environ:
option = cfgKeys[index]
value = os.environ[key]
CFG_NEW[section][option] = value
section = "Network"
envKeys = ['MOUNTPOINTS']
cfgKeys = ['mount_points']
for index in range(len(envKeys)):
key = 'NZBPO_{index}'.format(index=envKeys[index])
if key in os.environ:
option = cfgKeys[index]
value = os.environ[key]
CFG_NEW[section][option] = value
section = "CouchPotato"
envCatKey = 'NZBPO_CPSCATEGORY'
envKeys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'METHOD', 'DELETE_FAILED', 'REMOTE_PATH',
'WAIT_FOR', 'WATCH_DIR', 'OMDBAPIKEY']
cfgKeys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'method', 'delete_failed', 'remote_path',
'wait_for', 'watch_dir', 'omdbapikey']
if envCatKey in os.environ:
for index in range(len(envKeys)):
key = 'NZBPO_CPS{index}'.format(index=envKeys[index])
if key in os.environ:
option = cfgKeys[index]
value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections:
CFG_NEW[section][os.environ[envCatKey]] = {}
CFG_NEW[section][os.environ[envCatKey]][option] = value
CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1
if os.environ[envCatKey] in CFG_NEW['Radarr'].sections:
CFG_NEW['Radarr'][envCatKey]['enabled'] = 0
section = "SickBeard"
envCatKey = 'NZBPO_SBCATEGORY'
envKeys = ['ENABLED', 'HOST', 'PORT', 'USERNAME', 'PASSWORD', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK',
'DELETE_FAILED', 'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'REMOTE_PATH', 'PROCESS_METHOD']
cfgKeys = ['enabled', 'host', 'port', 'username', 'password', 'ssl', 'web_root', 'watch_dir', 'fork',
'delete_failed', 'Torrent_NoLink', 'nzbExtractionBy', 'remote_path', 'process_method']
if envCatKey in os.environ:
for index in range(len(envKeys)):
key = 'NZBPO_SB{index}'.format(index=envKeys[index])
if key in os.environ:
option = cfgKeys[index]
value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections:
CFG_NEW[section][os.environ[envCatKey]] = {}
CFG_NEW[section][os.environ[envCatKey]][option] = value
CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1
if os.environ[envCatKey] in CFG_NEW['NzbDrone'].sections:
CFG_NEW['NzbDrone'][envCatKey]['enabled'] = 0
section = "HeadPhones"
envCatKey = 'NZBPO_HPCATEGORY'
envKeys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'WAIT_FOR', 'WATCH_DIR', 'REMOTE_PATH']
cfgKeys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'wait_for', 'watch_dir', 'remote_path']
if envCatKey in os.environ:
for index in range(len(envKeys)):
key = 'NZBPO_HP{index}'.format(index=envKeys[index])
if key in os.environ:
option = cfgKeys[index]
value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections:
CFG_NEW[section][os.environ[envCatKey]] = {}
CFG_NEW[section][os.environ[envCatKey]][option] = value
CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1
section = "Mylar"
envCatKey = 'NZBPO_MYCATEGORY'
envKeys = ['ENABLED', 'HOST', 'PORT', 'USERNAME', 'PASSWORD', 'APIKEY', 'SSL', 'WEB_ROOT', 'WATCH_DIR',
'REMOTE_PATH']
cfgKeys = ['enabled', 'host', 'port', 'username', 'password', 'apikey', 'ssl', 'web_root', 'watch_dir',
'remote_path']
if envCatKey in os.environ:
for index in range(len(envKeys)):
key = 'NZBPO_MY{index}'.format(index=envKeys[index])
if key in os.environ:
option = cfgKeys[index]
value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections:
CFG_NEW[section][os.environ[envCatKey]] = {}
CFG_NEW[section][os.environ[envCatKey]][option] = value
CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1
section = "Gamez"
envCatKey = 'NZBPO_GZCATEGORY'
envKeys = ['ENABLED', 'APIKEY', 'HOST', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'LIBRARY', 'REMOTE_PATH']
cfgKeys = ['enabled', 'apikey', 'host', 'port', 'ssl', 'web_root', 'watch_dir', 'library', 'remote_path']
if envCatKey in os.environ:
for index in range(len(envKeys)):
key = 'NZBPO_GZ{index}'.format(index=envKeys[index])
if key in os.environ:
option = cfgKeys[index]
value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections:
CFG_NEW[section][os.environ[envCatKey]] = {}
CFG_NEW[section][os.environ[envCatKey]][option] = value
CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1
section = "NzbDrone"
envCatKey = 'NZBPO_NDCATEGORY'
envKeys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED',
'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH']
cfgKeys = ['enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed',
'Torrent_NoLink', 'nzbExtractionBy', 'wait_for', 'delete_failed', 'remote_path']
if envCatKey in os.environ:
for index in range(len(envKeys)):
key = 'NZBPO_ND{index}'.format(index=envKeys[index])
if key in os.environ:
option = cfgKeys[index]
value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections:
CFG_NEW[section][os.environ[envCatKey]] = {}
CFG_NEW[section][os.environ[envCatKey]][option] = value
CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1
if os.environ[envCatKey] in CFG_NEW['SickBeard'].sections:
CFG_NEW['SickBeard'][envCatKey]['enabled'] = 0
section = "Radarr"
envCatKey = 'NZBPO_RACATEGORY'
envKeys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED',
'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH', 'OMDBAPIKEY']
cfgKeys = ['enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed',
'Torrent_NoLink', 'nzbExtractionBy', 'wait_for', 'delete_failed', 'remote_path', 'omdbapikey']
if envCatKey in os.environ:
for index in range(len(envKeys)):
key = 'NZBPO_RA{index}'.format(index=envKeys[index])
if key in os.environ:
option = cfgKeys[index]
value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections:
CFG_NEW[section][os.environ[envCatKey]] = {}
CFG_NEW[section][os.environ[envCatKey]][option] = value
CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1
if os.environ[envCatKey] in CFG_NEW['CouchPotato'].sections:
CFG_NEW['CouchPotato'][envCatKey]['enabled'] = 0
section = "Lidarr"
envCatKey = 'NZBPO_LICATEGORY'
envKeys = ['ENABLED', 'HOST', 'APIKEY', 'PORT', 'SSL', 'WEB_ROOT', 'WATCH_DIR', 'FORK', 'DELETE_FAILED',
'TORRENT_NOLINK', 'NZBEXTRACTIONBY', 'WAIT_FOR', 'DELETE_FAILED', 'REMOTE_PATH']
cfgKeys = ['enabled', 'host', 'apikey', 'port', 'ssl', 'web_root', 'watch_dir', 'fork', 'delete_failed',
'Torrent_NoLink', 'nzbExtractionBy', 'wait_for', 'delete_failed', 'remote_path']
if envCatKey in os.environ:
for index in range(len(envKeys)):
key = 'NZBPO_LI{index}'.format(index=envKeys[index])
if key in os.environ:
option = cfgKeys[index]
value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections:
CFG_NEW[section][os.environ[envCatKey]] = {}
CFG_NEW[section][os.environ[envCatKey]][option] = value
CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1
if os.environ[envCatKey] in CFG_NEW['HeadPhones'].sections:
CFG_NEW['HeadPhones'][envCatKey]['enabled'] = 0
section = "Extensions"
envKeys = ['COMPRESSEDEXTENSIONS', 'MEDIAEXTENSIONS', 'METAEXTENSIONS']
cfgKeys = ['compressedExtensions', 'mediaExtensions', 'metaExtensions']
for index in range(len(envKeys)):
key = 'NZBPO_{index}'.format(index=envKeys[index])
if key in os.environ:
option = cfgKeys[index]
value = os.environ[key]
CFG_NEW[section][option] = value
section = "Posix"
envKeys = ['NICENESS', 'IONICE_CLASS', 'IONICE_CLASSDATA']
cfgKeys = ['niceness', 'ionice_class', 'ionice_classdata']
for index in range(len(envKeys)):
key = 'NZBPO_{index}'.format(index=envKeys[index])
if key in os.environ:
option = cfgKeys[index]
value = os.environ[key]
CFG_NEW[section][option] = value
section = "Transcoder"
envKeys = ['TRANSCODE', 'DUPLICATE', 'IGNOREEXTENSIONS', 'OUTPUTFASTSTART', 'OUTPUTVIDEOPATH',
'PROCESSOUTPUT', 'AUDIOLANGUAGE', 'ALLAUDIOLANGUAGES', 'SUBLANGUAGES',
'ALLSUBLANGUAGES', 'EMBEDSUBS', 'BURNINSUBTITLE', 'EXTRACTSUBS', 'EXTERNALSUBDIR',
'OUTPUTDEFAULT', 'OUTPUTVIDEOEXTENSION', 'OUTPUTVIDEOCODEC', 'VIDEOCODECALLOW',
'OUTPUTVIDEOPRESET', 'OUTPUTVIDEOFRAMERATE', 'OUTPUTVIDEOBITRATE', 'OUTPUTAUDIOCODEC',
'AUDIOCODECALLOW', 'OUTPUTAUDIOBITRATE', 'OUTPUTQUALITYPERCENT', 'GETSUBS',
'OUTPUTAUDIOTRACK2CODEC', 'AUDIOCODEC2ALLOW', 'OUTPUTAUDIOTRACK2BITRATE',
'OUTPUTAUDIOOTHERCODEC', 'AUDIOOTHERCODECALLOW', 'OUTPUTAUDIOOTHERBITRATE',
'OUTPUTSUBTITLECODEC', 'OUTPUTAUDIOCHANNELS', 'OUTPUTAUDIOTRACK2CHANNELS',
'OUTPUTAUDIOOTHERCHANNELS', 'OUTPUTVIDEORESOLUTION']
cfgKeys = ['transcode', 'duplicate', 'ignoreExtensions', 'outputFastStart', 'outputVideoPath',
'processOutput', 'audioLanguage', 'allAudioLanguages', 'subLanguages',
'allSubLanguages', 'embedSubs', 'burnInSubtitle', 'extractSubs', 'externalSubDir',
'outputDefault', 'outputVideoExtension', 'outputVideoCodec', 'VideoCodecAllow',
'outputVideoPreset', 'outputVideoFramerate', 'outputVideoBitrate', 'outputAudioCodec',
'AudioCodecAllow', 'outputAudioBitrate', 'outputQualityPercent', 'getSubs',
'outputAudioTrack2Codec', 'AudioCodec2Allow', 'outputAudioTrack2Bitrate',
'outputAudioOtherCodec', 'AudioOtherCodecAllow', 'outputAudioOtherBitrate',
'outputSubtitleCodec', 'outputAudioChannels', 'outputAudioTrack2Channels',
'outputAudioOtherChannels', 'outputVideoResolution']
for index in range(len(envKeys)):
key = 'NZBPO_{index}'.format(index=envKeys[index])
if key in os.environ:
option = cfgKeys[index]
value = os.environ[key]
CFG_NEW[section][option] = value
section = "WakeOnLan"
envKeys = ['WAKE', 'HOST', 'PORT', 'MAC']
cfgKeys = ['wake', 'host', 'port', 'mac']
for index in range(len(envKeys)):
key = 'NZBPO_WOL{index}'.format(index=envKeys[index])
if key in os.environ:
option = cfgKeys[index]
value = os.environ[key]
CFG_NEW[section][option] = value
section = "UserScript"
envCatKey = 'NZBPO_USCATEGORY'
envKeys = ['USER_SCRIPT_MEDIAEXTENSIONS', 'USER_SCRIPT_PATH', 'USER_SCRIPT_PARAM', 'USER_SCRIPT_RUNONCE',
'USER_SCRIPT_SUCCESSCODES', 'USER_SCRIPT_CLEAN', 'USDELAY', 'USREMOTE_PATH']
cfgKeys = ['user_script_mediaExtensions', 'user_script_path', 'user_script_param', 'user_script_runOnce',
'user_script_successCodes', 'user_script_clean', 'delay', 'remote_path']
if envCatKey in os.environ:
for index in range(len(envKeys)):
key = 'NZBPO_{index}'.format(index=envKeys[index])
if key in os.environ:
option = cfgKeys[index]
value = os.environ[key]
if os.environ[envCatKey] not in CFG_NEW[section].sections:
CFG_NEW[section][os.environ[envCatKey]] = {}
CFG_NEW[section][os.environ[envCatKey]][option] = value
CFG_NEW[section][os.environ[envCatKey]]['enabled'] = 1
except Exception as error:
logger.debug("Error {msg} when applying NZBGet config".format(msg=error))
try:
# write our new config to autoProcessMedia.cfg
CFG_NEW.filename = core.CONFIG_FILE
CFG_NEW.write()
except Exception as error:
logger.debug("Error {msg} when writing changes to .cfg".format(msg=error))
return CFG_NEW
configobj.Section = Section
configobj.ConfigObj = ConfigObj
config = ConfigObj
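# Illustrative usage (an assumption based on the accessors above, not part of the
# original module): load the merged config and pull the settings for one category.
#   cfg = config()          # reads core.CONFIG_FILE by default
#   settings = cfg['tv']    # hypothetical 'tv' category sub-section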
|
siosio/intellij-community
|
refs/heads/master
|
python/testData/inspections/PyUnresolvedReferencesInspection/DynamicDunderAll/a.py
|
24
|
from m1 import *
print(m1m1)
from m2 import *
print(m2m1)
from m3 import *
print(m3m1)
from m4 import *
print(m4m1)
from m5 import *
print(m5m1)
|
nemesisdesign/django
|
refs/heads/master
|
tests/utils_tests/test_ipv6.py
|
46
|
from __future__ import unicode_literals
import unittest
from django.utils.ipv6 import clean_ipv6_address, is_valid_ipv6_address
class TestUtilsIPv6(unittest.TestCase):
def test_validates_correct_plain_address(self):
self.assertTrue(is_valid_ipv6_address('fe80::223:6cff:fe8a:2e8a'))
self.assertTrue(is_valid_ipv6_address('2a02::223:6cff:fe8a:2e8a'))
self.assertTrue(is_valid_ipv6_address('1::2:3:4:5:6:7'))
self.assertTrue(is_valid_ipv6_address('::'))
self.assertTrue(is_valid_ipv6_address('::a'))
self.assertTrue(is_valid_ipv6_address('2::'))
def test_validates_correct_with_v4mapping(self):
self.assertTrue(is_valid_ipv6_address('::ffff:254.42.16.14'))
self.assertTrue(is_valid_ipv6_address('::ffff:0a0a:0a0a'))
def test_validates_incorrect_plain_address(self):
self.assertFalse(is_valid_ipv6_address('foo'))
self.assertFalse(is_valid_ipv6_address('127.0.0.1'))
self.assertFalse(is_valid_ipv6_address('12345::'))
self.assertFalse(is_valid_ipv6_address('1::2:3::4'))
self.assertFalse(is_valid_ipv6_address('1::zzz'))
self.assertFalse(is_valid_ipv6_address('1::2:3:4:5:6:7:8'))
self.assertFalse(is_valid_ipv6_address('1:2'))
self.assertFalse(is_valid_ipv6_address('1:::2'))
self.assertFalse(is_valid_ipv6_address('fe80::223: 6cff:fe8a:2e8a'))
self.assertFalse(is_valid_ipv6_address('2a02::223:6cff :fe8a:2e8a'))
def test_validates_incorrect_with_v4mapping(self):
self.assertFalse(is_valid_ipv6_address('::ffff:999.42.16.14'))
self.assertFalse(is_valid_ipv6_address('::ffff:zzzz:0a0a'))
# The ::1.2.3.4 format used to be valid but was deprecated
# in rfc4291 section 2.5.5.1
self.assertTrue(is_valid_ipv6_address('::254.42.16.14'))
self.assertTrue(is_valid_ipv6_address('::0a0a:0a0a'))
self.assertFalse(is_valid_ipv6_address('::999.42.16.14'))
self.assertFalse(is_valid_ipv6_address('::zzzz:0a0a'))
def test_cleans_plain_address(self):
self.assertEqual(clean_ipv6_address('DEAD::0:BEEF'), 'dead::beef')
self.assertEqual(clean_ipv6_address('2001:000:a:0000:0:fe:fe:beef'), '2001:0:a::fe:fe:beef')
self.assertEqual(clean_ipv6_address('2001::a:0000:0:fe:fe:beef'), '2001:0:a::fe:fe:beef')
def test_cleans_with_v4_mapping(self):
self.assertEqual(clean_ipv6_address('::ffff:0a0a:0a0a'), '::ffff:10.10.10.10')
self.assertEqual(clean_ipv6_address('::ffff:1234:1234'), '::ffff:18.52.18.52')
self.assertEqual(clean_ipv6_address('::ffff:18.52.18.52'), '::ffff:18.52.18.52')
self.assertEqual(clean_ipv6_address('::ffff:0.52.18.52'), '::ffff:0.52.18.52')
self.assertEqual(clean_ipv6_address('::ffff:0.0.0.0'), '::ffff:0.0.0.0')
def test_unpacks_ipv4(self):
self.assertEqual(clean_ipv6_address('::ffff:0a0a:0a0a', unpack_ipv4=True), '10.10.10.10')
self.assertEqual(clean_ipv6_address('::ffff:1234:1234', unpack_ipv4=True), '18.52.18.52')
self.assertEqual(clean_ipv6_address('::ffff:18.52.18.52', unpack_ipv4=True), '18.52.18.52')
|
iulian787/spack
|
refs/heads/develop
|
var/spack/repos/builtin/packages/r-biomartr/package.py
|
5
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RBiomartr(RPackage):
"""Perform large scale genomic data retrieval and functional annotation
retrieval. This package aims to provide users with a standardized way to
automate genome, proteome, 'RNA', coding sequence ('CDS'), 'GFF', and
metagenome retrieval from 'NCBI RefSeq', 'NCBI Genbank', 'ENSEMBL',
'ENSEMBLGENOMES', and 'UniProt' databases. Furthermore, an interface to the
'BioMart' database (Smedley et al. (2009) <doi:10.1186/1471-2164-10-22>)
allows users to retrieve functional annotation for genomic loci. In
addition, users can download entire databases such as 'NCBI RefSeq' (Pruitt
et al. (2007) <doi:10.1093/nar/gkl842>), 'NCBI nr', 'NCBI nt', 'NCBI
Genbank' (Benson et al. (2013) <doi:10.1093/nar/gks1195>), etc. as well as
'ENSEMBL' and 'ENSEMBLGENOMES' with only one command."""
homepage = "https://docs.ropensci.org/biomartr"
url = "https://cloud.r-project.org/src/contrib/biomartr_0.9.2.tar.gz"
list_url = "https://cloud.r-project.org/src/contrib/Archive/biomartr"
version('0.9.2', sha256='d88085696e9c5614828602254c33f2cdd3bbfeebc2f21a705eee3cb961097c89')
depends_on('r-biomart', type=('build', 'run'))
depends_on('r-biostrings', type=('build', 'run'))
depends_on('r-curl', type=('build', 'run'))
depends_on('r-tibble', type=('build', 'run'))
depends_on('r-jsonlite', type=('build', 'run'))
depends_on('r-data-table@1.9.4:', type=('build', 'run'))
depends_on('r-dplyr@0.3.0:', type=('build', 'run'))
depends_on('r-readr@0.2.2:', type=('build', 'run'))
depends_on('r-downloader@0.3:', type=('build', 'run'))
depends_on('r-rcurl@1.95-4.5:', type=('build', 'run'))
depends_on('r-xml@3.98-1.1:', type=('build', 'run'))
depends_on('r-httr@0.6.1:', type=('build', 'run'))
depends_on('r-stringr@0.6.2:', type=('build', 'run'))
depends_on('r-purrr', type=('build', 'run'))
depends_on('r-r-utils', type=('build', 'run'))
depends_on('r-philentropy', type=('build', 'run'))
depends_on('r-fs@1.3.1:', type=('build', 'run'))
|
GehenHe/Recognize-Face-on-Android
|
refs/heads/master
|
tensorflow/python/user_ops/__init__.py
|
12133432
| |
zWingz/webbasketball
|
refs/heads/master
|
apps/admin/__init__.py
|
12133432
| |
w1r0x/ansible-modules-core
|
refs/heads/devel
|
database/__init__.py
|
12133432
| |
ESSolutions/ESSArch_Core
|
refs/heads/master
|
ESSArch_Core/tests/__init__.py
|
12133432
| |
cparawhore/ProyectoSubastas
|
refs/heads/master
|
site-packages/django/conf/locale/tr/__init__.py
|
12133432
| |
pitomba/pitomba-site
|
refs/heads/master
|
pitomba/settings.py
|
1
|
from settings_base import *
from os.path import dirname, abspath, join
DEBUG = True
PORT = 9082
SERVER_NAME = 'http://0.0.0.0:%s' % PORT
UPLOAD_PATH = get_path_to('uploads')
|
sander76/home-assistant
|
refs/heads/dev
|
homeassistant/components/demo/__init__.py
|
6
|
"""Set up the demo environment that mimics interaction with devices."""
import asyncio
from homeassistant import bootstrap, config_entries
from homeassistant.const import ATTR_ENTITY_ID, EVENT_HOMEASSISTANT_START
import homeassistant.core as ha
DOMAIN = "demo"
COMPONENTS_WITH_CONFIG_ENTRY_DEMO_PLATFORM = [
"air_quality",
"alarm_control_panel",
"binary_sensor",
"camera",
"climate",
"cover",
"fan",
"humidifier",
"light",
"lock",
"media_player",
"number",
"sensor",
"switch",
"vacuum",
"water_heater",
]
COMPONENTS_WITH_DEMO_PLATFORM = [
"tts",
"stt",
"mailbox",
"notify",
"image_processing",
"calendar",
"device_tracker",
]
async def async_setup(hass, config):
"""Set up the demo environment."""
if DOMAIN not in config:
return True
if not hass.config_entries.async_entries(DOMAIN):
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data={}
)
)
# Set up demo platforms
for platform in COMPONENTS_WITH_DEMO_PLATFORM:
hass.async_create_task(
hass.helpers.discovery.async_load_platform(platform, DOMAIN, {}, config)
)
config.setdefault(ha.DOMAIN, {})
config.setdefault(DOMAIN, {})
# Set up sun
if not hass.config.latitude:
hass.config.latitude = 32.87336
if not hass.config.longitude:
hass.config.longitude = 117.22743
tasks = [bootstrap.async_setup_component(hass, "sun", config)]
# Set up input select
tasks.append(
bootstrap.async_setup_component(
hass,
"input_select",
{
"input_select": {
"living_room_preset": {
"options": ["Visitors", "Visitors with kids", "Home Alone"]
},
"who_cooks": {
"icon": "mdi:panda",
"initial": "Anne Therese",
"name": "Cook today",
"options": ["Paulus", "Anne Therese"],
},
}
},
)
)
# Set up input boolean
tasks.append(
bootstrap.async_setup_component(
hass,
"input_boolean",
{
"input_boolean": {
"notify": {
"icon": "mdi:car",
"initial": False,
"name": "Notify Anne Therese is home",
}
}
},
)
)
# Set up input number
tasks.append(
bootstrap.async_setup_component(
hass,
"input_number",
{
"input_number": {
"noise_allowance": {
"icon": "mdi:bell-ring",
"min": 0,
"max": 10,
"name": "Allowed Noise",
"unit_of_measurement": "dB",
}
}
},
)
)
results = await asyncio.gather(*tasks)
if any(not result for result in results):
return False
# Set up example persistent notification
hass.components.persistent_notification.async_create(
"This is an example of a persistent notification.", title="Example Notification"
)
async def demo_start_listener(_event):
"""Finish set up."""
await finish_setup(hass, config)
hass.bus.async_listen(EVENT_HOMEASSISTANT_START, demo_start_listener)
return True
async def async_setup_entry(hass, config_entry):
"""Set the config entry up."""
# Set up demo platforms with config entry
for platform in COMPONENTS_WITH_CONFIG_ENTRY_DEMO_PLATFORM:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, platform)
)
return True
async def finish_setup(hass, config):
"""Finish set up once demo platforms are set up."""
switches = None
lights = None
while not switches and not lights:
# Not all platforms might be loaded.
if switches is not None:
await asyncio.sleep(0)
switches = sorted(hass.states.async_entity_ids("switch"))
lights = sorted(hass.states.async_entity_ids("light"))
# Set up scripts
await bootstrap.async_setup_component(
hass,
"script",
{
"script": {
"demo": {
"alias": f"Toggle {lights[0].split('.')[1]}",
"sequence": [
{
"service": "light.turn_off",
"data": {ATTR_ENTITY_ID: lights[0]},
},
{"delay": {"seconds": 5}},
{
"service": "light.turn_on",
"data": {ATTR_ENTITY_ID: lights[0]},
},
{"delay": {"seconds": 5}},
{
"service": "light.turn_off",
"data": {ATTR_ENTITY_ID: lights[0]},
},
],
}
}
},
)
# Set up scenes
await bootstrap.async_setup_component(
hass,
"scene",
{
"scene": [
{
"name": "Romantic lights",
"entities": {
lights[0]: True,
lights[1]: {
"state": "on",
"xy_color": [0.33, 0.66],
"brightness": 200,
},
},
},
{
"name": "Switch on and off",
"entities": {switches[0]: True, switches[1]: False},
},
]
},
)
|
CenterForOpenScience/SHARE
|
refs/heads/develop
|
tests/share/regulate/steps/test_deduplicate.py
|
2
|
import pytest
from share.regulate.steps.deduplicate import Deduplicate
from tests.share.normalize.factories import (
CreativeWork,
Preprint,
Registration,
Subject,
WorkIdentifier,
)
class TestDeduplicate:
@pytest.mark.parametrize('input', [
[Preprint(0, identifiers=[WorkIdentifier(1)])]
])
def test_no_change(self, Graph, input):
graph = Graph(input)
Deduplicate().run(graph)
assert graph == Graph(input)
@pytest.mark.parametrize('input, output', [
([
Preprint(0, identifiers=[WorkIdentifier(id=1, uri='http://osf.io/guidguid')]),
CreativeWork(id=1, sparse=True, identifiers=[WorkIdentifier(uri='http://osf.io/guidguid')])
], [
Preprint(0, identifiers=[WorkIdentifier(uri='http://osf.io/guidguid')]),
]),
([
Preprint(0, identifiers=[
WorkIdentifier(uri='http://osf.io/guidguid'),
WorkIdentifier(4),
]),
CreativeWork(id=1, sparse=True, identifiers=[WorkIdentifier(uri='http://osf.io/guidguid')])
], [
Preprint(0, identifiers=[
WorkIdentifier(4),
WorkIdentifier(uri='http://osf.io/guidguid'),
]),
]),
([
Registration(0, subjects=[
Subject(
0,
name='custom-child',
central_synonym=Subject(1, name='central-child', parent=Subject(3, name='central-parent')),
parent=Subject(2, name='custom-parent', central_synonym=Subject(3, name='central-parent')),
)
for _ in range(3)
]),
], [
Registration(0, subjects=[
Subject(
0,
name='custom-child',
central_synonym=Subject(1, name='central-child', parent=Subject(3, id='central-parent', name='central-parent')),
parent=Subject(2, name='custom-parent', central_synonym=Subject(id='central-parent')),
)
]),
]),
])
def test_deduplicate(self, Graph, input, output):
graph = Graph(input)
Deduplicate().run(graph)
assert graph == Graph(output)
|
DMOJ/onemirror
|
refs/heads/master
|
onemirror/main.py
|
1
|
import argparse
import logging
from onemirror.mirror import OneDriveMirror
def main():
parser = argparse.ArgumentParser(description='OneDrive mirroring program: create a complete '
'mirror of OneDrive contents')
parser.add_argument('remote', nargs='?', default='/')
parser.add_argument('local', nargs='?', default='OneDrive')
parser.add_argument('database', nargs='?', default='onedrive.db')
parser.add_argument('--client-id', default='000000004C17987A')
parser.add_argument('--secret', default='xk9GckVE6ZUM-rgSmjDx8JuTNvWLXdV3')
parser.add_argument('--interval', default=10, type=int)
parser.add_argument('--full-update', default=3600, type=int)
parser.add_argument('-x', '--exclude')
args = parser.parse_args()
logging.basicConfig(level=logging.INFO, format='%(levelname)s %(asctime)s %(module)s %(message)s')
with OneDriveMirror(args.local, args.remote, args.database, args.client_id, args.secret,
interval=args.interval, exclude=args.exclude, full_update=args.full_update) as mirror:
mirror.run()
if __name__ == '__main__':
main()
|
asedunov/intellij-community
|
refs/heads/master
|
python/testData/surround/SurroundWithIf.py
|
150
|
def foo():
<selection>print "hello"</selection>
|
magul/pywikibot-core
|
refs/heads/master
|
pywikibot/interwiki_graph.py
|
1
|
# -*- coding: utf-8 -*-
"""Module with the Graphviz drawing calls."""
#
# (C) Pywikibot team, 2006-2016
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, unicode_literals
import itertools
import threading
try:
import pydot
except ImportError as e:
pydot = e
import pywikibot
from pywikibot import config2 as config
from pywikibot.tools import Counter
# deprecated value
pydotfound = not isinstance(pydot, ImportError)
class GraphImpossible(Exception):
"""Drawing a graph is not possible on your system."""
class GraphSavingThread(threading.Thread):
"""
Threaded graph renderer.
Rendering a graph can take extremely long. We use
multithreading because of that.
TODO: Find out if several threads running in parallel
can slow down the system too much. Consider adding a
mechanism to kill a thread if it takes too long.
"""
def __init__(self, graph, originPage):
"""Constructor."""
threading.Thread.__init__(self)
self.graph = graph
self.originPage = originPage
def run(self):
"""Write graphs to the data directory."""
for format in config.interwiki_graph_formats:
filename = config.datafilepath(
'interwiki-graphs/' + getFilename(self.originPage, format))
if self.graph.write(filename, prog='dot', format=format):
pywikibot.output(u'Graph saved as %s' % filename)
else:
pywikibot.output(u'Graph could not be saved as %s' % filename)
class Subject(object):
"""Data about a page with translations on multiple wikis."""
def __init__(self, origin=None):
"""Constructor.
@param origin: the page on the 'origin' wiki
@type origin: Page
"""
# Remember the "origin page"
self._origin = origin
# Temporary variable to support git blame; do not use
originPage = origin
self.found_in = None
# foundIn is a dictionary where pages are keys and lists of
# pages are values. It stores where we found each page.
# As we haven't yet found a page that links to the origin page, we
# start with an empty list for it.
if originPage:
self.foundIn = {self.originPage: []}
else:
self.foundIn = {}
@property
def origin(self):
"""Page on the origin wiki."""
return self._origin
@origin.setter
def origin(self, value):
"""Page on the origin wiki."""
self._origin = value
@property
def originPage(self):
"""Deprecated property for the origin page.
DEPRECATED. Use origin.
"""
# TODO: deprecate this property
return self.origin
@originPage.setter
def originPage(self, value):
"""Deprecated property for the origin page.
DEPRECATED. Use origin.
"""
self.origin = value
@property
def foundIn(self):
"""Mapping of pages to others pages interwiki linked to it.
DEPRECATED. Use found_in.
"""
# TODO: deprecate this property
return self.found_in
@foundIn.setter
def foundIn(self, value):
"""Temporary property setter to support code migration."""
self.found_in = value
class GraphDrawer(object):
"""Graphviz (dot) code creator."""
def __init__(self, subject):
"""Constructor.
@param subject: page data to graph
@type subject: Subject
@raises GraphImpossible: pydot is not installed
"""
if isinstance(pydot, ImportError):
raise GraphImpossible('pydot is not installed: %s.' % pydot)
self.graph = None
self.subject = subject
def getLabel(self, page):
"""Get label for page."""
return '"%s:%s"' % (page.site.code, page.title())
def _octagon_site_set(self):
"""Build a list of sites with more than one valid page."""
page_list = self.subject.found_in.keys()
# Only track sites of normal pages
each_site = [page.site for page in page_list
if page.exists() and not page.isRedirectPage()]
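# most_common() sorts by descending count, so takewhile() keeps only the sites
# that hold more than one page (i.e. potential interwiki conflicts).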
return set(x[0] for x in itertools.takewhile(
lambda x: x[1] > 1,
Counter(each_site).most_common()))
def addNode(self, page):
"""Add a node for page."""
node = pydot.Node(self.getLabel(page), shape='rectangle')
node.set_URL("\"http://%s%s\""
% (page.site.hostname(),
page.site.get_address(page.title(asUrl=True))))
node.set_style('filled')
node.set_fillcolor('white')
node.set_fontsize('11')
if not page.exists():
node.set_fillcolor('red')
elif page.isRedirectPage():
node.set_fillcolor('blue')
elif page.isDisambig():
node.set_fillcolor('orange')
if page.namespace() != self.subject.originPage.namespace():
node.set_color('green')
node.set_style('filled,bold')
if page.site in self.octagon_sites:
# mark conflict by octagonal node
node.set_shape('octagon')
self.graph.add_node(node)
def addDirectedEdge(self, page, refPage):
"""Add a directed edge from refPage to page."""
# if page was given as a hint, referrers would be [None]
if refPage is not None:
sourceLabel = self.getLabel(refPage)
targetLabel = self.getLabel(page)
edge = pydot.Edge(sourceLabel, targetLabel)
oppositeEdge = self.graph.get_edge(targetLabel, sourceLabel)
if oppositeEdge:
if isinstance(oppositeEdge, list):
# bugfix for pydot >= 1.0.3
oppositeEdge = oppositeEdge[0]
oppositeEdge.set_dir('both')
# workaround for sf.net bug 401: prevent duplicate edges
# (it is unclear why duplicate edges occur)
# https://sourceforge.net/p/pywikipediabot/bugs/401/
elif self.graph.get_edge(sourceLabel, targetLabel):
pywikibot.output(
u'BUG: Tried to create duplicate edge from %s to %s'
% (refPage.title(asLink=True), page.title(asLink=True)))
# duplicate edges would be bad because then get_edge() would
# give a list of edges, not a single edge when we handle the
# opposite edge.
else:
# add edge
if refPage.site == page.site:
edge.set_color('blue')
elif not page.exists():
# mark dead links
edge.set_color('red')
elif refPage.isDisambig() != page.isDisambig():
# mark links between disambiguation and non-disambiguation
# pages
edge.set_color('orange')
if refPage.namespace() != page.namespace():
edge.set_color('green')
self.graph.add_edge(edge)
def saveGraphFile(self):
"""Write graphs to the data directory."""
thread = GraphSavingThread(self.graph, self.subject.originPage)
thread.start()
def createGraph(self):
"""
Create graph of the interwiki links.
For more info see U{https://meta.wikimedia.org/wiki/Interwiki_graphs}
"""
pywikibot.output(u'Preparing graph for %s'
% self.subject.originPage.title())
# create empty graph
self.graph = pydot.Dot()
# self.graph.set('concentrate', 'true')
self.octagon_sites = self._octagon_site_set()
for page in self.subject.foundIn.keys():
# a node for each found page
self.addNode(page)
# mark start node by pointing there from a black dot.
firstLabel = self.getLabel(self.subject.originPage)
self.graph.add_node(pydot.Node('start', shape='point'))
self.graph.add_edge(pydot.Edge('start', firstLabel))
for page, referrers in self.subject.foundIn.items():
for refPage in referrers:
self.addDirectedEdge(page, refPage)
self.saveGraphFile()
def getFilename(page, extension=None):
"""
Create a filename that is unique for the page.
@param page: page used to create the new filename
@type page: Page
@param extension: file extension
@type extension: str
@return: filename of <family>-<lang>-<page>.<ext>
@rtype: str
"""
filename = '%s-%s-%s' % (page.site.family.name,
page.site.code,
page.title(as_filename=True))
if extension:
filename += '.%s' % extension
return filename
|
FoamyGuy/MatterMaker
|
refs/heads/master
|
makematter_server/admin.py
|
1
|
from django.contrib import admin
# Register your models here.
from makematter_server.models import MatterObject, MatterTemplate, MatterTemplateVar
admin.site.register(MatterObject)
admin.site.register(MatterTemplate)
admin.site.register(MatterTemplateVar)
|
charlesvdv/servo
|
refs/heads/master
|
tests/wpt/web-platform-tests/tools/pywebsocket/src/test/testdata/handlers/blank_wsh.py
|
499
|
# Copyright 2009, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# intentionally left blank
|
40223202/2015cdb_g2
|
refs/heads/master
|
2015scrum-3a6fed94d45237e506e5e1539e27a2c9e89e6740/static/Brython3.1.1-20150328-091302/Lib/browser/timer.py
|
610
|
import sys
from browser import window
def wrap(func):
# Transforms a function f into another function that prints a
# traceback in case of exception
def f(*args, **kw):
try:
return func(*args, **kw)
except Exception as exc:
sys.stderr.write(str(exc))
return f
clear_interval = window.clearInterval
clear_timeout = window.clearTimeout
def set_interval(func,interval):
return window.setInterval(wrap(func),interval)
def set_timeout(func,interval):
return int(window.setTimeout(wrap(func),interval))
def request_animation_frame(func):
return int(window.requestAnimationFrame(func))
def cancel_animation_frame(int_id):
window.cancelAnimationFrame(int_id)
|
indictranstech/buyback-erp
|
refs/heads/master
|
erpnext/utilities/doctype/note/test_note.py
|
38
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# See license.txt
import frappe
import unittest
test_records = frappe.get_test_records('Note')
class TestNote(unittest.TestCase):
pass
|
GREO/GNU-Radio
|
refs/heads/master
|
gr-wxgui/src/python/constants.py
|
1
|
#
# Copyright 2008,2010 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
##################################################
# Controller Keys
##################################################
AC_COUPLE_KEY = 'ac_couple'
ALPHA_KEY = 'alpha'
AUTORANGE_KEY = 'autorange'
AVERAGE_KEY = 'average'
AVG_ALPHA_KEY = 'avg_alpha'
USE_PERSISTENCE_KEY = 'use_persistence'
PERSIST_ALPHA_KEY = 'persist_alpha'
BASEBAND_FREQ_KEY = 'baseband_freq'
BETA_KEY = 'beta'
COLOR_MODE_KEY = 'color_mode'
DECIMATION_KEY = 'decimation'
DYNAMIC_RANGE_KEY = 'dynamic_range'
FRAME_RATE_KEY = 'frame_rate'
GAIN_MU_KEY = 'gain_mu'
GAIN_OMEGA_KEY = 'gain_omega'
MARKER_KEY = 'marker'
XY_MARKER_KEY = 'xy_marker'
MSG_KEY = 'msg'
NUM_LINES_KEY = 'num_lines'
OMEGA_KEY = 'omega'
PEAK_HOLD_KEY = 'peak_hold'
TRACE_STORE_KEY = 'trace_store'
TRACE_SHOW_KEY = 'trace_show'
REF_LEVEL_KEY = 'ref_level'
RUNNING_KEY = 'running'
SAMPLE_RATE_KEY = 'sample_rate'
TRIGGER_CHANNEL_KEY = 'trigger_channel'
TRIGGER_LEVEL_KEY = 'trigger_level'
TRIGGER_MODE_KEY = 'trigger_mode'
TRIGGER_SLOPE_KEY = 'trigger_slope'
TRIGGER_SHOW_KEY = 'trigger_show'
XY_MODE_KEY = 'xy_mode'
X_CHANNEL_KEY = 'x_channel'
Y_CHANNEL_KEY = 'y_channel'
T_FRAC_OFF_KEY = 't_frac_off'
T_DIVS_KEY = 't_divs'
T_OFF_KEY = 't_off'
T_PER_DIV_KEY = 't_per_div'
X_DIVS_KEY = 'x_divs'
X_OFF_KEY = 'x_off'
X_PER_DIV_KEY = 'x_per_div'
Y_DIVS_KEY = 'y_divs'
Y_OFF_KEY = 'y_off'
Y_PER_DIV_KEY = 'y_per_div'
MAXIMUM_KEY = 'maximum'
MINIMUM_KEY = 'minimum'
NUM_BINS_KEY = 'num_bins'
FRAME_SIZE_KEY = 'frame_size'
CHANNEL_OPTIONS_KEY = 'channel_options'
SHOW_CONTROL_PANEL_KEY = 'show_control_panel'
|
ipazc/oculus-crawl
|
refs/heads/master
|
main/dataset/__init__.py
|
9
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = "Ivan de Paz Centeno"
|
switowski/invenio
|
refs/heads/master
|
invenio/modules/upgrader/upgrades/invenio_2013_01_08_new_goto_table.py
|
15
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2013 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from invenio.legacy.dbquery import run_sql
depends_on = ['invenio_release_1_1_0']
def info():
return "New goto table"
def do_upgrade():
run_sql("""
CREATE TABLE IF NOT EXISTS goto (
label varchar(150) NOT NULL,
plugin varchar(150) NOT NULL,
parameters text NOT NULL,
creation_date datetime NOT NULL,
modification_date datetime NOT NULL,
PRIMARY KEY (label),
KEY (creation_date),
KEY (modification_date)
) ENGINE=MyISAM;
""")
def estimate():
""" Estimate running time of upgrade in seconds (optional). """
return 1
|
mheap/ansible
|
refs/heads/devel
|
lib/ansible/modules/clustering/znode.py
|
55
|
#!/usr/bin/python
# Copyright 2015 WP Engine, Inc. All rights reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: znode
version_added: "2.0"
short_description: Create, delete, retrieve, and update znodes using ZooKeeper
description:
- Create, delete, retrieve, and update znodes using ZooKeeper.
options:
hosts:
description:
- A list of ZooKeeper servers (format '[server]:[port]').
required: true
name:
description:
- The path of the znode.
required: true
value:
description:
- The value assigned to the znode.
op:
description:
- An operation to perform. Mutually exclusive with state.
state:
description:
- The state to enforce. Mutually exclusive with op.
timeout:
description:
- The amount of time to wait for a node to appear.
default: 300
recursive:
description:
- Recursively delete node and all its children.
type: bool
default: 'no'
version_added: "2.1"
requirements:
- kazoo >= 2.1
- python >= 2.6
author: "Trey Perry (@treyperry)"
"""
EXAMPLES = """
# Creating or updating a znode with a given value
- znode:
hosts: 'localhost:2181'
name: /mypath
value: myvalue
state: present
# Getting the value and stat structure for a znode
- znode:
hosts: 'localhost:2181'
name: /mypath
op: get
# Listing a particular znode's children
- znode:
hosts: 'localhost:2181'
name: /zookeeper
op: list
# Waiting 20 seconds for a znode to appear at path /mypath
- znode:
hosts: 'localhost:2181'
name: /mypath
op: wait
timeout: 20
# Deleting a znode at path /mypath
- znode:
hosts: 'localhost:2181'
name: /mypath
state: absent
# Creating or updating a znode with a given value on a remote Zookeeper
- znode:
hosts: 'my-zookeeper-node:2181'
name: /mypath
value: myvalue
state: present
delegate_to: 127.0.0.1
"""
import time
try:
from kazoo.client import KazooClient
from kazoo.handlers.threading import KazooTimeoutError
KAZOO_INSTALLED = True
except ImportError:
KAZOO_INSTALLED = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_bytes
def main():
module = AnsibleModule(
argument_spec=dict(
hosts=dict(required=True, type='str'),
name=dict(required=True, type='str'),
value=dict(required=False, default=None, type='str'),
op=dict(required=False, default=None, choices=['get', 'wait', 'list']),
state=dict(choices=['present', 'absent']),
timeout=dict(required=False, default=300, type='int'),
recursive=dict(required=False, default=False, type='bool')
),
supports_check_mode=False
)
if not KAZOO_INSTALLED:
module.fail_json(msg='kazoo >= 2.1 is required to use this module. Use pip to install it.')
check = check_params(module.params)
if not check['success']:
module.fail_json(msg=check['msg'])
zoo = KazooCommandProxy(module)
try:
zoo.start()
except KazooTimeoutError:
module.fail_json(msg='The connection to the ZooKeeper ensemble timed out.')
command_dict = {
'op': {
'get': zoo.get,
'list': zoo.list,
'wait': zoo.wait
},
'state': {
'present': zoo.present,
'absent': zoo.absent
}
}
command_type = 'op' if 'op' in module.params and module.params['op'] is not None else 'state'
method = module.params[command_type]
result, result_dict = command_dict[command_type][method]()
zoo.shutdown()
if result:
module.exit_json(**result_dict)
else:
module.fail_json(**result_dict)
def check_params(params):
if not params['state'] and not params['op']:
return {'success': False, 'msg': 'Please define an operation (op) or a state.'}
if params['state'] and params['op']:
return {'success': False, 'msg': 'Please choose an operation (op) or a state, but not both.'}
return {'success': True}
class KazooCommandProxy():
def __init__(self, module):
self.module = module
self.zk = KazooClient(module.params['hosts'])
def absent(self):
return self._absent(self.module.params['name'])
def exists(self, znode):
return self.zk.exists(znode)
def list(self):
children = self.zk.get_children(self.module.params['name'])
return True, {'count': len(children), 'items': children, 'msg': 'Retrieved znodes in path.',
'znode': self.module.params['name']}
def present(self):
return self._present(self.module.params['name'], self.module.params['value'])
def get(self):
return self._get(self.module.params['name'])
def shutdown(self):
self.zk.stop()
self.zk.close()
def start(self):
self.zk.start()
def wait(self):
return self._wait(self.module.params['name'], self.module.params['timeout'])
def _absent(self, znode):
if self.exists(znode):
self.zk.delete(znode, recursive=self.module.params['recursive'])
return True, {'changed': True, 'msg': 'The znode was deleted.'}
else:
return True, {'changed': False, 'msg': 'The znode does not exist.'}
def _get(self, path):
if self.exists(path):
value, zstat = self.zk.get(path)
stat_dict = {}
for i in dir(zstat):
if not i.startswith('_'):
attr = getattr(zstat, i)
if isinstance(attr, (int, str)):
stat_dict[i] = attr
result = True, {'msg': 'The node was retrieved.', 'znode': path, 'value': value,
'stat': stat_dict}
else:
result = False, {'msg': 'The requested node does not exist.'}
return result
def _present(self, path, value):
if self.exists(path):
(current_value, zstat) = self.zk.get(path)
if value != current_value:
self.zk.set(path, to_bytes(value))
return True, {'changed': True, 'msg': 'Updated the znode value.', 'znode': path,
'value': value}
else:
return True, {'changed': False, 'msg': 'No changes were necessary.', 'znode': path, 'value': value}
else:
self.zk.create(path, to_bytes(value), makepath=True)
return True, {'changed': True, 'msg': 'Created a new znode.', 'znode': path, 'value': value}
def _wait(self, path, timeout, interval=5):
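# Poll for the znode every `interval` seconds until it appears or `timeout` seconds elapse.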
lim = time.time() + timeout
while time.time() < lim:
if self.exists(path):
return True, {'msg': 'The node appeared before the configured timeout.',
'znode': path, 'timeout': timeout}
else:
time.sleep(interval)
return False, {'msg': 'The node did not appear before the operation timed out.', 'timeout': timeout,
'znode': path}
if __name__ == '__main__':
main()
|
dsprenkels/servo
|
refs/heads/master
|
tests/wpt/harness/wptrunner/config.py
|
196
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import ConfigParser
import os
import sys
from collections import OrderedDict
here = os.path.split(__file__)[0]
class ConfigDict(dict):
def __init__(self, base_path, *args, **kwargs):
self.base_path = base_path
dict.__init__(self, *args, **kwargs)
def get_path(self, key, default=None):
if key not in self:
return default
path = self[key]
path = os.path.expanduser(path)
return os.path.abspath(os.path.join(self.base_path, path))
def read(config_path):
config_path = os.path.abspath(config_path)
config_root = os.path.split(config_path)[0]
parser = ConfigParser.SafeConfigParser()
success = parser.read(config_path)
assert config_path in success, success
subns = {"pwd": os.path.abspath(os.path.curdir)}
rv = OrderedDict()
for section in parser.sections():
rv[section] = ConfigDict(config_root)
for key in parser.options(section):
rv[section][key] = parser.get(section, key, False, subns)
return rv
def path(argv=None):
if argv is None:
argv = []
path = None
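# Accept both "--config <path>" and "--config=<path>"; fall back to wptrunner.ini
# in the current directory, then to the bundled default config.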
for i, arg in enumerate(argv):
if arg == "--config":
if i + 1 < len(argv):
path = argv[i + 1]
elif arg.startswith("--config="):
path = arg.split("=", 1)[1]
if path is not None:
break
if path is None:
if os.path.exists("wptrunner.ini"):
path = os.path.abspath("wptrunner.ini")
else:
path = os.path.join(here, "..", "wptrunner.default.ini")
return os.path.abspath(path)
def load():
return read(path(sys.argv))
|
jetty840/ReplicatorG
|
refs/heads/master
|
skein_engines/skeinforge-47/fabmetheus_utilities/miscellaneous/fabricate/frank_davies/bring_to_temp.py
|
24
|
# bring reprap to temperature
# Frank Davies
import serial
import time
import sys
def out_rep(out_string):
ser.write(out_string)
print out_string
#print "waiting for OK"
start_time=time.clock()
while (ser.inWaiting()==0) and (time.clock()<start_time+40):
c=2
line=ser.readline() # read a '\n' terminated line
#print "02:",line
return(0)
print "starting"
ser=serial.Serial('/dev/ttyUSB0',19200,timeout = 1)
print "wait 1 seconds for serial port to settle"
time.sleep(1)
print "sending termperature command\n"
ser.write("M104 S230\n") # set initial temperature
time.sleep(1)
ser.write("M104 S230\n") # set initial temperature
line=ser.readline()
# read temperature until it is good
t=0
while (t<225):
ser.write("M105\n") # set initial temperature
while (ser.inWaiting()==0):
t=t
line1=ser.readline() # read a '\n' terminated line
#print "line1:",line1
line2=line1[(line1.find(":")+1):]
#print "line2:",line2
t=int(line2)
print "t:",t
#ser.close
|
yangwii/graphql_py_client
|
refs/heads/master
|
app/utils/utils.py
|
1
|
# -*- coding: utf-8 -*-
import json
from gql import Client, gql
from gql.transport.requests import RequestsHTTPTransport
def send(host, port, data):
client = Client(
retries=3,
transport=RequestsHTTPTransport(url=host + ':' + port)
)
query = gql(data)
response = client.execute(query)
return json.dumps(response)
|
kblauer/cs-outreach
|
refs/heads/master
|
server_django/server_django/tests.py
|
1
|
from django.test import TestCase
from django.test import Client
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
# These are tests designed to ensure the base URL structure is setup and working as desired.
class SurveyorRootTests(TestCase):
def setUp(self):
"""
This function contains the setup required to run the tests
"""
# create a test user
User.objects.create_user(username='test_case_user', password='password')
self.client = Client()
def test_index_page_get(self):
response = self.client.get('/')
self.assertEqual(response.status_code, 200, "Index Response status code")
def test_absurd_url(self):
response = self.client.get('/resfsd')
self.assertEqual(response.status_code, 404, "Testing absurd URL")
def test_index_context_boundaries(self):
response = self.client.get('/')
self.assertEqual(response.context['username'], None)
login = self.client.login(username='test_case_user', password="password")
self.assertTrue(login)
response = self.client.get('/')
self.assertEqual(response.context['username'], 'test_case_user')
|
openmv/micropython
|
refs/heads/master
|
tests/basics/bytes_subscr.py
|
96
|
# test [...] of bytes
print(b'123'[0])
print(b'123'[1])
print(b'123'[-1])
try:
b'123'[1] = 4
except TypeError:
print('TypeError')
try:
del b'123'[1]
except TypeError:
print('TypeError')
|
TeXitoi/navitia
|
refs/heads/dev
|
source/jormungandr/jormungandr/timezone.py
|
11
|
# encoding: utf-8
# Copyright (c) 2001-2014, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# Hope you'll enjoy and contribute to this project,
# powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
# a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# IRC #navitia on freenode
# https://groups.google.com/d/forum/navitia
# www.navitia.io
import logging
import pytz
from flask import g
from jormungandr.exceptions import TechnicalError, RegionNotFound
from jormungandr import i_manager
def set_request_timezone(region):
logger = logging.getLogger(__name__)
instance = i_manager.instances.get(region, None)
if not instance:
raise RegionNotFound(region)
if not instance.timezone:
logger.warn("region {} has no timezone".format(region))
g.timezone = None
return
tz = pytz.timezone(instance.timezone)
if not tz:
logger.warn("impossible to find timezone: '{}' for region {}".format(instance.timezone, region))
g.timezone = tz
def get_timezone():
"""
return the time zone associated with the query
Note: for the moment ONLY ONE time zone is used for a region (a kraken instance)
It is this default timezone that is returned in this method
"""
if not hasattr(g, 'timezone'):
raise TechnicalError("No timezone set for this API") # the set_request_timezone has to be called at init
return g.timezone
|
SergiuMir/python-phonenumbers
|
refs/heads/dev
|
python/tests/testdata/region_SE.py
|
4
|
"""Auto-generated file, do not edit by hand. SE metadata"""
from phonenumbers.phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_SE = PhoneMetadata(id='SE', country_code=46, international_prefix='00',
general_desc=PhoneNumberDesc(),
fixed_line=PhoneNumberDesc(),
mobile=PhoneNumberDesc(),
toll_free=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
premium_rate=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
shared_cost=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
personal_number=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
voip=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
pager=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
uan=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
voicemail=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
no_international_dialling=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'))
|
kastnermario/yaml-cpp
|
refs/heads/master
|
test/gmock-1.7.0/scripts/generator/cpp/__init__.py
|
12133432
| |
vaginessa/wifuzzit
|
refs/heads/master
|
requests/ndmp.py
|
10
|
from sulley import *
import struct
import time
ndmp_messages = \
[
# Connect Interface
0x900, # NDMP_CONNECT_OPEN
0x901, # NDMP_CONECT_CLIENT_AUTH
0x902, # NDMP_CONNECT_CLOSE
0x903, # NDMP_CONECT_SERVER_AUTH
# Config Interface
0x100, # NDMP_CONFIG_GET_HOST_INFO
0x102, # NDMP_CONFIG_GET_CONNECTION_TYPE
0x103, # NDMP_CONFIG_GET_AUTH_ATTR
0x104, # NDMP_CONFIG_GET_BUTYPE_INFO
0x105, # NDMP_CONFIG_GET_FS_INFO
0x106, # NDMP_CONFIG_GET_TAPE_INFO
0x107, # NDMP_CONFIG_GET_SCSI_INFO
0x108, # NDMP_CONFIG_GET_SERVER_INFO
# SCSI Interface
0x200, # NDMP_SCSI_OPEN
0x201, # NDMP_SCSI_CLOSE
0x202, # NDMP_SCSI_GET_STATE
0x203, # NDMP_SCSI_SET_TARGET
0x204, # NDMP_SCSI_RESET_DEVICE
0x205, # NDMP_SCSI_RESET_BUS
0x206, # NDMP_SCSI_EXECUTE_CDB
# Tape Interface
0x300, # NDMP_TAPE_OPEN
0x301, # NDMP_TAPE_CLOSE
0x302, # NDMP_TAPE_GET_STATE
0x303, # NDMP_TAPE_MTIO
0x304, # NDMP_TAPE_WRITE
0x305, # NDMP_TAPE_READ
0x307, # NDMP_TAPE_EXECUTE_CDB
# Data Interface
0x400, # NDMP_DATA_GET_STATE
0x401, # NDMP_DATA_START_BACKUP
0x402, # NDMP_DATA_START_RECOVER
0x403, # NDMP_DATA_ABORT
0x404, # NDMP_DATA_GET_ENV
0x407, # NDMP_DATA_STOP
0x409, # NDMP_DATA_LISTEN
0x40a, # NDMP_DATA_CONNECT
# Notify Interface
0x501, # NDMP_NOTIFY_DATA_HALTED
0x502, # NDMP_NOTIFY_CONNECTED
0x503, # NDMP_NOTIFY_MOVER_HALTED
0x504, # NDMP_NOTIFY_MOVER_PAUSED
0x505, # NDMP_NOTIFY_DATA_READ
# Log Interface
0x602, # NDMP_LOG_FILES
0x603, # NDMP_LOG_MESSAGE
# File History Interface
0x703, # NDMP_FH_ADD_FILE
0x704, # NDMP_FH_ADD_DIR
0x705, # NDMP_FH_ADD_NODE
# Mover Interface
0xa00, # NDMP_MOVER_GET_STATE
0xa01, # NDMP_MOVER_LISTEN
0xa02, # NDMP_MOVER_CONTINUE
0xa03, # NDMP_MOVER_ABORT
0xa04, # NDMP_MOVER_STOP
0xa05, # NDMP_MOVER_SET_WINDOW
0xa06, # NDMP_MOVER_READ
0xa07, # NDMP_MOVER_CLOSE
0xa08, # NDMP_MOVER_SET_RECORD_SIZE
0xa09, # NDMP_MOVER_CONNECT
# Reserved for the vendor specific usage (from 0xf000 to 0xffff)
0xf000, # NDMP_VENDORS_BASE
# Reserved for Prototyping (from 0xff00 to 0xffff)
0xff00, # NDMP_RESERVED_BASE
]
########################################################################################################################
s_initialize("Veritas NDMP_CONECT_CLIENT_AUTH")
# the first bit is the last frag flag, we'll always set it and truncate our size to 3 bytes.
# 3 bytes of size gives us a max 16mb ndmp message, plenty of space.
s_static("\x80")
s_size("request", length=3, endian=">")
if s_block_start("request"):
if s_block_start("ndmp header"):
s_static(struct.pack(">L", 1), name="sequence")
s_static(struct.pack(">L", time.time()), name="timestamp")
s_static(struct.pack(">L", 0), name="message type") # request (0)
s_static(struct.pack(">L", 0x901), name="NDMP_CONECT_CLIENT_AUTH")
s_static(struct.pack(">L", 1), name="reply sequence")
s_static(struct.pack(">L", 0), name="error")
s_block_end("ndmp header")
s_group("auth types", values=[struct.pack(">L", 190), struct.pack(">L", 5), struct.pack(">L", 4)])
if s_block_start("body", group="auth types"):
# do random data.
s_random(0, min_length=1000, max_length=50000, num_mutations=500)
# random valid XDR string.
#s_lego("xdr_string", "pedram")
s_block_end("body")
s_block_end("request")
########################################################################################################################
s_initialize("Veritas Proprietary Message Types")
# the first bit is the last frag flag, we'll always set it and truncate our size to 3 bytes.
# 3 bytes of size gives us a max 16mb ndmp message, plenty of space.
s_static("\x80")
s_size("request", length=3, endian=">")
if s_block_start("request"):
if s_block_start("ndmp header"):
s_static(struct.pack(">L", 1), name="sequence")
s_static(struct.pack(">L", time.time()), name="timestamp")
s_static(struct.pack(">L", 0), name="message type") # request (0)
s_group("prop ops", values = \
[
struct.pack(">L", 0xf315), # file list?
struct.pack(">L", 0xf316),
struct.pack(">L", 0xf317),
struct.pack(">L", 0xf200), #
struct.pack(">L", 0xf201),
struct.pack(">L", 0xf202),
struct.pack(">L", 0xf31b),
struct.pack(">L", 0xf270), # send strings like NDMP_PROP_PEER_PROTOCOL_VERSION
struct.pack(">L", 0xf271),
struct.pack(">L", 0xf33b),
struct.pack(">L", 0xf33c),
])
s_static(struct.pack(">L", 1), name="reply sequence")
s_static(struct.pack(">L", 0), name="error")
s_block_end("ndmp header")
if s_block_start("body", group="prop ops"):
s_random("\x00\x00\x00\x00", min_length=1000, max_length=50000, num_mutations=100)
s_block_end("body")
s_block_end("request")
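########################################################################################################################
# Framing sketch (illustrative, not part of the fuzzer): shows, with plain struct
# packing, the same record marking the sulley blocks above emit -- a last-fragment
# flag in the top bit, a 3-byte big-endian length, then the six 32-bit NDMP header
# fields. The default field values are arbitrary examples.
def build_ndmp_request(message_code, body=""):
    header = struct.pack(">LLLLLL",
                         1,                  # sequence
                         int(time.time()),   # timestamp
                         0,                  # message type: request
                         message_code,       # e.g. 0x900 NDMP_CONNECT_OPEN
                         1,                  # reply sequence
                         0)                  # error
    payload = header + body
    # 0x80 sets the "last fragment" bit; the length is truncated to 3 bytes.
    return "\x80" + struct.pack(">L", len(payload))[1:] + payload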
|
wkschwartz/django
|
refs/heads/stable/3.2.x
|
tests/admin_widgets/test_autocomplete_widget.py
|
11
|
from django import forms
from django.contrib import admin
from django.contrib.admin.widgets import AutocompleteSelect
from django.forms import ModelChoiceField
from django.test import TestCase, override_settings
from django.utils import translation
from .models import Album, Band, ReleaseEvent, VideoStream
class AlbumForm(forms.ModelForm):
class Meta:
model = Album
fields = ['band', 'featuring']
widgets = {
'band': AutocompleteSelect(
Album._meta.get_field('band'),
admin.site,
attrs={'class': 'my-class'},
),
'featuring': AutocompleteSelect(
Album._meta.get_field('featuring'),
admin.site,
)
}
class NotRequiredBandForm(forms.Form):
band = ModelChoiceField(
queryset=Album.objects.all(),
widget=AutocompleteSelect(Album._meta.get_field('band').remote_field, admin.site),
required=False,
)
class RequiredBandForm(forms.Form):
band = ModelChoiceField(
queryset=Album.objects.all(),
widget=AutocompleteSelect(Album._meta.get_field('band').remote_field, admin.site),
required=True,
)
class VideoStreamForm(forms.ModelForm):
class Meta:
model = VideoStream
fields = ['release_event']
widgets = {
'release_event': AutocompleteSelect(
VideoStream._meta.get_field('release_event'),
admin.site,
),
}
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class AutocompleteMixinTests(TestCase):
empty_option = '<option value=""></option>'
maxDiff = 1000
def test_build_attrs(self):
form = AlbumForm()
attrs = form['band'].field.widget.get_context(name='my_field', value=None, attrs={})['widget']['attrs']
self.assertEqual(attrs, {
'class': 'my-class admin-autocomplete',
'data-ajax--cache': 'true',
'data-ajax--delay': 250,
'data-ajax--type': 'GET',
'data-ajax--url': '/autocomplete/',
'data-theme': 'admin-autocomplete',
'data-allow-clear': 'false',
'data-app-label': 'admin_widgets',
'data-field-name': 'band',
'data-model-name': 'album',
'data-placeholder': ''
})
def test_build_attrs_no_custom_class(self):
form = AlbumForm()
attrs = form['featuring'].field.widget.get_context(name='name', value=None, attrs={})['widget']['attrs']
self.assertEqual(attrs['class'], 'admin-autocomplete')
def test_build_attrs_not_required_field(self):
form = NotRequiredBandForm()
attrs = form['band'].field.widget.build_attrs({})
self.assertJSONEqual(attrs['data-allow-clear'], True)
def test_build_attrs_required_field(self):
form = RequiredBandForm()
attrs = form['band'].field.widget.build_attrs({})
self.assertJSONEqual(attrs['data-allow-clear'], False)
def test_get_url(self):
rel = Album._meta.get_field('band')
w = AutocompleteSelect(rel, admin.site)
url = w.get_url()
self.assertEqual(url, '/autocomplete/')
def test_render_options(self):
beatles = Band.objects.create(name='The Beatles', style='rock')
who = Band.objects.create(name='The Who', style='rock')
# With 'band', a ForeignKey.
form = AlbumForm(initial={'band': beatles.uuid})
output = form.as_table()
selected_option = '<option value="%s" selected>The Beatles</option>' % beatles.uuid
option = '<option value="%s">The Who</option>' % who.uuid
self.assertIn(selected_option, output)
self.assertNotIn(option, output)
# With 'featuring', a ManyToManyField.
form = AlbumForm(initial={'featuring': [beatles.pk, who.pk]})
output = form.as_table()
selected_option = '<option value="%s" selected>The Beatles</option>' % beatles.pk
option = '<option value="%s" selected>The Who</option>' % who.pk
self.assertIn(selected_option, output)
self.assertIn(option, output)
def test_render_options_required_field(self):
"""Empty option is present if the field isn't required."""
form = NotRequiredBandForm()
output = form.as_table()
self.assertIn(self.empty_option, output)
def test_render_options_not_required_field(self):
"""Empty option isn't present if the field isn't required."""
form = RequiredBandForm()
output = form.as_table()
self.assertNotIn(self.empty_option, output)
def test_render_options_fk_as_pk(self):
beatles = Band.objects.create(name='The Beatles', style='rock')
rubber_soul = Album.objects.create(name='Rubber Soul', band=beatles)
release_event = ReleaseEvent.objects.create(name='Test Target', album=rubber_soul)
form = VideoStreamForm(initial={'release_event': release_event.pk})
output = form.as_table()
selected_option = '<option value="%s" selected>Test Target</option>' % release_event.pk
self.assertIn(selected_option, output)
def test_media(self):
rel = Album._meta.get_field('band').remote_field
base_files = (
'admin/js/vendor/jquery/jquery.min.js',
'admin/js/vendor/select2/select2.full.min.js',
# Language file is inserted here.
'admin/js/jquery.init.js',
'admin/js/autocomplete.js',
)
languages = (
('de', 'de'),
# Language with code 00 does not exist.
('00', None),
# Language files are case sensitive.
('sr-cyrl', 'sr-Cyrl'),
('zh-hans', 'zh-CN'),
('zh-hant', 'zh-TW'),
)
for lang, select_lang in languages:
with self.subTest(lang=lang):
if select_lang:
expected_files = (
base_files[:2] +
(('admin/js/vendor/select2/i18n/%s.js' % select_lang),) +
base_files[2:]
)
else:
expected_files = base_files
with translation.override(lang):
self.assertEqual(AutocompleteSelect(rel, admin.site).media._js, list(expected_files))
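

# A hedged sketch (kept as a comment; not part of this test module) of the admin
# registration the AutocompleteSelect widgets above rely on: the related model's
# ModelAdmin must declare search_fields so the /autocomplete/ endpoint can answer.
# Model and field names below are illustrative.
#
#   from django.contrib import admin
#   from .models import Band
#
#   @admin.register(Band)
#   class BandAdmin(admin.ModelAdmin):
#       search_fields = ['name']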
|
pasinskim/integration
|
refs/heads/master
|
tests/tests/test_fault_tolerance.py
|
1
|
#!/usr/bin/python
# Copyright 2017 Northern.tech AS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from fabric.api import *
import json
import pytest
import re
import shutil
import subprocess
import tempfile
import time
from ..common_setup import *
from ..helpers import Helpers
from ..MenderAPI import deploy, image, logger
from .common_update import *
from .mendertesting import MenderTesting
from ..common import *
DOWNLOAD_RETRY_TIMEOUT_TEST_SETS = [
{
"blockAfterStart": False,
"logMessageToLookFor": "update fetch failed:",
},
{
"blockAfterStart": True,
"logMessageToLookFor": "Download connection broken:",
},
]
@pytest.mark.usefixtures("standard_setup_one_client_bootstrapped")
class TestFaultTolerance(MenderTesting):
def wait_for_download_retry_attempts(self, search_string):
""" Block until logs contain messages related to failed downlaod attempts """
timeout_time = int(time.time()) + (60 * 10)
while int(time.time()) < timeout_time:
with quiet():
output = run("journalctl -u mender -l --no-pager | grep 'msg=\".*%s' | wc -l"
% re.escape(search_string))
time.sleep(2)
            if int(output) >= 2:  # check that some retries have occurred
logging.info("Looks like the download was retried 2 times, restoring download functionality")
break
if timeout_time <= int(time.time()):
pytest.fail("timed out waiting for download retries")
        # make sure the retries were detected well before the 10-minute timeout expired
        assert timeout_time - int(time.time()) >= 2 * 60, "Oops, looks like detecting the download retries took too long"
logging.info("Waiting for system to finish download")
@MenderTesting.slow
def test_update_image_breaks_networking(self, install_image="core-image-full-cmdline-%s-broken-network.ext4" % conftest.machine_name):
"""
Install an image without systemd-networkd binary existing.
The network will not function, mender will not be able to send any logs.
The expected status is the update will rollback, and be considered a failure
"""
if not env.host_string:
execute(self.test_update_image_breaks_networking,
hosts=get_mender_clients(),
install_image=install_image)
return
with Helpers.RebootDetector() as reboot:
deployment_id, _ = common_update_procedure(install_image)
reboot.verify_reboot_performed() # since the network is broken, two reboots will be performed, and the last one will be detected
deploy.check_expected_statistics(deployment_id, "failure", len(get_mender_clients()))
@MenderTesting.slow
def test_deployed_during_network_outage(self, install_image=conftest.get_valid_image()):
"""
Install a valid upgrade image while there is no network availability on the device
        Re-establishing network connectivity results in the upgrade being triggered.
Emulate a flaky network connection, and ensure that the deployment still succeeds.
"""
if not env.host_string:
execute(self.test_deployed_during_network_outage,
hosts=get_mender_clients(),
install_image=install_image)
return
Helpers.gateway_connectivity(False)
with Helpers.RebootDetector() as reboot:
deployment_id, expected_yocto_id = common_update_procedure(install_image, verify_status=False)
time.sleep(60)
for i in range(5):
time.sleep(5)
Helpers.gateway_connectivity(i % 2 == 0)
Helpers.gateway_connectivity(True)
logging.info("Network stabilized")
reboot.verify_reboot_performed()
deploy.check_expected_statistics(deployment_id, "success", len(get_mender_clients()))
assert Helpers.yocto_id_installed_on_machine() == expected_yocto_id
@MenderTesting.slow
@pytest.mark.parametrize("test_set", DOWNLOAD_RETRY_TIMEOUT_TEST_SETS)
def test_image_download_retry_timeout(self, test_set, install_image=conftest.get_valid_image()):
"""
        Install an update, and block the storage connection when we detect the image
        is being copied over to the inactive partition.
The test should result in a successful download retry.
"""
if not env.host_string:
execute(self.test_image_download_retry_timeout,
test_set,
hosts=get_mender_clients(),
install_image=install_image)
return
        # make the TCP timeouts quicker (non-persistent changes)
run("echo 2 > /proc/sys/net/ipv4/tcp_keepalive_time")
run("echo 2 > /proc/sys/net/ipv4/tcp_keepalive_intvl")
run("echo 3 > /proc/sys/net/ipv4/tcp_syn_retries")
        # to speed up timing out the client connection
run("echo 1 > /proc/sys/net/ipv4/tcp_keepalive_probes")
inactive_part = Helpers.get_passive_partition()
with Helpers.RebootDetector() as reboot:
if test_set['blockAfterStart']:
# Block after we start the download.
deployment_id, new_yocto_id = common_update_procedure(install_image)
for _ in range(60):
time.sleep(0.5)
with quiet():
# make sure we are writing to the inactive partition
output = run("fuser -mv %s" % (inactive_part))
if output.return_code == 0:
break
else:
pytest.fail("Download never started?")
# use iptables to block traffic to storage
Helpers.gateway_connectivity(False, hosts=["s3.docker.mender.io"]) # disable connectivity
if not test_set['blockAfterStart']:
# Block before we start the download.
deployment_id, new_yocto_id = common_update_procedure(install_image)
# re-enable connectivity after 2 retries
self.wait_for_download_retry_attempts(test_set['logMessageToLookFor'])
Helpers.gateway_connectivity(True, hosts=["s3.docker.mender.io"]) # re-enable connectivity
reboot.verify_reboot_performed()
deploy.check_expected_status("finished", deployment_id)
assert Helpers.get_active_partition() == inactive_part
assert Helpers.yocto_id_installed_on_machine() == new_yocto_id
reboot.verify_reboot_not_performed()
@MenderTesting.nightly
def test_image_download_retry_hosts_broken(self, install_image=conftest.get_valid_image()):
"""
Block storage host (minio) by modifying the hosts file.
"""
if not env.host_string:
execute(self.test_image_download_retry_hosts_broken,
hosts=get_mender_clients(),
install_image=install_image)
return
inactive_part = Helpers.get_passive_partition()
run("echo '1.1.1.1 s3.docker.mender.io' >> /etc/hosts") # break s3 connectivity before triggering deployment
with Helpers.RebootDetector() as reboot:
deployment_id, new_yocto_id = common_update_procedure(install_image)
            self.wait_for_download_retry_attempts("update fetch failed:")
run("sed -i.bak '/1.1.1.1/d' /etc/hosts")
reboot.verify_reboot_performed()
assert Helpers.get_active_partition() == inactive_part
assert Helpers.yocto_id_installed_on_machine() == new_yocto_id
reboot.verify_reboot_not_performed()
def test_rootfs_conf_missing_from_new_update(self):
"""Test that the client is able to reboot to roll back if module or rootfs
config is missing from the new partition. This only works for cases where a
reboot restores the state."""
if not env.host_string:
execute(self.test_rootfs_conf_missing_from_new_update,
hosts=get_mender_clients())
return
with settings(warn_only=True):
output = run("test -e /data/mender/mender.conf")
if output.return_code != 0:
pytest.skip("Needs split mender.conf configuration to run this test")
tmpdir = tempfile.mkdtemp()
try:
orig_image = conftest.get_valid_image()
image_name = os.path.join(tmpdir, os.path.basename(orig_image))
shutil.copyfile(orig_image, image_name)
# With the persistent mender.conf in /data, and the transient
# mender.conf in /etc, we can simply delete the former (rootfs
# config) to break the config, and add it back into the transient
# one to keep the config valid for the existing artifact (but not
# the new one).
output = run("cat /data/mender/mender.conf")
persistent_conf = json.loads(output)
run("rm /data/mender/mender.conf")
output = run("cat /etc/mender/mender.conf")
conf = json.loads(output)
conf["RootfsPartA"] = persistent_conf["RootfsPartA"]
conf["RootfsPartB"] = persistent_conf["RootfsPartB"]
mender_conf = os.path.join(tmpdir, "mender.conf")
with open(mender_conf, "w") as fd:
json.dump(conf, fd)
put(os.path.basename(mender_conf), local_path=os.path.dirname(mender_conf), remote_path="/etc/mender")
update_image_failed(install_image=image_name,
expected_log_message="Unable to roll back with a stub module, but will try to reboot to restore state")
finally:
shutil.rmtree(tmpdir)
|
raghavrv/scikit-learn
|
refs/heads/master
|
examples/covariance/plot_covariance_estimation.py
|
4
|
"""
=======================================================================
Shrinkage covariance estimation: LedoitWolf vs OAS and max-likelihood
=======================================================================
When working with covariance estimation, the usual approach is to use
a maximum likelihood estimator, such as the
:class:`sklearn.covariance.EmpiricalCovariance`. It is unbiased, i.e. it
converges to the true (population) covariance when given many
observations. However, it can also be beneficial to regularize it, in
order to reduce its variance; this, in turn, introduces some bias. This
example illustrates the simple regularization used in
:ref:`shrunk_covariance` estimators. In particular, it focuses on how to
set the amount of regularization, i.e. how to choose the bias-variance
trade-off.
Here we compare 3 approaches:
* Setting the parameter by cross-validating the likelihood on three folds
according to a grid of potential shrinkage parameters.
* A closed formula proposed by Ledoit and Wolf to compute
the asymptotically optimal regularization parameter (minimizing a MSE
criterion), yielding the :class:`sklearn.covariance.LedoitWolf`
covariance estimate.
* An improvement of the Ledoit-Wolf shrinkage, the
:class:`sklearn.covariance.OAS`, proposed by Chen et al. Its
convergence is significantly better under the assumption that the data
are Gaussian, in particular for small samples.
To quantify estimation error, we plot the likelihood of unseen data for
different values of the shrinkage parameter. We also show the choices by
cross-validation, or with the LedoitWolf and OAS estimates.
Note that the maximum likelihood estimate corresponds to no shrinkage,
and thus performs poorly. The Ledoit-Wolf estimate performs really well,
as it is close to the optimal and is not computationally costly. In this
example, the OAS estimate is a bit further away. Interestingly, both
approaches outperform cross-validation, which is significantly more
computationally costly.
"""
print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
from scipy import linalg
from sklearn.covariance import LedoitWolf, OAS, ShrunkCovariance, \
log_likelihood, empirical_covariance
from sklearn.model_selection import GridSearchCV
# #############################################################################
# Generate sample data
n_features, n_samples = 40, 20
np.random.seed(42)
base_X_train = np.random.normal(size=(n_samples, n_features))
base_X_test = np.random.normal(size=(n_samples, n_features))
# Color samples
coloring_matrix = np.random.normal(size=(n_features, n_features))
X_train = np.dot(base_X_train, coloring_matrix)
X_test = np.dot(base_X_test, coloring_matrix)
# #############################################################################
# Compute the likelihood on test data
# spanning a range of possible shrinkage coefficient values
shrinkages = np.logspace(-2, 0, 30)
negative_logliks = [-ShrunkCovariance(shrinkage=s).fit(X_train).score(X_test)
for s in shrinkages]
# under the ground-truth model, which we would not have access to in real
# settings
real_cov = np.dot(coloring_matrix.T, coloring_matrix)
emp_cov = empirical_covariance(X_train)
loglik_real = -log_likelihood(emp_cov, linalg.inv(real_cov))
# #############################################################################
# Compare different approaches to setting the parameter
# GridSearch for an optimal shrinkage coefficient
tuned_parameters = [{'shrinkage': shrinkages}]
cv = GridSearchCV(ShrunkCovariance(), tuned_parameters)
cv.fit(X_train)
# Ledoit-Wolf optimal shrinkage coefficient estimate
lw = LedoitWolf()
loglik_lw = lw.fit(X_train).score(X_test)
# OAS coefficient estimate
oa = OAS()
loglik_oa = oa.fit(X_train).score(X_test)
# #############################################################################
# Plot results
fig = plt.figure()
plt.title("Regularized covariance: likelihood and shrinkage coefficient")
plt.xlabel('Regularization parameter: shrinkage coefficient')
plt.ylabel('Error: negative log-likelihood on test data')
# range shrinkage curve
plt.loglog(shrinkages, negative_logliks, label="Negative log-likelihood")
plt.plot(plt.xlim(), 2 * [loglik_real], '--r',
label="Real covariance likelihood")
# adjust view
lik_max = np.amax(negative_logliks)
lik_min = np.amin(negative_logliks)
ymin = lik_min - 6. * np.log((plt.ylim()[1] - plt.ylim()[0]))
ymax = lik_max + 10. * np.log(lik_max - lik_min)
xmin = shrinkages[0]
xmax = shrinkages[-1]
# LW likelihood
plt.vlines(lw.shrinkage_, ymin, -loglik_lw, color='magenta',
linewidth=3, label='Ledoit-Wolf estimate')
# OAS likelihood
plt.vlines(oa.shrinkage_, ymin, -loglik_oa, color='purple',
linewidth=3, label='OAS estimate')
# best CV estimator likelihood
plt.vlines(cv.best_estimator_.shrinkage, ymin,
-cv.best_estimator_.score(X_test), color='cyan',
linewidth=3, label='Cross-validation best estimate')
plt.ylim(ymin, ymax)
plt.xlim(xmin, xmax)
plt.legend()
plt.show()
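
# #############################################################################
# Verification sketch (added for illustration; alpha is an arbitrary value):
# the shrinkage applied by ShrunkCovariance is simply a convex combination of
# the empirical covariance and a scaled identity matrix.
alpha = 0.5
shrunk = ShrunkCovariance(shrinkage=alpha).fit(X_train)
manual = ((1. - alpha) * emp_cov
          + alpha * np.trace(emp_cov) / n_features * np.eye(n_features))
assert np.allclose(shrunk.covariance_, manual)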
|
blaze/distributed
|
refs/heads/master
|
distributed/cli/utils.py
|
1
|
from tornado.ioloop import IOLoop
py3_err_msg = """
Warning: Your terminal does not set locales.
If you use unicode text inputs for command line options then this may cause
undesired behavior. This is rare.
If you don't use unicode characters in command line options then you can safely
ignore this message. This is the common case.
You can support unicode inputs by specifying encoding environment variables,
though exact solutions may depend on your system:
$ export LC_ALL=C.UTF-8
$ export LANG=C.UTF-8
For more information see: http://click.pocoo.org/5/python3/
""".lstrip()
def check_python_3():
"""Ensures that the environment is good for unicode on Python 3."""
# https://github.com/pallets/click/issues/448#issuecomment-246029304
import click.core
click.core._verify_python3_env = lambda: None
try:
from click import _unicodefun
_unicodefun._verify_python3_env()
except (TypeError, RuntimeError) as e:
import click
click.echo(py3_err_msg, err=True)
def install_signal_handlers(loop=None, cleanup=None):
"""
Install global signal handlers to halt the Tornado IOLoop in case of
    a SIGINT or SIGTERM. *cleanup* is an optional callback, called
    before the loop stops, with the signal number as its single argument.
"""
import signal
loop = loop or IOLoop.current()
old_handlers = {}
def handle_signal(sig, frame):
async def cleanup_and_stop():
try:
if cleanup is not None:
await cleanup(sig)
finally:
loop.stop()
loop.add_callback_from_signal(cleanup_and_stop)
# Restore old signal handler to allow for a quicker exit
# if the user sends the signal again.
signal.signal(sig, old_handlers[sig])
for sig in [signal.SIGINT, signal.SIGTERM]:
old_handlers[sig] = signal.signal(sig, handle_signal)
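

if __name__ == "__main__":
    # Usage sketch (illustrative only, not part of the distributed CLI): run a
    # Tornado IOLoop with the handlers above installed; the async cleanup callback
    # fires before the loop stops on SIGINT/SIGTERM.
    async def _cleanup(signum):
        print("caught signal %d, shutting down" % signum)

    check_python_3()
    _loop = IOLoop.current()
    install_signal_handlers(_loop, cleanup=_cleanup)
    _loop.start()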
|
clchiou/garage
|
refs/heads/master
|
shipyard/rules/py/cryptography/build.py
|
1
|
from templates import py
py.define_pip_package(
package='cryptography',
version='2.3.1',
patterns=[
'cryptography',
# And its dependencies.
'asn1crypto',
'cffi',
'_cffi_backend*',
'.libs_cffi_backend*',
'idna',
'pycparser',
'six.py',
],
)
|
hiei23/servo
|
refs/heads/master
|
tests/wpt/web-platform-tests/resource-timing/SyntheticResponse.py
|
61
|
import urllib
import time
def main(request, response):
index = request.request_path.index("?")
args = request.request_path[index+1:].split("&")
headersSent = 0
for arg in args:
if arg.startswith("ignored"):
continue
elif arg.endswith("ms"):
            time.sleep(float(arg[0:-2]) / 1E3)
elif arg.startswith("redirect:"):
return (302, "WEBPERF MARKETING"), [("Location", urllib.unquote(arg[9:]))], "TEST"
elif arg.startswith("mime:"):
response.headers.set("Content-Type", urllib.unquote(arg[5:]))
elif arg.startswith("send:"):
text = urllib.unquote(arg[5:])
if headersSent == 0:
response.write_status_headers()
headersSent = 1
response.writer.write(text)
# else:
# error " INVALID ARGUMENT %s" % arg
|
servo/servo
|
refs/heads/master
|
tests/wpt/web-platform-tests/tools/third_party/websockets/src/websockets/__init__.py
|
17
|
# This relies on each of the submodules having an __all__ variable.
from .auth import * # noqa
from .client import * # noqa
from .exceptions import * # noqa
from .protocol import * # noqa
from .server import * # noqa
from .typing import * # noqa
from .uri import * # noqa
from .version import version as __version__ # noqa
__all__ = [
"AbortHandshake",
"basic_auth_protocol_factory",
"BasicAuthWebSocketServerProtocol",
"connect",
"ConnectionClosed",
"ConnectionClosedError",
"ConnectionClosedOK",
"Data",
"DuplicateParameter",
"ExtensionHeader",
"ExtensionParameter",
"InvalidHandshake",
"InvalidHeader",
"InvalidHeaderFormat",
"InvalidHeaderValue",
"InvalidMessage",
"InvalidOrigin",
"InvalidParameterName",
"InvalidParameterValue",
"InvalidState",
"InvalidStatusCode",
"InvalidUpgrade",
"InvalidURI",
"NegotiationError",
"Origin",
"parse_uri",
"PayloadTooBig",
"ProtocolError",
"RedirectHandshake",
"SecurityError",
"serve",
"Subprotocol",
"unix_connect",
"unix_serve",
"WebSocketClientProtocol",
"WebSocketCommonProtocol",
"WebSocketException",
"WebSocketProtocolError",
"WebSocketServer",
"WebSocketServerProtocol",
"WebSocketURI",
]
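
# Usage sketch (kept as a comment; not part of the package): a minimal echo
# round-trip through the re-exported serve()/connect() helpers. Host and port
# are arbitrary example values.
#
#   import asyncio
#   import websockets
#
#   async def echo(websocket, path):
#       async for message in websocket:
#           await websocket.send(message)
#
#   async def main():
#       async with websockets.serve(echo, "localhost", 8765):
#           async with websockets.connect("ws://localhost:8765") as ws:
#               await ws.send("hello")
#               assert await ws.recv() == "hello"
#
#   asyncio.get_event_loop().run_until_complete(main())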
|
jjffryan/pymtl
|
refs/heads/master
|
pymtl/tools/deprecated/ast_typer.py
|
8
|
#=========================================================================
# ast_typer.py
#=========================================================================
# Create a simplified representation of the Python AST for help with
# source to source translation.
from __future__ import print_function
import ast, _ast
import re
from ...datatypes.Bits import Bits
from ...model.signals import InPort, OutPort
#-------------------------------------------------------------------------
# TypeAST
#-------------------------------------------------------------------------
# ASTTransformer which uses type information to simplify the AST:
#
# - clears references to the module
# - clears the decorator, attaches relevant notation to func instead
# - removes Index nodes
# - replaces Name nodes with Self if they reference the self object
# - replaces Name nodes with Temp if they reference a local temporary
# - replaces Subscript nodes with BitSlice if they reference a Bits
# or BitStruct object
# - replaces Subscript nodes with ArrayIndex if they reference a list
# - attaches object references to each node
# - removes '.next', '.value', '.n', and '.v' Attribute nodes on Ports
#
# TODO: fix ctx references on newly created nodes
#
class TypeAST( ast.NodeTransformer ):
def __init__( self, model, func ):
self.model = model
self.func = func
self.closed_vars = get_closure_dict( func )
self.current_obj = None
#-----------------------------------------------------------------------
# visit_Module
#-----------------------------------------------------------------------
def visit_Module( self, node ):
# visit children
self.generic_visit( node )
# copy the function body, delete module references
return ast.copy_location( node.body[0], node )
#-----------------------------------------------------------------------
# visit_FunctionDef
#-----------------------------------------------------------------------
def visit_FunctionDef( self, node ):
# visit children
self.generic_visit( node )
# TODO: add annotation to self.func based on decorator type
#dec = node.decorator_list[0].attr
# create a new FunctionDef node that deletes the decorators
#new_node = ast.FunctionDef( name=node.name, args=node.args,
# body=node.body, decorator_list=)
#return ast.copy_location( new_node, node )
return node
#-----------------------------------------------------------------------
# visit_Attribute
#-----------------------------------------------------------------------
def visit_Attribute( self, node ):
self.generic_visit( node )
# TODO: handle self.current_obj == None. These are temporary
# locals that we should check to ensure their types don't
# change!
if self.current_obj:
try :
x = self.current_obj.getattr( node.attr )
self.current_obj.update( node.attr, x )
except AttributeError:
if node.attr in ['next', 'value', 'n', 'v']:
node.value.ctx = node.ctx # Update the Load/Store information
return node.value
else:
raise Exception("Error: Unknown attribute for this object: {}"
.format( node.attr ) )
node._object = self.current_obj.inst if self.current_obj else None
return node
#-----------------------------------------------------------------------
# visit_Name
#-----------------------------------------------------------------------
def visit_Name( self, node ):
# If the name is not in closed_vars, it is a local temporary
if node.id not in self.closed_vars:
new_node = Temp( id=node.id )
new_obj = None
# If the name points to the model, this is a reference to self (or s)
elif self.closed_vars[ node.id ] is self.model:
new_node = Self( id=node.id )
new_obj = PyObj( '', self.closed_vars[ node.id ] )
# Otherwise, we have some other variable captured by the closure...
# TODO: should we allow this?
else:
new_node = node
new_obj = PyObj( node.id, self.closed_vars[ node.id ] )
# Store the new_obj
self.current_obj = new_obj
node._object = self.current_obj.inst if self.current_obj else None
# Return the new_node
return ast.copy_location( new_node, node )
#-----------------------------------------------------------------------
# visit_Subscript
#-----------------------------------------------------------------------
def visit_Subscript( self, node ):
# Visit the object being sliced
new_value = self.visit( node.value )
# Visit the index of the slice; stash and restore the current_obj
stash = self.current_obj
self.current_obj = None
new_slice = self.visit( node.slice )
self.current_obj = stash
# If current_obj not initialized, it is a local temp. Don't replace.
if not self.current_obj:
new_node = _ast.Subscript( value=new_value, slice=new_slice, ctx=node.ctx )
# If current_obj is a Bits object, replace with a BitSlice node.
elif isinstance( self.current_obj.inst, (Bits, InPort, OutPort) ):
new_node = BitSlice( value=new_value, slice=new_slice, ctx=node.ctx )
# If current_obj is a list object, replace with an ArrayIndex node.
elif isinstance( self.current_obj.inst, list ):
new_node = ArrayIndex( value=new_value, slice=new_slice, ctx=node.ctx )
# TODO: Want to do this for lists, but can't add attribute
# handling in translation instead
#self.current_obj.inst.name = self.current_obj.inst[0].name.split('[')[0]
# Otherwise, throw an exception
else:
print( self.current_obj )
raise Exception("Unknown type being subscripted!")
# Update the current_obj to contain the obj returned by subscript
# TODO: check that type of all elements in item are identical
# TODO: won't work for lists that are initially empty
# TODO: what about lists that initially contain None?
if self.current_obj:
self.current_obj.update( '[]', self.current_obj.inst[0] )
node._object = self.current_obj.inst if self.current_obj else None
return ast.copy_location( new_node, node )
#-----------------------------------------------------------------------
# visit_Index
#-----------------------------------------------------------------------
def visit_Index( self, node ):
# Remove Index nodes, they seem pointless
child = self.visit( node.value )
return ast.copy_location( child, node )
#-----------------------------------------------------------------------
# visit_Call
#-----------------------------------------------------------------------
# Specially handle certain function calls
def visit_Call( self, node ):
# func, args, keywords, starargs, kwargs
# Check that this is just a normal function call, not something weird
self.generic_visit( node )
if node.func.id == 'range':
if len( node.args ) == 1:
start = _ast.Num( n=0 )
stop = node.args[0]
step = _ast.Num( n=1 )
elif len( node.args ) == 2:
start = node.args[0]
stop = node.args[1]
step = _ast.Num( n=1 ) # TODO: should be an expression
elif len( node.args ) == 3:
start = node.args[0]
stop = node.args[1]
step = node.args[2]
else:
raise Exception("Invalid # of arguments to range function!")
new_node = _ast.Slice( lower=start, upper=stop, step=step )
else:
new_node = node
return ast.copy_location( new_node, node )
#------------------------------------------------------------------------
# PyObj
#------------------------------------------------------------------------
class PyObj( object ):
def __init__( self, name, inst ):
self.name = name
self.inst = inst
def update( self, name, inst ):
self.name += name
self.inst = inst
def getattr( self, name ):
return getattr( self.inst, name )
def __repr__( self ):
return "PyObj( name={} inst={} )".format( self.name, type(self.inst) )
#------------------------------------------------------------------------
# get_closure_dict
#------------------------------------------------------------------------
# http://stackoverflow.com/a/19416942
def get_closure_dict( fn ):
closure_objects = [c.cell_contents for c in fn.func_closure]
return dict( zip( fn.func_code.co_freevars, closure_objects ))
#------------------------------------------------------------------------
# ArrayIndex
#------------------------------------------------------------------------
class ArrayIndex( _ast.Subscript ):
pass
#------------------------------------------------------------------------
# BitSlice
#------------------------------------------------------------------------
class BitSlice( _ast.Subscript ):
pass
#------------------------------------------------------------------------
# Self
#------------------------------------------------------------------------
# New AST Node for references to self. Based on Name node.
class Self( _ast.Name ):
pass
#------------------------------------------------------------------------
# Temp
#------------------------------------------------------------------------
# New AST Node for local temporaries. Based on Name node.
class Temp( _ast.Name ):
pass
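#------------------------------------------------------------------------
# get_closure_dict: usage sketch
#------------------------------------------------------------------------
# Illustrative only (not part of the translator). This is the free-variable to
# object mapping that TypeAST consults when deciding whether a Name node refers
# to the model or to a local temporary:
#
#   def make_logic( model ):
#     s = model
#     def logic():
#       return s.out          # 's' is a free variable captured by the closure
#     return logic
#
#   get_closure_dict( make_logic( some_model ) )   # -> { 's': some_model }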
|
saurabh6790/omnisys-app
|
refs/heads/master
|
patches/may_2013/p05_update_cancelled_gl_entries.py
|
30
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import webnotes
from webnotes.utils import cint
def execute():
aii_enabled = cint(webnotes.defaults.get_global_default("auto_accounting_for_stock"))
if aii_enabled:
webnotes.conn.sql("""update `tabGL Entry` gle set is_cancelled = 'Yes'
where voucher_type = 'Delivery Note'
and exists(select name from `tabDelivery Note`
where name = gle.voucher_no and docstatus = 2)""")
|
teslaji/homebase
|
refs/heads/master
|
venv/HomeBase/lib/python3.5/site-packages/wheel/archive.py
|
239
|
"""
Archive tools for wheel.
"""
import logging
import os.path
import zipfile
log = logging.getLogger("wheel")
def archive_wheelfile(base_name, base_dir):
'''Archive all files under `base_dir` in a whl file and name it like
`base_name`.
'''
olddir = os.path.abspath(os.curdir)
base_name = os.path.abspath(base_name)
try:
os.chdir(base_dir)
return make_wheelfile_inner(base_name)
finally:
os.chdir(olddir)
def make_wheelfile_inner(base_name, base_dir='.'):
"""Create a whl file from all the files under 'base_dir'.
Places .dist-info at the end of the archive."""
zip_filename = base_name + ".whl"
log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)
# XXX support bz2, xz when available
zip = zipfile.ZipFile(open(zip_filename, "wb+"), "w",
compression=zipfile.ZIP_DEFLATED)
score = {'WHEEL': 1, 'METADATA': 2, 'RECORD': 3}
deferred = []
def writefile(path):
zip.write(path, path)
log.info("adding '%s'" % path)
for dirpath, dirnames, filenames in os.walk(base_dir):
for name in filenames:
path = os.path.normpath(os.path.join(dirpath, name))
if os.path.isfile(path):
if dirpath.endswith('.dist-info'):
deferred.append((score.get(name, 0), path))
else:
writefile(path)
deferred.sort()
for score, path in deferred:
writefile(path)
zip.close()
return zip_filename
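

# Usage sketch (illustrative paths; not part of the module): build
# dist/demo-1.0-py2.py3-none-any.whl from a staged wheel tree, with the
# .dist-info entries written at the end of the archive as described above.
#
#   archive_wheelfile("dist/demo-1.0-py2.py3-none-any", "build/bdist/wheel")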
|
reinhrst/panda
|
refs/heads/master
|
usr/lib/python2.7/shutil.py
|
32
|
"""Utility functions for copying and archiving files and directory trees.
XXX The functions here don't copy the resource fork or other metadata on Mac.
"""
import os
import sys
import stat
from os.path import abspath
import fnmatch
import collections
import errno
try:
from pwd import getpwnam
except ImportError:
getpwnam = None
try:
from grp import getgrnam
except ImportError:
getgrnam = None
__all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2",
"copytree", "move", "rmtree", "Error", "SpecialFileError",
"ExecError", "make_archive", "get_archive_formats",
"register_archive_format", "unregister_archive_format",
"ignore_patterns"]
class Error(EnvironmentError):
pass
class SpecialFileError(EnvironmentError):
"""Raised when trying to do a kind of operation (e.g. copying) which is
not supported on a special file (e.g. a named pipe)"""
class ExecError(EnvironmentError):
"""Raised when a command could not be executed"""
try:
WindowsError
except NameError:
WindowsError = None
def copyfileobj(fsrc, fdst, length=16*1024):
"""copy data from file-like object fsrc to file-like object fdst"""
while 1:
buf = fsrc.read(length)
if not buf:
break
fdst.write(buf)
def _samefile(src, dst):
# Macintosh, Unix.
if hasattr(os.path, 'samefile'):
try:
return os.path.samefile(src, dst)
except OSError:
return False
# All other platforms: check for same pathname.
return (os.path.normcase(os.path.abspath(src)) ==
os.path.normcase(os.path.abspath(dst)))
def copyfile(src, dst):
"""Copy data from src to dst"""
if _samefile(src, dst):
raise Error("`%s` and `%s` are the same file" % (src, dst))
for fn in [src, dst]:
try:
st = os.stat(fn)
except OSError:
# File most likely does not exist
pass
else:
# XXX What about other special files? (sockets, devices...)
if stat.S_ISFIFO(st.st_mode):
raise SpecialFileError("`%s` is a named pipe" % fn)
with open(src, 'rb') as fsrc:
with open(dst, 'wb') as fdst:
copyfileobj(fsrc, fdst)
def copymode(src, dst):
"""Copy mode bits from src to dst"""
if hasattr(os, 'chmod'):
st = os.stat(src)
mode = stat.S_IMODE(st.st_mode)
os.chmod(dst, mode)
def copystat(src, dst):
"""Copy all stat info (mode bits, atime, mtime, flags) from src to dst"""
st = os.stat(src)
mode = stat.S_IMODE(st.st_mode)
if hasattr(os, 'utime'):
os.utime(dst, (st.st_atime, st.st_mtime))
if hasattr(os, 'chmod'):
os.chmod(dst, mode)
if hasattr(os, 'chflags') and hasattr(st, 'st_flags'):
try:
os.chflags(dst, st.st_flags)
except OSError, why:
if (not hasattr(errno, 'EOPNOTSUPP') or
why.errno != errno.EOPNOTSUPP):
raise
def copy(src, dst):
"""Copy data and mode bits ("cp src dst").
The destination may be a directory.
"""
if os.path.isdir(dst):
dst = os.path.join(dst, os.path.basename(src))
copyfile(src, dst)
copymode(src, dst)
def copy2(src, dst):
"""Copy data and all stat info ("cp -p src dst").
The destination may be a directory.
"""
if os.path.isdir(dst):
dst = os.path.join(dst, os.path.basename(src))
copyfile(src, dst)
copystat(src, dst)
def ignore_patterns(*patterns):
"""Function that can be used as copytree() ignore parameter.
Patterns is a sequence of glob-style patterns
that are used to exclude files"""
def _ignore_patterns(path, names):
ignored_names = []
for pattern in patterns:
ignored_names.extend(fnmatch.filter(names, pattern))
return set(ignored_names)
return _ignore_patterns
def copytree(src, dst, symlinks=False, ignore=None):
"""Recursively copy a directory tree using copy2().
The destination directory must not already exist.
If exception(s) occur, an Error is raised with a list of reasons.
If the optional symlinks flag is true, symbolic links in the
source tree result in symbolic links in the destination tree; if
it is false, the contents of the files pointed to by symbolic
links are copied.
The optional ignore argument is a callable. If given, it
is called with the `src` parameter, which is the directory
being visited by copytree(), and `names` which is the list of
`src` contents, as returned by os.listdir():
callable(src, names) -> ignored_names
Since copytree() is called recursively, the callable will be
called once for each directory that is copied. It returns a
list of names relative to the `src` directory that should
not be copied.
XXX Consider this example code rather than the ultimate tool.
"""
names = os.listdir(src)
if ignore is not None:
ignored_names = ignore(src, names)
else:
ignored_names = set()
os.makedirs(dst)
errors = []
for name in names:
if name in ignored_names:
continue
srcname = os.path.join(src, name)
dstname = os.path.join(dst, name)
try:
if symlinks and os.path.islink(srcname):
linkto = os.readlink(srcname)
os.symlink(linkto, dstname)
elif os.path.isdir(srcname):
copytree(srcname, dstname, symlinks, ignore)
else:
# Will raise a SpecialFileError for unsupported file types
copy2(srcname, dstname)
# catch the Error from the recursive copytree so that we can
# continue with other files
except Error, err:
errors.extend(err.args[0])
except EnvironmentError, why:
errors.append((srcname, dstname, str(why)))
try:
copystat(src, dst)
except OSError, why:
if WindowsError is not None and isinstance(why, WindowsError):
# Copying file access times may fail on Windows
pass
else:
errors.extend((src, dst, str(why)))
if errors:
raise Error, errors
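
# Usage sketch for the `ignore` callable described above (illustrative paths):
#
#   copytree('src_tree', 'dst_tree',
#            ignore=ignore_patterns('*.pyc', 'tmp*'))   # skip bytecode and tmp* entries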
def rmtree(path, ignore_errors=False, onerror=None):
"""Recursively delete a directory tree.
If ignore_errors is set, errors are ignored; otherwise, if onerror
is set, it is called to handle the error with arguments (func,
path, exc_info) where func is os.listdir, os.remove, or os.rmdir;
path is the argument to that function that caused it to fail; and
exc_info is a tuple returned by sys.exc_info(). If ignore_errors
is false and onerror is None, an exception is raised.
"""
if ignore_errors:
def onerror(*args):
pass
elif onerror is None:
def onerror(*args):
raise
try:
if os.path.islink(path):
# symlinks to directories are forbidden, see bug #1669
raise OSError("Cannot call rmtree on a symbolic link")
except OSError:
onerror(os.path.islink, path, sys.exc_info())
# can't continue even if onerror hook returns
return
names = []
try:
names = os.listdir(path)
except os.error, err:
onerror(os.listdir, path, sys.exc_info())
for name in names:
fullname = os.path.join(path, name)
try:
mode = os.lstat(fullname).st_mode
except os.error:
mode = 0
if stat.S_ISDIR(mode):
rmtree(fullname, ignore_errors, onerror)
else:
try:
os.remove(fullname)
except os.error, err:
onerror(os.remove, fullname, sys.exc_info())
try:
os.rmdir(path)
except os.error:
onerror(os.rmdir, path, sys.exc_info())
def _basename(path):
# A basename() variant which first strips the trailing slash, if present.
# Thus we always get the last component of the path, even for directories.
return os.path.basename(path.rstrip(os.path.sep))
def move(src, dst):
"""Recursively move a file or directory to another location. This is
similar to the Unix "mv" command.
If the destination is a directory or a symlink to a directory, the source
is moved inside the directory. The destination path must not already
exist.
If the destination already exists but is not a directory, it may be
overwritten depending on os.rename() semantics.
If the destination is on our current filesystem, then rename() is used.
Otherwise, src is copied to the destination and then removed.
A lot more could be done here... A look at a mv.c shows a lot of
the issues this implementation glosses over.
"""
real_dst = dst
if os.path.isdir(dst):
if _samefile(src, dst):
# We might be on a case insensitive filesystem,
# perform the rename anyway.
os.rename(src, dst)
return
real_dst = os.path.join(dst, _basename(src))
if os.path.exists(real_dst):
raise Error, "Destination path '%s' already exists" % real_dst
try:
os.rename(src, real_dst)
except OSError:
if os.path.isdir(src):
if _destinsrc(src, dst):
raise Error, "Cannot move a directory '%s' into itself '%s'." % (src, dst)
copytree(src, real_dst, symlinks=True)
rmtree(src)
else:
copy2(src, real_dst)
os.unlink(src)
def _destinsrc(src, dst):
src = abspath(src)
dst = abspath(dst)
if not src.endswith(os.path.sep):
src += os.path.sep
if not dst.endswith(os.path.sep):
dst += os.path.sep
return dst.startswith(src)
def _get_gid(name):
"""Returns a gid, given a group name."""
if getgrnam is None or name is None:
return None
try:
result = getgrnam(name)
except KeyError:
result = None
if result is not None:
return result[2]
return None
def _get_uid(name):
"""Returns an uid, given a user name."""
if getpwnam is None or name is None:
return None
try:
result = getpwnam(name)
except KeyError:
result = None
if result is not None:
return result[2]
return None
def _make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0,
owner=None, group=None, logger=None):
"""Create a (possibly compressed) tar file from all the files under
'base_dir'.
'compress' must be "gzip" (the default), "bzip2", or None.
'owner' and 'group' can be used to define an owner and a group for the
archive that is being built. If not provided, the current owner and group
will be used.
The output tar file will be named 'base_name' + ".tar", possibly plus
the appropriate compression extension (".gz", or ".bz2").
Returns the output filename.
"""
tar_compression = {'gzip': 'gz', 'bzip2': 'bz2', None: ''}
compress_ext = {'gzip': '.gz', 'bzip2': '.bz2'}
# flags for compression program, each element of list will be an argument
if compress is not None and compress not in compress_ext.keys():
raise ValueError, \
("bad value for 'compress': must be None, 'gzip' or 'bzip2'")
archive_name = base_name + '.tar' + compress_ext.get(compress, '')
archive_dir = os.path.dirname(archive_name)
if not os.path.exists(archive_dir):
if logger is not None:
logger.info("creating %s", archive_dir)
if not dry_run:
os.makedirs(archive_dir)
# creating the tarball
import tarfile # late import so Python build itself doesn't break
if logger is not None:
logger.info('Creating tar archive')
uid = _get_uid(owner)
gid = _get_gid(group)
def _set_uid_gid(tarinfo):
if gid is not None:
tarinfo.gid = gid
tarinfo.gname = group
if uid is not None:
tarinfo.uid = uid
tarinfo.uname = owner
return tarinfo
if not dry_run:
tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress])
try:
tar.add(base_dir, filter=_set_uid_gid)
finally:
tar.close()
return archive_name
def _call_external_zip(base_dir, zip_filename, verbose=False, dry_run=False):
# XXX see if we want to keep an external call here
if verbose:
zipoptions = "-r"
else:
zipoptions = "-rq"
from distutils.errors import DistutilsExecError
from distutils.spawn import spawn
try:
spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run)
except DistutilsExecError:
# XXX really should distinguish between "couldn't find
# external 'zip' command" and "zip failed".
raise ExecError, \
("unable to create zip file '%s': "
"could neither import the 'zipfile' module nor "
"find a standalone zip utility") % zip_filename
def _make_zipfile(base_name, base_dir, verbose=0, dry_run=0, logger=None):
"""Create a zip file from all the files under 'base_dir'.
The output zip file will be named 'base_name' + ".zip". Uses either the
"zipfile" Python module (if available) or the InfoZIP "zip" utility
(if installed and found on the default search path). If neither tool is
available, raises ExecError. Returns the name of the output zip
file.
"""
zip_filename = base_name + ".zip"
archive_dir = os.path.dirname(base_name)
if not os.path.exists(archive_dir):
if logger is not None:
logger.info("creating %s", archive_dir)
if not dry_run:
os.makedirs(archive_dir)
# If zipfile module is not available, try spawning an external 'zip'
# command.
try:
import zipfile
except ImportError:
zipfile = None
if zipfile is None:
_call_external_zip(base_dir, zip_filename, verbose, dry_run)
else:
if logger is not None:
logger.info("creating '%s' and adding '%s' to it",
zip_filename, base_dir)
if not dry_run:
zip = zipfile.ZipFile(zip_filename, "w",
compression=zipfile.ZIP_DEFLATED)
for dirpath, dirnames, filenames in os.walk(base_dir):
for name in filenames:
path = os.path.normpath(os.path.join(dirpath, name))
if os.path.isfile(path):
zip.write(path, path)
if logger is not None:
logger.info("adding '%s'", path)
zip.close()
return zip_filename
_ARCHIVE_FORMATS = {
'gztar': (_make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"),
'bztar': (_make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"),
'tar': (_make_tarball, [('compress', None)], "uncompressed tar file"),
'zip': (_make_zipfile, [],"ZIP file")
}
def get_archive_formats():
"""Returns a list of supported formats for archiving and unarchiving.
Each element of the returned sequence is a tuple (name, description)
"""
formats = [(name, registry[2]) for name, registry in
_ARCHIVE_FORMATS.items()]
formats.sort()
return formats
def register_archive_format(name, function, extra_args=None, description=''):
"""Registers an archive format.
name is the name of the format. function is the callable that will be
used to create archives. If provided, extra_args is a sequence of
(name, value) tuples that will be passed as arguments to the callable.
description can be provided to describe the format, and will be returned
by the get_archive_formats() function.
"""
if extra_args is None:
extra_args = []
if not isinstance(function, collections.Callable):
raise TypeError('The %s object is not callable' % function)
if not isinstance(extra_args, (tuple, list)):
raise TypeError('extra_args needs to be a sequence')
for element in extra_args:
if not isinstance(element, (tuple, list)) or len(element) !=2 :
raise TypeError('extra_args elements are : (arg_name, value)')
_ARCHIVE_FORMATS[name] = (function, extra_args, description)
def unregister_archive_format(name):
del _ARCHIVE_FORMATS[name]
def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0,
dry_run=0, owner=None, group=None, logger=None):
"""Create an archive file (eg. zip or tar).
'base_name' is the name of the file to create, minus any format-specific
extension; 'format' is the archive format: one of "zip", "tar", "bztar"
or "gztar".
'root_dir' is a directory that will be the root directory of the
archive; ie. we typically chdir into 'root_dir' before creating the
archive. 'base_dir' is the directory where we start archiving from;
ie. 'base_dir' will be the common prefix of all files and
directories in the archive. 'root_dir' and 'base_dir' both default
to the current directory. Returns the name of the archive file.
'owner' and 'group' are used when creating a tar archive. By default,
uses the current owner and group.
"""
save_cwd = os.getcwd()
if root_dir is not None:
if logger is not None:
logger.debug("changing into '%s'", root_dir)
base_name = os.path.abspath(base_name)
if not dry_run:
os.chdir(root_dir)
if base_dir is None:
base_dir = os.curdir
kwargs = {'dry_run': dry_run, 'logger': logger}
try:
format_info = _ARCHIVE_FORMATS[format]
except KeyError:
raise ValueError, "unknown archive format '%s'" % format
func = format_info[0]
for arg, val in format_info[1]:
kwargs[arg] = val
if format != 'zip':
kwargs['owner'] = owner
kwargs['group'] = group
try:
filename = func(base_name, base_dir, **kwargs)
finally:
if root_dir is not None:
if logger is not None:
logger.debug("changing back to '%s'", save_cwd)
os.chdir(save_cwd)
return filename
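

# Usage sketch (illustrative paths; not part of the module):
#
#   # gzip'ed tarball of ./project; the archive members are prefixed "project/"
#   make_archive('/tmp/project-backup', 'gztar', root_dir='.', base_dir='project')
#
#   # plug in a custom format and use it like the built-in ones
#   def _make_null(base_name, base_dir, **kwargs):
#       open(base_name + '.null', 'w').close()
#       return base_name + '.null'
#   register_archive_format('null', _make_null, description='empty placeholder file')
#   make_archive('/tmp/placeholder', 'null')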
|
zephyrplugins/zephyr
|
refs/heads/master
|
zephyr.plugin.jython/jython2.5.2rc3/Lib/test/test_hmac.py
|
10
|
import hmac
import sha
import hashlib
import unittest
from test import test_support
class TestVectorsTestCase(unittest.TestCase):
def test_md5_vectors(self):
# Test the HMAC module against test vectors from the RFC.
def md5test(key, data, digest):
h = hmac.HMAC(key, data)
self.assertEqual(h.hexdigest().upper(), digest.upper())
md5test(chr(0x0b) * 16,
"Hi There",
"9294727A3638BB1C13F48EF8158BFC9D")
md5test("Jefe",
"what do ya want for nothing?",
"750c783e6ab0b503eaa86e310a5db738")
md5test(chr(0xAA)*16,
chr(0xDD)*50,
"56be34521d144c88dbb8c733f0e8b3f6")
md5test("".join([chr(i) for i in range(1, 26)]),
chr(0xCD) * 50,
"697eaf0aca3a3aea3a75164746ffaa79")
md5test(chr(0x0C) * 16,
"Test With Truncation",
"56461ef2342edc00f9bab995690efd4c")
md5test(chr(0xAA) * 80,
"Test Using Larger Than Block-Size Key - Hash Key First",
"6b1ab7fe4bd7bf8f0b62e6ce61b9d0cd")
md5test(chr(0xAA) * 80,
("Test Using Larger Than Block-Size Key "
"and Larger Than One Block-Size Data"),
"6f630fad67cda0ee1fb1f562db3aa53e")
def test_sha_vectors(self):
def shatest(key, data, digest):
h = hmac.HMAC(key, data, digestmod=sha)
self.assertEqual(h.hexdigest().upper(), digest.upper())
shatest(chr(0x0b) * 20,
"Hi There",
"b617318655057264e28bc0b6fb378c8ef146be00")
shatest("Jefe",
"what do ya want for nothing?",
"effcdf6ae5eb2fa2d27416d5f184df9c259a7c79")
shatest(chr(0xAA)*20,
chr(0xDD)*50,
"125d7342b9ac11cd91a39af48aa17b4f63f175d3")
shatest("".join([chr(i) for i in range(1, 26)]),
chr(0xCD) * 50,
"4c9007f4026250c6bc8414f9bf50c86c2d7235da")
shatest(chr(0x0C) * 20,
"Test With Truncation",
"4c1a03424b55e07fe7f27be1d58bb9324a9a5a04")
shatest(chr(0xAA) * 80,
"Test Using Larger Than Block-Size Key - Hash Key First",
"aa4ae5e15272d00e95705637ce8a3b55ed402112")
shatest(chr(0xAA) * 80,
("Test Using Larger Than Block-Size Key "
"and Larger Than One Block-Size Data"),
"e8e99d0f45237d786d6bbaa7965c7808bbff1a91")
def _rfc4231_test_cases(self, hashfunc):
def hmactest(key, data, hexdigests):
h = hmac.HMAC(key, data, digestmod=hashfunc)
self.assertEqual(h.hexdigest().lower(), hexdigests[hashfunc])
# 4.2. Test Case 1
hmactest(key = '\x0b'*20,
data = 'Hi There',
hexdigests = {
hashlib.sha224: '896fb1128abbdf196832107cd49df33f'
'47b4b1169912ba4f53684b22',
hashlib.sha256: 'b0344c61d8db38535ca8afceaf0bf12b'
'881dc200c9833da726e9376c2e32cff7',
hashlib.sha384: 'afd03944d84895626b0825f4ab46907f'
'15f9dadbe4101ec682aa034c7cebc59c'
'faea9ea9076ede7f4af152e8b2fa9cb6',
hashlib.sha512: '87aa7cdea5ef619d4ff0b4241a1d6cb0'
'2379f4e2ce4ec2787ad0b30545e17cde'
'daa833b7d6b8a702038b274eaea3f4e4'
'be9d914eeb61f1702e696c203a126854',
})
# 4.3. Test Case 2
hmactest(key = 'Jefe',
data = 'what do ya want for nothing?',
hexdigests = {
hashlib.sha224: 'a30e01098bc6dbbf45690f3a7e9e6d0f'
'8bbea2a39e6148008fd05e44',
hashlib.sha256: '5bdcc146bf60754e6a042426089575c7'
'5a003f089d2739839dec58b964ec3843',
hashlib.sha384: 'af45d2e376484031617f78d2b58a6b1b'
'9c7ef464f5a01b47e42ec3736322445e'
'8e2240ca5e69e2c78b3239ecfab21649',
hashlib.sha512: '164b7a7bfcf819e2e395fbe73b56e0a3'
'87bd64222e831fd610270cd7ea250554'
'9758bf75c05a994a6d034f65f8f0e6fd'
'caeab1a34d4a6b4b636e070a38bce737',
})
# 4.4. Test Case 3
hmactest(key = '\xaa'*20,
data = '\xdd'*50,
hexdigests = {
hashlib.sha224: '7fb3cb3588c6c1f6ffa9694d7d6ad264'
'9365b0c1f65d69d1ec8333ea',
hashlib.sha256: '773ea91e36800e46854db8ebd09181a7'
'2959098b3ef8c122d9635514ced565fe',
hashlib.sha384: '88062608d3e6ad8a0aa2ace014c8a86f'
'0aa635d947ac9febe83ef4e55966144b'
'2a5ab39dc13814b94e3ab6e101a34f27',
hashlib.sha512: 'fa73b0089d56a284efb0f0756c890be9'
'b1b5dbdd8ee81a3655f83e33b2279d39'
'bf3e848279a722c806b485a47e67c807'
'b946a337bee8942674278859e13292fb',
})
# 4.5. Test Case 4
hmactest(key = ''.join([chr(x) for x in xrange(0x01, 0x19+1)]),
data = '\xcd'*50,
hexdigests = {
hashlib.sha224: '6c11506874013cac6a2abc1bb382627c'
'ec6a90d86efc012de7afec5a',
hashlib.sha256: '82558a389a443c0ea4cc819899f2083a'
'85f0faa3e578f8077a2e3ff46729665b',
hashlib.sha384: '3e8a69b7783c25851933ab6290af6ca7'
'7a9981480850009cc5577c6e1f573b4e'
'6801dd23c4a7d679ccf8a386c674cffb',
hashlib.sha512: 'b0ba465637458c6990e5a8c5f61d4af7'
'e576d97ff94b872de76f8050361ee3db'
'a91ca5c11aa25eb4d679275cc5788063'
'a5f19741120c4f2de2adebeb10a298dd',
})
# 4.7. Test Case 6
hmactest(key = '\xaa'*131,
data = 'Test Using Larger Than Block-Siz'
'e Key - Hash Key First',
hexdigests = {
hashlib.sha224: '95e9a0db962095adaebe9b2d6f0dbce2'
'd499f112f2d2b7273fa6870e',
hashlib.sha256: '60e431591ee0b67f0d8a26aacbf5b77f'
'8e0bc6213728c5140546040f0ee37f54',
hashlib.sha384: '4ece084485813e9088d2c63a041bc5b4'
'4f9ef1012a2b588f3cd11f05033ac4c6'
'0c2ef6ab4030fe8296248df163f44952',
hashlib.sha512: '80b24263c7c1a3ebb71493c1dd7be8b4'
'9b46d1f41b4aeec1121b013783f8f352'
'6b56d037e05f2598bd0fd2215d6a1e52'
'95e64f73f63f0aec8b915a985d786598',
})
# 4.8. Test Case 7
hmactest(key = '\xaa'*131,
data = 'This is a test using a larger th'
'an block-size key and a larger t'
'han block-size data. The key nee'
'ds to be hashed before being use'
'd by the HMAC algorithm.',
hexdigests = {
hashlib.sha224: '3a854166ac5d9f023f54d517d0b39dbd'
'946770db9c2b95c9f6f565d1',
hashlib.sha256: '9b09ffa71b942fcb27635fbcd5b0e944'
'bfdc63644f0713938a7f51535c3a35e2',
hashlib.sha384: '6617178e941f020d351e2f254e8fd32c'
'602420feb0b8fb9adccebb82461e99c5'
'a678cc31e799176d3860e6110c46523e',
hashlib.sha512: 'e37b6a775dc87dbaa4dfa9f96e5e3ffd'
'debd71f8867289865df5a32d20cdc944'
'b6022cac3c4982b10d5eeb55c3e4de15'
'134676fb6de0446065c97440fa8c6a58',
})
def test_sha224_rfc4231(self):
self._rfc4231_test_cases(hashlib.sha224)
def test_sha256_rfc4231(self):
self._rfc4231_test_cases(hashlib.sha256)
def test_sha384_rfc4231(self):
self._rfc4231_test_cases(hashlib.sha384)
def test_sha512_rfc4231(self):
self._rfc4231_test_cases(hashlib.sha512)
class ConstructorTestCase(unittest.TestCase):
def test_normal(self):
# Standard constructor call.
failed = 0
try:
h = hmac.HMAC("key")
except:
self.fail("Standard constructor call raised exception.")
def test_withtext(self):
# Constructor call with text.
try:
h = hmac.HMAC("key", "hash this!")
except:
self.fail("Constructor call with text argument raised exception.")
def test_withmodule(self):
# Constructor call with text and digest module.
import sha
try:
h = hmac.HMAC("key", "", sha)
except:
self.fail("Constructor call with sha module raised exception.")
class SanityTestCase(unittest.TestCase):
def test_default_is_md5(self):
# Testing if HMAC defaults to MD5 algorithm.
# NOTE: this whitebox test depends on the hmac class internals
import hashlib
h = hmac.HMAC("key")
self.failUnless(h.digest_cons == hashlib.md5)
def test_exercise_all_methods(self):
# Exercising all methods once.
# This must not raise any exceptions
try:
h = hmac.HMAC("my secret key")
h.update("compute the hash of this text!")
dig = h.digest()
dig = h.hexdigest()
h2 = h.copy()
except:
self.fail("Exception raised during normal usage of HMAC class.")
class CopyTestCase(unittest.TestCase):
def test_attributes(self):
# Testing if attributes are of same type.
h1 = hmac.HMAC("key")
h2 = h1.copy()
self.failUnless(h1.digest_cons == h2.digest_cons,
"digest constructors don't match.")
self.failUnless(type(h1.inner) == type(h2.inner),
"Types of inner don't match.")
self.failUnless(type(h1.outer) == type(h2.outer),
"Types of outer don't match.")
def test_realcopy(self):
# Testing if the copy method created a real copy.
h1 = hmac.HMAC("key")
h2 = h1.copy()
# Using id() in case somebody has overridden __cmp__.
self.failUnless(id(h1) != id(h2), "No real copy of the HMAC instance.")
self.failUnless(id(h1.inner) != id(h2.inner),
"No real copy of the attribute 'inner'.")
self.failUnless(id(h1.outer) != id(h2.outer),
"No real copy of the attribute 'outer'.")
def test_equality(self):
# Testing if the copy has the same digests.
h1 = hmac.HMAC("key")
h1.update("some random text")
h2 = h1.copy()
self.failUnless(h1.digest() == h2.digest(),
"Digest of copy doesn't match original digest.")
self.failUnless(h1.hexdigest() == h2.hexdigest(),
"Hexdigest of copy doesn't match original hexdigest.")
def test_main():
if test_support.is_jython:
# XXX: Jython doesn't support sha224
del TestVectorsTestCase.test_sha224_rfc4231
hashlib.sha224 = None
test_support.run_unittest(
TestVectorsTestCase,
ConstructorTestCase,
SanityTestCase,
CopyTestCase
)
if __name__ == "__main__":
test_main()
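# --- Illustrative sketch, not part of the original test module ---
# The RFC 4231 "Test Case 2" vector exercised above can also be checked
# directly against the stdlib hmac module; the key, message and expected
# SHA-256 digest are taken verbatim from the test data above.
import hmac
import hashlib

mac = hmac.new("Jefe", "what do ya want for nothing?", hashlib.sha256)
assert mac.hexdigest() == ("5bdcc146bf60754e6a042426089575c7"
                           "5a003f089d2739839dec58b964ec3843")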
|
jswope00/griffinx
|
refs/heads/master
|
common/djangoapps/enrollment/urls.py
|
14
|
"""
URLs for the Enrollment API
"""
from django.conf import settings
from django.conf.urls import patterns, url
from .views import (
EnrollmentView,
EnrollmentListView,
EnrollmentCourseDetailView
)
USER_PATTERN = '(?P<user>[\w.@+-]+)'
urlpatterns = patterns(
'enrollment.views',
url(
r'^enrollment/{user},{course_key}$'.format(user=USER_PATTERN, course_key=settings.COURSE_ID_PATTERN),
EnrollmentView.as_view(),
name='courseenrollment'
),
url(
r'^enrollment/{course_key}$'.format(course_key=settings.COURSE_ID_PATTERN),
EnrollmentView.as_view(),
name='courseenrollment'
),
url(r'^enrollment$', EnrollmentListView.as_view(), name='courseenrollments'),
url(
r'^course/{course_key}$'.format(course_key=settings.COURSE_ID_PATTERN),
EnrollmentCourseDetailView.as_view(),
name='courseenrollmentdetails'
),
)
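# --- Illustrative sketch, not part of the original urls.py ---
# The named routes above can be resolved with Django's reverse(); the URL
# prefix depends on where this urlconf is included, and the course-key
# kwarg name comes from settings.COURSE_ID_PATTERN, so both are assumptions.
from django.core.urlresolvers import reverse

list_url = reverse('courseenrollments')        # the bare 'enrollment' route
detail_url = reverse(
    'courseenrollmentdetails',
    kwargs={'course_id': 'org/course/run'},    # hypothetical course key
)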
|
wd5/jangr
|
refs/heads/master
|
sorl/thumbnail/engines/pgmagick_engine.py
|
11
|
from pgmagick import Blob, ColorspaceType, Geometry, Image, ImageType
from pgmagick import InterlaceType, OrientationType
from sorl.thumbnail.engines.base import EngineBase
try:
from pgmagick._pgmagick import get_blob_data
except ImportError:
from base64 import b64decode
def get_blob_data(blob):
return b64decode(blob.base64())
class Engine(EngineBase):
def get_image(self, source):
blob = Blob()
blob.update(source.read())
return Image(blob)
def get_image_size(self, image):
geometry = image.size()
return geometry.width(), geometry.height()
def is_valid_image(self, raw_data):
blob = Blob()
blob.update(raw_data)
im = Image(blob)
return im.isValid()
def _orientation(self, image):
orientation = image.orientation()
if orientation == OrientationType.TopRightOrientation:
image.flop()
elif orientation == OrientationType.BottomRightOrientation:
image.rotate(180)
elif orientation == OrientationType.BottomLeftOrientation:
image.flip()
elif orientation == OrientationType.LeftTopOrientation:
image.rotate(90)
image.flop()
elif orientation == OrientationType.RightTopOrientation:
image.rotate(90)
elif orientation == OrientationType.RightBottomOrientation:
image.rotate(-90)
image.flop()
elif orientation == OrientationType.LeftBottomOrientation:
image.rotate(-90)
image.orientation(OrientationType.TopLeftOrientation)
return image
def _colorspace(self, image, colorspace):
if colorspace == 'RGB':
image.type(ImageType.TrueColorMatteType)
elif colorspace == 'GRAY':
image.type(ImageType.GrayscaleMatteType)
else:
return image
return image
def _scale(self, image, width, height):
geometry = Geometry(width, height)
image.scale(geometry)
return image
def _crop(self, image, width, height, x_offset, y_offset):
geometry = Geometry(width, height, x_offset, y_offset)
image.crop(geometry)
return image
def _get_raw_data(self, image, format_, quality, progressive=False):
image.magick(format_.encode('utf8'))
image.quality(quality)
if format_ == 'JPEG' and progressive:
image.interlaceType(InterlaceType.LineInterlace)
blob = Blob()
image.write(blob)
return get_blob_data(blob)
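# --- Illustrative sketch, not part of the original engine module ---
# Exercising the Engine methods defined above end to end; the input file
# name is hypothetical and pgmagick must be installed for this to run.
engine = Engine()
with open('photo.jpg', 'rb') as source:
    image = engine.get_image(source)
width, height = engine.get_image_size(image)
image = engine._scale(image, width // 2, height // 2)
jpeg_bytes = engine._get_raw_data(image, 'JPEG', 85, progressive=True)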
|
devinbalkind/eden
|
refs/heads/master
|
tests/unit_tests/modules/s3/s3gis/WFSLayer.py
|
43
|
s3gis_tests = load_module("tests.unit_tests.modules.s3.s3gis")
def test_WFSLayer():
s3gis_tests.layer_test(
db,
db.gis_layer_wfs,
dict(
name = "Test WFS",
description = "Test WFS layer",
enabled = True,
created_on = datetime.datetime.now(),
modified_on = datetime.datetime.now(),
url = "test://test_WFS",
visible = True,
),
"S3.gis.layers_wfs",
[
{
"name": u"Test WFS",
"url": u"test://test_WFS",
"featureNS": None,
"featureType": None,
"schema": None,
"title": u"name",
}
],
session = session,
request = request,
)
|
sebastien-forestier/explaupoppydiva
|
refs/heads/master
|
scripts/cogsci2016/analysis_mvt.py
|
1
|
import numpy as np
import matplotlib.pylab as plt
from explaupoppydiva.environment.cogsci2016 import CogSci2016Environment
env_cfg = dict(
max_params=[500, 500, 500, 500, 500, 500, 500, 500, 1, 1, 1, 1],
gui=True)
env = CogSci2016Environment(**env_cfg)
m = [0.02]*12
# m = [ 0.48981662, -0.23193498, 0.26313222, -0.29216035, 0.28578779, -0.63969026,
# 0.82305975, 0.86425687, -0.05611152, -0.34211432, -0.60495534, -0.93206953]
#####1 1 2 2 3 3 4 4
print env.update(m)
m_traj = env.compute_motor_command(m)
m_traj = np.clip(m_traj, -1, 1)
fig, ax = plt.subplots(figsize=(18,5))
ax.plot(m_traj, lw=3)
# x_track = env.motor_dmp.dmp.cs.rollout()
# psi_track = env.motor_dmp.dmp.gen_psi(x_track)
# print "centers", env.motor_dmp.dmp.c
# print "std", 1. / np.sqrt(env.motor_dmp.dmp.h)
# print x_track, psi_track
# fig, ax = plt.subplots()
# ax.plot(psi_track)
plt.xlim([0, 49])
plt.ylim([-1.1, 1.1])
plt.tick_params(labelsize=16)
plt.legend(["Joint 1", "Joint 2", "Joint 3", "Gripper"],fontsize=18)
#plt.savefig("/home/sforesti/scm/PhD/cogsci2016/include/dmp.pdf", format='pdf', dpi=1000, bbox_inches='tight')
plt.show()
|
zenefits/sentry
|
refs/heads/master
|
src/sentry/utils/db.py
|
10
|
"""
sentry.utils.db
~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
import six
from django.conf import settings
from django.db import connections, DEFAULT_DB_ALIAS
from django.db.models.fields.related import SingleRelatedObjectDescriptor
def get_db_engine(alias='default'):
value = settings.DATABASES[alias]['ENGINE']
if value == 'mysql.connector.django':
return 'mysql'
return value.rsplit('.', 1)[-1]
def is_postgres(alias='default'):
engine = get_db_engine(alias)
return 'postgres' in engine
def is_mysql(alias='default'):
engine = get_db_engine(alias)
return 'mysql' in engine
def is_sqlite(alias='default'):
engine = get_db_engine(alias)
return 'sqlite' in engine
def has_charts(db):
if is_sqlite(db):
return False
return True
def attach_foreignkey(objects, field, related=[], database=None):
"""
Shortcut method which handles a pythonic LEFT OUTER JOIN.
``attach_foreignkey(posts, Post.thread)``
Works with both ForeignKey and OneToOne (reverse) lookups.
"""
if not objects:
return
if database is None:
database = list(objects)[0]._state.db
is_foreignkey = isinstance(field, SingleRelatedObjectDescriptor)
if not is_foreignkey:
field = field.field
accessor = '_%s_cache' % field.name
model = field.rel.to
lookup = 'pk'
column = field.column
key = lookup
else:
accessor = field.cache_name
field = field.related.field
model = field.model
lookup = field.name
column = 'pk'
key = field.column
objects = [o for o in objects if (related or getattr(o, accessor, False) is False)]
if not objects:
return
# Ensure values are unique, do not contain already present values, and are not missing
# values specified in select_related
values = set(filter(None, (getattr(o, column) for o in objects)))
if values:
qs = model.objects
if database:
qs = qs.using(database)
if related:
qs = qs.select_related(*related)
if len(values) > 1:
qs = qs.filter(**{'%s__in' % lookup: values})
else:
qs = [qs.get(**{lookup: six.next(iter(values))})]
queryset = dict((getattr(o, key), o) for o in qs)
else:
queryset = {}
for o in objects:
setattr(o, accessor, queryset.get(getattr(o, column)))
def table_exists(name, using=DEFAULT_DB_ALIAS):
return name in connections[using].introspection.table_names()
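# --- Illustrative sketch, not part of the original module ---
# Pre-caching a ForeignKey for a batch of rows, as the attach_foreignkey
# docstring suggests; 'Post' and its 'thread'/'forum' relations are
# hypothetical models used only for this example.
posts = list(Post.objects.filter(is_public=True)[:100])
attach_foreignkey(posts, Post.thread, related=['forum'])
for post in posts:
    title = post.thread.title   # served from the per-object cache, no extra query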
|